index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ReduceProcessorFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.demux.processor.reducer;
import java.util.HashMap;
import org.apache.log4j.Logger;
public class ReduceProcessorFactory {
  static Logger log = Logger.getLogger(ReduceProcessorFactory.class);

  // TODO
  // add new mapper package at the end.
  // We should have a more generic way to do this.
  // Ex: read from config
  // list of alias
  // and
  // alias -> processor class
  // ******** WARNING ********
  // If the ReduceProcessor is not there use Identity instead

  // Registry of reduceType -> shared processor instance.  Guarded by the
  // class lock: register() and getProcessor() are both static synchronized,
  // so their check-then-act sequences on this map are atomic.
  private static HashMap<String, ReduceProcessor> processors = new HashMap<String, ReduceProcessor>(); // registry

  // Static factory; not instantiable.
  private ReduceProcessorFactory() {
  }

  /**
   * Register a specific parser for a {@link ReduceProcessor} implementation.
   * @param reduceType is data type assigned by mapper
   * @param processor is parser class for reducer
   * @throws DuplicateReduceProcessorException if reduceType is already registered
   */
  public static synchronized void register(String reduceType,
      ReduceProcessor processor) {
    log.info("register " + processor.getClass().getName()
        + " for this recordType :" + reduceType);
    if (processors.containsKey(reduceType)) {
      throw new DuplicateReduceProcessorException(
          "Duplicate processor for recordType:" + reduceType);
    }
    ReduceProcessorFactory.processors.put(reduceType, processor);
  }

  /**
   * Return the cached processor registered under {@code processorClass},
   * instantiating it reflectively on first use.
   *
   * Synchronized on the same class lock as {@link #register}: previously the
   * containsKey/register pair was unsynchronized here, so two racing threads
   * could both miss the lookup and the second register() call would throw a
   * spurious DuplicateReduceProcessorException.
   *
   * @param processorClass fully-qualified ReduceProcessor class name
   * @return shared processor instance
   * @throws UnknownReduceTypeException if the class is missing or cannot be built
   */
  public static synchronized ReduceProcessor getProcessor(String processorClass)
      throws UnknownReduceTypeException {
    if (processors.containsKey(processorClass)) {
      return processors.get(processorClass);
    }
    ReduceProcessor processor = null;
    try {
      // getConstructor().newInstance() matches ProcessorFactory and avoids
      // the deprecated Class.newInstance().
      processor = (ReduceProcessor) Class.forName(processorClass)
          .getConstructor().newInstance();
    } catch (ClassNotFoundException e) {
      throw new UnknownReduceTypeException("Unknown reducer class for:" + processorClass, e);
    } catch (Exception e) {
      throw new UnknownReduceTypeException("error constructing processor", e);
    }
    // TODO using a ThreadSafe/reuse flag to actually decide if we want
    // to reuse the same processor again and again
    register(processorClass, processor);
    return processor;
  }
}
| 8,400 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/AbstractProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.hbase;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.writer.hbase.Reporter;
import org.apache.hadoop.chukwa.util.HBaseUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.log4j.Logger;
/**
 * Base class for HBase demux processors.  A processor converts one Chukwa
 * {@link Chunk} into HBase {@link Put} operations appended to the caller's
 * output list, and reports metric/source names through a {@link Reporter}.
 *
 * NOTE(review): process() stashes per-chunk state (output, reporter, chunk,
 * time) in instance fields, so one instance does not look safe to share
 * across threads — confirm callers use a processor per thread.
 */
public abstract class AbstractProcessor {
  static Logger LOG = Logger.getLogger(AbstractProcessor.class);
  // Count of processed entries; not updated here — presumably maintained by
  // subclasses (TODO confirm).
  protected int entryCount = 0;
  // Prefix for metric primary keys; process() sets it to the chunk data type.
  protected String primaryKeyHelper;
  // Data source name; process() sets it to the chunk source.
  protected String sourceHelper;
  protected byte[] key = null;
  // Column family "t" used for metric cells.
  byte[] CF = "t".getBytes(Charset.forName("UTF-8"));
  boolean chunkInErrorSaved = false;
  // Per-chunk state populated by process() and read by addRecord()/addMeta().
  ArrayList<Put> output = null;
  ArrayList<Put> meta = null;
  Reporter reporter = null;
  // Cell timestamp; parse() implementations typically overwrite this with the
  // record's own timestamp.
  long time = System.currentTimeMillis();
  Chunk chunk = null;
  MessageDigest md5 = null;
  public AbstractProcessor() throws NoSuchAlgorithmException {
    md5 = MessageDigest.getInstance("md5");
  }
  /** Parse one raw record and emit Puts; called by process() after setup. */
  protected abstract void parse(byte[] recordEntry) throws Throwable;
  /**
   * Generic metric function to add a metric to HBase with full primary key and
   * source computed.
   *
   * @param time is timestamp in epoch
   * @param metric is metric name
   * @param source is data source name
   * @param value is metric value in bytes
   * @param output is an array list of Put operations
   */
  public void addRecord(long time, String metric, String source, byte[] value,
      ArrayList<Put> output) {
    // Row key is built from time, "<dataType>.<metric>" and source.
    String primaryKey = new StringBuilder(primaryKeyHelper).append(".")
        .append(metric).toString();
    byte[] key = HBaseUtil.buildKey(time, primaryKey, source);
    Put put = new Put(key);
    // Column qualifier is the big-endian 8-byte timestamp.
    byte[] timeInBytes = ByteBuffer.allocate(8).putLong(time).array();
    put.addColumn(CF, timeInBytes, time, value);
    output.add(put);
    reporter.putMetric(chunk.getDataType(), primaryKey);
    reporter.putSource(chunk.getDataType(), source);
  }
  /**
   * Convenience overload: store a string value using UTF-8 bytes.
   *
   * @param primaryKey metric name (passed through to addRecord(String, byte[]))
   * @param value metric value as text
   */
  public void addRecord(String primaryKey, String value) {
    addRecord(primaryKey, value.getBytes(Charset.forName("UTF-8")));
  }
  /**
   * Generic function to add a metric to HBase metric table, this function
   * assumes "time" and "source" have been defined and will construct primaryKey
   * only, without recompute time and source md5.
   *
   * @param metric is metric name
   * @param value is metric value in bytes
   */
  public void addRecord(String metric, byte[] value) {
    String primaryKey = new StringBuilder(primaryKeyHelper).append(".")
        .append(metric).toString();
    byte[] key = HBaseUtil.buildKey(time, primaryKey, sourceHelper);
    Put put = new Put(key);
    byte[] timeInBytes = ByteBuffer.allocate(8).putLong(time).array();
    put.addColumn(CF, timeInBytes, time, value);
    output.add(put);
    reporter.putMetric(chunk.getDataType(), primaryKey);
  }
  /**
   * Process a chunk to store in HBase.
   *
   * @param chunk is a Chukwa chunk
   * @param output is an array of Put operations
   * @param reporter is a reporter to track progress
   * @throws Throwable if there is problem parsing data
   */
  public void process(Chunk chunk, ArrayList<Put> output, Reporter reporter)
      throws Throwable {
    // Stash per-chunk state so parse()/addRecord()/addMeta() can use it.
    this.output = output;
    this.reporter = reporter;
    this.chunk = chunk;
    this.primaryKeyHelper = chunk.getDataType();
    this.sourceHelper = chunk.getSource();
    reporter.putSource(primaryKeyHelper, sourceHelper);
    parse(chunk.getData());
    addMeta();
  }
  /**
   * Append a metadata Put: the chunk's tags stored under column family "a",
   * keyed by time, data type and source.
   */
  protected void addMeta() {
    byte[] key = HBaseUtil.buildKey(time, chunk.getDataType(), sourceHelper);
    Put put = new Put(key);
    String family = "a";
    byte[] timeInBytes = ByteBuffer.allocate(8).putLong(time).array();
    put.addColumn(family.getBytes(Charset.forName("UTF-8")), timeInBytes, time, chunk.getTags().getBytes(Charset.forName("UTF-8")));
    output.add(put);
  }
}
| 8,401 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/ChukwaMetricsProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.hbase;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.writer.hbase.Reporter;
import org.apache.hadoop.hbase.client.Put;
import org.apache.log4j.Logger;
/**
 * Processor for Chukwa's own metrics stream.  Inherits the JSON parsing from
 * {@link HadoopMetricsProcessor}; its process() additionally reads the
 * "cluster" chunk tag and reports the cluster name before parsing.
 */
public class ChukwaMetricsProcessor extends HadoopMetricsProcessor {
  static Logger LOG = Logger.getLogger(ChukwaMetricsProcessor.class);
  public ChukwaMetricsProcessor() throws NoSuchAlgorithmException {
    super();
  }
  /**
   * Process cluster name and store in HBase.
   *
   * @param chunk is a Chukwa data chunk
   * @param output is a list of Put operations
   * @param reporter is progress reporter
   * @throws Throwable if unable to send data
   */
  @Override
  public void process(Chunk chunk, ArrayList<Put> output, Reporter reporter)
      throws Throwable {
    // Stash per-chunk state used by the inherited parse()/addMeta().
    this.output = output;
    this.reporter = reporter;
    this.chunk = chunk;
    this.primaryKeyHelper = chunk.getDataType();
    this.sourceHelper = chunk.getSource();
    String clusterName = chunk.getTag("cluster");
    reporter.putSource(primaryKeyHelper, sourceHelper);
    // Extra step vs. the superclass: index this data type by cluster name.
    reporter.putClusterName(primaryKeyHelper, clusterName);
    parse(chunk.getData());
    addMeta();
  }
}
| 8,402 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/HadoopMetricsProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.hbase;
import java.nio.charset.Charset;
import java.security.NoSuchAlgorithmException;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import org.apache.log4j.Logger;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
public class HadoopMetricsProcessor extends AbstractProcessor {
  static Logger LOG = Logger.getLogger(HadoopMetricsProcessor.class);
  // Envelope fields present in every metrics JSON object.
  static final String timestampField = "timestamp";
  static final String contextNameField = "contextName";
  static final String recordNameField = "recordName";
  static final String hostName = "Hostname";
  static final String processName = "ProcessName";
  static final byte[] cf = "t".getBytes(Charset.forName("UTF-8"));

  public HadoopMetricsProcessor() throws NoSuchAlgorithmException {
  }

  /**
   * Parse a chunk containing one or more concatenated JSON objects, one per
   * metrics record.  Envelope fields supply the timestamp, metric-name prefix
   * ({@code <contextName>.<recordName>}) and source; every other key/value
   * pair is emitted as a metric via addRecord().  Unparsable data is logged
   * and skipped.
   */
  @SuppressWarnings("unchecked")
  @Override
  protected void parse(byte[] recordEntry) throws Throwable {
    String body = new String(recordEntry, Charset.forName("UTF-8"));
    int start = 0;
    int end = 0;
    try {
      while (true) {
        // Locate the next '{' ... '}' fragment.  Previously the '}' lookup
        // ran before the start == -1 check, and a missing '}' was "handled"
        // by letting substring() throw into the catch block below.
        start = body.indexOf('{', end);
        if (start == -1) {
          break; // no more records in this chunk
        }
        end = body.indexOf('}', start) + 1;
        if (end == 0) { // indexOf returned -1: truncated fragment, stop
          break;
        }
        JSONObject json = (JSONObject) JSONValue.parse(body.substring(start, end));
        time = ((Long) json.get(timestampField)).longValue();
        String contextName = (String) json.get(contextNameField);
        String recordName = (String) json.get(recordNameField);
        String src = ((String) json.get(hostName)).toLowerCase();
        if (json.get(processName) != null) {
          // Qualify the source with the emitting process, e.g. "host:DataNode".
          src = new StringBuilder(src).append(":").append(json.get(processName)).toString();
        }
        for (Entry<String, Object> entry : (Set<Map.Entry>) json.entrySet()) {
          String keyName = entry.getKey();
          // Envelope fields are metadata, not metrics: skip them.  Compare
          // with equals() instead of identity on interned strings.
          if (timestampField.equals(keyName)
              || contextNameField.equals(keyName)
              || recordNameField.equals(keyName)
              || hostName.equals(keyName)
              || processName.equals(keyName)) {
            continue;
          }
          if (entry.getValue() != null) {
            String v = entry.getValue().toString();
            String primaryKey = new StringBuilder(contextName).append(".")
                .append(recordName).append(".").append(keyName).toString();
            addRecord(time, primaryKey, src, v.getBytes(Charset.forName("UTF-8")), output);
          }
        }
      }
    } catch (Exception e) {
      LOG.warn("Unparsable data:");
      LOG.warn(body);
      LOG.warn(ExceptionUtil.getStackTrace(e));
      // Skip unparsable data.
    }
  }
}
| 8,403 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/ProcessorFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.hbase;
import java.util.HashMap;
import org.apache.log4j.Logger;
public class ProcessorFactory {
  static Logger log = Logger.getLogger(ProcessorFactory.class);

  // Registry of parser class name -> shared processor instance.  Accessed
  // only from the synchronized getProcessor(), so the check-then-put
  // sequence is atomic; previously the HashMap was completely unsynchronized
  // and concurrent callers could corrupt it or build duplicate instances.
  private static HashMap<String, AbstractProcessor> processors = new HashMap<String, AbstractProcessor>(); // registry

  public ProcessorFactory() {
  }

  /**
   * Return the cached processor for {@code parserClass}, instantiating it
   * reflectively on first use.
   *
   * @param parserClass fully-qualified AbstractProcessor subclass name
   * @return shared processor instance
   * @throws UnknownRecordTypeException if the class is missing or cannot be built
   */
  public static synchronized AbstractProcessor getProcessor(String parserClass)
      throws UnknownRecordTypeException {
    if (processors.containsKey(parserClass)) {
      return processors.get(parserClass);
    }
    AbstractProcessor processor = null;
    try {
      processor = (AbstractProcessor) Class.forName(parserClass).getConstructor()
          .newInstance();
    } catch (ClassNotFoundException e) {
      throw new UnknownRecordTypeException("Unknown parserClass:"
          + parserClass, e);
    } catch (Exception e) {
      throw new UnknownRecordTypeException("error constructing processor", e);
    }
    // TODO using a ThreadSafe/reuse flag to actually decide if we want
    // to reuse the same processor again and again
    processors.put(parserClass, processor);
    return processor;
  }
}
| 8,404 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/LogEntry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.hbase;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
public class LogEntry {
private SimpleDateFormat sdf = new SimpleDateFormat(
"yyyy-MM-dd HH:mm");
private Date date;
private String logLevel;
private String className;
private String body;
public LogEntry(String recordEntry) throws ParseException {
String dStr = recordEntry.substring(0, 23);
date = sdf.parse(dStr);
int start = 24;
int idx = recordEntry.indexOf(' ', start);
logLevel = recordEntry.substring(start, idx);
start = idx + 1;
idx = recordEntry.indexOf(' ', start);
className = recordEntry.substring(start, idx - 1);
body = recordEntry.substring(idx + 1);
}
public Date getDate() {
return (Date) date.clone();
}
public void setDate(Date date) {
this.date = (Date) date.clone();
}
public String getLogLevel() {
return logLevel;
}
public String getClassName() {
return className;
}
public String getBody() {
return body;
}
}
| 8,405 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/DefaultProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.hbase;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.chukwa.util.HBaseUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.log4j.Logger;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
/**
 * Fallback processor for chunks with no dedicated parser: stores the raw
 * chunk bytes keyed by time, data type and source, and reports a metadata
 * entry marking the type as unknown.
 */
public class DefaultProcessor extends AbstractProcessor {
  static Logger LOG = Logger.getLogger(DefaultProcessor.class);

  public DefaultProcessor() throws NoSuchAlgorithmException {
    super();
  }

  @Override
  protected void parse(byte[] recordEntry) throws Throwable {
    byte[] key = HBaseUtil.buildKey(time, chunk.getDataType(),
        chunk.getSource());
    Put put = new Put(key);
    byte[] timeInBytes = ByteBuffer.allocate(8).putLong(time).array();
    // Pass the explicit cell timestamp, consistent with addRecord()/addMeta()
    // in AbstractProcessor; the previous 3-arg addColumn left the cell
    // timestamp to the server side.
    put.addColumn("t".getBytes(Charset.forName("UTF-8")), timeInBytes, time,
        chunk.getData());
    output.add(put);
    Type defaultType = new TypeToken<Map<String, String>>() {
    }.getType();
    Gson gson = new Gson();
    // Renamed from "meta" to avoid shadowing the inherited ArrayList<Put>
    // field of the same name (which has a different type).
    Map<String, String> metaInfo = new HashMap<String, String>();
    metaInfo.put("sig", new String(key, Charset.forName("UTF-8")));
    metaInfo.put("type", "unknown");
    String buffer = gson.toJson(metaInfo, defaultType);
    reporter.put(chunk.getDataType(), chunk.getSource(), buffer);
  }
}
| 8,406 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/SystemMetrics.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Demux parser for system metrics data collected through
* org.apache.hadoop.chukwa.datacollection.adaptor.sigar.SystemMetrics.
*/
package org.apache.hadoop.chukwa.extraction.hbase;
import java.nio.charset.Charset;
import java.security.NoSuchAlgorithmException;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
public class SystemMetrics extends AbstractProcessor {

  public SystemMetrics() throws NoSuchAlgorithmException {
    super();
  }

  /**
   * Parse a sigar system-metrics snapshot (JSON) and emit one HBase record
   * per metric: per-core cpu values plus cross-core averages, uptime, load
   * averages, memory, swap, aggregated network counters and aggregated disk
   * counters.
   *
   * Cleanups vs. the previous version: removed the repeatedly re-allocated
   * local ChukwaRecord that was populated but never emitted anywhere;
   * removed needless boxing ("reads + Long.valueOf(value);;"); guarded the
   * average and percent computations against division by zero, which used
   * to store the string "NaN".
   */
  @SuppressWarnings("unchecked")
  @Override
  protected void parse(byte[] recordEntry) throws Throwable {
    String buffer = new String(recordEntry, Charset.forName("UTF-8"));
    JSONObject json = (JSONObject) JSONValue.parse(buffer);
    time = ((Long) json.get("timestamp")).longValue();

    // --- CPU: per-core metrics, plus averages over cores that reported data.
    JSONArray cpuList = (JSONArray) json.get("cpu");
    double combined = 0.0;
    double user = 0.0;
    double sys = 0.0;
    double idle = 0.0;
    int actualSize = 0;
    for (int i = 0; i < cpuList.size(); i++) {
      JSONObject cpu = (JSONObject) cpuList.get(i);
      // Work around for sigar returning null sometimes for cpu metrics on
      // pLinux.
      if (cpu.get("combined") == null) {
        continue;
      }
      actualSize++;
      combined = combined + Double.parseDouble(cpu.get("combined").toString());
      user = user + Double.parseDouble(cpu.get("user").toString());
      sys = sys + Double.parseDouble(cpu.get("sys").toString());
      idle = idle + Double.parseDouble(cpu.get("idle").toString());
      for (Entry<String, Object> entry : (Set<Map.Entry>) cpu.entrySet()) {
        String key = entry.getKey();
        addRecord("cpu." + key + "." + i, String.valueOf(entry.getValue()));
      }
    }
    // Avoid 0/0 -> "NaN" records when no core reported a "combined" value.
    if (actualSize > 0) {
      combined = combined / actualSize;
      user = user / actualSize;
      sys = sys / actualSize;
      idle = idle / actualSize;
    }
    addRecord("cpu.combined", Double.toString(combined));
    addRecord("cpu.user", Double.toString(user));
    addRecord("cpu.idle", Double.toString(idle));
    addRecord("cpu.sys", Double.toString(sys));

    addRecord("Uptime", json.get("uptime").toString());
    JSONArray loadavg = (JSONArray) json.get("loadavg");
    addRecord("LoadAverage.1", loadavg.get(0).toString());
    addRecord("LoadAverage.5", loadavg.get(1).toString());
    addRecord("LoadAverage.15", loadavg.get(2).toString());

    // --- Memory: one record per reported field.
    JSONObject memory = (JSONObject) json.get("memory");
    for (Entry<String, Object> entry : (Set<Map.Entry>) memory.entrySet()) {
      addRecord("memory." + entry.getKey(), String.valueOf(entry.getValue()));
    }

    // --- Swap: one record per reported field.
    JSONObject swap = (JSONObject) json.get("swap");
    for (Map.Entry<String, Object> entry : (Set<Map.Entry>) swap.entrySet()) {
      addRecord("swap." + entry.getKey(), String.valueOf(entry.getValue()));
    }

    // --- Network: aggregate counters over all interfaces except index 0.
    // NOTE(review): index 0 is presumably the loopback interface — confirm
    // against the sigar adaptor's interface ordering.
    double rxBytes = 0;
    double rxDropped = 0;
    double rxErrors = 0;
    double rxPackets = 0;
    double txBytes = 0;
    double txCollisions = 0;
    double txErrors = 0;
    double txPackets = 0;
    JSONArray netList = (JSONArray) json.get("network");
    for (int i = 0; i < netList.size(); i++) {
      JSONObject netIf = (JSONObject) netList.get(i);
      for (Map.Entry<String, Object> entry : (Set<Map.Entry>) netIf.entrySet()) {
        String key = entry.getKey();
        long value = 0;
        if (entry.getValue() instanceof Long) {
          value = (Long) entry.getValue();
        }
        if (i != 0) {
          if (key.equals("RxBytes")) {
            rxBytes = rxBytes + value;
          } else if (key.equals("RxDropped")) {
            rxDropped = rxDropped + value;
          } else if (key.equals("RxErrors")) {
            rxErrors = rxErrors + value;
          } else if (key.equals("RxPackets")) {
            rxPackets = rxPackets + value;
          } else if (key.equals("TxBytes")) {
            txBytes = txBytes + value;
          } else if (key.equals("TxCollisions")) {
            txCollisions = txCollisions + value;
          } else if (key.equals("TxErrors")) {
            txErrors = txErrors + value;
          } else if (key.equals("TxPackets")) {
            txPackets = txPackets + value;
          }
        }
      }
    }
    addRecord("network.RxBytes", Double.toString(rxBytes));
    addRecord("network.RxDropped", Double.toString(rxDropped));
    addRecord("network.RxErrors", Double.toString(rxErrors));
    addRecord("network.RxPackets", Double.toString(rxPackets));
    addRecord("network.TxBytes", Double.toString(txBytes));
    addRecord("network.TxCollisions", Double.toString(txCollisions));
    addRecord("network.TxErrors", Double.toString(txErrors));
    addRecord("network.TxPackets", Double.toString(txPackets));

    // --- Disk: aggregate counters over all devices.
    double readBytes = 0;
    double reads = 0;
    double writeBytes = 0;
    double writes = 0;
    double total = 0;
    double used = 0;
    JSONArray diskList = (JSONArray) json.get("disk");
    for (int i = 0; i < diskList.size(); i++) {
      JSONObject disk = (JSONObject) diskList.get(i);
      for (Entry<String, Object> entry : (Set<Map.Entry>) disk.entrySet()) {
        String key = entry.getKey();
        long value = 0;
        if (entry.getValue() instanceof Long) {
          value = (Long) entry.getValue();
        }
        if (key.equals("ReadBytes")) {
          readBytes = readBytes + value;
        } else if (key.equals("Reads")) {
          reads = reads + value;
        } else if (key.equals("WriteBytes")) {
          writeBytes = writeBytes + value;
        } else if (key.equals("Writes")) {
          writes = writes + value;
        } else if (key.equals("Total")) {
          total = total + value;
        } else if (key.equals("Used")) {
          used = used + value;
        }
      }
    }
    // Avoid 0/0 -> NaN when no device reported a Total.
    double percentUsed = (total > 0) ? used / total : 0.0;
    addRecord("disk.ReadBytes", Double.toString(readBytes));
    addRecord("disk.Reads", Double.toString(reads));
    addRecord("disk.WriteBytes", Double.toString(writeBytes));
    addRecord("disk.Writes", Double.toString(writes));
    addRecord("disk.Total", Double.toString(total));
    addRecord("disk.Used", Double.toString(used));
    addRecord("disk.PercentUsed", Double.toString(percentUsed));
  }
}
| 8,407 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/hbase/UnknownRecordTypeException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.hbase;
/**
 * Thrown by {@link ProcessorFactory#getProcessor(String)} when the requested
 * parser class cannot be found or instantiated.
 */
public class UnknownRecordTypeException extends Exception {
  /**
   *
   */
  private static final long serialVersionUID = 8925135975093252279L;
  public UnknownRecordTypeException() {
  }
  public UnknownRecordTypeException(String message) {
    super(message);
  }
  public UnknownRecordTypeException(Throwable cause) {
    super(cause);
  }
  /** Preferred form: keeps both context message and root cause. */
  public UnknownRecordTypeException(String message, Throwable cause) {
    super(message, cause);
  }
}
| 8,408 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java | // File generated by hadoop record compiler. Do not edit.
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine;
public class ChukwaRecordKey extends org.apache.hadoop.record.Record {
private static final org.apache.hadoop.record.meta.RecordTypeInfo _rio_recTypeInfo;
private static org.apache.hadoop.record.meta.RecordTypeInfo _rio_rtiFilter;
private static int[] _rio_rtiFilterFields;
static {
_rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo(
"ChukwaRecordKey");
_rio_recTypeInfo.addField("reduceType",
org.apache.hadoop.record.meta.TypeID.StringTypeID);
_rio_recTypeInfo.addField("key",
org.apache.hadoop.record.meta.TypeID.StringTypeID);
}
private String reduceType;
private String key;
public ChukwaRecordKey() {
}
public ChukwaRecordKey(final String reduceType, final String key) {
this.reduceType = reduceType;
this.key = key;
}
public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
return _rio_recTypeInfo;
}
public static void setTypeFilter(
org.apache.hadoop.record.meta.RecordTypeInfo rti) {
if (null == rti)
return;
_rio_rtiFilter = rti;
_rio_rtiFilterFields = null;
}
private static void setupRtiFields() {
if (null == _rio_rtiFilter)
return;
// we may already have done this
if (null != _rio_rtiFilterFields)
return;
int _rio_i, _rio_j;
_rio_rtiFilterFields = new int[_rio_rtiFilter.getFieldTypeInfos().size()];
for (_rio_i = 0; _rio_i < _rio_rtiFilterFields.length; _rio_i++) {
_rio_rtiFilterFields[_rio_i] = 0;
}
java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter
.getFieldTypeInfos().iterator();
_rio_i = 0;
while (_rio_itFilter.hasNext()) {
org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter
.next();
java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo
.getFieldTypeInfos().iterator();
_rio_j = 1;
while (_rio_it.hasNext()) {
org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
if (_rio_tInfo.equals(_rio_tInfoFilter)) {
_rio_rtiFilterFields[_rio_i] = _rio_j;
break;
}
_rio_j++;
}
_rio_i++;
}
}
public String getReduceType() {
return reduceType;
}
public void setReduceType(final String reduceType) {
this.reduceType = reduceType;
}
public String getKey() {
return key;
}
public void setKey(final String key) {
this.key = key;
}
public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a,
final String _rio_tag) throws java.io.IOException {
_rio_a.startRecord(this, _rio_tag);
_rio_a.writeString(reduceType, "reduceType");
_rio_a.writeString(key, "key");
_rio_a.endRecord(this, _rio_tag);
}
private void deserializeWithoutFilter(
final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
throws java.io.IOException {
_rio_a.startRecord(_rio_tag);
reduceType = _rio_a.readString("reduceType");
key = _rio_a.readString("key");
_rio_a.endRecord(_rio_tag);
}
/**
 * Deserializes this key. Without a filter this is a positional read; with a
 * filter, fields are read in the writer's order using the mapping prepared
 * by {@code setupRtiFields()}, and unknown writer fields are skipped.
 */
public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a,
    final String _rio_tag) throws java.io.IOException {
  if (null == _rio_rtiFilter) {
    deserializeWithoutFilter(_rio_a, _rio_tag);
    return;
  }
  // if we're here, we need to read based on version info
  _rio_a.startRecord(_rio_tag);
  setupRtiFields();
  for (int _rio_i = 0; _rio_i < _rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) {
    if (1 == _rio_rtiFilterFields[_rio_i]) {
      reduceType = _rio_a.readString("reduceType");
    } else if (2 == _rio_rtiFilterFields[_rio_i]) {
      key = _rio_a.readString("key");
    } else {
      // Field present in the writer's schema but not in ours: skip its bytes.
      java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
          .getFieldTypeInfos());
      org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
          .getFieldID(), typeInfos.get(_rio_i).getTypeID());
    }
  }
  _rio_a.endRecord(_rio_tag);
}
/** Orders keys lexicographically by reduceType first, then by key. */
public int compareTo(final Object _rio_peer_) throws ClassCastException {
  if (!(_rio_peer_ instanceof ChukwaRecordKey)) {
    throw new ClassCastException("Comparing different types of records.");
  }
  ChukwaRecordKey _rio_peer = (ChukwaRecordKey) _rio_peer_;
  int _rio_ret = 0;
  _rio_ret = reduceType.compareTo(_rio_peer.reduceType);
  if (_rio_ret != 0)
    return _rio_ret;
  _rio_ret = key.compareTo(_rio_peer.key);
  if (_rio_ret != 0)
    return _rio_ret;
  return _rio_ret;
}
/** Two keys are equal iff both reduceType and key are equal. */
public boolean equals(final Object _rio_peer_) {
  if (!(_rio_peer_ instanceof ChukwaRecordKey)) {
    return false;
  }
  if (_rio_peer_ == this) {
    return true;
  }
  ChukwaRecordKey _rio_peer = (ChukwaRecordKey) _rio_peer_;
  boolean _rio_ret = false;
  _rio_ret = reduceType.equals(_rio_peer.reduceType);
  if (!_rio_ret)
    return _rio_ret;
  _rio_ret = key.equals(_rio_peer.key);
  if (!_rio_ret)
    return _rio_ret;
  return _rio_ret;
}
/**
 * Field-by-field copy (record-compiler emitted style: the result of
 * {@code super.clone()} is intentionally discarded and a fresh instance is
 * populated instead).
 */
public Object clone() throws CloneNotSupportedException {
  super.clone();
  ChukwaRecordKey _rio_other = new ChukwaRecordKey();
  _rio_other.reduceType = this.reduceType;
  _rio_other.key = this.key;
  return _rio_other;
}
/** Standard 17/37 hash over reduceType and key, consistent with equals(). */
public int hashCode() {
  int _rio_result = 17;
  int _rio_ret;
  _rio_ret = reduceType.hashCode();
  _rio_result = 37 * _rio_result + _rio_ret;
  _rio_ret = key.hashCode();
  _rio_result = 37 * _rio_result + _rio_ret;
  return _rio_result;
}
/** @return the record-compiler type signature: two string fields ("ss"). */
public static String signature() {
  return "LChukwaRecordKey(ss)";
}
/**
 * Raw-bytes comparator for serialized ChukwaRecordKey values, allowing sorts
 * without deserialization. NOTE(review): generated convention — compareRaw
 * returns -1 when the first operand sorts lower, 0 when it sorts higher, and
 * a negative consumed-byte delta when both fields are equal; compare() remaps
 * these to the conventional -1/0/1 range.
 */
public static class Comparator extends
    org.apache.hadoop.record.RecordComparator {
  public Comparator() {
    super(ChukwaRecordKey.class);
  }

  /**
   * Advances past one serialized key (two vInt-length-prefixed strings)
   * starting at offset s; returns the negated number of bytes consumed.
   */
  static public int slurpRaw(byte[] b, int s, int l) {
    try {
      int os = s;
      {
        int i = org.apache.hadoop.record.Utils.readVInt(b, s);
        int z = org.apache.hadoop.record.Utils.getVIntSize(i);
        s += (z + i);
        l -= (z + i);
      }
      {
        int i = org.apache.hadoop.record.Utils.readVInt(b, s);
        int z = org.apache.hadoop.record.Utils.getVIntSize(i);
        s += (z + i);
        l -= (z + i);
      }
      return (os - s);
    } catch (java.io.IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Byte-level comparison of two serialized keys (reduceType, then key). */
  static public int compareRaw(byte[] b1, int s1, int l1, byte[] b2, int s2,
      int l2) {
    try {
      int os1 = s1;
      {
        int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
        int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
        int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
        int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
        s1 += z1;
        s2 += z2;
        l1 -= z1;
        l2 -= z2;
        int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
            s2, i2);
        if (r1 != 0) {
          return (r1 < 0) ? -1 : 0;
        }
        s1 += i1;
        s2 += i2;
        l1 -= i1;
        // NOTE(review): looks like this was meant to be "l2 -= i2"; harmless
        // here since neither remaining length is read again — confirm against
        // the record compiler's template before changing generated code.
        l1 -= i2;
      }
      {
        int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
        int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
        int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
        int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
        s1 += z1;
        s2 += z2;
        l1 -= z1;
        l2 -= z2;
        int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
            s2, i2);
        if (r1 != 0) {
          return (r1 < 0) ? -1 : 0;
        }
        s1 += i1;
        s2 += i2;
        l1 -= i1;
        // NOTE(review): same "l1 -= i2" pattern as above; see note there.
        l1 -= i2;
      }
      return (os1 - s1);
    } catch (java.io.IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Maps compareRaw's -1 / 0 / negative-delta convention to -1 / 0 / 1. */
  public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
    int ret = compareRaw(b1, s1, l1, b2, s2, l2);
    return (ret == -1) ? -1 : ((ret == 0) ? 1 : 0);
  }
}
// Register the raw-bytes comparator so frameworks can sort serialized keys
// without deserializing them.
static {
  org.apache.hadoop.record.RecordComparator.define(ChukwaRecordKey.class,
      new Comparator());
}
}
| 8,409 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.chukwa.Chunk;
/**
 * Shared helper routines for working with records and chunks.
 */
public class RecordUtil {
  static Pattern clusterPattern = Pattern
      .compile("(.*)?cluster=\"(.*?)\"(.*)?");

  /**
   * Extracts the cluster name from a record's tags field.
   * @param record the record whose tags are inspected
   * @return the cluster name, or "undefined" when absent
   */
  public static String getClusterName(Record record) {
    return clusterFromTags(record.getValue(Record.tagsField));
  }

  /**
   * Uses a compiled pattern, so theoretically faster than
   * Chunk.getTag().
   * @param chunk - a chunk of binary blob
   * @return cluster name
   */
  public static String getClusterName(Chunk chunk) {
    return clusterFromTags(chunk.getTags());
  }

  /** Pulls the cluster="..." value out of a tags string, or "undefined". */
  private static String clusterFromTags(String tags) {
    if (tags == null) {
      return "undefined";
    }
    Matcher matcher = clusterPattern.matcher(tags);
    return matcher.matches() ? matcher.group(2) : "undefined";
  }
}
| 8,410 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// File generated by hadoop record compiler. Do not edit.
package org.apache.hadoop.chukwa.extraction.engine;
import java.io.Serializable;
/**
 * Generated by the Hadoop record compiler ("do not edit"); a record holding a
 * timestamp plus a map of named byte buffers. Comments below are review
 * annotations only — the code is byte-identical to the generated output.
 */
public class ChukwaRecordJT extends org.apache.hadoop.record.Record implements Serializable {
  private static final long serialVersionUID = 15015L;
  // Type description of this record: a long "time" plus a string->Buffer map.
  private static final org.apache.hadoop.record.meta.RecordTypeInfo _rio_recTypeInfo;
  // Optional writer-schema filter enabling versioned deserialization.
  private static org.apache.hadoop.record.meta.RecordTypeInfo _rio_rtiFilter;
  // Per-filter-field mapping into our own fields; built by setupRtiFields().
  private static int[] _rio_rtiFilterFields;
  static {
    _rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo(
        "ChukwaRecordJT");
    _rio_recTypeInfo.addField("time",
        org.apache.hadoop.record.meta.TypeID.LongTypeID);
    _rio_recTypeInfo.addField("mapFields",
        new org.apache.hadoop.record.meta.MapTypeID(
            org.apache.hadoop.record.meta.TypeID.StringTypeID,
            org.apache.hadoop.record.meta.TypeID.BufferTypeID));
  }
  protected long time;
  protected java.util.TreeMap<String, org.apache.hadoop.record.Buffer> mapFields;

  public ChukwaRecordJT() {
  }

  public ChukwaRecordJT(
      final long time,
      final java.util.TreeMap<String, org.apache.hadoop.record.Buffer> mapFields) {
    this.time = time;
    this.mapFields = mapFields;
  }

  /** @return the static type description of this record. */
  public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
    return _rio_recTypeInfo;
  }

  /** Installs a writer-schema filter; resets the cached field mapping. */
  public static void setTypeFilter(
      org.apache.hadoop.record.meta.RecordTypeInfo rti) {
    if (null == rti)
      return;
    _rio_rtiFilter = rti;
    _rio_rtiFilterFields = null;
  }

  /**
   * Lazily builds {@code _rio_rtiFilterFields}: for each filter field, the
   * 1-based index of the matching field in this record's type info, or 0
   * when there is no counterpart (such fields are skipped on read).
   */
  private static void setupRtiFields() {
    if (null == _rio_rtiFilter)
      return;
    // we may already have done this
    if (null != _rio_rtiFilterFields)
      return;
    int _rio_i, _rio_j;
    _rio_rtiFilterFields = new int[_rio_rtiFilter.getFieldTypeInfos().size()];
    for (_rio_i = 0; _rio_i < _rio_rtiFilterFields.length; _rio_i++) {
      _rio_rtiFilterFields[_rio_i] = 0;
    }
    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter
        .getFieldTypeInfos().iterator();
    _rio_i = 0;
    while (_rio_itFilter.hasNext()) {
      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter
          .next();
      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo
          .getFieldTypeInfos().iterator();
      _rio_j = 1;
      while (_rio_it.hasNext()) {
        org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
        if (_rio_tInfo.equals(_rio_tInfoFilter)) {
          _rio_rtiFilterFields[_rio_i] = _rio_j;
          break;
        }
        _rio_j++;
      }
      _rio_i++;
    }
  }

  /** @return the record timestamp. */
  public long getTime() {
    return time;
  }

  public void setTime(final long time) {
    this.time = time;
  }

  /** @return the live backing map (not a copy); may be null before first set. */
  public java.util.TreeMap<String, org.apache.hadoop.record.Buffer> getMapFields() {
    return mapFields;
  }

  public void setMapFields(
      final java.util.TreeMap<String, org.apache.hadoop.record.Buffer> mapFields) {
    this.mapFields = mapFields;
  }

  /** Serializes time, then the map as (size, key string, value buffer)*. */
  public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a,
      final String _rio_tag) throws java.io.IOException {
    _rio_a.startRecord(this, _rio_tag);
    _rio_a.writeLong(time, "time");
    {
      _rio_a.startMap(mapFields, "mapFields");
      java.util.Set<java.util.Map.Entry<String, org.apache.hadoop.record.Buffer>> _rio_es1 = mapFields
          .entrySet();
      for (java.util.Iterator<java.util.Map.Entry<String, org.apache.hadoop.record.Buffer>> _rio_midx1 = _rio_es1
          .iterator(); _rio_midx1.hasNext();) {
        java.util.Map.Entry<String, org.apache.hadoop.record.Buffer> _rio_me1 = _rio_midx1
            .next();
        String _rio_k1 = _rio_me1.getKey();
        org.apache.hadoop.record.Buffer _rio_v1 = _rio_me1.getValue();
        _rio_a.writeString(_rio_k1, "_rio_k1");
        _rio_a.writeBuffer(_rio_v1, "_rio_v1");
      }
      _rio_a.endMap(mapFields, "mapFields");
    }
    _rio_a.endRecord(this, _rio_tag);
  }

  // Fast path used when no type filter is installed: positional read of
  // time then the whole map, replacing any existing mapFields instance.
  private void deserializeWithoutFilter(
      final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
      throws java.io.IOException {
    _rio_a.startRecord(_rio_tag);
    time = _rio_a.readLong("time");
    {
      org.apache.hadoop.record.Index _rio_midx1 = _rio_a.startMap("mapFields");
      mapFields = new java.util.TreeMap<String, org.apache.hadoop.record.Buffer>();
      for (; !_rio_midx1.done(); _rio_midx1.incr()) {
        String _rio_k1;
        _rio_k1 = _rio_a.readString("_rio_k1");
        org.apache.hadoop.record.Buffer _rio_v1;
        _rio_v1 = _rio_a.readBuffer("_rio_v1");
        mapFields.put(_rio_k1, _rio_v1);
      }
      _rio_a.endMap("mapFields");
    }
    _rio_a.endRecord(_rio_tag);
  }

  /**
   * Deserializes this record; with a filter installed, fields are read in
   * the writer's order and unknown writer fields are skipped.
   */
  public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a,
      final String _rio_tag) throws java.io.IOException {
    if (null == _rio_rtiFilter) {
      deserializeWithoutFilter(_rio_a, _rio_tag);
      return;
    }
    // if we're here, we need to read based on version info
    _rio_a.startRecord(_rio_tag);
    setupRtiFields();
    for (int _rio_i = 0; _rio_i < _rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) {
      if (1 == _rio_rtiFilterFields[_rio_i]) {
        time = _rio_a.readLong("time");
      } else if (2 == _rio_rtiFilterFields[_rio_i]) {
        {
          org.apache.hadoop.record.Index _rio_midx1 = _rio_a
              .startMap("mapFields");
          mapFields = new java.util.TreeMap<String, org.apache.hadoop.record.Buffer>();
          for (; !_rio_midx1.done(); _rio_midx1.incr()) {
            String _rio_k1;
            _rio_k1 = _rio_a.readString("_rio_k1");
            org.apache.hadoop.record.Buffer _rio_v1;
            _rio_v1 = _rio_a.readBuffer("_rio_v1");
            mapFields.put(_rio_k1, _rio_v1);
          }
          _rio_a.endMap("mapFields");
        }
      } else {
        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
            .getFieldTypeInfos());
        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
            .getFieldID(), typeInfos.get(_rio_i).getTypeID());
      }
    }
    _rio_a.endRecord(_rio_tag);
  }

  /**
   * Orders by time, then by map key sets. NOTE(review): unlike equals(),
   * this compares only the KEYS of mapFields (then the key-set sizes) and
   * never the buffer values — records with equal times and equal key sets
   * but different values compare as 0. Generated behavior; confirm before
   * relying on compareTo/equals consistency.
   */
  public int compareTo(final Object _rio_peer_) throws ClassCastException {
    if (!(_rio_peer_ instanceof ChukwaRecordJT)) {
      throw new ClassCastException("Comparing different types of records.");
    }
    ChukwaRecordJT _rio_peer = (ChukwaRecordJT) _rio_peer_;
    int _rio_ret = 0;
    _rio_ret = (time == _rio_peer.time) ? 0
        : ((time < _rio_peer.time) ? -1 : 1);
    if (_rio_ret != 0)
      return _rio_ret;
    {
      java.util.Set<String> _rio_set10 = mapFields.keySet();
      java.util.Set<String> _rio_set20 = _rio_peer.mapFields.keySet();
      java.util.Iterator<String> _rio_miter10 = _rio_set10.iterator();
      java.util.Iterator<String> _rio_miter20 = _rio_set20.iterator();
      while(_rio_miter10.hasNext() && _rio_miter20.hasNext()) {
        String _rio_k10 = _rio_miter10.next();
        String _rio_k20 = _rio_miter20.next();
        _rio_ret = _rio_k10.compareTo(_rio_k20);
        if (_rio_ret != 0) {
          return _rio_ret;
        }
      }
      _rio_ret = (_rio_set10.size() - _rio_set20.size());
    }
    if (_rio_ret != 0)
      return _rio_ret;
    return _rio_ret;
  }

  /** Equality over time and the full mapFields contents (keys and values). */
  public boolean equals(final Object _rio_peer_) {
    if (!(_rio_peer_ instanceof ChukwaRecordJT)) {
      return false;
    }
    if (_rio_peer_ == this) {
      return true;
    }
    ChukwaRecordJT _rio_peer = (ChukwaRecordJT) _rio_peer_;
    boolean _rio_ret = false;
    _rio_ret = (time == _rio_peer.time);
    if (!_rio_ret)
      return _rio_ret;
    _rio_ret = mapFields.equals(_rio_peer.mapFields);
    if (!_rio_ret)
      return _rio_ret;
    return _rio_ret;
  }

  /**
   * Copy with a shallow-cloned map (TreeMap.clone() copies entries, not the
   * Buffer values). The super.clone() result is intentionally discarded,
   * record-compiler style.
   */
  public Object clone() throws CloneNotSupportedException {
    super.clone();
    ChukwaRecordJT _rio_other = new ChukwaRecordJT();
    _rio_other.time = this.time;
    _rio_other.mapFields = (java.util.TreeMap<String, org.apache.hadoop.record.Buffer>) this.mapFields
        .clone();
    return _rio_other;
  }

  /** Standard 17/37 hash over time and mapFields, consistent with equals(). */
  public int hashCode() {
    int _rio_result = 17;
    int _rio_ret;
    _rio_ret = (int) (time ^ (time >>> 32));
    _rio_result = 37 * _rio_result + _rio_ret;
    _rio_ret = mapFields.hashCode();
    _rio_result = 37 * _rio_result + _rio_ret;
    return _rio_result;
  }

  /** @return the record-compiler type signature: a long and a string->Buffer map. */
  public static String signature() {
    return "LChukwaRecordJT(l{sB})";
  }

  /**
   * Raw-bytes comparator for serialized ChukwaRecordJT values.
   * NOTE(review): generated convention — compareRaw returns -1 when the
   * first operand sorts lower, 0 when it sorts higher, and a negative
   * consumed-byte delta when equal; compare() remaps these to -1/0/1.
   */
  public static class Comparator extends
      org.apache.hadoop.record.RecordComparator implements Serializable {
    public Comparator() {
      super(ChukwaRecordJT.class);
    }

    /** Advances past one serialized record; returns the negated byte count. */
    static public int slurpRaw(byte[] b, int s, int l) {
      try {
        int os = s;
        {
          long i = org.apache.hadoop.record.Utils.readVLong(b, s);
          int z = org.apache.hadoop.record.Utils.getVIntSize(i);
          s += z;
          l -= z;
        }
        {
          int mi1 = org.apache.hadoop.record.Utils.readVInt(b, s);
          int mz1 = org.apache.hadoop.record.Utils.getVIntSize(mi1);
          s += mz1;
          l -= mz1;
          for (int midx1 = 0; midx1 < mi1; midx1++) {
            {
              int i = org.apache.hadoop.record.Utils.readVInt(b, s);
              int z = org.apache.hadoop.record.Utils.getVIntSize(i);
              s += (z + i);
              l -= (z + i);
            }
            {
              int i = org.apache.hadoop.record.Utils.readVInt(b, s);
              int z = org.apache.hadoop.record.Utils.getVIntSize(i);
              s += z + i;
              l -= (z + i);
            }
          }
        }
        return (os - s);
      } catch (java.io.IOException e) {
        throw new RuntimeException(e);
      }
    }

    /** Byte-level comparison: time first, then map entries in order. */
    static public int compareRaw(byte[] b1, int s1, int l1, byte[] b2, int s2,
        int l2) {
      try {
        int os1 = s1;
        {
          long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
          long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
          if (i1 != i2) {
            // NOTE(review): (i1 - i2) can overflow for extreme longs; a
            // direct i1 < i2 test would be safer — generated code, confirm
            // against the record compiler before changing.
            return ((i1 - i2) < 0) ? -1 : 0;
          }
          int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
          int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
          s1 += z1;
          s2 += z2;
          l1 -= z1;
          l2 -= z2;
        }
        {
          int mi11 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
          int mi21 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
          int mz11 = org.apache.hadoop.record.Utils.getVIntSize(mi11);
          int mz21 = org.apache.hadoop.record.Utils.getVIntSize(mi21);
          s1 += mz11;
          s2 += mz21;
          l1 -= mz11;
          l2 -= mz21;
          for (int midx1 = 0; midx1 < mi11 && midx1 < mi21; midx1++) {
            {
              // Compare the map keys byte-wise.
              int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
              int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
              int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
              int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
              s1 += z1;
              s2 += z2;
              l1 -= z1;
              l2 -= z2;
              int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1,
                  b2, s2, i2);
              if (r1 != 0) {
                return (r1 < 0) ? -1 : 0;
              }
              s1 += i1;
              s2 += i2;
              l1 -= i1;
              // NOTE(review): looks like this was meant to be "l2 -= i2";
              // harmless since the lengths are not read again afterwards.
              l1 -= i2;
            }
            {
              // Skip the buffer values — only keys participate in ordering.
              int i = org.apache.hadoop.record.Utils.readVInt(b1, s1);
              int z = org.apache.hadoop.record.Utils.getVIntSize(i);
              s1 += z + i;
              l1 -= (z + i);
            }
            {
              int i = org.apache.hadoop.record.Utils.readVInt(b2, s2);
              int z = org.apache.hadoop.record.Utils.getVIntSize(i);
              s2 += z + i;
              l2 -= (z + i);
            }
          }
          if (mi11 != mi21) {
            return (mi11 < mi21) ? -1 : 0;
          }
        }
        return (os1 - s1);
      } catch (java.io.IOException e) {
        throw new RuntimeException(e);
      }
    }

    /** Maps compareRaw's -1 / 0 / negative-delta convention to -1 / 0 / 1. */
    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
      int ret = compareRaw(b1, s1, l1, b2, s2, l2);
      return (ret == -1) ? -1 : ((ret == 0) ? 1 : 0);
    }
  }

  // Register the raw-bytes comparator with the record framework.
  static {
    org.apache.hadoop.record.RecordComparator.define(ChukwaRecordJT.class,
        new Comparator());
  }
}
| 8,411 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecord.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.hadoop.record.Buffer;
/**
 * A ChukwaRecordJT with convenience accessors that expose the byte-buffer
 * map as UTF-8 string fields.
 */
public class ChukwaRecord extends ChukwaRecordJT implements Record {
  public ChukwaRecord() {
  }

  /**
   * Adds (or replaces) a field, storing the value as UTF-8 bytes.
   * The backing map is created lazily on first use; the creation is
   * synchronized, but the subsequent put is not — NOTE(review): concurrent
   * adds may still race on the TreeMap itself, same as the original code.
   *
   * @param key field name
   * @param value field value, encoded as UTF-8
   */
  public void add(String key, String value) {
    synchronized (this) {
      if (this.mapFields == null) {
        this.mapFields = new TreeMap<String, Buffer>();
      }
    }
    // StandardCharsets.UTF_8 avoids the repeated Charset.forName lookup.
    this.mapFields.put(key, new Buffer(value.getBytes(StandardCharsets.UTF_8)));
  }

  /** @return the names of all fields currently present. */
  public String[] getFields() {
    return this.mapFields.keySet().toArray(new String[0]);
  }

  /**
   * @param field field name
   * @return the field's value decoded as UTF-8, or null when absent
   */
  public String getValue(String field) {
    // Single lookup instead of containsKey + get.
    Buffer buffer = this.mapFields.get(field);
    if (buffer == null) {
      return null;
    }
    return new String(buffer.get(), StandardCharsets.UTF_8);
  }

  /** @return true when the named field is present. */
  public boolean containsField(String field) {
    return this.mapFields.containsKey(field);
  }

  /** Removes the named field if present (no-op otherwise). */
  public void removeValue(String field) {
    // Map.remove on an absent key is already a no-op; no containsKey needed.
    this.mapFields.remove(field);
  }

  /**
   * Renders the record as an XML-ish {@code <event ...>} element: non-body
   * fields become attributes; the body field (when present) becomes the
   * element text, otherwise an HTML-ish key/value listing is used.
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("<event ");
    StringBuilder body = new StringBuilder();
    boolean hasBody = false;
    String bodyVal = null;
    for (Map.Entry<String, Buffer> entry : this.mapFields.entrySet()) {
      String key = entry.getKey();
      String val = new String(entry.getValue().get(), StandardCharsets.UTF_8);
      // Fixed: the original compared interned strings with ==; equals() is
      // the idiomatic, behavior-equivalent comparison.
      if (Record.bodyField.equals(key)) {
        hasBody = true;
        bodyVal = val;
      } else {
        sb.append(key).append("=\"").append(val).append("\" ");
        body.append(key).append(" = ").append(val).append("<br>");
      }
    }
    if (hasBody) {
      sb.append(">").append(bodyVal);
    } else {
      sb.append(">").append(body);
    }
    sb.append("</event>");
    return sb.toString();
  }
}
| 8,412 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/SearchResult.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine;
import java.util.List;
import java.util.TreeMap;
/** A page of search hits plus the pagination token used to fetch the next page. */
public interface SearchResult {
  /** Attaches the pagination token for this result page. */
  public void setToken(Token token);

  /** @return the pagination token for this result page. */
  public Token getToken();

  /** @return the matched records, bucketed by timestamp. */
  public TreeMap<Long, List<Record>> getRecords();

  /** Replaces the timestamp-bucketed record map. */
  public void setRecords(TreeMap<Long, List<Record>> records);
}
| 8,413 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine;
import java.util.List;
import java.util.TreeMap;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceFactory;
/**
 * Default SearchService: fans a query out to every requested data source
 * and accumulates all hits into a single result.
 */
public class ChukwaSearchService implements SearchService {
  private DataSourceFactory dataSourceFactory = DataSourceFactory.getInstance();

  /**
   * Runs the query against each named data source in order; every source
   * appends its hits into the shared, timestamp-bucketed record map.
   */
  public SearchResult search(String cluster, String[] dataSources, long t0,
      long t1, String filter, Token token) throws DataSourceException {
    SearchResult result = new ChukwaSearchResult();
    TreeMap<Long, List<Record>> buckets = new TreeMap<Long, List<Record>>();
    result.setRecords(buckets);
    for (String dataSourceName : dataSources) {
      DataSource dataSource = dataSourceFactory.getDataSource(dataSourceName);
      dataSource.search(result, cluster, dataSourceName, t0, t1, filter, token);
    }
    return result;
  }
}
| 8,414 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/SearchService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
/** Contract for querying one or more data sources over a time window. */
public interface SearchService {
  /**
   * Searches the named data sources of a cluster between t0 and t1.
   *
   * @param cluster cluster to query
   * @param dataSources data source names to query
   * @param t0 window start
   * @param t1 window end
   * @param filter record filter expression
   * @param token pagination token from a previous call
   * @return the merged result across all requested data sources
   * @throws DataSourceException when any data source fails
   */
  public SearchResult search(String cluster, String[] dataSources, long t0,
      long t1, String filter, Token token) throws DataSourceException;
}
| 8,415 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/Token.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine;
/** Pagination token handed back and forth between search calls. */
public class Token {
  // Opaque continuation key; null until a data source sets one.
  public String key = null;
  // True when more records remain beyond this page.
  public boolean hasMore = false;

  /** @return whether another page of results is available. */
  public boolean getMore() {
    return this.hasMore;
  }
}
| 8,416 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/Record.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine;
/** A named-field record; field names below are shared by all producers/consumers. */
public interface Record {
  // Well-known field names.
  public static final String bodyField = "body";
  public static final String sourceField = "csource";
  public static final String applicationField = "capp";
  public static final String tagsField = "ctags";
  public static final String chunkDataField = "cchunkData";
  public static final String chunkExceptionField = "cchunkException";
  public static final String classField = "class";
  public static final String logLevelField = "logLevel";
  // public static final String streamNameField = "sname";
  // public static final String typeField = "type";
  // public static final String rawField = "raw";
  // public static final String fieldSeparator = ":";

  /** @return the record timestamp. */
  public long getTime();

  /** Adds (or replaces) a field. */
  public void add(String key, String value);

  /** @return the names of all fields currently present. */
  public String[] getFields();

  /** @return the named field's value, or null when absent. */
  public String getValue(String field);

  public String toString();
}
| 8,417 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchResult.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine;
import java.util.List;
import java.util.TreeMap;
/** Plain holder implementation of SearchResult. */
public class ChukwaSearchResult implements SearchResult {
  // Matched records, bucketed by timestamp.
  private TreeMap<Long, List<Record>> records;
  // Pagination token; null until a data source sets one.
  private Token token = null;

  public Token getToken() {
    return this.token;
  }

  public void setToken(Token token) {
    this.token = token;
  }

  public TreeMap<Long, List<Record>> getRecords() {
    return this.records;
  }

  public void setRecords(TreeMap<Long, List<Record>> records) {
    this.records = records;
  }
}
| 8,418 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Singleton directory of available data sources: lists the per-cluster
 * subdirectories under the configured root folder on the default FileSystem.
 */
public class DsDirectory {
  private static Object lock = new Object();
  private static DsDirectory dsDirectory = null;
  private static final String[] emptyArray = new String[0];
  // Root HDFS folder containing one subdirectory per cluster; always "/"-terminated.
  private String rootFolder = null;
  private DataConfig dataConfig = null;
  private static FileSystem fs = null;
  private Configuration conf = null;

  private DsDirectory() {
    dataConfig = new DataConfig();
    conf = new Configuration();
    try {
      fs = FileSystem.get(conf);
    } catch (IOException e) {
      // NOTE(review): the exception is only printed, leaving fs null; any
      // later list() call will then fail with a NullPointerException rather
      // than a DataSourceException — consider propagating instead.
      e.printStackTrace();
    }
    // NOTE(review): throws NPE if "chukwa.engine.dsDirectory.rootFolder"
    // is missing from the configuration — presumably always set; verify.
    rootFolder = dataConfig.get("chukwa.engine.dsDirectory.rootFolder");
    if (!rootFolder.endsWith("/")) {
      rootFolder += "/";
    }
  }

  /** @return the lazily-created singleton instance (creation is synchronized). */
  public static DsDirectory getInstance() {
    synchronized (lock) {
      if (dsDirectory == null) {
        dsDirectory = new DsDirectory();
      }
    }
    return dsDirectory;
  }

  /**
   * Lists the data sources of a cluster: the names of all subdirectories
   * under {@code rootFolder/cluster}.
   *
   * @param cluster cluster name
   * @return subdirectory names (possibly empty)
   * @throws DataSourceException when the filesystem listing fails
   */
  public String[] list(String cluster) throws DataSourceException {
    List<String> datasources = new ArrayList<String>();
    try {
      FileStatus[] fileStat = fs.listStatus(new Path(rootFolder + cluster));
      for (FileStatus fstat : fileStat) {
        if (fstat.isDir()) {
          datasources.add(fstat.getPath().getName());
        }
      }
    } catch (IOException e) {
      e.printStackTrace();
      throw new DataSourceException(e);
    }
    return datasources.toArray(emptyArray);
  }

  /** Ad-hoc smoke test: prints the data sources of the "unknown" cluster. */
  public static void main(String[] args) throws DataSourceException {
    DsDirectory dsd = DsDirectory.getInstance();
    String[] dss = dsd.list("unknown");
    for (String d : dss) {
      System.out.println(d);
    }
  }
}
| 8,419 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource;
/**
 * Checked exception thrown when a {@code DataSource} cannot be read, usually
 * wrapping an underlying I/O or SQL failure.
 */
public class DataSourceException extends Exception {

  /** Serialization version identifier. */
  private static final long serialVersionUID = -3648370237965886781L;

  /** Creates an exception with neither detail message nor cause. */
  public DataSourceException() {
    super();
  }

  /** Creates an exception with the given detail message. */
  public DataSourceException(String message) {
    super(message);
  }

  /** Creates an exception wrapping the given cause. */
  public DataSourceException(Throwable cause) {
    super(cause);
  }

  /** Creates an exception with the given detail message and cause. */
  public DataSourceException(String message, Throwable cause) {
    super(message, cause);
  }
}
| 8,420 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource;
import java.util.HashMap;
import org.apache.hadoop.chukwa.extraction.engine.datasource.database.DatabaseDS;
import org.apache.hadoop.chukwa.extraction.engine.datasource.record.ChukwaRecordDataSource;
/**
 * Process-wide registry mapping data-source names to {@link DataSource}
 * handlers. Database-backed sources are pre-registered; any unknown name
 * falls back to an HDFS {@link ChukwaRecordDataSource} (prototype behavior).
 */
public class DataSourceFactory {
  private static final Object lock = new Object();
  private static DataSourceFactory factory = null;
  // Registry of name -> handler. Guarded by this instance's monitor: the
  // factory is a shared singleton, so unsynchronized put/get on a HashMap
  // would race.
  private HashMap<String, DataSource> dataSources = new HashMap<String, DataSource>();

  private DataSourceFactory() {
    // TODO load from config Name + class + threadSafe?
    DataSource databaseDS = new DatabaseDS();
    dataSources.put("MRJob", databaseDS);
    dataSources.put("HodJob", databaseDS);
    dataSources.put("QueueInfo", databaseDS);
  }

  /** @return the lazily-created process-wide instance. */
  public static DataSourceFactory getInstance() {
    synchronized (lock) {
      if (factory == null) {
        factory = new DataSourceFactory();
      }
    }
    return factory;
  }

  /**
   * Returns the handler registered for {@code datasourceName}, registering a
   * new HDFS-backed handler on first use of an unknown name.
   *
   * FIX: now synchronized -- the registry is mutated here while the factory
   * is shared across threads (getInstance is already lock-guarded).
   *
   * @param datasourceName logical data-source name, e.g. "MRJob"
   * @return the handler for that name; never null
   * @throws DataSourceException reserved for when unknown names are rejected
   */
  public synchronized DataSource getDataSource(String datasourceName)
      throws DataSourceException {
    if (dataSources.containsKey(datasourceName)) {
      return dataSources.get(datasourceName);
    } else {
      DataSource hsdfsDS = new ChukwaRecordDataSource();
      dataSources.put(datasourceName, hsdfsDS);
      return hsdfsDS;
      // TODO proto only!
      // throw new DataSourceException("Unknown datasource");
    }
  }
}
| 8,421 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource;
import org.apache.hadoop.chukwa.extraction.engine.SearchResult;
import org.apache.hadoop.chukwa.extraction.engine.Token;
/**
 * A queryable source of Chukwa records, e.g. a database table
 * ({@code DatabaseDS}) or record files on HDFS ({@code ChukwaRecordDataSource}).
 */
public interface DataSource {
  /**
   * Searches this source for records with timestamps between t0 and t1 and
   * adds matches to {@code result}.
   *
   * @param result accumulator that receives matching records
   * @param cluster cluster name to search within
   * @param dataSource logical data-source name (table or record directory)
   * @param t0 window start, milliseconds since epoch
   * @param t1 window end, milliseconds since epoch
   * @param filter optional substring filter; implementations in this package
   *        treat null or empty as "match everything"
   * @param token resume token from a previous call, or null to start fresh
   * @return the same {@code result} instance, populated
   * @throws DataSourceException if the underlying storage cannot be read
   */
  public SearchResult search(SearchResult result, String cluster,
      String dataSource, long t0, long t1, String filter, Token token)
      throws DataSourceException;

  /** @return whether a single instance may serve concurrent searches. */
  public boolean isThreadSafe();
}
| 8,422 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Log Event Prototype
// From event_viewer.jsp
package org.apache.hadoop.chukwa.extraction.engine.datasource.database;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeMap;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.Record;
import org.apache.hadoop.chukwa.extraction.engine.SearchResult;
import org.apache.hadoop.chukwa.extraction.engine.Token;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
//import org.apache.hadoop.chukwa.hicc.ClusterConfig;
/**
 * {@link DataSource} backed by a SQL database: each data source name is a
 * table, queried over a time window on a per-table timestamp column.
 */
public class DatabaseDS implements DataSource {
  private static final Log log = LogFactory.getLog(DatabaseDS.class);

  /**
   * Queries the table named {@code dataSource} for rows whose time column is
   * within [t0, t1] and appends matching rows to {@code result}, keyed by
   * timestamp.
   *
   * @param result accumulator to which matching records are added
   * @param cluster cluster name; defaults to "demo" when null
   * @param dataSource table name; also selects which timestamp column is used
   * @param t0 window start, milliseconds since epoch
   * @param t1 window end, milliseconds since epoch
   * @param filter optional substring filter on the rendered row text; null or
   *        empty matches every row
   * @param token resume token (unused by this implementation)
   * @return the same {@code result} instance, populated
   * @throws DataSourceException wrapping any SQLException
   */
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value =
      "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE",
      justification = "Dynamic based upon tables in the database")
  public SearchResult search(SearchResult result, String cluster,
      String dataSource, long t0, long t1, String filter, Token token)
      throws DataSourceException {
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd kk:mm:ss");
    String timeField = null;
    TreeMap<Long, List<Record>> records = result.getRecords();
    if (cluster == null) {
      cluster = "demo";
    }
    // Each known table stores its timestamp under a different column name.
    if (dataSource.equalsIgnoreCase("MRJob")) {
      timeField = "LAUNCH_TIME";
    } else if (dataSource.equalsIgnoreCase("HodJob")) {
      timeField = "StartTime";
    } else {
      timeField = "timestamp";
    }
    String startS = formatter.format(t0);
    String endS = formatter.format(t1);
    Connection conn = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
      String dateclause = timeField + " >= '" + startS + "' and " + timeField
          + " <= '" + endS + "'";
      // ClusterConfig cc = new ClusterConfig();
      String jdbc = ""; // cc.getURL(cluster);
      conn = org.apache.hadoop.chukwa.util.DriverManagerUtil.getConnection(jdbc);
      stmt = conn.createStatement();
      String query = "select * from " + dataSource + " where " + dateclause + ";";
      // FIX: the query used to be run twice (an executeQuery whose ResultSet
      // was discarded, immediately followed by execute); it now runs once.
      if (stmt.execute(query)) {
        rs = stmt.getResultSet();
        ResultSetMetaData rmeta = rs.getMetaData();
        int col = rmeta.getColumnCount();
        while (rs.next()) {
          ChukwaRecord event = new ChukwaRecord();
          StringBuilder cell = new StringBuilder();
          long timestamp = 0;
          // NOTE(review): iterates i < col, so the last column is never
          // rendered into the row body -- confirm whether intentional.
          for (int i = 1; i < col; i++) {
            String value = rs.getString(i);
            if (value != null) {
              cell.append(" ");
              cell.append(rmeta.getColumnName(i));
              cell.append(":");
              cell.append(value);
            }
            if (rmeta.getColumnName(i).equals(timeField)) {
              timestamp = rs.getLong(i);
              event.setTime(timestamp);
            }
          }
          // Keep the row only if it matches the (optional) substring filter.
          boolean isValid = false;
          if (filter == null || filter.equals("")) {
            isValid = true;
          } else if (cell.indexOf(filter) > 0) {
            isValid = true;
          }
          if (!isValid) {
            continue;
          }
          event.add(Record.bodyField, cell.toString());
          event.add(Record.sourceField, cluster + "." + dataSource);
          if (records.containsKey(timestamp)) {
            records.get(timestamp).add(event);
          } else {
            List<Record> list = new LinkedList<Record>();
            list.add(event);
            records.put(event.getTime(), list);
          }
        }
      }
    } catch (SQLException e) {
      log.error("SQL error while searching " + dataSource, e);
      throw new DataSourceException(e);
    } finally {
      if (rs != null) {
        try {
          rs.close();
        } catch (SQLException sqlEx) {
          log.debug(ExceptionUtil.getStackTrace(sqlEx));
        }
        rs = null;
      }
      if (stmt != null) {
        try {
          stmt.close();
        } catch (SQLException sqlEx) {
          log.debug(ExceptionUtil.getStackTrace(sqlEx));
        }
        stmt = null;
      }
      // FIX: the Connection was previously never closed (connection leak).
      if (conn != null) {
        try {
          conn.close();
        } catch (SQLException sqlEx) {
          log.debug(ExceptionUtil.getStackTrace(sqlEx));
        }
      }
    }
    return result;
  }

  /** @return true; this implementation keeps no per-call state. */
  public boolean isThreadSafe() {
    return true;
  }
}
| 8,423 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaFileParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.Record;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Reads records out of a tab-separated Chukwa event text file on HDFS.
 * Each line starts with an epoch-millisecond timestamp column followed by
 * the record body.
 */
public class ChukwaFileParser {
  // Column index of the timestamp in each tab-separated line.
  static final int timestampField = 0;

  /**
   * Scans {@code fileName} and returns up to {@code maxRows} records whose
   * timestamps lie in [t0, t1) and whose line contains {@code filter}
   * (null/empty filter matches everything). Scanning stops at the first line
   * with timestamp >= t1, or when the byte offset reaches
   * {@code maxOffset}. When more than maxRows match, the OLDEST records are
   * dropped so the newest maxRows are kept.
   *
   * @param cluster unused here; kept for interface symmetry with callers
   * @param dataSource stored in each record's source field
   * @param maxRows maximum number of records to retain
   * @param t1 window end (exclusive), milliseconds since epoch
   * @param t0 window start (inclusive), milliseconds since epoch
   * @param maxOffset last byte offset to consider within the file
   * @param filter optional substring filter, or null/empty for no filtering
   * @param fileName absolute HDFS path of the event file
   * @param fs file system to read from
   * @return matching records; empty if the file does not exist
   * @throws MalformedFileFormat declared but currently never thrown -- parse
   *         errors are printed and the line's stale timestamp is reused
   */
  @SuppressWarnings("deprecation")
  public static List<Record> readData(String cluster, String dataSource,
      int maxRows, long t1, long t0, long maxOffset, String filter,
      String fileName, FileSystem fs) throws MalformedFileFormat {
    // String source = "NameNode." + fileName;
    List<Record> records = new LinkedList<Record>();
    FSDataInputStream dataIS = null;
    int lineCount = 0;
    try {
      if (!fs.exists(new Path(fileName))) {
        System.out.println("fileName not there!");
        return records;
      }
      System.out.println("NameNodeParser Open [" + fileName + "]");
      dataIS = fs.open(new Path(fileName));
      System.out.println("NameNodeParser Open2 [" + fileName + "]");
      long timestamp = 0;
      int listSize = 0;
      String line = null;
      String[] data = null;
      long offset = 0;
      do {
        // Offset is captured BEFORE the read so it points at the line start.
        offset = dataIS.getPos();
        // Need TODO something here
        // if (offset > maxOffset)
        // {
        // break;
        // }
        line = dataIS.readLine();
        lineCount++;
        // System.out.println("NameNodeParser Line [" +line + "]");
        if (line != null) {
          // empty lines
          // Lines shorter than a timestamp column are skipped as empty/noise.
          if (line.length() < 14) {
            // System.out.println("NameNodeParser Line < 14! [" +line + "]");
            continue;
          }
          // System.out.println("Line [" +line + "]");
          data = line.split("\t");// Default separator for TextOutputFormat!
          try {
            timestamp = Long.parseLong(data[timestampField]);
          } catch (Exception e) {
            // NOTE(review): on parse failure the previous line's timestamp is
            // silently reused -- confirm this best-effort behavior is wanted.
            e.printStackTrace();
            // throw new MalformedFileFormat(e);
          }
          if (timestamp < t0) {
            // System.out.println("Line not in range. Skipping: " +line);
            // System.out.println("Search for: " + new Date(t0) + " is :" + new
            // Date(timestamp));
            continue;
          } else if ((timestamp < t1) && (offset < maxOffset)) // JB (epochTS <
          // maxDate)
          {
            // System.out.println("In Range: " + line);
            boolean valid = false;
            if ((filter == null || filter.equals(""))) {
              valid = true;
            } else if (line.indexOf(filter) > 0) {
              valid = true;
            }
            if (valid) {
              // System.out.println("In Range In Filter: " + line);
              ChukwaRecord record = new ChukwaRecord();
              record.setTime(timestamp);
              record.add("offset", "" + offset);
              // NOTE(review): assumes every line has at least two tab-separated
              // columns; a body-less line would throw AIOOBE here.
              record.add(Record.bodyField, data[1]);
              record.add(Record.sourceField, dataSource);
              records.add(record);
              listSize = records.size();
              // Keep only the newest maxRows records: evict from the front.
              if (listSize > maxRows) {
                records.remove(0);
                // System.out.println("==========>>>>>REMOVING: " + e);
              }
            } else {
              // System.out.println(
              // "In Range ==================>>>>>>>>> OUT Regex: " + line);
            }
          } else {
            // Timestamps are ordered, so the first line past t1 ends the scan.
            // System.out.println("Line out of range. Stopping now: " +line);
            break;
          }
        }
      } while (line != null);
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      System.out.println("File: " + fileName + " Line count: " + lineCount);
      try {
        if(dataIS != null) {
          dataIS.close();
        }
      } catch (IOException e) {
      }
    }
    return records;
  }
}
| 8,424 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaRecordDataSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaSearchResult;
import org.apache.hadoop.chukwa.extraction.engine.Record;
import org.apache.hadoop.chukwa.extraction.engine.SearchResult;
import org.apache.hadoop.chukwa.extraction.engine.Token;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.log4j.Logger;
/**
 * {@link DataSource} that reads ChukwaRecords from the day/hour/5-minute
 * directory layout produced by demux, e.g.
 * {@code SystemMetrics/20080922/12/25/SystemMetrics_20080922_12_25.1.evt}.
 * A pipe-separated {@link Token} key encodes the resume position between
 * paged calls: day|hour|raw|spill|currentTs|position|fileName.
 */
public class ChukwaRecordDataSource implements DataSource {
  // TODO need some cleanup after 1st production
  // First implementation to get it working with the new directory structure
  static Logger log = Logger.getLogger(ChukwaRecordDataSource.class);
  // Directory granularity selectors used by exist()/containsRotateFlag()/
  // buildFileName().
  private static final int dayFolder = 100;
  private static final int hourFolder = 200;
  private static final int rawFolder = 300;
  // Names of the 5-minute sub-folders inside an hour folder.
  static final String[] raws = { "0", "5", "10", "15", "20", "25", "30", "35",
      "40", "45", "50", "55" };
  private static FileSystem fs = null;
  private static ChukwaConfiguration conf = null;
  private static String rootDsFolder = null;
  private static DataConfig dataConfig = null;
  static {
    dataConfig = new DataConfig();
    rootDsFolder = dataConfig.get("chukwa.engine.dsDirectory.rootFolder");
    conf = new ChukwaConfiguration();
    try {
      fs = FileSystem.get(conf);
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  /** @return true; per-call state lives in local variables only. */
  @Override
  public boolean isThreadSafe() {
    return true;
  }

  /**
   * Walks the day/hour/raw folder hierarchy forward from t0, collecting up to
   * 200 records in [t0, t1] that match {@code filter}, and stores the resume
   * position in the result's token.
   */
  @Override
  public SearchResult search(SearchResult result, String cluster,
      String dataSource, long t0, long t1, String filter, Token token)
      throws DataSourceException {
    String filePath = rootDsFolder + "/" + cluster + "/";
    log.debug("filePath [" + filePath + "]");
    Calendar calendar = Calendar.getInstance();
    calendar.setTimeInMillis(t0);
    SimpleDateFormat sdf = new java.text.SimpleDateFormat("yyyyMMdd");
    int maxCount = 200;
    List<Record> records = new ArrayList<Record>();
    ChukwaDSInternalResult res = new ChukwaDSInternalResult();
    if (token != null) {
      // token.key = day + "|" + hour + "|" + raw + "|" + spill + "|" +
      // res.currentTs + "|"+ res.position + "|"+ res.fileName;
      try {
        String[] vars = token.key.split("\\|");
        res.day = vars[0];
        res.hour = Integer.parseInt(vars[1]);
        res.rawIndex = Integer.parseInt(vars[2]);
        res.spill = Integer.parseInt(vars[3]);
        res.currentTs = Long.parseLong(vars[4]);
        res.position = Long.parseLong(vars[5]);
        // FIX: fileName is the 7th token field; this previously read vars[5]
        // (the position), so resuming inside a spill file never worked.
        res.fileName = vars[6];
        log.info("Token is not null! :" + token.key);
      } catch (Exception e) {
        log.error("Invalid Key: [" + token.key + "] exception: ", e);
      }
    } else {
      log.debug("Token is null!");
    }
    try {
      // One iteration per day until t1 is reached or maxCount is exhausted.
      do {
        log.debug("start Date [" + calendar.getTime() + "]");
        String workingDay = sdf.format(calendar.getTime());
        int workingHour = calendar.get(Calendar.HOUR_OF_DAY);
        int startRawIndex = 0;
        if (token != null) {
          // Resume exactly where the previous page stopped.
          workingDay = res.day;
          workingHour = res.hour;
          startRawIndex = res.rawIndex;
        } else {
          token = new Token();
        }
        log.debug("workingDay " + workingDay);
        log.debug("workingHour " + workingHour);
        if (exist(dayFolder, filePath, dataSource, workingDay, null, null)) {
          // Extract Data for Day
          if (containsRotateFlag(dayFolder, filePath, dataSource, workingDay,
              null)) {
            // read data from day
            // SystemMetrics/20080922/SystemMetrics_20080922.1.evt
            log.debug("fs.exists(workingDayRotatePath) ");
            extractRecords(res, ChukwaRecordDataSource.dayFolder, filePath,
                dataSource, workingDay, null, -1, token, records, maxCount, t0,
                t1, filter);
            maxCount = maxCount - records.size();
            if ((maxCount <= 0) || (res.currentTs > t1)) {
              break;
            }
          } // End process Day File
          else // check for hours
          {
            log.debug("check for hours");
            for (int hour = 0; hour < 24; hour++) {
              // Skip hours already consumed on the resumed day.
              if (workingDay.equals(res.day) && hour < workingHour) {
                continue;
              }
              log.debug(" Hour? -->" + filePath + dataSource + "/"
                  + workingDay + "/" + hour);
              // NOTE(review): the existence checks below pass dayFolder, so
              // exist() only re-checks the day directory, not the hour/raw
              // path; extractRecords re-checks the actual file so this is
              // redundant rather than harmful -- confirm intent.
              if (exist(dayFolder, filePath, dataSource, workingDay, "" + hour,
                  null)) {
                if (containsRotateFlag(dayFolder, filePath, dataSource,
                    workingDay, "" + hour)) {
                  // read data from Hour
                  // SystemMetrics/20080922/12/SystemMetrics_20080922_12.1.evt
                  extractRecords(res, ChukwaRecordDataSource.hourFolder,
                      filePath, dataSource, workingDay, "" + hour, -1, token,
                      records, maxCount, t0, t1, filter);
                } else // check for raw
                {
                  log.debug("Working on Raw");
                  for (int rawIndex = startRawIndex; rawIndex < 12; rawIndex++) {
                    // read data from Raw
                    //SystemMetrics/20080922/0/25/SystemMetrics_20080922_0_25.1.
                    // evt
                    if (exist(dayFolder, filePath, dataSource, workingDay, ""
                        + hour, raws[rawIndex])) {
                      extractRecords(res, ChukwaRecordDataSource.rawFolder,
                          filePath, dataSource, workingDay, "" + hour,
                          rawIndex, token, records, maxCount, t0, t1, filter);
                      maxCount = maxCount - records.size();
                      if ((maxCount <= 0) || (res.currentTs > t1)) {
                        break;
                      }
                    } else {
                      log.debug("<<<<<<<<<Working on Raw Not exist--> "
                          + filePath + dataSource + "/" + workingDay + "/"
                          + workingHour + "/" + raws[rawIndex]);
                    }
                    // Spill numbering restarts for each new raw folder.
                    res.spill = 1;
                  }
                }
              } // End if (fs.exists(new Path(filePath + workingDay+ "/" +
                // hour)))
              maxCount = maxCount - records.size();
              if ((maxCount <= 0) || (res.currentTs > t1)) {
                break;
              }
            } // End process all Hourly/raw files
          }
        }
        maxCount = maxCount - records.size();
        if ((maxCount <= 0) || (res.currentTs > t1)) {
          break;
        }
        // move to the next day
        calendar.add(Calendar.DAY_OF_MONTH, +1);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
      } while (calendar.getTimeInMillis() < t1);
    } catch (Exception e) {
      e.printStackTrace();
      throw new DataSourceException(e);
    }
    // Merge the flat record list into the result's timestamp-keyed map.
    TreeMap<Long, List<Record>> recordsInResult = result.getRecords();
    for (Record record : records) {
      long timestamp = record.getTime();
      if (recordsInResult.containsKey(timestamp)) {
        recordsInResult.get(timestamp).add(record);
      } else {
        List<Record> list = new LinkedList<Record>();
        list.add(record);
        recordsInResult.put(timestamp, list);
      }
    }
    result.setToken(token);
    return result;
  }

  /**
   * Reads every spill file of one day/hour/raw bucket (".1.evt", ".2.evt",
   * ...) until a spill is missing or {@code maxRows} is reached, appending
   * matches to {@code records} and updating the token with the final
   * position.
   */
  public void extractRecords(ChukwaDSInternalResult res, int directoryType,
      String rootFolder, String dataSource, String day, String hour,
      int rawIndex, Token token, List<Record> records, int maxRows, long t0,
      long t1, String filter) throws Exception {
    // for each spill file
    // extract records
    int spill = res.spill;
    boolean workdone = false;
    do {
      String fileName = buildFileName(directoryType, rootFolder, dataSource,
          spill, day, hour, rawIndex);
      log.debug("extractRecords : " + fileName);
      if (fs.exists(new Path(fileName))) {
        readData(res, token, fileName, maxRows, t0, t1, filter);
        res.spill = spill;
        List<Record> localRecords = res.records;
        log.debug("localRecords size : " + localRecords.size());
        maxRows = maxRows - localRecords.size();
        if (maxRows <= 0) {
          workdone = true;
        }
        records.addAll(localRecords);
        log.debug("AFTER fileName [" + fileName + "] count="
            + localRecords.size() + " maxCount=" + maxRows);
        spill++;
      } else {
        // no more spill
        workdone = true;
      }
    } while (!workdone);
    token.key = day + "|" + hour + "|" + rawIndex + "|" + spill + "|"
        + res.currentTs + "|" + res.position + "|" + res.fileName;
  }

  /**
   * Reads one SequenceFile of (ChukwaRecordKey, ChukwaRecord), resuming from
   * {@code res.position} when re-opening the same file, and fills
   * {@code res.records} with up to {@code maxRows} matches in [t0, t1).
   * Filter matching is case-insensitive substring over all record fields.
   */
  public void readData(ChukwaDSInternalResult res, Token token,
      String fileName, int maxRows, long t0, long t1, String filter)
      throws Exception {
    List<Record> records = new LinkedList<Record>();
    res.records = records;
    SequenceFile.Reader r = null;
    if (filter != null) {
      filter = filter.toLowerCase();
    }
    try {
      if (!fs.exists(new Path(fileName))) {
        log.debug("fileName not there!");
        return;
      }
      log.debug("Parser Open [" + fileName + "]");
      long timestamp = 0;
      int listSize = 0;
      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();
      r = new SequenceFile.Reader(fs, new Path(fileName), conf);
      log.debug("readData Open2 [" + fileName + "]");
      if ((fileName.equals(res.fileName)) && (res.position != -1)) {
        r.seek(res.position);
      }
      res.fileName = fileName;
      while (r.next(key, record)) {
        if (record != null) {
          res.position = r.getPosition();
          timestamp = record.getTime();
          res.currentTs = timestamp;
          log.debug("\nSearch for startDate: " + new Date(t0) + " is :"
              + new Date(timestamp));
          if (timestamp < t0) {
            // log.debug("Line not in range. Skipping: " +record);
            continue;
          } else if (timestamp < t1) {
            log.debug("In Range: " + record.toString());
            boolean valid = false;
            if ((filter == null || filter.equals(""))) {
              valid = true;
            } else if (isValid(record, filter)) {
              valid = true;
            }
            if (valid) {
              records.add(record);
              // A fresh instance is required: the reader mutates the record
              // it is given, which would corrupt entries already collected.
              record = new ChukwaRecord();
              listSize = records.size();
              if (listSize >= maxRows) {
                // maxRow so stop here
                // Update token
                token.key = key.getKey();
                token.hasMore = true;
                break;
              }
            } else {
              log.debug("In Range ==================>>>>>>>>> OUT Regex: "
                  + record);
            }
          } else {
            log.debug("Line out of range. Stopping now: " + record);
            // Update Token
            token.key = key.getKey();
            token.hasMore = false;
            break;
          }
        }
      }
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      // FIX: guard against NPE when the Reader constructor itself threw.
      if (r != null) {
        try {
          r.close();
        } catch (Exception e) {
          log.debug("Error while closing reader for " + fileName, e);
        }
      }
    }
  }

  /**
   * @return true when the "rotateDone" marker exists for the given day (or
   *         day+hour) folder, meaning that folder's data has been compacted
   *         into per-day (or per-hour) spill files.
   */
  public boolean containsRotateFlag(int directoryType, String rootFolder,
      String dataSource, String workingDay, String workingHour)
      throws Exception {
    boolean contains = false;
    switch (directoryType) {
    case ChukwaRecordDataSource.dayFolder:
      // SystemMetrics/20080922/rotateDone
      contains = fs.exists(new Path(rootFolder + dataSource + "/" + workingDay
          + "/rotateDone"));
      break;
    case ChukwaRecordDataSource.hourFolder:
      // SystemMetrics/20080922/12/rotateDone
      contains = fs.exists(new Path(rootFolder + dataSource + "/" + workingDay
          + "/" + workingHour + "/rotateDone"));
      break;
    default:
      contains = fs.exists(new Path(rootFolder + dataSource + "/" + workingDay
          + "/rotateDone"));
      break;
    }
    return contains;
  }

  /**
   * @return true when the folder for the requested granularity (day, hour, or
   *         5-minute raw) exists under the data source.
   */
  public boolean exist(int directoryType, String rootFolder, String dataSource,
      String workingDay, String workingHour, String raw) throws Exception {
    boolean contains = false;
    switch (directoryType) {
    case ChukwaRecordDataSource.dayFolder:
      // SystemMetrics/20080922/rotateDone
      contains = fs
          .exists(new Path(rootFolder + dataSource + "/" + workingDay));
      break;
    case ChukwaRecordDataSource.hourFolder:
      // SystemMetrics/20080922/12/rotateDone
      contains = fs.exists(new Path(rootFolder + dataSource + "/" + workingDay
          + "/" + workingHour));
      break;
    case ChukwaRecordDataSource.rawFolder:
      // SystemMetrics/20080922/12/rotateDone
      contains = fs.exists(new Path(rootFolder + dataSource + "/" + workingDay
          + "/" + workingHour + "/" + raw));
      break;
    default:
      contains = fs
          .exists(new Path(rootFolder + dataSource + "/" + workingDay));
      break;
    }
    return contains;
  }

  /**
   * @return true when any field value of {@code record} contains
   *         {@code filter} (filter must already be lower-cased by caller).
   */
  protected boolean isValid(ChukwaRecord record, String filter) {
    String[] fields = record.getFields();
    for (String field : fields) {
      if (record.getValue(field).toLowerCase().indexOf(filter) >= 0) {
        return true;
      }
    }
    return false;
  }

  /**
   * Builds the spill-file path for the requested granularity, e.g.
   * {@code <root>/<ds>/<day>/<hour>/<raw>/<ds>_<day>_<hour>_<raw>.<spill>.evt}.
   */
  public String buildFileName(int directoryType, String rootFolder,
      String dataSource, int spill, String day, String hour, int rawIndex) {
    String fileName = null;
    // TODO use StringBuilder
    // TODO revisit the way we're building fileName
    switch (directoryType) {
    case ChukwaRecordDataSource.dayFolder:
      // SystemMetrics/20080922/SystemMetrics_20080922.1.evt
      fileName = rootFolder + "/" + dataSource + "/" + day + "/" + dataSource
          + "_" + day + "." + spill + ".evt";
      break;
    case ChukwaRecordDataSource.hourFolder:
      // SystemMetrics/20080922/12/SystemMetrics_20080922_12.1.evt
      fileName = rootFolder + "/" + dataSource + "/" + day + "/" + hour + "/"
          + dataSource + "_" + day + "_" + hour + "." + spill + ".evt";
      break;
    case ChukwaRecordDataSource.rawFolder:
      // SystemMetrics/20080922/0/25/SystemMetrics_20080922_0_25.1.evt
      fileName = rootFolder + "/" + dataSource + "/" + day + "/" + hour + "/"
          + raws[rawIndex] + "/" + dataSource + "_" + day + "_" + hour + "_"
          + raws[rawIndex] + "." + spill + ".evt";
      break;
    default:
      fileName = rootFolder + "/" + dataSource + "/" + day + "/" + dataSource
          + "_" + day + "." + spill + ".evt";
      break;
    }
    log.debug("buildFileName :" + fileName);
    return fileName;
  }

  /**
   * Command-line driver.
   * Usage: cluster dataSource t0 t1 [filter|null] [tokenKey]
   */
  public static void main(String[] args) throws DataSourceException {
    ChukwaRecordDataSource ds = new ChukwaRecordDataSource();
    SearchResult result = new ChukwaSearchResult();
    result.setRecords(new TreeMap<Long, List<Record>>());
    String cluster = args[0];
    String dataSource = args[1];
    long t0 = Long.parseLong(args[2]);
    long t1 = Long.parseLong(args[3]);
    String filter = null;
    Token token = null;
    if (args.length >= 5 && !args[4].equalsIgnoreCase("null")) {
      filter = args[4];
    }
    if (args.length == 6) {
      token = new Token();
      token.key = args[5];
      System.out.println("token :" + token.key);
    }
    System.out.println("cluster :" + cluster);
    System.out.println("dataSource :" + dataSource);
    System.out.println("t0 :" + t0);
    System.out.println("t1 :" + t1);
    System.out.println("filter :" + filter);
    ds.search(result, cluster, dataSource, t0, t1, filter, token);
    TreeMap<Long, List<Record>> records = result.getRecords();
    for(Entry<Long, List<Record>> entry : records.entrySet()) {
      long ts = entry.getKey();
      System.out.println("\n\nTimestamp: " + new Date(ts));
      List<Record> list = entry.getValue();
      for (int i = 0; i < list.size(); i++) {
        System.out.println(list.get(i));
      }
    }
    if (result.getToken() != null) {
      System.out.println("Key -->" + result.getToken().key);
    }
  }
}
| 8,425 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaSequenceFileParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
import java.io.IOException;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.chukwa.extraction.engine.Record;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
/**
 * Reads ChukwaRecords out of a Hadoop SequenceFile of
 * (ChukwaRecordKey, ChukwaRecord) pairs, keeping the newest records that fall
 * inside a time window and match an optional substring filter.
 */
public class ChukwaSequenceFileParser {

  /**
   * Scans {@code fileName} and returns up to {@code maxRows} records with
   * timestamps in [t0, t1) that match {@code filter} (case-insensitive
   * substring over all fields; null/empty matches everything). When more than
   * maxRows match, the OLDEST records are dropped.
   *
   * @param cluster unused here; kept for interface symmetry with callers
   * @param dataSource unused here; kept for interface symmetry with callers
   * @param maxRows maximum number of records to retain
   * @param t1 window end (exclusive), milliseconds since epoch
   * @param t0 window start (inclusive), milliseconds since epoch
   * @param maxOffset intended byte-offset cap -- NOTE(review): the local
   *        {@code offset} is initialized to 0 and never advanced, so the
   *        {@code offset < maxOffset} test is constant; confirm intent
   * @param filter optional substring filter, or null/empty for no filtering
   * @param fileName absolute HDFS path of the sequence file
   * @param fs file system to read from
   * @param conf Hadoop configuration for the SequenceFile reader
   * @return matching records; empty if the file does not exist
   * @throws MalformedFileFormat declared but currently never thrown
   */
  public static List<Record> readData(String cluster, String dataSource,
      int maxRows, long t1, long t0, long maxOffset, String filter,
      String fileName, FileSystem fs, Configuration conf)
      throws MalformedFileFormat {
    // String source = "NameNode." + fileName;
    List<Record> records = new LinkedList<Record>();
    SequenceFile.Reader r = null;
    int lineCount = 0;
    // Lower-case once so the per-field comparison in isValid() is cheap.
    if (filter != null) {
      filter = filter.toLowerCase();
    }
    try {
      if (!fs.exists(new Path(fileName))) {
        System.out.println("fileName not there!");
        return records;
      }
      System.out.println("NameNodeParser Open [" + fileName + "]");
      r = new SequenceFile.Reader(fs, new Path(fileName), conf);
      System.out.println("NameNodeParser Open2 [" + fileName + "]");
      long timestamp = 0;
      int listSize = 0;
      long offset = 0;
      // HdfsWriter.HdfsWriterKey key = new HdfsWriter.HdfsWriterKey();
      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();
      while (r.next(key, record)) {
        lineCount++;
        System.out.println("NameNodeParser Line ["
            + record.getValue(Record.bodyField) + "]");
        // NOTE(review): record is reader-owned and never null after a
        // successful next(); this guard is always true.
        if (record != null) {
          timestamp = record.getTime();
          if (timestamp < t0) {
            System.out.println("Line not in range. Skipping: "
                + record.getValue(Record.bodyField));
            System.out.println("Search for: " + new Date(t0) + " is :"
                + new Date(timestamp));
            continue;
          } else if ((timestamp < t1) && (offset < maxOffset)) // JB (epochTS <
          // maxDate)
          {
            System.out
                .println("In Range: " + record.getValue(Record.bodyField));
            boolean valid = false;
            if ((filter == null || filter.equals(""))) {
              valid = true;
            } else if (isValid(record, filter)) {
              valid = true;
            }
            if (valid) {
              records.add(record);
              // Fresh instance: the reader mutates the record it is handed,
              // which would corrupt entries already collected.
              record = new ChukwaRecord();
              listSize = records.size();
              // Keep only the newest maxRows records: evict from the front.
              if (listSize > maxRows) {
                records.remove(0);
                System.out.println("==========>>>>>REMOVING: "
                    + record.getValue(Record.bodyField));
              }
            } else {
              System.out
                  .println("In Range ==================>>>>>>>>> OUT Regex: "
                      + record.getValue(Record.bodyField));
            }
          } else {
            // Timestamps are ordered, so the first record past t1 ends the scan.
            System.out.println("Line out of range. Stopping now: "
                + record.getValue(Record.bodyField));
            break;
          }
        }
      }
    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      System.out.println("File: " + fileName + " Line count: " + lineCount);
      if (r != null) {
        try {
          r.close();
        } catch (IOException e) {
        }
      }
    }
    return records;
  }

  /**
   * @return true when any field value of {@code record} contains
   *         {@code filter} (filter must already be lower-cased by caller).
   */
  protected static boolean isValid(ChukwaRecord record, String filter) {
    String[] fields = record.getFields();
    for (String field : fields) {
      if (record.getValue(field).toLowerCase().indexOf(filter) >= 0) {
        return true;
      }
    }
    return false;
  }
}
| 8,426 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/RecordDS.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeMap;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.extraction.engine.Record;
import org.apache.hadoop.chukwa.extraction.engine.SearchResult;
import org.apache.hadoop.chukwa.extraction.engine.Token;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
import org.apache.hadoop.fs.FileSystem;
/**
 * {@link DataSource} implementation that answers search queries from
 * per-cluster record files laid out under the configured
 * chukwa.engine.dsDirectory.rootFolder. File names are built from a
 * 5-minute time bucket, e.g. &lt;dataSource&gt;_yyyyMMdd_HH_&lt;m&gt;&lt;0|5&gt;.1.evt.
 */
public class RecordDS implements DataSource {
  private static FileSystem fs = null;
  private static ChukwaConfiguration conf = null;
  private static String rootFolder = null;
  private static DataConfig dataConfig = null;
  static {
    // One-time setup shared by all instances: resolve the root data folder
    // and open the Hadoop file system named by the Chukwa configuration.
    dataConfig = new DataConfig();
    rootFolder = dataConfig.get("chukwa.engine.dsDirectory.rootFolder");
    conf = new ChukwaConfiguration();
    try {
      fs = FileSystem.get(conf);
    } catch (IOException e) {
      // NOTE(review): a failure here leaves fs null and every later search
      // will NPE; consider failing fast instead of only printing the trace.
      e.printStackTrace();
    }
  }
  /**
   * Scans 5-minute bucket files backwards in time from t1 toward t0,
   * accumulating at most ~200 matching records into the supplied result.
   *
   * @param result holder whose records map (timestamp -&gt; records) is filled
   * @param cluster cluster name, used as a path component
   * @param dataSource data source name, used as path component and prefix
   * @param t0 search window start, in milliseconds
   * @param t1 search window end, in milliseconds; scanning starts here
   * @param filter optional substring filter forwarded to the file parser
   * @param token pagination token (unused in this implementation)
   * @return the same result instance that was passed in
   * @throws DataSourceException declared by the interface; per-file errors
   *         are caught and logged rather than propagated
   */
  public SearchResult search(SearchResult result, String cluster,
      String dataSource, long t0, long t1, String filter, Token token)
      throws DataSourceException {
    String filePath = rootFolder + "/" + cluster + "/" + dataSource;
    System.out.println("filePath [" + filePath + "]");
    Calendar calendar = Calendar.getInstance();
    calendar.setTimeInMillis(t1);
    TreeMap<Long, List<Record>> records = result.getRecords();
    int maxCount = 200;
    SimpleDateFormat sdf = new java.text.SimpleDateFormat("_yyyyMMdd_HH_");
    do {
      System.out.println("start Date [" + calendar.getTime() + "]");
      // Build the bucket file name: _yyyyMMdd_HH_ plus the tens digit of the
      // minute, then "0" or "5" depending on which half of the 10-minute
      // span the current minute falls in (e.g. minute 37 -> "..._35.1.evt").
      String fileName = sdf.format(calendar.getTime());
      int minutes = calendar.get(Calendar.MINUTE);
      int dec = minutes / 10;
      fileName += dec;
      int m = minutes - (dec * 10);
      if (m < 5) {
        fileName += "0.1.evt";
      } else {
        fileName += "5.1.evt";
      }
      fileName = filePath + "/" + dataSource + fileName;
      // System.out.println("JB fileName [" +fileName + "]");
      try {
        System.out.println("BEFORE fileName [" + fileName + "]");
        // List<Record> evts =
        // ChukwaFileParser.readData(cluster,dataSource,maxCount, t1, t0,
        // Long.MAX_VALUE, filter, fileName, fs);
        List<Record> evts = ChukwaSequenceFileParser.readData(cluster,
            dataSource, maxCount, t1, t0, Long.MAX_VALUE, filter, fileName, fs,
            conf);
        maxCount = maxCount - evts.size();
        System.out.println("AFTER fileName [" + fileName + "] count="
            + evts.size() + " maxCount=" + maxCount);
        // Group the returned events by timestamp in the result map.
        for (Record evt : evts) {
          System.out.println("AFTER Loop [" + evt.toString() + "]");
          long timestamp = evt.getTime();
          if (records.containsKey(timestamp)) {
            records.get(timestamp).add(evt);
          } else {
            List<Record> list = new LinkedList<Record>();
            list.add(evt);
            records.put(timestamp, list);
          }
        }
      } catch (Exception e) {
        // Best effort: a missing or corrupt bucket file should not abort
        // the whole search.
        e.printStackTrace();
      }
      if (maxCount <= 0) {
        System.out.println("BREAKING LOOP AFTER [" + fileName + "] maxCount="
            + maxCount);
        break;
      }
      // Step one 5-minute bucket further back in time.
      calendar.add(Calendar.MINUTE, -5);
      System.out.println("calendar [" + calendar.getTimeInMillis() + "] ");
      System.out.println("end [" + (t0 - 1000 * 60 * 5) + "] ");
    } while (calendar.getTimeInMillis() > (t0 - 1000 * 60 * 5)); // <= need some
    // code here
    // Need more than this to compute the end
    return result;
  }
  /**
   * @return true — the shared static state is initialized once in the static
   *         block and only read afterwards.
   */
  public boolean isThreadSafe() {
    return true;
  }
}
| 8,427 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/MalformedFileFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
/**
 * Thrown when a data-source file cannot be parsed because its on-disk
 * layout does not match the expected record file format.
 */
public class MalformedFileFormat extends DataSourceException {

  private static final long serialVersionUID = 2180898410952691571L;

  /** Creates an exception with neither detail message nor cause. */
  public MalformedFileFormat() {
    super();
  }

  /** Creates an exception carrying only a detail message. */
  public MalformedFileFormat(String message) {
    super(message);
  }

  /** Creates an exception carrying only an underlying cause. */
  public MalformedFileFormat(Throwable cause) {
    super(cause);
  }

  /** Creates an exception with both a detail message and a cause. */
  public MalformedFileFormat(String message, Throwable cause) {
    super(message, cause);
  }
}
| 8,428 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaDSInternalResult.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
import java.util.List;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.chukwa.extraction.engine.Record;
/**
 * Mutable holder for the scan state a record data source carries between
 * successive read calls: the records gathered so far plus the cursor
 * (file, spill, position, timestamp) at which scanning should resume.
 * Fields are package-private by design so sibling classes in this package
 * can read and update them directly — do not rename them casually.
 */
public class ChukwaDSInternalResult {
  // Records accumulated by the scan so far; null until first populated.
  List<Record> records = null;
  // Day partition currently being scanned; format defined by the callers.
  String day = null;
  // Hour within the day partition.
  int hour = 0;
  // presumably an index into the raw file set — confirm against callers.
  int rawIndex = 0;
  // Spill file number within the current bucket; starts at 1.
  int spill = 1;
  // Byte position at which to resume reading; -1 means "not started".
  long position = -1;
  // Timestamp of the last record seen; -1 means "none yet".
  long currentTs = -1;
  // File currently being read; null until a file has been opened.
  String fileName = null;
  // Key of the last record read; null until a record has been read.
  ChukwaRecordKey key = null;
  /** @return the key of the last record read, or null if none */
  public ChukwaRecordKey getKey() {
    return key;
  }
  /** Records the key of the last record read. */
  protected void setKey(ChukwaRecordKey key) {
    this.key = key;
  }
}
| 8,429 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/ReturnCodeBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
@XmlRootElement
@XmlType(propOrder={"code", "message"})
public class ReturnCodeBean {

  /** Status value reported when an operation did not succeed. */
  public static final int FAIL = 0;

  /** Status value reported when an operation completed normally. */
  public static final int SUCCESS = 1;

  private int code;
  private String message;

  /** No-arg constructor required by the XML binding layer. */
  public ReturnCodeBean() {
  }

  /**
   * Builds a fully populated return code.
   *
   * @param code status code, normally {@link #SUCCESS} or {@link #FAIL}
   * @param message human-readable status description
   */
  public ReturnCodeBean(int code, String message) {
    this.code = code;
    this.message = message;
  }

  /** @return the numeric status code */
  @XmlElement
  public int getCode() {
    return code;
  }

  /** @param code numeric status code to report */
  public void setCode(int code) {
    this.code = code;
  }

  /** @return the human-readable status description */
  @XmlElement
  public String getMessage() {
    return message;
  }

  /** @param message human-readable status description */
  public void setMessage(String message) {
    this.message = message;
  }
}
| 8,430 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/ViewBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.nio.charset.Charset;
import java.text.ParseException;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * Serializable representation of a dashboard view: an ordered set of pages
 * plus ownership and permission metadata. Instances round-trip between the
 * JSON layout stored on disk and the XML form exposed by the REST API.
 */
@XmlRootElement
@XmlType
public class ViewBean {
  // Pages of this view, in display order.
  private PagesBean[] pages;
  // Free-form description; "" (never null) after JSON parsing.
  private String description;
  // User id of the view's owner.
  private String owner;
  // Unique view name.
  private String name;
  // Access level of the view; exact values are defined by the callers.
  private String permissionType;
  private static Log log = LogFactory.getLog(ViewBean.class);
  /** No-arg constructor required by the XML binding layer. */
  public ViewBean() {
  }
  /**
   * Parses a view from its UTF-8 encoded JSON representation.
   *
   * @param buffer UTF-8 bytes of a JSON object with "name", "owner",
   *        "permissionType", "pages" and optional "description" entries
   * @throws ParseException if the buffer is not valid view JSON
   */
  public ViewBean(byte[] buffer) throws ParseException {
    JSONParser parser = new JSONParser();
    try {
      JSONObject json = (JSONObject) parser.parse(new String(buffer, Charset.forName("UTF-8")));
      if(json.containsKey("description")) {
        this.description = (String) json.get("description");
      } else {
        // Older views may have been saved without a description.
        this.description = "";
      }
      this.owner= (String) json.get("owner");
      this.name= (String) json.get("name");
      this.permissionType= (String) json.get("permissionType");
      int size = ((JSONArray) json.get("pages")).size();
      PagesBean[] pages = new PagesBean[size];
      JSONArray pagesArray = (JSONArray) json.get("pages");
      for(int i=0;i<size;i++) {
        pages[i] = new PagesBean((JSONObject) pagesArray.get(i));
      }
      this.pages=pages;
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new ParseException(ExceptionUtil.getStackTrace(e), 0);
    }
  }
  /** @return a defensive copy of the view's pages */
  @XmlElement
  public PagesBean[] getPages() {
    return pages.clone();
  }
  /** @return the number of pages in the view */
  @XmlElement
  public int getPagesCount() {
    return pages.length;
  }
  /** @return the view description ("" when none was stored) */
  @XmlElement
  public String getDescription() {
    return this.description;
  }
  /** @return the owning user's id */
  @XmlElement
  public String getOwner() {
    return this.owner;
  }
  /** @return the unique view name */
  @XmlElement
  public String getName() {
    return this.name;
  }
  /** @return the view's permission level */
  @XmlElement
  public String getPermissionType() {
    return this.permissionType;
  }
  /** Replaces the view's pages with a defensive copy of the given array. */
  public void setPages(PagesBean[] pages) {
    this.pages = (PagesBean[]) pages.clone();
  }
  public void setDescription(String description) {
    this.description = description;
  }
  public void setOwner(String owner) {
    this.owner = owner;
  }
  public void setName(String name) {
    this.name = name;
  }
  public void setPermissionType(String permissionType) {
    this.permissionType = permissionType;
  }
  /** Propagates update() to every page (no-op when pages is null). */
  public void update() {
    if(this.pages!=null) {
      for(PagesBean page : pages) {
        page.update();
      }
    }
  }
  /**
   * Serializes the view (after refreshing it via update()) back to its JSON
   * layout form. NOTE(review): despite the name, this method serializes.
   */
  @SuppressWarnings("unchecked")
  public JSONObject deserialize() {
    update();
    JSONObject view = new JSONObject();
    try {
      view.put("name", this.name);
      view.put("owner", this.owner);
      view.put("permissionType", this.permissionType);
      view.put("description", this.description);
      JSONArray ja = new JSONArray();
      for(int i=0;i<this.pages.length;i++) {
        ja.add(this.pages[i].deserialize());
      }
      view.put("pages", (JSONArray) ja);
    } catch (Exception e){
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return view;
  }
}
| 8,431 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/ClientTraceBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement
public class ClientTraceBean {

  /** Name of the traced action. */
  private String action;
  /** Source endpoint of the traced transfer. */
  private String src;
  /** Destination endpoint of the traced transfer. */
  private String dest;
  /** Number of bytes involved; defaults to zero. */
  private long size = 0L;
  /** Date string associated with the trace entry. */
  private String date;

  /** @return the traced action name */
  @XmlElement
  public String getAction() {
    return this.action;
  }

  /** @param action traced action name */
  public void setAction(String action) {
    this.action = action;
  }

  /** @return the source endpoint */
  @XmlElement
  public String getSrc() {
    return this.src;
  }

  /** @param src source endpoint */
  public void setSrc(String src) {
    this.src = src;
  }

  /** @return the destination endpoint */
  @XmlElement
  public String getDest() {
    return this.dest;
  }

  /** @param dest destination endpoint */
  public void setDest(String dest) {
    this.dest = dest;
  }

  /** @return the byte count of the transfer */
  @XmlElement
  public long getSize() {
    return this.size;
  }

  /** @param size byte count of the transfer */
  public void setSize(long size) {
    this.size = size;
  }

  /** @return the date string of the trace entry */
  @XmlElement
  public String getDate() {
    return this.date;
  }

  /** @param date date string of the trace entry */
  public void setDate(String date) {
    this.date = date;
  }
}
| 8,432 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/UserBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.text.ParseException;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * Serializable representation of a dashboard user: an id, the user's view
 * list and a free-form property bag. Round-trips between JSON storage and
 * the XML form exposed by the REST API.
 */
@XmlRootElement
@XmlType(propOrder={"id", "views", "properties"})
public class UserBean {
  // Unique user id.
  private String id;
  // Views associated with this user; element layout is defined by callers.
  private JSONArray views;
  // Free-form key/value settings for the user.
  private JSONObject properties;
  private static Log log = LogFactory.getLog(UserBean.class);
  /** Creates an empty user with no views and an empty property bag. */
  public UserBean() {
    views = new JSONArray();
    properties = new JSONObject();
  }
  /**
   * Builds a user from its JSON form.
   *
   * @param json object with "id", "views" and optional "properties" entries
   * @throws ParseException if the object cannot be interpreted
   */
  public UserBean(JSONObject json) throws ParseException {
    try {
      id = (String) json.get("id");
      views = (JSONArray) json.get("views");
      if(json.containsKey("properties")) {
        properties = (JSONObject) json.get("properties");
      } else {
        // Older serialized users may predate the properties field.
        properties = new JSONObject();
      }
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new ParseException("Error parsing user object.",0);
    }
  }
  /** @return the unique user id */
  @XmlElement
  public String getId() {
    return id;
  }
  /** @return the user's view list (live reference, not a copy) */
  @XmlElement
  public JSONArray getViews() {
    return views;
  }
  /** @return the property bag rendered as a JSON string */
  @XmlElement
  public String getProperties() {
    return properties.toString();
  }
  /** Replaces the property bag by parsing the given JSON string. */
  public void setProperties(String buffer) {
    try {
      this.properties = (JSONObject) JSONValue.parse(buffer);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
  }
  /** @return the value stored under key, or null when absent */
  public String getPropertyValue(String key) {
    return (String) this.properties.get(key);
  }
  public void setId(String id) {
    this.id=id;
  }
  public void setViews(JSONArray ja) {
    this.views=ja;
  }
  /**
   * Stores a single property value.
   *
   * @param key property name
   * @param value property value
   * @throws ParseException declared for API compatibility; the underlying
   *         put is not expected to fail
   */
  public void setProperty(String key, String value) throws ParseException {
    try {
      this.properties.put(key, value);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new ParseException("Error parsing user object.",0);
    }
  }
  /**
   * Serializes the user back to its JSON form.
   * NOTE(review): despite the name, this method serializes.
   */
  public JSONObject deserialize() {
    JSONObject json = new JSONObject();
    try {
      json.put("id", this.id);
      json.put("views", this.views);
      json.put("properties", this.properties);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return json;
  }
}
| 8,433 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/ParametersBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.text.ParseException;
import java.util.Collection;
import java.util.HashSet;
import javax.xml.bind.annotation.XmlElement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * Describes a single configurable parameter of a dashboard widget: its
 * name, type, current value(s), UI control, label and edit policy.
 * Round-trips between JSON storage and the XML form of the REST API.
 */
public class ParametersBean {
  private static Log log = LogFactory.getLog(ParametersBean.class);
  private String name=null;
  private String type=null;
  // Selected value(s); stored as a set even for single-valued parameters.
  private Collection<String> value=null;
  // Custom control markup, only populated when type is "custom".
  private String control=null;
  private String label=null;
  private String callback=null;
  // 1 = user-editable (default), 0 = read-only.
  private int edit=1;
  private OptionBean[] options=null;

  /** No-arg constructor required by the XML binding layer. */
  public ParametersBean() {
  }

  /**
   * Builds a parameter descriptor from its JSON form.
   *
   * @param json object with "name" and "type" entries plus optional
   *        "value", "label", "control", "callback", "options" and "edit"
   * @throws ParseException if an option element cannot be parsed
   */
  public ParametersBean(JSONObject json) throws ParseException {
    name=(String) json.get("name");
    type=(String) json.get("type");
    if(json.containsKey("value")) {
      // "value" may be serialized either as a JSON array or a single string.
      Collection<String> c = new HashSet<String>();
      if(json.get("value").getClass()==JSONArray.class) {
        JSONArray ja = (JSONArray) json.get("value");
        for(int i = 0; i < ja.size(); i++) {
          c.add((String) ja.get(i));
        }
      } else {
        c.add((String)json.get("value"));
      }
      this.value = c;
    }
    if(json.containsKey("label")) {
      label=(String) json.get("label");
    } else {
      // Fall back to the parameter name when no display label was stored.
      label=(String) json.get("name");
    }
    // Bug/idiom fix: the original compared interned strings with "==".
    // equals() expresses the same test without relying on the intern pool,
    // and no longer throws NullPointerException when "type" is absent.
    if("custom".equals(type)) {
      control=(String) json.get("control");
    }
    if(json.containsKey("callback")) {
      callback=(String) json.get("callback");
    }
    if(json.containsKey("options")) {
      JSONArray aj = (JSONArray) json.get("options");
      options = new OptionBean[aj.size()];
      for(int i=0;i<aj.size();i++) {
        OptionBean o = new OptionBean((JSONObject) aj.get(i));
        options[i]=o;
      }
    }
    if(json.containsKey("edit")) {
      // "edit" has historically been serialized both as a string and as a
      // JSON number (Long); accept either.
      Object rawEdit = json.get("edit");
      if(rawEdit.getClass().equals(String.class)) {
        edit=Integer.parseInt((String)rawEdit);
      } else if(rawEdit.getClass().equals(Long.class)) {
        edit=((Long)rawEdit).intValue();
      }
    }
  }

  /** @return the parameter name */
  @XmlElement
  public String getName() {
    return name;
  }

  /** @return the parameter type (e.g. "custom") */
  @XmlElement
  public String getType() {
    return type;
  }

  /** @return the selected value(s), or null when none were stored */
  @XmlElement
  public Collection<String> getValue() {
    return value;
  }

  /** @return the custom control markup (only set for type "custom") */
  @XmlElement
  public String getControl() {
    return control;
  }

  /** @return the display label */
  @XmlElement
  public String getLabel() {
    return label;
  }

  /**
   * @return a copy of the selectable options.
   *         NOTE(review): when no options exist this returns a
   *         single-element array holding null, not an empty array; kept
   *         as-is for compatibility with existing callers.
   */
  @XmlElement
  public OptionBean[] getOptions() {
    if(options==null) {
      options = new OptionBean[1];
    }
    return options.clone();
  }

  /** @return 1 when the parameter is user-editable, 0 otherwise */
  @XmlElement
  public int getEdit() {
    return edit;
  }

  /** @return the client-side callback hook, or null */
  @XmlElement
  public String getCallback() {
    return callback;
  }

  public void setName(String name) {
    this.name = name;
  }

  public void setType(String type) {
    this.type = type;
  }

  public void setValue(Collection<String> value) {
    this.value = value;
  }

  public void setControl(String control) {
    this.control = control;
  }

  public void setLabel(String label) {
    this.label = label;
  }

  /** Replaces the options with a defensive copy of the given array. */
  public void setOptions(OptionBean[] options) {
    this.options = (OptionBean[]) options.clone();
  }

  public void setEdit(int edit) {
    this.edit = edit;
  }

  public void setCallback(String callback) {
    this.callback = callback;
  }

  /**
   * Serializes this parameter back to its JSON form, omitting optional
   * entries that are unset. NOTE(review): despite the name, this method
   * serializes.
   */
  public JSONObject deserialize() {
    JSONObject json = new JSONObject();
    try {
      json.put("name",this.name);
      json.put("type",this.type);
      if(this.value!=null) {
        JSONArray ja = new JSONArray();
        for(String s : this.value) {
          ja.add(s);
        }
        json.put("value", ja);
      }
      if(control!=null) {
        json.put("control",this.control);
      }
      json.put("label",this.label);
      json.put("edit",this.edit);
      if(this.callback!=null) {
        json.put("callback", callback);
      }
      if(options!=null) {
        JSONArray ja = new JSONArray();
        for(int i=0;i<options.length;i++) {
          ja.add(this.options[i].deserialize());
        }
        json.put("options", ja);
      }
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return json;
  }
}
| 8,434 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/ColumnBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.text.ParseException;
import javax.xml.bind.annotation.XmlElement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * One display column of a dashboard page, holding the widgets stacked in
 * that column. Round-trips between a JSON array of widget objects and the
 * XML form exposed by the REST API.
 */
public class ColumnBean {
  private static Log log = LogFactory.getLog(ColumnBean.class);

  /** Widgets stacked in this column, top to bottom. */
  private WidgetBean[] widgets;

  /** No-arg constructor required by the XML binding layer. */
  public ColumnBean() {
  }

  /**
   * Builds a column from a JSON array of widget objects.
   *
   * @param json array whose elements each describe one widget
   * @throws ParseException if any element cannot be parsed
   */
  public ColumnBean(JSONArray json) throws ParseException {
    try {
      final int count = json.size();
      widgets = new WidgetBean[count];
      for (int idx = 0; idx < count; idx++) {
        widgets[idx] = new WidgetBean((JSONObject) json.get(idx));
      }
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new ParseException(ExceptionUtil.getStackTrace(e), 0);
    }
  }

  /** @return a defensive copy of the widgets in this column */
  @XmlElement
  public WidgetBean[] getWidgets() {
    return widgets.clone();
  }

  /** Replaces the column's widgets with a defensive copy of the array. */
  public void setWidgets(WidgetBean[] ws) {
    widgets = (WidgetBean[]) ws.clone();
  }

  /** Propagates update() to every widget in the column. */
  public void update() {
    for (WidgetBean widget : widgets) {
      widget.update();
    }
  }

  /** Serializes the column back to a JSON array of widget objects. */
  public JSONArray deserialize() {
    JSONArray serialized = new JSONArray();
    for (WidgetBean widget : widgets) {
      serialized.add(widget.deserialize());
    }
    return serialized;
  }
}
| 8,435 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/PagesBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.text.ParseException;
import javax.xml.bind.annotation.XmlElement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * One page of a dashboard view: a title plus a multi-column widget layout
 * and optional per-column width hints. Round-trips between the JSON layout
 * stored on disk and the XML form exposed by the REST API.
 */
public class PagesBean {
  private static Log log = LogFactory.getLog(PagesBean.class);
  // Page title shown in the dashboard.
  private String title;
  // Optional per-column width hints; null when the stored page had none.
  private int[] columnSizes;
  // One ColumnBean per display column, each holding its widgets.
  private ColumnBean[] layout;
  // Column count recorded in the serialized form.
  private int columns;

  /** No-arg constructor required by the XML binding layer. */
  public PagesBean() {
  }

  /**
   * Builds a page from its JSON form.
   *
   * @param json object with "title", "columns", "layout" and optional
   *        "colSize" entries
   * @throws ParseException if the JSON cannot be interpreted
   */
  public PagesBean(JSONObject json) throws ParseException {
    try {
      title = (String) json.get("title");
      columns = ((Long) json.get("columns")).intValue();
      // Renamed the local (was "layout") so it no longer shadows the field.
      JSONArray layoutArray = (JSONArray) json.get("layout");
      this.layout = new ColumnBean[layoutArray.size()];
      for(int i=0;i<layoutArray.size();i++) {
        ColumnBean c = new ColumnBean((JSONArray) layoutArray.get(i));
        this.layout[i]=c;
      }
      if(json.containsKey("colSize")) {
        JSONArray ja = (JSONArray) json.get("colSize");
        columnSizes = new int[ja.size()];
        for(int i=0; i< ja.size(); i++) {
          columnSizes[i] = ((Long) ja.get(i)).intValue();
        }
      }
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new ParseException(ExceptionUtil.getStackTrace(e), 0);
    }
  }

  /** @return the page title */
  @XmlElement
  public String getTitle() {
    return title;
  }

  public void setTitle(String title) {
    this.title = title;
  }

  /** @return a defensive copy of the column layout */
  @XmlElement(name="layout")
  public ColumnBean[] getLayout() {
    return layout.clone();
  }

  /** Replaces the layout with a defensive copy of the given array. */
  public void setLayout(ColumnBean[] layout) {
    this.layout = (ColumnBean[]) layout.clone();
  }

  /** Propagates update() to every column on the page. */
  public void update() {
    for(int i=0;i<layout.length;i++) {
      layout[i].update();
    }
  }

  /**
   * @return a defensive copy of the column size hints. Bug fix: pages
   *         stored without a "colSize" entry leave columnSizes null; this
   *         now returns an empty array instead of throwing
   *         NullPointerException, matching the null guard deserialize()
   *         already applies to the same field.
   */
  @XmlElement(name="colSize")
  public int[] getColSize() {
    if (this.columnSizes == null) {
      return new int[0];
    }
    return this.columnSizes.clone();
  }

  /** Replaces the column size hints with a defensive copy of the array. */
  public void setColSize(int[] size) {
    this.columnSizes = (int[]) size.clone();
  }

  /** @return the column count recorded in the serialized form */
  @XmlElement(name="columns")
  public int getColumns() {
    return this.columns;
  }

  public void setColumns(int columns) {
    this.columns = columns;
  }

  /**
   * Serializes this page back to its JSON form.
   * NOTE(review): "columns" is written as layout.length rather than the
   * columns field — kept as-is to preserve the existing stored format.
   */
  public JSONObject deserialize() {
    JSONObject json = new JSONObject();
    JSONArray ja = new JSONArray();
    JSONArray sizes = new JSONArray();
    try {
      json.put("title", this.title);
      for(int i=0;i<layout.length;i++) {
        ja.add(layout[i].deserialize());
      }
      json.put("layout", (JSONArray) ja);
      json.put("columns", layout.length);
      if(columnSizes!=null) {
        for(int colSize : columnSizes) {
          sizes.add(colSize);
        }
      }
      json.put("colSize", (JSONArray) sizes);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return json;
  }
}
| 8,436 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/ConfigBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.text.ParseException;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONObject;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
@XmlRootElement
@XmlType(propOrder={"key", "value"})
public class ConfigBean {
  private String key = null;
  private String value = null;
  // Bug fix: the logger was created with ViewBean.class (copy/paste error),
  // so messages from this class were attributed to ViewBean.
  private static Log log = LogFactory.getLog(ConfigBean.class);

  /** No-arg constructor required by the XML binding layer. */
  public ConfigBean() {
  }

  /**
   * Builds a configuration entry from its JSON form.
   *
   * @param json object with "key" and "value" entries
   * @throws ParseException if the JSON cannot be interpreted
   */
  public ConfigBean(JSONObject json) throws ParseException {
    try {
      key = (String) json.get("key");
      value = (String) json.get("value");
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      // Bug fix: the message previously said "user object" (copied from
      // UserBean); this class parses configuration entries.
      throw new ParseException("Error parsing config object.",0);
    }
  }

  /** @return the configuration key */
  @XmlElement
  public String getKey() {
    return key;
  }

  /** @return the configuration value */
  @XmlElement
  public String getValue() {
    return value;
  }

  /** @param key configuration key to store */
  public void setKey(String key) {
    this.key = key;
  }

  /** @param value configuration value to store */
  public void setValue(String value) {
    this.value = value;
  }
}
| 8,437 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/CatalogBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@XmlRootElement
@XmlType(propOrder={"type", "id", "label", "children"})
public class CatalogBean {
  private static final Log log = LogFactory.getLog(CatalogBean.class);
  // Child catalog nodes; initialized non-null so callers can iterate safely.
  private List<CatalogBean> children = new ArrayList<CatalogBean>();
  // Rendering hint for this node; defaults to plain text.
  private String type = "text";
  // Human-readable caption of this node.
  private String label = null;
  // Widget id carried by this node (for leaf entries).
  private String id = null;

  /** No-arg constructor required by JAXB. */
  public CatalogBean() {
  }

  @XmlElement
  public String getType() {
    return type;
  }

  @XmlElement
  public String getId() {
    return id;
  }

  @XmlElement
  public String getLabel() {
    return label;
  }

  @XmlElement
  public List<CatalogBean> getChildren() {
    return children;
  }

  public void setType(String type) {
    this.type = type;
  }

  public void setId(String id) {
    this.id = id;
  }

  public void setLabel(String label) {
    this.label = label;
  }

  public void setChildren(List<CatalogBean> children) {
    this.children = children;
  }

  /**
   * Inserts a widget into the catalog tree: walks (creating as needed) the
   * category nodes named by the widget's comma-separated category path, then
   * appends the widget itself as a leaf titled with the widget's title.
   * @param widget widget to file into the catalog
   */
  public void addCatalog(WidgetBean widget) {
    String[] path = widget.getCategories().split(",");
    List<CatalogBean> tracker = this.children;
    if(tracker==null) {
      // setChildren(null) may have cleared the list; recover with an empty one.
      tracker = new ArrayList<CatalogBean>();
    }
    for(int i=0;i<path.length;i++) {
      boolean duplicate = false;
      for(int j=0;j<tracker.size();j++) {
        // Fixed: compare labels with equals() instead of interned-reference ==.
        if(path[i].equals(tracker.get(j).getLabel())) {
          duplicate = true;
          tracker = tracker.get(j).getChildren();
          // Fixed: stop scanning siblings once the category is found; the
          // original "continue" kept iterating and could descend twice.
          break;
        }
      }
      if(!duplicate) {
        tracker = addCategory(tracker, widget.getId(), path[i]);
      }
    }
    addCategory(tracker, widget.getId(), widget.getTitle());
  }

  /**
   * Appends a new catalog node to the given child list.
   * @param tracker list to append to
   * @param id widget id stored on the new node
   * @param label caption of the new node
   * @return the (empty) child list of the newly created node
   */
  public List<CatalogBean> addCategory(List<CatalogBean> tracker, String id, String label) {
    CatalogBean c = new CatalogBean();
    c.setId(id);
    c.setLabel(label);
    tracker.add(c);
    return c.getChildren();
  }
}
| 8,438 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/WidgetBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.text.ParseException;
import java.util.Collection;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.apache.hadoop.chukwa.datastore.WidgetStore;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
@XmlRootElement
@XmlType(propOrder={"id", "title", "version", "categories", "url", "description","refresh","parameters"})
public class WidgetBean {
  private String id;
  private String title;
  private String version;
  // Comma-separated category path, e.g. "a,b,c".
  private String categories;
  private String url;
  private String description;
  // Refresh interval for the widget.
  private int refresh;
  // Parameter definitions; null when the widget declares none.
  private ParametersBean[] parameters;
  private static Log log = LogFactory.getLog(WidgetBean.class);

  /** No-arg constructor required by JAXB. */
  public WidgetBean() {
  }

  /**
   * Builds a WidgetBean from its JSON representation.
   * @param json widget descriptor; "parameters" is optional, "refresh" may be
   *        a string or a number
   * @throws ParseException if mandatory entries cannot be read
   */
  public WidgetBean(JSONObject json) throws ParseException {
    try {
      this.id=(String) json.get("id");
      this.title=(String) json.get("title");
      this.version=(String) json.get("version");
      this.categories=(String) json.get("categories");
      this.url=(String) json.get("url");
      this.description=(String) json.get("description");
      // Fixed: the original compared getClass().getName() against "String"
      // and "Long", but getName() returns fully-qualified names
      // ("java.lang.String"), so neither branch ever matched and refresh
      // silently stayed 0. instanceof handles both cases correctly.
      Object refreshValue = json.get("refresh");
      if(refreshValue instanceof String) {
        this.refresh = Integer.parseInt((String) refreshValue);
      } else if(refreshValue instanceof Number) {
        this.refresh = ((Number) refreshValue).intValue();
      }
      try {
        JSONArray jsonArray = (JSONArray) json.get("parameters");
        int size = jsonArray.size();
        ParametersBean[] list = new ParametersBean[size];
        for(int i=0;i<size;i++) {
          list[i] = new ParametersBean((JSONObject) jsonArray.get(i));
        }
        this.parameters=list;
      } catch (Exception e) {
        // "parameters" is optional; absence is not an error.
        this.parameters=null;
      }
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new ParseException(ExceptionUtil.getStackTrace(e), 0);
    }
  }

  @XmlElement
  public String getId() {
    return id;
  }

  @XmlElement
  public String getTitle() {
    return title;
  }

  @XmlElement
  public String getVersion() {
    return version;
  }

  @XmlElement
  public String getCategories() {
    return categories;
  }

  @XmlElement
  public String getUrl() {
    return url;
  }

  @XmlElement
  public String getDescription() {
    return description;
  }

  @XmlElement
  public int getRefresh() {
    return refresh;
  }

  /** @return a defensive copy of the parameters, or null if none exist */
  @XmlElement
  public ParametersBean[] getParameters() {
    // Fixed: guard against NPE when the widget declares no parameters.
    return parameters == null ? null : parameters.clone();
  }

  public void setId(String id) {
    this.id=id;
  }

  public void setUrl(String url) {
    this.url=url;
  }

  public void setTitle(String title) {
    this.title=title;
  }

  public void setDescription(String description) {
    this.description=description;
  }

  public void setVersion(String version) {
    this.version=version;
  }

  public void setCategories(String categories) {
    this.categories=categories;
  }

  public void setRefresh(int refresh) {
    this.refresh=refresh;
  }

  public void setParameters(ParametersBean[] p) {
    // Fixed: tolerate null instead of throwing NPE on p.clone().
    this.parameters = p == null ? null : p.clone();
  }

  /**
   * Synchronizes this widget with the latest catalog entry of the same id.
   * When the catalog version differs, all descriptor fields are refreshed and
   * user-selected parameter values are carried over by parameter name.
   */
  public void update() {
    try {
      WidgetBean widget = WidgetStore.list().get(this.id);
      if(widget!=null) {
        // Fixed: version/name comparisons use equals() instead of comparing
        // interned references with ==/!=.
        if(!this.version.equals(widget.getVersion())) {
          this.categories=widget.getCategories();
          this.title=widget.getTitle();
          this.version=widget.getVersion();
          this.url=widget.getUrl();
          this.description=widget.getDescription();
          ParametersBean[] plist = widget.getParameters();
          if(this.parameters!=null && plist!=null) {
            // Preserve the user's chosen values across the refresh.
            for(int i=0;i<this.parameters.length;i++) {
              Collection<String> value = this.parameters[i].getValue();
              for(int j=0;j<plist.length;j++) {
                if(plist[j].getName().equals(this.parameters[i].getName())) {
                  plist[j].setValue(value);
                }
              }
            }
          }
          this.parameters=plist;
        }
      } else {
        log.info("Widget "+this.id+" is deprecated.");
      }
    } catch (IllegalAccessException e) {
      log.error("Unable to update widget: "+this.id+" "+ExceptionUtil.getStackTrace(e));
    }
  }

  /**
   * Serializes this widget to JSON. (Despite the name, this method produces
   * JSON rather than consuming it; the name is kept for compatibility.)
   * @return JSON object mirroring all descriptor fields and parameters
   */
  public JSONObject deserialize() {
    JSONObject json = new JSONObject();
    try {
      json.put("id", this.id);
      json.put("title", this.title);
      json.put("description", this.description);
      json.put("version", this.version);
      json.put("categories", this.categories);
      json.put("refresh", this.refresh);
      json.put("url", this.url);
      JSONArray ja = new JSONArray();
      if(this.parameters!=null) {
        for(int i=0;i<this.parameters.length;i++) {
          ja.add(this.parameters[i].deserialize());
        }
      }
      json.put("parameters", ja);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return json;
  }
}
| 8,439 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/CategoryBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@XmlType(propOrder={"type", "label", "children"})
public class CategoryBean {
  private static final Log log = LogFactory.getLog(CategoryBean.class);

  // Caption displayed for this category node.
  private String label = null;
  // Rendering hint for the node; plain text by default.
  private String type = "text";
  // Nested sub-categories; starts empty (never null) so callers can iterate
  // without a guard.
  private List<CategoryBean> children = new ArrayList<CategoryBean>();

  /** No-arg constructor required by JAXB. */
  public CategoryBean() {
  }

  /** @return the rendering type of this node */
  @XmlElement
  public String getType() {
    return type;
  }

  /** @return the caption of this node */
  @XmlElement
  public String getLabel() {
    return label;
  }

  /** @return the list of nested sub-categories */
  @XmlElement
  public List<CategoryBean> getChildren() {
    return children;
  }

  public void setType(String type) {
    this.type = type;
  }

  public void setLabel(String label) {
    this.label = label;
  }

  public void setChildren(List<CategoryBean> children) {
    this.children = children;
  }
}
| 8,440 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/bean/OptionBean.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.bean;
import java.text.ParseException;
import javax.xml.bind.annotation.XmlElement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONObject;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
public class OptionBean {
  private static Log log = LogFactory.getLog(OptionBean.class);

  // Display text shown to the user for this option.
  private String label;
  // Underlying value submitted when the option is selected.
  private String value;

  /** No-arg constructor required by the binding framework. */
  public OptionBean() {
  }

  /**
   * Builds an option from its JSON form containing "label" and "value".
   * @param json source object
   * @throws ParseException if either entry cannot be read
   */
  public OptionBean(JSONObject json) throws ParseException {
    try {
      this.label = (String) json.get("label");
      this.value = (String) json.get("value");
    } catch (Exception e) {
      throw new ParseException(ExceptionUtil.getStackTrace(e), 0);
    }
  }

  /** @return the display text of this option */
  @XmlElement
  public String getLabel() {
    return label;
  }

  /** @return the submitted value of this option */
  @XmlElement
  public String getValue() {
    return value;
  }

  public void setLabel(String label) {
    this.label = label;
  }

  public void setValue(String value) {
    this.value = value;
  }

  /** Intentional no-op; options carry no derived state to refresh. */
  public void update() {
  }

  /**
   * Serializes this option to JSON (the name is historical; it produces
   * rather than consumes JSON).
   * @return JSON object with "label" and "value" entries
   */
  public JSONObject deserialize() {
    JSONObject json = new JSONObject();
    try {
      json.put("label", this.label);
      json.put("value", this.value);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return json;
  }
}
| 8,441 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/resource/ViewContextResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.resource;
import com.sun.jersey.api.json.JSONJAXBContext;
import org.apache.hadoop.chukwa.rest.bean.ViewBean;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.JAXBContext;
@Provider
/**
 * Supplies a JSON-aware JAXB context for {@link ViewBean} so Jersey renders
 * it with mapped JSON notation and stable array handling.
 */
public class ViewContextResolver implements ContextResolver<JAXBContext> {
  private final JAXBContext context;
  private final Set<Class<?>> types;

  // Bean classes this resolver provides a context for.
  protected Class<?>[] classTypes = new Class[] {ViewBean.class};

  // JSON properties that must always render as arrays, even with a single
  // element. (Replaces the double-brace initialization anti-pattern, which
  // created an anonymous HashSet subclass per instance.)
  protected Set<String> jsonArray = new HashSet<String>(Arrays.asList(
      "pages", "layout", "colSize", "widgets", "parameters", "options"));

  public ViewContextResolver() throws Exception {
    // Fixed: raw Map replaced with a parameterized type.
    Map<String, Object> props = new HashMap<String, Object>();
    props.put(JSONJAXBContext.JSON_NOTATION, JSONJAXBContext.JSONNotation.MAPPED);
    props.put(JSONJAXBContext.JSON_ROOT_UNWRAPPING, Boolean.TRUE);
    props.put(JSONJAXBContext.JSON_ARRAYS, jsonArray);
    this.types = new HashSet<Class<?>>(Arrays.asList(classTypes));
    this.context = new JSONJAXBContext(classTypes, props);
  }

  /**
   * @return the shared context for supported types, or null so the runtime
   *         falls back to its default context for anything else
   */
  public JAXBContext getContext(Class<?> objectType) {
    return (types.contains(objectType)) ? context : null;
  }
}
| 8,442 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/resource/WidgetResource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.resource;
import java.util.HashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.rest.bean.CatalogBean;
import org.apache.hadoop.chukwa.rest.bean.ReturnCodeBean;
import org.apache.hadoop.chukwa.rest.bean.WidgetBean;
import org.apache.hadoop.chukwa.datastore.WidgetStore;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
@Path ("/widget1")
public class WidgetResource {
private static Log log = LogFactory.getLog(WidgetResource.class);
@GET
@Path("wid/{wid}")
public WidgetBean getProfile(@PathParam("wid") String wid) {
HashMap<String, WidgetBean> list;
try {
list = WidgetStore.list();
} catch (IllegalAccessException e) {
throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
.entity("Widget does not exist.").build());
}
return list.get(wid);
}
@PUT
@Consumes("application/json")
public ReturnCodeBean saveWidget(WidgetBean widget) {
try {
WidgetStore ws = new WidgetStore();
ws.set(widget);
} catch(Exception e) {
throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
.entity("Widget save failed.").build());
}
return new ReturnCodeBean(ReturnCodeBean.SUCCESS,"Saved");
}
@GET
@Path("catalog")
public CatalogBean getWidgetCatalog() {
CatalogBean result;
try {
result = WidgetStore.getCatalog();
} catch (IllegalAccessException e) {
throw new WebApplicationException(Response.status(Response.Status.INTERNAL_SERVER_ERROR)
.entity("No catalog exists.").build());
}
return result;
}
}
| 8,443 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/resource/UserResource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.resource;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.rest.bean.ReturnCodeBean;
import org.apache.hadoop.chukwa.rest.bean.UserBean;
import org.apache.hadoop.chukwa.datastore.UserStore;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
@Path ("/user")
public class UserResource {
  protected static final Log log = LogFactory.getLog(UserResource.class);

  /**
   * Returns the stored profile for a user id.
   * @param uid user identifier
   * @return the user's profile bean
   */
  @GET
  @Path("uid/{uid}")
  public UserBean getProfile(@PathParam("uid") String uid) {
    try {
      UserStore store = new UserStore(uid);
      return store.get();
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      Response notFound = Response.status(Response.Status.NOT_FOUND)
          .entity("User does not exist.").build();
      throw new WebApplicationException(notFound);
    }
  }

  /**
   * Persists a user profile keyed by the bean's own id.
   * @param user profile to store
   * @return success return code when the save completes
   */
  @PUT
  @Consumes("application/json")
  public ReturnCodeBean setProfile(UserBean user) {
    try {
      new UserStore(user.getId()).set(user);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      Response notFound = Response.status(Response.Status.NOT_FOUND)
          .entity("User does not exist.").build();
      throw new WebApplicationException(notFound);
    }
    return new ReturnCodeBean(ReturnCodeBean.SUCCESS,"Saved.");
  }

  /**
   * Lists all users.
   * @return the user list rendered as a string
   */
  @GET
  @Path("list")
  @Produces("application/javascript")
  public String getUserList() {
    try {
      return UserStore.list().toString();
    } catch (IllegalAccessException e) {
      log.error(ExceptionUtil.getStackTrace(e));
      Response notFound = Response.status(Response.Status.NOT_FOUND)
          .entity("User does not exist.").build();
      throw new WebApplicationException(notFound);
    }
  }
}
| 8,444 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/resource/ClientTrace.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.resource;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.dataloader.SocketDataLoader;
import org.apache.hadoop.chukwa.rest.bean.ClientTraceBean;
/**
 * Client Trace REST API for parsing client trace log file and convert
 * data into consumable format for web browser and web services.
 */
@Path("clienttrace")
public class ClientTrace {
  protected static final Log log = LogFactory.getLog(ClientTrace.class);

  // Shared, lazily-created loader for the "ClientTrace" stream. Accessed only
  // through getLoader(); JAX-RS may instantiate this resource per request, so
  // initialization of the shared static must be guarded.
  private static SocketDataLoader sdl = null;

  // Client trace log file pattern. Pattern is immutable and thread-safe, so
  // it is compiled once instead of per resource instance.
  private static final Pattern pattern =
    Pattern.compile("(.+?) (.+?),(.+?) (.+?) src\\: /?(.+?):(.+?), dest\\: /?(.+?):(.+?), bytes\\: (\\d+), op\\: (.+?), cli(.+?)");

  /**
   * Lazily creates (or restarts) the shared socket data loader.
   * Synchronized to avoid racing double-initialization under concurrent
   * requests, which the original unguarded check allowed.
   */
  private static synchronized SocketDataLoader getLoader() {
    if(sdl==null) {
      sdl = new SocketDataLoader("ClientTrace");
    } else if(!sdl.running()) {
      sdl.start();
    }
    return sdl;
  }

  /**
   * Get a list of the most recent client trace activities.
   * Each matching log line yields a bean carrying date, source, destination,
   * byte count and operation.
   *
   * @return list of client trace objects (empty when no data is available)
   */
  @GET
  public List<ClientTraceBean> getTrace() {
    List<ClientTraceBean> list = new ArrayList<ClientTraceBean>();
    try {
      Collection<Chunk> clist = getLoader().read();
      for(Chunk c : clist) {
        if(c!=null && c.getData()!=null) {
          String data = new String(c.getData(), Charset.forName("UTF-8"));
          String[] entries = data.split("\n");
          for(String entry : entries) {
            Matcher m = pattern.matcher(entry);
            if(m.matches()) {
              ClientTraceBean ctb = new ClientTraceBean();
              // Groups: 1-2 date parts, 5 source host, 7 destination host,
              // 9 byte count, 10 operation.
              ctb.setDate(m.group(1) + " " + m.group(2));
              ctb.setSrc(m.group(5));
              ctb.setDest(m.group(7));
              ctb.setAction(m.group(10));
              ctb.setSize(Long.parseLong(m.group(9)));
              list.add(ctb);
            } else {
              log.error("Unparsable line: "+entry);
            }
          }
        }
      }
    } catch(NoSuchElementException e) {
      log.debug("No data available for client trace.");
    }
    return list;
  }
}
| 8,445 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/resource/ViewResource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.resource;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.rest.bean.ReturnCodeBean;
import org.apache.hadoop.chukwa.rest.bean.ViewBean;
import org.apache.hadoop.chukwa.datastore.ViewStore;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
@Path ("/view")
public class ViewResource {
  protected static final Log log = LogFactory.getLog(ViewResource.class);

  /**
   * Fetches a view by id. Access is granted when the caller owns the view or
   * the view is public.
   */
  @GET
  @Path("vid/{vid}")
  public ViewBean getView(@Context HttpServletRequest request, @PathParam("vid") String vid) {
    ViewStore view;
    ViewBean vr;
    String uid = request.getRemoteUser();
    try {
      view = new ViewStore(uid, vid);
      vr = view.get();
      // Fixed: string comparison via intern()==/!= replaced with equals();
      // relying on interned reference identity is fragile and slow.
      if(!vr.getOwner().equals(request.getRemoteUser())
          && !"public".equals(vr.getPermissionType())) {
        throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN).entity("permission denied.").build());
      }
    } catch (IllegalAccessException e) {
      throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
          .entity("View does not exist.").build());
    }
    return vr;
  }

  /** Saves a view; only the owner may overwrite it. */
  @PUT
  @Consumes("application/json")
  public ReturnCodeBean setView(@Context HttpServletRequest request, ViewBean view) {
    try {
      // Fixed: equals() instead of interned-reference ==.
      if(view.getOwner().equals(request.getRemoteUser())) {
        ViewStore vs = new ViewStore(view.getOwner(), view.getName());
        vs.set(view);
      } else {
        throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN)
            .entity("Permission denied.").build());
      }
    } catch (IllegalAccessException e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
          .entity("View save failed.").build());
    }
    return new ReturnCodeBean(ReturnCodeBean.SUCCESS,"Saved");
  }

  /** Changes a view's permission; only the owner may do so. */
  @POST
  @Path("permission")
  public ReturnCodeBean changeViewPermission(@Context HttpServletRequest request, @FormParam("owner") String owner, @FormParam("view_vid") String vid, @FormParam("permission") String permission) {
    try {
      // Fixed: equals() instead of interned-reference ==.
      if(owner.equals(request.getRemoteUser())) {
        ViewStore vs = new ViewStore(owner, vid);
        ViewBean view = vs.get();
        // Rewrite the view under the new permission: remove, then re-save.
        vs.delete();
        view.setPermissionType(permission);
        vs.set(view);
      } else {
        throw new Exception("Permission denied.");
      }
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new WebApplicationException(Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity("View save failed.").build());
    }
    return new ReturnCodeBean(ReturnCodeBean.SUCCESS,"Saved");
  }

  /**
   * Creates or clones a view for the current user: clones the view named by
   * view_vid when present, otherwise clones the shared "default" view (which
   * starts out private).
   */
  @POST
  public ReturnCodeBean changeView(@Context HttpServletRequest request, @FormParam("owner") String owner, @FormParam("view_vid") String oldVid, @FormParam("view_name") String name) {
    try {
      ViewStore vs;
      if(oldVid!=null) {
        vs = new ViewStore(owner, oldVid);
      } else {
        vs = new ViewStore(null, "default");
      }
      ViewBean view = vs.get();
      view.setOwner(request.getRemoteUser());
      view.setName(name);
      view.setDescription(name);
      if(oldVid==null) {
        // Fresh views start private; cloned views keep their permission.
        view.setPermissionType("private");
      }
      vs = new ViewStore(request.getRemoteUser(), name);
      vs.set(view);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new WebApplicationException(Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity("View save failed.").build());
    }
    return new ReturnCodeBean(ReturnCodeBean.SUCCESS,"Saved");
  }

  /** Deletes a view; only the owner may delete it. */
  @DELETE
  @Path("delete/{owner}/vid/{vid}")
  public ReturnCodeBean deleteView(@Context HttpServletRequest request, @PathParam("owner") String owner, @PathParam("vid") String vid) {
    try {
      // Fixed: equals() instead of interned-reference ==.
      if(owner.equals(request.getRemoteUser())) {
        ViewStore vs = new ViewStore(owner, vid);
        vs.delete();
      } else {
        throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN)
            .entity("View delete failed.").build());
      }
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      throw new WebApplicationException(Response.status(Response.Status.INTERNAL_SERVER_ERROR)
          .entity("View delete failed.").build());
    }
    return new ReturnCodeBean(ReturnCodeBean.SUCCESS,"Deleted");
  }

  /** Lists the current user's views as a JSON string. */
  @GET
  @Path("list")
  public String getUserViewList(@Context HttpServletRequest request) {
    try {
      // Fixed: removed the dead "uid == null" check (uid was always null at
      // that point) and the unused empty-string initializer.
      String uid = request.getRemoteUser();
      return ViewStore.list(uid).toJSONString();
    } catch (Exception e) {
      throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
          .entity("View does not exist.").build());
    }
  }
}
| 8,446 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/rest/resource/WidgetContextResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.rest.resource;
import com.sun.jersey.api.json.JSONJAXBContext;
import org.apache.hadoop.chukwa.rest.bean.CatalogBean;
import org.apache.hadoop.chukwa.rest.bean.CategoryBean;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
import javax.xml.bind.JAXBContext;
@Provider
/**
 * Supplies a JSON-aware JAXB context for {@link CatalogBean} and
 * {@link CategoryBean} so Jersey renders them with mapped JSON notation.
 */
public class WidgetContextResolver implements ContextResolver<JAXBContext> {
  private final JAXBContext context;
  private final Set<Class<?>> types;

  // Bean classes this resolver provides a context for.
  protected Class<?>[] classTypes = new Class[] {CatalogBean.class, CategoryBean.class};

  // JSON properties that must always render as arrays, even with a single
  // element. (Replaces the double-brace initialization anti-pattern, which
  // created an anonymous HashSet subclass per instance.)
  protected Set<String> jsonArray = new HashSet<String>(Arrays.asList("children"));

  public WidgetContextResolver() throws Exception {
    // Fixed: raw Map replaced with a parameterized type.
    Map<String, Object> props = new HashMap<String, Object>();
    props.put(JSONJAXBContext.JSON_NOTATION, JSONJAXBContext.JSONNotation.MAPPED);
    props.put(JSONJAXBContext.JSON_ROOT_UNWRAPPING, Boolean.TRUE);
    props.put(JSONJAXBContext.JSON_ARRAYS, jsonArray);
    this.types = new HashSet<Class<?>>(Arrays.asList(classTypes));
    this.context = new JSONJAXBContext(classTypes, props);
  }

  /**
   * @return the shared context for supported types, or null so the runtime
   *         falls back to its default context for anything else
   */
  public JAXBContext getContext(Class<?> objectType) {
    return (types.contains(objectType)) ? context : null;
  }
}
| 8,447 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/ColorPicker.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
public class ColorPicker {
  // Most recently generated color as a "#rrggbb" (or "#FF....") hex string.
  private String color = "#ff5757";
  // Counter used by getNext() to walk the palette.
  private int index = 0;

  public ColorPicker() {
    color = "#ff5757";
  }

  /**
   * Formats one color channel as exactly two lowercase hex digits.
   * Fixed: the original called Integer.toHexString directly, producing
   * malformed colors — values below 16 yielded a single digit, 256 yielded
   * three digits ("100"), and the yellow branch's negative intermediate
   * (e.g. counter=16 gives 256-320=-64) yielded a sign-extended 8-digit
   * string. Clamping to [0,255] and zero-padding guarantees a valid channel.
   */
  private static String channel(int value) {
    int clamped = Math.min(255, Math.max(0, value));
    return String.format("%02x", clamped);
  }

  /**
   * Returns a color from a fixed palette keyed by the counter's divisibility
   * (checked 6 down to 2, falling back to blue shades).
   * @param counter palette index; larger values darken the chosen hue
   * @return a CSS hex color string such as "#5757ff"
   */
  public String get(int counter) {
    if ((counter % 6) == 0) {
      String purple = channel(256 - (counter % 255));
      color = "#" + purple + "57" + purple;
    } else if ((counter % 5) == 0) {
      String red = channel(256 - (counter % 255));
      color = "#" + red + "5757";
    } else if ((counter % 4) == 0) {
      String yellow = channel(256 - (counter % 255 * 20));
      color = "#FF" + yellow + "00";
    } else if ((counter % 3) == 0) {
      String green = channel(256 - (counter % 255));
      color = "#57" + green + "57";
    } else if ((counter % 2) == 0) {
      String cyan = channel(256 - (counter % 255));
      color = "#57" + cyan + cyan;
    } else {
      String blue = channel(256 - (counter % 255));
      color = "#5757" + blue;
    }
    return this.color;
  }

  /**
   * Advances the internal counter and returns the next palette color.
   * @return the next CSS hex color string
   */
  public String getNext() {
    index++;
    return get(index);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import javax.servlet.http.*;
import org.apache.hadoop.chukwa.util.XssFilter;
import java.util.Calendar;
import java.util.TimeZone;
import java.util.StringTokenizer;
import java.text.SimpleDateFormat;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.mdimension.jchronic.Chronic;
import com.mdimension.jchronic.Options;
import com.mdimension.jchronic.utils.Span;
/**
 * Resolves the reporting time window (start/end) for a HICC page request.
 * The window can come from an explicit start/end pair, a relative "period"
 * keyword such as "last24hr", a natural-language "custom;..." expression
 * parsed by the Chronic library, or values cached in the HTTP session from
 * a previous request.  All formatted output is rendered in the handler's
 * time zone (UTC unless one is supplied).
 */
public class TimeHandler {
  private HttpSession session = null;
  private TimeZone tz = null;
  // Resolved window boundaries, milliseconds since the epoch.
  private long start = 0;
  private long end = 0;
  // Pre-formatted fragments of the window, rendered in this.tz.
  private String startDate = null;
  private String startHour = null;
  private String startMin = null;
  private String endDate = null;
  private String endHour = null;
  private String endMin = null;
  private String startS = null;
  private String endS = null;
  private XssFilter xf = null;
  private static Log log=LogFactory.getLog(TimeHandler.class);

  /** Builds a handler that formats all times in UTC. */
  public TimeHandler(HttpServletRequest request) {
    this.tz = TimeZone.getTimeZone("UTC");
    init(request);
  }

  /** Builds a handler for the named time zone, falling back to UTC when null. */
  public TimeHandler(HttpServletRequest request, String tz) {
    if (tz != null) {
      this.tz = TimeZone.getTimeZone(tz);
    } else {
      this.tz = TimeZone.getTimeZone("UTC");
    }
    init(request);
  }
  /*
   * Using the Chronic library to parse the english string
   * and convert it to a long (millis seconds since 1970)
   */
  public long parseDateShorthand(String d) {
    Calendar now = Calendar.getInstance();
    long l=now.getTimeInMillis();
    d=d.trim();
    // "now" short-circuits to the current time; anything else goes to Chronic.
    if (d.compareToIgnoreCase("now")!=0) {
      Options options= new Options(false);
      options.setCompatibilityMode(true);
      options.setNow(now);
      try {
        Span span = Chronic.parse(d, options);
        // Chronic reports seconds; convert to milliseconds.
        l = span.getBegin()*1000;
      } catch (Exception e) {
        // Unparseable input silently falls back to "now"; only log it.
        log.error("parse error for: "+d);
      }
    }
    /*
     * debug
     */
    /*
    SimpleDateFormat sf =
        new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
    Date ld=new Date(l);
    log.error("Convert:"+d+" to "+Long.toString(l)+" - "+sf.format(ld)+ "-"+ld.getTime());
    */
    return l;
  }

  /**
   * Translates a period keyword ("last1hr" ... "lastyear") or a
   * "custom;&lt;start&gt;;&lt;end&gt;" expression into the [start, end]
   * window, anchored at the current time.  Unrecognized keywords leave the
   * window as [now, now].
   */
  public void parsePeriodValue(String period) {
    Calendar now = Calendar.getInstance();
    this.start = now.getTimeInMillis();
    this.end = now.getTimeInMillis();
    if (period.equals("last1hr")) {
      start = end - (60 * 60 * 1000);
    } else if (period.equals("last2hr")) {
      start = end - (2 * 60 * 60 * 1000);
    } else if (period.equals("last3hr")) {
      start = end - (3 * 60 * 60 * 1000);
    } else if (period.equals("last6hr")) {
      start = end - (6 * 60 * 60 * 1000);
    } else if (period.equals("last12hr")) {
      start = end - (12 * 60 * 60 * 1000);
    } else if (period.equals("last24hr")) {
      start = end - (24 * 60 * 60 * 1000);
    } else if (period.equals("last7d")) {
      start = end - (7 * 24 * 60 * 60 * 1000);
    } else if (period.equals("last30d")) {
      // Long arithmetic: 30 days in ms exceeds Integer.MAX_VALUE.
      start = end - (30L * 24 * 60 * 60 * 1000);
    } else if (period.equals("lastyear")) {
      start = end - (365L * 24 * 60 * 60 * 1000);
    } else if (period.startsWith("custom;")) {
      // default value is between 2 days ago and now
      String startString="2 days ago";
      String endString="now";
      // tokenize the value to "custom;2 days ago;now"
      StringTokenizer st=new StringTokenizer(period,";");
      if (st.hasMoreTokens()) {
        st.nextToken(); // skip the first token
        if (st.hasMoreTokens()) {
          startString=st.nextToken();
          if (st.hasMoreTokens()) {
            endString=st.nextToken();
          }
        }
      }
      // parse the parameter strings
      start = parseDateShorthand(startString);
      end = parseDateShorthand(endString);
    }
  }

  /**
   * Chooses the time window using the first match among: no hints anywhere
   * (default to the last hour and cache it in the session), an explicit
   * "period" request parameter, explicit "start"/"end" request parameters,
   * a session-cached range, or a session-cached period.  The window is then
   * pre-formatted into date/hour/minute strings in this.tz for the getters.
   */
  public void init(HttpServletRequest request) {
    xf = new XssFilter(request);
    Calendar now = Calendar.getInstance();
    this.session = request.getSession();
    if (request.getParameter("time_type") == null
        && session.getAttribute("time_type") == null
        && session.getAttribute("period") == null
        && request.getParameter("period") == null) {
      end = now.getTimeInMillis();
      start = end - 60 * 60 * 1000;
      session.setAttribute("period", "last1hr");
      session.setAttribute("time_type", "last");
      session.setAttribute("start", "" + start);
      session.setAttribute("end", "" + end);
    } else if (request.getParameter("period") != null
        && !"".equals(request.getParameter("period"))) {
      String period = xf.getParameter("period");
      parsePeriodValue(period);
    } else if (request.getParameter("start") != null
        && request.getParameter("end") != null) {
      start = Long.parseLong(request.getParameter("start"));
      end = Long.parseLong(request.getParameter("end"));
    } else if ("range".equals(session.getAttribute("time_type"))) {
      start = Long.parseLong((String) session.getAttribute("start"));
      end = Long.parseLong((String) session.getAttribute("end"));
    } else if ("last".equals(session.getAttribute("time_type"))
        && session.getAttribute("period") != null) {
      String period = (String) session.getAttribute("period");
      parsePeriodValue(period);
    }
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm");
    SimpleDateFormat formatDate = new SimpleDateFormat("yyyy-MM-dd");
    SimpleDateFormat formatHour = new SimpleDateFormat("HH");
    SimpleDateFormat formatMin = new SimpleDateFormat("mm");
    formatter.setTimeZone(this.tz);
    formatDate.setTimeZone(this.tz);
    formatHour.setTimeZone(this.tz);
    formatMin.setTimeZone(this.tz);
    startS = formatter.format(start);
    this.startDate = formatDate.format(start);
    this.startHour = formatHour.format(start);
    this.startMin = formatMin.format(start);
    endS = formatter.format(end);
    this.endDate = formatDate.format(end);
    this.endHour = formatHour.format(end);
    this.endMin = formatMin.format(end);
  }

  /** Formats the window start with the supplied SimpleDateFormat pattern. */
  public String getStartDate(String format) {
    SimpleDateFormat formatter = new SimpleDateFormat(format);
    formatter.setTimeZone(this.tz);
    return formatter.format(this.start);
  }

  /** Window start as "yyyy-MM-dd". */
  public String getStartDate() {
    return this.startDate;
  }

  /** Window start hour, "HH". */
  public String getStartHour() {
    return this.startHour;
  }

  /** Window start minute, "mm". */
  public String getStartMinute() {
    return this.startMin;
  }

  /** Window start as "yyyy-MM-dd HH:mm". */
  public String getStartTimeText() {
    return this.startS;
  }

  /** Window start, milliseconds since the epoch. */
  public long getStartTime() {
    return start;
  }

  /** Formats the window end with the supplied SimpleDateFormat pattern. */
  public String getEndDate(String format) {
    SimpleDateFormat formatter = new SimpleDateFormat(format);
    formatter.setTimeZone(this.tz);
    return formatter.format(this.end);
  }

  /** Window end as "yyyy-MM-dd". */
  public String getEndDate() {
    return this.endDate;
  }

  /** Window end hour, "HH". */
  public String getEndHour() {
    return this.endHour;
  }

  /** Window end minute, "mm". */
  public String getEndMinute() {
    return this.endMin;
  }

  /** Window end as "yyyy-MM-dd HH:mm". */
  public String getEndTimeText() {
    return this.endS;
  }

  /** Window end, milliseconds since the epoch. */
  public long getEndTime() {
    return end;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Enumeration;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.chukwa.util.XssFilter;
public class Iframe extends HttpServlet {
  public static final long serialVersionUID = 100L;

  /** TRACE is rejected to avoid cross-site-tracing exposure. */
  @Override
  protected void doTrace(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
  }

  /**
   * Renders an HTML page containing a single iframe that points back at the
   * requested URL with the "iframe/" path segment stripped.  All request
   * parameters are passed through (XSS-filtered) to the framed URL; the
   * "height" parameter additionally controls the iframe's height attribute.
   */
  public void doGet(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    String height = "100%";
    XssFilter xf = new XssFilter(request);
    // Box id defaults to "0" when not supplied by the caller.
    String id = (xf.getParameter("boxId") != null) ? xf.getParameter("boxId") : "0";
    // Bug fix: the original declared "chartset" instead of "charset",
    // so browsers ignored the encoding directive entirely.
    response.setContentType("text/html; charset=UTF-8");
    response.setHeader("boxId", xf.getParameter("boxId"));
    PrintWriter out = response.getWriter();
    // Rebuild the target URL: drop the "iframe/" segment and, when possible,
    // keep it site-relative (starting at "/hicc/").
    String requestURL = request.getRequestURL().toString().replaceFirst("iframe/", "");
    if (requestURL.indexOf("/hicc/") != -1) {
      requestURL = requestURL.substring(requestURL.indexOf("/hicc/"));
    }
    StringBuilder source = new StringBuilder();
    source.append(requestURL);
    source.append("?");
    Enumeration<?> names = request.getParameterNames();
    while (names.hasMoreElements()) {
      String key = xf.filter((String) names.nextElement());
      String[] values = xf.getParameterValues(key);
      if (values != null) {
        for (String value : values) {
          source.append(key).append("=").append(value).append("&");
        }
        // Bug fix: the original compared interned toLowerCase() results with
        // ==, which is locale-sensitive (e.g. Turkish dotless i) and fragile;
        // equalsIgnoreCase is the safe equivalent.
        if ("height".equalsIgnoreCase(key)) {
          height = xf.getParameter(key);
        }
      }
    }
    StringBuilder output = new StringBuilder();
    output.append("<html><body><iframe id=\"iframe").append(id)
        .append("\" name=\"iframe").append(id)
        .append("\" src=\"").append(source)
        .append("\" width=\"100%\" height=\"").append(height)
        .append("\" frameborder=\"0\" style=\"overflow: hidden\"></iframe>");
    out.println(output.toString());
  }

  /** POST is handled identically to GET. */
  public void doPost(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    doGet(request, response);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.mortbay.jetty.Server;
import org.mortbay.xml.XmlConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Embedded Jetty server that hosts the HICC web UI.  On startup it seeds an
 * HDFS data directory (widgets, views, users) from resources bundled on the
 * classpath, then boots Jetty from the /WEB-INF/jetty.xml configuration.
 */
public class HiccWebServer {
  private static Log log = LogFactory.getLog(HiccWebServer.class);
  // Jetty server definition bundled with the webapp classpath.
  private static URL serverConf = HiccWebServer.class.getResource("/WEB-INF/jetty.xml");
  private Server server = null;
  // HDFS locations, derived in start() from fs.defaultFS + chukwa.data.dir.
  private String chukwaHdfs;
  private String hiccData;
  private static HiccWebServer instance = null;
  private static final Configuration config = new Configuration();
  protected static final ChukwaConfiguration chukwaConf = new ChukwaConfiguration();

  protected HiccWebServer() {
  }

  /**
   * Returns the process-wide instance, creating it on first call.
   * NOTE(review): not synchronized -- concurrent first calls could construct
   * two instances; confirm startup is single-threaded.  Also the error text
   * mentions "jetty-web.xml" while the resource looked up is "jetty.xml".
   */
  public static HiccWebServer getInstance() {
    if(instance==null) {
      instance = new HiccWebServer();
    }
    if(serverConf==null) {
      log.error("Unable to locate jetty-web.xml.");
      throw new RuntimeException("Unable to locate jetty-web.xml.");
    }
    return instance;
  }

  /**
   * Derives the HDFS data locations, seeds the default HICC data if absent,
   * and starts Jetty.  Any failure is surfaced as a RuntimeException so the
   * launcher aborts rather than run half-initialized.
   */
  public void start() {
    try {
      chukwaHdfs = config.get("fs.defaultFS")+File.separator+chukwaConf.get("chukwa.data.dir");
      hiccData = chukwaHdfs+File.separator+"hicc";
      setupDefaultData();
      run();
    } catch(Exception e) {
      log.error("HDFS unavailable, check configuration in chukwa-env.sh.");
      throw new RuntimeException("Bail out!");
    }
  }

  /** Hadoop configuration shared by the server and its data bootstrap. */
  public static Configuration getConfig() {
    return config;
  }

  /**
   * Lists classpath resources that live in the immediate subtree of the
   * given path.  Only the JAR protocol is supported; any other protocol
   * results in UnsupportedOperationException.
   * NOTE(review): the null fallback below repeats the identical lookup, so a
   * missing resource would still NPE at the getProtocol() call -- verify the
   * resource is always packaged.
   */
  public List<String> getResourceListing(String path) throws URISyntaxException, IOException {
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    URL dirURL = contextClassLoader.getResource(path);
    if (dirURL == null) {
      dirURL = contextClassLoader.getResource(path);
    }
    if (dirURL.getProtocol().equals("jar")) {
      /* A JAR path */
      String jarPath = dirURL.getPath().substring(5, dirURL.getPath().indexOf("!")); //strip out only the JAR file
      JarFile jar = new JarFile(jarPath);
      Enumeration<JarEntry> entries = jar.entries(); //gives ALL entries in jar
      List<String> result = new ArrayList<String>(); //avoid duplicates in case it is a subdirectory
      while(entries.hasMoreElements()) {
        String name = entries.nextElement().getName();
        if (name.startsWith(path)) { //filter according to the path
          String entry = name.substring(path.length());
          int checkSubdir = entry.indexOf("/");
          if (checkSubdir == 0 && entry.length()>1) {
            // if it is a subdirectory, we just return the directory name
            result.add(name);
          }
        }
      }
      jar.close();
      return result;
    }
    throw new UnsupportedOperationException("Cannot list files for URL "+dirURL);
  }

  /**
   * Copies each named classpath resource into HDFS beneath the given path,
   * reading as UTF-8 text line by line.  Per-file write errors are logged
   * and skipped so one bad resource does not abort the bootstrap.
   * NOTE(review): resource names use '/' separators, so the File.separator
   * indexing assumes a Unix-style separator -- confirm behavior on Windows.
   */
  public void populateDir(List<String> files, Path path) {
    try {
      FileSystem fs = FileSystem.get(config);
      ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
      for(String source : files) {
        String name = source.substring(source.indexOf(File.separator));
        Path dest = new Path(path.toString()+File.separator+name);
        InputStream is = contextClassLoader.getResourceAsStream(source);
        StringBuilder sb = new StringBuilder();
        String line = null;
        try {
          BufferedReader reader = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")));
          while ((line = reader.readLine()) != null) {
            sb.append(line + "\n");
          }
          FSDataOutputStream out = fs.create(dest);
          out.write(sb.toString().getBytes(Charset.forName("UTF-8")));
          out.close();
          reader.close();
        } catch(IOException e) {
          log.error("Error writing file: "+dest.toString());
        }
      }
    } catch(IOException e) {
      log.error("HDFS unavailable, check configuration in chukwa-env.sh.");
    }
  }

  /**
   * One-time HDFS bootstrap: when the hicc data directory does not yet
   * exist, creates the chukwa/hicc directory tree (widgets, views, users,
   * public) and populates widgets and public views from bundled resources.
   * Errors are logged and swallowed; the server continues to start.
   */
  public void setupDefaultData() {
    Path hiccPath = new Path(hiccData);
    try {
      FileSystem fs = FileSystem.get(config);
      if(!fs.exists(hiccPath)) {
        log.info("Initializing HICC Datastore.");
        // Create chukwa directory
        if(!fs.exists(new Path(chukwaHdfs))) {
          fs.mkdirs(new Path(chukwaHdfs));
        }
        // Create hicc directory
        fs.mkdirs(hiccPath);
        // Populate widgets repository
        StringBuffer hiccWidgets = new StringBuffer();
        hiccWidgets.append(hiccData);
        hiccWidgets.append(File.separator);
        hiccWidgets.append("widgets");
        Path widgetsPath = new Path(hiccWidgets.toString());
        fs.mkdirs(widgetsPath);
        List<String> widgets = getResourceListing("descriptors");
        populateDir(widgets, widgetsPath);
        // Create views directory
        StringBuffer hiccViews = new StringBuffer();
        hiccViews.append(hiccData);
        hiccViews.append(File.separator);
        hiccViews.append("views");
        fs.mkdirs(new Path(hiccViews.toString()));
        // Create users repository
        StringBuffer hiccUsers = new StringBuffer();
        hiccUsers.append(hiccViews);
        hiccUsers.append(File.separator);
        hiccUsers.append("users");
        fs.mkdirs(new Path(hiccUsers.toString()));
        // Populate public repository
        StringBuffer hiccPublic = new StringBuffer();
        hiccPublic.append(hiccViews);
        hiccPublic.append(File.separator);
        hiccPublic.append("public");
        Path viewsPath = new Path(hiccPublic.toString());
        fs.mkdirs(viewsPath);
        List<String> views = getResourceListing("views");
        populateDir(views, viewsPath);
        log.info("HICC Datastore initialization completed.");
      }
    } catch (IOException ex) {
      log.error(ExceptionUtil.getStackTrace(ex));
    } catch (URISyntaxException ex) {
      log.error(ExceptionUtil.getStackTrace(ex));
    }
  }

  /** Configures a fresh Jetty server from jetty.xml and starts it. */
  public void run() {
    server = new Server();
    XmlConfiguration configuration;
    try {
      configuration = new XmlConfiguration(serverConf);
      configuration.configure(server);
      server.start();
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
  }

  /** Stops the embedded Jetty server; errors are logged, not rethrown. */
  public void shutdown() {
    try {
      server.stop();
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
  }

  /**
   * CLI entry point: starts the singleton server.
   * NOTE(review): stdout/stderr are closed after startup, presumably to
   * detach from the launching console -- confirm nothing logs to them later.
   */
  public static void main(String[] args) {
    HiccWebServer hicc = HiccWebServer.getInstance();
    hicc.start();
    System.out.close();
    System.err.close();
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.util.Calendar;
import java.util.TimeZone;
import java.text.SimpleDateFormat;
import java.util.HashMap;
/**
 * Resolves a reporting time window (start/end) from a parameter map, for use
 * outside a servlet context.  The window can be an explicit start/end pair
 * (epoch milliseconds as strings) or a relative period keyword such as
 * "last24hr"; formatted output is rendered in the handler's time zone
 * (UTC unless one is supplied).
 */
public class OfflineTimeHandler {
  private TimeZone tz = null;
  // Resolved window boundaries, milliseconds since the epoch.
  private long start = 0;
  private long end = 0;
  // Pre-formatted fragments of the window, rendered in this.tz.
  private String startDate = null;
  private String startHour = null;
  private String startMin = null;
  private String endDate = null;
  private String endHour = null;
  private String endMin = null;
  private String startS = null;
  private String endS = null;

  /** Builds a handler that formats all times in UTC. */
  public OfflineTimeHandler(HashMap<String, String> map) {
    this.tz = TimeZone.getTimeZone("UTC");
    init(map);
  }

  /** Builds a handler for the named time zone, falling back to UTC when null. */
  public OfflineTimeHandler(HashMap<String, String> map, String tz) {
    if (tz != null) {
      this.tz = TimeZone.getTimeZone(tz);
    } else {
      this.tz = TimeZone.getTimeZone("UTC");
    }
    init(map);
  }

  /**
   * Maps a period keyword ("last1hr" ... "last30d") onto a [start, end]
   * window ending at {@code now}.  Unrecognized keywords leave the window
   * as [now, now].  Replaces two previously duplicated copies of this table,
   * one of which computed "last30d" with int arithmetic and overflowed
   * (30 * 24 * 60 * 60 * 1000 &gt; Integer.MAX_VALUE), yielding a start time
   * in the future.
   */
  private void parsePeriod(String period, Calendar now) {
    this.start = now.getTimeInMillis();
    this.end = now.getTimeInMillis();
    if (period.equals("last1hr")) {
      start = end - (60 * 60 * 1000);
    } else if (period.equals("last2hr")) {
      start = end - (2 * 60 * 60 * 1000);
    } else if (period.equals("last3hr")) {
      start = end - (3 * 60 * 60 * 1000);
    } else if (period.equals("last6hr")) {
      start = end - (6 * 60 * 60 * 1000);
    } else if (period.equals("last12hr")) {
      start = end - (12 * 60 * 60 * 1000);
    } else if (period.equals("last24hr")) {
      start = end - (24 * 60 * 60 * 1000);
    } else if (period.equals("last7d")) {
      start = end - (7 * 24 * 60 * 60 * 1000);
    } else if (period.equals("last30d")) {
      // Long arithmetic: 30 days in ms exceeds Integer.MAX_VALUE.
      start = end - (30L * 24 * 60 * 60 * 1000);
    }
  }

  /**
   * Chooses the time window using the first match among: no hints at all
   * (default to the last hour), an explicit "period" entry, explicit
   * "start"/"end" entries, a "range" time_type, or a "last" time_type with
   * a cached period.  The window is then pre-formatted into date/hour/minute
   * strings in this.tz for the getters.
   */
  public void init(HashMap<String, String> map) {
    Calendar now = Calendar.getInstance();
    if (map == null ||
        (map.get("time_type") == null && map.get("period") == null)) {
      // Default window: the last hour, ending now.
      end = now.getTimeInMillis();
      start = end - 60 * 60 * 1000;
    } else if (map.get("period") != null
        && !map.get("period").equals("")) {
      parsePeriod(map.get("period"), now);
    } else if (map.get("start") != null
        && map.get("end") != null) {
      start = Long.parseLong(map.get("start"));
      end = Long.parseLong(map.get("end"));
    } else if (map.get("time_type").equals("range")) {
      start = Long.parseLong(map.get("start"));
      end = Long.parseLong(map.get("end"));
    } else if (map.get("time_type").equals("last")
        && map.get("period") != null) {
      parsePeriod(map.get("period"), now);
    }
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm");
    SimpleDateFormat formatDate = new SimpleDateFormat("yyyy-MM-dd");
    SimpleDateFormat formatHour = new SimpleDateFormat("HH");
    SimpleDateFormat formatMin = new SimpleDateFormat("mm");
    formatter.setTimeZone(this.tz);
    formatDate.setTimeZone(this.tz);
    formatHour.setTimeZone(this.tz);
    formatMin.setTimeZone(this.tz);
    startS = formatter.format(start);
    this.startDate = formatDate.format(start);
    this.startHour = formatHour.format(start);
    this.startMin = formatMin.format(start);
    endS = formatter.format(end);
    this.endDate = formatDate.format(end);
    this.endHour = formatHour.format(end);
    this.endMin = formatMin.format(end);
  }

  /** Formats the window start with the supplied SimpleDateFormat pattern. */
  public String getStartDate(String format) {
    SimpleDateFormat formatter = new SimpleDateFormat(format);
    formatter.setTimeZone(this.tz);
    return formatter.format(this.start);
  }

  /** Window start as "yyyy-MM-dd". */
  public String getStartDate() {
    return this.startDate;
  }

  /** Window start hour, "HH". */
  public String getStartHour() {
    return this.startHour;
  }

  /** Window start minute, "mm". */
  public String getStartMinute() {
    return this.startMin;
  }

  /** Window start as "yyyy-MM-dd HH:mm". */
  public String getStartTimeText() {
    return this.startS;
  }

  /** Window start, milliseconds since the epoch. */
  public long getStartTime() {
    return start;
  }

  /** Formats the window end with the supplied SimpleDateFormat pattern. */
  public String getEndDate(String format) {
    SimpleDateFormat formatter = new SimpleDateFormat(format);
    formatter.setTimeZone(this.tz);
    return formatter.format(this.end);
  }

  /** Window end as "yyyy-MM-dd". */
  public String getEndDate() {
    return this.endDate;
  }

  /** Window end hour, "HH". */
  public String getEndHour() {
    return this.endHour;
  }

  /** Window end minute, "mm". */
  public String getEndMinute() {
    return this.endMin;
  }

  /** Window end as "yyyy-MM-dd HH:mm". */
  public String getEndTimeText() {
    return this.endS;
  }

  /** Window end, milliseconds since the epoch. */
  public long getEndTime() {
    return end;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
public class ClusterConfig {
  private Set<String> clusterMap = null;

  /**
   * Reads the whole file as UTF-8 text, re-appending the platform line
   * separator after each line.  On an I/O error the stack trace goes to
   * stderr and whatever was read so far (possibly nothing) is returned.
   */
  static public String getContents(File aFile) {
    // ...checks on aFile are elided
    StringBuffer contents = new StringBuffer();
    try (BufferedReader input = new BufferedReader(new InputStreamReader(
        new FileInputStream(aFile.getAbsolutePath()), Charset.forName("UTF-8")))) {
      /*
       * readLine is a bit quirky : it returns the content of a line MINUS the
       * newline. it returns null only for the END of the stream. it returns
       * an empty String if two newlines appear in a row.
       */
      String line;
      while ((line = input.readLine()) != null) {
        contents.append(line);
        contents.append(System.getProperty("line.separator"));
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }

  /** Loads the cluster names seen in HBase during the trailing hour. */
  public ClusterConfig() {
    long end = System.currentTimeMillis();
    long start = end - 3600000L;
    if (clusterMap == null) {
      clusterMap = ChukwaHBaseStore.getClusterNames(start, end);
    }
  }

  /** Iterates over the discovered cluster names. */
  public Iterator<String> getClusters() {
    return clusterMap.iterator();
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.net.*;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.io.*;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.apache.log4j.Logger;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
public class JSONLoader {
  public JSONArray jsonData;
  private static Logger log = Logger.getLogger(JSONLoader.class);

  /**
   * Downloads the resource at the given URL and returns its body as UTF-8
   * text, with the platform line separator re-appended after each line.
   * I/O failures print a stack trace to stderr and the partial (possibly
   * empty) result is returned.
   */
  static public String getContents(String source) {
    StringBuffer contents = new StringBuffer();
    try {
      URL target = new URL(source);
      BufferedReader in = new BufferedReader(
          new InputStreamReader(target.openStream(), Charset.forName("UTF-8")));
      String inputLine;
      while ((inputLine = in.readLine()) != null) {
        contents.append(inputLine);
        contents.append(System.getProperty("line.separator"));
      }
      in.close();
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }

  /** Fetches the JSON document at {@code source} and keeps its "rows" array. */
  public JSONLoader(String source) {
    String buffer = getContents(source);
    try {
      JSONObject rows = (JSONObject) JSONValue.parse(buffer);
      jsonData = (JSONArray) JSONValue.parse(rows.get("rows").toString());
    } catch (Exception e) {
      log.debug(ExceptionUtil.getStackTrace(e));
    }
  }

  /** Extracts field {@code key} of row {@code i} as text, or null on any error. */
  private String field(int i, String key) {
    try {
      return ((JSONObject) jsonData.get(i)).get(key).toString();
    } catch (Exception e) {
      log.debug(ExceptionUtil.getStackTrace(e));
      return null;
    }
  }

  public String getTS(int i) {
    return field(i, "ts");
  }

  public String getTags(int i) {
    return field(i, "tags");
  }

  public String getValue(int i) {
    return field(i, "value");
  }

  /** Number of rows loaded; NPEs if construction failed to parse any rows. */
  public int length() {
    return jsonData.size();
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
/** Empty placeholder; no HICC-wide configuration constants are defined yet. */
public class Config {
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.SimpleTagSupport;
import java.io.IOException;
import java.util.*;
/**
 * JSP SimpleTag that iterates over every saved HICC view, exposing the
 * view's key, owner, description, the list of users with permissions, and
 * per-user read/write flags as JSP-context attributes, invoking the tag
 * body once per view.
 */
public class ViewsTag extends SimpleTagSupport {
  Views views = new Views();
  public void doTag() throws JspException, IOException {
    for (int i = 0; i < views.length(); i++) {
      int j = 0;
      getJspContext().setAttribute("key", views.getKey(i));
      Iterator permission = views.getPermission(i);
      // NOTE(review): fixed-size array assumes at most 100 permission
      // entries per view; more would throw ArrayIndexOutOfBoundsException
      // below, and unused slots are exposed as nulls -- confirm the bound.
      String[] authUsers = new String[100];
      for (Iterator perm = permission; perm.hasNext();) {
        String who = perm.next().toString();
        authUsers[j] = who;
        // Per-user flags are published as "permission.<user>.read"/".write".
        getJspContext().setAttribute( "permission."+who+".read",
            views.getReadPermission(i,who) );
        getJspContext().setAttribute( "permission."+who+".write",
            views.getWritePermission(i,who) );
        j = j + 1;
      }
      getJspContext().setAttribute( "permission", authUsers );
      getJspContext().setAttribute("owner", views.getOwner(i));
      getJspContext().setAttribute("description", views.getDescription(i));
      getJspBody().invoke(null);
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.awt.Graphics;
import java.awt.geom.AffineTransform;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.imageio.ImageIO;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * Slices a large image into power-of-two pyramid tiles (256x256 PNGs)
 * for zoomable display in the HICC sandbox webapp. Tiles are written as
 * {@code <prefix>-<level>-<col>-<row>.png} under the sandbox directory.
 */
public class ImageSlicer {
  // Source image as loaded from disk; stays null if the read fails.
  private BufferedImage src = null;
  private Log log = LogFactory.getLog(ImageSlicer.class);
  // Output directory for generated tiles under the HICC webapp sandbox.
  private String sandbox = System.getenv("CHUKWA_HOME")+File.separator+"webapps"+File.separator+"sandbox"+File.separator;
  // Deepest zoom level produced so far; reported back by process().
  private int maxLevel = 0;
  public ImageSlicer() {
  }
  /*
   * Prepare a large image for tiling.
   *
   * Load an image from a file. Resize the image so that it is square,
   * with dimensions that are an even power of two in length (e.g. 512,
   * 1024, 2048, ...). Then, return it.
   *
   * NOTE(review): if the file cannot be read, only an error is logged
   * and src remains null, so src.getWidth() below would throw an NPE —
   * confirm callers always pass an existing, readable image file.
   */
  public BufferedImage prepare(String filename) {
    try {
      src = ImageIO.read(new File(filename));
    } catch (IOException e) {
      log.error("Image file does not exist:"+filename+", can not render image.");
    }
    // Grow a 1x1 box by doubling until it covers the source dimensions,
    // yielding the smallest enclosing power-of-two square.
    XYData fullSize = new XYData(1, 1);
    while(fullSize.getX()<src.getWidth() || fullSize.getY()<src.getHeight()) {
      fullSize.set(fullSize.getX()*2, fullSize.getY()*2);
    }
    float scaleX = (float)fullSize.getX()/src.getWidth();
    float scaleY = (float)fullSize.getY()/src.getHeight();
    log.info("Image size: ("+src.getWidth()+","+src.getHeight()+")");
    log.info("Scale size: ("+scaleX+","+scaleY+")");
    AffineTransform at =
      AffineTransform.getScaleInstance(scaleX,scaleY);
    // AffineTransform.getScaleInstance((fullSize.getX()-src.getWidth())/2,(fullSize.getY()-src.getHeight())/2);
    AffineTransformOp op = new AffineTransformOp(at, AffineTransformOp.TYPE_BILINEAR);
    BufferedImage dest = op.filter(src, null);
    return dest;
  }
  /*
   * Extract a single tile from a larger image.
   *
   * Given an image, a zoom level (int), a quadrant (column, row tuple;
   * ints), and an output size, crop and size a portion of the larger
   * image. If the given zoom level would result in scaling the image up,
   * throw an error - no need to create information where none exists.
   *
   */
  public BufferedImage tile(BufferedImage image, int level, XYData quadrant, XYData size, boolean efficient) throws Exception {
    // Each level doubles the zoom factor.
    double scale = Math.pow(2, level);
    if(efficient) {
      /* efficient: crop out the area of interest first, then scale and copy it */
      // inverSize: how many output-tile widths/heights fit into the
      // source at this zoom level (integer truncation).
      XYData inverSize = new XYData((int)(image.getWidth(null)/(size.getX()*scale)),
          (int)(image.getHeight(null)/(size.getY()*scale)));
      XYData topLeft = new XYData(quadrant.getX()*size.getX()*inverSize.getX(),
          quadrant.getY()*size.getY()*inverSize.getY());
      XYData newSize = new XYData((size.getX()*inverSize.getX()),
          (size.getY()*inverSize.getY()));
      if(inverSize.getX()<1.0 || inverSize.getY() < 1.0) {
        throw new Exception("Requested zoom level ("+level+") is too high.");
      }
      image = image.getSubimage(topLeft.getX(), topLeft.getY(), newSize.getX(), newSize.getY());
      BufferedImage zoomed = new BufferedImage(size.getX(), size.getY(), BufferedImage.TYPE_INT_RGB);
      zoomed.getGraphics().drawImage(image, 0, 0, size.getX(), size.getY(), null);
      if(level>maxLevel) {
        maxLevel = level;
      }
      return zoomed;
    } else {
      /* inefficient: copy the whole image, scale it and then crop out the area of interest */
      XYData newSize = new XYData((int)(size.getX()*scale), (int)(size.getY()*scale));
      XYData topLeft = new XYData(quadrant.getX()*size.getX(), quadrant.getY()*size.getY());
      if(newSize.getX() > image.getWidth(null) || newSize.getY() > image.getHeight(null)) {
        throw new Exception("Requested zoom level ("+level+") is too high.");
      }
      // NOTE(review): tx.scale(...) is called AFTER the
      // AffineTransformOp is constructed; the op takes its own copy of
      // the transform at construction time, so this scale likely never
      // takes effect — confirm before relying on this code path.
      AffineTransform tx = new AffineTransform();
      AffineTransformOp op = new AffineTransformOp(tx, AffineTransformOp.TYPE_BILINEAR);
      tx.scale(scale, scale);
      image = op.filter(image, null);
      BufferedImage zoomed = image.getSubimage(topLeft.getX(), topLeft.getY(), newSize.getX(), newSize.getY());
      if(level>maxLevel) {
        maxLevel = level;
      }
      return zoomed;
    }
  }
  /*
   * Recursively subdivide a large image into small tiles.
   *
   * Given an image, a zoom level (int), a quadrant (column, row tuple;
   * ints), and an output size, cut the image into even quarters and
   * recursively subdivide each, then generate a combined tile from the
   * resulting subdivisions. If further subdivision would result in
   * scaling the image up, use tile() to turn the image itself into a
   * tile.
   *
   * NOTE(review): if tile() throws inside the base case, the error is
   * logged and execution falls through to the recursive case anyway,
   * which recurses at an even deeper level — confirm this is intended.
   */
  public BufferedImage subdivide(BufferedImage image, int level, XYData quadrant, XYData size, String prefix) {
    // Base case: the image fits within one tile at this zoom level.
    if(image.getWidth()<=size.getX()*Math.pow(2, level)) {
      try {
        BufferedImage outputImage = tile(image, level, quadrant, size, true);
        write(outputImage, level, quadrant, prefix);
        return outputImage;
      } catch (Exception e) {
        log.error(ExceptionUtil.getStackTrace(e));
      }
    }
    // Recursive case: render the four child quadrants at level+1 into a
    // double-size canvas, then downsample it to one tile for this level.
    BufferedImage zoomed = new BufferedImage(size.getX()*2, size.getY()*2, BufferedImage.TYPE_INT_RGB);
    Graphics g = zoomed.getGraphics();
    XYData newQuadrant = new XYData(quadrant.getX() * 2 + 0, quadrant.getY() * 2 + 0);
    g.drawImage(subdivide(image, level+1, newQuadrant, size, prefix), 0, 0, null);
    newQuadrant = new XYData(quadrant.getX()*2 + 0, quadrant.getY()*2 + 1);
    g.drawImage(subdivide(image, level+1, newQuadrant, size, prefix), 0, size.getY(), null);
    newQuadrant = new XYData(quadrant.getX()*2 + 1, quadrant.getY()*2 + 0);
    g.drawImage(subdivide(image, level+1, newQuadrant, size, prefix), size.getX(), 0, null);
    newQuadrant = new XYData(quadrant.getX()*2 + 1, quadrant.getY()*2 + 1);
    g.drawImage(subdivide(image, level+1, newQuadrant, size, prefix), size.getX(), size.getY(), null);
    BufferedImage outputImage = new BufferedImage(size.getX(), size.getY(), BufferedImage.TYPE_INT_RGB);
    outputImage.getGraphics().drawImage(zoomed, 0, 0, size.getX(), size.getY(), null);
    write(outputImage, level, quadrant, prefix);
    return outputImage;
  }
  /*
   * Write image file.
   *
   * Writes the tile as <sandbox>/<prefix>-<level>-<col>-<row>.png.
   * NOTE(review): if ImageIO.write throws, fos is never closed (no
   * finally block) — the descriptor leaks on failure.
   */
  public void write(BufferedImage image, int level, XYData quadrant, String prefix) {
    StringBuilder outputFile = new StringBuilder();
    outputFile.append(sandbox);
    outputFile.append(File.separator);
    outputFile.append(prefix);
    outputFile.append("-");
    outputFile.append(level);
    outputFile.append("-");
    outputFile.append(quadrant.getX());
    outputFile.append("-");
    outputFile.append(quadrant.getY());
    outputFile.append(".png");
    FileOutputStream fos;
    try {
      fos = new FileOutputStream(outputFile.toString());
      ImageIO.write(image, "PNG", fos);
      fos.close();
    } catch (IOException e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
  }
  /**
   * Slices the named sandbox image into 256x256 tiles, using the
   * filename minus its extension as the tile-name prefix.
   *
   * @param filename name of an image file inside the sandbox directory;
   *                 must contain an extension (a "." separator)
   * @return the deepest zoom level generated, or 0 if the filename has
   *         no extension and nothing was processed
   */
  public int process(String filename) {
    Pattern p = Pattern.compile("(.*)\\.(.*)");
    Matcher m = p.matcher(filename);
    if(m.matches()) {
      String prefix = m.group(1);
      String fullPath = sandbox + File.separator + filename;
      subdivide(prepare(fullPath), 0, new XYData(0, 0), new XYData(256, 256), prefix);
      return maxLevel;
    }
    return 0;
  }
}
/** Simple mutable pair of integer coordinates used for tile geometry. */
class XYData {
  private int x;
  private int y;

  /**
   * Creates a pair holding the given coordinates.
   *
   * @param x horizontal component
   * @param y vertical component
   */
  public XYData(int x, int y) {
    set(x, y);
  }

  /** Replaces both coordinates at once. */
  public void set(int x, int y) {
    this.x = x;
    this.y = y;
  }

  /** @return the horizontal component */
  public int getX() {
    return x;
  }

  /** @return the vertical component */
  public int getY() {
    return y;
  }
}
| 8,457 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/Workspace.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
import javax.servlet.*;
import javax.servlet.http.*;
import java.sql.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.util.XssFilter;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * HICC workspace servlet. Serves and mutates dashboard "views" and the
 * widget catalog, both stored as JSON files under
 * {@code $CHUKWA_DATA_DIR/views} and {@code $CHUKWA_DATA_DIR/descriptors},
 * with generated {@code *.cache} files acting as aggregate indexes.
 * Requests are dispatched on the {@code method} query parameter.
 */
public class Workspace extends HttpServlet {
  public static final long serialVersionUID = 101L;
  private static final Log log = LogFactory.getLog(Workspace.class);
  // Root of the on-disk workspace data (views, descriptors, caches).
  private String path = System.getenv("CHUKWA_DATA_DIR");
  // Accumulated widget category tree built by addToHash; note this is
  // instance state shared across requests.
  private JSONObject hash = new JSONObject();
  // Per-request XSS-filtering parameter wrapper; set at the top of doGet.
  transient private XssFilter xf;
  /** Disables HTTP TRACE. */
  @Override
  protected void doTrace(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
  }
  /**
   * Dispatches to a handler based on the "method" request parameter.
   * NOTE(review): throws NullPointerException when the "method"
   * parameter is absent — confirm all clients always send it.
   */
  public void doGet(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    xf = new XssFilter(request);
    response.setContentType("text/plain");
    String method = xf.getParameter("method");
    if (method.equals("get_views_list")) {
      getViewsList(request, response);
    }
    if (method.equals("get_view")) {
      getView(request, response);
    }
    if (method.equals("save_view")) {
      saveView(request, response);
    }
    if (method.equals("change_view_info")) {
      changeViewInfo(request, response);
    }
    if (method.equals("get_widget_list")) {
      getWidgetList(request, response);
    }
    if (method.equals("clone_view")) {
      cloneView(request, response);
    }
    if (method.equals("delete_view")) {
      deleteView(request, response);
    }
  }
  /** POST is handled identically to GET. */
  public void doPost(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    doGet(request, response);
  }
  /**
   * Reads a whole file into a string (UTF-8), re-appending the platform
   * line separator after each line. Returns the empty string on I/O
   * failure (the exception is printed, not rethrown).
   */
  static public String getContents(File aFile) {
    // ...checks on aFile are elided
    StringBuffer contents = new StringBuffer();
    try {
      // use buffering, reading one line at a time
      // FileReader always assumes default encoding is OK!
      BufferedReader input = new BufferedReader(new InputStreamReader(new FileInputStream(aFile.getAbsolutePath()), Charset.forName("UTF-8")));
      try {
        String line = null; // not declared within while loop
        /*
         * readLine is a bit quirky : it returns the content of a line MINUS the
         * newline. it returns null only for the END of the stream. it returns
         * an empty String if two newlines appear in a row.
         */
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(System.getProperty("line.separator"));
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }
  /**
   * Writes the buffer to the named file (UTF-8), overwriting any
   * existing content; errors are reported on stderr only.
   */
  public void setContents(String fName, String buffer) {
    try {
      BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fName), Charset.forName("UTF-8")));
      out.write(buffer);
      out.close();
    } catch (Exception e) {
      System.err.println("Error: " + e.getMessage());
    }
  }
  /**
   * Clones an existing view file ("clone_name" parameter) to a new view
   * named by "name", appending a numeric suffix until the name is free,
   * then regenerates and returns the view-list cache.
   * NOTE(review): "name"/"clone_name" come from the request and are used
   * directly in file paths — potential path traversal; verify XssFilter
   * sanitizes path separators.
   */
  public void cloneView(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    String name = xf.getParameter("name");
    String template = xf.getParameter("clone_name");
    File aFile = new File(path + "/views/" + template);
    String config = getContents(aFile);
    int i = 0;
    boolean check = true;
    // Probe name, name1, name2, ... until an unused filename is found.
    while (check) {
      String tmpName = name;
      if (i > 0) {
        tmpName = name + i;
      }
      File checkFile = new File(path + "/views/" + tmpName + ".view");
      check = checkFile.exists();
      if (!check) {
        name = tmpName;
      }
      i = i + 1;
    }
    setContents(path + "/views/" + name + ".view", config);
    // Invalidate the aggregate cache so it is rebuilt below.
    File deleteCache = new File(path + "/views/workspace_view_list.cache");
    if(!deleteCache.delete()) {
      log.warn("Can not delete "+path + "/views/workspace_view_list.cache");
    }
    genViewCache(path + "/views");
    aFile = new File(path + "/views/workspace_view_list.cache");
    String viewsCache = getContents(aFile);
    out.println(viewsCache);
  }
  /**
   * Deletes the view file named by the "name" parameter and rebuilds
   * the view-list cache.
   */
  public void deleteView(HttpServletRequest request,
      HttpServletResponse response) throws IOException, ServletException {
    String name = xf.getParameter("name");
    File aFile = new File(path + "/views/" + name + ".view");
    if(!aFile.delete()) {
      log.warn("Can not delete " + path + "/views/" + name + ".view");
    }
    File deleteCache = new File(path + "/views/workspace_view_list.cache");
    if(!deleteCache.delete()) {
      log.warn("Can not delete "+path + "/views/workspace_view_list.cache");
    }
    genViewCache(path + "/views");
  }
  /** Returns the (possibly regenerated) cached view list as JSON text. */
  public void getViewsList(HttpServletRequest request,
      HttpServletResponse response) throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    genViewCache(path + "/views");
    File aFile = new File(path + "/views/workspace_view_list.cache");
    String viewsCache = getContents(aFile);
    out.println(viewsCache);
  }
  /** Returns the raw JSON content of the view named by "id". */
  public void getView(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    String id = xf.getParameter("id");
    genViewCache(path + "/views");
    File aFile = new File(path + "/views/" + id + ".view");
    String view = getContents(aFile);
    out.println(view);
  }
  /**
   * Updates a view's description (from the JSON "config" parameter),
   * renames the view file to match the new description, and rebuilds
   * the view-list cache. Writes a success/failure message to the client.
   */
  public void changeViewInfo(HttpServletRequest request,
      HttpServletResponse response) throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    String id = xf.getParameter("name");
    String config = request.getParameter("config");
    try {
      JSONObject jt = (JSONObject) JSONValue.parse(config);
      File aFile = new File(path + "/views/" + id + ".view");
      String original = getContents(aFile);
      JSONObject updateObject = (JSONObject) JSONValue.parse(original);
      updateObject.put("description", jt.get("description"));
      setContents(path + "/views/" + id + ".view", updateObject.toString());
      if (!rename(id, jt.get("description").toString())) {
        throw new Exception("Rename view file failed");
      }
      File deleteCache = new File(path + "/views/workspace_view_list.cache");
      if(!deleteCache.delete()) {
        log.warn("Can not delete "+path + "/views/workspace_view_list.cache");
      }
      genViewCache(path + "/views");
      out.println("Workspace is stored successfully.");
    } catch (Exception e) {
      out.println("Workspace store failed.");
    }
  }
  /**
   * Stores the raw "config" parameter as the view file named by "name".
   * NOTE(review): config is taken unfiltered from the request and
   * written to disk without validation — confirm upstream checks.
   */
  public void saveView(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    String id = xf.getParameter("name");
    String config = request.getParameter("config");
    setContents(path + "/views/" + id + ".view", config);
    out.println("Workspace is stored successfully.");
  }
  /** Returns the (possibly regenerated) cached widget catalog JSON. */
  public void getWidgetList(HttpServletRequest request,
      HttpServletResponse response) throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    genWidgetCache(path + "/descriptors");
    File aFile = new File(path + "/descriptors/workspace_plugin.cache");
    String viewsCache = getContents(aFile);
    out.println(viewsCache);
  }
  /**
   * Rebuilds the view-list cache from all *.view files in the source
   * directory, but only if the cache file does not already exist.
   */
  private void genViewCache(String source) {
    File cacheFile = new File(source + "/workspace_view_list.cache");
    if (!cacheFile.exists()) {
      File dir = new File(source);
      File[] filesWanted = dir.listFiles(new FilenameFilter() {
        public boolean accept(File dir, String name) {
          return name.endsWith(".view");
        }
      });
      if(filesWanted!=null) {
        JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
        for (int i = 0; i < filesWanted.length; i++) {
          String buffer = getContents(filesWanted[i]);
          try {
            JSONObject jt = (JSONObject) JSONValue.parse(buffer);
            String fn = filesWanted[i].getName();
            // The view key is the filename with the ".view" suffix removed.
            jt.put("key", fn.substring(0, (fn.length() - 5)));
            cacheGroup[i] = jt;
          } catch (Exception e) {
            log.debug(ExceptionUtil.getStackTrace(e));
          }
        }
        String viewList = convertObjectsToViewList(cacheGroup);
        setContents(source + "/workspace_view_list.cache", viewList);
      }
    }
  }
  /**
   * Serializes parsed view objects into the JSON array format of the
   * view-list cache, attaching a fixed all-users read/modify permission.
   */
  public String convertObjectsToViewList(JSONObject[] objArray) {
    JSONArray jsonArr = new JSONArray();
    JSONObject permission = new JSONObject();
    JSONObject user = new JSONObject();
    try {
      permission.put("read", 1);
      permission.put("modify", 1);
      user.put("all", permission);
    } catch (Exception e) {
      System.err.println("JSON Exception: " + e.getMessage());
    }
    for (int i = 0; i < objArray.length; i++) {
      try {
        JSONObject jsonObj = new JSONObject();
        jsonObj.put("key", objArray[i].get("key"));
        jsonObj.put("description", objArray[i].get("description"));
        jsonObj.put("owner", "");
        jsonObj.put("permission", user);
        jsonArr.add(jsonObj);
      } catch (Exception e) {
        System.err.println("JSON Exception: " + e.getMessage());
      }
    }
    return jsonArr.toString();
  }
  /**
   * Rebuilds the widget catalog cache from all *.descriptor files when
   * the cache is missing or older than the descriptor directory.
   */
  private void genWidgetCache(String source) {
    File cacheFile = new File(source + "/workspace_plugin.cache");
    File cacheDir = new File(source);
    if (!cacheFile.exists()
        || cacheFile.lastModified() < cacheDir.lastModified()) {
      File dir = new File(source);
      File[] filesWanted = dir.listFiles(new FilenameFilter() {
        public boolean accept(File dir, String name) {
          return name.endsWith(".descriptor");
        }
      });
      if(filesWanted!=null) {
        JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
        for (int i = 0; i < filesWanted.length; i++) {
          String buffer = getContents(filesWanted[i]);
          try {
            JSONObject jt = (JSONObject) JSONValue.parse(buffer);
            cacheGroup[i] = jt;
          } catch (Exception e) {
            log.debug(ExceptionUtil.getStackTrace(e));
          }
        }
        String widgetList = convertObjectsToWidgetList(cacheGroup);
        setContents(source + "/workspace_plugin.cache", widgetList);
      }
    }
  }
  /**
   * Serializes widget descriptors into the catalog JSON: a flat
   * "detail" array plus a "children" category tree keyed by each
   * widget's comma-separated "categories" value.
   */
  public String convertObjectsToWidgetList(JSONObject[] objArray) {
    JSONObject jsonObj = new JSONObject();
    JSONArray jsonArr = new JSONArray();
    for (int i = 0; i < objArray.length; i++) {
      jsonArr.add(objArray[i]);
    }
    try {
      jsonObj.put("detail", jsonArr);
    } catch (Exception e) {
      System.err.println("JSON Exception: " + e.getMessage());
    }
    for (int i = 0; i < objArray.length; i++) {
      try {
        String[] categoriesArray = objArray[i].get("categories").toString()
            .split(",");
        // hash persists across calls/requests; categories accumulate.
        hash = addToHash(hash, categoriesArray, objArray[i]);
      } catch (Exception e) {
        System.err.println("JSON Exception: " + e.getMessage());
      }
    }
    try {
      jsonObj.put("children", hash);
    } catch (Exception e) {
      System.err.println("JSON Exception: " + e.getMessage());
    }
    return jsonObj.toString();
  }
  /**
   * Inserts a widget into the nested category tree. Intermediate
   * categories become "node:<category>" objects; the widget itself is
   * stored as "leaf:<title>" -> id at the last category level.
   * NOTE(review): the catch-driven creation of missing nodes relies on
   * a cast/NPE when a node is absent — confirm it behaves as intended
   * for deep, previously-unseen category paths.
   */
  public JSONObject addToHash(JSONObject hash, String[] categoriesArray,
      JSONObject obj) {
    JSONObject subHash = hash;
    for (int i = 0; i < categoriesArray.length; i++) {
      String id = categoriesArray[i];
      if (i >= categoriesArray.length - 1) {
        try {
          subHash.put("leaf:" + obj.get("title"), obj.get("id"));
        } catch (Exception e) {
          System.err.println("JSON Exception: " + e.getMessage());
        }
      } else {
        try {
          subHash = (JSONObject) subHash.get("node:" + id);
        } catch (Exception e) {
          try {
            JSONObject tmpHash = new JSONObject();
            subHash.put("node:" + id, tmpHash);
            subHash = tmpHash;
          } catch (Exception ex) {
            log.debug(ExceptionUtil.getStackTrace(e));
          }
        }
      }
    }
    return hash;
  }
  /**
   * Renames the view file for id so its filename matches the new
   * description.
   *
   * @return true unless an exception occurred; a failed renameTo is
   *         only logged and still returns true
   */
  private boolean rename(String id, String desc) {
    try {
      File view = new File(path + "/views/" + id + ".view");
      File newFile = new File(path + File.separator + "views" + File.separator
          + desc + ".view");
      if(!view.renameTo(newFile)) {
        log.warn("Can not rename " + path + "/views/" + id + ".view to " +
            path + File.separator + "views" + File.separator + desc + ".view");
      }
    } catch (Exception e) {
      return false;
    }
    return true;
  }
}
| 8,458 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/AreaCalculator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.util.TreeMap;
import java.util.Map.Entry;
/**
 * Utility for computing the area under sampled data series via the
 * trapezoid rule. Series are maps from x values (encoded as strings)
 * to y values; iteration order follows the TreeMap's string ordering
 * of the keys.
 */
public class AreaCalculator {

  /**
   * Computes the area under every named series in the data set.
   *
   * @param dataSet map of series name to its sampled (x, y) points
   * @return map of series name to computed area
   */
  public static TreeMap<String, Double> getAreas(
      TreeMap<String, TreeMap<String, Double>> dataSet)
  {
    TreeMap<String, Double> result = new TreeMap<String, Double>();
    for (Entry<String, TreeMap<String, Double>> series : dataSet.entrySet()) {
      result.put(series.getKey(), getArea(series.getValue()));
    }
    return result;
  }

  /**
   * Sums trapezoid slices between consecutive samples of one series.
   *
   * @param data sampled points; keys are parsed with Double.parseDouble
   * @return the accumulated area (0 for fewer than two samples)
   */
  public static Double getArea(TreeMap<String, Double> data) {
    double total = 0;
    double prevX = 0;
    double prevY = 0;
    boolean seenFirst = false;
    for (Entry<String, Double> sample : data.entrySet()) {
      double x = Double.parseDouble(sample.getKey());
      double y = sample.getValue();
      if (seenFirst) {
        total += getArea(prevX, prevY, x, y);
      } else {
        seenFirst = true;
      }
      prevX = x;
      prevY = y;
    }
    return total;
  }

  /** Area of a single trapezoid slice between (x0, y0) and (x1, y1). */
  public static Double getArea(double x0, double y0, double x1, double y1) {
    return (x1 - x0) * (y0 + y1) / 2;
  }
}
| 8,459 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/Views.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * Read-only accessor over the cached HICC view list
 * ({@code $CHUKWA_DATA_DIR/views/workspace_view_list.cache}), a JSON
 * array of view descriptors. Every getter returns null (and logs at
 * debug level) when the requested field is missing or malformed.
 */
public class Views {
  public JSONArray viewsData;
  private String path = System.getenv("CHUKWA_DATA_DIR")
      + "/views/workspace_view_list.cache";
  private static final Log log = LogFactory.getLog(Views.class);

  /**
   * Reads a whole file into a string (UTF-8), re-appending the platform
   * line separator after each line. Returns the empty string on I/O
   * failure (the exception is printed, not rethrown).
   */
  static public String getContents(File aFile) {
    StringBuffer contents = new StringBuffer();
    try (BufferedReader input = new BufferedReader(new InputStreamReader(
        new FileInputStream(aFile.getAbsolutePath()),
        Charset.forName("UTF-8")))) {
      // readLine strips the newline, so it is restored explicitly here.
      String line;
      while ((line = input.readLine()) != null) {
        contents.append(line);
        contents.append(System.getProperty("line.separator"));
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }

  /** Loads and parses the cached view list; parse failures are logged. */
  public Views() {
    String buffer = getContents(new File(path));
    try {
      viewsData = (JSONArray) JSONValue.parse(buffer);
    } catch (Exception e) {
      log.debug(ExceptionUtil.getStackTrace(e));
    }
  }

  /** @return the owner of view i, or null if unavailable */
  public String getOwner(int i) {
    try {
      JSONObject view = (JSONObject) viewsData.get(i);
      return view.get("owner").toString();
    } catch (Exception e) {
      log.debug(ExceptionUtil.getStackTrace(e));
      return null;
    }
  }

  /**
   * @return an iterator over the user names in view i's "permission"
   *         object, or null if unavailable
   */
  public Iterator getPermission(int i) {
    try {
      JSONObject view = (JSONObject) viewsData.get(i);
      JSONObject permission = (JSONObject) view.get("permission");
      return permission.keySet().iterator();
    } catch (Exception e) {
      log.debug(ExceptionUtil.getStackTrace(e));
      return null;
    }
  }

  /** @return the "read" flag for the given user on view i, or null */
  public String getReadPermission(int i, String who) {
    try {
      JSONObject view = (JSONObject) viewsData.get(i);
      JSONObject permission = (JSONObject) view.get("permission");
      JSONObject user = (JSONObject) permission.get(who);
      return user.get("read").toString();
    } catch (Exception e) {
      log.debug(ExceptionUtil.getStackTrace(e));
      return null;
    }
  }

  /** @return the "write" flag for the given user on view i, or null */
  public String getWritePermission(int i, String who) {
    try {
      JSONObject view = (JSONObject) viewsData.get(i);
      JSONObject permission = (JSONObject) view.get("permission");
      JSONObject user = (JSONObject) permission.get(who);
      return user.get("write").toString();
    } catch (Exception e) {
      log.debug(ExceptionUtil.getStackTrace(e));
      return null;
    }
  }

  /** @return the description of view i, or null if unavailable */
  public String getDescription(int i) {
    try {
      JSONObject view = (JSONObject) viewsData.get(i);
      return view.get("description").toString();
    } catch (Exception e) {
      log.debug(ExceptionUtil.getStackTrace(e));
      return null;
    }
  }

  /** @return the key (filename stem) of view i, or null if unavailable */
  public String getKey(int i) {
    try {
      JSONObject view = (JSONObject) viewsData.get(i);
      return view.get("key").toString();
    } catch (Exception e) {
      log.debug(ExceptionUtil.getStackTrace(e));
      return null;
    }
  }

  /** @return the number of views in the cached list */
  public int length() {
    return viewsData.size();
  }
}
| 8,460 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc;
import java.util.TreeMap;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.List;
import java.sql.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * Runs a SQL query and maps the result set into chartable series:
 * the first column becomes the x axis (timestamps are converted to
 * epoch-millis labels), remaining columns become series values, with
 * optional grouping by the second column and optional per-minute slope
 * (rate) calculation.
 */
public class DatasetMapper {
  // JDBC connection string used for every execute() call.
  private String jdbc;
  private static Log log = LogFactory.getLog(DatasetMapper.class);
  // Series name -> (x label -> y value); rebuilt on each execute().
  private TreeMap<String, TreeMap<String, Double>> dataset;
  // Distinct x-axis labels in first-seen order; rebuilt on each execute().
  private List<String> labels;
  /**
   * @param jdbc JDBC URL (plus credentials) for the target database
   */
  public DatasetMapper(String jdbc) {
    this.jdbc = jdbc;
    this.dataset = new TreeMap<String, TreeMap<String, Double>>();
    this.labels = new ArrayList<String>();
  }
  /**
   * Executes the query and populates the dataset and label list.
   *
   * @param query SQL text, optionally with "?" placeholders
   * @param groupBySecondColumn when true, column 2 is a series
   *        discriminator and data columns start at 3; otherwise data
   *        columns start at 2 and series are named by column name
   * @param calculateSlope when true, store the per-minute delta between
   *        consecutive values instead of the raw value (negative deltas
   *        and zero/negative time windows become NaN)
   * @param formatTime NOTE(review): currently unused in this method —
   *        confirm whether it is dead or intended for future formatting
   * @param parameters values bound to the "?" placeholders, in order
   */
  public void execute(String query, boolean groupBySecondColumn,
      boolean calculateSlope, String formatTime, List<Object> parameters) {
    dataset.clear();
    try {
      // The newInstance() call is a work around for some
      // broken Java implementations
      org.apache.hadoop.chukwa.util.DriverManagerUtil.loadDriver().newInstance();
    } catch (Exception ex) {
      log.error("failed to load driver", ex);
      // handle the error
    }
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    labels.clear();
    // NOTE(review): max is tracked below but never read after the loop —
    // appears to be dead state.
    double max = 0.0;
    // Minutes between consecutive timestamp rows; denominator for slope.
    long timeWindowSize=0;
    long previousTime=0;
    try {
      conn = org.apache.hadoop.chukwa.util.DriverManagerUtil.getConnection(jdbc);
      stmt = conn.prepareStatement(query);
      if(query.indexOf("?")!=-1) {
        for(int i=0;i<parameters.size();i++) {
          int index = i+1;
          stmt.setObject(index,parameters.get(i));
        }
      }
      // rs = stmt.executeQuery(query);
      if (stmt.execute()) {
        rs = stmt.getResultSet();
        ResultSetMetaData rmeta = rs.getMetaData();
        int col = rmeta.getColumnCount();
        // Previous row's values per column index (ungrouped slope mode).
        double[] previousArray = new double[col + 1];
        for (int k = 0; k < col; k++) {
          previousArray[k] = 0.0;
        }
        int i = 0;
        java.util.TreeMap<String, Double> data = null;
        // Previous value per series name (grouped slope mode).
        HashMap<String, Double> previousHash = new HashMap<String, Double>();
        // Tracks labels already added, preserving first-seen order.
        HashMap<String, Integer> xAxisMap = new HashMap<String, Integer>();
        while (rs.next()) {
          String label = "";
          if (rmeta.getColumnType(1) == java.sql.Types.TIMESTAMP) {
            long time = rs.getTimestamp(1).getTime();
            // Derive the sampling window (in minutes) from the gap
            // between the first two distinct timestamps, updating as
            // the gap changes.
            if(timeWindowSize==0) {
              timeWindowSize=1;
              previousTime=time;
            } else if(time!=previousTime) {
              timeWindowSize=(time-previousTime)/60000;
              previousTime=time;
            }
            label = "" + time;
          } else {
            label = rs.getString(1);
          }
          if (!xAxisMap.containsKey(label)) {
            xAxisMap.put(label, i);
            labels.add(label);
            i++;
          }
          if (groupBySecondColumn) {
            String item = rs.getString(2);
            // Get the data from the row using the series column
            for (int j = 3; j <= col; j++) {
              // Series name = "<group value> <column name>".
              item = rs.getString(2) + " " + rmeta.getColumnName(j);
              data = dataset.get(item);
              if (data == null) {
                data = new java.util.TreeMap<String, Double>();
              }
              if (calculateSlope) {
                double current = rs.getDouble(j);
                double tmp = 0L;
                if (data.size() > 1) {
                  tmp = (current - previousHash.get(item).doubleValue())/timeWindowSize;
                  if(timeWindowSize<=0) {
                    tmp = Double.NaN;
                  }
                } else {
                  tmp = 0;
                }
                // Counters should be monotonic; a negative delta marks
                // a reset and is rendered as a gap (NaN).
                if (tmp < 0) {
                  tmp = Double.NaN;
                }
                previousHash.put(item, current);
                if (tmp > max) {
                  max = tmp;
                }
                data.put(label, tmp);
              } else {
                double current = rs.getDouble(j);
                if (current > max) {
                  max = current;
                }
                data.put(label, current);
              }
              dataset.put(item, data);
            }
          } else {
            for (int j = 2; j <= col; j++) {
              String item = rmeta.getColumnName(j);
              // Get the data from the row using the column name
              double current = rs.getDouble(j);
              if (current > max) {
                max = current;
              }
              data = dataset.get(item);
              if (data == null) {
                data = new java.util.TreeMap<String, Double>();
              }
              if (calculateSlope) {
                double tmp = current;
                if (data.size() > 1) {
                  tmp = (tmp - previousArray[j])/timeWindowSize;
                  if(timeWindowSize<=0) {
                    tmp = Double.NaN;
                  }
                } else {
                  tmp = 0.0;
                }
                if (tmp < 0) {
                  tmp = Double.NaN;
                }
                previousArray[j] = current;
                data.put(label, tmp);
              } else {
                data.put(label, current);
              }
              dataset.put(item, data);
            }
          }
        }
      } else {
        log.error("query is not executed.");
      }
      // Now do something with the ResultSet ....
    } catch (SQLException ex) {
      // handle any errors
      log.error("SQLException: " + ex.getMessage() + " on query: " + query);
      log.error("SQLState: " + ex.getSQLState());
      log.error("VendorError: " + ex.getErrorCode());
    } catch (Exception ex) {
      log.debug(ExceptionUtil.getStackTrace(ex));
    } finally {
      // it is a good idea to release
      // resources in a finally{} block
      // in reverse-order of their creation
      // if they are no-longer needed
      if (rs != null) {
        try {
          rs.close();
        } catch (SQLException sqlEx) {
          log.debug(ExceptionUtil.getStackTrace(sqlEx));
        }
        rs = null;
      }
      if (stmt != null) {
        try {
          stmt.close();
        } catch (SQLException sqlEx) {
          log.debug(ExceptionUtil.getStackTrace(sqlEx));
        }
        stmt = null;
      }
      if (conn != null) {
        try {
          conn.close();
        } catch (SQLException sqlEx) {
          log.debug(ExceptionUtil.getStackTrace(sqlEx));
        }
        conn = null;
      }
    }
  }
  /** @return x-axis labels from the last execute(), in first-seen order */
  public List<String> getXAxisMap() {
    return labels;
  }
  /** @return series data from the last execute(): name -> (label -> value) */
  public TreeMap<String, TreeMap<String, Double>> getDataset() {
    return dataset;
  }
}
| 8,461 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/proxy/HttpProxy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.proxy;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.log4j.Logger;
/**
 * HTTP proxy servlet that forwards GET and POST requests to the Solr
 * instance configured by the {@code chukwa.solr.url} property.
 */
public class HttpProxy extends HttpServlet {

  private static final long serialVersionUID = 7574L;
  private static final String USER_AGENT = "Mozilla/5.0";
  private final static String SOLR_URL = "chukwa.solr.url";
  private final static Logger LOG = Logger.getLogger(HttpProxy.class);
  private String solrUrl = null;

  public HttpProxy() {
    super();
    ChukwaConfiguration conf = new ChukwaConfiguration();
    solrUrl = conf.get(SOLR_URL);
  }

  /**
   * Forwards a GET request to Solr, copying path and query string, and
   * streams the remote response body back to the client.
   *
   * @param request incoming client request
   * @param response response sent back to the client
   * @throws ServletException on servlet container errors
   * @throws IOException if the remote connection or the body copy fails
   */
  protected void doGet(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    // Rebuild the target URL from the proxied path and query string.
    String url = solrUrl + request.getPathInfo()
        + "?" + request.getQueryString();
    HttpURLConnection con = (HttpURLConnection) new URL(url).openConnection();
    con.setRequestMethod("GET");
    con.setRequestProperty("User-Agent", USER_AGENT);
    int responseCode = con.getResponseCode();
    LOG.info("Sending 'GET' request to URL : " + url);
    LOG.info("Response Code : " + responseCode);
    copyResponse(con, response);
  }

  /**
   * Forwards a POST request to Solr, re-encoding all request parameters as
   * a form-encoded body, and streams the remote response back.
   *
   * @param request incoming client request
   * @param response response sent back to the client
   * @throws ServletException on servlet container errors
   * @throws IOException if the remote connection or the body copy fails
   */
  protected void doPost(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    String url = solrUrl + request.getPathInfo();
    HttpURLConnection con = (HttpURLConnection) new URL(url).openConnection();
    con.setRequestMethod("POST");
    con.setRequestProperty("User-Agent", USER_AGENT);
    con.setRequestProperty("Accept-Language", "en-US,en;q=0.5");

    // Re-encode every parameter.  The '&' separator is emitted per VALUE so
    // multi-valued parameters are joined correctly; the previous code only
    // separated per key and produced "k=v1k=v2" for repeated keys.
    StringBuilder sb = new StringBuilder();
    @SuppressWarnings("rawtypes")
    Map map = request.getParameterMap();
    for (Object key : map.keySet()) {
      String keyStr = (String) key;
      for (String s : (String[]) map.get(keyStr)) {
        if (sb.length() > 0) {
          sb.append('&');
        }
        sb.append(URLEncoder.encode(keyStr, "UTF-8")).append('=')
            .append(URLEncoder.encode(s, "UTF-8"));
      }
    }
    String urlParameters = sb.toString();

    // Send the form-encoded body; close the stream even on failure.
    con.setDoOutput(true);
    DataOutputStream wr = new DataOutputStream(con.getOutputStream());
    try {
      wr.writeBytes(urlParameters);
      wr.flush();
    } finally {
      wr.close();
    }

    int responseCode = con.getResponseCode();
    LOG.debug("\nSending 'POST' request to URL : " + url);
    LOG.debug("Post parameters : " + urlParameters);
    LOG.debug("Response Code : " + responseCode);
    copyResponse(con, response);
  }

  /**
   * Streams the body of the remote response to the servlet client.
   * NOTE(review): line terminators are dropped by readLine(), matching the
   * original behavior; confirm downstream consumers do not need them.
   */
  private void copyResponse(HttpURLConnection con, HttpServletResponse response)
      throws IOException {
    ServletOutputStream sout = response.getOutputStream();
    BufferedReader in = new BufferedReader(new InputStreamReader(
        con.getInputStream(), Charset.forName("UTF-8")));
    try {
      String inputLine;
      while ((inputLine = in.readLine()) != null) {
        sout.write(inputLine.getBytes(Charset.forName("UTF-8")));
      }
    } finally {
      in.close();
    }
    sout.flush();
  }
}
| 8,462 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/BarOptions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
/**
 * Rendering options for bar-type chart series; extends the common
 * {@link SeriesOptions} and defaults to filled bars.
 */
public class BarOptions extends SeriesOptions {
  public boolean zero;
  public boolean stepByStep = true;
  public int barWidth = 4;
  public String align;
  public boolean horizontal;

  public BarOptions() {
    // Bars are rendered filled by default.
    fill = true;
  }

  public boolean getZero() {
    return this.zero;
  }

  public void setZero(boolean flag) {
    this.zero = flag;
  }

  public boolean getStepByStep() {
    return this.stepByStep;
  }

  public void setStepByStep(boolean flag) {
    this.stepByStep = flag;
  }

  public int getBarWidth() {
    return this.barWidth;
  }

  public void setBarWidth(int width) {
    this.barWidth = width;
  }

  public String getAlign() {
    return this.align;
  }

  public void setAlign(String alignment) {
    this.align = alignment;
  }

  public boolean getHorizontal() {
    return this.horizontal;
  }

  public void setHorizontal(boolean flag) {
    this.horizontal = flag;
  }
}
| 8,463 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Series.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.log4j.Logger;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(propOrder={})
public class Series {
  // Backing JSON document of shape {"name": ..., "data": [[x, y], ...]}.
  private JSONObject series;
  static Logger log = Logger.getLogger(Series.class);

  /**
   * Creates a named data series.
   * @param name display name of the series
   */
  public Series(String name) {
    series = new JSONObject();
    try {
      series.put("name", name);
    } catch (Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
  }

  /**
   * Appends an (x, y) data point, lazily creating the "data" array.
   * @param x x-axis value (typically a timestamp)
   * @param y measured value
   */
  public void add(long x, double y) {
    try {
      if(!series.containsKey("data")) {
        series.put("data", new JSONArray());
      }
      JSONArray xy = new JSONArray();
      xy.add(x);
      xy.add(y);
      ((JSONArray)series.get("data")).add(xy);
    } catch(Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
  }

  public String toString() {
    return series.toString();
  }

  public Object toJSONObject() {
    return series;
  }

  /**
   * Returns the JSON text of the data points, or null when no point has
   * been added yet.  The value stored under "data" is a JSONArray, so the
   * previous direct cast to String threw ClassCastException whenever any
   * data was present; it is now serialized via toString().
   */
  public String getData() {
    Object data = series.get("data");
    return (data == null) ? null : data.toString();
  }
}
| 8,464 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/ChartType.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
/**
 * Chart rendering styles supported by HICC widgets.
 * NOTE(review): do not reorder the constants — anything relying on
 * ordinal-based serialization would silently change meaning.
 */
public enum ChartType {
  TIME_SERIES, PIE, CIRCLE, TILE, BAR, DONUT;
}
| 8,465 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/SessionAttribute.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
/**
 * Simple key/value pair carried in a user session.
 * Values are set once at construction and read through the accessors.
 */
public class SessionAttribute {
  public String key;
  public String value;

  public SessionAttribute(String id, String value) {
    this.key = id;
    this.value = value;
  }

  public String getKey() {
    return key;
  }

  public String getValue() {
    return value;
  }
}
| 8,466 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Dashboard.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
import java.util.ArrayList;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement
@XmlAccessorType(XmlAccessType.PUBLIC_MEMBER)
public class Dashboard {
  // Widgets placed on the dashboard; allocated lazily on first add so an
  // empty dashboard serializes with a null grid, as before.
  public ArrayList<Widget> grid = null;

  /**
   * Places a widget on the dashboard grid, creating the grid on first use.
   * @param widget the widget to add
   */
  public void add(Widget widget) {
    if (grid == null) {
      grid = new ArrayList<Widget>();
    }
    grid.add(widget);
  }
}
| 8,467 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Chart.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement
@XmlAccessorType(XmlAccessType.PUBLIC_MEMBER)
public class Chart {
  // PUBLIC_MEMBER access: the public getters below define the serialized
  // XML/JSON contract, so getter names must not change.
  private String id;
  private ChartType type;
  private String title;
  private List<SeriesMetaData> series;
  private boolean xLabelOn;
  private boolean yLabelOn;
  private boolean yRightLabelOn;
  private int width;
  private int height;
  private List<String> xLabelRange;
  // Label -> ordinal position; built by setXLabelsRange().
  private HashMap<String, Long> xLabelRangeHash;
  private boolean legend = true;
  private String xLabel = "";
  private String yLabel = "";
  private String yRightLabel = "";
  // NOTE(review): max/min/userDefinedMax/userDefinedMin initializers here
  // are immediately overwritten by the constructor; the effective defaults
  // are max=0, userDefinedMax=false, userDefinedMin=false.
  private double max = 0;
  private double min = 0;
  private boolean userDefinedMax = true;
  private boolean userDefinedMin = true;
  private String yUnitType = "";
  private String icon = "";
  private String bannerText = "";
  private String suffixText = "";
  private String threshold = "";

  /**
   * Creates a chart with defaults: time-series type, 100x100, legend on,
   * axis labels on, and no user-defined y-axis bounds.
   * @param id unique chart identifier
   */
  public Chart(String id) {
    this.id = id;
    this.type = ChartType.TIME_SERIES;
    this.title = "Untitled Chart";
    this.xLabelOn = true;
    this.yLabelOn = true;
    this.width = 100;
    this.height = 100;
    this.legend = true;
    this.max = 0;
    this.userDefinedMax = false;
    this.userDefinedMin = false;
  }

  public ChartType getType() {
    return this.type;
  }

  public void setType(ChartType type) {
    this.type = type;
  }

  /** Sets an explicit y-axis maximum and marks it as user-defined. */
  public void setYMax(double max) {
    this.max = max;
    this.userDefinedMax = true;
  }

  public double getYMax() {
    return this.max;
  }

  public boolean getUserDefinedMax() {
    return this.userDefinedMax;
  }

  /** Sets an explicit y-axis minimum and marks it as user-defined. */
  public void setYMin(double min) {
    this.min = min;
    this.userDefinedMin = true;
  }

  public double getYMin() {
    return this.min;
  }

  public boolean getUserDefinedMin() {
    return this.userDefinedMin;
  }

  public void setSize(int width, int height) {
    this.width = width;
    this.height = height;
  }

  public int getWidth() {
    return this.width;
  }

  public int getHeight() {
    return this.height;
  }

  public void setTitle(String title) {
    this.title = title;
  }

  public String getTitle() {
    return this.title;
  }

  public void setId(String id) {
    this.id = id;
  }

  public String getId() {
    return this.id;
  }

  public void setSeries(List<SeriesMetaData> series) {
    this.series = series;
  }

  public List<SeriesMetaData> getSeries() {
    return this.series;
  }

  public void setXAxisLabelsOn(boolean toggle) {
    xLabelOn = toggle;
  }

  public boolean isXAxisLabelsOn() {
    return xLabelOn;
  }

  public void setYAxisLabels(boolean toggle) {
    yLabelOn = toggle;
  }

  public boolean isYAxisLabelsOn() {
    return yLabelOn;
  }

  public void setYAxisRightLabels(boolean toggle) {
    yRightLabelOn = toggle;
  }

  public boolean isYAxisRightLabelsOn() {
    return yRightLabelOn;
  }

  public void setXAxisLabel(String label) {
    xLabel = label;
  }

  public String getXAxisLabel() {
    return xLabel;
  }

  public void setYAxisLabel(String label) {
    yLabel = label;
  }

  public String getYAxisLabel() {
    return yLabel;
  }

  public void setYAxisRightLabel(String label) {
    yRightLabel = label;
  }

  public String getYAxisRightLabel() {
    return yRightLabel;
  }

  /**
   * Stores the x-axis label range and indexes each label by its position
   * in the supplied list.
   */
  public void setXLabelsRange(List<String> range) {
    xLabelRange = range;
    xLabelRangeHash = new HashMap<String, Long>();
    long value = 0;
    for (String label : range) {
      xLabelRangeHash.put(label, value);
      value++;
    }
  }

  public List<String> getXLabelsRange() {
    return xLabelRange;
  }

  public void setLegend(boolean toggle) {
    legend = toggle;
  }

  public boolean getLegend() {
    return legend;
  }

  public void setYUnitType(String yUnitType) {
    this.yUnitType = yUnitType;
  }

  public String getYUnitType() {
    return this.yUnitType;
  }

  public void setIcon(String icon) {
    this.icon = icon;
  }

  public String getIcon() {
    return this.icon;
  }

  public void setBannerText(String bannerText) {
    this.bannerText = bannerText;
  }

  public String getBannerText() {
    return this.bannerText;
  }

  public void setSuffixText(String suffixText) {
    this.suffixText = suffixText;
  }

  public String getSuffixText() {
    return this.suffixText;
  }

  public void setThreshold(String direction) {
    this.threshold = direction;
  }

  public String getThreshold() {
    return this.threshold;
  }

  /**
   * Create a chart object.
   * @param id is unique chart identifier
   * @param title is searchable name of the chart
   * @param metrics is list of metric names to render chart
   * @param source is data source name
   * @param yunitType is y axis unit type
   * @return Chart object
   * @throws URISyntaxException if metrics name can not compose valid URL syntax
   */
  public static synchronized Chart createChart(String id,
      String title, String[] metrics, String source, String yunitType) throws URISyntaxException {
    Chart chart = new Chart(id);
    chart.setYUnitType(yunitType);
    chart.setTitle(title);
    ArrayList<SeriesMetaData> series = new ArrayList<SeriesMetaData>();
    for(String metric : metrics) {
      SeriesMetaData s = new SeriesMetaData();
      s.setLabel(metric + "/" + source);
      s.setUrl(new URI("/hicc/v1/metrics/series/" + metric + "/"
        + source));
      LineOptions l = new LineOptions();
      s.setLineOptions(l);
      series.add(s);
    }
    chart.setSeries(series);
    return chart;
  }

  /**
   * Create a chart in HBase by specifying parameters.
   * @param id is unique chart identifier
   * @param title is searchable name of the chart
   * @param metrics is list of metric names to render ring chart
   * @param source is data source name
   * @param suffixLabel is text label to append to metric values
   * @param direction sets the threshold to have either upper limit or lower limit
   * @return Chart object
   * @throws URISyntaxException if metrics name can not compose valid URL syntax
   */
  public static synchronized Chart createCircle(String id,
      String title, String[] metrics, String source, String suffixLabel, String direction) throws URISyntaxException {
    Chart chart = new Chart(id);
    chart.setSuffixText(suffixLabel);
    chart.setTitle(title);
    chart.setThreshold(direction);
    ArrayList<SeriesMetaData> series = new ArrayList<SeriesMetaData>();
    for(String metric : metrics) {
      SeriesMetaData s = new SeriesMetaData();
      s.setLabel(metric + "/" + source);
      s.setUrl(new URI("/hicc/v1/metrics/series/" + metric + "/"
        + source));
      series.add(s);
    }
    chart.setSeries(series);
    return chart;
  }

  /**
   * Create a tile in HBase by specifying parameters.
   * @param id is unique tile identifier
   * @param title is searchable name of the tile widget
   * @param bannerText is description of the tile widget
   * @param suffixLabel is text label to append to metric values
   * @param metrics is list of metric names to render tile widget
   * @param source is data source name
   * @param icon is emoji symbol to render beside tile widget
   * @return Chart object
   * @throws URISyntaxException if metrics name can not compose valid URL syntax
   */
  public static synchronized Chart createTile(String id, String title,
      String bannerText, String suffixLabel, String[] metrics, String source,
      String icon) throws URISyntaxException {
    Chart chart = new Chart(id);
    chart.setTitle(title);
    chart.setBannerText(bannerText);
    chart.setSuffixText(suffixLabel);
    chart.setIcon(icon);
    List<SeriesMetaData> smd = new ArrayList<SeriesMetaData>();
    for (String metric : metrics) {
      SeriesMetaData series = new SeriesMetaData();
      series.setUrl(new URI("/hicc/v1/metrics/series/" + metric + "/" + source));
      smd.add(series);
    }
    chart.setSeries(smd);
    return chart;
  }
}
| 8,468 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/PointOptions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
/**
 * Rendering options for point (scatter) chart series.
 */
public class PointOptions extends SeriesOptions {
  // Point radius in pixels; defaults to 5 via the constructor.
  public int radius;
  // Marker shape name; defaults to "circle".
  public String symbol = "circle";

  public PointOptions() {
    this.radius = 5;
  }

  public int getRadius() {
    return this.radius;
  }

  public void setRadius(int r) {
    this.radius = r;
  }

  public String getSymbol() {
    return this.symbol;
  }

  public void setSymbol(String s) {
    this.symbol = s;
  }
}
| 8,469 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/HeatMapPoint.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
import javax.xml.bind.annotation.XmlElement;
/**
 * A single (x, y, count) cell of a heatmap.  Fields are serialized as
 * XML elements; constructors are package-private for use by Heatmap.
 */
public class HeatMapPoint {
  @XmlElement
  public int x;
  @XmlElement
  public int y;
  @XmlElement
  public double count;

  HeatMapPoint() {
  }

  HeatMapPoint(int x, int y, double count) {
    // Delegate to put() so both initialization paths stay in sync.
    put(x, y, count);
  }

  /** Returns this point (fluent accessor). */
  public HeatMapPoint get() {
    return this;
  }

  /** Overwrites all three coordinates of the point. */
  public void put(int x, int y, double count) {
    this.x = x;
    this.y = y;
    this.count = count;
  }
}
| 8,470 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/LineOptions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
/**
 * Rendering options for line chart series.
 */
public class LineOptions extends SeriesOptions {
  public boolean zero;
  public boolean steps;

  public boolean getZero() {
    return this.zero;
  }

  public void setZero(boolean flag) {
    this.zero = flag;
  }

  public boolean getSteps() {
    return this.steps;
  }

  public void setSteps(boolean flag) {
    this.steps = flag;
  }
}
| 8,471 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/SeriesMetaData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
import java.net.URI;
import java.util.ArrayList;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement
public class SeriesMetaData implements Cloneable {
  // Fields below are serialized as XML elements; names are part of the
  // wire format and must not change.
  @XmlElement
  public URI url;
  @XmlElement
  public String color;
  @XmlElement
  public String label;
  @XmlElement
  public LineOptions lines;
  @XmlElement
  public BarOptions bars;
  @XmlElement
  public PointOptions points;
  @XmlElement
  public int xaxis;
  @XmlElement
  public int yaxis;
  @XmlElement
  public boolean clickable;
  @XmlElement
  public boolean hoverable;
  @XmlElement
  public int shadowSize;
  @XmlElement
  public int highlightColor;
  // Series data as [x, y] pairs; not annotated, so not an @XmlElement.
  public ArrayList<ArrayList<Number>> data = null;

  public SeriesMetaData() {
  }

  public void setUrl(URI url) {
    this.url = url;
  }

  public URI getUrl() {
    return url;
  }

  public void setLineOptions(LineOptions lines) {
    this.lines = lines;
  }

  public LineOptions getLineOptions() {
    return lines;
  }

  public void setBarOptions(BarOptions bars) {
    this.bars = bars;
  }

  public BarOptions getBarOptions() {
    return bars;
  }

  public void setPointOptions(PointOptions points) {
    this.points = points;
  }

  public PointOptions getPointOptions() {
    return points;
  }

  public void setColor(String color) {
    this.color = color;
  }

  public String getColor() {
    return color;
  }

  public void setData(ArrayList<ArrayList<Number>> data) {
    this.data = data;
  }

  // NOTE(review): returns the internal list directly — callers can mutate
  // the series data; confirm whether a defensive copy is wanted.
  public ArrayList<ArrayList<Number>> getData() {
    return data;
  }

  public void setLabel(String label) {
    this.label = label;
  }

  public String getLabel() {
    return label;
  }

  // Shallow copy: the clone shares the data list and option objects with
  // the original.
  @Override
  public Object clone()throws CloneNotSupportedException{
    return super.clone();
  }
}
| 8,472 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Heatmap.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
import java.util.ArrayList;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(propOrder={})
public class Heatmap {
  // Collected heatmap cells, serialized by field access.
  @XmlElement
  private ArrayList<HeatMapPoint> data;
  // Upper bound of the color scale; defaults to 1.0.
  @XmlElement
  private double max = 1.0;
  @XmlElement
  private int radius;
  @XmlElement
  private int series;

  public Heatmap() {
    data = new ArrayList<HeatMapPoint>();
  }

  /** Adds one (x, y, value) cell to the heatmap. */
  public void put(int x, int y, double v) {
    data.add(new HeatMapPoint(x, y, v));
  }

  public ArrayList<HeatMapPoint> getHeatmap() {
    return data;
  }

  public double getMax() {
    return max;
  }

  public void putMax(double m) {
    this.max = m;
  }

  public int getRadius() {
    return radius;
  }

  public void putRadius(int r) {
    this.radius = r;
  }

  public int getSeries() {
    return series;
  }

  public void putSeries(int s) {
    this.series = s;
  }
}
| 8,473 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/Widget.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
import java.net.URI;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement
public class Widget {
  // Grid placement and size of the widget on a dashboard.
  @XmlElement
  public int col;
  @XmlElement
  public int row;
  @XmlElement
  public int size_x;
  @XmlElement
  public int size_y;
  @XmlElement
  public String title;
  @XmlElement
  public URI src;
  // Search tokens derived from the title; see tokenize().
  @XmlElement
  public String[] tokens;

  public int getCol() {
    return this.col;
  }

  public void setCol(int c) {
    this.col = c;
  }

  public int getRow() {
    return this.row;
  }

  public void setRow(int r) {
    this.row = r;
  }

  public int getSize_x() {
    return this.size_x;
  }

  public void setSize_x(int sx) {
    this.size_x = sx;
  }

  public int getSize_y() {
    return this.size_y;
  }

  public void setSize_y(int sy) {
    this.size_y = sy;
  }

  public String getTitle() {
    return this.title;
  }

  public void setTitle(String t) {
    this.title = t;
  }

  public URI getSrc() {
    return this.src;
  }

  public void setSrc(URI u) {
    this.src = u;
  }

  // Accessors copy the array so callers cannot mutate the field in place.
  public String[] getTokens() {
    return this.tokens.clone();
  }

  public void setTokens(String[] t) {
    this.tokens = t.clone();
  }

  /** Splits the title on single spaces and stores the words as tokens. */
  public void tokenize() {
    this.tokens = title.split(" ");
  }
}
| 8,474 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/bean/SeriesOptions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.bean;
/**
 * Common rendering options shared by line, bar, and point series.
 * Series are visible and unfilled by default.
 */
public class SeriesOptions {
  public boolean show = true;
  public boolean fill = false;
  public int lineWidth;
  public String fillColor;

  public boolean getShow() {
    return this.show;
  }

  public void setShow(boolean flag) {
    this.show = flag;
  }

  public boolean getFill() {
    return this.fill;
  }

  public void setFill(boolean flag) {
    this.fill = flag;
  }

  public int getLineWidth() {
    return this.lineWidth;
  }

  public void setLineWidth(int width) {
    this.lineWidth = width;
  }

  public String getFillColor() {
    return this.fillColor;
  }

  public void setFillColor(String color) {
    this.fillColor = color;
  }
}
| 8,475 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/HeatmapController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.io.StringWriter;
import java.text.SimpleDateFormat;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.TimeHandler;
import org.apache.hadoop.chukwa.hicc.bean.Heatmap;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
@Path("/heatmap")
public class HeatmapController {
static Logger log = Logger.getLogger(HeatmapController.class);
@Context
VelocityEngine velocity;
/**
* Render a heatmap
* @param request is HTTP request object
* @param metricGroup is metric group name
* @param metric is metric name
* @param start is start time in yyyyMMddHHmmss format
* @param end is end time in yyyyMMddHHmmss format
* @param max is maximum possible value of the heatmap
* @param scale is the range of possible values
* @param width is width of the image
* @param height is height of the image
* @return html page of login screen
*
* @response.representation.200.doc Login screen
* @response.representation.200.mediaType text/html
* @response.representation.200.example Example available in HICC UI
*/
@GET
@Path("{metricGroup}/{metric}")
@Produces(MediaType.APPLICATION_JSON)
public Heatmap getHeatmap(@Context HttpServletRequest request,
@PathParam("metricGroup") String metricGroup,
@PathParam("metric") String metric, @QueryParam("start") String start,
@QueryParam("end") String end,
@QueryParam("max") @DefaultValue("1.0") double max,
@QueryParam("scale") @DefaultValue("100") double scale,
@QueryParam("width") @DefaultValue("700") int width,
@QueryParam("height") @DefaultValue("400") int height) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
Heatmap heatmap = null;
long startTime = 0;
long endTime = 0;
TimeHandler time = new TimeHandler(request);
try {
if (start != null) {
startTime = sdf.parse(start).getTime();
} else {
startTime = time.getStartTime();
}
if (end != null) {
endTime = sdf.parse(end).getTime();
} else {
endTime = time.getEndTime();
}
heatmap = ChukwaHBaseStore.getHeatmap(metricGroup, metric, startTime,
endTime, max, scale, width, height);
} catch (Throwable e) {
log.error(ExceptionUtil.getStackTrace(e));
}
return heatmap;
}
/**
* Render a heatmap from HBase series
*
* @param metricGroup is metric group name
* @param metric is metric name
* @param width is width of the image
* @param height is height of the image
* @param title is title of the heatmap
* @param yLabel is y axis label for the heatmap
* @return heatmap chart in html
*/
@GET
@Path("render/{metricGroup}/{metric}")
@Produces(MediaType.TEXT_HTML)
public String heatmapTemplate(@PathParam("metricGroup") @DefaultValue("SystemMetrics") String metricGroup,
@PathParam("metric") @DefaultValue("cpu.combined.") String metric,
@QueryParam("width") @DefaultValue("700px") String width,
@QueryParam("height") @DefaultValue("400px") String height,
@QueryParam("title") @DefaultValue("CPU") String title,
@QueryParam("yLabel") @DefaultValue("device") String yLabel) {
StringBuilder url = new StringBuilder();
url.append("/hicc/v1/heatmap/").append(metricGroup).append("/").append(metric);
VelocityContext context = new VelocityContext();
StringWriter sw = null;
try {
context.put("url", url.toString());
context.put("width", width);
context.put("height", height);
context.put("title", title);
context.put("yLabel", yLabel);
Template template = velocity.getTemplate("heatmap.vm");
sw = new StringWriter();
template.merge(context, sw);
} catch (Exception e) {
e.printStackTrace();
return e.getMessage();
}
return sw.toString();
}
}
| 8,476 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.lang.reflect.Type;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import org.apache.hadoop.chukwa.datacollection.agent.rest.Examples;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.TimeHandler;
import org.apache.hadoop.chukwa.hicc.bean.Series;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
@Path("/metrics")
public class MetricsController {

  /**
   * Query metrics stored in HBase table.
   *
   * @param request is HTTP request object; supplies the default time range
   *        when start/end are omitted
   * @param metric is metric name
   * @param source is data source
   * @param start is start time in yyyyMMddHHmmss format (optional)
   * @param end is end time in yyyyMMddHHmmss format (optional)
   * @return Metrics JSON
   * @throws WebApplicationException 400 when start/end cannot be parsed
   */
  @GET
  @Path("series/{metric}/{source}")
  @Produces("application/json")
  public String getSeries(@Context HttpServletRequest request, @PathParam("metric") String metric, @PathParam("source") String source, @QueryParam("start") String start, @QueryParam("end") String end) {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
    String buffer = "";
    Series series;
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    try {
      if(start!=null) {
        startTime = sdf.parse(start).getTime();
      } else {
        startTime = time.getStartTime();
      }
      if(end!=null) {
        endTime = sdf.parse(end).getTime();
      } else {
        endTime = time.getEndTime();
      }
      series = ChukwaHBaseStore.getSeries(metric, source, startTime, endTime);
      buffer = series.toString();
    } catch (ParseException e) {
      throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
          .entity("Start/End date parse error. Format: yyyyMMddHHmmss.").build());
    }
    return buffer;
  }

  /**
   * Query metric series by session key, this is useful to query same metric
   * from multiple data sources, such as multiple hosts.
   *
   * @param request is HTTP request object; the session attribute named by
   *        skey must hold a comma separated list of source names
   * @param metricGroup is metric group name
   * @param metric is metric name
   * @param skey is session key which maps to multiple data sources
   * @param start is start time in yyyyMMddHHmmss format (optional)
   * @param end is end time in yyyyMMddHHmmss format (optional)
   * @return List of metric series as JSON
   * @throws WebApplicationException 400 when the session key is undefined,
   *         unknown, or the start/end dates cannot be parsed
   */
  @GET
  @Path("series/{metricGroup}/{metric}/session/{sessionKey}")
  @Produces("application/json")
  public String getSeriesBySessionAttribute(@Context HttpServletRequest request, @PathParam("metricGroup") String metricGroup, @PathParam("metric") String metric, @PathParam("sessionKey") String skey, @QueryParam("start") String start, @QueryParam("end") String end) {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
    String buffer = "";
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    try {
      if(start!=null) {
        startTime = sdf.parse(start).getTime();
      } else {
        startTime = time.getStartTime();
      }
      if(end!=null) {
        endTime = sdf.parse(end).getTime();
      } else {
        endTime = time.getEndTime();
      }
      if(skey!=null) {
        HttpSession session = request.getSession();
        Object sessionValue = session.getAttribute(skey);
        // Guard against an unknown key: previously this dereferenced null
        // and surfaced as an opaque 500 instead of a client error.
        if (sessionValue == null) {
          throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
              .entity("No session attribute found for key: " + skey).build());
        }
        String[] sourcekeys = sessionValue.toString().split(",");
        Type seriesListType = new TypeToken<ArrayList<Series>>(){}.getType();
        ArrayList<Series> seriesList = new ArrayList<Series>();
        for(String source : sourcekeys) {
          // Skip empty entries produced by leading/trailing commas.
          if (source == null || source.equals("")) {
            continue;
          }
          Series output = ChukwaHBaseStore.getSeries(metricGroup, metric, source, startTime, endTime);
          seriesList.add(output);
        }
        buffer = new Gson().toJson(seriesList, seriesListType);
      } else {
        throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
            .entity("No session attribute key defined.").build());
      }
    } catch (ParseException e) {
      throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
          .entity("Start/End date parse error. Format: yyyyMMddHHmmss.").build());
    }
    return buffer;
  }

  /**
   * Query all metric group names in HBase.
   *
   * @return a list of metric groups as JSON
   */
  @GET
  @Path("schema")
  @Produces("application/json")
  public String getTables() {
    Set<String> metricGroups = ChukwaHBaseStore.getMetricGroups();
    Type metricGroupsType = new TypeToken<List<String>>(){}.getType();
    String groups = new Gson().toJson(metricGroups, metricGroupsType);
    return groups;
  }

  /**
   * Query metric names by metric group.
   *
   * @param metricGroup is name of metric group
   * @return a list of metric names as JSON
   */
  @GET
  @Path("schema/{metricGroup}")
  @Produces("application/json")
  public String getMetrics(@PathParam("metricGroup") String metricGroup) {
    Set<String> metricNames = ChukwaHBaseStore.getMetricNames(metricGroup);
    Type metricsType = new TypeToken<List<String>>(){}.getType();
    String metrics = new Gson().toJson(metricNames, metricsType);
    return metrics;
  }

  /**
   * Query metrics source names by metric group.
   *
   * @param request HTTP Request object (unused; kept for API compatibility)
   * @param metricGroup is name of metric group
   * @return a list of metric source names as JSON
   */
  @GET
  @Path("source/{metricGroup}")
  @Produces("application/json")
  public String getSourceNames(@Context HttpServletRequest request, @PathParam("metricGroup") String metricGroup) {
    Set<String> sourceNames = ChukwaHBaseStore.getSourceNames(metricGroup);
    Type rowsType = new TypeToken<List<String>>(){}.getType();
    String rows = new Gson().toJson(sourceNames, rowsType);
    return rows;
  }
}
| 8,477 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/DashboardController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.io.StringWriter;
import java.util.Set;
import java.net.InetAddress;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.bean.Dashboard;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import com.google.gson.Gson;
@Path("dashboard")
public class DashboardController {
  static Logger LOG = Logger.getLogger(DashboardController.class);

  @Context
  private ServletContext context;
  @Context
  VelocityEngine velocity;

  /**
   * Load a dashboard view.
   *
   * @param request HTTP request object; the remote user scopes the lookup
   * @param id is dashboard unique identifier
   * @return Dashboard view in JSON structure
   *
   * @response.representation.200.doc Display dashboard JSON structure
   * @response.representation.200.mediaType application/json
   * @response.representation.200.example {@link Examples#SYSTEM_DASHBOARD}
   */
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  @Path("load/{id}")
  public String load(@Context HttpServletRequest request, @PathParam("id") String id) {
    Gson gson = new Gson();
    Dashboard dash = ChukwaHBaseStore.getDashboard(id, request.getRemoteUser());
    String json = gson.toJson(dash);
    return json;
  }

  /**
   * Save a dashboard view.
   *
   * @param request HTTP request object; the remote user scopes the update
   * @param id is dashboard unique identifier
   * @param buffer is JSON structure of dashboard view
   * @return 200 when saved, 400 when the store rejects the update
   *
   * @request.representation.example {@link Examples#USER_DASHBOARD}
   * @response.representation.200.doc Display save status code
   * @response.representation.200.mediaType text/plain
   */
  @PUT
  @Path("save/{id}")
  @Consumes(MediaType.APPLICATION_JSON)
  public Response save(@Context HttpServletRequest request, @PathParam("id") String id, String buffer) {
    Gson gson = new Gson();
    Dashboard dash = gson.fromJson(buffer, Dashboard.class);
    boolean result = ChukwaHBaseStore.updateDashboard(id, request.getRemoteUser(), dash);
    if(!result) {
      return Response.status(Status.BAD_REQUEST).build();
    }
    return Response.ok().build();
  }

  /**
   * Return HTTP request connection user name.
   *
   * @param request HTTP request object
   * @return username
   *
   * @response.representation.200.doc Username
   * @response.representation.200.mediaType text/plain
   * @response.representation.200.example admin
   */
  @GET
  @Path("whoami")
  @Produces(MediaType.TEXT_PLAIN)
  public String whoami(@Context HttpServletRequest request) {
    return request.getRemoteUser();
  }

  /**
   * Render Quick links to various services web portals (NameNode,
   * ResourceManager, HBase Master), discovered from the data source names
   * recorded in HBase.
   *
   * @return HTML page of quick links, or the exception message if template
   *         rendering failed
   *
   * @response.representation.200.doc Display quick link widget
   * @response.representation.200.mediaType text/html
   * @response.representation.200.example Example is availabe on HICC UI
   */
  @GET
  @Path("quicklinks")
  @Produces(MediaType.TEXT_HTML)
  public String quicklinks() {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    Configuration hconf = HBaseConfiguration.create();
    Configuration hadoop = new Configuration();
    String nn = "";
    String rm = "";
    String hm = "";
    // Source names are "host:ServiceName" pairs; match on the service part.
    Set<String> sourceNames = ChukwaHBaseStore.getSourceNames("");
    for (String source : sourceNames) {
      String[] sourceParts = source.split(":");
      if(sourceParts.length<2) {
        continue;
      }
      if(sourceParts[1].equals("NameNode")) {
        // Web port comes from the configured NameNode HTTP address.
        String[] parts = hadoop.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY).split(":");
        nn = resolveHost(sourceParts[0]) + ":" + parts[1];
      } else if(sourceParts[1].equals("ResourceManager")) {
        String[] parts = hadoop.get(YarnConfiguration.RM_WEBAPP_ADDRESS).split(":");
        rm = resolveHost(sourceParts[0]) + ":" + parts[1];
      } else if(sourceParts[1].equals("Master")) {
        hm = resolveHost(sourceParts[0]) + ":"
            + hconf.getInt("hbase.master.info.port", HConstants.DEFAULT_MASTER_INFOPORT);
      }
    }
    try {
      context.put("nn", nn);
      context.put("rm", rm);
      context.put("hm", hm);
      Template template = velocity.getTemplate("quick-links.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      // Log through log4j instead of printStackTrace so the failure is
      // visible in the server log; the message is returned to the client.
      LOG.error("quick links rendering failed", e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Best-effort resolution of a host name to its IP address. Falls back to
   * the original name when resolution fails, matching the previous inline
   * behavior in quicklinks().
   */
  private static String resolveHost(String hostname) {
    try {
      return InetAddress.getByName(hostname).getHostAddress();
    } catch (Exception e) {
      return hostname;
    }
  }
}
| 8,478 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/TileController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.io.StringWriter;
import java.lang.reflect.Type;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.TimeHandler;
import org.apache.hadoop.chukwa.hicc.bean.Chart;
import org.apache.hadoop.chukwa.hicc.bean.SeriesMetaData;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
@Path("/tile")
public class TileController extends ChartController{
  // Fixed: logger was previously created with ChartController.class, which
  // mislabeled this class's log entries.
  static Logger LOG = Logger.getLogger(TileController.class);
  // NOTE(review): sdf appears unused within this class; kept because
  // package-level code may reference it -- confirm before removing.
  SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");

  @Context
  VelocityEngine velocity;

  /**
   * Render a banner.
   *
   * @param id Reference ID of Chart stored in HBase chukwa_meta table.
   * @return html chart widget, or the exception message if rendering failed
   *
   * @response.representation.200.doc Render a banner base on chart id
   * @response.representation.200.mediaType text/html
   * @response.representation.200.example Example available in HICC UI
   */
  @GET
  @Path("draw/{id}")
  @Produces(MediaType.TEXT_HTML)
  public String draw(@PathParam("id") String id) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Chart chart = ChukwaHBaseStore.getChart(id);
      List<SeriesMetaData> series = chart.getSeries();
      Gson gson = new Gson();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      Template template = velocity.getTemplate("tile.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      // Log through log4j instead of printStackTrace so the failure is
      // visible in the server log; the message is returned to the client.
      LOG.error("tile rendering failed", e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Preview a banner tile.
   *
   * @param buffer is tile object in JSON
   * @return html for rendering a banner tile, or the exception message if
   *         rendering failed
   *
   * @response.representation.200.doc Preview a banner
   * @response.representation.200.mediaType text/html
   * @response.representation.200.example Example available in HICC UI
   */
  @PUT
  @Path("preview")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces(MediaType.TEXT_HTML)
  public String preview(String buffer) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Gson gson = new Gson();
      Chart chart = gson.fromJson(buffer, Chart.class);
      List<SeriesMetaData> series = chart.getSeries();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      Template template = velocity.getTemplate("tile.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      LOG.error("tile preview rendering failed", e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Preview a series JSON for banner tile.
   *
   * @param request HTTP request object; supplies the time range
   * @param buffer is banner tile configuration (list of SeriesMetaData)
   * @return series data rendered as a JSON list of strings
   *
   * @request.representation.example {@link Examples#CPU_SERIES_METADATA}
   * @response.representation.200.doc Preview a banner series
   * @response.representation.200.mediaType application/json
   * @response.representation.200.example Example available in REST API
   */
  @PUT
  @Path("preview/series")
  @Produces(MediaType.APPLICATION_JSON)
  public String previewSeries(@Context HttpServletRequest request, String buffer) {
    Type listType = new TypeToken<ArrayList<SeriesMetaData>>() {
    }.getType();
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    startTime = time.getStartTime();
    endTime = time.getEndTime();
    Gson gson = new Gson();
    ArrayList<SeriesMetaData> series = gson.fromJson(buffer, listType);
    List<String> data = ChukwaHBaseStore.getData(series, startTime, endTime);
    String result = gson.toJson(data);
    return result;
  }
}
| 8,479 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/ChartController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.io.StringWriter;
import java.lang.reflect.Type;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.TimeHandler;
import org.apache.hadoop.chukwa.hicc.bean.Chart;
import org.apache.hadoop.chukwa.hicc.bean.SeriesMetaData;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.sun.jersey.api.Responses;
@Path("/chart")
public class ChartController {
  static Logger LOG = Logger.getLogger(ChartController.class);
  // NOTE(review): sdf appears unused within this class; kept because the
  // TileController subclass and package-level code may reference it --
  // confirm before removing.
  SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");

  @Context
  VelocityEngine velocity;

  /**
   * Render chart using flot.js.
   *
   * @param id Reference ID of Chart stored in HBase chukwa_meta table.
   * @return chart widget HTML, or the exception message if rendering failed
   */
  @GET
  @Path("draw/{id}")
  @Produces(MediaType.TEXT_HTML)
  public String draw(@PathParam("id") String id) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Chart chart = ChukwaHBaseStore.getChart(id);
      List<SeriesMetaData> series = chart.getSeries();
      Gson gson = new Gson();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      Template template = velocity.getTemplate("chart.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      // Log through log4j instead of printStackTrace so the failure is
      // visible in the server log; the message is returned to the client.
      LOG.error("chart rendering failed", e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Describe chart meta data.
   *
   * @param id Chart ID
   * @return chart meta data as JSON
   *
   * @response.representation.200.doc Display chart configuration options
   * @response.representation.200.mediaType application/json
   * @response.representation.200.example {@link Examples#CPU_UTILIZATION}
   */
  @GET
  @Path("describe/{id}")
  @Produces(MediaType.APPLICATION_JSON)
  public String describe(@PathParam("id") String id) {
    Chart chart = ChukwaHBaseStore.getChart(id);
    Gson gson = new Gson();
    String buffer = gson.toJson(chart);
    return buffer;
  }

  /**
   * Create a new chart meta data.
   *
   * @param buffer holds incoming JSON of Chart object
   * @return 200 with the generated chart id, or 406 when creation failed
   *
   * @request.representation.example {@link Examples#MEMORY_UTILIZATION}
   */
  @POST
  @Path("save")
  @Consumes(MediaType.APPLICATION_JSON)
  public Response create(String buffer) {
    Gson gson = new Gson();
    Chart chart = gson.fromJson(buffer, Chart.class);
    String id = ChukwaHBaseStore.createChart(chart);
    if (id != null) {
      return Response.ok(id).build();
    }
    return Responses.notAcceptable().build();
  }

  /**
   * Save chart meta data.
   *
   * @param id is unique identifier of Chart object
   * @param buffer holds incoming JSON of Chart object
   * @return 200 when the chart was stored
   *
   * @request.representation.example {@link Examples#DISK_UTILIZATION}
   */
  @PUT
  @Path("save/{id}")
  @Consumes(MediaType.APPLICATION_JSON)
  public Response save(@PathParam("id") String id, String buffer) {
    Gson gson = new Gson();
    Chart chart = gson.fromJson(buffer, Chart.class);
    ChukwaHBaseStore.putChart(id, chart);
    return Response.ok().build();
  }

  /**
   * Display a chart base on chart configuration from REST API input.
   *
   * @param buffer holds incoming JSON of Chart object
   * @return segment of chart HTML output, or the exception message if
   *         rendering failed
   *
   * @request.representation.example {@link Examples#NETWORK_UTILIZATION}
   */
  @PUT
  @Path("preview")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces(MediaType.TEXT_HTML)
  public String preview(String buffer) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Gson gson = new Gson();
      Chart chart = gson.fromJson(buffer, Chart.class);
      List<SeriesMetaData> series = chart.getSeries();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      Template template = velocity.getTemplate("chart.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      LOG.error("chart preview rendering failed", e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Display metrics series in JSON.
   *
   * @param request HTTP request object; supplies the time range
   * @param buffer list of SeriesMetaData
   * @return metrics JSON
   *
   * @request.representation.example {@link Examples#CPU_SERIES_METADATA}
   * @response.representation.200.doc Display series data in JSON
   * @response.representation.200.mediaType application/json
   */
  @PUT
  @Path("preview/series")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces("application/json")
  public String previewSeries(@Context HttpServletRequest request, String buffer) {
    Type listType = new TypeToken<ArrayList<SeriesMetaData>>() {
    }.getType();
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    startTime = time.getStartTime();
    endTime = time.getEndTime();
    Gson gson = new Gson();
    ArrayList<SeriesMetaData> series = gson.fromJson(buffer, listType);
    series = ChukwaHBaseStore.getChartSeries(series, startTime, endTime);
    String result = gson.toJson(series);
    return result;
  }
}
| 8,480 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/Examples.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.chukwa.hicc.bean.Chart;
import org.apache.hadoop.chukwa.hicc.bean.Dashboard;
import org.apache.hadoop.chukwa.hicc.bean.Series;
import org.apache.hadoop.chukwa.hicc.bean.SeriesMetaData;
import org.apache.hadoop.chukwa.hicc.bean.Widget;
/**
 * Example chart, widget, dashboard and series objects used by the HICC REST
 * API documentation. The static fields are referenced from
 * {@code @response.representation} / {@code @request.representation} javadoc
 * tags in the REST controllers, and the static initializer also seeds the
 * default/system/user dashboards.
 *
 * NOTE: field names (including the historical misspellings
 * HBASE_MASTER_MOMORY_WIDGET and CPU_HEAPMAP_WIDGET) are part of the public
 * API surface and are intentionally left unchanged.
 */
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="MS_SHOULD_BE_FINAL")
public class Examples {
  // Chart examples
  public static Chart SYSTEM_LOAD_AVERAGE;
  public static Chart CPU_UTILIZATION;
  public static Chart MEMORY_UTILIZATION;
  public static Chart DISK_UTILIZATION;
  public static Chart NETWORK_UTILIZATION;
  public static Chart SWAP_UTILIZATION;
  public static Chart NAMENODE_MEMORY;
  public static Chart HDFS_USAGE;
  public static Chart RESOURCE_MANAGER_MEMORY;
  public static Chart NODE_MANAGER_HEALTH;
  public static Chart HDFS_HA;
  public static Chart HDFS_LOAD;
  public static Chart NAMENODE_RPC_LATENCY;
  public static Chart DATANODES;
  public static Chart HBASE_MASTER_MEMORY;
  // Widget examples
  public static Widget SYSTEM_LOAD_AVERAGE_WIDGET;
  public static Widget WELCOME_PAGE_WIDGET;
  public static Widget TRIAL_DOWNLOAD_WIDGET;
  public static Widget CLUSTER_RUNNING_WIDGET;
  public static Widget USER_WORKING_WIDGET;
  public static Widget APP_RUNNING_WIDGET;
  public static Widget TRIAL_ABANDON_RATE_WIDGET;
  public static Widget CLUSTERS_HEALTH_WIDGET;
  public static Widget TOP_ACTIVE_CLUSTERS_WIDGET;
  public static Widget TOP_APP_WIDGET;
  public static Widget APP_USAGE_WIDGET;
  public static Widget QUICK_LINKS_WIDGET;
  public static Widget LOG_SEARCH_WIDGET;
  public static Widget YARN_APP_WIDGET;
  public static Widget HDFS_WIDGET;
  public static Widget HBASE_TABLE_WIDGET;
  public static Widget TOP_USER_WIDGET;
  public static Widget HDFS_HA_STATE_WIDGET;
  public static Widget HDFS_LOAD_WIDGET;
  public static Widget HDFS_NAMENODE_LATENCY_WIDGET;
  public static Widget DATANODES_HEALTH_WIDGET;
  public static Widget NODE_MANAGERS_HEALTH_WIDGET;
  public static Widget HDFS_REMAINING_WIDGET;
  public static Widget NAMENODE_MEMORY_WIDGET;
  public static Widget RESOURCE_MANAGER_MEMORY_WIDGET;
  public static Widget HBASE_MASTER_MOMORY_WIDGET;
  public static Widget CPU_UTILIZATION_WIDGET;
  public static Widget MEMORY_UTILIZATION_WIDGET;
  public static Widget SWAP_UTILIZATION_WIDGET;
  public static Widget DISK_UTILIZATION_WIDGET;
  public static Widget NETWORK_UTILIZATION_WIDGET;
  public static Widget CPU_HEAPMAP_WIDGET;
  public static Widget HDFS_UI_WIDGET;
  public static Widget HBASE_MASTER_UI_WIDGET;
  public static List<Widget> WIDGET_LIST;
  public static Dashboard DEFAULT_DASHBOARD;
  public static Dashboard USER_DASHBOARD;
  public static Dashboard SYSTEM_DASHBOARD;
  // series examples
  public static Series CPU_METRICS;
  // SeriesMetaData examples
  public static List<SeriesMetaData> CPU_SERIES_METADATA;
  public static List<SeriesMetaData> HDFS_USAGE_SERIES_METADATA;

  static {
    try {
      // All example charts are keyed by the local hostname so they render
      // real data on a single-node demo install.
      final String hostname = InetAddress.getLocalHost().getHostName().toLowerCase();
      final String[] metrics = { "SystemMetrics.LoadAverage.1" };
      SYSTEM_LOAD_AVERAGE = Chart.createChart("1", "System Load Average", metrics, hostname, "");
      final String[] cpuMetrics = { "SystemMetrics.cpu.combined", "SystemMetrics.cpu.sys", "SystemMetrics.cpu.user" };
      CPU_UTILIZATION = Chart.createChart("2", "CPU Utilization", cpuMetrics, hostname, "percent");
      final String[] memMetrics = { "SystemMetrics.memory.FreePercent", "SystemMetrics.memory.UsedPercent"};
      MEMORY_UTILIZATION = Chart.createChart("3", "Memory Utilization", memMetrics, hostname, "percent");
      final String[] diskMetrics = { "SystemMetrics.disk.ReadBytes", "SystemMetrics.disk.WriteBytes" };
      DISK_UTILIZATION = Chart.createChart("4", "Disk Utilization", diskMetrics, hostname, "bytes-decimal");
      final String[] netMetrics = { "SystemMetrics.network.TxBytes", "SystemMetrics.network.RxBytes" };
      NETWORK_UTILIZATION = Chart.createChart("5", "Network Utilization", netMetrics, hostname, "bytes");
      final String[] swapMetrics = { "SystemMetrics.swap.Total", "SystemMetrics.swap.Used", "SystemMetrics.swap.Free" };
      SWAP_UTILIZATION = Chart.createChart("6", "Swap Utilization", swapMetrics, hostname, "bytes-decimal");
      // Namenode heap usage; metric source is "<host>:NameNode"
      StringBuilder namenode = new StringBuilder();
      namenode.append(hostname);
      namenode.append(":NameNode");
      final String[] namenodeHeap = { "HadoopMetrics.jvm.JvmMetrics.MemHeapUsedM", "HadoopMetrics.jvm.JvmMetrics.MemHeapMaxM" };
      NAMENODE_MEMORY = Chart.createCircle("7", "Namenode Memory", namenodeHeap, namenode.toString(), "%", "up");
      // HDFS Usage
      final String[] hdfsUsage = { "HadoopMetrics.dfs.FSNamesystem.CapacityRemainingGB", "HadoopMetrics.dfs.FSNamesystem.CapacityTotalGB" };
      HDFS_USAGE = Chart.createCircle("8", "HDFS Remaining", hdfsUsage, hostname, "%", "down");
      // Resource Manager Memory; metric source is "<host>:ResourceManager"
      StringBuilder rmnode = new StringBuilder();
      rmnode.append(hostname);
      rmnode.append(":ResourceManager");
      final String[] rmHeap = { "HadoopMetrics.jvm.JvmMetrics.MemHeapUsedM", "HadoopMetrics.jvm.JvmMetrics.MemHeapMaxM" };
      RESOURCE_MANAGER_MEMORY = Chart.createCircle("9", "Resource Manager Memory", rmHeap, rmnode.toString(), "%", "up");
      // Node Managers Health
      final String[] nmh = { "HadoopMetrics.yarn.ClusterMetrics.NumActiveNMs", "HadoopMetrics.yarn.ClusterMetrics.NumLostNMs" };
      NODE_MANAGER_HEALTH = Chart.createTile("10", "Node Managers Health", "Node Managers", "Active/Lost", nmh, hostname, "glyphicon-th");
      // High Availability State
      final String[] ha = { "HadoopMetrics.dfs.FSNamesystem.HAState" };
      HDFS_HA = Chart.createTile("11", "HDFS High Availability State", "HDFS High Availability", "", ha, hostname, "glyphicon-random");
      // HDFS Load
      final String[] hdfsLoad = { "HadoopMetrics.dfs.FSNamesystem.TotalLoad" };
      HDFS_LOAD = Chart.createTile("12", "HDFS Load Average", "HDFS Load", "", hdfsLoad, hostname, "glyphicon-signal");
      // Namenode RPC Latency
      final String[] nnLatency = { "HadoopMetrics.rpc.rpc.RpcProcessingTimeAvgTime" };
      NAMENODE_RPC_LATENCY = Chart.createTile("13", "NameNode Latency", "NameNode RPC Latency", "Milliseconds", nnLatency, hostname, "glyphicon-tasks");
      // Datanode Health
      final String[] dnHealth = { "HadoopMetrics.dfs.FSNamesystem.StaleDataNodes" };
      DATANODES = Chart.createTile("14", "Datanodes Health", "Datanodes", "Dead", dnHealth, hostname, "glyphicon-hdd");
      // HBase Master Memory; metric source is "<host>:Master"
      StringBuilder hbaseMaster = new StringBuilder();
      hbaseMaster.append(hostname);
      hbaseMaster.append(":Master");
      final String[] hbm = { "HBaseMetrics.jvm.JvmMetrics.MemHeapUsedM", "HBaseMetrics.jvm.JvmMetrics.MemHeapMaxM" };
      HBASE_MASTER_MEMORY = Chart.createCircle("15", "HBase Master Memory", hbm, hbaseMaster.toString(), "%", "up");
      CPU_SERIES_METADATA = CPU_UTILIZATION.getSeries();
      HDFS_USAGE_SERIES_METADATA = HDFS_USAGE.getSeries();
      // CPU_METRICS = new Series("SystemMetrics.LoadAverage.1");
      // CPU_METRICS.add(1234567890L, 0.0d);
      // CPU_METRICS.add(1234567891L, 1.0d);
      // CPU_METRICS.add(1234567892L, 2.0d);
      // CPU_METRICS.add(1234567893L, 3.0d);
      // Populate default widgets
      Widget widget = new Widget();
      widget.setTitle("System Load Average");
      widget.setSrc(new URI("/hicc/v1/chart/draw/1"));
      widget.setCol(1);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      SYSTEM_LOAD_AVERAGE_WIDGET = widget;
      // Populate default dashboard
      Dashboard dashboard = new Dashboard();
      widget = new Widget();
      widget.setTitle("Welcome Page");
      widget.setSrc(new URI("/hicc/welcome.html"));
      widget.setCol(1);
      widget.setRow(1);
      widget.setSize_x(12);
      widget.setSize_y(7);
      WELCOME_PAGE_WIDGET = widget;
      dashboard.add(WELCOME_PAGE_WIDGET);
      DEFAULT_DASHBOARD = dashboard;
      widget = new Widget();
      widget.setTitle("Trial Downloading");
      widget.setSrc(new URI("/hicc/home/downloads.html"));
      widget.setCol(1);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      TRIAL_DOWNLOAD_WIDGET = widget;
      widget = new Widget();
      widget.setTitle("Cluster Running");
      widget.setSrc(new URI("/hicc/home/clusters.html"));
      widget.setCol(3);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      CLUSTER_RUNNING_WIDGET = widget;
      widget = new Widget();
      widget.setTitle("Users Working");
      widget.setSrc(new URI("/hicc/home/users.html"));
      widget.setCol(5);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      USER_WORKING_WIDGET = widget;
      widget = new Widget();
      widget.setTitle("Applications Running");
      widget.setSrc(new URI("/hicc/home/apps.html"));
      widget.setCol(7);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      APP_RUNNING_WIDGET = widget;
      widget = new Widget();
      widget.setTitle("Trial Abandon Rate");
      widget.setSrc(new URI("/hicc/v1/circles/draw/11"));
      widget.setCol(1);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(2);
      TRIAL_ABANDON_RATE_WIDGET = widget;
      widget = new Widget();
      widget.setTitle("Clusters Health");
      widget.setSrc(new URI("/hicc/v1/circles/draw/12"));
      widget.setCol(3);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(2);
      CLUSTERS_HEALTH_WIDGET = widget;
      widget = new Widget();
      widget.setTitle("Top Active Clusters");
      widget.setSrc(new URI("/hicc/clusters/"));
      widget.setCol(5);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(2);
      TOP_ACTIVE_CLUSTERS_WIDGET = widget;
      widget = new Widget();
      widget.setTitle("Top Applications");
      widget.setSrc(new URI("/hicc/apps/"));
      widget.setCol(7);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(2);
      TOP_APP_WIDGET = widget;
      widget = new Widget();
      widget.setTitle("Applications Usage");
      widget.setSrc(new URI("/hicc/apps/apps-usage.html"));
      widget.setCol(7);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(2);
      APP_USAGE_WIDGET = widget;
      // Populate user dashboards
      dashboard = new Dashboard();
      widget = new Widget();
      widget.setTitle("Quick Links");
      widget.setSrc(new URI("/hicc/v1/dashboard/quicklinks"));
      widget.setCol(1);
      widget.setRow(1);
      widget.setSize_x(12);
      widget.setSize_y(7);
      QUICK_LINKS_WIDGET = widget;
      dashboard.add(QUICK_LINKS_WIDGET);
      // Log Search widget
      widget = new Widget();
      widget.setTitle("Log Search");
      widget.setSrc(new URI("/hicc/ajax-solr/chukwa"));
      widget.setCol(1);
      widget.setRow(1);
      widget.setSize_x(6);
      widget.setSize_y(6);
      LOG_SEARCH_WIDGET = widget;
      // Applications
      widget = new Widget();
      widget.setTitle("YARN Applications");
      widget.setSrc(new URI("http://localhost:8088/"));
      widget.setCol(1);
      widget.setRow(7);
      widget.setSize_x(6);
      widget.setSize_y(6);
      YARN_APP_WIDGET = widget;
      // Hadoop Distributed File System
      widget = new Widget();
      widget.setTitle("HDFS");
      widget.setSrc(new URI("http://localhost:50070/explorer.html#/"));
      widget.setCol(1);
      widget.setRow(7);
      widget.setSize_x(6);
      widget.setSize_y(6);
      HDFS_WIDGET = widget;
      // HBase Tables
      widget = new Widget();
      widget.setTitle("HBase Tables");
      widget.setSrc(new URI("http://localhost:50654/tablesDetailed.jsp"));
      widget.setCol(1);
      widget.setRow(14);
      widget.setSize_x(6);
      widget.setSize_y(6);
      HBASE_TABLE_WIDGET = widget;
      // NOTE(review): this intentionally overwrites the earlier TOP_APP_WIDGET
      // assignment with a smaller layout; confirm whether a distinct field was
      // intended for the user dashboard variant.
      widget = new Widget();
      widget.setTitle("Top Applications");
      widget.setSrc(new URI("/hicc/apps/"));
      widget.setCol(1);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(2);
      TOP_APP_WIDGET = widget;
      widget = new Widget();
      widget.setTitle("Top Users");
      widget.setSrc(new URI("/hicc/users/"));
      widget.setCol(1);
      widget.setRow(3);
      widget.setSize_x(2);
      widget.setSize_y(2);
      TOP_USER_WIDGET = widget;
      USER_DASHBOARD = dashboard;
      // Populate system dashboards
      dashboard = new Dashboard();
      widget = new Widget();
      widget.setTitle("HDFS High Availability State");
      widget.setSrc(new URI("/hicc/v1/tile/draw/11"));
      widget.setCol(1);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      HDFS_HA_STATE_WIDGET = widget;
      dashboard.add(HDFS_HA_STATE_WIDGET);
      widget = new Widget();
      widget.setTitle("HDFS Load");
      widget.setSrc(new URI("/hicc/v1/tile/draw/12"));
      widget.setCol(3);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      HDFS_LOAD_WIDGET = widget;
      dashboard.add(HDFS_LOAD_WIDGET);
      widget = new Widget();
      widget.setTitle("HDFS Namenode Latency");
      widget.setSrc(new URI("/hicc/v1/tile/draw/13"));
      widget.setCol(5);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      HDFS_NAMENODE_LATENCY_WIDGET = widget;
      dashboard.add(HDFS_NAMENODE_LATENCY_WIDGET);
      widget = new Widget();
      widget.setTitle("Datanodes Health");
      widget.setSrc(new URI("/hicc/v1/tile/draw/14"));
      widget.setCol(7);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      DATANODES_HEALTH_WIDGET = widget;
      dashboard.add(DATANODES_HEALTH_WIDGET);
      widget = new Widget();
      widget.setTitle("Node Managers Health");
      widget.setSrc(new URI("/hicc/v1/tile/draw/10"));
      widget.setCol(9);
      widget.setRow(1);
      widget.setSize_x(2);
      widget.setSize_y(1);
      NODE_MANAGERS_HEALTH_WIDGET = widget;
      dashboard.add(NODE_MANAGERS_HEALTH_WIDGET);
      widget = new Widget();
      widget.setTitle("HDFS Remaining");
      widget.setSrc(new URI("/hicc/v1/circles/draw/8"));
      widget.setCol(1);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(2);
      HDFS_REMAINING_WIDGET = widget;
      dashboard.add(HDFS_REMAINING_WIDGET);
      widget = new Widget();
      widget.setTitle("Namenode Memory");
      widget.setSrc(new URI("/hicc/v1/circles/draw/7"));
      widget.setCol(3);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(2);
      NAMENODE_MEMORY_WIDGET = widget;
      dashboard.add(NAMENODE_MEMORY_WIDGET);
      widget = new Widget();
      widget.setTitle("Resource Manager Memory");
      widget.setSrc(new URI("/hicc/v1/circles/draw/9"));
      widget.setCol(5);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(2);
      RESOURCE_MANAGER_MEMORY_WIDGET = widget;
      dashboard.add(RESOURCE_MANAGER_MEMORY_WIDGET);
      widget = new Widget();
      widget.setTitle("HBase Master Memory");
      widget.setSrc(new URI("/hicc/v1/circles/draw/15"));
      widget.setCol(7);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(2);
      HBASE_MASTER_MOMORY_WIDGET = widget;
      dashboard.add(HBASE_MASTER_MOMORY_WIDGET);
      widget = new Widget();
      widget.setTitle("System Load Average");
      widget.setSrc(new URI("/hicc/v1/chart/draw/1"));
      widget.setCol(9);
      widget.setRow(2);
      widget.setSize_x(2);
      widget.setSize_y(1);
      SYSTEM_LOAD_AVERAGE_WIDGET = widget;
      dashboard.add(SYSTEM_LOAD_AVERAGE_WIDGET);
      widget = new Widget();
      widget.setTitle("CPU Utilization");
      widget.setSrc(new URI("/hicc/v1/chart/draw/2"));
      widget.setCol(9);
      widget.setRow(3);
      widget.setSize_x(2);
      widget.setSize_y(1);
      CPU_UTILIZATION_WIDGET = widget;
      dashboard.add(CPU_UTILIZATION_WIDGET);
      widget = new Widget();
      widget.setTitle("Memory Utilization");
      widget.setSrc(new URI("/hicc/v1/chart/draw/3"));
      widget.setCol(9);
      widget.setRow(4);
      widget.setSize_x(2);
      widget.setSize_y(1);
      MEMORY_UTILIZATION_WIDGET = widget;
      dashboard.add(MEMORY_UTILIZATION_WIDGET);
      widget = new Widget();
      widget.setTitle("Swap Utilization");
      widget.setSrc(new URI("/hicc/v1/chart/draw/6"));
      widget.setCol(9);
      widget.setRow(5);
      widget.setSize_x(2);
      widget.setSize_y(1);
      SWAP_UTILIZATION_WIDGET = widget;
      dashboard.add(SWAP_UTILIZATION_WIDGET);
      widget = new Widget();
      widget.setTitle("Disk Utilization");
      widget.setSrc(new URI("/hicc/v1/chart/draw/4"));
      widget.setCol(1);
      widget.setRow(4);
      widget.setSize_x(4);
      widget.setSize_y(2);
      DISK_UTILIZATION_WIDGET = widget;
      dashboard.add(DISK_UTILIZATION_WIDGET);
      widget = new Widget();
      widget.setTitle("Network Utilization");
      widget.setSrc(new URI("/hicc/v1/chart/draw/5"));
      widget.setCol(5);
      widget.setRow(4);
      widget.setSize_x(4);
      widget.setSize_y(2);
      NETWORK_UTILIZATION_WIDGET = widget;
      dashboard.add(NETWORK_UTILIZATION_WIDGET);
      SYSTEM_DASHBOARD = dashboard;
      // CPU heatmap
      widget = new Widget();
      widget.setTitle("CPU Heatmap");
      widget.setSrc(new URI("/hicc/v1/heatmap/render/SystemMetrics/cpu.combined."));
      widget.setCol(1);
      widget.setRow(5);
      widget.setSize_x(6);
      widget.setSize_y(5);
      CPU_HEAPMAP_WIDGET = widget;
      // HDFS Namenode
      widget = new Widget();
      widget.setTitle("HDFS UI");
      widget.setSrc(new URI("http://localhost:50070/"));
      widget.setCol(1);
      widget.setRow(11);
      widget.setSize_x(6);
      widget.setSize_y(6);
      HDFS_UI_WIDGET = widget;
      // HBase Master
      widget = new Widget();
      widget.setTitle("HBase Master UI");
      widget.setSrc(new URI("http://localhost:16010/"));
      widget.setCol(1);
      widget.setRow(18);
      widget.setSize_x(6);
      widget.setSize_y(6);
      HBASE_MASTER_UI_WIDGET = widget;
      WIDGET_LIST = new ArrayList<Widget>();
      WIDGET_LIST.add(HDFS_HA_STATE_WIDGET);
      WIDGET_LIST.add(HDFS_UI_WIDGET);
      WIDGET_LIST.add(HDFS_LOAD_WIDGET);
    } catch (URISyntaxException e) {
      // All URIs above are hard-coded, so this should never happen; surface
      // it anyway instead of silently leaving every example field null.
      e.printStackTrace();
    } catch (UnknownHostException e) {
      // Local hostname could not be resolved; the host-keyed charts cannot
      // be built. Surface the failure rather than swallowing it.
      e.printStackTrace();
    }
  }
}
| 8,481 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/WidgetController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.inject.Singleton;
import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.bean.Widget;
import org.apache.log4j.Logger;
import com.google.gson.Gson;
@Path("widget")
public class WidgetController {

  static Logger LOG = Logger.getLogger(WidgetController.class);

  @Context
  private ServletContext context;

  @PostConstruct
  @Singleton
  public void init() {
  }

  /**
   * List widgets
   *
   * @param limit is number of widgets to return
   * @param offset is position in the widget list
   * @return list of widgets in HBase database
   *
   * @response.representation.200.doc Display list of HICC supported widgets
   * @response.representation.200.mediaType application/json
   * @response.representation.200.example {@link Examples#WIDGET_LIST}
   */
  @GET
  @Path("list")
  @Produces(MediaType.APPLICATION_JSON)
  public String listWidget(@DefaultValue("1000") @QueryParam("limit") int limit,
      @DefaultValue("0") @QueryParam("offset") int offset) {
    List<Widget> widgets = ChukwaHBaseStore.listWidget(limit, offset);
    Gson gson = new Gson();
    String json = gson.toJson(widgets);
    return json;
  }

  /**
   * Search for widget base on widget title
   * @param query is search criteria
   * @return list of widgets matched search criteria
   *
   * @response.representation.200.doc Display list of HICC widget that matches query
   * @response.representation.200.mediaType application/json
   * @response.representation.200.example {@link Examples#WIDGET_LIST}
   */
  @GET
  @Path("search/{query}")
  // Declared JSON producer for consistency with listWidget; the response body
  // has always been JSON.
  @Produces(MediaType.APPLICATION_JSON)
  public String searchWidget(@PathParam("query") String query) {
    List<Widget> widgets = ChukwaHBaseStore.searchWidget(query);
    Gson gson = new Gson();
    String json = gson.toJson(widgets);
    return json;
  }

  /**
   * View widget details
   * @param title is title of the widget
   * @return widget configuration
   *
   * @response.representation.200.doc View widget details
   * @response.representation.200.mediaType application/json
   * @response.representation.200.example {@link Examples#SYSTEM_LOAD_AVERAGE_WIDGET}
   */
  @GET
  @Path("view/{title}")
  // Declared JSON producer for consistency with listWidget; the response body
  // has always been JSON.
  @Produces(MediaType.APPLICATION_JSON)
  public String viewWidget(@PathParam("title") String title) {
    Widget w = ChukwaHBaseStore.viewWidget(title);
    Gson gson = new Gson();
    String json = gson.toJson(w);
    return json;
  }

  /**
   * Create a widget
   * @param buffer is widget configuration
   * @return Creation status code
   *
   * @request.representation.example {@link Examples#WELCOME_PAGE_WIDGET}
   * @response.representation.200.doc Widget creation successful
   * @response.representation.200.mediaType text/plain
   * @response.representation.200.example 200 OK
   * @response.representation.400.doc Widget creation unsuccessful
   * @response.representation.400.mediaType text/plain
   * @response.representation.400.example 400 Bad Request
   */
  @POST
  @Path("create")
  @Consumes(MediaType.APPLICATION_JSON)
  public Response createWidget(String buffer) {
    Gson gson = new Gson();
    Widget widget = gson.fromJson(buffer, Widget.class);
    boolean result = ChukwaHBaseStore.createWidget(widget);
    if(!result) {
      return Response.status(Status.BAD_REQUEST).build();
    }
    return Response.ok().build();
  }

  /**
   * Update a widget
   * @param title is widget title
   * @param buffer is widget object in JSON
   * @return Update status code
   *
   * @request.representation.example {@link Examples#WELCOME_PAGE_WIDGET}
   * @response.representation.200.doc Widget update is successful
   * @response.representation.200.mediaType text/plain
   * @response.representation.200.example 200 OK
   * @response.representation.400.doc Widget update unsuccessful
   * @response.representation.400.mediaType text/plain
   * @response.representation.400.example 400 Bad Request
   */
  @PUT
  @Path("update/{title}")
  @Consumes(MediaType.APPLICATION_JSON)
  public Response updateWidget(@PathParam("title") String title, String buffer){
    Gson gson = new Gson();
    Widget widget = gson.fromJson(buffer, Widget.class);
    boolean result = ChukwaHBaseStore.updateWidget(title, widget);
    if(!result) {
      return Response.status(Status.BAD_REQUEST).build();
    }
    return Response.ok().build();
  }

  /**
   * Delete a widget
   * @param title is widget title
   * @return Widget delete status code
   *
   * @response.representation.200.doc Widget deletion successful
   * @response.representation.200.mediaType text/plain
   * @response.representation.200.example 200 OK
   * @response.representation.400.doc Widget deletion unsuccessful
   * @response.representation.400.mediaType text/plain
   * @response.representation.400.example 400 Bad Request
   */
  @DELETE
  @Path("delete/{title}")
  public Response deleteWidget(@PathParam("title") String title) {
    boolean result = ChukwaHBaseStore.deleteWidget(title);
    if(!result) {
      return Response.status(Status.BAD_REQUEST).build();
    }
    return Response.ok().build();
  }
}
| 8,482 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/SessionController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.log4j.Logger;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
@Path("session")
public class SessionController {

  static Logger LOG = Logger.getLogger(SessionController.class);

  /**
   * Utility to get session attributes
   *
   * @param request is HTTP request object
   * @param id is session key
   * @return session attribute wrapped as a single-entry JSON object
   *         {id: value}; the value is null if the attribute is not set
   */
  @GET
  @Path("key/{id}")
  @Produces(MediaType.APPLICATION_JSON)
  public String draw(@Context HttpServletRequest request, @PathParam("id") String id) {
    String value = (String) request.getSession().getAttribute(id);
    Map<String, String> map = new HashMap<String, String>();
    map.put(id, value);
    Gson gson = new Gson();
    String json = gson.toJson(map);
    return json;
  }

  /**
   * Store session attributes
   *
   * @param request is HTTP request object
   * @param buffer is session key value pairs in JSON
   * @return session update status code
   */
  @PUT
  @Consumes(MediaType.APPLICATION_JSON)
  @Path("save")
  public Response save(@Context HttpServletRequest request, String buffer) {
    Gson gson = new Gson();
    Type stringStringMap = new TypeToken<Map<String, String>>(){}.getType();
    Map<String,String> map = gson.fromJson(buffer, stringStringMap);
    // map is already Map<String,String>; no cast of entrySet() is needed.
    for(Entry<String, String> entry : map.entrySet()) {
      request.getSession().setAttribute(entry.getKey(), entry.getValue());
    }
    return Response.ok().build();
  }
}
| 8,483 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/LoginController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import com.sun.jersey.api.client.ClientResponse.Status;
@Path("/login")
public class LoginController {

  @Context
  VelocityEngine velocity;

  static {
    // Seed default charts/widgets/dashboards before any login is served.
    ChukwaHBaseStore.populateDefaults();
  }

  /**
   * Password verification login screen
   *
   * @param buffer holds any user input
   * @return html page of login screen
   *
   * @response.representation.200.doc Login screen
   * @response.representation.200.mediaType text/html
   * @response.representation.200.example Example available in HICC UI
   */
  @GET
  @Path("check")
  public String login(String buffer) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Template template = velocity.getTemplate("login.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      // On template failure, return the raw message as the response body.
      e.printStackTrace();
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Password verification REST API
   *
   * @param request is HTTP request object
   * @return login status code
   *
   * @response.representation.200.doc User login successful
   * @response.representation.200.mediaType text/plain
   * @response.representation.200.example 200 OK
   * @response.representation.403.doc Login denied
   * @response.representation.403.mediaType text/plain
   * @response.representation.403.example 403 FORBIDDEN
   */
  @POST
  @Path("check")
  public Response check(@Context HttpServletRequest request) {
    VelocityContext context = new VelocityContext();
    if(request.getRemoteUser()!=null) {
      URI location;
      try {
        // Already authenticated: bounce to the HICC landing page.
        location = new URI("/hicc/");
        return Response.temporaryRedirect(location).build();
      } catch (URISyntaxException ignored) {
        // "/hicc/" is a constant, well-formed URI, so this cannot occur;
        // fall through to the forbidden response just in case.
      }
    }
    context.put("invalid", true);
    Template template = velocity.getTemplate("login.vm");
    StringWriter sw = new StringWriter();
    template.merge(context, sw);
    return Response.status(Status.FORBIDDEN).entity(sw.toString()).build();
  }
}
| 8,484 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/VelocityResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.lang.reflect.Type;
import javax.servlet.ServletContext;
import javax.ws.rs.core.Context;
import javax.ws.rs.ext.Provider;
import org.apache.log4j.Logger;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.RuntimeConstants;
import com.sun.jersey.core.spi.component.ComponentContext;
import com.sun.jersey.core.spi.component.ComponentScope;
import com.sun.jersey.spi.inject.Injectable;
import com.sun.jersey.spi.inject.InjectableProvider;
@Provider
public class VelocityResolver implements InjectableProvider<Context, Type> {
  @Context
  private ServletContext servletContext;
  // Lazily-created shared engine; guarded by synchronization in getValue().
  private VelocityEngine ve;
  private static Logger LOG = Logger.getLogger(VelocityResolver.class);
  public final static String LOGGER_NAME = VelocityResolver.class.getName();

  /**
   * Jersey configuration for setting up Velocity configuration.
   * Returns an injectable that lazily initializes a single VelocityEngine
   * configured to load templates from /WEB-INF/vm/ in the webapp.
   */
  @Override
  public Injectable<VelocityEngine> getInjectable(ComponentContext arg0,
      Context arg1, Type c) {
    if (c.equals(VelocityEngine.class)) {
      return new Injectable<VelocityEngine>() {
        public VelocityEngine getValue() {
          // Synchronize lazy init: this provider is singleton-scoped, so two
          // concurrent first requests could otherwise both create an engine.
          synchronized (VelocityResolver.this) {
            if (ve == null) {
              LOG.info("Ready to start velocity");
              VelocityEngine engine = new VelocityEngine();
              engine.setProperty(RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS,
                  "org.apache.velocity.runtime.log.Log4JLogChute");
              engine.setProperty("runtime.log.logsystem.log4j.logger",
                  LOGGER_NAME);
              engine.setProperty(RuntimeConstants.RESOURCE_LOADER,
                  "webapp");
              engine.setProperty("webapp.resource.loader.class",
                  "org.apache.velocity.tools.view.WebappResourceLoader");
              engine.setProperty("webapp.resource.loader.path",
                  "/WEB-INF/vm/");
              engine.setApplicationAttribute(
                  "javax.servlet.ServletContext", servletContext);
              try {
                engine.init();
                LOG.info("Velocity is loaded");
              } catch (Exception e) {
                LOG.error("Error when initializing Velocity", e);
              }
              // Publish only after configuration is complete.
              ve = engine;
            }
            return ve;
          }
        }
      };
    }
    return null;
  }

  public ComponentScope getScope() {
    return ComponentScope.Singleton;
  }
}
| 8,485 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/CirclesController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.io.StringWriter;
import java.lang.reflect.Type;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.TimeHandler;
import org.apache.hadoop.chukwa.hicc.bean.Chart;
import org.apache.hadoop.chukwa.hicc.bean.Series;
import org.apache.hadoop.chukwa.hicc.bean.SeriesMetaData;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
@Path("/circles")
public class CirclesController {
  static Logger LOG = Logger.getLogger(CirclesController.class);

  // NOTE(review): SimpleDateFormat is not thread-safe; kept for interface
  // compatibility but must not be shared across concurrent requests.
  SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");

  @Context
  VelocityEngine velocity;

  /**
   * Render circle using jquery circliful.js.
   *
   * @param id Title of the tile; chart descriptor key in HBase chukwa_meta table.
   * @param invert Toggle to display warning, error color by upper bound or lower bound.
   * @return html circle widget, or the exception message if rendering fails.
   *
   * @response.representation.200.doc Render circleful chart
   * @response.representation.200.mediaType text/html
   * @response.representation.200.example Example is availabe on HICC UI
   */
  @GET
  @Path("draw/{id}")
  @Produces(MediaType.TEXT_HTML)
  public String draw(@PathParam("id") String id, @DefaultValue("false") @QueryParam("invert") boolean invert) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Chart chart = ChukwaHBaseStore.getChart(id);
      List<SeriesMetaData> series = chart.getSeries();
      Gson gson = new Gson();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      Template template = velocity.getTemplate("circles.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      // Log with stack trace instead of printing to stderr; the message is
      // still returned so the UI can surface the failure.
      LOG.error("Failed to render circle widget for id: " + id, e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Preview circle graph with a set of chart configuration.
   *
   * @param buffer is chart object serialized as JSON
   * @return html text of circle graph, or the exception message if rendering fails.
   *
   * @request.representation.example {@link Examples#NAMENODE_MEMORY}
   */
  @PUT
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces(MediaType.TEXT_HTML)
  @Path("preview")
  public String preview(String buffer) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Gson gson = new Gson();
      Chart chart = gson.fromJson(buffer, Chart.class);
      List<SeriesMetaData> series = chart.getSeries();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      Template template = velocity.getTemplate("circles.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      LOG.error("Failed to preview circle widget.", e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Circle graph data can be calculated based on either a ratio of two metrics or
   * the selected metric is a percentage metric for rendering circle graph.
   *
   * When two or more series are supplied, the first is the numerator and the
   * second the denominator of the ratio; with a single series its value is
   * taken as the percentage directly.
   *
   * @param request is HTTP request object (provides the time window)
   * @param buffer is list of SeriesMetaData for fetching series data
   * @return JSON output series data
   *
   * @request.representation.example {@link Examples#HDFS_USAGE_SERIES_METADATA}
   * @response.representation.200.doc Display series data in JSON
   * @response.representation.200.mediaType application/json
   */
  @PUT
  @Consumes(MediaType.APPLICATION_JSON)
  @Path("preview/series")
  @Produces("application/json")
  public String previewSeries(@Context HttpServletRequest request, String buffer) {
    Type listType = new TypeToken<ArrayList<SeriesMetaData>>() {
    }.getType();
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    startTime = time.getStartTime();
    endTime = time.getEndTime();
    Gson gson = new Gson();
    ArrayList<SeriesMetaData> series = gson.fromJson(buffer, listType);
    double percent;
    List<String> data = ChukwaHBaseStore.getData(series, startTime, endTime);
    if(series.size()>=2) {
      double x = 0;
      double y = 1;
      try {
        x = Double.parseDouble(data.get(0));
      } catch(NumberFormatException e) {
        x = 0;
      }
      try {
        y = Double.parseDouble(data.get(1));
      } catch(NumberFormatException e) {
        y = 1;
      }
      if (y == 0) {
        // Guard against division by zero producing Infinity in the JSON
        // output; fall back to the same default used for an unparsable value.
        y = 1;
      }
      percent = x / y * 100d;
    } else {
      double x = Double.parseDouble(data.get(0));
      percent = x;
    }
    // Round to two decimal places for display.
    percent = Math.round(percent * 100d) / 100d;
    Series answer = new Series("circle");
    answer.add(endTime, percent);
    String result = gson.toJson(answer);
    return result;
  }
}
| 8,486 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/hicc/rest/PieChartController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.hicc.rest;
import java.io.StringWriter;
import java.lang.reflect.Type;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.hadoop.chukwa.datastore.ChukwaHBaseStore;
import org.apache.hadoop.chukwa.hicc.TimeHandler;
import org.apache.hadoop.chukwa.hicc.bean.Chart;
import org.apache.hadoop.chukwa.hicc.bean.ChartType;
import org.apache.hadoop.chukwa.hicc.bean.SeriesMetaData;
import org.apache.log4j.Logger;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
@Path("/piechart")
public class PieChartController extends ChartController{
  // Fixed copy-paste defect: logger was previously created for
  // ChartController.class, misattributing this class's log output.
  static Logger LOG = Logger.getLogger(PieChartController.class);

  // NOTE(review): SimpleDateFormat is not thread-safe; kept for interface
  // compatibility but must not be shared across concurrent requests.
  SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");

  @Context
  VelocityEngine velocity;

  /**
   * Render pie chart using chartist.js.
   *
   * @param id Reference ID of Chart stored in HBase chukwa_meta table.
   * @return html chart widget, or the exception message if rendering fails.
   *
   * @response.representation.200.doc Preview a pie chart
   * @response.representation.200.mediaType text/html
   * @response.representation.200.example Example available in HICC UI
   */
  @GET
  @Path("draw/{id}")
  @Produces(MediaType.TEXT_HTML)
  public String draw(@PathParam("id") String id) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Chart chart = ChukwaHBaseStore.getChart(id);
      List<SeriesMetaData> series = chart.getSeries();
      Gson gson = new Gson();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      context.put("chartTypeDonut", ChartType.DONUT);
      Template template = velocity.getTemplate("pie.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      // Log with stack trace instead of printing to stderr; the message is
      // still returned so the UI can surface the failure.
      LOG.error("Failed to render pie chart for id: " + id, e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Preview a pie chart.
   *
   * @param buffer is pie chart object serialized as JSON
   * @return html chart widget, or the exception message if rendering fails.
   *
   * @response.representation.200.doc Preview a pie chart
   * @response.representation.200.mediaType text/html
   * @response.representation.200.example Example available in HICC UI
   */
  @PUT
  @Path("preview")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces(MediaType.TEXT_HTML)
  public String preview(String buffer) {
    VelocityContext context = new VelocityContext();
    StringWriter sw = null;
    try {
      Gson gson = new Gson();
      Chart chart = gson.fromJson(buffer, Chart.class);
      List<SeriesMetaData> series = chart.getSeries();
      String seriesMetaData = gson.toJson(series);
      context.put("chart", chart);
      context.put("seriesMetaData", seriesMetaData);
      context.put("chartTypeDonut", ChartType.DONUT);
      Template template = velocity.getTemplate("pie.vm");
      sw = new StringWriter();
      template.merge(context, sw);
    } catch (Exception e) {
      LOG.error("Failed to preview pie chart.", e);
      return e.getMessage();
    }
    return sw.toString();
  }

  /**
   * Preview a series JSON for pie chart.
   *
   * @param request HTTP request object (provides the time window)
   * @param buffer is pie chart configuration
   * @return JSON array of series data for the requested time window.
   *
   * @request.representation.example {@link Examples#CPU_SERIES_METADATA}
   * @response.representation.200.doc Preview a pie chart series
   * @response.representation.200.mediaType application/json
   * @response.representation.200.example Example available in REST API
   */
  @PUT
  @Path("preview/series")
  @Consumes(MediaType.APPLICATION_JSON)
  @Produces("application/json")
  public String previewSeries(@Context HttpServletRequest request, String buffer) {
    Type listType = new TypeToken<ArrayList<SeriesMetaData>>() {
    }.getType();
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    startTime = time.getStartTime();
    endTime = time.getEndTime();
    Gson gson = new Gson();
    ArrayList<SeriesMetaData> series = gson.fromJson(buffer, listType);
    List<String> data = ChukwaHBaseStore.getData(series, startTime, endTime);
    String result = gson.toJson(data);
    return result;
  }
}
| 8,487 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.dataloader;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.concurrent.Callable;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.database.DatabaseConfig;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
import org.apache.hadoop.chukwa.extraction.engine.RecordUtil;
import org.apache.hadoop.chukwa.util.ClusterConfig;
import org.apache.hadoop.chukwa.util.DatabaseWriter;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.hadoop.chukwa.util.RegexUtil;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
/**
 * Loads demux output (Chukwa sequence files of ChukwaRecords) into a
 * relational database. Table names, column mappings, key normalization and
 * unit conversions are driven by the MDL configuration ("metric.*",
 * "normalize.*", "conversion.*", "report.db.name.*" keys).
 */
public class MetricDataLoader implements Callable {
  private static Log log = LogFactory.getLog(MetricDataLoader.class);
  private Statement stmt = null;
  private ResultSet rs = null;
  private DatabaseConfig mdlConfig = null;
  // "normalize.<type>.<field>" keys: fields folded into the metric key.
  private HashMap<String, String> normalize = null;
  // "metric.*" key -> database column name.
  private HashMap<String, String> transformer = null;
  // "conversion.*" key -> multiplier applied to numeric values.
  private HashMap<String, Float> conversion = null;
  // "report.db.name.<recordType>" -> table name.
  private HashMap<String, String> dbTables = null;
  // table name -> (column name -> java.sql.Types constant).
  private HashMap<String, HashMap<String, Integer>> dbSchema = null;
  // Replacement for runs of spaces in field names/values.
  private String newSpace = "-";
  private boolean batchMode = true;
  private Connection conn = null;
  private Path source = null;
  private ChukwaConfiguration conf = null;
  private FileSystem fs = null;
  private String jdbc_url = "";
  // NOTE(review): fileName is ignored here, so 'source' stays null; run()
  // invoked via main() would NPE on source.getName() — confirm intent.
  public MetricDataLoader(String fileName) throws IOException {
    conf = new ChukwaConfiguration();
    fs = FileSystem.get(conf);
  }
  /** Creates a new instance of DBWriter
   * @param conf Chukwa Configuration
   * @param fs Hadoop File System
   * @param fileName Chukwa Sequence file */
  public MetricDataLoader(ChukwaConfiguration conf, FileSystem fs, String fileName) {
    source = new Path(fileName);
    this.conf = conf;
    this.fs = fs;
  }
  /**
   * Initializes MDL configuration maps, opens the JDBC connection for the
   * given cluster, and discovers table schemas from "<table>_template"
   * metadata. Auto-commit is disabled so batches commit in run()'s finally.
   */
  private void initEnv(String cluster) throws Exception {
    mdlConfig = new DatabaseConfig();
    transformer = mdlConfig.startWith("metric.");
    conversion = new HashMap<String, Float>();
    normalize = mdlConfig.startWith("normalize.");
    dbTables = mdlConfig.startWith("report.db.name.");
    Iterator<?> entries = mdlConfig.iterator();
    while (entries.hasNext()) {
      String entry = entries.next().toString();
      if (entry.startsWith("conversion.")) {
        String[] metrics = entry.split("=");
        try {
          float convertNumber = Float.parseFloat(metrics[1]);
          conversion.put(metrics[0], convertNumber);
        } catch (NumberFormatException ex) {
          log.error(metrics[0] + " is not a number.");
        }
      }
    }
    log.debug("cluster name:" + cluster);
    if (!cluster.equals("")) {
      ClusterConfig cc = new ClusterConfig();
      jdbc_url = cc.getURL(cluster);
    }
    try {
      DatabaseWriter dbWriter = new DatabaseWriter(cluster);
      conn = dbWriter.getConnection();
    } catch(Exception ex) {
      throw new Exception("JDBC URL does not exist for:"+jdbc_url);
    }
    log.debug("Initialized JDBC URL: " + jdbc_url);
    HashMap<String, String> dbNames = mdlConfig.startWith("report.db.name.");
    Iterator<String> ki = dbNames.keySet().iterator();
    dbSchema = new HashMap<String, HashMap<String, Integer>>();
    while (ki.hasNext()) {
      String recordType = ki.next().toString();
      String table = dbNames.get(recordType);
      try {
        // NOTE(review): this local 'rs' shadows the instance field 'rs'.
        ResultSet rs = conn.getMetaData().getColumns(null, null, table+"_template", null);
        HashMap<String, Integer> tableSchema = new HashMap<String, Integer>();
        while(rs.next()) {
          String name = rs.getString("COLUMN_NAME");
          int type = rs.getInt("DATA_TYPE");
          tableSchema.put(name, type);
          // Auto-register a "metric.<type>.<column>" mapping for every
          // template column not already configured explicitly.
          // substring(15) strips the "report.db.name." prefix.
          StringBuilder metricName = new StringBuilder();
          metricName.append("metric.");
          metricName.append(recordType.substring(15));
          metricName.append(".");
          metricName.append(name);
          String mdlKey = metricName.toString().toLowerCase();
          if(!transformer.containsKey(mdlKey)) {
            transformer.put(mdlKey, name);
          }
        }
        rs.close();
        dbSchema.put(table, tableSchema);
      } catch (SQLException ex) {
        log.debug("table: " + table
          + " template does not exist, MDL will not load data for this table.");
      }
    }
    stmt = conn.createStatement();
    conn.setAutoCommit(false);
  }
  public void interrupt() {
  }
  // Collapses runs of spaces in the trimmed string to the separator c.
  private String escape(String s, String c) {
    String ns = s.trim();
    Pattern pattern = Pattern.compile(" +");
    Matcher matcher = pattern.matcher(ns);
    String s2 = matcher.replaceAll(c);
    return s2;
  }
  /**
   * Backslash-escapes double quotes, backslashes and single quotes so the
   * string can be embedded in a SQL literal.
   * @param s raw string
   * @return escaped string
   */
  public static String escapeQuotes( String s ) {
    StringBuffer sb = new StringBuffer();
    int index;
    int length = s.length();
    char ch;
    for( index = 0; index < length; ++index ) {
      if(( ch = s.charAt( index )) == '\"' ) {
        sb.append( "\\\"" );
      } else if( ch == '\\' ) {
        sb.append( "\\\\" );
      } else if( ch == '\'' ) {
        sb.append( "\\'" );
      } else {
        sb.append( ch );
      }
    }
    return( sb.toString());
  }
  /**
   * Reads every ChukwaRecord from the source sequence file, maps it to a
   * table via the MDL configuration and inserts/updates rows, batching up to
   * 20000 statements between executeBatch() calls.
   *
   * @return true on success (false if initialization fails)
   * @throws IOException on SQL or other failure, so PostProcessorManager can retry
   */
  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value =
      "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE",
      justification = "Dynamic based upon tables in the database")
  public boolean run() throws IOException {
    boolean first=true;
    log.info("StreamName: " + source.getName());
    SequenceFile.Reader reader = null;
    try {
      // The newInstance() call is a work around for some
      // broken Java implementations
      reader = new SequenceFile.Reader(fs, source, conf);
    } catch (Exception ex) {
      // handle the error
      log.error(ex, ex);
    }
    long currentTimeMillis = System.currentTimeMillis();
    boolean isSuccessful = true;
    String recordType = null;
    ChukwaRecordKey key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();
    String cluster = null;
    int numOfRecords = 0;
    try {
      // Matches down-sampled record types of the form "<base>-<minutes>".
      Pattern p = Pattern.compile("(.*)\\-(\\d+)$");
      int batch = 0;
      while (reader !=null && reader.next(key, record)) {
        numOfRecords++;
        // Lazily initialize the JDBC environment from the first record's
        // cluster name.
        if(first) {
          try {
            cluster = RecordUtil.getClusterName(record);
            initEnv(cluster);
            first=false;
          } catch(Exception ex) {
            log.error("Initialization failed for: "+cluster+". Please check jdbc configuration.");
            return false;
          }
        }
        String sqlTime = DatabaseWriter.formatTimeStamp(record.getTime());
        log.debug("Timestamp: " + record.getTime());
        log.debug("DataType: " + key.getReduceType());
        String[] fields = record.getFields();
        String table = null;
        String[] priKeys = null;
        // node (csource) -> (metric key -> value) accumulated for this record.
        HashMap<String, HashMap<String, String>> hashReport = new HashMap<String, HashMap<String, String>>();
        StringBuilder normKey = new StringBuilder();
        String node = record.getValue("csource");
        recordType = key.getReduceType().toLowerCase();
        String dbKey = "report.db.name." + recordType;
        Matcher m = p.matcher(recordType);
        // Resolve the destination table: either a directly configured table
        // or a time-partitioned table derived from a "<base>-<minutes>" type.
        if (dbTables.containsKey(dbKey)) {
          String tableName = mdlConfig.get(dbKey);
          if (!RegexUtil.isRegex(tableName)) {
            log.error("Error parsing 'tableName' as a regex: "
                + RegexUtil.regexError(tableName));
            return false;
          }
          String[] tmp = mdlConfig.findTableName(tableName, record
              .getTime(), record.getTime());
          table = tmp[0];
        } else if(m.matches()) {
          String timePartition = "_week";
          int timeSize = Integer.parseInt(m.group(2));
          if(timeSize == 5) {
            timePartition = "_month";
          } else if(timeSize == 30) {
            timePartition = "_quarter";
          } else if(timeSize == 180) {
            timePartition = "_year";
          } else if(timeSize == 720) {
            timePartition = "_decade";
          }
          int partition = (int) (record.getTime() / timeSize);
          StringBuilder tmpDbKey = new StringBuilder();
          tmpDbKey.append("report.db.name.");
          tmpDbKey.append(m.group(1));
          if(dbTables.containsKey(tmpDbKey.toString())) {
            StringBuilder tmpTable = new StringBuilder();
            tmpTable.append(dbTables.get(tmpDbKey.toString()));
            tmpTable.append("_");
            tmpTable.append(partition);
            tmpTable.append("_");
            tmpTable.append(timePartition);
            table = tmpTable.toString();
          } else {
            log.debug(tmpDbKey.toString() + " does not exist.");
            continue;
          }
        } else {
          log.debug(dbKey + " does not exist.");
          continue;
        }
        log.debug("table name:" + table);
        try {
          priKeys = mdlConfig.get("report.db.primary.key." + recordType).split(
              ",");
        } catch (Exception nullException) {
          log.debug(ExceptionUtil.getStackTrace(nullException));
        }
        // First pass: build the normalized key from all "normalize.*" fields.
        for (String field : fields) {
          String keyName = escape(field.toLowerCase(), newSpace);
          String keyValue = escape(record.getValue(field).toLowerCase(),
              newSpace);
          StringBuilder buildKey = new StringBuilder();
          buildKey.append("normalize.");
          buildKey.append(recordType);
          buildKey.append(".");
          buildKey.append(keyName);
          if (normalize.containsKey(buildKey.toString())) {
            if (normKey.toString().equals("")) {
              normKey.append(keyName);
              normKey.append(".");
              normKey.append(keyValue);
            } else {
              normKey.append(".");
              normKey.append(keyName);
              normKey.append(".");
              normKey.append(keyValue);
            }
          }
          StringBuilder normalizedKey = new StringBuilder();
          normalizedKey.append("metric.");
          normalizedKey.append(recordType);
          normalizedKey.append(".");
          normalizedKey.append(normKey);
          if (hashReport.containsKey(node)) {
            HashMap<String, String> tmpHash = hashReport.get(node);
            tmpHash.put(normalizedKey.toString(), keyValue);
            hashReport.put(node, tmpHash);
          } else {
            HashMap<String, String> tmpHash = new HashMap<String, String>();
            tmpHash.put(normalizedKey.toString(), keyValue);
            hashReport.put(node, tmpHash);
          }
        }
        // Second pass: record each field value under its (possibly
        // normalized) "metric.*" key.
        for (String field : fields) {
          String valueName = escape(field.toLowerCase(), newSpace);
          String valueValue = escape(record.getValue(field).toLowerCase(),
              newSpace);
          StringBuilder buildKey = new StringBuilder();
          buildKey.append("metric.");
          buildKey.append(recordType);
          buildKey.append(".");
          buildKey.append(valueName);
          if (!normKey.toString().equals("")) {
            buildKey = new StringBuilder();
            buildKey.append("metric.");
            buildKey.append(recordType);
            buildKey.append(".");
            buildKey.append(normKey);
            buildKey.append(".");
            buildKey.append(valueName);
          }
          String normalizedKey = buildKey.toString();
          if (hashReport.containsKey(node)) {
            HashMap<String, String> tmpHash = hashReport.get(node);
            tmpHash.put(normalizedKey, valueValue);
            hashReport.put(node, tmpHash);
          } else {
            HashMap<String, String> tmpHash = new HashMap<String, String>();
            tmpHash.put(normalizedKey, valueValue);
            hashReport.put(node, tmpHash);
          }
        }
        // Third pass: translate each node's metric map into an
        // INSERT ... ON DUPLICATE KEY UPDATE statement.
        for(Entry<String, HashMap<String, String>> entry : hashReport.entrySet()) {
          HashMap<String, String> recordSet = entry.getValue();
          // Map any primary key that was not included in the report keyName
          StringBuilder sqlPriKeys = new StringBuilder();
          try {
            for (String priKey : priKeys) {
              if (priKey.equals("timestamp")) {
                sqlPriKeys.append(priKey);
                sqlPriKeys.append(" = \"");
                sqlPriKeys.append(sqlTime);
                sqlPriKeys.append("\"");
              }
              if (!priKey.equals(priKeys[priKeys.length - 1])) {
                sqlPriKeys.append(sqlPriKeys);
                sqlPriKeys.append(", ");
              }
            }
          } catch (Exception nullException) {
            // ignore if primary key is empty
            log.debug(ExceptionUtil.getStackTrace(nullException));
          }
          // Map the hash objects to database table columns
          StringBuilder sqlValues = new StringBuilder();
          boolean firstValue = true;
          for(Entry<String, String> fi : recordSet.entrySet()) {
            String fieldKey = fi.getKey();
            String fieldValue = fi.getValue();
            // NOTE(review): interned identity comparison with != works only
            // because both sides are interned, but equals() would be clearer.
            if (transformer.containsKey(fieldKey) && transformer.get(fieldKey).intern()!="_delete".intern()) {
              if (!firstValue) {
                sqlValues.append(", ");
              }
              try {
                // Render the value according to the column's SQL type:
                // strings are quoted, timestamps formatted, integral types
                // and doubles multiplied by any configured conversion factor.
                if (dbSchema.get(dbTables.get(dbKey)).get(
                    transformer.get(fieldKey)) == java.sql.Types.VARCHAR
                    || dbSchema.get(dbTables.get(dbKey)).get(
                        transformer.get(fieldKey)) == java.sql.Types.BLOB) {
                  String conversionKey = "conversion." + fieldKey;
                  if (conversion.containsKey(conversionKey)) {
                    sqlValues.append(transformer.get(fieldKey));
                    sqlValues.append("=");
                    sqlValues.append(fieldValue);
                    sqlValues.append(conversion.get(conversionKey).toString());
                  } else {
                    sqlValues.append(transformer.get(fieldKey));
                    sqlValues.append("=\'");
                    sqlValues.append(escapeQuotes(fieldValue));
                    sqlValues.append("\'");
                  }
                } else if (dbSchema.get(dbTables.get(dbKey)).get(
                    transformer.get(fieldKey)) == java.sql.Types.TIMESTAMP) {
                  SimpleDateFormat formatter = new SimpleDateFormat(
                      "yyyy-MM-dd HH:mm:ss");
                  Date recordDate = new Date();
                  recordDate.setTime(Long.parseLong(fieldValue));
                  sqlValues.append(transformer.get(fieldKey));
                  sqlValues.append("=\"");
                  sqlValues.append(formatter.format(recordDate));
                  sqlValues.append("\"");
                } else if (dbSchema.get(dbTables.get(dbKey)).get(
                    transformer.get(fieldKey)) == java.sql.Types.BIGINT
                    || dbSchema.get(dbTables.get(dbKey)).get(
                        transformer.get(fieldKey)) == java.sql.Types.TINYINT
                    || dbSchema.get(dbTables.get(dbKey)).get(
                        transformer.get(fieldKey)) == java.sql.Types.INTEGER) {
                  long tmp = 0;
                  try {
                    tmp = Long.parseLong(fieldValue);
                    String conversionKey = "conversion." + fieldKey;
                    if (conversion.containsKey(conversionKey)) {
                      tmp = tmp
                          * Long.parseLong(conversion.get(conversionKey)
                              .toString());
                    }
                  } catch (Exception e) {
                    tmp = 0;
                  }
                  sqlValues.append(transformer.get(fieldKey));
                  sqlValues.append("=");
                  sqlValues.append(tmp);
                } else {
                  double tmp = 0;
                  tmp = Double.parseDouble(fieldValue);
                  String conversionKey = "conversion." + fieldKey;
                  if (conversion.containsKey(conversionKey)) {
                    tmp = tmp
                        * Double.parseDouble(conversion.get(conversionKey)
                            .toString());
                  }
                  if (Double.isNaN(tmp)) {
                    tmp = 0;
                  }
                  sqlValues.append(transformer.get(fieldKey));
                  sqlValues.append("=");
                  sqlValues.append(tmp);
                }
                firstValue = false;
              } catch (NumberFormatException ex) {
                // Non-numeric value for a numeric column: fall back to a
                // quoted string (or conversion suffix) assignment.
                String conversionKey = "conversion." + fieldKey;
                if (conversion.containsKey(conversionKey)) {
                  sqlValues.append(transformer.get(fieldKey));
                  sqlValues.append("=");
                  sqlValues.append(recordSet.get(fieldKey));
                  sqlValues.append(conversion.get(conversionKey).toString());
                } else {
                  sqlValues.append(transformer.get(fieldKey));
                  sqlValues.append("='");
                  sqlValues.append(escapeQuotes(recordSet.get(fieldKey)));
                  sqlValues.append("'");
                }
                firstValue = false;
              } catch (NullPointerException ex) {
                log.error("dbKey:" + dbKey + " fieldKey:" + fieldKey
                    + " does not contain valid MDL structure.");
              }
            }
          }
          // Assemble and queue/execute the upsert statement.
          StringBuilder sql = new StringBuilder();
          if (sqlPriKeys.length() > 0) {
            sql.append("INSERT INTO ");
            sql.append(table);
            sql.append(" SET ");
            sql.append(sqlPriKeys.toString());
            sql.append(",");
            sql.append(sqlValues.toString());
            sql.append(" ON DUPLICATE KEY UPDATE ");
            sql.append(sqlPriKeys.toString());
            sql.append(",");
            sql.append(sqlValues.toString());
            sql.append(";");
          } else {
            if(sqlValues.length() > 0) {
              sql.append("INSERT INTO ");
              sql.append(table);
              sql.append(" SET ");
              sql.append(sqlValues.toString());
              sql.append(" ON DUPLICATE KEY UPDATE ");
              sql.append(sqlValues.toString());
              sql.append(";");
            }
          }
          if(sql.length() > 0) {
            log.trace(sql);
            if (batchMode) {
              stmt.addBatch(sql.toString());
              batch++;
            } else {
              stmt.execute(sql.toString());
            }
            // Flush the batch periodically to bound memory use.
            if (batchMode && batch > 20000) {
              int[] updateCounts = stmt.executeBatch();
              log.info("Batch mode inserted=" + updateCounts.length + "records.");
              batch = 0;
            }
          }
        }
      }
      if (batchMode) {
        int[] updateCounts = stmt.executeBatch();
        log.info("Batch mode inserted=" + updateCounts.length + "records.");
      }
    } catch (SQLException ex) {
      // handle any errors
      isSuccessful = false;
      log.error(ex, ex);
      log.error("SQLException: " + ex.getMessage());
      log.error("SQLState: " + ex.getSQLState());
      log.error("VendorError: " + ex.getErrorCode());
      // throw an exception up the chain to give the PostProcessorManager a chance to retry
      throw new IOException (ex);
    } catch (Exception e) {
      isSuccessful = false;
      log.error(ExceptionUtil.getStackTrace(e));
      // throw an exception up the chain to give the PostProcessorManager a chance to retry
      throw new IOException (e);
    } finally {
      // Commit whatever was batched, then release all JDBC and HDFS
      // resources regardless of success.
      if (batchMode && conn!=null) {
        try {
          conn.commit();
          log.info("batchMode commit done");
        } catch (SQLException ex) {
          log.error(ex, ex);
          log.error("SQLException: " + ex.getMessage());
          log.error("SQLState: " + ex.getSQLState());
          log.error("VendorError: " + ex.getErrorCode());
        }
      }
      long latencyMillis = System.currentTimeMillis() - currentTimeMillis;
      int latencySeconds = ((int) (latencyMillis + 500)) / 1000;
      String logMsg = (isSuccessful ? "Saved" : "Error occurred in saving");
      log.info(logMsg + " (" + recordType + ","
          + cluster + ") " + latencySeconds + " sec. numOfRecords: " + numOfRecords);
      if (rs != null) {
        try {
          rs.close();
        } catch (SQLException ex) {
          log.error(ex, ex);
          log.error("SQLException: " + ex.getMessage());
          log.error("SQLState: " + ex.getSQLState());
          log.error("VendorError: " + ex.getErrorCode());
        }
        rs = null;
      }
      if (stmt != null) {
        try {
          stmt.close();
        } catch (SQLException ex) {
          log.error(ex, ex);
          log.error("SQLException: " + ex.getMessage());
          log.error("SQLState: " + ex.getSQLState());
          log.error("VendorError: " + ex.getErrorCode());
        }
        stmt = null;
      }
      if (conn != null) {
        try {
          conn.close();
        } catch (SQLException ex) {
          log.error(ex, ex);
          log.error("SQLException: " + ex.getMessage());
          log.error("SQLState: " + ex.getSQLState());
          log.error("VendorError: " + ex.getErrorCode());
        }
        conn = null;
      }
      if (reader != null) {
        try {
          reader.close();
        } catch (Exception e) {
          log.warn("Could not close SequenceFile.Reader:" ,e);
        }
        reader = null;
      }
    }
    return true;
  }
  // Callable entry point; delegates to run().
  public Boolean call() throws IOException {
    return run();
  }
  public static void main(String[] args) {
    try {
      MetricDataLoader mdl = new MetricDataLoader(args[0]);
      mdl.run();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}
| 8,488 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/dataloader/FSMDataLoader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.dataloader;
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.chukwa.analysis.salsa.fsm.FSMBuilder;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.hadoop.conf.Configuration;
/**
 * Runs the SALSA FSM (finite state machine) MapReduce jobs over demux output
 * and loads the resulting .evt files into the database via
 * {@link MetricDataLoader} workers.
 */
public class FSMDataLoader extends DataLoaderFactory {
  private static Log log = LogFactory.getLog(FSMDataLoader.class);

  protected MetricDataLoader threads[] = null;
  private static String DATA_LOADER_THREAD_LIMIT = "chukwa.data.loader.threads.limit";
  private int size = 1;
  private CompletionService completion = null;
  private ExecutorService executor = null;
  // Mapper classes for the three FSM passes: datanode client trace,
  // tasktracker client trace, and job history task data.
  private static String[] mappers = {
    "org.apache.hadoop.chukwa.analysis.salsa.fsm.DataNodeClientTraceMapper",
    "org.apache.hadoop.chukwa.analysis.salsa.fsm.TaskTrackerClientTraceMapper",
    "org.apache.hadoop.chukwa.analysis.salsa.fsm.JobHistoryTaskDataMapper"
  };

  public FSMDataLoader() {
  }

  /**
   * Runs the FSM jobs over the parent directories of the given files, then
   * loads every produced .evt file through the MDL thread pool and cleans up
   * the temporary MapReduce output.
   *
   * @param conf Chukwa Configuration
   * @param fs Hadoop File System
   * @param fileList demux output files to process
   * @throws IOException if any job or load step fails
   */
  public void load(ChukwaConfiguration conf, FileSystem fs, FileStatus[] fileList) throws IOException {
    if(executor==null) {
      try {
        this.size = Integer.parseInt(conf.get(DATA_LOADER_THREAD_LIMIT));
      } catch(Exception e) {
        // Missing or malformed limit: fall back to a single worker thread.
        this.size = 1;
      }
      executor = Executors.newFixedThreadPool(size);
    }
    if(completion==null) {
      completion = new ExecutorCompletionService(executor);
    }
    try {
      // Locate directory output directories of the current demux, and create a unique directory list.
      HashSet<Path> inputPaths = new HashSet<Path>();
      HashSet<Path> outputPaths = new HashSet<Path>();
      int counter = 0;
      for(int i=0;i<fileList.length;i++) {
        Path temp = fileList[i].getPath().getParent();
        if(!inputPaths.contains(temp)) {
          inputPaths.add(temp);
        }
      }
      String outputDir= conf.get("chukwa.tmp.data.dir")+File.separator+"fsm_"+System.currentTimeMillis()+"_";
      if(inputPaths.size()>0) {
        Configuration fsmConf = new Configuration();
        // Run fsm map reduce job for dn, tt, and jobhist.
        for(String mapper : mappers) {
          String[] args = new String[inputPaths.size()+3];
          args[0]="-in";
          int k=2;
          boolean hasData=false;
          // Route each input directory to the mapper that understands its
          // data type. Fixed: compare strings with equals() rather than
          // identity comparison of interned references.
          for(Path temp : inputPaths) {
            String tempPath = temp.toUri().toString();
            if((mapper.equals(mappers[0]) && tempPath.indexOf("ClientTraceDetailed")>0) ||
               (mapper.equals(mappers[1]) && tempPath.indexOf("ClientTraceDetailed")>0) ||
               (mapper.equals(mappers[2]) && tempPath.indexOf("TaskData")>0) ||
               (mapper.equals(mappers[2]) && tempPath.indexOf("JobData")>0)) {
              args[k]=tempPath;
              k++;
              hasData=true;
            }
          }
          args[1]=k-2+"";
          fsmConf.set("chukwa.salsa.fsm.mapclass", mapper);
          args[k]=outputDir+mapper;
          Path outputPath = new Path(args[k]);
          outputPaths.add(outputPath);
          if(hasData) {
            int res = ToolRunner.run(fsmConf, new FSMBuilder(), args);
            log.debug("Job Status: "+res);
          }
        }
      }
      // Find the mapreduce output and load to MDL.
      for(Path outputPath : outputPaths) {
        Path searchDir = new Path(outputPath.toUri().toString()+"/*/*/*.evt");
        log.info("Search dir:"+searchDir.toUri().toString());
        FileStatus[] outputList = fs.globStatus(searchDir);
        if(outputList!=null) {
          for(int j=0;j<outputList.length;j++) {
            String outputFile = outputList[j].getPath().toUri().toString();
            log.info("FSM -> MDL loading: "+outputFile);
            completion.submit(new MetricDataLoader(conf, fs, outputFile));
            counter++;
          }
        } else {
          log.warn("No output to load.");
        }
      }
      // Wait for every submitted loader to finish (propagates failures).
      for(int i=0;i<counter;i++) {
        completion.take().get();
      }
      // Clean up mapreduce output of fsm.
      for(Path dir : outputPaths) {
        fs.delete(dir, true);
      }
    } catch(Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
      // Fixed: preserve the cause instead of throwing a bare IOException.
      throw new IOException(e);
    }
  }

  /** Shuts down the worker pool, waiting up to 30 seconds for completion. */
  public void shutdown() throws InterruptedException {
    executor.shutdown();
    executor.awaitTermination(30, TimeUnit.SECONDS);
    executor.shutdownNow();
  }
}
| 8,489 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/dataloader/DataLoaderFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.dataloader;
import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
/**
 * Base class for Chukwa data loaders. Subclasses override {@link #load} to do
 * the actual work; this class only records the load context (configuration,
 * file system and source file list) and exposes it through accessors.
 */
public abstract class DataLoaderFactory {
  ChukwaConfiguration conf = null;
  FileSystem fs = null;
  protected FileStatus[] source = null;

  public DataLoaderFactory() {
  }

  /**
   * Records the load context. Subclasses typically override this method and
   * call {@code super.load(...)} before performing the actual loading.
   *
   * @param conf Chukwa Configuration
   * @param fs Hadoop File System
   * @param src List of files to load (defensively cloned)
   * @throws IOException if error loading files
   */
  public void load(ChukwaConfiguration conf, FileSystem fs, FileStatus[] src) throws IOException {
    this.source=src.clone();
    this.conf=conf;
    this.fs=fs;
  }

  /**
   * @return a defensive copy of the files registered via {@link #load}, or an
   *         empty array when load() has not been called yet (previously this
   *         threw a NullPointerException).
   */
  public FileStatus[] getSource() {
    if (source == null) {
      return new FileStatus[0];
    }
    return Arrays.copyOf(source, source.length);
  }

  protected FileSystem getFileSystem() {
    return fs;
  }

  protected ChukwaConfiguration getConf() {
    return conf;
  }
}
| 8,490 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/dataloader/MetricDataLoaderPool.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.dataloader;
import java.io.IOException;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
/**
 * Loads metric sequence files in parallel: one {@link MetricDataLoader} task is
 * submitted per input file to a lazily created fixed-size thread pool, and
 * load() blocks until every task completed.
 */
public class MetricDataLoaderPool extends DataLoaderFactory {
  private static final Log log = LogFactory.getLog(MetricDataLoaderPool.class);

  /** Kept for binary/source compatibility with subclasses; unused here. */
  protected MetricDataLoader threads[] = null;
  /** Config key bounding the number of concurrent loader threads. */
  private static final String DATA_LOADER_THREAD_LIMIT = "chukwa.data.loader.threads.limit";
  private int size = 1;
  private CompletionService completion = null;
  private ExecutorService executor = null;

  public MetricDataLoaderPool() {
  }

  /**
   * Loads the given files by submitting one {@link MetricDataLoader} per file
   * and waiting for all of them to finish.
   *
   * @param conf Chukwa configuration
   * @param fs Hadoop file system the files live on
   * @param fileList files to load
   * @throws IOException if any loader fails (the failure is kept as the cause)
   */
  public void load(ChukwaConfiguration conf, FileSystem fs, FileStatus[] fileList) throws IOException {
    if(executor==null) {
      try {
        this.size = Integer.parseInt(conf.get(DATA_LOADER_THREAD_LIMIT));
      } catch(Exception e) {
        this.size = 1; // unset or malformed limit: fall back to a single thread
      }
      if (this.size < 1) {
        this.size = 1; // a fixed pool requires at least one thread
      }
      executor = Executors.newFixedThreadPool(size);
    }
    if(completion==null) {
      completion = new ExecutorCompletionService(executor);
    }
    try {
      for(int i=0;i<fileList.length;i++) {
        String filename = fileList[i].getPath().toUri().toString();
        log.info("Processing: "+filename);
        completion.submit(new MetricDataLoader(conf, fs, filename));
      }
      // Wait for every submitted loader; get() rethrows loader failures.
      for(int i=0;i<fileList.length;i++) {
        completion.take().get();
      }
    } catch(Exception e) {
      if (e instanceof InterruptedException) {
        Thread.currentThread().interrupt(); // preserve the interrupt status
      }
      log.error(ExceptionUtil.getStackTrace(e));
      // Keep the original failure as the cause instead of a bare IOException.
      throw new IOException(e);
    }
  }

  public void shutdown() throws InterruptedException {
    if (executor == null) {
      return; // load() was never invoked; nothing to shut down
    }
    executor.shutdown();
    executor.awaitTermination(30, TimeUnit.SECONDS);
    executor.shutdownNow();
  }
}
| 8,491 |
0 | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa | Create_ds/chukwa/core/src/main/java/org/apache/hadoop/chukwa/dataloader/SocketDataLoader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.chukwa.dataloader;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.ChunkImpl;
import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.datacollection.DataFactory;
import org.apache.hadoop.chukwa.datacollection.writer.SocketTeeWriter;
import org.apache.hadoop.chukwa.util.ExceptionUtil;
import org.apache.log4j.Logger;
/**
 * Socket Data Loader, also known as the SDL, is a framework for allowing direct
 * access to log data under the Chukwa Collector in a safe and efficient manner.
 * Subscribe to chukwaCollector.tee.port for data streaming.
 * Defaults socket tee port is 9094.
 */
public class SocketDataLoader implements Runnable {
  private String hostname = "localhost";
  private int port = 9094;
  private static Logger log = Logger.getLogger(SocketDataLoader.class);
  private Socket s = null;
  private DataInputStream dis = null;
  private DataOutputStream dos = null;
  // Bounded FIFO of the most recent chunks; the oldest entry is dropped once
  // the queue grows past QUEUE_MAX.
  private Queue<Chunk> q = new LinkedList<Chunk>();
  private String recordType = null;
  private boolean running = false;
  private static final int QUEUE_MAX = 10;
  private Iterator<String> collectors = null;
  // Splits a collector URL of the form scheme://host:port into three groups.
  private static Pattern pattern = Pattern.compile("(.+?)\\://(.+?)\\:(.+?)");

  /*
   * Create and start an instance of SocketDataLoader.
   * @param recordType data type to subscribe to, or "all" for everything
   */
  public SocketDataLoader(String recordType) {
    this.recordType = recordType;
    try {
      collectors = DataFactory.getInstance().getCollectorURLs(new ChukwaConfiguration());
    } catch (IOException e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    // Socket data loader only supports streaming data from a single collector.
    // For large deployment, it may require to setup multi-tiers of collectors to
    // channel data into a single collector for display.
    // Note: only the host is extracted; the tee port is fixed (9094), which is
    // distinct from the collector's own port in the URL.
    if (collectors != null && collectors.hasNext()) {
      Matcher m = pattern.matcher(collectors.next());
      if (m.matches()) {
        hostname = m.group(2);
      }
    } else {
      // Previously this threw NPE/NoSuchElementException when the collector
      // list could not be read; fall back to the default host instead.
      log.warn("No collector URL available, defaulting to " + hostname);
    }
    start();
  }

  /*
   * Establish a connection to chukwa collector and filter data stream
   * base on record type.
   */
  public synchronized void start() {
    try {
      running = true;
      s = new Socket(hostname, port);
      try {
        s.setSoTimeout(120000);
        dos = new DataOutputStream(s.getOutputStream());
        StringBuilder output = new StringBuilder();
        output.append(SocketTeeWriter.WRITABLE);
        // Fixed: compare by value; the previous intern()-identity comparison
        // was fragile even though interned equal strings share a reference.
        if (!"all".equalsIgnoreCase(recordType)) {
          output.append(" datatype=");
          output.append(recordType);
        } else {
          output.append(" all");
        }
        output.append("\n");
        dos.write((output.toString()).getBytes(Charset.forName("UTF-8")));
      } catch (SocketException e) {
        log.warn("Error while setting soTimeout to 120000");
      }
      dis = new DataInputStream(s.getInputStream());
      dis.readFully(new byte[3]); //read "OK\n"
      StringBuilder sb = new StringBuilder();
      sb.append("Subscribe to ");
      sb.append(hostname);
      sb.append(":");
      sb.append(port);
      sb.append(" for record type: ");
      sb.append(recordType);
      log.info(sb.toString());
      // Background thread keeps draining chunks into the queue.
      Thread t = new Thread(this);
      t.start();
    } catch (IOException e) {
      log.error(ExceptionUtil.getStackTrace(e));
      stop();
    }
  }

  /*
   * Read the current chunks in the SDL queue.
   * @return live, synchronized view of the chunks currently in the SDL queue.
   */
  public synchronized Collection<Chunk> read() throws NoSuchElementException {
    Collection<Chunk> list = Collections.synchronizedCollection(q);
    return list;
  }

  /*
   * Unsubscribe from Chukwa collector and stop streaming.
   */
  public synchronized void stop() {
    if(s!=null) {
      // Mark stopped up front so running() reflects reality even if close fails.
      running = false;
      try {
        // dis/dos may still be null when start() failed part way through the
        // handshake (stop() is invoked from its catch block); guard each close.
        if (dis != null) {
          dis.close();
        }
        if (dos != null) {
          dos.close();
        }
        s.close();
        StringBuilder sb = new StringBuilder();
        sb.append("Unsubscribe from ");
        sb.append(hostname);
        sb.append(":");
        sb.append(port);
        sb.append(" for data type: ");
        sb.append(recordType);
        log.info(sb.toString());
      } catch (IOException e) {
        log.debug("Unable to close Socket Tee client socket.");
      }
    }
  }

  /*
   * Check if streaming is currently happening for the current instance of SDL.
   * @return running state of the SDL,
   */
  public boolean running() {
    return running;
  }

  /*
   * Background thread for reading data from SocketTeeWriter, and add new data
   * into SDL queue.
   */
  @Override
  public synchronized void run() {
    try {
      Chunk c;
      while ((c = ChunkImpl.read(dis)) != null) {
        StringBuilder sb = new StringBuilder();
        sb.append("Chunk received, recordType:");
        sb.append(c.getDataType());
        log.debug(sb);
        // Keep only the most recent QUEUE_MAX chunks.
        if(q.size()>QUEUE_MAX) {
          q.poll();
        }
        q.offer(c);
      }
    } catch (IOException e) {
      log.error(ExceptionUtil.getStackTrace(e));
      stop();
    }
  }
}
| 8,492 |
0 | Create_ds/geronimo-metrics/geronimo-metrics/src/test/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics/src/test/java/org/apache/geronimo/microprofile/metrics/test/ArquillianSetup.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.test;
import org.apache.catalina.Context;
import org.apache.geronimo.microprofile.metrics.cdi.MetricsExtension;
import org.apache.meecrowave.Meecrowave;
import org.apache.meecrowave.arquillian.MeecrowaveContainer;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.annotation.Metric;
import org.jboss.arquillian.container.spi.client.container.DeployableContainer;
import org.jboss.arquillian.container.spi.context.annotation.ContainerScoped;
import org.jboss.arquillian.container.spi.context.annotation.DeploymentScoped;
import org.jboss.arquillian.container.spi.event.container.AfterDeploy;
import org.jboss.arquillian.container.spi.event.container.AfterStart;
import org.jboss.arquillian.container.spi.event.container.BeforeUnDeploy;
import org.jboss.arquillian.container.test.spi.client.deployment.ApplicationArchiveProcessor;
import org.jboss.arquillian.core.api.Instance;
import org.jboss.arquillian.core.api.InstanceProducer;
import org.jboss.arquillian.core.api.annotation.Inject;
import org.jboss.arquillian.core.api.annotation.Observes;
import org.jboss.arquillian.core.spi.LoadableExtension;
import org.jboss.arquillian.test.spi.TestClass;
import org.jboss.arquillian.test.spi.TestEnricher;
import org.jboss.arquillian.test.spi.event.suite.Before;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import javax.enterprise.inject.spi.BeanManager;
import javax.enterprise.inject.spi.CDI;
import javax.enterprise.util.AnnotationLiteral;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.stream.Stream;
/**
 * Arquillian {@link LoadableExtension} wiring Meecrowave for the MicroProfile
 * Metrics TCK: swaps in {@link TckContainer}, captures the deployed webapp's
 * classloader and BeanManager, resolves CDI-typed test method parameters, and
 * makes sure the test class itself ends up in the deployed archive.
 */
public class ArquillianSetup implements LoadableExtension {
    @Override
    public void register(final ExtensionBuilder extensionBuilder) {
        extensionBuilder.observer(EnvSetup.class)
                .override(DeployableContainer.class, MeecrowaveContainer.class, TckContainer.class)
                .service(TestEnricher.class, ParamEnricher.class)
                .service(ApplicationArchiveProcessor.class, EnsureTestIsInTheArchiveProcessor.class);
    }

    /**
     * Lifecycle observer capturing the {@link Meecrowave} instance and the webapp
     * classloader so later phases (test enrichment) can run against the deployed
     * application's CDI context.
     */
    public static class EnvSetup {
        @Inject
        @DeploymentScoped
        private InstanceProducer<BeanManager> beanManagerInstanceProducer;
        @Inject
        @DeploymentScoped
        private InstanceProducer<ClassLoader> appClassLoaderInstanceProducer;
        @Inject
        @ContainerScoped
        private InstanceProducer<Meecrowave> container;

        // After container start: pull the private "container" field out of the
        // deployable container by reflection (it is not exposed) and publish it
        // in the container scope. Note: the local "container" shadows the field.
        public void onDeploy(@Observes final AfterStart afterStart) throws Exception {
            final DeployableContainer<?> deployableContainer = afterStart.getDeployableContainer();
            final Field container = MeecrowaveContainer.class.getDeclaredField("container");
            container.setAccessible(true);
            final Meecrowave meecrowave = Meecrowave.class.cast(container.get(deployableContainer));
            this.container.set(meecrowave);
        }

        // After deployment: remember the webapp classloader (first child of the
        // Tomcat host) and switch the TCCL to it so CDI.current() resolves
        // against the deployed application before grabbing its BeanManager.
        public void onDeploy(@Observes final AfterDeploy afterDeploy) {
            final Meecrowave meecrowave = container.get();
            final ClassLoader appLoader = Context.class.cast(meecrowave.getTomcat().getHost().findChildren()[0]).getLoader().getClassLoader();
            appClassLoaderInstanceProducer.set(appLoader);
            final Thread thread = Thread.currentThread();
            thread.setContextClassLoader(appLoader);
            beanManagerInstanceProducer.set(CDI.current().getBeanManager());
        }

        // Before undeploy: restore the server's parent classloader as the TCCL.
        public void onUndeploy(@Observes final BeforeUnDeploy beforeUnDeploy) {
            final ClassLoader cl = container.get().getTomcat().getServer().getParentClassLoader();
            Thread.currentThread().setContextClassLoader(cl);
        }

        // Before each test: inject the test instance with the application's CDI
        // beans, temporarily running under the webapp classloader; the returned
        // creational-context handle is closed immediately.
        public void enrich(@Observes final Before before) throws Exception {
            final Thread thread = Thread.currentThread();
            final ClassLoader classLoader = thread.getContextClassLoader();
            thread.setContextClassLoader(appClassLoaderInstanceProducer.get());
            try {
                container.get().inject(before.getTestInstance()).close();
            } finally {
                thread.setContextClassLoader(classLoader);
            }
        }
    }

    /**
     * Resolves CDI beans for test method parameters (the metrics TCK injects
     * metrics as method parameters), performing each lookup under the webapp
     * classloader and restoring the previous TCCL afterwards.
     */
    public static class ParamEnricher implements TestEnricher {
        @Inject
        @DeploymentScoped
        private Instance<BeanManager> beanManagerInstanceProducer;
        @Inject
        @DeploymentScoped
        private Instance<ClassLoader> appClassLoaderInstanceProducer;

        @Override
        public void enrich(final Object testCase) {
            // no-op
        }

        @Override
        public Object[] resolve(final Method method) {
            return Stream.of(method.getParameters())
                    .map(p -> {
                        final Thread thread = Thread.currentThread();
                        final ClassLoader classLoader = thread.getContextClassLoader();
                        thread.setContextClassLoader(appClassLoaderInstanceProducer.get());
                        try {
                            final CDI<Object> cdi = CDI.current();
                            // keep only the qualifier annotations for the bean lookup
                            final Annotation[] qualifiers = Stream.of(p.getAnnotations())
                                    .filter(it -> cdi.getBeanManager().isQualifier(it.annotationType()))
                                    .toArray(Annotation[]::new);
                            return cdi.select(p.getType(), fixQualifiers(qualifiers)).get();
                        } catch (final RuntimeException re) {
                            re.printStackTrace(); // easier to debug when some test fail since TCK inject metrics as params
                            return null;
                        } finally {
                            thread.setContextClassLoader(classLoader);
                        }
                    })
                    .toArray();
        }

        // The extension makes @Metric's name/tags binding, so the literal's tags
        // must match exactly what MetricsExtension registered; rebuild them here.
        private Annotation[] fixQualifiers(final Annotation[] qualifiers) {
            final MetricsExtension metricsExtension = CDI.current().select(MetricsExtension.class).get();
            return Stream.of(qualifiers)
                    .map(it -> {
                        if (Metric.class == it.annotationType()) { // we make tags and name binding so ensure it uses the right values
                            final Metric delegate = Metric.class.cast(it);
                            final String[] tags = new MetricID(delegate.name(), metricsExtension.createTags(delegate.tags()))
                                    .getTagsAsList().stream()
                                    .map(t -> t.getTagName() + '=' + t.getTagValue())
                                    .distinct()
                                    .toArray(String[]::new);
                            return new MetricLiteral(delegate, tags);
                        }
                        return it;
                    })
                    .toArray(Annotation[]::new);
        }
    }

    // @Metric annotation literal delegating everything but tags() to the
    // original annotation instance.
    private static class MetricLiteral extends AnnotationLiteral<Metric> implements Metric {
        private final Metric delegate;
        private final String[] tags;

        private MetricLiteral(final Metric delegate, final String[] tags) {
            this.delegate = delegate;
            this.tags = tags;
        }

        @Override
        public String name() {
            return delegate.name();
        }

        @Override
        public String[] tags() {
            return tags;
        }

        @Override
        public boolean absolute() {
            return delegate.absolute();
        }

        @Override
        public String displayName() {
            return delegate.displayName();
        }

        @Override
        public String description() {
            return delegate.description();
        }

        @Override
        public String unit() {
            return delegate.unit();
        }
    }

    // TCK archives don't always contain the test class itself; add it when missing.
    public static class EnsureTestIsInTheArchiveProcessor implements ApplicationArchiveProcessor {
        @Override
        public void process(final Archive<?> archive, final TestClass testClass) {
            if (JavaArchive.class.isInstance(archive) && !archive.contains(testClass.getName().replace('.', '/') + ".class")) {
                JavaArchive.class.cast(archive).addClass(testClass.getJavaClass());
            }
        }
    }
}
| 8,493 |
0 | Create_ds/geronimo-metrics/geronimo-metrics/src/test/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics/src/test/java/org/apache/geronimo/microprofile/metrics/test/TckContainer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.test;
import static java.lang.ClassLoader.getSystemClassLoader;
import static java.lang.String.format;
import static java.util.Optional.of;
import java.io.File;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.loader.WebappClassLoaderBase;
import org.apache.catalina.loader.WebappLoader;
import org.apache.meecrowave.Meecrowave;
import org.apache.meecrowave.arquillian.MeecrowaveConfiguration;
import org.apache.meecrowave.arquillian.MeecrowaveContainer;
import org.apache.meecrowave.io.IO;
import org.jboss.arquillian.container.spi.client.protocol.ProtocolDescription;
import org.jboss.arquillian.container.spi.client.protocol.metadata.HTTPContext;
import org.jboss.arquillian.container.spi.client.protocol.metadata.ProtocolMetaData;
import org.jboss.arquillian.container.spi.client.protocol.metadata.Servlet;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.exporter.ZipExporter;
/**
 * Meecrowave Arquillian container tailored for the MicroProfile Metrics TCK:
 * every archive is deployed on the ROOT context (the TCK builds URLs from the
 * "test.url" system property without a context path), and the parent class'
 * private state is reached reflectively since MeecrowaveContainer does not
 * expose it.
 */
public class TckContainer extends MeecrowaveContainer {
    // Per-archive cleanup actions (undeploy + dump/webapp file removal),
    // registered at deploy time and executed on undeploy.
    private final Map<Archive<?>, Runnable> onUnDeploy = new HashMap<>();

    @Override
    public void setup(final MeecrowaveConfiguration configuration) {
        super.setup(configuration);
        // -1 presumably disables the redeploy watcher debounce; redeploys are
        // driven by Arquillian here — TODO confirm against Meecrowave docs.
        getConfiguration().setWatcherBouncing(-1);
    }

    @Override
    public ProtocolMetaData deploy(final Archive<?> archive) {
        final File dump = toArchiveDump(archive);
        archive.as(ZipExporter.class).exportTo(dump, true);
        final String context = ""; // forced by tcks :(
        onUnDeploy.put(archive, () -> {
            getContainer().undeploy(""); // cause we forced the context name
            IO.delete(dump);
            // Remove the exploded ROOT webapp directory left behind, if any.
            of(new File(getContainer().getBase(), "webapps/ROOT")).filter(File::exists).ifPresent(IO::delete);
        });
        final Meecrowave container = getContainer();
        container.deployWebapp(new Meecrowave.DeploymentMeta(context, dump, c -> {
            // Give the webapp classloader the system classloader as its "javase"
            // parent so JDK/TCK classes resolve; the setter is not public, hence
            // the reflective call after the loader has started.
            c.setLoader(new WebappLoader() {
                @Override
                protected void startInternal() throws LifecycleException {
                    super.startInternal();
                    final WebappClassLoaderBase webappClassLoaderBase = WebappClassLoaderBase.class.cast(getClassLoader());
                    try {
                        final Method setJavaseClassLoader = WebappClassLoaderBase.class.getDeclaredMethod("setJavaseClassLoader", ClassLoader.class);
                        setJavaseClassLoader.setAccessible(true);
                        setJavaseClassLoader.invoke(webappClassLoaderBase, getSystemClassLoader());
                    } catch (final Exception e) {
                        throw new IllegalStateException(e);
                    }
                }
            });
        }));
        final Meecrowave.Builder configuration = container.getConfiguration();
        final int port = configuration.isSkipHttp() ? configuration.getHttpsPort() : configuration.getHttpPort();
        System.setProperty("test.url", format("http://localhost:%d", port)); // for tck
        return new ProtocolMetaData().addContext(new HTTPContext(configuration.getHost(), port).add(new Servlet("arquillian", context)));
    }

    @Override
    public void undeploy(final Archive<?> archive) { // we rename the archive so the context so we must align the undeploy
        Runnable remove = onUnDeploy.remove(archive);
        if (remove == null && onUnDeploy.size() == 1) { // assume it is the one
            final Archive<?> key = onUnDeploy.keySet().iterator().next();
            remove = onUnDeploy.remove(key);
        }
        if (remove != null) {
            remove.run();
        } else {
            Logger.getLogger(getClass().getName())
                    .warning("Can't find " + archive + " to undeploy it, it can break next tests");
        }
    }

    // Reflective accessor for the parent's private "configuration" field.
    private Meecrowave.Builder getConfiguration() {
        try {
            final Field field = getClass().getSuperclass().getDeclaredField("configuration");
            field.setAccessible(true);
            return Meecrowave.Builder.class.cast(field.get(this));
        } catch (final Exception e) {
            throw new IllegalStateException(e);
        }
    }

    // Reflective accessor for the parent's private "container" field.
    private Meecrowave getContainer() {
        try {
            final Field field = getClass().getSuperclass().getDeclaredField("container");
            field.setAccessible(true);
            return Meecrowave.class.cast(field.get(this));
        } catch (final Exception e) {
            throw new IllegalStateException(e);
        }
    }

    // Reflective call to the parent's private toArchiveDump(Archive) helper.
    private File toArchiveDump(final Archive<?> argValue) {
        try {
            final Method method = getClass().getSuperclass().getDeclaredMethod("toArchiveDump", Archive.class);
            method.setAccessible(true);
            return File.class.cast(method.invoke(this, argValue));
        } catch (final Exception e) {
            throw new IllegalStateException(e);
        }
    }
}
| 8,494 |
0 | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics/cdi/SimplyTimedInterceptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.cdi;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.SimpleTimer;
import org.eclipse.microprofile.metrics.Tag;
import org.eclipse.microprofile.metrics.annotation.SimplyTimed;
import javax.annotation.Priority;
import javax.enterprise.inject.Intercepted;
import javax.enterprise.inject.spi.AnnotatedType;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.inject.Inject;
import javax.interceptor.AroundConstruct;
import javax.interceptor.AroundInvoke;
import javax.interceptor.Interceptor;
import javax.interceptor.InvocationContext;
import java.io.Serializable;
import java.lang.reflect.Executable;
import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Stream;
import static java.util.Optional.ofNullable;
/**
 * Interceptor backing {@code @SimplyTimed}: wraps intercepted constructors and
 * methods in the {@link SimpleTimer} resolved from the metric registry, caching
 * the timer per executable.
 */
@SimplyTimed
@Interceptor
@Priority(Interceptor.Priority.LIBRARY_BEFORE)
public class SimplyTimedInterceptor implements Serializable {
    @Inject
    private MetricRegistry registry;

    @Inject
    @Intercepted
    private Bean<?> bean;

    @Inject
    private BeanManager beanManager;

    @Inject
    private MetricsExtension extension;

    // Per-executable timer cache; transient + volatile so it is rebuilt lazily
    // after deserialization.
    private transient volatile ConcurrentMap<Executable, SimpleTimer> timers = new ConcurrentHashMap<>();

    @AroundConstruct
    public Object onConstructor(final InvocationContext context) throws Exception {
        return findTimer(context.getConstructor()).time(context::proceed);
    }

    @AroundInvoke
    public Object onMethod(final InvocationContext context) throws Exception {
        return findTimer(context.getMethod()).time(context::proceed);
    }

    // Returns the cached timer for the executable, resolving and caching it on
    // first use.
    private SimpleTimer findTimer(final Executable executable) {
        ensureCache();
        final SimpleTimer cached = timers.get(executable);
        if (cached != null) {
            return cached;
        }
        final SimpleTimer resolved = resolveTimer(executable);
        timers.putIfAbsent(executable, resolved);
        return resolved;
    }

    // Re-creates the cache when this instance was deserialized (field is transient).
    private void ensureCache() {
        if (timers == null) {
            synchronized (this) {
                if (timers == null) {
                    timers = new ConcurrentHashMap<>();
                }
            }
        }
    }

    // Resolves the metric name and tags from the @SimplyTimed annotation on the
    // intercepted executable and looks the timer up in the registry.
    private SimpleTimer resolveTimer(final Executable executable) {
        final AnnotatedType<?> type = beanManager.createAnnotatedType(bean.getBeanClass());
        final SimplyTimed timed = Stream.concat(type.getMethods().stream(), type.getConstructors().stream())
                .filter(callable -> callable.getJavaMember().equals(executable))
                .findFirst()
                .map(callable -> callable.getAnnotation(SimplyTimed.class))
                .orElse(null);
        final Class<?> declaring = Modifier.isAbstract(executable.getDeclaringClass().getModifiers())
                ? type.getJavaClass() : executable.getDeclaringClass();
        final String name = Names.findName(
                declaring, executable,
                timed == null ? null : timed.name(), timed != null && timed.absolute(),
                ofNullable(extension.getAnnotation(type, SimplyTimed.class)).map(SimplyTimed::name).orElse(""));
        final SimpleTimer timer = registry.getSimpleTimer(
                new MetricID(name, extension.createTags(timed == null ? new String[0] : timed.tags())));
        if (timer == null) {
            throw new IllegalStateException("No timer with name [" + name + "] found in registry [" + registry + "]");
        }
        return timer;
    }
}
| 8,495 |
0 | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics/cdi/MetricsExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.cdi;
import org.apache.geronimo.microprofile.metrics.common.BaseMetrics;
import org.apache.geronimo.microprofile.metrics.common.GaugeImpl;
import org.apache.geronimo.microprofile.metrics.common.RegistryImpl;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.metrics.Counter;
import org.eclipse.microprofile.metrics.Gauge;
import org.eclipse.microprofile.metrics.Histogram;
import org.eclipse.microprofile.metrics.Metadata;
import org.eclipse.microprofile.metrics.Meter;
import org.eclipse.microprofile.metrics.Metric;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.MetricType;
import org.eclipse.microprofile.metrics.SimpleTimer;
import org.eclipse.microprofile.metrics.Tag;
import org.eclipse.microprofile.metrics.Timer;
import org.eclipse.microprofile.metrics.annotation.ConcurrentGauge;
import org.eclipse.microprofile.metrics.annotation.Counted;
import org.eclipse.microprofile.metrics.annotation.Metered;
import org.eclipse.microprofile.metrics.annotation.RegistryType;
import org.eclipse.microprofile.metrics.annotation.SimplyTimed;
import org.eclipse.microprofile.metrics.annotation.Timed;
import javax.enterprise.context.Dependent;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.Any;
import javax.enterprise.inject.Default;
import javax.enterprise.inject.Produces;
import javax.enterprise.inject.Stereotype;
import javax.enterprise.inject.spi.AfterBeanDiscovery;
import javax.enterprise.inject.spi.AfterDeploymentValidation;
import javax.enterprise.inject.spi.Annotated;
import javax.enterprise.inject.spi.AnnotatedCallable;
import javax.enterprise.inject.spi.AnnotatedType;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.enterprise.inject.spi.BeforeBeanDiscovery;
import javax.enterprise.inject.spi.BeforeShutdown;
import javax.enterprise.inject.spi.Extension;
import javax.enterprise.inject.spi.InjectionPoint;
import javax.enterprise.inject.spi.ProcessAnnotatedType;
import javax.enterprise.inject.spi.ProcessInjectionPoint;
import javax.enterprise.inject.spi.ProcessProducerField;
import javax.enterprise.inject.spi.ProcessProducerMethod;
import javax.enterprise.inject.spi.WithAnnotations;
import javax.enterprise.inject.spi.configurator.BeanConfigurator;
import javax.enterprise.util.AnnotationLiteral;
import javax.enterprise.util.Nonbinding;
import java.lang.annotation.Annotation;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.stream.Stream;
import static java.util.Optional.of;
import static java.util.Optional.ofNullable;
public class MetricsExtension implements Extension { // must not explicitly depend on jaxrs since it is dropped in nojaxrs bundle
private static final Tag[] NO_TAG = new Tag[0];
private MetricRegistry applicationRegistry;
private MetricRegistry baseRegistry;
private MetricRegistry vendorRegistry;
private final Map<MetricID, Metadata> registrations = new HashMap<>();
private final Map<MetricID, Function<BeanManager, Gauge<?>>> gaugeFactories = new HashMap<>();
private final Collection<Runnable> producersRegistrations = new ArrayList<>();
private final Collection<CreationalContext<?>> creationalContexts = new ArrayList<>();
void vetoEndpointIfNotActivated(@Observes final ProcessAnnotatedType<?> processAnnotatedType) {
// default is secured so deploy
final String name = processAnnotatedType.getAnnotatedType().getJavaClass().getName();
if ("false".equalsIgnoreCase(System.getProperty("geronimo.metrics.jaxrs.activated")) &&
name.equals("org.apache.geronimo.microprofile.metrics.jaxrs.CdiMetricsEndpoints")) {
processAnnotatedType.veto();
} else if ("org.apache.geronimo.microprofile.metrics.common.jaxrs.MetricsEndpoints".equals(name)) { // can happen in shades
processAnnotatedType.veto();
}
}
    // RegistryImpl can end up discovered as a bean archive class when the library
    // is shaded; veto it so the registries created and fired by this extension
    // are the only ones in play.
    void vetoDefaultRegistry(@Observes final ProcessAnnotatedType<RegistryImpl> processAnnotatedType) {
        processAnnotatedType.veto();
    }
void letOtherExtensionsUseRegistries(@Observes final BeforeBeanDiscovery beforeBeanDiscovery, final BeanManager beanManager) {
final Tag[] globalTags = OptionalConfig.findTags();
applicationRegistry = new RegistryImpl(MetricRegistry.Type.APPLICATION, globalTags);
baseRegistry = new RegistryImpl(MetricRegistry.Type.BASE, globalTags);
vendorRegistry = new RegistryImpl(MetricRegistry.Type.VENDOR, globalTags);
beforeBeanDiscovery.addQualifier(RegistryType.class);
beanManager.fireEvent(applicationRegistry);
beanManager.fireEvent(applicationRegistry, new RegistryTypeImpl(MetricRegistry.Type.APPLICATION));
beanManager.fireEvent(baseRegistry, new RegistryTypeImpl(MetricRegistry.Type.BASE));
beanManager.fireEvent(vendorRegistry, new RegistryTypeImpl(MetricRegistry.Type.VENDOR));
// we make @Metric.name binding (to avoid to write producers relying on injection point)
beforeBeanDiscovery.configureQualifier(org.eclipse.microprofile.metrics.annotation.Metric.class)
.methods().stream().filter(method -> {
final String name = method.getAnnotated().getJavaMember().getName();
return name.equals("name") || name.equals("tags");
}).forEach(method -> method.remove(a -> a.annotationType() == Nonbinding.class));
}
void onMetric(@Observes final ProcessProducerField<? extends Metric, ?> processProducerField, final BeanManager beanManager) {
final org.eclipse.microprofile.metrics.annotation.Metric config = processProducerField.getAnnotated()
.getAnnotation(org.eclipse.microprofile.metrics.annotation.Metric.class);
if (config == null) {
return;
}
final Class<?> clazz = findClass(processProducerField.getAnnotated().getBaseType());
if (clazz == null || !Metric.class.isAssignableFrom(clazz)) {
return;
}
final Member javaMember = processProducerField.getAnnotatedProducerField().getJavaMember();
final Bean<?> bean = processProducerField.getBean();
producersRegistrations.add(() -> registerProducer(beanManager, config, clazz, javaMember, bean));
}
void onMetric(@Observes ProcessProducerMethod<? extends Metric, ?> processProducerMethod,
final BeanManager beanManager) {
final org.eclipse.microprofile.metrics.annotation.Metric config = processProducerMethod.getAnnotated()
.getAnnotation(org.eclipse.microprofile.metrics.annotation.Metric.class);
if (config == null) {
return;
}
final Class<?> clazz = findClass(processProducerMethod.getAnnotated().getBaseType());
if (clazz == null || !Gauge.class.isAssignableFrom(clazz)) {
return;
}
final Member javaMember = processProducerMethod.getAnnotatedProducerMethod().getJavaMember();
final Bean<?> bean = processProducerMethod.getBean();
producersRegistrations.add(() -> registerProducer(beanManager, config, clazz, javaMember, bean));
}
void onMetric(@Observes final ProcessInjectionPoint<?, ?> metricInjectionPointProcessEvent) {
final InjectionPoint injectionPoint = metricInjectionPointProcessEvent.getInjectionPoint();
final Class<?> clazz = findClass(injectionPoint.getType());
if (clazz == null || !Metric.class.isAssignableFrom(clazz)) {
return;
}
final Annotated annotated = injectionPoint.getAnnotated();
final org.eclipse.microprofile.metrics.annotation.Metric config = annotated.getAnnotation(
org.eclipse.microprofile.metrics.annotation.Metric.class);
final MetricType type = findType(clazz);
if (config != null) {
final String name = Names.findName(injectionPoint.getMember().getDeclaringClass(), injectionPoint.getMember(),
of(config.name()).filter(it -> !it.isEmpty()).orElseGet(injectionPoint.getMember()::getName), config.absolute(),
"");
final Metadata metadata = Metadata.builder()
.withName(name)
.withDisplayName(config.displayName())
.withDescription(config.description())
.withType(type)
.withUnit(config.unit())
.build();
final MetricID id = new MetricID(name, createTags(config.tags()));
addRegistration(metadata, id);
if (!name.equals(config.name())) {
final Annotation[] newQualifiers = Stream.concat(metricInjectionPointProcessEvent.getInjectionPoint().getQualifiers().stream()
.filter(it -> it.annotationType() != org.eclipse.microprofile.metrics.annotation.Metric.class),
Stream.of(new MetricImpl(metadata, id)))
.toArray(Annotation[]::new);
metricInjectionPointProcessEvent.configureInjectionPoint()
.qualifiers(newQualifiers);
}
} else {
final String name = MetricRegistry.name(injectionPoint.getMember().getDeclaringClass(), injectionPoint.getMember().getName());
final Metadata metadata = Metadata.builder().withName(name).withType(type).build();
final MetricID metricID = new MetricID(name);
addRegistration(metadata, metricID);
// ensure the injection uses the qualifier since we'll not register it without
final Annotation[] newQualifiers = Stream.concat(metricInjectionPointProcessEvent.getInjectionPoint().getQualifiers().stream()
.filter(it -> it.annotationType() != Default.class),
Stream.of(new MetricImpl(metadata, metricID)))
.toArray(Annotation[]::new);
metricInjectionPointProcessEvent.configureInjectionPoint()
.qualifiers(newQualifiers);
}
}
void findInterceptorMetrics(@Observes @WithAnnotations({
Counted.class,
SimplyTimed.class,
Timed.class,
ConcurrentGauge.class,
org.eclipse.microprofile.metrics.annotation.Metered.class,
org.eclipse.microprofile.metrics.annotation.Gauge.class
}) final ProcessAnnotatedType<?> pat) {
final AnnotatedType<?> annotatedType = pat.getAnnotatedType();
final Class<?> javaClass = annotatedType.getJavaClass();
if (javaClass.getName().startsWith("org.apache.geronimo.microprofile.metrics.") ||
Modifier.isAbstract(javaClass.getModifiers()) ||
javaClass.isInterface()) {
return;
}
Stream.concat(annotatedType.getMethods().stream(), annotatedType.getConstructors().stream())
.filter(method -> method.getJavaMember().getDeclaringClass() == javaClass || Modifier.isAbstract(method.getJavaMember().getDeclaringClass().getModifiers()))
.filter(method -> !method.getJavaMember().isSynthetic() && !Modifier.isPrivate(method.getJavaMember().getModifiers()))
.filter(method -> !method.isAnnotationPresent(Produces.class))
.forEach(method -> doRegisterMetric(annotatedType, javaClass, method));
}
void afterBeanDiscovery(@Observes final AfterBeanDiscovery afterBeanDiscovery) {
addBean(afterBeanDiscovery, MetricRegistry.Type.APPLICATION.name(), MetricRegistry.class, new RegistryTypeImpl(MetricRegistry.Type.APPLICATION), applicationRegistry, true);
addBean(afterBeanDiscovery, MetricRegistry.Type.BASE.name(), MetricRegistry.class, new RegistryTypeImpl(MetricRegistry.Type.BASE), baseRegistry, false);
addBean(afterBeanDiscovery, MetricRegistry.Type.VENDOR.name(), MetricRegistry.class, new RegistryTypeImpl(MetricRegistry.Type.VENDOR), vendorRegistry, false);
// metrics
registrations.forEach((id, metadata) -> {
final String idSuffix = id.getName() + "#" + id.getTagsAsString();
switch (metadata.getTypeRaw()) {
case GAUGE:
addBean(afterBeanDiscovery, idSuffix, Gauge.class, new MetricImpl(metadata, id), new Gauge<Object>() {
private final AtomicReference<Gauge<?>> ref = new AtomicReference<>();
@Override
public Object getValue() {
Gauge<?> gauge = ref.get();
if (gauge == null) { // getGauges() is expensive in current form, avoid it
gauge = applicationRegistry.getGauges().get(id);
ref.compareAndSet(null, gauge);
}
return gauge.getValue();
}
}, true);
break;
case TIMER:
addBean(afterBeanDiscovery, idSuffix, Timer.class, new MetricImpl(metadata, id),
applicationRegistry.timer(metadata, id.getTagsAsList().toArray(NO_TAG)), true);
break;
case SIMPLE_TIMER:
addBean(afterBeanDiscovery, idSuffix, SimpleTimer.class, new MetricImpl(metadata, id),
applicationRegistry.simpleTimer(metadata, id.getTagsAsList().toArray(NO_TAG)), true);
break;
case COUNTER:
addBean(afterBeanDiscovery, idSuffix, Counter.class, new MetricImpl(metadata, id),
applicationRegistry.counter(metadata, id.getTagsAsList().toArray(NO_TAG)), true);
break;
case CONCURRENT_GAUGE:
addBean(afterBeanDiscovery, idSuffix, org.eclipse.microprofile.metrics.ConcurrentGauge.class,
new MetricImpl(metadata, id),
applicationRegistry.concurrentGauge(metadata, id.getTagsAsList().toArray(NO_TAG)), true);
break;
case METERED:
addBean(afterBeanDiscovery, idSuffix, Meter.class, new MetricImpl(metadata, id),
applicationRegistry.meter(metadata, id.getTagsAsList().toArray(NO_TAG)), true);
break;
case HISTOGRAM:
addBean(afterBeanDiscovery, idSuffix, Histogram.class, new MetricImpl(metadata, id),
applicationRegistry.histogram(metadata, id.getTagsAsList().toArray(NO_TAG)), true);
break;
default:
}
});
}
void afterDeploymentValidation(@Observes final AfterDeploymentValidation afterDeploymentValidation,
final BeanManager beanManager) {
registrations.entrySet().stream()
.filter(e -> e.getValue().getTypeRaw() == MetricType.GAUGE)
.forEach(entry -> {
final Gauge<?> gauge = gaugeFactories.get(entry.getKey()).apply(beanManager);
applicationRegistry.register(entry.getValue(), gauge, entry.getKey().getTagsAsList().toArray(NO_TAG));
});
producersRegistrations.forEach(Runnable::run);
producersRegistrations.clear();
gaugeFactories.clear();
registrations.clear();
// mainly for tck, to drop if we add real vendor metrics
vendorRegistry.counter("startTime").inc(System.currentTimeMillis());
if (!Boolean.getBoolean("geronimo.metrics.base.skip")) {
new BaseMetrics(baseRegistry).register();
}
}
void beforeShutdown(@Observes final BeforeShutdown beforeShutdown) {
creationalContexts.forEach(CreationalContext::release);
}
private void addRegistration(final Metadata metadata, final MetricID id) {
registrations.putIfAbsent(id, metadata);
}
public Tag[] createTags(final String[] tags) {
return toTags(tags);
}
private static Tag[] toTags(final String[] tags) {
return Stream.of(tags).filter(it -> it.contains("=")).map(it -> {
final int sep = it.indexOf("=");
return new Tag(it.substring(0, sep), it.substring(sep + 1));
}).toArray(Tag[]::new);
}
private void doRegisterMetric(final AnnotatedType<?> annotatedType, final Class<?> javaClass, final AnnotatedCallable<?> method) {
final Member javaMember = method.getJavaMember();
final Counted counted = ofNullable(method.getAnnotation(Counted.class)).orElseGet(() ->
getAnnotation(annotatedType, Counted.class));
if (counted != null) {
final boolean isMethod = method.isAnnotationPresent(Counted.class);
final String name = Names.findName(javaClass, javaMember, isMethod ? counted.name() : "", counted.absolute(),
ofNullable(getAnnotation(annotatedType, Counted.class)).map(Counted::name).orElse(""));
final Metadata metadata = Metadata.builder()
.withName(name)
.withDisplayName(counted.displayName())
.withDescription(counted.description())
.withType(MetricType.COUNTER)
.withUnit(counted.unit())
.build();
final MetricID metricID = new MetricID(name, createTags(counted.tags()));
addRegistration(metadata, metricID);
}
final ConcurrentGauge concurrentGauge = ofNullable(method.getAnnotation(ConcurrentGauge.class)).orElseGet(() ->
getAnnotation(annotatedType, ConcurrentGauge.class));
if (concurrentGauge != null) {
final boolean isMethod = method.isAnnotationPresent(ConcurrentGauge.class);
final String name = Names.findName(javaClass, javaMember, isMethod ? concurrentGauge.name() : "", concurrentGauge.absolute(),
ofNullable(getAnnotation(annotatedType, ConcurrentGauge.class)).map(ConcurrentGauge::name).orElse(""));
final Metadata metadata = Metadata.builder()
.withName(name)
.withDisplayName(concurrentGauge.displayName())
.withDescription(concurrentGauge.description())
.withType(MetricType.CONCURRENT_GAUGE)
.withUnit(concurrentGauge.unit())
.build();
final MetricID metricID = new MetricID(name, createTags(concurrentGauge.tags()));
addRegistration(metadata, metricID);
}
final Timed timed = ofNullable(method.getAnnotation(Timed.class)).orElseGet(() -> getAnnotation(annotatedType, Timed.class));
if (timed != null) {
final boolean isMethod = method.isAnnotationPresent(Timed.class);
final String name = Names.findName(javaClass, javaMember, isMethod ? timed.name() : "", timed.absolute(),
ofNullable(getAnnotation(annotatedType, Timed.class)).map(Timed::name).orElse(""));
final Metadata metadata = Metadata.builder()
.withName(name)
.withDisplayName(timed.displayName())
.withDescription(timed.description())
.withType(MetricType.TIMER)
.withUnit(timed.unit())
.build();
final MetricID metricID = new MetricID(name, createTags(timed.tags()));
addRegistration(metadata, metricID);
}
final SimplyTimed simplyTimed = ofNullable(method.getAnnotation(SimplyTimed.class)).orElseGet(() -> getAnnotation(annotatedType, SimplyTimed.class));
if (simplyTimed != null) {
final boolean isMethod = method.isAnnotationPresent(SimplyTimed.class);
final String name = Names.findName(javaClass, javaMember, isMethod ? simplyTimed.name() : "", simplyTimed.absolute(),
ofNullable(getAnnotation(annotatedType, SimplyTimed.class)).map(SimplyTimed::name).orElse(""));
final Metadata metadata = Metadata.builder()
.withName(name)
.withDisplayName(simplyTimed.displayName())
.withDescription(simplyTimed.description())
.withType(MetricType.SIMPLE_TIMER)
.withUnit(simplyTimed.unit())
.build();
final MetricID metricID = new MetricID(name, createTags(simplyTimed.tags()));
addRegistration(metadata, metricID);
}
final Metered metered = ofNullable(method.getAnnotation(Metered.class))
.orElseGet(() -> getAnnotation(annotatedType, Metered.class));
if (metered != null) {
final boolean isMethod = method.isAnnotationPresent(Metered.class);
final String name = Names.findName(javaClass, javaMember, isMethod ? metered.name() : "", metered.absolute(),
ofNullable(getAnnotation(annotatedType, Metered.class)).map(Metered::name).orElse(""));
final Metadata metadata = Metadata.builder()
.withName(name)
.withDisplayName(metered.displayName())
.withDescription(metered.description())
.withType(MetricType.METERED)
.withUnit(metered.unit())
.build();
final MetricID metricID = new MetricID(name, createTags(metered.tags()));
addRegistration(metadata, metricID);
}
final org.eclipse.microprofile.metrics.annotation.Gauge gauge = ofNullable(method.getAnnotation(org.eclipse.microprofile.metrics.annotation.Gauge.class))
.orElseGet(() -> getAnnotation(annotatedType, org.eclipse.microprofile.metrics.annotation.Gauge.class));
if (gauge != null) {
final String name = Names.findName(
javaClass, javaMember, gauge.name(), gauge.absolute(),
ofNullable(getAnnotation(annotatedType, org.eclipse.microprofile.metrics.annotation.Gauge.class)).map(org.eclipse.microprofile.metrics.annotation.Gauge::name).orElse(""));
final Metadata metadata = Metadata.builder()
.withName(name)
.withDisplayName(gauge.displayName())
.withDescription(gauge.description())
.withType(MetricType.GAUGE)
.withUnit(gauge.unit())
.build();
final MetricID metricID = new MetricID(name, createTags(gauge.tags()));
addRegistration(metadata, metricID);
gaugeFactories.put(metricID, beanManager -> {
final Object reference = getInstance(javaClass, beanManager);
final Method mtd = Method.class.cast(javaMember);
return new GaugeImpl<>(reference, mtd);
});
}
}
private void registerProducer(final BeanManager beanManager, final org.eclipse.microprofile.metrics.annotation.Metric config,
final Class<?> clazz, final Member javaMember, final Bean<?> bean) {
Class<?> beanClass = bean.getBeanClass();
if (beanClass == null) {
beanClass = javaMember.getDeclaringClass();
}
final Metadata metadata = createMetadata(config, clazz, javaMember, beanClass);
applicationRegistry.register(
metadata, Metric.class.cast(getInstance(clazz, beanManager, bean)),
createTags(config.tags()));
}
private Metadata createMetadata(final org.eclipse.microprofile.metrics.annotation.Metric config,
final Class<?> clazz, final Member javaMember, final Class<?> beanClass) {
final String name = Names.findName(beanClass, javaMember,
of(config.name()).filter(it -> !it.isEmpty()).orElseGet(javaMember::getName), config.absolute(),
"");
final Metadata metadata = Metadata.builder()
.withName(name)
.withDisplayName(config.displayName())
.withDescription(config.description())
.withType(findType(clazz))
.withUnit(config.unit())
.build();
final MetricID id = new MetricID(name, createTags(config.tags()));
addRegistration(metadata, id);
return metadata;
}
private MetricType findType(final Class<?> clazz) {
final MetricType type;
if (Counter.class.isAssignableFrom(clazz)) {
type = MetricType.COUNTER;
} else if (Gauge.class.isAssignableFrom(clazz)) {
type = MetricType.GAUGE;
} else if (Meter.class.isAssignableFrom(clazz)) {
type = MetricType.METERED;
} else if (Timer.class.isAssignableFrom(clazz)) {
type = MetricType.TIMER;
} else if (SimpleTimer.class.isAssignableFrom(clazz)) {
type = MetricType.SIMPLE_TIMER;
} else if (Histogram.class.isAssignableFrom(clazz)) {
type = MetricType.HISTOGRAM;
} else if (org.eclipse.microprofile.metrics.ConcurrentGauge.class.isAssignableFrom(clazz)) {
type = MetricType.CONCURRENT_GAUGE;
} else {
type = MetricType.INVALID;
}
return type;
}
private Class<?> findClass(final Type baseType) {
Type type = baseType;
if (ParameterizedType.class.isInstance(baseType)) {
type = ParameterizedType.class.cast(baseType).getRawType();
}
if (Class.class.isInstance(type)) {
return Class.class.cast(type);
}
return null;
}
private Object getInstance(final Class<?> javaClass, final BeanManager beanManager) {
final Bean<?> bean = beanManager.resolve(beanManager.getBeans(javaClass, Default.Literal.INSTANCE));
return getInstance(javaClass, beanManager, bean);
}
private Object getInstance(final Class<?> javaClass, final BeanManager beanManager, final Bean<?> bean) {
final CreationalContext<Object> creationalContext = beanManager.createCreationalContext(null);
final Object reference = beanManager.getReference(bean, javaClass, creationalContext);
if (!beanManager.isNormalScope(bean.getScope())) {
creationalContexts.add(creationalContext);
}
return reference;
}
private void addBean(final AfterBeanDiscovery afterBeanDiscovery,
final String idSuffix,
final Class<?> type,
final Annotation qualifier,
final Object instance,
final boolean addDefaultQualifier) {
final BeanConfigurator<Object> configurator = afterBeanDiscovery.addBean()
.id(MetricsExtension.class.getName() + ":" + type.getName() + ":" + idSuffix)
.beanClass(type)
.scope(Dependent.class) // avoid proxies, tck use assertEquals(proxy, registry.get(xxx))
.createWith(c -> instance);
if (MetricRegistry.class == type) {
configurator.types(MetricRegistry.class, RegistryImpl.class, Objects.class);
} else {
configurator.types(type, Object.class);
}
if (addDefaultQualifier) {
configurator.qualifiers(qualifier, Default.Literal.INSTANCE, Any.Literal.INSTANCE);
} else {
configurator.qualifiers(qualifier, Any.Literal.INSTANCE);
}
}
public <T extends Annotation> T getAnnotation(final AnnotatedType<?> type, final Class<T> expected) {
final T annotation = type.getAnnotation(expected);
if (annotation != null) {
return annotation;
}
// not sexy but not great to use too
return type.getAnnotations().stream()
.filter(a -> a.annotationType().isAnnotationPresent(Stereotype.class))
.map(a -> a.annotationType().getAnnotation(expected))
.filter(Objects::nonNull)
.findFirst()
.orElse(null);
}
private static final class MetricImpl extends AnnotationLiteral<org.eclipse.microprofile.metrics.annotation.Metric> implements org.eclipse.microprofile.metrics.annotation.Metric {
private final Metadata metadata;
private final String[] tags;
private MetricImpl(final Metadata metadata, final MetricID metricID) {
this.metadata = metadata;
this.tags = metricID.getTags().entrySet().stream()
.map(e -> e.getKey() + "=" + e.getValue())
.distinct()
.toArray(String[]::new);
}
@Override
public Class<? extends Annotation> annotationType() {
return org.eclipse.microprofile.metrics.annotation.Metric.class;
}
@Override
public String name() {
return metadata.getName();
}
@Override
public String[] tags() {
return tags;
}
@Override
public boolean absolute() {
return false;
}
@Override
public String displayName() {
return ofNullable(metadata.getDisplayName()).orElse("");
}
@Override
public String description() {
return metadata.getDescription();
}
@Override
public String unit() {
return metadata.getUnit();
}
}
private static final class RegistryTypeImpl extends AnnotationLiteral<RegistryType> implements RegistryType {
private final MetricRegistry.Type type;
private RegistryTypeImpl(final MetricRegistry.Type type) {
this.type = type;
}
@Override
public MetricRegistry.Type type() {
return type;
}
@Override
public Class<? extends Annotation> annotationType() {
return RegistryType.class;
}
}
private static final class OptionalConfig {
private OptionalConfig() {
// no-op
}
public static Tag[] findTags() {
try {
final Config config = ConfigProvider.getConfig();
return config.getOptionalValue("mp.metrics.tags", String.class)
.map(it -> toTags(it.split(",")))
.orElseGet(() -> new Tag[0]);
} catch (final IllegalStateException | ExceptionInInitializerError | NoClassDefFoundError t) {
return new Tag[0];
}
}
}
}
| 8,496 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.cdi;
import org.eclipse.microprofile.metrics.ConcurrentGauge;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.Tag;
import javax.annotation.Priority;
import javax.enterprise.inject.Intercepted;
import javax.enterprise.inject.spi.AnnotatedType;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.inject.Inject;
import javax.interceptor.AroundConstruct;
import javax.interceptor.AroundInvoke;
import javax.interceptor.Interceptor;
import javax.interceptor.InvocationContext;
import java.io.Serializable;
import java.lang.reflect.Executable;
import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Stream;
import static java.util.Optional.ofNullable;
@Interceptor
@Priority(Interceptor.Priority.LIBRARY_BEFORE)
@org.eclipse.microprofile.metrics.annotation.ConcurrentGauge
/**
 * Interceptor backing {@code @ConcurrentGauge}: increments the gauge when the
 * intercepted method/constructor starts and decrements it when it completes.
 */
public class ConcurrentGaugeInterceptor implements Serializable {
    @Inject
    private MetricRegistry registry;

    @Inject
    @Intercepted
    private Bean<?> bean;

    @Inject
    private BeanManager beanManager;

    @Inject
    private MetricsExtension extension;

    // transient: rebuilt lazily after deserialization of the interceptor instance
    private transient volatile ConcurrentMap<Executable, Meta> gauges = new ConcurrentHashMap<>();

    @AroundConstruct
    public Object onConstructor(final InvocationContext context) throws Exception {
        return invoke(context, context.getConstructor());
    }

    @AroundInvoke
    public Object onMethod(final InvocationContext context) throws Exception {
        return invoke(context, context.getMethod());
    }

    /** Wraps the invocation between an inc/dec pair on the resolved gauge. */
    private Object invoke(final InvocationContext context, final Executable executable) throws Exception {
        final Meta counter = find(executable);
        counter.gauge.inc();
        try {
            return context.proceed();
        } finally {
            counter.gauge.dec();
        }
    }

    /**
     * Resolves (and caches) the {@link ConcurrentGauge} registered by
     * {@link MetricsExtension} for the given executable.
     *
     * @throws IllegalStateException when no gauge was registered under the resolved name
     */
    private Meta find(final Executable executable) {
        if (gauges == null) { // after deserialization the transient cache is null
            synchronized (this) {
                if (gauges == null) {
                    gauges = new ConcurrentHashMap<>();
                }
            }
        }
        Meta meta = gauges.get(executable);
        if (meta == null) {
            final AnnotatedType<?> type = beanManager.createAnnotatedType(bean.getBeanClass());
            final org.eclipse.microprofile.metrics.annotation.ConcurrentGauge concurrentGauge = Stream.concat(type.getMethods().stream(), type.getConstructors().stream())
                    .filter(it -> it.getJavaMember().equals(executable))
                    .findFirst()
                    .map(m -> m.getAnnotation(org.eclipse.microprofile.metrics.annotation.ConcurrentGauge.class))
                    .orElse(null);
            final String name = Names.findName(
                    Modifier.isAbstract(executable.getDeclaringClass().getModifiers()) ? type.getJavaClass() : executable.getDeclaringClass(),
                    executable, concurrentGauge == null ? null : concurrentGauge.name(),
                    concurrentGauge != null && concurrentGauge.absolute(),
                    ofNullable(extension.getAnnotation(type, org.eclipse.microprofile.metrics.annotation.ConcurrentGauge.class))
                            .map(org.eclipse.microprofile.metrics.annotation.ConcurrentGauge::name)
                            .orElse(""));
            final ConcurrentGauge gauge = registry.getConcurrentGauge(
                    new MetricID(name, extension.createTags(concurrentGauge == null ? new String[0] : concurrentGauge.tags())));
            if (gauge == null) {
                // fix: message said "No counter" (copy/paste from the counter interceptor)
                throw new IllegalStateException("No concurrent gauge with name [" + name + "] found in registry [" + registry + "]");
            }
            meta = new Meta(gauge);
            // keep the canonical cached value when another thread won the race
            final Meta existing = gauges.putIfAbsent(executable, meta);
            if (existing != null) {
                meta = existing;
            }
        }
        return meta;
    }

    /** Immutable holder for the resolved gauge. */
    private static final class Meta {
        private final ConcurrentGauge gauge;

        private Meta(final ConcurrentGauge gauge) {
            this.gauge = gauge;
        }
    }
}
| 8,497 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.cdi;
import static java.util.Optional.ofNullable;
import java.lang.reflect.Constructor;
import java.lang.reflect.Member;
import org.eclipse.microprofile.metrics.MetricRegistry;
/**
 * Helper computing the effective metric name from an annotation value, the
 * declaring class and the {@code absolute} flag, following the MicroProfile
 * Metrics naming rules.
 */
final class Names {
    private Names() {
        // utility class, no instances
    }

    /**
     * Resolves a metric name.
     *
     * @param declaring class declaring the member (used as the name prefix)
     * @param executable annotated member (method, field or constructor)
     * @param annotationName name from the annotation, may be {@code null} or empty
     * @param absolute when {@code true} the name is not prefixed by the class
     * @param prefix class-level annotation name, appended to the package when set
     */
    static String findName(final Class<?> declaring, final Member executable,
                           final String annotationName, final boolean absolute,
                           final String prefix) {
        final boolean hasExplicitName = annotationName != null && !annotationName.isEmpty();
        if (hasExplicitName) {
            return absolute ? annotationName : MetricRegistry.name(prefix(declaring, prefix), annotationName);
        }
        if (absolute) {
            return executable.getName();
        }
        // constructors report their fully-qualified name as member name ("bug in the JVM?"
        // per the original author), so use the simple class name for them
        final String memberName = Constructor.class.isInstance(executable)
                ? executable.getDeclaringClass().getSimpleName()
                : executable.getName();
        return MetricRegistry.name(prefix(declaring, prefix), memberName);
    }

    /** Computes the name prefix: the class name, or package + class-level name when set. */
    private static String prefix(final Class<?> declaring, final String prefix) {
        if (prefix.isEmpty()) {
            return declaring.getName();
        }
        return ofNullable(declaring.getPackage())
                .map(Package::getName)
                .filter(pkg -> !pkg.isEmpty())
                .map(pkg -> pkg + '.' + prefix)
                .orElse("");
    }
}
| 8,498 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.cdi;
import static java.util.Optional.ofNullable;
import java.io.Serializable;
import java.lang.reflect.Executable;
import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Stream;
import javax.annotation.Priority;
import javax.enterprise.inject.Intercepted;
import javax.enterprise.inject.spi.AnnotatedType;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.inject.Inject;
import javax.interceptor.AroundConstruct;
import javax.interceptor.AroundInvoke;
import javax.interceptor.Interceptor;
import javax.interceptor.InvocationContext;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.Tag;
import org.eclipse.microprofile.metrics.Timer;
import org.eclipse.microprofile.metrics.annotation.Timed;
@Timed
@Interceptor
@Priority(Interceptor.Priority.LIBRARY_BEFORE)
/**
 * Interceptor backing {@code @Timed}: wraps the intercepted method/constructor
 * invocation in the {@link Timer} registered by {@link MetricsExtension}.
 */
public class TimedInterceptor implements Serializable {
    @Inject
    private MetricRegistry registry;

    @Inject
    @Intercepted
    private Bean<?> bean;

    @Inject
    private BeanManager beanManager;

    @Inject
    private MetricsExtension extension;

    // transient: rebuilt lazily after deserialization of the interceptor instance
    private transient volatile ConcurrentMap<Executable, Timer> timers = new ConcurrentHashMap<>();

    @AroundConstruct
    public Object onConstructor(final InvocationContext context) throws Exception {
        return findTimer(context.getConstructor()).time(context::proceed);
    }

    @AroundInvoke
    public Object onMethod(final InvocationContext context) throws Exception {
        return findTimer(context.getMethod()).time(context::proceed);
    }

    /**
     * Resolves (and caches) the timer registered for the given executable.
     *
     * @throws IllegalStateException when no timer was registered under the resolved name
     */
    private Timer findTimer(final Executable executable) {
        ensureCacheExists();
        final Timer cached = timers.get(executable);
        if (cached != null) {
            return cached;
        }
        final AnnotatedType<?> annotatedType = beanManager.createAnnotatedType(bean.getBeanClass());
        final Timed timed = Stream.concat(annotatedType.getMethods().stream(), annotatedType.getConstructors().stream())
                .filter(callable -> callable.getJavaMember().equals(executable))
                .findFirst()
                .map(callable -> callable.getAnnotation(Timed.class))
                .orElse(null);
        // for members inherited from abstract parents, name against the concrete bean class
        final Class<?> declaring = Modifier.isAbstract(executable.getDeclaringClass().getModifiers())
                ? annotatedType.getJavaClass() : executable.getDeclaringClass();
        final String name = Names.findName(
                declaring, executable,
                timed == null ? null : timed.name(),
                timed != null && timed.absolute(),
                ofNullable(extension.getAnnotation(annotatedType, Timed.class)).map(Timed::name).orElse(""));
        final Timer timer = registry.getTimer(new MetricID(name, extension.createTags(timed == null ? new String[0] : timed.tags())));
        if (timer == null) {
            throw new IllegalStateException("No timer with name [" + name + "] found in registry [" + registry + "]");
        }
        timers.putIfAbsent(executable, timer);
        return timer;
    }

    /** Re-creates the transient cache when the instance was deserialized. */
    private void ensureCacheExists() {
        if (timers == null) {
            synchronized (this) {
                if (timers == null) {
                    timers = new ConcurrentHashMap<>();
                }
            }
        }
    }
}
| 8,499 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.