repo stringclasses 1k
values | file_url stringlengths 96 373 | file_path stringlengths 11 294 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 6
values | commit_sha stringclasses 1k
values | retrieved_at stringdate 2026-01-04 14:45:56 2026-01-04 18:30:23 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/cdh-hbase-examples/src/com/embracesource/edh/hbase/table/create/Configure.java | cdh-hbase-examples/src/com/embracesource/edh/hbase/table/create/Configure.java | package com.embracesource.edh.hbase.table.create;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
/**
* @description HBase相关配置参数
*/
public class Configure {
    public static final String FAMILY_NAME = "family";
    public static final Algorithm COMPRESS_TYPE = Algorithm.SNAPPY;
    // TTL (Time To Live): how long cell versions are retained; HBase default is forever.
    public static final boolean USE_TTL = false;
    // HBaseConfiguration.create() automatically reads hbase-site.xml and
    // hbase-default.xml from the classpath; if they are absent, properties must
    // be set manually through Configuration.set(...).
    private static Configuration _config = HBaseConfiguration.create();

    /**
     * Returns the shared HBase configuration instance.
     */
    public static Configuration getHBaseConfig() throws IOException {
        return _config;
    }

    public static void configHTable(HTableDescriptor ht) {
    }

    /**
     * Applies the standard column-family schema (HColumnDescriptor represents a
     * column family's schema): single version, row-level bloom filter, and the
     * configured compression codec.
     */
    public static void configColumnFamily(HColumnDescriptor desc) {
        desc.setMaxVersions(1);
        // Enable a ROW bloom filter to speed up random reads (disabled by default).
        desc.setBloomFilterType(BloomType.ROW);
        // Set the data compression codec (no compression by default).
        desc.setCompressionType(COMPRESS_TYPE);
    }

    /**
     * Builds a table descriptor with the default replication.
     * Short.MIN_VALUE is the sentinel meaning "do not set replication".
     */
    public static HTableDescriptor genHTableDescriptor(String tableName) {
        return genHTableDescriptor(tableName, Short.MIN_VALUE);
    }

    /**
     * Builds a table descriptor with one family ({@link #FAMILY_NAME}) and an
     * optional replication scope.
     *
     * @param tableName table name
     * @param replica   replication scope, or Short.MIN_VALUE to leave the default
     */
    public static HTableDescriptor genHTableDescriptor(String tableName, short replica) {
        HTableDescriptor ht = new HTableDescriptor(tableName);
        HColumnDescriptor desc = new HColumnDescriptor(FAMILY_NAME);
        if (replica != Short.MIN_VALUE) {
            desc.setReplication(replica);
            System.out.println("genHTableDescriptor(String,short):replica---"
                    + replica);
        }
        ht.addFamily(desc);
        return ht;
    }

    /**
     * Same as {@link #genHTableDescriptor(String, short)} but also configures
     * the blob-store flag on the family.
     *
     * @param lobenable whether to enable the blob store on the family
     */
    public static HTableDescriptor genHTableDescriptor(String tableName, short replica, boolean lobenable) {
        HTableDescriptor ht = new HTableDescriptor(tableName);
        HColumnDescriptor desc = new HColumnDescriptor(FAMILY_NAME);
        if (replica != Short.MIN_VALUE) {
            desc.setReplication(replica);
            System.out.println("genHTableDescriptor(String,short):replica---"
                    + replica);
        }
        // BUG FIX: honor the lobenable parameter instead of hard-coding true.
        desc.setBlbStoreEnabled(lobenable);
        ht.addFamily(desc);
        return ht;
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/cdh-hbase-examples/src/com/embracesource/edh/hbase/table/create/OperateTableUtil.java | cdh-hbase-examples/src/com/embracesource/edh/hbase/table/create/OperateTableUtil.java | package com.embracesource.edh.hbase.table.create;
import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
public class OperateTableUtil {
    // All DDL goes through HBaseAdmin, which manages table META information.
    // NOTE(review): if construction fails, admin stays null and every public
    // method below will throw NullPointerException — consider failing fast.
    private static HBaseAdmin admin = null;
    static {
        try {
            admin = new HBaseAdmin(Configure.getHBaseConfig());
        } catch (IOException e) {
            // MasterNotRunningException and ZooKeeperConnectionException are both
            // IOException subclasses, so this single handler replaces the three
            // identical printStackTrace() catch blocks of the original.
            e.printStackTrace();
        }
    }

    /**
     * Creates the table, dropping any pre-existing table with the same name first.
     *
     * @param tableName table name
     */
    public static void createTable(String tableName) {
        boolean result = false;
        try {
            // Drop the old table if present.
            removeTable(tableName);
            admin.createTable(genHTableDescriptor(tableName));
            result = admin.tableExists(tableName);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            System.out.println(tableName + "是否创建成功:" + result);
        }
    }

    /**
     * Deletes a table by name if it exists. HBase requires the table to be
     * disabled (taken offline) before deletion.
     *
     * @param tableName table name
     * @throws IOException if an admin operation fails
     */
    public static void removeTable(String tableName) throws IOException {
        if (admin.tableExists(tableName)) {
            // Disable first — deleting an enabled table is rejected by HBase.
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
            System.out.println("删除了已经存在的原表:" + tableName);
        }
    }

    /**
     * Builds a one-family table descriptor and applies the standard family
     * settings from {@link Configure}.
     */
    public static HTableDescriptor genHTableDescriptor(String tableName) {
        // HTableDescriptor is the table schema; HColumnDescriptor the family schema.
        HTableDescriptor ht = new HTableDescriptor(tableName);
        HColumnDescriptor desc = new HColumnDescriptor(Configure.FAMILY_NAME);
        Configure.configColumnFamily(desc);
        ht.addFamily(desc);
        return ht;
    }

    /**
     * Creates the table pre-split into initial regions defined by
     * {@link #genSplitKeys()}. createTable(desc, splitKeys) is synchronous and
     * creates splitKeys.length + 1 regions; avoid passing empty split keys.
     */
    public static void createTableWithSplitKeys(String tableName) {
        boolean result = false;
        try {
            removeTable(tableName);
            admin.createTable(genHTableDescriptor(tableName), genSplitKeys());
            result = admin.tableExists(tableName);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            System.out.println("createTableWithSplitKeys(String tableName):---" + result);
        }
    }

    /** Placeholder: no split keys yet, so tables get a single initial region. */
    public static byte[][] genSplitKeys() {
        return new byte[0][];
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/cdh-hbase-examples/src/com/embracesource/edh/hbase/table/create/TableBuilder.java | cdh-hbase-examples/src/com/embracesource/edh/hbase/table/create/TableBuilder.java | package com.embracesource.edh.hbase.table.create;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
public class TableBuilder {
    /**
     * Drops table "t" if present, then recreates it with a single family "f"
     * (max 1 version, ROW bloom filter) on the cluster at edh1:2181:/hbase.
     */
    public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        byte[] columnFamily = Bytes.toBytes("f");
        String tableName = "t";
        HBaseAdmin hba = null;
        try {
            // Point the configuration at the target cluster's ZooKeeper quorum.
            ZKUtil.applyClusterKeyToConf(conf, "edh1:2181:/hbase");
            hba = new HBaseAdmin(conf);
            if (hba.tableExists(tableName)) {
                // A table must be disabled before it can be deleted.
                hba.disableTable(tableName);
                hba.deleteTable(tableName);
            }
            HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
            HColumnDescriptor columnDescriptor = new HColumnDescriptor(columnFamily);
            columnDescriptor.setMaxVersions(1);
            columnDescriptor.setBloomFilterType(BloomType.ROW);
            tableDescriptor.addFamily(columnDescriptor);
            hba.createTable(tableDescriptor);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: close the admin connection even when an exception occurs;
            // the original only closed it on the success path.
            if (hba != null) {
                try {
                    hba.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
            }
        }
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/cdh-hbase-examples/src/com/embracesource/edh/hbase/table/create/TestCreateTable.java | cdh-hbase-examples/src/com/embracesource/edh/hbase/table/create/TestCreateTable.java | package com.embracesource.edh.hbase.table.create;
import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;
public class TestCreateTable {
    // Shared admin handle created in main() and closed in its finally block.
    private static HBaseAdmin hba;

    /**
     * Exercises the table-creation variants (plain, pre-split, start/end key)
     * and then the add/delete-column-family behaviour on enabled vs. disabled
     * tables. Results are reported via stdout only.
     */
    public static void main(String[] args) {
        try {
            boolean isPartitionEnabled = false;
            hba = new HBaseAdmin(Configure.getHBaseConfig());
            createTable("Test_Table");
            createTableWithSplitKeys("Test_Table_SpilitKey");
            createTableWithStartAndEndKey("Test_Table_StartKey_EndKey_Num");
            // Async methods
            // Not finished now
            // createPartitionTableAsync("Test_Table_Async_Locator", new
            // SuffixClusterLocator());
            // createPartitionTableAsyncWithSpiltKeys("Test_Table_Async_SplitKeys_Locator",
            // new SuffixClusterLocator());
            try {
                tableExistFamily(hba, "Test_Table");
                if (isPartitionEnabled == true) {
                    tableExistFamily(hba, "Test_Table_Locator");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        } catch (MasterNotRunningException e) {
            e.printStackTrace();
        } catch (ZooKeeperConnectionException e) {
            e.printStackTrace();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } finally {
            if (hba != null) {
                try {
                    hba.close();
                } catch (IOException e) {
                    // best-effort close; nothing sensible to do on failure
                }
            }
        }
    }

    /**
     * Checks that adding/deleting a column family throws
     * TableNotDisabledException while the table is enabled, and succeeds once
     * the table has been disabled.
     *
     * NOTE(review): the getException flags are set but never asserted or
     * printed, so the outcome is only visible via the before/after printouts.
     */
    public static void tableExistFamily(HBaseAdmin hba, String tableName) throws IOException {
        String testColumn1 = "test1";
        String testColumn2 = "test2";
        boolean before = false;
        boolean after = false;
        before = existsFamilyName(hba, tableName, testColumn1);
        boolean getException = false;
        try {
            // Expected to fail: schema changes require a disabled table.
            hba.addColumn(tableName, new HColumnDescriptor(testColumn1));
        } catch (TableNotDisabledException e) {
            getException = true;
        } finally {
        }
        after = existsFamilyName(hba, tableName, testColumn1);
        getException = false;
        try {
            // Expected to fail for the same reason.
            hba.deleteColumn(tableName, testColumn1);
        } catch (TableNotDisabledException e) {
            getException = true;
        } finally {
        }
        after = existsFamilyName(hba, tableName, testColumn1);
        // Disable the table; add/delete of a family should now succeed.
        hba.disableTable(tableName);
        before = existsFamilyName(hba, tableName, testColumn2);
        hba.addColumn(tableName, new HColumnDescriptor(testColumn2));
        after = existsFamilyName(hba, tableName, testColumn2);
        System.out.println(before + " : " + after);
        before = after;
        hba.deleteColumn(tableName, testColumn2);
        after = existsFamilyName(hba, tableName, testColumn2);
        System.out.println(before + " : " + after);
        hba.enableTable(tableName);
    }

    /**
     * Returns true when table {@code tableName} currently has a column family
     * named {@code columnName}. Scans the full listTables() output to find it.
     */
    public static boolean existsFamilyName(HBaseAdmin hba, String tableName, String columnName) {
        HTableDescriptor[] list;
        try {
            list = hba.listTables();
            for (int i = 0; i < list.length; i++) {
                if (list[i].getNameAsString().equals(tableName))
                    for (HColumnDescriptor hc : list[i].getColumnFamilies()) {
                        if (hc.getNameAsString().equals(columnName))
                            return true;
                    }
            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return false;
    }

    /** Builds a one-family descriptor using the settings from {@link Configure}. */
    public static HTableDescriptor genHTableDescriptor(String tableName) {
        HTableDescriptor ht = new HTableDescriptor(tableName);
        HColumnDescriptor desc = new HColumnDescriptor(Configure.FAMILY_NAME);
        Configure.configColumnFamily(desc);
        ht.addFamily(desc);
        return ht;
    }

    /** No split keys: tables are created with a single initial region. */
    public static byte[][] genSplitKeys() {
        return new byte[0][];
    }

    /** Disables and deletes the table if it exists. */
    private static void removeTable(String tableName) throws IOException {
        if (hba.tableExists(tableName)) {
            hba.disableTable(tableName);
            hba.deleteTable(tableName);
        }
    }

    /** Recreates the table from scratch. NOTE(review): result is never used. */
    public static void createTable(String tableName) {
        boolean result = false;
        try {
            removeTable(tableName);
            hba.createTable(genHTableDescriptor(tableName));
            result = hba.tableExists(tableName);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
        }
    }

    /** Recreates the table pre-split by {@link #genSplitKeys()}. */
    public static void createTableWithSplitKeys(String tableName) {
        boolean result = false;
        try {
            removeTable(tableName);
            hba.createTable(genHTableDescriptor(tableName), genSplitKeys());
            result = hba.tableExists(tableName);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Recreates the table with 10 regions split evenly between "123" and "456". */
    public static void createTableWithStartAndEndKey(String tableName) {
        boolean result = false;
        try {
            removeTable(tableName);
            hba.createTable(genHTableDescriptor(tableName), Bytes.toBytes("123"),
                    Bytes.toBytes("456"), 10);
            result = hba.tableExists(tableName);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/hbase-importTsv/src/com/asp/tranlog/CreateNewHbase.java | hbase-importTsv/src/com/asp/tranlog/CreateNewHbase.java | package com.asp.tranlog;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;
public class CreateNewHbase {
    public static Configuration conf = Global.configuration;

    /**
     * Creates table {@code tabName} with the column families f1, f2 and f3,
     * unless it already exists (in which case a message is printed and nothing
     * is changed).
     *
     * @param tabName table name
     * @throws Exception if any HBase admin operation fails
     */
    public static void createTable(String tabName) throws Exception {
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
            if (admin.tableExists(tabName)) {
                System.out.println(tabName + " exists!");
                return;
            }
            HTableDescriptor table = new HTableDescriptor(tabName);
            table.addFamily(new HColumnDescriptor("f1"));
            table.addFamily(new HColumnDescriptor("f2"));
            table.addFamily(new HColumnDescriptor("f3"));
            admin.createTable(table);
        } finally {
            // BUG FIX: the admin connection used to leak whenever tableExists()
            // or createTable() threw; closing in finally covers every path.
            // (Also dropped the dead no-op call table.getFamily(Bytes.toBytes("f1")).)
            admin.close();
        }
    }

    public static void main(String[] args) throws Exception {
        createTable("test");
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/hbase-importTsv/src/com/asp/tranlog/TsvImporterMapper.java | hbase-importTsv/src/com/asp/tranlog/TsvImporterMapper.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asp.tranlog;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Mapper;
import com.asp.tranlog.ImportTsv.TsvParser.BadTsvLineException;
/**
* Write table content out to files in hdfs.
*/
public class TsvImporterMapper extends
        Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
    private static final Log LOG = LogFactory.getLog(TsvImporterMapper.class);
    /** Timestamp for all inserted rows */
    private long ts;
    /** Column seperator */
    private String separator;
    /** Should skip bad lines */
    private boolean skipBadLines;
    // MapReduce counter recording how many input lines failed to parse.
    private Counter badLineCount;
    private int[] keyColIndex = null;// The column index that will be used to
    // compose to a row key like
    // aaaa+bbb+ccc
    // Fixed width (in characters) of each row-key component; 0 means no padding.
    private int[] keyColLen = null;
    // Per-column type codes (ImportTsv.COL_TYPE_*).
    private byte[] columnTypes = null;
    private char[] colDatetimeFormater = null;// For columns with type
    // 'datetime', the formater will
    // be saved in this array.
    // Charset used to re-encode string row-key columns (ImportTsv.CHARSET_CONF_KEY).
    private String charset;
    // Separator placed between row-key components; may be the empty string.
    private String hbase_rowkey_separator;
    // Candidate date formats tried in order when parsing datetime columns.
    // NOTE(review): SimpleDateFormat is not thread-safe and these are shared
    // statics — safe only while each mapper runs single-threaded; confirm.
    public final static SimpleDateFormat[] datetimeParsers = {
            new SimpleDateFormat("MMM dd yyyy hh:mm:ss:SSSaa",
                    new java.util.Locale("en")),// Dec 7 2012 3:35:30:453PM
            new SimpleDateFormat("yyyyMMdd", new java.util.Locale("en")) };
    private ImportTsv.TsvParser parser;

    /** Returns the timestamp applied to every emitted KeyValue. */
    public long getTs() {
        return ts;
    }

    /** Whether malformed input lines are skipped instead of failing the job. */
    public boolean getSkipBadLines() {
        return skipBadLines;
    }

    /** Counter of lines rejected so far. */
    public Counter getBadLineCount() {
        return badLineCount;
    }

    /** Adds {@code count} to the bad-line counter. */
    public void incrementBadLineCount(int count) {
        this.badLineCount.increment(count);
    }

    /**
     * Handles initializing this class with objects specific to it (i.e., the
     * parser). Common initialization that might be leveraged by a subsclass is
     * done in <code>doSetup</code>. Hence a subclass may choose to override
     * this method and call <code>doSetup</code> as well before handling it's
     * own custom params.
     *
     * @param context
     */
    @Override
    protected void setup(Context context) {
        doSetup(context);
        Configuration conf = context.getConfiguration();
        charset = conf.get(ImportTsv.CHARSET_CONF_KEY);
        parser = new ImportTsv.TsvParser(conf.get(ImportTsv.COLUMNS_CONF_KEY),
                conf.getStrings(ImportTsv.KEYCOLUMNS_CONF_KEY), separator);
        keyColIndex = parser.getRowKeyColumnIndex();
        keyColLen = parser.getRowKeyColumnLen();
        if (keyColIndex == null) {
            // At least one row-key column is mandatory; fail the task early.
            throw new RuntimeException("No row key column specified");
        }
        columnTypes = parser.getColType();
        if (columnTypes != null) {
            // Start every datetime column at formatter index 0; parseTimestamp
            // remembers the last format that worked per column.
            colDatetimeFormater = new char[columnTypes.length];
            for (int i = 0; i < columnTypes.length; i++)
                colDatetimeFormater[i] = 0;
        }
    }

    /**
     * Handles common parameter initialization that a subclass might want to
     * leverage.
     *
     * @param context
     */
    protected void doSetup(Context context) {
        Configuration conf = context.getConfiguration();
        // If a custom separator has been used,
        // decode it back from Base64 encoding.
        separator = conf.get(ImportTsv.SEPARATOR_CONF_KEY);
        if (separator == null) {
            separator = ImportTsv.DEFAULT_SEPARATOR;
        } else {
            separator = new String(Base64.decode(separator));
        }
        // The row-key separator is also Base64-encoded; blank means "none".
        hbase_rowkey_separator = conf.get(ImportTsv.SEPARATOR_CONF_ROWKEY);
        if (hbase_rowkey_separator == null
                || hbase_rowkey_separator.trim().length() == 0) {
            hbase_rowkey_separator = "";
        } else {
            hbase_rowkey_separator = new String(
                    Base64.decode(hbase_rowkey_separator));
        }
        // Use the configured timestamp, or job start time as a fallback.
        ts = conf.getLong(ImportTsv.TIMESTAMP_CONF_KEY,
                System.currentTimeMillis());
        skipBadLines = context.getConfiguration().getBoolean(
                ImportTsv.SKIP_LINES_CONF_KEY, true);
        badLineCount = context.getCounter("ImportTsv", "Bad Lines");
    }

    /**
     * To find a date parser from the datetimeParsers array
     *
     * First tries the formatter remembered for this column, then falls back to
     * trying every known format; the first one that succeeds is cached in
     * colDatetimeFormater for the next value of the same column.
     *
     * @return the parsed Date (never null)
     * @throws ParseException if no known format matches
     */
    protected Date parseTimestamp(byte[] byteVal, int colIdx)
            throws ParseException {
        Date rtnDate = null;
        String dateString = Bytes.toString(byteVal);
        if (colDatetimeFormater != null && colDatetimeFormater.length > colIdx) {
            int fmtrIdx = colDatetimeFormater[colIdx];
            try {
                rtnDate = datetimeParsers[fmtrIdx].parse(dateString);
            } catch (java.text.ParseException e) {
                // Ignored: fall through and try every known format below.
            }
            if (rtnDate == null) {
                for (int i = 0; i < datetimeParsers.length; i++) {
                    try {
                        rtnDate = datetimeParsers[i].parse(dateString);
                    } catch (java.text.ParseException e) {
                        // Ignored: try the next candidate format.
                    }
                    if (rtnDate != null) {
                        // Remember the format that worked for this column.
                        colDatetimeFormater[colIdx] = (char) i;
                        break;
                    }
                }
            }
        }
        if (rtnDate == null) {
            LOG.error("No supported data format found: " + dateString);
            throw new ParseException("Failed to parse date: " + dateString, 0);
        }
        return rtnDate;
    }

    /**
     * Extract byte array for column specified by colIdx.
     *
     * Returns a copy of the column's bytes from the raw line, or null when
     * colIdx is out of range.
     *
     * @param lineBytes
     * @param parsed
     * @param colIdx
     * @return
     */
    protected byte[] getInputColBytes(byte[] lineBytes,
            ImportTsv.TsvParser.ParsedLine parsed, int colIdx) {
        if (colIdx >= columnTypes.length)
            return null;
        int colOffset = parsed.getColumnOffset(colIdx);
        int colLen = parsed.getColumnLength(colIdx);
        byte[] colBytes = new byte[colLen];
        Bytes.putBytes(colBytes, 0, lineBytes, colOffset, colLen);
        return colBytes;
    }

    /**
     * To create rowkey byte array, the rule is like this: row key can be
     * composed by several columns change every columns values to String, if
     * column type is date, change to long first if column values are "kv1 ",
     * "kv2", " kv3", ... then the row key string will be "kv1 +kv2+ kv3",
     * that means the space char will be kept
     *
     * @param lineBytes
     * @param parsed
     * @return
     * @throws BadTsvLineException
     */
    protected byte[] createRowkeyByteArray(byte[] lineBytes,
            ImportTsv.TsvParser.ParsedLine parsed) throws BadTsvLineException {
        try {
            byte[] colBytes = null;
            Date tmpDate = null;
            StringBuffer sb = new StringBuffer();
            for (int i = 0; i < keyColIndex.length; i++) {
                // Insert the configured separator between (not before) components.
                if (i > 0 && hbase_rowkey_separator.length() > 0)
                    sb.append(hbase_rowkey_separator);
                colBytes = getInputColBytes(lineBytes, parsed, keyColIndex[i]);
                if (colBytes == null)
                    throw new BadTsvLineException(
                            "Failed to get column bytes for " + keyColIndex[i]);
                String rowCol;
                if (columnTypes[keyColIndex[i]] == ImportTsv.COL_TYPE_DATETIME) {
                    // Datetime components become epoch-millis strings.
                    tmpDate = parseTimestamp(colBytes, keyColIndex[i]);
                    rowCol = Long.toString(tmpDate.getTime());
                    sb.append(rowCol);
                } else if (columnTypes[keyColIndex[i]] == ImportTsv.COL_TYPE_STRING) {
                    // String lineStr = new String(value.getBytes(), 0,
                    // value.getLength(), "gb18030");
                    // byte[] lineBytes = new Text(lineStr).getBytes();
                    if (StringUtils.isEmpty(charset))
                        charset = HConstants.UTF8_ENCODING;
                    // Re-encode via the configured charset into UTF-8 Text bytes.
                    String lineStr = new String(colBytes, charset);
                    colBytes = new Text(lineStr).getBytes();
                    rowCol = Bytes.toString(colBytes);
                    // if original string len < specified string len, then use
                    // substring, else using space to right pad.
                    if (keyColLen[i] != 0 && rowCol.length() > keyColLen[i])
                        sb.append(rowCol.substring(0, keyColLen[i]));
                    else
                        sb.append(StringUtils.rightPad(rowCol, keyColLen[i]));
                } else if (columnTypes[keyColIndex[i]] == ImportTsv.COL_TYPE_INT) {
                    // Numeric key parts are zero-padded on the left to keyColLen[i].
                    int intVal = Integer.parseInt(Bytes.toString(colBytes));
                    rowCol = Integer.toString(intVal);
                    sb.append(StringUtils.leftPad(rowCol, keyColLen[i], '0'));
                } else if (columnTypes[keyColIndex[i]] == ImportTsv.COL_TYPE_DOUBLE) {
                    double dbval = Double.parseDouble(Bytes.toString(colBytes));
                    rowCol = Double.toString(dbval);
                    sb.append(rowCol);
                } else if (columnTypes[keyColIndex[i]] == ImportTsv.COL_TYPE_LONG) {
                    long longVal = Long.parseLong(Bytes.toString(colBytes));
                    rowCol = Long.toString(longVal);
                    sb.append(StringUtils.leftPad(rowCol, keyColLen[i], '0'));
                } else {
                    // Unknown type: treat like a string component.
                    rowCol = Bytes.toString(colBytes);
                    // if original string len < specified string len, then use
                    // substring, else using space to right pad.
                    if (keyColLen[i] != 0 && rowCol.length() > keyColLen[i])
                        sb.append(rowCol.substring(0, keyColLen[i]));
                    else
                        sb.append(StringUtils.rightPad(rowCol, keyColLen[i]));
                }
            }
            return sb.toString().getBytes();
        } catch (Exception e) {
            // Any parse/encoding failure in the key makes the whole line bad.
            throw new BadTsvLineException(e.getMessage());
        }
    }

    /**
     * Converts one column's raw bytes to its typed HBase byte representation
     * (long for datetimes, native encodings for int/double/long, raw bytes for
     * everything else).
     *
     * @param lineBytes
     * @param parsed
     * @param colIdx
     * @return
     */
    protected byte[] convertColBytes(byte[] lineBytes,
            ImportTsv.TsvParser.ParsedLine parsed, int colIdx)
            throws BadTsvLineException {
        byte[] rtn = null;
        byte[] srcBytes = getInputColBytes(lineBytes, parsed, colIdx);
        try {
            if (columnTypes[colIdx] == ImportTsv.COL_TYPE_DATETIME) {
                Date tmpDate = parseTimestamp(srcBytes, colIdx);
                // NOTE(review): stray empty statement below, kept as-is.
                ;
                rtn = Bytes.toBytes(tmpDate.getTime());
            } else if (columnTypes[colIdx] == ImportTsv.COL_TYPE_INT) {
                int intVal = Integer.parseInt(Bytes.toString(srcBytes));
                rtn = Bytes.toBytes(intVal);
            } else if (columnTypes[colIdx] == ImportTsv.COL_TYPE_DOUBLE) {
                double dbval = Double.parseDouble(Bytes.toString(srcBytes));
                rtn = Bytes.toBytes(dbval);
            } else if (columnTypes[colIdx] == ImportTsv.COL_TYPE_LONG) {
                long longVal = Long.parseLong(Bytes.toString(srcBytes));
                rtn = Bytes.toBytes(longVal);
            } else {
                // Strings (and unknown types) pass through unchanged.
                rtn = srcBytes;
            }
        } catch (Exception e) {
            throw new BadTsvLineException(e.getMessage());
        }
        return rtn;
    }

    /**
     * Convert a line of TSV text into an HBase table row.
     */
    @Override
    public void map(LongWritable offset, Text value, Context context)
            throws IOException {
        byte[] lineBytes = value.getBytes();
        // String lineStr = new String(value.getBytes(), 0, value.getLength(),
        // "gb18030");
        // byte[] lineBytes = new Text(lineStr).getBytes();
        int i = 0;
        try {
            ImportTsv.TsvParser.ParsedLine parsed = parser.parse(lineBytes,
                    value.getLength());
            // ImportTsv.TsvParser.ParsedLine parsed = parser.parse(
            // lineBytes, Text.utf8Length(lineStr));
            byte[] rowKeyBytes = createRowkeyByteArray(lineBytes, parsed);
            ImmutableBytesWritable rowKey = new ImmutableBytesWritable(
                    rowKeyBytes);
            Put put = new Put(rowKeyBytes);
            // WAL disabled for bulk-load speed; rows are lost on RS crash.
            put.setWriteToWAL(false);
            for (i = 0; i < parsed.getColumnCount(); i++) {
                KeyValue kv = null;
                if (columnTypes[i] == ImportTsv.COL_TYPE_STRING) {
                    // Strings: build the KeyValue directly over the line buffer
                    // to avoid copying the column bytes.
                    kv = new KeyValue(rowKeyBytes, parser.getFamily(i),
                            parser.getQualifier(i), 0,
                            parser.getQualifier(i).length, ts,
                            KeyValue.Type.Put, lineBytes,
                            parsed.getColumnOffset(i),
                            parsed.getColumnLength(i));
                } else {
                    // Other types go through per-type conversion first.
                    byte[] colBytes = convertColBytes(lineBytes, parsed, i);
                    if (colBytes == null)
                        throw new ImportTsv.TsvParser.BadTsvLineException(
                                "Failed to get bytes for column " + i);
                    kv = new KeyValue(rowKeyBytes, parser.getFamily(i),
                            parser.getQualifier(i), ts, colBytes);
                }
                if (kv == null)
                    throw new ImportTsv.TsvParser.BadTsvLineException(
                            "Failed to get bytes for column " + i);
                put.add(kv);
            }
            context.write(rowKey, put);
        } catch (ImportTsv.TsvParser.BadTsvLineException badLine) {
            // Bad line: either count-and-skip, or fail the task, per config.
            if (skipBadLines) {
                System.err.println("Bad line: "
                        + new String(lineBytes, "gb18030") + ":" + i + "\n");
                LOG.error("Bad line: " + new String(lineBytes, "gb18030") + ","
                        + i);
                incrementBadLineCount(1);
                return;
            } else {
                throw new IOException(badLine);
            }
        } catch (IllegalArgumentException e) {
            if (skipBadLines) {
                System.err.println("Bad line: "
                        + new String(lineBytes, "gb18030") + ":" + i + "\n");
                LOG.error("Bad line: " + new String(lineBytes, "gb18030") + ","
                        + i);
                incrementBadLineCount(1);
                return;
            } else {
                throw new IOException(e);
            }
        } catch (InterruptedException e) {
            // NOTE(review): interrupt status is not restored here — consider
            // Thread.currentThread().interrupt().
            e.printStackTrace();
        }
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/hbase-importTsv/src/com/asp/tranlog/CreateHbaseTable.java | hbase-importTsv/src/com/asp/tranlog/CreateHbaseTable.java | package com.asp.tranlog;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
public class CreateHbaseTable {
    public static Configuration configuration = Global.configuration;

    /**
     * Drops {@code tableName} if it exists, then recreates it with one column
     * family per entry of {@code colNames}. When {@code colNames} is empty the
     * table is only dropped (a message is printed and nothing is created).
     *
     * @param tableName table to (re)create
     * @param colNames  column family names, one family per entry
     */
    public static void createTable(String tableName, String[] colNames) {
        System.out.println("start create table:" + tableName + "......");
        HBaseAdmin hBaseAdmin = null;
        try {
            hBaseAdmin = new HBaseAdmin(configuration);
            if (hBaseAdmin.tableExists(tableName)) {
                // HBase requires disabling a table before it can be deleted.
                hBaseAdmin.disableTable(tableName);
                hBaseAdmin.deleteTable(tableName);
                System.out.println(tableName + " is exist,delete....");
            }
            HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
            if (colNames.length == 0) {
                System.out.println("colNames's length is 0");
            } else {
                for (String colName : colNames) {
                    tableDescriptor.addFamily(new HColumnDescriptor(colName));
                }
                hBaseAdmin.createTable(tableDescriptor);
            }
        } catch (IOException e) {
            // MasterNotRunningException and ZooKeeperConnectionException are
            // IOException subclasses; one handler covers all three original catches.
            e.printStackTrace();
        } finally {
            // BUG FIX: hBaseAdmin is null when the constructor throws; the old
            // finally block would then raise a NullPointerException that masked
            // the original failure.
            if (hBaseAdmin != null) {
                try {
                    hBaseAdmin.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        System.out.println("end create table ......");
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/hbase-importTsv/src/com/asp/tranlog/Global.java | hbase-importTsv/src/com/asp/tranlog/Global.java | package com.asp.tranlog;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
public class Global {
    /** Fully-qualified class name of the Hive JDBC driver. */
    public final static String DRIVERNAME = "org.apache.hadoop.hive.jdbc.HiveDriver";
    /** Shared HBase configuration, built once at class-load time. */
    public final static Configuration configuration = HBaseConfiguration.create();
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/hbase-importTsv/src/com/asp/tranlog/ImportTsv.java | hbase-importTsv/src/com/asp/tranlog/ImportTsv.java | /**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asp.tranlog;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
import org.apache.hadoop.hbase.mapreduce.PutSortReducer;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.hadoop.mapreduce.LzoTextInputFormat;
/**
* Tool to import data from a TSV file.
*
* This tool is rather simplistic - it doesn't do any quoting or escaping, but
* is useful for many data loads.
*
* @see ImportTsv#usage(String)
*/
/**
 * Tool to import data from a TSV file into an HBase table, either writing
 * directly to the table or generating HFiles for a later bulk load.
 *
 * This tool is rather simplistic - it doesn't do any quoting or escaping, but
 * is useful for many data loads.
 *
 * @see ImportTsv#usage(String)
 */
public class ImportTsv {
    final static Logger LOG = LoggerFactory.getLogger(ImportTsv.class);
    final static String PRE = ">>>>>>>>>>>>>>>";
    final static String NAME = "importtsv";

    // Configuration keys understood by this tool; set them with -D on the command line.
    final static String MAPPER_CONF_KEY = "importtsv.mapper.class";
    final static String SKIP_LINES_CONF_KEY = "importtsv.skip.bad.lines";
    final static String BULK_OUTPUT_CONF_KEY = "importtsv.bulk.output";
    final static String COLUMNS_CONF_KEY = "importtsv.columns";
    final static String KEYCOLUMNS_CONF_KEY = "importtsv.key.columns";
    final static String SEPARATOR_CONF_KEY = "importtsv.separator";
    final static String TIMESTAMP_CONF_KEY = "importtsv.timestamp";
    final static String INPUT_LZO_KEY = "importtsv.input.codec";
    final static String DEFAULT_SEPARATOR = "\t";
    final static String SEPARATOR_CONF_ROWKEY = "importtsv.separator.rowkey";
    final static Class<?> DEFAULT_MAPPER = TsvImporterMapper.class;
    final static String CHARSET_CONF_KEY = "importtsv.charset.rowkey";

    // Type tags for source columns; a "family:qualifier:type" column spec
    // resolves to one of these (see TsvParser#parseColType).
    public final static byte COL_TYPE_STRING = 0;
    public final static byte COL_TYPE_INT = 1;
    public final static byte COL_TYPE_LONG = 2;
    public final static byte COL_TYPE_DOUBLE = 3;
    // A datetime source value is parsed into a long before it is stored.
    public final static byte COL_TYPE_DATETIME = 4;

    /**
     * Splits a single TSV line into column offsets and holds the per-column
     * family/qualifier/type mapping plus the row-key column definition.
     */
    static class TsvParser {
        /**
         * Column families and qualifiers mapped to the TSV columns
         */
        private final byte[][] families;
        private final byte[][] qualifiers;
        // Per-column type tag: one of the COL_TYPE_* constants above.
        private final byte[] colType;
        // Indexes of the TSV columns that make up the row key (null when unset).
        private int[] keyColIndex = null;
        // Fixed length of each row-key component; 0 means "length not specified".
        private int[] keyColLen = null;
        private final byte separatorByte;

        public static String ROWKEY_COLUMN_SPEC = "HBASE_ROW_KEY";

        /**
         * @param columnsSpecification
         *            comma-separated list of columns to parse out. Each entry
         *            is "family", "family:qualifier" or
         *            "family:qualifier:type".
         * @param keyColumns
         *            row-key component definitions of the form "columnIndex"
         *            or "columnIndex:length"; may be null.
         * @param separatorStr
         *            field separator; must encode to a single byte.
         */
        public TsvParser(String columnsSpecification, String[] keyColumns,
                String separatorStr) {
            // Configure separator
            byte[] separator = Bytes.toBytes(separatorStr);
            Preconditions.checkArgument(separator.length == 1,
                    "TsvParser only supports single-byte separators");
            separatorByte = separator[0];
            // Configure columns
            ArrayList<String> columnStrings = Lists.newArrayList(Splitter
                    .on(',').trimResults().split(columnsSpecification));
            families = new byte[columnStrings.size()][];
            qualifiers = new byte[columnStrings.size()][];
            colType = new byte[columnStrings.size()];
            for (int i = 0; i < columnStrings.size(); i++) {
                String str = columnStrings.get(i);
                String[] parts = str.split(":", 3);
                if (parts.length == 1) {
                    // Bare family name: empty qualifier, string type.
                    families[i] = str.getBytes();
                    qualifiers[i] = HConstants.EMPTY_BYTE_ARRAY;
                    colType[i] = COL_TYPE_STRING;
                } else {
                    families[i] = parts[0].getBytes();
                    qualifiers[i] = parts[1].getBytes();
                    if (parts.length > 2) {
                        colType[i] = parseColType(parts[2]);
                    } else {
                        colType[i] = COL_TYPE_STRING;
                    }
                }
            }
            if (keyColumns != null) {
                keyColIndex = new int[keyColumns.length];
                keyColLen = new int[keyColumns.length];
                for (int i = 0; i < keyColumns.length; i++) {
                    String[] strKdef = keyColumns[i].split(":", 2);
                    keyColIndex[i] = Integer.parseInt(strKdef[0]);
                    // An out-of-range key column index silently falls back to
                    // column 0 (pre-existing behavior, kept for compatibility).
                    if (keyColIndex[i] >= qualifiers.length)
                        keyColIndex[i] = 0;
                    if (strKdef.length > 1)
                        keyColLen[i] = Integer.parseInt(strKdef[1]);
                    else
                        keyColLen[i] = 0; // 0 means the length is unspecified
                }
            }
        }

        /** Maps a textual type name to its COL_TYPE_* tag; unknown names are treated as string. */
        public byte parseColType(String strCT) {
            if (strCT.equalsIgnoreCase("int"))
                return COL_TYPE_INT;
            else if (strCT.equalsIgnoreCase("string"))
                return COL_TYPE_STRING;
            else if (strCT.equalsIgnoreCase("long"))
                return COL_TYPE_LONG;
            else if (strCT.equalsIgnoreCase("datetime"))
                return COL_TYPE_DATETIME;
            else if (strCT.equalsIgnoreCase("double"))
                return COL_TYPE_DOUBLE;
            else
                return COL_TYPE_STRING;
        }

        public int[] getRowKeyColumnIndex() {
            return keyColIndex;
        }

        public int[] getRowKeyColumnLen() {
            return keyColLen;
        }

        public byte[] getFamily(int idx) {
            return families[idx];
        }

        public byte[] getQualifier(int idx) {
            return qualifiers[idx];
        }

        public byte[] getColType() {
            return colType;
        }

        /**
         * Locates the separator offsets of one input line.
         *
         * @param lineBytes raw bytes of the line
         * @param length    number of valid bytes in {@code lineBytes}
         * @throws BadTsvLineException when the line has no separator at all
         *         or more fields than configured columns
         */
        public ParsedLine parse(byte[] lineBytes, int length)
                throws BadTsvLineException {
            // Enumerate separator offsets
            ArrayList<Integer> tabOffsets = new ArrayList<Integer>(
                    families.length);
            for (int i = 0; i < length; i++) {
                if (lineBytes[i] == separatorByte) {
                    tabOffsets.add(i);
                }
            }
            if (tabOffsets.isEmpty()) {
                throw new BadTsvLineException("No delimiter");
            }
            // Sentinel end offset so the last column has a terminator too.
            tabOffsets.add(length);
            if (tabOffsets.size() > families.length) {
                throw new BadTsvLineException("Excessive columns");
            }
            return new ParsedLine(tabOffsets, lineBytes);
        }

        /** View over one parsed line: column offsets/lengths into the raw bytes. */
        class ParsedLine {
            private final ArrayList<Integer> tabOffsets;
            private byte[] lineBytes;

            ParsedLine(ArrayList<Integer> tabOffsets, byte[] lineBytes) {
                this.tabOffsets = tabOffsets;
                this.lineBytes = lineBytes;
            }

            /** Start offset of column {@code idx} (0 for the first column). */
            public int getColumnOffset(int idx) {
                if (idx > 0)
                    return tabOffsets.get(idx - 1) + 1;
                else
                    return 0;
            }

            public int getColumnLength(int idx) {
                return tabOffsets.get(idx) - getColumnOffset(idx);
            }

            public int getColumnCount() {
                return tabOffsets.size();
            }

            public byte[] getLineBytes() {
                return lineBytes;
            }
        }

        /** Raised for input lines that cannot be split into the configured columns. */
        public static class BadTsvLineException extends Exception {
            public BadTsvLineException(String err) {
                super(err);
            }

            private static final long serialVersionUID = 1L;
        }
    }

    /**
     * Sets up the actual job.
     *
     * @param conf
     *            The current configuration.
     * @param args
     *            The command line parameters: {tablename} {inputdir}.
     * @return The newly created job.
     * @throws IOException
     *             When setting up the job fails.
     * @throws ClassNotFoundException
     *             When the configured mapper class cannot be loaded.
     */
    public static Job createSubmittableJob(Configuration conf, String[] args)
            throws IOException, ClassNotFoundException {
        // Support non-XML supported characters
        // by re-encoding the passed separator as a Base64 string.
        String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
        if (actualSeparator != null) {
            // Base64.encodeBytes already returns a String; no extra copy needed.
            conf.set(SEPARATOR_CONF_KEY,
                    Base64.encodeBytes(actualSeparator.getBytes()));
        }
        // See if a non-default Mapper was set
        String mapperClassName = conf.get(MAPPER_CONF_KEY);
        Class mapperClass = mapperClassName != null ? Class
                .forName(mapperClassName) : DEFAULT_MAPPER;
        String tableName = args[0];
        Path inputDir = new Path(args[1]);
        Job job = new Job(conf, NAME + "_" + tableName);
        job.setJarByClass(mapperClass);
        FileInputFormat.setInputPaths(job, inputDir);
        String inputCodec = conf.get(INPUT_LZO_KEY);
        if (inputCodec == null) {
            // Plain text input, capped at 64MB per split.
            FileInputFormat.setMaxInputSplitSize(job, 67108864l);
            job.setInputFormatClass(TextInputFormat.class);
        } else {
            if (inputCodec.equalsIgnoreCase("lzo"))
                job.setInputFormatClass(LzoTextInputFormat.class);
            else {
                usage("not supported compression codec!");
                System.exit(-1);
            }
        }
        job.setMapperClass(mapperClass);
        String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
        if (hfileOutPath != null) {
            // Bulk-load mode: emit HFiles partitioned to match the target table.
            HTable table = new HTable(conf, tableName);
            job.setReducerClass(PutSortReducer.class);
            Path outputDir = new Path(hfileOutPath);
            FileOutputFormat.setOutputPath(job, outputDir);
            job.setMapOutputKeyClass(ImmutableBytesWritable.class);
            job.setMapOutputValueClass(Put.class);
            HFileOutputFormat.configureIncrementalLoad(job, table);
        } else {
            // No reducers. Just write straight to table. Call
            // initTableReducerJob to set up the TableOutputFormat.
            TableMapReduceUtil.initTableReducerJob(tableName, null, job);
            job.setNumReduceTasks(0);
        }
        TableMapReduceUtil.addDependencyJars(job);
        TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
                com.google.common.base.Function.class /*
                                                       * Guava used by TsvParser
                                                       */);
        return job;
    }

    /*
     * Prints usage help to stderr.
     *
     * @param errorMsg Error message. Can be null.
     */
    private static void usage(final String errorMsg) {
        if (errorMsg != null && errorMsg.length() > 0) {
            System.err.println("ERROR: " + errorMsg);
        }
        String usage = "Usage: "
                + NAME
                + " -Dimporttsv.columns=a,b,c -Dimporttsv.key.columns=0,3 <tablename> <inputdir>\n"
                + "\n"
                + "Imports the given input directory of TSV data into the specified table.\n"
                + "\n"
                + "The column names of the TSV data must be specified using the -Dimporttsv.columns\n"
                + "option. This option takes the form of comma-separated column names, where each\n"
                + "column name is either a simple column family, or a columnfamily:qualifier. The special\n"
                + "column name HBASE_ROW_KEY is used to designate that this column should be used\n"
                + "as the row key for each imported record. You must specify exactly one column\n"
                + "to be the row key, and you must specify a column name for every column that exists in the\n"
                + "input data.\n"
                + "\n"
                + "By default importtsv will load data directly into HBase. To instead generate\n"
                + "HFiles of data to prepare for a bulk data load, pass the option:\n"
                + "  -D"
                + BULK_OUTPUT_CONF_KEY
                + "=/path/for/output\n"
                + "  Note: if you do not use this option, then the target table must already exist in HBase\n"
                + "\n"
                + "Other options that may be specified with -D include:\n"
                + "  -D"
                + SEPARATOR_CONF_ROWKEY
                // BUGFIX: this option used to run into the next one because of a missing newline.
                + "= - default:,hbase rowkey separator\n"
                + "  -D"
                + SKIP_LINES_CONF_KEY
                + "=false - fail if encountering an invalid line\n"
                + "  '-D"
                + SEPARATOR_CONF_KEY
                + "=|' - eg separate on pipes instead of tabs\n"
                + "  -D"
                + TIMESTAMP_CONF_KEY
                + "=currentTimeAsLong - use the specified timestamp for the import\n"
                + "  -D" + MAPPER_CONF_KEY
                + "=my.Mapper - A user-defined Mapper to use instead of "
                + DEFAULT_MAPPER.getName() + "\n" + "  -D" + CHARSET_CONF_KEY
                + "=charset of rowkey, gb18030 " + "\n";
        System.err.println(usage);
    }

    /**
     * Main entry point. Validates the column and key-column specifications,
     * then submits the import job and exits with its status.
     *
     * @param args
     *            The command line parameters.
     * @throws Exception
     *             When running the job fails.
     */
    public static void main(String[] args) throws Exception {
        System.out.println("==============================================");
        Configuration conf = HBaseConfiguration.create();
        // Diagnostic dump of the resolved configuration (was logged at ERROR level).
        LOG.info("{}conf.toString() == {}", PRE, conf.toString());
        String[] otherArgs = new GenericOptionsParser(conf, args)
                .getRemainingArgs();
        if (otherArgs.length < 2) {
            usage("Wrong number of arguments: " + otherArgs.length);
            System.exit(-1);
        }
        String columns[] = conf.getStrings(COLUMNS_CONF_KEY);
        if (columns == null) {
            usage("No columns specified. Please specify with -D"
                    + COLUMNS_CONF_KEY + "=...");
            System.exit(-1);
        }
        // Make sure one or more columns are specified
        if (columns.length < 2) {
            usage("One or more columns in addition to the row key are required");
            System.exit(-1);
        }
        // BUGFIX: this check previously re-read COLUMNS_CONF_KEY, which made it
        // dead code (columns was already known non-null). The error message
        // shows the intent was to validate the key-column specification.
        String[] keyColumns = conf.getStrings(KEYCOLUMNS_CONF_KEY);
        if (keyColumns == null) {
            usage("One or more key columns are required. Please specify with -D"
                    + KEYCOLUMNS_CONF_KEY + "=...");
            System.exit(-1);
        }
        Job job = createSubmittableJob(conf, otherArgs);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/spark-on-hbase/src/test/java/spark/hbase/JavaHBaseContextSuite.java | spark-on-hbase/src/test/java/spark/hbase/JavaHBaseContextSuite.java | package spark.hbase;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import com.cloudera.sa.spark.hbase.example.JavaHBaseBulkDeleteExample.DeleteFunction;
import com.cloudera.sa.spark.hbase.example.JavaHBaseBulkGetExample.GetFunction;
import com.cloudera.sa.spark.hbase.example.JavaHBaseBulkGetExample.ResultFunction;
import com.cloudera.sa.spark.hbase.example.JavaHBaseBulkIncrementExample.IncrementFunction;
import com.cloudera.sa.spark.hbase.example.JavaHBaseBulkPutExample.PutFunction;
import com.google.common.collect.Lists;
import java.util.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.*;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.hbase.JavaHBaseContext;
import org.junit.*;
import scala.Tuple2;
import scala.Tuple3;
import com.google.common.io.Files;
/**
 * Integration-style tests for {@code JavaHBaseContext}, run against an
 * in-process HBase mini cluster that is started before and torn down after
 * each test.
 */
public class JavaHBaseContextSuite implements Serializable {
    private transient JavaSparkContext jsc;
    private transient File tempDir;
    HBaseTestingUtility htu;
    String tableName = "t1";
    String columnFamily = "c";

    /** Starts a local Spark context and a single-node HBase mini cluster with a fresh test table. */
    @Before
    public void setUp() {
        jsc = new JavaSparkContext("local", "JavaHBaseContextSuite");
        jsc.addJar("SparkHBase.jar");
        tempDir = Files.createTempDir();
        tempDir.deleteOnExit();
        htu = HBaseTestingUtility.createLocalHTU();
        try {
            System.out.println("cleaning up test dir");
            htu.cleanupTestDir();
            System.out.println("starting minicluster");
            htu.startMiniZKCluster();
            htu.startMiniHBaseCluster(1, 1);
            System.out.println(" - minicluster started");
            try {
                htu.deleteTable(Bytes.toBytes(tableName));
            } catch (Exception e) {
                // Expected on a fresh cluster: the table does not exist yet.
                System.out.println(" - no table " + tableName + " found");
            }
            System.out.println(" - creating table " + tableName);
            htu.createTable(Bytes.toBytes(tableName), Bytes.toBytes(columnFamily));
            System.out.println(" - created table");
        } catch (Exception e1) {
            throw new RuntimeException(e1);
        }
    }

    /** Drops the test table, shuts down the mini cluster and stops Spark. */
    @After
    public void tearDown() {
        try {
            htu.deleteTable(Bytes.toBytes(tableName));
            System.out.println("shuting down minicluster");
            htu.shutdownMiniHBaseCluster();
            htu.shutdownMiniZKCluster();
            System.out.println(" - minicluster shut down");
            htu.cleanupTestDir();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        jsc.stop();
        jsc = null;
    }

    @Test
    public void testJavaBulkIncrement() {
        List<String> list = new ArrayList<String>();
        list.add("1," + columnFamily + ",counter,1");
        list.add("2," + columnFamily + ",counter,2");
        list.add("3," + columnFamily + ",counter,3");
        list.add("4," + columnFamily + ",counter,4");
        list.add("5," + columnFamily + ",counter,5");
        JavaRDD<String> rdd = jsc.parallelize(list);
        Configuration conf = htu.getConfiguration();
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        hbaseContext.bulkIncrement(rdd, tableName, new IncrementFunction(), 4);
        // BUGFIX: removed a leftover unconditional "throw new RuntimeException()"
        // that made this test fail every time.
    }

    @Test
    public void testBulkPut() {
        List<String> list = new ArrayList<String>();
        list.add("1," + columnFamily + ",a,1");
        list.add("2," + columnFamily + ",a,2");
        list.add("3," + columnFamily + ",a,3");
        list.add("4," + columnFamily + ",a,4");
        list.add("5," + columnFamily + ",a,5");
        JavaRDD<String> rdd = jsc.parallelize(list);
        Configuration conf = htu.getConfiguration();
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        hbaseContext.bulkPut(rdd, tableName, new PutFunction(), true);
    }

    /** Turns a "rowKey,family,qualifier,value" CSV string into an HBase Put. */
    public static class PutFunction implements Function<String, Put> {
        private static final long serialVersionUID = 1L;

        public Put call(String v) throws Exception {
            String[] cells = v.split(",");
            Put put = new Put(Bytes.toBytes(cells[0]));
            put.add(Bytes.toBytes(cells[1]), Bytes.toBytes(cells[2]),
                    Bytes.toBytes(cells[3]));
            return put;
        }
    }

    @Test
    public void testBulkDelete() {
        List<byte[]> list = new ArrayList<byte[]>();
        list.add(Bytes.toBytes("1"));
        list.add(Bytes.toBytes("2"));
        list.add(Bytes.toBytes("3"));
        list.add(Bytes.toBytes("4"));
        list.add(Bytes.toBytes("5"));
        JavaRDD<byte[]> rdd = jsc.parallelize(list);
        Configuration conf = htu.getConfiguration();
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        hbaseContext.bulkDelete(rdd, tableName, new DeleteFunction(), 4);
    }

    @Test
    public void testDistributedScan() {
        Configuration conf = htu.getConfiguration();
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        Scan scan = new Scan();
        scan.setCaching(100);
        JavaRDD<Tuple2<byte[], List<Tuple3<byte[], byte[], byte[]>>>> javaRdd = hbaseContext.hbaseRDD(tableName, scan);
        List<Tuple2<byte[], List<Tuple3<byte[], byte[], byte[]>>>> results = javaRdd.collect();
        results.size();
    }

    @Test
    public void testBulkGet() {
        List<byte[]> list = new ArrayList<byte[]>();
        list.add(Bytes.toBytes("1"));
        list.add(Bytes.toBytes("2"));
        list.add(Bytes.toBytes("3"));
        list.add(Bytes.toBytes("4"));
        list.add(Bytes.toBytes("5"));
        JavaRDD<byte[]> rdd = jsc.parallelize(list);
        Configuration conf = htu.getConfiguration();
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        hbaseContext.bulkGet(tableName, 2, rdd, new GetFunction(),
                new ResultFunction());
    }

    /** Wraps a raw row key in an HBase Get. */
    public static class GetFunction implements Function<byte[], Get> {
        private static final long serialVersionUID = 1L;

        public Get call(byte[] v) throws Exception {
            return new Get(v);
        }
    }

    /** Renders a Result as "rowKey:(qualifier,value)..." for inspection. */
    public static class ResultFunction implements Function<Result, String> {
        private static final long serialVersionUID = 1L;

        public String call(Result result) throws Exception {
            Iterator<KeyValue> it = result.list().iterator();
            StringBuilder b = new StringBuilder();
            b.append(Bytes.toString(result.getRow()) + ":");
            while (it.hasNext()) {
                KeyValue kv = it.next();
                String q = Bytes.toString(kv.getQualifier());
                // "counter" cells hold a serialized long, not a string.
                if (q.equals("counter")) {
                    b.append("(" + Bytes.toString(kv.getQualifier()) + ","
                            + Bytes.toLong(kv.getValue()) + ")");
                } else {
                    b.append("(" + Bytes.toString(kv.getQualifier()) + ","
                            + Bytes.toString(kv.getValue()) + ")");
                }
            }
            return b.toString();
        }
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseBulkDeleteExample.java | spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseBulkDeleteExample.java | package com.cloudera.sa.spark.hbase.example;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.hbase.HBaseContext;
import org.apache.spark.hbase.JavaHBaseContext;
/**
 * Example that bulk-deletes the rows with keys "1".."5" from an HBase table
 * via {@code JavaHBaseContext.bulkDelete}.
 */
public class JavaHBaseBulkDeleteExample {
    public static void main(String args[]) {
        // BUGFIX: the original printed usage for missing args but fell through
        // to args[0], throwing ArrayIndexOutOfBoundsException. Two arguments
        // are required, so validate and bail out early.
        if (args.length < 2) {
            System.out.println("JavaHBaseBulkDeleteExample {master} {tableName} ");
            return;
        }
        String master = args[0];
        String tableName = args[1];
        JavaSparkContext jsc = new JavaSparkContext(master,
                "JavaHBaseBulkDeleteExample");
        jsc.addJar("SparkHBase.jar");
        // Row keys to delete.
        List<byte[]> list = new ArrayList<byte[]>();
        list.add(Bytes.toBytes("1"));
        list.add(Bytes.toBytes("2"));
        list.add(Bytes.toBytes("3"));
        list.add(Bytes.toBytes("4"));
        list.add(Bytes.toBytes("5"));
        JavaRDD<byte[]> rdd = jsc.parallelize(list);
        Configuration conf = HBaseConfiguration.create();
        conf.addResource(new Path("/etc/hbase/conf/core-site.xml"));
        conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        hbaseContext.bulkDelete(rdd, tableName, new DeleteFunction(), 4);
        // Release Spark resources now that the action has completed.
        jsc.stop();
    }

    /** Wraps a raw row key in an HBase Delete. */
    public static class DeleteFunction implements Function<byte[], Delete> {
        private static final long serialVersionUID = 1L;

        public Delete call(byte[] v) throws Exception {
            return new Delete(v);
        }
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/TestJavaLocalMainExample.java | spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/TestJavaLocalMainExample.java | package com.cloudera.sa.spark.hbase.example;
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.hbase.JavaHBaseContext;
import org.junit.After;
import scala.Tuple2;
import com.google.common.io.Files;
/**
 * Standalone example that starts a local HBase mini cluster, exercises
 * JavaHBaseContext.foreachPartition and foreach against it, then tears the
 * cluster down again.
 *
 * NOTE(review): despite the {@code @After} annotation on tearDown(), this is
 * not run as a JUnit test — there is no {@code @Test} method and everything
 * is static; setUp()/tearDown() are invoked manually from main(). JUnit 4
 * would reject a static {@code @After} method anyway.
 */
public class TestJavaLocalMainExample {
    // Spark context and scratch directory used for the embedded cluster run.
    private static transient JavaSparkContext jsc;
    private static transient File tempDir;
    // HBase mini-cluster helper plus the table/column family the example creates.
    static HBaseTestingUtility htu;
    static String tableName = "t1";
    static String columnFamily = "c";

    public static void main(String[] agrs) {
        setUp();
        Configuration conf = htu.getConfiguration();
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        // Row keys "1".."5" that the example looks up.
        List<byte[]> list = new ArrayList<byte[]>();
        list.add(Bytes.toBytes("1"));
        list.add(Bytes.toBytes("2"));
        list.add(Bytes.toBytes("3"));
        list.add(Bytes.toBytes("4"));
        list.add(Bytes.toBytes("5"));
        JavaRDD<byte[]> rdd = jsc.parallelize(list);
        // Partition-at-a-time access: one HConnection per partition, iterate
        // the keys and re-put every row that already exists.
        // NOTE(review): looks up table "Foo", not the test table created in
        // setUp() — presumably intentional for the demo, but worth confirming.
        hbaseContext.foreachPartition(rdd, new VoidFunction<Tuple2<Iterator<byte[]>, HConnection>>() {
            public void call(Tuple2<Iterator<byte[]>, HConnection> t)
                    throws Exception {
                HTableInterface table1 = t._2().getTable(Bytes.toBytes("Foo"));
                Iterator<byte[]> it = t._1();
                while (it.hasNext()) {
                    byte[] b = it.next();
                    Result r = table1.get(new Get(b));
                    if (r.getExists()) {
                        table1.put(new Put(b));
                    }
                }
            }
        });
        // Record-at-a-time variant of the same get-then-put round trip.
        hbaseContext.foreach(rdd, new VoidFunction<Tuple2<byte[], HConnection>>() {
            public void call(Tuple2<byte[], HConnection> t)
                    throws Exception {
                HTableInterface table1 = t._2().getTable(Bytes.toBytes("Foo"));
                byte[] b = t._1();
                Result r = table1.get(new Get(b));
                if (r.getExists()) {
                    table1.put(new Put(b));
                }
            }
        });
        tearDown();
    }

    /** Starts a local Spark context and a single-node HBase mini cluster with a fresh test table. */
    public static void setUp() {
        jsc = new JavaSparkContext("local", "JavaHBaseContextSuite");
        jsc.addJar("SparkHBase.jar");
        tempDir = Files.createTempDir();
        tempDir.deleteOnExit();
        htu = HBaseTestingUtility.createLocalHTU();
        try {
            System.out.println("cleaning up test dir");
            htu.cleanupTestDir();
            System.out.println("starting minicluster");
            htu.startMiniZKCluster();
            htu.startMiniHBaseCluster(1, 1);
            System.out.println(" - minicluster started");
            try {
                htu.deleteTable(Bytes.toBytes(tableName));
            } catch (Exception e) {
                // Expected on a fresh cluster: the table does not exist yet.
                System.out.println(" - no table " + tableName + " found");
            }
            System.out.println(" - creating table " + tableName);
            htu.createTable(Bytes.toBytes(tableName), Bytes.toBytes(columnFamily));
            System.out.println(" - created table");
        } catch (Exception e1) {
            throw new RuntimeException(e1);
        }
    }

    /** Drops the test table, shuts down the mini cluster and stops Spark. */
    @After
    public static void tearDown() {
        try {
            htu.deleteTable(Bytes.toBytes(tableName));
            System.out.println("shuting down minicluster");
            htu.shutdownMiniHBaseCluster();
            htu.shutdownMiniZKCluster();
            System.out.println(" - minicluster shut down");
            htu.cleanupTestDir();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        jsc.stop();
        jsc = null;
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseMapGetPutExample.java | spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseMapGetPutExample.java | package com.cloudera.sa.spark.hbase.example;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.hbase.JavaHBaseContext;
import scala.Tuple2;
/**
 * Example demonstrating record-at-a-time and partition-at-a-time HBase access
 * through {@code JavaHBaseContext.foreach} / {@code foreachPartition}.
 */
public class JavaHBaseMapGetPutExample {
    public static void main(String args[]) {
        // BUGFIX: the original printed usage but fell through to args[0],
        // throwing ArrayIndexOutOfBoundsException; validate and return.
        if (args.length < 2) {
            System.out
                    .println("JavaHBaseBulkGetExample {master} {tableName}");
            return;
        }
        String master = args[0];
        String tableName = args[1];
        JavaSparkContext jsc = new JavaSparkContext(master,
                "JavaHBaseBulkGetExample");
        jsc.addJar("SparkHBase.jar");
        // Row keys "1".."5" to process.
        List<byte[]> list = new ArrayList<byte[]>();
        list.add(Bytes.toBytes("1"));
        list.add(Bytes.toBytes("2"));
        list.add(Bytes.toBytes("3"));
        list.add(Bytes.toBytes("4"));
        list.add(Bytes.toBytes("5"));
        JavaRDD<byte[]> rdd = jsc.parallelize(list);
        Configuration conf = HBaseConfiguration.create();
        conf.addResource(new Path("/etc/hbase/conf/core-site.xml"));
        conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        // BUGFIX: a null function was passed here (NullPointerException at
        // runtime) while CustomFunction below went unused; pass it instead.
        hbaseContext.foreachPartition(rdd, new CustomFunction());
        // Record-at-a-time variant: for each key, re-put the row if it exists.
        hbaseContext.foreach(rdd, new VoidFunction<Tuple2<byte[], HConnection>>() {
            public void call(Tuple2<byte[], HConnection> t)
                    throws Exception {
                HTableInterface table1 = t._2.getTable(Bytes.toBytes("Foo"));
                byte[] b = t._1;
                Result r = table1.get(new Get(b));
                if (r.getExists()) {
                    table1.put(new Put(b));
                }
            }
        });
    }

    /** Wraps a raw row key in an HBase Get. */
    public static class GetFunction implements Function<byte[], Get> {
        private static final long serialVersionUID = 1L;

        public Get call(byte[] v) throws Exception {
            return new Get(v);
        }
    }

    /** Per-partition callback; intentionally a no-op placeholder for the demo. */
    public static class CustomFunction implements VoidFunction<Tuple2<Iterator<byte[]>, HConnection>> {
        private static final long serialVersionUID = 1L;

        public void call(Tuple2<Iterator<byte[]>, HConnection> t) throws Exception {
        }
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseBulkPutExample.java | spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseBulkPutExample.java | package com.cloudera.sa.spark.hbase.example;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.hbase.JavaHBaseContext;
/**
 * Example that bulk-writes five "rowKey,family,qualifier,value" records into
 * an HBase table via {@code JavaHBaseContext.bulkPut}.
 */
public class JavaHBaseBulkPutExample {
    public static void main(String args[]) {
        // BUGFIX: the original printed usage but fell through to args[0],
        // throwing ArrayIndexOutOfBoundsException. Three arguments are
        // required, so validate and bail out early.
        if (args.length < 3) {
            System.out
                    .println("JavaHBaseBulkPutExample {master} {tableName} {columnFamily}");
            return;
        }
        String master = args[0];
        String tableName = args[1];
        String columnFamily = args[2];
        JavaSparkContext jsc = new JavaSparkContext(master,
                "JavaHBaseBulkPutExample");
        jsc.addJar("SparkHBase.jar");
        // Records in "rowKey,family,qualifier,value" form.
        List<String> list = new ArrayList<String>();
        list.add("1," + columnFamily + ",a,1");
        list.add("2," + columnFamily + ",a,2");
        list.add("3," + columnFamily + ",a,3");
        list.add("4," + columnFamily + ",a,4");
        list.add("5," + columnFamily + ",a,5");
        JavaRDD<String> rdd = jsc.parallelize(list);
        Configuration conf = HBaseConfiguration.create();
        conf.addResource(new Path("/etc/hbase/conf/core-site.xml"));
        conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        hbaseContext.bulkPut(rdd, tableName, new PutFunction(), true);
        // Release Spark resources now that the action has completed.
        jsc.stop();
    }

    /** Turns a "rowKey,family,qualifier,value" CSV string into an HBase Put. */
    public static class PutFunction implements Function<String, Put> {
        private static final long serialVersionUID = 1L;

        public Put call(String v) throws Exception {
            String[] cells = v.split(",");
            Put put = new Put(Bytes.toBytes(cells[0]));
            put.add(Bytes.toBytes(cells[1]), Bytes.toBytes(cells[2]),
                    Bytes.toBytes(cells[3]));
            return put;
        }
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseBulkGetExample.java | spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseBulkGetExample.java | package com.cloudera.sa.spark.hbase.example;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.hbase.HBaseContext;
import org.apache.spark.hbase.JavaHBaseContext;
/**
 * Example that bulk-fetches the rows with keys "1".."5" from an HBase table
 * via {@code JavaHBaseContext.bulkGet} and renders each Result as a string.
 */
public class JavaHBaseBulkGetExample {
    public static void main(String args[]) {
        // BUGFIX: the original printed usage but fell through to args[0],
        // throwing ArrayIndexOutOfBoundsException; validate and return.
        if (args.length < 2) {
            System.out
                    .println("JavaHBaseBulkGetExample {master} {tableName}");
            return;
        }
        String master = args[0];
        String tableName = args[1];
        JavaSparkContext jsc = new JavaSparkContext(master,
                "JavaHBaseBulkGetExample");
        jsc.addJar("SparkHBase.jar");
        // Row keys to fetch.
        List<byte[]> list = new ArrayList<byte[]>();
        list.add(Bytes.toBytes("1"));
        list.add(Bytes.toBytes("2"));
        list.add(Bytes.toBytes("3"));
        list.add(Bytes.toBytes("4"));
        list.add(Bytes.toBytes("5"));
        JavaRDD<byte[]> rdd = jsc.parallelize(list);
        Configuration conf = HBaseConfiguration.create();
        conf.addResource(new Path("/etc/hbase/conf/core-site.xml"));
        conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        hbaseContext.bulkGet(tableName, 2, rdd, new GetFunction(),
                new ResultFunction());
        // Release Spark resources now that the action has completed.
        jsc.stop();
    }

    /** Wraps a raw row key in an HBase Get. */
    public static class GetFunction implements Function<byte[], Get> {
        private static final long serialVersionUID = 1L;

        public Get call(byte[] v) throws Exception {
            return new Get(v);
        }
    }

    /** Renders a Result as "rowKey:(qualifier,value)..." for inspection. */
    public static class ResultFunction implements Function<Result, String> {
        private static final long serialVersionUID = 1L;

        public String call(Result result) throws Exception {
            Iterator<KeyValue> it = result.list().iterator();
            StringBuilder b = new StringBuilder();
            b.append(Bytes.toString(result.getRow()) + ":");
            while (it.hasNext()) {
                KeyValue kv = it.next();
                String q = Bytes.toString(kv.getQualifier());
                // "counter" cells hold a serialized long, not a string.
                if (q.equals("counter")) {
                    b.append("(" + Bytes.toString(kv.getQualifier()) + ","
                            + Bytes.toLong(kv.getValue()) + ")");
                } else {
                    b.append("(" + Bytes.toString(kv.getQualifier()) + ","
                            + Bytes.toString(kv.getValue()) + ")");
                }
            }
            return b.toString();
        }
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseStreamingBulkPutExample.java | spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseStreamingBulkPutExample.java | package com.cloudera.sa.spark.hbase.example;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.hbase.JavaHBaseContext;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
/**
 * Streaming example: reads "rowKey,family,qualifier,value" lines from a
 * socket and bulk-puts them into an HBase table, one batch per second.
 */
public class JavaHBaseStreamingBulkPutExample {
    public static void main(String args[]) {
        // BUGFIX: the original printed usage but fell through to args[0],
        // throwing ArrayIndexOutOfBoundsException. Five arguments are
        // required, so validate and bail out early.
        if (args.length < 5) {
            System.out
                    .println("JavaHBaseBulkPutExample {master} {host} {post} {tableName} {columnFamily}");
            return;
        }
        String master = args[0];
        String host = args[1];
        String port = args[2];
        String tableName = args[3];
        String columnFamily = args[4];
        System.out.println("master:" + master);
        System.out.println("host:" + host);
        System.out.println("port:" + Integer.parseInt(port));
        System.out.println("tableName:" + tableName);
        System.out.println("columnFamily:" + columnFamily);
        // BUGFIX: sparkConf was built but never handed to the context, so
        // spark.cleaner.ttl was silently ignored; build the context from it.
        SparkConf sparkConf = new SparkConf();
        sparkConf.set("spark.cleaner.ttl", "120000");
        sparkConf.setMaster(master);
        sparkConf.setAppName("JavaHBaseBulkPutExample");
        JavaSparkContext jsc = new JavaSparkContext(sparkConf);
        jsc.addJar("SparkHBase.jar");
        // One-second micro-batches.
        JavaStreamingContext jssc = new JavaStreamingContext(jsc, new Duration(1000));
        JavaReceiverInputDStream<String> javaDstream = jssc.socketTextStream(host, Integer.parseInt(port));
        Configuration conf = HBaseConfiguration.create();
        conf.addResource(new Path("/etc/hbase/conf/core-site.xml"));
        conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
        JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
        hbaseContext.streamBulkPut(javaDstream, tableName, new PutFunction(), true);
        // BUGFIX: the streaming context was never started, so the pipeline
        // above never ran; start it and block until it is terminated.
        jssc.start();
        jssc.awaitTermination();
    }

    /** Turns a "rowKey,family,qualifier,value" CSV string into an HBase Put. */
    public static class PutFunction implements Function<String, Put> {
        private static final long serialVersionUID = 1L;

        public Put call(String v) throws Exception {
            String[] cells = v.split(",");
            Put put = new Put(Bytes.toBytes(cells[0]));
            put.add(Bytes.toBytes(cells[1]), Bytes.toBytes(cells[2]),
                    Bytes.toBytes(cells[3]));
            return put;
        }
    }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseDistributedScan.java | spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseDistributedScan.java | package com.cloudera.sa.spark.hbase.example;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.hbase.JavaHBaseContext;
import scala.Tuple2;
import scala.Tuple3;
public class JavaHBaseDistributedScan {
public static void main(String args[]) {
if (args.length == 0) {
System.out
.println("JavaHBaseDistributedScan {master} {tableName}");
}
String master = args[0];
String tableName = args[1];
JavaSparkContext jsc = new JavaSparkContext(master,
"JavaHBaseDistributedScan");
jsc.addJar("SparkHBase.jar");
Configuration conf = HBaseConfiguration.create();
conf.addResource(new Path("/etc/hbase/conf/core-site.xml"));
conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
Scan scan = new Scan();
scan.setCaching(100);
JavaRDD<Tuple2<byte[], List<Tuple3<byte[], byte[], byte[]>>>> javaRdd = hbaseContext.hbaseRDD(tableName, scan);
List<Tuple2<byte[], List<Tuple3<byte[], byte[], byte[]>>>> results = javaRdd.collect();
results.size();
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseBulkIncrementExample.java | spark-on-hbase/src/main/java/com/cloudera/sa/spark/hbase/example/JavaHBaseBulkIncrementExample.java | package com.cloudera.sa.spark.hbase.example;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.hbase.HBaseContext;
import org.apache.spark.hbase.JavaHBaseContext;
public class JavaHBaseBulkIncrementExample {
public static void main(String args[]) {
if (args.length == 0) {
System.out
.println("JavaHBaseBulkIncrementExample {master} {tableName} {columnFamily}");
}
String master = args[0];
String tableName = args[1];
String columnFamily = args[2];
JavaSparkContext jsc = new JavaSparkContext(master,
"JavaHBaseBulkIncrementExample");
jsc.addJar("SparkHBase.jar");
List<String> list = new ArrayList<String>();
list.add("1," + columnFamily + ",counter,1");
list.add("2," + columnFamily + ",counter,2");
list.add("3," + columnFamily + ",counter,3");
list.add("4," + columnFamily + ",counter,4");
list.add("5," + columnFamily + ",counter,5");
JavaRDD<String> rdd = jsc.parallelize(list);
Configuration conf = HBaseConfiguration.create();
conf.addResource(new Path("/etc/hbase/conf/core-site.xml"));
conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);
hbaseContext.bulkIncrement(rdd, tableName, new IncrementFunction(), 4);
}
public static class IncrementFunction implements Function<String, Increment> {
private static final long serialVersionUID = 1L;
public Increment call(String v) throws Exception {
String[] cells = v.split(",");
Increment increment = new Increment(Bytes.toBytes(cells[0]));
increment.addColumn(Bytes.toBytes(cells[1]), Bytes.toBytes(cells[2]),
Integer.parseInt(cells[3]));
return increment;
}
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/jmeter-hbase-plugins/src/test/java/com/embracesource/hbase/jmeter/JMeterTest.java | jmeter-hbase-plugins/src/test/java/com/embracesource/hbase/jmeter/JMeterTest.java | package com.embracesource.hbase.jmeter;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.jmeter.config.Arguments;
import org.apache.jmeter.protocol.java.sampler.AbstractJavaSamplerClient;
import org.apache.jmeter.protocol.java.sampler.JavaSamplerContext;
import org.apache.jmeter.samplers.SampleResult;
public class JMeterTest extends AbstractJavaSamplerClient {
private HTableInterface table;
private static Configuration conf = null;
private Put put = null ;
private Get get= null ;
/*测试的方法*/
private String methedType = null ;
/*key取值长度*/
private int keyNumLength = 0 ;
/*列族数组*/
private String[] cfs = null ;
/*列数组*/
private String[] qualifiers = null ;
/*value值*/
private String values =null ;
/*是否记录日志*/
private boolean writeToWAL = true ;
/**
* 初始化配置
*/
static {
conf = HBaseConfiguration.create();
}
@Override
public void setupTest(JavaSamplerContext context) {
super.setupTest(context);
if (table == null) {
String tableName = context.getParameter("tableName");
byte[] tableNamebyte = tableName.getBytes();
boolean autoFlush = Boolean.valueOf(context.getParameter("autoFlush"));
long writeBufferSize = Long.valueOf(context.getParameter("writeBufferSize"));
int poolSize = Integer.parseInt(context.getParameter("poolSize"));
try {
table = JMeterHTablePool.getinstancePool(conf,poolSize,tableNamebyte,autoFlush,writeBufferSize).tablePool.getTable(tableName);
} catch (Exception e) {
System.out.println("htable pool error");
}
}
if( methedType == null ){
methedType = context.getParameter("putOrget");
}
if( keyNumLength == 0){
keyNumLength = Integer.parseInt(context.getParameter("keyNumLength"));
}
if(cfs == null){
String cf = context.getParameter("cf");
cfs = cf.split(",");
}
if( qualifiers == null ){
String qualifier = context.getParameter("qualifier");
qualifiers = qualifier.split(",");
}
if( values == null ){
String valueLength = context.getParameter("valueLength");
values = Strings.repeat('v', Integer.parseInt(valueLength));
}
if( writeToWAL == true ){
writeToWAL = Boolean.valueOf(context.getParameter("writeToWAL"));
}
}
public SampleResult runTest(JavaSamplerContext context) {
SampleResult sampleResult = new SampleResult();
sampleResult.sampleStart();
String key = String.valueOf(String.valueOf(new Random().nextInt(keyNumLength)).hashCode());
try {
if (methedType.equals("put")) {
put = new Put(Bytes.toBytes(key));
put.setWriteToWAL(writeToWAL);
for (int j = 0; j < cfs.length; j++) {
for (int n = 0; n < qualifiers.length; n++) {
put.add(Bytes.toBytes(cfs[j]),
Bytes.toBytes(qualifiers[n]),
Bytes.toBytes(values));
table.put(put);
}
}
} else if (methedType.equals("get")) {
get = new Get((key ).getBytes());
table.get(get);
// Result rs = table.get(get);
}
sampleResult.setSuccessful(true);
} catch (Throwable e) {
sampleResult.setSuccessful(false);
} finally {
sampleResult.sampleEnd();
}
// // 返回是否处理成功
return sampleResult;
}
@Override
public Arguments getDefaultParameters() {
Arguments params = new Arguments();
params.addArgument("putOrget", "put");
params.addArgument("keyNumLength", "5");
params.addArgument("valueLength", "1000");
params.addArgument("cf", "cf");
params.addArgument("qualifier", "a");
params.addArgument("tableName","test");
params.addArgument("autoFlush","false");
params.addArgument("writeBufferSize","2097152");
params.addArgument("writeToWAL","true");
params.addArgument("poolSize","500");
return params;
}
@Override
public void teardownTest(JavaSamplerContext context) {
super.teardownTest(context);
try {
if (table != null) {
table.flushCommits();
table.close();
table = null ;
}
} catch (IOException e) {
System.out.println("teardown error");
}
}
public static void main(String[] args){
for(int i = 0 ; i<100;i++){
String key = null ;
key = String.valueOf(String.valueOf(new Random().nextInt(1000000)).hashCode());
System.out.println(key);
}
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/SequenceKey.java | jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/SequenceKey.java | package com.embracesource.jmeter.hbase;
import java.util.Random;
import java.util.UUID;
public class SequenceKey {
static String[] str = new String[] { "B1C", "B1F", "B1R", "B2C", "B2F",
"B2R", "C1C", "C1F", "C1R", "F1C", "F1F", "F1R", "F2C", "F2F", "F2R",
"G1C", "G1F", "G1R", "H1C", "H1F", "H1R", "H2C", "H2F", "H2R", "H3C",
"H3F", "H3R", "J1C", "J1F", "J1R", "K1C", "K1F", "K1R", "K2C", "K2F",
"K2R", "M1C", "M1F", "M1R", "N1C", "N1F", "N1R", "N2C", "N2F", "N2R",
"N3C", "N3F", "N3R", "O1C", "O1F", "O1R", "P1C", "P1F", "P1R", "P2C",
"P2F", "P2R", "P3C", "P3F", "P3R", "Q1C", "Q1F", "Q1R", "Q2C", "Q2F",
"Q2R", "Q6C", "Q6F", "Q6R", "Q7C", "Q7F", "Q7R", "R1C", "R1F", "R1R",
"T1C", "T1F", "T1R", "T2C", "T2F", "T2R", "V1C", "V1F", "V1R", "W1C",
"W1F", "W1R", "Y1C", "Y1F", "Y1R", "Z1C", "Z1F", "Z1R" };
public static String getsequenceKey() {
return str[new Random().nextInt(93)] + UUID.randomUUID();
}
public static void main(String[] args) {
System.out.println(str.length);
for (int i = 0; i < 1000; i++) {
System.out.println(getsequenceKey());
}
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/HTableUtil.java | jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/HTableUtil.java | package com.embracesource.jmeter.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.ServerCallable;
public class HTableUtil {
@SuppressWarnings({ "deprecation" })
public static void putAndCommit(HTable table, final Put put) throws IOException {
try {
table.getConnection().getRegionServerWithRetries(
new ServerCallable<Boolean>(table.getConnection(), table.getTableName(), put.getRow()) {
public Boolean call() throws Exception {
server.put(location.getRegionInfo().getRegionName(), put);
return true;
}
});
} catch (Exception e) {
throw new IOException(e);
}
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/HbaseJMeter.java | jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/HbaseJMeter.java | package com.embracesource.jmeter.hbase;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.jmeter.config.Arguments;
import org.apache.jmeter.protocol.java.sampler.AbstractJavaSamplerClient;
import org.apache.jmeter.protocol.java.sampler.JavaSamplerContext;
import org.apache.jmeter.samplers.SampleResult;
public class HbaseJMeter extends AbstractJavaSamplerClient {
private HTableInterface table;
private static Configuration conf = null;
private Put put = null;
private Get get = null;
private String methedType = null;
private int keyNumLength = 0;
private String[] cfs = null;
private String[] qualifiers = null;
private String values = null;
private boolean writeToWAL = true;
private boolean keyRondom = true;
@Override
public void setupTest(JavaSamplerContext context) {
super.setupTest(context);
String hbaseZK = context.getParameter("hbase.zookeeper.quorum");
conf = HBaseConfiguration.create();
conf.set("hbase.zookeeper.quorum", hbaseZK);
conf.set("hbase.ipc.client.tcpnodelay", "true");
conf.set("hbase.client.pause", "20");
conf.set("ipc.ping.interval", "3000");
conf.set("hbase.client.retries.number", "11");
if (table == null) {
String tableName = context.getParameter("tableName");
byte[] tableNamebyte = tableName.getBytes();
boolean autoFlush = Boolean.valueOf(context.getParameter("autoFlush"));
long writeBufferSize = Long.valueOf(context.getParameter("writeBufferSize"));
int poolSize = Integer.parseInt(context.getParameter("poolSize"));
try {
table =
JMeterHTablePool.getinstancePool(conf, poolSize, tableNamebyte, autoFlush,
writeBufferSize).tablePool.getTable(tableName);
} catch (Exception e) {
System.out.println("htable pool error");
}
}
if (methedType == null) {
methedType = context.getParameter("putOrget");
}
if (keyNumLength == 0) {
keyNumLength = Integer.parseInt(context.getParameter("keyNumLength"));
}
if (cfs == null) {
String cf = context.getParameter("cf");
cfs = cf.split(",");
}
if (qualifiers == null) {
String qualifier = context.getParameter("qualifier");
qualifiers = qualifier.split(",");
}
if (values == null) {
String valueLength = context.getParameter("valueLength");
values = Strings.repeat('v', Integer.parseInt(valueLength));
}
if (writeToWAL == true) {
writeToWAL = Boolean.valueOf(context.getParameter("writeToWAL"));
}
if (keyRondom == true) {
keyRondom = Boolean.valueOf(context.getParameter("keyRondom"));
}
}
public SampleResult runTest(JavaSamplerContext context) {
SampleResult sampleResult = new SampleResult();
sampleResult.sampleStart();
String key = null;
if (keyRondom) {
key = String.valueOf(String.valueOf(new Random().nextInt(keyNumLength)).hashCode());
} else {
key = SequenceKey.getsequenceKey();
}
try {
if (methedType.equals("put")) {
put = new Put(Bytes.toBytes(key));
put.setWriteToWAL(writeToWAL);
for (int j = 0; j < cfs.length; j++) {
for (int n = 0; n < qualifiers.length; n++) {
put.add(Bytes.toBytes(cfs[j]), Bytes.toBytes(qualifiers[n]), Bytes.toBytes(values));
}
}
table.put(put);
} else if (methedType.equals("get")) {
get = new Get((key).getBytes());
table.get(get);
}
sampleResult.setSuccessful(true);
} catch (Throwable e) {
sampleResult.setSuccessful(false);
} finally {
sampleResult.sampleEnd();
}
return sampleResult;
}
@Override
public Arguments getDefaultParameters() {
Arguments params = new Arguments();
params.addArgument("tableName", "test");
params.addArgument("cf", "cf");
params.addArgument("qualifier", "a");
params.addArgument("putOrget", "put");
params.addArgument("keyNumLength", "10000000");
params.addArgument("keyRondom", "true");
params.addArgument("valueLength", "1000");
params.addArgument("autoFlush", "false");
params.addArgument("writeBufferSize", "2097152");
params.addArgument("writeToWAL", "true");
params.addArgument("poolSize", "500");
params
.addArgument(
"hbase.zookeeper.quorum",
"tkpcjk01-12,tkpcjk01-13,tkpcjk01-14,tkpcjk01-15,tkpcjk01-16,tkpcjk01-17,tkpcjk01-18,tkpcjk01-19,tkpcjk01-20,tkpcjk01-21,tkpcjk01-22,tkpcjk01-23,tkpcjk01-24");
return params;
}
@Override
public void teardownTest(JavaSamplerContext context) {
super.teardownTest(context);
try {
if (table != null) {
table.flushCommits();
table.close();
table = null;
}
} catch (IOException e) {
System.out.println("teardown error");
}
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/HbaseJMeterPut.java | jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/HbaseJMeterPut.java | //package com.embracesource.jmeter.hbase;
//
//import java.io.IOException;
//
//import org.apache.hadoop.conf.Configuration;
//import org.apache.hadoop.hbase.HBaseConfiguration;
//import org.apache.hadoop.hbase.client.HTableInterface;
//import org.apache.hadoop.hbase.client.Put;
//import org.apache.hadoop.hbase.util.Bytes;
//import org.apache.jmeter.config.Arguments;
//import org.apache.jmeter.protocol.java.sampler.AbstractJavaSamplerClient;
//import org.apache.jmeter.protocol.java.sampler.JavaSamplerContext;
//import org.apache.jmeter.samplers.SampleResult;
//
//public class HbaseJMeterPut extends AbstractJavaSamplerClient {
// private static HTableInterface table;
//
// private static Configuration conf = null;
//
// private Put put = null;
//
// private byte[] cf = null;
//
// private String[] qualifiers = null;
//
// private byte[] values = null;
//
// private boolean writeToWAL = true;
//
// @Override
// public void setupTest(JavaSamplerContext context) {
// super.setupTest(context);
//
// conf = HBaseConfiguration.create();
// conf.set("hbase.zookeeper.quorum", context.getParameter("hbase.zookeeper.quorum"));
// conf.set("hbase.ipc.client.tcpnodelay", "true");
// conf.set("hbase.client.pause", "20");
// conf.set("ipc.ping.interval", "3000");
// conf.set("hbase.client.retries.number", "4");
//
// Long writeBufferSize = Long.valueOf(context.getParameter("writeBufferSize"));
// Integer poolSize = Integer.parseInt(context.getParameter("poolSize"));
// try {
// table =
// JMeterHTablePool.getinstancePool(conf, poolSize, writeBufferSize).tablePool
// .getTable(context.getParameter("tableName"));
// } catch (Exception e) {
// }
// cf = context.getParameter("cf").getBytes();
// qualifiers = context.getParameter("qualifier").split(",");
// writeToWAL = Boolean.valueOf(context.getParameter("writeToWAL"));
// values = Bytes.toBytes("vvvvv");
// }
//
// public SampleResult runTest(JavaSamplerContext context) {
// String key = SequenceKey.getsequenceKey();
//
// SampleResult sampleResult = new SampleResult();
// sampleResult.sampleStart();
// try {
// put = new Put(Bytes.toBytes(key));
// put.setWriteToWAL(writeToWAL);
// for (int n = 0; n < qualifiers.length; n++) {
// put.add(cf, Bytes.toBytes(qualifiers[n]), values);
// }
//
// table.put(put);
// sampleResult.setSuccessful(true);
// } catch (Throwable e) {
// sampleResult.setSuccessful(false);
// } finally {
// sampleResult.sampleEnd();
// }
// return sampleResult;
// }
//
// @Override
// public Arguments getDefaultParameters() {
// Arguments params = new Arguments();
// params.addArgument("tableName", "test");
// params.addArgument("cf", "f");
// params.addArgument("qualifier", "a,b,c,d,e,f,g,h,i,j,g,k,l,m,n,o,p,q,r,s,t");
// params.addArgument("writeBufferSize", "2097152");
// params.addArgument("writeToWAL", "true");
// params.addArgument("poolSize", "500");
// params
// .addArgument(
// "hbase.zookeeper.quorum",
// "tkpcjk01-12,tkpcjk01-13,tkpcjk01-14,tkpcjk01-15,tkpcjk01-16,tkpcjk01-17,tkpcjk01-18,tkpcjk01-19,tkpcjk01-20,tkpcjk01-21,tkpcjk01-22,tkpcjk01-23,tkpcjk01-24");
// return params;
// }
//
// @Override
// public void teardownTest(JavaSamplerContext context) {
// super.teardownTest(context);
// try {
// table.close();
// } catch (IOException e) {
// }
// }
//} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/JMeterHTablePool.java | jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/JMeterHTablePool.java | package com.embracesource.jmeter.hbase;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableFactory;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
/**
* 双重检查加锁 单例模式
*
* @author allen
*
*/
public class JMeterHTablePool {
public static HTablePool tablePool = null;
private static JMeterHTablePool instancePool;
private JMeterHTablePool(Configuration config, int poolsize, byte[] tableName,
final boolean autoFlush, final long writeBufferSize) {
tablePool = new HTablePool(config, poolsize, new HTableFactory() {
@Override
public HTableInterface createHTableInterface(Configuration config, byte[] tableName) {
try {
HTable hTable = new HTable(config, tableName);
hTable.setAutoFlush(autoFlush);
hTable.setWriteBufferSize(writeBufferSize);
return hTable;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public void releaseHTableInterface(HTableInterface table) throws IOException {
table.flushCommits();
super.releaseHTableInterface(table);
}
});
}
public static JMeterHTablePool getinstancePool(Configuration config, int poolSize,
byte[] tableName, final boolean autoFlush, final long writeBufferSize) {
if (instancePool == null) {
synchronized (JMeterHTablePool.class) {
if (instancePool == null) {
System.out.println("Pool instance");
instancePool =
new JMeterHTablePool(config, poolSize, tableName, autoFlush, writeBufferSize);
}
}
}
return instancePool;
}
@SuppressWarnings("deprecation")
public synchronized void flush(String tableName) throws IOException {
HTableInterface hTable = tablePool.getTable(tableName);
try {
hTable.flushCommits();
} finally {
if (hTable != null) {
tablePool.putTable(hTable);
}
}
}
public synchronized void close() throws IOException {
tablePool.close();
}
public synchronized void close(String tableName) throws IOException {
tablePool.closeTablePool(tableName.getBytes());
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/Strings.java | jmeter-hbase-plugins/src/main/java/com/embracesource/hbase/jmeter/Strings.java | package com.embracesource.jmeter.hbase;
public class Strings {
public static String repeat(char ch, int repeat) {
char[] buf = new char[repeat];
for (int i = repeat - 1; i >= 0; i--) {
buf[i] = ch;
}
return new String(buf);
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/test/java/com/embracesource/config/ZkConfigChangeSubscriberImplTest.java | zkpublisher/src/test/java/com/embracesource/config/ZkConfigChangeSubscriberImplTest.java | //package com.embracesource.config;
//
//import java.io.IOException;
//import java.util.List;
//import java.util.concurrent.CountDownLatch;
//import java.util.concurrent.TimeUnit;
//
//import junit.framework.TestCase;
//
//import org.I0Itec.zkclient.ZkClient;
//import org.springframework.context.support.ClassPathXmlApplicationContext;
//
//public class ZkConfigChangeSubscriberImplTest extends TestCase {
// private ZkClient zkClient;
// ConfigChangeSubscriber zkConfig;
//
// public void setUp() {
// ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext(
// "test-spring-config.xml");
// this.zkClient = ((ZkClient) ctx.getBean("zkClient"));
// this.zkConfig = ((ConfigChangeSubscriber) ctx
// .getBean("configChangeSubscriber"));
// ZkUtils.mkPaths(this.zkClient, "/zkSample/conf");
// if (!this.zkClient.exists("/zkSample/conf/test1.properties"))
// this.zkClient.createPersistent("/zkSample/conf/test1.properties");
//
// if (!this.zkClient.exists("/zkSample/conf/test2.properties"))
// this.zkClient.createPersistent("/zkSample/conf/test2.properties");
// }
//
// public void testSubscribe() throws IOException, InterruptedException {
// final CountDownLatch latch = new CountDownLatch(1);
// this.zkConfig.subscribe("test1.properties", new ConfigChangeListener() {
// public void configChanged(String key, String value) {
// System.out.println("test1接收到数据变更通知: key=" + key + ", value="
// + value);
// latch.countDown();
// }
// });
// this.zkClient.writeData("/zkSample/conf/test1.properties", "aa=1");
// boolean notified = latch.await(30L, TimeUnit.SECONDS);
// if (!notified)
// fail("客户端没有收到变更通知");
// }
//
// public void testA() throws InterruptedException {
// List<String> list = this.zkClient.getChildren("/zkSample/conf");
// for (String s : list) {
// System.out.println("children:" + s);
// }
//
// }
//
// public void testB() throws InterruptedException {
// this.zkClient.writeData("/zkSample/conf/test2.properties", "test=123");
// System.out.println(this.zkClient
// .readData("/zkSample/conf/test2.properties"));
//
// }
//
// public void tearDown() {
// this.zkClient.delete("/zkSample/conf/test1.properties");
// this.zkClient.delete("/zkSample/conf/test2.properties");
// }
//} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/test/java/com/embracesource/config/DynamicPropertiesHelperTest.java | zkpublisher/src/test/java/com/embracesource/config/DynamicPropertiesHelperTest.java | //package com.embracesource.config;
//
//import java.util.concurrent.CountDownLatch;
//import java.util.concurrent.TimeUnit;
//
//import junit.framework.TestCase;
//
//import org.I0Itec.zkclient.ZkClient;
//import org.springframework.context.support.ClassPathXmlApplicationContext;
//
//public class DynamicPropertiesHelperTest extends TestCase {
// private DynamicPropertiesHelperFactory helperFactory;
// private ZkClient zkClient;
//
// protected void setUp() throws Exception {
// super.setUp();
// ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext(
// "test-spring-config.xml");
// this.helperFactory = ((DynamicPropertiesHelperFactory) ctx
// .getBean(DynamicPropertiesHelperFactory.class));
// this.zkClient = ((ZkClient) ctx.getBean(ZkClient.class));
//
// if (!this.zkClient.exists("/zkSample/conf/test.properties"))
// this.zkClient.createPersistent("/zkSample/conf/test.properties");
// }
//
// public void testRegisterListener() throws InterruptedException {
// DynamicPropertiesHelper helper = this.helperFactory
// .getHelper("test.properties");
// final CountDownLatch latch = new CountDownLatch(1);
// helper.registerListener("listener",
// new DynamicPropertiesHelper.PropertyChangeListener() {
// public void propertyChanged(String oldValue, String newValue) {
// System.out.println("property chaged: oldValue="
// + oldValue + ", newValue=" + newValue);
// latch.countDown();
// }
// });
// this.zkClient.writeData("/zkSample/conf/test.properties", "test=123");
// try {
// if (!latch.await(5L, TimeUnit.SECONDS))
// System.out.println("no property changed event fired in 5 seconds.");
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
// assertEquals(helper.getProperty("test"), "123");
// }
//
// public void tearDown() {
// this.zkClient.delete("/zkSample/conf/test.properties");
// }
//} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/test/java/com/embracesource/config/ReadPropertiesTest.java | zkpublisher/src/test/java/com/embracesource/config/ReadPropertiesTest.java | //package com.embracesource.config;
//
//import junit.framework.TestCase;
//
//import org.springframework.context.support.ClassPathXmlApplicationContext;
//
//public class ReadPropertiesTest extends TestCase {
// private DynamicPropertiesHelperFactory helperFactory;
//
// protected void setUp() throws Exception {
// super.setUp();
// ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext(
// "test-spring-config.xml");
// this.helperFactory = ((DynamicPropertiesHelperFactory) ctx
// .getBean(DynamicPropertiesHelperFactory.class));
// }
//
// public void testReadProperties() throws InterruptedException {
// DynamicPropertiesHelper helper = this.helperFactory
// .getHelper("test.properties");
//
// while (true) {
// System.out.println(helper.getProperty("a"));
// Thread.sleep(5000L);
// }
//
// }
//}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/main/java/com/embracesource/config/ZkUtils.java | zkpublisher/src/main/java/com/embracesource/config/ZkUtils.java | package com.embracesource.config;
import java.io.UnsupportedEncodingException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.exception.ZkMarshallingError;
import org.I0Itec.zkclient.serialize.ZkSerializer;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
public class ZkUtils {
private static final Logger logger = Logger.getLogger(ZkUtils.class);
public static String getZkPath(String rootNode, String key) {
if (!StringUtils.isEmpty(rootNode)) {
if (key.startsWith("/")) {
key = key.substring(1);
}
if (rootNode.endsWith("/")) {
return rootNode + key;
}
return rootNode + "/" + key;
}
return key;
}
public static void mkPaths(ZkClient client, String path) {
String[] subs = path.split("\\/");
if (subs.length < 2) {
return;
}
String curPath = "";
for (int i = 1; i < subs.length; i++) {
curPath = curPath + "/" + subs[i];
if (!client.exists(curPath)) {
if (logger.isDebugEnabled()) {
logger.debug("Trying to create zk node: " + curPath);
}
client.createPersistent(curPath);
if (logger.isDebugEnabled())
logger.debug("Zk node created successfully: " + curPath);
}
}
}
public static String formatAsMonthDate(Date requestTime) {
return new SimpleDateFormat("MMdd").format(requestTime);
}
public static class StringSerializer implements ZkSerializer {
private String encoding;
public StringSerializer(String encoding) {
this.encoding = encoding;
}
public Object deserialize(byte[] abyte0) throws ZkMarshallingError {
try {
return new String(abyte0, this.encoding);
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
public byte[] serialize(Object obj) throws ZkMarshallingError {
if (obj == null) {
return null;
}
if (!(obj instanceof String)) {
throw new ZkMarshallingError(
"The input obj must be an instance of String.");
}
try {
return ((String) obj).getBytes(this.encoding);
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/main/java/com/embracesource/config/ZkConfigSaver.java | zkpublisher/src/main/java/com/embracesource/config/ZkConfigSaver.java | package com.embracesource.config;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.List;
import java.util.Properties;
import org.I0Itec.zkclient.ZkClient;
import org.apache.commons.io.FileUtils;
/**
 * Downloads every config znode under ZK_CONFIG_ROOTNODE and saves each one as
 * a local file (the counterpart to ZkConfigPublisher).
 *
 * @author june
 *
 */
public class ZkConfigSaver {
// Encoding used when writing the downloaded config files to disk.
public static final String CONF_ENCODING = "UTF-8";
// Defaults below are overwritten from zkpublisher.properties by
// loadProperties() at startup.
public static String ZK_CONFIG_ROOTNODE = "/zkSample/conf";
public static String ZK_CONF_ENCODING = "UTF-8";
public static int ZK_TIMEOUT = 30000;
public static String ZK_ADDRESS = "";
/**
 * Loads ZK connection settings from the classpath resource
 * /zkpublisher.properties into the static fields above.
 *
 * @throws RuntimeException if the resource is missing or unreadable
 */
private static void loadProperties() {
    // NOTE(review): resolves via ZkConfigPublisher.class; any class in the
    // same classloader works for an absolute resource path.
    InputStream is = ZkConfigPublisher.class
            .getResourceAsStream("/zkpublisher.properties");
    if (is == null) {
        // Fixed: the message previously named config.properties, but the
        // resource actually loaded is zkpublisher.properties (matches
        // ZkConfigPublisher's message).
        throw new RuntimeException("找不到zkpublisher.properties资源文件.");
    }
    Properties props = new Properties();
    try {
        props.load(new BufferedReader(new InputStreamReader(is, "UTF-8")));
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    ZK_CONFIG_ROOTNODE = props.getProperty("ZK_CONFIG_ROOTNODE");
    ZK_CONF_ENCODING = props.getProperty("ZK_CONF_ENCODING");
    ZK_TIMEOUT = Integer.parseInt(props.getProperty("ZK_TIMEOUT"));
    ZK_ADDRESS = props.getProperty("ZK_ADDRESS");
}
/**
 * Entry point: args[0] is the output directory into which each config znode
 * is saved as a file.
 */
public static void main(String[] args) {
    if (args == null || args.length < 1) {
        throw new RuntimeException("需要指定输出目录名");
    }
    loadProperties();
    ZkClient client = new ZkClient(ZK_ADDRESS, ZK_TIMEOUT);
    client.setZkSerializer(new ZkUtils.StringSerializer(ZK_CONF_ENCODING));
    File outputDir = new File(args[0]);
    outputDir.mkdirs();
    saveConfigs(client, ZK_CONFIG_ROOTNODE, outputDir);
}
/**
 * Writes each child znode of rootNode to a same-named file under confDir;
 * node data is saved as UTF-8 text. Write failures are printed but do not
 * abort the remaining downloads.
 */
private static void saveConfigs(ZkClient client, String rootNode,
        File confDir) {
    for (String config : client.getChildren(rootNode)) {
        String data = (String) client.readData(rootNode + "/" + config);
        File target = new File(confDir, config);
        try {
            FileUtils.writeStringToFile(target, data, "UTF-8");
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println("配置成功保存到本地: " + target.getAbsolutePath());
    }
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/main/java/com/embracesource/config/DynamicPropertiesSpringConfigurer.java | zkpublisher/src/main/java/com/embracesource/config/DynamicPropertiesSpringConfigurer.java | package com.embracesource.config;
import java.util.Enumeration;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
/**
 * Spring PropertyPlaceholderConfigurer that merges dynamically-resolved
 * properties (obtained via DynamicPropertiesHelperFactory) into the
 * placeholder properties before standard processing runs.
 */
public class DynamicPropertiesSpringConfigurer extends
PropertyPlaceholderConfigurer {
private static final Logger logger = Logger
.getLogger(DynamicPropertiesSpringConfigurer.class);
// Resolves a properties key to a DynamicPropertiesHelper; injected.
private DynamicPropertiesHelperFactory helperFactory;
// Keys of the dynamic property sets to merge in; injected.
private String[] propertiesKeys;
// Injected by Spring configuration.
public void setHelperFactory(DynamicPropertiesHelperFactory helperFactory) {
this.helperFactory = helperFactory;
}
// Injected by Spring configuration.
public void setPropertiesKeys(String[] propertiesKeys) {
this.propertiesKeys = propertiesKeys;
}
/**
 * Adds every key/value of each configured dynamic property set into the
 * placeholder properties (dynamic values override duplicates collected from
 * files), then delegates to the standard Spring processing.
 *
 * @param beanFactoryToProcess bean factory whose placeholders are resolved
 * @param props properties collected so far; dynamic entries are added here
 */
@Override
protected void processProperties(
        ConfigurableListableBeanFactory beanFactoryToProcess,
        Properties props) throws BeansException {
    if (this.propertiesKeys != null) {
        for (String propsKey : this.propertiesKeys) {
            DynamicPropertiesHelper helper = this.helperFactory
                    .getHelper(propsKey);
            if (helper != null) {
                // Enumeration is already typed String; no cast needed.
                Enumeration<String> keys = helper.getPropertyKeys();
                while (keys.hasMoreElements()) {
                    String key = keys.nextElement();
                    props.put(key, helper.getProperty(key));
                }
            } else {
                logger.warn("配置不存在: " + propsKey);
            }
        }
    }
    super.processProperties(beanFactoryToProcess, props);
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/main/java/com/embracesource/config/ConfigChangeSubscriber.java | zkpublisher/src/main/java/com/embracesource/config/ConfigChangeSubscriber.java | package com.embracesource.config;
import java.util.List;
/**
 * Subscriber for configuration changes: supports one listener per zk config
 * node.
 *
 * @author june
 *
 */
public interface ConfigChangeSubscriber {

    /** Returns the current value of the given config key (initial snapshot). */
    String getInitValue(String key);

    /** Registers a listener notified when the given key's value changes. */
    void subscribe(String key, ConfigChangeListener listener);

    /** Lists all available config keys. */
    List<String> listKeys();
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/main/java/com/embracesource/config/DynamicPropertiesHelperFactory.java | zkpublisher/src/main/java/com/embracesource/config/DynamicPropertiesHelperFactory.java | package com.embracesource.config;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Factory for DynamicPropertiesHelper instances; when a helper is created it
 * is subscribed to zk data-change events so it refreshes automatically.
 *
 * @author june
 *
 */
public class DynamicPropertiesHelperFactory {
// Source of config keys/values and change notifications.
private ConfigChangeSubscriber configChangeSubscriber;
// Cache of helpers, one per config key.
private ConcurrentHashMap<String, DynamicPropertiesHelper> helpers = new ConcurrentHashMap<String, DynamicPropertiesHelper>();
public DynamicPropertiesHelperFactory(
ConfigChangeSubscriber configChangeSubscriber) {
this.configChangeSubscriber = configChangeSubscriber;
}
/**
 * Returns the cached helper for the key, creating (and subscribing) one on
 * first use. May return null if the key is unknown to the subscriber.
 */
public DynamicPropertiesHelper getHelper(String key) {
    DynamicPropertiesHelper cached = this.helpers.get(key);
    return cached != null ? cached : createHelper(key);
}
/**
 * Creates and caches a helper for the given key and subscribes it to zk data
 * changes so it refreshes on updates.
 *
 * @param key name of a config node in zk
 * @return the helper, or null if the subscriber does not know the key
 */
private DynamicPropertiesHelper createHelper(String key) {
List<String> keys = this.configChangeSubscriber.listKeys();
if ((keys == null) || (keys.size() == 0)) {
return null;
}
if (!keys.contains(key)) {
return null;
}
String initValue = this.configChangeSubscriber.getInitValue(key);
final DynamicPropertiesHelper helper = new DynamicPropertiesHelper(initValue);
// putIfAbsent resolves a creation race: if another thread registered a
// helper first, reuse it and skip the subscription below.
DynamicPropertiesHelper old = (DynamicPropertiesHelper) this.helpers
.putIfAbsent(key, helper);
if (old != null) {
return old;
}
/**
 * Subscribe to zk data changes so the helper is refreshed on updates.
 */
this.configChangeSubscriber.subscribe(key, new ConfigChangeListener() {
public void configChanged(String key, String value) {
helper.refresh(value);
}
});
return helper;
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/main/java/com/embracesource/config/ConfigChangeListener.java | zkpublisher/src/main/java/com/embracesource/config/ConfigChangeListener.java | package com.embracesource.config;
/**
 * Listener notified when a configuration value changes.
 *
 * @author june
 *
 */
public interface ConfigChangeListener {

    /** Invoked with the config key and its new value after a change. */
    void configChanged(String key, String value);
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/main/java/com/embracesource/config/DynamicPropertiesHelper.java | zkpublisher/src/main/java/com/embracesource/config/DynamicPropertiesHelper.java | package com.embracesource.config;
import java.io.IOException;
import java.io.StringReader;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
/**
 * Holds one dynamic properties set; refresh() reparses the payload and fires
 * change listeners only for values that actually differ.
 *
 * @author june
 *
 */
public class DynamicPropertiesHelper {
// Current key/value snapshot. ConcurrentHashMap: no null keys or values.
private ConcurrentHashMap<String, String> properties = new ConcurrentHashMap<String, String>();
// Change listeners registered per property key.
private ConcurrentHashMap<String, List<PropertyChangeListener>> propListeners = new ConcurrentHashMap<String, List<PropertyChangeListener>>();
/**
 * Seeds the property map from an initial java.util.Properties-format payload
 * (null/unparseable input yields an empty map).
 */
public DynamicPropertiesHelper(String initValue) {
    Properties initial = parse(initValue);
    for (Map.Entry<Object, Object> entry : initial.entrySet()) {
        this.properties.put((String) entry.getKey(), (String) entry.getValue());
    }
}
/**
 * Parses a java.util.Properties-format string. Returns empty Properties for
 * null/empty input; a parse failure also degrades to "no properties" (the
 * IOException is deliberately ignored -- StringReader cannot actually fail).
 */
private Properties parse(String value) {
    Properties props = new Properties();
    if (value == null || value.length() == 0) {
        // Equivalent to the former StringUtils.isEmpty check, without the
        // commons-lang dependency for this method.
        return props;
    }
    try {
        props.load(new StringReader(value));
    } catch (IOException ignored) {
        // Best-effort by design: malformed payload -> empty properties.
    }
    return props;
}
/**
 * Replaces/updates values from the given payload; listeners fire only for
 * keys whose value changed. Synchronized so concurrent refreshes apply their
 * entries atomically with respect to each other.
 */
public synchronized void refresh(String propertiesAsStr) {
    for (Map.Entry<Object, Object> entry : parse(propertiesAsStr).entrySet()) {
        setValue((String) entry.getKey(), (String) entry.getValue());
    }
}
/**
 * Stores the new value and fires listeners if it differs (null-safely) from
 * the previous one.
 */
private void setValue(String key, String newValue) {
    // put() returns the previous mapping, replacing the separate get().
    String oldValue = this.properties.put(key, newValue);
    boolean unchanged =
            (oldValue == null) ? (newValue == null) : oldValue.equals(newValue);
    if (!unchanged) {
        firePropertyChanged(key, oldValue, newValue);
    }
}
// True if a value is currently present for the key.
public boolean containsProperty(String key) {
return this.properties.containsKey(key);
}
// Returns the current value, or null if the key is absent.
public String getProperty(String key) {
return (String) this.properties.get(key);
}
/**
 * Returns the value for the key, or defaultValue when absent. Single lookup:
 * ConcurrentHashMap never stores null values, so a null result means absent
 * (the old containsProperty + double get was redundant).
 */
public String getProperty(String key, String defaultValue) {
    String value = this.properties.get(key);
    return value != null ? value : defaultValue;
}
/**
 * Returns the value parsed as Boolean, or defaultValue when the key is
 * absent. Single lookup (null value implies absent in ConcurrentHashMap).
 */
public Boolean getBooleanProperty(String key, Boolean defaultValue) {
    String value = this.properties.get(key);
    return value != null ? Boolean.valueOf(value) : defaultValue;
}
/**
 * Returns the value parsed as Integer, or defaultValue when the key is
 * absent or unparseable (Integer.parseInt(null) also throws
 * NumberFormatException, so a missing key falls back too).
 */
public Integer getIntProperty(String key, Integer defaultValue) {
    try {
        return Integer.parseInt(this.properties.get(key));
    } catch (NumberFormatException e) {
        return defaultValue;
    }
}
/**
 * Returns the value parsed as Long, or defaultValue when the key is absent
 * or unparseable (Long.parseLong(null) also throws NumberFormatException).
 */
public Long getLongProperty(String key, Long defaultValue) {
    try {
        return Long.parseLong(this.properties.get(key));
    } catch (NumberFormatException e) {
        return defaultValue;
    }
}
/**
 * Returns the value parsed as Double, or defaultValue when the key is absent
 * or unparseable.
 *
 * Fixed: unlike parseInt/parseLong, Double.parseDouble(null) throws
 * NullPointerException (not NumberFormatException), so a missing key used to
 * crash instead of returning the default.
 */
public Double getDoubleProperty(String key, Double defaultValue) {
    String value = this.properties.get(key);
    if (value == null) {
        return defaultValue;
    }
    try {
        return Double.parseDouble(value);
    } catch (NumberFormatException e) {
        return defaultValue;
    }
}
// Enumeration over the currently known property keys (weakly consistent).
public Enumeration<String> getPropertyKeys() {
return this.properties.keys();
}
/**
 * Registers a listener to be notified when the property with the given key
 * changes value.
 *
 * @param key the property key to watch (not a listener name)
 * @param listener the callback to invoke on change
 */
public void registerListener(String key, PropertyChangeListener listener) {
    List<PropertyChangeListener> fresh =
            new CopyOnWriteArrayList<PropertyChangeListener>();
    List<PropertyChangeListener> existing =
            this.propListeners.putIfAbsent(key, fresh);
    (existing == null ? fresh : existing).add(listener);
}
/**
 * Notifies every listener registered for the key with (oldValue, newValue).
 * No-op when nothing is registered.
 */
private void firePropertyChanged(String key, String oldValue,
        String newValue) {
    List<PropertyChangeListener> registered = this.propListeners.get(key);
    if (registered == null || registered.isEmpty()) {
        return;
    }
    for (PropertyChangeListener listener : registered) {
        listener.propertyChanged(oldValue, newValue);
    }
}
/**
 * Callback invoked with (oldValue, newValue) when a property changes.
 * (Nested interfaces are implicitly static; redundant modifiers removed.)
 */
public interface PropertyChangeListener {
    void propertyChanged(String oldValue, String newValue);
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/main/java/com/embracesource/config/ZkConfigPublisher.java | zkpublisher/src/main/java/com/embracesource/config/ZkConfigPublisher.java | package com.embracesource.config;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.Properties;
import org.I0Itec.zkclient.ZkClient;
import org.apache.commons.io.FileUtils;
/**
 * Publishes every file in the local conf directory to ZooKeeper under
 * ZK_CONFIG_ROOTNODE (the counterpart to ZkConfigSaver).
 *
 * @author june
 *
 */
public class ZkConfigPublisher {
// Local directory whose files are published.
public static String CONF_DIR = "conf";
// Encoding used when reading the local config files.
public static final String CONF_ENCODING = "UTF-8";
// Defaults below are overwritten from zkpublisher.properties by
// loadProperties() at startup.
public static String ZK_CONFIG_ROOTNODE = "/zkSample/conf";
public static String ZK_CONF_ENCODING = "UTF-8";
public static int ZK_TIMEOUT = 30000;
public static String ZK_ADDRESS = "";
/**
 * Loads ZK connection settings from the classpath resource
 * /zkpublisher.properties into the static fields above.
 *
 * @throws RuntimeException if the resource is missing or unreadable
 */
private static final void loadProperties() {
    InputStream stream = ZkConfigPublisher.class
            .getResourceAsStream("/zkpublisher.properties");
    if (stream == null) {
        throw new RuntimeException("找不到zkpublisher.properties资源文件.");
    }
    Properties loaded = new Properties();
    try {
        loaded.load(new BufferedReader(new InputStreamReader(stream, "UTF-8")));
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    ZK_CONFIG_ROOTNODE = loaded.getProperty("ZK_CONFIG_ROOTNODE");
    ZK_CONF_ENCODING = loaded.getProperty("ZK_CONF_ENCODING");
    ZK_TIMEOUT = Integer.parseInt(loaded.getProperty("ZK_TIMEOUT"));
    ZK_ADDRESS = loaded.getProperty("ZK_ADDRESS");
}
/**
 * Entry point: publishes the contents of the local CONF_DIR directory to
 * ZooKeeper; exits with status 1 when the directory is missing or invalid.
 */
public static void main(String[] args) {
    loadProperties();
    ZkClient client = new ZkClient(ZK_ADDRESS, ZK_TIMEOUT);
    client.setZkSerializer(new ZkUtils.StringSerializer(ZK_CONF_ENCODING));
    File confDir = new File(CONF_DIR);
    boolean usable = confDir.exists() && confDir.isDirectory();
    if (!usable) {
        System.err.println("错误: 配置目录" + confDir + "不存在或非法! ");
        System.exit(1);
    }
    publishConfigs(client, ZK_CONFIG_ROOTNODE, confDir);
}
/**
 * Publishes every regular file in confDir as a child znode of rootNode; node
 * data is the file content read as UTF-8. Prints a per-file status line and
 * a final success/failure summary.
 *
 * @param client connected zk client (String serializer already installed)
 * @param rootNode parent znode under which configs are published
 * @param confDir local directory containing the config files
 */
private static void publishConfigs(ZkClient client, String rootNode,
        File confDir) {
    File[] confs = confDir.listFiles();
    if (confs == null) {
        // Fixed: File.listFiles() returns null on I/O error (or when confDir
        // is not a directory), which previously caused a NullPointerException.
        confs = new File[0];
    }
    int success = 0;
    int failed = 0;
    for (File conf : confs) {
        if (!conf.isFile()) {
            continue;
        }
        String name = conf.getName();
        String path = ZkUtils.getZkPath(rootNode, name);
        ZkUtils.mkPaths(client, path);
        String content;
        try {
            content = FileUtils.readFileToString(conf, "UTF-8");
        } catch (IOException e) {
            System.err.println("错误: 读取文件内容时遇到异常:" + e.getMessage());
            failed++;
            continue;
        }
        // Deduplicated create/update: only the success message differs.
        boolean existed = client.exists(path);
        try {
            if (!existed) {
                client.createPersistent(path);
            }
            client.writeData(path, content);
        } catch (Throwable e) {
            System.err.println("错误: 尝试发布配置失败: " + e.getMessage());
            failed++;
            continue;
        }
        if (existed) {
            System.out.println("提示: 已经成功将配置文件" + conf + "内容更新到ZK配置" + path);
        } else {
            System.out.println("提示: 已经成功将配置文件" + conf + "内容发布为新的ZK配置"
                    + path);
        }
        success++;
    }
    System.out.println("提示: 完成配置发布,成功" + success + ",失败" + failed + "。");
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/zkpublisher/src/main/java/com/embracesource/config/ZkConfigChangeSubscriberImpl.java | zkpublisher/src/main/java/com/embracesource/config/ZkConfigChangeSubscriberImpl.java | package com.embracesource.config;
import java.util.List;
import org.I0Itec.zkclient.IZkDataListener;
import org.I0Itec.zkclient.ZkClient;
import org.apache.commons.lang.StringUtils;
/**
 * ConfigChangeSubscriber implementation backed by ZooKeeper: when a config
 * znode's data changes, the registered ConfigChangeListener is invoked.
 *
 * @author june
 *
 */
public class ZkConfigChangeSubscriberImpl implements ConfigChangeSubscriber {
private ZkClient zkClient;
// Parent znode under which all config nodes live (may be empty).
private String rootNode;
public ZkConfigChangeSubscriberImpl(ZkClient zkClient, String rootNode) {
this.rootNode = rootNode;
this.zkClient = zkClient;
}
/**
 * Subscribes a data-change watch on the config node for the given key; the
 * node must already exist (configs have to be published first).
 */
public void subscribe(String key, ConfigChangeListener listener) {
    String path = ZkUtils.getZkPath(this.rootNode, key);
    if (!this.zkClient.exists(path)) {
        throw new RuntimeException(
                "配置("
                        + path
                        + ")不存在, 必须先定义配置才能监听配置的变化, 请检查配置的key是否正确, 如果确认配置key正确, 那么需要保证先使用配置发布命令发布配置! ");
    }
    // Adapt zkclient's IZkDataListener callbacks to the ConfigChangeListener.
    DataListenerAdapter adapter = new DataListenerAdapter(listener);
    this.zkClient.subscribeDataChanges(path, adapter);
}
/**
 * Translates a full znode path back to its config key and notifies the
 * listener.
 *
 * @param path the znode path that changed
 * @param value the new node data (NOTE(review): presumably may be null on
 *          some callbacks -- confirm against IZkDataListener semantics)
 * @param configListener the listener to notify
 */
private void fireConfigChanged(String path, String value,
ConfigChangeListener configListener) {
configListener.configChanged(getKey(path), value);
}
/**
 * Strips the rootNode prefix (and a leading slash) from a znode path to
 * recover the config key.
 *
 * Fixed: previously used String.replaceFirst, which interprets rootNode as a
 * regular expression and replaces a match anywhere in the path; now the
 * literal prefix is stripped instead.
 */
private String getKey(String path) {
    String key = path;
    if (!StringUtils.isEmpty(this.rootNode) && path.startsWith(this.rootNode)) {
        key = path.substring(this.rootNode.length());
        if (key.startsWith("/")) {
            key = key.substring(1);
        }
    }
    return key;
}
// Reads the node's current data as the initial value for the key.
public String getInitValue(String key) {
String path = ZkUtils.getZkPath(this.rootNode, key);
return (String) this.zkClient.readData(path);
}
// Config keys are the child znode names directly under rootNode.
public List<String> listKeys() {
return this.zkClient.getChildren(this.rootNode);
}
/**
 * Adapts zkclient's IZkDataListener to ConfigChangeListener: data changes
 * are forwarded; deletions are ignored.
 *
 * @author june
 *
 */
private class DataListenerAdapter implements IZkDataListener {
private ConfigChangeListener configListener;
public DataListenerAdapter(ConfigChangeListener configListener) {
this.configListener = configListener;
}
public void handleDataChange(String s, Object obj) throws Exception {
ZkConfigChangeSubscriberImpl.this.fireConfigChanged(s,
(String) obj, this.configListener);
}
// Deliberately a no-op: node deletion does not notify the config listener.
public void handleDataDeleted(String s) throws Exception {
}
}
} | java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/cassandra/CassandraConnection.java | kettle-cassandra-plugin/src/org/pentaho/cassandra/CassandraConnection.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.cassandra;
import java.util.HashMap;
import java.util.Map;
import org.apache.cassandra.thrift.AuthenticationRequest;
import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.KsDef;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.pentaho.di.core.Const;
/**
* Class for establishing a connection with Cassandra. Encapsulates
* the transport and Cassandra client object.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision; $
*/
public class CassandraConnection {
// Thrift transport; null after close().
private TTransport m_transport;
// Thrift client; null after close().
protected Cassandra.Client m_client;
// Currently selected keyspace; null until setKeyspace() succeeds.
protected String m_keyspaceName;
/**
 * Construct a CassandraConnection with no authentication and no socket
 * timeout (delegates to the full constructor with timeout -1).
 *
 * @param host the host to connect to
 * @param port the port to use
 * @throws Exception if the connection fails
 */
public CassandraConnection(String host, int port)
throws Exception {
this (host, port, null, null, -1);
}
/**
 * Construct a CassandraConnection with no authentication and the supplied
 * socket timeout (delegates to the full constructor).
 *
 * @param host the host to connect to
 * @param port the port to use
 * @param timeout the socket timeout to use in milliseconds (ignored if <= 0)
 * @throws Exception if the connection fails
 */
public CassandraConnection(String host, int port, int timeout)
throws Exception {
this (host, port, null, null, timeout);
}
/**
 * Construct a CassandraConnection with optional authentication.
 *
 * @param host the host to connect to
 * @param port the port to use
 * @param username the username to authenticate with (may be null
 * for no authentication)
 * @param password the password to authenticate with (may be null
 * for no authentication)
 * @param timeout the socket timeout in milliseconds (ignored if <= 0)
 * @throws Exception if the connection or authentication fails
 */
public CassandraConnection(String host, int port,
        String username, String password, int timeout) throws Exception {
    TSocket socket = new TSocket(host, port);
    if (timeout > 0) {
        socket.setTimeout(timeout);
    }
    m_transport = new TFramedTransport(socket);
    TProtocol protocol = new TBinaryProtocol(m_transport);
    m_client = new Cassandra.Client(protocol);
    m_transport.open();
    if (!Const.isEmpty(username) && !Const.isEmpty(password)) {
        Map<String, String> creds = new HashMap<String, String>();
        creds.put("username", username);
        creds.put("password", password);
        try {
            m_client.login(new AuthenticationRequest(creds));
        } catch (Exception e) {
            // Fixed: don't leak the open transport when authentication fails.
            m_transport.close();
            throw e;
        }
    }
}
/**
 * Get the encapsulated Cassandra.Client object (null after close()).
 *
 * @return the encapsulated Cassandra.Client object
 */
public Cassandra.Client getClient() {
return m_client;
}
/**
 * Get a keyspace definition for the currently-set keyspace.
 *
 * @return a keyspace definition
 * @throws Exception if no keyspace has been set (via setKeyspace())
 */
public KsDef describeKeyspace() throws Exception {
    if (m_keyspaceName != null && m_keyspaceName.length() > 0) {
        return m_client.describe_keyspace(m_keyspaceName);
    }
    throw new Exception("No keyspace has been set!");
}
/**
 * Closes the underlying transport and nulls both transport and client;
 * safe to call more than once.
 */
public void close() {
    if (m_transport == null) {
        return;
    }
    m_transport.close();
    m_transport = null;
    m_client = null;
}
/**
 * Verifies the connection is still usable.
 *
 * Fixed: the old check used &&, so "closed" was reported only when BOTH
 * fields were null; the connection is unusable if either the transport or
 * the client is gone. (close() nulls both together, so behavior differs only
 * in partially-initialized states.)
 *
 * @throws Exception if the connection has been closed
 */
private void checkOpen() throws Exception {
    if (m_transport == null || m_client == null) {
        throw new Exception("Connection is closed!");
    }
}
/**
 * Set the Cassandra keyspace (database) to use. Requires an open connection;
 * m_keyspaceName is updated only after the server accepts the keyspace.
 *
 * @param keySpace the name of the keyspace to use
 * @throws Exception if the connection is closed or the keyspace doesn't exist
 */
public void setKeyspace(String keySpace) throws Exception {
checkOpen();
m_client.set_keyspace(keySpace);
m_keyspaceName = keySpace;
}
/**
 * Get the column family meta data for the supplied column family (table)
 * name. A keyspace must have been set first.
 *
 * @param colFam the name of the column family to get meta data for
 * @return the column family meta data
 * @throws Exception if no keyspace is set or meta data cannot be fetched
 */
public CassandraColumnMetaData getColumnFamilyMetaData(String colFam)
        throws Exception {
    if (m_keyspaceName == null) {
        // Fixed typo in the error message ("keypsace" -> "keyspace").
        throw new Exception("No keyspace set!");
    }
    return new CassandraColumnMetaData(this, colFam);
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/cassandra/CassandraColumnMetaData.java | kettle-cassandra-plugin/src/org/pentaho/cassandra/CassandraColumnMetaData.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.cassandra;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.AsciiType;
import org.apache.cassandra.db.marshal.BooleanType;
import org.apache.cassandra.db.marshal.CompositeType;
import org.apache.cassandra.db.marshal.DateType;
import org.apache.cassandra.db.marshal.DecimalType;
import org.apache.cassandra.db.marshal.DoubleType;
import org.apache.cassandra.db.marshal.DynamicCompositeType;
import org.apache.cassandra.db.marshal.FloatType;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.IntegerType;
import org.apache.cassandra.db.marshal.LexicalUUIDType;
import org.apache.cassandra.db.marshal.LongType;
import org.apache.cassandra.db.marshal.TypeParser;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.db.marshal.UUIDType;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.SyntaxException;
import org.apache.cassandra.thrift.CfDef;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnDef;
import org.apache.cassandra.thrift.CqlRow;
import org.apache.cassandra.thrift.KeySlice;
import org.apache.cassandra.thrift.KsDef;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
/**
* Class encapsulating read-only schema information for a column family. Has
* utility routines for converting between Cassandra meta data and Kettle meta
* data, and for deserializing values.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision$
*/
public class CassandraColumnMetaData {
// Message-bundle anchor class for BaseMessages lookups.
protected static final Class<?> PKG = CassandraColumnMetaData.class;
public static final String UTF8 = "UTF-8";
/** Name of the column family this meta data refers to */
protected String m_columnFamilyName; // can be used as the key name
/** Type of the key */
protected String m_keyValidator; // name of the class for key validation
/** Type of the column names (used for sorting columns) */
protected String m_columnComparator; // name of the class for sorting column
// names
/** m_columnComparator converted to Charset encoding string */
protected String m_columnNameEncoding;
/**
 * Default validator for the column family (table) - we can use this as the
 * type for any columns specified in a SELECT clause which *aren't* in the
 * meta data
 */
protected String m_defaultValidationClass;
/** Map of column names/types, populated by refresh() */
protected Map<String, String> m_columnMeta;
/** Map of column names to indexed ("legal") values, if any */
protected Map<String, HashSet<Object>> m_indexedVals;
/** Holds the schema textual description built by refresh() */
protected StringBuffer m_schemaDescription;
/**
 * Constructor. Immediately fetches the column family's meta data via
 * refresh().
 *
 * @param conn connection to cassandra
 * @param columnFamily the name of the column family to maintain meta data
 * for.
 * @throws Exception if a problem occurs during connection or when fetching
 * meta data
 */
public CassandraColumnMetaData(CassandraConnection conn, String columnFamily)
throws Exception {
m_columnFamilyName = columnFamily;
refresh(conn);
}
// Default column validator class name for this column family.
public String getDefaultValidationClass() {
return m_defaultValidationClass;
}
/**
 * Refreshes the encapsulated meta data for the column family.
 *
 * @param conn the connection to cassandra to use for refreshing the meta data
 * @throws Exception if a problem occurs during connection or when fetching
 * meta data
 */
public void refresh(CassandraConnection conn) throws Exception {
m_schemaDescription = new StringBuffer();
// column families
KsDef keySpace = conn.describeKeyspace();
List<CfDef> colFams = null;
if (keySpace != null) {
colFams = keySpace.getCf_defs();
} else {
throw new Exception(BaseMessages.getString(PKG,
"CassandraColumnMetaData.Error.UnableToGetMetaDataForKeyspace",
conn.m_keyspaceName));
}
// look for the requested column family
CfDef colDefs = null;
for (CfDef fam : colFams) {
String columnFamilyName = fam.getName(); // table name
if (columnFamilyName.equals(m_columnFamilyName)) {
// Found the target column family: record its validators and build the
// human-readable schema description as we go.
m_schemaDescription.append("Column family: " + m_columnFamilyName);
m_keyValidator = fam.getKey_validation_class(); // key type
m_columnComparator = fam.getComparator_type(); // column names encoded
// as
m_defaultValidationClass = fam.getDefault_validation_class(); // default
// column
// type
m_schemaDescription.append("\n\tKey validator: " + m_keyValidator);
m_schemaDescription.append("\n\tColumn comparator: "
+ m_columnComparator);
m_schemaDescription.append("\n\tDefault column validator: "
+ m_defaultValidationClass);
/*
 * m_schemaDescription.append("\n\tDefault column validator: " +
 * m_defaultValidationClass
 * .substring(m_defaultValidationClass.lastIndexOf(".")+1,
 * m_defaultValidationClass.length()));
 */
// these seem to have disappeared between 0.8.6 and 1.0.0!
/*
 * m_schemaDescription.append("\n\tMemtable operations: " +
 * fam.getMemtable_operations_in_millions());
 * m_schemaDescription.append("\n\tMemtable throughput: " +
 * fam.getMemtable_throughput_in_mb());
 * m_schemaDescription.append("\n\tMemtable flush after: " +
 * fam.getMemtable_flush_after_mins());
 */
// these have disappeared between 1.0.8 and 1.1.0!!
// m_schemaDescription.append("\n\tRows cached: " +
// fam.getRow_cache_size());
// m_schemaDescription.append("\n\tRow cache save period: " +
// fam.getRow_cache_save_period_in_seconds());
// m_schemaDescription.append("\n\tKeys cached: " +
// fam.getKey_cache_size());
// m_schemaDescription.append("\n\tKey cached save period: " +
// fam.getKey_cache_save_period_in_seconds());
m_schemaDescription.append("\n\tRead repair chance: "
+ fam.getRead_repair_chance());
m_schemaDescription
.append("\n\tGC grace: " + fam.getGc_grace_seconds());
m_schemaDescription.append("\n\tMin compaction threshold: "
+ fam.getMin_compaction_threshold());
m_schemaDescription.append("\n\tMax compaction threshold: "
+ fam.getMax_compaction_threshold());
m_schemaDescription.append("\n\tReplicate on write: "
+ fam.replicate_on_write);
// String rowCacheP = fam.getRow_cache_provider();
m_schemaDescription.append("\n\n\tColumn metadata:");
colDefs = fam;
break;
}
}
if (colDefs == null) {
throw new Exception(BaseMessages.getString(PKG,
"CassandraColumnMetaData.Error.UnableToFindRequestedColumnFamily",
m_columnFamilyName, conn.m_keyspaceName));
}
m_columnNameEncoding = m_columnComparator;
// set up our meta data map
m_columnMeta = new TreeMap<String, String>();
m_indexedVals = new HashMap<String, HashSet<Object>>();
// Indexed ("legal") values, if any, are embedded in the CF comment between
// @@@ markers - see extractIndexedMeta().
String comment = colDefs.getComment();
if (comment != null && comment.length() > 0) {
extractIndexedMeta(comment, m_indexedVals);
}
Iterator<ColumnDef> colMetaData = colDefs.getColumn_metadataIterator();
if (colMetaData != null) {
while (colMetaData.hasNext()) {
ColumnDef currentDef = colMetaData.next();
// Column names are stored as bytes; decode using the comparator type.
ByteBuffer b = ByteBuffer.wrap(currentDef.getName());
String colName = getColumnValue(b, m_columnComparator).toString();
String colType = currentDef.getValidation_class();
m_columnMeta.put(colName, colType);
m_schemaDescription.append("\n\tColumn name: " + colName);
m_schemaDescription.append("\n\t\tColumn validator: " + colType);
String indexName = currentDef.getIndex_name();
if (!Const.isEmpty(indexName)) {
m_schemaDescription.append("\n\t\tIndex name: "
+ currentDef.getIndex_name());
}
if (m_indexedVals.containsKey(colName)) {
HashSet<Object> indexedVals = m_indexedVals.get(colName);
m_schemaDescription.append("\n\t\tLegal values: {");
int count = 0;
for (Object val : indexedVals) {
m_schemaDescription.append(val.toString());
count++;
if (count != indexedVals.size()) {
m_schemaDescription.append(",");
} else {
m_schemaDescription.append("}");
}
}
}
}
}
}
/**
 * Extracts "indexed values" meta data embedded in the column family comment.
 * The expected format is "@@@field1:{v1,v2};field2:{v3}@@@".
 *
 * @param comment the column family comment (may contain an @@@ block)
 * @param indexedVals map to populate with field name -> set of legal values
 */
protected void extractIndexedMeta(String comment,
        Map<String, HashSet<Object>> indexedVals) {
    int first = comment.indexOf("@@@");
    if (first < 0) {
        return;
    }
    int last = comment.lastIndexOf("@@@");
    if (last - first < 3) {
        // Only a single "@@@" marker - nothing between the delimiters.
        return;
    }
    // Take the text between the outermost markers, dropping any stray ones.
    String meta = comment.substring(first + 3, last).replace("@@@", "");
    for (String field : meta.split(";")) {
        String[] parts = field.trim().split(":");
        if (parts.length != 2) {
            continue;
        }
        String fieldName = parts[0].trim();
        String[] vals = parts[1].replace("{", "").replace("}", "").split(",");
        if (vals.length > 0) {
            HashSet<Object> valsSet = new HashSet<Object>();
            for (String aVal : vals) {
                valsSet.add(aVal.trim());
            }
            indexedVals.put(fieldName, valsSet);
        }
    }
}
/**
 * Static utility routine for checking for the existence of a column family
 * (table).
 *
 * @param conn the connection to use
 * @param columnFamily the column family to check for
 * @return true if the supplied column family name exists in the keyspace
 * @throws Exception if the keyspace meta data cannot be obtained
 */
public static boolean columnFamilyExists(CassandraConnection conn,
    String columnFamily) throws Exception {
  KsDef keySpace = conn.describeKeyspace();
  if (keySpace == null) {
    throw new Exception(BaseMessages.getString(PKG,
        "CassandraColumnMetaData.Error.UnableToGetMetaDataForKeyspace",
        conn.m_keyspaceName));
  }

  // scan the keyspace's column family definitions for a name match
  for (CfDef fam : keySpace.getCf_defs()) {
    if (fam.getName().equals(columnFamily)) {
      return true;
    }
  }
  return false;
}
/**
 * Static utility routine that returns a list of column families that exist in
 * the keyspace encapsulated in the supplied connection.
 *
 * @param conn the connection to use
 * @return a list of column family (table) names
 * @throws Exception if the keyspace meta data cannot be obtained
 */
public static List<String> getColumnFamilyNames(CassandraConnection conn)
    throws Exception {
  KsDef keySpace = conn.describeKeyspace();
  if (keySpace == null) {
    throw new Exception(BaseMessages.getString(PKG,
        "CassandraColumnMetaData.Error.UnableToGetMetaDataForKeyspace",
        conn.m_keyspaceName));
  }

  List<String> names = new ArrayList<String>();
  for (CfDef fam : keySpace.getCf_defs()) {
    names.add(fam.getName());
  }
  return names;
}
/**
 * Return the schema overview information.
 *
 * @return the textual description of the schema, accumulated while the
 *         column family meta data was parsed
 */
public String getSchemaDescription() {
  return m_schemaDescription.toString();
}
/**
 * Return the Cassandra column type (internal cassandra class name relative to
 * org.apache.cassandra.db.marshal) for the given Kettle column.
 *
 * @param vm the ValueMetaInterface for the Kettle column
 * @return the corresponding internal cassandra type.
 */
public static String getCassandraTypeForValueMeta(ValueMetaInterface vm) {
  int kettleType = vm.getType();
  if (kettleType == ValueMetaInterface.TYPE_BIGNUMBER) {
    return "DecimalType";
  }
  if (kettleType == ValueMetaInterface.TYPE_BOOLEAN) {
    return "BooleanType";
  }
  if (kettleType == ValueMetaInterface.TYPE_INTEGER) {
    return "LongType";
  }
  if (kettleType == ValueMetaInterface.TYPE_NUMBER) {
    return "DoubleType";
  }
  if (kettleType == ValueMetaInterface.TYPE_DATE) {
    return "DateType";
  }
  if (kettleType == ValueMetaInterface.TYPE_BINARY
      || kettleType == ValueMetaInterface.TYPE_SERIALIZABLE) {
    return "BytesType";
  }
  // TYPE_STRING and any unrecognized type map to UTF8Type
  return "UTF8Type";
}
/**
 * Return the Cassandra CQL column/key type for the given Kettle column. We
 * use this type for CQL create column family statements since, for some
 * reason, the internal type isn't recognized for the key. Internal types
 * *are* recognized for column definitions. The CQL reference guide states
 * that fully qualified (or relative to org.apache.cassandra.db.marshal) class
 * names can be used instead of CQL types - however, using these when defining
 * the key type always results in BytesType getting set for the key for some
 * reason.
 *
 * @param vm the ValueMetaInterface for the Kettle column
 * @return the corresponding CQL type
 */
public static String getCQLTypeForValueMeta(ValueMetaInterface vm) {
  int kettleType = vm.getType();
  if (kettleType == ValueMetaInterface.TYPE_STRING) {
    return "varchar";
  }
  if (kettleType == ValueMetaInterface.TYPE_BIGNUMBER) {
    return "decimal";
  }
  if (kettleType == ValueMetaInterface.TYPE_BOOLEAN) {
    return "boolean";
  }
  if (kettleType == ValueMetaInterface.TYPE_INTEGER) {
    return "bigint";
  }
  if (kettleType == ValueMetaInterface.TYPE_NUMBER) {
    return "double";
  }
  if (kettleType == ValueMetaInterface.TYPE_DATE) {
    return "timestamp";
  }
  // binary, serializable and any unrecognized type map to blob
  return "blob";
}
/**
 * Static utility method that converts a Kettle value into an appropriately
 * encoded CQL string.
 *
 * @param vm the ValueMeta for the Kettle value
 * @param value the actual Kettle value
 * @return an appropriately encoded CQL string representation of the value,
 *         suitable for using in an CQL query.
 * @throws KettleValueException if there is an error converting, or if the
 *           type is binary/serializable (not representable in CQL text)
 */
public static String kettleValueToCQL(ValueMetaInterface vm, Object value)
    throws KettleValueException {
  switch (vm.getType()) {
  case ValueMetaInterface.TYPE_STRING: {
    // round-trip through the Cassandra type (decompose/getString) to get
    // the canonical Cassandra string form of the value
    UTF8Type u = UTF8Type.instance;
    String toConvert = vm.getString(value);
    ByteBuffer decomposed = u.decompose(toConvert);
    String cassandraString = u.getString(decomposed);
    // strings are quoted in CQL, so embedded single quotes must be doubled
    return escapeSingleQuotes(cassandraString);
  }
  case ValueMetaInterface.TYPE_BIGNUMBER: {
    DecimalType dt = DecimalType.instance;
    BigDecimal toConvert = vm.getBigNumber(value);
    ByteBuffer decomposed = dt.decompose(toConvert);
    String cassandraString = dt.getString(decomposed);
    // numeric literal - no quoting, hence no escaping needed
    return cassandraString;
  }
  case ValueMetaInterface.TYPE_BOOLEAN: {
    BooleanType bt = BooleanType.instance;
    Boolean toConvert = vm.getBoolean(value);
    ByteBuffer decomposed = bt.decompose(toConvert);
    String cassandraString = bt.getString(decomposed);
    return escapeSingleQuotes(cassandraString);
  }
  case ValueMetaInterface.TYPE_INTEGER: {
    LongType lt = LongType.instance;
    Long toConvert = vm.getInteger(value);
    ByteBuffer decomposed = lt.decompose(toConvert);
    String cassandraString = lt.getString(decomposed);
    return cassandraString;
  }
  case ValueMetaInterface.TYPE_NUMBER: {
    DoubleType dt = DoubleType.instance;
    Double toConvert = vm.getNumber(value);
    ByteBuffer decomposed = dt.decompose(toConvert);
    String cassandraString = dt.getString(decomposed);
    return cassandraString;
  }
  case ValueMetaInterface.TYPE_DATE:
    // dates are rendered by Cassandra's DateType and quoted in CQL
    DateType d = DateType.instance;
    Date toConvert = vm.getDate(value);
    ByteBuffer decomposed = d.decompose(toConvert);
    String cassandraFormattedDateString = d.getString(decomposed);
    return escapeSingleQuotes(cassandraFormattedDateString);
  case ValueMetaInterface.TYPE_BINARY:
  case ValueMetaInterface.TYPE_SERIALIZABLE:
    // raw bytes cannot be embedded in a CQL text statement
    throw new KettleValueException(BaseMessages.getString(PKG,
        "CassandraColumnMetaData.Error.CantConvertBinaryToCQL"));
  }

  // any other Kettle type is unsupported
  throw new KettleValueException(BaseMessages.getString(PKG,
      "CassandraColumnMetaData.Error.CantConvertType", vm.getName(),
      vm.getTypeDesc()));
}
/**
 * Static utility to decompose a Kettle value to a ByteBuffer. Note - does not
 * check if the kettle value is null.
 *
 * @param vm the ValueMeta for the Kettle value
 * @param value the actual Kettle value
 * @param isKey true if the value is the row key (the key validator is used
 *          rather than the column's validator)
 * @return a ByteBuffer encapsulating the bytes for the decomposed value
 * @throws KettleException if a problem occurs, or if no decomposition is
 *           possible for the governing validator type
 */
public ByteBuffer kettleValueToByteBuffer(ValueMetaInterface vm,
    Object value, boolean isKey) throws KettleException {
  // work out which Cassandra validator governs this value: the key
  // validator for keys, otherwise the column's validator (falling back to
  // the column family default when the column is not in the meta data)
  String fullTransCoder = m_defaultValidationClass;
  if (isKey) {
    fullTransCoder = m_keyValidator;
  } else {
    fullTransCoder = m_columnMeta.get(vm.getName());
    if (fullTransCoder == null) {
      // use default if not in column meta data
      fullTransCoder = m_defaultValidationClass;
    }
  }

  String transCoder = fullTransCoder;
  // if it's a composite type make sure that we check only against the
  // primary type
  if (transCoder.indexOf('(') > 0) {
    transCoder = transCoder.substring(0, transCoder.indexOf('('));
  }

  // NOTE(review): the indexOf(...) > 0 checks below assume validator names
  // are fully qualified (e.g. org.apache.cassandra.db.marshal.UTF8Type); a
  // bare "UTF8Type" would not match - confirm against how validators are
  // populated. Also note LexicalUUIDType/DynamicCompositeType must be
  // checked before UUIDType/CompositeType since the latter are substrings.
  ByteBuffer decomposed = null;
  if (transCoder.indexOf("UTF8Type") > 0) {
    UTF8Type u = UTF8Type.instance;
    decomposed = u.decompose(vm.getString(value));
  } else if (transCoder.indexOf("AsciiType") > 0) {
    AsciiType at = AsciiType.instance;
    decomposed = at.decompose(vm.getString(value));
  } else if (transCoder.indexOf("LongType") > 0) {
    LongType lt = LongType.instance;
    decomposed = lt.decompose(vm.getInteger(value));
  } else if (transCoder.indexOf("DoubleType") > 0) {
    DoubleType dt = DoubleType.instance;
    decomposed = dt.decompose(vm.getNumber(value));
  } else if (transCoder.indexOf("DateType") > 0) {
    DateType dt = DateType.instance;
    decomposed = dt.decompose(vm.getDate(value));
  } else if (transCoder.indexOf("IntegerType") > 0) {
    IntegerType it = IntegerType.instance;
    decomposed = it.decompose(vm.getBigNumber(value).toBigInteger());
  } else if (transCoder.indexOf("FloatType") > 0) {
    FloatType ft = FloatType.instance;
    decomposed = ft.decompose(vm.getNumber(value).floatValue());
  } else if (transCoder.indexOf("LexicalUUIDType") > 0) {
    LexicalUUIDType lt = LexicalUUIDType.instance;
    UUID uuid = UUID.fromString((vm.getString(value)));
    decomposed = lt.decompose(uuid);
  } else if (transCoder.indexOf("UUIDType") > 0) {
    UUIDType ut = UUIDType.instance;
    UUID uuid = UUID.fromString((vm.getString(value)));
    decomposed = ut.decompose(uuid);
  } else if (transCoder.indexOf("BooleanType") > 0) {
    BooleanType bt = BooleanType.instance;
    decomposed = bt.decompose(vm.getBoolean(value));
  } else if (transCoder.indexOf("Int32Type") > 0) {
    Int32Type it = Int32Type.instance;
    decomposed = it.decompose(vm.getInteger(value).intValue());
  } else if (transCoder.indexOf("DecimalType") > 0) {
    DecimalType dt = DecimalType.instance;
    decomposed = dt.decompose(vm.getBigNumber(value));
  } else if (transCoder.indexOf("DynamicCompositeType") > 0) {
    AbstractType serializer = null;
    if (vm.isString()) {
      try {
        // composite types need the full (parameterized) validator string
        serializer = TypeParser.parse(fullTransCoder);
        decomposed = ((DynamicCompositeType) serializer).fromString(vm
            .getString(value));
      } catch (ConfigurationException e) {
        throw new KettleException(e.getMessage(), e);
      } catch (SyntaxException e) {
        throw new KettleException(e.getMessage(), e);
      }
    } else {
      throw new KettleException(BaseMessages.getString(PKG,
          "CassandraColumnMetaData.Error.CantConvertTypeThrift",
          vm.getTypeDesc(), fullTransCoder));
    }
  } else if (transCoder.indexOf("CompositeType") > 0) {
    AbstractType serializer = null;
    if (vm.isString()) {
      try {
        serializer = TypeParser.parse(fullTransCoder);
        // FIX: convert the actual value (vm.getString(value)) rather than
        // vm.toString(), which is the meta data's own string form, not the
        // row data (matches the DynamicCompositeType branch above)
        decomposed = ((CompositeType) serializer).fromString(vm
            .getString(value));
      } catch (ConfigurationException e) {
        throw new KettleException(e.getMessage(), e);
      } catch (SyntaxException e) {
        throw new KettleException(e.getMessage(), e);
      }
    } else {
      throw new KettleException(BaseMessages.getString(PKG,
          "CassandraColumnMetaData.Error.CantConvertTypeThrift",
          vm.getTypeDesc(), fullTransCoder));
    }
  }

  if (decomposed == null) {
    throw new KettleException(BaseMessages.getString(PKG,
        "CassandraColumnMetaData.Error.UnableToConvertValue", vm.getName()));
  }

  return decomposed;
}
/**
 * Escape single quotes for safe inclusion in a CQL string literal.
 * Quotes are escaped by doubling them, as in SQL.
 *
 * @param source the string to escape
 * @return the escaped string
 */
protected static String escapeSingleQuotes(String source) {
  StringBuilder escaped = new StringBuilder(source.length());
  for (int i = 0; i < source.length(); i++) {
    char c = source.charAt(i);
    escaped.append(c);
    if (c == '\'') {
      escaped.append('\''); // double each single quote
    }
  }
  return escaped.toString();
}
/**
 * Encode a string representation of a column name using the serializer for
 * the default comparator.
 *
 * @param colName the textual column name to serialize
 * @return a ByteBuffer encapsulating the serialized column name
 * @throws KettleException if a problem occurs during serialization, or if
 *           the column comparator type is not recognized
 */
public ByteBuffer columnNameToByteBuffer(String colName)
    throws KettleException {
  AbstractType serializer = null;
  String fullEncoder = m_columnComparator;
  String encoder = fullEncoder;

  // if it's a composite type make sure that we check only against the
  // primary type
  if (encoder.indexOf('(') > 0) {
    encoder = encoder.substring(0, encoder.indexOf('('));
  }

  // NOTE(review): the indexOf(...) > 0 checks assume a fully qualified
  // comparator class name - confirm. LexicalUUIDType/DynamicCompositeType
  // must precede UUIDType/CompositeType (substring matches).
  if (encoder.indexOf("UTF8Type") > 0) {
    serializer = UTF8Type.instance;
  } else if (encoder.indexOf("AsciiType") > 0) {
    serializer = AsciiType.instance;
  } else if (encoder.indexOf("LongType") > 0) {
    serializer = LongType.instance;
  } else if (encoder.indexOf("DoubleType") > 0) {
    serializer = DoubleType.instance;
  } else if (encoder.indexOf("DateType") > 0) {
    serializer = DateType.instance;
  } else if (encoder.indexOf("IntegerType") > 0) {
    serializer = IntegerType.instance;
  } else if (encoder.indexOf("FloatType") > 0) {
    serializer = FloatType.instance;
  } else if (encoder.indexOf("LexicalUUIDType") > 0) {
    serializer = LexicalUUIDType.instance;
  } else if (encoder.indexOf("UUIDType") > 0) {
    serializer = UUIDType.instance;
  } else if (encoder.indexOf("BooleanType") > 0) {
    serializer = BooleanType.instance;
  } else if (encoder.indexOf("Int32Type") > 0) {
    serializer = Int32Type.instance;
  } else if (encoder.indexOf("DecimalType") > 0) {
    serializer = DecimalType.instance;
  } else if (encoder.indexOf("DynamicCompositeType") > 0) {
    try {
      // composite types need the full (parameterized) comparator string
      serializer = TypeParser.parse(fullEncoder);
    } catch (ConfigurationException e) {
      throw new KettleException(e.getMessage(), e);
    } catch (SyntaxException e) {
      throw new KettleException(e.getMessage(), e);
    }
  } else if (encoder.indexOf("CompositeType") > 0) {
    try {
      serializer = TypeParser.parse(fullEncoder);
    } catch (ConfigurationException e) {
      throw new KettleException(e.getMessage(), e);
    } catch (SyntaxException e) {
      throw new KettleException(e.getMessage(), e);
    }
  }

  // FIX: previously an unrecognized comparator fell through with a null
  // serializer, causing a NullPointerException below; fail with an explicit,
  // translatable error instead
  if (serializer == null) {
    throw new KettleException(BaseMessages.getString(PKG,
        "CassandraColumnMetaData.Error.UnableToConvertValue", colName));
  }

  ByteBuffer result = serializer.fromString(colName);
  return result;
}
/**
 * Encodes an object via serialization
*
* @param obj the object to encode
* @return an array of bytes containing the serialized object
* @throws IOException if serialization fails
*
* public static byte[] encodeObject(Object obj) throws IOException
* { ByteArrayOutputStream bos = new ByteArrayOutputStream();
* BufferedOutputStream buf = new BufferedOutputStream(bos);
* ObjectOutputStream oos = new ObjectOutputStream(buf);
* oos.writeObject(obj); buf.flush();
*
* return bos.toByteArray(); }
*/
/**
 * Get the Kettle ValueMeta that corresponds to the type of the key for this
 * column family.
 *
 * @return the key's ValueMeta
 */
public ValueMetaInterface getValueMetaForKey() {
  // the key is exposed under the column family name - see getKeyName()
  return getValueMetaForColumn(getKeyName());
}
/**
 * Get the Kettle ValueMeta that corresponds to the type of the supplied
 * cassandra column.
 *
 * @param colName the name of the column to get a ValueMeta for
 * @return the ValueMeta that is appropriate for the type of the supplied
 *         column.
 */
public ValueMetaInterface getValueMetaForColumn(String colName) {
  String validator;
  if (colName.equals(getKeyName())) {
    // the key is governed by the key validator, not the column meta data
    validator = m_keyValidator;
  } else {
    validator = m_columnMeta.get(colName);
    if (validator == null) {
      validator = m_defaultValidationClass;
    }
  }

  ValueMetaInterface newVM = new ValueMeta(colName,
      kettleTypeForValidator(validator));

  HashSet<Object> legalVals = m_indexedVals.get(colName);
  if (legalVals != null) {
    // restrict this field to its declared set of legal values
    newVM.setStorageType(ValueMetaInterface.STORAGE_TYPE_INDEXED);
    newVM.setIndex(legalVals.toArray());
  }

  return newVM;
}

/**
 * Map a Cassandra validator class name to the corresponding Kettle type
 * constant. Returns 0 when the validator is not recognized.
 */
private static int kettleTypeForValidator(String type) {
  if (type.indexOf("UTF8Type") > 0 || type.indexOf("AsciiType") > 0
      || type.indexOf("UUIDType") > 0 || type.indexOf("CompositeType") > 0) {
    return ValueMetaInterface.TYPE_STRING;
  }
  if (type.indexOf("LongType") > 0 || type.indexOf("IntegerType") > 0
      || type.indexOf("Int32Type") > 0) {
    return ValueMetaInterface.TYPE_INTEGER;
  }
  if (type.indexOf("DoubleType") > 0 || type.indexOf("FloatType") > 0) {
    return ValueMetaInterface.TYPE_NUMBER;
  }
  if (type.indexOf("DateType") > 0) {
    return ValueMetaInterface.TYPE_DATE;
  }
  if (type.indexOf("DecimalType") > 0) {
    return ValueMetaInterface.TYPE_BIGNUMBER;
  }
  if (type.indexOf("BytesType") > 0) {
    return ValueMetaInterface.TYPE_BINARY;
  }
  if (type.indexOf("BooleanType") > 0) {
    return ValueMetaInterface.TYPE_BOOLEAN;
  }
  return 0;
}
/**
 * Get a list of ValueMetas corresponding to the columns in this schema.
 *
 * @return a list of ValueMetas, one per column defined in the meta data
 */
public List<ValueMetaInterface> getValueMetasForSchema() {
  List<ValueMetaInterface> result =
      new ArrayList<ValueMetaInterface>(m_columnMeta.size());
  for (String colName : m_columnMeta.keySet()) {
    result.add(getValueMetaForColumn(colName));
  }
  return result;
}
/**
 * Get a Set of column names that are defined in the meta data for this schema.
 *
 * @return a set of column names. NOTE: this is a live view of the underlying
 *         map's keys, not a copy.
 */
public Set<String> getColumnNames() {
  // only returns those column names that are defined in the schema!
  return m_columnMeta.keySet();
}
/**
 * Returns true if the supplied column name exists in this schema.
 *
 * @param colName the name of the column to check.
 * @return true if the column exists in the meta data for this column family.
 */
public boolean columnExistsInSchema(String colName) {
  // a column is known only if it has a non-null validator entry
  String validator = m_columnMeta.get(colName);
  return validator != null;
}
/**
 * Get the name of the key for this column family (equals the name of the
 * column family).
 *
 * @return the name of the key
 */
public String getKeyName() {
  // we use the column family/table name as the key
  return getColumnFamilyName();
}
/**
 * Return the name of this column family (table).
 *
 * @return the name of this column family.
 */
public String getColumnFamilyName() {
  return m_columnFamilyName;
}
/**
 * Return the decoded key value of a row. Assumes that the supplied row comes
 * from the column family that this meta data represents!!
 *
 * @param row a Cassandra row
 * @return the decoded key value
 * @throws KettleException if a deserializer can't be determined
 */
public Object getKeyValue(CqlRow row) throws KettleException {
  if (m_keyValidator.indexOf("BytesType") > 0) {
    // raw-bytes keys are handed back undecoded
    return row.getKey();
  }
  return getColumnValue(row.bufferForKey(), m_keyValidator);
}
/**
 * Return the decoded key value of a row. Assumes that the supplied row comes
 * from the column family that this meta data represents!!
 *
 * @param row a Cassandra key slice
 * @return the decoded key value
 * @throws KettleException if a deserializer can't be determined
 */
public Object getKeyValue(KeySlice row) throws KettleException {
  if (m_keyValidator.indexOf("BytesType") > 0) {
    // raw-bytes keys are handed back undecoded
    return row.getKey();
  }
  return getColumnValue(row.bufferForKey(), m_keyValidator);
}
/**
 * Decode the name of the supplied column using the comparator for this
 * column family.
 *
 * @param aCol the column whose name is to be decoded
 * @return the decoded column name
 * @throws KettleException if a deserializer can't be determined
 */
public String getColumnName(Column aCol) throws KettleException {
  return getColumnValue(aCol.bufferForName(), m_columnComparator).toString();
}
private Object getColumnValue(ByteBuffer valueBuff, String decoder)
throws KettleException {
if (valueBuff == null) {
return null;
}
Object result = null;
AbstractType deserializer = null;
String fullDecoder = decoder;
// if it's a composite type make sure that we check only against the
// primary type
if (decoder.indexOf('(') > 0) {
decoder = decoder.substring(0, decoder.indexOf('('));
}
if (decoder.indexOf("UTF8Type") > 0) {
deserializer = UTF8Type.instance;
} else if (decoder.indexOf("AsciiType") > 0) {
deserializer = AsciiType.instance;
} else if (decoder.indexOf("LongType") > 0) {
deserializer = LongType.instance;
} else if (decoder.indexOf("DoubleType") > 0) {
deserializer = DoubleType.instance;
} else if (decoder.indexOf("DateType") > 0) {
deserializer = DateType.instance;
} else if (decoder.indexOf("IntegerType") > 0) {
deserializer = IntegerType.instance;
result = new Long(((IntegerType) deserializer).compose(valueBuff)
.longValue());
return result;
} else if (decoder.indexOf("FloatType") > 0) {
deserializer = FloatType.instance;
result = new Double(((FloatType) deserializer).compose(valueBuff))
.doubleValue();
return result;
} else if (decoder.indexOf("LexicalUUIDType") > 0) {
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | true |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandraoutput/CassandraOutput.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandraoutput/CassandraOutput.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandraoutput;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.thrift.Mutation;
import org.pentaho.cassandra.CassandraColumnMetaData;
import org.pentaho.cassandra.CassandraConnection;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
/**
* Class providing an output step for writing data to a cassandra table (column
* family). Can create the specified column family (if it doesn't already exist)
* and can update column family meta data.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*/
public class CassandraOutput extends BaseStep implements StepInterface {
/** Step configuration meta data (cast from the generic step meta) */
protected CassandraOutputMeta m_meta;
/** Runtime data holder for this step (cast from the generic step data) */
protected CassandraOutputData m_data;
/**
 * Standard Kettle step constructor - simply delegates to BaseStep.
 *
 * @param stepMeta the step meta
 * @param stepDataInterface the step data
 * @param copyNr the copy number of this step instance
 * @param transMeta the transformation meta
 * @param trans the owning transformation
 */
public CassandraOutput(StepMeta stepMeta,
    StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans) {
  super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
}
/** Column meta data and schema information for the target column family */
protected CassandraColumnMetaData m_cassandraMeta;

/** Holds batch mutate for Thrift-based IO */
protected Map<ByteBuffer, Map<String, List<Mutation>>> m_thriftBatch;

/** Holds the batch insert CQL statement under construction */
protected StringBuilder m_batchInsertCQL;

/** Current batch of rows to insert */
protected List<Object[]> m_batch;

/** The number of rows seen so far for this batch */
protected int m_rowsSeen;

/** The batch size to use (overridden from step meta in initialize()) */
protected int m_batchSize = 100;

/** The consistency to use - null means to use the cassandra default */
protected String m_consistency = null;

/** The name of the column family (table) to write to */
protected String m_columnFamilyName;

/** The index of the key field in the incoming rows (-1 until resolved) */
protected int m_keyIndex = -1;

/** Timeout (ms) for CQL batch inserts; presumably 0 means no explicit timeout - confirm */
protected int m_cqlBatchInsertTimeout = 0;

/** Default batch split factor (sub-batch size used when retrying failed batches) */
protected int m_batchSplitFactor = 10;

/** Whether to use Thrift for IO or not */
protected boolean m_useThriftIO;
/**
 * Connect to Cassandra, validate the step configuration and prepare all the
 * state needed for batch writing: key field index, column family meta data,
 * batch size/timeouts, optional column family creation/truncation, meta data
 * updates and apriori CQL. Called lazily when the first row arrives.
 *
 * Note: all failures here are logged rather than rethrown - the step will
 * subsequently fail when it tries to use the uninitialized state.
 *
 * @param smi the step meta (a CassandraOutputMeta)
 * @param sdi the step data (a CassandraOutputData)
 * @throws KettleException declared, but errors are currently only logged
 */
protected void initialize(StepMetaInterface smi, StepDataInterface sdi)
    throws KettleException {
  m_meta = (CassandraOutputMeta) smi;
  m_data = (CassandraOutputData) sdi;

  first = false;
  m_rowsSeen = 0;

  // Resolve all connection settings, substituting Kettle variables
  String hostS = environmentSubstitute(m_meta.getCassandraHost());
  String portS = environmentSubstitute(m_meta.getCassandraPort());
  String userS = m_meta.getUsername();
  String passS = m_meta.getPassword();
  String batchTimeoutS = environmentSubstitute(m_meta
      .getCQLBatchInsertTimeout());
  String batchSplitFactor = environmentSubstitute(m_meta.getCQLSubBatchSize());
  String schemaHostS = environmentSubstitute(m_meta.getSchemaHost());
  String schemaPortS = environmentSubstitute(m_meta.getSchemaPort());

  // schema operations default to the main connection host/port
  if (Const.isEmpty(schemaHostS)) {
    schemaHostS = hostS;
  }
  if (Const.isEmpty(schemaPortS)) {
    schemaPortS = portS;
  }

  if (!Const.isEmpty(userS) && !Const.isEmpty(passS)) {
    userS = environmentSubstitute(userS);
    passS = environmentSubstitute(passS);
  }
  String keyspaceS = environmentSubstitute(m_meta.getCassandraKeyspace());
  m_columnFamilyName = environmentSubstitute(m_meta.getColumnFamilyName());
  String keyField = environmentSubstitute(m_meta.getKeyField());

  try {
    // parse the CQL batch-insert timeout (clamped to a 500ms minimum)
    if (!Const.isEmpty(batchTimeoutS)) {
      try {
        m_cqlBatchInsertTimeout = Integer.parseInt(batchTimeoutS);
        if (m_cqlBatchInsertTimeout < 500) {
          logBasic(BaseMessages.getString(CassandraOutputMeta.PKG,
              "CassandraOutput.Message.MinimumTimeout"));
          m_cqlBatchInsertTimeout = 500;
        }
      } catch (NumberFormatException e) {
        logError(BaseMessages.getString(CassandraOutputMeta.PKG,
            "CassandraOutput.Error.CantParseTimeout"));
        // fall back to 10 seconds if the configured value is unparseable
        m_cqlBatchInsertTimeout = 10000;
      }
    }

    // parse the sub-batch size used when splitting failed batches
    if (!Const.isEmpty(batchSplitFactor)) {
      try {
        m_batchSplitFactor = Integer.parseInt(batchSplitFactor);
      } catch (NumberFormatException e) {
        logError(BaseMessages.getString(CassandraOutputMeta.PKG,
            "CassandraOutput.Error.CantParseSubBatchSize"));
      }
    }

    // validate required configuration
    if (Const.isEmpty(hostS) || Const.isEmpty(portS)
        || Const.isEmpty(keyspaceS)) {
      throw new KettleException(BaseMessages.getString(
          CassandraOutputMeta.PKG,
          "CassandraOutput.Error.MissingConnectionDetails"));
    }

    if (Const.isEmpty(m_columnFamilyName)) {
      throw new KettleException(BaseMessages.getString(
          CassandraOutputMeta.PKG,
          "CassandraOutput.Error.NoColumnFamilySpecified"));
    }

    if (Const.isEmpty(keyField)) {
      throw new KettleException(BaseMessages.getString(
          CassandraOutputMeta.PKG,
          "CassandraOutput.Error.NoIncomingKeySpecified"));
    }

    // check that the specified key field is present in the incoming data
    m_keyIndex = getInputRowMeta().indexOfValue(keyField);
    if (m_keyIndex < 0) {
      throw new KettleException(BaseMessages.getString(
          CassandraOutputMeta.PKG, "CassandraOutput.Error.CantFindKeyField",
          keyField));
    }

    logBasic(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Message.ConnectingForSchemaOperations", schemaHostS,
        schemaPortS, keyspaceS));

    CassandraConnection connection = null;

    // open a schema connection for the one-off setup work, closed in the
    // finally block below
    try {
      connection = openConnection(true);

      if (!CassandraColumnMetaData.columnFamilyExists(connection,
          m_columnFamilyName)) {
        if (m_meta.getCreateColumnFamily()) {
          // create the column family (table)
          boolean result = CassandraOutputData.createColumnFamily(connection,
              m_columnFamilyName, getInputRowMeta(), m_keyIndex,
              m_meta.getUseCompression());
          if (!result) {
            throw new KettleException(BaseMessages.getString(
                CassandraOutputMeta.PKG,
                "CassandraOutput.Error.NeedAtLeastOneFieldAppartFromKey"));
          }
        } else {
          throw new KettleException(BaseMessages.getString(
              CassandraOutputMeta.PKG,
              "CassandraOutput.Error.ColumnFamilyDoesNotExist",
              m_columnFamilyName, keyspaceS));
        }
      }

      // get the column family meta data
      logBasic(BaseMessages.getString(CassandraOutputMeta.PKG,
          "CassandraOutput.Message.GettingMetaData", m_columnFamilyName));

      m_cassandraMeta = new CassandraColumnMetaData(connection,
          m_columnFamilyName);

      // check that we have at least one incoming field apart from the key
      if (CassandraOutputData.numFieldsToBeWritten(m_columnFamilyName,
          getInputRowMeta(), m_keyIndex, m_cassandraMeta,
          m_meta.getInsertFieldsNotInMeta()) < 2) {
        throw new KettleException(BaseMessages.getString(
            CassandraOutputMeta.PKG,
            "CassandraOutput.Error.NeedAtLeastOneFieldAppartFromKey"));
      }

      // output (downstream) is the same as input
      m_data.setOutputRowMeta(getInputRowMeta());

      String batchSize = environmentSubstitute(m_meta.getBatchSize());
      if (!Const.isEmpty(batchSize)) {
        try {
          m_batchSize = Integer.parseInt(batchSize);
        } catch (NumberFormatException e) {
          logError(BaseMessages.getString(CassandraOutputMeta.PKG,
              "CassandraOutput.Error.CantParseBatchSize"));
          // fall back to the default of 100 rows per batch
          m_batchSize = 100;
        }
      } else {
        throw new KettleException(BaseMessages.getString(
            CassandraOutputMeta.PKG, "CassandraOutput.Error.NoBatchSizeSet"));
      }

      if (m_meta.getUpdateCassandraMeta()) {
        // Update cassandra meta data for unknown incoming fields?
        CassandraOutputData.updateCassandraMeta(connection,
            m_columnFamilyName, getInputRowMeta(), m_keyIndex,
            m_cassandraMeta);
      }

      // Truncate (remove all data from) column family first?
      if (m_meta.getTruncateColumnFamily()) {
        CassandraOutputData.truncateColumnFamily(connection,
            m_columnFamilyName);
      }

      m_useThriftIO = m_meta.getUseThriftIO();

      // Try to execute any apriori CQL commands?
      if (!Const.isEmpty(m_meta.getAprioriCQL())) {
        String aprioriCQL = environmentSubstitute(m_meta.getAprioriCQL());
        logBasic(BaseMessages.getString(CassandraOutputMeta.PKG,
            "CassandraOutput.Message.ExecutingAprioriCQL",
            m_columnFamilyName, aprioriCQL));

        CassandraOutputData.executeAprioriCQL(connection, aprioriCQL, log,
            m_meta.getUseCompression());
      }
    } finally {
      if (connection != null) {
        closeConnection(connection);
        connection = null;
      }
    }

    // prepare the first (empty) CQL batch and row buffer
    m_consistency = environmentSubstitute(m_meta.getConsistency());
    m_batchInsertCQL = CassandraOutputData.newCQLBatch(m_batchSize,
        m_consistency);

    m_batch = new ArrayList<Object[]>();
  } catch (Exception ex) {
    // NOTE(review): initialization failures are only logged here, not
    // rethrown - confirm whether the step should fail hard instead
    logError(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Error.InitializationProblem"), ex);
  }
}
/**
 * Process one incoming row: buffer it into the current batch, committing
 * the batch whenever it reaches the configured size. On end of input the
 * final partial batch is committed and batch state is released.
 *
 * @param smi the step meta
 * @param sdi the step data
 * @return false when the input is exhausted, true otherwise
 * @throws KettleException if a batch commit fails
 */
@Override
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi)
    throws KettleException {
  Object[] currentRow = getRow();

  if (currentRow == null) {
    // end of input - flush any partially filled batch, then release state
    if (m_rowsSeen > 0) {
      doBatch();
    }
    m_batchInsertCQL = null;
    m_batch = null;
    m_thriftBatch = null;
    setOutputDone();
    return false;
  }

  // lazily set up connections/meta data on the first row
  if (first) {
    initialize(smi, sdi);
  }

  m_batch.add(currentRow);
  m_rowsSeen++;

  // commit as soon as the batch is full
  if (m_rowsSeen == m_batchSize) {
    doBatch();
  }

  return true;
}
/**
 * Commit the current batch of buffered rows, then reset the batch state
 * ready for the next batch.
 *
 * @throws KettleException if the commit fails
 */
protected void doBatch() throws KettleException {
  try {
    doBatch(m_batch);
  } catch (Exception e) {
    // NOTE(review): in Thrift mode m_batchInsertCQL may be stale/empty -
    // the logged statement is only meaningful for CQL IO
    logError(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Error.CommitFailed", m_batchInsertCQL.toString(), e));
    // FIX: wrap the original exception as the cause. The previous
    // e.fillInStackTrace() overwrote the exception's stack trace with this
    // location, destroying where the failure actually occurred.
    throw new KettleException(e);
  }

  // ready for a new batch
  m_batch.clear();
  m_rowsSeen = 0;
}
/**
 * Build and commit a batch (Thrift or CQL, depending on configuration) from
 * the supplied rows. On commit failure the batch is recursively split into
 * sub-batches to isolate the problem rows; a single failing row is routed to
 * error handling when enabled. NOTE: sub-batches are peeled off the end of
 * the list, so row order is not preserved across retries.
 *
 * @param batch the rows to write (may be mutated by the splitting logic)
 * @throws Exception if a problem occurs
 */
protected void doBatch(List<Object[]> batch) throws Exception {
  // stopped?
  if (isStopped()) {
    logDebug(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Message.StoppedSkippingBatch"));
    return;
  }
  // ignore empty batch
  if (batch == null || batch.isEmpty()) {
    logDebug(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Message.SkippingEmptyBatch"));
    return;
  }
  // construct CQL/thrift batch and commit
  CassandraConnection connection = null;
  int size = batch.size();
  try {
    if (m_useThriftIO) {
      m_thriftBatch = CassandraOutputData.newThriftBatch(size);
    } else {
      // construct CQL
      m_batchInsertCQL = CassandraOutputData.newCQLBatch(m_batchSize,
          m_consistency);
    }
    int rowsAdded = 0;
    // rows the output-data helper declines to add (addRowTo... returns
    // false) are simply not counted
    for (Object[] r : batch) {
      // add the row to the batch
      if (m_useThriftIO) {
        if (CassandraOutputData.addRowToThriftBatch(m_thriftBatch,
            m_columnFamilyName, getInputRowMeta(), m_keyIndex, r,
            m_cassandraMeta, m_meta.getInsertFieldsNotInMeta(), log,m_meta.isAsIndexColumn())) {
          rowsAdded++;
        }
      } else {
        if (CassandraOutputData.addRowToCQLBatch(m_batchInsertCQL,
            m_columnFamilyName, getInputRowMeta(), m_keyIndex, r,
            m_cassandraMeta, m_meta.getInsertFieldsNotInMeta(), log,m_meta.isAsIndexColumn())) {
          rowsAdded++;
        }
      }
    }
    if (rowsAdded == 0) {
      // nothing usable in this batch - skip the commit entirely
      logDebug(BaseMessages.getString(CassandraOutputMeta.PKG,
          "CassandraOutput.Message.SkippingEmptyBatch"));
      return;
    }
    if (!m_useThriftIO) {
      CassandraOutputData.completeCQLBatch(m_batchInsertCQL);
    }
    // commit
    connection = openConnection(false);
    logDetailed(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Message.CommittingBatch", m_columnFamilyName, ""
            + size));
    if (m_useThriftIO) {
      CassandraOutputData.commitThriftBatch(m_thriftBatch, m_consistency,
          connection, m_cqlBatchInsertTimeout);
    } else {
      CassandraOutputData.commitCQLBatch(m_batchInsertCQL, connection,
          m_meta.getUseCompression(), m_cqlBatchInsertTimeout);
    }
  } catch (Exception e) {
    closeConnection(connection);
    connection = null;
    logDetailed(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Error.FailedToInsertBatch", "" + size), e);

    logDetailed(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Message.WillNowTrySplittingIntoSubBatches"));

    // is it possible to divide and conquer?
    if (size == 1) {
      // single error row - found it!
      if (getStepMeta().isDoingErrorHandling()) {
        putError(getInputRowMeta(), batch.get(0), 1L, e.getMessage(), null,
            "ERR_INSERT01");
      }
    } else if (size > m_batchSplitFactor) {
      // split into smaller batches and try separately
      List<Object[]> subBatch = new ArrayList<Object[]>();
      while (batch.size() > m_batchSplitFactor) {
        while (subBatch.size() < m_batchSplitFactor && batch.size() > 0) {
          // remove from the right - avoid internal shifting
          subBatch.add(batch.remove(batch.size() - 1));
        }
        doBatch(subBatch);
        subBatch.clear();
      }
      doBatch(batch);
    } else {
      // try each row individually
      List<Object[]> subBatch = new ArrayList<Object[]>();
      while (batch.size() > 0) {
        subBatch.clear();
        // remove from the right - avoid internal shifting
        subBatch.add(batch.remove(batch.size() - 1));
        doBatch(subBatch);
      }
    }
  } finally {
    closeConnection(connection);
    connection = null;
  }
}
/**
 * Flags this step as stopped. A repeated request to stop a step that is
 * already stopped is ignored so that shutdown work is not redone.
 *
 * @param stopped true to stop the step
 */
@Override
public void setStopped(boolean stopped) {
    // idiom fix: compare booleans directly instead of "== true"
    if (isStopped() && stopped) {
        return;
    }
    super.setStopped(stopped);
}
/**
 * Opens a connection to Cassandra using the host/port configured in the
 * step meta data. Kettle variables in all settings are resolved first.
 *
 * @param forSchemaChanges true if the connection will be used for schema
 *            modifications; in that case the dedicated schema host/port is
 *            used when configured (falling back to the data host/port)
 * @return an open connection with the configured keyspace selected
 * @throws KettleException if the connection cannot be established
 */
protected CassandraConnection openConnection(boolean forSchemaChanges)
throws KettleException {
    // resolve any Kettle variables in the connection settings
    String host = environmentSubstitute(m_meta.getCassandraHost());
    String port = environmentSubstitute(m_meta.getCassandraPort());
    String user = m_meta.getUsername();
    String pass = m_meta.getPassword();
    String timeoutSetting = environmentSubstitute(m_meta.getSocketTimeout());
    String schemaHost = environmentSubstitute(m_meta.getSchemaHost());
    String schemaPort = environmentSubstitute(m_meta.getSchemaPort());

    // no dedicated schema endpoint configured -> use the data endpoint
    if (Const.isEmpty(schemaHost)) {
        schemaHost = host;
    }
    if (Const.isEmpty(schemaPort)) {
        schemaPort = port;
    }
    // credentials may themselves contain variables
    if (!Const.isEmpty(user) && !Const.isEmpty(pass)) {
        user = environmentSubstitute(user);
        pass = environmentSubstitute(pass);
    }

    String keyspace = environmentSubstitute(m_meta.getCassandraKeyspace());
    String targetHost = forSchemaChanges ? schemaHost : host;
    String targetPort = forSchemaChanges ? schemaPort : port;

    CassandraConnection conn = null;
    try {
        if (Const.isEmpty(timeoutSetting)) {
            conn = CassandraOutputData.getCassandraConnection(targetHost,
                Integer.parseInt(targetPort), user, pass);
        } else {
            int sockTimeout = 30000; // default when the setting is unparsable
            try {
                sockTimeout = Integer.parseInt(timeoutSetting);
            } catch (NumberFormatException e) {
                logError(BaseMessages.getString(CassandraOutputMeta.PKG,
                    "CassandraOutput.Error.CantParseSocketTimeout"));
            }
            conn = CassandraOutputData.getCassandraConnection(targetHost,
                Integer.parseInt(targetPort), user, pass, sockTimeout);
        }
        conn.setKeyspace(keyspace);
    } catch (Exception ex) {
        // make sure a half-opened connection is not leaked
        closeConnection(conn);
        throw new KettleException(ex.getMessage(), ex);
    }
    return conn;
}
/**
 * Closes the supplied connection (if any), logging the shutdown.
 *
 * @param conn the connection to close; may be null, in which case this is
 *            a no-op
 */
protected void closeConnection(CassandraConnection conn) {
    if (conn == null) {
        return;
    }
    logBasic(BaseMessages.getString(CassandraOutputMeta.PKG,
        "CassandraOutput.Message.ClosingConnection"));
    conn.close();
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandraoutput/CassandraOutputData.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandraoutput/CassandraOutputData.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandraoutput;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.zip.Deflater;
import org.apache.cassandra.thrift.CfDef;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnDef;
import org.apache.cassandra.thrift.ColumnOrSuperColumn;
import org.apache.cassandra.thrift.Compression;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.apache.cassandra.thrift.KsDef;
import org.apache.cassandra.thrift.Mutation;
import org.apache.cassandra.thrift.SchemaDisagreementException;
import org.apache.cassandra.thrift.TimedOutException;
import org.apache.cassandra.thrift.UnavailableException;
import org.apache.thrift.TException;
import org.pentaho.cassandra.CassandraColumnMetaData;
import org.pentaho.cassandra.CassandraConnection;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.step.BaseStepData;
import org.pentaho.di.trans.step.StepDataInterface;
/**
* Data class for the CassandraOutput step. Contains methods for obtaining a
* connection to cassandra, creating a new column family, updating a column
* family's meta data and constructing a batch insert CQL statement.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*/
public class CassandraOutputData extends BaseStepData implements
StepDataInterface {
/** The output row format (meta data describing the rows this step emits). */
protected RowMetaInterface m_outputRowMeta;
/**
 * Get the output row format.
 *
 * @return the output row format, or null if it has not been set yet
 */
public RowMetaInterface getOutputRowMeta() {
return m_outputRowMeta;
}
/**
 * Set the output row format.
 *
 * @param rmi
 *            the output row format
 */
public void setOutputRowMeta(RowMetaInterface rmi) {
m_outputRowMeta = rmi;
}
/**
 * Obtain a connection to a Cassandra node with no socket timeout.
 *
 * @param host
 *            the hostname of a cassandra node
 * @param port
 *            the port that cassandra is listening on
 * @param username
 *            the username for (optional) authentication
 * @param password
 *            the password for (optional) authentication
 * @return a connection to cassandra
 * @throws Exception
 *             if a problem occurs during connection
 */
public static CassandraConnection getCassandraConnection(String host,
int port, String username, String password) throws Exception {
    final int noTimeout = -1; // -1 means "no socket timeout"
    return new CassandraConnection(host, port, username, password, noTimeout);
}
/**
 * Obtain a connection to a Cassandra node with an explicit socket timeout.
 *
 * @param host
 *            the hostname of a cassandra node
 * @param port
 *            the port that cassandra is listening on
 * @param username
 *            the username for (optional) authentication
 * @param password
 *            the password for (optional) authentication
 * @param timeout
 *            the socket timeout to use (milliseconds)
 * @return a connection to cassandra
 * @throws Exception
 *             if a problem occurs during connection
 */
public static CassandraConnection getCassandraConnection(String host,
int port, String username, String password, int timeout)
throws Exception {
    CassandraConnection conn = new CassandraConnection(host, port, username,
        password, timeout);
    return conn;
}
/**
 * Create an empty thrift batch: a map from row key to (column family name
 * -> list of mutations).
 *
 * @param numRows
 *            the number of rows expected in this batch; used as the map's
 *            initial capacity
 * @return a new, empty batch map
 */
public static Map<ByteBuffer, Map<String, List<Mutation>>> newThriftBatch(
int numRows) {
return new HashMap<ByteBuffer, Map<String, List<Mutation>>>(numRows);
}
/**
 * Begin a new batch CQL statement.
 *
 * @param numRows
 *            the number of rows to be inserted in this batch (used to size
 *            the buffer)
 * @param consistency
 *            the consistency (e.g. ONE, QUORUM etc.) to use, or null/empty
 *            to use the default
 *
 * @return a StringBuilder initialized for the batch
 */
public static StringBuilder newCQLBatch(int numRows, String consistency) {
    // make a stab at a reasonable initial capacity (~80 chars per row)
    StringBuilder batch = new StringBuilder(numRows * 80);
    String header = "BEGIN BATCH";
    if (!Const.isEmpty(consistency)) {
        header += " USING CONSISTENCY " + consistency;
    }
    return batch.append(header).append("\n");
}
/**
 * Terminate a batch by appending the closing "APPLY BATCH" statement.
 * After this call the batch is ready to be sent to the server.
 *
 * @param batch
 *            the StringBuilder batch to complete
 */
public static void completeCQLBatch(StringBuilder batch) {
batch.append("APPLY BATCH");
}
/**
 * Send the batch insert, aborting if it does not complete within the
 * supplied timeout.
 *
 * @param batch
 *            the CQL batch insert statement
 * @param conn
 *            the connection to use
 * @param compressCQL
 *            true if the CQL should be compressed before sending
 * @param timeout
 *            number of milliseconds to wait for the insert to complete; a
 *            value <= 0 means wait indefinitely
 *
 * @throws Exception
 *             if a problem occurs, or a KettleException if the timeout is
 *             reached first
 */
@SuppressWarnings("deprecation")
public static void commitCQLBatch(final StringBuilder batch,
final CassandraConnection conn, final boolean compressCQL,
final int timeout) throws Exception {
// compress the batch if necessary
final byte[] toSend = compressCQL ? compressQuery(batch.toString(),
Compression.GZIP) : batch.toString().getBytes(
Charset.forName(CassandraColumnMetaData.UTF8));
// do commit in separate thread to be able to monitor timeout
long start = System.currentTimeMillis();
long time = System.currentTimeMillis() - start;
// single-element array lets the anonymous Runnable report a failure back
final Exception[] e = new Exception[1];
final AtomicBoolean done = new AtomicBoolean(false);
Thread t = new Thread(new Runnable() {
public void run() {
try {
conn.getClient().execute_cql_query(ByteBuffer.wrap(toSend),
compressCQL ? Compression.GZIP : Compression.NONE);
} catch (Exception ex) {
e[0] = ex;
} finally {
// always flag completion so the polling loop below terminates
done.set(true);
}
}
});
t.start();
// poll every 100ms until the worker finishes or the timeout elapses
while (!done.get()) {
time = System.currentTimeMillis() - start;
if (timeout > 0 && time > timeout) {
try {
// try to kill it! Thread.stop() is deprecated/unsafe (hence the
// @SuppressWarnings above) - NOTE(review): consider interrupting
// the thread or relying on socket timeouts instead
t.stop();
} catch (Exception ex) {/* YUM! */
}
throw new KettleException(BaseMessages.getString(
CassandraOutputMeta.PKG,
"CassandraOutput.Error.TimeoutReached"));
}
// wait
Thread.sleep(100);
}
// was there a problem?
if (e[0] != null) {
throw e[0];
}
}
/**
 * Commit the thrift batch, aborting if it does not complete within the
 * supplied timeout.
 *
 * @param thriftBatch
 *            the batch to commit
 * @param consistency
 *            the consistency level to use; an empty or unrecognized value
 *            falls back to ConsistencyLevel.ANY
 * @param conn
 *            the connection to use
 * @param timeout
 *            number of milliseconds to wait for the commit to complete; a
 *            value <= 0 means wait indefinitely
 *
 * @throws Exception
 *             if a problem occurs, or a KettleException if the timeout is
 *             reached first
 */
@SuppressWarnings("deprecation")
public static void commitThriftBatch(
final Map<ByteBuffer, Map<String, List<Mutation>>> thriftBatch,
final String consistency, final CassandraConnection conn,
final int timeout) throws Exception {
ConsistencyLevel levelToUse = ConsistencyLevel.ANY;
if (!Const.isEmpty(consistency)) {
try {
levelToUse = ConsistencyLevel.valueOf(consistency);
} catch (IllegalArgumentException ex) {
// unrecognized level name - keep the ANY default
}
}
final ConsistencyLevel fLevelToUse = levelToUse;
// do commit in separate thread to be able to monitor timeout
long start = System.currentTimeMillis();
long time = System.currentTimeMillis() - start;
// single-element array lets the anonymous Runnable report a failure back
final Exception[] e = new Exception[1];
final AtomicBoolean done = new AtomicBoolean(false);
Thread t = new Thread(new Runnable() {
public void run() {
try {
conn.getClient().batch_mutate(thriftBatch, fLevelToUse);
} catch (Exception ex) {
e[0] = ex;
} finally {
// always flag completion so the polling loop below terminates
done.set(true);
}
}
});
t.start();
// poll every 100ms until the worker finishes or the timeout elapses
while (!done.get()) {
time = System.currentTimeMillis() - start;
if (timeout > 0 && time > timeout) {
try {
// try to kill it! Thread.stop() is deprecated/unsafe (hence the
// @SuppressWarnings above)
t.stop();
} catch (Exception ex) {
}
throw new KettleException(BaseMessages.getString(
CassandraOutputMeta.PKG,
"CassandraOutput.Error.TimeoutReached"));
}
// wait
Thread.sleep(100);
}
// was there a problem?
if (e[0] != null) {
throw e[0];
}
}
/**
 * Send the batch insert synchronously (no timeout monitoring).
 *
 * @param batch
 *            the CQL batch insert statement
 * @param conn
 *            the connection to use
 * @param compressCQL
 *            true if the CQL should be GZIP-compressed before sending
 * @throws Exception
 *             if a problem occurs
 */
public static void commitCQLBatch(StringBuilder batch,
CassandraConnection conn, boolean compressCQL) throws Exception {
    String cql = batch.toString();
    byte[] payload;
    Compression wireCompression;
    if (compressCQL) {
        payload = compressQuery(cql, Compression.GZIP);
        wireCompression = Compression.GZIP;
    } else {
        payload = cql.getBytes(Charset.forName(CassandraColumnMetaData.UTF8));
        wireCompression = Compression.NONE;
    }
    conn.getClient().execute_cql_query(ByteBuffer.wrap(payload),
        wireCompression);
}
/**
 * Sanity-checks a row before it is added to a batch: the key must be
 * non-null and at least one other field must hold a non-null value.
 *
 * @param inputMeta
 *            the input row meta
 * @param keyIndex
 *            the index of the key field in the incoming row data
 * @param row
 *            the row to check
 * @param log
 *            logging
 * @return true if the row is OK to insert
 * @throws KettleException
 *             if a problem occurs
 */
protected static boolean preAddChecks(RowMetaInterface inputMeta,
int keyIndex, Object[] row, LogChannelInterface log)
throws KettleException {
    ValueMetaInterface keyMeta = inputMeta.getValueMeta(keyIndex);
    // a null key cannot be inserted - skip the row
    if (keyMeta.isNull(row[keyIndex])) {
        log.logError(BaseMessages.getString(CassandraOutputMeta.PKG,
            "CassandraOutput.Error.SkippingRowNullKey", row));
        return false;
    }
    // Cassandra needs at least one non-null value besides the key
    boolean hasValue = false;
    for (int i = 0; i < inputMeta.size() && !hasValue; i++) {
        if (i == keyIndex) {
            continue;
        }
        hasValue = !inputMeta.getValueMeta(i).isNull(row[i]);
    }
    if (!hasValue) {
        log.logError(BaseMessages.getString(CassandraOutputMeta.PKG,
            "CassandraOutput.Error.SkippingRowNoNonNullValues",
            keyMeta.getString(row[keyIndex])));
    }
    return hasValue;
}
/**
 * Adds a kettle row to a thrift-based batch (builds the map of keys to
 * mutations).
 *
 * @param thriftBatch
 *            the map of keys to mutations
 * @param colFamilyName
 *            the name of the column family (table) to insert into
 * @param inputMeta
 *            Kettle input row meta data
 * @param keyIndex
 *            the index of the incoming field to use as the key for
 *            inserting
 * @param row
 *            the Kettle row
 * @param cassandraMeta
 *            meta data on the columns in the cassandra column family
 *            (table)
 * @param insertFieldsNotInMetaData
 *            true if any Kettle fields that are not in the Cassandra column
 *            family (table) meta data are to be inserted. This is
 *            irrelevant if the user has opted to have the step initially
 *            update the Cassandra meta data for incoming fields that are
 *            not known about.
 * @param log
 *            the logging object
 * @param isAsIndexColumn
 *            if true a placeholder "-" is written as each column's value
 *            instead of the row's value
 *
 * @return true if the row was added to the batch
 *
 * @throws KettleException
 *             if a problem occurs
 */
public static boolean addRowToThriftBatch(
Map<ByteBuffer, Map<String, List<Mutation>>> thriftBatch,
String colFamilyName, RowMetaInterface inputMeta, int keyIndex,
Object[] row, CassandraColumnMetaData cassandraMeta,
boolean insertFieldsNotInMetaData, LogChannelInterface log,
boolean isAsIndexColumn) throws KettleException {
    // reject rows with a null key or no non-null values
    if (!preAddChecks(inputMeta, keyIndex, row, log)) {
        return false;
    }
    ValueMetaInterface keyMeta = inputMeta.getValueMeta(keyIndex);
    ByteBuffer keyBuff = cassandraMeta.kettleValueToByteBuffer(keyMeta,
        row[keyIndex], true);
    // check to see if we have already got some mutations for this key in
    // the batch
    Map<String, List<Mutation>> mapCF = thriftBatch.get(keyBuff);
    if (mapCF == null) {
        mapCF = new HashMap<String, List<Mutation>>(1);
    }
    List<Mutation> mutList = mapCF.get(colFamilyName);
    // BUG FIX: when the key was already in the batch (mapCF non-null) but
    // held no mutation list for this column family, the original code left
    // mutList null and threw a NullPointerException at mutList.add() below.
    if (mutList == null) {
        mutList = new ArrayList<Mutation>();
    }
    for (int i = 0; i < inputMeta.size(); i++) {
        if (i != keyIndex) {
            ValueMetaInterface colMeta = inputMeta.getValueMeta(i);
            String colName = colMeta.getName();
            // skip fields unknown to the schema unless explicitly allowed
            if (!cassandraMeta.columnExistsInSchema(colName)
                && !insertFieldsNotInMetaData) {
                continue;
            }
            // don't insert if null!
            if (colMeta.isNull(row[i])) {
                continue;
            }
            Column col = new Column(
                cassandraMeta.columnNameToByteBuffer(colName));
            if (isAsIndexColumn) {
                // placeholder value for "index" style column families
                col = col.setValue(cassandraMeta.kettleValueToByteBuffer(
                    colMeta, "-", false));
            } else {
                col = col.setValue(cassandraMeta.kettleValueToByteBuffer(
                    colMeta, row[i], false));
            }
            col = col.setTimestamp(System.currentTimeMillis());
            ColumnOrSuperColumn cosc = new ColumnOrSuperColumn();
            cosc.setColumn(col);
            Mutation mut = new Mutation();
            mut.setColumn_or_supercolumn(cosc);
            mutList.add(mut);
        }
    }
    // column family name -> mutations
    mapCF.put(colFamilyName, mutList);
    // row key -> column family -> mutations
    thriftBatch.put(keyBuff, mapCF);
    return true;
}
/**
 * Converts a kettle row to a CQL INSERT statement and appends it to the
 * batch.
 *
 * @param batch
 *            StringBuilder for collecting the batch CQL
 * @param colFamilyName
 *            the name of the column family (table) to insert into
 * @param inputMeta
 *            Kettle input row meta data
 * @param keyIndex
 *            the index of the incoming field to use as the key for
 *            inserting
 * @param row
 *            the Kettle row
 * @param cassandraMeta
 *            meta data on the columns in the cassandra column family
 *            (table)
 * @param insertFieldsNotInMetaData
 *            true if any Kettle fields that are not in the Cassandra column
 *            family (table) meta data are to be inserted. This is
 *            irrelevant if the user has opted to have the step initially
 *            update the Cassandra meta data for incoming fields that are
 *            not known about.
 * @param log
 *            the logging object
 * @param isAsIndexColumn
 *            if true the row VALUES are used as the column names and a
 *            placeholder '-' is written as each column's value (an "index"
 *            style column family)
 *
 * @return true if the row was added to the batch
 *
 * @throws KettleException
 *             if a problem occurs
 */
public static boolean addRowToCQLBatch(StringBuilder batch,
String colFamilyName, RowMetaInterface inputMeta, int keyIndex,
Object[] row, CassandraColumnMetaData cassandraMeta,
boolean insertFieldsNotInMetaData, LogChannelInterface log,
boolean isAsIndexColumn) throws KettleException {
// reject rows with a null key or no non-null values
if (!preAddChecks(inputMeta, keyIndex, row, log)) {
return false;
}
ValueMetaInterface keyMeta = inputMeta.getValueMeta(keyIndex);
// first pass: the column name list - INSERT INTO cf (KEY, 'c1', ...)
batch.append("INSERT INTO ").append(colFamilyName).append(" (KEY");
for (int i = 0; i < inputMeta.size(); i++) {
if (i != keyIndex) {
ValueMetaInterface colMeta = inputMeta.getValueMeta(i);
String colName = colMeta.getName();
// skip fields unknown to the schema unless explicitly allowed
if (!cassandraMeta.columnExistsInSchema(colName)
&& !insertFieldsNotInMetaData) {
continue;
}
// don't insert if null!
if (colMeta.isNull(row[i])) {
continue;
}
if (isAsIndexColumn) {
// the field's VALUE becomes the column name
batch.append(", '")
.append(CassandraColumnMetaData.kettleValueToCQL(
colMeta, row[i])).append("'");
} else {
batch.append(", '").append(colName).append("'");
}
}
}
batch.append(") VALUES (");
// key first
String keyString = CassandraColumnMetaData.kettleValueToCQL(keyMeta,
row[keyIndex]);
batch.append("'").append(keyString).append("'");
// second pass: the values. The skip logic MUST mirror the first pass so
// that the name and value lists stay aligned.
for (int i = 0; i < inputMeta.size(); i++) {
if (i != keyIndex) {
ValueMetaInterface colMeta = inputMeta.getValueMeta(i);
String colName = colMeta.getName();
if (!cassandraMeta.columnExistsInSchema(colName)
&& !insertFieldsNotInMetaData) {
continue;
}
// don't insert if null!
if (colMeta.isNull(row[i])) {
continue;
}
if (isAsIndexColumn) {
// placeholder value - the data is carried by the column name
batch.append(", '-'");
} else {
batch.append(", '")
.append(CassandraColumnMetaData.kettleValueToCQL(
colMeta, row[i])).append("'");
}
}
}
batch.append(")\n");
return true;
}
/**
 * Counts how many incoming fields will actually be written to the column
 * family, including the key. Cassandra requires at least one field besides
 * the key to be inserted.
 *
 * @param colFamilyName
 *            the name of the column family (unused in the count itself)
 * @param inputMeta
 *            the incoming row meta data
 * @param keyIndex
 *            the index of the key field
 * @param cassandraMeta
 *            meta data on the cassandra column family
 * @param insertFieldsNotInMetaData
 *            true if fields unknown to the schema are written anyway
 * @return the number of fields that will be written (>= 1 for the key)
 */
protected static int numFieldsToBeWritten(String colFamilyName,
RowMetaInterface inputMeta, int keyIndex,
CassandraColumnMetaData cassandraMeta,
boolean insertFieldsNotInMetaData) {
    int count = 1; // the key is always written
    for (int i = 0; i < inputMeta.size(); i++) {
        if (i == keyIndex) {
            continue;
        }
        String colName = inputMeta.getValueMeta(i).getName();
        if (insertFieldsNotInMetaData
            || cassandraMeta.columnExistsInSchema(colName)) {
            count++;
        }
    }
    return count;
}
/**
 * Constructs and executes a CQL TRUNCATE statement.
 *
 * @param conn
 *            the connection to use
 * @param columnFamily
 *            the name of the column family to truncate
 * @throws Exception
 *             if a problem occurs
 */
public static void truncateColumnFamily(CassandraConnection conn,
String columnFamily) throws Exception {
    String cqlCommand = "TRUNCATE " + columnFamily;
    // BUG FIX: encode with explicit UTF-8 (as everywhere else in this
    // class) instead of the platform default charset
    conn.getClient().execute_cql_query(
        ByteBuffer.wrap(cqlCommand.getBytes(Charset
            .forName(CassandraColumnMetaData.UTF8))), Compression.NONE);
}
/**
 * Updates the schema information for a given column family with any fields
 * in the supplied RowMeta that aren't defined in the schema. Abuses the
 * schema "comment" field to store information on any indexed values that
 * might be in the fields in the RowMeta: legal values are recorded between
 * "@@@" markers in the form "colName:{v1,v2,...};...".
 *
 * @param conn
 *            the connection to use
 * @param colFamilyName
 *            the name of the column family to update
 * @param inputMeta
 *            the row meta containing (potentially) new fields
 * @param keyIndex
 *            the index of the key field in the row meta
 * @param cassandraMeta
 *            meta data for the cassandra column family; refreshed at the
 *            end so it reflects the update
 * @throws Exception
 *             if a problem occurs updating the schema
 */
public static void updateCassandraMeta(CassandraConnection conn,
String colFamilyName, RowMetaInterface inputMeta, int keyIndex,
CassandraColumnMetaData cassandraMeta) throws Exception {
// column families
KsDef keySpace = conn.describeKeyspace();
List<CfDef> colFams = null;
if (keySpace != null) {
colFams = keySpace.getCf_defs();
} else {
throw new Exception(BaseMessages.getString(CassandraOutputMeta.PKG,
"CassandraOutput.Error.UnableToGetKeyspaceMetaData"));
}
// look for the requested column family
CfDef colFamDefToUpdate = null;
for (CfDef fam : colFams) {
String columnFamilyName = fam.getName(); // table name
if (columnFamilyName.equals(colFamilyName)) {
colFamDefToUpdate = fam;
break;
}
}
if (colFamDefToUpdate == null) {
throw new Exception(BaseMessages.getString(CassandraOutputMeta.PKG,
"CassandraOutput.Error.CantUpdateMetaData", colFamilyName));
}
String comment = colFamDefToUpdate.getComment();
// collect indexed (discrete legal values) fields for the comment field
List<ValueMetaInterface> indexedVals = new ArrayList<ValueMetaInterface>();
for (int i = 0; i < inputMeta.size(); i++) {
if (i != keyIndex) {
ValueMetaInterface colMeta = inputMeta.getValueMeta(i);
if (colMeta.getStorageType() == ValueMetaInterface.STORAGE_TYPE_INDEXED) {
indexedVals.add(colMeta);
}
String colName = colMeta.getName();
// add a column definition for any field not yet in the schema
if (!cassandraMeta.columnExistsInSchema(colName)) {
String colType = CassandraColumnMetaData
.getCassandraTypeForValueMeta(colMeta);
// NOTE(review): getBytes() uses the platform default charset here -
// presumably UTF-8 is intended (as elsewhere in this class); verify
ColumnDef newCol = new ColumnDef(ByteBuffer.wrap(colName
.getBytes()), colType);
colFamDefToUpdate.addToColumn_metadata(newCol);
}
}
}
// update the comment fields for any new indexed vals
if (indexedVals.size() > 0) {
String before = "";
String after = "";
String meta = "";
if (comment != null && comment.length() > 0) {
// is there any indexed value meta data there already?
if (comment.indexOf("@@@") >= 0) {
// have to strip out existing stuff
before = comment.substring(0, comment.indexOf("@@@"));
after = comment.substring(comment.lastIndexOf("@@@") + 3,
comment.length());
// NOTE(review): this looks intended to capture the text BETWEEN the
// first and last "@@@" markers, but indexOf starting from
// lastIndexOf yields the LAST marker, so 'meta' ends up holding the
// tail of the comment instead - confirm against the format written
// by createColumnFamily
meta = comment.substring(comment.indexOf("@@@",
comment.lastIndexOf("@@@")));
meta = meta.replace("@@@", "");
}
}
StringBuffer buff = new StringBuffer();
buff.append(meta);
for (ValueMetaInterface vm : indexedVals) {
String colName = vm.getName();
// only record columns not already present in the stored meta data
if (meta.indexOf(colName) < 0) {
// add this one
Object[] legalVals = vm.getIndex();
if (buff.length() > 0) {
buff.append(";").append(colName).append(":{");
} else {
buff.append(colName).append(":{");
}
for (int i = 0; i < legalVals.length; i++) {
buff.append(legalVals[i].toString());
if (i != legalVals.length - 1) {
buff.append(",");
}
}
buff.append("}");
}
}
comment = before + "@@@" + buff.toString() + "@@@" + after;
colFamDefToUpdate.setComment(comment);
}
conn.getClient().system_update_column_family(colFamDefToUpdate);
// get the cassandraMeta to refresh itself
cassandraMeta.refresh(conn);
}
/**
 * Static utility method that executes a set of semicolon separated CQL
 * commands against a keyspace. In the context of CassandraOutput this
 * method can be used to execute CQL commands (to create secondary indexes
 * for example) before rows are inserted into the column family in question.
 * Failures are logged (not thrown) so remaining statements still run.
 *
 * @param conn
 *            the connection to use
 * @param cql
 *            the string containing the semicolon separated cql commands to
 *            execute
 * @param log
 *            the logging object to log errors to
 * @param compressCQL
 *            true if the cql commands should be compressed before sending
 *            to the server
 */
public static void executeAprioriCQL(CassandraConnection conn, String cql,
LogChannelInterface log, boolean compressCQL) {
    // split out separate statements
    String[] cqlRequests = cql.split(";");
    for (String cqlC : cqlRequests) {
        cqlC = cqlC.trim();
        // BUG FIX: skip empty fragments (produced by ";;" or surrounding
        // whitespace) instead of sending a bare ";" to the server, which
        // the original code did and which always failed with a logged error
        if (cqlC.length() == 0) {
            continue;
        }
        // split(";") strips the terminators - put one back
        if (!cqlC.endsWith(";")) {
            cqlC += ";";
        }
        // try and execute it
        byte[] toSend = null;
        if (compressCQL) {
            toSend = compressQuery(cqlC, Compression.GZIP);
        } else {
            toSend = cqlC.getBytes(Charset
                .forName(CassandraColumnMetaData.UTF8));
        }
        String errorMessage = null;
        try {
            conn.getClient().execute_cql_query(ByteBuffer.wrap(toSend),
                compressCQL ? Compression.GZIP : Compression.NONE);
        } catch (InvalidRequestException e) {
            errorMessage = e.why;
        } catch (UnavailableException e) {
            errorMessage = e.getMessage();
        } catch (TimedOutException e) {
            errorMessage = e.getMessage();
        } catch (SchemaDisagreementException e) {
            errorMessage = e.getMessage();
        } catch (TException e) {
            errorMessage = e.getMessage();
        }
        if (errorMessage != null) {
            log.logBasic("Unable to execute a priori CQL command '"
                + cqlC + "'. (" + errorMessage + ")");
        }
    }
}
/**
 * Constructs and executes a CQL statement to create a new column family.
 * Uses Cassandra defaults for default comparator, key_cache size etc. at
 * present. Any incoming fields with discrete legal values are recorded in
 * the column family's comment field between "@@@" markers.
 *
 * @param conn
 *            the connection to use
 * @param colFamilyName
 *            the name of the column family (table) to create
 * @param inputMeta
 *            the row meta information on the incoming fields to be inserted
 *            into this new column family
 * @param keyIndex
 *            the index of the incoming field that is to be used as the key
 *            for the new column family
 * @param compressCQL
 *            true if the CQL statement is to be compressed before sending
 *            to the server
 * @return true if the CREATE statement was executed; false if the incoming
 *         row holds only the key field (nothing could ever be inserted)
 * @throws Exception
 *             if a problem occurs
 */
public static boolean createColumnFamily(CassandraConnection conn,
String colFamilyName, RowMetaInterface inputMeta, int keyIndex,
boolean compressCQL) throws Exception {
// TODO handle optional keywords for column family creation - default
// comparator, key_cache_size etc.
// Will require more UI and Meta class setters/getters
StringBuffer buff = new StringBuffer();
buff.append("CREATE COLUMNFAMILY " + colFamilyName);
// the key column, typed from the incoming key field
ValueMetaInterface kvm = inputMeta.getValueMeta(keyIndex);
buff.append(" (KEY ")
.append("'"
+ CassandraColumnMetaData.getCQLTypeForValueMeta(kvm)
+ "'");
buff.append(" PRIMARY KEY");
List<ValueMetaInterface> indexedVals = new ArrayList<ValueMetaInterface>();
if (inputMeta.size() > 1) {
// one typed column definition per non-key incoming field
for (int i = 0; i < inputMeta.size(); i++) {
if (i != keyIndex) {
ValueMetaInterface vm = inputMeta.getValueMeta(i);
if (vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_INDEXED) {
indexedVals.add(vm);
}
String colName = vm.getName();
String colType = "'"
+ CassandraColumnMetaData
.getCQLTypeForValueMeta(vm) + "'";
buff.append(", ");
buff.append("'" + colName + "'").append(" ");
buff.append(colType);
}
}
} else {
return false; // we can't insert any data if there is only the key
// coming
// into the step
}
// abuse the comment field to store any indexed values :-)
if (indexedVals.size() == 0) {
buff.append(");");
} else {
buff.append(") WITH comment = '@@@");
// format: colName:{v1,v2,...};colName2:{...}
int count = 0;
for (ValueMetaInterface vm : indexedVals) {
String colName = vm.getName();
Object[] legalVals = vm.getIndex();
buff.append(colName).append(":{");
for (int i = 0; i < legalVals.length; i++) {
buff.append(legalVals[i].toString());
if (i != legalVals.length - 1) {
buff.append(",");
}
}
buff.append("}");
// ";" separates entries; none after the last one
if (count != indexedVals.size() - 1) {
buff.append(";");
}
count++;
}
buff.append("@@@';");
}
byte[] toSend = null;
if (compressCQL) {
toSend = compressQuery(buff.toString(), Compression.GZIP);
} else {
toSend = buff.toString().getBytes(
Charset.forName(CassandraColumnMetaData.UTF8));
}
conn.getClient().execute_cql_query(ByteBuffer.wrap(toSend),
compressCQL ? Compression.GZIP : Compression.NONE);
return true;
}
/**
 * Compress a CQL query using DEFLATE (java.util.zip.Deflater).
 *
 * @param queryStr
 *            the CQL query
 * @param compression
 *            compression option (GZIP is the only option - so far; the
 *            argument is not otherwise consulted by this implementation)
 * @return an array of bytes containing the compressed query
 */
public static byte[] compressQuery(String queryStr, Compression compression) {
    byte[] data = queryStr.getBytes(Charset
        .forName(CassandraColumnMetaData.UTF8));
    Deflater compressor = new Deflater();
    try {
        compressor.setInput(data);
        compressor.finish();
        ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        while (!compressor.finished()) {
            int size = compressor.deflate(buffer);
            byteArray.write(buffer, 0, size);
        }
        return byteArray.toByteArray();
    } finally {
        // BUG FIX: release the Deflater's native (off-heap) memory eagerly
        // instead of waiting for finalization
        compressor.end();
    }
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandraoutput/CassandraOutputDialog.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandraoutput/CassandraOutputDialog.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandraoutput;
import java.util.ArrayList;
import java.util.List;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.cassandra.CassandraColumnMetaData;
import org.pentaho.cassandra.CassandraConnection;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.dialog.ShowMessageDialog;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
/**
* Dialog class for the CassandraOutput step
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*/
public class CassandraOutputDialog extends BaseStepDialog implements
StepDialogInterface {
// i18n message bundle is resolved against the meta class, not the dialog class
private static final Class<?> PKG = CassandraOutputMeta.class;
// working copy of the step meta data edited by this dialog
private final CassandraOutputMeta m_currentMeta;
// pristine clone taken at construction time (used to detect/restore changes)
private final CassandraOutputMeta m_originalMeta;
/** various UI bits and pieces for the dialog */
// step name line
private Label m_stepnameLabel;
private Text m_stepnameText;
// tab folder holding the connection / write / schema tabs
private CTabFolder m_wTabFolder;
private CTabItem m_connectionTab;
private CTabItem m_writeTab;
private CTabItem m_schemaTab;
// --- connection tab widgets ---
private Label m_hostLab;
private TextVar m_hostText;
private Label m_portLab;
private TextVar m_portText;
private Label m_userLab;
private TextVar m_userText;
private Label m_passLab;
private TextVar m_passText;
private Label m_socketTimeoutLab;
private TextVar m_socketTimeoutText;
private Label m_keyspaceLab;
private TextVar m_keyspaceText;
// --- write tab widgets ---
private Label m_columnFamilyLab;
private CCombo m_columnFamilyCombo;
private Button m_getColumnFamiliesBut;
private Label m_consistencyLab;
private TextVar m_consistencyText;
private Label m_batchSizeLab;
private TextVar m_batchSizeText;
private Label m_batchInsertTimeoutLab;
private TextVar m_batchInsertTimeoutText;
private Label m_subBatchSizeLab;
private TextVar m_subBatchSizeText;
private Label m_keyFieldLab;
private CCombo m_keyFieldCombo;
private Button m_getFieldsBut;
private Label m_useThriftIOLab;
private Button m_useThriftIOCheck;
// --- schema tab widgets ---
private Label m_schemaHostLab;
private TextVar m_schemaHostText;
private Label m_schemaPortLab;
private TextVar m_schemaPortText;
private Label m_createColumnFamilyLab;
private Button m_createColumnFamilyBut;
private Label m_truncateColumnFamilyLab;
private Button m_truncateColumnFamilyBut;
private Label m_updateColumnFamilyMetaDataLab;
private Button m_updateColumnFamilyMetaDataBut;
private Label m_insertFieldsNotInColumnFamMetaLab;
private Button m_insertFieldsNotInColumnFamMetaBut;
private Label m_compressionLab;
private Button m_useCompressionBut;
private Button m_showSchemaBut;
private Button m_aprioriCQLBut;
// CQL statements to run before the first insert (edited via m_aprioriCQLBut)
private String m_aprioriCQL;
// "as index column" check box on the write tab
// NOTE(review): exact semantics set on the meta class - confirm against CassandraOutputMeta
private Label asIndexColumn;
private Button asIndexColumnBut;
public CassandraOutputDialog(Shell parent, Object in, TransMeta tr,
String name) {
super(parent, (BaseStepMeta) in, tr, name);
m_currentMeta = (CassandraOutputMeta) in;
m_originalMeta = (CassandraOutputMeta) m_currentMeta.clone();
}
public String open() {
Shell parent = getParent();
Display display = parent.getDisplay();
shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX);
props.setLook(shell);
setShellImage(shell, m_currentMeta);
// used to listen to a text field (m_wStepname)
final ModifyListener lsMod = new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_currentMeta.setChanged();
}
};
changed = m_currentMeta.hasChanged();
FormLayout formLayout = new FormLayout();
formLayout.marginWidth = Const.FORM_MARGIN;
formLayout.marginHeight = Const.FORM_MARGIN;
shell.setLayout(formLayout);
shell.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Shell.Title"));
int middle = props.getMiddlePct();
int margin = Const.MARGIN;
// Stepname line
m_stepnameLabel = new Label(shell, SWT.RIGHT);
m_stepnameLabel.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.StepName.Label"));
props.setLook(m_stepnameLabel);
FormData fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.right = new FormAttachment(middle, -margin);
fd.top = new FormAttachment(0, margin);
m_stepnameLabel.setLayoutData(fd);
m_stepnameText = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
m_stepnameText.setText(stepname);
props.setLook(m_stepnameText);
m_stepnameText.addModifyListener(lsMod);
// format the text field
fd = new FormData();
fd.left = new FormAttachment(middle, 0);
fd.top = new FormAttachment(0, margin);
fd.right = new FormAttachment(100, 0);
m_stepnameText.setLayoutData(fd);
m_wTabFolder = new CTabFolder(shell, SWT.BORDER);
props.setLook(m_wTabFolder, Props.WIDGET_STYLE_TAB);
m_wTabFolder.setSimple(false);
// start of the connection tab
m_connectionTab = new CTabItem(m_wTabFolder, SWT.BORDER);
m_connectionTab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Tab.Connection"));
Composite wConnectionComp = new Composite(m_wTabFolder, SWT.NONE);
props.setLook(wConnectionComp);
FormLayout connectionLayout = new FormLayout();
connectionLayout.marginWidth = 3;
connectionLayout.marginHeight = 3;
wConnectionComp.setLayout(connectionLayout);
// host line
m_hostLab = new Label(wConnectionComp, SWT.RIGHT);
props.setLook(m_hostLab);
m_hostLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Hostname.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_stepnameText, margin);
fd.right = new FormAttachment(middle, -margin);
m_hostLab.setLayoutData(fd);
m_hostText = new TextVar(transMeta, wConnectionComp, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_hostText);
m_hostText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_hostText.setToolTipText(transMeta.environmentSubstitute(m_hostText
.getText()));
}
});
m_hostText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_stepnameText, margin);
fd.left = new FormAttachment(middle, 0);
m_hostText.setLayoutData(fd);
// port line
m_portLab = new Label(wConnectionComp, SWT.RIGHT);
props.setLook(m_portLab);
m_portLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Port.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_hostText, margin);
fd.right = new FormAttachment(middle, -margin);
m_portLab.setLayoutData(fd);
m_portText = new TextVar(transMeta, wConnectionComp, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_portText);
m_portText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_portText.setToolTipText(transMeta.environmentSubstitute(m_portText
.getText()));
}
});
m_portText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_hostText, margin);
fd.left = new FormAttachment(middle, 0);
m_portText.setLayoutData(fd);
// socket timeout line
m_socketTimeoutLab = new Label(wConnectionComp, SWT.RIGHT);
props.setLook(m_socketTimeoutLab);
m_socketTimeoutLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.SocketTimeout.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_portText, margin);
fd.right = new FormAttachment(middle, -margin);
m_socketTimeoutLab.setLayoutData(fd);
m_socketTimeoutText = new TextVar(transMeta, wConnectionComp, SWT.SINGLE
| SWT.LEFT | SWT.BORDER);
props.setLook(m_socketTimeoutText);
m_socketTimeoutText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_socketTimeoutText.setToolTipText(transMeta
.environmentSubstitute(m_socketTimeoutText.getText()));
}
});
m_socketTimeoutText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_portText, margin);
fd.left = new FormAttachment(middle, 0);
m_socketTimeoutText.setLayoutData(fd);
// username line
m_userLab = new Label(wConnectionComp, SWT.RIGHT);
props.setLook(m_userLab);
m_userLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.User.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_socketTimeoutText, margin);
fd.right = new FormAttachment(middle, -margin);
m_userLab.setLayoutData(fd);
m_userText = new TextVar(transMeta, wConnectionComp, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_userText);
m_userText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_userText.setToolTipText(transMeta.environmentSubstitute(m_userText
.getText()));
}
});
m_userText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_socketTimeoutText, margin);
fd.left = new FormAttachment(middle, 0);
m_userText.setLayoutData(fd);
// password line
m_passLab = new Label(wConnectionComp, SWT.RIGHT);
props.setLook(m_passLab);
m_passLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Password.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_userText, margin);
fd.right = new FormAttachment(middle, -margin);
m_passLab.setLayoutData(fd);
m_passText = new TextVar(transMeta, wConnectionComp, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_passText);
m_passText.setEchoChar('*');
// If the password contains a variable, don't hide it.
m_passText.getTextWidget().addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
checkPasswordVisible();
}
});
m_passText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_userText, margin);
fd.left = new FormAttachment(middle, 0);
m_passText.setLayoutData(fd);
// keyspace line
m_keyspaceLab = new Label(wConnectionComp, SWT.RIGHT);
props.setLook(m_keyspaceLab);
m_keyspaceLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Keyspace.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_passText, margin);
fd.right = new FormAttachment(middle, -margin);
m_keyspaceLab.setLayoutData(fd);
m_keyspaceText = new TextVar(transMeta, wConnectionComp, SWT.SINGLE
| SWT.LEFT | SWT.BORDER);
props.setLook(m_keyspaceText);
m_keyspaceText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_keyspaceText.setToolTipText(transMeta
.environmentSubstitute(m_keyspaceText.getText()));
}
});
m_keyspaceText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_passText, margin);
fd.left = new FormAttachment(middle, 0);
m_keyspaceText.setLayoutData(fd);
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(0, 0);
fd.right = new FormAttachment(100, 0);
fd.bottom = new FormAttachment(100, 0);
wConnectionComp.setLayoutData(fd);
wConnectionComp.layout();
m_connectionTab.setControl(wConnectionComp);
// --- start of the write tab ---
m_writeTab = new CTabItem(m_wTabFolder, SWT.NONE);
m_writeTab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Tab.Write"));
Composite wWriteComp = new Composite(m_wTabFolder, SWT.NONE);
props.setLook(wWriteComp);
FormLayout writeLayout = new FormLayout();
writeLayout.marginWidth = 3;
writeLayout.marginHeight = 3;
wWriteComp.setLayout(writeLayout);
// column family line
m_columnFamilyLab = new Label(wWriteComp, SWT.RIGHT);
props.setLook(m_columnFamilyLab);
m_columnFamilyLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.ColumnFamily.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(0, 0);
fd.right = new FormAttachment(middle, -margin);
m_columnFamilyLab.setLayoutData(fd);
m_getColumnFamiliesBut = new Button(wWriteComp, SWT.PUSH | SWT.CENTER);
props.setLook(m_getColumnFamiliesBut);
m_getColumnFamiliesBut.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.GetColFam.Button"));
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(0, 0);
m_getColumnFamiliesBut.setLayoutData(fd);
m_getColumnFamiliesBut.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
setupColumnFamiliesCombo();
}
});
m_columnFamilyCombo = new CCombo(wWriteComp, SWT.BORDER);
props.setLook(m_columnFamilyCombo);
m_columnFamilyCombo.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_columnFamilyCombo.setToolTipText(transMeta
.environmentSubstitute(m_columnFamilyCombo.getText()));
}
});
m_columnFamilyCombo.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(m_getColumnFamiliesBut, -margin);
fd.top = new FormAttachment(0, margin);
fd.left = new FormAttachment(middle, 0);
m_columnFamilyCombo.setLayoutData(fd);
// consistency line
m_consistencyLab = new Label(wWriteComp, SWT.RIGHT);
props.setLook(m_consistencyLab);
m_consistencyLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Consistency.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_columnFamilyCombo, margin);
fd.right = new FormAttachment(middle, -margin);
m_consistencyLab.setLayoutData(fd);
m_consistencyLab.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Consistency.Label.TipText"));
m_consistencyText = new TextVar(transMeta, wWriteComp, SWT.SINGLE
| SWT.LEFT | SWT.BORDER);
props.setLook(m_consistencyText);
m_consistencyText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_consistencyText.setToolTipText(transMeta
.environmentSubstitute(m_consistencyText.getText()));
}
});
m_consistencyText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_columnFamilyCombo, margin);
fd.left = new FormAttachment(middle, 0);
m_consistencyText.setLayoutData(fd);
// batch size line
m_batchSizeLab = new Label(wWriteComp, SWT.RIGHT);
props.setLook(m_batchSizeLab);
m_batchSizeLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.BatchSize.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_consistencyText, margin);
fd.right = new FormAttachment(middle, -margin);
m_batchSizeLab.setLayoutData(fd);
m_batchSizeLab.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.BatchSize.TipText"));
m_batchSizeText = new TextVar(transMeta, wWriteComp, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_batchSizeText);
m_batchSizeText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_batchSizeText.setToolTipText(transMeta
.environmentSubstitute(m_batchSizeText.getText()));
}
});
m_batchSizeText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_consistencyText, margin);
fd.left = new FormAttachment(middle, 0);
m_batchSizeText.setLayoutData(fd);
// batch insert timeout
m_batchInsertTimeoutLab = new Label(wWriteComp, SWT.RIGHT);
props.setLook(m_batchInsertTimeoutLab);
m_batchInsertTimeoutLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.BatchInsertTimeout.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_batchSizeText, margin);
fd.right = new FormAttachment(middle, -margin);
m_batchInsertTimeoutLab.setLayoutData(fd);
m_batchInsertTimeoutLab.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.BatchInsertTimeout.TipText"));
m_batchInsertTimeoutText = new TextVar(transMeta, wWriteComp, SWT.SINGLE
| SWT.LEFT | SWT.BORDER);
props.setLook(m_batchInsertTimeoutText);
m_batchInsertTimeoutText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_batchInsertTimeoutText.setToolTipText(transMeta
.environmentSubstitute(m_batchInsertTimeoutText.getText()));
}
});
m_batchInsertTimeoutText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_batchSizeText, margin);
fd.left = new FormAttachment(middle, 0);
m_batchInsertTimeoutText.setLayoutData(fd);
// sub-batch size
m_subBatchSizeLab = new Label(wWriteComp, SWT.RIGHT);
props.setLook(m_subBatchSizeLab);
m_subBatchSizeLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.SubBatchSize.Label"));
m_subBatchSizeLab.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.SubBatchSize.TipText"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_batchInsertTimeoutText, margin);
fd.right = new FormAttachment(middle, -margin);
m_subBatchSizeLab.setLayoutData(fd);
m_subBatchSizeText = new TextVar(transMeta, wWriteComp, SWT.SINGLE
| SWT.LEFT | SWT.BORDER);
props.setLook(m_subBatchSizeText);
m_subBatchSizeText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_subBatchSizeText.setToolTipText(transMeta
.environmentSubstitute(m_subBatchSizeText.getText()));
}
});
m_subBatchSizeText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_batchInsertTimeoutText, margin);
fd.left = new FormAttachment(middle, 0);
m_subBatchSizeText.setLayoutData(fd);
// key field line
m_keyFieldLab = new Label(wWriteComp, SWT.RIGHT);
props.setLook(m_keyFieldLab);
m_keyFieldLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.KeyField.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_subBatchSizeText, margin);
fd.right = new FormAttachment(middle, -margin);
m_keyFieldLab.setLayoutData(fd);
m_getFieldsBut = new Button(wWriteComp, SWT.PUSH | SWT.CENTER);
props.setLook(m_getFieldsBut);
m_getFieldsBut.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.GetFields.Button"));
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_subBatchSizeText, 0);
m_getFieldsBut.setLayoutData(fd);
m_getFieldsBut.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
setupFieldsCombo();
}
});
m_keyFieldCombo = new CCombo(wWriteComp, SWT.BORDER);
m_keyFieldCombo.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_keyFieldCombo.setToolTipText(transMeta
.environmentSubstitute(m_keyFieldCombo.getText()));
}
});
m_keyFieldCombo.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(m_getFieldsBut, -margin);
fd.top = new FormAttachment(m_subBatchSizeText, margin);
fd.left = new FormAttachment(middle, 0);
m_keyFieldCombo.setLayoutData(fd);
m_useThriftIOLab = new Label(wWriteComp, SWT.RIGHT);
props.setLook(m_useThriftIOLab);
m_useThriftIOLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.UseThriftIO.Label"));
m_useThriftIOLab.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.UseThriftIO.TipText"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_keyFieldCombo, margin);
fd.right = new FormAttachment(middle, -margin);
m_useThriftIOLab.setLayoutData(fd);
m_useThriftIOCheck = new Button(wWriteComp, SWT.CHECK);
m_useThriftIOCheck.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.UseThriftIO.TipText"));
props.setLook(m_useThriftIOCheck);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_keyFieldCombo, margin);
fd.left = new FormAttachment(middle, 0);
m_useThriftIOCheck.setLayoutData(fd);
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(0, 0);
fd.right = new FormAttachment(100, 0);
fd.bottom = new FormAttachment(100, 0);
wWriteComp.setLayoutData(fd);
asIndexColumn = new Label(wWriteComp, SWT.RIGHT);
props.setLook(asIndexColumn);
asIndexColumn.setText("As index column");
asIndexColumn.setToolTipText("As index column");
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_useThriftIOCheck, margin);
fd.right = new FormAttachment(middle, -margin);
asIndexColumn.setLayoutData(fd);
asIndexColumnBut = new Button(wWriteComp, SWT.CHECK);
props.setLook(asIndexColumnBut);
asIndexColumnBut.setToolTipText("As index column");
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.left = new FormAttachment(middle, 0);
fd.top = new FormAttachment(m_useThriftIOCheck, margin);
asIndexColumnBut.setLayoutData(fd);
asIndexColumnBut.addSelectionListener(new SelectionAdapter() {
public void widgetSelected(SelectionEvent e) {
m_currentMeta.setChanged();
}
});
wWriteComp.layout();
m_writeTab.setControl(wWriteComp);
// show schema button
m_showSchemaBut = new Button(wWriteComp, SWT.PUSH);
m_showSchemaBut.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.Schema.Button"));
props.setLook(m_showSchemaBut);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.bottom = new FormAttachment(100, -margin * 2);
m_showSchemaBut.setLayoutData(fd);
m_showSchemaBut.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
popupSchemaInfo();
}
});
// ---- start of the schema options tab ----
m_schemaTab = new CTabItem(m_wTabFolder, SWT.NONE);
m_schemaTab.setText(BaseMessages.getString(PKG,
"CassandraOutputData.Tab.Schema"));
Composite wSchemaComp = new Composite(m_wTabFolder, SWT.NONE);
props.setLook(wSchemaComp);
FormLayout schemaLayout = new FormLayout();
schemaLayout.marginWidth = 3;
schemaLayout.marginHeight = 3;
wSchemaComp.setLayout(schemaLayout);
// schema host line
m_schemaHostLab = new Label(wSchemaComp, SWT.RIGHT);
props.setLook(m_schemaHostLab);
m_schemaHostLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.SchemaHostname.Label"));
m_schemaHostLab.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.SchemaHostname.TipText"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(0, margin);
fd.right = new FormAttachment(middle, -margin);
m_schemaHostLab.setLayoutData(fd);
m_schemaHostText = new TextVar(transMeta, wSchemaComp, SWT.SINGLE
| SWT.LEFT | SWT.BORDER);
props.setLook(m_schemaHostText);
m_schemaHostText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_schemaHostText.setToolTipText(transMeta
.environmentSubstitute(m_schemaHostText.getText()));
}
});
m_schemaHostText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(0, margin);
fd.left = new FormAttachment(middle, 0);
m_schemaHostText.setLayoutData(fd);
// schema port line
m_schemaPortLab = new Label(wSchemaComp, SWT.RIGHT);
props.setLook(m_schemaPortLab);
m_schemaPortLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.SchemaPort.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_schemaHostText, margin);
fd.right = new FormAttachment(middle, -margin);
m_schemaPortLab.setLayoutData(fd);
m_schemaPortText = new TextVar(transMeta, wSchemaComp, SWT.SINGLE
| SWT.LEFT | SWT.BORDER);
props.setLook(m_schemaPortText);
m_schemaPortText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_schemaPortText.setToolTipText(transMeta
.environmentSubstitute(m_schemaPortText.getText()));
}
});
m_schemaPortText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_schemaHostText, margin);
fd.left = new FormAttachment(middle, 0);
m_schemaPortText.setLayoutData(fd);
// create column family line
m_createColumnFamilyLab = new Label(wSchemaComp, SWT.RIGHT);
props.setLook(m_createColumnFamilyLab);
m_createColumnFamilyLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.CreateColumnFamily.Label"));
m_createColumnFamilyLab.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.CreateColumnFamily.TipText"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_schemaPortText, margin);
fd.right = new FormAttachment(middle, -margin);
m_createColumnFamilyLab.setLayoutData(fd);
m_createColumnFamilyBut = new Button(wSchemaComp, SWT.CHECK);
m_createColumnFamilyBut.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.CreateColumnFamily.TipText"));
props.setLook(m_createColumnFamilyBut);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_schemaPortText, margin);
fd.left = new FormAttachment(middle, 0);
m_createColumnFamilyBut.setLayoutData(fd);
m_createColumnFamilyBut.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
m_currentMeta.setChanged();
}
});
// truncate column family line
m_truncateColumnFamilyLab = new Label(wSchemaComp, SWT.RIGHT);
props.setLook(m_truncateColumnFamilyLab);
m_truncateColumnFamilyLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.TruncateColumnFamily.Label"));
m_truncateColumnFamilyLab.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.TruncateColumnFamily.TipText"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_createColumnFamilyBut, margin);
fd.right = new FormAttachment(middle, -margin);
m_truncateColumnFamilyLab.setLayoutData(fd);
m_truncateColumnFamilyBut = new Button(wSchemaComp, SWT.CHECK);
m_truncateColumnFamilyBut.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.TruncateColumnFamily.TipText"));
props.setLook(m_truncateColumnFamilyBut);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_createColumnFamilyBut, margin);
fd.left = new FormAttachment(middle, 0);
m_truncateColumnFamilyBut.setLayoutData(fd);
m_truncateColumnFamilyBut.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
m_currentMeta.setChanged();
}
});
// update column family meta data line
m_updateColumnFamilyMetaDataLab = new Label(wSchemaComp, SWT.RIGHT);
props.setLook(m_updateColumnFamilyMetaDataLab);
m_updateColumnFamilyMetaDataLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.UpdateColumnFamilyMetaData.Label"));
m_updateColumnFamilyMetaDataLab.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.UpdateColumnFamilyMetaData.TipText"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_truncateColumnFamilyBut, margin);
fd.right = new FormAttachment(middle, -margin);
m_updateColumnFamilyMetaDataLab.setLayoutData(fd);
m_updateColumnFamilyMetaDataBut = new Button(wSchemaComp, SWT.CHECK);
m_updateColumnFamilyMetaDataBut.setToolTipText(BaseMessages.getString(PKG,
"CassandraOutputDialog.UpdateColumnFamilyMetaData.TipText"));
props.setLook(m_updateColumnFamilyMetaDataBut);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_truncateColumnFamilyBut, margin);
fd.left = new FormAttachment(middle, 0);
m_updateColumnFamilyMetaDataBut.setLayoutData(fd);
m_updateColumnFamilyMetaDataBut
.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
m_currentMeta.setChanged();
}
});
// insert fields not in meta line
m_insertFieldsNotInColumnFamMetaLab = new Label(wSchemaComp, SWT.RIGHT);
props.setLook(m_insertFieldsNotInColumnFamMetaLab);
m_insertFieldsNotInColumnFamMetaLab.setText(BaseMessages.getString(PKG,
"CassandraOutputDialog.InsertFieldsNotInColumnFamMetaData.Label"));
m_insertFieldsNotInColumnFamMetaLab
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | true |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandraoutput;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Counter;
import org.pentaho.di.core.annotations.Step;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.w3c.dom.Node;
/**
* Class providing an output step for writing data to a cassandra table (column
* family). Can create the specified column family (if it doesn't already exist)
* and can update column family meta data.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*/
@Step(id = "CassandraOutput", image = "Cassandra.png", name = "Cassandra Output", description = "Writes to a Cassandra table", categoryDescription = "Big Data")
public class CassandraOutputMeta extends BaseStepMeta implements
StepMetaInterface {
/** Class used to resolve i18n message bundle keys */
protected static final Class<?> PKG = CassandraOutputMeta.class;
/** The host to contact */
protected String m_cassandraHost = "localhost";
/** The port that cassandra is listening on */
protected String m_cassandraPort = "9160";
/** The username to use for authentication */
protected String m_username;
/** The password to use for authentication */
protected String m_password;
/** The keyspace (database) to use */
protected String m_cassandraKeyspace;
/** The cassandra node to put schema updates through */
protected String m_schemaHost;
/** The port of the cassandra node for schema updates */
protected String m_schemaPort;
/** The column family (table) to write to */
protected String m_columnFamily = "";
/** The consistency level to use - null or empty string result in the default */
protected String m_consistency = "";
/**
 * The batch size - i.e. how many rows to collect before inserting them via a
 * batch CQL statement
 */
protected String m_batchSize = "100";
/** Whether to use GZIP compression of CQL queries */
protected boolean m_useCompression = false;
/** Whether to create the specified column family (table) if it doesn't exist */
protected boolean m_createColumnFamily = true;
/** The field in the incoming data to use as the key for inserts */
protected String m_keyField = "";
/**
 * Timeout (milliseconds) to use for socket connections - blank means use
 * cluster default
 */
protected String m_socketTimeout = "";
/**
 * Timeout (milliseconds) to use for CQL batch inserts. If blank, no timeout
 * is used. Otherwise, when the timeout occurs the step will try to kill the
 * insert and re-try after splitting the batch according to the batch split
 * factor
 */
protected String m_cqlBatchTimeout = "";
/**
 * Default batch split size - only comes into play if cql batch timeout has
 * been specified. Specifies the size of the sub-batches to split the batch
 * into if a timeout occurs.
 */
protected String m_cqlSubBatchSize = "10";
/**
 * Whether or not to insert incoming fields that are not in the cassandra
 * table's meta data. Has no effect if the user has opted to update the meta
 * data for unknown incoming fields
 */
protected boolean m_insertFieldsNotInMeta = false;
/**
 * Whether or not to initially update the column family meta data with any
 * unknown incoming fields
 */
protected boolean m_updateCassandraMeta = false;
/** Whether to truncate the column family (table) before inserting */
protected boolean m_truncateColumnFamily = false;
/**
 * Any CQL statements to execute before inserting the first row. Can be used,
 * for example, to create secondary indexes on columns in a column family.
 */
protected String m_aprioriCQL = "";
/** Use thrift IO-based batch mutate instead of CQL? */
protected boolean m_useThriftIO = false;
/** "As index column" option exposed in the output dialog - TODO confirm exact write-path semantics */
protected boolean asIndexColumn = false;
/**
 * Set the host for sending schema updates to
 *
 * @param s the host for sending schema updates to
 */
public void setSchemaHost(String s) {
m_schemaHost = s;
}
/**
 * Get the host for sending schema updates to
 *
 * @return the host for sending schema updates to
 */
public String getSchemaHost() {
return m_schemaHost;
}
/**
* Set the port for the schema update host
*
* @param p port for the schema update host
*/
public void setSchemaPort(String p) {
m_schemaPort = p;
}
/**
* Get the port for the schema update host
*
* @return port for the schema update host
*/
public String getSchemaPort() {
return m_schemaPort;
}
/**
* Set how many sub-batches a batch should be split into when an insert times
* out.
*
* @param f the number of sub-batches to create when an insert times out.
*/
public void setCQLSubBatchSize(String f) {
m_cqlSubBatchSize = f;
}
/**
* Get how many sub-batches a batch should be split into when an insert times
* out.
*
* @return the number of sub-batches to create when an insert times out.
*/
public String getCQLSubBatchSize() {
return m_cqlSubBatchSize;
}
/**
* Set the timeout for failing a batch insert attempt.
*
* @param t the time (milliseconds) to wait for a batch insert to succeed.
*/
public void setCQLBatchInsertTimeout(String t) {
m_cqlBatchTimeout = t;
}
/**
* Get the timeout for failing a batch insert attempt.
*
* @return the time (milliseconds) to wait for a batch insert to succeed.
*/
public String getCQLBatchInsertTimeout() {
return m_cqlBatchTimeout;
}
/**
* Set the timeout (milliseconds) to use for socket comms
*
* @param t the timeout to use in milliseconds
*/
public void setSocketTimeout(String t) {
m_socketTimeout = t;
}
/**
* Get the timeout (milliseconds) to use for socket comms
*
* @return the timeout to use in milliseconds
*/
public String getSocketTimeout() {
return m_socketTimeout;
}
/**
 * Set the cassandra node hostname to connect to
 *
 * @param host the host to connect to
 */
public void setCassandraHost(String host) {
  m_cassandraHost = host;
}
/**
 * Get the name of the cassandra node to connect to
 *
 * @return the name of the cassandra node to connect to
 */
public String getCassandraHost() {
  return m_cassandraHost;
}
/**
 * Set the port that cassandra is listening on
 *
 * @param port the port that cassandra is listening on
 */
public void setCassandraPort(String port) {
  m_cassandraPort = port;
}
/**
 * Get the port that cassandra is listening on
 *
 * @return the port that cassandra is listening on
 */
public String getCassandraPort() {
  return m_cassandraPort;
}
/**
 * Set the username to authenticate with
 *
 * @param un the username to authenticate with
 */
public void setUsername(String un) {
  m_username = un;
}
/**
 * Get the username to authenticate with
 *
 * @return the username to authenticate with
 */
public String getUsername() {
  return m_username;
}
/**
 * Set the password to authenticate with. The password is held in clear text
 * here; it is encrypted by getXML()/saveRep() when serialized.
 *
 * @param pass the password to authenticate with
 */
public void setPassword(String pass) {
  m_password = pass;
}
/**
 * Get the password to authenticate with
 *
 * @return the password to authenticate with
 */
public String getPassword() {
  return m_password;
}
/**
 * Set the keyspace (db) to use
 *
 * @param keyspace the keyspace to use
 */
public void setCassandraKeyspace(String keyspace) {
  m_cassandraKeyspace = keyspace;
}
/**
 * Get the keyspace (db) to use
 *
 * @return the keyspace (db) to use
 */
public String getCassandraKeyspace() {
  return m_cassandraKeyspace;
}
/**
 * Set the column family (table) to write to
 *
 * @param colFam the name of the column family to write to
 */
public void setColumnFamilyName(String colFam) {
  m_columnFamily = colFam;
}
/**
 * Get the name of the column family to write to
 *
 * @return the name of the column family to write to
 */
public String getColumnFamilyName() {
  return m_columnFamily;
}
/**
 * Set whether to create the specified column family (table) if it doesn't
 * already exist
 *
 * @param create true if the specified column family is to be created if it
 *          doesn't already exist
 */
public void setCreateColumnFamily(boolean create) {
  m_createColumnFamily = create;
}
/**
 * Get whether to create the specified column family (table) if it doesn't
 * already exist
 *
 * @return true if the specified column family is to be created if it doesn't
 *         already exist
 */
public boolean getCreateColumnFamily() {
  return m_createColumnFamily;
}
/**
 * Set the consistency to use (e.g. ONE, QUORUM etc).
 *
 * @param consistency the consistency to use
 */
public void setConsistency(String consistency) {
  m_consistency = consistency;
}
/**
 * Get the consistency to use
 *
 * @return the consistency
 */
public String getConsistency() {
  return m_consistency;
}
/**
 * Set the batch size to use (i.e. max rows to send via a CQL batch insert
 * statement)
 *
 * @param batchSize the max number of rows to send in each CQL batch insert
 */
public void setBatchSize(String batchSize) {
  m_batchSize = batchSize;
}
/**
 * Get the batch size to use (i.e. max rows to send via a CQL batch insert
 * statement)
 *
 * @return the batch size.
 */
public String getBatchSize() {
  return m_batchSize;
}
/**
 * Set whether to compress (GZIP) CQL queries when transmitting them to the
 * server
 *
 * @param c true if CQL queries are to be compressed
 */
public void setUseCompression(boolean c) {
  m_useCompression = c;
}
/**
 * Get whether CQL queries will be compressed (GZIP) or not
 *
 * @return true if CQL queries will be compressed when sending to the server
 */
public boolean getUseCompression() {
  return m_useCompression;
}
/**
 * Set whether or not to insert any incoming fields that are not in the
 * Cassandra table's column meta data. This has no effect if the user has
 * opted to first update the meta data with any unknown columns.
 *
 * @param insert true if incoming fields not found in the table's meta data
 *          are to be inserted (and validated according to the default
 *          validator for the table)
 */
public void setInsertFieldsNotInMeta(boolean insert) {
  m_insertFieldsNotInMeta = insert;
}
/**
 * Get whether or not to insert any incoming fields that are not in the
 * Cassandra table's column meta data. This has no effect if the user has
 * opted to first update the meta data with any unknown columns.
 *
 * @return true if incoming fields not found in the table's meta data are to
 *         be inserted (and validated according to the default validator for
 *         the table)
 */
public boolean getInsertFieldsNotInMeta() {
  return m_insertFieldsNotInMeta;
}
/**
 * Set the incoming field to use as the key for inserts
 *
 * @param keyField the name of the incoming field to use as the key
 */
public void setKeyField(String keyField) {
  m_keyField = keyField;
}
/**
 * Get the name of the incoming field to use as the key for inserts
 *
 * @return the name of the incoming field to use as the key for inserts
 */
public String getKeyField() {
  return m_keyField;
}
/**
 * Set whether to update the column family meta data with any unknown incoming
 * columns
 *
 * @param u true if the meta data is to be updated with any unknown incoming
 *          columns
 */
public void setUpdateCassandraMeta(boolean u) {
  m_updateCassandraMeta = u;
}
/**
 * Get whether to update the column family meta data with any unknown incoming
 * columns
 *
 * @return true if the meta data is to be updated with any unknown incoming
 *         columns
 */
public boolean getUpdateCassandraMeta() {
  return m_updateCassandraMeta;
}
/**
 * Set whether to first truncate (remove all data) the column family (table)
 * before inserting.
 *
 * @param t true if the column family is to be initially truncated.
 */
public void setTruncateColumnFamily(boolean t) {
  m_truncateColumnFamily = t;
}
/**
 * Get whether to first truncate (remove all data) the column family (table)
 * before inserting.
 *
 * @return true if the column family is to be initially truncated.
 */
public boolean getTruncateColumnFamily() {
  return m_truncateColumnFamily;
}
/**
 * Set any cql statements (separated by ;'s) to execute before inserting the
 * first row into the column family. Can be used to do tasks like creating
 * secondary indexes on columns in the table.
 *
 * @param cql cql statements (separated by ;'s) to execute
 */
public void setAprioriCQL(String cql) {
  m_aprioriCQL = cql;
}
/**
 * Get any cql statements (separated by ;'s) to execute before inserting the
 * first row into the column family. Can be used to do tasks like creating
 * secondary indexes on columns in the table.
 *
 * @return cql statements (separated by ;'s) to execute
 */
public String getAprioriCQL() {
  return m_aprioriCQL;
}
/**
 * Set whether to use Thrift IO-based batch mutate instead of batch CQL.
 *
 * @param useThrift true if Thrift IO is to be used rather than CQL.
 */
public void setUseThriftIO(boolean useThrift) {
  m_useThriftIO = useThrift;
}
/**
 * Get whether to use Thrift IO-based batch mutate instead of batch CQL.
 *
 * @return true if Thrift IO is to be used rather than CQL.
 */
public boolean getUseThriftIO() {
  return m_useThriftIO;
}
/**
 * Serializes this step's configuration to XML.
 * <p>
 * Optional string settings are only written when non-empty; boolean settings
 * are always written. The password is encrypted before being written.
 *
 * @return the XML fragment describing this step's settings
 */
@Override
public String getXML() {
  StringBuilder retval = new StringBuilder();
  appendIfNotEmpty(retval, "cassandra_host", m_cassandraHost);
  appendIfNotEmpty(retval, "cassandra_port", m_cassandraPort);
  appendIfNotEmpty(retval, "schema_host", m_schemaHost);
  appendIfNotEmpty(retval, "schema_port", m_schemaPort);
  appendIfNotEmpty(retval, "socket_timeout", m_socketTimeout);
  if (!Const.isEmpty(m_password)) {
    // never write the password in clear text
    retval.append("\n ").append(
        XMLHandler.addTagValue("password",
            Encr.encryptPasswordIfNotUsingVariables(m_password)));
  }
  // BUG FIX: this tag was written as "cassandra_user", but loadXML(),
  // readRep() and saveRep() all use "username", so the user name was
  // silently lost on an XML round trip.
  appendIfNotEmpty(retval, "username", m_username);
  // BUG FIX: "cassandra_keyspace" was previously written twice.
  appendIfNotEmpty(retval, "cassandra_keyspace", m_cassandraKeyspace);
  appendIfNotEmpty(retval, "column_family", m_columnFamily);
  appendIfNotEmpty(retval, "key_field", m_keyField);
  appendIfNotEmpty(retval, "consistency", m_consistency);
  appendIfNotEmpty(retval, "batch_size", m_batchSize);
  appendIfNotEmpty(retval, "cql_batch_timeout", m_cqlBatchTimeout);
  appendIfNotEmpty(retval, "cql_sub_batch_size", m_cqlSubBatchSize);
  retval.append("\n ").append(
      XMLHandler.addTagValue("create_column_family", m_createColumnFamily));
  retval.append("\n ").append(
      XMLHandler.addTagValue("use_compression", m_useCompression));
  retval.append("\n ").append(
      XMLHandler.addTagValue("insert_fields_not_in_meta",
          m_insertFieldsNotInMeta));
  retval.append("\n ").append(
      XMLHandler.addTagValue("update_cassandra_meta", m_updateCassandraMeta));
  retval.append("\n ").append(
      XMLHandler
          .addTagValue("truncate_column_family", m_truncateColumnFamily));
  appendIfNotEmpty(retval, "apriori_cql", m_aprioriCQL);
  retval.append("\n ").append(
      XMLHandler.addTagValue("use_thrift_io", m_useThriftIO));
  retval.append("\n ").append(
      XMLHandler.addTagValue("asIndexColumn", asIndexColumn));
  return retval.toString();
}
/**
 * Appends an XML tag for {@code value} to {@code buff}, but only when the
 * value is non-empty.
 *
 * @param buff the buffer to append to
 * @param tag the tag name to write
 * @param value the (possibly empty/null) value to write
 */
private static void appendIfNotEmpty(StringBuilder buff, String tag,
    String value) {
  if (!Const.isEmpty(value)) {
    buff.append("\n ").append(XMLHandler.addTagValue(tag, value));
  }
}
/**
 * Reads this step's configuration from a transformation XML node.
 * <p>
 * Boolean tags are parsed null-safely: a missing tag now yields {@code false}
 * instead of throwing a NullPointerException (which previously happened for
 * files written before a given flag existed).
 *
 * @param stepnode the XML node describing this step
 * @param databases the available database connections (not used here)
 * @param counters the available counters (not used here)
 * @throws KettleXMLException if the XML cannot be parsed
 */
public void loadXML(Node stepnode, List<DatabaseMeta> databases,
    Map<String, Counter> counters) throws KettleXMLException {
  m_cassandraHost = XMLHandler.getTagValue(stepnode, "cassandra_host");
  m_cassandraPort = XMLHandler.getTagValue(stepnode, "cassandra_port");
  m_schemaHost = XMLHandler.getTagValue(stepnode, "schema_host");
  m_schemaPort = XMLHandler.getTagValue(stepnode, "schema_port");
  m_socketTimeout = XMLHandler.getTagValue(stepnode, "socket_timeout");
  m_username = XMLHandler.getTagValue(stepnode, "username");
  if (Const.isEmpty(m_username)) {
    // BUG FIX: older versions of getXML() wrote the user name under the
    // "cassandra_user" tag - fall back to it so legacy files still load.
    m_username = XMLHandler.getTagValue(stepnode, "cassandra_user");
  }
  m_password = XMLHandler.getTagValue(stepnode, "password");
  if (!Const.isEmpty(m_password)) {
    // BUG FIX: getXML() writes the password encrypted, but it was read back
    // without decryption; decrypt here to mirror readRep().
    m_password = Encr.decryptPasswordOptionallyEncrypted(m_password);
  }
  m_cassandraKeyspace = XMLHandler
      .getTagValue(stepnode, "cassandra_keyspace");
  m_columnFamily = XMLHandler.getTagValue(stepnode, "column_family");
  m_keyField = XMLHandler.getTagValue(stepnode, "key_field");
  m_consistency = XMLHandler.getTagValue(stepnode, "consistency");
  m_batchSize = XMLHandler.getTagValue(stepnode, "batch_size");
  m_cqlBatchTimeout = XMLHandler.getTagValue(stepnode, "cql_batch_timeout");
  m_cqlSubBatchSize = XMLHandler.getTagValue(stepnode, "cql_sub_batch_size");
  // "Y".equalsIgnoreCase(null) is simply false - no NPE on a missing tag
  m_createColumnFamily = "Y".equalsIgnoreCase(XMLHandler.getTagValue(
      stepnode, "create_column_family"));
  m_useCompression = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode,
      "use_compression"));
  m_insertFieldsNotInMeta = "Y".equalsIgnoreCase(XMLHandler.getTagValue(
      stepnode, "insert_fields_not_in_meta"));
  m_updateCassandraMeta = "Y".equalsIgnoreCase(XMLHandler.getTagValue(
      stepnode, "update_cassandra_meta"));
  m_truncateColumnFamily = "Y".equalsIgnoreCase(XMLHandler.getTagValue(
      stepnode, "truncate_column_family"));
  m_aprioriCQL = XMLHandler.getTagValue(stepnode, "apriori_cql");
  String useThrift = XMLHandler.getTagValue(stepnode, "use_thrift_io");
  if (!Const.isEmpty(useThrift)) {
    m_useThriftIO = useThrift.equalsIgnoreCase("Y");
  }
  // BUG FIX: was an unguarded equalsIgnoreCase() that threw an NPE for files
  // written before this flag existed.
  asIndexColumn = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode,
      "asIndexColumn"));
}
/**
 * Reads this step's configuration from a repository.
 *
 * @param rep the repository to read from
 * @param id_step the id of this step in the repository
 * @param databases the available database connections (not used here)
 * @param counters the available counters (not used here)
 * @throws KettleException if reading from the repository fails
 */
public void readRep(Repository rep, ObjectId id_step,
    List<DatabaseMeta> databases, Map<String, Counter> counters)
    throws KettleException {
  m_cassandraHost = rep.getStepAttributeString(id_step, 0, "cassandra_host");
  m_cassandraPort = rep.getStepAttributeString(id_step, 0, "cassandra_port");
  m_schemaHost = rep.getStepAttributeString(id_step, 0, "schema_host");
  m_schemaPort = rep.getStepAttributeString(id_step, 0, "schema_port");
  m_socketTimeout = rep.getStepAttributeString(id_step, 0, "socket_timeout");
  m_username = rep.getStepAttributeString(id_step, 0, "username");
  m_password = rep.getStepAttributeString(id_step, 0, "password");
  // the password is stored encrypted (see saveRep()) - decrypt on load
  if (!Const.isEmpty(m_password)) {
    m_password = Encr.decryptPasswordOptionallyEncrypted(m_password);
  }
  m_cassandraKeyspace = rep.getStepAttributeString(id_step, 0,
      "cassandra_keyspace");
  m_columnFamily = rep.getStepAttributeString(id_step, 0, "column_family");
  m_keyField = rep.getStepAttributeString(id_step, 0, "key_field");
  m_consistency = rep.getStepAttributeString(id_step, 0, "consistency");
  m_batchSize = rep.getStepAttributeString(id_step, 0, "batch_size");
  m_cqlBatchTimeout = rep.getStepAttributeString(id_step, 0,
      "cql_batch_timeout");
  m_cqlSubBatchSize = rep.getStepAttributeString(id_step, 0,
      "cql_sub_batch_size");
  m_createColumnFamily = rep.getStepAttributeBoolean(id_step, 0,
      "create_column_family");
  m_useCompression = rep.getStepAttributeBoolean(id_step, 0,
      "use_compression");
  m_insertFieldsNotInMeta = rep.getStepAttributeBoolean(id_step, 0,
      "insert_fields_not_in_meta");
  m_updateCassandraMeta = rep.getStepAttributeBoolean(id_step, 0,
      "update_cassandra_meta");
  m_truncateColumnFamily = rep.getStepAttributeBoolean(id_step, 0,
      "truncate_column_family");
  m_aprioriCQL = rep.getStepAttributeString(id_step, 0, "apriori_cql");
  m_useThriftIO = rep.getStepAttributeBoolean(id_step, 0, "use_thrift_io");
  asIndexColumn = rep.getStepAttributeBoolean(id_step, 0, "asIndexColumn");
}
/**
 * Saves this step's configuration to a repository. Optional string settings
 * are only saved when non-empty; boolean settings are always saved. The
 * password is encrypted before being stored.
 *
 * @param rep the repository to save to
 * @param id_transformation the id of the owning transformation
 * @param id_step the id of this step in the repository
 * @throws KettleException if saving to the repository fails
 */
public void saveRep(Repository rep, ObjectId id_transformation,
    ObjectId id_step) throws KettleException {
  if (!Const.isEmpty(m_cassandraHost)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "cassandra_host",
        m_cassandraHost);
  }
  if (!Const.isEmpty(m_cassandraPort)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "cassandra_port",
        m_cassandraPort);
  }
  if (!Const.isEmpty(m_schemaHost)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "schema_host",
        m_schemaHost);
  }
  if (!Const.isEmpty(m_schemaPort)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "schema_port",
        m_schemaPort);
  }
  if (!Const.isEmpty(m_socketTimeout)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "socket_timeout",
        m_socketTimeout);
  }
  if (!Const.isEmpty(m_username)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "username",
        m_username);
  }
  // never store the password in clear text (readRep() decrypts)
  if (!Const.isEmpty(m_password)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "password",
        Encr.encryptPasswordIfNotUsingVariables(m_password));
  }
  if (!Const.isEmpty(m_cassandraKeyspace)) {
    rep.saveStepAttribute(id_transformation, id_step, 0,
        "cassandra_keyspace", m_cassandraKeyspace);
  }
  if (!Const.isEmpty(m_columnFamily)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "column_family",
        m_columnFamily);
  }
  if (!Const.isEmpty(m_keyField)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "key_field",
        m_keyField);
  }
  if (!Const.isEmpty(m_consistency)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "consistency",
        m_consistency);
  }
  if (!Const.isEmpty(m_batchSize)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "batch_size",
        m_batchSize);
  }
  if (!Const.isEmpty(m_cqlBatchTimeout)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "cql_batch_timeout",
        m_cqlBatchTimeout);
  }
  if (!Const.isEmpty(m_cqlSubBatchSize)) {
    rep.saveStepAttribute(id_transformation, id_step, 0,
        "cql_sub_batch_size", m_cqlSubBatchSize);
  }
  rep.saveStepAttribute(id_transformation, id_step, 0,
      "create_column_family", m_createColumnFamily);
  rep.saveStepAttribute(id_transformation, id_step, 0, "use_compression",
      m_useCompression);
  rep.saveStepAttribute(id_transformation, id_step, 0,
      "insert_fields_not_in_meta", m_insertFieldsNotInMeta);
  rep.saveStepAttribute(id_transformation, id_step, 0,
      "update_cassandra_meta", m_updateCassandraMeta);
  rep.saveStepAttribute(id_transformation, id_step, 0,
      "truncate_column_family", m_truncateColumnFamily);
  if (!Const.isEmpty(m_aprioriCQL)) {
    rep.saveStepAttribute(id_transformation, id_step, 0, "apriori_cql",
        m_aprioriCQL);
  }
  rep.saveStepAttribute(id_transformation, id_step, 0, "use_thrift_io",
      m_useThriftIO);
  rep.saveStepAttribute(id_transformation, id_step, 0, "asIndexColumn",
      asIndexColumn);
}
/**
 * Adds design-time remarks about this step's connections: warns when no
 * fields arrive from a previous step and errors when no input hop exists.
 *
 * @param remarks list to append the remarks to
 * @param transMeta the transformation meta data
 * @param stepMeta the meta data for this step
 * @param prev the fields coming from the previous step (may be null)
 * @param input names of steps sending input to this one
 * @param output names of steps receiving output from this one
 * @param info informational fields
 */
public void check(List<CheckResultInterface> remarks, TransMeta transMeta,
    StepMeta stepMeta, RowMetaInterface prev, String[] input,
    String[] output, RowMetaInterface info) {
  // Fields from the previous step?
  if (prev != null && prev.size() > 0) {
    remarks.add(new CheckResult(CheckResult.TYPE_RESULT_OK,
        "Step is connected to previous one, receiving " + prev.size()
            + " fields", stepMeta));
  } else {
    remarks.add(new CheckResult(CheckResult.TYPE_RESULT_WARNING,
        "Not receiving any fields from previous steps!", stepMeta));
  }
  // See if we have input streams leading to this step!
  remarks.add(input.length > 0
      ? new CheckResult(CheckResult.TYPE_RESULT_OK,
          "Step is receiving info from other steps.", stepMeta)
      : new CheckResult(CheckResult.TYPE_RESULT_ERROR,
          "No input received from other steps!", stepMeta));
}
/**
 * Creates the runtime step implementation for this meta data.
 *
 * @param stepMeta the step meta data
 * @param stepDataInterface the step data
 * @param copyNr the copy number of this step
 * @param transMeta the transformation meta data
 * @param trans the owning transformation
 * @return a new CassandraOutput runtime step
 */
public StepInterface getStep(StepMeta stepMeta,
    StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans) {
  return new CassandraOutput(stepMeta, stepDataInterface, copyNr, transMeta,
      trans);
}
/**
 * Creates a new, empty data object to accompany a CassandraOutput step.
 *
 * @return a new CassandraOutputData instance
 */
public StepDataInterface getStepData() {
  return new CassandraOutputData();
}
/**
 * Resets this step's settings to their defaults (used when a new step is
 * created).
 */
public void setDefault() {
  m_cassandraHost = "localhost";
  m_cassandraPort = "9160";
  m_schemaHost = "localhost";
  m_schemaPort = "9160";
  m_columnFamily = "";
  m_batchSize = "100";
  m_useCompression = false;
  m_insertFieldsNotInMeta = false;
  m_updateCassandraMeta = false;
  m_truncateColumnFamily = false;
  m_aprioriCQL = "";
  // CONSISTENCY FIX: also reset the remaining options (to the same values as
  // their field initializers) so a "new" step never inherits leftover state.
  m_keyField = "";
  m_socketTimeout = "";
  m_cqlBatchTimeout = "";
  m_cqlSubBatchSize = "10";
  m_createColumnFamily = true;
  m_useThriftIO = false;
  asIndexColumn = false;
}
/**
 * Returns the fully-qualified class name of the dialog used to edit this
 * step's settings.
 *
 * @see org.pentaho.di.trans.step.BaseStepMeta#getDialogClassName()
 */
@Override
public String getDialogClassName() {
  return "org.pentaho.di.trans.steps.cassandraoutput.CassandraOutputDialog";
}
/**
 * @return true - this step supports Kettle's error handling
 */
@Override
public boolean supportsErrorHandling() {
  return true;
}
/**
 * Get the asIndexColumn flag. NOTE(review): the flag's semantics are applied
 * by the CassandraOutput runtime step - confirm there.
 *
 * @return the current value of the asIndexColumn flag
 */
public boolean isAsIndexColumn() {
  return asIndexColumn;
}
/**
 * Set the asIndexColumn flag.
 *
 * @param asIndexColumn the new value for the flag
 */
public void setAsIndexColumn(boolean asIndexColumn) {
  this.asIndexColumn = asIndexColumn;
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandraoutput/EnterCQLDialog.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandraoutput/EnterCQLDialog.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandraoutput;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Dialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.pentaho.di.core.Const;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.gui.WindowProperty;
import org.pentaho.di.ui.core.widget.StyledTextComp;
import org.pentaho.di.ui.spoon.job.JobGraph;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
import org.pentaho.di.ui.trans.steps.tableinput.SQLValuesHighlight;
/**
* Provides a popup dialog for editing CQL commands.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision$
*/
public class EnterCQLDialog extends Dialog {
  private static Class<?> PKG = EnterCQLDialog.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$
  /** Title shown in the dialog's title bar */
  protected String m_title;
  /** The CQL text as supplied when the dialog was constructed */
  protected String m_originalCQL;
  /** The CQL text returned by open() - updated by ok()/cancel() */
  protected String m_currentCQL;
  protected Shell m_parent;
  protected Shell m_shell;
  protected Button m_ok;
  protected Button m_cancel;
  protected Listener m_lsCancel;
  protected Listener m_lsOK;
  // protected SelectionAdapter m_lsDef;
  protected PropsUI m_props;
  /** Styled editor widget holding the CQL text */
  protected StyledTextComp m_cqlText;
  protected TransMeta m_transMeta;
  /** External listener notified whenever the CQL text is modified */
  protected ModifyListener m_lsMod;
  /**
   * Constructor.
   *
   * @param parent the parent shell
   * @param transMeta the transformation meta data (used for environment
   *          variable substitution in the tooltip)
   * @param lsMod listener to notify when the CQL text changes
   * @param title the title for the dialog
   * @param cql the initial CQL text to edit
   */
  public EnterCQLDialog(Shell parent, TransMeta transMeta, ModifyListener lsMod,
      String title, String cql) {
    super(parent, SWT.NONE);
    m_parent = parent;
    m_props = PropsUI.getInstance();
    m_title = title;
    m_originalCQL = cql;
    m_transMeta = transMeta;
    m_lsMod = lsMod;
  }
  /**
   * Opens the modal dialog and blocks until it is closed.
   *
   * @return the edited CQL if the user pressed OK, otherwise the original CQL
   */
  public String open() {
    Display display = m_parent.getDisplay();
    m_shell = new Shell(m_parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN | SWT.APPLICATION_MODAL);
    m_props.setLook(m_shell);
    m_shell.setImage(GUIResource.getInstance().getImageSpoon());
    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;
    m_shell.setLayout(formLayout);
    m_shell.setText(m_title);
    int margin = Const.MARGIN;
    m_cqlText = new StyledTextComp(m_transMeta, m_shell,
        SWT.MULTI | SWT.LEFT | SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL, "");
    m_props.setLook(m_cqlText, m_props.WIDGET_STYLE_FIXED);
    m_cqlText.setText(m_originalCQL);
    m_currentCQL = m_originalCQL;
    FormData fd = new FormData();
    fd.left = new FormAttachment(0, 0);
    fd.top = new FormAttachment(0, 0);
    fd.right = new FormAttachment(100, -2 * margin);
    fd.bottom = new FormAttachment(100, -50);
    m_cqlText.setLayoutData(fd);
    m_cqlText.addModifyListener(m_lsMod);
    // keep the tooltip showing the variable-substituted version of the text
    m_cqlText.addModifyListener(new ModifyListener() {
      public void modifyText(ModifyEvent e) {
        m_cqlText.setToolTipText(m_transMeta.environmentSubstitute(m_cqlText.getText()));
      }
    });
    // Text Highlighting
    m_cqlText.addLineStyleListener(new SQLValuesHighlight());
    // Some buttons
    m_ok = new Button(m_shell, SWT.PUSH);
    m_ok.setText(BaseMessages.getString(PKG, "System.Button.OK"));
    m_cancel = new Button(m_shell, SWT.PUSH);
    m_cancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
    BaseStepDialog.positionBottomButtons(m_shell, new Button[] { m_ok, m_cancel }, margin, null);
    // Add listeners
    m_lsCancel = new Listener() { public void handleEvent(Event e) { cancel(); } };
    m_lsOK = new Listener() { public void handleEvent(Event e) { ok(); } };
    m_ok.addListener(SWT.Selection, m_lsOK);
    m_cancel.addListener(SWT.Selection, m_lsCancel);
    // Detect [X] or ALT-F4 or something that kills this window...
    m_shell.addShellListener( new ShellAdapter() { public void shellClosed(ShellEvent e) { checkCancel(e); } } );
    BaseStepDialog.setSize(m_shell);
    m_shell.open();
    // standard SWT modal event loop - runs until the shell is disposed
    while (!m_shell.isDisposed()) {
      if (!display.readAndDispatch()) display.sleep();
    }
    return m_currentCQL;
  }
  /** Saves the dialog's screen position/size and disposes of the shell. */
  public void dispose() {
    m_props.setScreen(new WindowProperty(m_shell));
    m_shell.dispose();
  }
  /** Accepts the edited CQL text and closes the dialog. */
  protected void ok() {
    m_currentCQL = m_cqlText.getText();
    dispose();
  }
  /** Discards any edits (reverting to the original CQL) and closes the dialog. */
  protected void cancel() {
    m_currentCQL = m_originalCQL;
    dispose();
  }
  /**
   * Invoked when the shell is closed via [X]/ALT-F4. If the text was changed,
   * asks the user whether to keep the changes; answering cancel vetoes the
   * close by setting {@code e.doit} to false.
   *
   * @param e the shell close event
   */
  public void checkCancel(ShellEvent e) {
    String newText = m_cqlText.getText();
    if (!newText.equals(m_originalCQL)) {
      int save = JobGraph.showChangedWarning(m_shell, m_title);
      if (save == SWT.CANCEL) {
        e.doit = false;
      } else if (save == SWT.YES) {
        ok();
      } else {
        cancel();
      }
    } else {
      cancel();
    }
  }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableWriter.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableWriter.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandrasstableoutput;
/*
* Adapted from DataStax DataImportExample
* http://www.datastax.com/wp-content/uploads/2011/08/DataImportExample.java
*
* Original Disclaimer:
* This file is an example on how to use the Cassandra SSTableSimpleUnsortedWriter class to create
* sstables from a csv input file.
* While this has been tested to work, this program is provided "as is" with no guarantee. Moreover,
 * its primary aim is toward simplicity rather than completeness. In particular, don't use this as an
* example to parse csv files at home.
*
*/
import static org.apache.cassandra.utils.ByteBufferUtil.bytes;
import java.io.File;
import java.nio.ByteBuffer;
import java.text.DateFormat;
import java.util.Date;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.cassandra.db.marshal.AsciiType;
import org.apache.cassandra.io.sstable.SSTableSimpleUnsortedWriter;
import org.apache.log4j.helpers.ISO8601DateFormat;
import org.pentaho.di.core.exception.KettleException;
/**
* Outputs Cassandra SSTables (sorted-string tables) to a directory.
*
* Adapted from DataStax DataImportExample
* http://www.datastax.com/wp-content/uploads/2011/08/DataImportExample.java
*
* @author Rob Turner (robert{[at]}robertturner{[dot]}com{[dot]}au)
*/
public class SSTableWriter {
  /**
   * Shared ISO 8601 formatter for Date values. DateFormat instances are not
   * thread-safe, so every use is synchronized on this object.
   */
  private static final DateFormat ISO8601 = ISO8601DateFormat.getInstance();
  /** Default size (MB) of the write buffer; a new sstable is flushed each time the buffer fills */
  private static final int DEFAULT_BUFFER_SIZE_MB = 16;
  /** Output directory for the generated sstables (defaults to the system temp dir) */
  private String directory = System.getProperty("java.io.tmpdir");
  private String keyspace;
  private String columnFamily;
  private String keyField;
  private int bufferSize = DEFAULT_BUFFER_SIZE_MB;
  private SSTableSimpleUnsortedWriter writer;
  /**
   * Set the directory to write the sstables to.
   * (Doc fix: this directory is written to by init(), not read from.)
   *
   * @param directory the directory to write the sstables to
   */
  public void setDirectory(String directory) {
    this.directory = directory;
  }
  /**
   * Set the target keyspace
   *
   * @param keyspace the keyspace to use
   */
  public void setKeyspace(String keyspace) {
    this.keyspace = keyspace;
  }
  /**
   * Set the column family (table) to load to. Note: it is assumed that
   * this column family exists in the keyspace apriori.
   *
   * @param columnFamily the column family to load to.
   */
  public void setColumnFamily(String columnFamily) {
    this.columnFamily = columnFamily;
  }
  /**
   * Set the key field name
   *
   * @param keyField the key field name
   */
  public void setKeyField(String keyField) {
    this.keyField = keyField;
  }
  /**
   * Set the buffer size (Mb) to use. A new table file is written
   * every time the buffer is full.
   *
   * @param bufferSize the size of the buffer to use
   */
  public void setBufferSize(int bufferSize) {
    this.bufferSize = bufferSize;
  }
  /**
   * Initialization. Creates the target directory if needed and establishes
   * the writer.
   *
   * @throws Exception if the directory cannot be created or the writer cannot
   *           be constructed
   */
  public void init() throws Exception {
    File targetDir = new File(directory);
    // BUG FIX: use mkdirs() (was mkdir()) so that missing *parent*
    // directories are created too, and fail fast instead of silently
    // ignoring a failed creation.
    if (!targetDir.exists() && !targetDir.mkdirs()) {
      throw new KettleException("Unable to create directory: " + directory);
    }
    try {
      //TODO set parameter for null
      writer = new SSTableSimpleUnsortedWriter(targetDir, null, keyspace,
          columnFamily, AsciiType.instance, null, bufferSize);
    } catch (Throwable t) {
      throw new KettleException(
          "Failed to create SSTableSimpleUnsortedWriter", t);
    }
  }
  /**
   * Process a row of data
   *
   * @param record a row of data as a Map of column names to values
   * @throws Exception if a problem occurs
   */
  public void processRow(Map<String, Object> record) throws Exception {
    // the value of the configured key field becomes the row key
    ByteBuffer rowKey = valueToBytes(record.get(keyField));
    writer.newRow(rowKey);
    // one timestamp (microsecond granularity) shared by all columns of the row
    long timestamp = System.currentTimeMillis() * 1000;
    for (Entry<String, Object> entry : record.entrySet()) {
      Object value = entry.getValue();
      // skip nulls (empty strings count as null here - see isNull())
      if (isNull(value)) {
        continue;
      }
      // don't write the key as a column!
      if (entry.getKey().equals(keyField)) {
        continue;
      }
      writer.addColumn(bytes(entry.getKey()), valueToBytes(value),
          timestamp);
    }
  }
  /**
   * Encodes a value as a ByteBuffer according to its runtime type;
   * unrecognized types fall back to their toString() form.
   *
   * @param val the value to encode
   * @return the encoded value
   * @throws Exception if encoding fails
   */
  private static final ByteBuffer valueToBytes(Object val) throws Exception {
    if (val instanceof String) {
      return bytes((String) val);
    }
    if (val instanceof Integer) {
      return bytes(((Integer) val).intValue());
    }
    if (val instanceof Float) {
      return bytes(((Float) val).floatValue());
    }
    if (val instanceof Boolean) {
      // will return "true" or "false"
      return bytes(val.toString());
    }
    if (val instanceof Date) {
      // use ISO 8601 date format. BUG FIX: DateFormat is not thread-safe,
      // so serialize access to the shared formatter.
      synchronized (ISO8601) {
        try {
          return bytes(ISO8601.format((Date) val));
        } catch (ArrayIndexOutOfBoundsException e) {
          // something wrong with the date... just convert to string
          return bytes(val.toString());
        }
      }
    }
    if (val instanceof Long) {
      return bytes(((Long) val).longValue());
    }
    if (val instanceof Double) {
      return bytes(((Double) val).doubleValue());
    }
    if (val instanceof byte[]) {
      return ByteBuffer.wrap((byte[]) val);
    }
    // reduce to string
    return bytes(val.toString());
  }
  /**
   * Returns true when the value should be treated as null: an actual null
   * reference or an empty string.
   *
   * @param val the value to check
   * @return true if the value is null in this context
   */
  static final boolean isNull(Object val) {
    if (val == null) {
      return true;
    }
    // empty strings are considered null in this context
    if (val instanceof String) {
      return "".equals(val);
    }
    return false;
  }
  /**
   * Close the writer (flushes any buffered data). Safe to call when init()
   * was never invoked.
   *
   * @throws Exception if a problem occurs
   */
  public void close() throws Exception {
    if (writer != null) {
      writer.close();
    }
  }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableOutputData.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableOutputData.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandrasstableoutput;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.trans.step.BaseStepData;
import org.pentaho.di.trans.step.StepDataInterface;
/**
* Data class for SSTablesOutput step.
*
* @author Rob Turner (robert{[at]}robertturner{[dot]}com{[dot]}au)
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*/
public class SSTableOutputData extends BaseStepData implements
StepDataInterface {
/** The output data format */
protected RowMetaInterface outputRowMeta;
/**
* Get the output row format
*
* @return the output row format
*/
public RowMetaInterface getOutputRowMeta() {
return outputRowMeta;
}
/**
* Set the output row format
*
* @param rmi
* the output row format
*/
public void setOutputRowMeta(RowMetaInterface rmi) {
outputRowMeta = rmi;
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableOutputDialog.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableOutputDialog.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandrasstableoutput;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
/**
* Dialog class for the SSTableOutput step
*
* @author Rob Turner (robert{[at]}robertturner{[dot]}com{[dot]}au)
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*/
public class SSTableOutputDialog extends BaseStepDialog implements
    StepDialogInterface {
  private static final Class<?> PKG = SSTableOutputMeta.class;
  // Working copy edited by the dialog, plus a clone of the original settings
  // used by ok() to detect whether the user actually changed anything.
  private final SSTableOutputMeta m_currentMeta;
  private final SSTableOutputMeta m_originalMeta;
  /** various UI bits and pieces for the dialog */
  private Label m_stepnameLabel;
  private Text m_stepnameText;
  private Label m_yamlLab;
  private Button m_yamlBut;
  private TextVar m_yamlText;
  private Label m_directoryLab;
  private Button m_directoryBut;
  private TextVar m_directoryText;
  private Label m_keyspaceLab;
  private TextVar m_keyspaceText;
  private Label m_columnFamilyLab;
  private TextVar m_columnFamilyText;
  private Label m_keyFieldLab;
  private CCombo m_keyFieldCombo;
  private Label m_bufferSizeLab;
  private TextVar m_bufferSizeText;
  private Button m_getFieldsBut;
  /**
   * Creates the dialog for editing the given SSTableOutput step metadata.
   *
   * @param parent the parent shell
   * @param in the step metadata to edit (an {@link SSTableOutputMeta})
   * @param tr the owning transformation's metadata
   * @param name the step name
   */
  public SSTableOutputDialog(Shell parent, Object in, TransMeta tr, String name) {
    super(parent, (BaseStepMeta) in, tr, name);
    m_currentMeta = (SSTableOutputMeta) in;
    // keep a pristine copy so we can later tell whether anything changed
    m_originalMeta = (SSTableOutputMeta) m_currentMeta.clone();
  }
  /**
   * Builds, lays out and shows the dialog; blocks in an SWT event loop
   * until the shell is disposed.
   *
   * @return the (possibly updated) step name, or null if the user cancelled
   */
  public String open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();
    shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX);
    props.setLook(shell);
    setShellImage(shell, m_currentMeta);
    // used to listen to a text field (m_wStepname)
    final ModifyListener lsMod = new ModifyListener() {
      public void modifyText(ModifyEvent e) {
        m_currentMeta.setChanged();
      }
    };
    changed = m_currentMeta.hasChanged();
    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;
    shell.setLayout(formLayout);
    shell.setText(BaseMessages
        .getString(PKG, "SSTableOutputDialog.Shell.Title"));
    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;
    // Stepname line
    m_stepnameLabel = new Label(shell, SWT.RIGHT);
    m_stepnameLabel.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.StepName.Label"));
    props.setLook(m_stepnameLabel);
    FormData fd = new FormData();
    fd.left = new FormAttachment(0, 0);
    fd.right = new FormAttachment(middle, -margin);
    fd.top = new FormAttachment(0, margin);
    m_stepnameLabel.setLayoutData(fd);
    m_stepnameText = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    m_stepnameText.setText(stepname);
    props.setLook(m_stepnameText);
    m_stepnameText.addModifyListener(lsMod);
    // format the text field
    fd = new FormData();
    fd.left = new FormAttachment(middle, 0);
    fd.top = new FormAttachment(0, margin);
    fd.right = new FormAttachment(100, 0);
    m_stepnameText.setLayoutData(fd);
    // yaml file line
    m_yamlLab = new Label(shell, SWT.RIGHT);
    props.setLook(m_yamlLab);
    m_yamlLab.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.YAML.Label"));
    fd = new FormData();
    fd.left = new FormAttachment(0, 0);
    fd.top = new FormAttachment(m_stepnameText, margin);
    fd.right = new FormAttachment(middle, -margin);
    m_yamlLab.setLayoutData(fd);
    m_yamlBut = new Button(shell, SWT.PUSH | SWT.CENTER);
    props.setLook(m_yamlBut);
    m_yamlBut.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.YAML.Button"));
    fd = new FormData();
    fd.right = new FormAttachment(100, 0);
    fd.top = new FormAttachment(m_stepnameText, margin);
    m_yamlBut.setLayoutData(fd);
    m_yamlBut.addSelectionListener(new SelectionAdapter() {
      @Override
      public void widgetSelected(SelectionEvent e) {
        FileDialog dialog = new FileDialog(shell, SWT.OPEN);
        String[] extensions = null;
        String[] filterNames = null;
        extensions = new String[2];
        filterNames = new String[2];
        extensions[0] = "*.yaml";
        filterNames[0] = BaseMessages.getString(PKG,
            "SSTableOutputDialog.FileType.YAML");
        extensions[1] = "*";
        filterNames[1] = BaseMessages
            .getString(PKG, "System.FileType.AllFiles");
        // NOTE(review): filterNames is populated but never passed to the
        // dialog (setFilterNames is not called) — confirm whether that is
        // intentional.
        dialog.setFilterExtensions(extensions);
        if (dialog.open() != null) {
          m_yamlText.setText(dialog.getFilterPath()
              + System.getProperty("file.separator") + dialog.getFileName());
        }
      }
    });
    m_yamlText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
        | SWT.BORDER);
    props.setLook(m_yamlText);
    m_yamlText.addModifyListener(new ModifyListener() {
      public void modifyText(ModifyEvent e) {
        // tooltip shows the variable-substituted value
        m_yamlText.setToolTipText(transMeta.environmentSubstitute(m_yamlText
            .getText()));
      }
    });
    m_yamlText.addModifyListener(lsMod);
    fd = new FormData();
    fd.right = new FormAttachment(m_yamlBut, 0);
    fd.top = new FormAttachment(m_stepnameText, margin);
    fd.left = new FormAttachment(middle, 0);
    m_yamlText.setLayoutData(fd);
    // directory line
    m_directoryLab = new Label(shell, SWT.RIGHT);
    props.setLook(m_directoryLab);
    m_directoryLab.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.Directory.Label"));
    fd = new FormData();
    fd.left = new FormAttachment(0, 0);
    fd.top = new FormAttachment(m_yamlText, margin);
    fd.right = new FormAttachment(middle, -margin);
    m_directoryLab.setLayoutData(fd);
    m_directoryBut = new Button(shell, SWT.PUSH | SWT.CENTER);
    props.setLook(m_directoryBut);
    m_directoryBut.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.Directory.Button"));
    fd = new FormData();
    fd.right = new FormAttachment(100, 0);
    fd.top = new FormAttachment(m_yamlText, margin);
    m_directoryBut.setLayoutData(fd);
    // NOTE(review): this browse button opens a FileDialog although the
    // setting is an output *directory*; an SWT DirectoryDialog may be more
    // appropriate — confirm intended behavior before changing.
    m_directoryBut.addSelectionListener(new SelectionAdapter() {
      @Override
      public void widgetSelected(SelectionEvent e) {
        FileDialog dialog = new FileDialog(shell, SWT.OPEN);
        String[] extensions = null;
        String[] filterNames = null;
        extensions = new String[1];
        filterNames = new String[1];
        extensions[0] = "*";
        filterNames[0] = BaseMessages
            .getString(PKG, "System.FileType.AllFiles");
        dialog.setFilterExtensions(extensions);
        if (dialog.open() != null) {
          m_directoryText.setText(dialog.getFilterPath()
              + System.getProperty("file.separator") + dialog.getFileName());
        }
      }
    });
    m_directoryText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
        | SWT.BORDER);
    props.setLook(m_directoryText);
    m_directoryText.addModifyListener(new ModifyListener() {
      public void modifyText(ModifyEvent e) {
        m_directoryText.setToolTipText(transMeta
            .environmentSubstitute(m_directoryText.getText()));
      }
    });
    m_directoryText.addModifyListener(lsMod);
    fd = new FormData();
    fd.right = new FormAttachment(m_directoryBut, 0);
    fd.top = new FormAttachment(m_yamlText, margin);
    fd.left = new FormAttachment(middle, 0);
    m_directoryText.setLayoutData(fd);
    // keyspace line
    m_keyspaceLab = new Label(shell, SWT.RIGHT);
    props.setLook(m_keyspaceLab);
    m_keyspaceLab.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.Keyspace.Label"));
    fd = new FormData();
    fd.left = new FormAttachment(0, 0);
    fd.top = new FormAttachment(m_directoryText, margin);
    fd.right = new FormAttachment(middle, -margin);
    m_keyspaceLab.setLayoutData(fd);
    m_keyspaceText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
        | SWT.BORDER);
    props.setLook(m_keyspaceText);
    m_keyspaceText.addModifyListener(new ModifyListener() {
      public void modifyText(ModifyEvent e) {
        m_keyspaceText.setToolTipText(transMeta
            .environmentSubstitute(m_keyspaceText.getText()));
      }
    });
    m_keyspaceText.addModifyListener(lsMod);
    fd = new FormData();
    fd.right = new FormAttachment(100, 0);
    fd.top = new FormAttachment(m_directoryText, margin);
    fd.left = new FormAttachment(middle, 0);
    m_keyspaceText.setLayoutData(fd);
    // column family line
    m_columnFamilyLab = new Label(shell, SWT.RIGHT);
    props.setLook(m_columnFamilyLab);
    m_columnFamilyLab.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.ColumnFamily.Label"));
    fd = new FormData();
    fd.left = new FormAttachment(0, 0);
    fd.top = new FormAttachment(m_keyspaceText, margin);
    fd.right = new FormAttachment(middle, -margin);
    m_columnFamilyLab.setLayoutData(fd);
    m_columnFamilyText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
        | SWT.BORDER);
    props.setLook(m_columnFamilyText);
    m_columnFamilyText.addModifyListener(new ModifyListener() {
      public void modifyText(ModifyEvent e) {
        m_columnFamilyText.setToolTipText(transMeta
            .environmentSubstitute(m_columnFamilyText.getText()));
      }
    });
    m_columnFamilyText.addModifyListener(lsMod);
    fd = new FormData();
    fd.right = new FormAttachment(100, 0);
    fd.top = new FormAttachment(m_keyspaceText, margin);
    fd.left = new FormAttachment(middle, 0);
    m_columnFamilyText.setLayoutData(fd);
    // key field line
    m_keyFieldLab = new Label(shell, SWT.RIGHT);
    props.setLook(m_keyFieldLab);
    m_keyFieldLab.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.KeyField.Label"));
    fd = new FormData();
    fd.left = new FormAttachment(0, 0);
    fd.top = new FormAttachment(m_columnFamilyText, margin);
    fd.right = new FormAttachment(middle, -margin);
    m_keyFieldLab.setLayoutData(fd);
    m_getFieldsBut = new Button(shell, SWT.PUSH | SWT.CENTER);
    props.setLook(m_getFieldsBut);
    m_getFieldsBut.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.GetFields.Button"));
    fd = new FormData();
    fd.right = new FormAttachment(100, 0);
    fd.top = new FormAttachment(m_columnFamilyText, 0);
    m_getFieldsBut.setLayoutData(fd);
    // populate the key-field combo from the previous step's fields
    m_getFieldsBut.addSelectionListener(new SelectionAdapter() {
      @Override
      public void widgetSelected(SelectionEvent e) {
        setupFieldsCombo();
      }
    });
    m_keyFieldCombo = new CCombo(shell, SWT.BORDER);
    m_keyFieldCombo.addModifyListener(new ModifyListener() {
      public void modifyText(ModifyEvent e) {
        m_keyFieldCombo.setToolTipText(transMeta
            .environmentSubstitute(m_keyFieldCombo.getText()));
      }
    });
    m_keyFieldCombo.addModifyListener(lsMod);
    fd = new FormData();
    fd.right = new FormAttachment(m_getFieldsBut, -margin);
    fd.top = new FormAttachment(m_columnFamilyText, margin);
    fd.left = new FormAttachment(middle, 0);
    m_keyFieldCombo.setLayoutData(fd);
    // buffer size
    m_bufferSizeLab = new Label(shell, SWT.RIGHT);
    props.setLook(m_bufferSizeLab);
    m_bufferSizeLab.setText(BaseMessages.getString(PKG,
        "SSTableOutputDialog.BufferSize.Label"));
    fd = new FormData();
    fd.left = new FormAttachment(0, 0);
    fd.top = new FormAttachment(m_keyFieldCombo, margin);
    fd.right = new FormAttachment(middle, -margin);
    m_bufferSizeLab.setLayoutData(fd);
    m_bufferSizeText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
        | SWT.BORDER);
    props.setLook(m_bufferSizeText);
    m_bufferSizeText.addModifyListener(new ModifyListener() {
      public void modifyText(ModifyEvent e) {
        m_bufferSizeText.setToolTipText(transMeta
            .environmentSubstitute(m_bufferSizeText.getText()));
      }
    });
    m_bufferSizeText.addModifyListener(lsMod);
    fd = new FormData();
    fd.right = new FormAttachment(100, 0);
    fd.top = new FormAttachment(m_keyFieldCombo, margin);
    fd.left = new FormAttachment(middle, 0);
    m_bufferSizeText.setLayoutData(fd);
    // Buttons inherited from BaseStepDialog
    wOK = new Button(shell, SWT.PUSH);
    wOK.setText(BaseMessages.getString(PKG, "System.Button.OK"));
    wCancel = new Button(shell, SWT.PUSH);
    wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
    setButtonPositions(new Button[] { wOK, wCancel }, margin, m_bufferSizeText);
    // Add listeners
    lsCancel = new Listener() {
      public void handleEvent(Event e) {
        cancel();
      }
    };
    lsOK = new Listener() {
      public void handleEvent(Event e) {
        ok();
      }
    };
    wCancel.addListener(SWT.Selection, lsCancel);
    wOK.addListener(SWT.Selection, lsOK);
    lsDef = new SelectionAdapter() {
      @Override
      public void widgetDefaultSelected(SelectionEvent e) {
        ok();
      }
    };
    m_stepnameText.addSelectionListener(lsDef);
    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener(new ShellAdapter() {
      @Override
      public void shellClosed(ShellEvent e) {
        cancel();
      }
    });
    setSize();
    getData();
    shell.open();
    // standard SWT modal event loop
    while (!shell.isDisposed()) {
      if (!display.readAndDispatch()) {
        display.sleep();
      }
    }
    return stepname;
  }
  /**
   * Fills the key-field combo with the field names coming from the
   * previous step, or shows an error dialog if there are none.
   */
  protected void setupFieldsCombo() {
    // try and set up from incoming fields from previous step
    StepMeta stepMeta = transMeta.findStep(stepname);
    if (stepMeta != null) {
      try {
        RowMetaInterface row = transMeta.getPrevStepFields(stepMeta);
        if (row.size() == 0) {
          MessageDialog.openError(shell, BaseMessages.getString(PKG,
              "SSTableOutputData.Message.NoIncomingFields.Title"), BaseMessages
              .getString(PKG, "SSTableOutputData.Message.NoIncomingFields"));
          return;
        }
        m_keyFieldCombo.removeAll();
        for (int i = 0; i < row.size(); i++) {
          ValueMetaInterface vm = row.getValueMeta(i);
          m_keyFieldCombo.add(vm.getName());
        }
      } catch (KettleException ex) {
        // NOTE(review): any KettleException is reported with the
        // "no incoming fields" message, even if the cause differs — confirm.
        MessageDialog.openError(shell, BaseMessages.getString(PKG,
            "SSTableOutputData.Message.NoIncomingFields.Title"), BaseMessages
            .getString(PKG, "SSTableOutputData.Message.NoIncomingFields"));
      }
    }
  }
  /**
   * Copies the dialog values into the step metadata and closes the dialog.
   * Does nothing (leaves the dialog open) if the step name is empty.
   */
  protected void ok() {
    if (Const.isEmpty(m_stepnameText.getText())) {
      return;
    }
    stepname = m_stepnameText.getText();
    m_currentMeta.setYamlPath(m_yamlText.getText());
    m_currentMeta.setDirectory(m_directoryText.getText());
    m_currentMeta.setCassandraKeyspace(m_keyspaceText.getText());
    m_currentMeta.setColumnFamilyName(m_columnFamilyText.getText());
    m_currentMeta.setKeyField(m_keyFieldCombo.getText());
    m_currentMeta.setBufferSize(m_bufferSizeText.getText());
    // mark the meta changed only if something actually differs from the
    // snapshot taken when the dialog was opened
    if (!m_originalMeta.equals(m_currentMeta)) {
      m_currentMeta.setChanged();
      changed = m_currentMeta.hasChanged();
    }
    dispose();
  }
  /**
   * Discards edits: restores the original changed-flag and closes the
   * dialog, returning null as the step name.
   */
  protected void cancel() {
    stepname = null;
    m_currentMeta.setChanged(changed);
    dispose();
  }
  /**
   * Populates the dialog widgets from the current step metadata
   * (empty/null settings leave the corresponding widget untouched).
   */
  protected void getData() {
    if (!Const.isEmpty(m_currentMeta.getYamlPath())) {
      m_yamlText.setText(m_currentMeta.getYamlPath());
    }
    if (!Const.isEmpty(m_currentMeta.getDirectory())) {
      m_directoryText.setText(m_currentMeta.getDirectory());
    }
    if (!Const.isEmpty(m_currentMeta.getCassandraKeyspace())) {
      m_keyspaceText.setText(m_currentMeta.getCassandraKeyspace());
    }
    if (!Const.isEmpty(m_currentMeta.getColumnFamilyName())) {
      m_columnFamilyText.setText(m_currentMeta.getColumnFamilyName());
    }
    if (!Const.isEmpty(m_currentMeta.getKeyField())) {
      m_keyFieldCombo.setText(m_currentMeta.getKeyField());
    }
    if (!Const.isEmpty(m_currentMeta.getBufferSize())) {
      m_bufferSizeText.setText(m_currentMeta.getBufferSize());
    }
  }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableOutput.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableOutput.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandrasstableoutput;
import java.util.HashMap;
import java.util.Map;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
/**
* Output step for writing Cassandra SSTables (sorted-string tables).
*
* @author Rob Turner (robert{[at]}robertturner{[dot]}com{[dot]}au)
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*/
public class SSTableOutput extends BaseStep implements StepInterface {
  // step configuration (populated in initialize())
  protected SSTableOutputMeta m_meta;
  // runtime data holder for this step
  protected SSTableOutputData m_data;
  /**
   * Standard Kettle step constructor; delegates to BaseStep.
   */
  public SSTableOutput(StepMeta stepMeta, StepDataInterface stepDataInterface,
      int copyNr, TransMeta transMeta, Trans trans) {
    super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
  }
  /** The number of rows seen so far for this batch */
  protected int rowsSeen;
  /** The directory to output to */
  protected String directory;
  /** The keyspace to use */
  protected String keyspace;
  /** The name of the column family (table) to write to */
  protected String columnFamily;
  /** The key field used to determine unique keys (IDs) for rows */
  protected String keyField;
  /** Size (MB) of write buffer */
  protected String bufferSize;
  /** Writes the SSTable output */
  protected SSTableWriter writer;
  /** Used to determine input fields */
  protected RowMetaInterface inputMetadata;
  /** List of field names (optimization) */
  private String[] fieldNames;
  /** List of field indices (optimization) */
  private int[] fieldValueIndices;
  /**
   * One-time setup performed on the first row: resolves variables in the
   * step settings, validates required options, caches field name/index
   * lookups and creates the SSTable writer.
   *
   * @param smi the step metadata
   * @param sdi the step data
   * @throws Exception if a required setting is missing or the writer
   *           cannot be created
   */
  private void initialize(StepMetaInterface smi, StepDataInterface sdi)
      throws Exception {
    first = false;
    rowsSeen = 0;
    m_meta = (SSTableOutputMeta) smi;
    m_data = (SSTableOutputData) sdi;
    inputMetadata = getInputRowMeta();
    String yamlPath = environmentSubstitute(m_meta.getYamlPath());
    if (Const.isEmpty(yamlPath)) {
      throw new Exception(BaseMessages.getString(SSTableOutputMeta.PKG,
          "SSTableOutput.Error.NoPathToYAML"));
    }
    logBasic(BaseMessages.getString(SSTableOutputMeta.PKG,
        "SSTableOutput.Message.YAMLPath", yamlPath));
    // NOTE(review): this system property is process-wide; concurrent step
    // copies pointing at different yaml files would race on it — confirm.
    System.setProperty("cassandra.config", "file:" + yamlPath);
    directory = environmentSubstitute(m_meta.getDirectory());
    keyspace = environmentSubstitute(m_meta.getCassandraKeyspace());
    columnFamily = environmentSubstitute(m_meta.getColumnFamilyName());
    keyField = environmentSubstitute(m_meta.getKeyField());
    bufferSize = environmentSubstitute(m_meta.getBufferSize());
    if (Const.isEmpty(columnFamily)) {
      throw new KettleException(BaseMessages.getString(SSTableOutputMeta.PKG,
          "SSTableOutput.Error.NoColumnFamilySpecified"));
    }
    if (Const.isEmpty(keyField)) {
      throw new KettleException(BaseMessages.getString(SSTableOutputMeta.PKG,
          "SSTableOutput.Error.NoKeySpecified"));
    }
    // what are the fields? where are they?
    fieldNames = inputMetadata.getFieldNames();
    fieldValueIndices = new int[fieldNames.length];
    for (int i = 0; i < fieldNames.length; i++) {
      fieldValueIndices[i] = inputMetadata.indexOfValue(fieldNames[i]);
    }
    // create/init writer
    if (writer != null) {
      writer.close();
    }
    writer = new SSTableWriter();
    writer.setDirectory(directory);
    writer.setKeyspace(keyspace);
    writer.setColumnFamily(columnFamily);
    writer.setKeyField(keyField);
    // NOTE(review): throws NumberFormatException if the substituted buffer
    // size is not a valid integer — confirm upstream validation.
    writer.setBufferSize(Integer.parseInt(bufferSize));
    writer.init();
  }
  /**
   * Processes one incoming row per call. On end of input the writer is
   * closed; rows that fail to write are routed to the step's error stream.
   *
   * @return true while more rows may follow, false when processing is done
   */
  @Override
  public boolean processRow(StepMetaInterface smi, StepDataInterface sdi)
      throws KettleException {
    // still processing?
    if (isStopped()) {
      return false;
    }
    Object[] r = getRow();
    try {
      if (r == null) {
        // no more output - clean up/close connections
        setOutputDone();
        closeWriter();
        return false;
      }
      if (first) {
        initialize(smi, sdi);
      }
      // create record: map field name -> value, skipping null/empty values
      Map<String, Object> record = new HashMap<String, Object>();
      for (int i = 0; i < fieldNames.length; i++) {
        Object value = r[fieldValueIndices[i]];
        if (SSTableWriter.isNull(value)) {
          continue;
        }
        record.put(fieldNames[i], value);
      }
      // write it
      writer.processRow(record);
    } catch (Exception e) {
      logError(BaseMessages.getString(SSTableOutputMeta.PKG,
          "SSTableOutput.Error.FailedToProcessRow"), e);
      // route the offending row to the error-handling stream
      putError(getInputRowMeta(), r, 1L, e.getMessage(), null,
          "ERR_SSTABLE_OUTPUT_01");
    }
    // keep the step running; errors were handled per-row above
    return true;
  }
  /**
   * Closes the writer when the step is stopped mid-stream.
   */
  @Override
  public void setStopped(boolean stopped) {
    super.setStopped(stopped);
    if (stopped) {
      closeWriter();
    }
  }
  /**
   * Closes and discards the writer; failures are logged but not propagated
   * (best-effort cleanup).
   */
  public void closeWriter() {
    if (writer != null) {
      try {
        writer.close();
        writer = null;
      } catch (Exception e) {
        // best-effort close: log the failure and carry on
        logError(BaseMessages.getString(SSTableOutputMeta.PKG,
            "SSTableOutput.Error.FailedToCloseWriter"), e);
      }
    }
  }
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableOutputMeta.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrasstableoutput/SSTableOutputMeta.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandrasstableoutput;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Counter;
import org.pentaho.di.core.annotations.Step;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.w3c.dom.Node;
/**
* Provides metadata for the Cassandra SSTable output step.
*
* @author Rob Turner (robert{[at]}robertturner{[dot]}com{[dot]}au)
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*/
@Step(id = "SSTableOutput", image = "Cassandra.png", name = "SSTable Output", description = "Writes to a filesystem directory as a Cassandra SSTable", categoryDescription = "Big Data")
public class SSTableOutputMeta extends BaseStepMeta implements
StepMetaInterface {
  protected static final Class<?> PKG = SSTableOutputMeta.class;
  /** The path to the yaml file */
  protected String m_yamlPath;
  /** The directory to output to */
  protected String directory;
  /** The keyspace (database) to use */
  protected String cassandraKeyspace;
  /** The column family (table) to write to */
  protected String columnFamily = "";
  /** The field in the incoming data to use as the key for inserts */
  protected String keyField = "";
  /** Size (MB) of write buffer */
  protected String bufferSize = "16";
  /**
   * Get the path to the yaml file
   *
   * @return the path to the yaml file
   */
  public String getYamlPath() {
    return m_yamlPath;
  }
  /**
   * Set the path to the yaml file
   *
   * @param path the path to the yaml file
   */
  public void setYamlPath(String path) {
    m_yamlPath = path;
  }
  /**
   * Where the SSTables are written to
   *
   * @return String directory
   */
  public String getDirectory() {
    return directory;
  }
  /**
   * Where the SSTables are written to
   *
   * @param directory String
   */
  public void setDirectory(String directory) {
    this.directory = directory;
  }
  /**
   * Set the keyspace (db) to use
   *
   * @param keyspace the keyspace to use
   */
  public void setCassandraKeyspace(String keyspace) {
    cassandraKeyspace = keyspace;
  }
  /**
   * Get the keyspace (db) to use
   *
   * @return the keyspace (db) to use
   */
  public String getCassandraKeyspace() {
    return cassandraKeyspace;
  }
  /**
   * Set the column family (table) to write to
   *
   * @param colFam the name of the column family to write to
   */
  public void setColumnFamilyName(String colFam) {
    columnFamily = colFam;
  }
  /**
   * Get the name of the column family to write to
   *
   * @return the name of the column family to write to
   */
  public String getColumnFamilyName() {
    return columnFamily;
  }
  /**
   * Set the incoming field to use as the key for inserts
   *
   * @param keyField the name of the incoming field to use as the key
   */
  public void setKeyField(String keyField) {
    this.keyField = keyField;
  }
  /**
   * Get the name of the incoming field to use as the key for inserts
   *
   * @return the name of the incoming field to use as the key for inserts
   */
  public String getKeyField() {
    return keyField;
  }
  /**
   * Size (MB) of write buffer
   *
   * @return String
   */
  public String getBufferSize() {
    return bufferSize;
  }
  /**
   * Size (MB) of write buffer
   *
   * @param bufferSize String
   */
  public void setBufferSize(String bufferSize) {
    this.bufferSize = bufferSize;
  }
  @Override
  public boolean supportsErrorHandling() {
    // enable define error handling option
    return true;
  }
@Override
public String getXML() {
StringBuffer retval = new StringBuffer();
if (!Const.isEmpty(m_yamlPath)) {
retval.append("\n ").append(
XMLHandler.addTagValue("yaml_path", m_yamlPath));
}
if (!Const.isEmpty(directory)) {
retval.append("\n ").append(
XMLHandler.addTagValue("output_directory", directory));
}
if (!Const.isEmpty(cassandraKeyspace)) {
retval.append("\n ").append(
XMLHandler.addTagValue("cassandra_keyspace", cassandraKeyspace));
}
if (!Const.isEmpty(cassandraKeyspace)) {
retval.append("\n ").append(
XMLHandler.addTagValue("cassandra_keyspace", cassandraKeyspace));
}
if (!Const.isEmpty(columnFamily)) {
retval.append("\n ").append(
XMLHandler.addTagValue("column_family", columnFamily));
}
if (!Const.isEmpty(keyField)) {
retval.append("\n ").append(
XMLHandler.addTagValue("key_field", keyField));
}
if (!Const.isEmpty(bufferSize)) {
retval.append("\n ").append(
XMLHandler.addTagValue("buffer_size_mb", bufferSize));
}
return retval.toString();
}
  /**
   * Reads this step's settings from the given XML step node.
   *
   * @param stepnode the XML node holding the step settings
   * @param databases available database metadata (unused by this step)
   * @param counters counters map (unused by this step)
   * @throws KettleXMLException if the XML cannot be read
   */
  public void loadXML(Node stepnode, List<DatabaseMeta> databases,
      Map<String, Counter> counters) throws KettleXMLException {
    m_yamlPath = XMLHandler.getTagValue(stepnode, "yaml_path");
    directory = XMLHandler.getTagValue(stepnode, "output_directory");
    cassandraKeyspace = XMLHandler.getTagValue(stepnode, "cassandra_keyspace");
    columnFamily = XMLHandler.getTagValue(stepnode, "column_family");
    keyField = XMLHandler.getTagValue(stepnode, "key_field");
    bufferSize = XMLHandler.getTagValue(stepnode, "buffer_size_mb");
  }
/**
 * Restores this step's settings from a Kettle repository.
 * Attribute codes mirror the XML tag names used by getXML()/loadXML().
 */
public void readRep(Repository rep, ObjectId id_step,
List<DatabaseMeta> databases, Map<String, Counter> counters)
throws KettleException {
m_yamlPath = rep.getStepAttributeString(id_step, 0, "yaml_path");
directory = rep.getStepAttributeString(id_step, 0, "output_directory");
cassandraKeyspace = rep.getStepAttributeString(id_step, 0,
"cassandra_keyspace");
columnFamily = rep.getStepAttributeString(id_step, 0, "column_family");
keyField = rep.getStepAttributeString(id_step, 0, "key_field");
bufferSize = rep.getStepAttributeString(id_step, 0, "buffer_size_mb");
}
/**
 * Persists this step's settings to a Kettle repository.
 * Only non-empty settings are saved, matching the behaviour of getXML().
 */
public void saveRep(Repository rep, ObjectId id_transformation,
ObjectId id_step) throws KettleException {
if (!Const.isEmpty(m_yamlPath)) {
rep.saveStepAttribute(id_transformation, id_step, "yaml_path", m_yamlPath);
}
if (!Const.isEmpty(directory)) {
rep.saveStepAttribute(id_transformation, id_step, "output_directory",
directory);
}
if (!Const.isEmpty(cassandraKeyspace)) {
rep.saveStepAttribute(id_transformation, id_step, "cassandra_keyspace",
cassandraKeyspace);
}
if (!Const.isEmpty(columnFamily)) {
rep.saveStepAttribute(id_transformation, id_step, "column_family",
columnFamily);
}
if (!Const.isEmpty(keyField)) {
rep.saveStepAttribute(id_transformation, id_step, "key_field", keyField);
}
if (!Const.isEmpty(bufferSize)) {
rep.saveStepAttribute(id_transformation, id_step, "buffer_size_mb",
bufferSize);
}
}
/**
 * Verifies this step's placement in the transformation: warns when no
 * fields arrive from a previous step and errors when no input hop exists.
 */
public void check(List<CheckResultInterface> remarks, TransMeta transMeta,
StepMeta stepMeta, RowMetaInterface prev, String[] input,
String[] output, RowMetaInterface info) {
// Fields arriving from the previous step?
if (prev == null || prev.size() == 0) {
remarks.add(new CheckResult(CheckResult.TYPE_RESULT_WARNING,
"Not receiving any fields from previous steps!", stepMeta));
} else {
remarks.add(new CheckResult(CheckResult.TYPE_RESULT_OK,
"Step is connected to previous one, receiving " + prev.size()
+ " fields", stepMeta));
}
// At least one input hop must lead into this step.
remarks.add(input.length > 0
? new CheckResult(CheckResult.TYPE_RESULT_OK,
"Step is receiving info from other steps.", stepMeta)
: new CheckResult(CheckResult.TYPE_RESULT_ERROR,
"No input received from other steps!", stepMeta));
}
/** Creates the runtime step implementation bound to this metadata. */
public StepInterface getStep(StepMeta stepMeta,
StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
Trans trans) {
return new SSTableOutput(stepMeta, stepDataInterface, copyNr, transMeta, trans);
}
/** @return a fresh per-execution data object for the runtime step */
public StepDataInterface getStepData() {
return new SSTableOutputData();
}
/** Resets the step settings to their defaults. */
public void setDefault() {
// Empty column family, 16 MB buffer, SSTables written to the JVM temp dir.
columnFamily = "";
bufferSize = "16";
directory = System.getProperty("java.io.tmpdir");
}
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.di.trans.step.BaseStepMeta#getDialogClassName()
 */
@Override
public String getDialogClassName() {
// Fully-qualified name of the SWT dialog class paired with this step.
return "org.pentaho.di.trans.steps.cassandrasstableoutput.SSTableOutputDialog";
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrainput/CassandraInputDialog.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrainput/CassandraInputDialog.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandrainput;
import java.util.ArrayList;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.cassandra.CassandraColumnMetaData;
import org.pentaho.cassandra.CassandraConnection;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.dialog.ShowMessageDialog;
import org.pentaho.di.ui.core.widget.StyledTextComp;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
import org.pentaho.di.ui.trans.steps.tableinput.SQLValuesHighlight;
/**
* Dialog class for the CassandraInput step
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision$
*/
public class CassandraInputDialog extends BaseStepDialog implements
StepDialogInterface {
// Anchor class for i18n message-bundle lookups.
private static final Class<?> PKG = CassandraInputMeta.class;
// Working copy of the step metadata, mutated as the user edits the dialog.
private final CassandraInputMeta m_currentMeta;
// Snapshot taken at construction; compared in ok() to detect real changes.
private final CassandraInputMeta m_originalMeta;
/** various UI bits and pieces for the dialog */
private Label m_stepnameLabel;
private Text m_stepnameText;
private Label m_hostLab;
private TextVar m_hostText;
private Label m_portLab;
private TextVar m_portText;
private Label m_userLab;
private TextVar m_userText;
private Label m_passLab;
private TextVar m_passText;
private Label m_keyspaceLab;
private TextVar m_keyspaceText;
private Label m_compressionLab;
private Button m_useCompressionBut;
private Label m_outputTuplesLab;
private Button m_outputTuplesBut;
private Label m_useThriftLab;
private Button m_useThriftBut;
private Label m_timeoutLab;
private TextVar m_timeoutText;
private Label m_positionLab;
private Button m_showSchemaBut;
private Label m_cqlLab;
private StyledTextComp m_cqlText;
/**
 * Constructs the dialog, keeping both a working copy and an untouched
 * clone of the incoming step metadata.
 */
public CassandraInputDialog(Shell parent, Object in, TransMeta tr, String name) {
super(parent, (BaseStepMeta) in, tr, name);
m_currentMeta = (CassandraInputMeta) in;
m_originalMeta = (CassandraInputMeta) m_currentMeta.clone();
}
/**
 * Builds and opens the dialog, populates the widgets from the step
 * metadata via getData(), and pumps the SWT event loop until the user
 * presses OK or Cancel (or closes the shell).
 *
 * @return the step name, or null when the dialog was cancelled
 */
public String open() {
Shell parent = getParent();
Display display = parent.getDisplay();
shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX);
props.setLook(shell);
setShellImage(shell, m_currentMeta);
// used to listen to a text field (m_wStepname)
ModifyListener lsMod = new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_currentMeta.setChanged();
}
};
changed = m_currentMeta.hasChanged();
FormLayout formLayout = new FormLayout();
formLayout.marginWidth = Const.FORM_MARGIN;
formLayout.marginHeight = Const.FORM_MARGIN;
shell.setLayout(formLayout);
shell.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.Shell.Title"));
int middle = props.getMiddlePct();
int margin = Const.MARGIN;
// Stepname line
m_stepnameLabel = new Label(shell, SWT.RIGHT);
m_stepnameLabel.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.StepName.Label"));
props.setLook(m_stepnameLabel);
FormData fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.right = new FormAttachment(middle, -margin);
fd.top = new FormAttachment(0, margin);
m_stepnameLabel.setLayoutData(fd);
m_stepnameText = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
m_stepnameText.setText(stepname);
props.setLook(m_stepnameText);
m_stepnameText.addModifyListener(lsMod);
// format the text field
fd = new FormData();
fd.left = new FormAttachment(middle, 0);
fd.top = new FormAttachment(0, margin);
fd.right = new FormAttachment(100, 0);
m_stepnameText.setLayoutData(fd);
// host line
m_hostLab = new Label(shell, SWT.RIGHT);
props.setLook(m_hostLab);
m_hostLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.Hostname.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_stepnameText, margin);
fd.right = new FormAttachment(middle, -margin);
m_hostLab.setLayoutData(fd);
m_hostText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_hostText);
// Tooltip shows the variable-substituted value of the field.
m_hostText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_hostText.setToolTipText(transMeta.environmentSubstitute(m_hostText
.getText()));
}
});
m_hostText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_stepnameText, margin);
fd.left = new FormAttachment(middle, 0);
m_hostText.setLayoutData(fd);
// port line
m_portLab = new Label(shell, SWT.RIGHT);
props.setLook(m_portLab);
m_portLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.Port.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_hostText, margin);
fd.right = new FormAttachment(middle, -margin);
m_portLab.setLayoutData(fd);
m_portText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_portText);
m_portText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_portText.setToolTipText(transMeta.environmentSubstitute(m_portText
.getText()));
}
});
m_portText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_hostText, margin);
fd.left = new FormAttachment(middle, 0);
m_portText.setLayoutData(fd);
// timeout line
m_timeoutLab = new Label(shell, SWT.RIGHT);
props.setLook(m_timeoutLab);
m_timeoutLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.Timeout.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_portText, margin);
fd.right = new FormAttachment(middle, -margin);
m_timeoutLab.setLayoutData(fd);
m_timeoutText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_timeoutText);
m_timeoutText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_timeoutText.setToolTipText(transMeta
.environmentSubstitute(m_timeoutText.getText()));
}
});
m_timeoutText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_portText, margin);
fd.left = new FormAttachment(middle, 0);
m_timeoutText.setLayoutData(fd);
// username line
m_userLab = new Label(shell, SWT.RIGHT);
props.setLook(m_userLab);
m_userLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.User.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_timeoutText, margin);
fd.right = new FormAttachment(middle, -margin);
m_userLab.setLayoutData(fd);
m_userText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_userText);
m_userText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_userText.setToolTipText(transMeta.environmentSubstitute(m_userText
.getText()));
}
});
m_userText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_timeoutText, margin);
fd.left = new FormAttachment(middle, 0);
m_userText.setLayoutData(fd);
// password line
m_passLab = new Label(shell, SWT.RIGHT);
props.setLook(m_passLab);
m_passLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.Password.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_userText, margin);
fd.right = new FormAttachment(middle, -margin);
m_passLab.setLayoutData(fd);
m_passText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_passText);
m_passText.setEchoChar('*');
// If the password contains a variable, don't hide it.
m_passText.getTextWidget().addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
checkPasswordVisible();
}
});
m_passText.addModifyListener(lsMod);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_userText, margin);
fd.left = new FormAttachment(middle, 0);
m_passText.setLayoutData(fd);
// keyspace line
m_keyspaceLab = new Label(shell, SWT.RIGHT);
props.setLook(m_keyspaceLab);
m_keyspaceLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.Keyspace.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_passText, margin);
fd.right = new FormAttachment(middle, -margin);
m_keyspaceLab.setLayoutData(fd);
m_keyspaceText = new TextVar(transMeta, shell, SWT.SINGLE | SWT.LEFT
| SWT.BORDER);
props.setLook(m_keyspaceText);
m_keyspaceText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
m_keyspaceText.setToolTipText(transMeta
.environmentSubstitute(m_keyspaceText.getText()));
}
});
// NOTE(review): unlike the other text fields, lsMod is not registered on
// m_keyspaceText, so editing only the keyspace may not mark the
// transformation as changed — confirm whether this is intentional.
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_passText, margin);
fd.left = new FormAttachment(middle, 0);
m_keyspaceText.setLayoutData(fd);
// output key, column, timestamp tuples line
m_outputTuplesLab = new Label(shell, SWT.RIGHT);
props.setLook(m_outputTuplesLab);
m_outputTuplesLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.OutputTuples.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_keyspaceText, margin);
fd.right = new FormAttachment(middle, -margin);
m_outputTuplesLab.setLayoutData(fd);
m_outputTuplesBut = new Button(shell, SWT.CHECK);
props.setLook(m_outputTuplesBut);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_keyspaceText, margin);
fd.left = new FormAttachment(middle, 0);
m_outputTuplesBut.setLayoutData(fd);
m_outputTuplesBut.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
// check tuple/thrift mode
checkWidgets();
}
});
// thrift IO check box (only meaningful in tuple output mode)
m_useThriftLab = new Label(shell, SWT.RIGHT);
props.setLook(m_useThriftLab);
m_useThriftLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.Thrift.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_outputTuplesBut, margin);
fd.right = new FormAttachment(middle, -margin);
m_useThriftLab.setLayoutData(fd);
m_useThriftBut = new Button(shell, SWT.CHECK);
props.setLook(m_useThriftBut);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.top = new FormAttachment(m_outputTuplesBut, margin);
fd.left = new FormAttachment(middle, 0);
m_useThriftBut.setLayoutData(fd);
// compression check box
m_compressionLab = new Label(shell, SWT.RIGHT);
props.setLook(m_compressionLab);
m_compressionLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.UseCompression.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_useThriftBut, margin);
fd.right = new FormAttachment(middle, -margin);
m_compressionLab.setLayoutData(fd);
m_useCompressionBut = new Button(shell, SWT.CHECK);
props.setLook(m_useCompressionBut);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.left = new FormAttachment(middle, 0);
fd.top = new FormAttachment(m_useThriftBut, margin);
m_useCompressionBut.setLayoutData(fd);
m_useCompressionBut.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
m_currentMeta.setChanged();
}
});
// Buttons inherited from BaseStepDialog
wOK = new Button(shell, SWT.PUSH);
wOK.setText(BaseMessages.getString(PKG, "System.Button.OK"));
wCancel = new Button(shell, SWT.PUSH);
wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
// NOTE(review): m_cqlText is still null at this point (it is created
// further below); presumably BaseStepDialog.setButtonPositions anchors
// the buttons to the shell bottom when the last control is null — confirm.
setButtonPositions(new Button[] { wOK, wCancel }, margin, m_cqlText);
// position label
m_positionLab = new Label(shell, SWT.NONE);
props.setLook(m_positionLab);
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.right = new FormAttachment(middle, -margin);
fd.bottom = new FormAttachment(wOK, -margin);
m_positionLab.setLayoutData(fd);
// "show schema" button: connects to Cassandra and pops up the column
// family schema for the table referenced by the current CQL query.
m_showSchemaBut = new Button(shell, SWT.PUSH);
m_showSchemaBut.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.Schema.Button"));
props.setLook(m_showSchemaBut);
fd = new FormData();
fd.right = new FormAttachment(100, 0);
fd.bottom = new FormAttachment(wOK, -margin);
m_showSchemaBut.setLayoutData(fd);
m_showSchemaBut.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
RowMeta outputF = new RowMeta();
CassandraConnection conn = null;
try {
String hostS = transMeta.environmentSubstitute(m_hostText.getText());
String portS = transMeta.environmentSubstitute(m_portText.getText());
String userS = m_userText.getText();
String passS = m_passText.getText();
if (!Const.isEmpty(userS) && !Const.isEmpty(passS)) {
userS = transMeta.environmentSubstitute(userS);
passS = transMeta.environmentSubstitute(passS);
}
String keyspaceS = transMeta.environmentSubstitute(m_keyspaceText
.getText());
String cqlText = transMeta.environmentSubstitute(m_cqlText.getText());
conn = CassandraInputData.getCassandraConnection(hostS,
Integer.parseInt(portS), userS, passS);
try {
conn.setKeyspace(keyspaceS);
} catch (InvalidRequestException ire) {
logError(
BaseMessages
.getString(PKG,
"CassandraInputDialog.Error.ProblemGettingSchemaInfo.Message")
+ ":\n\n" + ire.why, ire);
new ErrorDialog(
shell,
BaseMessages
.getString(PKG,
"CassandraInputDialog.Error.ProblemGettingSchemaInfo.Title"),
BaseMessages
.getString(PKG,
"CassandraInputDialog.Error.ProblemGettingSchemaInfo.Message")
+ ":\n\n" + ire.why, ire);
return;
}
String colFam = CassandraInputData
.getColumnFamilyNameFromCQLSelectQuery(cqlText);
if (Const.isEmpty(colFam)) {
throw new Exception(BaseMessages.getString(PKG,
"CassandraInput.Error.NoFromClauseInQuery"));
}
if (!CassandraColumnMetaData.columnFamilyExists(conn, colFam)) {
throw new Exception(BaseMessages.getString(PKG,
"CassandraInput.Error.NonExistentColumnFamily", colFam,
keyspaceS));
}
CassandraColumnMetaData cassMeta = new CassandraColumnMetaData(conn,
colFam);
String schemaDescription = cassMeta.getSchemaDescription();
ShowMessageDialog smd = new ShowMessageDialog(shell,
SWT.ICON_INFORMATION | SWT.OK, "Schema info", schemaDescription,
true);
smd.open();
} catch (Exception e1) {
logError(
BaseMessages
.getString(PKG,
"CassandraInputDialog.Error.ProblemGettingSchemaInfo.Message")
+ ":\n\n" + e1.getMessage(), e1);
new ErrorDialog(
shell,
BaseMessages.getString(PKG,
"CassandraInputDialog.Error.ProblemGettingSchemaInfo.Title"),
BaseMessages
.getString(PKG,
"CassandraInputDialog.Error.ProblemGettingSchemaInfo.Message")
+ ":\n\n" + e1.getMessage(), e1);
} finally {
// Always release the Cassandra connection, success or failure.
if (conn != null) {
conn.close();
}
}
}
});
// cql stuff
m_cqlLab = new Label(shell, SWT.NONE);
props.setLook(m_cqlLab);
m_cqlLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.CQL.Label"));
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_useCompressionBut, margin);
fd.right = new FormAttachment(middle, -margin);
m_cqlLab.setLayoutData(fd);
m_cqlText = new StyledTextComp(transMeta, shell, SWT.MULTI | SWT.LEFT
| SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL, "");
props.setLook(m_cqlText, props.WIDGET_STYLE_FIXED);
m_cqlText.addModifyListener(lsMod);
fd = new FormData();
fd.left = new FormAttachment(0, 0);
fd.top = new FormAttachment(m_cqlLab, margin);
fd.right = new FormAttachment(100, -2 * margin);
fd.bottom = new FormAttachment(m_showSchemaBut, -margin);
m_cqlText.setLayoutData(fd);
m_cqlText.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent e) {
setPosition();
m_cqlText.setToolTipText(transMeta.environmentSubstitute(m_cqlText
.getText()));
}
});
// Text Highlighting
m_cqlText.addLineStyleListener(new SQLValuesHighlight());
// Keep the line/column indicator in sync with every way the caret can move.
m_cqlText.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(KeyEvent e) {
setPosition();
}
@Override
public void keyReleased(KeyEvent e) {
setPosition();
}
});
m_cqlText.addFocusListener(new FocusAdapter() {
@Override
public void focusGained(FocusEvent e) {
setPosition();
}
@Override
public void focusLost(FocusEvent e) {
setPosition();
}
});
m_cqlText.addMouseListener(new MouseAdapter() {
@Override
public void mouseDoubleClick(MouseEvent e) {
setPosition();
}
@Override
public void mouseDown(MouseEvent e) {
setPosition();
}
@Override
public void mouseUp(MouseEvent e) {
setPosition();
}
});
// Add listeners
lsCancel = new Listener() {
public void handleEvent(Event e) {
cancel();
}
};
lsOK = new Listener() {
public void handleEvent(Event e) {
ok();
}
};
wCancel.addListener(SWT.Selection, lsCancel);
wOK.addListener(SWT.Selection, lsOK);
lsDef = new SelectionAdapter() {
@Override
public void widgetDefaultSelected(SelectionEvent e) {
ok();
}
};
m_stepnameText.addSelectionListener(lsDef);
// Detect X or ALT-F4 or something that kills this window...
shell.addShellListener(new ShellAdapter() {
@Override
public void shellClosed(ShellEvent e) {
cancel();
}
});
setSize();
getData();
shell.open();
// Standard SWT modal event loop: block until the shell is disposed.
while (!shell.isDisposed()) {
if (!display.readAndDispatch()) {
display.sleep();
}
}
return stepname;
}
/**
 * Keeps the thrift-IO checkbox consistent with tuple-output mode: it is
 * enabled only while tuple output is selected, and cleared otherwise.
 */
private void checkWidgets() {
boolean tupleMode = m_outputTuplesBut.getSelection();
if (!tupleMode) {
m_useThriftBut.setSelection(false);
}
m_useThriftBut.setEnabled(tupleMode);
}
/**
 * Copies the widget values back into the step metadata and closes the
 * dialog. Does nothing while the step name field is empty.
 */
protected void ok() {
if (Const.isEmpty(m_stepnameText.getText())) {
return;
}
stepname = m_stepnameText.getText();
m_currentMeta.setCassandraHost(m_hostText.getText());
m_currentMeta.setCassandraPort(m_portText.getText());
m_currentMeta.setSocketTimeout(m_timeoutText.getText());
m_currentMeta.setUsername(m_userText.getText());
m_currentMeta.setPassword(m_passText.getText());
m_currentMeta.setCassandraKeyspace(m_keyspaceText.getText());
m_currentMeta.setUseCompression(m_useCompressionBut.getSelection());
m_currentMeta.setOutputKeyValueTimestampTuples(m_outputTuplesBut
.getSelection());
m_currentMeta.setUseThriftIO(m_useThriftBut.getSelection());
m_currentMeta.setCQLSelectQuery(m_cqlText.getText());
// Only flag the transformation as changed when something actually differs
// from the snapshot taken when the dialog was opened.
if (!m_originalMeta.equals(m_currentMeta)) {
m_currentMeta.setChanged();
changed = m_currentMeta.hasChanged();
}
dispose();
}
/** Discards the edits: restores the original changed flag and closes. */
protected void cancel() {
m_currentMeta.setChanged(changed);
// A null step name tells the caller the dialog was cancelled.
stepname = null;
dispose();
}
/**
 * Populates the dialog widgets from the current step metadata, then
 * synchronises dependent widget states via checkWidgets().
 */
protected void getData() {
if (!Const.isEmpty(m_currentMeta.getCassandraHost())) {
m_hostText.setText(m_currentMeta.getCassandraHost());
}
if (!Const.isEmpty(m_currentMeta.getCassandraPort())) {
m_portText.setText(m_currentMeta.getCassandraPort());
}
if (!Const.isEmpty(m_currentMeta.getSocketTimeout())) {
m_timeoutText.setText(m_currentMeta.getSocketTimeout());
}
if (!Const.isEmpty(m_currentMeta.getUsername())) {
m_userText.setText(m_currentMeta.getUsername());
}
if (!Const.isEmpty(m_currentMeta.getPassword())) {
m_passText.setText(m_currentMeta.getPassword());
}
if (!Const.isEmpty(m_currentMeta.getCassandraKeyspace())) {
m_keyspaceText.setText(m_currentMeta.getCassandraKeyspace());
}
m_useCompressionBut.setSelection(m_currentMeta.getUseCompression());
m_outputTuplesBut.setSelection(m_currentMeta
.getOutputKeyValueTimestampTuples());
m_useThriftBut.setSelection(m_currentMeta.getUseThriftIO());
if (!Const.isEmpty(m_currentMeta.getCQLSelectQuery())) {
m_cqlText.setText(m_currentMeta.getCQLSelectQuery());
}
checkWidgets();
}
/**
 * Updates the line/column label beneath the CQL editor from the current
 * caret offset.
 */
protected void setPosition() {
String scr = m_cqlText.getText();
int caret = m_cqlText.getCaretOffset();
int linenr = m_cqlText.getLineAtOffset(caret) + 1;
// Column = distance from the caret back to the nearest preceding line
// break ('\n' or '\r'); lastIndexOf returns -1 when there is none, which
// makes the first line work out naturally.
int lastBreak = Math.max(scr.lastIndexOf('\n', caret - 1),
scr.lastIndexOf('\r', caret - 1));
int colnr = caret - lastBreak - 1;
m_positionLab.setText(BaseMessages.getString(PKG,
"CassandraInputDialog.Position.Label", "" + linenr, "" + colnr));
}
/**
 * Masks the password with '*' unless it contains Kettle variables, in
 * which case it is shown in clear text so the references stay readable.
 */
private void checkPasswordVisible() {
ArrayList<String> vars = new ArrayList<String>();
StringUtil.getUsedVariables(m_passText.getText(), vars, true);
// '\0' disables echo masking entirely.
m_passText.setEchoChar(vars.isEmpty() ? '*' : '\0');
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrainput/CassandraInput.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrainput/CassandraInput.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandrainput;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.Compression;
import org.apache.cassandra.thrift.CqlResult;
import org.apache.cassandra.thrift.CqlRow;
import org.pentaho.cassandra.CassandraColumnMetaData;
import org.pentaho.cassandra.CassandraConnection;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
/**
* Class providing an input step for reading data from a table (column family)
* in Cassandra. Accesses the schema information stored in Cassandra for type
* information.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision$
*/
public class CassandraInput extends BaseStep implements StepInterface {
// Cached casts of the step meta/data objects, set on the first row.
protected CassandraInputMeta m_meta;
protected CassandraInputData m_data;
/** Standard Kettle step constructor; all work happens in processRow(). */
public CassandraInput(StepMeta stepMeta, StepDataInterface stepDataInterface,
int copyNr, TransMeta transMeta, Trans trans) {
super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
}
/** Connection to cassandra */
protected CassandraConnection m_connection;
/** Column meta data and schema information */
protected CassandraColumnMetaData m_cassandraMeta;
/** For iterating over a result set */
protected Iterator<CqlRow> m_resultIterator;
/**
 * map of indexes into the output field structure (key is special - it's
 * always the first field in the output row meta)
 */
protected Map<String, Integer> m_outputFormatMap = new HashMap<String, Integer>();
@Override
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi)
throws KettleException {
if (first) {
first = false;
m_data = (CassandraInputData) sdi;
m_meta = (CassandraInputMeta) smi;
// Get the connection to Cassandra
String hostS = environmentSubstitute(m_meta.getCassandraHost());
String portS = environmentSubstitute(m_meta.getCassandraPort());
String timeoutS = environmentSubstitute(m_meta.getSocketTimeout());
String userS = m_meta.getUsername();
String passS = m_meta.getPassword();
if (!Const.isEmpty(userS) && !Const.isEmpty(passS)) {
userS = environmentSubstitute(userS);
passS = environmentSubstitute(passS);
}
String keyspaceS = environmentSubstitute(m_meta.getCassandraKeyspace());
if (Const.isEmpty(hostS) || Const.isEmpty(portS)
|| Const.isEmpty(keyspaceS)) {
throw new KettleException("Some connection details are missing!!");
}
logBasic(BaseMessages.getString(CassandraInputMeta.PKG,
"CassandraInput.Info.Connecting", hostS, portS, keyspaceS));
try {
if (Const.isEmpty(timeoutS)) {
m_connection = CassandraInputData.getCassandraConnection(hostS,
Integer.parseInt(portS), userS, passS);
} else {
m_connection = CassandraInputData
.getCassandraConnection(hostS, Integer.parseInt(portS), userS,
passS, Integer.parseInt(timeoutS));
}
m_connection.setKeyspace(keyspaceS);
} catch (Exception ex) {
closeConnection();
throw new KettleException(ex.getMessage(), ex);
}
// check the source column family (table) first
String colFamName = m_data
.getColumnFamilyNameFromCQLSelectQuery(environmentSubstitute(m_meta
.getCQLSelectQuery()));
if (Const.isEmpty(colFamName)) {
throw new KettleException(BaseMessages.getString(
CassandraInputMeta.PKG,
"CassandraInput.Error.NonExistentColumnFamily"));
}
try {
if (!CassandraColumnMetaData.columnFamilyExists(m_connection,
colFamName)) {
throw new KettleException(BaseMessages.getString(
CassandraInputMeta.PKG,
"CassandraInput.Error.NonExistentColumnFamily", colFamName,
keyspaceS));
}
} catch (Exception ex) {
closeConnection();
throw new KettleException(ex.getMessage(), ex);
}
// set up the output row meta
m_data.setOutputRowMeta(new RowMeta());
m_meta.getFields(m_data.getOutputRowMeta(), getStepname(), null, null,
this);
// check that there are some outgoing fields!
if (m_data.getOutputRowMeta().size() == 0) {
throw new KettleException(BaseMessages.getString(
CassandraInputMeta.PKG,
"CassandraInput.Error.QueryWontProduceOutputFields"));
}
// set up the lookup map
if (!m_meta.getOutputKeyValueTimestampTuples()) {
for (int i = 0; i < m_data.getOutputRowMeta().size(); i++) {
String fieldName = m_data.getOutputRowMeta().getValueMeta(i)
.getName();
m_outputFormatMap.put(fieldName, i);
}
}
// column family name (key) is the first field output
try {
logBasic(BaseMessages.getString(CassandraInputMeta.PKG,
"CassandraInput.Info.GettintMetaData", colFamName));
m_cassandraMeta = new CassandraColumnMetaData(m_connection, colFamName);
} catch (Exception e) {
closeConnection();
throw new KettleException(e.getMessage(), e);
}
String queryS = environmentSubstitute(m_meta.getCQLSelectQuery());
Compression compression = m_meta.getUseCompression() ? Compression.GZIP
: Compression.NONE;
try {
if (!m_meta.getUseThriftIO()) {
logBasic(BaseMessages.getString(
CassandraInputMeta.PKG,
"CassandraInput.Info.ExecutingQuery",
queryS,
(m_meta.getUseCompression() ? BaseMessages.getString(
CassandraInputMeta.PKG,
"CassandraInput.Info.UsingGZIPCompression") : "")));
byte[] queryBytes = (m_meta.getUseCompression() ? CassandraInputData
.compressQuery(queryS, compression) : queryS.getBytes());
// In Cassandra 1.1 the version of CQL to use can be set
// programatically. The default
// is to use CQL v 2.0.0
// m_connection.getClient().set_cql_version("3.0.0");
CqlResult result = m_connection.getClient().execute_cql_query(
ByteBuffer.wrap(queryBytes), compression);
m_resultIterator = result.getRowsIterator();
} else if (m_meta.getOutputKeyValueTimestampTuples()) {
// --------------- use thrift IO (only applicable for <key, value>
// tuple mode at present) ----------
List<String> userCols = (m_meta.m_specificCols != null && m_meta.m_specificCols
.size() > 0) ? m_meta.m_specificCols : null;
m_data.sliceModeInit(m_cassandraMeta, userCols, m_meta.m_rowLimit,
m_meta.m_colLimit, m_meta.m_rowBatchSize, m_meta.m_colBatchSize);
List<Object[]> batch = m_data.cassandraRowToKettleTupleSliceMode(
m_cassandraMeta, m_connection);
while (batch != null) {
for (Object[] r : batch) {
putRow(m_data.getOutputRowMeta(), r);
if (log.isRowLevel()) {
log.logRowlevel(toString(), "Outputted row #" + getProcessed()
+ " : " + r);
}
}
batch = m_data.cassandraRowToKettleTupleSliceMode(m_cassandraMeta,
m_connection);
}
// done
closeConnection();
setOutputDone();
return false;
// --------------- end thrift IO mode
}
} catch (Exception e) {
closeConnection();
throw new KettleException(e.getMessage(), e);
}
}
if (m_resultIterator.hasNext()) {
CqlRow nextRow = m_resultIterator.next();
Object[] outputRowData = null;
if (m_meta.getOutputKeyValueTimestampTuples()) {
Iterator<Column> columnIterator = nextRow.getColumnsIterator();
// The key always appears to be the first column in the list (even
// though it is separately
// avaliable via CqlRow.getKey(). We discard it here because testing for
// a column named
// "KEY" only works if column names are textual
// ARGHHHHH! - this assumption is only true for wildcard queries!!!!!!
// (i.e. select *)!!!!!!
// So select col1, col2 etc. or ranges (which we don't support) will not
// include the row key
// as the first column
if (m_meta.m_isSelectStarQuery) {
columnIterator.next(); // throw away the key column
}
while ((outputRowData = m_data.cassandraRowToKettleTupleMode(
m_cassandraMeta, nextRow, columnIterator)) != null) {
putRow(m_data.getOutputRowMeta(), outputRowData);
if (log.isRowLevel()) {
log.logRowlevel(toString(), "Outputted row #" + getProcessed()
+ " : " + outputRowData);
}
}
} else {
outputRowData = m_data.cassandraRowToKettle(m_cassandraMeta, nextRow,
m_outputFormatMap);
// output the row
putRow(m_data.getOutputRowMeta(), outputRowData);
if (log.isRowLevel()) {
log.logRowlevel(toString(), "Outputted row #" + getProcessed()
+ " : " + outputRowData);
}
}
} else {
closeConnection();
setOutputDone();
return false;
}
if (checkFeedback(getProcessed())) {
logBasic("Read " + getProcessed() + " rows from Cassandra");
}
return true;
}
/**
 * Marks this step as stopped and, when stopping, closes the open Cassandra
 * connection so that any in-flight Thrift call does not keep resources alive.
 *
 * @param stopped true if the step is being stopped
 */
@Override
public void setStopped(boolean stopped) {
  // already flagged as stopped - nothing more to do
  if (isStopped() && stopped) {
    return;
  }
  super.setStopped(stopped);
  if (stopped) {
    // release the Cassandra connection (closeConnection is null-safe)
    closeConnection();
  }
}
/**
 * Closes the connection to Cassandra, if one has been opened. Does nothing
 * when m_connection is null.
 *
 * NOTE(review): m_connection is not set to null after closing, so a repeated
 * call will invoke close() again - confirm that CassandraConnection.close()
 * is idempotent.
 */
protected void closeConnection() {
  if (m_connection != null) {
    logBasic(BaseMessages.getString(CassandraInputMeta.PKG,
        "CassandraInput.Info.ClosingConnection"));
    m_connection.close();
  }
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrainput/CassandraInputData.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrainput/CassandraInputData.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandrainput;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.zip.Deflater;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnOrSuperColumn;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.Compression;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.CqlRow;
import org.apache.cassandra.thrift.KeyRange;
import org.apache.cassandra.thrift.KeySlice;
import org.apache.cassandra.thrift.SlicePredicate;
import org.apache.cassandra.thrift.SliceRange;
import org.apache.cassandra.thrift.TimedOutException;
import org.pentaho.cassandra.CassandraColumnMetaData;
import org.pentaho.cassandra.CassandraConnection;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.trans.step.BaseStepData;
import org.pentaho.di.trans.step.StepDataInterface;
/**
* Data class for the CassandraInput step. Contains some utility methods for
* obtaining a connection to cassandra, translating a row from cassandra to
* Kettle and for compressing a query.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision$
*/
public class CassandraInputData extends BaseStepData implements
StepDataInterface {
/** The output data format */
protected RowMetaInterface m_outputRowMeta;
/**
 * Get the output row format
 *
 * @return the output row format (as set via setOutputRowMeta()), or null if
 *         it has not been configured yet
 */
public RowMetaInterface getOutputRowMeta() {
  return m_outputRowMeta;
}
/**
 * Set the output row format
 *
 * @param rmi the output row format that rows produced by the conversion
 *          methods of this class will conform to
 */
public void setOutputRowMeta(RowMetaInterface rmi) {
  m_outputRowMeta = rmi;
}
/**
 * Get a connection to cassandra using the default socket timeout.
 *
 * @param host the hostname of a cassandra node
 * @param port the port that cassandra is listening on
 * @param username the username for (optional) authentication
 * @param password the password for (optional) authentication
 * @return a connection to cassandra
 * @throws Exception if a problem occurs during connection
 */
public static CassandraConnection getCassandraConnection(String host,
    int port, String username, String password) throws Exception {
  // delegate to the timeout-aware overload; -1 is presumably interpreted as
  // "use the default timeout" by CassandraConnection - TODO confirm
  return getCassandraConnection(host, port, username, password, -1);
}
/**
 * Get a connection to cassandra
 *
 * @param host the hostname of a cassandra node
 * @param port the port that cassandra is listening on
 * @param username the username for (optional) authentication
 * @param password the password for (optional) authentication
 * @param timeout the socket timeout to use (NOTE(review): a negative value
 *          appears to mean "use the default" - the four-argument overload
 *          passes -1; confirm against CassandraConnection)
 * @return a connection to cassandra
 * @throws Exception if a problem occurs during connection
 */
public static CassandraConnection getCassandraConnection(String host,
    int port, String username, String password, int timeout) throws Exception {
  return new CassandraConnection(host, port, username, password, timeout);
}
// ------------------------------------------------------------------
// The following code implements pure Thrift-based <key, col_name, value,
// timestamp>
// tuple extraction
protected boolean m_newSliceQuery = false;
protected List<String> m_requestedCols = null;
protected int m_sliceRowsMax;
protected int m_sliceColsMax;
protected int m_sliceRowsBatchSize;
protected int m_sliceColsBatchSize;
protected SliceRange m_sliceRange;
protected KeyRange m_keyRange;
protected SlicePredicate m_slicePredicate;
protected ColumnParent m_colParent;
int m_rowIndex;
int m_colIndex;
// current batch of rows
protected List<KeySlice> m_cassandraRows;
// current batch of columns from current row
protected List<ColumnOrSuperColumn> m_currentCols;
protected List<Object[]> m_converted;
protected int m_colCount;
protected int m_rowCount;
/**
 * Initialises state for pure Thrift-based slice reading (tuple mode).
 * Subsequent calls to cassandraRowToKettleTupleSliceMode() page through the
 * column family using get_range_slices/get_slice in batches sized here.
 *
 * @param meta meta data for the column family being read
 * @param colNames specific columns to fetch, or null/empty to fetch all
 * @param maxRows maximum number of rows to read; compared with == in the
 *          read loop, so a negative value effectively means "no limit"
 * @param maxCols maximum number of columns to read per row (same convention)
 * @param rowBatchSize number of rows to pull over per Thrift call;
 *          a value <= 0 means "all at once" (Integer.MAX_VALUE)
 * @param colBatchSize number of columns to pull over per Thrift call;
 *          a value <= 0 means "all at once" (Integer.MAX_VALUE)
 * @throws KettleException if a requested column name cannot be encoded
 */
public void sliceModeInit(CassandraColumnMetaData meta,
    List<String> colNames, int maxRows, int maxCols, int rowBatchSize,
    int colBatchSize) throws KettleException {
  m_newSliceQuery = true;
  m_requestedCols = colNames;
  m_sliceRowsMax = maxRows;
  m_sliceColsMax = maxCols;
  m_sliceRowsBatchSize = rowBatchSize;
  m_sliceColsBatchSize = colBatchSize;
  m_rowIndex = 0;
  m_colIndex = 0;
  // non-positive batch sizes mean "fetch everything in one call"
  if (m_sliceColsBatchSize <= 0) {
    m_sliceColsBatchSize = Integer.MAX_VALUE;
  }
  if (m_sliceRowsBatchSize <= 0) {
    m_sliceRowsBatchSize = Integer.MAX_VALUE;
  }
  List<ByteBuffer> specificCols = null;
  if (m_requestedCols != null && m_requestedCols.size() > 0) {
    specificCols = new ArrayList<ByteBuffer>();
    // encode the textual column names
    for (String colName : m_requestedCols) {
      ByteBuffer encoded = meta.columnNameToByteBuffer(colName);
      specificCols.add(encoded);
    }
  }
  m_slicePredicate = new SlicePredicate();
  if (specificCols == null) {
    // no specific columns requested - slice over all columns; empty
    // start/finish buffers denote an unbounded column range
    m_sliceRange = new SliceRange(ByteBuffer.wrap(new byte[0]),
        ByteBuffer.wrap(new byte[0]), false, m_sliceColsBatchSize);
    m_slicePredicate.setSlice_range(m_sliceRange);
  } else {
    m_slicePredicate.setColumn_names(specificCols);
  }
  // empty start/end keys select the entire key range
  m_keyRange = new KeyRange(m_sliceRowsBatchSize);
  m_keyRange.setStart_key(new byte[0]);
  m_keyRange.setEnd_key(new byte[0]);
  m_colParent = new ColumnParent(meta.getColumnFamilyName());
  m_converted = new ArrayList<Object[]>();
}
/**
 * Positions m_rowIndex/m_currentCols on the first row, at or after the
 * current m_rowIndex, that actually has columns. If every remaining row in
 * the current batch is empty (Cassandra range scans can return keys with no
 * columns, e.g. for deleted rows), m_currentCols is set to null to signal
 * that there is nothing to output from this batch.
 */
private void advanceToNonEmptyRow() {
  KeySlice row = m_cassandraRows.get(m_rowIndex);
  m_currentCols = row.getColumns();
  // skip over column-less rows while more rows remain in the batch
  // (the original code compared size() against a constant-zero local,
  // "skipSize", which obscured this simple emptiness test)
  while (m_currentCols.isEmpty()
      && m_rowIndex < m_cassandraRows.size() - 1) {
    m_rowIndex++;
    row = m_cassandraRows.get(m_rowIndex);
    m_currentCols = row.getColumns();
  }
  if (m_currentCols.isEmpty()) {
    // we've been through the batch and there are no columns in any of these
    // rows - so nothing to output! Indicate this by setting currentCols to
    // null
    m_currentCols = null;
  }
}
/**
 * Fetches the next batch of rows via get_range_slices, resuming from the
 * last key of the previous batch. Sets m_cassandraRows (and m_currentCols)
 * to null when there are no more rows to read or the row LIMIT has been hit.
 *
 * @param conn the connection to read from
 * @throws Exception if the Thrift call fails
 */
private void getNextBatchOfRows(CassandraConnection conn) throws Exception {
  // reset the column range (if necessary)
  if (m_requestedCols == null) {
    m_sliceRange = m_sliceRange.setStart(ByteBuffer.wrap(new byte[0]));
    m_sliceRange = m_sliceRange.setFinish(ByteBuffer.wrap(new byte[0]));
    m_slicePredicate.setSlice_range(m_sliceRange);
  }
  // set the key range start to the last key from the last batch of rows
  m_keyRange.setStart_key(m_cassandraRows.get(m_cassandraRows.size() - 1)
      .getKey());
  m_cassandraRows = conn.getClient().get_range_slices(m_colParent,
      m_slicePredicate, m_keyRange, ConsistencyLevel.ONE);
  m_colCount = 0;
  // key ranges are *inclusive* of the start key - we will have already
  // processed the first
  // row in the last batch. Hence start at index 1 of this batch
  m_rowIndex = 1;
  // size() <= 1 means the new batch holds nothing beyond the already-seen
  // start key, i.e. we have reached the end of the column family
  if (m_cassandraRows == null || m_cassandraRows.size() <= 1
      || m_rowCount == m_sliceRowsMax) {
    // indicate done
    m_currentCols = null;
    m_cassandraRows = null;
  } else {
    advanceToNonEmptyRow();
  }
}
/**
 * Fetches the next batch of columns for the current row via get_slice,
 * resuming from the last column already processed. When the current row has
 * no more columns, advances to the next row (fetching a new batch of rows if
 * the current batch is exhausted).
 *
 * @param conn the connection to read from
 * @throws Exception if a Thrift call fails
 */
private void getNextBatchOfColumns(CassandraConnection conn) throws Exception {
  // restart the column slice at the last column seen in the previous batch
  m_sliceRange = m_sliceRange.setStart(m_currentCols
      .get(m_currentCols.size() - 1).getColumn().bufferForName());
  m_slicePredicate.setSlice_range(m_sliceRange);
  // fetch the next bunch of columns for the current row
  m_currentCols = conn.getClient().get_slice(
      m_cassandraRows.get(m_rowIndex).bufferForKey(), m_colParent,
      m_slicePredicate, ConsistencyLevel.ONE);
  // as far as I understand it - these things are always inclusive of the
  // start element,
  // so we need to skip the first element cause it was processed already in
  // the last batch
  // of columns
  if (m_currentCols == null || m_currentCols.size() <= 1) {
    // no more columns in the current row - move to the next row
    m_rowCount++;
    m_rowIndex++;
    m_colCount = 0;
    if (m_rowIndex == m_cassandraRows.size()) {
      // current batch of rows exhausted - page in the next batch
      getNextBatchOfRows(conn);
      while (m_cassandraRows != null && m_currentCols == null) {
        // keep going until we get some rows with columns!
        getNextBatchOfRows(conn);
      }
    } else {
      advanceToNonEmptyRow();
      while (m_cassandraRows != null && m_currentCols == null) {
        // keep going until we get some rows with columns!
        getNextBatchOfRows(conn);
      }
    }
  } else {
    // we need to discard the first col in the list since we will have
    // processed
    // that already in the batch
    m_currentCols.remove(0);
  }
}
/**
 * Produces the next batch of Kettle tuple rows (key, colName, colValue,
 * timestamp) using pure Thrift slice calls rather than CQL. State set up by
 * sliceModeInit() tracks the current position; this method pages through
 * rows and columns transparently, retrying timed-out Thrift calls up to
 * five consecutive times.
 *
 * @param metaData meta data for the column family being read
 * @param conn the connection to read from
 * @return a list of converted Kettle rows, or null when there is nothing
 *         more to read (end of data or row LIMIT reached)
 * @throws KettleException if the maximum number of consecutive timeouts is
 *           exceeded or another problem occurs
 */
public List<Object[]> cassandraRowToKettleTupleSliceMode(
    CassandraColumnMetaData metaData, CassandraConnection conn)
    throws KettleException {
  m_converted.clear();
  int timeouts = 0;
  try {
    // ---- fetch phase: position m_currentCols on the next chunk of columns,
    // retrying on Thrift timeouts (max 5 consecutive)
    while (timeouts < 5) {
      try {
        if (m_newSliceQuery) {
          // first call after sliceModeInit() - issue the initial range slice
          m_cassandraRows = conn.getClient().get_range_slices(m_colParent,
              m_slicePredicate, m_keyRange, ConsistencyLevel.ONE);
          if (m_cassandraRows == null || m_cassandraRows.size() == 0) {
            // done
            return null;
          } else {
            advanceToNonEmptyRow();
            while (m_cassandraRows != null && m_currentCols == null) {
              // keep going until we get some rows with columns!
              getNextBatchOfRows(conn);
            }
            if (m_cassandraRows == null) {
              // we're done
              return null;
            }
            m_colCount = 0;
            m_rowCount = 0;
            m_newSliceQuery = false;
          }
        } else {
          // determine what we need to get next - more columns from current
          // row, or start next row
          // or get next row batch or done
          if (m_rowCount == m_sliceRowsMax) {
            // hit our LIMIT of rows - done
            return null;
          }
          if (m_rowIndex == m_cassandraRows.size()) {
            // get next batch of rows
            getNextBatchOfRows(conn);
            while (m_cassandraRows != null && m_currentCols == null) {
              // keep going until we get some rows with columns!
              getNextBatchOfRows(conn);
            }
            if (m_cassandraRows == null) {
              // we're done
              return null;
            }
          } else if (m_colCount == -1) {
            // m_colCount == -1 flags "start the next row" (set by the
            // convert phase below when a row is finished)
            KeySlice row = m_cassandraRows.get(m_rowIndex);
            m_currentCols = row.getColumns();
            m_colCount = 0;
          } else {
            getNextBatchOfColumns(conn);
            // check against our limit again
            if (m_rowCount == m_sliceRowsMax) {
              return null;
            }
            if (m_cassandraRows == null) {
              // we're done
              return null;
            }
          }
        }
        break;
      } catch (TimedOutException e) {
        timeouts++;
      }
    }
    if (timeouts == 5) {
      throw new KettleException(
          "Maximum number of consecutive timeouts exceeded");
    }
    // ---- convert phase: emit one tuple row per column of the current row
    KeySlice row = m_cassandraRows.get(m_rowIndex);
    Object rowKey = metaData.getKeyValue(row);
    if (rowKey == null) {
      throw new KettleException("Unable to obtain a key value for the row!");
    }
    String keyName = metaData.getKeyName();
    int keyIndex = m_outputRowMeta.indexOfValue(keyName);
    if (keyIndex < 0) {
      throw new KettleException("Unable to find the key field name '"
          + keyName + "' in the output row meta data!");
    }
    for (int i = 0; i < m_currentCols.size(); i++) {
      Object[] outputRowData = RowDataUtil.allocateRowData(m_outputRowMeta
          .size());
      outputRowData[keyIndex] = rowKey;
      Column col = m_currentCols.get(i).getColumn();
      String colName = metaData.getColumnName(col);
      Object colValue = metaData.getColumnValue(col);
      if (colValue == null) {
        // skip null columns (only applies if we're processing
        // a specified list of columns rather than all columns).
        continue;
      }
      // indices 1 (column name), 2 (value) and 3 (timestamp) are fixed
      // positions in the tuple-mode output layout
      outputRowData[1] = colName;
      String stringV = colValue.toString();
      outputRowData[2] = stringV;
      if (colValue instanceof Date) {
        // format dates via Kettle's date value meta rather than toString()
        ValueMeta tempDateMeta = new ValueMeta("temp",
            ValueMetaInterface.TYPE_DATE);
        stringV = tempDateMeta.getString(colValue);
        outputRowData[2] = stringV;
      } else if (colValue instanceof byte[]) {
        // binary values are passed through untouched
        outputRowData[2] = colValue;
      }
      // the column's raw long timestamp
      long timestampL = col.getTimestamp();
      outputRowData[3] = timestampL;
      m_converted.add(outputRowData);
      m_colCount++;
      if (m_colCount == m_sliceColsMax && m_requestedCols == null) {
        // max number of cols reached for this row
        m_colCount = -1; // indicate move to the next row
        m_rowCount++;
        m_rowIndex++;
        break; // don't process any more
      }
    }
    if (m_requestedCols != null) {
      // assume that we don't need to page columns when the user has
      // explicitly named the ones that they want
      m_colCount = -1;
      m_rowCount++;
      m_rowIndex++;
    }
  } catch (Exception ex) {
    throw new KettleException(ex.getMessage(), ex);
  }
  return m_converted;
}
// --------------- End Thrift-based tuple mode -------------------------
/**
 * Converts a cassandra row to a Kettle row in the key, colName, colValue,
 * timestamp format. Emits one Kettle row per column; the caller invokes this
 * repeatedly with the same iterator until it returns null.
 *
 * @param metaData meta data on the cassandra column family being read from
 * @param cassandraRow a row from the column family
 * @param cassandraColIter an iterator over columns for the current row
 *
 * @return a Kettle row, or null once the row's columns are exhausted
 * @throws KettleException if a problem occurs
 */
public Object[] cassandraRowToKettleTupleMode(
    CassandraColumnMetaData metaData, CqlRow cassandraRow,
    Iterator<Column> cassandraColIter) throws KettleException {
  Object[] outputRowData = RowDataUtil
      .allocateRowData(m_outputRowMeta.size());
  Object key = metaData.getKeyValue(cassandraRow);
  if (key == null) {
    throw new KettleException("Unable to obtain a key value for the row!");
  }
  String keyName = metaData.getKeyName();
  int keyIndex = m_outputRowMeta.indexOfValue(keyName);
  if (keyIndex < 0) {
    throw new KettleException("Unable to find the key field name '" + keyName
        + "' in the output row meta data!");
  }
  outputRowData[keyIndex] = key;
  // advance the iterator to the next column
  if (cassandraColIter.hasNext()) {
    Column aCol = cassandraColIter.next();
    String colName = metaData.getColumnName(aCol);
    // skip the key column (only detectable by name when names are textual)
    if (colName.equals("KEY")) {
      if (cassandraColIter.hasNext()) {
        aCol = cassandraColIter.next();
        colName = metaData.getColumnName(aCol);
      } else {
        // run out of columns
        return null;
      }
    }
    // for queries that specify column names we need to check that the value
    // is not null in this row
    while (metaData.getColumnValue(aCol) == null) {
      if (cassandraColIter.hasNext()) {
        aCol = cassandraColIter.next();
        colName = metaData.getColumnName(aCol);
      } else {
        return null;
      }
    }
    // fixed tuple layout: [1] = column name, [2] = value, [3] = timestamp
    outputRowData[1] = colName;
    // do the value (stored as a string)
    Object colValue = metaData.getColumnValue(aCol);
    String stringV = colValue.toString();
    outputRowData[2] = stringV;
    if (colValue instanceof Date) {
      // format dates via Kettle's date value meta rather than toString()
      ValueMeta tempDateMeta = new ValueMeta("temp",
          ValueMetaInterface.TYPE_DATE);
      stringV = tempDateMeta.getString(colValue);
      outputRowData[2] = stringV;
    } else if (colValue instanceof byte[]) {
      // binary values are passed through untouched
      outputRowData[2] = colValue;
    }
    // the column's raw long timestamp (not a Date object)
    long timestampL = aCol.getTimestamp();
    outputRowData[3] = timestampL;
  } else {
    return null; // signify no more columns for this row...
  }
  return outputRowData;
}
/**
 * Converts a single cassandra row into a standard Kettle row: the row key
 * goes into the output field named after the key, and every column whose
 * name appears in the supplied field map is copied to its mapped index.
 * Columns not present in the map are silently dropped.
 *
 * @param metaData meta data on the cassandra column family being read from
 * @param cassandraRow a row from the column family
 * @param outputFormatMap a Map of output field names to indexes in the
 *          outgoing Kettle row structure
 * @return a Kettle row
 * @throws KettleException if a problem occurs
 */
public Object[] cassandraRowToKettle(CassandraColumnMetaData metaData,
    CqlRow cassandraRow, Map<String, Integer> outputFormatMap)
    throws KettleException {
  final Object rowKey = metaData.getKeyValue(cassandraRow);
  if (rowKey == null) {
    throw new KettleException("Unable to obtain a key value for the row!");
  }
  final String keyFieldName = metaData.getKeyName();
  final int keyFieldIndex = m_outputRowMeta.indexOfValue(keyFieldName);
  if (keyFieldIndex < 0) {
    throw new KettleException("Unable to find the key field name '"
        + keyFieldName + "' in the output row meta data!");
  }
  final Object[] result = RowDataUtil.allocateRowData(m_outputRowMeta.size());
  result[keyFieldIndex] = rowKey;
  // copy across each column that maps to an outgoing field
  for (final Column column : cassandraRow.getColumns()) {
    final Integer targetIndex = outputFormatMap.get(metaData
        .getColumnName(column));
    if (targetIndex == null) {
      continue; // column is not among the requested output fields
    }
    result[targetIndex.intValue()] = metaData.getColumnValue(column);
  }
  return result;
}
/**
 * Extract the column family name (table name) from a CQL SELECT query.
 * Assumes that any kettle variables have been already substituted in the
 * query.
 *
 * @param subQ the query with vars substituted
 * @return the column family name or null if the query is malformed
 */
public static String getColumnFamilyNameFromCQLSelectQuery(String subQ) {
  String result = null;
  if (Const.isEmpty(subQ)) {
    return null;
  }
  // assumes env variables already replaced in query!
  if (!subQ.toLowerCase().startsWith("select")) {
    // not a select statement!
    return null;
  }
  if (subQ.indexOf(';') < 0) {
    // query must end with a ';' or it will wait for more!
    return null;
  }
  // strip off where clause (if any)
  if (subQ.toLowerCase().lastIndexOf("where") > 0) {
    subQ = subQ.substring(0, subQ.toLowerCase().lastIndexOf("where"));
  }
  // determine the source column family: scan for a FROM that stands alone
  // as a keyword, i.e. is not embedded in a longer identifier.
  //
  // BUG FIX: the previous scan only skipped a candidate when BOTH of its
  // neighbours were non-space, so the "from" inside an identifier such as
  // "a_from" was wrongly accepted ("select a_from from cf;" returned "from"
  // instead of "cf"). A candidate is now skipped when EITHER neighbour is an
  // identifier character. Additionally, if indexOf() returns -1 mid-scan we
  // now return null instead of folding -1 into the offset arithmetic.
  String tempS = subQ.toLowerCase();
  int fromIndex = tempS.indexOf("from");
  int offset = fromIndex;
  while (fromIndex > 0
      && (isIdentifierChar(tempS.charAt(fromIndex - 1))
          || (fromIndex + 4 < tempS.length()
              && isIdentifierChar(tempS.charAt(fromIndex + 4))))) {
    tempS = tempS.substring(fromIndex + 4);
    fromIndex = tempS.indexOf("from");
    if (fromIndex < 0) {
      // no stand-alone FROM keyword anywhere in the query
      return null;
    }
    offset += (4 + fromIndex);
  }
  fromIndex = offset;
  if (fromIndex < 0) {
    return null; // no from clause
  }
  // everything after FROM, up to the first space or the ';', is the name
  result = subQ.substring(fromIndex + 4).trim();
  if (result.indexOf(' ') > 0) {
    result = result.substring(0, result.indexOf(' '));
  } else {
    result = result.replace(";", "");
  }
  if (result.length() == 0) {
    return null; // no column family specified
  }
  return result;
}

/**
 * Helper: true if the (lower-case) character can be part of a CQL
 * identifier, meaning an adjacent "from" match is embedded in a longer name.
 */
private static boolean isIdentifierChar(char c) {
  return Character.isLetterOrDigit(c) || c == '_';
}
/**
 * Compress a CQL query using java.util.zip's Deflater (DEFLATE algorithm).
 *
 * @param queryStr the CQL query to compress (encoded using the UTF-8
 *          charset named by CassandraColumnMetaData.UTF8)
 * @param compression compression option (GZIP is the only option - so far);
 *          note that this argument is not consulted by the implementation
 * @return an array of bytes containing the compressed query
 */
public static byte[] compressQuery(String queryStr, Compression compression) {
  Deflater deflater = new Deflater();
  deflater.setInput(queryStr.getBytes(Charset
      .forName(CassandraColumnMetaData.UTF8)));
  deflater.finish();
  ByteArrayOutputStream compressed = new ByteArrayOutputStream();
  byte[] chunk = new byte[1024];
  // drain the deflater into the output buffer, 1KB at a time
  while (!deflater.finished()) {
    int written = deflater.deflate(chunk);
    compressed.write(chunk, 0, written);
  }
  return compressed.toByteArray();
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
chensoul/learning-hadoop | https://github.com/chensoul/learning-hadoop/blob/196093e9658d2d936e8a9d6680232377f735fdfa/kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrainput/CassandraInputMeta.java | kettle-cassandra-plugin/src/org/pentaho/di/trans/steps/cassandrainput/CassandraInputMeta.java | /*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.cassandrainput;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.eclipse.swt.widgets.Shell;
import org.pentaho.cassandra.CassandraColumnMetaData;
import org.pentaho.cassandra.CassandraConnection;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Counter;
import org.pentaho.di.core.annotations.Step;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.w3c.dom.Node;
/**
* Class providing an input step for reading data from an Cassandra column
* family (table).
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision$
*/
@Step(id = "CassandraInput", image = "Cassandra.png", name = "Cassandra Input", description = "Reads data from a Cassandra table", categoryDescription = "Big Data")
public class CassandraInputMeta extends BaseStepMeta implements
StepMetaInterface {
protected static final Class<?> PKG = CassandraInputMeta.class;
/** The host to contact */
protected String m_cassandraHost = "localhost";
/** The port that cassandra is listening on */
protected String m_cassandraPort = "9160";
/** Username for authentication */
protected String m_username;
/** Password for authentication */
protected String m_password;
/** The keyspace (database) to use */
protected String m_cassandraKeyspace;
/** Whether to use GZIP compression of CQL queries */
protected boolean m_useCompression;
/** The select query to execute */
protected String m_cqlSelectQuery = "SELECT <fields> FROM <column family> WHERE <condition>;";
/** Output in tuple mode? */
protected boolean m_outputKeyValueTimestampTuples;
/** Use thrift IO for tuple mode? */
protected boolean m_useThriftIO = false;
/**
* Timeout (milliseconds) to use for socket connections - blank means use
* cluster default
*/
protected String m_socketTimeout = "";
// set based on parsed CQL
/**
* True if a select * is being done - this is important to know because rows
* from select * queries contain the key as the first column. Key is also
* available separately in the API (and we use this for retrieving the key).
* The column that contains the key in this case is not necessarily
* convertible using the default column validator because there is a separate
* key validator. So we need to be able to recognize the key when it appears
* as a column and skip it. Can't rely on it's name (KEY) since this is only
* easily detectable when the column names are strings.
*/
protected boolean m_isSelectStarQuery = false;
// these are set based on the parsed CQL when executing tuple mode using
// thrift
protected int m_rowLimit = -1; // no limit - otherwise we look for LIMIT in
// CQL
protected int m_colLimit = -1; // no limit - otherwise we look for FIRST N in
// CQL
// maximum number of rows or columns to pull over at one time via thrift
protected int m_rowBatchSize = 100;
protected int m_colBatchSize = 100;
protected List<String> m_specificCols;
/**
* Set the timeout (milliseconds) to use for socket comms
*
* @param t the timeout to use in milliseconds
*/
public void setSocketTimeout(String t) {
m_socketTimeout = t;
}
/**
* Get the timeout (milliseconds) to use for socket comms
*
* @return the timeout to use in milliseconds
*/
public String getSocketTimeout() {
return m_socketTimeout;
}
/**
* Set whether to use pure thrift IO for the <key,value> tuple mode.
*
* @param useThrift true if thrift IO is to be used
*/
public void setUseThriftIO(boolean useThrift) {
m_useThriftIO = useThrift;
}
/**
* Get whether to use pure thrift IO for the <key,value> tuple mode.
*
* @return true if thrift IO is to be used
*/
public boolean getUseThriftIO() {
return m_useThriftIO;
}
/**
* Set the cassandra node hostname to connect to
*
* @param host the host to connect to
*/
public void setCassandraHost(String host) {
m_cassandraHost = host;
}
/**
* Get the name of the cassandra node to connect to
*
* @return the name of the cassandra node to connect to
*/
public String getCassandraHost() {
return m_cassandraHost;
}
/**
* Set the port that cassandra is listening on
*
* @param port the port that cassandra is listening on
*/
public void setCassandraPort(String port) {
m_cassandraPort = port;
}
/**
* Get the port that cassandra is listening on
*
* @return the port that cassandra is listening on
*/
public String getCassandraPort() {
return m_cassandraPort;
}
/**
* Set the keyspace (db) to use
*
* @param keyspace the keyspace to use
*/
public void setCassandraKeyspace(String keyspace) {
m_cassandraKeyspace = keyspace;
}
/**
* Get the keyspace (db) to use
*
* @return the keyspace (db) to use
*/
public String getCassandraKeyspace() {
return m_cassandraKeyspace;
}
/**
* Set whether to compress (GZIP) CQL queries when transmitting them to the
* server
*
* @param c true if CQL queries are to be compressed
*/
public void setUseCompression(boolean c) {
m_useCompression = c;
}
/**
* Get whether CQL queries will be compressed (GZIP) or not
*
* @return true if CQL queries will be compressed when sending to the server
*/
public boolean getUseCompression() {
return m_useCompression;
}
/**
* Set the CQL SELECT query to execute.
*
* @param query the query to execute
*/
public void setCQLSelectQuery(String query) {
m_cqlSelectQuery = query;
}
/**
* Get the CQL SELECT query to execute
*
* @return the query to execute
*/
public String getCQLSelectQuery() {
return m_cqlSelectQuery;
}
/**
* Set the username to authenticate with
*
* @param un the username to authenticate with
*/
public void setUsername(String un) {
m_username = un;
}
/**
* Get the username to authenticate with
*
* @return the username to authenticate with
*/
public String getUsername() {
return m_username;
}
/**
* Set the password to authenticate with
*
* @param pass the password to authenticate with
*/
public void setPassword(String pass) {
m_password = pass;
}
/**
* Get the password to authenticate with
*
* @return the password to authenticate with
*/
public String getPassword() {
return m_password;
}
/**
* Set whether to output key, column, timestamp tuples as rows rather than
* standard row format.
*
* @param o true if tuples are to be output
*/
public void setOutputKeyValueTimestampTuples(boolean o) {
m_outputKeyValueTimestampTuples = o;
}
/**
* Get whether to output key, column, timestamp tuples as rows rather than
* standard row format.
*
* @return true if tuples are to be output
*/
public boolean getOutputKeyValueTimestampTuples() {
return m_outputKeyValueTimestampTuples;
}
/**
 * Produces the XML describing this step's configuration, for saving into a
 * transformation. Only non-empty settings are serialised; the password is
 * encrypted (unless it uses variables) before being written.
 *
 * @return the step settings as an XML fragment
 */
@Override
public String getXML() {
  // StringBuilder: this method is single-threaded, so StringBuffer's
  // synchronisation is unnecessary overhead
  StringBuilder retval = new StringBuilder();
  if (!Const.isEmpty(m_cassandraHost)) {
    retval.append("\n ").append(
        XMLHandler.addTagValue("cassandra_host", m_cassandraHost));
  }
  if (!Const.isEmpty(m_cassandraPort)) {
    retval.append("\n ").append(
        XMLHandler.addTagValue("cassandra_port", m_cassandraPort));
  }
  if (!Const.isEmpty(m_username)) {
    retval.append("\n ").append(
        XMLHandler.addTagValue("username", m_username));
  }
  if (!Const.isEmpty(m_password)) {
    retval.append("\n ").append(
        XMLHandler.addTagValue("password",
            Encr.encryptPasswordIfNotUsingVariables(m_password)));
  }
  if (!Const.isEmpty(m_cassandraKeyspace)) {
    retval.append("\n ").append(
        XMLHandler.addTagValue("cassandra_keyspace", m_cassandraKeyspace));
  }
  retval.append("\n ").append(
      XMLHandler.addTagValue("use_compression", m_useCompression));
  if (!Const.isEmpty(m_cqlSelectQuery)) {
    retval.append("\n ").append(
        XMLHandler.addTagValue("cql_select_query", m_cqlSelectQuery));
  }
  retval.append("\n ").append(
      XMLHandler.addTagValue("output_key_value_timestamp_tuples",
          m_outputKeyValueTimestampTuples));
  retval.append("\n ").append(
      XMLHandler.addTagValue("use_thrift_io", m_useThriftIO));
  if (!Const.isEmpty(m_socketTimeout)) {
    retval.append("\n ").append(
        XMLHandler.addTagValue("socket_timeout", m_socketTimeout));
  }
  return retval.toString();
}
/**
 * Restore this step's settings from its XML representation (the inverse of
 * {@code getXML()}). Missing tags leave the corresponding setting at its
 * null/false default; the stored password is decrypted if necessary.
 *
 * @param stepnode the XML node holding the step definition
 * @param databases shared databases of the transformation (unused here)
 * @param counters shared counters of the transformation (unused here)
 * @throws KettleXMLException if the XML cannot be processed
 */
public void loadXML(Node stepnode, List<DatabaseMeta> databases,
    Map<String, Counter> counters) throws KettleXMLException {
    m_cassandraHost = XMLHandler.getTagValue(stepnode, "cassandra_host");
    m_cassandraPort = XMLHandler.getTagValue(stepnode, "cassandra_port");
    m_username = XMLHandler.getTagValue(stepnode, "username");
    m_password = XMLHandler.getTagValue(stepnode, "password");
    if (!Const.isEmpty(m_password)) {
        m_password = Encr.decryptPasswordOptionallyEncrypted(m_password);
    }
    m_cassandraKeyspace = XMLHandler
        .getTagValue(stepnode, "cassandra_keyspace");
    m_cqlSelectQuery = XMLHandler.getTagValue(stepnode, "cql_select_query");
    // Bug fix: getTagValue() returns null when the tag is missing, and the
    // previous unconditional equalsIgnoreCase("Y") call threw a
    // NullPointerException on step definitions lacking <use_compression>.
    // A missing tag now means "no compression", mirroring the null-guarded
    // handling of the optional tags below.
    String compression = XMLHandler.getTagValue(stepnode, "use_compression");
    m_useCompression = compression != null && compression.equalsIgnoreCase("Y");
    String kV = XMLHandler.getTagValue(stepnode,
        "output_key_value_timestamp_tuples");
    if (kV != null) {
        m_outputKeyValueTimestampTuples = kV.equalsIgnoreCase("Y");
    }
    String thrift = XMLHandler.getTagValue(stepnode, "use_thrift_io");
    if (thrift != null) {
        m_useThriftIO = thrift.equalsIgnoreCase("Y");
    }
    m_socketTimeout = XMLHandler.getTagValue(stepnode, "socket_timeout");
}
/**
 * Restore this step's settings from a Kettle repository (the counterpart of
 * {@code saveRep}). The stored password is decrypted if necessary.
 *
 * @param rep the repository to read from
 * @param id_step the id of this step in the repository
 * @param databases shared databases of the transformation (unused here)
 * @param counters shared counters of the transformation (unused here)
 * @throws KettleException if reading from the repository fails
 */
public void readRep(Repository rep, ObjectId id_step,
    List<DatabaseMeta> databases, Map<String, Counter> counters)
    throws KettleException {
    m_cassandraHost = rep.getStepAttributeString(id_step, 0, "cassandra_host");
    m_cassandraPort = rep.getStepAttributeString(id_step, 0, "cassandra_port");
    m_username = rep.getStepAttributeString(id_step, 0, "username");
    m_password = rep.getStepAttributeString(id_step, 0, "password");
    if (!Const.isEmpty(m_password)) {
        // passwords are stored encrypted (see saveRep)
        m_password = Encr.decryptPasswordOptionallyEncrypted(m_password);
    }
    m_cassandraKeyspace = rep.getStepAttributeString(id_step, 0,
        "cassandra_keyspace");
    m_cqlSelectQuery = rep.getStepAttributeString(id_step, 0,
        "cql_select_query");
    m_useCompression = rep.getStepAttributeBoolean(id_step, 0,
        "use_compression");
    m_outputKeyValueTimestampTuples = rep.getStepAttributeBoolean(id_step, 0,
        "output_key_value_timestamp_tuples");
    m_useThriftIO = rep.getStepAttributeBoolean(id_step, 0, "use_thrift_io");
    m_socketTimeout = rep.getStepAttributeString(id_step, 0, "socket_timeout");
}
/**
 * Persist this step's settings to a Kettle repository (the counterpart of
 * {@code readRep}). Optional string settings are saved only when non-empty;
 * the password is encrypted unless it uses variables.
 *
 * @param rep the repository to write to
 * @param id_transformation the id of the owning transformation
 * @param id_step the id of this step in the repository
 * @throws KettleException if writing to the repository fails
 */
public void saveRep(Repository rep, ObjectId id_transformation,
    ObjectId id_step) throws KettleException {
    if (!Const.isEmpty(m_cassandraHost)) {
        rep.saveStepAttribute(id_transformation, id_step, 0, "cassandra_host",
            m_cassandraHost);
    }
    if (!Const.isEmpty(m_cassandraPort)) {
        rep.saveStepAttribute(id_transformation, id_step, 0, "cassandra_port",
            m_cassandraPort);
    }
    if (!Const.isEmpty(m_username)) {
        rep.saveStepAttribute(id_transformation, id_step, 0, "username",
            m_username);
    }
    if (!Const.isEmpty(m_password)) {
        // never persist the clear-text password
        rep.saveStepAttribute(id_transformation, id_step, 0, "password",
            Encr.encryptPasswordIfNotUsingVariables(m_password));
    }
    if (!Const.isEmpty(m_cassandraKeyspace)) {
        rep.saveStepAttribute(id_transformation, id_step, 0,
            "cassandra_keyspace", m_cassandraKeyspace);
    }
    rep.saveStepAttribute(id_transformation, id_step, 0, "use_compression",
        m_useCompression);
    if (!Const.isEmpty(m_cqlSelectQuery)) {
        rep.saveStepAttribute(id_transformation, id_step, 0, "cql_select_query",
            m_cqlSelectQuery);
    }
    rep.saveStepAttribute(id_transformation, id_step, 0,
        "output_key_value_timestamp_tuples", m_outputKeyValueTimestampTuples);
    rep.saveStepAttribute(id_transformation, id_step, 0, "use_thrift_io",
        m_useThriftIO);
    if (!Const.isEmpty(m_socketTimeout)) {
        rep.saveStepAttribute(id_transformation, id_step, 0, "socket_timeout",
            m_socketTimeout);
    }
}
/**
 * Validate this step's configuration and report issues via {@code remarks}.
 * Currently a no-op: no configuration checks are performed.
 */
public void check(List<CheckResultInterface> remarks, TransMeta transMeta,
    StepMeta stepMeta, RowMetaInterface prev, String[] input,
    String[] output, RowMetaInterface info) {
    // TODO Auto-generated method stub
}
/**
 * Create the runtime step instance paired with this meta class.
 *
 * @return a new {@code CassandraInput} step bound to the given transformation
 */
public StepInterface getStep(StepMeta stepMeta,
    StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans) {
    return new CassandraInput(stepMeta, stepDataInterface, copyNr, transMeta,
        trans);
}
/**
 * @return a new, empty data object used to hold this step's runtime state
 */
public StepDataInterface getStepData() {
    return new CassandraInputData();
}
/**
 * Set defaults for a newly created step: host {@code localhost}, port 9160,
 * a template CQL SELECT query, compression disabled and an empty socket
 * timeout.
 */
public void setDefault() {
    m_cassandraHost = "localhost";
    m_cassandraPort = "9160";
    m_cqlSelectQuery = "SELECT <fields> FROM <column family> WHERE <condition>;";
    m_useCompression = false;
    m_socketTimeout = "";
}
/**
 * Determine the output row layout for this step by parsing the configured CQL
 * SELECT query (after variable substitution) and then consulting the column
 * family's schema meta data over a live Cassandra connection. As side
 * effects, also caches the parsed row limit, column (FIRST) limit and the
 * list of specifically selected columns in the corresponding fields.
 * Parse or connection failures are logged and leave {@code rowMeta} empty
 * (or partially filled) rather than throwing.
 *
 * @param rowMeta receives the output fields; cleared first
 * @param origin the name of the originating step (unused here)
 * @param info incoming info row metas (unused here)
 * @param nextStep the next step (unused here)
 * @param space variable space used for environment substitution
 * @throws KettleStepException declared by the interface; not thrown here
 */
@Override
public void getFields(RowMetaInterface rowMeta, String origin,
    RowMetaInterface[] info, StepMeta nextStep, VariableSpace space)
    throws KettleStepException {
    m_specificCols = null;
    m_rowLimit = -1;
    m_colLimit = -1;
    rowMeta.clear(); // start afresh - eats the input
    if (Const.isEmpty(m_cassandraKeyspace)) {
        // no keyspace!
        return;
    }
    String colFamName = null;
    if (!Const.isEmpty(m_cqlSelectQuery)) {
        String subQ = space.environmentSubstitute(m_cqlSelectQuery);
        if (!subQ.toLowerCase().startsWith("select")) {
            // not a select statement!
            logError(BaseMessages.getString(PKG,
                "CassandraInput.Error.NoSelectInQuery"));
            return;
        }
        if (subQ.indexOf(';') < 0) {
            // query must end with a ';' or it will wait for more!
            logError(BaseMessages.getString(PKG,
                "CassandraInput.Error.QueryTermination"));
            return;
        }
        // is there a LIMIT clause?
        if (subQ.toLowerCase().indexOf("limit") > 0) {
            String limitS = subQ.toLowerCase()
                .substring(subQ.toLowerCase().indexOf("limit") + 5, subQ.length())
                .trim();
            limitS = limitS.replaceAll(";", "");
            try {
                m_rowLimit = Integer.parseInt(limitS);
            } catch (NumberFormatException ex) {
                logError(BaseMessages
                    .getString(PKG, "CassandraInput.Error.UnableToParseLimitClause",
                        m_cqlSelectQuery));
                // fall back to a default row limit rather than aborting
                m_rowLimit = 10000;
            }
        }
        // strip off where clause (if any)
        if (subQ.toLowerCase().lastIndexOf("where") > 0) {
            subQ = subQ.substring(0, subQ.toLowerCase().lastIndexOf("where"));
        }
        // first determine the source column family
        // look for a FROM that is surrounded by space
        int fromIndex = subQ.toLowerCase().indexOf("from");
        String tempS = subQ.toLowerCase();
        int offset = fromIndex;
        // NOTE(review): this loop skips "from" occurrences that are embedded in
        // identifiers (not space-delimited), accumulating the absolute position
        // in 'offset' - confirm it behaves when no space-delimited FROM exists.
        while (fromIndex > 0 && tempS.charAt(fromIndex - 1) != ' '
            && (fromIndex + 4 < tempS.length())
            && tempS.charAt(fromIndex + 4) != ' ') {
            tempS = tempS.substring(fromIndex + 4, tempS.length());
            fromIndex = tempS.indexOf("from");
            offset += (4 + fromIndex);
        }
        fromIndex = offset;
        if (fromIndex < 0) {
            logError(BaseMessages.getString(PKG,
                "CassandraInput.Error.MustSpecifyAColumnFamily"));
            return; // no from clause
        }
        // column family name is the first token after FROM
        colFamName = subQ.substring(fromIndex + 4, subQ.length()).trim();
        if (colFamName.indexOf(' ') > 0) {
            colFamName = colFamName.substring(0, colFamName.indexOf(' '));
        } else {
            colFamName = colFamName.replace(";", "");
        }
        if (colFamName.length() == 0) {
            return; // no column family specified
        }
        // is there a FIRST clause?
        if (subQ.toLowerCase().indexOf("first") > 0) {
            String firstS = subQ.toLowerCase()
                .substring(subQ.toLowerCase().indexOf("first") + 5, subQ.length())
                .trim();
            firstS = firstS.substring(0, firstS.indexOf(' '));
            try {
                m_colLimit = Integer.parseInt(firstS);
            } catch (NumberFormatException ex) {
                logError(BaseMessages
                    .getString(PKG, "CassandraInput.Error.UnableToParseFirstClause",
                        m_cqlSelectQuery));
                return;
            }
        }
        // now determine if its a select */FIRST or specific set of columns
        String[] cols = null;
        if (subQ.indexOf("*") > 0) {
            // nothing special to do here
            m_isSelectStarQuery = true;
        } else {
            m_isSelectStarQuery = false;
            // NOTE(review): indexOf('\'') returns -1 when the column list is
            // not single-quoted, which makes this substring() throw
            // StringIndexOutOfBoundsException - it looks as if it was meant to
            // skip past the SELECT keyword instead; verify against upstream.
            String colsS = subQ.substring(subQ.indexOf('\''), fromIndex);
            cols = colsS.split(",");
        }
        // try and connect to get meta data
        String hostS = space.environmentSubstitute(m_cassandraHost);
        String portS = space.environmentSubstitute(m_cassandraPort);
        String userS = m_username;
        String passS = m_password;
        if (!Const.isEmpty(userS) && !Const.isEmpty(passS)) {
            userS = space.environmentSubstitute(m_username);
            passS = space.environmentSubstitute(m_password);
        }
        String keyspaceS = space.environmentSubstitute(m_cassandraKeyspace);
        CassandraConnection conn = null;
        try {
            conn = CassandraInputData.getCassandraConnection(hostS,
                Integer.parseInt(portS), userS, passS);
            conn.setKeyspace(keyspaceS);
        } catch (Exception ex) {
            logError(ex.getMessage(), ex);
            return;
        }
        try {
            CassandraColumnMetaData colMeta = new CassandraColumnMetaData(conn,
                colFamName);
            // Do the key first
            ValueMetaInterface km = colMeta.getValueMetaForKey();
            rowMeta.addValueMeta(km);
            if (getOutputKeyValueTimestampTuples()) {
                // special case where user has asked for all row keys, columns and
                // timestamps output as separate rows.
                ValueMetaInterface vm = new ValueMeta("ColumnName",
                    ValueMetaInterface.TYPE_STRING);
                rowMeta.addValueMeta(vm);
                vm = null;
                // ColumnValue is binary if the default validator is BytesType,
                // otherwise a string
                String defaultColumnValidator = colMeta.getDefaultValidationClass();
                if (!Const.isEmpty(defaultColumnValidator)) {
                    if (defaultColumnValidator.indexOf('(') > 0) {
                        // NOTE(review): the guard tests for '(' but the
                        // substring truncates at ')', so a parameterized
                        // validator keeps "(args" in its name - confirm '('
                        // was not intended here.
                        defaultColumnValidator = defaultColumnValidator.substring(0,
                            defaultColumnValidator.indexOf(')'));
                    }
                    if (defaultColumnValidator.endsWith("BytesType")) {
                        vm = new ValueMeta("ColumnValue", ValueMeta.TYPE_BINARY);
                    }
                }
                if (vm == null) {
                    vm = new ValueMeta("ColumnValue", ValueMetaInterface.TYPE_STRING);
                }
                rowMeta.addValueMeta(vm);
                vm = new ValueMeta("Timestamp", ValueMetaInterface.TYPE_INTEGER);
                rowMeta.addValueMeta(vm);
                // the finally block below closes conn a second time;
                // presumably close() is idempotent - verify
                conn.close();
                // specific columns requested
                if (cols != null) {
                    m_specificCols = new ArrayList<String>();
                    for (String col : cols) {
                        col = cleanseColName(col);
                        m_specificCols.add(col);
                    }
                }
                return;
            }
            if (cols == null) {
                // select * - use all the columns that are defined in the schema
                List<ValueMetaInterface> vms = colMeta.getValueMetasForSchema();
                for (ValueMetaInterface vm : vms) {
                    rowMeta.addValueMeta(vm);
                }
            } else {
                m_specificCols = new ArrayList<String>();
                // do the individual columns
                for (String col : cols) {
                    col = cleanseColName(col);
                    if (!colMeta.columnExistsInSchema(col)) {
                        // this one isn't known about in about in the schema - we can
                        // output it
                        // as long as its values satisfy the default validator...
                        logBasic(BaseMessages.getString(PKG,
                            "CassandraInput.Info.DefaultColumnValidator", col));
                    }
                    ValueMetaInterface vm = colMeta.getValueMetaForColumn(col);
                    rowMeta.addValueMeta(vm);
                }
            }
        } catch (Exception ex) {
            logBasic(BaseMessages.getString(PKG,
                "CassandraInput.Info.UnableToRetrieveColumnMetaData", colFamName),
                ex);
            return;
        } finally {
            if (conn != null) {
                conn.close();
            }
        }
    }
}
/**
 * Normalize a column token taken from the CQL column list: surrounding
 * whitespace and any single or double quote characters are removed.
 *
 * @param col the raw column token
 * @return the cleansed column name
 */
private String cleanseColName(String col) {
    return col.trim().replace("'", "").replace("\"", "");
}
/**
 * Get the UI for this step (the Cassandra-input specific dialog).
 *
 * @param shell a <code>Shell</code> value
 * @param meta a <code>StepMetaInterface</code> value
 * @param transMeta a <code>TransMeta</code> value
 * @param name a <code>String</code> value
 * @return a <code>StepDialogInterface</code> value
 */
public StepDialogInterface getDialog(Shell shell, StepMetaInterface meta,
    TransMeta transMeta, String name) {
    return new CassandraInputDialog(shell, meta, transMeta, name);
}
}
| java | Apache-2.0 | 196093e9658d2d936e8a9d6680232377f735fdfa | 2026-01-05T02:41:02.465238Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/CSVMetadata.java | annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/CSVMetadata.java | package io.quarkiverse.operatorsdk.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Provides more information to generate more detailed OLM bundle manifests, in particular in the ClusterServiceVersion
* resource. This can be used to provide information that cannot be inferred automatically or override what is generated by
* default. Many of these fields map to CSV fields. See the OLM documentation on
* <a href="https://olm.operatorframework.io/docs/tasks/creating-operator-manifests/#basic-metadata-optional">operator
* manifests</a> for more details.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE })
@SuppressWarnings("unused")
public @interface CSVMetadata {
    /**
     * The name which should be used for the generated bundle. If not provided, the name is derived from the project's (maven or
     * gradle) name. This name can be used to assign reconcilers to the same bundle by creating a {@link SharedCSVMetadata}
     * implementation bearing a {@link CSVMetadata} annotation specifying the CSV metadata to be shared among reconcilers
     * assigned to that named bundle.
     *
     * @deprecated Use {@link #bundleName()} and {@link #csvName()} instead as previously this method was being used for both
     *             values resulting in confusion or even problems setting the correct CSV name as recommended by OLM. See
     *             <a href='https://github.com/quarkiverse/quarkus-operator-sdk/issues/738'>this issue</a> for a more detailed
     *             discussion.
     */
    @Deprecated
    String name() default "";

    /**
     * The name which should be used for the generated bundle. If not provided, the name is derived from the project's (maven or
     * gradle) name. This name can be used to assign reconcilers to the same bundle by creating a {@link SharedCSVMetadata}
     * implementation bearing a {@link CSVMetadata} annotation specifying the CSV metadata to be shared among reconcilers
     * assigned to that named bundle.
     */
    String bundleName() default "";

    /**
     * The name used in the CSV metadata stanza. If not provided, this will default to {@code <bundle name>.v<version>}
     * as recommended by OLM.
     */
    String csvName() default "";

    /**
     * Extra annotations that should be added to the CSV metadata.
     */
    Annotations annotations() default @Annotations;

    /**
     * Extra labels that should be added to the CSV metadata.
     */
    Label[] labels() default {};

    /** A description of the operator. */
    String description() default "";

    /** The human-readable display name for the operator. */
    String displayName() default "";

    /** The operator's icon(s); see {@link Icon}. */
    Icon[] icon() default {};

    // NOTE(review): "default \"\"" is the single-element array shorthand, so
    // the default here is {""} (an array containing one empty string), unlike
    // the empty-array defaults used for the other array-typed elements -
    // confirm this is intentional.
    String[] keywords() default "";

    /** The operator's maturity level. */
    String maturity() default "";

    /** The operator version. */
    String version() default "";

    /** The name of the CSV that this one replaces. */
    String replaces() default "";

    /** The operator's maintainers. */
    Maintainer[] maintainers() default {};

    /** The entity providing the operator. */
    Provider provider() default @Provider;

    /** The install modes supported by the operator. */
    InstallMode[] installModes() default {};

    /** Additional RBAC permission rules to include; see {@link PermissionRule}. */
    PermissionRule[] permissionRules() default {};

    /** CRDs that the operator requires but does not own. */
    RequiredCRD[] requiredCRDs() default {};

    /** The minimum Kubernetes version supported by the operator. */
    String minKubeVersion() default "";

    /** Links (documentation, source, etc.) associated with the operator. */
    Link[] links() default {};

    /** Well-known and arbitrary entries for the CSV's metadata annotations. */
    @interface Annotations {
        String containerImage() default "";

        String repository() default "";

        String capabilities() default "";

        String categories() default "";

        boolean certified() default false;

        String almExamples() default "";

        String skipRange() default "";

        /** Arbitrary additional annotations, as name/value pairs. */
        Annotation[] others() default {};

        /** A single name/value annotation entry. */
        @interface Annotation {
            String name();

            String value();
        }
    }

    /** A single name/value label entry added to the CSV metadata. */
    @interface Label {
        String name();

        String value();
    }

    /** An operator icon, referenced by file name and media type. */
    @interface Icon {
        String DEFAULT_MEDIA_TYPE = "image/svg+xml";

        String fileName();

        String mediatype() default DEFAULT_MEDIA_TYPE;
    }

    /** A maintainer of the operator. */
    @interface Maintainer {
        String email() default "";

        String name() default "";
    }

    /** The entity providing the operator. */
    @interface Provider {
        String name() default "";

        String url() default "";
    }

    /** An OLM install mode and whether it is supported. */
    @interface InstallMode {
        String type();

        boolean supported() default true;
    }

    /**
     * Additional RBAC rules that need to be provided because they cannot be inferred automatically. Note that RBAC rules added
     * to your reconciler via {@link RBACRule} should already be handled automatically, under the service account name
     * associated with your Reconciler so this annotation should only be used to add additional rules to other service accounts
     * or for rules that you don't want to appear in the generated Kubernetes manifests.
     */
    @interface PermissionRule {
        String[] apiGroups();

        String[] resources();

        String[] verbs() default { "get", "list", "watch", "create", "delete", "patch", "update" };

        /**
         * @return the service account name to which the permission rule will be assigned. If not provided, the default service
         *         account name as defined for your operator will be used. Note that for the rule to be effectively added to the
         *         CSV, a service account with that name <em>must</em> exist in the generated kubernetes manifests as this is
         *         the base upon which the bundle generator works. This means that if you add a rule that targets a service
         *         account that is not present in the generated manifest, then this rule won't appear in the generated CSV.
         */
        String serviceAccountName() default "";
    }

    /** A CRD required by the operator, identified by kind, name and version. */
    @interface RequiredCRD {
        String kind();

        String name();

        String version();
    }

    /** A named URL associated with the operator. */
    @interface Link {
        String url();

        String name();
    }

    /**
     * Whether the associated resource is explicitly marked as optional, i.e. the operator can work without it being present on
     * the cluster. This will have the effect of not requiring the associated resource in the generated OLM bundle.
     *
     * @since 7.3.0
     */
    @interface Optional {
        boolean value() default true;
    }
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/SharedCSVMetadata.java | annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/SharedCSVMetadata.java | package io.quarkiverse.operatorsdk.annotations;
/**
 * A marker interface used to identify classes bearing {@link CSVMetadata} that needs to be shared across reconcilers using the
 * same {@link CSVMetadata#bundleName()} attribute. Note that sharing metadata without using {@link SharedCSVMetadata} is not
 * allowed.
 *
 * @see CSVMetadata
 */
public interface SharedCSVMetadata {
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/RBACRule.java | annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/RBACRule.java | package io.quarkiverse.operatorsdk.annotations;
import java.lang.annotation.*;
/**
 * Declares an additional RBAC policy rule associated with the annotated class.
 * The elements mirror the fields of a Kubernetes RBAC {@code PolicyRule}.
 * Repeatable; multiple occurrences are collected in {@link AdditionalRBACRules}.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE })
@Repeatable(AdditionalRBACRules.class)
@SuppressWarnings("unused")
public @interface RBACRule {
    /**
     * Represents a wildcard string that matches any RBAC-related value (verb, resource, etc…).
     */
    String ALL = "*";

    /** The API groups the rule applies to. */
    String[] apiGroups() default {};

    /** The verbs granted by the rule (see {@link RBACVerbs} for common values). */
    String[] verbs();

    /** The resources the rule applies to. */
    String[] resources() default {};

    /** Specific resource names the rule is restricted to, if any. */
    String[] resourceNames() default {};

    /** Non-resource URLs the rule applies to, if any. */
    String[] nonResourceURLs() default {};
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/AdditionalRBACRules.java | annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/AdditionalRBACRules.java | package io.quarkiverse.operatorsdk.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Container annotation that makes {@link RBACRule} repeatable. Not normally
 * used directly: apply multiple {@code @RBACRule} annotations instead.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE })
@SuppressWarnings("unused")
public @interface AdditionalRBACRules {
    /** The collected {@link RBACRule} occurrences. */
    RBACRule[] value();
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/RBACRoleRef.java | annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/RBACRoleRef.java | package io.quarkiverse.operatorsdk.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Repeatable;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * References an existing Role or ClusterRole by kind and name.
 * NOTE(review): presumably the reference is bound to the operator's service
 * account when manifests are generated - confirm against the extension's
 * manifest generator. Repeatable via {@link AdditionalRBACRoleRefs}.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE })
@Repeatable(AdditionalRBACRoleRefs.class)
public @interface RBACRoleRef {
    /** The API group of Kubernetes RBAC resources. */
    String RBAC_API_GROUP = "rbac.authorization.k8s.io";

    /** The kind of role being referenced. */
    RoleKind kind() default RoleKind.Role;

    /** The name of the referenced role. */
    String name() default "";

    /** The possible kinds of referenced roles. */
    enum RoleKind {
        ClusterRole,
        Role
    }
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/AdditionalRBACRoleRefs.java | annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/AdditionalRBACRoleRefs.java | package io.quarkiverse.operatorsdk.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Container annotation that makes {@link RBACRoleRef} repeatable. Not normally
 * used directly: apply multiple {@code @RBACRoleRef} annotations instead.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE })
@SuppressWarnings("unused")
public @interface AdditionalRBACRoleRefs {
    /** The collected {@link RBACRoleRef} occurrences. */
    RBACRoleRef[] value();
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/RBACVerbs.java | annotations/src/main/java/io/quarkiverse/operatorsdk/annotations/RBACVerbs.java | package io.quarkiverse.operatorsdk.annotations;
import java.util.ArrayList;
import java.util.Arrays;
/**
 * String constants for the standard Kubernetes RBAC verbs, together with
 * commonly used groupings of them.
 */
public class RBACVerbs {
    public static final String CREATE = "create";
    public static final String PATCH = "patch";
    public static final String UPDATE = "update";
    public static final String GET = "get";
    public static final String LIST = "list";
    public static final String WATCH = "watch";
    public static final String DELETE = "delete";

    /** Verbs needed to modify an existing resource. */
    public static final String[] UPDATE_VERBS = new String[] { PATCH, UPDATE };
    /** Verbs needed to observe resources. */
    public static final String[] READ_VERBS = new String[] { GET, LIST, WATCH };
    /** Read verbs, then update verbs, then create and delete. */
    public static final String[] ALL_COMMON_VERBS;

    static {
        // Assemble the combined array directly instead of going through a List.
        final String[] all = Arrays.copyOf(READ_VERBS,
                READ_VERBS.length + UPDATE_VERBS.length + 2);
        System.arraycopy(UPDATE_VERBS, 0, all, READ_VERBS.length, UPDATE_VERBS.length);
        all[all.length - 2] = CREATE;
        all[all.length - 1] = DELETE;
        ALL_COMMON_VERBS = all;
    }

    private RBACVerbs() {
        // utility class: no instances
    }
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/test/java/io/quarkiverse/operatorsdk/it/cdi/ConditionsCDITest.java | integration-tests/cdi/src/test/java/io/quarkiverse/operatorsdk/it/cdi/ConditionsCDITest.java | package io.quarkiverse.operatorsdk.it.cdi;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.time.Duration;
import jakarta.inject.Inject;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.ObjectMetaBuilder;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.dependent.DependentResource;
import io.quarkus.test.junit.QuarkusTest;
/**
* Verify that the Condition subclasses are properly recognized as CDI beans.
* </p>
* The test sets up a CR with two dependents - Deployment and ConfigMap. Deployment
* dependent sets the readyPostcondition, activationCondition, and reconcilePrecondition conditions
* which are all evaluated when the CR and thus the Deployment dependent are created. The
* ConfigMap then defines only the deletePostcondition condition, but it depends on the
* Deployment dependent which is evaluated when the CR and the Deployment is deleted.
* </p>
* All conditions are defined as CDI beans and inject an instance of {@link TestUUIDBean}. When their
* {@link io.javaoperatorsdk.operator.processing.dependent.workflow.Condition#isMet(DependentResource, HasMetadata, Context)}
* method is invoked, they save the random UUID produced by the bean to be verified in this test.
* </p>
* The test creates a new CR and verifies that all of readyPostcondition, activationCondition,
* and reconcilePrecondition conditions correctly injected the CDI bean. Then it deletes the CR
* to check the last deletePostcondition in the same manner.
*/
@QuarkusTest
class ConditionsCDITest {
    // Maximum time to wait for the operator to evaluate the conditions.
    private static final Duration timeout = Duration.ofSeconds(30);
    @Inject
    KubernetesClient kubernetesClient;
    @Inject
    TestUUIDBean testUUIDBean;
    @Inject
    ReadyPostCondition readyPostCondition;
    @Inject
    CustomActionvationCondition customActionvationCondition;
    @Inject
    ReconcilePrecondition reconcilePrecondition;
    @Inject
    DeletePostCondition deletePostCondition;

    @AfterEach
    void cleanup() {
        // Remove the test CRD so every test starts from a clean cluster state.
        kubernetesClient.apiextensions().v1().customResourceDefinitions().withName("testresources.josdk.quarkiverse.io")
                .delete();
    }

    @Test
    void conditionsInjectionsTest() {
        // If the conditions were created as CDI beans, the UUID each of them
        // captured from the shared TestUUIDBean must match the one seen here.
        String expectedUUID = testUUIDBean.uuid();
        TestResource testResource = createTestResource();
        // Creating the CR triggers reconciliation and thus evaluation of the
        // activation, reconcile-pre and ready-post conditions.
        kubernetesClient.resource(testResource).create();
        // All conditions should be ready after ReadyPostCondition is evaluated, so waiting for it is enough
        await().atMost(timeout)
                .untilAsserted(() -> assertNotNull(readyPostCondition.getUuid()));
        assertEquals(expectedUUID, readyPostCondition.getUuid(), "ReadyPostCondition injection not processed");
        assertEquals(expectedUUID, customActionvationCondition.getUuid(), "CustomActivationCondition injection not processed");
        assertEquals(expectedUUID, reconcilePrecondition.getUuid(), "ReconcilePrecondition injection not processed");
        // Deleting the CR triggers cleanup, which evaluates the delete
        // post-condition of the dependent ConfigMap.
        kubernetesClient.resource(testResource).delete();
        await().atMost(timeout)
                .untilAsserted(() -> assertEquals(expectedUUID, deletePostCondition.getUuid(),
                        "DeletePostCondition injection not processed"));
    }

    // Builds the minimal custom resource used to drive the reconciler.
    private static TestResource createTestResource() {
        var tr = new TestResource();
        tr.setMetadata(new ObjectMetaBuilder()
                .withName("test-resource-sample").build());
        return tr;
    }
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/ReconcilePrecondition.java | integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/ReconcilePrecondition.java | package io.quarkiverse.operatorsdk.it.cdi;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.dependent.DependentResource;
import io.javaoperatorsdk.operator.processing.dependent.workflow.Condition;
import io.quarkus.arc.Unremovable;
/**
 * Reconcile precondition used by the CDI integration test: it always allows
 * reconciliation to proceed, but records the UUID obtained from the injected
 * {@link TestUUIDBean} so the test can verify that this condition was
 * instantiated as a CDI bean.
 */
@ApplicationScoped
@Unremovable
public class ReconcilePrecondition implements Condition {

    @Inject
    TestUUIDBean testUUIDBean;

    // UUID captured on the last isMet() invocation; null until then.
    private String uuid;

    /** Always met; capturing the bean's UUID is the interesting side effect. */
    @Override
    public boolean isMet(DependentResource dependentResource, HasMetadata primary, Context context) {
        final String seen = testUUIDBean.uuid();
        this.uuid = seen;
        return true;
    }

    /** @return the UUID recorded by the last {@code isMet} call, or null if never invoked */
    public String getUuid() {
        return this.uuid;
    }
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/DeletePostCondition.java | integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/DeletePostCondition.java | package io.quarkiverse.operatorsdk.it.cdi;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.dependent.DependentResource;
import io.javaoperatorsdk.operator.processing.dependent.workflow.Condition;
import io.quarkus.arc.Unremovable;
/**
 * Delete post-condition used by the CDI integration test: always met, but
 * records the UUID obtained from the injected {@link TestUUIDBean} so the
 * test can verify that this condition was instantiated as a CDI bean.
 */
@ApplicationScoped
@Unremovable
public class DeletePostCondition implements Condition {
    @Inject
    TestUUIDBean testUUIDBean;
    // UUID captured on the last isMet() invocation; null until then.
    private String uuid;

    @Override
    public boolean isMet(DependentResource dependentResource, HasMetadata primary, Context context) {
        this.uuid = testUUIDBean.uuid();
        return true;
    }

    /** @return the UUID recorded by the last {@code isMet} call, or null if never invoked */
    public String getUuid() {
        return uuid;
    }
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/TestResource.java | integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/TestResource.java | package io.quarkiverse.operatorsdk.it.cdi;
import io.fabric8.kubernetes.client.CustomResource;
import io.fabric8.kubernetes.model.annotation.Group;
import io.fabric8.kubernetes.model.annotation.ShortNames;
import io.fabric8.kubernetes.model.annotation.Version;
/**
 * Minimal custom resource (no spec, no status) used as the primary resource
 * for the CDI conditions integration test.
 */
@Group("josdk.quarkiverse.io")
@Version("v1alpha1")
@ShortNames("tr")
public class TestResource extends CustomResource<Void, Void> {
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/CustomActionvationCondition.java | integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/CustomActionvationCondition.java | package io.quarkiverse.operatorsdk.it.cdi;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.dependent.DependentResource;
import io.javaoperatorsdk.operator.processing.dependent.workflow.Condition;
import io.quarkus.arc.Unremovable;
/**
 * Activation condition used by the CDI integration test: always met, but
 * records the UUID obtained from the injected {@link TestUUIDBean} so the
 * test can verify that this condition was instantiated as a CDI bean.
 * NOTE(review): the class name contains a typo ("Actionvation"); it is
 * referenced from TestReconciler and ConditionsCDITest, so renaming would
 * require touching all three files.
 */
@ApplicationScoped
@Unremovable
public class CustomActionvationCondition implements Condition {
    @Inject
    TestUUIDBean testUUIDBean;
    // UUID captured on the last isMet() invocation; null until then.
    private String uuid;

    @Override
    public boolean isMet(DependentResource dependentResource, HasMetadata primary, Context context) {
        uuid = testUUIDBean.uuid();
        return true;
    }

    /** @return the UUID recorded by the last {@code isMet} call, or null if never invoked */
    public String getUuid() {
        return uuid;
    }
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/ReadyPostCondition.java | integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/ReadyPostCondition.java | package io.quarkiverse.operatorsdk.it.cdi;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.dependent.DependentResource;
import io.javaoperatorsdk.operator.processing.dependent.workflow.Condition;
import io.quarkus.arc.Unremovable;
/**
 * Ready post-condition for the Deployment dependent: the dependent is
 * considered ready once the deployment's desired replica count equals 1.
 * Also records the UUID obtained from the injected {@link TestUUIDBean} so
 * the test can verify that this condition was instantiated as a CDI bean.
 */
@ApplicationScoped
@Unremovable
public class ReadyPostCondition implements Condition<Deployment, TestResource> {
    @Inject
    TestUUIDBean testUUIDBean;
    // UUID captured on the last isMet() invocation; null until then.
    private String uuid;

    @Override
    public boolean isMet(DependentResource<Deployment, TestResource> dependentResource, TestResource primary,
            Context<TestResource> context) {
        uuid = testUUIDBean.uuid();
        return dependentResource
                .getSecondaryResource(primary, context)
                // Compare with the constant on the left: getReplicas() returns
                // a (possibly null) Integer, and the previous
                // getReplicas().equals(1) call threw a NullPointerException
                // inside the workflow instead of reporting "not ready".
                .map(deployment -> Integer.valueOf(1).equals(deployment.getSpec().getReplicas()))
                .orElse(false);
    }

    /** @return the UUID recorded by the last {@code isMet} call, or null if never invoked */
    public String getUuid() {
        return uuid;
    }
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/TestReconciler.java | integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/TestReconciler.java | package io.quarkiverse.operatorsdk.it.cdi;
import io.javaoperatorsdk.operator.api.reconciler.Cleaner;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.DeleteControl;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
import io.javaoperatorsdk.operator.api.reconciler.Workflow;
import io.javaoperatorsdk.operator.api.reconciler.dependent.Dependent;
@Workflow(dependents = {
@Dependent(name = TestReconciler.DEPLOYMENT, type = DeploymentDependent.class, activationCondition = CustomActionvationCondition.class, readyPostcondition = ReadyPostCondition.class, reconcilePrecondition = ReconcilePrecondition.class),
@Dependent(type = ConfigMapDependent.class, deletePostcondition = DeletePostCondition.class, dependsOn = TestReconciler.DEPLOYMENT)
})
@ControllerConfiguration
public class TestReconciler implements Reconciler<TestResource>, Cleaner<TestResource> {
public static final String DEPLOYMENT = "deployment";
@Override
public UpdateControl<TestResource> reconcile(TestResource resource, Context<TestResource> context) throws Exception {
return UpdateControl.noUpdate();
}
@Override
public DeleteControl cleanup(TestResource resource, Context<TestResource> context) throws Exception {
return DeleteControl.defaultDelete();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/TestUUIDBean.java | integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/TestUUIDBean.java | package io.quarkiverse.operatorsdk.it.cdi;
import java.util.UUID;
import jakarta.enterprise.context.ApplicationScoped;
@ApplicationScoped
public class TestUUIDBean {
private final String id = UUID.randomUUID().toString();
public String uuid() {
return id;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/ConfigMapDependent.java | integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/ConfigMapDependent.java | package io.quarkiverse.operatorsdk.it.cdi;
import java.util.Map;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.fabric8.kubernetes.api.model.ConfigMap;
import io.fabric8.kubernetes.api.model.ObjectMetaBuilder;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.processing.dependent.kubernetes.CRUDKubernetesDependentResource;
import io.javaoperatorsdk.operator.processing.event.ResourceID;
public class ConfigMapDependent extends CRUDKubernetesDependentResource<ConfigMap, TestResource> {
private static final Logger log = LoggerFactory.getLogger(ConfigMapDependent.class);
@Override
protected ConfigMap desired(TestResource primary, Context<TestResource> context) {
Optional<ConfigMap> optionalConfigMap = getConfigMap(primary, context);
if (!optionalConfigMap.isPresent()) {
ConfigMap configMap = new ConfigMap();
configMap.setMetadata(
new ObjectMetaBuilder()
.withName(primary.getMetadata().getName() + "-cm")
.withNamespace(primary.getMetadata().getNamespace())
.build());
configMap.setData(Map.of("key", "data"));
return configMap;
}
return optionalConfigMap.get();
}
@Override
public void delete(TestResource primary, Context<TestResource> context) {
Optional<ConfigMap> optionalConfigMap = getConfigMap(primary, context);
optionalConfigMap.ifPresent(
(configMap -> {
if (configMap.getMetadata().getAnnotations() != null) {
context.getClient().resource(configMap).delete();
}
}));
}
private static Optional<ConfigMap> getConfigMap(TestResource primary, Context<TestResource> context) {
Optional<ConfigMap> optionalConfigMap = context.getSecondaryResource(ConfigMap.class);
if (optionalConfigMap.isEmpty()) {
log.debug("Config Map not found for primary: {}", ResourceID.fromResource(primary));
return Optional.empty();
}
return optionalConfigMap;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/DeploymentDependent.java | integration-tests/cdi/src/main/java/io/quarkiverse/operatorsdk/it/cdi/DeploymentDependent.java | package io.quarkiverse.operatorsdk.it.cdi;
import java.util.Map;
import org.jboss.logging.Logger;
import io.fabric8.kubernetes.api.model.ObjectMetaBuilder;
import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.processing.dependent.kubernetes.CRUDNoGCKubernetesDependentResource;
public class DeploymentDependent extends CRUDNoGCKubernetesDependentResource<Deployment, TestResource> {
private static final Logger LOG = Logger.getLogger(DeploymentDependent.class);
@Override
protected Deployment desired(TestResource primary, Context<TestResource> context) {
return new DeploymentBuilder()
.withMetadata(new ObjectMetaBuilder()
.withName(primary.getMetadata().getName() + "-deployment")
.withNamespace("default")
.withLabels(Map.of("app", primary.getMetadata().getName()))
.build())
.withNewSpec()
.withNewSelector().withMatchLabels(Map.of("app", primary.getMetadata().getName())).endSelector()
.withNewTemplate().withNewMetadata().withLabels(Map.of("app", primary.getMetadata().getName())).endMetadata()
.withNewSpec().addNewContainer()
.withName("nginx").withImage("nginx:1.14.2")
.addNewPort().withName("http").withProtocol("TCP").withContainerPort(8080).endPort()
.endContainer()
.endSpec()
.endTemplate()
.endSpec()
.build();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/SetOperatorLevelNamespacesTest.java | integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/SetOperatorLevelNamespacesTest.java | package io.quarkiverse.operatorsdk.it;
import static io.restassured.RestAssured.given;
import static org.hamcrest.Matchers.*;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Test;
import io.quarkus.test.junit.QuarkusTest;
import io.quarkus.test.junit.TestProfile;
@QuarkusTest
@TestProfile(SetOperatorLevelNamespacesTestProfile.class)
public class SetOperatorLevelNamespacesTest {
@Test
void configurationForControllerShouldExistAndUseOperatorLevelConfigurationWhenSet() {
// check that the config for the test controller can be retrieved and is conform to our
// expectations
final var resourceName = io.quarkiverse.operatorsdk.it.Test.class.getCanonicalName();
given()
.when()
.get("/operator/" + TestReconciler.NAME + "/config")
.then()
.statusCode(200)
.body(
"customResourceClass", equalTo(resourceName),
"name", equalTo(TestReconciler.NAME),
"watchCurrentNamespace", is(false),
"namespaces", hasSize(1),
"namespaces", hasItem("operator-level"), // namespace is set at the operator level by the TestProfile, so the namespace value should match what was set there
"retry.maxAttempts", equalTo(1),
"generationAware", equalTo(false),
"maxReconciliationIntervalSeconds", equalTo(TestReconciler.INTERVAL));
}
@Test
void dependentAnnotationsShouldAppearInConfiguration() {
given()
.when()
.get("/operator/" + DependentDefiningReconciler.NAME + "/config")
.then()
.statusCode(200).body(
"watchCurrentNamespace", Matchers.equalTo(false),
"namespaces", hasSize(1),
"namespaces", hasItem("operator-level"),
"dependents", hasSize(2),
"dependents.dependentClass",
hasItems(ReadOnlyDependentResource.class.getCanonicalName(),
CRUDDependentResource.class.getCanonicalName()),
"dependents.dependentConfig.labelSelector",
hasItems(ReadOnlyDependentResource.LABEL_SELECTOR, CRUDDependentResource.LABEL_SELECTOR),
"dependents.dependentConfig.onAddFilter",
hasItem(CRUDDependentResource.TestOnAddFilter.class.getCanonicalName()));
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/SetOperatorLevelNamespacesTestProfile.java | integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/SetOperatorLevelNamespacesTestProfile.java | package io.quarkiverse.operatorsdk.it;
import java.util.Collections;
import java.util.Map;
import io.quarkus.test.junit.QuarkusTestProfile;
public class SetOperatorLevelNamespacesTestProfile implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
return Collections.singletonMap("quarkus.operator-sdk.namespaces", "operator-level");
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/CustomKubernetesServerTestResource.java | integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/CustomKubernetesServerTestResource.java | package io.quarkiverse.operatorsdk.it;
import io.quarkus.test.kubernetes.client.KubernetesServerTestResource;
public class CustomKubernetesServerTestResource extends KubernetesServerTestResource {
@Override
protected void configureServer() {
super.configureServer();
server.expect().get().withPath("/version")
.andReturn(200, "{\"major\": \"13\", \"minor\": \"37\"}").always();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/OperatorSDKResourceTest.java | integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/OperatorSDKResourceTest.java | package io.quarkiverse.operatorsdk.it;
import static io.restassured.RestAssured.given;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import java.time.Duration;
import org.junit.jupiter.api.Test;
import io.javaoperatorsdk.operator.ReconcilerUtils;
import io.quarkus.test.common.WithTestResource;
import io.quarkus.test.junit.DisabledOnIntegrationTest;
import io.quarkus.test.junit.QuarkusTest;
/**
* This test will only pass in IDEs if you set your runner to set env properties as follow:
*
* <ul>
* <li>{@link NamespaceFromEnvReconciler#ENV_VAR_NAME} = {@link NamespaceFromEnvReconciler#FROM_ENV_VAR_NS}</li>
* <li>QUARKUS_OPERATOR_SDK_CONTROLLERS_{@link EmptyReconciler#NAME}.toUpperCase()_NAMESPACES =
* {@link EmptyReconciler#FROM_ENV_NS1} + ", " + {@link EmptyReconciler#FROM_ENV_NS2}</li>
* <li>{@link VariableNSReconciler#ENV_VAR_NAME} = {@link VariableNSReconciler#EXPECTED_NS_VALUE}</li>
* <li>QUARKUS_OPERATOR_SDK_CONTROLLERS_ReconcilerUtils.getDefaultNameFor(KeycloakController.class).toUpperCase()_NAMESPACES =
* {@link KeycloakController#FROM_ENV}</li>
* </ul>
*
* See also {@code maven-surefire-plugin} configuration where these same environment variables are set
*/
@QuarkusTest
@WithTestResource(CustomKubernetesServerTestResource.class)
class OperatorSDKResourceTest {
@Test
void shouldNotValidateCRs() {
given().when().get("/operator/config").then().statusCode(200).body(
"validate", equalTo(false));
}
@Test
@DisabledOnIntegrationTest("Skipped because native tests are run using LaunchMode.NORMAL")
void shouldApplyCRDsByDefaultInTestMode() {
given().when().get("/operator/config").then().statusCode(200).body(
"applyCRDs", equalTo(true));
}
@Test
void shouldHavePropertiesDefinedReconciliationThreads() {
given().when().get("/operator/config").then().statusCode(200).body(
"maxThreads", equalTo(10));
}
@Test
void shouldHavePropertiesDefinedTerminationTimeout() {
given().when().get("/operator/config").then().statusCode(200).body(
"timeout", equalTo(20));
}
@Test
void shouldHaveCustomMetricsImplementationIfDefined() {
given().when().get("/operator/config").then().statusCode(200).body(
"registryBound", equalTo(true));
}
@Test
void shouldOnlyHaveLeaderElectionActivatedInRequestedModes() {
given().when().get("/operator/config").then().statusCode(200).body(
"leaderConfig", equalTo(TestLeaderElectionConfiguration.class.getName()));
}
@Test
void shouldBeAbleToConfigureSSASupportFromProperties() {
given().when().get("/operator/config").then().statusCode(200).body(
"useSSA", equalTo(false));
}
@Test
void controllerShouldExist() {
// first check that we're not always returning true for any controller name :)
given().when().get("/operator/does_not_exist").then().statusCode(200).body(is("false"));
// given the name of the TestController, the app should reply true meaning that it is indeed
// injected
given().when().get("/operator/" + TestReconciler.NAME).then().statusCode(200)
.body(is("true"));
}
@Test
void allControllersShouldHaveAssociatedConfiguration() {
final var names = given().when().get("/operator/controllers").then()
.statusCode(200)
.contentType("application/json")
.extract()
.as(String[].class);
assertThat(names, arrayContainingInAnyOrder(ApplicationScopedReconciler.NAME,
ConfiguredReconciler.NAME,
TestReconciler.NAME,
ReconcilerUtils.getDefaultNameFor(GatewayReconciler.class),
DependentDefiningReconciler.NAME, NamespaceFromEnvReconciler.NAME,
EmptyReconciler.NAME, VariableNSReconciler.NAME,
AnnotatedDependentReconciler.NAME,
ReconcilerUtils.getDefaultNameFor(KeycloakController.class),
NameWithSpaceReconciler.NAME,
CustomRateLimiterReconciler.NAME,
SecretReconciler.NAME));
}
@Test
void configurationForControllerShouldExistAndUseBuildTimeOperatorLevelConfigurationWhenSet() {
// check that the config for the test controller can be retrieved and is conform to our
// expectations
final var resourceName = io.quarkiverse.operatorsdk.it.Test.class.getCanonicalName();
given()
.when()
.get("/operator/" + TestReconciler.NAME + "/config")
.then()
.statusCode(200)
.body(
"customResourceClass", equalTo(resourceName),
"name", equalTo(TestReconciler.NAME),
"watchCurrentNamespace", equalTo(false),
// build time values are propagated at runtime if no runtime value is specified
"namespaces", hasSize(2),
"namespaces", hasItem("builtime-namespace1"),
"namespaces", hasItem("buildtime-ns2"),
"retry.maxAttempts", equalTo(1), // should use property even if no annotation exists
"generationAware", equalTo(false),
"maxReconciliationIntervalSeconds", equalTo(TestReconciler.INTERVAL));
}
@Test
void applicationPropertiesShouldOverrideDefaultAndAnnotation() {
given()
.when()
.get("/operator/" + ConfiguredReconciler.NAME + "/config")
.then()
.statusCode(200)
.body(
"finalizer", equalTo("from-property/finalizer"),
"namespaces", hasItem("bar"),
"retry.maxAttempts", equalTo(ConfiguredReconciler.MAX_ATTEMPTS), // from annotation
"retry.initialInterval", equalTo(20000), // annotation value should be overridden by property
"rateLimiter.refreshPeriod", equalTo(60F), // for some reason the period is reported as a float
"labelSelector", equalTo("environment=production,tier!=frontend"));
given()
.when()
.get("/operator/" + ApplicationScopedReconciler.NAME + "/config")
.then()
.statusCode(200)
.body("namespaces", hasItem("default"));
given()
.when()
.get("/operator/" + NameWithSpaceReconciler.NAME + "/config")
.then()
.statusCode(200)
.body("namespaces", hasItem("name-with-space"));
}
@Test
void dependentAnnotationsShouldAppearInConfiguration() {
given()
.when()
.get("/operator/" + DependentDefiningReconciler.NAME + "/config")
.then()
.statusCode(200).body(
"watchCurrentNamespace", equalTo(false),
"namespaces", hasSize(1),
"namespaces", hasItem("operator-level-for-manifests"),
"dependents", hasSize(2),
"dependents.dependentClass",
hasItems(ReadOnlyDependentResource.class.getCanonicalName(),
CRUDDependentResource.class.getCanonicalName()),
"dependents.dependentConfig.labelSelector",
hasItems(ReadOnlyDependentResource.LABEL_SELECTOR, CRUDDependentResource.LABEL_SELECTOR),
"dependents.dependentConfig.onAddFilter",
hasItem(CRUDDependentResource.TestOnAddFilter.class.getCanonicalName()));
}
@Test
void workflowShouldBeRetrievable() {
given()
.when()
.get("/operator/" + DependentDefiningReconciler.NAME + "/workflow")
.then()
.statusCode(200).body(
"cleaner", is(false),
"empty", is(false),
"dependents." + ReadOnlyDependentResource.NAME + ".type",
startsWith(ReadOnlyDependentResource.class.getName()),
"dependents." + ReadOnlyDependentResource.NAME + ".readyCondition",
startsWith(ReadOnlyDependentResource.ReadOnlyReadyCondition.class.getName()),
"dependents.crud.type", startsWith(CRUDDependentResource.class.getName()));
}
@Test
void dependentConfigurationShouldBeRetrievableAfterConfiguration() {
given()
.when()
.get("/operator/" + DependentDefiningReconciler.NAME + "/dependents/" + ReadOnlyDependentResource.NAME)
.then()
.statusCode(200)
.body("labelSelector", equalTo(ReadOnlyDependentResource.LABEL_SELECTOR));
}
@Test
void shouldExpandVariablesInNamespacesConfigurationFromAnnotation() {
assertThat(System.getenv(NamespaceFromEnvReconciler.ENV_VAR_NAME),
is(NamespaceFromEnvReconciler.FROM_ENV_VAR_NS));
given()
.when()
.get("/operator/" + NamespaceFromEnvReconciler.NAME + "/config")
.then()
.statusCode(200).body(
"namespaces", hasItem(NamespaceFromEnvReconciler.FROM_ENV_VAR_NS),
"namespaces", hasItem("static"),
"namespaces", hasSize(2));
}
@Test
void shouldExpandVariablesInNamespacesConfigurationFromProperties() {
assertThat(System.getenv(VariableNSReconciler.ENV_VAR_NAME), is(VariableNSReconciler.EXPECTED_NS_VALUE));
given()
.when()
.get("/operator/" + VariableNSReconciler.NAME + "/config")
.then()
.statusCode(200).body(
"namespaces", hasItem(VariableNSReconciler.EXPECTED_NS_VALUE),
"namespaces", hasSize(1));
}
@Test
void shouldUseNamespacesFromEnvVariableIfSet() {
given()
.when()
.get("/operator/" + EmptyReconciler.NAME + "/config")
.then()
.statusCode(200)
.body(
"namespaces", hasItem(EmptyReconciler.FROM_ENV_NS1),
"namespaces", hasItem(EmptyReconciler.FROM_ENV_NS2),
"watchCurrentNamespace", equalTo(false),
"namespaces", hasSize(2));
given()
.when()
.get("/operator/" + ReconcilerUtils.getDefaultNameFor(KeycloakController.class) + "/config")
.then()
.statusCode(200)
.body(
"namespaces", hasItem(KeycloakController.FROM_ENV),
"watchCurrentNamespace", equalTo(false),
"namespaces", hasSize(1));
}
@Test
void customAnnotatedDependentsShouldUseAnnotationValues() {
given()
.when()
.get("/operator/" + AnnotatedDependentReconciler.NAME + "/config")
.then()
.statusCode(200)
.body(
"dependents", hasSize(1),
"dependents[0].dependentClass", equalTo(AnnotatedDependentResource.class.getCanonicalName()),
"dependents[0].dependentConfig.value", equalTo(AnnotatedDependentResource.VALUE));
}
@Test
void customRateLimiterConfiguredViaCustomAnnotationShouldWork() {
given()
.when()
.get("/operator/" + CustomRateLimiterReconciler.NAME + "/config")
.then()
.statusCode(200)
.body(
"rateLimiter.value", equalTo(42),
"itemStore.name", equalTo(NullItemStore.NAME));
}
@Test
void shouldHaveDefaultMaxReconciliationInterval() {
given()
.when()
.get("/operator/" + EmptyReconciler.NAME + "/config")
.then()
.statusCode(200)
.body("maxReconciliationIntervalSeconds", equalTo(Long.valueOf(Duration.ofHours(10).getSeconds()).intValue()));
}
@Test
void shouldUseMaxReconciliationIntervalFromPropertyIfProvided() {
given()
.when()
.get("/operator/" + SecretReconciler.NAME + "/config")
.then()
.statusCode(200)
.body("maxReconciliationIntervalSeconds",
equalTo(Long.valueOf(Duration.ofMinutes(15).getSeconds()).intValue()));
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/EmptyOperatorLevelNamespacesTestProfile.java | integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/EmptyOperatorLevelNamespacesTestProfile.java | package io.quarkiverse.operatorsdk.it;
import java.util.Collections;
import java.util.Map;
import io.quarkus.test.junit.QuarkusTestProfile;
public class EmptyOperatorLevelNamespacesTestProfile implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
return Collections.singletonMap("quarkus.operator-sdk.namespaces", "");
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/NativeOperatorSDKResourceIT.java | integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/NativeOperatorSDKResourceIT.java | package io.quarkiverse.operatorsdk.it;
import io.quarkus.test.junit.QuarkusIntegrationTest;
@QuarkusIntegrationTest
public class NativeOperatorSDKResourceIT extends OperatorSDKResourceTest {
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/EmptyOperatorLevelNamespacesTest.java | integration-tests/basic/src/test/java/io/quarkiverse/operatorsdk/it/EmptyOperatorLevelNamespacesTest.java | package io.quarkiverse.operatorsdk.it;
import static io.restassured.RestAssured.given;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import org.junit.jupiter.api.Test;
import io.quarkus.test.junit.QuarkusTest;
import io.quarkus.test.junit.TestProfile;
@QuarkusTest
@TestProfile(EmptyOperatorLevelNamespacesTestProfile.class)
public class EmptyOperatorLevelNamespacesTest {
@Test
public void reconcilersWithoutSpecificNamespacesShouldWatchAllNamespaces() {
given()
.when().get("/operator/" + TestReconciler.NAME +
"/config")
.then()
.statusCode(200)
.body("watchAllNamespaces", is(true));
}
@Test
public void reconcilerWithSpecificNamespacesShouldUseThem() {
given()
.when().get("/operator/" + ApplicationScopedReconciler.NAME +
"/config")
.then()
.statusCode(200)
.body("watchAllNamespaces", is(false))
.body("namespaces.size()", is(1))
.body("namespaces[0]", equalTo("default"));
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/NamespaceFromEnvReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/NamespaceFromEnvReconciler.java | package io.quarkiverse.operatorsdk.it;
import io.fabric8.kubernetes.api.model.Pod;
import io.javaoperatorsdk.operator.api.config.informer.Informer;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
@ControllerConfiguration(name = NamespaceFromEnvReconciler.NAME, informer = @Informer(namespaces = { "static",
"${" + NamespaceFromEnvReconciler.ENV_VAR_NAME + "}" }))
public class NamespaceFromEnvReconciler implements Reconciler<Pod> {
public static final String NAME = "fromenv";
public static final String ENV_VAR_NAME = "NAMESPACE_FROM_ENV";
static final String FROM_ENV_VAR_NS = "fromEnvVarNS";
@Override
public UpdateControl<Pod> reconcile(Pod pod, Context<Pod> context) throws Exception {
return UpdateControl.noUpdate();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/TestMetrics.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/TestMetrics.java | package io.quarkiverse.operatorsdk.it;
import jakarta.inject.Singleton;
import io.javaoperatorsdk.operator.api.monitoring.Metrics;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.binder.MeterBinder;
@Singleton
public class TestMetrics implements Metrics, MeterBinder {
private boolean registryBound;
@Override
public void bindTo(MeterRegistry meterRegistry) {
registryBound = true;
}
public boolean isRegistryBound() {
return registryBound;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ReadOnlyDependentResource.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ReadOnlyDependentResource.java | package io.quarkiverse.operatorsdk.it;
import java.util.Collections;
import java.util.Set;
import io.fabric8.kubernetes.api.model.ConfigMap;
import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.javaoperatorsdk.operator.api.config.informer.Informer;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.dependent.DependentResource;
import io.javaoperatorsdk.operator.processing.dependent.kubernetes.KubernetesDependent;
import io.javaoperatorsdk.operator.processing.dependent.kubernetes.KubernetesDependentResource;
import io.javaoperatorsdk.operator.processing.dependent.workflow.Condition;
import io.javaoperatorsdk.operator.processing.event.ResourceID;
import io.javaoperatorsdk.operator.processing.event.source.SecondaryToPrimaryMapper;
@KubernetesDependent(informer = @Informer(labelSelector = ReadOnlyDependentResource.LABEL_SELECTOR))
public class ReadOnlyDependentResource extends KubernetesDependentResource<Deployment, ConfigMap> implements
SecondaryToPrimaryMapper<Deployment> {
public static final String LABEL_SELECTOR = "readonly";
public static final String NAME = "read-only";
public ReadOnlyDependentResource() {
super(Deployment.class);
}
@Override
public Set<ResourceID> toPrimaryResourceIDs(Deployment deployment) {
return Collections.emptySet();
}
public static class ReadOnlyReadyCondition implements Condition<Deployment, ConfigMap> {
@Override
public boolean isMet(DependentResource<Deployment, ConfigMap> dependentResource,
ConfigMap configMap, Context<ConfigMap> context) {
return false;
}
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/AnnotatedDependentResource.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/AnnotatedDependentResource.java | package io.quarkiverse.operatorsdk.it;
import java.util.Optional;
import io.fabric8.kubernetes.api.model.Service;
import io.javaoperatorsdk.operator.api.config.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.config.dependent.ConfigurationConverter;
import io.javaoperatorsdk.operator.api.config.dependent.Configured;
import io.javaoperatorsdk.operator.api.config.dependent.DependentResourceSpec;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.dependent.DependentResource;
import io.javaoperatorsdk.operator.api.reconciler.dependent.ReconcileResult;
import io.javaoperatorsdk.operator.api.reconciler.dependent.managed.ConfiguredDependentResource;
@ADRConfigurationAnnotation(AnnotatedDependentResource.VALUE)
@Configured(by = ADRConfigurationAnnotation.class, with = ADRConfiguration.class, converter = AnnotatedDependentResource.class)
public class AnnotatedDependentResource implements DependentResource<TestResource, Service>,
ConfiguredDependentResource<ADRConfiguration>,
ConfigurationConverter<ADRConfigurationAnnotation, ADRConfiguration> {
public static final int VALUE = 42;
private ADRConfiguration config;
@Override
public ReconcileResult<TestResource> reconcile(Service service, Context<Service> context) {
return null;
}
@Override
public Class<TestResource> resourceType() {
return TestResource.class;
}
@Override
public void configureWith(ADRConfiguration adrConfiguration) {
this.config = adrConfiguration;
}
@Override
public Optional<ADRConfiguration> configuration() {
return Optional.ofNullable(config);
}
@Override
public ADRConfiguration configFrom(ADRConfigurationAnnotation adrConfigurationAnnotation,
DependentResourceSpec<?, ?, ADRConfiguration> dependentResourceSpec,
ControllerConfiguration<?> controllerConfiguration) {
return new ADRConfiguration(adrConfigurationAnnotation.value());
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/KeycloakController.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/KeycloakController.java | package io.quarkiverse.operatorsdk.it;
import io.javaoperatorsdk.operator.api.config.informer.Informer;
import io.javaoperatorsdk.operator.api.reconciler.Constants;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
@ControllerConfiguration(informer = @Informer(namespaces = Constants.WATCH_CURRENT_NAMESPACE))
public class KeycloakController implements Reconciler<Keycloak> {
public static final String FROM_ENV = Constants.WATCH_ALL_NAMESPACES;
@Override
public UpdateControl<Keycloak> reconcile(Keycloak keycloak, Context<Keycloak> context)
throws Exception {
return null;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/VariableNSReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/VariableNSReconciler.java | package io.quarkiverse.operatorsdk.it;
import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
@ControllerConfiguration(name = VariableNSReconciler.NAME)
public class VariableNSReconciler implements Reconciler<Deployment> {
public static final String NAME = "variablens";
public static final String ENV_VAR_NAME = "VARIABLE_NS_ENV";
public static final String EXPECTED_NS_VALUE = "variableNSFromEnv";
@Override
public UpdateControl<Deployment> reconcile(Deployment deployment, Context<Deployment> context)
throws Exception {
return null;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/Test.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/Test.java | package io.quarkiverse.operatorsdk.it;
import io.fabric8.kubernetes.model.annotation.Group;
import io.fabric8.kubernetes.model.annotation.Version;
@Group("josdk.quarkiverse.io")
@Version("v1alpha1")
public class Test extends TestResource {
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/TestResource.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/TestResource.java | package io.quarkiverse.operatorsdk.it;
import io.fabric8.kubernetes.client.CustomResource;
import io.fabric8.kubernetes.model.annotation.Group;
import io.fabric8.kubernetes.model.annotation.Version;
@Group("example.com")
@Version("v1")
public abstract class TestResource extends CustomResource<String, Void> {
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/EmptyReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/EmptyReconciler.java | package io.quarkiverse.operatorsdk.it;
import static io.quarkiverse.operatorsdk.it.EmptyReconciler.NAME;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
@ControllerConfiguration(name = NAME)
public class EmptyReconciler implements Reconciler<EmptyCR> {
public static final String NAME = "empty";
static final String FROM_ENV_NS1 = "fromEnv1";
static final String FROM_ENV_NS2 = "fromEnv2";
@Override
public UpdateControl<EmptyCR> reconcile(EmptyCR emptyCR, Context<EmptyCR> context)
throws Exception {
return UpdateControl.noUpdate();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/NullItemStore.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/NullItemStore.java | package io.quarkiverse.operatorsdk.it;
import java.util.stream.Stream;
import io.fabric8.kubernetes.api.model.ResourceQuota;
import io.fabric8.kubernetes.client.informers.cache.ItemStore;
import io.quarkus.runtime.annotations.RegisterForReflection;
@RegisterForReflection // for proper serialization in native mode
public class NullItemStore implements ItemStore<ResourceQuota> {
public static final String NAME = "NullItemStoreName";
// so that it appears in the JSON configuration and we can check against it
public String getName() {
return NAME;
}
@Override
public String getKey(ResourceQuota resourceQuota) {
return null;
}
@Override
public ResourceQuota put(String s, ResourceQuota resourceQuota) {
return null;
}
@Override
public ResourceQuota remove(String s) {
return null;
}
@Override
public Stream<String> keySet() {
return null;
}
@Override
public Stream<ResourceQuota> values() {
return null;
}
@Override
public int size() {
return 0;
}
@Override
public ResourceQuota get(String s) {
return null;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/CustomRateConfiguration.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/CustomRateConfiguration.java | package io.quarkiverse.operatorsdk.it;
import java.lang.annotation.*;
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE })
public @interface CustomRateConfiguration {
int value();
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ADRConfiguration.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ADRConfiguration.java | package io.quarkiverse.operatorsdk.it;
import io.quarkus.runtime.annotations.RecordableConstructor;
public class ADRConfiguration {
private final int value;
@RecordableConstructor
public ADRConfiguration(int value) {
this.value = value;
}
@SuppressWarnings("unused")
public int getValue() {
return value;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/SecretReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/SecretReconciler.java | package io.quarkiverse.operatorsdk.it;
import java.util.Base64;
import java.util.HashMap;
import io.fabric8.kubernetes.api.model.Secret;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.MaxReconciliationInterval;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
@ControllerConfiguration(name = SecretReconciler.NAME, maxReconciliationInterval = @MaxReconciliationInterval(interval = 2))
public class SecretReconciler implements Reconciler<Secret> {
public static final String NAME = "secret";
@Override
public UpdateControl<Secret> reconcile(Secret secret, Context context) {
if (secret.getType().equals("Opaque")) {
final var labels = secret.getMetadata().getLabels();
if (labels != null && "true".equals(labels.get("quarkus-operator-sdk.secret-reconciler-marker"))) {
System.out.println("Reconciling secret " + secret.getMetadata().getName());
var data = secret.getData();
if (data == null) {
data = new HashMap<>();
secret.setStringData(data);
}
final String foo = data.putIfAbsent("quarkus-operator-sdk.added-value",
Base64.getEncoder().encodeToString("quarkus-operator-sdk rocks!".getBytes()));
if (foo == null) {
return UpdateControl.patchResource(secret);
}
}
}
return UpdateControl.noUpdate();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ApplicationScopedReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ApplicationScopedReconciler.java | package io.quarkiverse.operatorsdk.it;
import jakarta.enterprise.context.ApplicationScoped;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
@ApplicationScoped
@ControllerConfiguration(name = ApplicationScopedReconciler.NAME)
public class ApplicationScopedReconciler implements Reconciler<ChildTestResource> {
public static final String NAME = "ApplicationScoped";
@Override
public UpdateControl<ChildTestResource> reconcile(ChildTestResource childTestResource,
Context context) {
return UpdateControl.noUpdate();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ShouldBeIgnoredReconciler2.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ShouldBeIgnoredReconciler2.java | package io.quarkiverse.operatorsdk.it;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
public class ShouldBeIgnoredReconciler2 implements Reconciler<HasMetadata> {
@Override
public UpdateControl<HasMetadata> reconcile(HasMetadata hasMetadata, Context context) {
return UpdateControl.noUpdate();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/NameWithSpaceReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/NameWithSpaceReconciler.java | package io.quarkiverse.operatorsdk.it;
import io.fabric8.kubernetes.api.model.ServiceAccount;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
@ControllerConfiguration(name = NameWithSpaceReconciler.NAME)
public class NameWithSpaceReconciler implements Reconciler<ServiceAccount> {
public static final String NAME = "name with space";
@Override
public UpdateControl<ServiceAccount> reconcile(ServiceAccount serviceAccount,
Context<ServiceAccount> context) throws Exception {
return null;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/TestReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/TestReconciler.java | package io.quarkiverse.operatorsdk.it;
import java.util.concurrent.TimeUnit;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.MaxReconciliationInterval;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
@ControllerConfiguration(name = TestReconciler.NAME, maxReconciliationInterval = @MaxReconciliationInterval(interval = TestReconciler.INTERVAL, timeUnit = TimeUnit.SECONDS))
public class TestReconciler implements Reconciler<Test> {
public static final String NAME = "test";
public static final int INTERVAL = 50;
@Override
public UpdateControl<Test> reconcile(Test test, Context<Test> context) throws Exception {
return null;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ConfiguredReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ConfiguredReconciler.java | package io.quarkiverse.operatorsdk.it;
import java.util.concurrent.TimeUnit;
import io.javaoperatorsdk.operator.api.config.informer.Informer;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
import io.javaoperatorsdk.operator.processing.event.rate.RateLimited;
import io.javaoperatorsdk.operator.processing.retry.GradualRetry;
@ControllerConfiguration(name = ConfiguredReconciler.NAME, informer = @Informer(namespaces = "foo"))
@GradualRetry(maxAttempts = ConfiguredReconciler.MAX_ATTEMPTS, initialInterval = 1000)
@RateLimited(maxReconciliations = 1, within = 1, unit = TimeUnit.MINUTES)
public class ConfiguredReconciler implements Reconciler<ChildTestResource2> {
public static final String NAME = "annotation";
public static final int MAX_ATTEMPTS = 23;
@Override
public UpdateControl<ChildTestResource2> reconcile(ChildTestResource2 childTestResource2,
Context<ChildTestResource2> context) throws Exception {
return null;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/TestLeaderElectionConfiguration.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/TestLeaderElectionConfiguration.java | package io.quarkiverse.operatorsdk.it;
import jakarta.inject.Singleton;
import io.javaoperatorsdk.operator.api.config.LeaderElectionConfiguration;
@Singleton
public class TestLeaderElectionConfiguration extends LeaderElectionConfiguration {
public TestLeaderElectionConfiguration() {
super("testLeaseName");
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ADRConfigurationAnnotation.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ADRConfigurationAnnotation.java | package io.quarkiverse.operatorsdk.it;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface ADRConfigurationAnnotation {
int value();
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/CustomRateLimiterReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/CustomRateLimiterReconciler.java | package io.quarkiverse.operatorsdk.it;
import io.fabric8.kubernetes.api.model.ResourceQuota;
import io.javaoperatorsdk.operator.api.config.informer.Informer;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.ControllerConfiguration;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
@ControllerConfiguration(name = CustomRateLimiterReconciler.NAME, rateLimiter = CustomRateLimiter.class, informer = @Informer(itemStore = NullItemStore.class))
@CustomRateConfiguration(42)
public class CustomRateLimiterReconciler implements Reconciler<ResourceQuota> {
public static final String NAME = "CustomRateLimiter";
@Override
public UpdateControl<ResourceQuota> reconcile(ResourceQuota resourceQuota, Context<ResourceQuota> context)
throws Exception {
return null;
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/GatewayReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/GatewayReconciler.java | package io.quarkiverse.operatorsdk.it;
import io.fabric8.kubernetes.api.model.gatewayapi.v1.Gateway;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
public class GatewayReconciler implements Reconciler<Gateway> {
@Override
public UpdateControl<Gateway> reconcile(Gateway gateway, Context context) {
return UpdateControl.noUpdate();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
quarkiverse/quarkus-operator-sdk | https://github.com/quarkiverse/quarkus-operator-sdk/blob/c5276c168ac00f94007e8b0c2c1b2c8162a1c424/integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ShouldBeIgnoredReconciler.java | integration-tests/basic/src/main/java/io/quarkiverse/operatorsdk/it/ShouldBeIgnoredReconciler.java | package io.quarkiverse.operatorsdk.it;
import io.fabric8.kubernetes.client.CustomResource;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.Reconciler;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
public class ShouldBeIgnoredReconciler implements Reconciler<CustomResource<Void, Void>> {
@Override
public UpdateControl<CustomResource<Void, Void>> reconcile(CustomResource<Void, Void> resource,
Context<CustomResource<Void, Void>> context) {
return UpdateControl.noUpdate();
}
}
| java | Apache-2.0 | c5276c168ac00f94007e8b0c2c1b2c8162a1c424 | 2026-01-05T02:41:12.555806Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.