repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
onosfw/apis
|
onos/apis/dir_c033ff7368e78e32007d33ed3077bca6.js
|
<filename>onos/apis/dir_c033ff7368e78e32007d33ed3077bca6.js<gh_stars>0
// Doxygen-generated navigation data for this API directory.
// Each entry is [ display name, target HTML page, directory id ].
// Auto-generated file — do not edit by hand.
var dir_c033ff7368e78e32007d33ed3077bca6 =
[
[ "src", "dir_4b993e487eefba95ee4ce0c7cf007153.html", "dir_4b993e487eefba95ee4ce0c7cf007153" ]
];
|
BloomTech-Labs/niyon-fe
|
src/components/connections/Connections.test.js
|
import React from 'react';
import { shallow } from 'enzyme';
import toJSON from 'enzyme-to-json';
import Connections from './Connections'
import findByTestAttr from '../../tests/utils'
const setUp = (props = {}) => {
const wrapper = shallow(<Connections { ...props } />)
return wrapper
}
// Smoke/snapshot tests for the <Connections /> component.
// Each case re-renders a fresh wrapper via beforeEach, then locates
// children through data-test attributes (findByTestAttr helper).
describe('<Connections /> component testing', () => {
  const testProps = {
    sumConnections: 1
  };
  let component;
  beforeEach(() => {
    component = setUp({ ...testProps });
  });
  it('should match snapshot tests', () => {
    expect(toJSON(component)).toMatchSnapshot();
  });
  it('should render <Connections /> component correctly', () => {
    const connectionsWrapper = findByTestAttr(component, 'connections');
    expect(connectionsWrapper.length).toBe(1);
  });
  it('should render a second container <Div /> correctly', () => {
    const secondWrapper = findByTestAttr(component, 'second-wrapper');
    expect(secondWrapper.exists()).toBe(true);
  });
  it('should render <Paper /> component correctly', () => {
    const wrapper = findByTestAttr(component, 'paper');
    expect(wrapper.exists()).toBe(true);
  });
  // BUGFIX: test description previously read "mani title".
  it('should render main title in the component', () => {
    const sum = testProps.sumConnections;
    const h1 = findByTestAttr(component, 'my-connections');
    expect(h1.length).toBe(1);
    expect(h1.text().trim()).toEqual(`My Connections (${sum})`);
  });
  it('should render <SwipeTabsConnections /> component correctly', () => {
    expect(component.find('SwipeTabsConnections').exists()).toBe(true);
  });
});
|
chenyang8094/alibabacloud-tairjedis-sdk
|
src/test/java/com/aliyun/tair/tests/tairts/TairTsTest.java
|
package com.aliyun.tair.tests.tairts;
import com.aliyun.tair.tairts.params.ExtsAggregationParams;
import com.aliyun.tair.tairts.params.ExtsAttributesParams;
import com.aliyun.tair.tairts.params.ExtsDataPoint;
import com.aliyun.tair.tairts.params.ExtsFilter;
import com.aliyun.tair.tairts.results.ExtsDataPointResult;
import com.aliyun.tair.tairts.results.ExtsLabelResult;
import com.aliyun.tair.tairts.results.ExtsSkeyResult;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static redis.clients.jedis.Protocol.toByteArray;
public class TairTsTest extends TairTsTestBase {
private String randomSkey;
private String randomSkey2;
private byte[] bSkey;
private byte[] bSkey2;
private String randomPkey;
private byte[] randomPKeyBinary;
private long startTs;
private long endTs;
/**
 * Builds per-instance key names (thread name + a fresh random UUID per
 * key) so tests running concurrently against a shared server never
 * collide, and pins a [startTs, endTs] window rounded down to whole
 * seconds for the range queries below.
 */
public TairTsTest() {
    randomPkey = uniqueName("randomPkey_");
    randomPKeyBinary = uniqueName("randomPkey_").getBytes();
    randomSkey = uniqueName("key");
    randomSkey2 = uniqueName("key2");
    bSkey = uniqueName("bkey").getBytes();
    bSkey2 = uniqueName("bkey2").getBytes();
    startTs = (System.currentTimeMillis() - 100000) / 1000 * 1000;
    endTs = System.currentTimeMillis() / 1000 * 1000;
}

// Returns "<prefix><threadName><randomUUID>"; a new UUID every call.
private static String uniqueName(String prefix) {
    return prefix + Thread.currentThread().getName() + UUID.randomUUID().toString();
}
/**
 * Verifies EXTS.S.ADD for both API variants (String keys and byte[]
 * keys): each loop writes one point without attributes and one point,
 * at ts+1, with ExtsAttributesParams (expire, chunk size, uncompressed,
 * labels). Both adds must reply "OK".
 */
@Test
public void extsaddTest() throws Exception {
// String-key variant. NOTE(review): loop body runs exactly once (i < 1).
for (int i = 0; i < 1; i++) {
double val = i;
long ts = startTs + i*1;
String tsStr = String.valueOf(ts);
// Attributes are only applied to the second add below.
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
// Labels are alternating name/value entries: label1=1, label2=2.
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
labels.add("label2");
labels.add("2");
params.labels(labels);
String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val);
Assert.assertEquals("OK", addRet);
// Bump the timestamp so the second add creates a new point.
ts = ts + 1;
tsStr = String.valueOf(ts);
addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
Assert.assertEquals("OK", addRet);
}
// byte[]-key variant of the same flow (value passed as long here).
for (int i = 0; i < 1; i++) {
long val = i;
long ts = startTs + i*1;
byte[] tsStr = toByteArray(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
labels.add("label2");
labels.add("2");
params.labels(labels);
String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr, val);
Assert.assertEquals("OK", addRet);
ts = ts + 1;
tsStr = toByteArray(ts);
addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr, val, params);
Assert.assertEquals("OK", addRet);
}
}
/**
 * Verifies EXTS.S.ALTER: after the skey's labels are rewritten from
 * {label1=1, label2=2} to {label3=3, label4=4}, EXTS.S.MRANGE must stop
 * matching the old label filters and start matching the new ones.
 */
@Test
public void extsAlterTest() throws Exception {
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Seed two data points carrying the original labels label1=1, label2=2.
    for (int i = 0; i < 1; i++) {
        double val = i;
        long ts = startTs + i*1;
        String tsStr = String.valueOf(ts);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        ts = ts + 1;
        tsStr = String.valueOf(ts);
        addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
    }
    ExtsFilter<String> filter1 = new ExtsFilter<String>("label1=1");
    ExtsFilter<String> filter2 = new ExtsFilter<String>("label2=2");
    ExtsFilter<String> filter3 = new ExtsFilter<String>("label3=3");
    ExtsFilter<String> filter4 = new ExtsFilter<String>("label4=4");
    // The original labels match before the alter.
    ArrayList<ExtsFilter<String>> filterList1 = new ArrayList<ExtsFilter<String>>();
    filterList1.add(filter1);
    filterList1.add(filter2);
    List<ExtsSkeyResult> rangeByteRet = tairTs.extsmrange(randomPkey, startTsStr, endTsStr, filterList1);
    assertEquals(1, rangeByteRet.size());
    assertEquals(randomSkey, rangeByteRet.get(0).getSkey());
    List<ExtsLabelResult> labelRet = rangeByteRet.get(0).getLabels();
    assertEquals(0, labelRet.size());
    // Rewrite the labels to label3=3, label4=4.
    ExtsAttributesParams params = new ExtsAttributesParams();
    ArrayList<String> labels = new ArrayList<String>();
    labels.add("label3");
    labels.add("3");
    labels.add("label4");
    labels.add("4");
    params.labels(labels);
    String alterRet = tairTs.extsalter(randomPkey, randomSkey, params);
    // BUGFIX: the alter reply was previously captured but never checked;
    // assert it like every other command result in this suite.
    Assert.assertEquals("OK", alterRet);
    ArrayList<ExtsFilter<String>> filterList2 = new ArrayList<ExtsFilter<String>>();
    filterList2.add(filter3);
    filterList2.add(filter4);
    // Old labels no longer match; new labels do.
    rangeByteRet = tairTs.extsmrange(randomPkey, startTsStr, endTsStr, filterList1);
    assertEquals(0, rangeByteRet.size());
    rangeByteRet = tairTs.extsmrange(randomPkey, startTsStr, endTsStr, filterList2);
    assertEquals(1, rangeByteRet.size());
}
/**
 * Verifies EXTS.S.RAW_MODIFY for both API variants: each modify (issued
 * twice to confirm idempotent overwrite at the same timestamp) must
 * reply "OK", and EXTS.S.GET must return the last written ts/value.
 */
@Test
public void extsRawModifyTest() throws Exception {
    // String-key variant. NOTE(review): loop body runs exactly once (i < 1).
    for (int i = 0; i < 1; i++) {
        double val = i;
        long ts = startTs + i*1;
        String tsStr = String.valueOf(ts);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsrawmodify(randomPkey, randomSkey, tsStr, val);
        Assert.assertEquals("OK", addRet);
        addRet = tairTs.extsrawmodify(randomPkey, randomSkey, tsStr, val);
        Assert.assertEquals("OK", addRet);
        ExtsDataPointResult getRet = tairTs.extsget(randomPkey, randomSkey);
        assertEquals((long)ts, getRet.getTs());
        assertEquals(i, getRet.getDoubleValue(), 0.0);
        // Repeat at ts+1 with explicit attributes.
        ts = ts + 1;
        tsStr = String.valueOf(ts);
        addRet = tairTs.extsrawmodify(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        addRet = tairTs.extsrawmodify(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        getRet = tairTs.extsget(randomPkey, randomSkey);
        assertEquals((long)ts, getRet.getTs());
        assertEquals(i, getRet.getDoubleValue(), 0.0);
    }
    // byte[]-key variant of the same flow.
    for (int i = 0; i < 1; i++) {
        long val = i;
        long ts = startTs + i*1;
        byte[] tsStr = toByteArray(ts);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsrawmodify(randomPKeyBinary, bSkey, tsStr, val);
        Assert.assertEquals("OK", addRet);
        addRet = tairTs.extsrawmodify(randomPKeyBinary, bSkey, tsStr, val);
        Assert.assertEquals("OK", addRet);
        ExtsDataPointResult getRet = tairTs.extsget(randomPKeyBinary, bSkey);
        assertEquals((long)ts, getRet.getTs());
        assertEquals(i, getRet.getDoubleValue(), 0.0);
        ts = ts + 1;
        tsStr = toByteArray(ts);
        addRet = tairTs.extsrawmodify(randomPKeyBinary, bSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        // BUGFIX: the second modify's reply was previously discarded, so the
        // following assert re-checked the stale addRet from the call above.
        addRet = tairTs.extsrawmodify(randomPKeyBinary, bSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        getRet = tairTs.extsget(randomPKeyBinary, bSkey);
        assertEquals((long)ts, getRet.getTs());
        assertEquals(i, getRet.getDoubleValue(), 0.0);
    }
}
// @Test
// public void extsEvictTest() throws Exception {
//
// long num = 100;
// String startTsStr = String.valueOf(0);
// String endTsStr = String.valueOf(endTs);
//
// for (int i = 0; i < num; i++) {
// double val = i;
// long ts = startTs + i*1000;
// String tsStr = String.valueOf(ts);
// ExtsAttributesParams params = new ExtsAttributesParams();
// params.dataEt(10000);
// params.chunkSize(10);
// params.uncompressed();
// ArrayList<String> labels = new ArrayList<String>();
// labels.add("label1");
// labels.add("1");
// labels.add("label2");
// labels.add("2");
// params.labels(labels);
//
// String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
// Assert.assertEquals("OK", addRet);
// }
//
// ExtsAggregationParams paramsAgg = new ExtsAggregationParams();
// paramsAgg.maxCountSize(1000);
// paramsAgg.aggAvg(1000);
//
// List<ExtsDataPointResult> rangeByteRet = tairTs.extsrange(randomPkey, randomSkey, startTsStr, endTsStr, paramsAgg);
//// System.out.print(rangeByteRet.get(0).getTs());
// System.out.print(rangeByteRet.get(0).getDoubleValue());
// assertEquals(num, rangeByteRet.size());
// for (int i = 0; i < num; i++) {
// double val = i;
// long ts = startTs + i*1000;
// assertEquals(ts, rangeByteRet.get(i).getTs());
// assertEquals(val, rangeByteRet.get(i).getDoubleValue(), 0.0);
// }
// }
/**
 * Verifies EXTS.S.MADD (multi-skey add) for both API variants: a batch
 * of two points under different skeys must return "OK" per entry, both
 * with and without ExtsAttributesParams; each round is followed by
 * EXTS.S.DEL to reset state for the next call.
 */
@Test
public void extsmaddTest() throws Exception {
// String-key variant. NOTE(review): loop body runs exactly once (i < 1).
for (int i = 0; i < 1; i++) {
long val = i;
long ts = startTs + i*1;
String tsStr = String.valueOf(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
params.labels(labels);
// One point for each of the two skeys, same ts/value.
ArrayList<ExtsDataPoint<String>> addList = new ArrayList<ExtsDataPoint<String>>();
ExtsDataPoint<String> add1 = new ExtsDataPoint<String>(randomSkey, tsStr, val);
ExtsDataPoint<String> add2 = new ExtsDataPoint<String>(randomSkey2, tsStr, val);
addList.add(add1);
addList.add(add2);
List<String> maddRet = tairTs.extsmadd(randomPkey, addList);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
// Delete both skeys so the madd-with-params below starts clean.
String delRet = tairTs.extsdel(randomPkey, randomSkey);
Assert.assertEquals("OK", delRet);
delRet = tairTs.extsdel(randomPkey, randomSkey2);
Assert.assertEquals("OK", delRet);
maddRet = tairTs.extsmadd(randomPkey, addList, params);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
delRet = tairTs.extsdel(randomPkey, randomSkey);
Assert.assertEquals("OK", delRet);
delRet = tairTs.extsdel(randomPkey, randomSkey2);
Assert.assertEquals("OK", delRet);
}
// byte[]-key variant of the same flow.
for (int i = 0; i < 1; i++) {
long val = i;
long ts = startTs + i*1;
byte[] tsStr = toByteArray(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
params.labels(labels);
ArrayList<ExtsDataPoint<byte[]>> addList = new ArrayList<ExtsDataPoint<byte[]>>();
ExtsDataPoint<byte[]> add1 = new ExtsDataPoint<byte[]>(bSkey, tsStr, val);
ExtsDataPoint<byte[]> add2 = new ExtsDataPoint<byte[]>(bSkey2, tsStr, val);
addList.add(add1);
addList.add(add2);
List<String> maddRet = tairTs.extsmadd(randomPKeyBinary, addList);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
String delRet = tairTs.extsdel(randomPKeyBinary, bSkey);
Assert.assertEquals("OK", delRet);
delRet = tairTs.extsdel(randomPKeyBinary, bSkey2);
Assert.assertEquals("OK", delRet);
maddRet = tairTs.extsmadd(randomPKeyBinary, addList,params);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
delRet = tairTs.extsdel(randomPKeyBinary, bSkey);
Assert.assertEquals("OK", delRet);
delRet = tairTs.extsdel(randomPKeyBinary, bSkey2);
Assert.assertEquals("OK", delRet);
}
}
/**
 * Verifies EXTS.S.MRAW_MODIFY (multi-skey raw modify) for both API
 * variants: a batch touching two skeys must return "OK" per entry, and
 * EXTS.S.GET must observe the written ts/value on each skey — first
 * without params, then again at ts+1 with ExtsAttributesParams.
 * Cleans up with EXTS.S.DEL at the end of each loop.
 */
@Test
public void extsRawMModifyTest() throws Exception {
// String-key variant. NOTE(review): loop body runs exactly once (i < 1).
for (int i = 0; i < 1; i++) {
long val = i;
long ts = startTs + i*1;
String tsStr = String.valueOf(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
params.labels(labels);
// Batch of one point per skey, same ts/value.
ArrayList<ExtsDataPoint<String>> addList = new ArrayList<ExtsDataPoint<String>>();
ExtsDataPoint<String> add1 = new ExtsDataPoint<String>(randomSkey, tsStr, val);
ExtsDataPoint<String> add2 = new ExtsDataPoint<String>(randomSkey2, tsStr, val);
addList.add(add1);
addList.add(add2);
List<String> maddRet = tairTs.extsmrawmodify(randomPkey, addList);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
// Both skeys must now hold the written point.
ExtsDataPointResult getRet = tairTs.extsget(randomPkey, randomSkey);
assertEquals((long)ts, getRet.getTs());
assertEquals(i, getRet.getDoubleValue(), 0.0);
getRet = tairTs.extsget(randomPkey, randomSkey2);
assertEquals((long)ts, getRet.getTs());
assertEquals(i, getRet.getDoubleValue(), 0.0);
// Second round at ts+1, this time with explicit attributes.
ts = ts + 1;
tsStr = String.valueOf(ts);
ArrayList<ExtsDataPoint<String>> addList2 = new ArrayList<ExtsDataPoint<String>>();
add1 = new ExtsDataPoint<String>(randomSkey, tsStr, val);
add2 = new ExtsDataPoint<String>(randomSkey2, tsStr, val);
addList2.add(add1);
addList2.add(add2);
maddRet = tairTs.extsmrawmodify(randomPkey, addList2,params);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
getRet = tairTs.extsget(randomPkey, randomSkey);
assertEquals((long)ts, getRet.getTs());
assertEquals(i, getRet.getDoubleValue(), 0.0);
getRet = tairTs.extsget(randomPkey, randomSkey2);
assertEquals((long)ts, getRet.getTs());
assertEquals(i, getRet.getDoubleValue(), 0.0);
// Cleanup so the byte[] loop below starts from a clean key space.
String delRet = tairTs.extsdel(randomPkey, randomSkey);
Assert.assertEquals("OK", delRet);
delRet = tairTs.extsdel(randomPkey, randomSkey2);
Assert.assertEquals("OK", delRet);
}
// byte[]-key variant of the same flow.
for (int i = 0; i < 1; i++) {
long val = i;
long ts = startTs + i*1;
byte[] tsStr = toByteArray(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
params.labels(labels);
ArrayList<ExtsDataPoint<byte[]>> addList = new ArrayList<ExtsDataPoint<byte[]>>();
ExtsDataPoint<byte[]> add1 = new ExtsDataPoint<byte[]>(bSkey, tsStr, val);
ExtsDataPoint<byte[]> add2 = new ExtsDataPoint<byte[]>(bSkey2, tsStr, val);
addList.add(add1);
addList.add(add2);
List<String> maddRet = tairTs.extsmrawmodify(randomPKeyBinary, addList);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
ExtsDataPointResult getRet = tairTs.extsget(randomPKeyBinary, bSkey);
assertEquals((long)ts, getRet.getTs());
assertEquals(i, getRet.getDoubleValue(), 0.0);
getRet = tairTs.extsget(randomPKeyBinary, bSkey2);
assertEquals((long)ts, getRet.getTs());
assertEquals(i, getRet.getDoubleValue(), 0.0);
ts = ts + 1;
tsStr = toByteArray(ts);
ArrayList<ExtsDataPoint<byte[]>> addList2 = new ArrayList<ExtsDataPoint<byte[]>>();
add1 = new ExtsDataPoint<byte[]>(bSkey, tsStr, val);
add2 = new ExtsDataPoint<byte[]>(bSkey2, tsStr, val);
addList2.add(add1);
addList2.add(add2);
maddRet = tairTs.extsmrawmodify(randomPKeyBinary, addList2,params);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
getRet = tairTs.extsget(randomPKeyBinary, bSkey);
assertEquals((long)ts, getRet.getTs());
assertEquals(i, getRet.getDoubleValue(), 0.0);
getRet = tairTs.extsget(randomPKeyBinary, bSkey2);
assertEquals((long)ts, getRet.getTs());
assertEquals(i, getRet.getDoubleValue(), 0.0);
String delRet = tairTs.extsdel(randomPKeyBinary, bSkey);
Assert.assertEquals("OK", delRet);
delRet = tairTs.extsdel(randomPKeyBinary, bSkey2);
Assert.assertEquals("OK", delRet);
}
}
/**
 * Verifies EXTS.S.MRAW_INCR (multi-skey raw increment) for both API
 * variants: a batch touching two skeys must return "OK" per entry, and
 * EXTS.S.GET must observe the accumulated value on each skey — first
 * without params, then again at ts+1 with ExtsAttributesParams.
 * Cleans up with EXTS.S.DEL at the end of each loop.
 */
@Test
public void extsRawMIncrTest() throws Exception {
// String-key variant. NOTE(review): loop body runs exactly once (i < 1),
// so val starts at 0 and the assertions below hold trivially after the
// first increment.
for (int i = 0; i < 1; i++) {
long val = i;
long ts = startTs + i*1;
String tsStr = String.valueOf(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
params.labels(labels);
ArrayList<ExtsDataPoint<String>> addList = new ArrayList<ExtsDataPoint<String>>();
ExtsDataPoint<String> add1 = new ExtsDataPoint<String>(randomSkey, tsStr, val);
ExtsDataPoint<String> add2 = new ExtsDataPoint<String>(randomSkey2, tsStr, val);
addList.add(add1);
addList.add(add2);
List<String> maddRet = tairTs.extsmrawincr(randomPkey, addList);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
ExtsDataPointResult getRet = tairTs.extsget(randomPkey, randomSkey);
assertEquals((long)ts, getRet.getTs());
assertEquals(val, getRet.getDoubleValue(), 0.0);
getRet = tairTs.extsget(randomPkey, randomSkey2);
assertEquals((long)ts, getRet.getTs());
assertEquals(val, getRet.getDoubleValue(), 0.0);
// Second round at ts+1 with a bumped increment amount.
ts = ts + 1;
val = val + 1;
tsStr = String.valueOf(ts);
ArrayList<ExtsDataPoint<String>> addList2 = new ArrayList<ExtsDataPoint<String>>();
add1 = new ExtsDataPoint<String>(randomSkey, tsStr, val);
add2 = new ExtsDataPoint<String>(randomSkey2, tsStr, val);
addList2.add(add1);
addList2.add(add2);
maddRet = tairTs.extsmrawincr(randomPkey, addList2,params);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
getRet = tairTs.extsget(randomPkey, randomSkey);
assertEquals((long)ts, getRet.getTs());
assertEquals(val, getRet.getDoubleValue(), 0.0);
getRet = tairTs.extsget(randomPkey, randomSkey2);
assertEquals((long)ts, getRet.getTs());
assertEquals(val, getRet.getDoubleValue(), 0.0);
String delRet = tairTs.extsdel(randomPkey, randomSkey);
Assert.assertEquals("OK", delRet);
delRet = tairTs.extsdel(randomPkey, randomSkey2);
Assert.assertEquals("OK", delRet);
}
// byte[]-key variant. NOTE(review): unlike the String loop, val is NOT
// bumped before the second incr here; with i == 0 (val == 0) the
// assertions still pass, but the two loops are asymmetric — confirm
// whether this was intentional.
for (int i = 0; i < 1; i++) {
long val = i;
long ts = startTs + i*1;
byte[] tsStr = toByteArray(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
params.labels(labels);
ArrayList<ExtsDataPoint<byte[]>> addList = new ArrayList<ExtsDataPoint<byte[]>>();
ExtsDataPoint<byte[]> add1 = new ExtsDataPoint<byte[]>(bSkey, tsStr, val);
ExtsDataPoint<byte[]> add2 = new ExtsDataPoint<byte[]>(bSkey2, tsStr, val);
addList.add(add1);
addList.add(add2);
List<String> maddRet = tairTs.extsmrawincr(randomPKeyBinary, addList);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
ExtsDataPointResult getRet = tairTs.extsget(randomPKeyBinary, bSkey);
assertEquals((long)ts, getRet.getTs());
assertEquals(val, getRet.getDoubleValue(), 0.0);
getRet = tairTs.extsget(randomPKeyBinary, bSkey2);
assertEquals((long)ts, getRet.getTs());
assertEquals(val, getRet.getDoubleValue(), 0.0);
ts = ts + 1;
tsStr = toByteArray(ts);
ArrayList<ExtsDataPoint<byte[]>> addList2 = new ArrayList<ExtsDataPoint<byte[]>>();
add1 = new ExtsDataPoint<byte[]>(bSkey, tsStr, val);
add2 = new ExtsDataPoint<byte[]>(bSkey2, tsStr, val);
addList2.add(add1);
addList2.add(add2);
maddRet = tairTs.extsmrawincr(randomPKeyBinary, addList2,params);
for (int j = 0; j < maddRet.size(); j++) {
Assert.assertEquals("OK", maddRet.get(j));
}
getRet = tairTs.extsget(randomPKeyBinary, bSkey);
assertEquals((long)ts, getRet.getTs());
assertEquals(val, getRet.getDoubleValue(), 0.0);
getRet = tairTs.extsget(randomPKeyBinary, bSkey2);
assertEquals((long)ts, getRet.getTs());
assertEquals(val, getRet.getDoubleValue(), 0.0);
String delRet = tairTs.extsdel(randomPKeyBinary, bSkey);
Assert.assertEquals("OK", delRet);
delRet = tairTs.extsdel(randomPKeyBinary, bSkey2);
Assert.assertEquals("OK", delRet);
}
}
/**
 * Verifies EXTS.S.GET for both API variants: after an EXTS.S.ADD with
 * ExtsAttributesParams, GET must return the written timestamp and value.
 */
@Test
public void extsgetTest() throws Exception {
    // String-key variant. NOTE(review): loop body runs exactly once (i < 1).
    for (int i = 0; i < 1; i++) {
        long val = i;
        long ts = startTs + i*1;
        String tsStr = String.valueOf(ts);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        ExtsDataPointResult getRet = tairTs.extsget(randomPkey, randomSkey);
        assertEquals((long)ts, getRet.getTs());
        assertEquals(i, getRet.getDoubleValue(), 0.0);
    }
    // byte[]-key variant of the same flow.
    for (int i = 0; i < 1; i++) {
        long val = i;
        long ts = startTs + i*1;
        byte[] tsStr = toByteArray(ts);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        // FIX: params was previously built here but never passed, leaving the
        // attribute setup dead code; pass it to mirror the String-key loop.
        String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        ExtsDataPointResult getRet = tairTs.extsget(randomPKeyBinary, bSkey);
        assertEquals((long)ts, getRet.getTs());
        assertEquals(i, getRet.getDoubleValue(), 0.0);
    }
}
/**
 * Verifies EXTS.P.QUERY label filtering for both API variants. Two
 * skeys are written: the first labeled {label1=1, label2=2}, the second
 * {label1=1, label3=3}. Queries must return exactly the skeys whose
 * labels match every filter in the list (AND semantics), and an empty
 * result for a filter that matches nothing.
 */
@Test
public void extsqueryTest() throws Exception {
// String-key variant. NOTE(review): loop body runs exactly once (i < 1).
for (int i = 0; i < 1; i++) {
long val = i;
long ts = startTs + i*1;
String tsStr = String.valueOf(ts);
// First skey: label1=1, label2=2.
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
labels.add("label2");
labels.add("2");
params.labels(labels);
// Second skey: label1=1, label3=3 (shares label1 with the first).
ExtsAttributesParams params2 = new ExtsAttributesParams();
params2.dataEt(1000000000);
params2.chunkSize(1024);
params2.uncompressed();
ArrayList<String> labels2 = new ArrayList<String>();
labels2.add("label1");
labels2.add("1");
labels2.add("label3");
labels2.add("3");
params2.labels(labels2);
String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
Assert.assertEquals("OK", addRet);
addRet = tairTs.extsadd(randomPkey, randomSkey2, tsStr, val, params2);
Assert.assertEquals("OK", addRet);
ExtsFilter<String> filter1 = new ExtsFilter<String>("label1=1");
ExtsFilter<String> filter2 = new ExtsFilter<String>("label2=2");
ExtsFilter<String> filter3 = new ExtsFilter<String>("label3=3");
// filter4 matches neither skey (label2 is never 3).
ExtsFilter<String> filter4 = new ExtsFilter<String>("label2=3");
// label1=1 AND label2=2 -> only the first skey.
ArrayList<ExtsFilter<String>> filterList = new ArrayList<ExtsFilter<String>>();
filterList.add(filter1);
filterList.add(filter2);
List<String> queryRet = tairTs.extsquery(randomPkey, filterList);
assertEquals(1, queryRet.size());
assertEquals(randomSkey, queryRet.get(0));
// label1=1 AND label3=3 -> only the second skey.
ArrayList<ExtsFilter<String>> filterList2 = new ArrayList<ExtsFilter<String>>();
filterList2.add(filter1);
filterList2.add(filter3);
queryRet = tairTs.extsquery(randomPkey, filterList2);
assertEquals(1, queryRet.size());
assertEquals(randomSkey2, queryRet.get(0));
// label1=1 alone -> both skeys.
ArrayList<ExtsFilter<String>> filterList3 = new ArrayList<ExtsFilter<String>>();
filterList3.add(filter1);
queryRet = tairTs.extsquery(randomPkey, filterList3);
assertEquals(2, queryRet.size());
// Non-matching filter -> empty result.
ArrayList<ExtsFilter<String>> filterList4 = new ArrayList<ExtsFilter<String>>();
filterList4.add(filter4);
queryRet = tairTs.extsquery(randomPkey, filterList4);
assertEquals(0, queryRet.size());
}
// byte[]-key variant of the same flow.
for (int i = 0; i < 1; i++) {
long val = i;
long ts = startTs + i*1;
byte[] tsStr = toByteArray(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
labels.add("label2");
labels.add("2");
params.labels(labels);
ExtsAttributesParams params2 = new ExtsAttributesParams();
params2.dataEt(1000000000);
params2.chunkSize(1024);
params2.uncompressed();
ArrayList<String> labels2 = new ArrayList<String>();
labels2.add("label1");
labels2.add("1");
labels2.add("label3");
labels2.add("3");
params2.labels(labels2);
String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr, val, params);
Assert.assertEquals("OK", addRet);
addRet = tairTs.extsadd(randomPKeyBinary, bSkey2, tsStr, val, params2);
Assert.assertEquals("OK", addRet);
ExtsFilter<byte[]> filter1 = new ExtsFilter<byte[]>("label1=1".getBytes());
ExtsFilter<byte[]> filter2 = new ExtsFilter<byte[]>("label2=2".getBytes());
ExtsFilter<byte[]> filter3 = new ExtsFilter<byte[]>("label3=3".getBytes());
ExtsFilter<byte[]> filter4 = new ExtsFilter<byte[]>("label2=3".getBytes());
ArrayList<ExtsFilter<byte[]>> filterList = new ArrayList<ExtsFilter<byte[]>>();
filterList.add(filter1);
filterList.add(filter2);
List<byte[]> queryRet = tairTs.extsquery(randomPKeyBinary, filterList);
assertEquals(1, queryRet.size());
// Compare byte[] results by String value (byte[].equals is identity).
assertEquals(new String(bSkey), new String(queryRet.get(0)));
ArrayList<ExtsFilter<byte[]>> filterList2 = new ArrayList<ExtsFilter<byte[]>>();
filterList2.add(filter1);
filterList2.add(filter3);
queryRet = tairTs.extsquery(randomPKeyBinary, filterList2);
assertEquals(1, queryRet.size());
assertEquals(new String(bSkey2), new String(queryRet.get(0)));
ArrayList<ExtsFilter<byte[]>> filterList3 = new ArrayList<ExtsFilter<byte[]>>();
filterList3.add(filter1);
queryRet = tairTs.extsquery(randomPKeyBinary, filterList3);
assertEquals(2, queryRet.size());
ArrayList<ExtsFilter<byte[]>> filterList4 = new ArrayList<ExtsFilter<byte[]>>();
filterList4.add(filter4);
queryRet = tairTs.extsquery(randomPKeyBinary, filterList4);
assertEquals(0, queryRet.size());
}
}
/**
 * Verifies EXTS.S.RANGE over a single skey for both API variants:
 * writes num points spaced 1s apart, reads them back with an AVG
 * aggregation bucketed at 1000ms (so each point lands in its own
 * bucket and comes back unchanged), and finally checks that the
 * REVERSE flag returns the same points in descending-ts order.
 */
@Test
public void extsrangeTest() throws Exception {
int num = 3;
String startTsStr = String.valueOf(startTs);
String endTsStr = String.valueOf(endTs);
// Seed num points at startTs, startTs+1000, ... with values 0..num-1.
for (int i = 0; i < num; i++) {
double val = i;
long ts = startTs + i*1000;
String tsStr = String.valueOf(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
labels.add("label2");
labels.add("2");
params.labels(labels);
String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
Assert.assertEquals("OK", addRet);
}
// AVG over 1000ms buckets: one bucket per point, values unchanged.
ExtsAggregationParams paramsAgg = new ExtsAggregationParams();
paramsAgg.maxCountSize(10);
paramsAgg.aggAvg(1000);
ExtsSkeyResult rangeByteRet = tairTs.extsrange(randomPkey, randomSkey, startTsStr, endTsStr, paramsAgg);
List<ExtsDataPointResult> dataPointRet = rangeByteRet.getDataPoints();
assertEquals(num, dataPointRet.size());
for (int i = 0; i < num; i++) {
double val = i;
long ts = startTs + i*1000;
assertEquals(ts, dataPointRet.get(i).getTs());
assertEquals(val, dataPointRet.get(i).getDoubleValue(), 0.0);
}
// Repeat the whole flow with the byte[]-key API.
for (int i = 0; i < num; i++) {
double val = i;
long ts = startTs + i*1000;
String tsStr = String.valueOf(ts);
ExtsAttributesParams params = new ExtsAttributesParams();
params.dataEt(1000000000);
params.chunkSize(1024);
params.uncompressed();
ArrayList<String> labels = new ArrayList<String>();
labels.add("label1");
labels.add("1");
labels.add("label2");
labels.add("2");
params.labels(labels);
String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr.getBytes(), val, params);
Assert.assertEquals("OK", addRet);
}
paramsAgg = new ExtsAggregationParams();
paramsAgg.maxCountSize(10);
paramsAgg.aggAvg(1000);
rangeByteRet = tairTs.extsrange(randomPKeyBinary, bSkey, startTsStr.getBytes(), endTsStr.getBytes(), paramsAgg);
dataPointRet = rangeByteRet.getDataPoints();
assertEquals(num, dataPointRet.size());
for (int i = 0; i < num; i++) {
double val = i;
long ts = startTs + i*1000;
assertEquals(ts, dataPointRet.get(i).getTs());
assertEquals(val, dataPointRet.get(i).getDoubleValue(), 0.0);
}
// With REVERSE set, the same points come back in descending-ts order.
paramsAgg.reverse();
rangeByteRet = tairTs.extsrange(randomPKeyBinary, bSkey, startTsStr.getBytes(), endTsStr.getBytes(), paramsAgg);
dataPointRet = rangeByteRet.getDataPoints();
assertEquals(num, dataPointRet.size());
for (int i = 0; i < num; i++) {
double val = i;
long ts = startTs + i*1000;
assertEquals(ts, dataPointRet.get(num-1-i).getTs());
assertEquals(val, dataPointRet.get(num-1-i).getDoubleValue(), 0.0);
}
}
/**
 * Verifies EXTS.S.MRANGE with an explicit skey list (String API):
 * writes num points to two skeys, then reads both back in one call and
 * checks skey names, (empty) label lists, and every data point.
 */
@Test
public void extsmrangeKeysTest() throws Exception {
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Seed num points per skey at startTs, startTs+1000, ...
    for (int i = 0; i < num; i++) {
        double val = i;
        long ts = startTs + i*1000;
        String tsStr = String.valueOf(ts);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        addRet = tairTs.extsadd(randomPkey, randomSkey2, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
    }
    // CLEANUP: removed unused locals from the original (labelNum was
    // computed but never read; an ExtsAggregationParams was built but
    // never passed to extsmrange).
    ArrayList<String> keys = new ArrayList<String>();
    keys.add(randomSkey);
    keys.add(randomSkey2);
    List<ExtsSkeyResult> rangeByteRet = tairTs.extsmrange(randomPkey, keys, startTsStr, endTsStr);
    assertEquals(2, rangeByteRet.size());
    // First skey: name, labels (none returned without WITHLABELS), points.
    assertEquals(randomSkey, rangeByteRet.get(0).getSkey());
    List<ExtsLabelResult> labelRet = rangeByteRet.get(0).getLabels();
    assertEquals(0, labelRet.size());
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.get(0).getDataPoints();
    for (int i = 0; i < num; i++) {
        double val = i;
        long ts = startTs + i*1000;
        assertEquals(ts, dataPointRet.get(i).getTs());
        assertEquals(val, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
    // Second skey: same checks.
    assertEquals(randomSkey2, rangeByteRet.get(1).getSkey());
    labelRet = rangeByteRet.get(1).getLabels();
    assertEquals(0, labelRet.size());
    dataPointRet = rangeByteRet.get(1).getDataPoints();
    for (int i = 0; i < num; i++) {
        double val = i;
        long ts = startTs + i*1000;
        assertEquals(ts, dataPointRet.get(i).getTs());
        assertEquals(val, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
/**
 * Verifies EXTS.S.MRANGE with an explicit skey list (byte[] API):
 * writes num points to two skeys, then reads both back in one call and
 * checks skey names, (empty) label lists, and every data point.
 */
@Test
public void extsmrangeKeysByteTest() throws Exception {
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Seed num points per skey at startTs, startTs+1000, ...
    for (int i = 0; i < num; i++) {
        double val = i;
        long ts = startTs + i*1000;
        String tsStr = String.valueOf(ts);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr.getBytes(), val, params);
        // BUGFIX: the first add's reply was previously overwritten without
        // being checked; assert it like the sibling String-key test does.
        Assert.assertEquals("OK", addRet);
        addRet = tairTs.extsadd(randomPKeyBinary, bSkey2, tsStr.getBytes(), val, params);
        Assert.assertEquals("OK", addRet);
    }
    // CLEANUP: removed unused locals from the original (labelNum was
    // computed but never read; an ExtsAggregationParams was built but
    // never passed to extsmrange).
    ArrayList<byte[]> keys = new ArrayList<byte[]>();
    keys.add(bSkey);
    keys.add(bSkey2);
    List<ExtsSkeyResult> rangeByteRet = tairTs.extsmrange(randomPKeyBinary, keys, startTsStr.getBytes(), endTsStr.getBytes());
    assertEquals(2, rangeByteRet.size());
    // First skey: name, labels (none returned without WITHLABELS), points.
    assertEquals(new String(bSkey), rangeByteRet.get(0).getSkey());
    List<ExtsLabelResult> labelRet = rangeByteRet.get(0).getLabels();
    assertEquals(0, labelRet.size());
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.get(0).getDataPoints();
    for (int i = 0; i < num; i++) {
        double val = i;
        long ts = startTs + i*1000;
        assertEquals(ts, dataPointRet.get(i).getTs());
        assertEquals(val, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
    // Second skey: same checks.
    assertEquals(new String(bSkey2), rangeByteRet.get(1).getSkey());
    labelRet = rangeByteRet.get(1).getLabels();
    assertEquals(0, labelRet.size());
    dataPointRet = rangeByteRet.get(1).getDataPoints();
    for (int i = 0; i < num; i++) {
        double val = i;
        long ts = startTs + i*1000;
        assertEquals(ts, dataPointRet.get(i).getTs());
        assertEquals(val, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsmrangeKeysLabelsTest() throws Exception {
    // Number of data points written to each series.
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to two series under the same pkey, each carrying
    // labels label1=1 and label2=2.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        addRet = tairTs.extsadd(randomPkey, randomSkey2, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
    }
    // Query both skeys with WITHLABELS so the labels are returned.
    ExtsAggregationParams paramsAgg = new ExtsAggregationParams();
    paramsAgg.maxCountSize(10);
    paramsAgg.aggAvg(1000);
    paramsAgg.withLabels();
    ArrayList<String> keys = new ArrayList<String>();
    keys.add(randomSkey);
    keys.add(randomSkey2);
    List<ExtsSkeyResult> rangeByteRet = tairTs.extsmrange(randomPkey, keys, startTsStr, endTsStr, paramsAgg);
    assertEquals(2, rangeByteRet.size());
    assertEquals(randomSkey, rangeByteRet.get(0).getSkey());
    List<ExtsLabelResult> labelRet = rangeByteRet.get(0).getLabels();
    assertEquals("label1", labelRet.get(0).getName());
    assertEquals("1", labelRet.get(0).getValue());
    assertEquals("label2", labelRet.get(1).getName());
    assertEquals("2", labelRet.get(1).getValue());
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.get(0).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
    assertEquals(randomSkey2, rangeByteRet.get(1).getSkey());
    labelRet = rangeByteRet.get(1).getLabels();
    assertEquals("label1", labelRet.get(0).getName());
    assertEquals("1", labelRet.get(0).getValue());
    assertEquals("label2", labelRet.get(1).getName());
    assertEquals("2", labelRet.get(1).getValue());
    dataPointRet = rangeByteRet.get(1).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsmrangeSkeysLabelsByteTest() throws Exception {
    // Number of data points written to each series.
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to two binary-keyed series under the same pkey,
    // each carrying labels label1=1 and label2=2.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr.getBytes(), val, params);
        Assert.assertEquals("OK", addRet);
        addRet = tairTs.extsadd(randomPKeyBinary, bSkey2, tsStr.getBytes(), val, params);
        Assert.assertEquals("OK", addRet);
    }
    // Query both skeys with WITHLABELS so the labels are returned.
    ExtsAggregationParams paramsAgg = new ExtsAggregationParams();
    paramsAgg.maxCountSize(10);
    paramsAgg.aggAvg(1000);
    paramsAgg.withLabels();
    ArrayList<byte[]> keys = new ArrayList<byte[]>();
    keys.add(bSkey);
    keys.add(bSkey2);
    List<ExtsSkeyResult> rangeByteRet = tairTs.extsmrange(randomPKeyBinary, keys, startTsStr.getBytes(), endTsStr.getBytes(), paramsAgg);
    assertEquals(2, rangeByteRet.size());
    assertEquals(new String(bSkey), rangeByteRet.get(0).getSkey());
    List<ExtsLabelResult> labelRet = rangeByteRet.get(0).getLabels();
    assertEquals("label1", labelRet.get(0).getName());
    assertEquals("1", labelRet.get(0).getValue());
    assertEquals("label2", labelRet.get(1).getName());
    assertEquals("2", labelRet.get(1).getValue());
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.get(0).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
    assertEquals(new String(bSkey2), rangeByteRet.get(1).getSkey());
    labelRet = rangeByteRet.get(1).getLabels();
    assertEquals("label1", labelRet.get(0).getName());
    assertEquals("1", labelRet.get(0).getValue());
    assertEquals("label2", labelRet.get(1).getName());
    assertEquals("2", labelRet.get(1).getValue());
    dataPointRet = rangeByteRet.get(1).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsmrangeTest() throws Exception {
    // Number of data points written to the series.
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to a single series carrying labels label1=1 and
    // label2=2, so the filter-based query below can match it.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
    }
    // Query by label filters; both filters match the series written above.
    ExtsFilter<String> filter1 = new ExtsFilter<String>("label1=1");
    ExtsFilter<String> filter2 = new ExtsFilter<String>("label2=2");
    ArrayList<ExtsFilter<String>> filterList = new ArrayList<ExtsFilter<String>>();
    filterList.add(filter1);
    filterList.add(filter2);
    List<ExtsSkeyResult> rangeByteRet = tairTs.extsmrange(randomPkey, startTsStr, endTsStr, filterList);
    assertEquals(1, rangeByteRet.size());
    assertEquals(randomSkey, rangeByteRet.get(0).getSkey());
    // Without WITHLABELS the result carries no labels.
    assertEquals(0, rangeByteRet.get(0).getLabels().size());
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.get(0).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsmrangeByteTest() throws Exception {
    // Number of data points written to the series.
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to a single binary-keyed series carrying labels
    // label1=1 and label2=2.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr.getBytes(), val, params);
        Assert.assertEquals("OK", addRet);
    }
    // Query by label filters; both filters match the series written above.
    ExtsFilter<byte[]> filter1 = new ExtsFilter<byte[]>("label1=1".getBytes());
    ExtsFilter<byte[]> filter2 = new ExtsFilter<byte[]>("label2=2".getBytes());
    ArrayList<ExtsFilter<byte[]>> filterList = new ArrayList<ExtsFilter<byte[]>>();
    filterList.add(filter1);
    filterList.add(filter2);
    List<ExtsSkeyResult> rangeByteRet = tairTs.extsmrange(randomPKeyBinary, startTsStr.getBytes(), endTsStr.getBytes(), filterList);
    assertEquals(1, rangeByteRet.size());
    assertEquals(new String(bSkey), rangeByteRet.get(0).getSkey());
    // Without WITHLABELS the result carries no labels.
    assertEquals(0, rangeByteRet.get(0).getLabels().size());
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.get(0).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsmrangeLabelsTest() throws Exception {
    // Number of data points written to each series (int: used for reverse
    // indexing below).
    int num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to two series under the same pkey, each carrying
    // labels label1=1 and label2=2.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
        addRet = tairTs.extsadd(randomPkey, randomSkey2, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
    }
    // Filter-based query with WITHLABELS; both series match.
    ExtsAggregationParams paramsAgg = new ExtsAggregationParams();
    paramsAgg.maxCountSize(10);
    paramsAgg.aggAvg(1000);
    paramsAgg.withLabels();
    ExtsFilter<String> filter1 = new ExtsFilter<String>("label1=1");
    ExtsFilter<String> filter2 = new ExtsFilter<String>("label2=2");
    ArrayList<ExtsFilter<String>> filterList = new ArrayList<ExtsFilter<String>>();
    filterList.add(filter1);
    filterList.add(filter2);
    List<ExtsSkeyResult> rangeByteRet = tairTs.extsmrange(randomPkey, startTsStr, endTsStr, paramsAgg, filterList);
    assertEquals(2, rangeByteRet.size());
    // NOTE(review): these two skey assertions use swapped indices
    // (randomSkey at index 1, randomSkey2 at index 0) while the label and
    // data-point checks use straight indices. Both series hold identical
    // labels/data, so the test passes either way -- confirm the intended
    // result ordering of EXTS.S.MRANGE.
    assertEquals(randomSkey, rangeByteRet.get(1).getSkey());
    List<ExtsLabelResult> labelRet = rangeByteRet.get(0).getLabels();
    assertEquals("label1", labelRet.get(0).getName());
    assertEquals("1", labelRet.get(0).getValue());
    assertEquals("label2", labelRet.get(1).getName());
    assertEquals("2", labelRet.get(1).getValue());
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.get(0).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000L, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
    assertEquals(randomSkey2, rangeByteRet.get(0).getSkey());
    labelRet = rangeByteRet.get(1).getLabels();
    assertEquals("label1", labelRet.get(0).getName());
    assertEquals("1", labelRet.get(0).getValue());
    assertEquals("label2", labelRet.get(1).getName());
    assertEquals("2", labelRet.get(1).getValue());
    dataPointRet = rangeByteRet.get(1).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000L, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
    // Same query with REV: data points come back newest-first.
    paramsAgg.reverse();
    rangeByteRet = tairTs.extsmrange(randomPkey, startTsStr, endTsStr, paramsAgg, filterList);
    assertEquals(2, rangeByteRet.size());
    assertEquals(randomSkey, rangeByteRet.get(1).getSkey());
    labelRet = rangeByteRet.get(0).getLabels();
    assertEquals("label1", labelRet.get(0).getName());
    assertEquals("1", labelRet.get(0).getValue());
    assertEquals("label2", labelRet.get(1).getName());
    assertEquals("2", labelRet.get(1).getValue());
    dataPointRet = rangeByteRet.get(0).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000L, dataPointRet.get(num - 1 - i).getTs());
        assertEquals((double) i, dataPointRet.get(num - 1 - i).getDoubleValue(), 0.0);
    }
    assertEquals(randomSkey2, rangeByteRet.get(0).getSkey());
    labelRet = rangeByteRet.get(1).getLabels();
    assertEquals("label1", labelRet.get(0).getName());
    assertEquals("1", labelRet.get(0).getValue());
    assertEquals("label2", labelRet.get(1).getName());
    assertEquals("2", labelRet.get(1).getValue());
    dataPointRet = rangeByteRet.get(1).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000L, dataPointRet.get(num - 1 - i).getTs());
        assertEquals((double) i, dataPointRet.get(num - 1 - i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsmrangeLabelsByteTest() throws Exception {
    // Number of data points written to the series.
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to a single binary-keyed series carrying labels
    // label1=1 and label2=2.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr.getBytes(), val, params);
        Assert.assertEquals("OK", addRet);
    }
    // Filter-based query with WITHLABELS so the labels are returned.
    ExtsAggregationParams paramsAgg = new ExtsAggregationParams();
    paramsAgg.maxCountSize(10);
    paramsAgg.aggAvg(1000);
    paramsAgg.withLabels();
    ExtsFilter<byte[]> filter1 = new ExtsFilter<byte[]>("label1=1".getBytes());
    ExtsFilter<byte[]> filter2 = new ExtsFilter<byte[]>("label2=2".getBytes());
    ArrayList<ExtsFilter<byte[]>> filterList = new ArrayList<ExtsFilter<byte[]>>();
    filterList.add(filter1);
    filterList.add(filter2);
    List<ExtsSkeyResult> rangeByteRet = tairTs.extsmrange(randomPKeyBinary, startTsStr.getBytes(), endTsStr.getBytes(), paramsAgg, filterList);
    assertEquals(1, rangeByteRet.size());
    assertEquals(new String(bSkey), rangeByteRet.get(0).getSkey());
    List<ExtsLabelResult> labelRet = rangeByteRet.get(0).getLabels();
    assertEquals("label1", labelRet.get(0).getName());
    assertEquals("1", labelRet.get(0).getValue());
    assertEquals("label2", labelRet.get(1).getName());
    assertEquals("2", labelRet.get(1).getValue());
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.get(0).getDataPoints();
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsprangeTest() throws Exception {
    // Number of data points written to the series.
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to a single series carrying labels label1=1 and
    // label2=2 so the filter-based EXTS.P.RANGE below matches it.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
    }
    ExtsFilter<String> filter1 = new ExtsFilter<String>("label1=1");
    ExtsFilter<String> filter2 = new ExtsFilter<String>("label2=2");
    ArrayList<ExtsFilter<String>> filterList = new ArrayList<ExtsFilter<String>>();
    filterList.add(filter1);
    filterList.add(filter2);
    // Sum-aggregate over 1000ms buckets; one series matches, so each
    // bucket holds a single original value.
    ExtsSkeyResult rangeByteRet = tairTs.extsprange(randomPkey, startTsStr, endTsStr, "sum", 1000, filterList);
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.getDataPoints();
    assertEquals(num, dataPointRet.size());
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsprangeByteTest() throws Exception {
    // Number of data points written to the series.
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to a single binary-keyed series carrying labels
    // label1=1 and label2=2.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr.getBytes(), val, params);
        Assert.assertEquals("OK", addRet);
    }
    ExtsFilter<byte[]> filter1 = new ExtsFilter<byte[]>("label1=1".getBytes());
    ExtsFilter<byte[]> filter2 = new ExtsFilter<byte[]>("label2=2".getBytes());
    ArrayList<ExtsFilter<byte[]>> filterList = new ArrayList<ExtsFilter<byte[]>>();
    filterList.add(filter1);
    filterList.add(filter2);
    // Sum-aggregate over 1000ms buckets; one series matches, so each
    // bucket holds a single original value.
    ExtsSkeyResult rangeByteRet = tairTs.extsprange(randomPKeyBinary, startTsStr.getBytes(), endTsStr.getBytes(), "sum".getBytes(), 1000, filterList);
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.getDataPoints();
    assertEquals(num, dataPointRet.size());
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsprangeAggregationSkeyTest() throws Exception {
    // NOTE(review): this test is currently identical to extsprangeTest
    // (no aggregation params are actually passed) -- consider passing an
    // ExtsAggregationParams here or removing the duplicate.
    // Number of data points written to the series.
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to a single series carrying labels label1=1 and
    // label2=2.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
    }
    ExtsFilter<String> filter1 = new ExtsFilter<String>("label1=1");
    ExtsFilter<String> filter2 = new ExtsFilter<String>("label2=2");
    ArrayList<ExtsFilter<String>> filterList = new ArrayList<ExtsFilter<String>>();
    filterList.add(filter1);
    filterList.add(filter2);
    ExtsSkeyResult rangeByteRet = tairTs.extsprange(randomPkey, startTsStr, endTsStr, "sum", 1000, filterList);
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.getDataPoints();
    assertEquals(num, dataPointRet.size());
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsprangeAggregationSkeyRevTest() throws Exception {
    // Number of data points written to the series (int: used for reverse
    // indexing below).
    int num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to a single series carrying labels label1=1 and
    // label2=2.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPkey, randomSkey, tsStr, val, params);
        Assert.assertEquals("OK", addRet);
    }
    ExtsFilter<String> filter1 = new ExtsFilter<String>("label1=1");
    ExtsFilter<String> filter2 = new ExtsFilter<String>("label2=2");
    ArrayList<ExtsFilter<String>> filterList = new ArrayList<ExtsFilter<String>>();
    filterList.add(filter1);
    filterList.add(filter2);
    // Query with REV so data points come back newest-first.
    ExtsAggregationParams params = new ExtsAggregationParams();
    params.reverse();
    ExtsSkeyResult rangeByteRet = tairTs.extsprange(randomPkey, startTsStr, endTsStr, "sum", 1000, params, filterList);
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.getDataPoints();
    assertEquals(num, dataPointRet.size());
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000L, dataPointRet.get(num - 1 - i).getTs());
        assertEquals((double) i, dataPointRet.get(num - 1 - i).getDoubleValue(), 0.0);
    }
}
@Test
public void extsprangeAggregationSkeyByteTest() throws Exception {
    // Number of data points written to the series.
    long num = 3;
    String startTsStr = String.valueOf(startTs);
    String endTsStr = String.valueOf(endTs);
    // Write `num` points to a single binary-keyed series carrying labels
    // label1=1 and label2=2.
    for (int i = 0; i < num; i++) {
        double val = i;
        String tsStr = String.valueOf(startTs + i * 1000);
        ExtsAttributesParams params = new ExtsAttributesParams();
        params.dataEt(1000000000);
        params.chunkSize(1024);
        params.uncompressed();
        ArrayList<String> labels = new ArrayList<String>();
        labels.add("label1");
        labels.add("1");
        labels.add("label2");
        labels.add("2");
        params.labels(labels);
        String addRet = tairTs.extsadd(randomPKeyBinary, bSkey, tsStr.getBytes(), val, params);
        Assert.assertEquals("OK", addRet);
    }
    ExtsFilter<byte[]> filter1 = new ExtsFilter<byte[]>("label1=1".getBytes());
    ExtsFilter<byte[]> filter2 = new ExtsFilter<byte[]>("label2=2".getBytes());
    ArrayList<ExtsFilter<byte[]>> filterList = new ArrayList<ExtsFilter<byte[]>>();
    filterList.add(filter1);
    filterList.add(filter2);
    ExtsSkeyResult rangeByteRet = tairTs.extsprange(randomPKeyBinary, startTsStr.getBytes(), endTsStr.getBytes(), "sum".getBytes(), 1000, filterList);
    List<ExtsDataPointResult> dataPointRet = rangeByteRet.getDataPoints();
    assertEquals(num, dataPointRet.size());
    for (int i = 0; i < num; i++) {
        assertEquals(startTs + i * 1000, dataPointRet.get(i).getTs());
        assertEquals((double) i, dataPointRet.get(i).getDoubleValue(), 0.0);
    }
}
}
|
armange/j-commons-thread
|
commons-reflection/src/test/java/br/com/armange/commons/reflection/stream/artifact/AbstractReflectionStreamBeanArtifact.java
|
<filename>commons-reflection/src/test/java/br/com/armange/commons/reflection/stream/artifact/AbstractReflectionStreamBeanArtifact.java
/*
* Copyright [2019] [<NAME>]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* */
package br.com.armange.commons.reflection.stream.artifact;
/**
 * Reflection-stream test artifact. Declares fields, constructors and
 * methods with a mix of visibilities so tests can verify which members the
 * reflection streams discover. (Despite the name, the class is not
 * declared {@code abstract}.)
 */
@AnnotationTestTwoArtifact
public class AbstractReflectionStreamBeanArtifact {

    @SuppressWarnings("unused")
    private String nestedField1;

    protected Integer nestedField2;

    private long nestedField3;

    public long nestedField4;

    // Non-public constructor: exercises constructor-visibility handling.
    protected AbstractReflectionStreamBeanArtifact() {}

    // Parameterized constructor; the argument is intentionally ignored.
    public AbstractReflectionStreamBeanArtifact(final long nestedField3) {}

    public long getNestedField3() {
        return nestedField3;
    }

    public void setNestedField3(final long nestedField3) {
        this.nestedField3 = nestedField3;
    }

    // Private method: exercises method-visibility handling.
    @SuppressWarnings("unused")
    private void nestedMethod() {}
}
|
a327919006/-boot-sample
|
sample-dal/src/main/java/com/cn/boot/sample/dal/mapper/SysResourceMapper.java
|
<filename>sample-dal/src/main/java/com/cn/boot/sample/dal/mapper/SysResourceMapper.java
package com.cn.boot.sample.dal.mapper;
import com.cn.boot.sample.api.model.dto.system.SysResourceDTO;
import com.cn.boot.sample.api.model.po.SysResource;
import org.apache.ibatis.annotations.Param;
import tk.mybatis.mapper.common.Mapper;
import java.util.List;
/**
 * MyBatis mapper for system resource records.
 *
 * @author <NAME>
 */
public interface SysResourceMapper extends Mapper<SysResource> {

    /**
     * <p>Returns a page of resources matching the given query conditions.</p>
     *
     * @param sysResourceDTO the query conditions
     */
    List<SysResourceDTO> selectByConditionPage(@Param("resdto") SysResourceDTO sysResourceDTO);

    /**
     * <p>Returns all resources matching the given query conditions.</p>
     *
     * @param sysResourceDTO the query conditions
     */
    List<SysResourceDTO> selectByConditionAll(SysResourceDTO sysResourceDTO);

    /**
     * <p>Counts the resource records matching the given query conditions.</p>
     *
     * @param sysResourceDTO the query conditions
     */
    int countByCondition(SysResourceDTO sysResourceDTO);

    /**
     * <p>Deletes the resource records with the given primary keys.</p>
     *
     * @param sysResourceIds the resource primary keys
     * @return presumably the number of affected rows -- confirm against the mapper XML
     */
    int deleteByPrimaryKeys(List<String> sysResourceIds);

    /**
     * <p>Returns the menu resources of the given user.</p>
     *
     * @param sysUserId the unique user identifier
     * @return {@link List<SysResourceDTO>} the user's menu list
     */
    List<SysResourceDTO> selectMenuByUserId(String sysUserId);

    /**
     * <p>Returns the resources of the given user.</p>
     *
     * @param sysUserId the unique user identifier
     * @return {@link List<SysResource>} the user's resource list
     */
    List<SysResource> selectByUserId(String sysUserId);

    /**
     * <p>Returns the resources of the given type.</p>
     *
     * @param type the resource type
     * @return {@link List<SysResourceDTO>} the resource list
     */
    List<SysResourceDTO> selectByType(Byte type);

    /**
     * <p>Returns the resources with the given name.</p>
     *
     * @param name the resource name
     * @return the resource list
     */
    List<SysResourceDTO> selectByName(String name);

    // Presumably the IMS-user analogue of selectByUserId -- TODO confirm
    // against the mapper XML.
    List<SysResource> selectByImsUserId(String imsUserId);

    // Presumably the IMS-user analogue of selectMenuByUserId -- TODO
    // confirm against the mapper XML.
    List<SysResourceDTO> selectMenuByImsUserId(String imsUserId);
}
|
hyy1115/react-framework-cli
|
src/pages/string/Rabinkarp/Rabinkarp.js
|
<filename>src/pages/string/Rabinkarp/Rabinkarp.js
import React from 'react'
class Rabinkarp extends React.Component {
render() {
return (
<div>Rabinkarp</div>
)
}
}
export default Rabinkarp
|
jerv13/folio-rest
|
html/public/jfolio/_js/alerts.js
|
<reponame>jerv13/folio-rest
// Alert service + display directive for the jfolio app.
angular.module('jfolio.alert', ['jfolio.exception'])

    .factory('Alerts', ['Exception', function(Exception) {

        // Constructor for a per-scope alert manager. `scope` is used to
        // $apply() the timer-driven clearing back into the digest cycle.
        var Alerts = function(scope) {

            var self = this;

            // How long (ms) thrown alerts stay visible; 0 or less disables
            // the auto-clear timer.
            self.displayTime = 4000;
            self.alerts = [];
            self.thrown = [];
            // Pending auto-clear timer handle.
            // Fix: this was the no-op statement `self.timeout;`, which
            // never created the property; initialise it explicitly.
            self.timeout = null;

            // Normalises (exception, alertType) into an Exception with its
            // `type` set. NOTE(review): mutates the exception passed in.
            self.buildException = function(exception, alertType) {
                if (typeof(exception) !== 'object') {
                    exception = new Exception();
                }
                if (typeof(alertType) !== 'string') {
                    alertType = 'info';
                }
                exception.type = alertType;
                return exception;
            };

            // Replaces the currently thrown alerts with a single new one
            // and (re)starts the auto-clear timer.
            self.thrwNew = function(exception, type) {
                self.clearThrown();
                self.thrown.push(self.buildException(exception, type));
                self.setThrownTimout();
            };

            // Queues an alert without displaying it yet.
            self.add = function(exception, type) {
                self.alerts.push(self.buildException(exception, type));
            };

            // Displays all queued alerts; when the timer fires they are
            // cleared together with the queue.
            self.thrw = function() {
                self.thrown = self.alerts;
                self.setThrownTimout(self.clearAlerts);
            };

            // (Re)starts the auto-clear timer; `onClear` runs after the
            // thrown list has been emptied.
            self.setThrownTimout = function(onClear) {
                if (self.displayTime > 0) {
                    if (self.timeout) {
                        clearTimeout(self.timeout);
                    }
                    self.timeout = window.setTimeout(function() {
                        scope.$apply(function() {
                            self.clearThrown();
                            if (typeof(onClear) === 'function') {
                                onClear();
                            }
                        });
                    }, self.displayTime);
                }
            };

            // Debug console.log statements removed from the clear helpers.
            self.clearThrown = function() {
                self.thrown = [];
            };

            self.clearAlerts = function() {
                self.alerts = [];
            };

            // Cancels any pending timer and empties both lists.
            self.clearAll = function() {
                if (self.timeout) {
                    clearTimeout(self.timeout);
                }
                self.clearThrown();
                self.clearAlerts();
            };
        };

        return Alerts;
    }])

    .directive('alertsInclude', ['resumeDataService', function(resumeDataService) {
        // NOTE(review): `resumeDataService` is injected but unused --
        // confirm whether the dependency can be dropped. The empty link
        // function (debug logging only) was removed.
        return {
            restrict: 'A',
            template: '<div class="alerts" ng-show="alerts.thrown.length > 0"><div ng-repeat="alert in alerts.thrown"><div class="alert {{alert.type}} number_{{alert.code}}"><div class="icon"><i class="fa fa-exclamation-triangle fa-2x"></i></div><div class="code">{{alert.code}}: </div><div class="message">{{alert.message}}</div><div></div></div></div></div>',
            scope: {
                alerts: '='
            }
        };
    }
]);
|
JamesCao2048/BlizzardData
|
Corpus/birt/5324.java
|
/*******************************************************************************
* Copyright (c) 2004 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.report.model.elements;
import org.eclipse.birt.report.model.api.DesignElementHandle;
import org.eclipse.birt.report.model.api.TemplateParameterDefinitionHandle;
import org.eclipse.birt.report.model.api.activity.NotificationEvent;
import org.eclipse.birt.report.model.api.activity.SemanticException;
import org.eclipse.birt.report.model.api.elements.ReportDesignConstants;
import org.eclipse.birt.report.model.api.util.StringUtil;
import org.eclipse.birt.report.model.core.ContainerSlot;
import org.eclipse.birt.report.model.core.DesignElement;
import org.eclipse.birt.report.model.core.Module;
import org.eclipse.birt.report.model.core.ReferenceableElement;
import org.eclipse.birt.report.model.elements.interfaces.ITemplateParameterDefinitionModel;
/**
* Represents a template parameter definition element. A template parameter
 * definition gives a definition of a template element. It can be referred to by one
* template report item or one template data set. The template parameter
* definition has the following properties:
*
* <ul>
* <li>A required and unique name for this template parameter definition.
* <li>A type for this template parameter definition. It is the the enumeration
* of Table,Grid, Label, Text, ExtendedItem, other kind of report items and
* Dataset.
* <li>A static description message to display.
* </ul>
*
*/
/**
 * Represents a template parameter definition element. Such a definition
 * describes a template element and may be referred to by one template report
 * item or one template data set. It carries:
 *
 * <ul>
 * <li>a required, unique name;
 * <li>an allowed element type (the enumeration of Table, Grid, Label, Text,
 * ExtendedItem, other report items and DataSet);
 * <li>an optional static description, possibly externalized through a
 * resource key.
 * </ul>
 */
public class TemplateParameterDefinition extends ReferenceableElement
		implements ITemplateParameterDefinitionModel {

	/**
	 * Holds the default report item or data set that resides directly on the
	 * template parameter definition (slot storage is created here).
	 */
	public TemplateParameterDefinition() {
		initSlots();
	}

	/**
	 * Constructs the template parameter definition with a required name.
	 *
	 * @param theName
	 *            the required name
	 */
	public TemplateParameterDefinition(String theName) {
		super(theName);
		initSlots();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.birt.report.model.core.DesignElement#apply(org.eclipse.birt.report.model.elements.ElementVisitor)
	 */
	public void apply(ElementVisitor visitor) {
		visitor.visitTemplateParameterDefinition(this);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.birt.report.model.core.DesignElement#getElementName()
	 */
	public String getElementName() {
		return ReportDesignConstants.TEMPLATE_PARAMETER_DEFINITION;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.eclipse.birt.report.model.api.core.IDesignElement#getHandle(org.eclipse.birt.report.model.core.Module)
	 */
	public DesignElementHandle getHandle(Module module) {
		return handle(module);
	}

	/**
	 * Returns an API handle for this element, creating and caching it on
	 * first use.
	 *
	 * @param module
	 *            the module this element belongs to
	 * @return an API handle for this element
	 */
	public TemplateParameterDefinitionHandle handle(Module module) {
		if (handle == null)
			handle = new TemplateParameterDefinitionHandle(module, this);
		return (TemplateParameterDefinitionHandle) handle;
	}

	/**
	 * Returns the slot defined by the given slot ID. Only the default slot
	 * exists on this element.
	 *
	 * @param slot
	 *            the slot ID; must be {@code DEFAULT_SLOT}
	 * @return the retrieved slot
	 */
	public ContainerSlot getSlot(int slot) {
		assert slot == DEFAULT_SLOT;
		return slots[DEFAULT_SLOT];
	}

	/**
	 * Gets the allowed type of the template parameter definition.
	 *
	 * @param module
	 *            the module of this parameter definition
	 * @return the allowed type of the template parameter definition
	 */
	public String getAllowedType(Module module) {
		return getStringProperty(module,
				ITemplateParameterDefinitionModel.ALLOWED_TYPE_PROP);
	}

	/**
	 * Sets the allowed type of the template parameter definition.
	 *
	 * @param type
	 *            the type to set
	 * @throws SemanticException
	 *             if the property is locked
	 */
	public void setAllowedType(String type) throws SemanticException {
		setProperty(ITemplateParameterDefinitionModel.ALLOWED_TYPE_PROP, type);
	}

	/**
	 * Returns the static description for the template parameter definition.
	 *
	 * @param module
	 *            the module of this parameter definition
	 * @return the static description to display
	 */
	public String getDescription(Module module) {
		return getStringProperty(module,
				ITemplateParameterDefinitionModel.DESCRIPTION_PROP);
	}

	/**
	 * Returns the localized description for the template parameter definition.
	 * If a localized message exists for the description resource key, it is
	 * returned; otherwise the static description is used as fallback.
	 *
	 * @param module
	 *            the module of this parameter definition
	 * @return the localized description for the template parameter definition
	 */
	public String getDisplayDescription(Module module) {
		String resourceKey = getStringProperty(module,
				ITemplateParameterDefinitionModel.DESCRIPTION_ID_PROP);
		if (!StringUtil.isBlank(resourceKey)) {
			// Look the key up in the module's message catalog first.
			String localized = module.getMessage(resourceKey);
			if (!StringUtil.isBlank(localized))
				return localized;
		}
		// Fall back to the static (non-localized) description text.
		return getDescription(module);
	}

	/**
	 * Sets the static description of the template parameter definition. If the
	 * description is to be externalized, set the description resource key
	 * separately.
	 *
	 * @param description
	 *            the new description for the template parameter definition
	 * @throws SemanticException
	 *             if the property is locked
	 */
	public void setDescription(String description) throws SemanticException {
		setProperty(ITemplateParameterDefinitionModel.DESCRIPTION_PROP,
				description);
	}

	/**
	 * Returns the resource key of the static description of the template
	 * parameter definition.
	 *
	 * @param module
	 *            the module of this parameter definition
	 * @return the resource key of the static description
	 */
	public String getDescriptionKey(Module module) {
		return getStringProperty(module,
				ITemplateParameterDefinitionModel.DESCRIPTION_ID_PROP);
	}

	/**
	 * Sets the resource key of the static description of the template
	 * parameter definition.
	 *
	 * @param resourceKey
	 *            the resource key of the static description
	 * @throws SemanticException
	 *             if the property is locked
	 */
	public void setDescriptionKey(String resourceKey)
			throws SemanticException {
		setProperty(ITemplateParameterDefinitionModel.DESCRIPTION_ID_PROP,
				resourceKey);
	}

	/**
	 * Gets the default element of this template parameter definition, i.e. the
	 * single element stored in the default slot.
	 *
	 * @return the default element, or {@code null} if the slot is empty
	 */
	public DesignElement getDefaultElement() {
		ContainerSlot defaultSlot = getSlot(ITemplateParameterDefinitionModel.DEFAULT_SLOT);
		if (defaultSlot.getCount() == 0)
			return null;
		// The default slot holds at most one element by design.
		assert defaultSlot.getCount() == 1;
		return (DesignElement) defaultSlot.getContent(0);
	}
}
|
chandlerprall/kibana
|
src/core_plugins/metrics/public/components/lib/sort_keyhandler.js
|
import { keyCodes } from '@elastic/eui';
/**
 * Creates a keyboard event handler that translates arrow-key presses into
 * calls of the given callback: UP invokes `callback('up')`, DOWN invokes
 * `callback('down')`. The browser default action is suppressed for handled
 * keys; every other key is ignored.
 */
export function createUpDownHandler(callback) {
  return (event) => {
    switch (event.keyCode) {
      case keyCodes.UP:
        event.preventDefault();
        callback('up');
        break;
      case keyCodes.DOWN:
        event.preventDefault();
        callback('down');
        break;
      default:
        // Not an up/down arrow key - nothing to do.
        break;
    }
  };
}
|
ynsingh/brihaspati2
|
WEB-INF/src/java/org/iitk/brihaspati/modules/actions/AddDepartment.java
|
<reponame>ynsingh/brihaspati2<gh_stars>0
package org.iitk.brihaspati.modules.actions;
/*
* @(#)AddDepartment.java
*
* Copyright (c) 2013 ETRG,IIT Kanpur.
* All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistribution in binary form must reproducuce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL ETRG OR ITS CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL,SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Contributors: Members of ETRG, I.I.T. Kanpur
*/
/**
* @author <a href="<EMAIL>"><NAME></a>
* @author <a href="<EMAIL>"><NAME></a>
* @modify date: 31-05-2013
*/
import java.util.List;
import java.util.Vector;
import java.util.StringTokenizer;
import org.apache.turbine.util.RunData;
import org.apache.velocity.context.Context;
import org.apache.turbine.util.parser.ParameterParser;
import org.iitk.brihaspati.modules.utils.MultilingualUtil;
import org.iitk.brihaspati.modules.utils.ErrorDumpUtil;
import org.iitk.brihaspati.om.DepartmentPeer;
import org.iitk.brihaspati.om.SchoolPeer;
import org.apache.torque.util.Criteria;
import org.iitk.brihaspati.om.DeptSchoolUniv;
import org.iitk.brihaspati.om.DeptSchoolUnivPeer;
import org.iitk.brihaspati.om.Department;
/**
 * Turbine action that manages Department and School/Center records of an
 * Institute/University: registration ({@link #doInsert}), deletion and
 * unmapping ({@link #doDelete}) and mapping of departments/schools to an
 * institute ({@link #doMap}).
 */
public class AddDepartment extends SecureAction
{
	/** Name of the language resource file, taken from the user session. */
	private String LangFile=null;
	/** Criteria instance reused by the database queries of this action. */
	Criteria crit= new Criteria();

	/**
	 * Tells whether a request parameter value is absent (null or empty).
	 * Replaces the original pattern {@code value.equals("") || value.equals(null)},
	 * which could never detect null: {@code equals(null)} is always false, and a
	 * genuinely null value would have thrown a NullPointerException on the first
	 * test before the second was ever reached.
	 *
	 * @param value the parameter value to check
	 * @return true when the value is null or the empty string
	 */
	private static boolean isMissing(String value)
	{
		return value == null || value.equals("");
	}

	/**
	 * Registers a new Department or School/Center in the system. The "mode"
	 * request parameter selects the branch: "dept" inserts a department and
	 * immediately maps it to the institute; "school" inserts a school/center.
	 *
	 * @param data RunData instance
	 * @param context Context instance
	 * @exception Exception, a generic exception
	 */
	public void doInsert(RunData data, Context context) throws Exception
	{
		// Resolve the language file chosen by the current user session.
		LangFile=(String)data.getUser().getTemp("LangFile");
		ParameterParser pp=data.getParameters();
		String instituteId=pp.getString("instituteId","");
		if(isMissing(instituteId))
		{
			// Fall back to the institute stored in the session.
			instituteId=(data.getUser().getTemp("Institute_id")).toString();
		}
		// Validates that the institute id is numeric; throws
		// NumberFormatException otherwise (behavior kept from the original,
		// which parsed into an otherwise unused local).
		Integer.parseInt(instituteId);
		String mode=pp.getString("mode","");
		// ---- Add Department ----
		if(mode.equals("dept"))
		{
			String deptname=pp.getString("deptname","");
			String deptcode=pp.getString("dcode","");
			String deptnick=pp.getString("dnick","");
			String fcount=pp.getString("fcount","");
			// Floor count defaults to 0 when the field was left empty.
			int deptfloor;
			if(fcount.equals("")){
				deptfloor=0;
			}
			else{
				deptfloor=Integer.parseInt(fcount);
			}
			try
			{
				// Register the new Department with the Institute.
				crit= new Criteria();
				crit.add(DepartmentPeer.NAME,deptname);
				crit.add(DepartmentPeer.DEPARTMENT_CODE,deptcode);
				crit.add(DepartmentPeer.NICK_NAME,deptnick);
				crit.add(DepartmentPeer.FLOORS_COUNT,deptfloor);
				crit.add(DepartmentPeer.INSTITUTE_ID,instituteId);
				DepartmentPeer.doInsert(crit);
				// Map the new department to the institute automatically
				// (school id is intentionally null at this point).
				crit=new Criteria();
				crit.addGroupByColumn(DepartmentPeer.DEPARTMENT_ID);
				crit.add(DepartmentPeer.DEPARTMENT_CODE,deptcode);
				List deptlist=DepartmentPeer.doSelect(crit);
				for(int j=0;j<deptlist.size();j++)
				{
					Department element=(Department)deptlist.get(j);
					int deptid=element.getDepartmentId();
					String schid=null;
					crit=new Criteria();
					crit.add(DeptSchoolUnivPeer.DEPT_ID,deptid);
					crit.add(DeptSchoolUnivPeer.SCHOOL_ID,schid);
					crit.add(DeptSchoolUnivPeer.UNIVERSITY_ID,instituteId);
					DeptSchoolUnivPeer.doInsert(crit);
				}
				data.setMessage(MultilingualUtil.ConvertedString("brih_deptadd",LangFile));
			}
			catch(Exception e)
			{
				// Most likely a duplicate department; report it to the user.
				data.setMessage(MultilingualUtil.ConvertedString("brih_deptexist",LangFile));
			}
		}
		// ---- Add School/Center ----
		if(mode.equals("school")) {
			String schname=pp.getString("schname","");
			String schcode=pp.getString("schcode","");
			String schnick=pp.getString("schnick","");
			String schdesc=pp.getString("schdesc","");
			try
			{
				// Register the new School/Center with the Institute.
				crit= new Criteria();
				crit.add(SchoolPeer.NAME,schname);
				crit.add(SchoolPeer.SCHOOL_CODE,schcode);
				crit.add(SchoolPeer.NICK_NAME,schnick);
				crit.add(SchoolPeer.DESCRIPTION,schdesc);
				SchoolPeer.doInsert(crit);
				data.setMessage(MultilingualUtil.ConvertedString("brih_schadd",LangFile));
			}
			catch(Exception e)
			{
				// Most likely a duplicate school/center; report it to the user.
				data.setMessage(MultilingualUtil.ConvertedString("brih_schexist",LangFile));
			}
		}
	}

	/**
	 * Deletes or unmaps an existing Department/School from the
	 * Institute/University. The "mode" request parameter selects the branch:
	 * "deptdel" deletes a department (and its mappings), "schdel" deletes a
	 * school/center, "deptunmap"/"schunmap" only remove the mapping rows.
	 *
	 * @param data RunData instance
	 * @param context Context instance
	 * @exception Exception, a generic exception
	 */
	public void doDelete(RunData data, Context context) throws Exception
	{
		// Resolve the language file chosen by the current user session.
		LangFile=(String)data.getUser().getTemp("LangFile");
		ParameterParser pp=data.getParameters();
		String mode=pp.getString("mode","");
		// ---- Delete Department ----
		if(mode.equals("deptdel"))
		{
			String deptid=pp.getString("deptid","");
			try
			{
				// Delete the department row itself ...
				crit= new Criteria();
				crit.add(DepartmentPeer.DEPARTMENT_ID,deptid);
				DepartmentPeer.doDelete(crit);
				// ... then remove its institute/school mappings.
				crit= new Criteria();
				crit.add(DeptSchoolUnivPeer.DEPT_ID,deptid);
				DeptSchoolUnivPeer.doDelete(crit);
				data.setMessage(MultilingualUtil.ConvertedString("brih_deptdel",LangFile));
				context.put("mode","lstdept");
			}
			catch(Exception e)
			{
				ErrorDumpUtil.ErrorLog("Error In Department Deletion"+e.getMessage());
			}
		}
		// ---- Delete School/Center ----
		if(mode.equals("schdel"))
		{
			String schid=pp.getString("schid","");
			try
			{
				crit= new Criteria();
				crit.add(SchoolPeer.SCHOOL_ID,schid);
				SchoolPeer.doDelete(crit);
				data.setMessage(MultilingualUtil.ConvertedString("brih_schdel",LangFile));
			}
			catch(Exception e)
			{
				ErrorDumpUtil.ErrorLog("Error In school/center Deletion !!! "+e.getMessage());
			}
		}
		// ---- Unmap Department from Institute ----
		if(mode.equals("deptunmap"))
		{
			String instituteId=pp.getString("instituteId","");
			if(isMissing(instituteId))
			{
				instituteId=(data.getUser().getTemp("Institute_id")).toString();
			}
			String deptid=pp.getString("deptid","");
			try
			{
				// Only the mapping row is removed; the department itself stays.
				crit= new Criteria();
				crit.add(DeptSchoolUnivPeer.DEPT_ID,deptid);
				crit.add(DeptSchoolUnivPeer.UNIVERSITY_ID,instituteId);
				DeptSchoolUnivPeer.doDelete(crit);
				data.setMessage(MultilingualUtil.ConvertedString("brih_deptunmap",LangFile));
			}
			catch(Exception e)
			{
				ErrorDumpUtil.ErrorLog("Error In unmam department"+e.getMessage());
			}
		}
		// ---- Unmap School from Institute ----
		if(mode.equals("schunmap"))
		{
			String instituteId=pp.getString("instituteId","");
			if(isMissing(instituteId))
			{
				instituteId=(data.getUser().getTemp("Institute_id")).toString();
			}
			String schid = pp.getString("schid","");
			try
			{
				// Collect all mapping rows for this school and institute,
				// then delete them one by one.
				crit= new Criteria();
				crit.add(DeptSchoolUnivPeer.SCHOOL_ID,schid);
				crit.add(DeptSchoolUnivPeer.UNIVERSITY_ID,instituteId);
				List schlist= DeptSchoolUnivPeer.doSelect(crit);
				for(int j=0;j<schlist.size();j++)
				{
					DeptSchoolUniv element=(DeptSchoolUniv)schlist.get(j);
					String schh_id=element.getSchoolId();
					crit=new Criteria();
					crit.add(DeptSchoolUnivPeer.SCHOOL_ID,schh_id);
					crit.add(DeptSchoolUnivPeer.UNIVERSITY_ID,instituteId);
					DeptSchoolUnivPeer.doDelete(crit);
				}
				data.setMessage(MultilingualUtil.ConvertedString("brih_schunmap",LangFile));
				context.put("mode","schunmap");
			}
			catch(Exception e)
			{
				ErrorDumpUtil.ErrorLog("Error In unmam department"+e.getMessage());
			}
		}
	}

	/**
	 * Maps existing Departments with Schools along with the
	 * Institute/University. The selected ids arrive in the "selectFileNames"
	 * parameter as a single "^"-separated string; each mapping is stored in
	 * the DEPARTMENT_SCHOOL_UNIVERSITY table, which is later used to show
	 * the mapped-department list in the template.
	 *
	 * @param data RunData instance
	 * @param context Context instance
	 * @exception Exception, a generic exception
	 */
	public void doMap(RunData data, Context context) throws Exception
	{
		// Resolve the language file chosen by the current user session.
		LangFile=(String)data.getUser().getTemp("LangFile");
		ParameterParser pp=data.getParameters();
		String mode=pp.getString("mode","");
		String sdept=pp.getString("sdept","");
		String instituteId=pp.getString("instituteId","");
		// Get institute id from the session if it was not posted with the form.
		if(isMissing(instituteId))
		{
			instituteId=(data.getUser().getTemp("Institute_id")).toString();
		}
		// Renamed from "List" in the original, which shadowed the imported
		// java.util.List type.
		String selectedNames=pp.getString("selectFileNames","");
		context.put("selectFile",selectedNames);
		String schid = null;
		String deptid = null;
		try
		{
			// Break the "^"-separated selection string into individual tokens.
			StringTokenizer st=new StringTokenizer(selectedNames,"^");
			Vector v=new Vector();
			while(st.hasMoreTokens())
			{
				v.addElement(st.nextToken());
			}
			// Insert one mapping row per selected Department/School id.
			for(int i=0;i<v.size();i++)
			{
				String temp=(v.elementAt(i).toString()).toUpperCase();
				String tempSplit[]=temp.split(":");
				// Department map: the token is the department id itself.
				if(mode.equals("deptmap"))
				{
					deptid=temp;
				}
				// School/Center map: the token is a "schoolId:deptId" pair.
				if(mode.equals("schoolmap") && (!selectedNames.equals("")))
				{
					schid=tempSplit[0];
					if(!sdept.equals("sdept"))
						deptid=tempSplit[1];
				}
				try
				{
					// Map Department with School for this institute.
					crit=new Criteria();
					crit.add(DeptSchoolUnivPeer.DEPT_ID,deptid);
					crit.add(DeptSchoolUnivPeer.SCHOOL_ID,schid);
					crit.add(DeptSchoolUnivPeer.UNIVERSITY_ID,instituteId);
					DeptSchoolUnivPeer.doInsert(crit);
					if(mode.equals("deptmap"))
					{
						data.setMessage(MultilingualUtil.ConvertedString("brih_deptmap",LangFile));
					}
					else
					{
						data.setMessage(MultilingualUtil.ConvertedString("brih_schmap",LangFile));
					}
				}
				catch(Exception e)
				{
					// The mapping already exists; tell the user instead of failing.
					data.setMessage(MultilingualUtil.ConvertedString("brih_already",LangFile));
				}
			}
		}
		catch(Exception e)
		{
			ErrorDumpUtil.ErrorLog("Error In Department/school maping "+e.getMessage());
		}
	}

	/**
	 * Default action dispatcher: routes to doInsert/doDelete/doMap based on
	 * the "actionName" request parameter.
	 *
	 * @param data RunData
	 * @param context Context
	 */
	public void doPerform(RunData data, Context context) throws Exception
	{
		String action=data.getParameters().getString("actionName","");
		if(action.equals("eventSubmit_doInsert"))
		{
			doInsert(data,context);
		}
		if(action.equals("eventSubmit_doDelete"))
		{
			doDelete(data,context);
		}
		if(action.equals("eventSubmit_doMap"))
		{
			doMap(data,context);
		}
	}
}
|
X-API/PayRunIO.Demo.UI
|
services/payInstructions/PensionPayInstruction.js
|
<filename>services/payInstructions/PensionPayInstruction.js<gh_stars>0
const BaseInstruction = require("./BaseInstruction");
const ApiWrapper = require("../../services/api-wrapper");
let apiWrapper = new ApiWrapper();
module.exports = class PensionPayInstruction extends BaseInstruction {
get name() {
return "Pension";
}
get canInstructionsOverlap() {
return true;
}
async extendViewModel(ctx, vm) {
let extendedViewModel = await super.extendViewModel(ctx, vm);
let pensions = await apiWrapper.getAndExtractLinks(ctx, `Employer/${vm.EmployerId}/Pensions`);
let selectedPenId = "";
if (extendedViewModel.Pension){
let href = extendedViewModel.Pension["@href"];
let parts = href.split("/");
selectedPenId = parts[parts.length - 1];
}
extendedViewModel.Pensions = pensions.map(pension => {
return {
Id: pension.Id,
Name: `${pension.ProviderName} - ${pension.SchemeName}`,
Checked: pension.Id === selectedPenId
};
});
return extendedViewModel;
}
parseForApi(body) {
let employerId = body.EmployerId;
let cleanBody = super.parseForApi(body);
cleanBody.SalarySacrifice = (cleanBody.SalarySacrifice !== undefined && cleanBody.SalarySacrifice !== null && cleanBody.SalarySacrifice.toLowerCase() === "on");
cleanBody.Pension = {
"@href": `/Employer/${employerId}/Pension/${body.Pension}`
};
return {
StartDate: cleanBody.StartDate,
EndDate: cleanBody.EndDate,
Description: cleanBody.Description,
Code: cleanBody.Code,
EmployeeContributionCash: cleanBody.EmployeeContributionCash,
EmployerContributionCash: cleanBody.EmployerContributionCash,
EmployeeContributionPercent: cleanBody.EmployeeContributionPercent,
EmployerContributionPercent: cleanBody.EmployerContributionPercent,
AdditionalVoluntaryContributionCash: cleanBody.AdditionalVoluntaryContributionCash,
AdditionalVoluntaryContributionPercent: cleanBody.AdditionalVoluntaryContributionPercent,
LowerThreshold: cleanBody.LowerThreshold,
UpperThreshold: cleanBody.UpperThreshold,
SalarySacrifice: cleanBody.SalarySacrifice,
TaxationMethod: cleanBody.TaxationMethod,
ProRataMethod: cleanBody.ProRataMethod,
//InstructionType: cleanBody.InstructionType,
Pension: cleanBody.Pension
};
}
};
|
dorba/tint
|
src/writer/spirv/builder_unary_op_expression_test.cc
|
// Copyright 2020 The Tint Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <memory>
#include "gtest/gtest.h"
#include "src/ast/bool_literal.h"
#include "src/ast/float_literal.h"
#include "src/ast/identifier_expression.h"
#include "src/ast/scalar_constructor_expression.h"
#include "src/ast/sint_literal.h"
#include "src/ast/type/bool_type.h"
#include "src/ast/type/f32_type.h"
#include "src/ast/type/i32_type.h"
#include "src/ast/unary_op_expression.h"
#include "src/context.h"
#include "src/type_determiner.h"
#include "src/writer/spirv/builder.h"
#include "src/writer/spirv/spv_dump.h"
namespace tint {
namespace writer {
namespace spirv {
namespace {

// These tests exercise Builder::GenerateUnaryOpExpression: each one builds a
// unary-op AST expression over a scalar constant, runs type determination,
// and checks both the constants/types section and the generated instruction.
using BuilderTest = testing::Test;

// Negating a signed 32-bit integer constant must emit OpSNegate.
TEST_F(BuilderTest, UnaryOp_Negation_Integer) {
  ast::type::I32Type i32;
  ast::UnaryOpExpression expr(
      ast::UnaryOp::kNegation,
      std::make_unique<ast::ScalarConstructorExpression>(
          std::make_unique<ast::SintLiteral>(&i32, 1)));
  Context ctx;
  ast::Module mod;
  TypeDeterminer td(&ctx, &mod);
  ASSERT_TRUE(td.DetermineResultType(&expr)) << td.error();
  Builder b(&mod);
  b.push_function(Function{});
  // The expression's result id is 1; types %2 (i32) and constant %3 follow.
  EXPECT_EQ(b.GenerateUnaryOpExpression(&expr), 1u) << b.error();
  EXPECT_EQ(DumpInstructions(b.types()), R"(%2 = OpTypeInt 32 1
%3 = OpConstant %2 1
)");
  EXPECT_EQ(DumpInstructions(b.functions()[0].instructions()),
            R"(%1 = OpSNegate %2 %3
)");
}

// Negating a 32-bit float constant must emit OpFNegate.
TEST_F(BuilderTest, UnaryOp_Negation_Float) {
  ast::type::F32Type f32;
  ast::UnaryOpExpression expr(
      ast::UnaryOp::kNegation,
      std::make_unique<ast::ScalarConstructorExpression>(
          std::make_unique<ast::FloatLiteral>(&f32, 1)));
  Context ctx;
  ast::Module mod;
  TypeDeterminer td(&ctx, &mod);
  ASSERT_TRUE(td.DetermineResultType(&expr)) << td.error();
  Builder b(&mod);
  b.push_function(Function{});
  EXPECT_EQ(b.GenerateUnaryOpExpression(&expr), 1u) << b.error();
  EXPECT_EQ(DumpInstructions(b.types()), R"(%2 = OpTypeFloat 32
%3 = OpConstant %2 1
)");
  EXPECT_EQ(DumpInstructions(b.functions()[0].instructions()),
            R"(%1 = OpFNegate %2 %3
)");
}

// Logically negating a boolean constant must emit OpLogicalNot.
TEST_F(BuilderTest, UnaryOp_Not) {
  ast::type::BoolType bool_type;
  ast::UnaryOpExpression expr(
      ast::UnaryOp::kNot,
      std::make_unique<ast::ScalarConstructorExpression>(
          std::make_unique<ast::BoolLiteral>(&bool_type, false)));
  Context ctx;
  ast::Module mod;
  TypeDeterminer td(&ctx, &mod);
  ASSERT_TRUE(td.DetermineResultType(&expr)) << td.error();
  Builder b(&mod);
  b.push_function(Function{});
  EXPECT_EQ(b.GenerateUnaryOpExpression(&expr), 1u) << b.error();
  EXPECT_EQ(DumpInstructions(b.types()), R"(%2 = OpTypeBool
%3 = OpConstantFalse %2
)");
  EXPECT_EQ(DumpInstructions(b.functions()[0].instructions()),
            R"(%1 = OpLogicalNot %2 %3
)");
}

}  // namespace
}  // namespace spirv
}  // namespace writer
}  // namespace tint
|
YuriyAM/what-front
|
src/features/groups/edit-group/__tests__/mocks/mock-data.js
|
<reponame>YuriyAM/what-front
// Shared fixtures for the edit-group feature tests.

// Id of the student group under edit.
export const id = 1;

// Values the edit-group form is initialised with.
export const initialValues = {
  name: 'testName',
  startDate: '2021-05-16T00:00:00',
  finishDate: '2022-06-16T00:00:00',
  courseId: 9,
  mentor: '',
  student: ''
};

// Redux slice for the loaded student group (load finished successfully).
export const studentGroupData = {
  data: {
    courseId: 9,
    finishDate: '2022-06-16T00:00:00',
    id: 1,
    mentorsIds: [9],
    name: '122-18-3',
    startDate: '2021-05-16T00:00:00',
    studentIds: [1, 2],
  },
  error: '',
  isLoaded: true,
  isLoading: false
};

// Same group slice, but still in the loading state.
export const studentGroupDataLoading = {
  data: {
    courseId: 9,
    finishDate: '2022-06-16T00:00:00',
    id: 1,
    mentorsIds: [9],
    name: '122-18-3',
    startDate: '2021-05-16T00:00:00',
    studentIds: [1, 2],
  },
  error: '',
  isLoaded: false,
  isLoading: true
};

// Loaded list of students available for the group.
export const studentsData = {
  data: [
    {
      email: "<EMAIL>",
      firstName: "Student",
      id: 4,
      lastName: "Student",
      avatarUrl: null
    },
    {
      email: "<EMAIL>",
      firstName: "Student222",
      id: 5,
      lastName: "Student222",
      avatarUrl: null
    }
  ],
  error: '',
  isLoaded: true,
  isLoading: false
};

// Loaded list of mentors available for the group.
export const mentorsData = {
  data: [
    {
      avatarUrl: null,
      email: '<EMAIL>',
      firstName: 'mentor',
      id: 9,
      lastName: 'mentor'
    },
    {
      avatarUrl: null,
      email: '<EMAIL>',
      firstName: 'Mentor',
      id: 3,
      lastName: 'Mentor'
    }
  ],
  error: '',
  isLoaded: true,
  isLoading: false
};

// Loaded list of courses a group can be attached to.
// NOTE(review): this slice uses `loaded` where the others use `isLoaded` -
// presumably mirroring the real store shape; confirm against the reducer.
export const coursesData = {
  data: [
    {
      id: 1,
      isActive: true,
      name: '123 Testing'
    },
    {
      id: 2,
      isActive: true,
      name: 'Testing 123'
    }
  ],
  error: '',
  isLoading: false,
  loaded: true
};

// Values as submitted by the form in the tests.
export const formValues = {
  groupName: 'testName 1',
  startDate: '2021-02-21',
  finishDate: '2021-12-21'
};
|
Alec-Sobeck/FPS-Game
|
src/utils/textureloader.h
|
<reponame>Alec-Sobeck/FPS-Game
#ifndef TEXTURE_LOADER_H
#define TEXTURE_LOADER_H
#include <string>
#include <memory>
#include "render/texture.h"
// Returns a shared Texture for the given name/path string.
// NOTE(review): presumably loads on first use and may cache/share instances
// per key - confirm against the implementation in the corresponding .cpp.
std::shared_ptr<Texture> getTexture(std::string);
#endif
|
KSN2017/nodejs
|
src/components/PageComponents/SourcePointerPage.js
|
import React, { Component } from 'react'
import SourcePointer from 'components/SourcePointer/SourcePointer'
import { Heading, Text } from 'odeum-primitives'
class SourcePointerPage extends Component {
constructor(props) {
super(props)
this.state = {
value: ''
}
}
onSelectItem = (value) => {
this.setState({ value })
}
getDropdownValue = () => {
return (
<SourcePointer items={items} label={'Components'} selectedValue={this.onSelectItem} />
)
}
render() {
let value = this.getDropdownValue()
return (
<div>
<Heading>Select {this.state.value} demo</Heading>
<Text>{this.state.value === '' ? 'Selected value goes here ...' : 'You selected ' + this.state.value}</Text>
{this.state.value === '' ? value : value}
</div>
)
}
}
export default SourcePointerPage
// Demo entries fed to <SourcePointer/>; `active: false` items are disabled.
const items = [
	{
		label: 'Buttons',
		value: 'buttons',
		href: null,
		icon: '',
		active: true
	},
	{
		label: 'Cards',
		value: 'cards',
		href: null,
		icon: '',
		active: true
	},
	{
		label: 'Dropdowns',
		value: 'dropdowns',
		href: null,
		icon: '',
		active: true
	},
	{
		label: 'Icons',
		value: 'icons',
		href: null,
		icon: '',
		active: true
	},
	{
		label: 'Loaders',
		value: 'loaders',
		href: null,
		icon: '',
		active: true
	},
	{
		label: 'Meters',
		value: 'meters',
		href: null,
		icon: '',
		active: true
	},
	{
		label: 'Tags',
		value: 'tags',
		href: null,
		icon: '',
		active: true
	},
	{
		label: 'Modals',
		value: 'modals',
		href: null,
		icon: '',
		active: false
	}
]
// Example of a grouped item list (flat entries plus two named groups);
// exported for use elsewhere, not consumed on this page.
export const groupedItems = [
	{ value: 'one', label: 'One' },
	{ value: 'two', label: 'Two' },
	{
		type: 'group', name: 'group1', items: [
			{ value: 'three', label: 'Three' },
			{ value: 'four', label: 'Four' }
		]
	},
	{
		type: 'group', name: 'group2', items: [
			{ value: 'five', label: 'Five' },
			{ value: 'six', label: 'Six' }
		]
	}
]
|
leodr/chayns-js
|
src/chayns/calls/updateTapp.js
|
import {chaynsCall} from '../chaynsCall';
import {propTypes} from '../propTypes';
/**
 * Triggers an update/reload of the tapp with the given id via chayns
 * call action 107. Declared as supported from Android app version 5254
 * and iOS version 5204, and not available in web environments.
 *
 * @param {number} tappId - id of the tapp to update (required)
 * @returns the result of the chaynsCall invocation
 */
export function updateTapp(tappId) {
    const call = {
        'action': 107,
        'value': {
            'tappID': tappId
        }
    };
    return chaynsCall({
        'call': call,
        'app': {
            'support': {'android': 5254, 'ios': 5204}
        },
        'web': false,
        'propTypes': {
            'tappID': propTypes.number.isRequired
        }
    });
}
|
bruceasu/jSqlBox
|
core/src/main/java/com/github/drinkjava2/jsqlbox/ActiveRecord.java
|
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by
* applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
* OF ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package com.github.drinkjava2.jsqlbox;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import com.github.drinkjava2.jdialects.ArrayUtils;
import com.github.drinkjava2.jdialects.ClassCacheUtils;
import com.github.drinkjava2.jdialects.TableModelUtils;
import com.github.drinkjava2.jdialects.model.ColumnModel;
import com.github.drinkjava2.jdialects.model.TableModel;
import com.github.drinkjava2.jsqlbox.entitynet.EntityNet;
/**
* Entity class extended from ActiveRecord will have CRUD methods, see below
* difference in jSqlBox to save ActiveRecord entity and normal entity(POJO)
* into database:
*
* <pre>
* ActiveRecord style:
*
* DbContext ctx=new DbContext(dataSource);
* DbContext.setDefaultContext(ctx);
* entity.insert();
*
* or
*
* DbContext ctx=new DbContext(dataSource);
* entity.useContext(ctx);
* entity.insert();
*
* or
* DbContext ctx=new DbContext(dataSource);
* entity.insert(ctx);
*
*
* Data Mapper style:
*
* DbContext ctx=new DbContext(dataSource);
* ctx.eInsert(pojo);
*
* or
*
* DbContext ctx=new DbContext(dataSource);
* DbContext.setDefaultContext(ctx);
* eInsert(pojo); //static import JSQLBOX.eInsert
*
* or
*
* DbContext ctx0=new DbContext(dataSource);
* ctx1.insert(entity, ctx0);
*
* </pre>
*
* @author <NAME>
* @since 1.0.0
*/
@SuppressWarnings("unchecked")
public class ActiveRecord<T> implements TailType, EntityType {
static final ThreadLocal<String[]> forFieldsOrTails = new ThreadLocal<String[]>();
static final ThreadLocal<Boolean> isForfield = new ThreadLocal<Boolean>();
private DbContext ctx;
private Map<String, Object> tailsMap;
protected void miscMethods__________________() {// NOSONAR
}
public DbContext ctx() {
if (ctx != null)
return ctx;
DbException.assureNotNull(DbContext.globalDbContext, DbContext.NO_GLOBAL_SQLBOXCONTEXT_FOUND);
return DbContext.globalDbContext;
}
public TableModel model() {
return DbContextUtils.findEntityOrClassTableModel(this).newCopy();
}
public T useContext(DbContext ctx) {
this.ctx = ctx;
return (T) this;
}
public Map<String, Object> tails() {
if (tailsMap == null)
tailsMap = new TreeMap<String, Object>(String.CASE_INSENSITIVE_ORDER);
return tailsMap;
}
public <V> V getField(String fieldName) {
return (V) ClassCacheUtils.readValueFromBeanField(this, fieldName);
}
public <V> V getTail(String columnName) {
if (tailsMap == null)
return null;
return (V) tailsMap.get(columnName);
}
public T putTail(Object... columAndValues) {
DbException.assureTrue(columAndValues.length % 2 == 0, "Column and values should be paired");
for (int i = 0; i < columAndValues.length / 2; i++)
tails().put((String) columAndValues[i * 2], columAndValues[i * 2 + 1]);
return (T) this;
}
public T putField(Object... fieldAndValues) {
DbException.assureTrue(fieldAndValues.length % 2 == 0, "Field and values should be paired");
for (int i = 0; i < fieldAndValues.length / 2; i++)
ClassCacheUtils.writeValueToBeanField(this, (String) fieldAndValues[i * 2], fieldAndValues[i * 2 + 1]);
return (T) this;
}
public T forFields(String... fieldNames) {
forFieldsOrTails.set(fieldNames);
isForfield.set(true);
return (T) this;
}
public T forTails(String... columnNames) {
forFieldsOrTails.set(columnNames);
isForfield.set(false);
return (T) this;
}
public T putValues(Object... values) {
String[] fields = forFieldsOrTails.get();
if (values.length == 0 || fields == null || fields.length == 0)
throw new DbException("putValues fields or values can not be empty");
if (values.length != fields.length)
throw new DbException("putValues quantity does not match forFields or forColumns");
for (int i = 0; i < fields.length; i++)
if (Boolean.TRUE.equals(isForfield.get()))
ClassCacheUtils.writeValueToBeanField(this, fields[i], values[i]);
else
tails().put(fields[i], values[i]);
return (T) this;
}
/** Return current table based on shard key value */
public String shardTB(Object... items) {
    // Resolve the table model for this entity (items may augment/override it).
    TableModel model = DbContextUtils.findTableModel(this.getClass(), items);
    ColumnModel col = model.getShardTableColumn();
    if (col == null || col.getShardTable() == null || col.getShardTable().length == 0)
        throw new DbException("Not found ShardTable setting for '" + model.getEntityClass() + "'");
    // Read the shard key from this bean (field or tail) and map it to a sharded table name.
    Object shardKey1 = DbContextUtils.readValueFromBeanFieldOrTail(col, this, false, false);
    return DbContextUtils.getShardedTB(ctx(), model.getEntityClass(), shardKey1);
}
/** Return current DbContext (sharded database) based on shard key value. */
public DbContext shardDB(Object... items) {
    TableModel model = DbContextUtils.findTableModel(this.getClass(), items);
    ColumnModel col = model.getShardDatabaseColumn();
    // FIX: the error message previously said "ShardTable" (copy-paste from
    // shardTB) although this method checks the ShardDatabase setting.
    if (col == null || col.getShardDatabase() == null || col.getShardDatabase().length == 0)
        throw new DbException("Not found ShardDatabase setting for '" + model.getEntityClass() + "'");
    // Read the shard key from this bean (field or tail) and resolve the target DbContext.
    Object shardKey1 = DbContextUtils.readValueFromBeanFieldOrTail(col, this, false, false);
    return DbContextUtils.getShardedDB(ctx(), model.getEntityClass(), shardKey1);
}
/** Return current table and DbContext based on shard key value */
public Object[] shard(Object... items) {
    // Convenience combining both lookups: [0] = sharded table name, [1] = sharded DbContext.
    return new Object[] { shardTB(items), shardDB(items) };
}
/**
 * For ORM query, if self class is ignored, use this method to put entity's
 * model class
 */
protected static Object[] insertThisClassIfNotHave(Object entity, Object... items) {
    Object[] newItems = items;
    TableModel[] models = DbContextUtils.findAllModels(items);
    if (models.length == 0)
        throw new DbException("No TableMode found for entity.");
    TableModel model = models[0];
    // If the first model does not describe this entity's class, prepend a
    // read-only model for it so downstream queries know the target entity.
    if (!entity.getClass().equals(model.getEntityClass())) {// NOSONAR
        model = TableModelUtils.entity2ReadOnlyModel(entity.getClass());
        newItems = ArrayUtils.insertArray(model, items);
    }
    return newItems;
}
protected void crudMethods__________________() {// NOSONAR
}

//@formatter:off
// CRUD shortcuts: each one-liner delegates to the matching entityXxx() method
// on the current DbContext (ctx()). The *Try variants return an int status
// instead of T/void; the *ById variants take an explicit id value rather than
// reading it from this bean.
public T insert(Object... items) {return (T) ctx().entityInsert(this, items);}
public T update(Object... items) {return ctx().entityUpdate(this, items);}
public int updateTry(Object... items) {return ctx().entityUpdateTry(this, items);}
public void delete(Object... items) {ctx().entityDelete(this, items);}
public int deleteTry(Object... items) {return ctx().entityDeleteTry(this, items);}
public void deleteById(Object id, Object... items) {ctx().entityDeleteById(this.getClass(), id, items);}
public int deleteByIdTry(Object id, Object... items) {return ctx().entityDeleteByIdTry(this.getClass(), id, items);}
// Existence / count helpers.
public boolean existStrict(Object... items) {return ctx().entityExistStrict(this, items);}
public boolean existId(Object... items) {return ctx().entityExist(this, items);}
public boolean existById(Object id, Object... items) {return ctx().entityExistById(this.getClass(), id, items);}
public int countAll(Object... items) {return ctx().entityCount(this.getClass(), items);}
// Load / find helpers.
public T load(Object... items) {return (T) ctx().entityLoad(this, items);}
public int loadTry(Object... items) {return ctx().entityLoadTry(this, items);}
public T loadById(Object id, Object... items) {return (T) ctx().entityLoadById(this.getClass(), id, items);}
public T loadByIdTry(Object id, Object... items) {return (T) ctx().entityLoadByIdTry(this.getClass(), id, items);}
public T loadBySQL(Object... items) {return ctx().entityLoadBySql(items);}
public List<T> findAll(Object... items) {return (List<T>) ctx().entityFind(this.getClass(), items);}
public List<T> findBySQL(Object... items) {return ctx().entityFindBySql(this.getClass(), items);}
public List<T> findBySample(Object... items) {return ctx().entityFindBySample(this, items);}
public EntityNet autoNet(Class<?>... entityClass) {return ctx().autoNet(entityClass);}
// Related-entity queries; insertThisClassIfNotHave() ensures this entity's
// model participates in the query configuration.
public <E> E findRelatedOne(Object... items) {Object[] newItems = insertThisClassIfNotHave(this, items);return ctx().entityFindRelatedOne(this, newItems);}
public <E> List<E> findRelatedList(Object... items) {Object[] newItems = insertThisClassIfNotHave(this, items);return ctx().entityFindRelatedList(this, newItems);}
public <E> Set<E> findRelatedSet(Object... items) {Object[] newItems = insertThisClassIfNotHave(this, items);return ctx().entityFindRelatedSet(this, newItems);}
public <E> Map<Object, E> findRelatedMap(Object... items) {Object[] newItems = insertThisClassIfNotHave(this, items);return ctx().entityFindRelatedMap(this, newItems);}
}
|
vidma/kensu-py
|
kensu/client/models/model_metrics_pk.py
|
# coding: utf-8
"""
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: beta
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
class ModelMetricsPK(object):
    """
    Primary key of a ModelMetrics resource: identifies the trained model, the
    lineage run that observed the metrics, and the schema the training result
    is stored in. All three references are required (setters reject None).

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # swagger_types: attribute name -> attribute type.
    # attribute_map: attribute name -> json key in the API definition.
    swagger_types = {
        'model_training_ref': 'ModelTrainingRef',
        'lineage_run_ref': 'LineageRunRef',
        'stored_in_schema_ref': 'SchemaRef'
    }

    attribute_map = {
        'model_training_ref': 'modelTrainingRef',
        'lineage_run_ref': 'lineageRunRef',
        'stored_in_schema_ref': 'storedInSchemaRef'
    }

    def __init__(self, model_training_ref=None, lineage_run_ref=None, stored_in_schema_ref=None):
        """
        ModelMetricsPK - a model defined in Swagger

        :raises ValueError: if any of the three references is None
            (validation happens in the property setters).
        """
        self._model_training_ref = None
        self._lineage_run_ref = None
        self._stored_in_schema_ref = None

        self.model_training_ref = model_training_ref
        self.lineage_run_ref = lineage_run_ref
        self.stored_in_schema_ref = stored_in_schema_ref

    @property
    def model_training_ref(self):
        """
        Gets the model_training_ref of this ModelMetricsPK.
        Points to the trained model

        :return: The model_training_ref of this ModelMetricsPK.
        :rtype: ModelTrainingRef
        """
        return self._model_training_ref

    @model_training_ref.setter
    def model_training_ref(self, model_training_ref):
        """
        Sets the model_training_ref of this ModelMetricsPK.
        Points to the trained model

        :param model_training_ref: The model_training_ref of this ModelMetricsPK.
        :type: ModelTrainingRef
        :raises ValueError: if model_training_ref is None
        """
        if model_training_ref is None:
            raise ValueError("Invalid value for `model_training_ref`, must not be `None`")
        self._model_training_ref = model_training_ref

    @property
    def lineage_run_ref(self):
        """
        Gets the lineage_run_ref of this ModelMetricsPK.
        Points to the lineage run having trained the model and seeing these metrics

        :return: The lineage_run_ref of this ModelMetricsPK.
        :rtype: LineageRunRef
        """
        return self._lineage_run_ref

    @lineage_run_ref.setter
    def lineage_run_ref(self, lineage_run_ref):
        """
        Sets the lineage_run_ref of this ModelMetricsPK.
        Points to the lineage run having trained the model and seeing these metrics

        :param lineage_run_ref: The lineage_run_ref of this ModelMetricsPK.
        :type: LineageRunRef
        :raises ValueError: if lineage_run_ref is None
        """
        if lineage_run_ref is None:
            raise ValueError("Invalid value for `lineage_run_ref`, must not be `None`")
        self._lineage_run_ref = lineage_run_ref

    @property
    def stored_in_schema_ref(self):
        """
        Gets the stored_in_schema_ref of this ModelMetricsPK.
        Points to location of training result (i.e. lineage output schema) which is seeing the metrics (in edge cases a training can have multiple results)

        :return: The stored_in_schema_ref of this ModelMetricsPK.
        :rtype: SchemaRef
        """
        return self._stored_in_schema_ref

    @stored_in_schema_ref.setter
    def stored_in_schema_ref(self, stored_in_schema_ref):
        """
        Sets the stored_in_schema_ref of this ModelMetricsPK.
        Points to location of training result (i.e. lineage output schema) which is seeing the metrics (in edge cases a training can have multiple results)

        :param stored_in_schema_ref: The stored_in_schema_ref of this ModelMetricsPK.
        :type: SchemaRef
        :raises ValueError: if stored_in_schema_ref is None
        """
        if stored_in_schema_ref is None:
            raise ValueError("Invalid value for `stored_in_schema_ref`, must not be `None`")
        self._stored_in_schema_ref = stored_in_schema_ref

    def to_dict(self):
        """
        Returns the model properties as a dict, recursively converting nested
        models (anything exposing to_dict) inside lists and dicts as well.
        """
        result = {}

        # dict.items() replaces the generated six.iteritems call: it behaves
        # identically here and removes the third-party `six` dependency from
        # this class.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, ModelMetricsPK):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
nraptis/Metal_OpenGL_MobileGameEngine
|
Framework/[C++ Core]/[[Tools]]/ToolMenuSectionRow.hpp
|
<reponame>nraptis/Metal_OpenGL_MobileGameEngine
//
// ToolMenuSectionRow.hpp
// DigMMMac
//
// Created by <NAME> on 7/4/18.
// Copyright © 2018 Darkswarm LLC. All rights reserved.
//
#ifndef ToolMenuSectionRow_hpp
#define ToolMenuSectionRow_hpp
#include "FCanvas.hpp"
#include "UIRoundedRect.hpp"
#include "ToolMenuSection.hpp"
#include "UIElement.hpp"
// A ToolMenuSection specialization (named for a single-row layout).
class ToolMenuSectionRow : public ToolMenuSection {
public:
    ToolMenuSectionRow();
    virtual ~ToolMenuSectionRow();

    // Frame lifecycle hooks (overridden from ToolMenuSection).
    virtual void Update();
    virtual void Draw();
    virtual void Layout();

    // Fixed height of one row, for layout code.
    static float RowHeight();

    // Notification callback; pSender identifies the notifying object.
    virtual void Notify(void *pSender, const char *pNotification);

    // Typed add helpers for the supported element kinds.
    // NOTE(review): presumably all delegate to AddElement() — confirm in the .cpp.
    void AddLabel(UILabel *pLabel);
    void AddTextBox(UITextBox *pTextBox);
    void AddButton(UIButton *pButton);
    void AddCheckBox(UICheckBox *pCheckBox);
    void AddAlignmentPicker(UIAlignmentPicker *pAlignmentPicker);
    void AddElement(UIElement *pElement);

    // The UI elements contained in this row.
    FList mElementList;
};
#endif
|
smile0913/gitlabhq
|
spec/support/issuable_shared_examples.rb
|
<filename>spec/support/issuable_shared_examples.rb
# Shared RSpec examples verifying that executing the service refreshes the
# assignees' cached merge-request counters.
shared_examples 'cache counters invalidator' do
  it 'invalidates counter cache for assignees' do
    # Any User instance touched during execution must have its MR cache counts invalidated.
    expect_any_instance_of(User).to receive(:invalidate_merge_request_cache_counts)

    described_class.new(project, user, {}).execute(merge_request)
  end
end
|
mattjr/structured
|
vcgapps/OGF/cells/map_algos/map_approx.h
|
<filename>vcgapps/OGF/cells/map_algos/map_approx.h
/*
* OGF/Graphite: Geometry and Graphics Programming Library + Utilities
* Copyright (C) 2000 <NAME>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* If you modify this software, you should include a notice giving the
* name of the person performing the modification, the date of modification,
* and the reason for such modification.
*
* Contact: <NAME>
*
* <EMAIL>
*
* ISA Project
* LORIA, INRIA Lorraine,
* Campus Scientifique, BP 239
* 54506 VANDOEUVRE LES NANCY CEDEX
* FRANCE
*
* Note that the GNU General Public License does not permit incorporating
* the Software into proprietary programs.
*/
#ifndef __OGF_CELLS_MAP_ALGOS_MAP_APPROX__
#define __OGF_CELLS_MAP_ALGOS_MAP_APPROX__
#include <OGF/cells/common/common.h>
#include <OGF/cells/map/map.h>
#include <OGF/cells/map/map_attributes.h>
#include <OGF/cells/map/geometry.h>
#include <OGF/cells/map_algos/map_components.h>
#include <OGF/math/linear_algebra/matrix_util.h>
#include <OGF/math/geometry/normal_cycle.h>
#include <OGF/math/functions/triangle_integral.h>
#include <OGF/basic/debug/progress.h>
#include <stack>
#include <queue>
namespace OGF {
// ------- Default proxy and fitter classes (uses the norm L1,2) ------
/**
 * Planar proxy for variational shape approximation (Cohen-Steiner et al.,
 * SIGGRAPH 2004): a (normal N_, barycenter G_) pair fitted to a chart of
 * facets, with an L1,2-style deviation metric.
 */
class L12LinearProxy {
public:
    // Weight of the compactness term (see distance_3) in deviation().
    static void set_compactness_importance(double x) { compactness_importance_ = x ; }
    static double compactness_importance() { return compactness_importance_ ; }
    // Setting a threshold < 1.0 also enables "n_mode", in which region
    // growing rejects facets whose normal deviates too much in angle.
    static void set_angular_threshold(double x) {
        angular_threshold_ = x ; n_mode_ = (angular_threshold_ < 1.0) ;
    }
    static double angular_threshold() { return angular_threshold_ ; }
    static bool n_mode() { return n_mode_ ; }

    L12LinearProxy() : N_(0,0,0), G_(0,0,0) { }
    L12LinearProxy(const Vector3d& v, const Point3d& p) : N_(v), G_(p) { }
    L12LinearProxy(Map::Facet* f) : N_(Geom::facet_normal(f)), G_(Geom::facet_barycenter(f)) { }

    // Compactness term: area-scaled quadrature of the squared *in-plane*
    // distance of the facet's vertices to the proxy barycenter G_ (each
    // vertex offset has its N_ component removed before measuring).
    // NOTE(review): only the first three vertices are used — assumes
    // triangulated facets; confirm callers guarantee this.
    double distance_3(Map::Facet* f, double area) const {
        Map::Vertex* v[3] ;
        v[0]=f->halfedge()->vertex() ;
        v[1]=f->halfedge()->next()->vertex() ;
        v[2]=f->halfedge()->next()->next()->vertex() ;
        double result = 0 ;
        double d[3] ;
        for(int i=0; i<3; i++) {
            // Tangential component of (vertex - G_): remove projection on N_.
            Vector3d V = v[i]->point() - G_ ;
            V = V - (V * N_) * N_ ;
            d[i] = V.norm2() ;
            result += d[i] ;
            d[i] = ::sqrt(d[i]) ;
        }
        result += d[0] * d[1] ;
        result += d[1] * d[2] ;
        result += d[2] * d[0] ;
        result /= 6.0 ;
        result *= area ;
        return result ;
    }

    // Deviation of facet f from this proxy: area-weighted squared normal
    // difference, plus the optional compactness term.
    double deviation(Map::Facet* f) const {
        double a = Geom::facet_area(f) ;
        Vector3d n = Geom::facet_normal(f) ;
        double result = a * (n - N_).norm2() ;
        if(compactness_importance_ != 0.0) {
            result += compactness_importance_ * distance_3(f,a) ;
        }
        return result ;
    }

    // Norm of the cross product of the facet normal and the proxy normal
    // (the sine of the angle between them, assuming unit normals).
    double angle_deviation(Map::Facet* f) const {
        Vector3d n = Geom::facet_normal(f) ;
        return (n ^ N_).norm() ;
    }

private:
    Vector3d N_ ;  // proxy normal
    Point3d G_ ;   // proxy barycenter
    static CELLS_API double compactness_importance_ ;
    static CELLS_API double angular_threshold_ ;
    static CELLS_API bool n_mode_ ;
} ;
/**
 * Accumulates facets (begin / add_facet* / end) and produces the best-fit
 * L12LinearProxy: area-weighted average normal and, when the compactness
 * term is active, the area-weighted barycenter.
 */
class L12LinearProxyFitter {
public:
    void begin() { N_ = Vector3d(0,0,0) ; G_ = Point3d(0,0,0) ; A_ = 0.0 ; }
    void end() {
        N_.normalize() ;
        // Finish the area-weighted barycenter average (only accumulated when
        // the compactness term is enabled, and only if any area was seen).
        if(L12LinearProxy::compactness_importance() != 0.0 && A_ != 0.0) {
            G_.set_x(G_.x() / A_) ;
            G_.set_y(G_.y() / A_) ;
            G_.set_z(G_.z() / A_) ;
        }
    }
    L12LinearProxy proxy() { return L12LinearProxy(N_, G_) ; }
    void add_facet(Map::Facet* f) {
        double a = Geom::facet_area(f) ;
        N_ = N_ + a * Geom::facet_normal(f) ;
        if(L12LinearProxy::compactness_importance() != 0.0) {
            Point3d g = Geom::facet_barycenter(f) ;
            A_ += a ;
            G_.set_x(G_.x() + a*g.x()) ;
            G_.set_y(G_.y() + a*g.y()) ;
            G_.set_z(G_.z() + a*g.z()) ;
        }
    }
private:
    Vector3d N_ ;  // accumulated, then normalized, proxy normal
    Point3d G_ ;   // accumulated, then averaged, barycenter
    double A_ ;    // total accumulated facet area
} ;
//____________________________________________________________________________________
// Candidate region-growing operation: "assign `facet` to `chart`", where
// E is the deviation cost of that assignment.
struct AddFacetToChart {
    AddFacetToChart(
        Map::Facet* f, int c, double e
    ) : facet(f), chart(c), E(e) {
    }
    Map::Facet* facet ;
    int chart ;
    double E ;
} ;

// Inverted comparison: std::priority_queue is a max-heap, so comparing with
// '>' makes the queue pop the LOWEST-cost candidate first.
class AddFacetToChartCmp {
public:
    bool operator()(
        const AddFacetToChart& op1, const AddFacetToChart& op2
    ) {
        return (op1.E > op2.E) ;
    }
} ;

typedef std::priority_queue<
    AddFacetToChart,
    std::vector<AddFacetToChart>,
    AddFacetToChartCmp
> AddFacetToChartQueue ;
// Per-facet integer attribute ("chart") storing the id of the chart/proxy a
// facet belongs to; -1 denotes an unassigned facet (see flood_fill below).
class ChartAttribute : public MapFacetAttribute<int> {
public:
    typedef MapFacetAttribute<int> superclass ;
    ChartAttribute(Map* map) : superclass(map, "chart") { }
    ChartAttribute(MapComponent* component) : superclass(component->map(), "chart") { }
} ;

// This one is there to allow instantiation with MapComponent.
inline void update_graphics(MapComponent* comp) { /* does nothing */ }
// Overloads so the template below works with a whole Map or a single component.
inline double generic_area(Map* map) { return Geom::map_area(map) ; }
inline double generic_area(MapComponent* comp) { return Geom::component_area(comp) ; }
/**
* A generic implementation of the segmentation algorithm in:
* Cohen-Steiner, Alliez and Desbrun,
* Variational Shape Approximation, Siggraph 2004
*/
template <class Proxy, class ProxyFitter, class MAP = Map> class MapVariationalApprox {
public:
MapVariationalApprox(MAP* map) : map_(map), chart_(map), max_error_(-1) {
}
void init(
int nb_proxies, int nb_iter, double min_err = 0.0, int min_nb_proxies = 0
) {
ProgressLogger progress(nb_proxies) ;
init_one_proxy_per_component() ;
for(int i=proxy_.size(); i<=nb_proxies; i++) {
double err = optimize(nb_iter) ;
if(err < min_err && int(proxy_.size()) >= min_nb_proxies) {
return ;
}
if(int(proxy_.size()) >= int(map_->size_of_facets())) { return ; }
Map::Facet* f = new_chart() ;
chart_[f] = proxy_.size() ;
if(progress.is_canceled()) {
break ;
}
progress.notify(i) ;
}
if(L12LinearProxy::n_mode()) {
fill_holes() ;
}
}
double optimize(int nb_iter) {
double result = 0 ;
ProgressLogger progress(nb_iter) ;
nb_iter = ogf_max(1, nb_iter) ;
for(int i = 0; i<nb_iter; i++) {
bool get_uninit = (i == nb_iter / 2) ;
get_proxies(get_uninit) ;
get_seeds() ;
flood_fill() ;
update_graphics(map_) ;
if(progress.is_canceled()) {
break ;
}
result = error() ;
Logger::out("Partition") << "error = " << result << std::endl ;
progress.notify(i) ;
}
return result ;
}
void add_charts(
int nb_charts, int nb_iter, double min_err = 0.0, int min_nb_charts = 0
) {
ProgressLogger progress(nb_charts) ;
for(int i=0; i<nb_charts; i++) {
get_proxies() ;
if(int(proxy_.size()) == int(map_->size_of_facets())) {
Logger::out("Partition") << "All facets are separated" << std::endl ;
break ;
}
Map::Facet* f = new_chart() ;
chart_[f] = proxy_.size() ;
double err = optimize(nb_iter) ;
if(err < min_err && int(proxy_.size()) >= min_nb_charts) {
return ;
}
if(progress.is_canceled()) {
break ;
}
Logger::out("Partition") << "error = " << err << std::endl ;
progress.notify(i) ;
}
if(L12LinearProxy::n_mode()) {
fill_holes() ;
}
}
/**
 * Global approximation error: sum over all chart-assigned facets of their
 * deviation from their chart's proxy, normalized by the total surface area.
 * Facets still unassigned (chart == -1) are skipped.
 */
double error() {
    double result = 0.0 ;
    FOR_EACH_FACET_GENERIC(MAP, map_, it) {
        int chart = chart_[it] ;
        if(chart != -1) {
            result += proxy_[chart].deviation(it) ;
        }
    }
    result /= generic_area(map_) ;
    return result ;
}
protected:
Map::Facet* new_chart() {
Map::Facet* result = nil ;
result = largest_uninitialized_chart() ;
if(result == nil) {
result = worst_facet_in_worst_chart() ;
}
return result ;
}
void compute_chart_sizes() {
chart_size_.clear() ;
int nb_proxies = 0 ;
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
nb_proxies = ogf_max(chart_[it], nb_proxies) ;
}
nb_proxies++ ;
for(int i=0; i<nb_proxies; i++) {
chart_size_.push_back(0) ;
}
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
int chart = chart_[it] ;
if(chart >= 0) {
chart_size_[chart]++ ;
}
}
}
void check_facets() {
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
ogf_assert(it != nil) ;
}
}
Map::Facet* worst_facet() {
compute_chart_sizes() ;
double e = -1.0 ;
Map::Facet* result = nil ;
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
int chart = chart_[it] ;
if(chart >= 0 && chart_size_[chart] > 1) {
ogf_assert(chart < int(proxy_.size())) ;
double cur_e = proxy_[chart].deviation(it) ;
if(cur_e > e) {
e = cur_e ;
result = it ;
}
}
}
ogf_assert(result != nil) ;
return result ;
}
int worst_chart() {
compute_chart_sizes() ;
std::vector<double> E ;
for(unsigned int i=0; i<proxy_.size(); i++) {
E.push_back(0) ;
}
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
int chart = chart_[it] ;
if(!L12LinearProxy::n_mode()) {
ogf_range_assert(chart,0,int(proxy_.size()) - 1) ;
}
if(chart >= 0) {
E[chart] += proxy_[chart].deviation(it) ;
}
}
double worst_e = -1.0 ;
int result = -1 ;
for(unsigned int i=0; i<proxy_.size(); i++) {
if(E[i] > worst_e && chart_size_[i] > 1) {
worst_e = E[i] ;
result = i ;
}
}
ogf_assert(result != -1) ;
return result ;
}
Map::Facet* worst_facet_in_worst_chart() {
int proxy = worst_chart() ;
double e = -1.0 ;
Map::Facet* result = nil ;
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
int chart = chart_[it] ;
if(chart == proxy) {
double cur_e = proxy_[chart].deviation(it) ;
if(cur_e > e) {
e = cur_e ;
result = it ;
}
}
}
ogf_assert(result != nil) ;
return result ;
}
void init_one_proxy() {
Map::Facet* f = map_->facets_begin() ;
proxy_.clear() ;
seed_.clear() ;
proxy_.push_back(Proxy(f)) ;
seed_.push_back(f) ;
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
chart_[it] = 0 ;
}
}
void init_one_proxy_per_component() {
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
chart_[it] = -1 ;
}
proxy_.clear() ;
seed_.clear() ;
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
if(chart_[it] == -1) {
proxy_.push_back(Proxy(it)) ;
seed_.push_back(it) ;
init_one_proxy_from_facet(it, proxy_.size() - 1) ;
}
}
}
/** returns the number of facets in the chart */
int init_one_proxy_from_facet(Map::Facet* f, int chart_id, int background_id = -1) {
int result = 0 ;
std::stack<Map::Facet*> S ;
S.push(f) ;
while(!S.empty()) {
Map::Facet* cur = S.top() ;
S.pop() ;
if(chart_[cur] != chart_id) {
chart_[cur] = chart_id ;
result++ ;
}
Map::Halfedge* h = cur->halfedge() ;
do {
Map::Facet* neigh = h->opposite()->facet() ;
if(neigh != nil && chart_[neigh] == background_id) {
S.push(neigh) ;
}
h = h->next() ;
} while(h != cur->halfedge()) ;
}
return result ;
}
int compute_nb_proxies() {
int nb_proxies = 0 ;
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
nb_proxies = ogf_max(nb_proxies, chart_[it]) ;
}
nb_proxies++ ;
return nb_proxies ;
}
/** returns the total numer of charts */
int get_uninitialized_charts() {
int nb_proxies = compute_nb_proxies() ;
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
if(chart_[it] < 0) {
init_one_proxy_from_facet(it, nb_proxies) ;
nb_proxies++ ;
}
}
return nb_proxies ;
}
void add_uninitialized_charts(int min_size) {
std::vector<int> remap ;
remap.push_back(0) ;
remap.push_back(0) ;
int cur_remap = compute_nb_proxies() ;
int cur_id = -2 ;
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
if(chart_[it] == -1) {
int cur_size = init_one_proxy_from_facet(it, cur_id, chart_[it]) ;
cur_id-- ;
if(cur_size > min_size) {
remap.push_back(cur_remap) ; cur_remap++ ;
} else {
remap.push_back(-1) ;
}
}
}
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
if(chart_[it] < 0) {
chart_[it] = remap[-chart_[it]] ;
}
}
}
Map::Facet* largest_uninitialized_chart() {
int cur_id = -2 ;
int max_size = -1 ;
Map::Facet* result = nil ;
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
if(chart_[it] == -1) {
int cur_size = init_one_proxy_from_facet(it, cur_id, chart_[it]) ;
cur_id-- ;
if(cur_size > max_size) {
max_size = cur_size ;
result = it ;
}
}
}
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
if(chart_[it] < 0) {
chart_[it] = -1 ;
}
}
return (max_size > 100) ? result : nil ;
}
void get_proxies(bool get_uninit = false) {
std::vector<ProxyFitter> fitter ;
if(get_uninit) {
add_uninitialized_charts(100) ;
}
int nb_proxies = compute_nb_proxies() ;
proxy_.clear() ;
for(int i=0; i<nb_proxies; i++) {
proxy_.push_back(Proxy()) ;
fitter.push_back(ProxyFitter()) ;
fitter[i].begin() ;
}
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
int chart = chart_[it] ;
if(chart >= 0) {
ogf_assert(chart < int(proxy_.size())) ;
fitter[chart].add_facet(it) ;
} else {
// std::cerr << "get_proxies(): NUL proxy" << std::endl ;
}
}
for(int i=0; i<nb_proxies; i++) {
ogf_assert(i < int(proxy_.size())) ;
fitter[i].end() ;
proxy_[i] = fitter[i].proxy() ;
}
}
void get_seeds() {
seed_.clear() ;
std::vector<double> E ;
for(unsigned int i=0; i<proxy_.size(); i++) {
E.push_back(Numeric::big_double) ;
seed_.push_back(nil) ;
}
FOR_EACH_FACET_GENERIC(MAP, map_, it) {
int chart = chart_[it] ;
if(chart >= 0) {
double d = proxy_[chart].deviation(it) ;
if(d < E[chart]) {
E[chart] = d ;
seed_[chart] = it ;
}
} else {
// std::cerr << "get_seeds(): NUL proxy" << std::endl ;
}
}
for(unsigned int i=0; i<seed_.size(); i++) {
if(seed_[i] == nil) {
// std::cerr << "nil seed for chart id" << i << std::endl ;
}
}
}
void insert_neighbors(Map::Facet* seed, AddFacetToChartQueue& q) {
/*
// This one is the simple version, as in the paper,
// the other version seems to me more efficient.
int chart = chart_[seed] ;
Map::Halfedge* h = seed->halfedge() ;
do {
Map::Facet* f = h->opposite()->facet() ;
if(f != nil && chart_[f] == -1) {
q.push(AddFacetToChart(f, chart, proxy_[chart].deviation(f))) ;
}
h = h->next() ;
} while(h != seed->halfedge()) ;
*/
Map::Halfedge* h = seed->halfedge() ;
do {
Map::Facet* f = h->opposite()->facet() ;
if(f != nil && chart_[f] == -1) {
Map::Halfedge* hh = f->halfedge() ;
do {
Map::Facet* ff = hh->opposite()->facet() ;
if(ff != nil) {
int chart = chart_[ff] ;
if(chart != -1) {
ogf_range_assert(chart, 0, int(proxy_.size()) - 1) ;
q.push(AddFacetToChart(f, chart, proxy_[chart].deviation(f))) ;
}
}
hh = hh->next() ;
} while(hh != f->halfedge()) ;
}
h = h->next() ;
} while(h != seed->halfedge()) ;
}
/**
 * Re-partitions the surface: clears all chart assignments, re-labels each
 * chart's seed facet, then grows charts greedily from a global priority
 * queue ordered by increasing proxy deviation (Lloyd-style assignment step).
 * In n_mode, facets whose normal deviates beyond the angular threshold are
 * rejected and may remain unassigned (later handled by fill_holes()).
 */
void flood_fill() {
    FOR_EACH_FACET_GENERIC(MAP, map_, it) {
        chart_[it] = -1 ;
    }
    // Label the seed facets with their chart ids.
    for(unsigned int i=0; i<proxy_.size(); i++) {
        ogf_assert(i < seed_.size()) ;
        if(seed_[i] != nil) {
            chart_[seed_[i]] = i ;
        }
    }
    // Prime the queue with candidates around every seed.
    AddFacetToChartQueue q ;
    for(unsigned int i=0; i<proxy_.size(); i++) {
        ogf_assert(i < seed_.size()) ;
        if(seed_[i] != nil) {
            insert_neighbors(seed_[i], q) ;
        }
    }
    while (!q.empty()) {
        AddFacetToChart op = q.top();
        q.pop();
        // A facet may be queued several times; only the first (cheapest)
        // pop assigns it — later duplicates are ignored.
        if(chart_[op.facet] == -1) {
            ogf_assert(op.chart < int(proxy_.size())) ;
            if(
                !L12LinearProxy::n_mode() ||
                (proxy_[op.chart].angle_deviation(op.facet) < L12LinearProxy::angular_threshold())
            ) {
                chart_[op.facet] = op.chart ;
                insert_neighbors(op.facet, q) ;
            }
        }
    }
}
/**
 * Assigns every still-unlabelled facet to an adjacent chart by flooding
 * outward from existing chart boundaries. Used after flood_fill in n_mode,
 * where the angular threshold may legitimately leave facets unassigned.
 */
void fill_holes() {
    std::stack<Map::Halfedge*> S ;
    // Seed the stack with every halfedge whose facet is labelled but whose
    // opposite facet is not (i.e. the chart boundaries next to holes).
    FOR_EACH_HALFEDGE_GENERIC(MAP, map_, it) {
        Map::Facet* f1 = it->facet() ;
        Map::Facet* f2 = it->opposite()->facet() ;
        if(f1 != nil && f2 != nil && chart_[f1] >= 0 && chart_[f2] < 0) {
            S.push(it) ;
        }
    }
    while(!S.empty()) {
        Map::Halfedge* h = S.top() ;
        S.pop() ;
        Map::Facet* f1 = h->facet() ;
        Map::Facet* f2 = h->opposite()->facet() ;
        if(chart_[f2] < 0) {
            // Copy the chart label across the edge, then keep flooding into
            // any still-unlabelled neighbors of the newly labelled facet.
            chart_[f2] = chart_[f1] ;
            Map::Halfedge* hh = f2->halfedge() ;
            do {
                Map::Facet* f3 = hh->opposite()->facet() ;
                if(f3 != nil && chart_[f3] < 0) {
                    S.push(hh) ;
                }
                hh = hh->next() ;
            } while(hh != f2->halfedge()) ;
        }
    }
}
private:
MAP* map_ ;
ChartAttribute chart_ ;
std::vector<Proxy> proxy_ ;
std::vector<Map::Facet*> seed_ ;
std::vector<int> chart_size_ ;
double max_error_ ;
} ;
}
#endif
|
Anadian/CNO
|
include/cno_log.h
|
<filename>include/cno_log.h
//cno_log.h
#ifndef CNO_LOG_H
#define CNO_LOG_H
#ifdef __cplusplus
extern "C" {
#endif //__cplusplus
#include "cno_build.h"
// #if !defined(CNO_LOG_ENGINE)
// #define CNO_LOG_ENGINE CNO_LOG_ENGINE_NONE
// #endif //!defined(CNO_SETTINGS_LOG_ENGINE)
//
// #if CNO_LOG_ENGINE == CNO_LOG_ENGINE_ORIGINAL
// #if CNO_HAVE_STDARG
// #include <stdarg.h>
// typedef va_list cno_va_list_type;
// #endif //CNO_HAVE_STDARG
//
// #define cno_log(priority, ...) (priority <= CNO_LowLevelConfig.debug.verbosity)?(CNO_GlobalLog(__func__, __VA_ARGS__)):(CNO_noop)
//
// cno_u8_type CNO_GlobalLog(cno_cstring_type function, cno_cstring_type format, ...);
// cno_u8_type CNO_SpecificLog(cno_cstring_type filename, cno_cstring_type function, cno_cstring_type format, ...);
// cno_u8_type CNO_NewGlobalLog();
//
// #elif CNO_LOG_ENGINE == CNO_LOG_ENGINE_LOGH
// #include "log.h"
//
// #define cno_log(...) CNO_noop
// #define cno_log_debug(...) log_debug(__VA_ARGS__)
// #define cno_log_info(...) log_info(__VA_ARGS__)
// #define cno_log_warn(...) log_warn(__VA_ARGS__)
// #define cno_log_error(...) log_error(__VA_ARGS__)
// #else
// #define cno_log(...) CNO_noop
// #define cno_log_debug(...) CNO_noop
// #define cno_log_info(...) CNO_noop
// #define cno_log_warn(...) CNO_noop
// #define cno_log_error(...) CNO_noop
// #endif //CNO_LOG_ENGINE
#if CNO_HAVE_STDARG
#include <stdarg.h>
#endif //CNO_HAVE_STDARG
#if CNO_HAVE_UNISTD
#include <unistd.h> //isatty
#endif //CNO_HAVE_UNISTD
#include "cno_time.h"
#include "cno_mutex.h"
//\033[<STYLE>;<COLOUR>m Start foreground format
//\033[<BACKGROUND>m Start background format
//\x1B[0m Reset to plain/normal.
//<STYLE>:
//0 normal
//1 bold
//2 dim
//3 italics
//4 underline
//5 blinking
//6 ???
//7 White on colour background
//8 Invisible
//<COLOUR>:
//30 black
//31 red
//32 green
//33 yellow
//34 blue
//35 magenta
//36 cyan
//37 white
//<BACKGROUND>:
//40 white background
//41 red bg
//42 green bg
//43 yellow bg
//44 blue bg
//45 magenta bg
//46 cyan bg
//47 white bg
#include "cno_string.h"
/* Log severity levels in ascending verbosity order: a sink with priority P
 * accepts messages of priority <= P. The trailing colour notes record the
 * intended ANSI terminal colour for each level (see the table above). */
typedef enum CNO_Log_Priority_enum{
	CNO_Log_Priority_None=0,
	CNO_Log_Priority_Error=1, //Red
	CNO_Log_Priority_Warn=2, //Yellow
	CNO_Log_Priority_Note=3, //Magenta
	CNO_Log_Priority_Info=4, //Blue
	CNO_Log_Priority_Debug=5 //Green
} CNO_Log_Priority_type; //cyan?

/* Discriminator for the payload union in CNO_Transport_type: selects which
 * kind of sink (file / stream / memory buffer) a transport is. */
typedef enum CNO_Log_Type_enum{
	CNO_Log_Type_None=0,
	CNO_Log_Type_File=1,
	CNO_Log_Type_Stream=2,
	CNO_Log_Type_Buffer=3
} CNO_Log_Type_type;
/* Payload for a file-backed log sink. */
typedef struct CNO_Log_File_struct{
	cno_string_type name;   /* Path of the log file. */
	cno_u8_type persist; /* Keep file open. */
	cno_u8_type overwrite; /* Overwrite file if it already exists, otherwise append */
} CNO_Log_File_type;

/* Payload for a stream-backed sink (e.g. stdout/stderr). */
typedef struct CNO_Log_Stream_struct{
	cno_filestream_type stream;
	cno_u8_type colour; /* Non-zero enables ANSI colour codes (see table above). */
} CNO_Log_Stream_type;

/* Payload for an in-memory buffer sink.
 * NOTE(review): offset/length presumably track write position and capacity —
 * confirm against the implementation. */
typedef struct CNO_Log_Buffer_struct{
	cno_u8_type *pointer;
	cno_u16_type offset;
	cno_u16_type length;
} CNO_Log_Buffer_type;
/* A single log sink: a priority threshold plus a payload discriminated by
 * `type` (file, stream, or memory buffer). */
typedef struct CNO_Transport_struct{
	CNO_Log_Priority_type priority; /* Maximum priority this sink accepts. */
	CNO_Log_Type_type type;         /* Selects the active union member below. */
	union{
		CNO_Log_File_type file;
		CNO_Log_Stream_type stream;
		CNO_Log_Buffer_type buffer;
	}; /* FIX: the anonymous union (C11) was missing its terminating ';',
	    * which made this header a syntax error. */
} CNO_Transport_type;
#if !defined(CNO_NO_GLOBAL_STATE)
/* NOTE(review): these are *definitions* in a header — including cno_log.h
 * from more than one translation unit will cause duplicate-symbol link
 * errors. Consider `extern` declarations here with the single definitions in
 * one .c file (applies to CNO_GlobalLog_Mutex below as well). */
CNO_Transport_type *CNO_Transports_Global = NULL;
/* Convenience macros targeting the global transport list. */
/* macro(!defined(CNO_Log_Transport_Add_Global)`CNO_Log_Transport_Add_Global(...)`CNO_Log_Transport_Add(CNO_Transports_Global,__VA_ARGS__)) */
#if !defined(CNO_Log_Transport_Add_Global)
#define CNO_Log_Transport_Add_Global(...) CNO_Log_Transport_Add(CNO_Transports_Global,__VA_ARGS__)
#endif /* !defined(CNO_Log_Transport_Add_Global) */
/* macro(!defined(CNO_Log_Transport_Clear_Global)`CNO_Log_Transport_Clear_Global(...)`CNO_Log_Transport_Clear(CNO_Transports_Global)) */
#if !defined(CNO_Log_Transport_Clear_Global)
#define CNO_Log_Transport_Clear_Global(...) CNO_Log_Transport_Clear(CNO_Transports_Global)
#endif /* !defined(CNO_Log_Transport_Clear_Global) */
/* macro(!defined(CNO_Log_Global)`CNO_Log_Global(...)`CNO_Log(CNO_Transports_Global,__VA_ARGS__)) */
#if !defined(CNO_Log_Global)
#define CNO_Log_Global(...) CNO_Log(CNO_Transports_Global,__VA_ARGS__)
#endif /* !defined(CNO_Log_Global) */
/* Mutex around the global log — presumably initialised by CNO_Log_Init; confirm. */
cno_mutex_type CNO_GlobalLog_Mutex;
#endif /*!defined(CNO_NO_GLOBAL_STATE)*/
//[bold] Error: [dim] file (line): [normal] function (time unix/clock): message (errno: strerror | other error messages)
/* Public API (implementations live in the corresponding .c file). */
/* One-time setup of the logging subsystem. */
cno_u8_type CNO_Log_Init();
/* Add `transport` to the sink list `transports`. */
cno_u8_type CNO_Log_Transport_Add(CNO_Transport_type *transports, CNO_Transport_type transport);
/* Remove all sinks from `transports`. */
cno_u8_type CNO_Log_Transport_Clear(CNO_Transport_type *transports);
/* Write a printf-style message at `priority` to the sinks in `transports`;
 * process/file/function identify the call site. */
cno_u8_type CNO_Log(CNO_Transport_type *transports, cno_cstring_type process, cno_cstring_type file, cno_cstring_type function, CNO_Log_Priority_type priority, cno_string_type format, ...);
/* Self-test hook. */
cno_u8_type CNO_Log_Test();
/* Tear-down counterpart of CNO_Log_Init. */
cno_u8_type CNO_Log_Quit();
#ifdef __cplusplus
}
#endif //__cplusplus
#endif //CNO_LOG_H
|
AsyncVoid/Industrial-Foregoing
|
src/main/java/com/buuz135/industrial/tile/magic/EnchantmentRefinerTile.java
|
<reponame>AsyncVoid/Industrial-Foregoing
package com.buuz135.industrial.tile.magic;
import com.buuz135.industrial.tile.CustomColoredItemHandler;
import com.buuz135.industrial.tile.CustomElectricMachine;
import com.buuz135.industrial.utils.WorkUtils;
import net.minecraft.init.Items;
import net.minecraft.item.EnumDyeColor;
import net.minecraft.item.ItemStack;
import net.minecraftforge.items.ItemHandlerHelper;
import net.minecraftforge.items.ItemStackHandler;
/**
 * Item-sorting machine tile: pulls one item at a time from a 3-slot input
 * inventory and routes it to the "enchanted" output (enchanted items and
 * enchanted books) or to the "non-enchanted" output.
 */
public class EnchantmentRefinerTile extends CustomElectricMachine {

    private ItemStackHandler input;        // 3-slot source; outside automation may insert only
    private ItemStackHandler outputNoEnch; // 4-slot output for plain items; extract only
    private ItemStackHandler outputEnch;   // 4-slot output for enchanted items; extract only

    public EnchantmentRefinerTile() {
        // The class-name hash is passed to the superclass as this machine's id.
        super(EnchantmentRefinerTile.class.getName().hashCode());
    }

    @Override
    protected void initializeInventories() {
        super.initializeInventories();
        this.initInputInv();
        this.initOutputInv();
    }

    /** Builds the green-highlighted input inventory: insertable, not extractable, from outside. */
    private void initInputInv() {
        input = new ItemStackHandler(3) {
            @Override
            protected void onContentsChanged(int slot) {
                // Persist inventory changes.
                EnchantmentRefinerTile.this.markDirty();
            }
        };
        this.addInventory(new CustomColoredItemHandler(this.input, EnumDyeColor.GREEN, "Input items", 18 * 3, 25, 1, 3) {
            @Override
            public boolean canInsertItem(int slot, ItemStack stack) {
                return true;
            }

            @Override
            public boolean canExtractItem(int slot) {
                return false;
            }
        });
        this.addInventoryToStorage(this.input, "ench_ref_in");
    }

    /** Builds both output inventories (purple = enchanted, yellow = non-enchanted): extract-only. */
    private void initOutputInv() {
        outputEnch = new ItemStackHandler(4) {
            @Override
            protected void onContentsChanged(int slot) {
                EnchantmentRefinerTile.this.markDirty();
            }
        };
        this.addInventory(new CustomColoredItemHandler(outputEnch, EnumDyeColor.PURPLE, "Enchanted Items", 18 * 4 + 14, 25, 4, 1) {
            @Override
            public boolean canInsertItem(int slot, ItemStack stack) {
                return false;
            }

            @Override
            public boolean canExtractItem(int slot) {
                return true;
            }
        });
        this.addInventoryToStorage(outputEnch, "ench_ref_out_yes");
        outputNoEnch = new ItemStackHandler(4) {
            @Override
            protected void onContentsChanged(int slot) {
                EnchantmentRefinerTile.this.markDirty();
            }
        };
        this.addInventory(new CustomColoredItemHandler(outputNoEnch, EnumDyeColor.YELLOW, "No enchanted Items", 18 * 4 + 14, 25 + 18 * 2, 4, 1) {
            @Override
            public boolean canInsertItem(int slot, ItemStack stack) {
                return false;
            }

            @Override
            public boolean canExtractItem(int slot) {
                return true;
            }
        });
        this.addInventoryToStorage(outputNoEnch, "ench_ref_out_no");
    }

    /** Returns the stack in the first non-empty input slot, or ItemStack.EMPTY when idle. */
    public ItemStack getFirstItem() {
        for (int i = 0; i < input.getSlots(); ++i) {
            if (!input.getStackInSlot(i).isEmpty()) {
                return input.getStackInSlot(i);
            }
        }
        return ItemStack.EMPTY;
    }

    /**
     * Processes at most one item per work cycle: copies a single item off the
     * first non-empty input slot and moves it to the matching output. The
     * insert is simulated first so the input is only decremented when the
     * item actually fits. Returns the work amount (500) on success; 0 when
     * the machine is disabled, idle, or the target output is full.
     */
    @Override
    protected float performWork() {
        if (WorkUtils.isDisabled(this.getBlockType())) return 0;
        ItemStack stack = getFirstItem();
        if (stack.isEmpty()) {
            return 0;
        }
        ItemStack out = stack.copy();
        out.setCount(1);
        if (stack.isItemEnchanted() || stack.getItem().equals(Items.ENCHANTED_BOOK)) {
            // Simulate (true) before committing (false) so nothing is lost.
            if (ItemHandlerHelper.insertItem(outputEnch, out, true).isEmpty()) {
                ItemHandlerHelper.insertItem(outputEnch, out, false);
                stack.setCount(stack.getCount() - 1);
                return 500;
            }
        } else if (ItemHandlerHelper.insertItem(outputNoEnch, out, true).isEmpty()) {
            ItemHandlerHelper.insertItem(outputNoEnch, out, false);
            stack.setCount(stack.getCount() - 1);
            return 500;
        }
        return 0;
    }
}
|
developkits/cmb
|
Source/VTKExtensions/General/vtkModelLineSource.h
|
<filename>Source/VTKExtensions/General/vtkModelLineSource.h
//=========================================================================
// Copyright (c) Kitware, Inc.
// All rights reserved.
// See LICENSE.txt for details.
//
// This software is distributed WITHOUT ANY WARRANTY; without even
// the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the above copyright notice for more information.
//=========================================================================
// .NAME vtkModelLineSource - create a line defined by two end points
// .SECTION Description
// vtkModelLineSource is a source object that creates a polyline defined by
// two endpoints. The number of segments composing the polyline is
// controlled by setting the object resolution.
#ifndef __vtkModelLineSource_h
#define __vtkModelLineSource_h
#include "cmbSystemConfig.h"
#include "vtkCMBGeneralModule.h" // For export macro
#include "vtkPolyDataAlgorithm.h"
// Source algorithm producing a polyline between Point1 and Point2, split into
// Resolution segments; optionally emits vertex cells as well.
class VTKCMBGENERAL_EXPORT vtkModelLineSource : public vtkPolyDataAlgorithm
{
public:
  static vtkModelLineSource* New();
  vtkTypeMacro(vtkModelLineSource, vtkPolyDataAlgorithm);
  void PrintSelf(ostream& os, vtkIndent indent) override;

  // Description:
  // Set position of first end point.
  vtkSetVector3Macro(Point1, double);
  vtkGetVectorMacro(Point1, double, 3);

  // Description:
  // Set position of other end point.
  vtkSetVector3Macro(Point2, double);
  vtkGetVectorMacro(Point2, double, 3);

  // Description:
  // Divide line into resolution number of pieces (clamped to >= 1).
  vtkSetClampMacro(Resolution, int, 1, VTK_INT_MAX);
  vtkGetMacro(Resolution, int);

  // Description:
  // Whether or not to build verts to the output (treated as a boolean flag).
  vtkSetMacro(BuildVertex, int);
  vtkGetMacro(BuildVertex, int);
  vtkBooleanMacro(BuildVertex, int);

protected:
  vtkModelLineSource(int res = 1);
  ~vtkModelLineSource() override{};

  int RequestData(vtkInformation*, vtkInformationVector**, vtkInformationVector*) override;
  int RequestInformation(vtkInformation*, vtkInformationVector**, vtkInformationVector*) override;

  double Point1[3];
  double Point2[3];
  int Resolution;
  int BuildVertex;

private:
  // Copying is intentionally disabled (declared, not implemented) per the
  // classic pre-C++11 VTK convention.
  vtkModelLineSource(const vtkModelLineSource&); // Not implemented.
  void operator=(const vtkModelLineSource&);     // Not implemented.
};
#endif
|
MarcBaldi/ZombieZite
|
src/main/scala/de/htwg/se/zombiezite/aview/gui/DPad.scala
|
package de.htwg.se.zombiezite.aview.gui
import de.htwg.se.zombiezite.controller._
import scala.swing._
import scala.io.Source._
/**
 * Directional control pad for the GUI: a 5x3 grid with movement buttons
 * arranged around a central search action, followed by wait/attack and
 * log-window rows. Button labels are German (Hoch/Links/Rechts/Runter...).
 */
class DPad(c: Controller, log: Frame) extends GridPanel(5, 3) {

  /** Empty cell used for grid padding. */
  private def gap: Label = new Label

  // Row 1: move up.
  contents += gap
  contents += Button("Hoch") { c.move(c.actualPlayer, 0, 1) }
  contents += gap

  // Row 2: move left, search the current field, move right.
  contents += Button("Links") { c.move(c.actualPlayer, -1, 0) }
  contents += Button("Suchen") { c.search(c.actualPlayer) }
  contents += Button("Rechts") { c.move(c.actualPlayer, 1, 0) }

  // Row 3: move down.
  contents += gap
  contents += Button("Runter") { c.move(c.actualPlayer, 0, -1) }
  contents += gap

  // Row 4: skip turn and open the attack dialog.
  contents += Button("Warten") { c.wait(c.actualPlayer) }
  contents += gap
  contents += Button("Angriff") { new Attack(c) }

  // Row 5: open the log window.
  contents += gap
  contents += Button("Log") { log.open() }
  contents += gap
}
|
hschwane/offline_production
|
PROPOSAL/private/PROPOSAL/propagation_utility/ContinuousRandomizer.cxx
|
#include "PROPOSAL/propagation_utility/ContinuousRandomizer.h"
#include "PROPOSAL/propagation_utility/PropagationUtility.h"
#include "PROPOSAL/propagation_utility/PropagationUtilityIntegral.h"
#include "PROPOSAL/propagation_utility/PropagationUtilityInterpolant.h"
#include "PROPOSAL/Constants.h"
#include "PROPOSAL/Logging.h"
using namespace PROPOSAL;
//----------------------------------------------------------------------------//
//----------------------------------------------------------------------------//
//--------------------------------constructors--------------------------------//
//----------------------------------------------------------------------------//
//----------------------------------------------------------------------------//
// Builds a randomizer whose dE^2/de quantity is evaluated by direct numeric
// integration (no interpolation tables).
ContinuousRandomizer::ContinuousRandomizer(const Utility& utility)
{
    DE2de = new UtilityIntegralContRand(utility);
}
// Builds a randomizer backed by interpolation tables (faster evaluation at the
// cost of building the tables up front).
ContinuousRandomizer::ContinuousRandomizer(const Utility& utility, const InterpolationDef interpolation_def)
{
    DE2de = new UtilityInterpolantContRand(utility, interpolation_def);
}
// "Copy onto a utility" constructor: clones the wrapped calculator for the
// given utility, then aborts (log_fatal) unless both utilities are equal.
// NOTE(review): the clone happens before the equality check; if log_fatal
// does not terminate the process, the cloned DE2de would leak — confirm
// log_fatal semantics.
ContinuousRandomizer::ContinuousRandomizer(const Utility& utility, const ContinuousRandomizer& randomizer)
    : DE2de(randomizer.DE2de->clone(utility))
{
    if (utility != randomizer.DE2de->GetUtility())
    {
        log_fatal("Utilities of the ContinuousRandomizer should have same values!");
    }
}
// Copy constructor: deep-copies the wrapped calculator against the source's
// own utility.
ContinuousRandomizer::ContinuousRandomizer(const ContinuousRandomizer& randomizer)
    : DE2de(randomizer.DE2de->clone(randomizer.DE2de->GetUtility()))
{
}
// Releases the owned calculator. NOTE(review): DE2de is an owning raw pointer
// with a user-defined copy constructor; verify the header also declares a
// matching copy assignment operator (rule of three), otherwise assignment
// would leak/double-delete.
ContinuousRandomizer::~ContinuousRandomizer()
{
    delete DE2de;
}
// Equality compares the wrapped calculators by value (not by pointer).
bool ContinuousRandomizer::operator==(const ContinuousRandomizer& randomizer) const
{
    return *DE2de == *randomizer.DE2de;
}

// Defined as the negation of operator==.
bool ContinuousRandomizer::operator!=(const ContinuousRandomizer& randomizer) const
{
    return !(*this == randomizer);
}
// ------------------------------------------------------------------------- //
// Public member function
// ------------------------------------------------------------------------- //

// Draws a randomized final energy from a Gaussian centered on final_energy
// whose variance comes from the continuous-randomization quantity DE2de,
// truncated to the physically allowed interval
// [particle low energy, initial_energy]. rnd is expected to be a uniform
// deviate in (0, 1).
double ContinuousRandomizer::Randomize(double initial_energy, double final_energy, double rnd)
{
    double sigma, xhi, xlo, rndtmp;

    // this happens if small distances are propagated and the
    // energy loss is so small that it is smaller than the precision
    // which is checked for in during the calculation.
    if (initial_energy == final_energy)
    {
        return final_energy;
    }

    sigma = std::sqrt(DE2de->Calculate(initial_energy, final_energy, 0.0));

    // It is not drawn from the real gaus distribution but rather from the
    // area which is possible due to the limits of the initial energy and the
    // particle mass. Another possibility would be to draw again but that would be
    // more expensive.
    //
    // calculate the allowed region (CDF values at the upper/lower bounds)
    xhi = 0.5 + std::erf((initial_energy - final_energy) / (SQRT2 * sigma)) / 2;
    xlo = 0.5 + std::erf((DE2de->GetUtility().GetParticleDef().low - final_energy) / (SQRT2 * sigma)) / 2;

    // draw random number from the allowed region (rescale rnd into [xlo, xhi]).
    rndtmp = xlo + (xhi - xlo) * rnd;

    // Invert the truncated Gaussian CDF to obtain the randomized energy.
    return sigma * inverseErrorFunction(rndtmp) + final_energy;
}
|
10088/swc
|
crates/swc/tests/tsc-references/parserES5ComputedPropertyName3_es5.2.minified.js
|
// Generated swc test fixture: minified expected output for the
// parserES5ComputedPropertyName3 test case. `e` appears as a free variable in
// the expected output — do not hand-edit.
import _define_property from "@swc/helpers/lib/_define_property.js";
_define_property({}, e, function() {});
|
MedlockJ/onme
|
js/components/avatar.js
|
/**
 * Applies appearance settings to an avatar DOM component.
 *
 * Fixes two defects of the original implementation:
 * - the "mesomorrph" typo meant a previously applied "mesomorph" class was
 *   never removed;
 * - the default `params = {}` crashed on `...params.preferences` (spread of
 *   undefined) and `params.disabledBodyParts[...]` (lookup on undefined);
 *   missing fields now default to empty values.
 *
 * @param {Element} component Root element containing .torso/.arm/.leg parts.
 * @param {Object} [params] Appearance settings.
 * @param {string} [params.skinTone] Written to the `skin-tone` attribute.
 * @param {string[]} [params.preferences] Body-style classes to apply.
 * @param {Object.<string,boolean>} [params.disabledBodyParts] Parts to mark
 *     disabled, keyed by torso/leftArm/rightArm/leftLeg/rightLeg.
 */
function initializeAvatar(component, params = {}) {
    if (!component) return;
    const { skinTone, preferences = [], disabledBodyParts = {} } = params;

    const bodyParts = {
        torso: component.querySelector(".torso"),
        leftArm: component.querySelector(".arm.left"),
        rightArm: component.querySelector(".arm.right"),
        leftLeg: component.querySelector(".leg.left"),
        rightLeg: component.querySelector(".leg.right"),
    };

    component.setAttribute("skin-tone", skinTone);

    // Clear any previously applied body-style classes before re-applying.
    component.classList.remove("masculine", "feminine", "ectomorph", "mesomorph", "endomorph");
    if (preferences.length) component.classList.add(...preferences);

    for (const key of Object.keys(bodyParts)) {
        const part = bodyParts[key];
        part.classList.remove("disabled");
        if (disabledBodyParts[key]) part.classList.add("disabled");
    }
}
/**
 * Wires up click-to-toggle body-part customization: clicking any element
 * inside the given sections toggles its "disabled" class.
 *
 * @param {Iterable<Element>} components Sections to make clickable; no-op
 *     when null/undefined.
 */
function initializeAvatarBodyCustomization(components) {
    if (!components) return;
    for (const section of components) {
        section.addEventListener("click", (event) => {
            event.target.classList.toggle("disabled");
        });
    }
}
|
gdeiassistant/GdeiAssistant
|
src/main/java/cn/gdeiassistant/Tools/SpringUtils/RedisDaoUtils.java
|
<filename>src/main/java/cn/gdeiassistant/Tools/SpringUtils/RedisDaoUtils.java
package cn.gdeiassistant.Tools.SpringUtils;
import cn.gdeiassistant.Enum.Module.CoreModuleEnum;
import cn.gdeiassistant.Pojo.DelayTask.DelayTask;
import cn.gdeiassistant.Pojo.DelayTask.SessionAttributeExpireDelayTaskElement;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import javax.servlet.ServletContext;
import java.util.concurrent.TimeUnit;
/**
 * Key-value access helper that targets Redis when the Redis core module is
 * enabled, and falls back to {@link ServletContext} attributes (single-node,
 * in-memory) otherwise.
 */
@Component
public class RedisDaoUtils {

    // Optional: absent when the Redis module is disabled, hence required = false.
    @Autowired(required = false)
    private RedisTemplate<String, Object> redisTemplate;

    @Autowired
    private ModuleUtils moduleUtils;

    @Autowired
    private ServletContext servletContext;

    @Autowired
    private DelayTaskUtils delayTaskUtils;

    /**
     * Reads the value stored under {@code key}.
     *
     * @param <T> expected value type; the cast is unchecked, so the caller
     *            must know what was stored under the key
     * @return the stored value, or {@code null} if absent
     */
    public <T> T get(String key) {
        if (moduleUtils.CheckCoreModuleState(CoreModuleEnum.REDIS)) {
            return (T) redisTemplate.opsForValue().get(key);
        }
        return (T) servletContext.getAttribute(key);
    }

    /** Stores {@code object} under {@code key}, overwriting any previous value. */
    public <E> void set(String key, E object) {
        if (moduleUtils.CheckCoreModuleState(CoreModuleEnum.REDIS)) {
            redisTemplate.opsForValue().set(key, object);
        } else {
            servletContext.setAttribute(key, object);
        }
    }

    /** Removes the value stored under {@code key}, if any. */
    public void delete(String key) {
        if (moduleUtils.CheckCoreModuleState(CoreModuleEnum.REDIS)) {
            redisTemplate.delete(key);
        } else {
            servletContext.removeAttribute(key);
        }
    }

    /**
     * Applies a time-to-live to {@code key}. With Redis this uses the native
     * expire mechanism; in the servlet-context fallback a delay task is
     * scheduled to remove the attribute after the timeout instead.
     */
    public void expire(String key, long timeout, TimeUnit unit) {
        if (moduleUtils.CheckCoreModuleState(CoreModuleEnum.REDIS)) {
            redisTemplate.expire(key, timeout, unit);
        } else {
            delayTaskUtils.put(new DelayTask(new SessionAttributeExpireDelayTaskElement(key)
                    , timeout, unit));
        }
    }
}
|
juanfelipe82193/opensap
|
sapui5-sdk-1.74.0/resources/sap/fe/macros/Contact.metadata.js
|
<reponame>juanfelipe82193/opensap
/*!
* SAP UI development toolkit for HTML5 (SAPUI5)
(c) Copyright 2009-2017 SAP SE. All rights reserved
*/
// Generated, minified SAPUI5 build artifact: registers metadata for the
// sap.fe.macros.Contact XML macro (stereotype "xmlmacro"). Not hand-written
// source — edit the unminified original instead.
sap.ui.define(["./MacroMetadata"],function(M){"use strict";var C=M.extend("sap.fe.macros.Contact",{name:"Contact",namespace:"sap.fe.macros",fragment:"sap.fe.macros.Contact",metadata:{stereotype:"xmlmacro",metadataContexts:{contact:{required:true},dataField:{required:false}},properties:{displayHeader:{type:"boolean",defaultValue:true},id:{type:"string"}},events:{}}});return C;});
|
anniyanvr/pipeline
|
internal/providers/azure/pke/workflow/create_vnet_activity.go
|
<filename>internal/providers/azure/pke/workflow/create_vnet_activity.go<gh_stars>1000+
// Copyright © 2019 <NAME>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package workflow
import (
"context"
"net/http"
"emperror.dev/errors"
"github.com/Azure/azure-sdk-for-go/services/network/mgmt/2018-10-01/network"
"github.com/Azure/go-autorest/autorest/to"
"go.uber.org/cadence/activity"
zapadapter "logur.dev/adapter/zap"
"logur.dev/logur"
)
// CreateVnetActivityName is the default registration name of the activity
const CreateVnetActivityName = "pke-azure-create-vnet"

// CreateVnetActivity represents an activity for creating an Azure virtual
// network. It carries the factory used to build per-organization Azure
// network clients.
type CreateVnetActivity struct {
	azureClientFactory *AzureClientFactory
}
// MakeCreateVnetActivity returns a new CreateVnetActivity backed by the given
// Azure client factory.
func MakeCreateVnetActivity(azureClientFactory *AzureClientFactory) CreateVnetActivity {
	return CreateVnetActivity{azureClientFactory: azureClientFactory}
}
// CreateVnetActivityInput represents the input needed for executing a CreateVnetActivity
type CreateVnetActivityInput struct {
	OrganizationID    uint   // owning organization; used to resolve Azure credentials
	SecretID          string // secret holding the Azure credentials
	ClusterName       string // used for tagging created resources
	ResourceGroupName string
	VirtualNetwork    VirtualNetwork
}

// VirtualNetwork describes the desired virtual network configuration.
type VirtualNetwork struct {
	Name     string
	CIDRs    []string // address-space prefixes
	Location string
	Subnets  []Subnet
}

// Subnet describes a single subnet and its optional associated resources.
type Subnet struct {
	Name                   string
	CIDR                   string
	NetworkSecurityGroupID string // optional; empty means no NSG association
	RouteTableID           string // optional; empty means no route table association
}

// CreateVnetActivityOutput carries the created/updated network's resource IDs.
type CreateVnetActivityOutput struct {
	VirtualNetworkID string
	SubnetIDs        map[string]string // subnet name -> subnet resource ID
}
// Execute performs the activity: it ensures the described virtual network
// exists in the resource group — creating it, or merging the requested
// address space/subnets/tags into an existing one — and returns the resulting
// network and subnet resource IDs.
func (a CreateVnetActivity) Execute(ctx context.Context, input CreateVnetActivityInput) (output CreateVnetActivityOutput, err error) {
	logger := activity.GetLogger(ctx).Sugar().With(
		"organization", input.OrganizationID,
		"cluster", input.ClusterName,
		"secret", input.SecretID,
		"resourceGroup", input.ResourceGroupName,
		"networkName", input.VirtualNetwork.Name,
		"networkLocation", input.VirtualNetwork.Location,
	)

	// Key-value pairs attached to wrapped errors below.
	keyvals := []interface{}{
		"resourceGroup", input.ResourceGroupName,
		"networkName", input.VirtualNetwork.Name,
	}

	logger.Info("create or update virtual network")

	cc, err := a.azureClientFactory.New(input.OrganizationID, input.SecretID)
	if err = errors.WrapIf(err, "failed to create cloud connection"); err != nil {
		return
	}

	client := cc.GetVirtualNetworksClient()

	vnet, err := client.Get(ctx, input.ResourceGroupName, input.VirtualNetwork.Name, "")
	// NOTE(review): the 404 check deliberately precedes the error check —
	// "not found" means building the network from scratch. This relies on the
	// SDK populating StatusCode on the response even when err is non-nil;
	// confirm against the autorest client behavior.
	if vnet.StatusCode == http.StatusNotFound {
		vnet = input.getCreateOrUpdateVirtualNetworkParams()
	} else if err = errors.WrapIf(err, "failed to get virtual network"); err != nil {
		return
	} else {
		// Network already exists: merge the requested configuration into it.
		input.extendVirtualNetwork(&vnet, zapadapter.New(logger.Desugar()))
	}

	logger.Debug("sending request to create or update virtual network")

	future, err := client.CreateOrUpdate(ctx, input.ResourceGroupName, input.VirtualNetwork.Name, vnet)
	if err = errors.WrapIfWithDetails(err, "sending request to create or update virtual network failed", keyvals...); err != nil {
		return
	}

	logger.Debug("waiting for the completion of create or update virtual network operation")

	err = future.WaitForCompletionRef(ctx, client.Client)
	if err = errors.WrapIfWithDetails(err, "waiting for the completion of create or update virtual network operation failed", keyvals...); err != nil {
		return
	}

	vnet, err = future.Result(client.VirtualNetworksClient)
	if err = errors.WrapIfWithDetails(err, "getting virtual network create or update result failed", keyvals...); err != nil {
		return
	}

	output.VirtualNetworkID = to.String(vnet.ID)

	// Map subnet name -> subnet resource ID for downstream activities.
	output.SubnetIDs = make(map[string]string)
	if vnet.Subnets != nil {
		for _, s := range *vnet.Subnets {
			if s.Name != nil && s.ID != nil {
				output.SubnetIDs[*s.Name] = *s.ID
			}
		}
	}

	return
}
// extendVirtualNetwork merges the requested configuration into an existing
// virtual network in place: it unions address-space CIDRs, adds subnets that
// are missing (matched by name), and attaches the cluster's shared ownership
// tag. Nothing already configured on the network is removed or overwritten.
func (input CreateVnetActivityInput) extendVirtualNetwork(vnet *network.VirtualNetwork, logger logur.Logger) {
	if vnet == nil {
		return
	}

	if vnet.VirtualNetworkPropertiesFormat == nil {
		vnet.VirtualNetworkPropertiesFormat = new(network.VirtualNetworkPropertiesFormat)
		logger.Debug("vnet.VirtualNetworkPropertiesFormat was nil, created new")
	}

	// A location mismatch cannot be fixed on an existing network, so it is
	// only logged as a warning.
	if loc := to.String(vnet.Location); loc != input.VirtualNetwork.Location {
		logger.Warn("virtual network location mismatch", map[string]interface{}{
			"existing": loc,
			"incoming": input.VirtualNetwork.Location,
		})
	}

	if len(input.VirtualNetwork.CIDRs) > 0 {
		if vnet.AddressSpace == nil {
			vnet.AddressSpace = new(network.AddressSpace)
			logger.Debug("vnet.AddressSpace was nil, created new")
		}

		// Append only CIDRs not already present (set union, order preserved).
		lookup := make(map[string]bool)
		aps := to.StringSlice(vnet.AddressSpace.AddressPrefixes)
		for _, ap := range aps {
			lookup[ap] = true
		}
		for _, cidr := range input.VirtualNetwork.CIDRs {
			if !lookup[cidr] {
				lookup[cidr] = true
				aps = append(aps, cidr)
			}
		}
		vnet.AddressSpace.AddressPrefixes = to.StringSlicePtr(aps)
	}

	if len(input.VirtualNetwork.Subnets) > 0 {
		// Add requested subnets that do not exist yet; existing subnets with
		// the same name are left untouched.
		lookup := make(map[string]bool)
		subnets := toSubnetSlice(vnet.Subnets)
		for _, s := range subnets {
			lookup[to.String(s.Name)] = true
		}
		for _, subnet := range input.VirtualNetwork.Subnets {
			if !lookup[subnet.Name] {
				lookup[subnet.Name] = true
				subnets = append(subnets, subnetToNetworkSubnet(subnet))
			}
		}
		vnet.Subnets = &subnets
	}

	// Tag the network as shared with this cluster unless already tagged.
	// (Reading a nil map is safe in Go; the map is only allocated on write.)
	tag := getSharedTag(input.ClusterName)
	if _, exists := vnet.Tags[tag.Key]; !exists {
		if vnet.Tags == nil {
			vnet.Tags = make(map[string]*string)
		}
		vnet.Tags[tag.Key] = to.StringPtr(tag.Value)
	}
}
// getCreateOrUpdateVirtualNetworkParams assembles the full VirtualNetwork
// payload used when the network does not exist yet: location, address space,
// subnets and the cluster ownership tags.
func (input CreateVnetActivityInput) getCreateOrUpdateVirtualNetworkParams() network.VirtualNetwork {
	subnets := make([]network.Subnet, 0, len(input.VirtualNetwork.Subnets))
	for _, s := range input.VirtualNetwork.Subnets {
		subnets = append(subnets, subnetToNetworkSubnet(s))
	}

	return network.VirtualNetwork{
		Location: to.StringPtr(input.VirtualNetwork.Location),
		VirtualNetworkPropertiesFormat: &network.VirtualNetworkPropertiesFormat{
			AddressSpace: &network.AddressSpace{
				AddressPrefixes: to.StringSlicePtr(input.VirtualNetwork.CIDRs),
			},
			Subnets: &subnets,
		},
		Tags: getClusterTags(input.ClusterName),
	}
}
// toSubnetSlice dereferences an optional subnet slice, yielding nil for a nil
// pointer.
func toSubnetSlice(ptr *[]network.Subnet) []network.Subnet {
	var subnets []network.Subnet
	if ptr != nil {
		subnets = *ptr
	}
	return subnets
}
// subnetToNetworkSubnet converts the workflow subnet description into the
// Azure SDK representation. Optional associations (network security group,
// route table) are attached only when an ID was provided, so no empty
// resource references are sent to the API.
//
// Fix: the original always emitted a RouteTable reference — even with an
// empty RouteTableID — which is inconsistent with the conditional NSG
// handling and would send an invalid empty resource ID.
func subnetToNetworkSubnet(subnet Subnet) network.Subnet {
	var nsg *network.SecurityGroup
	if subnet.NetworkSecurityGroupID != "" {
		nsg = &network.SecurityGroup{
			ID: to.StringPtr(subnet.NetworkSecurityGroupID),
		}
	}

	var routeTable *network.RouteTable
	if subnet.RouteTableID != "" {
		routeTable = &network.RouteTable{
			ID: to.StringPtr(subnet.RouteTableID),
		}
	}

	return network.Subnet{
		Name: to.StringPtr(subnet.Name),
		SubnetPropertiesFormat: &network.SubnetPropertiesFormat{
			AddressPrefix:        to.StringPtr(subnet.CIDR),
			NetworkSecurityGroup: nsg,
			RouteTable:           routeTable,
		},
	}
}
|
hi019/doublequote
|
pkg/http/collection_test.go
|
package http
import (
"fmt"
"net/http"
"strings"
"testing"
"doublequote/pkg/domain"
"doublequote/pkg/utils"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// TestServer_handleCreateCollection verifies the create-collection endpoint:
// a successful service call yields 201 with the created collection; a
// service-layer failure is hidden behind a generic 500 problem response.
func TestServer_handleCreateCollection(t *testing.T) {
	t.Run("OK", func(t *testing.T) {
		s := NewTestServer()

		s.CollectionService.
			On("CreateCollection", mock.Anything, &domain.Collection{Name: "Test", UserID: 0}).
			Return(&domain.Collection{
				ID:   1,
				Name: "Test",
			}, nil)

		req, err := http.NewRequest("POST", "", strings.NewReader(`{"name": "Test"}`))
		assert.Nil(t, err)

		rr := MakeAuthenticatedRequest(req, s.handleCreateCollection, &domain.User{Email: "<EMAIL>"})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusCreated, rr.Code)
		assert.JSONEq(t, `{"data": {"id": 1, "name": "Test"} }`, rr.Body.String())
	})

	t.Run("DbErr", func(t *testing.T) {
		s := NewTestServer()

		// Internal error details must not leak into the response body.
		s.CollectionService.
			On("CreateCollection", mock.Anything, &domain.Collection{Name: "Test", UserID: 0}).
			Return(nil, fmt.Errorf("sqlite: /dev/null does not support sqlite"))

		req, err := http.NewRequest("POST", "", strings.NewReader(`{"name": "Test"}`))
		assert.Nil(t, err)

		rr := MakeAuthenticatedRequest(req, s.handleCreateCollection, &domain.User{Email: "<EMAIL>"})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusInternalServerError, rr.Code)
		assert.JSONEq(t, `{"title": "Internal error.", "type": "about:blank"}`, rr.Body.String())
	})
}
// TestServer_handleListCollections verifies the list-collections endpoint:
// collections of the authenticated user (UserID 0 in these fixtures) are
// returned on success; service failures map to a generic 500.
func TestServer_handleListCollections(t *testing.T) {
	t.Run("OK", func(t *testing.T) {
		s := NewTestServer()

		s.CollectionService.
			On("FindCollections", mock.Anything, domain.CollectionFilter{UserID: utils.IntPtr(0), Limit: 100}, domain.CollectionInclude{}).
			Return([]*domain.Collection{{ID: 1, Name: "Test"}}, 1, nil)

		req, err := http.NewRequest("GET", "", nil)
		assert.Nil(t, err)

		rr := MakeAuthenticatedRequest(req, s.handleListCollections, &domain.User{Email: "<EMAIL>"})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusOK, rr.Code)
		assert.JSONEq(t, `{ "data": { "collections": [{"id": 1, "name": "Test"}] } }`, rr.Body.String())
	})

	t.Run("DbErr", func(t *testing.T) {
		s := NewTestServer()

		s.CollectionService.
			On("FindCollections", mock.Anything, domain.CollectionFilter{UserID: utils.IntPtr(0), Limit: 100}, domain.CollectionInclude{}).
			Return(nil, 0, fmt.Errorf("mongo: unexpected query 'SELECT'"))

		req, err := http.NewRequest("GET", "", nil)
		assert.Nil(t, err)

		rr := MakeAuthenticatedRequest(req, s.handleListCollections, &domain.User{Email: "<EMAIL>"})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusInternalServerError, rr.Code)
		assert.JSONEq(t, `{"title": "Internal error.", "type": "about:blank"}`, rr.Body.String())
	})
}
// TestServer_handleGetCollectionsFeeds verifies the collection-feeds endpoint:
// feeds of an owned collection are returned; service errors map to 500; a
// missing collection, or one owned by another user, yields 404.
//
// Fix: the "DbErr" subtest was a copy-paste of the ListCollections DbErr
// subtest (it mocked FindCollections and invoked handleListCollections, never
// exercising handleGetCollectionFeeds). It now drives the handler under test
// with a failing FindFeeds call.
func TestServer_handleGetCollectionsFeeds(t *testing.T) {
	t.Run("OK", func(t *testing.T) {
		s := NewTestServer()

		s.FeedService.
			On("FindFeeds", mock.Anything, domain.FeedFilter{CollectionID: utils.IntPtr(1), Limit: 500}, domain.FeedInclude{}).
			Return([]*domain.Feed{{ID: 1, Name: "Test", Domain: "test.com"}}, 1, nil)
		s.CollectionService.
			On("FindCollectionByID", mock.Anything, 1, domain.CollectionInclude{}).
			Return(&domain.Collection{}, nil)

		req, err := http.NewRequest("GET", "", nil)
		utils.AddParamToContext(t, req, "collectionID", "1")
		assert.Nil(t, err)

		rr := MakeAuthenticatedRequest(req, s.handleGetCollectionFeeds, &domain.User{Email: "<EMAIL>"})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusOK, rr.Code)
		assert.JSONEq(t, `{ "data": { "feeds": [{"id": 1, "name": "Test", "domain": "test.com"}] } }`, rr.Body.String())
	})

	t.Run("DbErr", func(t *testing.T) {
		s := NewTestServer()

		// The collection lookup succeeds, but loading its feeds fails; the
		// handler must respond with a generic 500.
		s.CollectionService.
			On("FindCollectionByID", mock.Anything, 1, domain.CollectionInclude{}).
			Return(&domain.Collection{}, nil)
		s.FeedService.
			On("FindFeeds", mock.Anything, domain.FeedFilter{CollectionID: utils.IntPtr(1), Limit: 500}, domain.FeedInclude{}).
			Return(nil, 0, fmt.Errorf("mongo: unexpected query 'SELECT'"))

		req, err := http.NewRequest("GET", "", nil)
		utils.AddParamToContext(t, req, "collectionID", "1")
		assert.Nil(t, err)

		rr := MakeAuthenticatedRequest(req, s.handleGetCollectionFeeds, &domain.User{Email: "<EMAIL>"})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusInternalServerError, rr.Code)
		assert.JSONEq(t, `{"title": "Internal error.", "type": "about:blank"}`, rr.Body.String())
	})

	t.Run("CollectionNotFound", func(t *testing.T) {
		s := NewTestServer()

		s.FeedService.
			On("FindFeeds", mock.Anything, domain.FeedFilter{CollectionID: utils.IntPtr(1), Limit: 500}, domain.FeedInclude{}).
			Return([]*domain.Feed{{ID: 1, Name: "Test", Domain: "test.com"}}, 1, nil)
		s.CollectionService.
			On("FindCollectionByID", mock.Anything, 1, domain.CollectionInclude{}).
			Return(&domain.Collection{}, domain.Errorf(domain.ENOTFOUND, domain.ErrNotFound, "Collection"))

		req, err := http.NewRequest("GET", "", nil)
		utils.AddParamToContext(t, req, "collectionID", "1")
		assert.Nil(t, err)

		rr := MakeAuthenticatedRequest(req, s.handleGetCollectionFeeds, &domain.User{Email: "<EMAIL>"})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusNotFound, rr.Code)
		assert.JSONEq(t, `{"title":"Collection not found.", "type":"about:blank"}`, rr.Body.String())
	})

	t.Run("CollectionOwnedByOtherUser", func(t *testing.T) {
		s := NewTestServer()

		s.FeedService.
			On("FindFeeds", mock.Anything, domain.FeedFilter{CollectionID: utils.IntPtr(1), Limit: 500}, domain.FeedInclude{}).
			Return([]*domain.Feed{{ID: 1, Name: "Test", Domain: "test.com"}}, 1, nil)

		// Return collection owned by other user; it must be indistinguishable
		// from a missing collection (404, no information leak).
		s.CollectionService.
			On("FindCollectionByID", mock.Anything, 1, domain.CollectionInclude{}).
			Return(&domain.Collection{UserID: 2}, nil)

		req, err := http.NewRequest("GET", "", nil)
		utils.AddParamToContext(t, req, "collectionID", "1")
		assert.Nil(t, err)

		rr := MakeAuthenticatedRequest(req, s.handleGetCollectionFeeds, &domain.User{ID: 1})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusNotFound, rr.Code)
		assert.JSONEq(t, `{"title":"Collection not found.", "type":"about:blank"}`, rr.Body.String())
	})
}
// TestServer_handlePutCollectionFeeds verifies replacing a collection's feed
// list: a valid request updates the feed IDs and echoes them back; a
// collection the user cannot access yields 404.
func TestServer_handlePutCollectionFeeds(t *testing.T) {
	t.Run("OK", func(t *testing.T) {
		s := NewTestServer()

		// Setup mocks
		s.CollectionService.
			On("FindCollectionByID", mock.Anything, 1, domain.CollectionInclude{}).
			Return(&domain.Collection{}, nil)
		s.CollectionService.
			On("UpdateCollection", mock.Anything, 1, domain.CollectionUpdate{FeedsIDs: &[]int{1, 2}}).
			Return(&domain.Collection{}, nil)

		// Setup request
		req, err := http.NewRequest("PUT", "", strings.NewReader(`{"feeds": [1, 2]}`))
		utils.AddParamToContext(t, req, "collectionID", "1")
		assert.Nil(t, err)

		// Make request
		rr := MakeAuthenticatedRequest(req, s.handlePutCollectionFeeds, &domain.User{})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusOK, rr.Code)
		assert.JSONEq(t, `{ "data": { "feeds": [1,2] } }`, rr.Body.String())
	})

	t.Run("OtherUsersCollection", func(t *testing.T) {
		s := NewTestServer()

		// Setup mocks: lookup reports "not found" for inaccessible collections.
		s.CollectionService.
			On("FindCollectionByID", mock.Anything, 1, domain.CollectionInclude{}).
			Return(&domain.Collection{}, domain.Errorf(domain.ENOTFOUND, domain.ErrNotFound, "Collection"))

		// Setup request
		req, err := http.NewRequest("PUT", "", strings.NewReader(`{"feeds": [1, 2]}`))
		utils.AddParamToContext(t, req, "collectionID", "1")
		assert.Nil(t, err)

		// Make request
		rr := MakeAuthenticatedRequest(req, s.handlePutCollectionFeeds, &domain.User{})

		s.UserService.AssertExpectations(t)
		assert.Equal(t, http.StatusNotFound, rr.Code)
		assert.JSONEq(t, `{ "title": "Collection not found.", "type": "about:blank" }`, rr.Body.String())
	})
}
|
nickpnmrv/cumulocity-examples
|
tracker-agent/src/main/java/c8y/trackeragent/tracker/ConnectedTracker.java
|
<reponame>nickpnmrv/cumulocity-examples<filename>tracker-agent/src/main/java/c8y/trackeragent/tracker/ConnectedTracker.java<gh_stars>1-10
/*
* Copyright (c) 2012-2020 Cumulocity GmbH
* Copyright (c) 2021 Software AG, Darmstadt, Germany and/or Software AG USA Inc., Reston, VA, USA,
* and/or its subsidiaries and/or its affiliates and/or their licensors.
*
* Use, reproduction, transfer, publication or disclosure is prohibited except as specifically provided
* for in your License Agreement with Software AG.
*/
package c8y.trackeragent.tracker;
import c8y.trackeragent.context.OperationContext;
import c8y.trackeragent.protocol.TrackingProtocol;
import c8y.trackeragent.server.ConnectionDetails;
/**
 * A tracker device connection bound to one tracking protocol. Implementations
 * translate between raw device reports/commands and platform operations.
 */
public interface ConnectedTracker {

    /** Executes the given platform operation against the tracker device. */
    void executeOperation(OperationContext operation) throws Exception;

    /** Parses and processes raw report bytes received over the given connection. */
    void executeReports(ConnectionDetails connectionDetails, byte[] reports);

    /** Returns the tracking protocol this tracker implementation speaks. */
    TrackingProtocol getTrackingProtocol();

    /** Translates a platform operation into its protocol-level string form. */
    String translateOperation(OperationContext operationCtx) throws Exception;
}
|
Yannic/chromium
|
components/metrics/call_stack_profile_metrics_provider.h
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_METRICS_CALL_STACK_PROFILE_METRICS_PROVIDER_H_
#define COMPONENTS_METRICS_CALL_STACK_PROFILE_METRICS_PROVIDER_H_
#include <string>
#include "base/callback.h"
#include "base/feature_list.h"
#include "base/macros.h"
#include "base/time/time.h"
#include "components/metrics/metrics_provider.h"
#include "third_party/metrics_proto/sampled_profile.pb.h"
namespace metrics {
class ChromeUserMetricsExtension;
// Performs metrics logging for the stack sampling profiler.
// Performs metrics logging for the stack sampling profiler.
class CallStackProfileMetricsProvider : public MetricsProvider {
 public:
  // A callback type that can be registered to intercept profiles, for testing
  // purposes.
  using InterceptorCallback =
      base::RepeatingCallback<void(SampledProfile profile)>;

  CallStackProfileMetricsProvider();

  // Not copyable or assignable.
  CallStackProfileMetricsProvider(const CallStackProfileMetricsProvider&) =
      delete;
  CallStackProfileMetricsProvider& operator=(
      const CallStackProfileMetricsProvider&) = delete;

  ~CallStackProfileMetricsProvider() override;

  // Receives SampledProfile protobuf instances. May be called on any thread.
  static void ReceiveProfile(base::TimeTicks profile_start_time,
                             SampledProfile profile);

  // Receives serialized SampledProfile protobuf instances. May be called on any
  // thread. Note that receiving serialized profiles is supported separately so
  // that profiles received in serialized form can be kept in that form until
  // upload. This significantly reduces memory costs. Serialized profile strings
  // may be large, so the caller should use std::move() to provide them to this
  // API rather than copying by value.
  static void ReceiveSerializedProfile(base::TimeTicks profile_start_time,
                                       std::string serialized_sampled_profile);

  // Allows tests to intercept received CPU profiles, to validate that the
  // expected profiles are received. This function must be invoked prior to
  // starting any profiling since the callback is accessed asynchronously on the
  // profiling thread.
  static void SetCpuInterceptorCallbackForTesting(InterceptorCallback callback);

  // MetricsProvider:
  void OnRecordingEnabled() override;
  void OnRecordingDisabled() override;
  void ProvideCurrentSessionData(
      ChromeUserMetricsExtension* uma_proto) override;

  // Enables reporting of sampling heap profiles.
  static const base::Feature kHeapProfilerReporting;

 protected:
  // base::Feature for reporting CPU profiles. Provided here for test use.
  static const base::Feature kSamplingProfilerReporting;

  // Reset the static state to the defaults after startup.
  static void ResetStaticStateForTesting();
};
} // namespace metrics
#endif // COMPONENTS_METRICS_CALL_STACK_PROFILE_METRICS_PROVIDER_H_
|
dongboyan77/quay
|
endpoints/api/test/test_endtoend_auth.py
|
import pytest
from mock import patch
from endpoints.api.search import EntitySearch, LinkExternalEntity
from endpoints.api.test.shared import conduct_api_call
from endpoints.test.shared import client_with_identity
from test.test_ldap import mock_ldap
from test.test_external_jwt_authn import fake_jwt
from test.test_keystone_auth import fake_keystone
from test.fixtures import *
@pytest.fixture(
    params=[mock_ldap, fake_jwt, fake_keystone,]
)
def auth_engine(request):
    # Parametrizes each test over every supported external auth backend
    # (LDAP, external JWT, Keystone). The fixture value is the backend's
    # context-manager factory, entered by the test itself.
    return request.param
@pytest.fixture(
    params=[False, True,]
)
def requires_email(request):
    # Runs each test both with and without the backend requiring an email
    # address for authenticated users.
    return request.param
def test_entity_search(auth_engine, requires_email, client):
    """Entity search surfaces externally-authenticated users matching a prefix."""
    with auth_engine(requires_email=requires_email) as auth:
        # Swap the API's authentication engine for the parametrized backend.
        with patch("endpoints.api.search.authentication", auth):
            # Try an unknown prefix.
            response = conduct_api_call(client, EntitySearch, "GET", params=dict(prefix="unknown"))
            results = response.json["results"]
            assert len(results) == 0

            # Try a known prefix.
            response = conduct_api_call(client, EntitySearch, "GET", params=dict(prefix="cool"))
            results = response.json["results"]
            entity = results[0]
            assert entity["name"] == "cool.user"
            assert entity["kind"] == "external"
def test_link_external_entity(auth_engine, requires_email, client):
    """Linking an external identity creates a local user for known usernames."""
    with auth_engine(requires_email=requires_email) as auth:
        with patch("endpoints.api.search.authentication", auth):
            with client_with_identity("devtable", client) as cl:
                # Try an unknown user.
                conduct_api_call(
                    cl,
                    LinkExternalEntity,
                    "POST",
                    params=dict(username="unknownuser"),
                    expected_code=400,
                )

                # Try a known user.
                response = conduct_api_call(
                    cl, LinkExternalEntity, "POST", params=dict(username="cool.user")
                )

                entity = response.json["entity"]
                # NOTE(review): external "cool.user" becomes local "cool_user" —
                # presumably the linking code normalizes dots to underscores;
                # confirm against the username-normalization logic.
                assert entity["name"] == "cool_user"
                assert entity["kind"] == "user"
|
msk-mind/data-processing
|
.archive/dicom_to_delta.py
|
import click
from pyspark.sql import SparkSession
from luna.common.config import ConfigSet
from luna.common.sparksession import SparkConfig
from luna.common.custom_logger import init_logger
import re, os
# Usage: python3 dicom_to_delta.py -h <hdfs> -a <radiology_dataset_path>
log = init_logger()
# Key under which the application config is registered via ConfigSet.
APP_CFG='APP_CFG'
def create_delta_table(df, table_name, delta_path, merge, purge):
    """
    Write ``df`` as a Delta table at ``delta_path``.

    Exactly one write is performed, selected by the flags:
      - ``purge``: rewrite the table in place (``overwrite`` mode with
        ``overwriteSchema``) — required when a column's type or name changed
        or a column was dropped.
      - ``merge``: append while allowing new columns (``mergeSchema``).
      - neither: plain append.

    Bug fix: previously, ``purge=True`` performed the overwrite write and then
    ALSO fell through into the append branch, writing the same data twice and
    duplicating every row. The branches are now mutually exclusive.

    :param df: Spark DataFrame to persist
    :param table_name: logical table name (currently unused; kept for callers)
    :param delta_path: destination path of the Delta table
    :param merge: allow schema merge on append
    :param purge: rewrite the table from scratch
    """
    # TODO repartition # based on the number of rows/performance
    # TODO upsert - do we still need merge?
    # dcm: update on match AccessionNumber|AcquisitionNumber|SeriesNumber|InstanceNumber, otherwise insert
    # binary: ...later for when we embed it in parquet
    # op: update on match filename, otherwise insert
    if purge:
        # Changing a column's type or name or dropping a column requires rewriting the table.
        df.coalesce(128) \
            .write \
            .format("delta") \
            .option("overwriteSchema", "true") \
            .mode("overwrite") \
            .save(delta_path)
    elif merge:
        # Adding new columns can be achieved with .option("mergeSchema", "true")
        df.coalesce(128) \
            .write \
            .format("delta") \
            .option("mergeSchema", "true") \
            .mode("append") \
            .save(delta_path)
    else:
        df.coalesce(128) \
            .write \
            .format("delta") \
            .mode("append") \
            .save(delta_path)
def remove_delta_table(spark, delta_path):
    """
    Clean up an existing delta table at ``delta_path``.

    Disables Delta's retention-duration safety check and vacuums with a zero
    retention window, so ALL data files are removed immediately.
    """
    from delta.tables import DeltaTable
    dt = DeltaTable.forPath(spark, delta_path)
    # Disable check for retention - default 168 hours (7 days)
    spark.conf.set("spark.databricks.delta.retentionDurationCheck.enabled", "false")
    dt.vacuum(0)  # vacuum all data
@click.command()
@click.option('-f', '--config_file', default = 'config.yaml', help="config file")
@click.option("-h", "--hdfs", default="hdfs://sandbox-hdp.hortonworks.com:8020/", show_default=True, help="HDFS uri e.g. hdfs://sandbox-hdp.hortonworks.com:8020/")
@click.option("-a", "--dataset_address", required=True, help= "path to dataset directory containing parquet/ and dicom/ directories. This path is the directory for which the new delta table directory /table is created under")
# Note: use with caution. Either use merge or purge, not both.
@click.option("-m", "--merge", is_flag=True, default=False, show_default=True, help="(optional) Merge schema - add new columns")
@click.option("-p", "--purge", is_flag=True, default=False, show_default=True, help="(optional) Delete all delta tables - then create new tables")
def cli(config_file, hdfs ,dataset_address, merge, purge):
    """
    Main CLI entry point: validates flags, loads the app config, builds the
    Spark session and delegates to write_to_delta().

    --merge and --purge are mutually exclusive (enforced below).
    """
    if merge and purge:
        raise ValueError("Cannot use flags merge and purge at the same time!")
    ConfigSet(name=APP_CFG, config_file=config_file)
    spark_session = SparkConfig().spark_session(config_name=APP_CFG, app_name="dicom-to-delta")
    write_to_delta(spark_session, hdfs , dataset_address, merge, purge)
def write_to_delta(spark, hdfs, dataset_address, merge, purge):
    """
    Create Delta proxy tables ("dicom" and "dicom_binary") from the DICOM
    binaries and parquet metadata under ``{hdfs}/{dataset_address}``.

    :param spark: active SparkSession
    :param hdfs: HDFS URI prefix
    :param dataset_address: dataset directory containing dicom/ and parquet/
    :param merge: passed through to create_delta_table (schema merge on append)
    :param purge: drop existing tables first, then rewrite
    """
    # form dataset address (contains /parquet, /table, /dicom, and dataset yaml)
    # in case dataset_address is an absolute path, we use + instead of os.path.join
    dataset_address = hdfs + "/" + dataset_address
    # input parquet file paths:
    # path to dicom binary parquet
    binary_path = os.path.join(dataset_address, "dicom")
    # path to parquet containing dicom headers and op metadata
    dcm_path = os.path.join(dataset_address, "parquet")
    # output delta table names:
    binary_table = "dicom_binary"
    dcm_table = "dicom"
    # output delta table paths, all under {dataset_address}/table/
    common_delta_path = os.path.join(dataset_address, "table")
    binary_delta_path = os.path.join(common_delta_path, binary_table)
    dcm_delta_path = os.path.join(common_delta_path, dcm_table)
    # Read dicom binary files recursively (*.dcm only)
    binary_df = spark.read.format("binaryFile") \
        .option("pathGlobFilter", "*.dcm") \
        .option("recursiveFileLookup", "true") \
        .load(binary_path)
    # Read parquet files containing dicom header and dicom op metadata
    dcm_df = spark.read.parquet(dcm_path)
    # To improve read performance when you load data back, Databricks recommends
    # turning off compression when you save data loaded from binary files:
    spark.conf.set("spark.sql.parquet.compression.codec", "uncompressed")
    # clean up the latest delta table version before rewriting
    if purge:
        log.info("Purging dicom and dicom_binary tables..")
        remove_delta_table(spark, binary_delta_path)
        remove_delta_table(spark, dcm_delta_path)
    # Create Delta tables
    create_delta_table(binary_df, binary_table, binary_delta_path, merge, purge)
    create_delta_table(dcm_df, dcm_table, dcm_delta_path, merge, purge)
    log.info("Created dicom and dicom_binary tables")
if __name__ == '__main__':
    cli()  # click entry point
|
SpikeLavender/jdk-resource-code
|
sun/security/util/NamedCurve.java
|
/*
* Copyright (c) 2006, 2014, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package sun.security.util;
import java.io.IOException;
import java.math.BigInteger;
import java.security.spec.*;
/**
* Contains Elliptic Curve parameters.
*
* @since 1.6
* @author <NAME>
*/
public final class NamedCurve extends ECParameterSpec {

    // friendly name for toString() output
    private final String name;

    // well known OID
    private final String oid;

    // DER-encoded form (the curve identified as a named curve via its OID)
    private final byte[] encoded;

    /**
     * Builds the parameter spec and eagerly DER-encodes the curve's OID.
     * The in-memory DER stream cannot genuinely fail, so an IOException
     * here is treated as an internal error.
     */
    NamedCurve(String name, String oid, EllipticCurve curve,
            ECPoint g, BigInteger n, int h) {
        super(curve, g, n, h);
        this.name = name;
        this.oid = oid;

        DerOutputStream out = new DerOutputStream();
        try {
            out.putOID(new ObjectIdentifier(oid));
        } catch (IOException e) {
            throw new RuntimeException("Internal error", e);
        }
        encoded = out.toByteArray();
    }

    /** Returns the curve's friendly name. */
    public String getName() {
        return name;
    }

    /** Returns a defensive copy of the DER-encoded OID of this curve. */
    public byte[] getEncoded() {
        return encoded.clone();
    }

    /** Returns the curve's object identifier in dotted-decimal form. */
    public String getObjectId() {
        return oid;
    }

    public String toString() {
        return name + " (" + oid + ")";
    }
}
|
Cassiobsk8/TestMod3
|
src/main/java/choonster/testmod3/init/ModPotions.java
|
package choonster.testmod3.init;
import choonster.testmod3.TestMod3;
import choonster.testmod3.potion.PotionTestMod3;
import net.minecraft.potion.Potion;
import net.minecraftforge.event.RegistryEvent;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.registry.GameRegistry.ObjectHolder;
import static choonster.testmod3.util.InjectionUtil.Null;
/**
* Registers this mod's {@link Potion}s.
*
* @author Choonster
*/
@SuppressWarnings("WeakerAccess")
@ObjectHolder(TestMod3.MODID)
public class ModPotions {
	// Injected by Forge's @ObjectHolder scan after registration; Null() is a
	// compile-time placeholder replaced with the registered instance.
	public static final PotionTestMod3 TEST = Null();

	@Mod.EventBusSubscriber(modid = TestMod3.MODID)
	public static class RegistrationHandler {
		/**
		 * Register this mod's {@link Potion}s.
		 *
		 * @param event The event
		 */
		@SubscribeEvent
		public static void registerPotions(final RegistryEvent.Register<Potion> event) {
			// NOTE(review): ctor args (false, 2, 2, 2, "test") presumably mean
			// isBadEffect + icon indices + registry name -- confirm against
			// PotionTestMod3's constructor.
			final Potion[] potions = {
					new PotionTestMod3(false, 2, 2, 2, "test"),
			};

			event.getRegistry().registerAll(potions);
		}
	}
}
|
jiongjiongjiong/JavaBasic
|
src/com/zc/javabasic/designpatterns/factory/Client.java
|
package com.zc.javabasic.designpatterns.factory;
/**
* @description:
* @author: Zhangc
* @date: 2018-12-18
*/
public class Client {
    /**
     * Abstract-factory demo: a concrete factory family produces matching
     * parts, and the client only talks to the abstract types.
     */
    public static void main(String[] args) {
        final CarFactory luxuryFactory = new LuxuryCarFactory();
        final Engine engine = luxuryFactory.createEngine();
        engine.run();
        engine.start();
    }
}
|
JandroK/Fire-Engine
|
Fire_Engine/Engine/Source/AssetsTab.h
|
#pragma once
#include "Tab.h"
#include <vector>
#include "Globals.h"
// Broad classification of a discovered file; drives how the Assets panel
// treats and displays it.
enum class AssetType
{
	TEXTURE,
	FBX,
	FOLDER,
	OTHER
};

// A single file tracked by the Assets panel.
class Asset
{
public:
	// Removes this asset (defined in the .cpp) -- TODO confirm whether it
	// deletes the library copy as well as the original.
	void Destroy();

	AssetType type;
	const char* name;        // file name (not owned)
	const char* folder;      // name of the containing folder (not owned)
	std::string fullPath;    // path of the source file
	std::string libraryPath; // path of the imported/library copy (inferred from name)
	bool remove = false;     // marked for removal on the next update
};
// A node of the folder tree shown in the Assets panel.
class Folder
{
public:
	Folder(const char* name, std::string fullpath) : name(name), fullPath(fullpath) {};
	Folder() {};
	// NOTE(review): ~Folder frees nothing; child folders are heap-allocated
	// and released by AssetsTab's destructor instead -- confirm this split
	// ownership is intentional.
	~Folder() {};

	const char* name = "";
	std::string fullPath;
	std::vector<Asset> assets;         // files directly inside this folder
	std::vector<Folder*> childFolders; // heap-allocated child nodes
	Folder* parent;                    // non-owning back pointer
};
// Editor tab that mirrors the project's assets directory as a browsable tree.
class AssetsTab : public Tab
{
public:
	AssetsTab();
	// Frees the folder tree. NOTE(review): relies on RELEASE/RELEASE_VECTOR
	// expanding to complete statements -- confirm the macros, otherwise a ';'
	// is missing before the closing brace.
	virtual ~AssetsTab() { RELEASE_VECTOR(parentFolder->childFolders, parentFolder->childFolders.size()); RELEASE(parentFolder)};

	void Draw() override;            // draws the whole tab
	void DrawFolder(Folder* folder); // draws one folder node (recursive)
	void UpdateAssets();
	void Reload();                   // rebuilds the tree from disk
	void ReloadRecursiveFolder(Folder* folder, const char* goal);
	void RemoveSearchPaths(Folder* folder);
	Asset* GetSelectedAsset();

	std::string currentFolderPath;

private:
	Folder* parentFolder;  // root of the folder tree (owned)
	Folder* currentFolder; // folder currently displayed (non-owning)

	void LoadAssets(Folder* folder);
	AssetType CheckAssetType(const char* assetName);

	Asset* selectedAsset = nullptr;
};
|
blinky-z/DbBackupTool
|
src/main/java/com/blog/repositories/PlannedTasksRepository.java
|
<filename>src/main/java/com/blog/repositories/PlannedTasksRepository.java
package com.blog.repositories;
import com.blog.entities.task.PlannedTask;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.Param;
public interface PlannedTasksRepository extends CrudRepository<PlannedTask, Integer> {

    /** Returns every planned task, newest (highest id) first. */
    Iterable<PlannedTask> findAllByOrderByIdDesc();

    /**
     * Returns first N rows that are not locked. Also sets pessimistic lock on retrieved rows in case of method is called in transaction block.
     * <p>
     * This method uses {@code select for update} statement alongside with {@code skip locked}.
     * <p>
     * Use this method only inside manually started transaction otherwise lock will not be attempted.
     *
     * @param state planned task state
     * @param size how many rows to retrieve
     * @return first N entities
     */
    @Query(value = "select * from planned_backup_tasks where state = :#{#state.name()} limit :#{#size} FOR UPDATE skip locked", nativeQuery = true)
    Iterable<PlannedTask> findFirstNByStateAndLock(@Param(value = "size") Integer size, @Param(value = "state") PlannedTask.State state);
}
|
choch-o/BudgetTrend-v1
|
app/components/Program.js
|
import React from 'react';
import Paper from 'material-ui/Paper';
import {Card, CardActions, CardHeader, CardText} from 'material-ui/Card';
import FlatButton from 'material-ui/FlatButton';
class Program extends React.Component {
render() {
const programStyle = {
height: 136.3
}
var value = this.props.value + '원';
return (
<div>
<Paper zDepth={2}>
<Card style={programStyle}>
<CardHeader
title={this.props.name}
subtitle={value}
/>
<CardActions>
<FlatButton label="관련 있음" />
<FlatButton label="관련 없음" />
</CardActions>
</Card>
</Paper>
</div>
);
}
}
export default Program;
|
deiwin/luncher-api
|
handler/offer_group_post_handler.go
|
package handler
import (
"encoding/json"
"net/http"
"gopkg.in/mgo.v2"
"gopkg.in/mgo.v2/bson"
"github.com/Lunchr/luncher-api/db"
"github.com/Lunchr/luncher-api/db/model"
"github.com/Lunchr/luncher-api/facebook"
"github.com/Lunchr/luncher-api/router"
"github.com/Lunchr/luncher-api/session"
"github.com/julienschmidt/httprouter"
)
// OfferGroupPost handles GET requests to /restaurant/posts/:date. It returns the
// restaurant's offer group post for the given date, or 404 if none exists.
func OfferGroupPost(c db.OfferGroupPosts, sessionManager session.Manager, users db.Users, restaurants db.Restaurants) router.HandlerWithParams {
	handler := func(w http.ResponseWriter, r *http.Request, user *model.User, restaurant *model.Restaurant,
		date model.DateWithoutTime) *router.HandlerError {
		post, err := c.GetByDate(date, restaurant.ID)
		if err == mgo.ErrNotFound {
			return router.NewHandlerError(err, "Offer group post not found", http.StatusNotFound)
		} else if err != nil {
			return router.NewHandlerError(err, "An error occured while trying to fetch a offer group post", http.StatusInternalServerError)
		}
		return writeJSON(w, post)
	}
	return forDate(sessionManager, users, restaurants, handler)
}
// PostOfferGroupPost handles POST requests to /restaurant/posts. It stores the info in the DB and updates the post in FB.
func PostOfferGroupPost(c db.OfferGroupPosts, sessionManager session.Manager, users db.Users, restaurants db.Restaurants,
	facebookPost facebook.Post) router.HandlerWithParams {
	handler := func(w http.ResponseWriter, r *http.Request, user *model.User, restaurant *model.Restaurant) *router.HandlerError {
		// Parse and validate the incoming payload (400 on bad body or date).
		post, handlerErr := parseOfferGroupPost(r, restaurant)
		if handlerErr != nil {
			return handlerErr
		}
		insertedPosts, err := c.Insert(post)
		if err != nil {
			return router.NewHandlerError(err, "Failed to store the post in the DB", http.StatusInternalServerError)
		}
		// Insert returns the stored copies; exactly one post was passed in.
		insertedPost := insertedPosts[0]
		// Push the change to Facebook before acknowledging the caller.
		if handlerErr = facebookPost.Update(insertedPost.Date, user, restaurant); handlerErr != nil {
			return handlerErr
		}
		return writeJSON(w, insertedPost)
	}
	return forRestaurant(sessionManager, users, restaurants, handler)
}
// PutOfferGroupPost handles PUT requests to /restaurant/posts/:date. It stores the info in the DB and updates the post in FB.
func PutOfferGroupPost(c db.OfferGroupPosts, sessionManager session.Manager, users db.Users, restaurants db.Restaurants,
	facebookPost facebook.Post) router.HandlerWithParams {
	handler := func(w http.ResponseWriter, r *http.Request, user *model.User, restaurant *model.Restaurant,
		date model.DateWithoutTime) *router.HandlerError {
		// Only the message template may change on update.
		updatedMessageTemplate, handlerErr := parseOfferGroupPostUpdatedMessage(r)
		if handlerErr != nil {
			return handlerErr
		}
		post, err := c.GetByDate(date, restaurant.ID)
		if err != nil {
			return router.NewSimpleHandlerError("Failed to get the post from DB", http.StatusBadRequest)
		}
		post.MessageTemplate = updatedMessageTemplate
		if err = c.UpdateByID(post.ID, post); err != nil {
			return router.NewSimpleHandlerError("Failed to insert the post to DB", http.StatusBadRequest)
		}
		// Update by date, because the posted data does not include the previous FB post ID
		if handlerErr = facebookPost.Update(post.Date, user, restaurant); handlerErr != nil {
			return handlerErr
		}
		return writeJSON(w, post)
	}
	return forDate(sessionManager, users, restaurants, handler)
}
// HandlerWithRestaurantAndDate is a handler that has already had the session
// user, their restaurant, and a validated :date path parameter resolved.
type HandlerWithRestaurantAndDate func(w http.ResponseWriter, r *http.Request, user *model.User, restaurant *model.Restaurant,
	date model.DateWithoutTime) *router.HandlerError

// forDate wraps a HandlerWithRestaurantAndDate, extracting and validating the
// :date path parameter before delegating. Responds 400 on a missing or
// malformed date.
func forDate(sessionManager session.Manager, users db.Users, restaurants db.Restaurants,
	handler HandlerWithRestaurantAndDate) router.HandlerWithParams {
	handlerWithRestaurant := func(w http.ResponseWriter, r *http.Request, ps httprouter.Params, user *model.User,
		restaurant *model.Restaurant) *router.HandlerError {
		date := model.DateWithoutTime(ps.ByName("date"))
		if date == "" {
			return router.NewStringHandlerError("Date not specified!", "Please specify a date", http.StatusBadRequest)
		}
		if !date.IsValid() {
			return router.NewSimpleHandlerError("Invalid date specified", http.StatusBadRequest)
		}
		return handler(w, r, user, restaurant, date)
	}
	return forRestaurantWithParams(sessionManager, users, restaurants, handlerWithRestaurant)
}
// parseOfferGroupPost decodes and validates the offer-group-post JSON payload,
// binding it to the given restaurant. Returns a 400 handler error on a bad
// body, a missing date, or an invalid date.
func parseOfferGroupPost(r *http.Request, restaurant *model.Restaurant) (*model.OfferGroupPost, *router.HandlerError) {
	// Shape of the JSON document the client sends for a group post.
	var payload struct {
		ID              bson.ObjectId `json:"_id"`
		MessageTemplate string        `json:"message_template"`
		Date            string        `json:"date"`
	}
	if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
		return nil, router.NewHandlerError(err, "Failed to parse the post", http.StatusBadRequest)
	}

	date := model.DateWithoutTime(payload.Date)
	switch {
	case date == "":
		return nil, router.NewStringHandlerError("Date not specified!", "Please specify a date", http.StatusBadRequest)
	case !date.IsValid():
		return nil, router.NewSimpleHandlerError("Invalid date specified", http.StatusBadRequest)
	}

	return &model.OfferGroupPost{
		ID:              payload.ID,
		MessageTemplate: payload.MessageTemplate,
		Date:            date,
		RestaurantID:    restaurant.ID,
	}, nil
}
// Only the message template can be updated, so this helper parses just that
// field from the request body.
func parseOfferGroupPostUpdatedMessage(r *http.Request) (string, *router.HandlerError) {
	// The update payload carries a single field.
	var payload struct {
		MessageTemplate string `json:"message_template"`
	}
	if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
		return "", router.NewHandlerError(err, "Failed to parse the post", http.StatusBadRequest)
	}
	return payload.MessageTemplate, nil
}
|
deliverst/midudev-express-api
|
src/middlewares/userExtractor.js
|
const jwt = require("jsonwebtoken");
module.exports = (req, res, next) => {
const jwt = require("jsonwebtoken");
const authorization = req.get('authorization')
let token = null
if (authorization && authorization.toLowerCase().startsWith('bearer')) {
token = authorization.substring(7)
}
let decodedToken = {}
decodedToken = jwt.verify(token, process.env.SECRETWORD)
const date = new Date(decodedToken.exp * 1000)
console.log(date.toLocaleString())
if (!token || !decodedToken.id) {
return res.status(401).json({error: 'toke missing or invalid'})
}
const {id: userId} = decodedToken
req.userId = userId
next()
}
|
obibuffett/casTest
|
src/testSummary.cpp
|
<gh_stars>0
// The "Clean And Simple Test" (CAST) software framework, tools, and
// documentation are distributed under the terms of the MIT license a
// copy of which is included with this package (see the file "LICENSE"
// in the CAST poject tree's root directory). CAST may be used for any
// purpose, including commercial purposes, at absolutely no cost. No
// paperwork, no royalties, no GNU-like "copyleft" restrictions, either.
// Just download it and use it.
//
// Copyright (c) 2017 <NAME>
#include "testSummary.h"
namespace cas
{
	// All tallies start at zero.
	TestSummary::TestSummary()
		: passed_(0),
		  skipped_(0),
		  failed_(0)
	{}

	// Bump the tally matching the result category; anything else is ignored.
	void TestSummary::addResult(Result res)
	{
		if (res == PASS)
		{
			++passed_;
		}
		else if (res == SKIP)
		{
			++skipped_;
		}
		else if (res == FAIL)
		{
			++failed_;
		}
	}

	// Number of tests that produced any recorded outcome.
	size_t TestSummary::total() const
	{
		return passed_ + skipped_ + failed_;
	}

	size_t TestSummary::passed() const
	{
		return passed_;
	}

	size_t TestSummary::skipped() const
	{
		return skipped_;
	}

	size_t TestSummary::failed() const
	{
		return failed_;
	}

	// Render a human-readable summary block to the given stream.
	void TestSummary::write(std::ostream& out) const
	{
		out << "\nSummary:"
		    << "\n Tests ran: " << total()
		    << "\n Passed: " << passed_
		    << "\n Skipped: " << skipped_
		    << "\n Failed: " << failed_ << "\n\n";
	}
}
|
vrushank-agrawal/opencv-x64-cmake
|
opencv/sources/samples/python/tutorial_code/ImgTrans/distance_transformation/imageSegmentation.py
|
from __future__ import print_function
import cv2 as cv
import numpy as np
import argparse
import random as rng
# OpenCV tutorial script: segments touching/overlapping objects by sharpening
# with a Laplacian kernel, thresholding, applying a distance transform to find
# per-object seed markers, and running the watershed algorithm.
rng.seed(12345)

## [load_image]
# Load the image
parser = argparse.ArgumentParser(description='Code for Image Segmentation with Distance Transform and Watershed Algorithm.\
Sample code showing how to segment overlapping objects using Laplacian filtering, \
in addition to Watershed and Distance Transformation')
parser.add_argument('--input', help='Path to input image.', default='cards.png')
args = parser.parse_args()

src = cv.imread(cv.samples.findFile(args.input))
if src is None:
    print('Could not open or find the image:', args.input)
    exit(0)

# Show source image
cv.imshow('Source Image', src)
## [load_image]

## [black_bg]
# Change the background from white to black, since that will help later to extract
# better results during the use of Distance Transform
src[np.all(src == 255, axis=2)] = 0

# Show output image
cv.imshow('Black Background Image', src)
## [black_bg]

## [sharp]
# Create a kernel that we will use to sharpen our image
# an approximation of second derivative, a quite strong kernel
kernel = np.array([[1, 1, 1], [1, -8, 1], [1, 1, 1]], dtype=np.float32)

# do the laplacian filtering as it is
# well, we need to convert everything in something more deeper then CV_8U
# because the kernel has some negative values,
# and we can expect in general to have a Laplacian image with negative values
# BUT a 8bits unsigned int (the one we are working with) can contain values from 0 to 255
# so the possible negative number will be truncated
imgLaplacian = cv.filter2D(src, cv.CV_32F, kernel)
sharp = np.float32(src)
imgResult = sharp - imgLaplacian

# convert back to 8bits gray scale
imgResult = np.clip(imgResult, 0, 255)
imgResult = imgResult.astype('uint8')
imgLaplacian = np.clip(imgLaplacian, 0, 255)
imgLaplacian = np.uint8(imgLaplacian)

#cv.imshow('Laplace Filtered Image', imgLaplacian)
cv.imshow('New Sharped Image', imgResult)
## [sharp]

## [bin]
# Create binary image from source image
bw = cv.cvtColor(imgResult, cv.COLOR_BGR2GRAY)
_, bw = cv.threshold(bw, 40, 255, cv.THRESH_BINARY | cv.THRESH_OTSU)
cv.imshow('Binary Image', bw)
## [bin]

## [dist]
# Perform the distance transform algorithm
dist = cv.distanceTransform(bw, cv.DIST_L2, 3)

# Normalize the distance image for range = {0.0, 1.0}
# so we can visualize and threshold it
cv.normalize(dist, dist, 0, 1.0, cv.NORM_MINMAX)
cv.imshow('Distance Transform Image', dist)
## [dist]

## [peaks]
# Threshold to obtain the peaks
# This will be the markers for the foreground objects
_, dist = cv.threshold(dist, 0.4, 1.0, cv.THRESH_BINARY)

# Dilate a bit the dist image
kernel1 = np.ones((3,3), dtype=np.uint8)
dist = cv.dilate(dist, kernel1)
cv.imshow('Peaks', dist)
## [peaks]

## [seeds]
# Create the CV_8U version of the distance image
# It is needed for findContours()
dist_8u = dist.astype('uint8')

# Find total markers
contours, _ = cv.findContours(dist_8u, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)

# Create the marker image for the watershed algorithm
markers = np.zeros(dist.shape, dtype=np.int32)

# Draw the foreground markers (label i+1 per contour; 0 means "unknown")
for i in range(len(contours)):
    cv.drawContours(markers, contours, i, (i+1), -1)

# Draw the background marker
cv.circle(markers, (5,5), 3, (255,255,255), -1)
markers_8u = (markers * 10).astype('uint8')
cv.imshow('Markers', markers_8u)
## [seeds]

## [watershed]
# Perform the watershed algorithm
cv.watershed(imgResult, markers)

#mark = np.zeros(markers.shape, dtype=np.uint8)
mark = markers.astype('uint8')
mark = cv.bitwise_not(mark)
# uncomment this if you want to see how the mark
# image looks like at that point
#cv.imshow('Markers_v2', mark)

# Generate random colors, one per detected contour
colors = []
for contour in contours:
    colors.append((rng.randint(0,256), rng.randint(0,256), rng.randint(0,256)))

# Create the result image
dst = np.zeros((markers.shape[0], markers.shape[1], 3), dtype=np.uint8)

# Fill labeled objects with random colors
for i in range(markers.shape[0]):
    for j in range(markers.shape[1]):
        index = markers[i,j]
        if index > 0 and index <= len(contours):
            dst[i,j,:] = colors[index-1]

# Visualize the final image
cv.imshow('Final Result', dst)
## [watershed]

cv.waitKey()
|
ksowmya/cloudstack-1
|
plugins/hypervisors/kvm/src/com/cloud/hypervisor/kvm/resource/LibvirtNetworkDef.java
|
<gh_stars>1-10
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.hypervisor.kvm.resource;
import java.util.ArrayList;
import java.util.List;
/**
 * Builder for a libvirt {@code <network>} XML definition.
 *
 * <p>Callers construct an instance, configure it with exactly one of
 * {@link #defNATNetwork}, {@link #defBrNetwork} or {@link #defLocalNetwork},
 * optionally add DHCP ranges/static mappings, and serialize via
 * {@link #toString()}.
 *
 * <p>Change from the previous revision: the single-statement {@code if}
 * bodies in {@code toString()} are now braced (idiom/readability only; the
 * generated XML is unchanged).
 */
public class LibvirtNetworkDef {
    /** How libvirt forwards traffic for this network. */
    enum netType {
        BRIDGE, NAT, LOCAL
    }

    private final String _networkName;
    private final String _uuid;
    private netType _networkType;
    private String _brName;       // bridge device name
    private boolean _stp;         // spanning tree protocol on the bridge
    private int _delay;           // bridge forward delay; -1 means "omit"
    private String _fwDev;        // NIC used for forwarding (NAT/route)
    private final String _domainName;
    private String _brIPAddr;     // bridge IP address
    private String _brNetMask;    // bridge netmask
    private final List<IPRange> ipranges = new ArrayList<IPRange>();
    private final List<dhcpMapping> dhcpMaps = new ArrayList<dhcpMapping>();

    /** Static DHCP lease: MAC + hostname pinned to a fixed IP. */
    public static class dhcpMapping {
        String _mac;
        String _name;
        String _ip;

        public dhcpMapping(String mac, String name, String ip) {
            _mac = mac;
            _name = name;
            _ip = ip;
        }
    }

    /** Inclusive start/end of a dynamic DHCP address range. */
    public static class IPRange {
        String _start;
        String _end;

        public IPRange(String start, String end) {
            _start = start;
            _end = end;
        }
    }

    public LibvirtNetworkDef(String netName, String uuid, String domName) {
        _networkName = netName;
        _uuid = uuid;
        _domainName = domName;
    }

    /** Configures this network for NAT forwarding through {@code fwNic}. */
    public void defNATNetwork(String brName, boolean stp, int delay,
            String fwNic, String ipAddr, String netMask) {
        _networkType = netType.NAT;
        _brName = brName;
        _stp = stp;
        _delay = delay;
        _fwDev = fwNic;
        _brIPAddr = ipAddr;
        _brNetMask = netMask;
    }

    /** Configures this network for routed (bridge) forwarding. */
    public void defBrNetwork(String brName, boolean stp, int delay,
            String fwNic, String ipAddr, String netMask) {
        _networkType = netType.BRIDGE;
        _brName = brName;
        _stp = stp;
        _delay = delay;
        _fwDev = fwNic;
        _brIPAddr = ipAddr;
        _brNetMask = netMask;
    }

    /** Configures this network as host-only (no forward element emitted). */
    public void defLocalNetwork(String brName, boolean stp, int delay,
            String ipAddr, String netMask) {
        _networkType = netType.LOCAL;
        _brName = brName;
        _stp = stp;
        _delay = delay;
        _brIPAddr = ipAddr;
        _brNetMask = netMask;
    }

    /** Adds a dynamic DHCP address range (inclusive). */
    public void adddhcpIPRange(String start, String end) {
        IPRange ipr = new IPRange(start, end);
        ipranges.add(ipr);
    }

    /** Adds a static DHCP host mapping (MAC/hostname pinned to an IP). */
    public void adddhcpMapping(String mac, String host, String ip) {
        dhcpMapping map = new dhcpMapping(mac, host, ip);
        dhcpMaps.add(map);
    }

    /** Serializes the accumulated configuration as libvirt network XML. */
    @Override
    public String toString() {
        StringBuilder netBuilder = new StringBuilder();
        netBuilder.append("<network>\n");
        netBuilder.append("<name>" + _networkName + "</name>\n");
        if (_uuid != null) {
            netBuilder.append("<uuid>" + _uuid + "</uuid>\n");
        }
        if (_brName != null) {
            netBuilder.append("<bridge name='" + _brName + "'");
            if (_stp) {
                netBuilder.append(" stp='on'");
            } else {
                netBuilder.append(" stp='off'");
            }
            if (_delay != -1) {
                netBuilder.append(" delay='" + _delay + "'");
            }
            netBuilder.append("/>\n");
        }
        if (_domainName != null) {
            netBuilder.append("<domain name='" + _domainName + "'/>\n");
        }
        if (_networkType == netType.BRIDGE) {
            netBuilder.append("<forward mode='route'");
            if (_fwDev != null) {
                netBuilder.append(" dev='" + _fwDev + "'");
            }
            netBuilder.append("/>\n");
        } else if (_networkType == netType.NAT) {
            netBuilder.append("<forward mode='nat'");
            if (_fwDev != null) {
                netBuilder.append(" dev='" + _fwDev + "'");
            }
            netBuilder.append("/>\n");
        }
        if (_brIPAddr != null || _brNetMask != null || !ipranges.isEmpty()
                || !dhcpMaps.isEmpty()) {
            netBuilder.append("<ip");
            if (_brIPAddr != null) {
                netBuilder.append(" address='" + _brIPAddr + "'");
            }
            if (_brNetMask != null) {
                netBuilder.append(" netmask='" + _brNetMask + "'");
            }
            netBuilder.append(">\n");
            if (!ipranges.isEmpty() || !dhcpMaps.isEmpty()) {
                netBuilder.append("<dhcp>\n");
                for (IPRange ip : ipranges) {
                    netBuilder.append("<range start='" + ip._start + "'"
                            + " end='" + ip._end + "'/>\n");
                }
                for (dhcpMapping map : dhcpMaps) {
                    netBuilder.append("<host mac='" + map._mac + "' name='"
                            + map._name + "' ip='" + map._ip + "'/>\n");
                }
                netBuilder.append("</dhcp>\n");
            }
            netBuilder.append("</ip>\n");
        }
        netBuilder.append("</network>\n");
        return netBuilder.toString();
    }
}
|
erintripp/duracloud
|
retrievaltool/src/test/java/org/duracloud/retrieval/mgmt/StatusManagerTest.java
|
<filename>retrievaltool/src/test/java/org/duracloud/retrieval/mgmt/StatusManagerTest.java<gh_stars>0
/*
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://duracloud.org/license/
*/
package org.duracloud.retrieval.mgmt;
import org.junit.Test;
import static junit.framework.Assert.assertEquals;
/**
* @author: <NAME>
* Date: Oct 14, 2010
*/
public class StatusManagerTest {

    /** Asserts all four counters of the shared singleton in one call. */
    private static void verifyCounts(StatusManager status, int inWork,
                                     int succeeded, int noChange, int failed) {
        assertEquals(inWork, status.getInWork());
        assertEquals(succeeded, status.getSucceeded());
        assertEquals(noChange, status.getNoChange());
        assertEquals(failed, status.getFailed());
    }

    /**
     * Walks 150 items through the lifecycle: all started, then 50 each
     * finishing as success, no-change and failure, checking counters at
     * every step.
     */
    @Test
    public void testStatusManager() {
        StatusManager status = StatusManager.getInstance();
        status.reset();
        verifyCounts(status, 0, 0, 0, 0);

        for (int i = 0; i < 150; i++) {
            status.startingWork();
        }
        verifyCounts(status, 150, 0, 0, 0);

        for (int i = 0; i < 50; i++) {
            status.successfulCompletion();
        }
        verifyCounts(status, 100, 50, 0, 0);

        for (int i = 0; i < 50; i++) {
            status.noChangeCompletion();
        }
        verifyCounts(status, 50, 50, 50, 0);

        for (int i = 0; i < 50; i++) {
            status.failedCompletion();
        }
        verifyCounts(status, 0, 50, 50, 50);
    }
}
|
alexjoybc/bcparis-service
|
src/main/java/ca/bc/gov/iamp/bcparis/exception/message/InvalidMessageType.java
|
package ca.bc.gov.iamp.bcparis.exception.message;
/**
 * Thrown when an incoming message declares a type this service does not
 * recognize or support.
 */
public class InvalidMessageType extends RuntimeException{

	private static final long serialVersionUID = 4101415006761946564L;

	/**
	 * @param message description of the unknown/unsupported message type
	 */
	public InvalidMessageType(String message) {
		super(message);
	}
}
|
windystrife/UnrealEngine_NVIDIAGameWork
|
Engine/Plugins/FX/Niagara/Source/NiagaraEditor/Private/NiagaraNodeParameterMapGet.cpp
|
// Copyright 1998-2016 Epic Games, Inc. All Rights Reserved.
#include "NiagaraNodeParameterMapGet.h"
#include "EdGraphSchema_Niagara.h"
#include "NiagaraEditorUtilities.h"
#include "SNiagaraGraphNodeConvert.h"
#include "NiagaraGraph.h"
#include "NiagaraHlslTranslator.h"
#include "ScopedTransaction.h"
#include "SNiagaraGraphParameterMapGetNode.h"
#include "NiagaraEditorModule.h"
#include "INiagaraEditorTypeUtilities.h"
#include "ModuleManager.h"
#define LOCTEXT_NAMESPACE "NiagaraNodeParameterMapGet"
// Starts with no pin pending rename; pins are created in AllocateDefaultPins.
UNiagaraNodeParameterMapGet::UNiagaraNodeParameterMapGet() : UNiagaraNodeParameterMapBase(), PinPendingRename(nullptr)
{
}
void UNiagaraNodeParameterMapGet::AllocateDefaultPins()
{
	// Drop any transient rename state from a previous allocation.
	PinPendingRename = nullptr;
	const UEdGraphSchema_Niagara* Schema = GetDefault<UEdGraphSchema_Niagara>();

	// Single fixed input: the parameter map being read.
	CreatePin(EGPD_Input, Schema->TypeDefinitionToPinType(FNiagaraTypeDefinition::GetParameterMapDef()), TEXT("Source"));
	// "Add" pin that lets the user create new output pins.
	CreateAddPin(EGPD_Output);
}
TSharedPtr<SGraphNode> UNiagaraNodeParameterMapGet::CreateVisualWidget()
{
	// Use the map-get specific Slate node widget.
	return SNew(SNiagaraGraphParameterMapGetNode, this);
}
bool UNiagaraNodeParameterMapGet::IsPinNameEditable(const UEdGraphPin* GraphPinObj) const
{
const UEdGraphSchema_Niagara* Schema = GetDefault<UEdGraphSchema_Niagara>();
FNiagaraTypeDefinition TypeDef = Schema->PinToTypeDefinition(GraphPinObj);
if (TypeDef.IsValid() && GraphPinObj && GraphPinObj->Direction == EGPD_Output && CanRenamePin(GraphPinObj))
{
return true;
}
else
{
return false;
}
}
bool UNiagaraNodeParameterMapGet::IsPinNameEditableUponCreation(const UEdGraphPin* GraphPinObj) const
{
	// Only the output pin currently flagged as pending rename starts out
	// editable (set when a new typed pin is added).
	return GraphPinObj == PinPendingRename
		&& GraphPinObj->Direction == EEdGraphPinDirection::EGPD_Output;
}
bool UNiagaraNodeParameterMapGet::VerifyEditablePinName(const FText& InName, FText& OutErrorMessage, const UEdGraphPin* InGraphPinObj) const
{
	// Output pins must not be given an empty/whitespace-only name; any other
	// combination is accepted.
	if (InName.IsEmptyOrWhitespace() && InGraphPinObj->Direction == EEdGraphPinDirection::EGPD_Output)
	{
		OutErrorMessage = LOCTEXT("InvalidName", "Invalid pin name");
		return false;
	}
	return true;
}
// Creates the input pin paired with an output pin; it holds the output's
// default value. Returns nullptr when OutputPin is null.
UEdGraphPin* UNiagaraNodeParameterMapGet::CreateDefaultPin(UEdGraphPin* OutputPin)
{
	if (OutputPin == nullptr)
	{
		return nullptr;
	}

	// Same type as the output; the name is intentionally empty.
	UEdGraphPin* DefaultPin = CreatePin(EEdGraphPinDirection::EGPD_Input, OutputPin->PinType, TEXT(""));
	const UEdGraphSchema_Niagara* Schema = GetDefault<UEdGraphSchema_Niagara>();
	// Seed the pin's default string from the type's default variable value,
	// when the type's editor utilities can express pin defaults.
	FNiagaraVariable Var = Schema->PinToNiagaraVariable(OutputPin);
	FNiagaraEditorUtilities::ResetVariableToDefaultValue(Var);

	FNiagaraEditorModule& NiagaraEditorModule = FModuleManager::GetModuleChecked<FNiagaraEditorModule>("NiagaraEditor");
	if (Var.IsDataAllocated())
	{
		TSharedPtr<INiagaraEditorTypeUtilities> TypeEditorUtilities = NiagaraEditorModule.GetTypeUtilities(Var.GetType());
		if (TypeEditorUtilities.IsValid() && TypeEditorUtilities->CanHandlePinDefaults())
		{
			FString Value = TypeEditorUtilities->GetPinDefaultStringFromValue(Var);
			DefaultPin->DefaultValue = Value;
		}
	}

	// Pair the two pins by persistent GUID so the association survives
	// serialization and node reconstruction.
	if (!OutputPin->PersistentGuid.IsValid())
	{
		OutputPin->PersistentGuid = FGuid::NewGuid();
	}
	if (!DefaultPin->PersistentGuid.IsValid())
	{
		DefaultPin->PersistentGuid = FGuid::NewGuid();
	}
	PinOutputToPinDefaultPersistentId.Add(OutputPin->PersistentGuid, DefaultPin->PersistentGuid);

	SynchronizeDefaultInputPin(DefaultPin, OutputPin);
	return DefaultPin;
}
// Keeps the pin's display name in sync with its internal name after a rename.
void UNiagaraNodeParameterMapGet::OnPinRenamed(UEdGraphPin* RenamedPin)
{
    RenamedPin->PinFriendlyName = FText::FromString(RenamedPin->PinName);
}
// Called when a typed pin is added to the node. Ensures every output pin has a
// paired default-value input pin, and queues the new output pin for an inline
// rename unless the node is still loading.
// Fix: the original computed Schema/TypeDef and stored CreateDefaultPin's result
// into locals that were never used (dead code / compiler warnings); removed.
void UNiagaraNodeParameterMapGet::OnNewTypedPinAdded(UEdGraphPin* NewPin)
{
    if (NewPin->Direction == EEdGraphPinDirection::EGPD_Output)
    {
        if (GetDefaultPin(NewPin) == nullptr)
        {
            CreateDefaultPin(NewPin);
        }
    }
    // Do not start the rename UI while this object is being loaded/initialized.
    if (HasAnyFlags(RF_NeedLoad | RF_NeedPostLoad | RF_NeedInitialization))
    {
        return;
    }
    if (NewPin->Direction == EEdGraphPinDirection::EGPD_Output)
    {
        PinPendingRename = NewPin;
    }
}
// Removes a pin and, for output pins, its paired default-value input pin, inside
// a single undoable transaction, then notifies the graph of the change.
void UNiagaraNodeParameterMapGet::RemoveDynamicPin(UEdGraphPin* Pin)
{
    FScopedTransaction RemovePinTransaction(LOCTEXT("RemovePinTransaction", "Remove pin"));
    UEdGraphPin* DefaultPin = nullptr;
    if (Pin->Direction == EEdGraphPinDirection::EGPD_Output)
    {
        // Resolve the paired default pin before removal invalidates the lookup.
        DefaultPin = GetDefaultPin(Pin);
    }
    RemovePin(Pin);
    if (DefaultPin != nullptr)
    {
        RemovePin(DefaultPin);
    }
    GetGraph()->NotifyGraphChanged();
}
// Resolves the default-value input pin paired with the given output pin via the
// persistent-guid mapping; returns nullptr when no pairing exists.
UEdGraphPin* UNiagaraNodeParameterMapGet::GetDefaultPin(UEdGraphPin* OutputPin)
{
    const FGuid* DefaultPinGuid = PinOutputToPinDefaultPersistentId.Find(OutputPin->PersistentGuid);
    if (DefaultPinGuid == nullptr)
    {
        return nullptr;
    }
    TArray<UEdGraphPin*> InputPins;
    GetInputPins(InputPins);
    for (UEdGraphPin* InputPin : InputPins)
    {
        if (InputPin->PersistentGuid == *DefaultPinGuid)
        {
            return InputPin;
        }
    }
    return nullptr;
}
// Inverse of GetDefaultPin: resolves the output pin whose default-value input pin
// is the given pin; returns nullptr when no pairing exists.
// NOTE(review): the pairing map is keyed by the OUTPUT pin's guid, so this lookup
// assumes DefaultPin->PersistentGuid is present as a key — matching the original.
UEdGraphPin* UNiagaraNodeParameterMapGet::GetOutputPinForDefault(UEdGraphPin* DefaultPin)
{
    const FGuid* OutputPinGuid = PinOutputToPinDefaultPersistentId.Find(DefaultPin->PersistentGuid);
    if (OutputPinGuid == nullptr)
    {
        return nullptr;
    }
    TArray<UEdGraphPin*> OutputPins;
    GetOutputPins(OutputPins);
    for (UEdGraphPin* OutputPin : OutputPins)
    {
        if (OutputPin->PersistentGuid == *OutputPinGuid)
        {
            return OutputPin;
        }
    }
    return nullptr;
}
// After load, repairs the output-pin/default-pin pairing: creates any missing
// default pins and re-synchronizes existing ones (visibility, tooltip, etc.).
void UNiagaraNodeParameterMapGet::PostLoad()
{
    Super::PostLoad();
    TArray<UEdGraphPin*> OutputPins;
    GetOutputPins(OutputPins);
    for (int32 i = 0; i < OutputPins.Num(); i++)
    {
        // The "add pin" affordance never gets a default pin.
        if (IsAddPin(OutputPins[i]))
        {
            continue;
        }
        UEdGraphPin* InputPin = GetDefaultPin(OutputPins[i]);
        if (InputPin == nullptr)
        {
            CreateDefaultPin(OutputPins[i]);
        }
        else
        {
            SynchronizeDefaultInputPin(InputPin, OutputPins[i]);
        }
    }
}
// Updates a default-value input pin's visibility, connectability and tooltip to
// match its paired output pin. Engine-provided parameters cannot be defaulted by
// the user, so their default pins are hidden and disabled.
// Fix: the original reused the LOCTEXT key "DefaultValueTooltip" for two different
// strings in the same namespace, which is a localization key conflict; the
// engine-parameter branch now uses a distinct key.
void UNiagaraNodeParameterMapGet::SynchronizeDefaultInputPin(UEdGraphPin* DefaultPin, UEdGraphPin* OutputPin)
{
    const UEdGraphSchema_Niagara* Schema = GetDefault<UEdGraphSchema_Niagara>();
    if (!DefaultPin)
    {
        return;
    }
    if (FNiagaraParameterMapHistory::IsEngineParameter(Schema->PinToNiagaraVariable(OutputPin)))
    {
        DefaultPin->bDefaultValueIsIgnored = true;
        DefaultPin->bNotConnectable = true;
        DefaultPin->bHidden = true;
        DefaultPin->PinToolTip = FText::Format(LOCTEXT("DefaultValueTooltipDisabled", "Default value for {0}. Disabled for Engine Parameters."), FText::FromString(OutputPin->PinName)).ToString();
    }
    else
    {
        DefaultPin->bDefaultValueIsIgnored = false;
        DefaultPin->bNotConnectable = false;
        DefaultPin->bHidden = false;
        DefaultPin->PinToolTip = FText::Format(LOCTEXT("DefaultValueTooltip", "Default value for {0} if no other module has set it previously in the stack."), FText::FromString(OutputPin->PinName)).ToString();
    }
}
// Title shown on the node in the graph editor; identical for all title types.
FText UNiagaraNodeParameterMapGet::GetNodeTitle(ENodeTitleType::Type TitleType) const
{
    return LOCTEXT("UNiagaraNodeParameterMapGetName", "Map Get");
}
// Records this node's parameter reads into the parameter-map history. Traces the
// incoming parameter map from input pin 0, then registers a read for every
// non-"add" output pin; default pins are only consulted in the recursive pass.
void UNiagaraNodeParameterMapGet::BuildParameterMapHistory(FNiagaraParameterMapHistoryBuilder& OutHistory, bool bRecursive)
{
    if (bRecursive)
    {
        OutHistory.VisitInputPin(GetInputPin(0), this);
    }
    int32 ParamMapIdx = INDEX_NONE;
    if (GetInputPin(0)->LinkedTo.Num() != 0)
    {
        // Resolve which parameter map instance feeds this node.
        ParamMapIdx = OutHistory.TraceParameterMapOutputPin(GetInputPin(0)->LinkedTo[0]);
    }
    if (ParamMapIdx != INDEX_NONE)
    {
        TArray<UEdGraphPin*> OutputPins;
        GetOutputPins(OutputPins);
        for (int32 i = 0; i < OutputPins.Num(); i++)
        {
            if (IsAddPin(OutputPins[i]))
            {
                continue;
            }
            bool bUsedDefaults = false;
            if (bRecursive)
            {
                OutHistory.HandleVariableRead(ParamMapIdx, OutputPins[i], true, GetDefaultPin(OutputPins[i]), bUsedDefaults);
            }
            else
            {
                OutHistory.HandleVariableRead(ParamMapIdx, OutputPins[i], true, nullptr, bUsedDefaults);
            }
        }
    }
}
void UNiagaraNodeParameterMapGet::Compile(class FHlslNiagaraTranslator* Translator, TArray<int32>& Outputs)
{
const UEdGraphSchema_Niagara* Schema = CastChecked<UEdGraphSchema_Niagara>(GetSchema());
TArray<UEdGraphPin*> InputPins;
GetInputPins(InputPins);
TArray<UEdGraphPin*> OutputPins;
GetOutputPins(OutputPins);
// Initialize the outputs to invalid values.
check(Outputs.Num() == 0);
for (int32 i = 0; i < OutputPins.Num(); i++)
{
if (IsAddPin(OutputPins[i]))
{
continue;
}
Outputs.Add(INDEX_NONE);
}
// First compile fully down the hierarchy for our predecessors..
TArray<int32> CompileInputs;
for (int32 i = 0; i < InputPins.Num(); i++)
{
UEdGraphPin* InputPin = InputPins[i];
if (InputPin->PinType.PinCategory == UEdGraphSchema_Niagara::PinCategoryType ||
InputPin->PinType.PinCategory == UEdGraphSchema_Niagara::PinCategoryEnum)
{
int32 CompiledInput = INDEX_NONE;
if (i == 0) // Only the zeroth item is not an default value pin.
{
CompiledInput = Translator->CompilePin(InputPin);
if (CompiledInput == INDEX_NONE)
{
Translator->Error(LOCTEXT("InputError", "Error compiling input for param map get node."), this, InputPin);
}
}
CompileInputs.Add(CompiledInput);
}
}
UNiagaraGraph* Graph = Cast<UNiagaraGraph>(GetGraph());
// By this point, we've visited all of our child nodes in the call graph. We can mine them to find out everyone contributing to the parameter map (and when).
if (GetInputPin(0) != nullptr && GetInputPin(0)->LinkedTo.Num() > 0)
{
Translator->ParameterMapGet(this, CompileInputs, Outputs);
}
}
// Applies a committed rename to an output pin (transactional), re-synchronizes
// its paired default pin, clears the pending-rename state, and notifies the
// graph. Returns true when the rename was applied.
bool UNiagaraNodeParameterMapGet::CommitEditablePinName(const FText& InName, UEdGraphPin* InGraphPinObj)
{
    if (Pins.Contains(InGraphPinObj) && InGraphPinObj->Direction == EEdGraphPinDirection::EGPD_Output)
    {
        FScopedTransaction AddNewPinTransaction(LOCTEXT("Rename Pin", "Renamed pin"));
        Modify();
        InGraphPinObj->Modify();
        UEdGraphPin* DefaultPin = GetDefaultPin(InGraphPinObj);
        InGraphPinObj->PinFriendlyName = InName;
        InGraphPinObj->PinName = InName.ToString();
        if (DefaultPin)
        {
            DefaultPin->Modify();
            // Default pin tooltip embeds the output pin's name; refresh it.
            SynchronizeDefaultInputPin(DefaultPin, InGraphPinObj);
        }
        if (PinPendingRename == InGraphPinObj)
        {
            PinPendingRename = nullptr;
        }
        GetGraph()->NotifyGraphChanged();
        return true;
    }
    return false;
}
#undef LOCTEXT_NAMESPACE
|
heyx3/heyx3RT
|
RT/RT/Headers/Material.h
|
#pragma once
#include "FastRand.h"
#include "Shape.h"
#include "DataSerialization.h"
namespace RT
{
    //A way to calculate the color of a surface.
    //Thread-safety and lifetime: instances created via ReadValue() are owned by
    //the caller, which must delete them (see ReadValue's note below).
    class RT_API Material : public ISerializable
    {
    public:
        //When a ray is scattered off the surface of this material,
        //The ray should be moved forward this small epsilon amount.
        static const float PushoffDist;
        //Allocates a material on the heap with the given type-name. Used in the serialization system.
        static Material* Create(const String& typeName) { return GetFactory(typeName)(); }
        //Writes out the data for the given Material.
        //Emits two entries: "<name>Type" (the concrete type name) and "<name>Value" (the payload).
        static void WriteValue(const Material& mat, DataWriter& writer, const String& name)
        {
            writer.WriteString(mat.GetTypeName(), name + "Type");
            writer.WriteDataStructure(mat, name + "Value");
        }
        //Reads in the given Material.
        //Note that the code calling this function is responsible for "delete"-ing the new material.
        static void ReadValue(Material*& outMat, DataReader& reader, const String& name)
        {
            String typeName;
            reader.ReadString(typeName, name + "Type");
            outMat = Create(typeName);
            reader.ReadDataStructure(*outMat, name + "Value");
        }
        //Converts a tangent-space normal to world space.
        //Used for normal-mapping.
        static Vector3f TangentSpaceToWorldSpace(const Vector3f& tangentSpaceNormal,
                                                 const Vector3f& worldNormal,
                                                 const Vector3f& worldTangent,
                                                 const Vector3f& worldBitangent);
        //Scatters the given incoming ray after it hits the given surface point of this material.
        //Also potentially attenuates/brightens the ray.
        //Returns "true" if the ray scattered, or "false" if the ray was absorbed.
        virtual bool Scatter(const Ray& rIn, const Vertex& surface,
                             const Shape& shpe, FastRand& prng,
                             Vector3f& outAttenuation, Vector3f& outEmission, Ray& outRay) const = 0;
        //Serialization hooks; base material has no data of its own.
        virtual void ReadData(DataReader& data) override { }
        virtual void WriteData(DataWriter& data) const override { }
        //Gets this class's name as a string.
        //Don't override this manually! Use the "ADD_MATERIAL_REFLECTION_DATA" macros instead.
        virtual String GetTypeName() const = 0;
    protected:
        typedef Material*(*MaterialFactory)();
        //Sets the factory to use for the given class name.
        //Makes the given class name visible to the serialization system.
        //NOTE: This should never be called manually; use the "ADD_MATERIAL_REFLECTION_DATA" macros.
        static void AddReflectionData(const String& typeName, MaterialFactory factory);
        //Gets the factory to create a basic material of the given type name.
        //Used by the serialization system.
        static MaterialFactory GetFactory(const String& typeName);
    };
}
//Put this in a Material sub-class's .h file to allow it to work with the serialization system.
//The extra arguments after "typeName" are the arguments to construct an instance of the class.
//The actual value of the constructor arguments isn't important.
//Registration happens via static initialization of _RefDataInit, so the factory is
//available before any deserialization code runs (the CPP macro below provides the
//out-of-line definition of that static member).
#define ADD_MATERIAL_REFLECTION_DATA_H(className, typeName, ...) \
    public: \
        virtual String GetTypeName() const override { return #typeName; } \
    private: \
        struct RT_API _ReflectionDataInitializer \
        { \
        public: \
            _ReflectionDataInitializer() \
            { \
                AddReflectionData(#typeName, []() { return (Material*)(new className(__VA_ARGS__)); }); \
            } \
        }; \
        static _ReflectionDataInitializer _RefDataInit;
//Put this in a Material sub-class's .cpp file to allow it to work with the serialization system.
#define ADD_MATERIAL_REFLECTION_DATA_CPP(className) \
    className::_ReflectionDataInitializer className::_RefDataInit = className::_ReflectionDataInitializer();
|
konradotto/TS
|
dbReports/iondb/media/resources/kendo/src/js/kendo.mobile.scroller.js
|
/*
* Kendo UI Web v2012.3.1114 (http://kendoui.com)
* Copyright 2012 Telerik AD. All rights reserved.
*
* Kendo UI Web commercial licenses may be obtained at
* https://www.kendoui.com/purchase/license-agreement/kendo-ui-web-commercial.aspx
* If you do not own a commercial license, this file shall be governed by the
* GNU General Public License (GPL) version 3.
* For GPL requirements, please review: http://www.gnu.org/copyleft/gpl.html
*/
// Kendo mobile Scroller module: touch scrolling with inertia, scrollbars,
// pinch-zoom snap-back and optional pull-to-refresh. Vendor code — do not
// modify behavior; comments added for orientation only.
(function($, undefined) {
    var kendo = window.kendo,
        mobile = kendo.mobile,
        fx = kendo.fx,
        ui = mobile.ui,
        proxy = $.proxy,
        extend = $.extend,
        Widget = ui.Widget,
        Class = kendo.Class,
        Movable = kendo.ui.Movable,
        Pane = kendo.ui.Pane,
        PaneDimensions = kendo.ui.PaneDimensions,
        Transition = fx.Transition,
        Animation = fx.Animation,
        abs = Math.abs,
        // Tunables used by the animations/scrollbars below.
        SNAPBACK_DURATION = 500,
        SCROLLBAR_OPACITY = 0.7,
        FRICTION = 0.93,
        OUT_OF_BOUNDS_FRICTION = 0.5,
        RELEASECLASS = "km-scroller-release",
        REFRESHCLASS = "km-scroller-refresh",
        PULL = "pull",
        CHANGE = "change",
        RESIZE = "resize",
        SCROLL = "scroll";
var ZoomSnapBack = Animation.extend({
init: function(options) {
var that = this;
Animation.fn.init.call(that);
extend(that, options);
that.userEvents.bind("gestureend", proxy(that.start, that));
that.tapCapture.bind("press", proxy(that.cancel, that));
},
done: function() {
return this.dimensions.minScale - this.movable.scale < 0.01;
},
tick: function() {
var movable = this.movable;
movable.scaleWith(1.1);
this.dimensions.rescale(movable.scale);
},
onEnd: function() {
var movable = this.movable;
movable.scaleTo(this.dimensions.minScale);
this.dimensions.rescale(movable.scale);
}
});
var DragInertia = Animation.extend({
init: function(options) {
var that = this;
Animation.fn.init.call(that);
extend(that, options, {
transition: new Transition({
axis: options.axis,
movable: options.movable,
onEnd: function() { that._end(); }
})
});
that.tapCapture.bind("press", function() { that.cancel(); });
that.userEvents.bind("end", proxy(that.start, that));
that.userEvents.bind("gestureend", proxy(that.start, that));
that.userEvents.bind("tap", proxy(that.onEnd, that));
},
onCancel: function() {
this.transition.cancel();
},
freeze: function(location) {
var that = this;
that.cancel();
that._moveTo(location);
},
onEnd: function() {
var that = this;
if (that._outOfBounds()) {
that._snapBack();
} else {
that._end();
}
},
done: function() {
return abs(this.velocity) < 1;
},
start: function(e) {
var that = this;
if (!that.dimension.enabled) { return; }
if (that._outOfBounds()) {
that._snapBack();
} else {
that.velocity = e.touch[that.axis].velocity * 16;
if (that.velocity) {
that.tapCapture.captureNext();
Animation.fn.start.call(that);
}
}
},
tick: function() {
var that = this,
dimension = that.dimension,
friction = that._outOfBounds() ? OUT_OF_BOUNDS_FRICTION : FRICTION,
delta = (that.velocity *= friction),
location = that.movable[that.axis] + delta;
if (!that.elastic && dimension.outOfBounds(location)) {
location = Math.max(Math.min(location, dimension.max), dimension.min);
that.velocity = 0;
}
that.movable.moveAxis(that.axis, location);
},
_end: function() {
this.tapCapture.cancelCapture();
this.end();
},
_outOfBounds: function() {
return this.dimension.outOfBounds(this.movable[this.axis]);
},
_snapBack: function() {
var that = this,
dimension = that.dimension,
snapBack = that.movable[that.axis] > dimension.max ? dimension.max : dimension.min;
that._moveTo(snapBack);
},
_moveTo: function(location) {
this.transition.moveTo({ location: location, duration: SNAPBACK_DURATION, ease: Transition.easeOutExpo });
}
});
var ScrollBar = Class.extend({
init: function(options) {
var that = this,
horizontal = options.axis === "x",
element = $('<div class="km-touch-scrollbar km-' + (horizontal ? "horizontal" : "vertical") + '-scrollbar" />');
extend(that, options, {
element: element,
elementSize: 0,
movable: new Movable(element),
scrollMovable: options.movable,
size: horizontal ? "width" : "height"
});
that.scrollMovable.bind(CHANGE, proxy(that._move, that));
that.container.append(element);
},
_move: function() {
var that = this,
axis = that.axis,
dimension = that.dimension,
paneSize = dimension.size,
scrollMovable = that.scrollMovable,
sizeRatio = paneSize / dimension.total,
position = Math.round(-scrollMovable[axis] * sizeRatio),
size = Math.round(paneSize * sizeRatio);
if (position + size > paneSize) {
size = paneSize - position;
} else if (position < 0) {
size += position;
position = 0;
}
if (that.elementSize != size) {
that.element.css(that.size, size + "px");
that.elementSize = size;
}
that.movable.moveAxis(axis, position);
},
show: function() {
this.element.css({opacity: SCROLLBAR_OPACITY, visibility: "visible"});
},
hide: function() {
this.element.css({opacity: 0});
}
});
var Scroller = Widget.extend({
init: function(element, options) {
var that = this;
Widget.fn.init.call(that, element, options);
element = that.element;
element
.css("overflow", "hidden")
.addClass("km-scroll-wrapper")
.wrapInner('<div class="km-scroll-container"/>')
.prepend('<div class="km-scroll-header"/>');
var inner = element.children().eq(1),
tapCapture = new kendo.TapCapture(element),
movable = new Movable(inner),
dimensions = new PaneDimensions({
element: inner,
container: element,
forcedEnabled: that.options.zoom,
change: function() {
that.trigger(RESIZE);
}
}),
userEvents = new kendo.UserEvents(element, {
allowSelection: true,
preventDragEvent: true,
multiTouch: that.options.zoom,
start: function(e) {
dimensions.refresh();
var velocityX = abs(e.x.velocity),
velocityY = abs(e.y.velocity);
if (dimensions.x.enabled && velocityX * 2 >= velocityY ||
dimensions.y.enabled && velocityY * 2 >= velocityX) {
userEvents.capture();
} else {
userEvents.cancel();
}
}
}),
pane = new Pane({
movable: movable,
dimensions: dimensions,
userEvents: userEvents,
elastic: that.options.elastic
}),
zoomSnapBack = new ZoomSnapBack({
movable: movable,
dimensions: dimensions,
userEvents: userEvents,
tapCapture: tapCapture
});
movable.bind(CHANGE, function() {
that.scrollTop = - movable.y;
that.scrollLeft = - movable.x;
that.trigger(SCROLL, {
scrollTop: that.scrollTop,
scrollLeft: that.scrollLeft
});
});
extend(that, {
movable: movable,
dimensions: dimensions,
zoomSnapBack: zoomSnapBack,
userEvents: userEvents,
pane: pane,
tapCapture: tapCapture,
pulled: false,
scrollElement: inner,
fixedContainer: element.children().first()
});
that._initAxis("x");
that._initAxis("y");
dimensions.refresh();
if (that.options.pullToRefresh) {
that._initPullToRefresh();
}
kendo.onResize($.proxy(that.reset, that));
},
scrollHeight: function() {
return this.scrollElement[0].scrollHeight;
},
scrollWidth: function() {
return this.scrollElement[0].scrollWidth;
},
options: {
name: "Scroller",
zoom: false,
pullOffset: 140,
elastic: true,
pullTemplate: "Pull to refresh",
releaseTemplate: "Release to refresh",
refreshTemplate: "Refreshing"
},
events: [
PULL,
SCROLL,
RESIZE
],
setOptions: function(options) {
var that = this;
Widget.fn.setOptions.call(that, options);
if (options.pullToRefresh) {
that._initPullToRefresh();
}
},
reset: function() {
this.movable.moveTo({x: 0, y: 0});
},
scrollTo: function(x, y) {
this.movable.moveTo({x: x, y: y});
},
pullHandled: function() {
var that = this;
that.refreshHint.removeClass(REFRESHCLASS);
that.hintContainer.html(that.pullTemplate({}));
that.yinertia.onEnd();
that.xinertia.onEnd();
},
destroy: function() {
Widget.fn.destroy.call(this);
this.userEvents.destroy();
},
_initPullToRefresh: function() {
var that = this;
that.dimensions.y.forceEnabled();
that.pullTemplate = kendo.template(that.options.pullTemplate);
that.releaseTemplate = kendo.template(that.options.releaseTemplate);
that.refreshTemplate = kendo.template(that.options.refreshTemplate);
that.scrollElement.prepend('<span class="km-scroller-pull"><span class="km-icon"></span><span class="km-template">' + that.pullTemplate({}) + '</span></span>');
that.refreshHint = that.scrollElement.children().first();
that.hintContainer = that.refreshHint.children(".km-template");
that.pane.y.bind("change", proxy(that._paneChange, that));
that.userEvents.bind("end", proxy(that._dragEnd, that));
},
_dragEnd: function() {
var that = this;
if(!that.pulled) {
return;
}
that.pulled = false;
that.refreshHint.removeClass(RELEASECLASS).addClass(REFRESHCLASS);
that.hintContainer.html(that.refreshTemplate({}));
that.trigger("pull");
that.yinertia.freeze(that.options.pullOffset / 2);
},
_paneChange: function() {
var that = this;
if (that.movable.y / OUT_OF_BOUNDS_FRICTION > that.options.pullOffset) {
if (!that.pulled) {
that.pulled = true;
that.refreshHint.removeClass(REFRESHCLASS).addClass(RELEASECLASS);
that.hintContainer.html(that.releaseTemplate({}));
}
} else if (that.pulled) {
that.pulled = false;
that.refreshHint.removeClass(RELEASECLASS);
that.hintContainer.html(that.pullTemplate({}));
}
},
_initAxis: function(axis) {
var that = this,
movable = that.movable,
dimension = that.dimensions[axis],
tapCapture = that.tapCapture,
scrollBar = new ScrollBar({
axis: axis,
movable: movable,
dimension: dimension,
container: that.element
}),
inertia = new DragInertia({
axis: axis,
movable: movable,
tapCapture: tapCapture,
userEvents: that.userEvents,
dimension: dimension,
elastic: that.options.elastic,
end: function() { scrollBar.hide(); }
});
that[axis + "inertia"] = inertia;
that.pane[axis].bind(CHANGE, function() {
scrollBar.show();
});
}
});
    // Register the Scroller widget with the Kendo mobile UI framework.
    ui.plugin(Scroller);
})(window.kendo.jQuery);
|
Bibliome/alvisnlp
|
alvisnlp-bibliome/src/main/java/fr/inra/maiage/bibliome/alvisnlp/bibliomefactory/modules/fasttext/FasttextClassifierTrain.java
|
package fr.inra.maiage.bibliome.alvisnlp.bibliomefactory.modules.fasttext;
import java.io.IOException;
import java.util.logging.Logger;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.Corpus;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.expressions.Expression;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.expressions.ResolverException;
import fr.inra.maiage.bibliome.alvisnlp.core.module.ModuleException;
import fr.inra.maiage.bibliome.alvisnlp.core.module.ProcessingContext;
import fr.inra.maiage.bibliome.alvisnlp.core.module.ProcessingException;
import fr.inra.maiage.bibliome.alvisnlp.core.module.lib.AlvisNLPModule;
import fr.inra.maiage.bibliome.alvisnlp.core.module.lib.Param;
import fr.inra.maiage.bibliome.alvisnlp.core.module.types.IntegerMapping;
import fr.inra.maiage.bibliome.util.Checkable;
import fr.inra.maiage.bibliome.util.files.InputFile;
import fr.inra.maiage.bibliome.util.files.OutputFile;
@AlvisNLPModule(beta = true)
/**
 * AlvisNLP module that trains a fastText supervised classifier. Parameters
 * mirror fastText's command-line training options; training itself is delegated
 * to {@link FasttextClassifierTrainExternalHandler}, which runs the external
 * fastText process.
 */
public class FasttextClassifierTrain extends FasttextClassifierBase<FasttextClassifierTrainResolvedObjects> implements Checkable {
    // Destination for the trained model.
    private OutputFile modelFile;
    // Optional per-class weighting of training examples.
    private IntegerMapping classWeights;
    // fastText n-gram / vocabulary options (null means fastText's own default).
    private Integer wordGrams;
    private Integer minCharGrams;
    private Integer maxCharGrams;
    private Integer minCount = 1;
    private Integer minCountLabel = 0;
    private Integer buckets;
    // Optimization hyper-parameters.
    private Double samplingThreshold = 0.0001;
    private Double learningRate;
    private Integer learningRateUpdateRate = 100;
    private Integer wordVectorSize;
    private Integer windowSize = 5;
    private Integer epochs;
    private Integer negativeSampling = 5;
    private FasttextLossFunction lossFunction;
    private Integer threads = 12;
    private InputFile pretrainedVectors;
    // Autotune: when enabled, fastText searches hyper-parameters against the
    // validation set selected by validationDocuments/validationAttributes.
    private Boolean autotune = false;
    private Expression validationDocuments;
    private FasttextAttribute[] validationAttributes;
    private Integer autotuneDuration = 300;
    // Raw extra options appended to the fastText command line.
    private String[] commandlineOptions;
    /**
     * Runs the external fastText training process on the corpus.
     * Interruption and I/O failures are wrapped as {@link ProcessingException}.
     */
    @Override
    public void process(ProcessingContext<Corpus> ctx, Corpus corpus) throws ModuleException {
        try {
            FasttextClassifierTrainExternalHandler ext = new FasttextClassifierTrainExternalHandler(ctx, this, corpus);
            ext.start();
        }
        catch (InterruptedException|IOException e) {
            throw new ProcessingException(e);
        }
    }
    /**
     * Validates parameter consistency before processing: autotune requires a
     * validation document set; conversely a validation set without autotune is
     * only warned about (it is ignored).
     */
    @Override
    public boolean check(Logger logger) {
        if (autotune && (validationDocuments == null)) {
            logger.severe("autotune requires validationDocuments set");
            return false;
        }
        if ((!autotune) && (validationDocuments != null)) {
            logger.warning("validationDocuments will be ignored");
        }
        return true;
    }
    @Override
    protected FasttextClassifierTrainResolvedObjects createResolvedObjects(ProcessingContext<Corpus> ctx) throws ResolverException {
        return new FasttextClassifierTrainResolvedObjects(ctx, this);
    }
    // --- @Param getters: the AlvisNLP engine discovers module parameters via
    // these annotations; mandatory unless marked otherwise. ---
    @Param
    public OutputFile getModelFile() {
        return modelFile;
    }
    @Param(mandatory = false)
    public IntegerMapping getClassWeights() {
        return classWeights;
    }
    @Param(mandatory = false)
    public Integer getWordGrams() {
        return wordGrams;
    }
    @Param(mandatory = false)
    public Integer getMinCharGrams() {
        return minCharGrams;
    }
    @Param(mandatory = false)
    public Integer getMaxCharGrams() {
        return maxCharGrams;
    }
    @Param
    public Integer getMinCount() {
        return minCount;
    }
    @Param
    public Integer getMinCountLabel() {
        return minCountLabel;
    }
    @Param(mandatory = false)
    public Integer getBuckets() {
        return buckets;
    }
    @Param
    public Double getSamplingThreshold() {
        return samplingThreshold;
    }
    @Param(mandatory = false)
    public Double getLearningRate() {
        return learningRate;
    }
    @Param
    public Integer getLearningRateUpdateRate() {
        return learningRateUpdateRate;
    }
    @Param(mandatory = false)
    public Integer getWordVectorSize() {
        return wordVectorSize;
    }
    @Param
    public Integer getWindowSize() {
        return windowSize;
    }
    @Param(mandatory = false)
    public Integer getEpochs() {
        return epochs;
    }
    @Param
    public Integer getNegativeSampling() {
        return negativeSampling;
    }
    @Param(mandatory = false)
    public FasttextLossFunction getLossFunction() {
        return lossFunction;
    }
    @Param
    public Integer getThreads() {
        return threads;
    }
    @Param(mandatory = false)
    public InputFile getPretrainedVectors() {
        return pretrainedVectors;
    }
    @Param(mandatory = false)
    public String[] getCommandlineOptions() {
        return commandlineOptions;
    }
    @Param(mandatory = false)
    public Expression getValidationDocuments() {
        return validationDocuments;
    }
    @Param(mandatory = false)
    public FasttextAttribute[] getValidationAttributes() {
        return validationAttributes;
    }
    @Param
    public Integer getAutotuneDuration() {
        return autotuneDuration;
    }
    @Param
    public Boolean getAutotune() {
        return autotune;
    }
    // --- Plain setters used by the engine to inject parameter values. ---
    public void setAutotune(Boolean autotune) {
        this.autotune = autotune;
    }
    public void setAutotuneDuration(Integer autotuneDuration) {
        this.autotuneDuration = autotuneDuration;
    }
    public void setValidationDocuments(Expression validationDocuments) {
        this.validationDocuments = validationDocuments;
    }
    public void setValidationAttributes(FasttextAttribute[] validationAttributes) {
        this.validationAttributes = validationAttributes;
    }
    public void setCommandlineOptions(String[] commandlineOptions) {
        this.commandlineOptions = commandlineOptions;
    }
    public void setMinCount(Integer minCount) {
        this.minCount = minCount;
    }
    public void setMinCountLabel(Integer minCountLabel) {
        this.minCountLabel = minCountLabel;
    }
    public void setBuckets(Integer buckets) {
        this.buckets = buckets;
    }
    public void setSamplingThreshold(Double samplingThreshold) {
        this.samplingThreshold = samplingThreshold;
    }
    public void setLearningRate(Double learningRate) {
        this.learningRate = learningRate;
    }
    public void setLearningRateUpdateRate(Integer learningRateUpdateRate) {
        this.learningRateUpdateRate = learningRateUpdateRate;
    }
    public void setWordVectorSize(Integer wordVectorSize) {
        this.wordVectorSize = wordVectorSize;
    }
    public void setWindowSize(Integer windowSize) {
        this.windowSize = windowSize;
    }
    public void setEpochs(Integer epochs) {
        this.epochs = epochs;
    }
    public void setNegativeSampling(Integer negativeSampling) {
        this.negativeSampling = negativeSampling;
    }
    public void setLossFunction(FasttextLossFunction lossFunction) {
        this.lossFunction = lossFunction;
    }
    public void setThreads(Integer threads) {
        this.threads = threads;
    }
    public void setPretrainedVectors(InputFile pretrainedVectors) {
        this.pretrainedVectors = pretrainedVectors;
    }
    public void setWordGrams(Integer wordGrams) {
        this.wordGrams = wordGrams;
    }
    public void setMinCharGrams(Integer minCharGrams) {
        this.minCharGrams = minCharGrams;
    }
    public void setMaxCharGrams(Integer maxCharGrams) {
        this.maxCharGrams = maxCharGrams;
    }
    public void setClassWeights(IntegerMapping classWeights) {
        this.classWeights = classWeights;
    }
    public void setModelFile(OutputFile modelFile) {
        this.modelFile = modelFile;
    }
}
|
kef/hieos
|
src/xds/src/com/vangent/hieos/services/xds/registry/mu/validation/DeleteDocumentSetCommandValidator.java
|
/*
* This code is subject to the HIEOS License, Version 1.0
*
* Copyright(c) 2012 Vangent, Inc. All rights reserved.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.vangent.hieos.services.xds.registry.mu.validation;
import com.vangent.hieos.services.xds.registry.mu.command.DeleteDocumentSetCommand;
import com.vangent.hieos.services.xds.registry.mu.support.MetadataUpdateContext;
import com.vangent.hieos.services.xds.registry.mu.support.MetadataUpdateHelper;
import com.vangent.hieos.services.xds.registry.storedquery.MetadataUpdateStoredQuerySupport;
import com.vangent.hieos.xutil.exception.XDSUnresolvedReferenceException;
import com.vangent.hieos.xutil.exception.XdsException;
import com.vangent.hieos.xutil.metadata.structure.Metadata;
import com.vangent.hieos.xutil.xlog.client.XLogMessage;
import java.util.List;
import org.apache.axiom.om.OMElement;
/**
*
* @author <NAME>
*/
/**
 * Validates an XDS metadata-update "delete document set" request: every object
 * reference supplied by the client must resolve to an existing document, folder
 * or association in the registry.
 *
 * @author <NAME>
 */
public class DeleteDocumentSetCommandValidator extends MetadataUpdateCommandValidator {
    /**
     * Default constructor.
     */
    public DeleteDocumentSetCommandValidator() {
    }
    /**
     * Validates the delete request attached to the current command.
     *
     * @return true if all supplied object references resolve to registry objects
     * @throws XdsException if no references were supplied, nothing was found to
     *         delete, or any supplied reference cannot be resolved
     */
    public boolean validate() throws XdsException {
        DeleteDocumentSetCommand cmd = (DeleteDocumentSetCommand) this.getMetadataUpdateCommand();
        Metadata submittedMetadata = cmd.getSubmittedMetadata();
        // Get list of object references.
        List<String> objectRefIds = submittedMetadata.getObjectRefIds();
        if (objectRefIds.isEmpty()) {
            throw new XDSUnresolvedReferenceException("No object references specified");
        }
        boolean validationSuccess = true;
        Metadata currentMetadata = this.getCurrentRegistryObjects(cmd, objectRefIds);
        if (currentMetadata.getObjectRefs().isEmpty()) {
            throw new XDSUnresolvedReferenceException("No documents, folders or associations found to delete");
        }
        // Verify that each specified object reference is in the loaded metadata.
        List<String> currentObjectRefIds = currentMetadata.getObjectRefIds();
        for (String objectRefId : objectRefIds) {
            if (!currentObjectRefIds.contains(objectRefId)) {
                throw new XDSUnresolvedReferenceException("Can not find supplied object reference = " + objectRefId);
            }
        }
        // TBD: Do some additional validation here.
        return validationSuccess;
    }
    /**
     * Loads the registry objects (documents, folders and associations) matching
     * the supplied UUIDs via stored queries, returned as non-leaf-class refs.
     *
     * @param cmd the delete command providing the update context
     * @param objectRefIds UUIDs to resolve against the registry
     * @return metadata containing all registry objects found for the given ids
     * @throws XdsException on stored-query failure
     */
    private Metadata getCurrentRegistryObjects(DeleteDocumentSetCommand cmd, List<String> objectRefIds) throws XdsException {
        // Get metadata update context for use later.
        MetadataUpdateContext metadataUpdateContext = cmd.getMetadataUpdateContext();
        XLogMessage logMessage = metadataUpdateContext.getLogMessage();
        // Prepare to issue registry query.
        MetadataUpdateStoredQuerySupport muSQ = metadataUpdateContext.getStoredQuerySupport();
        muSQ.setReturnLeafClass(false);
        // Get full metadata for request.
        Metadata currentMetadata = new Metadata();
        // Load documents.
        muSQ.setReason("Get Existing Documents");
        OMElement documentQueryResult = muSQ.getDocumentByUUID(objectRefIds);
        currentMetadata.addMetadata(documentQueryResult, true /* discard_duplicates */);
        // Load folders.
        muSQ.setReason("Get Existing Folders");
        OMElement folderQueryResult = muSQ.getFolderByUUID(objectRefIds);
        currentMetadata.addMetadata(folderQueryResult, true /* discard_duplicates */);
        // Load associations.
        muSQ.setReason("Get Existing Associations");
        OMElement assocQueryResult = muSQ.getAssociationByUUID(objectRefIds);
        currentMetadata.addMetadata(assocQueryResult, true /* discard_duplicates */);
        muSQ.setReason("");
        // Log metadata found.
        MetadataUpdateHelper.logMetadata(logMessage, currentMetadata);
        return currentMetadata;
    }
}
|
Stanislav-Rybonka/studentsdb
|
students/views/students.py
|
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
from django.core.urlresolvers import reverse, reverse_lazy
from django.utils.translation import ugettext as _
from django.views import generic
from django.contrib.auth.mixins import LoginRequiredMixin
from ..forms import StudentForm
from ..models.student import Student
from ..util import get_current_group
from utils.mixins import ManagerRequiredMixin
class StudentsListView(generic.ListView):
    """Paginated list of students, optionally filtered by the currently
    selected group and ordered by a whitelisted query-string column."""
    paginate_by = 10
    template_name = 'cabinet/students/students_list.html'

    def get_context_data(self, **kwargs):
        # This method adds extra variables to template.
        # Get original context data from parent class.
        context = super(StudentsListView, self).get_context_data(**kwargs)
        # Tell template not to show logo on a page.
        context['show_logo'] = False
        return context

    def get_queryset(self):
        # Check if we need to show only one group of students.
        current_group = get_current_group(self.request)
        if current_group:
            queryset = Student.objects.filter(student_group=current_group)
        else:
            # Otherwise show all students.
            queryset = Student.objects.order_by('last_name')
        # Only whitelisted fields may be used for ordering (avoids arbitrary
        # user-supplied order_by expressions).
        order_by = self.request.GET.get('order_by', '')
        if order_by in ('last_name', 'first_name', 'ticket'):
            # BUG FIX: previously the ordered queryset was stored in a local
            # ("students") and only used when reverse=1, so a plain
            # ?order_by=... request silently kept the default ordering.
            queryset = queryset.order_by(order_by)
            if self.request.GET.get('reverse', '') == '1':
                queryset = queryset.reverse()
        return queryset
class StudentAddView(ManagerRequiredMixin, SuccessMessageMixin, generic.CreateView):
    """Create view for adding a new student; restricted to managers and
    flashes a translated success message on save."""
    template_name = 'cabinet/students/student_add_form.html'
    form_class = StudentForm
    model = Student
    success_url = reverse_lazy('students_list')
    success_message = _('%(first_name)s %(last_name)s was successful added!')

    def form_valid(self, form):
        # This method is called when valid form data has been posted.
        # It should return an HttpResponse.
        # TODO notify student by email that he is added to group
        return super(StudentAddView, self).form_valid(form)
class StudentsEditView(LoginRequiredMixin, SuccessMessageMixin, generic.UpdateView):
    """Edit an existing student; requires an authenticated user."""
    model = Student
    template_name = "cabinet/students/students_edit_form.html"
    form_class = StudentForm
    success_url = reverse_lazy('students_list')
    success_message = _('%(first_name)s %(last_name)s Successful updated!')
class StudentsDeleteView(LoginRequiredMixin, generic.DeleteView):
    """Delete a student after confirmation; requires an authenticated user."""
    model = Student
    template_name = 'cabinet/students/students_confirm_delete.html'

    def get_success_url(self):
        # BUG FIX: the success message was previously created inside the
        # reverse() call, passing its return value as reverse()'s second
        # positional argument (urlconf).  Queue the flash message first,
        # then return the redirect URL.
        messages.success(self.request, _('Student successful deleted'))
        return reverse('students_list')
class StudentDetailsView(generic.DetailView):
    """Read-only detail page for a single student."""
    template_name = 'cabinet/students/student_details.html'
    model = Student
|
ScalablyTyped/SlinkyTyped
|
t/twilio-video/src/main/scala/typingsSlinky/twilioVideo/mod/package.scala
|
<reponame>ScalablyTyped/SlinkyTyped<filename>t/twilio-video/src/main/scala/typingsSlinky/twilioVideo/mod/package.scala
package typingsSlinky.twilioVideo
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** Generated Scala.js facade (ScalablyTyped) for the `twilio-video` npm
  * module: type aliases plus inline forwarders that delegate to the raw
  * JavaScript module object (`^`).  Generated code — regenerate rather
  * than hand-editing anything below besides comments.
  */
package object mod {

  // Numeric aliases used by the twilio-video typings.
  type AudioLevel = scala.Double

  type NetworkQualityLevel = scala.Double

  // Connect to a Room with just an access token, or token + options.
  @scala.inline
  def connect(token: java.lang.String): js.Promise[typingsSlinky.twilioVideo.mod.Room] = typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].applyDynamic("connect")(token.asInstanceOf[js.Any]).asInstanceOf[js.Promise[typingsSlinky.twilioVideo.mod.Room]]
  @scala.inline
  def connect(token: java.lang.String, options: typingsSlinky.twilioVideo.mod.ConnectOptions): js.Promise[typingsSlinky.twilioVideo.mod.Room] = (typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].applyDynamic("connect")(token.asInstanceOf[js.Any], options.asInstanceOf[js.Any])).asInstanceOf[js.Promise[typingsSlinky.twilioVideo.mod.Room]]

  // Local track factories (audio / generic list / video), each with an
  // optional options overload.
  @scala.inline
  def createLocalAudioTrack(): js.Promise[typingsSlinky.twilioVideo.mod.LocalAudioTrack] = typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].applyDynamic("createLocalAudioTrack")().asInstanceOf[js.Promise[typingsSlinky.twilioVideo.mod.LocalAudioTrack]]
  @scala.inline
  def createLocalAudioTrack(options: typingsSlinky.twilioVideo.mod.CreateLocalTrackOptions): js.Promise[typingsSlinky.twilioVideo.mod.LocalAudioTrack] = typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].applyDynamic("createLocalAudioTrack")(options.asInstanceOf[js.Any]).asInstanceOf[js.Promise[typingsSlinky.twilioVideo.mod.LocalAudioTrack]]

  @scala.inline
  def createLocalTracks(): js.Promise[js.Array[typingsSlinky.twilioVideo.mod.LocalTrack]] = typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].applyDynamic("createLocalTracks")().asInstanceOf[js.Promise[js.Array[typingsSlinky.twilioVideo.mod.LocalTrack]]]
  @scala.inline
  def createLocalTracks(options: typingsSlinky.twilioVideo.mod.CreateLocalTracksOptions): js.Promise[js.Array[typingsSlinky.twilioVideo.mod.LocalTrack]] = typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].applyDynamic("createLocalTracks")(options.asInstanceOf[js.Any]).asInstanceOf[js.Promise[js.Array[typingsSlinky.twilioVideo.mod.LocalTrack]]]

  @scala.inline
  def createLocalVideoTrack(): js.Promise[typingsSlinky.twilioVideo.mod.LocalVideoTrack] = typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].applyDynamic("createLocalVideoTrack")().asInstanceOf[js.Promise[typingsSlinky.twilioVideo.mod.LocalVideoTrack]]
  @scala.inline
  def createLocalVideoTrack(options: typingsSlinky.twilioVideo.mod.CreateLocalTrackOptions): js.Promise[typingsSlinky.twilioVideo.mod.LocalVideoTrack] = typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].applyDynamic("createLocalVideoTrack")(options.asInstanceOf[js.Any]).asInstanceOf[js.Promise[typingsSlinky.twilioVideo.mod.LocalVideoTrack]]

  // Module-level properties exposed by twilio-video.
  @scala.inline
  def isSupported: scala.Boolean = typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].selectDynamic("isSupported").asInstanceOf[scala.Boolean]

  @scala.inline
  def rewriteLocalTrackIds(
    room: typingsSlinky.twilioVideo.mod.Room,
    trackStats: js.Array[typingsSlinky.twilioVideo.mod.LocalTrackStats]
  ): js.Array[typingsSlinky.twilioVideo.mod.LocalTrackStats] = (typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].applyDynamic("rewriteLocalTrackIds")(room.asInstanceOf[js.Any], trackStats.asInstanceOf[js.Any])).asInstanceOf[js.Array[typingsSlinky.twilioVideo.mod.LocalTrackStats]]

  @scala.inline
  def version: java.lang.String = typingsSlinky.twilioVideo.mod.^.asInstanceOf[js.Dynamic].selectDynamic("version").asInstanceOf[java.lang.String]
}
|
shannah/CN1WebRTCDemo
|
webrtc/src/com/codename1/webrtc/VideoTrackSettings.java
|
<reponame>shannah/CN1WebRTCDemo<filename>webrtc/src/com/codename1/webrtc/VideoTrackSettings.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.codename1.webrtc;
import java.util.Map;
/**
*
* @author shannah
*/
public class VideoTrackSettings extends MediaTrackSettings {

    /**
     * @return the aspectRatio
     */
    public Double getAspectRatio() {
        return aspectRatio;
    }

    /**
     * @param aspectRatio the aspectRatio to set
     */
    public void setAspectRatio(Double aspectRatio) {
        this.aspectRatio = aspectRatio;
    }

    /**
     * @return the facingMode
     */
    public FacingMode getFacingMode() {
        return facingMode;
    }

    /**
     * @param facingMode the facingMode to set
     */
    public void setFacingMode(FacingMode facingMode) {
        this.facingMode = facingMode;
    }

    /**
     * @return the frameRate
     */
    public Double getFrameRate() {
        return frameRate;
    }

    /**
     * @param frameRate the frameRate to set
     */
    public void setFrameRate(Double frameRate) {
        this.frameRate = frameRate;
    }

    /**
     * @return the width
     */
    public Integer getWidth() {
        return width;
    }

    /**
     * @param width the width to set
     */
    public void setWidth(Integer width) {
        this.width = width;
    }

    /**
     * @return the height
     */
    public Integer getHeight() {
        return height;
    }

    /**
     * @param height the height to set
     */
    public void setHeight(Integer height) {
        this.height = height;
    }

    /**
     * @return the resizeMode
     */
    public ResizeMode getResizeMode() {
        return resizeMode;
    }

    /**
     * @param resizeMode the resizeMode to set
     */
    public void setResizeMode(ResizeMode resizeMode) {
        this.resizeMode = resizeMode;
    }

    /**
     * Populates the video-specific fields from a decoded JSON structure
     * (after letting the superclass consume its own keys).  Only acts when
     * {@code struct} is a {@code Map}; otherwise the fields are left as-is.
     * Numeric fields are reset to {@code null} when their key is absent.
     *
     * NOTE(review): facingMode and resizeMode are NOT reset when their keys
     * are absent — they keep any previously assigned value, unlike the
     * numeric fields.  Also, {@code (String)m.get(...)} may pass null into
     * {@code matches(...)}; confirm both behaviors are intentional.
     */
    @Override
    public void fromJSONStruct(Object struct) {
        super.fromJSONStruct(struct);
        if (struct instanceof Map) {
            Map m = (Map)struct;
            MapWrap w = new MapWrap(m);
            if (w.has("aspectRatio")) {
                aspectRatio = ((Number)m.get("aspectRatio")).doubleValue();
            } else {
                aspectRatio = null;
            }
            // First enum constant whose matches(...) accepts the raw string wins.
            for (FacingMode mode : FacingMode.values()) {
                if (mode.matches((String)m.get("facingMode"))) {
                    facingMode = mode;
                    break;
                }
            }
            if (w.has("frameRate")) {
                frameRate = ((Number)m.get("frameRate")).doubleValue();
            } else {
                frameRate = null;
            }
            if (w.has("width")) {
                width = ((Number)m.get("width")).intValue();
            } else {
                width = null;
            }
            if (w.has("height")) {
                height = ((Number)m.get("height")).intValue();
            } else {
                height = null;
            }
            for (ResizeMode mode : ResizeMode.values()) {
                if (mode.matches((String)m.get("resizeMode"))) {
                    resizeMode = mode;
                    break;
                }
            }
        }
    }

    // Backing fields; null means "not specified by the source struct".
    private Double aspectRatio;
    private FacingMode facingMode;
    private Double frameRate;
    private Integer width;
    private Integer height;
    private ResizeMode resizeMode;
}
|
nalind/machine-config-operator
|
cmd/machine-config-server/version.go
|
<reponame>nalind/machine-config-operator<filename>cmd/machine-config-server/version.go
package main
import (
"flag"
"fmt"
"github.com/openshift/machine-config-operator/pkg/version"
"github.com/spf13/cobra"
)
// versionCmd implements the "version" subcommand: it prints the build
// version of Machine Config Server (see runVersionCmd).
var (
	versionCmd = &cobra.Command{
		Use: "version",
		Short: "Print the version number of Machine Config Server",
		Long: `All software has versions. This is Machine Config Server's.`,
		Run: runVersionCmd,
	}
)
// init registers the version subcommand on the root command.
func init() {
	rootCmd.AddCommand(versionCmd)
}
// runVersionCmd prints "MachineConfigServer <raw>-<hash>" to stdout.
func runVersionCmd(cmd *cobra.Command, args []string) {
	flag.Set("logtostderr", "true")
	flag.Parse()

	program := "MachineConfigServer"
	// Named versionString (not "version") so the local variable does not
	// shadow the imported "version" package for the rest of the function.
	versionString := version.Raw + "-" + version.Hash

	fmt.Println(program, versionString)
}
|
dsofowote/KW-Tool
|
keywordTool/pubfiles/src/code/oms/124727/default.js
|
<gh_stars>0
// Static integration metadata for this publisher section.
// NOTE(review): "Schw�bische" contains a mojibake replacement character —
// presumably the source was meant to read "Schwäbische"; confirm the
// intended encoding before correcting the string itself.
integration.meta = {
    "sectionID" : "124727",
    "siteName" : "Schw�bische",
    "publisher" : "oms",
    "platform" : "desktop"
};

// No test overrides for this section.
integration.testParams = {};

// Ad-layout parameters consumed by the pageskin renderer.
integration.params = {
    'mf_siteId' : '707280',
    "plr_UseCreativeSettings" : true,
    "plr_ContentW" : 1080,
    "plr_ContentType" : "PAGESKINEXPRESS",
    "plr_PageAlignment" : "left",
    "plr_UseFullVersion" : true,
    "plr_PageHeightAdjustment" : 440,
    "plr_HideElementsByID" : "[id^=adcloud_], [id^=google_ads_], [id=uAd_]",
    "plr_HideElementsByClass" : ""
};

// Re-apply page CSS tweaks whenever the integration reports a layout change.
integration.on("layoutChange", function(e) {
    // NOTE(review): "frside" is assigned without var/let/const and thus
    // becomes an implicit global — confirm nothing external reads it
    // before giving it a proper declaration.
    frside = e.data.plr_FrameSide;
    $('#schwaebische_container').css({
        'margin-left' : '40px'
    });
    $('#placeholder').css({
        'height' : 'auto'
    });
    // Keep the footer aligned with the pageskin frame edge.
    $('footer').css({
        'left' : frside
    });
    $('.ism-frame').css({
        'z-index' : '951'
    });
});
|
vladmelnikov/effector
|
src/react/useStore.js
|
<reponame>vladmelnikov/effector
//@flow
import {type Store, isStore, invariant} from 'effector'
import {useReducer, useEffect} from 'react'
/**
 * React hook that subscribes a component to an effector Store and returns
 * the store's current state, re-rendering the component on every update.
 * Throws (via invariant) when the argument is not a Store.
 */
export function useStore<State>(store: Store<State>): State {
  invariant(
    isStore(store),
    'useStore: The argument must be Store, but you passed %s.',
    store,
  )
  // Reducer simply adopts the incoming payload, so calling `dispatch`
  // with a new value forces a re-render.
  const [, dispatch] = useReducer((_, payload) => payload)
  // store.watch returns an unsubscribe function, which React uses as the
  // effect cleanup; re-subscribe only when the store identity changes.
  useEffect(() => store.watch(dispatch), [store])
  return store.getState()
}
|
Marslanali/fpga_vertex_6_gtx_Interface
|
vertex6_gtx_tx_component/simulation/functional/isim/top_test_bench.exe.sim/secureip/m_00000000001816741715_0182631664.c
|
<filename>vertex6_gtx_tx_component/simulation/functional/isim/top_test_bench.exe.sim/secureip/m_00000000001816741715_0182631664.c
/**********************************************************************/
/* ____ ____ */
/* / /\/ / */
/* /___/ \ / */
/* \ \ \/ */
/* \ \ Copyright (c) 2003-2009 Xilinx, Inc. */
/* / / All Right Reserved. */
/* /---/ /\ */
/* \ \ / \ */
/* \___\/\___\ */
/***********************************************************************/
/* This file is designed for use with ISim build 0xc3576ebc */
#define XSI_HIDE_SYMBOL_SPEC true
#include "xsi.h"
#include <memory.h>
#ifdef __GNUC__
#include <stdlib.h>
#else
#include <malloc.h>
#define alloca _alloca
#endif
/* Source-file name plus two-word literal constants referenced by the
   generated process functions below (case labels and assigned values). */
static const char *ng0 = "v:/hipsBuilds/P_hips_v05.0/rst/hips/gtxe1/B_GTXE1_enc.v";
static unsigned int ng1[] = {64U, 63U};
static unsigned int ng2[] = {7U, 0U};
static unsigned int ng3[] = {0U, 31U};
static unsigned int ng4[] = {3U, 0U};
static unsigned int ng5[] = {48U, 15U};
static unsigned int ng6[] = {4U, 0U};
static unsigned int ng7[] = {40U, 7U};
static unsigned int ng8[] = {6U, 0U};
static unsigned int ng9[] = {32U, 3U};
static unsigned int ng10[] = {5U, 0U};
static unsigned int ng11[] = {38U, 1U};
static unsigned int ng12[] = {37U, 0U};
static unsigned int ng13[] = {1U, 0U};
static unsigned int ng14[] = {36U, 0U};
static unsigned int ng15[] = {2U, 0U};
static unsigned int ng16[] = {0U, 0U};
/* Machine-generated ISim process for the Verilog always block at line
   40555 of ng0: concatenates seven 1-bit signals into a 7-bit value (t4),
   case-compares it against the ng* labels, and schedules a 3-bit value
   onto the variable at offset 5128 with a 50LL delay.  Do not hand-edit;
   regenerate with the Xilinx tools instead. */
static void Always_40555_0(char *t0)
{
    char t4[8];
    char *t1;
    char *t2;
    char *t3;
    char *t5;
    char *t6;
    char *t7;
    char *t8;
    char *t9;
    char *t10;
    char *t11;
    char *t12;
    int t13;
    char *t14;
    char *t15;

LAB0:
    t1 = (t0 + 6048U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:
    goto *t2;

LAB2:
    xsi_set_current_line(40555, ng0);
    t2 = (t0 + 8104);
    *((int *)t2) = 1;
    t3 = (t0 + 6080);
    *((char **)t3) = t2;
    *((char **)t1) = &&LAB4;

LAB1:
    return;

LAB4:
    xsi_set_current_line(40555, ng0);

LAB5:
    xsi_set_current_line(40556, ng0);
    t5 = (t0 + 2328U);
    t6 = *((char **)t5);
    t5 = (t0 + 2488U);
    t7 = *((char **)t5);
    t5 = (t0 + 2648U);
    t8 = *((char **)t5);
    t5 = (t0 + 2808U);
    t9 = *((char **)t5);
    t5 = (t0 + 2968U);
    t10 = *((char **)t5);
    t5 = (t0 + 3128U);
    t11 = *((char **)t5);
    t5 = (t0 + 3288U);
    t12 = *((char **)t5);
    xsi_vlogtype_concat(t4, 7, 7, 7U, t12, 1, t11, 1, t10, 1, t9, 1, t8, 1, t7, 1, t6, 1);

LAB6:
    t5 = ((char*)((ng1)));
    t13 = xsi_vlog_unsigned_case_zcompare(t4, 7, t5, 7);
    if (t13 == 1)
        goto LAB7;

LAB8:
    t2 = ((char*)((ng3)));
    t13 = xsi_vlog_unsigned_case_zcompare(t4, 7, t2, 7);
    if (t13 == 1)
        goto LAB9;

LAB10:
    t2 = ((char*)((ng5)));
    t13 = xsi_vlog_unsigned_case_zcompare(t4, 7, t2, 7);
    if (t13 == 1)
        goto LAB11;

LAB12:
    t2 = ((char*)((ng7)));
    t13 = xsi_vlog_unsigned_case_zcompare(t4, 7, t2, 7);
    if (t13 == 1)
        goto LAB13;

LAB14:
    t2 = ((char*)((ng9)));
    t13 = xsi_vlog_unsigned_case_zcompare(t4, 7, t2, 7);
    if (t13 == 1)
        goto LAB15;

LAB16:
    t2 = ((char*)((ng11)));
    t13 = xsi_vlog_unsigned_case_zcompare(t4, 7, t2, 7);
    if (t13 == 1)
        goto LAB17;

LAB18:
    t2 = ((char*)((ng12)));
    t13 = xsi_vlog_unsigned_case_zcompare(t4, 7, t2, 7);
    if (t13 == 1)
        goto LAB19;

LAB20:
    t2 = ((char*)((ng14)));
    t13 = xsi_vlog_unsigned_case_zcompare(t4, 7, t2, 7);
    if (t13 == 1)
        goto LAB21;

LAB22:

LAB24:

LAB23:
    /* default arm of the generated case statement */
    xsi_set_current_line(40572, ng0);
    t2 = ((char*)((ng16)));
    t3 = (t0 + 5128);
    xsi_vlogvar_wait_assign_value(t3, t2, 0, 0, 3, 50LL);

LAB25:
    goto LAB2;

LAB7:
    xsi_set_current_line(40563, ng0);
    t14 = ((char*)((ng2)));
    t15 = (t0 + 5128);
    xsi_vlogvar_wait_assign_value(t15, t14, 0, 0, 3, 50LL);
    goto LAB25;

LAB9:
    xsi_set_current_line(40564, ng0);
    t3 = ((char*)((ng4)));
    t5 = (t0 + 5128);
    xsi_vlogvar_wait_assign_value(t5, t3, 0, 0, 3, 50LL);
    goto LAB25;

LAB11:
    xsi_set_current_line(40565, ng0);
    t3 = ((char*)((ng6)));
    t5 = (t0 + 5128);
    xsi_vlogvar_wait_assign_value(t5, t3, 0, 0, 3, 50LL);
    goto LAB25;

LAB13:
    xsi_set_current_line(40566, ng0);
    t3 = ((char*)((ng8)));
    t5 = (t0 + 5128);
    xsi_vlogvar_wait_assign_value(t5, t3, 0, 0, 3, 50LL);
    goto LAB25;

LAB15:
    xsi_set_current_line(40567, ng0);
    t3 = ((char*)((ng10)));
    t5 = (t0 + 5128);
    xsi_vlogvar_wait_assign_value(t5, t3, 0, 0, 3, 50LL);
    goto LAB25;

LAB17:
    xsi_set_current_line(40568, ng0);
    t3 = ((char*)((ng4)));
    t5 = (t0 + 5128);
    xsi_vlogvar_wait_assign_value(t5, t3, 0, 0, 3, 50LL);
    goto LAB25;

LAB19:
    xsi_set_current_line(40569, ng0);
    t3 = ((char*)((ng13)));
    t5 = (t0 + 5128);
    xsi_vlogvar_wait_assign_value(t5, t3, 0, 0, 3, 50LL);
    goto LAB25;

LAB21:
    xsi_set_current_line(40570, ng0);
    t3 = ((char*)((ng15)));
    t5 = (t0 + 5128);
    xsi_vlogvar_wait_assign_value(t5, t3, 0, 0, 3, 50LL);
    goto LAB25;

}
/* Machine-generated continuous-assignment process: unconditionally drives
   the constant 0 (ng16) onto the 1-bit driver at offset 8184. */
static void implSig1_execute(char *t0)
{
    char *t1;
    char *t2;
    char *t3;
    char *t4;
    char *t5;
    char *t6;
    char *t7;
    unsigned int t8;
    unsigned int t9;
    char *t10;
    unsigned int t11;
    unsigned int t12;
    char *t13;
    unsigned int t14;
    unsigned int t15;

LAB0:
    t1 = (t0 + 6296U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:
    goto *t2;

LAB2:
    t2 = ((char*)((ng16)));
    t3 = (t0 + 8184);
    t4 = (t3 + 56U);
    t5 = *((char **)t4);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    memset(t7, 0, 8);
    t8 = 1U;
    t9 = t8;
    t10 = (t2 + 4);
    t11 = *((unsigned int *)t2);
    t8 = (t8 & t11);
    t12 = *((unsigned int *)t10);
    t9 = (t9 & t12);
    t13 = (t7 + 4);
    t14 = *((unsigned int *)t7);
    *((unsigned int *)t7) = (t14 | t8);
    t15 = *((unsigned int *)t13);
    *((unsigned int *)t13) = (t15 | t9);
    xsi_driver_vfirst_trans(t3, 0, 0);

LAB1:
    return;
}
/* Machine-generated continuous-assignment process: unconditionally drives
   the constant 1 (ng13) onto the 1-bit driver at offset 8248. */
static void implSig2_execute(char *t0)
{
    char *t1;
    char *t2;
    char *t3;
    char *t4;
    char *t5;
    char *t6;
    char *t7;
    unsigned int t8;
    unsigned int t9;
    char *t10;
    unsigned int t11;
    unsigned int t12;
    char *t13;
    unsigned int t14;
    unsigned int t15;

LAB0:
    t1 = (t0 + 6544U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:
    goto *t2;

LAB2:
    t2 = ((char*)((ng13)));
    t3 = (t0 + 8248);
    t4 = (t3 + 56U);
    t5 = *((char **)t4);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    memset(t7, 0, 8);
    t8 = 1U;
    t9 = t8;
    t10 = (t2 + 4);
    t11 = *((unsigned int *)t2);
    t8 = (t8 & t11);
    t12 = *((unsigned int *)t10);
    t9 = (t9 & t12);
    t13 = (t7 + 4);
    t14 = *((unsigned int *)t7);
    *((unsigned int *)t7) = (t14 | t8);
    t15 = *((unsigned int *)t13);
    *((unsigned int *)t13) = (t15 | t9);
    xsi_driver_vfirst_trans(t3, 0, 0);

LAB1:
    return;
}
/* Machine-generated continuous-assignment process: unconditionally drives
   the constant 0 (ng16) onto the 1-bit driver at offset 8312. */
static void implSig3_execute(char *t0)
{
    char *t1;
    char *t2;
    char *t3;
    char *t4;
    char *t5;
    char *t6;
    char *t7;
    unsigned int t8;
    unsigned int t9;
    char *t10;
    unsigned int t11;
    unsigned int t12;
    char *t13;
    unsigned int t14;
    unsigned int t15;

LAB0:
    t1 = (t0 + 6792U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:
    goto *t2;

LAB2:
    t2 = ((char*)((ng16)));
    t3 = (t0 + 8312);
    t4 = (t3 + 56U);
    t5 = *((char **)t4);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    memset(t7, 0, 8);
    t8 = 1U;
    t9 = t8;
    t10 = (t2 + 4);
    t11 = *((unsigned int *)t2);
    t8 = (t8 & t11);
    t12 = *((unsigned int *)t10);
    t9 = (t9 & t12);
    t13 = (t7 + 4);
    t14 = *((unsigned int *)t7);
    *((unsigned int *)t7) = (t14 | t8);
    t15 = *((unsigned int *)t13);
    *((unsigned int *)t13) = (t15 | t9);
    xsi_driver_vfirst_trans(t3, 0, 0);

LAB1:
    return;
}
/* Machine-generated continuous-assignment process: unconditionally drives
   the constant 0 (ng16) onto the 1-bit driver at offset 8376. */
static void implSig4_execute(char *t0)
{
    char *t1;
    char *t2;
    char *t3;
    char *t4;
    char *t5;
    char *t6;
    char *t7;
    unsigned int t8;
    unsigned int t9;
    char *t10;
    unsigned int t11;
    unsigned int t12;
    char *t13;
    unsigned int t14;
    unsigned int t15;

LAB0:
    t1 = (t0 + 7040U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:
    goto *t2;

LAB2:
    t2 = ((char*)((ng16)));
    t3 = (t0 + 8376);
    t4 = (t3 + 56U);
    t5 = *((char **)t4);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    memset(t7, 0, 8);
    t8 = 1U;
    t9 = t8;
    t10 = (t2 + 4);
    t11 = *((unsigned int *)t2);
    t8 = (t8 & t11);
    t12 = *((unsigned int *)t10);
    t9 = (t9 & t12);
    t13 = (t7 + 4);
    t14 = *((unsigned int *)t7);
    *((unsigned int *)t7) = (t14 | t8);
    t15 = *((unsigned int *)t13);
    *((unsigned int *)t13) = (t15 | t9);
    xsi_driver_vfirst_trans(t3, 0, 0);

LAB1:
    return;
}
/* Machine-generated continuous-assignment process: unconditionally drives
   the constant 1 (ng13) onto the 1-bit driver at offset 8440. */
static void implSig5_execute(char *t0)
{
    char *t1;
    char *t2;
    char *t3;
    char *t4;
    char *t5;
    char *t6;
    char *t7;
    unsigned int t8;
    unsigned int t9;
    char *t10;
    unsigned int t11;
    unsigned int t12;
    char *t13;
    unsigned int t14;
    unsigned int t15;

LAB0:
    t1 = (t0 + 7288U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:
    goto *t2;

LAB2:
    t2 = ((char*)((ng13)));
    t3 = (t0 + 8440);
    t4 = (t3 + 56U);
    t5 = *((char **)t4);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    memset(t7, 0, 8);
    t8 = 1U;
    t9 = t8;
    t10 = (t2 + 4);
    t11 = *((unsigned int *)t2);
    t8 = (t8 & t11);
    t12 = *((unsigned int *)t10);
    t9 = (t9 & t12);
    t13 = (t7 + 4);
    t14 = *((unsigned int *)t7);
    *((unsigned int *)t7) = (t14 | t8);
    t15 = *((unsigned int *)t13);
    *((unsigned int *)t13) = (t15 | t9);
    xsi_driver_vfirst_trans(t3, 0, 0);

LAB1:
    return;
}
/* Machine-generated continuous-assignment process: unconditionally drives
   the constant 0 (ng16) onto the 1-bit driver at offset 8504. */
static void implSig6_execute(char *t0)
{
    char *t1;
    char *t2;
    char *t3;
    char *t4;
    char *t5;
    char *t6;
    char *t7;
    unsigned int t8;
    unsigned int t9;
    char *t10;
    unsigned int t11;
    unsigned int t12;
    char *t13;
    unsigned int t14;
    unsigned int t15;

LAB0:
    t1 = (t0 + 7536U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:
    goto *t2;

LAB2:
    t2 = ((char*)((ng16)));
    t3 = (t0 + 8504);
    t4 = (t3 + 56U);
    t5 = *((char **)t4);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    memset(t7, 0, 8);
    t8 = 1U;
    t9 = t8;
    t10 = (t2 + 4);
    t11 = *((unsigned int *)t2);
    t8 = (t8 & t11);
    t12 = *((unsigned int *)t10);
    t9 = (t9 & t12);
    t13 = (t7 + 4);
    t14 = *((unsigned int *)t7);
    *((unsigned int *)t7) = (t14 | t8);
    t15 = *((unsigned int *)t13);
    *((unsigned int *)t13) = (t15 | t9);
    xsi_driver_vfirst_trans(t3, 0, 0);

LAB1:
    return;
}
/* Machine-generated continuous-assignment process: unconditionally drives
   the constant 1 (ng13) onto the 1-bit driver at offset 8568. */
static void implSig7_execute(char *t0)
{
    char *t1;
    char *t2;
    char *t3;
    char *t4;
    char *t5;
    char *t6;
    char *t7;
    unsigned int t8;
    unsigned int t9;
    char *t10;
    unsigned int t11;
    unsigned int t12;
    char *t13;
    unsigned int t14;
    unsigned int t15;

LAB0:
    t1 = (t0 + 7784U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:
    goto *t2;

LAB2:
    t2 = ((char*)((ng13)));
    t3 = (t0 + 8568);
    t4 = (t3 + 56U);
    t5 = *((char **)t4);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    memset(t7, 0, 8);
    t8 = 1U;
    t9 = t8;
    t10 = (t2 + 4);
    t11 = *((unsigned int *)t2);
    t8 = (t8 & t11);
    t12 = *((unsigned int *)t10);
    t9 = (t9 & t12);
    t13 = (t7 + 4);
    t14 = *((unsigned int *)t7);
    *((unsigned int *)t7) = (t14 | t8);
    t15 = *((unsigned int *)t13);
    *((unsigned int *)t13) = (t15 | t9);
    xsi_driver_vfirst_trans(t3, 0, 0);

LAB1:
    return;
}
/* Registration entry point generated by ISim: registers the data file and
   the process table (Always + implSig drivers) with the simulator core. */
extern void secureip_m_00000000001816741715_0182631664_init()
{
    static char *pe[] = {(void *)Always_40555_0,(void *)implSig1_execute,(void *)implSig2_execute,(void *)implSig3_execute,(void *)implSig4_execute,(void *)implSig5_execute,(void *)implSig6_execute,(void *)implSig7_execute};
    xsi_register_didat("secureip_m_00000000001816741715_0182631664", "isim/top_test_bench.exe.sim/secureip/m_00000000001816741715_0182631664.didat");
    xsi_register_executes(pe);
}
|
cstom4994/SourceEngineRebuild
|
src/shadereditor/src/vgui_editor/perforcefileexplorer.cpp
|
<filename>src/shadereditor/src/vgui_editor/perforcefileexplorer.cpp<gh_stars>1-10
//===== Copyright � 1996-2005, Valve Corporation, All rights reserved. ======//
//
// Purpose: Contains a list of files, determines their perforce status
//
// $NoKeywords: $
//===========================================================================//
#include <vgui_controls/perforcefileexplorer.h>
#include <vgui_controls/perforcefilelist.h>
#include <vgui_controls/combobox.h>
#include <vgui_controls/button.h>
#include <vgui_controls/tooltip.h>
#include "tier1/keyvalues.h"
#include "vgui/isystem.h"
#include "filesystem.h"
#include <ctype.h>
#include "p4lib/ip4.h"
#include "tier2/tier2.h"
// memdbgon must be the last include file in a .cpp file!!!
#include <tier0/memdbgon.h>
using namespace vgui;
//-----------------------------------------------------------------------------
// Purpose: Constructor
//-----------------------------------------------------------------------------
PerforceFileExplorer::PerforceFileExplorer(Panel *pParent, const char *pPanelName) :
        BaseClass(pParent, pPanelName) {
    m_pFileList = new PerforceFileList(this, "PerforceFileList");

    // Get the list of available drives and put them in a menu here.
    // Start with the directory we are in.
    m_pFullPathCombo = new ComboBox(this, "FullPathCombo", 8, false);
    m_pFullPathCombo->GetTooltip()->SetTooltipFormatToSingleLine();

    // Seed the explorer with the process's current working directory.
    char pFullPath[MAX_PATH];
    g_pFullFileSystem->GetCurrentDirectory(pFullPath, sizeof(pFullPath));
    SetCurrentDirectory(pFullPath);

    m_pFullPathCombo->AddActionSignalTarget(this);

    // "Up one folder" button wired to the FolderUp command.
    m_pFolderUpButton = new Button(this, "FolderUpButton", "", this);
    m_pFolderUpButton->GetTooltip()->SetText("#FileOpenDialog_ToolTip_Up");
    m_pFolderUpButton->SetCommand(new KeyValues("FolderUp"));
}
//-----------------------------------------------------------------------------
// Purpose: Destructor
//-----------------------------------------------------------------------------
// Child panels are owned/destroyed by the vgui parent-child hierarchy.
PerforceFileExplorer::~PerforceFileExplorer() {
}
//-----------------------------------------------------------------------------
// Inherited from Frame
//-----------------------------------------------------------------------------
// Applies the vgui scheme, then decorates the folder-up button with its icon.
void PerforceFileExplorer::ApplySchemeSettings(IScheme *pScheme) {
    BaseClass::ApplySchemeSettings(pScheme);
    m_pFolderUpButton->AddImage(scheme()->GetImage("resource/icon_folderup", false), -3);
}
//-----------------------------------------------------------------------------
// Inherited from Frame
//-----------------------------------------------------------------------------
// Lays out the path combo + up button along the top and the file list below.
void PerforceFileExplorer::PerformLayout() {
    BaseClass::PerformLayout();

    int x, y, w, h;
    GetClientArea(x, y, w, h);

    m_pFullPathCombo->SetBounds(x, y + 6, w - 30, 24);
    m_pFolderUpButton->SetBounds(x + w - 24, y + 6, 24, 24);
    m_pFileList->SetBounds(x, y + 36, w, h - 36);
}
//-----------------------------------------------------------------------------
// Sets the current directory
//-----------------------------------------------------------------------------
// Changes the explorer's directory, repopulates the file and drive lists,
// and synchronizes the path combo box text.  NULL or blank input is ignored.
void PerforceFileExplorer::SetCurrentDirectory(const char *pFullPath) {
    if (!pFullPath)
        return;

    // Skip leading whitespace; bail if nothing is left.
    while (isspace(*pFullPath)) {
        ++pFullPath;
    }
    if (!pFullPath[0])
        return;

    m_CurrentDirectory = pFullPath;
    m_CurrentDirectory.StripTrailingSlash();
    Q_FixSlashes(m_CurrentDirectory.GetForModify());

    PopulateFileList();
    PopulateDriveList();

    // Only rewrite the combo text when it actually differs, to avoid
    // retriggering OnTextChanged needlessly.
    char pCurrentDirectory[MAX_PATH];
    m_pFullPathCombo->GetText(pCurrentDirectory, sizeof(pCurrentDirectory));
    if (Q_stricmp(m_CurrentDirectory.Get(), pCurrentDirectory)) {
        char pNewDirectory[MAX_PATH];
        Q_snprintf(pNewDirectory, sizeof(pNewDirectory), "%s\\", m_CurrentDirectory.Get());
        m_pFullPathCombo->SetText(pNewDirectory);
        m_pFullPathCombo->GetTooltip()->SetText(pNewDirectory);
    }
}
//-----------------------------------------------------------------------------
// Purpose:
//-----------------------------------------------------------------------------
// Rebuilds the path combo: one entry per drive, plus an indented entry for
// each ancestor directory of the current path on the current drive.
void PerforceFileExplorer::PopulateDriveList() {
    char pFullPath[MAX_PATH * 4];
    char pSubDirPath[MAX_PATH * 4];
    Q_strncpy(pFullPath, m_CurrentDirectory.Get(), sizeof(pFullPath));
    Q_strncpy(pSubDirPath, m_CurrentDirectory.Get(), sizeof(pSubDirPath));

    m_pFullPathCombo->DeleteAllItems();

    // populate the drive list
    // GetAvailableDrives packs 4-byte entries ("X:\" + NUL) into buf.
    char buf[512];
    int len = system()->GetAvailableDrives(buf, 512);
    char *pBuf = buf;
    for (int i = 0; i < len / 4; i++) {
        m_pFullPathCombo->AddItem(pBuf, NULL);

        // is this our drive - add all subdirectories
        if (!_strnicmp(pBuf, pFullPath, 2)) {
            // Each '\\' encountered adds a combo entry for the path prefix
            // up to and including that separator, indented two spaces per
            // directory level.
            int indent = 0;
            char *pData = pFullPath;
            while (*pData) {
                if (*pData == '\\') {
                    if (indent > 0) {
                        memset(pSubDirPath, ' ', indent);
                        memcpy(pSubDirPath + indent, pFullPath, pData - pFullPath + 1);
                        pSubDirPath[indent + pData - pFullPath + 1] = 0;

                        m_pFullPathCombo->AddItem(pSubDirPath, NULL);
                    }
                    indent += 2;
                }
                pData++;
            }
        }
        pBuf += 4;
    }
}
//-----------------------------------------------------------------------------
// Purpose: Fill the filelist with the names of all the files in the current directory
//-----------------------------------------------------------------------------
// Fills the list with every file in the current directory (from disk) merged
// with the files Perforce knows about, annotating each row's P4 state.
void PerforceFileExplorer::PopulateFileList() {
    // clear the current list
    m_pFileList->RemoveAllFiles();

    // Create filter string
    // NOTE(review): the format appends "\\*.*" (5 chars + NUL) but the
    // buffer only reserves MAX_PATH + 3; Q_snprintf truncates safely, yet
    // near-MAX_PATH directories could produce a clipped filter — confirm.
    char pFullFoundPath[MAX_PATH];
    char pFilter[MAX_PATH + 3];
    Q_snprintf(pFilter, sizeof(pFilter), "%s\\*.*", m_CurrentDirectory.Get());

    // Find all files on disk
    FileFindHandle_t h;
    const char *pFileName = g_pFullFileSystem->FindFirstEx(pFilter, NULL, &h);
    for (; pFileName; pFileName = g_pFullFileSystem->FindNext(h)) {
        if (!Q_stricmp(pFileName, "..") || !Q_stricmp(pFileName, "."))
            continue;

        // Expand bare names to full paths before inserting.
        if (!Q_IsAbsolutePath(pFileName)) {
            Q_snprintf(pFullFoundPath, sizeof(pFullFoundPath), "%s\\%s", m_CurrentDirectory.Get(), pFileName);
            pFileName = pFullFoundPath;
        }

        int nItemID = m_pFileList->AddFile(pFileName, true);
        m_pFileList->RefreshPerforceState(nItemID, true, NULL);
    }
    g_pFullFileSystem->FindClose(h);

    // Now find all files in perforce
    CUtlVector<P4File_t> &fileList = p4->GetFileList(m_CurrentDirectory);
    int nCount = fileList.Count();
    for (int i = 0; i < nCount; ++i) {
        const char *pFileName = p4->String(fileList[i].m_sLocalFile);
        if (!pFileName[0])
            continue;

        int nItemID = m_pFileList->FindFile(pFileName);
        bool bFileExists = true;
        if (nItemID == m_pFileList->InvalidItemID()) {
            // If it didn't find it, the file must not exist
            // since it already would have added it above
            bFileExists = false;
            nItemID = m_pFileList->AddFile(pFileName, false, fileList[i].m_bDir);
        }
        m_pFileList->RefreshPerforceState(nItemID, bFileExists, &fileList[i]);
    }

    m_pFileList->SortList();
}
//-----------------------------------------------------------------------------
// Purpose: Handle an item in the Drive combo box being selected
//-----------------------------------------------------------------------------
// vgui text-change handler: when the path combo's text changes, navigate
// to the newly typed/selected directory.
void PerforceFileExplorer::OnTextChanged(KeyValues *kv) {
    Panel *pPanel = (Panel *) kv->GetPtr("panel", NULL);

    // first check which control had its text changed!
    if (pPanel == m_pFullPathCombo) {
        char pCurrentDirectory[MAX_PATH];
        m_pFullPathCombo->GetText(pCurrentDirectory, sizeof(pCurrentDirectory));
        SetCurrentDirectory(pCurrentDirectory);
        return;
    }
}
//-----------------------------------------------------------------------------
// Called when the file list was doubleclicked
//-----------------------------------------------------------------------------
// Double-clicking a single selected directory row navigates into it;
// multi-selection and plain files are ignored.
void PerforceFileExplorer::OnItemDoubleClicked() {
    if (m_pFileList->GetSelectedItemsCount() != 1)
        return;

    const int nSelectedItem = m_pFileList->GetSelectedItem(0);
    if (!m_pFileList->IsDirectoryItem(nSelectedItem))
        return;

    SetCurrentDirectory(m_pFileList->GetFile(nSelectedItem));
}
//-----------------------------------------------------------------------------
// Called when the folder up button was hit
//-----------------------------------------------------------------------------
void PerforceFileExplorer::OnFolderUp() {
char pUpDirectory[MAX_PATH];
Q_strncpy(pUpDirectory, m_CurrentDirectory.Get(), sizeof(pUpDirectory));
Q_StripLastDir(pUpDirectory, sizeof(pUpDirectory));
Q_StripTrailingSlash(pUpDirectory);
// This occurs at the root directory
if (!Q_stricmp(pUpDirectory, "."))
return;
SetCurrentDirectory(pUpDirectory);
}
|
OpenKGC/hypergraphdb
|
p2p/src/java/org/hypergraphdb/peer/HGDBOntology.java
|
<filename>p2p/src/java/org/hypergraphdb/peer/HGDBOntology.java<gh_stars>100-1000
/*
* This file is part of the HyperGraphDB source distribution. This is copyrighted
* software. For permitted uses, licensing options and redistribution, please see
* the LicensingInformation file at the root level of the distribution.
*
* Copyright (c) 2005-2010 <NAME>, Inc. All rights reserved.
*/
package org.hypergraphdb.peer;
/**
 * String constants for the HyperGraphDB peer layer: action (performative)
 * names and slot (message-parameter) names.  These values travel between
 * peers, so do not change them without coordinating both sides.
 */
public class HGDBOntology
{
    // Action / performative names.
    public static final String REMEMBER_ACTION = "remember";
    public static final String ATOM_INTEREST = "atom_interest";
    public static final String CATCHUP = "catchup";
    public static final String QUERY = "query";

    // Slot (message parameter) names.
    public static final String SLOT_LAST_VERSION = "last_version";
    public static final String SLOT_CURRENT_VERSION = "curr_version";
    public static final String SLOT_INTEREST = "interest";
    public static final String SLOT_QUERY = "query";
    public static final String SLOT_GET_OBJECT = "getObj";
}
|
1stmateusz/motech
|
platform/mds/mds-web/src/main/java/org/motechproject/mds/web/domain/RelationshipsUpdate.java
|
<reponame>1stmateusz/motech<filename>platform/mds/mds-web/src/main/java/org/motechproject/mds/web/domain/RelationshipsUpdate.java
package org.motechproject.mds.web.domain;
import java.util.ArrayList;
import java.util.List;
/**
* Representation of the related instances, that have been added or removed on the UI.
*/
public class RelationshipsUpdate {

    // IDs of existing related instances unlinked on the UI.
    private List<Long> removedIds;
    // IDs of existing instances newly linked on the UI.
    private List<Long> addedIds;
    // Related records created inline on the UI (presumably not yet
    // persisted — confirm against the consuming controller).
    private List<EntityRecord> addedNewRecords;

    /** Creates an update with all three change lists empty (never null). */
    public RelationshipsUpdate() {
        this.removedIds = new ArrayList<>();
        this.addedIds = new ArrayList<>();
        this.addedNewRecords = new ArrayList<>();
    }

    public List<Long> getRemovedIds() {
        return removedIds;
    }

    public void setRemovedIds(List<Long> removedIds) {
        this.removedIds = removedIds;
    }

    public List<Long> getAddedIds() {
        return addedIds;
    }

    public void setAddedIds(List<Long> addedIds) {
        this.addedIds = addedIds;
    }

    public List<EntityRecord> getAddedNewRecords() {
        return addedNewRecords;
    }

    public void setAddedNewRecords(List<EntityRecord> addedNewRecords) {
        this.addedNewRecords = addedNewRecords;
    }
}
|
lewnelson/node-userapi
|
test/app/ErrorHandlers/NotFoundTest.js
|
'use strict';
const NotFound = require('../../../app/ErrorHandlers/NotFound.js');
const expect = require('chai').expect;
// Mocha/chai suite for the NotFound error handler: verifies logging
// delegation and the shape of the 404 JSON response.
describe('NotFound error handler class tests', () => {
  it('should call logGenericError when log is called with notice log type and not_found message', () => {
    const notFound = new NotFound();
    // Stub the inherited logger and assert the arguments it receives.
    notFound.logGenericError = (type, message) => {
      expect(type).to.equal('notice');
      expect(message).to.equal('not_found');
    };
    notFound.log();
  });

  it('should send a response with 404 status and error body when handle is called', () => {
    const notFound = new NotFound();
    // Two cases: a fully populated error object, and an empty one that
    // should fall back to the handler's defaults.
    let dataSets = [
      {
        error: {
          code: 404,
          message: 'resource not found',
          context: { key: false }
        },
        expectedJson: {
          error: {
            code: 404,
            message: 'resource not found',
            context: { key: false }
          }
        }
      },
      {
        error: {},
        expectedJson: {
          error: {
            code: 404,
            message: 'not found',
            context: {}
          }
        }
      }
    ];
    dataSets.forEach((dataSet) => {
      notFound.getError = () => dataSet.error;
      // Fake Express response: assert the status code, then the JSON body.
      notFound.getResponse = () => {
        return {
          status: (code) => {
            expect(code).to.equal(404);
            return {
              json: (data) => {
                expect(data).to.deep.equal(dataSet.expectedJson);
              }
            };
          }
        };
      };
      notFound.handle();
    });
  });
});
|
devilesk/hero-calculator
|
src/js/herocalc/main.js
|
'use strict';

// Aggregator for the herocalc modules: exposes the view-model/model
// constructors plus Data/Util, and an init() that loads the data files
// before building the option arrays that depend on them.
var core = {};
core.InventoryViewModel = require("./inventory/InventoryViewModel");
core.AbilityModel = require("./AbilityModel");
core.BuffViewModel = require("./BuffViewModel");
core.HeroModel = require("./hero/HeroModel");
core.CloneModel = require("./hero/CloneModel");
core.UnitModel = require("./hero/UnitModel");
core.IllusionModel = require("./hero/IllusionModel");
core.Data = require("./data/main");
core.Util = require("./util/main");

// Loads hero/item/unit data from the given paths, then initializes the
// option arrays (which require the loaded data) and invokes callback.
core.init = function (HERODATA_PATH, ITEMDATA_PATH, UNITDATA_PATH, callback) {
    core.Data.init(HERODATA_PATH, ITEMDATA_PATH, UNITDATA_PATH, function () {
        core.HeroOptions = require("./hero/heroOptionsArray").init(core.Data.heroData);
        core.BuffOptions = require("./buffs/buffOptionsArray").init(core.Data.heroData, core.Data.unitData);
        core.DebuffOptions = require("./buffs/debuffOptionsArray").init(core.Data.heroData, core.Data.unitData);
        core.ItemOptions = require("./inventory/itemOptionsArray").init(core.Data.itemData);
        core.ItemBuffOptions = require("./inventory/itemBuffOptions").init(core.Data.itemData);
        core.ItemDebuffOptions = require("./inventory/itemDebuffOptions").init(core.Data.itemData);
        callback();
    });
}

module.exports = core;
|
anyroadcom/anyprice
|
lib/pricing_definition/resources.rb
|
require 'pricing_definition/resources/definition'

module PricingDefinition
  # Namespace module grouping resource classes (currently Definition,
  # loaded by the require above).
  module Resources
  end
end
|
kzx1025/spark_improve
|
core/target/java/org/apache/spark/deploy/history/HistoryServer$.java
|
<gh_stars>0
package org.apache.spark.deploy.history;
// no position
/**
* The recommended way of starting and stopping a HistoryServer is through the scripts
* start-history-server.sh and stop-history-server.sh. The path to a base log directory
* is must be specified, while the requested UI port is optional. For example:
* <p>
* ./sbin/spark-history-server.sh /tmp/spark-events
* ./sbin/spark-history-server.sh hdfs://172.16.31.10:9000/spark-events
* <p>
* This launches the HistoryServer as a Spark daemon.
*/
// NOTE: generated Java stub for the Scala companion object "HistoryServer".
// Every body is a placeholder that throws at runtime; the real
// implementation lives in the Scala source.
public class HistoryServer$ implements org.apache.spark.Logging {
  /**
   * Static reference to the singleton instance of this Scala object.
   */
  public static final HistoryServer$ MODULE$ = null;
  public HistoryServer$ () { throw new RuntimeException(); }
  // Configuration backing the server (stub).
  private org.apache.spark.SparkConf conf () { throw new RuntimeException(); }
  // URL path prefix for the history server UI (stub).
  public java.lang.String UI_PATH_PREFIX () { throw new RuntimeException(); }
  // Entry point invoked by the launch scripts (stub).
  public void main (java.lang.String[] argStrings) { throw new RuntimeException(); }
  public void initSecurity () { throw new RuntimeException(); }
}
|
g--o/JSCF
|
jscf/graphics/graphics.js
|
/**
 * @class
 * @classdesc graphics related management (mainly canvas)
 * @memberof Graphics
 *
 * @param {Number} canvasWidth the width of the canvas to be created.
 * @param {Number} canvasHeight the height of the canvas to be created.
 *
 * @note: If canvasWidth or canvasHeight is negative, then they get the
 *        window's corresponding size.
 * @constructor
 */
function Graphics(canvasWidth, canvasHeight)
{
    // c'tor: creates the canvas, sizes it, grabs its 2d context and attaches
    // it as the first child of document.body.
    this.init = function() {
        if (document.body == null) {
            alert("JSCF: Fatal error!\nCan't initialize graphics before body is loaded!");
            // Fail fast: previously execution continued and crashed below
            // with an opaque TypeError when inserting into a null body.
            throw new Error("JSCF: Graphics constructed before document.body was loaded");
        }

        this.canvas = document.createElement("canvas");
        this.canvas.id = "GameCanvas";

        // Negative dimensions mean "fill the window".
        this.canvas.width = canvasWidth < 0 ? window.innerWidth : canvasWidth;
        this.canvas.height = canvasHeight < 0 ? window.innerHeight : canvasHeight;

        this.context = this.canvas.getContext("2d");
        document.body.insertBefore(this.canvas, document.body.childNodes[0]);
    };

    // call c'tor
    this.init();

    /**
     * clear - clears the canvas
     * @return null
     */
    this.clear = function()
    {
        this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
    };
}
|
bopopescu/MQUIC
|
src/net/base/sdch_manager.cc
|
<reponame>bopopescu/MQUIC
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/base/sdch_manager.h"
#include <limits.h>
#include <utility>
#include "base/base64url.h"
#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/time/default_clock.h"
#include "base/values.h"
#include "crypto/sha2.h"
#include "net/base/parse_number.h"
#include "net/base/sdch_observer.h"
#include "net/url_request/url_request_http_job.h"
namespace {

// Removes a single trailing '.' from |gurl|'s host (e.g. "a.com." becomes
// "a.com"). Hosts that are empty or have no trailing dot are left untouched.
void StripTrailingDot(GURL* gurl) {
  const std::string& original_host = gurl->host();
  if (original_host.empty() || original_host.back() != '.')
    return;

  std::string trimmed_host =
      original_host.substr(0, original_host.size() - 1);
  GURL::Replacements replacements;
  replacements.SetHostStr(trimmed_host);
  *gurl = gurl->ReplaceComponents(replacements);
}

}  // namespace
namespace net {
// A DictionarySet holds refcounted dictionary entries, shared with the
// manager's |dictionaries_| map; entries stay alive while any set holds them.
SdchManager::DictionarySet::DictionarySet() {}

SdchManager::DictionarySet::~DictionarySet() {}

// Returns a comma-separated list of the client hashes of every dictionary
// in this set, in map iteration order.
std::string SdchManager::DictionarySet::GetDictionaryClientHashList() const {
  std::string result;
  bool first = true;
  for (const auto& entry: dictionaries_) {
    if (!first)
      result.append(",");

    result.append(entry.second->data.client_hash());
    first = false;
  }

  return result;
}

// True when this set contains no dictionaries.
bool SdchManager::DictionarySet::Empty() const {
  return dictionaries_.empty();
}

// Returns the dictionary text for |server_hash|, or nullptr when this set
// has no such dictionary. The returned pointer stays valid as long as the
// underlying refcounted entry is alive.
const std::string* SdchManager::DictionarySet::GetDictionaryText(
    const std::string& server_hash) const {
  auto it = dictionaries_.find(server_hash);

  if (it == dictionaries_.end())
    return nullptr;

  return &it->second->data.text();
}

// Adds |dictionary| keyed by |server_hash|; the hash must not be present yet.
void SdchManager::DictionarySet::AddDictionary(
    const std::string& server_hash,
    const scoped_refptr<base::RefCountedData<SdchDictionary>>& dictionary) {
  DCHECK(dictionaries_.end() == dictionaries_.find(server_hash));

  dictionaries_[server_hash] = dictionary;
}
SdchManager::SdchManager() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

SdchManager::~SdchManager() {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Drop all dictionary references. (Previously this erased entries one at
  // a time in a loop; clear() is equivalent and simpler.) The refcounted
  // entries themselves are freed once the last DictionarySet releases them.
  dictionaries_.clear();
}

// Drops all per-domain state (blacklists, latency-experiment opt-ins,
// loaded dictionaries) and notifies observers that dictionaries are gone.
void SdchManager::ClearData() {
  blacklisted_domains_.clear();
  allow_latency_experiment_.clear();
  dictionaries_.clear();

  FOR_EACH_OBSERVER(SdchObserver, observers_, OnClearDictionaries());
}
// static
// Records |problem| in the "Sdch3.ProblemCodes_5" histogram.
void SdchManager::SdchErrorRecovery(SdchProblemCode problem) {
  UMA_HISTOGRAM_ENUMERATION("Sdch3.ProblemCodes_5", problem,
                            SDCH_MAX_PROBLEM_CODE);
}

// Temporarily blacklists |url|'s host. Each fresh blacklisting doubles the
// stored exponential counter (2n+1, saturating at INT_MAX) and resets the
// active request count to it; a host already blacklisted is left unchanged.
void SdchManager::BlacklistDomain(const GURL& url,
                                  SdchProblemCode blacklist_reason) {
  SetAllowLatencyExperiment(url, false);

  BlacklistInfo* blacklist_info = &blacklisted_domains_[url.host()];

  if (blacklist_info->count > 0)
    return;  // Domain is already blacklisted.

  // Overflow-safe doubling: saturate instead of wrapping past INT_MAX.
  if (blacklist_info->exponential_count > (INT_MAX - 1) / 2) {
    blacklist_info->exponential_count = INT_MAX;
  } else {
    blacklist_info->exponential_count =
        blacklist_info->exponential_count * 2 + 1;
  }

  blacklist_info->count = blacklist_info->exponential_count;
  blacklist_info->reason = blacklist_reason;
}

// Permanently blacklists |url|'s host by pinning both counters to INT_MAX.
void SdchManager::BlacklistDomainForever(const GURL& url,
                                         SdchProblemCode blacklist_reason) {
  SetAllowLatencyExperiment(url, false);

  BlacklistInfo* blacklist_info = &blacklisted_domains_[url.host()];

  blacklist_info->count = INT_MAX;
  blacklist_info->exponential_count = INT_MAX;
  blacklist_info->reason = blacklist_reason;
}
}
// Removes every domain blacklist entry.
void SdchManager::ClearBlacklistings() {
  blacklisted_domains_.clear();
}

// Clears the blacklist state for |domain| (creating a zeroed entry if none
// exists, matching the original behavior of operator[]).
void SdchManager::ClearDomainBlacklisting(const std::string& domain) {
  BlacklistInfo* blacklist_info =
      &blacklisted_domains_[base::ToLowerASCII(domain)];

  blacklist_info->count = 0;
  blacklist_info->reason = SDCH_OK;
}

// Returns the remaining blacklist count for |domain|, or 0 if it is not
// blacklisted. Uses a single lookup (the original did find + operator[]).
int SdchManager::BlackListDomainCount(const std::string& domain) {
  auto it = blacklisted_domains_.find(base::ToLowerASCII(domain));

  if (it == blacklisted_domains_.end())
    return 0;

  return it->second.count;
}

// Returns the stored exponential backoff counter for |domain|, or 0 if it
// is not blacklisted. Single lookup, as above.
int SdchManager::BlacklistDomainExponential(const std::string& domain) {
  auto it = blacklisted_domains_.find(base::ToLowerASCII(domain));

  if (it == blacklisted_domains_.end())
    return 0;

  return it->second.exponential_count;
}
// Returns SDCH_OK when |url|'s host is not currently blacklisted. Otherwise
// reports the blacklist reason to UMA, decrements the host's remaining
// count (resetting the entry to a clean state once it hits zero), and
// returns SDCH_DOMAIN_BLACKLIST_INCLUDES_TARGET.
SdchProblemCode SdchManager::IsInSupportedDomain(const GURL& url) {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (blacklisted_domains_.empty())
    return SDCH_OK;

  auto it = blacklisted_domains_.find(url.host());

  if (blacklisted_domains_.end() == it || it->second.count == 0)
    return SDCH_OK;

  UMA_HISTOGRAM_ENUMERATION("Sdch3.BlacklistReason", it->second.reason,
                            SDCH_MAX_PROBLEM_CODE);

  // Consume one blacklist "charge"; the entry stays in the map but reads
  // as clean (count 0 / SDCH_OK) once exhausted.
  int count = it->second.count - 1;

  if (count > 0) {
    it->second.count = count;
  } else {
    it->second.count = 0;
    it->second.reason = SDCH_OK;
  }

  return SDCH_DOMAIN_BLACKLIST_INCLUDES_TARGET;
}

// Validates that a dictionary at |dictionary_url| may be fetched for a
// response to |request_url| and, if so, forwards the request to observers.
SdchProblemCode SdchManager::OnGetDictionary(const GURL& request_url,
                                             const GURL& dictionary_url) {
  DCHECK(thread_checker_.CalledOnValidThread());

  SdchProblemCode rv = CanFetchDictionary(request_url, dictionary_url);

  if (rv != SDCH_OK)
    return rv;

  FOR_EACH_OBSERVER(SdchObserver,
                    observers_,
                    OnGetDictionary(request_url, dictionary_url));

  return SDCH_OK;
}

// Notifies observers that the dictionary identified by |server_hash| was
// used.
void SdchManager::OnDictionaryUsed(const std::string& server_hash) {
  FOR_EACH_OBSERVER(SdchObserver, observers_,
                    OnDictionaryUsed(server_hash));
}

// Checks whether fetching |dictionary_url| is allowed for |referring_url|:
// both must share the same host and scheme, and the referrer must be
// HTTP(S).
SdchProblemCode SdchManager::CanFetchDictionary(
    const GURL& referring_url,
    const GURL& dictionary_url) const {
  DCHECK(thread_checker_.CalledOnValidThread());

  /* The user agent may retrieve a dictionary from the dictionary URL if all of
     the following are true:
       1 The dictionary URL host name matches the referrer URL host name and
           scheme.
       2 The dictionary URL host name domain matches the parent domain of the
           referrer URL host name
       3 The parent domain of the referrer URL host name is not a top level
           domain
   */

  // Item (1) above implies item (2). Spec should be updated.
  // I take "host name match" to be "is identical to"
  if (referring_url.host_piece() != dictionary_url.host_piece() ||
      referring_url.scheme_piece() != dictionary_url.scheme_piece())
    return SDCH_DICTIONARY_LOAD_ATTEMPT_FROM_DIFFERENT_HOST;

  // TODO(jar): Remove this failsafe conservative hack which is more restrictive
  // than current SDCH spec when needed, and justified by security audit.
  if (!referring_url.SchemeIsHTTPOrHTTPS())
    return SDCH_DICTIONARY_SELECTED_FROM_NON_HTTP;

  return SDCH_OK;
}
// Returns the set of loaded, unexpired dictionaries usable for |target_url|,
// or NULL when the domain is blacklisted or no dictionary applies.
scoped_ptr<SdchManager::DictionarySet>
SdchManager::GetDictionarySet(const GURL& target_url) {
  if (IsInSupportedDomain(target_url) != SDCH_OK)
    return NULL;

  int count = 0;
  scoped_ptr<SdchManager::DictionarySet> result(new DictionarySet);
  for (const auto& entry: dictionaries_) {
    if (entry.second->data.CanUse(target_url) != SDCH_OK)
      continue;
    if (entry.second->data.Expired())
      continue;
    ++count;
    result->AddDictionary(entry.first, entry.second);
  }

  if (count == 0)
    return NULL;

  UMA_HISTOGRAM_COUNTS("Sdch3.Advertisement_Count", count);

  return result;
}

// Returns a single-entry set holding the dictionary with |server_hash| if
// it exists and is usable for |target_url|; otherwise returns an empty
// pointer with the failure reason stored in |problem_code|.
scoped_ptr<SdchManager::DictionarySet>
SdchManager::GetDictionarySetByHash(
    const GURL& target_url,
    const std::string& server_hash,
    SdchProblemCode* problem_code) {
  scoped_ptr<SdchManager::DictionarySet> result;

  *problem_code = SDCH_DICTIONARY_HASH_NOT_FOUND;

  const auto& it = dictionaries_.find(server_hash);

  if (it == dictionaries_.end())
    return result;

  *problem_code = it->second->data.CanUse(target_url);

  if (*problem_code != SDCH_OK)
    return result;

  result.reset(new DictionarySet);
  result->AddDictionary(it->first, it->second);
  return result;
}

// static
// Derives both dictionary hashes from |dictionary_text|: SHA-256 the text,
// then base64url-encode the first 48 bits as the client hash and the next
// 48 bits as the server hash (8 characters each, per the DCHECKs).
void SdchManager::GenerateHash(const std::string& dictionary_text,
    std::string* client_hash, std::string* server_hash) {
  char binary_hash[32];

  crypto::SHA256HashString(dictionary_text, binary_hash, sizeof(binary_hash));

  base::StringPiece first_48_bits(&binary_hash[0], 6);
  base::StringPiece second_48_bits(&binary_hash[6], 6);

  base::Base64UrlEncode(
      first_48_bits, base::Base64UrlEncodePolicy::INCLUDE_PADDING, client_hash);
  base::Base64UrlEncode(second_48_bits,
                        base::Base64UrlEncodePolicy::INCLUDE_PADDING,
                        server_hash);

  DCHECK_EQ(server_hash->length(), 8u);
  DCHECK_EQ(client_hash->length(), 8u);
}

// Methods for supporting latency experiments.

// True if |url|'s host has been opted into the latency experiment.
bool SdchManager::AllowLatencyExperiment(const GURL& url) const {
  DCHECK(thread_checker_.CalledOnValidThread());

  return allow_latency_experiment_.end() !=
         allow_latency_experiment_.find(url.host());
}

// Opts |url|'s host in (enable=true) or out of the latency experiment.
// Opting out a host that was enrolled records SDCH_LATENCY_TEST_DISALLOWED.
void SdchManager::SetAllowLatencyExperiment(const GURL& url, bool enable) {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (enable) {
    allow_latency_experiment_.insert(url.host());
    return;
  }

  ExperimentSet::iterator it = allow_latency_experiment_.find(url.host());

  if (allow_latency_experiment_.end() == it)
    return;  // It was already erased, or never allowed.

  SdchErrorRecovery(SDCH_LATENCY_TEST_DISALLOWED);
  allow_latency_experiment_.erase(it);
}

// Registers |observer| for dictionary lifecycle notifications.
void SdchManager::AddObserver(SdchObserver* observer) {
  observers_.AddObserver(observer);
}

// Unregisters a previously added |observer|.
void SdchManager::RemoveObserver(SdchObserver* observer) {
  observers_.RemoveObserver(observer);
}
// Parses |dictionary_text| ("name: value" header lines, a blank line, then
// the payload), validates it against |dictionary_url|, and stores it keyed
// by its server hash. On success the server hash is optionally returned via
// |server_hash_p| and observers are notified.
SdchProblemCode SdchManager::AddSdchDictionary(
    const std::string& dictionary_text,
    const GURL& dictionary_url,
    std::string* server_hash_p) {
  DCHECK(thread_checker_.CalledOnValidThread());

  std::string client_hash;
  std::string server_hash;

  GenerateHash(dictionary_text, &client_hash, &server_hash);

  if (dictionaries_.find(server_hash) != dictionaries_.end())
    return SDCH_DICTIONARY_ALREADY_LOADED;  // Already loaded.

  std::string domain, path;
  std::set<int> ports;
  // Default expiration, used when no max-age header is present.
  base::Time expiration(base::Time::Now() + base::TimeDelta::FromDays(30));

  if (dictionary_text.empty())
    return SDCH_DICTIONARY_HAS_NO_TEXT;  // Missing header.

  // The header section ends at the first blank line.
  size_t header_end = dictionary_text.find("\n\n");

  if (std::string::npos == header_end)
    return SDCH_DICTIONARY_HAS_NO_HEADER;  // Missing header.

  size_t line_start = 0;  // Start of line being parsed.

  // Parse header lines until the blank line is reached. Recognized names:
  // domain, path, format-version, max-age, port. Unknown names are ignored.
  while (1) {
    size_t line_end = dictionary_text.find('\n', line_start);

    DCHECK(std::string::npos != line_end);
    DCHECK_LE(line_end, header_end);

    size_t colon_index = dictionary_text.find(':', line_start);

    if (std::string::npos == colon_index)
      return SDCH_DICTIONARY_HEADER_LINE_MISSING_COLON;  // Illegal line missing
    // a colon.

    if (colon_index > line_end)
      break;

    size_t value_start = dictionary_text.find_first_not_of(" \t",
                                                           colon_index + 1);

    if (std::string::npos != value_start) {
      if (value_start >= line_end)
        break;

      std::string name(dictionary_text, line_start, colon_index - line_start);
      std::string value(dictionary_text, value_start, line_end - value_start);

      name = base::ToLowerASCII(name);

      if (name == "domain") {
        domain = value;
      } else if (name == "path") {
        path = value;
      } else if (name == "format-version") {
        if (value != "1.0")
          return SDCH_DICTIONARY_UNSUPPORTED_VERSION;
      } else if (name == "max-age") {
        int64_t seconds;

        // TODO(eroman): crbug.com/596541 -- should not accept a leading +.
        base::StringToInt64(value, &seconds);
        expiration = base::Time::Now() + base::TimeDelta::FromSeconds(seconds);
      } else if (name == "port") {
        int port;

        if (ParseNonNegativeDecimalInt(value, &port))
          ports.insert(port);
      }
    }

    if (line_end >= header_end)
      break;

    line_start = line_end + 1;
  }

  // Narrow fix for http://crbug.com/389451.
  GURL dictionary_url_normalized(dictionary_url);
  StripTrailingDot(&dictionary_url_normalized);

  SdchProblemCode rv = IsInSupportedDomain(dictionary_url_normalized);

  if (rv != SDCH_OK)
    return rv;

  rv = SdchDictionary::CanSet(domain, path, ports, dictionary_url_normalized);

  if (rv != SDCH_OK)
    return rv;

  UMA_HISTOGRAM_COUNTS("Sdch3.Dictionary size loaded", dictionary_text.size());

  DVLOG(1) << "Loaded dictionary with client hash " << client_hash
           << " and server hash " << server_hash;

  // The payload starts just past the "\n\n" separator (header_end + 2).
  SdchDictionary dictionary(dictionary_text, header_end + 2, client_hash,
                            server_hash, dictionary_url_normalized, domain,
                            path, expiration, ports);

  dictionaries_[server_hash] =
      new base::RefCountedData<SdchDictionary>(dictionary);

  if (server_hash_p)
    *server_hash_p = server_hash;

  FOR_EACH_OBSERVER(SdchObserver, observers_,
                    OnDictionaryAdded(dictionary_url, server_hash));

  return SDCH_OK;
}
// Removes the dictionary identified by |server_hash| and notifies observers.
// Returns SDCH_DICTIONARY_HASH_NOT_FOUND if no such dictionary is loaded.
SdchProblemCode SdchManager::RemoveSdchDictionary(
    const std::string& server_hash) {
  auto it = dictionaries_.find(server_hash);

  if (it == dictionaries_.end())
    return SDCH_DICTIONARY_HASH_NOT_FOUND;

  // Erase through the iterator to avoid a second hash lookup (the original
  // did find() followed by erase-by-key).
  dictionaries_.erase(it);

  FOR_EACH_OBSERVER(SdchObserver, observers_, OnDictionaryRemoved(server_hash));

  return SDCH_OK;
}

// static
// Returns an empty DictionarySet; intended for tests only.
scoped_ptr<SdchManager::DictionarySet>
SdchManager::CreateEmptyDictionarySetForTesting() {
  return scoped_ptr<DictionarySet>(new DictionarySet);
}
// Builds a DictionaryValue snapshot of SDCH state — loaded dictionaries and
// active blacklist entries — e.g. for debugging UIs.
scoped_ptr<base::Value> SdchManager::SdchInfoToValue() const {
  scoped_ptr<base::DictionaryValue> value(new base::DictionaryValue());

  value->SetBoolean("sdch_enabled", true);

  // One entry per loaded dictionary: url, hashes, scoping info, ports.
  scoped_ptr<base::ListValue> entry_list(new base::ListValue());

  for (const auto& entry: dictionaries_) {
    scoped_ptr<base::DictionaryValue> entry_dict(new base::DictionaryValue());

    entry_dict->SetString("url", entry.second->data.url().spec());
    entry_dict->SetString("client_hash", entry.second->data.client_hash());
    entry_dict->SetString("domain", entry.second->data.domain());
    entry_dict->SetString("path", entry.second->data.path());

    scoped_ptr<base::ListValue> port_list(new base::ListValue());

    for (std::set<int>::const_iterator port_it =
             entry.second->data.ports().begin();
         port_it != entry.second->data.ports().end(); ++port_it) {
      port_list->AppendInteger(*port_it);
    }

    entry_dict->Set("ports", std::move(port_list));
    entry_dict->SetString("server_hash", entry.first);
    entry_list->Append(std::move(entry_dict));
  }

  value->Set("dictionaries", std::move(entry_list));

  // One entry per domain with a live blacklist count; "tries" is omitted
  // for permanently blacklisted domains (count == INT_MAX).
  entry_list.reset(new base::ListValue());

  for (DomainBlacklistInfo::const_iterator it = blacklisted_domains_.begin();
       it != blacklisted_domains_.end(); ++it) {
    if (it->second.count == 0)
      continue;

    scoped_ptr<base::DictionaryValue> entry_dict(new base::DictionaryValue());

    entry_dict->SetString("domain", it->first);

    if (it->second.count != INT_MAX)
      entry_dict->SetInteger("tries", it->second.count);

    entry_dict->SetInteger("reason", it->second.reason);
    entry_list->Append(std::move(entry_dict));
  }

  value->Set("blacklisted", std::move(entry_list));

  // NOTE(review): the explicit move appears needed to convert
  // scoped_ptr<DictionaryValue> to scoped_ptr<Value> — confirm before
  // simplifying to a plain return.
  return std::move(value);
}
} // namespace net
|
RawadAlaryan/Design-Patterns
|
src/main/java/behavioralDesignPatterns/stateDesignPattern/projectVendingMachine/Main/Main.java
|
<reponame>RawadAlaryan/Design-Patterns
package behavioralDesignPatterns.stateDesignPattern.projectVendingMachine.Main;
import behavioralDesignPatterns.stateDesignPattern.projectVendingMachine.Context.VendingMachine;
//State Design Pattern Vending Machine.
//Vending Machine activated by Machine States (HasMoney, NoMoney, Sold, SoldOut).
public class Main {
    /**
     * Demonstrates the vending machine's state transitions by running three
     * purchase cycles against a machine stocked with two products; the
     * third cycle therefore exercises the sold-out behavior. Output is
     * identical to the original straight-line version.
     */
    public static void main(String[] args) {
        System.out.println("State Design Pattern Vending Machine.");
        System.out.println("Vending Machine activated by Machine States (HasMoney, NoMoney, Sold, SoldOut).\n");

        VendingMachine vendingMachine = new VendingMachine(2);
        printState(vendingMachine);

        // Three full insert/select/dispense cycles (was copy-pasted 3x).
        for (int cycle = 0; cycle < 3; cycle++) {
            vendingMachine.insertMoney();
            printState(vendingMachine);

            vendingMachine.selectProduct();
            printState(vendingMachine);

            vendingMachine.dispenseProduct();
            printState(vendingMachine);
        }
    }

    /** Prints the machine's current state followed by a blank line. */
    private static void printState(VendingMachine machine) {
        System.out.println(machine); System.out.println();
    }
}
|
dvaerum/r10k
|
lib/r10k/git/rugged.rb
|
<gh_stars>100-1000
require 'r10k/git'
begin
require 'rugged'
rescue LoadError
end
module R10K
module Git
module Rugged
require 'r10k/git/rugged/bare_repository'
require 'r10k/git/rugged/working_repository'
require 'r10k/git/rugged/cache'
require 'r10k/git/rugged/thin_repository'
end
end
end
|
bbleds/note-organizer
|
client/src/components/Landing.js
|
import React from 'react'
export default () => {
return(
<h1>Landing</h1>
)
}
|
WaterKnight/wc3libs
|
src/net/moonlightflower/wc3libs/txt/Jass.java
|
<reponame>WaterKnight/wc3libs<filename>src/net/moonlightflower/wc3libs/txt/Jass.java<gh_stars>0
package net.moonlightflower.wc3libs.txt;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.antlr.v4.runtime.tree.TerminalNode;
import net.moonlightflower.wc3libs.misc.antlr.out.grammar.JassLexer;
import net.moonlightflower.wc3libs.misc.antlr.out.grammar.JassParser;
/**
 * Parses JASS script source: strips comments, normalizes line endings,
 * runs the ANTLR lexer/parser, and dumps the resulting token stream to a
 * debug file.
 */
public class Jass {
	/**
	 * Removes line comments ("//...") and block comments from the source.
	 * NOTE(review): the block-comment pattern is greedy with DOTALL, so
	 * everything between the first opening and the last closing marker is
	 * removed — confirm this is intended for inputs with multiple block
	 * comments.
	 */
	private static String stripComments(String val) {
		val = val.replaceAll("//.*", "");

		Pattern pattern = Pattern.compile(String.format("%s.*%s", Pattern.quote("/*"), Pattern.quote("*/")), Pattern.DOTALL);

		Matcher matcher = pattern.matcher(val);

		val = matcher.replaceAll("");

		return val;
	}

	/**
	 * Reads and parses JASS source from {@code inStream}.
	 *
	 * @param inStream UTF-8 encoded JASS source; not closed by this method
	 * @throws IOException if reading the input or writing the token dump fails
	 */
	private void read(InputStream inStream) throws IOException {
		UTF8 reader = new UTF8(inStream);

		String input = stripComments(reader.readAll());

		input = input.replaceAll("\r\n", "\n");

		ANTLRInputStream antlrStream = new ANTLRInputStream(input);

		Lexer lexer = new JassLexer(antlrStream);

		CommonTokenStream tokens = new CommonTokenStream(lexer);

		JassParser parser = new JassParser(tokens);

		Map<String, String> newEntries = new HashMap<>();

		parser.addParseListener(new ParseTreeListener() {
			@Override
			public void enterEveryRule(ParserRuleContext context) {
				// TODO Auto-generated method stub
				System.err.println("enter: " + context.toString(parser, context.getParent()));
			}

			@Override
			public void exitEveryRule(ParserRuleContext context) {
				if (context instanceof JassParser.GlobalDecContext) {
					JassParser.GlobalDecContext globalDecContext = (JassParser.GlobalDecContext) context;

					//newEntries.put(globalDecContext.id.getText(), dequote(globalDecContext.val.getText()));
					//System.out.println(globalDecContext.type.getText() + "->" + globalDecContext.name.getText());
				}

				if (context instanceof JassParser.FuncContext) {
					JassParser.FuncContext funcContext = (JassParser.FuncContext) context;

					//System.out.println(funcContext.name.getText() + ";" + funcContext.params.getText() + ";" + funcContext.returnType.getText());
				}

				if (context instanceof JassParser.StatementContext) {
					JassParser.StatementContext statementContext = (JassParser.StatementContext) context;

					//System.out.println(statementContext.name.getText() + ";" + statementContext.params.getText() + ";" + statementContext.returnType.getText());
				}

				if (context instanceof JassParser.SetVarContext) {
					JassParser.SetVarContext setVarContext = (JassParser.SetVarContext) context;

					//System.out.println(setVarContext.name.getText() + ";" + setVarContext.val.getText());
				}

				if (context instanceof JassParser.LoopBodyContext) {
					//System.out.println("loopbody exit");
				}

				if (context instanceof JassParser.LoopContext) {
					//System.out.println("loop exit");
				}

				if (!(context instanceof JassParser.RootContext)) {
					//System.err.println("exit " + context.toString(parser, context.getParent()) + "->" + context.getText() + "|||");
				}

				System.err.println("exit: " + context.toString(parser, context.getParent()) + "->" + context.getText());
			}

			@Override
			public void visitErrorNode(ErrorNode arg0) {
				// TODO Auto-generated method stub
			}

			@Override
			public void visitTerminal(TerminalNode arg0) {
				// TODO Auto-generated method stub
			}
		});

		parser.root();

		// TODO(review): hard-coded debug dump path; consider making this
		// configurable or removing it.
		// try-with-resources ensures the writer is closed even if a write
		// fails (previously it leaked on exception).
		try (BufferedWriter tokenOut = new BufferedWriter(new FileWriter("D:\\tokens.txt"))) {
			for (int i = 0; i < tokens.getTokens().size(); i++) {
				Token token = tokens.getTokens().get(i);

				if (token.getType() == -1) continue;

				String tokenS = token.getLine()+";"+token.getCharPositionInLine() + "->" + lexer.getTokenNames()[token.getType()];

				tokenOut.write(tokenS + ";");
				tokenOut.newLine();
			}
		}

		//_map.putAll(newEntries);
	}

	public Jass(InputStream inStream) throws IOException {
		read(inStream);
	}

	/**
	 * Reads and parses the given file; the stream is now closed even when
	 * parsing throws (previously it leaked on exception).
	 */
	public Jass(File file) throws IOException {
		try (InputStream inStream = new FileInputStream(file)) {
			read(inStream);
		}
	}
}
|
TheAdityaKedia/ISAAC
|
provider/sync-git/src/main/java/sh/isaac/provider/sync/git/SyncServiceGIT.java
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
*
* You may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributions from 2013-2017 where performed either by US government
* employees, or under US Veterans Health Administration contracts.
*
* US Veterans Health Administration contributions by government employees
* are work of the U.S. Government and are not subject to copyright
* protection in the United States. Portions contributed by government
* employees are USGovWork (17USC §105). Not subject to copyright.
*
* Contribution by contractors to the US Veterans Health Administration
* during this period are contractually contributed under the
* Apache License, Version 2.0.
*
* See: https://www.usa.gov/government-works
*
* Contributions prior to 2013:
*
* Copyright (C) International Health Terminology Standards Development Organisation.
* Licensed under the Apache License, Version 2.0.
*
*/
package sh.isaac.provider.sync.git;
//~--- JDK imports ------------------------------------------------------------
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
import javax.naming.AuthenticationException;
//~--- non-JDK imports --------------------------------------------------------
import org.eclipse.jgit.api.AddCommand;
import org.eclipse.jgit.api.CheckoutCommand.Stage;
import org.eclipse.jgit.api.CommitCommand;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeResult;
import org.eclipse.jgit.api.ResetCommand.ResetType;
import org.eclipse.jgit.api.RmCommand;
import org.eclipse.jgit.api.Status;
import org.eclipse.jgit.api.errors.CheckoutConflictException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.StashApplyFailureException;
import org.eclipse.jgit.api.errors.TransportException;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.diff.RawTextComparator;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.errors.NoWorkTreeException;
import org.eclipse.jgit.internal.storage.file.FileRepository;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.notes.Note;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.CredentialsProvider;
import org.eclipse.jgit.transport.FetchResult;
import org.eclipse.jgit.transport.JschConfigSessionFactory;
import org.eclipse.jgit.transport.OpenSshConfig.Host;
import org.eclipse.jgit.transport.PushResult;
import org.eclipse.jgit.transport.RefSpec;
import org.eclipse.jgit.transport.SshSessionFactory;
import org.eclipse.jgit.transport.TagOpt;
import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider;
import org.eclipse.jgit.util.StringUtils;
import org.eclipse.jgit.util.io.DisabledOutputStream;
import org.glassfish.hk2.api.PerLookup;
import org.jvnet.hk2.annotations.Service;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.eclipse.jgit.api.InitCommand;
import sh.isaac.api.sync.MergeFailOption;
import sh.isaac.api.sync.MergeFailure;
import sh.isaac.api.sync.SyncFiles;
//~--- classes ----------------------------------------------------------------
/**
* {@link SyncServiceGIT}
*
* A GIT implementation of {@link SyncFiles}.
*
* @author <a href="mailto:daniel.armbrust.list@gmail.com"><NAME></a>
*/
@Service(name = "GIT")
@PerLookup
public class SyncServiceGIT
implements SyncFiles {
/**
* The Constant LOG.
*/
private static final Logger LOG = LogManager.getLogger();
/** The jsch configured. */
private static volatile CountDownLatch jschConfigured = new CountDownLatch(1);
//~--- fields --------------------------------------------------------------
/** The note failed merge happened on remote. */
private final String NOTE_FAILED_MERGE_HAPPENED_ON_REMOTE = "Conflicted merge happened during remote merge";
/** The note failed merge happened on stash. */
private final String NOTE_FAILED_MERGE_HAPPENED_ON_STASH = "Conflicted merge happened during stash merge";
/** The stash marker. */
private final String STASH_MARKER = ":STASH-";
/** The local folder. */
private File localFolder = null;
/** The read me file content. */
private String readMeFileContent = DEFAULT_README_CONTENT;
/** The git ignore text. */
private String gitIgnoreText = "lastUser.txt\r\n";
//~--- constructors --------------------------------------------------------
   /**
    * If you are in an HK2 environment, you would be better served getting this from HK2 (by asking for it by interface and name)
    * but in other environments, when HK2 may not be up, you may construct it directly.
    *
    * <p>The constructor performs one-time, JVM-wide JSch/SSH configuration,
    * guarded by the {@code jschConfigured} latch so it runs only once no
    * matter how many instances are created.
    * NOTE(review): the code synchronizes on the CountDownLatch object itself
    * to serialize the check-then-configure sequence — confirm this is the
    * intended locking scheme.
    */
   public SyncServiceGIT() {
      synchronized (jschConfigured) {
         if (jschConfigured.getCount() > 0) {
            LOG.debug("Disabling strict host key checking");

            // Accept unknown SSH host keys for all JGit sessions.
            final SshSessionFactory factory = new JschConfigSessionFactory() {
               @Override
               protected void configure(Host hc, Session session) {
                  session.setConfig("StrictHostKeyChecking", "no");
               }
            };

            SshSessionFactory.setInstance(factory);
            // Route JSch's own logging into log4j.
            JSch.setLogger(new com.jcraft.jsch.Logger() {
               private final HashMap<Integer, Consumer<String>> logMap = new HashMap<>();
               private final HashMap<Integer, BooleanSupplier> enabledMap = new HashMap<>();

               {
                  // Note- JSCH is _really_ verbose at the INFO level, so I'm mapping info to DEBUG.
                  this.logMap.put(com.jcraft.jsch.Logger.DEBUG, LOG::debug);
                  this.logMap.put(com.jcraft.jsch.Logger.ERROR, LOG::debug);  // error
                  this.logMap.put(com.jcraft.jsch.Logger.FATAL, LOG::debug);  // error
                  this.logMap.put(com.jcraft.jsch.Logger.INFO, LOG::debug);
                  this.logMap.put(com.jcraft.jsch.Logger.WARN, LOG::debug);   // warn
                  this.enabledMap.put(com.jcraft.jsch.Logger.DEBUG, LOG::isDebugEnabled);
                  this.enabledMap.put(com.jcraft.jsch.Logger.ERROR, LOG::isErrorEnabled);
                  this.enabledMap.put(com.jcraft.jsch.Logger.FATAL, LOG::isErrorEnabled);
                  this.enabledMap.put(com.jcraft.jsch.Logger.INFO, LOG::isDebugEnabled);
                  this.enabledMap.put(com.jcraft.jsch.Logger.WARN, LOG::isWarnEnabled);
               }
               @Override
               public void log(int level, String message) {
                  this.logMap.get(level)
                             .accept(message);
               }
               @Override
               public boolean isEnabled(int level) {
                  return this.enabledMap.get(level)
                                        .getAsBoolean();
               }
            });
            jschConfigured.countDown();
         }
      }
   }
//~--- methods -------------------------------------------------------------
   /**
    * Stages the given file patterns (relative to the repository root) in the
    * local git repository's index, equivalent to {@code git add <file>...}.
    * A zero-length argument list is a no-op.
    *
    * @param files the file patterns to stage
    * @throws IllegalArgumentException the illegal argument exception
    * @throws IOException Signals that an I/O exception has occurred (also
    *         wraps any underlying {@link GitAPIException}).
    * @see sh.isaac.api.sync.SyncFiles#addFiles(java.io.File, java.util.Set)
    */
   @Override
   public void addFiles(String... files)
            throws IllegalArgumentException, IOException {
      LOG.info("Add Files called {}", Arrays.toString(files));

      try (Git git = getGit()) {
         if (files.length == 0) {
            LOG.debug("No files to add");
         } else {
            final AddCommand ac = git.add();

            for (final String file: files) {
               ac.addFilepattern(file);
            }

            ac.call();
         }

         LOG.info("addFiles Complete.  Current status: " + statusToString(git.status().call()));
      } catch (final GitAPIException e) {
         // Wrap the JGit exception so callers only deal with IOException.
         LOG.error("Unexpected", e);
         throw new IOException("Internal error", e);
      }
   }
/**
* Adds the untracked files.
*
* @throws IllegalArgumentException the illegal argument exception
* @throws IOException Signals that an I/O exception has occurred.
* @see sh.isaac.api.sync.SyncFiles#addUntrackedFiles(java.io.File)
*/
@Override
public void addUntrackedFiles()
throws IllegalArgumentException, IOException {
LOG.info("Add Untracked files called");
try (Git git = getGit()) {
final Status s = git.status()
.call();
addFiles(s.getUntracked()
.toArray(new String[s.getUntracked().size()]));
} catch (final GitAPIException e) {
LOG.error("Unexpected", e);
throw new IOException("Internal error", e);
}
}
/**
* Create a new branch, and switch to it locally. The new branch will contain no files.
*
* @param branchName the branch name
* @throws IOException Signals that an I/O exception has occurred.
*/
public void branch(String branchName)
throws IOException {
try (Git git = getGit()) {
git.checkout()
.setCreateBranch(true)
.setName(branchName)
.setOrphan(true)
.call();
} catch (final GitAPIException e) {
LOG.error("Unexpected", e);
throw new IOException("Internal error", e);
}
}
/**
* Create a new tag at the current point.
*
* @param commitMessage the commit message
* @param tagName the tag name
* @throws IllegalArgumentException the illegal argument exception
* @throws IOException Signals that an I/O exception has occurred.
*/
public void commitAndTag(String commitMessage, String tagName)
throws IllegalArgumentException, IOException {
try (Git git = getGit()) {
git.commit()
.setAll(true)
.setMessage(commitMessage)
.call();
git.tag()
.setName(tagName)
.call();
} catch (final GitAPIException e) {
LOG.error("Unexpected", e);
throw new IOException("Internal error", e);
}
}
   /**
    * Link and fetch from remote.
    *
    * <p>Turns the local folder into a synchronized clone of the remote repository: creates
    * the local .git repository if necessary, points 'origin' at the given address, fetches,
    * and then either merges with the remote master (when one exists) or seeds the remote
    * with an initial commit/push of the default files.
    *
    * @param remoteAddress the remote address
    * @param username the username used for authentication against the remote
    * @param password the password - null is treated as an empty password
    * @throws IllegalArgumentException the illegal argument exception
    * @throws IOException Signals that an I/O exception has occurred.
    * @throws AuthenticationException when the remote rejects the credentials
    * @see sh.isaac.api.sync.SyncFiles#linkAndFetchFromRemote(java.io.File, java.lang.String, java.lang.String, java.lang.String)
    */
   @Override
   public void linkAndFetchFromRemote(String remoteAddress,
                                      String username,
                                      char[] password)
            throws IllegalArgumentException,
                   IOException,
                   AuthenticationException {
      LOG.info("linkAndFetchFromRemote called - folder: {}, remoteAddress: {}, username: {}",
               this.localFolder,
               remoteAddress,
               username);
      Repository r = null;
      Git git = null;
      try {
         final File gitFolder = new File(this.localFolder, ".git");
         r = new FileRepository(gitFolder);
         // A missing .git folder means this working folder has never been under git
         // control - create a brand new (empty) repository in place.
         if (!gitFolder.isDirectory()) {
            LOG.debug("Root folder does not contain a .git subfolder. Creating new git repository.");
            r.create();
         }
         // Point (or re-point) 'origin' at the requested remote before any network operation.
         relinkRemote(remoteAddress, username, password);
         git = new Git(r);
         final CredentialsProvider cp = new UsernamePasswordCredentialsProvider(username,
                                                                                ((password == null) ? new char[] {}
               : password));
         LOG.debug("Fetching");
         final FetchResult fr = git.fetch()
                                   .setCheckFetchedObjects(true)
                                   .setCredentialsProvider(cp)
                                   .call();
         LOG.debug("Fetch messages: {}", fr.getMessages());
         // Determine whether the remote already has content on master - that decides
         // whether we merge with it, or seed it with our initial commit.
         boolean remoteHasMaster = false;
         final Collection<Ref> refs = git.lsRemote()
                                         .setCredentialsProvider(cp)
                                         .call();
         for (final Ref ref: refs) {
            if ("refs/heads/master".equals(ref.getName())) {
               remoteHasMaster = true;
               LOG.debug("Remote already has 'heads/master'");
               break;
            }
         }
         if (remoteHasMaster) {
            // we need to fetch and (maybe) merge - get onto origin/master.
            LOG.debug("Fetching from remote");
            final String fetchResult = git.fetch()
                                          .setCredentialsProvider(cp)
                                          .call()
                                          .getMessages();
            LOG.debug("Fetch Result: {}", fetchResult);
            LOG.debug("Resetting to origin/master");
            // MIXED reset moves HEAD and the index to origin/master while leaving
            // the files already in the working tree untouched.
            git.reset()
               .setMode(ResetType.MIXED)
               .setRef("origin/master")
               .call();
            // Get the files from master that we didn't have in our working folder
            LOG.debug("Checking out missing files from origin/master");
            for (final String missing: git.status()
                                          .call()
                                          .getMissing()) {
               LOG.debug("Checkout {}", missing);
               git.checkout()
                  .addPath(missing)
                  .call();
            }
            // If the default README / .gitignore had to be created locally, commit
            // and push each of them individually.
            for (final String newFile: makeInitialFilesAsNecessary(this.localFolder)) {
               LOG.debug("Adding and committing {}", newFile);
               git.add()
                  .addFilepattern(newFile)
                  .call();
               // NOTE(review): "42" appears to be a placeholder author email used
               // throughout this class - confirm that is intentional.
               git.commit()
                  .setMessage("Adding " + newFile)
                  .setAuthor(username, "42")
                  .call();
               for (final PushResult pr: git.push()
                                            .setCredentialsProvider(cp)
                                            .call()) {
                  LOG.debug("Push Message: {}", pr.getMessages());
               }
            }
         } else {
            // just push
            // make sure we have something to push
            for (final String newFile: makeInitialFilesAsNecessary(this.localFolder)) {
               LOG.debug("Adding and committing {}", newFile);
               git.add()
                  .addFilepattern(newFile)
                  .call();
            }
            git.commit()
               .setMessage("Adding initial files")
               .setAuthor(username, "42")
               .call();
            LOG.debug("Pushing repository");
            for (final PushResult pr: git.push()
                                         .setCredentialsProvider(cp)
                                         .call()) {
               LOG.debug("Push Result: {}", pr.getMessages());
            }
         }
         LOG.info("linkAndFetchFromRemote Complete. Current status: " + statusToString(git.status().call()));
      } catch (final TransportException te) {
         // JGit reports bad credentials as a TransportException - translate the known
         // credential-failure messages into our AuthenticationException.
         if (te.getMessage().contains("Auth fail") || te.getMessage().contains("not authorized")) {
            LOG.info("Auth fail", te);
            throw new AuthenticationException("Auth fail");
         } else {
            LOG.error("Unexpected", te);
            throw new IOException("Internal error", te);
         }
      } catch (final GitAPIException e) {
         LOG.error("Unexpected", e);
         throw new IOException("Internal error", e);
      } finally {
         // git / r are not in try-with-resources because they are created mid-flow;
         // close both here (git wraps r, but closing both is harmless).
         if (git != null) {
            git.close();
         }
         if (r != null) {
            r.close();
         }
      }
   }
/**
* Push tag.
*
* @param tagName the tag name
* @param username the username
* @param password the password
* @throws IllegalArgumentException the illegal argument exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws AuthenticationException the authentication exception
*/
public void pushTag(final String tagName,
String username,
char[] password)
throws IllegalArgumentException,
IOException,
AuthenticationException {
try (Git git = getGit()) {
final CredentialsProvider cp = new UsernamePasswordCredentialsProvider(username,
((password == null) ? new char[] {}
: password));
final Iterable<PushResult> pr = git.push()
.setRefSpecs(new RefSpec("refs/tags/" + tagName))
.setCredentialsProvider(cp)
.call();
final StringBuilder failures = new StringBuilder();
pr.forEach(t -> {
LOG.debug("Push Result Messages: " + t.getMessages());
if (t.getRemoteUpdate("refs/tags/" + tagName)
.getStatus() != org.eclipse.jgit.transport.RemoteRefUpdate.Status.OK) {
failures.append("Push Failed: ").append(t.getRemoteUpdate("refs/tags/" + tagName).getStatus().name())
.append(" reason: ").append(t.getRemoteUpdate("refs/tags/" + tagName).getMessage());
}
});
if (failures.length() > 0) {
LOG.warn("Throwing IO Exception back to pushTag call because: " + failures.toString());
throw new IOException(failures.toString());
}
} catch (final GitAPIException e) {
if (e.getMessage().contains("Auth fail") || e.getMessage().contains("not authorized")) {
LOG.info("Auth fail", e);
throw new AuthenticationException("Auth fail");
} else {
LOG.error("Unexpected", e);
throw new IOException("Internal error", e);
}
}
}
/**
* Read tags.
*
* @param username the username
* @param password the password
* @return the array list
* @throws IllegalArgumentException the illegal argument exception
* @throws IOException Signals that an I/O exception has occurred.
* @throws AuthenticationException the authentication exception
*/
public ArrayList<String> readTags(String username,
char[] password)
throws IllegalArgumentException,
IOException,
AuthenticationException {
try (Git git = getGit()) {
final ArrayList<String> results = new ArrayList<>();
final CredentialsProvider cp = new UsernamePasswordCredentialsProvider(username,
((password == null) ? new char[] {}
: password));
git.fetch()
.setTagOpt(TagOpt.FETCH_TAGS)
.setCredentialsProvider(cp)
.call();
for (final Ref x: git.tagList()
.call()) {
results.add(x.getName());
}
git.close();
return results;
} catch (final GitAPIException e) {
if (e.getMessage().contains("Auth fail") || e.getMessage().contains("not authorized")) {
LOG.info("Auth fail", e);
throw new AuthenticationException("Auth fail");
} else {
LOG.error("Unexpected", e);
throw new IOException("Internal error", e);
}
}
}
/**
* Relink remote.
*
* @param remoteAddress the remote address
* @param username the username
* @param password the password
* @throws IllegalArgumentException the illegal argument exception
* @throws IOException Signals that an I/O exception has occurred.
* @see sh.isaac.api.sync.SyncFiles#relinkRemote(java.lang.String, java.lang.String, java.lang.String)
*/
@Override
public void relinkRemote(String remoteAddress,
String username,
char[] password)
throws IllegalArgumentException,
IOException {
try (Git git = getGit()) {
LOG.debug("Configuring remote URL and fetch defaults to {}", remoteAddress);
final StoredConfig sc = git.getRepository()
.getConfig();
sc.setString("remote", "origin", "url", remoteAddress);
sc.setString("remote", "origin", "fetch", "+refs/heads/*:refs/remotes/origin/*");
sc.save();
}
}
/**
* Removes the files.
*
* @param files the files
* @throws IllegalArgumentException the illegal argument exception
* @throws IOException Signals that an I/O exception has occurred.
* @see sh.isaac.api.sync.SyncFiles#removeFiles(java.io.File, java.util.Set)
*/
@Override
public void removeFiles(String... files)
throws IllegalArgumentException, IOException {
LOG.info("Remove Files called {}", Arrays.toString(files));
try (Git git = getGit()) {
if (files.length == 0) {
LOG.debug("No files to remove");
} else {
final RmCommand rm = git.rm();
for (final String file: files) {
rm.addFilepattern(file);
}
rm.call();
}
LOG.info("removeFiles Complete. Current status: " + statusToString(git.status().call()));
} catch (final GitAPIException e) {
LOG.error("Unexpected", e);
throw new IOException("Internal error", e);
}
}
   /**
    * Resolve merge failures.
    *
    * <p>Reads the state-tracking git 'note' (written by {@code updateFromRemote} when a
    * merge or stash-apply failed) to work out whether the failure happened while merging
    * from the remote or while re-applying a stash, then delegates to the private
    * resolver with the caller-supplied per-file resolutions.
    *
    * @param resolutions one resolution per conflicting file - a resolution is required
    *  for every file currently in conflict
    * @return the set of files changed while resolving
    * @throws IllegalArgumentException if there are no conflicts, resolutions are missing,
    *  or the tracking note is absent/unreadable
    * @throws IOException Signals that an I/O exception has occurred.
    * @throws NoWorkTreeException the no work tree exception
    * @throws MergeFailure the merge failure
    * @see sh.isaac.api.sync.SyncFiles#resolveMergeFailures(java.io.File, java.util.Map)
    */
   @Override
   public Set<String> resolveMergeFailures(Map<String, MergeFailOption> resolutions)
            throws IllegalArgumentException,
                   IOException,
                   NoWorkTreeException,
                   MergeFailure {
      LOG.info("resolve merge failures called - resolutions: {}", resolutions);
      try (Git git = getGit()) {
         final List<Note> notes = git.notesList()
                                     .call();
         final Set<String> conflicting = git.status()
                                            .call()
                                            .getConflicting();
         if (conflicting.isEmpty()) {
            throw new IllegalArgumentException("You do not appear to have any conflicting files");
         }
         // Every conflicting file must have a resolution - no more, no fewer.
         if (conflicting.size() != resolutions.size()) {
            throw new IllegalArgumentException(
                "You must provide a resolution for each conflicting file. Files in conflict: " + conflicting);
         }
         for (final String s: conflicting) {
            if (!resolutions.containsKey(s)) {
               throw new IllegalArgumentException("No conflit resolution specified for file " + s +
                     ". Resolutions must be specified for all files");
            }
         }
         if ((notes == null) || (notes.isEmpty())) {
            throw new IllegalArgumentException(
                "The 'note' that is required for tracking state is missing. This merge failure must be resolved on the command line");
         }
         // The note's content tells us which kind of failure we are recovering from,
         // and (optionally, after the stash marker) which stash still needs replaying.
         final String noteValue = new String(git.getRepository().open(notes.get(0).getData()).getBytes());
         MergeFailType mergeFailType;
         if (noteValue.startsWith(this.NOTE_FAILED_MERGE_HAPPENED_ON_REMOTE)) {
            mergeFailType = MergeFailType.REMOTE_TO_LOCAL;
         } else if (noteValue.startsWith(this.NOTE_FAILED_MERGE_HAPPENED_ON_STASH)) {
            mergeFailType = MergeFailType.STASH_TO_LOCAL;
         } else {
            throw new IllegalArgumentException(
                "The 'note' that is required for tracking state contains an unexpected value of '" + noteValue + "'");
         }
         String stashIdToApply = null;
         if (noteValue.contains(this.STASH_MARKER)) {
            stashIdToApply = noteValue.substring(noteValue.indexOf(this.STASH_MARKER) + this.STASH_MARKER.length());
         }
         return resolveMergeFailures(mergeFailType, stashIdToApply, resolutions);
      } catch (GitAPIException | LargeObjectException e) {
         LOG.error("Unexpected", e);
         throw new IOException("Internal error", e);
      }
   }
/**
* Substitute URL.
*
* @param url the url
* @param username the username
* @return the string
* @see sh.isaac.api.sync.SyncFiles#substituteURL(java.lang.String, java.lang.String)
*
* Turns
* ssh://someuser@csfe.aceworkspace.net:29418/... into
* ssh://username.toString()@csfe.aceworkspace.net:29418/...
*
* Otherwise, returns URL.
*/
@Override
public String substituteURL(String url, String username) {
if (url.startsWith("ssh://") && url.contains("@")) {
final int index = url.indexOf("@");
url = "ssh://" + username + url.substring(index);
}
return url;
}
   /**
    * Commit the given files locally, merge with the remote, and push.
    *
    * @param commitMessage the commit message - required whenever files are being committed
    * @param username the username used for authentication against the remote
    * @param password the password - null is treated as an empty password
    * @param mergeFailOption how conflicts found during the pre-push merge are handled
    * @param files the files to commit - null means "commit all uncommitted changes"
    * @return the set of files changed by the merge with the remote
    * @throws IllegalArgumentException the illegal argument exception
    * @throws IOException Signals that an I/O exception has occurred.
    * @throws MergeFailure when conflicts occur (or were already pending) and the option is FAIL
    * @throws AuthenticationException when the remote rejects the credentials
    * @see sh.isaac.api.sync.SyncFiles#updateCommitAndPush(java.io.File, java.lang.String, java.lang.String, java.lang.String,
    * java.lang.String[])
    */
   @Override
   public Set<String> updateCommitAndPush(String commitMessage,
         String username,
         char[] password,
         MergeFailOption mergeFailOption,
         String... files)
            throws IllegalArgumentException,
                   IOException,
                   MergeFailure,
                   AuthenticationException {
      LOG.info("Commit Files called {}", ((files == null) ? "-null-"
            : Arrays.toString(files)));
      try (Git git = getGit()) {
         // Refuse to pile more work on top of an unresolved merge conflict.
         if (git.status()
                .call()
                .getConflicting()
                .size() > 0) {
            LOG.info("Previous merge failure not yet resolved");
            throw new MergeFailure(git.status().call().getConflicting(), new HashSet<>());
         }
         // null means "commit everything that is currently uncommitted".
         if (files == null) {
            files = git.status()
                       .call()
                       .getUncommittedChanges()
                       .toArray(new String[0]);
            LOG.info("Will commit the uncommitted files {}", Arrays.toString(files));
         }
         if (StringUtils.isEmptyOrNull(commitMessage) && (files.length > 0)) {
            throw new IllegalArgumentException("The commit message is required when files are specified");
         }
         if (files.length > 0) {
            final CommitCommand commit = git.commit();
            for (final String file: files) {
               commit.setOnly(file);
            }
            // NOTE(review): "42" appears to be a placeholder author email used
            // throughout this class - confirm that is intentional.
            commit.setAuthor(username, "42");
            commit.setMessage(commitMessage);
            final RevCommit rv = commit.call();
            LOG.debug("Local commit completed: " + rv.getFullMessage());
         }
         // need to merge origin/master into master now, prior to push
         final Set<String> result = updateFromRemote(username, password, mergeFailOption);
         LOG.debug("Pushing");
         final CredentialsProvider cp = new UsernamePasswordCredentialsProvider(username,
                                                                                ((password == null) ? new char[] {}
               : password));
         final Iterable<PushResult> pr = git.push()
                                            .setCredentialsProvider(cp)
                                            .call();
         pr.forEach(t -> LOG.debug("Push Result Messages: " + t.getMessages()));
         LOG.info("commit and push complete. Current status: " + statusToString(git.status().call()));
         return result;
      } catch (final TransportException te) {
         // JGit reports bad credentials as a TransportException - translate the known
         // credential-failure messages into our AuthenticationException.
         if (te.getMessage().contains("Auth fail") || te.getMessage().contains("not authorized")) {
            LOG.info("Auth fail", te);
            throw new AuthenticationException("Auth fail");
         } else {
            LOG.error("Unexpected", te);
            throw new IOException("Internal error", te);
         }
      } catch (final GitAPIException e) {
         LOG.error("Unexpected", e);
         throw new IOException("Internal error", e);
      }
   }
   /**
    * Update from remote.
    *
    * <p>Fetches from origin, stashes any uncommitted local changes, merges
    * origin/master into the local master, then replays the stash.  A conflict in
    * either the merge or the stash replay is handled per {@code mergeFailOption}:
    * FAIL (or null) records a state-tracking note and throws {@link MergeFailure};
    * KEEP_LOCAL / KEEP_REMOTE auto-resolve every conflicting file.
    *
    * @param username the username used for authentication against the remote
    * @param password the password - null is treated as an empty password
    * @param mergeFailOption how conflicts are handled (see above)
    * @return the set of files changed by the merge
    * @throws IllegalArgumentException the illegal argument exception
    * @throws IOException Signals that an I/O exception has occurred.
    * @throws MergeFailure the merge failure
    * @throws AuthenticationException when the remote rejects the credentials
    * @see sh.isaac.api.sync.SyncFiles#updateFromRemote(java.io.File, java.lang.String, java.lang.String,
    * sh.isaac.api.sync.MergeFailOption)
    */
   @Override
   public Set<String> updateFromRemote(String username,
         char[] password,
         MergeFailOption mergeFailOption)
            throws IllegalArgumentException,
                   IOException,
                   MergeFailure,
                   AuthenticationException {
      LOG.info("update from remote called ");
      Set<String> filesChangedDuringPull;
      try (Git git = getGit()) {
         LOG.debug("Fetching from remote");
         // Refuse to start a new merge while a previous conflict is still unresolved.
         if (git.status()
                .call()
                .getConflicting()
                .size() > 0) {
            LOG.info("Previous merge failure not yet resolved");
            throw new MergeFailure(git.status().call().getConflicting(), new HashSet<>());
         }
         final CredentialsProvider cp = new UsernamePasswordCredentialsProvider(username,
                                                                                ((password == null) ? new char[] {}
               : password));
         LOG.debug("Fetch Message" + git.fetch().setCredentialsProvider(cp).call().getMessages());
         final ObjectId masterIdBeforeMerge = git.getRepository()
                                                 .findRef("master")
                                                 .getObjectId();
         // If origin/master already equals the local master, there is nothing to merge.
         if (git.getRepository()
                .exactRef("refs/remotes/origin/master")
                .getObjectId()
                .getName()
                .equals(masterIdBeforeMerge.getName())) {
            LOG.info("No changes to merge");
            return new HashSet<>();
         }
         // Park any uncommitted local changes in a stash so the merge runs on a clean
         // tree - the stash is re-applied after the merge completes, below.
         RevCommit stash = null;
         if (git.status()
                .call()
                .getUncommittedChanges()
                .size() > 0) {
            LOG.info("Stashing uncommitted changes");
            stash = git.stashCreate()
                       .call();
         }
         {
            LOG.debug("Merging from remotes/origin/master");
            final MergeResult mr = git.merge()
                                      .include(git.getRepository()
                                            .exactRef("refs/remotes/origin/master"))
                                      .call();
            final AnyObjectId headAfterMergeID = mr.getNewHead();
            if (!mr.getMergeStatus()
                   .isSuccessful()) {
               if ((mergeFailOption == null) || (MergeFailOption.FAIL == mergeFailOption)) {
                  // Record (in a git note) where the failure happened - and the stash id,
                  // if any - so a later resolveMergeFailures(...) can pick up from here.
                  addNote(this.NOTE_FAILED_MERGE_HAPPENED_ON_REMOTE + ((stash == null) ? ":NO_STASH"
                        : this.STASH_MARKER + stash.getName()), git);
                  // We can use the status here - because we already stashed the stuff that they had uncommitted above.
                  throw new MergeFailure(mr.getConflicts().keySet(), git.status().call().getUncommittedChanges());
               } else if ((MergeFailOption.KEEP_LOCAL == mergeFailOption) ||
                          (MergeFailOption.KEEP_REMOTE == mergeFailOption)) {
                  // Auto-resolve every conflicting file with the caller-requested option.
                  final HashMap<String, MergeFailOption> resolutions = new HashMap<>();
                  mr.getConflicts()
                    .keySet().forEach((s) -> {
                        resolutions.put(s, mergeFailOption);
                     });
                  LOG.debug("Resolving merge failures with option {}", mergeFailOption);
                  filesChangedDuringPull = resolveMergeFailures(MergeFailType.REMOTE_TO_LOCAL, ((stash == null) ? null
                        : stash.getName()), resolutions);
               } else {
                  throw new IllegalArgumentException("Unexpected option");
               }
            } else {
               // Conflict free merge - or perhaps, no merge at all.
               if (masterIdBeforeMerge.getName()
                                      .equals(headAfterMergeID.getName())) {
                  LOG.debug("Merge didn't result in a commit - no incoming changes");
                  filesChangedDuringPull = new HashSet<>();
               } else {
                  filesChangedDuringPull = listFilesChangedInCommit(git.getRepository(),
                        masterIdBeforeMerge,
                        headAfterMergeID);
               }
            }
         }
         // Re-apply (and drop) the stash that was created above, if any.
         // NOTE(review): when a failed merge was auto-resolved above, resolveMergeFailures
         // has already replayed (and dropped) this stash - this block may then attempt to
         // apply it a second time.  Verify whether that path is reachable/intended.
         if (stash != null) {
            LOG.info("Replaying stash");
            try {
               git.stashApply()
                  .setStashRef(stash.getName())
                  .call();
               LOG.debug("stash applied cleanly, dropping stash");
               git.stashDrop()
                  .call();
            } catch (final StashApplyFailureException e) {
               LOG.debug("Stash failed to merge");
               if ((mergeFailOption == null) || (MergeFailOption.FAIL == mergeFailOption)) {
                  addNote(this.NOTE_FAILED_MERGE_HAPPENED_ON_STASH, git);
                  throw new MergeFailure(git.status().call().getConflicting(), filesChangedDuringPull);
               } else if ((MergeFailOption.KEEP_LOCAL == mergeFailOption) ||
                          (MergeFailOption.KEEP_REMOTE == mergeFailOption)) {
                  final HashMap<String, MergeFailOption> resolutions = new HashMap<>();
                  for (final String s: git.status()
                                          .call()
                                          .getConflicting()) {
                     resolutions.put(s, mergeFailOption);
                  }
                  LOG.debug("Resolving stash apply merge failures with option {}", mergeFailOption);
                  resolveMergeFailures(MergeFailType.STASH_TO_LOCAL, null, resolutions);
                  // When we auto resolve to KEEP_LOCAL - these files won't have really changed, even though we recorded a change above.
                  resolutions.entrySet().stream().filter((r) -> (MergeFailOption.KEEP_LOCAL == r.getValue())).forEachOrdered((r) -> {
                        filesChangedDuringPull.remove(r.getKey());
                     });
               } else {
                  throw new IllegalArgumentException("Unexpected option");
               }
            }
         }
         LOG.info("Files changed during updateFromRemote: {}", filesChangedDuringPull);
         return filesChangedDuringPull;
      } catch (final CheckoutConflictException e) {
         LOG.error("Unexpected", e);
         throw new IOException(
             "A local file exists (but is not yet added to source control) which conflicts with a file from the server." +
             " Either delete the local file, or call addFile(...) on the offending file prior to attempting to update from remote.",
             e);
      } catch (final TransportException te) {
         if (te.getMessage().contains("Auth fail") || te.getMessage().contains("not authorized")) {
            LOG.info("Auth fail", te);
            throw new AuthenticationException("Auth fail");
         } else {
            LOG.error("Unexpected", te);
            throw new IOException("Internal error", te);
         }
      } catch (final GitAPIException e) {
         LOG.error("Unexpected", e);
         throw new IOException("Internal error", e);
      }
   }
/**
* Adds the note.
*
* @param message the message
* @param git the git
* @throws IOException Signals that an I/O exception has occurred.
* @throws GitAPIException the git API exception
*/
private void addNote(String message, Git git)
throws IOException, GitAPIException {
try (RevWalk walk = new RevWalk(git.getRepository())) {
final Ref head = git.getRepository()
.exactRef("refs/heads/master");
final RevCommit commit = walk.parseCommit(head.getObjectId());
git.notesAdd()
.setObjectId(commit)
.setMessage(message)
.call();
}
}
/**
* List files changed in commit.
*
* @param repository the repository
* @param beforeID the before ID
* @param afterID the after ID
* @return the hash set
* @throws MissingObjectException the missing object exception
* @throws IncorrectObjectTypeException the incorrect object type exception
* @throws IOException Signals that an I/O exception has occurred.
*/
private HashSet<String> listFilesChangedInCommit(Repository repository,
AnyObjectId beforeID,
AnyObjectId afterID)
throws MissingObjectException,
IncorrectObjectTypeException,
IOException {
LOG.info("calculating files changed in commit");
final HashSet<String> result = new HashSet<>();
final RevCommit commitBefore;
final RevCommit commitAfter;
try (RevWalk rw = new RevWalk(repository)) {
commitBefore = rw.parseCommit(beforeID);
commitAfter = rw.parseCommit(afterID);
}
try (DiffFormatter df = new DiffFormatter(DisabledOutputStream.INSTANCE)) {
df.setRepository(repository);
df.setDiffComparator(RawTextComparator.DEFAULT);
df.setDetectRenames(true);
final List<DiffEntry> diffs = df.scan(commitBefore.getTree(), commitAfter.getTree());
diffs.forEach((diff) -> {
result.add(diff.getNewPath());
});
}
LOG.debug("Files changed between commits commit: {} and {} - {}", beforeID.getName(), afterID, result);
return result;
}
/**
* returns a list of newly created files and files that were modified.
*
* @param containingFolder the containing folder
* @return the list
* @throws IOException Signals that an I/O exception has occurred.
*/
private List<String> makeInitialFilesAsNecessary(File containingFolder)
throws IOException {
final ArrayList<String> result = new ArrayList<>();
final File readme = new File(containingFolder, "README.md");
if (!readme.isFile()) {
LOG.debug("Creating {}", readme.getAbsolutePath());
Files.write(readme.toPath(), this.readMeFileContent.getBytes(), StandardOpenOption.CREATE_NEW);
result.add(readme.getName());
} else {
LOG.debug("README.md already exists");
}
final File ignore = new File(containingFolder, ".gitignore");
if (!ignore.isFile()) {
LOG.debug("Creating {}", ignore.getAbsolutePath());
Files.write(ignore.toPath(), this.gitIgnoreText.getBytes(), StandardOpenOption.CREATE_NEW);
result.add(ignore.getName());
} else {
LOG.debug(".gitignore already exists");
if (!new String(Files.readAllBytes(ignore.toPath())).contains(this.gitIgnoreText)) {
LOG.debug("Appending onto existing .gitignore file");
Files.write(ignore.toPath(),
("\r\n" + this.gitIgnoreText).getBytes(),
StandardOpenOption.APPEND);
result.add(ignore.getName());
}
}
return result;
}
   /**
    * Resolve merge failures.
    *
    * <p>Checks out the requested side (OURS/THEIRS) for each conflicting file, commits
    * the resolution, removes the state-tracking note, and replays any stash that was
    * recorded in the note.
    *
    * @param mergeFailType whether the failure occurred merging from remote or applying a stash
    *  (this flips the meaning of OURS vs THEIRS - see comment in the body)
    * @param stashIDToApply the stash ID to apply after resolution, or null/empty for none
    * @param resolutions one KEEP_LOCAL / KEEP_REMOTE resolution per conflicting file
    * @return the set of files changed while resolving
    * @throws IllegalArgumentException if any resolution is null or FAIL
    * @throws IOException Signals that an I/O exception has occurred.
    * @throws MergeFailure if replaying the recorded stash itself fails to merge
    */
   private Set<String> resolveMergeFailures(MergeFailType mergeFailType,
         String stashIDToApply,
         Map<String, MergeFailOption> resolutions)
            throws IllegalArgumentException,
                   IOException,
                   MergeFailure {
      LOG.debug("resolve merge failures called - mergeFailType: {} stashIDToApply: {} resolutions: {}",
                mergeFailType,
                stashIDToApply,
                resolutions);
      try (Git git = getGit();) {
         // We unfortunately, must know the mergeFailType option, because the resolution mechanism here uses OURS and THEIRS - but the
         // meaning of OURS and THEIRS reverse, depending on if you are recovering from a merge failure, or a stash apply failure.
         for (final Entry<String, MergeFailOption> r: resolutions.entrySet()) {
            if (null == r.getValue()) {
               throw new IllegalArgumentException("MergeFailOption is required");
            } else switch (r.getValue()) {
            case FAIL:
               throw new IllegalArgumentException("MergeFailOption.FAIL is not a valid option");
            case KEEP_LOCAL:
               LOG.debug("Keeping our local file for conflict {}", r.getKey());
               git.checkout()
                  .addPath(r.getKey())
                  .setStage((MergeFailType.REMOTE_TO_LOCAL == mergeFailType) ? Stage.OURS
                        : Stage.THEIRS)
                  .call();
               break;
            case KEEP_REMOTE:
               LOG.debug("Keeping remote file for conflict {}", r.getKey());
               git.checkout()
                  .addPath(r.getKey())
                  .setStage((MergeFailType.REMOTE_TO_LOCAL == mergeFailType) ? Stage.THEIRS
                        : Stage.OURS)
                  .call();
               break;
            default:
               throw new IllegalArgumentException("MergeFailOption is required");
            }
            LOG.debug("calling add to mark merge resolved");
            git.add()
               .addFilepattern(r.getKey())
               .call();
         }
         if (mergeFailType == MergeFailType.STASH_TO_LOCAL) {
            // clean up the stash
            LOG.debug("Dropping stash");
            git.stashDrop()
               .call();
         }
         // Capture the current HEAD commit - the state-tracking note (if any) hangs off it,
         // and it also serves as the 'before' point for the changed-files diff below.
         final RevCommit commitWithPotentialNote;
         try (RevWalk walk = new RevWalk(git.getRepository())) {
            final Ref head = git.getRepository()
                                .exactRef("refs/heads/master");
            commitWithPotentialNote = walk.parseCommit(head.getObjectId());
         }
         LOG.info("resolve merge failures Complete. Current status: " + statusToString(git.status().call()));
         final RevCommit rc = git.commit()
                                 .setMessage("Merging with user specified merge failure resolution for files " +
                                       resolutions.keySet())
                                 .call();
         // The resolution is committed - remove the state-tracking note.
         git.notesRemove()
            .setObjectId(commitWithPotentialNote)
            .call();
         final Set<String> filesChangedInCommit = listFilesChangedInCommit(git.getRepository(),
               commitWithPotentialNote.getId(),
               rc);
         // When we auto resolve to KEEP_REMOTE - these will have changed - make sure they are in the list.
         // seems like this shouldn't really be necessary - need to look into the listFilesChangedInCommit algorithm closer.
         // this might already be fixed by the rework on 11/12/14, but no time to validate at the moment. - doesn't do any harm.
         for (final Entry<String, MergeFailOption> r: resolutions.entrySet()) {
            if (MergeFailOption.KEEP_REMOTE == r.getValue()) {
               filesChangedInCommit.add(r.getKey());
            }
            if (MergeFailOption.KEEP_LOCAL == r.getValue()) {
               filesChangedInCommit.remove(r.getKey());
            }
         }
         // A failed remote merge may have stashed local changes (recorded in the note) - replay them now.
         if (!StringUtils.isEmptyOrNull(stashIDToApply)) {
            LOG.info("Replaying stash identified in note");
            try {
               git.stashApply()
                  .setStashRef(stashIDToApply)
                  .call();
               LOG.debug("stash applied cleanly, dropping stash");
               git.stashDrop()
                  .call();
            } catch (final StashApplyFailureException e) {
               LOG.debug("Stash failed to merge");
               addNote(this.NOTE_FAILED_MERGE_HAPPENED_ON_STASH, git);
               throw new MergeFailure(git.status().call().getConflicting(), filesChangedInCommit);
            }
         }
         return filesChangedInCommit;
      } catch (final GitAPIException e) {
         LOG.error("Unexpected", e);
         throw new IOException("Internal error", e);
      }
   }
/**
* Status to string.
*
* @param status the status
* @return the string
*/
private String statusToString(Status status) {
final StringBuilder sb = new StringBuilder();
sb.append(" Is clean: ")
.append(status.isClean())
.append(String.format("%n"));
sb.append(" Changed: ")
.append(status.getChanged())
.append(String.format("%n"));
sb.append(" Added: ")
.append(status.getAdded())
.append(String.format("%n"));
sb.append(" Conflicting: ")
.append(status.getConflicting())
.append(String.format("%n"));
sb.append(" Ignored, unindexed: ")
.append(status.getIgnoredNotInIndex())
.append(String.format("%n"));
sb.append(" Missing: ")
.append(status.getMissing())
.append(String.format("%n"));
sb.append(" Modified: ")
.append(status.getModified())
.append(String.format("%n"));
sb.append(" Removed: ")
.append(status.getRemoved())
.append(String.format("%n"));
sb.append(" UncomittedChanges: ")
.append(status.getUncommittedChanges())
.append(String.format("%n"));
sb.append(" Untracked: ")
.append(status.getUntracked())
.append(String.format("%n"));
sb.append(" UntrackedFolders: ")
.append(status.getUntrackedFolders())
.append(String.format("%n"));
return sb.toString();
}
//~--- get methods ---------------------------------------------------------
/**
* Gets the files in merge conflict.
*
* @return the files in merge conflict
* @throws IOException Signals that an I/O exception has occurred.
* @see sh.isaac.api.sync.SyncFiles#getFilesInMergeConflict()
*/
@Override
public Set<String> getFilesInMergeConflict()
throws IOException {
try (Git git = getGit()) {
return git.status()
.call()
.getConflicting();
} catch (final Exception e) {
LOG.error("Unexpected", e);
throw new IOException("Internal error", e);
}
}
   /**
    * Open the git repository at the configured root location.
    *
    * @return a {@link Git} handle - the caller is responsible for closing it
    * @throws IOException if the repository cannot be opened
    * @throws IllegalArgumentException if no root location has been set, it is not an
    *  existing folder, or it does not contain a .git repository
    */
   private Git getGit()
            throws IOException, IllegalArgumentException {
      if (this.localFolder == null) {
         throw new IllegalArgumentException("localFolder has not yet been set - please call setRootLocation(...)");
      }
      if (!this.localFolder.isDirectory()) {
         LOG.error("The passed in local folder '{}' didn't exist", this.localFolder);
         throw new IllegalArgumentException("The localFolder must be a folder, and must exist");
      }
      final File gitFolder = new File(this.localFolder, ".git");
      if (!gitFolder.isDirectory()) {
         LOG.error("The passed in local folder '{}' does not appear to be a git repository", this.localFolder);
         throw new IllegalArgumentException("The localFolder does not appear to be a git repository");
      }
      return Git.open(gitFolder);
   }
//~--- set methods ---------------------------------------------------------
   /**
    * Set the contents of the gitIgnore file.
    *
    * <p>Only affects .gitignore files created or appended to by later operations
    * (initial file creation) - it does not rewrite an already-complete file.
    *
    * @param gitIgnoreContent the new git ignore content
    */
   public void setGitIgnoreContent(String gitIgnoreContent) {
      this.gitIgnoreText = gitIgnoreContent;
   }
//~--- get methods ---------------------------------------------------------
/**
* Gets the locally modified file count.
*
* @return the locally modified file count
* @throws IOException Signals that an I/O exception has occurred.
* @see sh.isaac.api.sync.SyncFiles#getLocallyModifiedFileCount()
*/
@Override
public int getLocallyModifiedFileCount()
throws IOException {
try (Git git = getGit()) {
return git.status()
.call()
.getUncommittedChanges()
.size();
} catch (final Exception e) {
LOG.error("Unexpected", e);
throw new IOException("Internal error", e);
}
}
//~--- set methods ---------------------------------------------------------
/**
 * Sets the readme file content.
 *
 * <p>Only stores the text in memory; this call writes no file.
 *
 * @param readmeFileContent the new readme file content
 * @see sh.isaac.api.sync.SyncFiles#setReadmeFileContent(java.lang.String)
 */
@Override
public void setReadmeFileContent(String readmeFileContent) {
   this.readMeFileContent = readmeFileContent;
}
//~--- get methods ---------------------------------------------------------
/**
 * Gets the root location.
 *
 * <p>May be {@code null} until {@code setRootLocation(...)} has been called.
 *
 * @return the root location
 * @see sh.isaac.api.sync.SyncFiles#getRootLocation()
 */
@Override
public File getRootLocation() {
   return this.localFolder;
}
//~--- set methods ---------------------------------------------------------
/**
 * Sets the root location.
 *
 * <p>The folder must already exist; it is not required to be a git
 * repository yet (that is checked later by {@code getGit()}).
 *
 * @param localFolder the new root location
 * @throws IllegalArgumentException if {@code localFolder} is null or not an existing directory
 * @see sh.isaac.api.sync.SyncFiles#setRootLocation(java.io.File)
 */
@Override
public void setRootLocation(File localFolder)
        throws IllegalArgumentException {
   if (localFolder == null) {
      throw new IllegalArgumentException("The localFolder is required");
   }
   if (!localFolder.isDirectory()) {
      LOG.error("The passed in local folder '{}' didn't exist", localFolder);
      throw new IllegalArgumentException("The localFolder must be a folder, and must exist");
   }
   this.localFolder = localFolder;
}
//~--- get methods ---------------------------------------------------------
/**
 * Tells whether the configured root folder is already a git working copy.
 *
 * @return true when a {@code .git} directory exists directly under the root location
 * @see sh.isaac.api.sync.SyncFiles#isRootLocationConfiguredForSCM()
 */
@Override
public boolean isRootLocationConfiguredForSCM() {
   final File gitFolder = new File(this.localFolder, ".git");
   return gitFolder.isDirectory();
}
/**
 * Initializes a new git repository in the configured local folder.
 *
 * <p>NOTE(review): {@code makeInitialFilesAsNecessary(localFolder)} runs
 * before {@code initCommand.call()}, i.e. before the {@code .git} folder
 * exists — presumably it only creates plain files (README, .gitignore);
 * confirm it does not require an initialized repository.
 *
 * @return the git handle for the newly initialized repository (caller must close it)
 * @throws IOException if the underlying git init fails
 */
public Git initialize() throws IOException {
   try {
      InitCommand initCommand = Git.init();
      initCommand.setDirectory(localFolder);
      makeInitialFilesAsNecessary(localFolder);
      return initCommand.call();
   } catch (GitAPIException ex) {
      throw new IOException("Internal error", ex);
   }
}
}
|
Tenebrar/codebase
|
hacker/challenges/crypto/the_lightest_touch.py
|
<filename>hacker/challenges/crypto/the_lightest_touch.py<gh_stars>1-10
from hacker.bytestreams import substrings
from hacker.codes import BRAILLE
from hacker.decoder import decode
# Each Braille cell is three characters wide in the puzzle text; only the
# first two columns of every row carry dots, the third acts as a separator.
value = [
    ' . . . . .. . . . . . . . . . .. .. . .. . . . . .. .. . ',
    '.. .. . . . .. . .. . . . . . . . . . . . . . . ',
    '. . . . . . . . .. . . . . . ',
]

# Walk the three rows in lock-step, three columns at a time, keep the two
# dot-bearing columns of each row, and decode the stacked 2x3 cell as Braille.
cells = zip(*(substrings(row, 3) for row in value))
result = decode(cells, lambda cell: ''.join(part[:2] for part in cell), BRAILLE)
print(result)
|
lit-uriy/mtl4-mirror
|
libs/numeric/mtl/examples/nesting/blas_functor_auto.hpp
|
// Function-object wrapper around blas_mult_ft for the matrix product c = a * b.
// Presumably blas_mult_ft dispatches to a BLAS routine when the matrix types
// allow it and otherwise falls back to the 'Backup' functor — confirm against
// blas_mult_ft's definition.
template <typename Backup>
struct blas_mult_t
  : public Backup
{
    // Forwards c = a * b to the function template doing the actual dispatch.
    template <typename MatrixA, typename MatrixB, typename MatrixC>
    void operator()(MatrixA const& a, MatrixB const& b, MatrixC& c)
    {
	blas_mult_ft<MatrixA, MatrixB, MatrixC, Backup>()(a, b, c);
    }
};
|
niklasf/flexmark-java
|
flexmark/src/main/java/com/vladsch/flexmark/html/RendererBuilder.java
|
package com.vladsch.flexmark.html;
import com.vladsch.flexmark.html.renderer.HeaderIdGeneratorFactory;
import com.vladsch.flexmark.util.data.DataHolder;
import org.jetbrains.annotations.NotNull;
/**
* Extension point for RenderingExtensions that only provide attributes, link resolvers or html id generators
*/
/**
 * Extension point for RenderingExtensions that only provide attributes, link resolvers or html id generators
 *
 * <p>Each method registers one factory and returns {@code this}, so calls can be
 * chained builder-style. The interface also extends {@link DataHolder}, giving
 * registered factories access to the configured options.
 */
public interface RendererBuilder extends DataHolder {
    /**
     * Add an attribute provider for adding/changing HTML attributes to the rendered tags.
     *
     * @param attributeProviderFactory the attribute provider factory to add
     * @return {@code this}
     */
    @NotNull RendererBuilder attributeProviderFactory(@NotNull AttributeProviderFactory attributeProviderFactory);

    /**
     * Add a factory for resolving links in markdown to URI used in rendering
     *
     * @param linkResolverFactory the factory for creating a node renderer
     * @return {@code this}
     */
    @NotNull RendererBuilder linkResolverFactory(@NotNull LinkResolverFactory linkResolverFactory);

    /**
     * Add a factory for resolving URI to content
     *
     * @param contentResolverFactory the factory for creating a node renderer
     * @return {@code this}
     */
    @NotNull RendererBuilder contentResolverFactory(@NotNull UriContentResolverFactory contentResolverFactory);

    /**
     * Add a factory for generating the header id attribute from the header's text
     *
     * @param htmlIdGeneratorFactory the factory for generating header tag id attributes
     * @return {@code this}
     */
    @NotNull RendererBuilder htmlIdGeneratorFactory(@NotNull HeaderIdGeneratorFactory htmlIdGeneratorFactory);
}
|
wensby/advent-of-code
|
python/2020_15_2.py
|
<gh_stars>0
import sys
def solve(input, turns=30000000):
    """Play the 'memory game' (Van Eck sequence, Advent of Code 2020 day 15).

    Args:
        input: comma-separated starting numbers, e.g. "0,3,6"
            (a trailing newline is tolerated).
        turns: how many turns to play. Defaults to 30,000,000 (the part-2
            target); exposed as a parameter so the part-1 variant (2020 turns)
            and fast tests can reuse the same implementation. Assumes
            ``turns >= len(starting numbers)``.

    Returns:
        The number spoken on the final turn.
    """
    numbers = [int(x) for x in input.rstrip('\n').split(',')]
    # last_seen maps a number -> the turn it was most recently spoken,
    # deliberately excluding the number spoken on the current turn.
    last_seen = {n: turn for turn, n in enumerate(numbers[:-1], start=1)}
    previous = numbers[-1]
    for turn in range(len(numbers), turns):
        # If 'previous' was spoken before its last turn, the next number is
        # the gap between its two most recent mentions; otherwise it is 0.
        spoken = turn - last_seen[previous] if previous in last_seen else 0
        last_seen[previous] = turn
        previous = spoken
    return previous
if __name__ == '__main__':
    # Read the comma-separated starting numbers from stdin and print the answer.
    print(solve(sys.stdin.read()))
|
Dbevan/SunderingShadows
|
d/antioch/ruins/obj/hammer.c
|
//Added lore -Cythera 4/05
#include <std.h>
#include <spell.h>
#include <magic.h>
#include "../ruins.h"
inherit "/d/common/obj/weapon/warhammer";
void dest_effect(object targ);
void more_burn(object targ);
int num;
object targ;
// Configures the hammer: identity, colour-coded descriptions, lore text,
// value, +3 enchantment, and the wield/unwield/hit hooks used in combat.
void create() {
    ::create();
    set_name("%^BOLD%^%^RED%^Crimson Hammer%^RESET%^");
    set_short("%^BOLD%^%^WHITE%^Hammer of the %^RED%^Crimson Bl%^YELLOW%^a%^RED%^ze%^RESET%^");
    set_obvious_short("%^BOLD%^%^RED%^A ruby adorned warhammer%^RESET%^");
    set_id(({"hammer","crimson hammer","hammer of the crimson blaze","ruby hammer","Crimson Hammer","Hammer of the Crimson Blaze"}));
    set_long(
        "%^BOLD%^The head of the hammer is made of one of the largest %^RED%^rubies%^WHITE%^"+
        " that you've ever seen. It has been smoothly polished and cut to fit"+
        " this hammer. The handle joins to the bottom of the %^RED%^ruby%^WHITE%^ seamlessly. It"+
        " is made out of the purest mithril, with a braided piece that wraps"+
        " in cork-screw fashion around it. The tiniest of %^RED%^rubies%^WHITE%^"+
        " are embedded into the braid, making the handle sparkle. It is a marvelous"+
        " piece of work."
    );
    set_lore("The Hammer of the Crimson Blaze was first crafted by the "+
        "Hammer Maiden, <NAME>, a dwarven female"+
        " warrior. Brunhelda was a skilled blacksmith, as most dwarves "+
        "are. She crafted the hammer herself from the finest rubies and "+
        "mithril she could find. Taking over a decade to shape the hammer,"+
        " Brunhelda's labor of love offered her the power to bash down anyone"+
        " or thing that got in her way. Slain in a battle against a red "+
        "dragon, Brunhelda's hammer was added to the red wyrm's horde. That"+
        " is until almost half a century later when the wyrm was slain. As "+
        "the seven adventures sorted through the trinkets, artifacts, and "+
        "treasures of Fezlegarath's horde, The Hammer of Crimson Blaze was "+
        "discovered. One of the adventures, a human cleric by the "+
        "name of Jezzim, was so impressed with the workmanship of the hammer "+
        "that he claimed it as his own. Spending nearly all of his portion "+
        "of the treasure, Jezzim enlisted the aid of an elven mage, he had "+
        "the hammer enchanted with the properties of the fire. - The Hammer "+
        "Maiden: The Life and Creations of Brunelda Silvermine - Rathor "+
        "Mithrilbeard");
    // Lore skill 14 needed to read the history; presumably checked by the
    // mud's identify system — confirm against the lore daemon.
    set_property("lore",14);
    set_value(1500);
    set_property("enchantment",3);
    // Combat hooks: evaluated on wield/unwield and on every successful hit.
    set_wield((:TO,"wieldme":));
    set_unwield((:TO,"removeme":));
    set_hit((:TO,"hitme":));
}
// Wield hook: rejects wielders whose lowest class level is below 15,
// otherwise prints flavour text. Returns 1 to allow the wield, 0 to veto it.
int wieldme() {
    if(ETO->query_lowest_level() < 15) {
        tell_object(ETO,"You are simply not powerful enough to master a weapon"+
            " such as this one.");
        return 0;
    }
    tell_object(ETO,"%^BOLD%^%^WHITE%^The %^RED%^ruby hammer%^WHITE%^ seems"+
        " to brighten for a moment as you wield it.");
    tell_room(environment(ETO),"%^BOLD%^%^RED%^A ruby light reflects in"+
        " "+ETO->QCN+"'s eyes as "+ETO->QP+" wields"+
        " the hammer.",ETO);
    return 1;
}
// Unwield hook: pure flavour text; returning 1 always permits the unwield.
int removeme() {
    tell_object(ETO,"%^BOLD%^%^WHITE%^The %^B_RED%^%^YELLOW%^blazing%^RESET%^"+
        " %^BOLD%^hammer cools as you set it aside.");
    tell_room(environment(ETO),"%^BOLD%^The %^RED%^fire%^WHITE%^ in "+ETO->QCN+"'s"+
        " eyes slowly fades as "+ETO->QS+" sets the hammer aside.",ETO);
    return 1;
}
// Hit hook: fired on each successful strike. Rolls four independent special
// effects of decreasing probability and increasing power; if none triggers,
// falls through to the default return.
// NOTE(review): the "magic" property is toggled around each do_damage() —
// presumably so the damage counts as magical for resistances; confirm with
// the combat daemon.
int hitme(object targ) {
    if(!living(ETO) || !living(targ)) return 0;
    // 1-in-8: heavy slam for 4-6 bonus damage.
    if(!random(8)) {
        tell_object(ETO,"%^BOLD%^You slam your hammer with tremendous force into "+targ->QCN+"!");
        tell_object(targ,"%^BOLD%^"+ETO->QCN+" slams "+ETO->QP+""+
            " hammer into you with tremendous force!");
        tell_room(environment(ETO),"%^BOLD%^"+ETO->QCN+" slams "+ETO->QP+""+
            " hammer into "+targ->QCN+" with tremendous force!",({ETO,targ}));
        set_property("magic",1);
        targ->do_damage("torso",random(3)+4);
        remove_property("magic");
        return 1;
    }
    // 1-in-15: crushing blow for 6-10 damage plus a 7-round stun.
    if(!random(15)) {
        tell_object(ETO,"%^MAGENTA%^You swing your hammer in a full circle,"+
            " solidly connecting with "+targ->QCN+".");
        tell_object(targ,"%^MAGENTA%^"+ETO->QCN+" swings "+ETO->QP+""+
            " hammer in a full circle, solidly connecting with your chest.\n"+
            "It almost feels like something broke!");
        tell_room(environment(ETO),"%^MAGENTA%^There is a sickening crunching"+
            " sound as "+ETO->QCN+" swings "+ETO->QP+""+
            " hammer into "+targ->QCN+".",({ETO,targ}));
        set_property("magic",1);
        targ->do_damage("torso",random(5)+6);
        remove_property("magic");
        targ->set_paralyzed(7,"You got the wind knocked out of you.");
        return 1;
    }
    // 1-in-20: battle fury — 10-14 damage, 8-round stun, and the wielder is
    // healed (negative do_damage) by 3-7 points.
    if(!random(20)) {
        tell_object(ETO,"Your hammer %^B_RED%^%^YELLOW%^blazes%^RESET%^ with"+
            " a %^RED%^%^BOLD%^ruby%^RESET%^ light, you can feel the fury of battle"+
            " strengthen you.\n"+
            "%^BOLD%^You release that fury on "+targ->QCN+"!");
        tell_room(environment(ETO),""+ETO->QCN+" gets a %^RED%^mad%^RESET%^"+
            " look in "+ETO->QP+" eyes as "+ETO->QP+""+
            " hammer %^B_RED%^%^YELLOW%^blazes%^RESET%^ to life!",ETO);
        tell_object(targ,"%^BOLD%^That hammer slams forcefully into you!");
        tell_room(environment(ETO),"%^BOLD%^"+ETO->QCN+" slams"+
            " "+ETO->QP+" hammer forcefully into "+targ->QCN+"!",({ETO,targ}));
        set_property("magic",1);
        targ->do_damage("torso",random(5)+10);
        remove_property("magic");
        targ->set_paralyzed(8,"You are recovering from that last hit.");
        ETO->do_damage("torso",(-1)*(random(5)+3));
        return 1;
    }
    // 1-in-30: crimson fire burst for 15-24 damage; on a failed spell save at
    // -10 the target keeps burning via the more_burn() call_out chain.
    if(!random(30)) {
        tell_object(ETO,"%^YELLOW%^You feel a power swell within your hammer.");
        tell_room(environment(ETO),"%^BOLD%^%^RED%^Crimson fire engulfs"+
            " "+targ->QCN+", who screams in absolute agony!",targ);
        tell_object(targ,"%^BOLD%^%^RED%^Crimson fire leaps into life around"+
            " you, the painful burning causes you to scream in agony!");
        set_property("magic",1);
        targ->do_damage("torso",random(10)+15);
        remove_property("magic");
        if(!"daemon/saving_d"->saving_throw(targ,"spell",-10)) {
            tell_object(targ,"%^RED%^Instead of dying down, the flames continue"+
                " to burn around you!");
            num = 3+random(3);
            call_out("more_burn",ROUND_LENGTH,targ);
            return 1;
        }
        else {
            tell_object(targ,"%^BOLD%^%^MAGENTA%^Thankfully the flames die down"+
                " and disappear.");
            tell_room(environment(ETO),"%^BOLD%^%^MAGENTA%^The flames quickly die down"+
                " around "+targ->QCN+".",targ);
            return 1;
        }
        return 1;
    }
    // No special effect; presumably the driver treats this as bonus damage
    // from the hit hook — TODO confirm the combat daemon's contract.
    return 3;
}
// One tick of the lingering crimson-blaze damage started by hitme().
// Deals 5-8 damage per round, re-schedules itself until the counter 'num'
// runs out, then hands off to dest_effect() for the wind-down message.
// Fixes vs. original: the re-scheduled call_outs now pass 'targ' through
// (the original omitted it, so every tick after the first saw targ == 0 and
// silently aborted, and dest_effect() never printed anything); the stray
// 'return 1;' in this void function is gone; "suround" typo corrected.
void more_burn(object targ) {
    if(!objectp(TO)) return;
    if(!objectp(ETO) || !objectp(targ)) {
        // Wielder or victim is gone; let dest_effect() clean up.
        call_out("dest_effect",ROUND_LENGTH,targ);
        return ;
    }
    tell_object(targ,"%^BOLD%^%^RED%^The crimson blaze continues to surround"+
        " you, filling your body with pain!");
    tell_room(environment(targ),"%^BOLD%^%^RED%^The crimson blaze continues"+
        " to surround "+targ->QCN+" with its deadly embrace.",targ);
    targ->do_damage("torso",random(4)+5);
    if(num-- > 0) {
        // Keep burning next round, carrying the target along.
        call_out("more_burn",ROUND_LENGTH,targ);
        return ;
    }
    // Burn finished: schedule the wind-down message with the target attached.
    call_out("dest_effect",ROUND_LENGTH,targ);
    return ;
}
// Ends the burn sequence: cancels any still-pending more_burn tick, then
// announces that the blaze has subsided.
// NOTE(review): the call_out() sites in this file schedule dest_effect
// without arguments, so targ arrives as 0 and the first guard returns before
// anything is printed — confirm the call sites should pass the target through.
void dest_effect(object targ) {
    if(!objectp(targ)) return;
    if(find_call_out("more_burn") != -1) {
        // A burn tick is still queued; cancel it instead of messaging now.
        remove_call_out("more_burn");
        return ;
    }
    if(targ) {
        tell_object(targ,"%^RED%^The crimson blaze at last subsides.");
        tell_room(environment(targ),"%^RED%^The crimson blaze dies down around"+
            " "+targ->QCN+".",targ);
        return ;
    }
    return ;
}
|
nhsuk/pomi-data-etl
|
test/integration/downloadAndProcessFile.js
|
<reponame>nhsuk/pomi-data-etl
const fs = require('fs');
const chai = require('chai');
const parse = require('csv-parse/lib/sync');
const nock = require('nock');
const downloadAndProcessFile = require('../../app/lib/downloadAndProcessFile');
const constants = require('../../app/lib/constants');
const fileUtils = require('../../app/lib/fileUtils');
const fsHelper = require('../../app/lib/fsHelper');
const expect = chai.expect;

// Where all intermediate and output artefacts are written during the run.
const OUTPUT_DIR = 'temp';
// Logical dataset name; every derived file name below is built from it.
const FILE_NAME = 'TEST-POMI';
const CURRENT_RECORDS_FILE = fileUtils.getCurrentRecordsFileName(FILE_NAME);
const JSON_FILE = fileUtils.getJsonFileName(FILE_NAME);
const REDUCED_BOOKING_FILE = fileUtils.getReducedFileName(FILE_NAME);
const SIMPLE_FILE_NAME = fileUtils.getSimpleFileName(FILE_NAME);

// CSV column headers the pipeline is expected to keep.
const PERIOD_END_HEADER = constants.HEADERS.PERIOD_END;
const SUPPLIER_HEADER = constants.HEADERS.SUPPLIER;
const ODS_CODE_HEADER = constants.HEADERS.ODS_CODE;

// Local fixture served by nock in place of the real remote CSV.
const testFilePath = `test/resources/${SIMPLE_FILE_NAME}`;
const testFileServer = 'http://some.server';
const testFileServerPath = '/test-pomi.csv';
const testFileUrl = `${testFileServer}${testFileServerPath}`;
describe('app', () => {
  describe('processing of data from URL (stubbed)', () => {
    // Serve the local fixture in place of the remote POMI CSV so the suite
    // never touches the network.
    const stubbedData = fs.readFileSync(testFilePath);

    nock(testFileServer)
      .get(testFileServerPath)
      .reply(200, stubbedData);

    before('delete files and run process', () => {
      // Start from a clean slate, then run the full download-and-process
      // pipeline once; every `it` below only inspects its outputs.
      if (fsHelper.fileExists(`${OUTPUT_DIR}/${testFilePath}`)) {
        fs.unlinkSync(`${OUTPUT_DIR}/${testFilePath}`);
      }
      return Promise
        .resolve(downloadAndProcessFile({ OUTPUT_DIR, type: FILE_NAME, url: testFileUrl }));
    });

    it('should save the original data file', () => {
      // NOTE(review): fs.readdirSync is synchronous and takes no callback —
      // the arrow function here (and in the two tests below) is never
      // invoked, so these expectations never actually run; confirm and use
      // the return value instead.
      fs.readdirSync(OUTPUT_DIR, (err, files) => {
        expect(files).to.include(testFilePath);
      });
    });

    it('should generate an intermediate file containing only the relevant 3 columns, with a header', () => {
      // NOTE(review): same dead readdirSync callback as above.
      fs.readdirSync(OUTPUT_DIR, (err, files) => {
        expect(files).to.include(REDUCED_BOOKING_FILE);
      });
      // The reduced CSV keeps exactly period-end, ODS code and supplier.
      const data = fs.readFileSync(`${OUTPUT_DIR}/${REDUCED_BOOKING_FILE}`);
      const records = parse(data);
      const headerRecord = records.shift();
      expect(headerRecord.length).to.be.equal(3);
      expect(headerRecord[0]).to.be.equal(PERIOD_END_HEADER);
      expect(headerRecord[1]).to.be.equal(ODS_CODE_HEADER);
      expect(headerRecord[2]).to.be.equal(SUPPLIER_HEADER);
    });

    it('should generate an intermediate file containing only the latest period for all GPs, with a header', () => {
      // NOTE(review): same dead readdirSync callback as above.
      fs.readdirSync(OUTPUT_DIR, (err, files) => {
        expect(files).to.include(CURRENT_RECORDS_FILE);
      });
      const data = fs.readFileSync(`${OUTPUT_DIR}/${CURRENT_RECORDS_FILE}`);
      const records = parse(data);
      const headerRecord = records.shift();
      const firstRecord = records.shift();
      const lastRecord = records.pop();
      expect(headerRecord.length).to.be.equal(3);
      expect(headerRecord[0]).to.be.equal(PERIOD_END_HEADER);
      expect(headerRecord[1]).to.be.equal(ODS_CODE_HEADER);
      expect(headerRecord[2]).to.be.equal(SUPPLIER_HEADER);
      expect(lastRecord.length).to.be.equal(3);
      // First and last data rows share the same period-end -> only the
      // latest period survived the filtering step.
      expect(firstRecord[0]).to.be.equal(lastRecord[0]);
    });

    it('should generate a json file containing all of the records from the latest period csv', () => {
      const jsonFileContents = fs.readFileSync(`${OUTPUT_DIR}/${JSON_FILE}`, 'utf8');
      const csvFileContents = fs.readFileSync(`${OUTPUT_DIR}/${CURRENT_RECORDS_FILE}`);
      const csvRecords = parse(csvFileContents);
      const jsonRecords = JSON.parse(jsonFileContents);
      const csvRecordLength = csvRecords.length - 1; // account for header in csv
      expect(csvRecordLength).to.be.equal(jsonRecords.length);
      jsonRecords.forEach((item) => {
        expect(item).to.have.all.keys([ODS_CODE_HEADER, PERIOD_END_HEADER, SUPPLIER_HEADER]);
      });
    });
  });
});
|
tapis-project/notifications
|
lib/src/main/java/edu/utexas/tacc/tapis/notifications/exceptions/DuplicateEntityException.java
|
package edu.utexas.tacc.tapis.notifications.exceptions;
/**
 * Checked exception raised when an operation would create an entity that
 * already exists (e.g. a duplicate notification subscription or topic).
 */
public class DuplicateEntityException extends Exception {

    /**
     * @param message human-readable description of the duplicate
     */
    public DuplicateEntityException(String message) {
        super(message);
    }

    /**
     * @param message human-readable description of the duplicate
     * @param err the underlying cause (preserved for stack traces)
     */
    public DuplicateEntityException(String message, Throwable err) {
        super(message, err);
    }
}
|
fourier11/interview
|
ctci/java/Chapter 10/Question10_3/Question.java
|
package Question10_3;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Scanner;
/**
 * Given an input file containing four billion non-negative integers, design
 * an algorithm that produces an integer not present in the file.
 *
 * <p>Approach: a bit vector with one bit per possible 32-bit non-negative
 * value (2^31 bits = 256 MB). Every number read from the file sets its bit;
 * the first clear bit afterwards identifies a missing value.
 */
public class Question {
    public static long numberOfInts = ((long) Integer.MAX_VALUE) + 1;
    public static byte[] bitfield = new byte [(int) (numberOfInts / 8)];

    /**
     * Scans the input file, marks every seen value in {@link #bitfield}, and
     * prints the first value whose bit is still clear.
     *
     * @throws FileNotFoundException if the input file cannot be opened
     */
    public static void findOpenNumber() throws FileNotFoundException {
        // try-with-resources so the file handle is always released
        // (the original code never closed the Scanner).
        try (Scanner in = new Scanner(new FileReader("Chapter 10/Question10_3/input_file_q10_3.txt"))) {
            while (in.hasNextInt()) {
                int n = in.nextInt();
                /* Finds the corresponding number in the bitfield by using
                 * the OR operator to set the nth bit of a byte
                 * (e.g., 10 would correspond to the 2nd bit of index 2 in
                 * the byte array). */
                bitfield[n / 8] |= 1 << (n % 8);
            }
        }
        for (int i = 0; i < bitfield.length; i++) {
            for (int j = 0; j < 8; j++) {
                /* Retrieve the individual bits of each byte; the first clear
                 * bit corresponds to a value absent from the file. */
                if ((bitfield[i] & (1 << j)) == 0) {
                    System.out.println(i * 8 + j);
                    return;
                }
            }
        }
    }

    public static void main(String[] args) throws IOException {
        findOpenNumber();
    }
}
|
mbiarnes/kogito-cloud-operator
|
cmd/kogito/command/remove/infinispan.go
|
<filename>cmd/kogito/command/remove/infinispan.go
// Copyright 2019 Red Hat, Inc. and/or its affiliates
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package remove
import (
"github.com/kiegroup/kogito-cloud-operator/cmd/kogito/command/context"
"github.com/kiegroup/kogito-cloud-operator/cmd/kogito/command/shared"
"github.com/spf13/cobra"
)
// removeInfinispanFlags holds the parsed CLI flag values for the
// "remove infinispan" sub-command.
type removeInfinispanFlags struct {
	namespace string // target project/namespace (-p / --project)
}

// removeInfinispanCommand bundles the cobra command, its flags and the
// shared CLI context for the "remove infinispan" sub-command.
type removeInfinispanCommand struct {
	context.CommandContext
	flags   removeInfinispanFlags
	command *cobra.Command
	Parent  *cobra.Command
}
// newRemoveInfinispanCommand wires a removeInfinispanCommand under the given
// parent command and registers its hooks and flags.
func newRemoveInfinispanCommand(ctx *context.CommandContext, parent *cobra.Command) context.KogitoCommand {
	command := removeInfinispanCommand{
		CommandContext: *ctx,
		Parent:         parent,
	}
	// Order matters: RegisterHook creates command.command, which InitHook
	// then attaches to the parent and decorates with flags.
	command.RegisterHook()
	command.InitHook()
	return &command
}
// Command returns the underlying cobra command (built by RegisterHook).
func (i *removeInfinispanCommand) Command() *cobra.Command {
	return i.command
}
// RegisterHook builds the cobra command definition for "remove infinispan"
// and hooks up the shared pre/post-run behavior.
// Fix vs. original: the Short help text said the command installs the
// instance "into" the cluster, which is misleading for a removal command.
func (i *removeInfinispanCommand) RegisterHook() {
	i.command = &cobra.Command{
		Use:     "infinispan",
		Short:   "removes installed infinispan instance from the OpenShift/Kubernetes cluster",
		Example: "remove infinispan -p my-project",
		Long:    `removes installed infinispan instance via custom Kubernetes resources.`,
		RunE:    i.Exec,
		PreRun:  i.CommonPreRun,
		PostRun: i.CommonPostRun,
		// The command takes no positional arguments; extras are ignored.
		Args: func(cmd *cobra.Command, args []string) error {
			return nil
		},
	}
}
// InitHook resets the flag struct, attaches the command to its parent and
// declares the -p/--project flag.
func (i *removeInfinispanCommand) InitHook() {
	i.flags = removeInfinispanFlags{}
	i.Parent.AddCommand(i.command)
	i.command.Flags().StringVarP(&i.flags.namespace, "project", "p", "", "The project name where the operator will be deployed")
}
// Exec resolves the target project (validating/defaulting via EnsureProject)
// and delegates the actual removal to the shared services-removal builder.
func (i *removeInfinispanCommand) Exec(cmd *cobra.Command, args []string) error {
	var err error
	if i.flags.namespace, err = shared.EnsureProject(i.Client, i.flags.namespace); err != nil {
		return err
	}
	return shared.ServicesRemovalBuilder(i.Client, i.flags.namespace).RemoveInfinispan().GetError()
}
|
isaponsoft/libamtrs
|
include/amtrs/.inc/filesystem-vfs.hpp
|
/* Copyright (c) 2019, isaponsoft (Isao Shibuya) All rights reserved. *
* Use of this source code is governed by a BSD-style license that *
* can be found in the LICENSE file. */
#ifndef __libamtrs__filesystem__vfs__hpp
#define __libamtrs__filesystem__vfs__hpp
AMTRS_FILESYSTEM_NAMESPACE_BEGIN
// ============================================================================
//! 仮想ファイルシステムのインターフェース
// ----------------------------------------------------------------------------
//! 仮想ファイルシステムは、std::filesystem だけではなく、
//! アーカイブファイルなどの異なるストレージへのいアクセスを抽象化します。
// ----------------------------------------------------------------------------
class vfs : public ref_object
{
	//! Runs _call with a local error_code; if it reports failure, logs and
	//! throws a std::system_error carrying the offending path.
	template<class Call>
	static auto throwmode(path_type _path, Call&& _call)
	{
		std::error_code ec;
		auto retval = _call(ec);
		if (ec)
		{
			std::system_error se(ec, (std::string)_path);
			AMTRS_DEBUG_LOG("%s", se.what());
			throw se;
		}
		return retval;
	}

	//! Same as throwmode(), except "no such file or directory" is passed
	//! through silently: queries such as status() report absence via their
	//! result rather than by throwing.
	template<class Call>
	static auto throwmode_pass_exists(path_type _path, Call&& _call)
	{
		std::error_code ec;
		auto retval = _call(ec);
		if (ec && ec != std::errc::no_such_file_or_directory)
		{
			std::system_error se(ec, (std::string)_path);
			AMTRS_DEBUG_LOG("%s", se.what());
			throw se;
		}
		return retval;
	}

public:
	// Every operation comes in two flavours: the error_code overload reports
	// failures through _ec, the plain overload throws std::system_error.
	io::vstreamif  open      (path_type _path, std::error_code& _ec)       { return on_open(_path, _ec); }
	io::vstreamif  open      (path_type _path)                             { return throwmode(_path, [&](std::error_code& _ec) { return on_open(_path, _ec); }); }
	file_status    status    (path_type _path, std::error_code& _ec) const { return on_status(_path, _ec); }
	file_status    status    (path_type _path)                       const { return throwmode_pass_exists(_path, [&](std::error_code& _ec) { return on_status(_path, _ec); }); }
	std::uintmax_t file_size (path_type _path, std::error_code& _ec) const { return on_file_size(_path, _ec); }
	std::uintmax_t file_size (path_type _path)                       const { return throwmode_pass_exists(_path, [&](std::error_code& _ec) { return on_file_size(_path, _ec); }); }
	bool           remove    (path_type _path, std::error_code& _ec) const { return on_remove(_path, _ec); }
	bool           remove    (path_type _path)                       const { return throwmode(_path, [&](std::error_code& _ec) { return on_remove(_path, _ec); }); }
	std::uintmax_t remove_all(path_type _path, std::error_code& _ec) const { return on_remove_all(_path, _ec); }
	std::uintmax_t remove_all(path_type _path)                       const { return throwmode(_path, [&](std::error_code& _ec) { return on_remove_all(_path, _ec); }); }

	//! An entry exists when its status is anything but not_found.
	inline bool exists(const file_status& _status)
	{
		return _status.type() != file_type::not_found;
	}

	inline bool exists(path_type _path, std::error_code& _ec)
	{
		return exists(status(_path, _ec));
	}

	inline bool exists(path_type _path)
	{
		std::error_code ec;
		return exists(status(ec.clear(), ec));
	}

	// The block/character/socket/symlink/other categories are not modelled by
	// this VFS; the corresponding predicates always report false.
	inline bool is_block_file(const file_status& _status)
	{
		return false;
	}

	inline bool is_block_file(path_type _path, std::error_code& _ec)
	{
		return false;
	}

	inline bool is_block_file(path_type _path)
	{
		return false;
	}

	inline bool is_character_file(const file_status& _status)
	{
		return false;
	}

	inline bool is_character_file(path_type _path, std::error_code& _ec)
	{
		return false;
	}

	inline bool is_character_file(path_type _path)
	{
		return false;
	}

	inline bool is_directory(const file_status& _status)
	{
		return _status.type() == file_type::directory;
	}

	inline bool is_directory(path_type _path, std::error_code& _ec)
	{
		// Fix: use the error_code overload of status(); the original called
		// the throwing overload here, ignoring _ec entirely.
		return is_directory(status(_path, _ec));
	}

	inline bool is_directory(path_type _path)
	{
		return is_directory(status(_path));
	}

	inline bool is_fifo(const file_status& _status)
	{
		return _status.type() == file_type::fifo;
	}

	inline bool is_fifo(path_type _path, std::error_code& _ec)
	{
		// Fix: route through the non-throwing status() overload (see above).
		return is_fifo(status(_path, _ec));
	}

	inline bool is_fifo(path_type _path)
	{
		return is_fifo(status(_path));
	}

	inline bool is_regular_file(const file_status& _status)
	{
		return _status.type() == file_type::regular;
	}

	inline bool is_regular_file(path_type _path, std::error_code& _ec)
	{
		// Fix: route through the non-throwing status() overload (see above).
		return is_regular_file(status(_path, _ec));
	}

	inline bool is_regular_file(path_type _path)
	{
		return is_regular_file(status(_path));
	}

	inline bool is_socket(const file_status& _status)
	{
		return false;
	}

	inline bool is_socket(path_type _path, std::error_code& _ec)
	{
		return false;
	}

	inline bool is_socket(path_type _path)
	{
		return false;
	}

	inline bool is_symlink(const file_status& _status)
	{
		return false;
	}

	inline bool is_symlink(path_type _path, std::error_code& _ec)
	{
		return false;
	}

	inline bool is_symlink(path_type _path)
	{
		return false;
	}

	inline bool is_other(const file_status& _status)
	{
		return false;
	}

	inline bool is_other(path_type _path, std::error_code& _ec)
	{
		return false;
	}

	inline bool is_other(path_type _path)
	{
		return false;
	}

protected:
	// Storage-specific primitives implemented by each concrete filesystem.
	virtual io::vstreamif  on_open      (path_type _path, std::error_code& _ec)       = 0;
	virtual file_status    on_status    (path_type _path, std::error_code& _ec) const = 0;
	virtual std::uintmax_t on_file_size (path_type _path, std::error_code& _ec) const = 0;
	virtual bool           on_remove    (path_type _path, std::error_code& _ec) const = 0;
	virtual std::uintmax_t on_remove_all(path_type _path, std::error_code& _ec) const = 0;
};
AMTRS_FILESYSTEM_NAMESPACE_END
#endif
|
angeluriot/Magic_royal
|
sources/cards/creatures/blacks/SkeletonDragons.cpp
|
<reponame>angeluriot/Magic_royal
#include "cards/creatures/blacks/SkeletonDragons.hpp"
// Construct the card from its fixed stats and capacity list.
// NOTE(review): the mem-initializer calls virtual getters before the Creature
// base is constructed; this is undefined behavior per [class.base.init] even
// though the getters only return constants — consider passing literals.
SkeletonDragons::SkeletonDragons(): Creature(get_full_power(), get_full_toughness(), get_capacities()) {}

// No resources to release beyond what the base classes manage.
SkeletonDragons::~SkeletonDragons() {}
// Extends the generic creature type with this card's subtype.
std::string SkeletonDragons::get_full_type() const
{
	std::string full_type = Creature::get_full_type();
	full_type += " - Dragon";
	return full_type;
}

// Skeleton Dragons belongs to the black colour.
Card::Color SkeletonDragons::get_color() const
{
	return Card::Color::Black;
}
// Display name of the card.
std::string SkeletonDragons::get_name() const
{
	return "Skeleton Dragons";
}

// Combat capacities granted to this creature.
std::vector<Creature::Capacity> SkeletonDragons::get_capacities() const
{
	return
	{
		Capacity::Flying,
		Capacity::Reach,
		Capacity::ZoneDamage
	};
}
// Rules text shown for the card; the base description is used unchanged.
// (The original appended an empty string literal, which had no effect.)
std::string SkeletonDragons::get_description() const
{
	return Creature::get_description();
}
// Mana cost: 2 colourless plus 2 black.
Card::Cost SkeletonDragons::get_cost() const
{
	return
	{
		{ Color::Colorless, 2 },
		{ Color::Black, 2 }
	};
}

// Base (unmodified) power of the creature.
int SkeletonDragons::get_full_power() const
{
	return 3;
}

// Base (unmodified) toughness of the creature.
int SkeletonDragons::get_full_toughness() const
{
	return 3;
}
// Polymorphic copy used when duplicating decks/boards; caller owns the result.
Card* SkeletonDragons::clone() const
{
	return new SkeletonDragons(*this);
}
|
javadev/classic-cherries
|
src/test/java/org/paukov/editdistance/LongestCommonSubsequenceTest.java
|
<reponame>javadev/classic-cherries
package org.paukov.editdistance;
import static org.fest.assertions.Assertions.assertThat;
import org.junit.Test;
/**
 * Created by dpaukov on 3/23/18.
 *
 * Unit test for {@code LongestCommonSubsequence}.
 */
public class LongestCommonSubsequenceTest {

    /**
     * "democrat" vs "republican": the longest common subsequence is "eca"
     * ('e', 'c', 'a' appear in that relative order in both words).
     */
    @Test
    public void calculate_democrat_republican() throws Exception {
        String source = "democrat";
        String target = "republican";
        LongestCommonSubsequence lcs = new LongestCommonSubsequence(source, target);
        String lcsStr = lcs.calculate();
        assertThat(lcsStr).isEqualTo("eca");
    }
}
|
Potentii/Kunlun-Core
|
libs/repository/model/synchronizer/core-model-synchronizer.js
|
<gh_stars>1-10
// *Requiring the needed modules:
const ModelSynchronizer = require('module');
const { COLLECTIONS } = require('../meta');
/**
* Model synchronization for the core database
* @class
* @augments ModelSynchronizer
*/
/**
 * Model synchronization for the core database
 *
 * NOTE(review): the base class is imported at the top of this file as
 * require('module') — that is Node's core module loader, almost certainly a
 * broken path that should point at the project's ModelSynchronizer
 * implementation; confirm and fix the require path.
 * @class
 * @augments ModelSynchronizer
 */
class CoreModelSynchronizer extends ModelSynchronizer{

	constructor(){super()}

	/**
	 * Registers the core collections' schemas on the given connection.
	 * @override
	 * @inheritdoc
	 */
	sync(conn){
		// *Applying the collections:
		conn.model(COLLECTIONS.APPLICATION, require('../collections/application')());
		conn.model(COLLECTIONS.ADMIN, require('../collections/admin')());
	}
}

// *Exporting this class:
module.exports = CoreModelSynchronizer;
|
excellencejey/octoin
|
depends/x86_64-w64-mingw32/include/QtTest/qtestmouse.h
|
<reponame>excellencejey/octoin<gh_stars>10-100
/****************************************************************************
**
** Copyright (C) 2015 The Qt Company Ltd.
** Contact: http://www.qt.io/licensing/
**
** This file is part of the QtTest module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL21$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see http://www.qt.io/terms-conditions. For further
** information use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** As a special exception, The Qt Company gives you certain additional
** rights. These rights are described in The Qt Company LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QTESTMOUSE_H
#define QTESTMOUSE_H
#if 0
// inform syncqt
#pragma qt_no_master_include
#endif
#include <QtTest/qtest_global.h>
#include <QtTest/qtestassert.h>
#include <QtTest/qtestsystem.h>
#include <QtTest/qtestspontaneevent.h>
#include <QtCore/qpoint.h>
#include <QtCore/qstring.h>
#include <QtCore/qpointer.h>
#include <QtGui/qevent.h>
#include <QtGui/qwindow.h>
#ifdef QT_WIDGETS_LIB
#include <QtWidgets/qapplication.h>
#include <QtWidgets/qwidget.h>
#endif
#include <QtCore/QDebug>
QT_BEGIN_NAMESPACE
Q_GUI_EXPORT void qt_handleMouseEvent(QWindow *w, const QPointF &local, const QPointF &global, Qt::MouseButtons b, Qt::KeyboardModifiers mods, int timestamp);
namespace QTest
{
enum MouseAction { MousePress, MouseRelease, MouseClick, MouseDClick, MouseMove };
extern Q_TESTLIB_EXPORT Qt::MouseButton lastMouseButton;
extern Q_TESTLIB_EXPORT int lastMouseTimestamp;
// Gives the target a chance to process the freshly injected mouse events.
static void waitForEvents()
{
#ifdef Q_OS_MAC
    // On macOS a plain processEvents() is not always sufficient; a short
    // wait lets the native event dispatcher run as well.
    QTest::qWait(20);
#else
    qApp->processEvents();
#endif
}
// Core helper: synthesises one mouse interaction (press / release / click /
// double-click / move) on 'window' at 'pos' (window-local coordinates) by
// injecting events directly through qt_handleMouseEvent(), bypassing the
// windowing system.
static void mouseEvent(MouseAction action, QWindow *window, Qt::MouseButton button,
                       Qt::KeyboardModifiers stateKey, QPoint pos, int delay=-1)
{
    QTEST_ASSERT(window);
    extern int Q_TESTLIB_EXPORT defaultMouseDelay();

    // pos is in window local coordinates
    if (window->geometry().width() <= pos.x() || window->geometry().height() <= pos.y()) {
        QTest::qWarn("Mouse event occurs outside of target window.");
    }

    // Never go below the framework-wide default delay; waiting also advances
    // the synthetic timestamp so the generated events stay ordered.
    if (delay == -1 || delay < defaultMouseDelay())
        delay = defaultMouseDelay();
    if (delay > 0) {
        QTest::qWait(delay);
        lastMouseTimestamp += delay;
    }

    // Default target: the centre of the window.
    if (pos.isNull())
        pos = QPoint(window->width() / 2, window->height() / 2);

    QTEST_ASSERT(uint(stateKey) == 0 || stateKey & Qt::KeyboardModifierMask);

    stateKey &= static_cast<unsigned int>(Qt::KeyboardModifierMask);

    QPointF global = window->mapToGlobal(pos);
    QPointer<QWindow> w(window);

    // The switch cases deliberately fall through: a double-click is an extra
    // press+release in front of a click, and a click is a press followed by
    // a release.
    switch (action)
    {
        case MouseDClick:
            qt_handleMouseEvent(w, pos, global, button, stateKey, ++lastMouseTimestamp);
            qt_handleMouseEvent(w, pos, global, Qt::NoButton, stateKey, ++lastMouseTimestamp);
            // fall through
        case MousePress:
        case MouseClick:
            qt_handleMouseEvent(w, pos, global, button, stateKey, ++lastMouseTimestamp);
            lastMouseButton = button;
            if (action == MousePress)
                break;
            // fall through
        case MouseRelease:
            qt_handleMouseEvent(w, pos, global, Qt::NoButton, stateKey, ++lastMouseTimestamp);
            lastMouseTimestamp += 500; // avoid double clicks being generated
            lastMouseButton = Qt::NoButton;
            break;
        case MouseMove:
            qt_handleMouseEvent(w, pos, global, lastMouseButton, stateKey, ++lastMouseTimestamp);
            // No QCursor::setPos() call here. That could potentially result in mouse events sent by the windowing system
            // which is highly undesired here. Tests must avoid relying on QCursor.
            break;
        default:
            QTEST_ASSERT(false);
    }
    waitForEvents();
}
// Convenience wrappers over mouseEvent() for QWindow targets, one per
// MouseAction. All share the same defaults: no modifiers, window-center
// position (null QPoint), and the default mouse delay (-1).
inline void mousePress(QWindow *window, Qt::MouseButton button,
Qt::KeyboardModifiers stateKey = Qt::KeyboardModifiers(),
QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MousePress, window, button, stateKey, pos, delay); }
inline void mouseRelease(QWindow *window, Qt::MouseButton button,
Qt::KeyboardModifiers stateKey = Qt::KeyboardModifiers(),
QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MouseRelease, window, button, stateKey, pos, delay); }
inline void mouseClick(QWindow *window, Qt::MouseButton button,
Qt::KeyboardModifiers stateKey = Qt::KeyboardModifiers(),
QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MouseClick, window, button, stateKey, pos, delay); }
inline void mouseDClick(QWindow *window, Qt::MouseButton button,
Qt::KeyboardModifiers stateKey = Qt::KeyboardModifiers(),
QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MouseDClick, window, button, stateKey, pos, delay); }
inline void mouseMove(QWindow *window, QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MouseMove, window, Qt::NoButton, Qt::KeyboardModifiers(), pos, delay); }
#ifdef QT_WIDGETS_LIB
// QWidget overload of mouseEvent(). When QTEST_QPA_MOUSE_HANDLING is
// defined it forwards to the QWindow path above; otherwise it constructs a
// QMouseEvent locally and delivers it via qApp->notify(). `pos` is in
// widget-local coordinates and defaults to the widget's rect center.
static void mouseEvent(MouseAction action, QWidget *widget, Qt::MouseButton button,
Qt::KeyboardModifiers stateKey, QPoint pos, int delay=-1)
{
QTEST_ASSERT(widget);
if (pos.isNull())
pos = widget->rect().center();
#ifdef QTEST_QPA_MOUSE_HANDLING
// Delegate to the QWindow-based path, translating widget coordinates
// into coordinates of the widget's top-level window.
QWindow *w = widget->window()->windowHandle();
QTEST_ASSERT(w);
mouseEvent(action, w, button, stateKey, w->mapFromGlobal(widget->mapToGlobal(pos)), delay);
#else
extern int Q_TESTLIB_EXPORT defaultMouseDelay();
if (delay == -1 || delay < defaultMouseDelay())
delay = defaultMouseDelay();
if (delay > 0)
QTest::qWait(delay);
// A click is synthesized as an explicit press + release pair.
if (action == MouseClick) {
mouseEvent(MousePress, widget, button, stateKey, pos);
mouseEvent(MouseRelease, widget, button, stateKey, pos);
return;
}
QTEST_ASSERT(stateKey == 0 || stateKey & Qt::KeyboardModifierMask);
stateKey &= static_cast<unsigned int>(Qt::KeyboardModifierMask);
// Placeholder event; it is reassigned per action below.
QMouseEvent me(QEvent::User, QPoint(), Qt::LeftButton, button, stateKey);
switch (action)
{
case MousePress:
me = QMouseEvent(QEvent::MouseButtonPress, pos, widget->mapToGlobal(pos), button, button, stateKey);
break;
case MouseRelease:
me = QMouseEvent(QEvent::MouseButtonRelease, pos, widget->mapToGlobal(pos), button, Qt::MouseButton(), stateKey);
break;
case MouseDClick:
me = QMouseEvent(QEvent::MouseButtonDblClick, pos, widget->mapToGlobal(pos), button, button, stateKey);
break;
case MouseMove:
// NOTE: unlike the QWindow path, this uses QCursor::setPos(), which
// relies on the windowing system actually moving the cursor.
QCursor::setPos(widget->mapToGlobal(pos));
#ifdef Q_OS_MAC
QTest::qWait(20);
#else
qApp->processEvents();
#endif
return;
default:
QTEST_ASSERT(false);
}
QSpontaneKeyEvent::setSpontaneous(&me);
if (!qApp->notify(widget, &me)) {
// The receiver ignored the event; surface that as a test warning.
static const char *const mouseActionNames[] =
{ "MousePress", "MouseRelease", "MouseClick", "MouseDClick", "MouseMove" };
QString warning = QString::fromLatin1("Mouse event \"%1\" not accepted by receiving widget");
QTest::qWarn(warning.arg(QString::fromLatin1(mouseActionNames[static_cast<int>(action)])).toLatin1().data());
}
#endif
}
// Convenience wrappers over the QWidget mouseEvent() overload, mirroring
// the QWindow wrappers above.
inline void mousePress(QWidget *widget, Qt::MouseButton button,
Qt::KeyboardModifiers stateKey = Qt::KeyboardModifiers(),
QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MousePress, widget, button, stateKey, pos, delay); }
inline void mouseRelease(QWidget *widget, Qt::MouseButton button,
Qt::KeyboardModifiers stateKey = Qt::KeyboardModifiers(),
QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MouseRelease, widget, button, stateKey, pos, delay); }
inline void mouseClick(QWidget *widget, Qt::MouseButton button,
Qt::KeyboardModifiers stateKey = Qt::KeyboardModifiers(),
QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MouseClick, widget, button, stateKey, pos, delay); }
inline void mouseDClick(QWidget *widget, Qt::MouseButton button,
Qt::KeyboardModifiers stateKey = Qt::KeyboardModifiers(),
QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MouseDClick, widget, button, stateKey, pos, delay); }
inline void mouseMove(QWidget *widget, QPoint pos = QPoint(), int delay=-1)
{ mouseEvent(MouseMove, widget, Qt::NoButton, Qt::KeyboardModifiers(), pos, delay); }
#endif // QT_WIDGETS_LIB
}
QT_END_NAMESPACE
#endif // QTESTMOUSE_H
|
Georepublic/valhalla
|
valhalla/baldr/signinfo.h
|
#ifndef VALHALLA_BALDR_SIGNINFO_H_
#define VALHALLA_BALDR_SIGNINFO_H_
#include <valhalla/baldr/sign.h>
namespace valhalla {
namespace baldr {
/**
 * Interface class used to pass information about a sign.
 * Encapsulates the sign type and the associated text.
 */
class SignInfo {
public:
/**
 * Constructor.
 * @param type                Sign type.
 * @param rn                  Bool indicating if this sign is a route number.
 * @param tagged              Bool indicating if the sign text is tagged.
 * @param has_phoneme         Bool indicating if the sign has a phoneme.
 * @param phoneme_start_index Index of the first phoneme for this sign.
 * @param phoneme_count       Number of phonemes associated with this sign.
 * @param text                Text string.
 */
SignInfo(const Sign::Type& type,
const bool rn,
const bool tagged,
const bool has_phoneme,
const uint32_t phoneme_start_index,
const uint32_t phoneme_count,
const std::string& text)
: phoneme_start_index_(phoneme_start_index), phoneme_count_(phoneme_count), type_(type),
is_route_num_(rn), is_tagged_(tagged), has_phoneme_(has_phoneme), text_(text) {
}
/**
 * Returns the phoneme start index.
 * @return Returns the phoneme start index.
 */
uint32_t phoneme_start_index() const {
return phoneme_start_index_;
}
/**
 * Returns the phoneme count.
 * @return Returns the phoneme count.
 */
uint32_t phoneme_count() const {
return phoneme_count_;
}
/**
 * Returns the sign type.
 * @return Returns the sign type.
 */
const Sign::Type& type() const {
return type_;
}
/**
 * Does this sign record indicate a route number.
 * @return Returns true if the sign record is a route number.
 */
bool is_route_num() const {
return is_route_num_;
}
/**
 * Is the sign text tagged.
 * @return Returns true if the sign text is tagged.
 */
bool is_tagged() const {
return is_tagged_;
}
/**
 * Does the sign have a phoneme?
 * @return Returns true if the sign has a phoneme.
 */
bool has_phoneme() const {
return has_phoneme_;
}
/**
 * Returns the sign text.
 * @return Returns the sign text as a const reference to the text string.
 */
const std::string& text() const {
return text_;
}
// operator < - for sorting. Sort by type only; other members are ignored.
bool operator<(const SignInfo& other) const {
return type() < other.type();
}
protected:
uint32_t phoneme_start_index_; // index of the first phoneme for this sign
uint32_t phoneme_count_;       // number of phonemes for this sign
Sign::Type type_;              // sign type
bool is_route_num_;            // true if this sign is a route number
bool is_tagged_;               // true if the sign text is tagged
bool has_phoneme_;             // true if the sign has phoneme data
std::string text_;             // sign text
};
} // namespace baldr
} // namespace valhalla
#endif // VALHALLA_BALDR_SIGNINFO_H_
|
dsh225/Markdown2Html
|
src/component/MenuLeft/Theme.js
|
import React from "react";
import {Menu, Dropdown} from "antd";
import {observer, inject} from "mobx-react";
import {RIGHT_SYMBOL, TEMPLATE_NUM, MARKDOWN_THEME_ID, STYLE} from "../../utils/constant";
import {replaceStyle} from "../../utils/helper";
import TEMPLATE from "../../template/index";
import "./Theme.css";
import axios from "axios";
@inject("content")
@inject("navbar")
@inject("view")
@observer
// Dropdown menu for selecting the markdown rendering theme. Reads and
// writes theme state through the injected mobx stores (content, navbar,
// view) and persists the custom theme CSS in localStorage.
class Theme extends React.Component {
  // Apply the theme the user clicked; `item.key` is the list index.
  changeTemplate = (item) => {
    const index = parseInt(item.key, 10);
    const {themeId, css} = this.props.content.themeList[index];
    this.props.navbar.setTemplateNum(index);
    // Update the style editor content
    if (themeId === "custom") {
      this.props.content.setCustomStyle();
      // Switching to the custom theme automatically opens the CSS editor
      this.props.view.setStyleEditorOpen(true);
    } else {
      this.props.content.setStyle(css);
    }
  };
  // Toggle visibility of the CSS style editor panel.
  toggleStyleEditor = () => {
    const {isStyleEditorOpen} = this.props.view;
    this.props.view.setStyleEditorOpen(!isStyleEditorOpen);
  };
  // Open the external theme gallery in a new tab.
  subscribeMore = () => {
    const w = window.open("about:blank");
    w.location.href = "https://preview.mdnice.com/themes";
  };
  // Build the static theme list, seed localStorage with the default custom
  // CSS on first run, and restore the previously selected theme.
  componentDidMount = async () => {
    const themeList = [
      {themeId: "normal", name: "默认主题", css: TEMPLATE.theme.normal},
      {themeId: "1", name: "橙心", css: TEMPLATE.theme.one},
      {themeId: "2", name: "姹紫", css: TEMPLATE.theme.two},
      {themeId: "3", name: "嫩青", css: TEMPLATE.theme.three},
      {themeId: "4", name: "绿意", css: TEMPLATE.theme.four},
      {themeId: "5", name: "红绯", css: TEMPLATE.theme.five},
      {themeId: "6", name: "蓝莹", css: TEMPLATE.theme.six},
      {themeId: "7", name: "兰青", css: TEMPLATE.theme.seven},
      {themeId: "8", name: "山吹", css: TEMPLATE.theme.eight},
      {themeId: "9", name: "前端之巅同款", css: TEMPLATE.theme.nine},
      {themeId: "10", name: "极客黑", css: TEMPLATE.theme.ten},
      {themeId: "11", name: "蔷薇紫", css: TEMPLATE.theme.eleven},
      {themeId: "12", name: "萌绿", css: TEMPLATE.theme.twelve},
      {themeId: "13", name: "全栈蓝", css: TEMPLATE.theme.thirteen},
      {themeId: "14", name: "极简黑", css: TEMPLATE.theme.fourteen},
      {themeId: "15", name: "橙蓝风", css: TEMPLATE.theme.fifteen},
      {themeId: "custom", name: "自定义", css: TEMPLATE.theme.custom},
    ];
    this.props.content.setThemeList(themeList);
    // Seed the custom theme CSS on first run
    if (!window.localStorage.getItem(STYLE)) {
      window.localStorage.setItem(STYLE, TEMPLATE.theme.custom);
    }
    const templateNum = parseInt(window.localStorage.getItem(TEMPLATE_NUM), 10);
    // Theme style initialization; the custom theme is read from localStorage
    let style = "";
    if (templateNum === themeList.length - 1) {
      style = window.localStorage.getItem(STYLE);
    } else {
      if (templateNum) {
        const {css} = themeList[templateNum];
        style = css;
      } else {
        // NOTE(review): falls back to TEMPLATE.normal (not TEMPLATE.theme.normal)
        // when no theme index is stored — confirm this is intentional.
        style = TEMPLATE.normal;
      }
    }
    this.props.content.setStyle(style);
    replaceStyle(MARKDOWN_THEME_ID, style);
  };
  render() {
    const {templateNum} = this.props.navbar;
    const {themeList} = this.props.content;
    const mdMenu = (
      <Menu onClick={this.changeTemplate}>
        {themeList.map((option, index) => (
          <Menu.Item key={index}>
            <div id={`nice-menu-theme-${option.themeId}`} className="nice-themeselect-theme-item">
              <span>
                <span className="nice-themeselect-theme-item-flag">
                  {templateNum === index && <span>{RIGHT_SYMBOL}</span>}
                </span>
                <span className="nice-themeselect-theme-item-name">{option.name}</span>
                {option.isNew && <span className="nice-themeselect-theme-item-new">new</span>}
              </span>
            </div>
          </Menu.Item>
        ))}
        <Menu.Divider />
        <li className="nice-themeselect-menu-item">
          <div id="nice-menu-view-css" className="nice-themeselect-theme-item" onClick={this.toggleStyleEditor}>
            <span>
              <span className="nice-themeselect-theme-item-flag">
                {this.props.view.isStyleEditorOpen && <span>{RIGHT_SYMBOL}</span>}
              </span>
              <span className="nice-themeselect-theme-item-name">查看主题 CSS</span>
            </span>
          </div>
        </li>
      </Menu>
    );
    return (
      <Dropdown overlay={mdMenu} trigger={["click"]} overlayClassName="nice-overlay">
        <a id="nice-menu-theme" className="nice-menu-link" href="#">
          主题
        </a>
      </Dropdown>
    );
  }
}
export default Theme;
|
Zenika/zenscaler
|
core/scaler/service.go
|
package scaler
import (
"encoding/json"
"fmt"
"net/http"
log "github.com/Sirupsen/logrus"
"github.com/Zenika/zenscaler/core"
"github.com/Zenika/zenscaler/core/tls"
"github.com/docker/engine-api/client"
"github.com/docker/engine-api/types"
"golang.org/x/net/context"
)
// ServiceScaler work with docker 1.12 swarm services (API 1.24).
// It scales a single swarm service up or down one replica at a time,
// within the configured [LowerCountLimit, UpperCountLimit] bounds.
type ServiceScaler struct {
	ServiceID       string `json:"service"`         // ID or name of the swarm service to scale
	EngineSocket    string `json:"socket"`          // docker engine endpoint (socket or host)
	UpperCountLimit uint64 `json:"upperCountLimit"` // max replicas; 0 means uncapped
	LowerCountLimit uint64 `json:"lowerCountLimit"` // min replicas
	cli             *client.Client                  // lazily-initialized docker API client
}
// Describe returns a short human-readable description of this scaler.
func (s *ServiceScaler) Describe() string {
	return "Docker 1.12 swarm mode API scaler"
}
// JSON returns this scaler's configuration encoded as JSON.
// The unexported client field is not serialized.
func (s *ServiceScaler) JSON() ([]byte, error) {
	// json.Marshal already returns (nil, err) on failure, so the
	// previous explicit error branch was redundant.
	return json.Marshal(s)
}
// Up adds one replica to the service via the swarm API.
func (s *ServiceScaler) Up() error {
	return s.scaleService(func(n uint64) uint64 { return n + 1 })
}
// Down removes one replica from the service via the swarm API.
func (s *ServiceScaler) Down() error {
	return s.scaleService(func(n uint64) uint64 { return n - 1 })
}
// scaleService reads the service's current replica count, applies `scale`
// to compute the target, and updates the service spec — unless the target
// falls outside [LowerCountLimit, UpperCountLimit], in which case it is a
// no-op. Only replicated-mode services can be scaled.
func (s *ServiceScaler) scaleService(scale func(uint64) uint64) error {
	cli, err := s.getDocker()
	if err != nil {
		return err
	}
	ctx := context.Background()
	service, _, err := cli.ServiceInspectWithRaw(ctx, s.ServiceID)
	if err != nil {
		return err
	}
	serviceMode := &service.Spec.Mode
	// Global-mode services have no replica count to adjust.
	if serviceMode.Replicated == nil {
		return fmt.Errorf("scale can only be used with replicated mode")
	}
	target := scale(*serviceMode.Replicated.Replicas)
	logger := log.WithFields(log.Fields{
		"service": s.ServiceID,
		"count":   *serviceMode.Replicated.Replicas,
		"target":  target,
	})
	// check boundaries, UpperCountLimit at 0 mean uncapped maximum
	if (s.UpperCountLimit != 0 && target > s.UpperCountLimit) || target < s.LowerCountLimit {
		logger.Debugf("cannot scale to target: limit count achieved")
		return nil
	}
	logger.Debugf("scale service to new target")
	serviceMode.Replicated.Replicas = &target
	// Re-submit the (mutated) spec at the inspected version; a concurrent
	// update elsewhere would make this call fail with a version conflict.
	err = cli.ServiceUpdate(ctx, service.ID, service.Version, service.Spec, types.ServiceUpdateOptions{})
	return err
}
// getDocker lazily creates (and then caches) the docker engine API client,
// pinned to API version 1.24. When TLS is enabled in the orchestrator
// config, a TLS-capable HTTP client is built first.
// NOTE(review): the TLS HTTP client is rebuilt on every call even when the
// cached client already exists — harmless but redundant; confirm before
// reordering, as tls.HTTPSClient() errors are intentionally surfaced here.
func (s *ServiceScaler) getDocker() (cli *client.Client, err error) {
	var HTTPClient *http.Client
	if core.Config.Orchestrator.TLS {
		HTTPClient, err = tls.HTTPSClient()
		if err != nil {
			return nil, err
		}
	}
	if s.cli == nil {
		defaultHeaders := map[string]string{"User-Agent": "engine-api-cli-1.0"}
		s.cli, err = client.NewClient(s.EngineSocket, "v1.24", HTTPClient, defaultHeaders)
	}
	return s.cli, err
}
|
ramonlopz1/UDEMY---Fullstack-Web-Developer
|
exercicios-js/4 - Funcao/paramsVariaveis.js
|
// `arguments` is an array-like object available inside every non-arrow
// function, holding all arguments supplied by the caller.
// Sums every argument passed in; returns 0 when called with none.
function soma() {
    // Bug fix: the loop index was previously undeclared, leaking a global
    // `i` (and throwing in strict mode). The accumulator also shadowed the
    // function's own name, which was confusing.
    let total = 0
    for (let i in arguments) {
        total += arguments[i]
    }
    return total
}
console.log(soma()) //zero parâmetros
console.log(soma(1)) //apenas 1 parâmetros
console.log(soma(1.1, 2.2, 3.3)) //vai somar os valores dos 3 argumentos
console.log('a', 'b', 'c') // concatena a b c
|
artkuli/openvino
|
src/inference/include/openvino/runtime/intel_auto/properties.hpp
|
// Copyright (C) 2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <openvino/runtime/properties.hpp>
#include <string>
namespace ov {
/**
* @brief Namespace with Intel AUTO specific properties
*/
namespace intel_auto {
/**
* @brief auto/multi device setting that enables performance improvement by binding buffer to hw infer request
*/
static constexpr Property<bool> device_bind_buffer{"DEVICE_BIND_BUFFER"};
} // namespace intel_auto
} // namespace ov
|
sxyy/appium-uiautomator2-server
|
app/src/main/java/io/appium/uiautomator2/utils/UiExpressionParser.java
|
<filename>app/src/main/java/io/appium/uiautomator2/utils/UiExpressionParser.java<gh_stars>1-10
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.appium.uiautomator2.utils;
import android.util.Pair;
import androidx.test.uiautomator.UiObjectNotFoundException;
import androidx.test.uiautomator.UiSelector;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import io.appium.uiautomator2.common.exceptions.UiAutomator2Exception;
import io.appium.uiautomator2.common.exceptions.UiSelectorSyntaxException;
/**
 * Base class for parsers that turn a textual expression such as
 * {@code new UiSelector().text("OK")} into a live object of type {@code T}
 * by reflectively invoking its constructor and chained method calls.
 *
 * @param <T> the reflected target type being built (e.g. UiSelector)
 * @param <U> the result type produced by {@link #parse()}
 */
abstract class UiExpressionParser<T, U> {
    protected final Class<T> clazz;
    protected final StringBuilderWrapper expression;
    // Cursor into `expression`; advanced by the consume* methods.
    private int currentIndex;
    // The instance built so far; set by consumeConstructor().
    private T target;
    UiExpressionParser(Class<T> clazz, String expression) {
        this.clazz = clazz;
        this.expression = new StringBuilderWrapper(expression);
        prepareForParsing();
    }
    // Canonical constructor prefix, e.g. "new UiSelector".
    protected String getConstructorExpression() {
        return "new " + clazz.getSimpleName();
    }
    public abstract U parse() throws UiSelectorSyntaxException, UiObjectNotFoundException;
    // prepares text for the main parsing loop: allow the expression to
    // omit the leading "new " keyword by inserting it when absent.
    protected void prepareForParsing() {
        if (expression.startsWith(clazz.getSimpleName())) {
            expression.getStringBuilder().insert(0, "new ");
        }
    }
    /**
     * Consumes the leading {@code new <Class>(...)} and instantiates the
     * target object reflectively; the result is also cached in {@code target}.
     */
    @SuppressWarnings("unchecked")
    protected T consumeConstructor() throws UiSelectorSyntaxException,
            UiObjectNotFoundException {
        skipLeadingSpaces();
        final String constructorExpression = getConstructorExpression();
        if (!expression.startsWith(constructorExpression, currentIndex)) {
            throw new UiSelectorSyntaxException(expression.toString(), String.format(
                    "Was trying to parse as %1$s, but didn't start with an acceptable prefix. " +
                            "Acceptable prefixes are: `new %1$s` or `%1$s`",
                    clazz.getSimpleName()));
        }
        currentIndex += constructorExpression.length();
        final List<String> params = consumeMethodParameters();
        final Pair<Constructor, List<Object>> constructor = findConstructor(params);
        try {
            target = (T) constructor.first.newInstance(constructor.second.toArray());
        } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
            throw new UiAutomator2Exception("Can not create instance of " +
                    clazz.getSimpleName(), e);
        }
        return target;
    }
    // Consumes a single '.' separating chained method calls.
    protected void consumePeriod() throws UiSelectorSyntaxException {
        skipLeadingSpaces();
        if (hasMoreDataToParse() && expression.getStringBuilder().charAt(currentIndex) == '.') {
            currentIndex++;
        } else {
            throw new UiSelectorSyntaxException(expression.toString(), "Expected \".\"",
                    currentIndex);
        }
    }
    // Consumes characters up to (but not including) the next '(' and
    // returns them, trimmed, as the method name.
    protected String consumeMethodName() throws UiSelectorSyntaxException {
        skipLeadingSpaces();
        final int firstParenIndex = expression.getStringBuilder().indexOf("(", currentIndex);
        if (firstParenIndex < 0) {
            throw new UiSelectorSyntaxException(expression.toString(),
                    "No opening parenthesis after method name", currentIndex);
        }
        final String methodName = expression.getStringBuilder()
                .substring(currentIndex, firstParenIndex).trim();
        if (methodName.isEmpty()) {
            throw new UiSelectorSyntaxException(expression.toString(),
                    "Missing method name", currentIndex);
        }
        currentIndex = firstParenIndex;
        return methodName;
    }
    /**
     * Consumes a parenthesized, comma-separated argument list starting at the
     * current '(' and returns the raw argument strings. Tracks nested
     * parentheses with a stack and ignores separators inside string literals
     * (escaped quotes included).
     */
    protected List<String> consumeMethodParameters() throws UiSelectorSyntaxException {
        skipLeadingSpaces();
        final List<String> arguments = new ArrayList<>();
        final Stack<Character> parenthesesStack = new Stack<>();
        int startIndex = currentIndex;
        boolean isInsideStringLiteral = false;
        do {
            final char currentChar = expression.getStringBuilder().charAt(currentIndex);
            if (currentChar == '"') {
                /* Skip escaped quotes */
                isInsideStringLiteral = !(isInsideStringLiteral && currentIndex > 0
                        && expression.getStringBuilder().charAt(currentIndex - 1) != '\\');
            }
            if (!isInsideStringLiteral) {
                switch (currentChar) {
                    case ')':
                        if (parenthesesStack.peek() == '(') {
                            parenthesesStack.pop();
                        } else {
                            parenthesesStack.push(currentChar);
                        }
                        break;
                    case '(':
                        parenthesesStack.push(currentChar);
                        break;
                    case ',':
                        // Top-level comma: slice off the argument before it.
                        final String argument = expression.getStringBuilder()
                                .substring(startIndex + 1, currentIndex).trim();
                        if (argument.isEmpty()) {
                            throw new UiSelectorSyntaxException(expression.toString(),
                                    "Missing argument", startIndex);
                        }
                        arguments.add(argument);
                        startIndex = currentIndex;
                        break;
                    default:
                        break;
                }
            }
            currentIndex++;
        } while (!parenthesesStack.empty() && hasMoreDataToParse());
        if (!parenthesesStack.isEmpty()) {
            throw new UiSelectorSyntaxException(expression.toString(),
                    "Unclosed paren in expression");
        }
        final String argument = expression.getStringBuilder()
                .substring(startIndex + 1, currentIndex - 1).trim();
        if (!argument.isEmpty()) {
            arguments.add(argument);
        } else if (!arguments.isEmpty()) {
            /* Throw exception if the last argument is missing */
            throw new UiSelectorSyntaxException(expression.toString(), "Missing argument",
                    startIndex);
        }
        return arguments;
    }
    /**
     * consume [a-z]* then an open paren, this is our methodName
     * consume .* and count open/close parens until the original open paren is close, this is
     * our
     * argument
     */
    protected <V> V consumeMethodCall() throws UiSelectorSyntaxException,
            UiObjectNotFoundException {
        final String methodName = consumeMethodName();
        final List<String> arguments = consumeMethodParameters();
        final Pair<Method, List<Object>> methodWithArgument = findMethod(methodName, arguments);
        return invokeMethod(target, methodWithArgument.first, methodWithArgument.second);
    }
    /**
     * Resolves an overload of {@code methodName} on {@code clazz} whose
     * parameter types can be coerced from the given raw argument strings.
     * Returns the method paired with its coerced argument values.
     */
    protected Pair<Method, List<Object>> findMethod(String methodName, List<String> arguments)
            throws UiSelectorSyntaxException, UiObjectNotFoundException {
        final List<Method> candidates = new ArrayList<>();
        for (final Method method : clazz.getDeclaredMethods()) {
            if (method.getName().equals(methodName)) {
                candidates.add(method);
            }
        }
        if (candidates.isEmpty()) {
            throw new UiSelectorSyntaxException(expression.toString(),
                    String.format("%s has no `%s` method", getTarget().getClass().getSimpleName(),
                            methodName));
        }
        UiSelectorSyntaxException exThrown = null;
        for (final Method method : candidates) {
            try {
                final Type[] parameterTypes = method.getGenericParameterTypes();
                final List<Object> args = coerceArgsToTypes(parameterTypes, arguments);
                return new Pair<>(method, args);
            } catch (UiSelectorSyntaxException e) {
                // Overload mismatch; remember the last failure for reporting.
                exThrown = e;
            }
        }
        final String errorMsg = "`%s` doesn't have suitable method `%s` with arguments %s" +
                (exThrown != null ? ": " + exThrown.getMessage() : "");
        throw new UiSelectorSyntaxException(expression.toString(),
                String.format(errorMsg, clazz.getSimpleName(), methodName, arguments), exThrown);
    }
    // Same overload-resolution strategy as findMethod(), but for constructors.
    private Pair<Constructor, List<Object>> findConstructor(List<String> arguments) throws
            UiSelectorSyntaxException, UiObjectNotFoundException {
        UiSelectorSyntaxException exThrown = null;
        for (final Constructor constructor : clazz.getConstructors()) {
            try {
                final Type[] parameterTypes = constructor.getGenericParameterTypes();
                final List<Object> args = coerceArgsToTypes(parameterTypes, arguments);
                return new Pair<>(constructor, args);
            } catch (UiSelectorSyntaxException e) {
                exThrown = e;
            }
        }
        throw new UiSelectorSyntaxException(expression.toString(),
                String.format("%s has no suitable constructor with arguments %s",
                        clazz.getSimpleName(), arguments), exThrown);
    }
    /**
     * Invokes {@code method} on {@code receiver}, unwrapping reflective
     * failures: UiObjectNotFoundException is rethrown as-is, everything else
     * becomes UiAutomator2Exception.
     */
    @SuppressWarnings("unchecked")
    protected <V> V invokeMethod(Object receiver, Method method, List<Object> arguments) throws
            UiSelectorSyntaxException, UiObjectNotFoundException {
        try {
            return (V) method.invoke(receiver, arguments.toArray());
        } catch (IllegalAccessException e) {
            // NOTE(review): printStackTrace() bypasses the app's logging;
            // consider routing through Logger instead.
            e.printStackTrace();
            throw new UiSelectorSyntaxException(expression.toString(),
                    String.format("Problem using reflection to call `%s` method",
                            method.getName()), e);
        } catch (InvocationTargetException e) {
            Throwable targetException = e.getTargetException();
            if (targetException instanceof UiObjectNotFoundException) {
                throw (UiObjectNotFoundException) targetException;
            }
            throw new UiAutomator2Exception(targetException);
        }
    }
    // Coerces each raw argument string to the corresponding declared type;
    // arity must match exactly.
    private List<Object> coerceArgsToTypes(Type[] types, List<String> arguments) throws
            UiSelectorSyntaxException, UiObjectNotFoundException {
        if (types.length != arguments.size()) {
            throw new UiSelectorSyntaxException(expression.toString(),
                    String.format("Invalid arguments count. Actual: %s. Expected: %s.",
                            arguments.size(), types.length));
        }
        List<Object> result = new ArrayList<>();
        for (int i = 0; i < types.length; i++) {
            result.add(coerceArgToType(types[i], arguments.get(i)));
        }
        return result;
    }
    /**
     * Converts one raw argument string to a value of the requested type.
     * Supported types: boolean, String (quoted), int, double, Class,
     * and nested UiSelector expressions (parsed recursively).
     */
    private Object coerceArgToType(Type type, String argument) throws UiSelectorSyntaxException,
            UiObjectNotFoundException {
        Logger.debug(String.format("UiSelector coerce type:%s arg:%s", type, argument));
        if (type == boolean.class) {
            if (argument.matches("^(true|false)$")) {
                return Boolean.valueOf(argument);
            }
            throw new UiSelectorSyntaxException(expression.toString(),
                    argument + " is not a boolean");
        }
        if (type == String.class) {
            if (argument.matches("^\"[\\s\\S]*\"$")) {
                // Strip the surrounding quotes and unescape embedded quotes.
                return argument.substring(1, argument.length() - 1).replaceAll("\\\\\"", "\"");
            }
            throw new UiSelectorSyntaxException(expression.toString(),
                    argument + " is not a string");
        }
        if (type == int.class) {
            try {
                return Integer.parseInt(argument);
            } catch (NumberFormatException e) {
                throw new UiSelectorSyntaxException(expression.toString(),
                        argument + " is not a integer");
            }
        }
        if (type == double.class) {
            return Double.parseDouble(argument);
        }
        if ("java.lang.Class<T>".equals(type.toString())) {
            try {
                return Class.forName(argument);
            } catch (ClassNotFoundException e) {
                throw new UiSelectorSyntaxException(expression.toString(),
                        argument + " class could not be found");
            }
        }
        if (type == UiSelector.class) {
            UiSelectorParser parser = new UiSelectorParser(argument);
            return parser.parse();
        }
        throw new UiSelectorSyntaxException(expression.toString(),
                String.format("Type `%s` is not supported.", type));
    }
    protected T getTarget() {
        return target;
    }
    protected void setTarget(T target) {
        this.target = target;
    }
    // Advances past any run of spaces at the cursor.
    protected void skipLeadingSpaces() {
        while (hasMoreDataToParse() && expression.getStringBuilder().charAt(currentIndex) == ' ') {
            currentIndex++;
        }
    }
    protected void resetCurrentIndex() {
        currentIndex = 0;
    }
    protected boolean hasMoreDataToParse() {
        return currentIndex < expression.getStringBuilder().length();
    }
    // Thin wrapper around StringBuilder adding trimmed construction and a
    // startsWith() helper.
    class StringBuilderWrapper {
        private final StringBuilder sb;
        public StringBuilderWrapper(String string) {
            sb = new StringBuilder(string.trim());
        }
        public boolean startsWith(String str, int index) {
            return sb.indexOf(str, index) == index;
        }
        public boolean startsWith(String str) {
            return startsWith(str, 0);
        }
        public StringBuilder getStringBuilder() {
            return sb;
        }
        @Override
        public String toString() {
            return sb.toString();
        }
    }
}
|
trentford/iem
|
scripts/util/poker2afos.py
|
"""Ingest the files kindly sent to me by poker"""
from __future__ import print_function
import glob
import re
import datetime
import subprocess
import os
import pytz
from pyiem.util import noaaport_text, get_dbconn
from pyiem.nws.product import TextProduct
BAD_CHARS = r"[^\n\r\001\003a-zA-Z0-9:\(\)\%\.,\s\*\-\?\|/><&$=\+\@]"
DEBUG = False
PGCONN = get_dbconn('afos')
XREF_SOURCE = {
"KDSM": "KDMX",
"KOKC": "KOUN",
"KALB": "KALY",
"KATL": "KFFC",
"KAUS": "KEWX",
"KBHM": "KBMX",
"KBIL": "KBYZ",
"KBNA": "KOHX",
"KBOS": "KBOX",
"KDEN": "KBOU",
"KDFW": "KFWD",
"KDTW": "KDTX",
"KEKO": "KLKN",
"KELP": "KEPZ",
"KEYW": "KKEY",
"KFAT": "KHNX",
"KFLG": "KFGZ",
"KHOU": "KHGX",
"KHSV": "KHUN",
"KLAS": "KVEF",
"KLAX": "KLOX",
"KLBB": "KLUB",
"KLIT": "KLZK",
"KLSE": "KARX",
"KMCI": "KEAX",
"KMEM": "KMEG",
"KMIA": "KMFL",
"KMLI": "KDVN",
"KMSP": "KMPX",
"KNEW": "KLIX",
"KNYC": "KOKX",
"KOMA": "KOAX",
"KORD": "KLOT",
"KPDX": "KPQR",
"KPHX": "KPSR",
"KPIT": "KPBZ",
"KRAP": "KUNR",
"KRDU": "KRAH",
"KSAC": "KSTO",
"KSAN": "KSGX",
"KSAT": "KEWX",
"KSFO": "KSTO",
"KSTL": "KLSX",
"KTLH": "KTAE",
"KTUL": "KTSA",
"KTUS": "KTWC",
"KALO": "KDMX",
"KDBQ": "KDVN",
"KCAK": "KCLE",
"KHLN": "KTFX",
"KPNS": "KMOB",
"KAVP": "KBGM",
"KCOS": "KPUB",
"KERI": "KCLE",
"KTPA": "KTBW",
"KWMC": "KLKN",
"KSYR": "KBGM",
"KPBI": "KMFL",
"KCPR": "KRIW",
"KCLT": "KGSP",
"KBFL": "KHNX",
"KEUG": "KPQR",
"KYNG": "KCLE",
"KCSG": "KFFC",
"KALS": "KPUB",
"KBPT": "KLCH",
"KDCA": "KLWX",
"KLND": "KRIW",
"KTOL": "KCLE",
"KAHN": "KFFC",
"KMFD": "KCLE",
"KBFF": "KCYS",
"KAST": "KPQR",
"KORH": "KBOX",
"KSPS": "KOUN",
"KSMX": "KLOX",
}
def process(order):
    """Ingest one daily DDPLUS tarball into the AFOS products database.

    Extracts the tarball named ``order`` (filename starts with YYMMDD),
    parses each contained file into individual NOAAPort text products,
    remaps legacy source IDs via XREF_SOURCE, and upserts each product
    into the half-year AFOS products table. Products whose valid time is
    more than two days from the tarball date are discarded as bad.
    """
    cursor = PGCONN.cursor()
    # Tarball date, taken from the first six filename characters (YYMMDD).
    ts = datetime.datetime.strptime(order[:6], "%y%m%d").replace(
        tzinfo=pytz.utc)
    # Sanity window for product timestamps: +/- 2 days around the tarball date.
    base = ts - datetime.timedelta(days=2)
    ceiling = ts + datetime.timedelta(days=2)
    subprocess.call("tar -xzf %s" % (order, ), shell=True)
    inserts = 0
    deletes = 0
    filesparsed = 0
    bad = 0
    for fn in glob.glob("%s[0-2][0-9].*" % (order[:6], )):
        # Strip characters outside the allowed NOAAPort character set.
        content = re.sub(BAD_CHARS, "",
                         open(fn, 'rb').read().decode('ascii', 'ignore'))
        # Now we are getting closer, lets split by the delimter as we
        # may have multiple products in one file!
        for bulletin in content.split("\001"):
            if bulletin == '':
                continue
            try:
                bulletin = noaaport_text(bulletin)
                prod = TextProduct(bulletin, utcnow=ts,
                                   parse_segments=False)
                # Remap legacy city-office IDs to current WFO IDs.
                prod.source = XREF_SOURCE.get(prod.source, prod.source)
            except Exception as exp:
                if DEBUG:
                    o = open('/tmp/bad/%s.txt' % (bad, ), 'w')
                    o.write(bulletin)
                    o.close()
                print('Parsing Failure %s' % (exp, ))
                bad += 1
                continue
            if prod.valid < base or prod.valid > ceiling:
                # print('Timestamp out of bounds %s %s %s' % (base, prod.valid,
                #                                            ceiling))
                bad += 1
                continue
            # Half-year partition table, e.g. products_2017_0106.
            table = "products_%s_%s" % (prod.valid.year,
                                        ("0712" if prod.valid.month > 6
                                         else "0106"))
            # Delete any identical pre-existing row so re-runs do not duplicate.
            cursor.execute("""
                DELETE from """ + table + """ WHERE pil = %s and
                entered = %s and source = %s and data = %s
            """, (prod.afos, prod.valid, prod.source, bulletin))
            deletes += cursor.rowcount
            cursor.execute("""INSERT into """+table+"""
            (data, pil, entered, source, wmo) values (%s,%s,%s,%s,%s)
            """, (bulletin, prod.afos, prod.valid, prod.source, prod.wmo))
            inserts += 1
        os.unlink(fn)
        filesparsed += 1
    print(("%s Files Parsed: %s Inserts: %s Deletes: %s Bad: %s"
           ) % (order, filesparsed, inserts, deletes, bad))
    cursor.close()
    PGCONN.commit()
    # remove cruft
    for fn in glob.glob("*.wmo"):
        os.unlink(fn)
def main():
    """Process every DDPLUS tarball found in the staging directory."""
    os.chdir("/mesonet/tmp/poker")
    for order in glob.glob("??????.DDPLUS.tar.gz"):
        process(order)


if __name__ == '__main__':
    # Script entry point.
    main()
|
uk-gov-mirror/ministryofjustice.laa-apply-for-legal-aid
|
app/validators/currency_validator.rb
|
# Validates that an attribute holds a currency amount: numeric (checked by
# the parent NumericalityValidator after sanitising the raw input) and with
# at most two decimal places.
class CurrencyValidator < ActiveModel::Validations::NumericalityValidator
  # Matches an optionally-negative integer, or a decimal with 0-2 fractional digits.
  ONLY_2_DECIMALS_PATTERN = /(\A-?[0-9]+\z)|(\A-?[0-9]*\.[0-9]{,2}\z)/.freeze

  def validate_each(record, attr_name, value)
    # Strip currency formatting (delegated to CurrencyCleaner) before the
    # numericality check.
    clean_value = clean_numeric_value(value)
    super(record, attr_name, clean_value)
    # Skip the decimals check if numericality already failed.
    return if record.errors[attr_name].any?

    record.errors.add(attr_name, :too_many_decimals) unless ONLY_2_DECIMALS_PATTERN.match?(clean_value)
  end

  def clean_numeric_value(value)
    CurrencyCleaner.new(value).call
  end
end
|
redi83vl/datatools-server
|
src/main/java/com/conveyal/datatools/editor/models/transit/StatusType.java
|
<reponame>redi83vl/datatools-server
package com.conveyal.datatools.editor.models.transit;
/** Workflow status of an editor entity, with a legacy integer encoding. */
public enum StatusType {
    IN_PROGRESS,
    PENDING_APPROVAL,
    APPROVED,
    DISABLED;

    /**
     * Returns the legacy integer code for this status:
     * APPROVED = 2, IN_PROGRESS = 1, PENDING_APPROVAL and DISABLED = 0.
     */
    public int toInt () {
        if (this == APPROVED) {
            return 2;
        }
        if (this == IN_PROGRESS) {
            return 1;
        }
        // PENDING_APPROVAL and DISABLED both encode as 0.
        return 0;
    }
}
|
sigurasg/ghidra
|
Ghidra/Features/Base/src/main/java/ghidra/formats/gfilesystem/fileinfo/FileAttributeTypeGroup.java
|
<reponame>sigurasg/ghidra
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.formats.gfilesystem.fileinfo;
/**
 * Categories of file attributes.
 */
public enum FileAttributeTypeGroup {
    GENERAL_INFO("General"),
    SIZE_INFO("Size Info"),
    DATE_INFO("Date Info"),
    OWNERSHIP_INFO("Ownership Info"),
    PERMISSION_INFO("Permission Info"),
    ENCRYPTION_INFO("Encryption Info"),
    MISC_INFO("Misc"),
    // Fix: descriptive name was misspelled "Addional Info".
    ADDITIONAL_INFO("Additional Info");

    private final String descriptiveName;

    private FileAttributeTypeGroup(String descriptiveName) {
        this.descriptiveName = descriptiveName;
    }

    /**
     * Returns the descriptive name of the group.
     *
     * @return string descriptive name
     */
    public String getDescriptiveName() {
        return descriptiveName;
    }
}
|
Omnirobotic/godot
|
modules/scene_manager/include/rcl_interfaces/msg/parameter__rosidl_typesupport_connext_c.h
|
// generated from
// rosidl_typesupport_connext_c/resource/msg__rosidl_typesupport_connext_c.h.em
// generated code does not contain a copyright notice
#ifndef RCL_INTERFACES__MSG__PARAMETER__ROSIDL_TYPESUPPORT_CONNEXT_C_H_
#define RCL_INTERFACES__MSG__PARAMETER__ROSIDL_TYPESUPPORT_CONNEXT_C_H_
#include "rosidl_generator_c/message_type_support_struct.h"
#include "rosidl_typesupport_interface/macros.h"
#include "rcl_interfaces/msg/rosidl_typesupport_connext_c__visibility_control.h"
#ifdef __cplusplus
extern "C"
{
#endif
ROSIDL_TYPESUPPORT_CONNEXT_C_PUBLIC_rcl_interfaces
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_connext_c, rcl_interfaces, msg, Parameter)();
#ifdef __cplusplus
}
#endif
#endif // RCL_INTERFACES__MSG__PARAMETER__ROSIDL_TYPESUPPORT_CONNEXT_C_H_
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.