_id stringlengths 2 7 | title stringlengths 3 140 | partition stringclasses 3
values | text stringlengths 73 34.1k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
/**
 * Restricts an HBase {@code Scan} or {@code Get} to the column families needed
 * for the requested fetch type. With no fetch type, all families are fetched;
 * with {@code FetchType.KEY_ONLY} a first-key-only filter is installed instead.
 *
 * @param op     an HBase Scan or Get (any other type is silently ignored)
 * @param column optional fetch hints; only the first element is examined
 */
q13700 | HBeanRow.setColumnFilter | train | public static void setColumnFilter(Object op, FetchType... column) {
ArrayList<byte[]> columns = new ArrayList<>();
// every row carries the dummy family (HBase rows cannot exist without a column)
columns.add(DUMMY_COLUMN_FAMILY);
if (column.length == 0) {
// default behaviour: fetch every known family
columns.add(PROP_COLUMN_FAMILY);
columns.add(PRED_COLUMN_FAMILY);
columns.add(REF_COLUMN_FAMILY);
columns.add(SINGLETON_COLUMN_FAMILY);
columns.add(HBeanKeyValue.BEAN_COLUMN_FAMILY);
} else if (FetchType.KEY_ONLY == column[0]) {
// key-only fetch: return just the first key-value of each row
final FilterList list = new FilterList();
list.addFilter(new FirstKeyOnlyFilter());
if (op instanceof Scan) {
((Scan) op).setFilter(list);
} else if (op instanceof Get) {
((Get) op).setFilter(list);
}
}
// register the selected families on whichever operation type we were given
for (byte[] familiy : columns) {
if (op instanceof Scan) {
((Scan) op).addFamily(familiy);
} else if (op instanceof Get) {
((Get) op).addFamily(familiy);
}
}
} | java | {
"resource": ""
} |
/**
 * Converts {@code source} to {@code targetclass} using the best registered
 * converter. Results of converter lookup are cached per (source, target) pair.
 *
 * @param source      the value to convert; null yields null
 * @param targetclass the desired type
 * @return the converted value, or null if source was null
 * @throws ConversionException if no registered converter matches
 */
q13701 | Conversion.convert | train | public <T> T convert(final Object source, final Class<T> targetclass) {
if (source == null) {
return null;
}
final Class<?> sourceclass = source.getClass();
// fast path: String -> primitive handled without a registered converter
if (targetclass.isPrimitive() && String.class.isAssignableFrom(sourceclass)) {
return (T) parsePrimitive(source.toString(), targetclass);
}
final int sourceId = ids.getId(sourceclass);
final int targetId = ids.getId(targetclass);
final SourceTargetPairKey key = new SourceTargetPairKey(sourceId, targetId);
// cached converter from a previous lookup for this exact pair
Converter converter = cache.get(key);
if (converter != null) {
return (T) converter.convert(source, targetclass);
}
// collect every converter whose source AND target types match
final LinkedList<SourceTargetPairMatch> matches = new LinkedList<>();
for (SourceTargetPair pair : converters.values()) {
SourceTargetPairMatch match = pair.match(sourceclass, targetclass);
if (match.matchesSource() && match.matchesTarget()) {
matches.add(match);
}
}
if (matches.size() == 0) {
throw new ConversionException("No suitable converter found for target class ["
+ targetclass.getName() + "] and source value [" + sourceclass.getName()
+ "]. The following converters are available [" + converters.keySet() + "]");
}
// pick the converter with the most specific target match and cache it
Collections.sort(matches, SourceTargetPairMatch.bestTargetMatch());
converter = matches.get(0).pair.converter;
cache.put(key, converter);
return (T) converter.convert(source, targetclass);
} | java | {
"resource": ""
} |
/**
 * Recursively collects all beans among {@code predecessors} (and their own
 * predecessors) that reference {@code reference}.
 *
 * @param matches accumulator of found referrers; also the return value
 * @param checked beans already visited, to avoid re-processing
 * @return the {@code matches} accumulator
 */
q13702 | AdminCoreContext.findReferences | train | private List<Bean> findReferences(BeanId reference, Collection<Bean> predecessors,
ArrayList<Bean> matches, ArrayList<Bean> checked) {
for (Bean predecessor : predecessors) {
findReferences(reference, predecessor, matches, checked);
}
return matches;
} | java | {
"resource": ""
} |
/**
 * Resolves every BeanId reference of the given beans to an actual Bean
 * instance, looking first in the user-provided set and then in the bean
 * manager, and attaches the schema to each resolved bean.
 */
q13703 | AdminCoreContext.initalizeReferences | train | @SuppressWarnings("unused")
private void initalizeReferences(Collection<Bean> beans) {
Map<BeanId, Bean> userProvided = BeanUtils.uniqueIndex(beans);
for (Bean bean : beans) {
for (String name : bean.getReferenceNames()) {
List<BeanId> values = bean.getReference(name);
if (values == null) {
continue;
}
for (BeanId beanId : values) {
// the referenced bean may not exist in storage, but may exist
// in the set of beans provided by the user.
Bean ref = userProvided.get(beanId);
if (ref == null) {
Optional<Bean> optional = beanManager.getEager(beanId);
if (optional.isPresent()) {
ref = optional.get();
}
}
beanId.setBean(ref);
schemaManager.setSchema(Arrays.asList(beanId.getBean()));
}
}
}
} | java | {
"resource": ""
} |
/**
 * Ant task entry point: for every module descriptor matched by the fileset,
 * write an updated copy under {@code targetDir}. If the module's lib
 * directory exists, the names of the archives found there (jar/ear/sar/war)
 * are injected into the descriptor; otherwise the lib directory is created
 * and the descriptor is copied unchanged.
 *
 * @throws BuildException wrapping any I/O or scanner failure
 */
q13704 | InstallModulesTask.execute | train | @Override
public void execute() throws BuildException
{
    try
    {
        DirectoryScanner dsc = fileset.getDirectoryScanner(getProject());
        File baseDir = dsc.getBasedir();
        for (String currentFile : dsc.getIncludedFiles())
        {
            File moduleXml = new File(baseDir, currentFile);
            String modulePath = currentFile.substring(0, currentFile.lastIndexOf(File.separator));
            File libDir = new File(targetDir, modulePath);
            File destFile = new File(targetDir, currentFile);
            System.out.println("Processing descriptor for module " + modulePath);
            System.out.println("* Source module descriptor: " + moduleXml);
            System.out.println("* Destination module descriptor: " + destFile);
            String contents = readFileContents(moduleXml);
            if (libDir.exists())
            {
                // inject the archive names found next to the descriptor
                String[] libs = libDir.list(new FilenameFilter()
                {
                    @Override
                    public boolean accept(File dir, String name)
                    {
                        return name.endsWith(".jar") || name.endsWith(".ear") || name.endsWith(".sar")
                            || name.endsWith(".war");
                    }
                });
                contents = updateContents(contents, libs);
            }
            else
            {
                libDir.mkdirs();
            }
            // try-with-resources: the original leaked the writer if write() threw
            try (BufferedWriter out = new BufferedWriter(new FileWriter(destFile)))
            {
                out.write(contents);
            }
        }
    }
    catch (Exception e)
    {
        e.printStackTrace();
        throw new BuildException(e);
    }
} | java | {
"resource": ""
} |
/**
 * Returns the beans that must be re-validated when {@code ids} change: the
 * beans themselves plus their direct predecessors, each assembled with its
 * references (two levels deep) and all properties.
 */
q13706 | JpaBean.getBeanToValidate | train | public static Set<Bean> getBeanToValidate(Set<BeanId> ids) {
List<JpaRef> targetPredecessors = JpaRef.getDirectPredecessors(ids);
Set<BeanId> beansToValidate = new HashSet<>();
for (JpaRef ref : targetPredecessors) {
beansToValidate.add(ref.getSource());
}
beansToValidate.addAll(ids);
JpaBeanQueryAssembler query = new JpaBeanQueryAssembler(beansToValidate);
// fetch references two levels deep, enough for validation
collectRefs(beansToValidate, query, 2);
List<JpaProperty> allProperties = JpaProperty.findProperties(query.getIds());
query.addProperties(allProperties);
return new HashSet<>(query.assembleBeans());
} | java | {
"resource": ""
} |
/**
 * Loads the JpaBean identified by {@code id} together with its properties.
 * Every persisted bean has at least a marker property, so an empty property
 * list means the bean does not exist.
 *
 * @return the assembled bean, or null if no bean exists for the id
 */
q13707 | JpaBean.getJpaBeanAndProperties | train | @SuppressWarnings("unused")
private static JpaBean getJpaBeanAndProperties(BeanId id) {
    List<JpaProperty> props = JpaProperty.findProperties(id);
    if (props.isEmpty()) {
        // no marker property found, hence the bean does not exist
        return null;
    }
    JpaBean bean = new JpaBean(new JpaBeanPk(id));
    // strip the internal existence marker before exposing the properties
    JpaProperty.filterMarkerProperty(props);
    bean.properties.addAll(props);
    return bean;
} | java | {
"resource": ""
} |
/**
 * Joins {@code origRight} onto the last element of {@code builder}, replacing
 * that element with the joined value when the two can be merged (objects,
 * lists, or string-concatenable primitives); otherwise appends it unchanged.
 *
 * @throws BugOrBroken if a nested, unflattened concatenation is encountered
 * @throws WrongType   if an object/list is concatenated with an incompatible value
 */
q13708 | ConfigConcatenation.join | train | private static void join(ArrayList<AbstractConfigValue> builder, AbstractConfigValue origRight) {
AbstractConfigValue left = builder.get(builder.size() - 1);
AbstractConfigValue right = origRight;
// check for an object which can be converted to a list
// (this will be an object with numeric keys, like foo.0, foo.1)
if (left instanceof ConfigObject && right instanceof SimpleConfigList) {
left = DefaultTransformer.transform(left, ConfigValueType.LIST);
} else if (left instanceof SimpleConfigList && right instanceof ConfigObject) {
right = DefaultTransformer.transform(right, ConfigValueType.LIST);
}
// Since this depends on the type of two instances, I couldn't think
// of much alternative to an instanceof chain. Visitors are sometimes
// used for multiple dispatch but seems like overkill.
AbstractConfigValue joined = null;
if (left instanceof ConfigObject && right instanceof ConfigObject) {
// object + object: right wins on conflicting keys
joined = right.withFallback(left);
} else if (left instanceof SimpleConfigList && right instanceof SimpleConfigList) {
joined = ((SimpleConfigList)left).concatenate((SimpleConfigList)right);
} else if (left instanceof ConfigConcatenation || right instanceof ConfigConcatenation) {
throw new BugOrBroken("unflattened ConfigConcatenation");
} else if (left instanceof Unmergeable || right instanceof Unmergeable) {
// leave joined=null, cannot join
} else {
// handle primitive type or primitive type mixed with object or list
String s1 = left.transformToString();
String s2 = right.transformToString();
if (s1 == null || s2 == null) {
throw new WrongType(left.origin(),
"Cannot concatenate object or list with a non-object-or-list, " + left
+ " and " + right + " are not compatible");
} else {
ConfigOrigin joinedOrigin = SimpleConfigOrigin.mergeOrigins(left.origin(),
right.origin());
joined = new ConfigString(joinedOrigin, s1 + s2);
}
}
if (joined == null) {
builder.add(right);
} else {
// replace the last element with the merged result
builder.remove(builder.size() - 1);
builder.add(joined);
}
} | java | {
"resource": ""
} |
/**
 * Merges the named Velocity template with the given model, writing the
 * rendered output to {@code writer}.
 *
 * @throws VelocityException if template lookup or merging fails
 */
q13709 | VelocityEngineUtils.mergeTemplate | train | public static void mergeTemplate(
VelocityEngine velocityEngine, String templateLocation, String encoding,
Map<String, Object> model, Writer writer) throws VelocityException {
VelocityContext velocityContext = new VelocityContext(model);
velocityEngine.mergeTemplate(templateLocation, encoding, velocityContext, writer);
} | java | {
"resource": ""
} |
/**
 * Parses a duration string such as "10s", "500 ms" or "2.5 hours" into
 * nanoseconds. A missing unit means milliseconds; unit matching is
 * deliberately case-sensitive. Pure-integer values are parsed as long to
 * avoid double-precision loss.
 *
 * @param input              the raw config value
 * @param originForException origin reported in any ConfigException
 * @param pathForException   path reported in any ConfigException
 * @return the duration in nanoseconds
 */
q13710 | SimpleConfig.parseDuration | train | public static long parseDuration(String input,
        ConfigOrigin originForException, String pathForException) {
    String s = ConfigImplUtil.unicodeTrim(input);
    String originalUnitString = getUnits(s);
    String unitString = originalUnitString;
    String numberString = ConfigImplUtil.unicodeTrim(s.substring(0, s.length()
            - unitString.length()));
    TimeUnit units = null;
    // this would be caught later anyway, but the error message
    // is more helpful if we check it here.
    if (numberString.length() == 0)
        throw new ConfigException.BadValue(originForException,
                pathForException, "No number in duration value '" + input
                + "'");
    // normalize long unit names to their plural form ("second" -> "seconds")
    if (unitString.length() > 2 && !unitString.endsWith("s"))
        unitString = unitString + "s";
    // note that this is deliberately case-sensitive
    if (unitString.equals("") || unitString.equals("ms")
            || unitString.equals("milliseconds")) {
        units = TimeUnit.MILLISECONDS;
    } else if (unitString.equals("us") || unitString.equals("microseconds")) {
        units = TimeUnit.MICROSECONDS;
    } else if (unitString.equals("ns") || unitString.equals("nanoseconds")) {
        units = TimeUnit.NANOSECONDS;
    } else if (unitString.equals("d") || unitString.equals("days")) {
        units = TimeUnit.DAYS;
    } else if (unitString.equals("h") || unitString.equals("hours")) {
        units = TimeUnit.HOURS;
    } else if (unitString.equals("s") || unitString.equals("seconds")) {
        units = TimeUnit.SECONDS;
    } else if (unitString.equals("m") || unitString.equals("minutes")) {
        units = TimeUnit.MINUTES;
    } else {
        // BUGFIX: the hint previously omitted 'h' although hours are accepted
        throw new ConfigException.BadValue(originForException,
                pathForException, "Could not parse time unit '"
                        + originalUnitString
                        + "' (try ns, us, ms, s, m, h, d)");
    }
    try {
        // if the string is purely digits, parse as an integer to avoid
        // possible precision loss;
        // otherwise as a double.
        if (numberString.matches("[0-9]+")) {
            return units.toNanos(Long.parseLong(numberString));
        } else {
            long nanosInUnit = units.toNanos(1);
            return (long) (Double.parseDouble(numberString) * nanosInUnit);
        }
    } catch (NumberFormatException e) {
        throw new ConfigException.BadValue(originForException,
                pathForException, "Could not parse duration number '"
                + numberString + "'");
    }
} | java | {
"resource": ""
} |
/**
 * Builds an IN restriction matching any of the given values for {@code property}.
 */
q13711 | ConfigQueryBuilder.in | train | public static Restriction in(String property, Object... values) {
return new In(property, Arrays.asList(values));
} | java | {
"resource": ""
} |
/**
 * Builds a conjunction restriction that is satisfied only when both
 * {@code r1} and {@code r2} are satisfied.
 */
q13712 | ConfigQueryBuilder.and | train | public static Restriction and(Restriction r1, Restriction r2) {
    final Restriction[] operands = { r1, r2 };
    return new And(Arrays.asList(operands));
} | java | {
"resource": ""
} |
/**
 * Builds a disjunction restriction that is satisfied when either
 * {@code r1} or {@code r2} is satisfied.
 */
q13713 | ConfigQueryBuilder.or | train | public static Restriction or(Restriction r1, Restriction r2) {
    final Restriction[] operands = { r1, r2 };
    return new Or(Arrays.asList(operands));
} | java | {
"resource": ""
} |
/**
 * MIDI Receiver callback: translates incoming short messages into Pad
 * presses and forwards them to the registered launchpad receiver.
 * Non-short messages and unmapped pads are ignored.
 */
q13714 | Launchpad.send | train | @Override
public void send(MidiMessage message, long timeStamp) {
if (this.launchpadReceiver != null && message instanceof ShortMessage) {
ShortMessage sm = (ShortMessage) message;
Pad pad = Pad.findMidi(sm);
if (pad != null) {
this.launchpadReceiver.receive(pad);
}
}
} | java | {
"resource": ""
} |
/**
 * Returns the (mutable) list of entries for {@code directory}, lazily
 * creating and registering an empty list on first access. Synchronized so
 * concurrent callers see a single list per directory.
 */
q13715 | MemoryFileSystem.getEntriesList | train | private synchronized List<Entry> getEntriesList( DirectoryEntry directory )
{
List<Entry> entries = contents.get( directory );
if ( entries == null )
{
entries = new ArrayList<Entry>();
contents.put( directory, entries );
}
return entries;
} | java | {
"resource": ""
} |
/**
 * Resolves the first instance referenced by {@code field} for the given
 * schema, consulting the local instance map before falling back to the cache.
 * Cache results are memoized into the instance map.
 *
 * @return the referenced instance, or null if the field has no references
 */
q13716 | ConfigReferenceHolder.getObjectReference | train | public Object getObjectReference(String field, String schemaName) {
List<String> instanceIds = references.get(field);
if(instanceIds == null || instanceIds.size() == 0) {
return null;
}
// only the first referenced id is resolved
String instanceId = instanceIds.get(0);
if(instanceId == null) {
return null;
}
BeanId id = BeanId.create(instanceId, schemaName);
Object instance = instances.get(id);
if(instance != null) {
return instance;
}
// NOTE(review): a cache miss (null) is also memoized here — confirm intended
instance = cache.get(id);
instances.put(id, instance);
return instance;
} | java | {
"resource": ""
} |
/**
 * Resolves every instance referenced by {@code field} for the given schema,
 * in reference order, consulting the local instance map before the cache.
 *
 * @return the referenced instances, or null if the field has no references
 */
q13717 | ConfigReferenceHolder.getObjectReferenceList | train | public Collection<Object> getObjectReferenceList(String field, String schemaName) {
List<String> instanceIds = references.get(field);
if(instanceIds == null || instanceIds.size() == 0) {
return null;
}
List<Object> objects = new ArrayList<>();
for (String instanceId : instanceIds) {
BeanId id = BeanId.create(instanceId, schemaName);
Object instance = instances.get(id);
if(instance != null) {
objects.add(instance);
} else {
// fall back to the cache and memoize the result
instance = cache.get(id);
instances.put(id, instance);
objects.add(instance);
}
}
return objects;
} | java | {
"resource": ""
} |
/**
 * Resolves every instance referenced by {@code field} for the given schema
 * into a map keyed by instance id, consulting the local instance map before
 * the cache.
 *
 * @return instanceId-to-instance map, or null if the field has no references
 */
q13718 | ConfigReferenceHolder.getObjectReferenceMap | train | public Map<String, Object> getObjectReferenceMap(String field, String schemaName) {
List<String> instanceIds = references.get(field);
if(instanceIds == null || instanceIds.size() == 0) {
return null;
}
Map<String, Object> objects = new HashMap<>();
for (String instanceId : instanceIds) {
BeanId id = BeanId.create(instanceId, schemaName);
Object instance = instances.get(id);
if(instance != null) {
objects.put(instanceId, instance);
} else {
// fall back to the cache and memoize the result
instance = cache.get(id);
instances.put(id, instance);
objects.put(instanceId, instance);
}
}
return objects;
} | java | {
"resource": ""
} |
/**
 * Collects every "-D"-prefixed argument into a SysProperties holder.
 * Arguments without the prefix are skipped; a null or empty argument
 * array yields an empty holder.
 */
q13719 | AntTaskHelper.toSystemProperties | train | static SysProperties toSystemProperties(final String[] arguments)
{
    final SysProperties props = new SysProperties();
    if (arguments == null)
    {
        return props;
    }
    for (final String argument : arguments)
    {
        if (argument.startsWith("-D"))
        {
            props.addVariable(AntTaskHelper.toVariable(argument));
        }
    }
    return props;
} | java | {
"resource": ""
} |
/**
 * Parses a "-Dkey" or "-Dkey=value" argument into a Variable. When no '='
 * is present only the key is set; otherwise the text after the first '='
 * becomes the value.
 */
q13720 | AntTaskHelper.toVariable | train | private static Variable toVariable(final String argument)
{
    // strip the "-D" prefix, then split on the first '=' if present
    final String body = argument.substring(2);
    final Variable variable = new Variable();
    final int eq = body.indexOf('=');
    if (eq < 0)
    {
        variable.setKey(body);
    }
    else
    {
        variable.setKey(body.substring(0, eq));
        variable.setValue(body.substring(eq + 1));
    }
    return variable;
} | java | {
"resource": ""
} |
/**
 * Flattens a bean's properties into a jagged array where each row is
 * {@code [propertyName, value1, value2, ...]}.
 */
q13721 | HBeanProperties.getProperties | train | public static String[][] getProperties(final Bean bean) {
final List<String> propertyNames = bean.getPropertyNames();
final int psize = propertyNames.size();
final String[][] properties = new String[psize][];
for (int i = 0; i < psize; i++) {
final String propertyName = propertyNames.get(i);
final List<String> values = bean.getValues(propertyName);
final int vsize = values.size();
// slot 0 holds the name, the values follow at offset 1
properties[i] = new String[vsize + 1];
properties[i][0] = propertyName;
for (int j = 0; j < vsize; j++) {
properties[i][j + 1] = values.get(j);
}
}
return properties;
} | java | {
"resource": ""
} |
/**
 * Writes this holder's properties onto {@code bean}, skipping rows that
 * have a name but no values (see {@code getProperties} layout: slot 0 is
 * the name, values follow at offset 1).
 */
q13722 | HBeanProperties.setPropertiesOn | train | public void setPropertiesOn(final Bean bean) {
String[][] properties = getProperties();
for (int i = 0; i < properties.length; i++) {
if (properties[i].length < 2) {
// name only, no values to transfer
continue;
}
for (int j = 0; j < properties[i].length - 1; j++) {
bean.addProperty(properties[i][0], properties[i][j + 1]);
}
}
} | java | {
"resource": ""
} |
/**
 * Decodes the property name from a KeyValue: the two-byte property id is
 * stored at qualifier offsets 2-3 and resolved through the unique-id table.
 */
q13723 | HBeanProperties.getPropertyName | train | public static String getPropertyName(KeyValue kv, UniqueIds uids) {
    final byte[] qualifier = kv.getQualifier();
    // bytes 2-3 of the qualifier carry the property id
    return uids.getUsid().getName(new byte[] { qualifier[2], qualifier[3] });
} | java | {
"resource": ""
} |
/**
 * Returns true iff the key-value belongs to the property column family.
 */
q13724 | HBeanProperties.isProperty | train | public static boolean isProperty(KeyValue kv) {
    // a key-value is a property iff it lives in the property column family
    return Bytes.equals(kv.getFamily(), PROP_COLUMN_FAMILY);
} | java | {
"resource": ""
} |
/**
 * Lists all rows of a schema and eagerly resolves their references up to
 * {@code FETCH_DEPTH_MAX} levels.
 *
 * @throws HBeanNotFoundException if referenced rows cannot be found
 */
q13725 | HBeanTable.listEager | train | public HBeanRowCollector listEager(String schemaName, FetchType... fetchType)
throws HBeanNotFoundException {
Set<HBeanRow> rows = listLazy(schemaName, fetchType);
HBeanRowCollector collector = new HBeanRowCollector(rows);
getEager(rows, collector, FETCH_DEPTH_MAX, fetchType);
return collector;
} | java | {
"resource": ""
} |
/**
 * Fetches the given rows and eagerly resolves their references up to
 * {@code FETCH_DEPTH_MAX} levels.
 *
 * @throws HBeanNotFoundException if a requested row cannot be found
 */
q13726 | HBeanTable.getEager | train | public HBeanRowCollector getEager(Set<HBeanRow> rows, FetchType... fetchType)
throws HBeanNotFoundException {
Set<HBeanRow> result;
result = getLazy(rows, fetchType);
HBeanRowCollector collector = new HBeanRowCollector(result);
getEager(result, collector, FETCH_DEPTH_MAX, fetchType);
return collector;
} | java | {
"resource": ""
} |
/**
 * Persists the given rows in one batch: properties, predecessors,
 * references, the hbean key-value, an optional singleton marker, and a
 * dummy column (HBase cannot store a row key without at least one column).
 * Any failure is rethrown as a RuntimeException.
 */
q13727 | HBeanTable.put | train | public void put(Set<HBeanRow> rows) {
final List<Row> create = new ArrayList<>();
try {
for (HBeanRow row : rows) {
final Put write = new Put(row.getRowKey());
if (row.getPropertiesKeyValue() != null) {
write.add(row.getPropertiesKeyValue());
}
for (KeyValue kv : row.getPredecessors()) {
write.add(kv);
}
for (KeyValue kv : row.getReferencesKeyValue().values()) {
write.add(kv);
}
KeyValue hBean = row.getHBeanKeyValue();
write.add(hBean);
if (row.isSingleton()) {
// marker column flags singleton beans
write.add(new KeyValue(row.getRowKey(), HBeanRow.SINGLETON_COLUMN_FAMILY,
HBeanRow.SINGLETON_COLUMN_FAMILY, new byte[] { 1 }));
}
// hbase cannot have rowkeys without columns so we need
// a dummy value to represent beans without any values
write.add(new KeyValue(row.getRowKey(), HBeanRow.DUMMY_COLUMN_FAMILY,
HBeanRow.DUMMY_COLUMN_FAMILY, new byte[] { 1 }));
create.add(write);
}
table.batch(create);
table.flushCommits();
} catch (Exception e) {
throw new RuntimeException(e);
}
} | java | {
"resource": ""
} |
/**
 * Recursively fetches the references of {@code rows} into the collector,
 * decrementing {@code level} on each pass and stopping at depth 0, when
 * there are no rows, or when every reference has already been visited.
 *
 * @throws HBeanNotFoundException if a referenced row cannot be found
 */
q13728 | HBeanTable.getEager | train | private void getEager(Set<HBeanRow> rows, HBeanRowCollector collector, int level,
FetchType... fetchType) throws HBeanNotFoundException {
int size = rows.size();
if (size == 0) {
return;
}
if (--level < 0) {
return;
}
Set<HBeanRow> refs = new HashSet<>();
for (HBeanRow row : rows) {
refs.addAll(row.getReferenceRows());
}
// only recurse rowkeys we havent already
// visited to break circular references
refs = collector.filterUnvisted(refs);
refs = getLazy(refs, fetchType);
collector.addReferences(refs);
getEager(refs, collector, level, fetchType);
} | java | {
"resource": ""
} |
/**
 * Deletes the given rows in one batch. Any failure is rethrown as a
 * RuntimeException.
 */
q13729 | HBeanTable.delete | train | public void delete(Set<HBeanRow> rows) {
final List<Row> delete = new ArrayList<>();
try {
for (HBeanRow row : rows) {
delete.add(new Delete(row.getRowKey()));
}
table.batch(delete);
table.flushCommits();
} catch (Exception e) {
throw new RuntimeException(e);
}
} | java | {
"resource": ""
} |
/**
 * Looks up the ValidationManager service, if one is registered.
 *
 * @return the manager, or {@code Optional.absent()} when none is registered
 */
q13730 | ValidationManager.lookup | train | public static Optional<ValidationManager> lookup() {
    // fromNullable collapses the explicit null-check / of / absent dance
    return Optional.fromNullable(lookup.lookup(ValidationManager.class));
} | java | {
"resource": ""
} |
/**
 * Registers the given properties on their owning beans, creating beans on
 * demand and skipping the internal existence-marker property.
 */
q13731 | JpaBeanQueryAssembler.addProperties | train | public void addProperties(List<JpaProperty> queryProperties) {
for (JpaProperty prop : queryProperties) {
Bean bean = putIfAbsent(prop.getId());
if (!JpaProperty.BEAN_MARKER_PROPERTY_NAME.equals(prop.getPropertyName())) {
bean.addProperty(prop.getPropertyName(), prop.getValue());
}
}
} | java | {
"resource": ""
} |
/**
 * Merges the given reference multimap into this assembler and ensures a
 * bean placeholder exists for every source and target id involved.
 */
q13732 | JpaBeanQueryAssembler.addRefs | train | public void addRefs(Multimap<BeanId, JpaRef> queryRefs) {
refs.putAll(queryRefs);
for (BeanId id : refs.keySet()) {
putIfAbsent(id);
for (JpaRef ref : refs.get(id)) {
putIfAbsent(ref.getTarget());
}
}
} | java | {
"resource": ""
} |
/**
 * Returns the union of the explicitly queried ids and every bean id
 * discovered so far during assembly.
 */
q13733 | JpaBeanQueryAssembler.getIds | train | public Set<BeanId> getIds() {
    Set<BeanId> union = new HashSet<>(beansQuery);
    union.addAll(beans.keySet());
    return union;
} | java | {
"resource": ""
} |
/**
 * Returns the bean registered for {@code id}, creating and registering an
 * empty one on first access.
 */
q13734 | JpaBeanQueryAssembler.putIfAbsent | train | private Bean putIfAbsent(BeanId id) {
Bean bean = beans.get(id);
if (bean == null) {
bean = Bean.create(id);
beans.put(id, bean);
}
return bean;
} | java | {
"resource": ""
} |
/**
 * Wires all collected references together and returns the assembled beans.
 * If no specific ids were requested (e.g. a schema-wide query), every
 * assembled bean is returned; otherwise only the requested ids that were
 * actually found.
 */
q13735 | JpaBeanQueryAssembler.assembleBeans | train | public List<Bean> assembleBeans() {
    connectReferences();
    if (beansQuery.isEmpty()) {
        // if no specific beans where requested (such as query for a
        // specific schema) return what is available.
        return new ArrayList<>(beans.values());
    }
    List<Bean> initialQuery = new ArrayList<>();
    for (BeanId id : beansQuery) {
        Bean bean = beans.get(id);
        if (bean != null) {
            // reuse the looked-up bean instead of a redundant second map get
            initialQuery.add(bean);
        }
    }
    return initialQuery;
} | java | {
"resource": ""
} |
/**
 * Attaches every collected reference to its source bean, resolving the
 * target id to the (possibly null) assembled target bean.
 */
q13736 | JpaBeanQueryAssembler.connectReferences | train | private void connectReferences() {
// ready to associate initalized beans with references
for (Bean bean : beans.values()) {
for (JpaRef ref : refs.get(bean.getId())) {
BeanId target = ref.getTarget();
Bean targetBean = beans.get(target);
target.setBean(targetBean);
bean.addReference(ref.getPropertyName(), target);
}
}
} | java | {
"resource": ""
} |
/**
 * Returns {@code bean} plus every bean transitively reachable through its
 * resolved references. Unresolved references (null beans) are skipped.
 * NOTE(review): no visited-set is kept, so this assumes the reference graph
 * is acyclic — confirm against Bean.checkCircularReference guarantees.
 */
q13737 | CachedCacheManager.flattenReferences | train | private Set<Bean> flattenReferences(Bean bean) {
Set<Bean> beans = new HashSet<>();
for (String referenceName : bean.getReferenceNames()) {
List<BeanId> ids = bean.getReference(referenceName);
for (BeanId id : ids) {
if (id.getBean() == null) {
continue;
}
beans.addAll(flattenReferences(id.getBean()));
}
}
beans.add(bean);
return beans;
} | java | {
"resource": ""
} |
/**
 * Creates the three unique-id translation tables (instance, schema and
 * property ids) backed by the given HBase configuration.
 */
q13738 | HBaseBeanManager.createUids | train | public static UniqueIds createUids(Configuration conf) {
UniqueId usid = new UniqueId(SID_TABLE, SID_WIDTH, conf, true);
UniqueId uiid = new UniqueId(IID_TABLE, IID_WIDTH, conf, true);
UniqueId upid = new UniqueId(PID_TABLE, PID_WIDTH, conf, true);
return new UniqueIds(uiid, usid, upid);
} | java | {
"resource": ""
} |
/**
 * Computes the MD5 of the wrapped entry's content and returns it as a
 * 32-character, zero-padded lowercase hex string encoded to bytes.
 *
 * @throws IOException if reading the entry fails, or MD5 is unavailable
 */
q13739 | MD5DigestFileEntry.getContent | train | private byte[] getContent()
throws IOException
{
InputStream is = null;
try
{
MessageDigest digest = MessageDigest.getInstance( "MD5" );
digest.reset();
byte[] buffer = new byte[8192];
int read;
try
{
is = entry.getInputStream();
while ( ( read = is.read( buffer ) ) > 0 )
{
digest.update( buffer, 0, read );
}
}
catch ( IOException e )
{
// NOTE(review): if getInputStream() itself throws, is == null and the
// exception is swallowed, yielding the MD5 of zero bytes — presumably
// intentional best-effort behaviour; confirm.
if ( is != null )
{
throw e;
}
}
// zero-pad to the full 32 hex digits (BigInteger drops leading zeros)
final String md5 = StringUtils.leftPad( new BigInteger( 1, digest.digest() ).toString( 16 ), 32, "0" );
return md5.getBytes();
}
catch ( NoSuchAlgorithmException e )
{
IOException ioe = new IOException( "Unable to calculate hash" );
ioe.initCause( e );
throw ioe;
}
finally
{
IOUtils.closeQuietly( is );
}
} | java | {
"resource": ""
} |
/**
 * Looks up {@code path} without a resolve context. NotPossibleToResolve
 * cannot occur without a context, so it is converted to BugOrBroken.
 */
q13740 | AbstractConfigObject.peekPath | train | AbstractConfigValue peekPath(Path path) {
try {
return peekPath(this, path, null);
} catch (NotPossibleToResolve e) {
throw new BugOrBroken(
"NotPossibleToResolve happened though we had no ResolveContext in peekPath");
}
} | java | {
"resource": ""
} |
/**
 * Looks up {@code path} inside {@code self}. With a resolve context the
 * object is first partially resolved along the path; without one, the walk
 * fails (returns null or throws) on anything that would need resolving.
 *
 * @return the value at the path, or null if a non-object is hit mid-path
 * @throws NotPossibleToResolve if resolution is required but impossible
 */
q13741 | AbstractConfigObject.peekPath | train | private static AbstractConfigValue peekPath(AbstractConfigObject self, Path path,
ResolveContext context) throws NotPossibleToResolve {
try {
if (context != null) {
// walk down through the path resolving only things along that
// path, and then recursively call ourselves with no resolve
// context.
AbstractConfigValue partiallyResolved = context.restrict(path).resolve(self);
if (partiallyResolved instanceof AbstractConfigObject) {
return peekPath((AbstractConfigObject) partiallyResolved, path, null);
} else {
throw new BugOrBroken("resolved object to non-object " + self
+ " to " + partiallyResolved);
}
} else {
// with no resolver, we'll fail if anything along the path can't
// be looked at without resolving.
Path next = path.remainder();
AbstractConfigValue v = self.attemptPeekWithPartialResolve(path.first());
if (next == null) {
return v;
} else {
if (v instanceof AbstractConfigObject) {
return peekPath((AbstractConfigObject) v, next, null);
} else {
return null;
}
}
}
} catch (NotResolved e) {
throw ConfigImpl.improveNotResolved(path, e);
}
} | java | {
"resource": ""
} |
/**
 * Lazily builds and caches the artifact file name in the form
 * {@code artifactId-version[-classifier].type}.
 * NOTE(review): the lazy init is not synchronized; benign only if concurrent
 * recomputation is acceptable — confirm single-threaded usage.
 */
q13742 | Artifact.getName | train | public String getName()
{
if ( name == null )
{
name = MessageFormat.format( "{0}-{1}{2}.{3}", new Object[]{ artifactId, getTimestampVersion(),
( classifier == null ? "" : "-" + classifier ), type } );
}
return name;
} | java | {
"resource": ""
} |
/**
 * Creates an empty bean for {@code id} and stamps it with the id's schema.
 *
 * @throws NullPointerException if id is null
 */
q13743 | Bean.create | train | public static Bean create(final BeanId id) {
Preconditions.checkNotNull(id);
Bean bean = new Bean(id);
bean.set(id.getSchema());
return bean;
} | java | {
"resource": ""
} |
/**
 * Returns the bean's property names in natural (alphabetical) order.
 */
q13744 | Bean.getPropertyNames | train | public List<String> getPropertyNames() {
    List<String> sortedNames = new ArrayList<>(properties.keySet());
    Collections.sort(sortedNames);
    return sortedNames;
} | java | {
"resource": ""
} |
/**
 * Returns the bean's reference property names in natural (alphabetical) order.
 */
q13745 | Bean.getReferenceNames | train | public List<String> getReferenceNames() {
    List<String> sortedNames = new ArrayList<>(references.keySet());
    Collections.sort(sortedNames);
    return sortedNames;
} | java | {
"resource": ""
} |
/**
 * Appends all {@code values} to the property, creating it if absent.
 *
 * @throws NullPointerException if either argument is null
 */
q13746 | Bean.addProperty | train | public void addProperty(final String propertyName, final Collection<String> values) {
Preconditions.checkNotNull(values);
Preconditions.checkNotNull(propertyName);
List<String> list = properties.get(propertyName);
if (list == null) {
// defensive copy so later mutation of 'values' does not leak in
properties.put(propertyName, new ArrayList<>(values));
} else {
list.addAll(values);
}
} | java | {
"resource": ""
} |
/**
 * Appends a single value to the property, creating it if absent.
 *
 * @throws NullPointerException if either argument is null
 */
q13747 | Bean.addProperty | train | public void addProperty(final String propertyName, final String value) {
Preconditions.checkNotNull(propertyName);
Preconditions.checkNotNull(value);
List<String> values = properties.get(propertyName);
if (values == null) {
values = new ArrayList<>();
values.add(value);
properties.put(propertyName, values);
} else {
values.add(value);
}
} | java | {
"resource": ""
} |
/**
 * Replaces the property with the single given value; a null value stores an
 * explicit null entry (cf. {@code clear}).
 *
 * @throws NullPointerException if propertyName is null
 */
q13748 | Bean.setProperty | train | public void setProperty(final String propertyName, final String value) {
Preconditions.checkNotNull(propertyName);
if (value == null) {
// an explicit null marks the property as cleared, not removed
properties.put(propertyName, null);
return;
}
List<String> values = new ArrayList<>();
values.add(value);
properties.put(propertyName, values);
} | java | {
"resource": ""
} |
/**
 * Clears the value of a property or reference by mapping its existing key
 * to null (the key itself is kept; contrast with {@code remove}).
 *
 * @throws NullPointerException if propertyName is null
 */
q13749 | Bean.clear | train | public void clear(final String propertyName) {
Preconditions.checkNotNull(propertyName);
if (properties.containsKey(propertyName)) {
properties.put(propertyName, null);
} else if (references.containsKey(propertyName)) {
references.put(propertyName, null);
}
} | java | {
"resource": ""
} |
/**
 * Removes a property or reference entirely, key included (contrast with
 * {@code clear}).
 *
 * @throws NullPointerException if propertyName is null
 */
q13750 | Bean.remove | train | public void remove(final String propertyName) {
Preconditions.checkNotNull(propertyName);
if (properties.containsKey(propertyName)) {
properties.remove(propertyName);
} else if (references.containsKey(propertyName)) {
references.remove(propertyName);
}
} | java | {
"resource": ""
} |
/**
 * Returns a shallow defensive copy of the property's values, or null if the
 * property is absent or was cleared.
 *
 * @throws NullPointerException if propertyName is null
 */
q13751 | Bean.getValues | train | public List<String> getValues(final String propertyName) {
Preconditions.checkNotNull(propertyName);
List<String> values = properties.get(propertyName);
if (values == null) {
return null;
}
// creates a shallow defensive copy
return new ArrayList<>(values);
} | java | {
"resource": ""
} |
/**
 * Returns the property's first value, or null if the property is absent,
 * cleared, or empty.
 *
 * @throws NullPointerException if propertyName is null
 */
q13752 | Bean.getSingleValue | train | public String getSingleValue(final String propertyName) {
Preconditions.checkNotNull(propertyName);
List<String> values = getValues(propertyName);
if (values == null || values.size() < 1) {
return null;
}
return values.get(0);
} | java | {
"resource": ""
} |
/**
 * Appends all {@code refs} to the named reference property, creating it if
 * absent. Self-references are rejected.
 *
 * @throws NullPointerException if either argument is null
 */
q13753 | Bean.addReference | train | public void addReference(final String propertyName, final Collection<BeanId> refs) {
Preconditions.checkNotNull(refs);
Preconditions.checkNotNull(propertyName);
checkCircularReference(refs.toArray(new BeanId[refs.size()]));
List<BeanId> list = references.get(propertyName);
if (list == null) {
list = new ArrayList<>();
list.addAll(refs);
references.put(propertyName, list);
} else {
list.addAll(refs);
}
} | java | {
"resource": ""
} |
/**
 * Rejects direct self-references: a bean may not reference its own id.
 * (Indirect cycles are not detected here.)
 */
q13754 | Bean.checkCircularReference | train | private void checkCircularReference(final BeanId... references) {
for (BeanId beanId : references) {
if (getId().equals(beanId)) {
throw CFG310_CIRCULAR_REF(getId(), getId());
}
}
} | java | {
"resource": ""
} |
/**
 * Appends a single reference to the named reference property, creating it
 * if absent. Self-references are rejected.
 *
 * @throws NullPointerException if either argument is null
 */
q13755 | Bean.addReference | train | public void addReference(final String propertyName, final BeanId ref) {
Preconditions.checkNotNull(ref);
Preconditions.checkNotNull(propertyName);
checkCircularReference(ref);
List<BeanId> list = references.get(propertyName);
if (list == null) {
list = new ArrayList<>();
list.add(ref);
references.put(propertyName, list);
} else {
list.add(ref);
}
} | java | {
"resource": ""
} |
/**
 * Returns the live list of ids for the named reference property, or null
 * if absent or cleared. Note: unlike {@code getValues}, no defensive copy
 * is made.
 */
q13756 | Bean.getReference | train | public List<BeanId> getReference(final String propertyName) {
List<BeanId> values = references.get(propertyName);
if (values == null) {
return null;
}
return values;
} | java | {
"resource": ""
} |
/**
 * Returns all reference ids across every reference property, skipping
 * cleared (null) entries. Never returns null.
 */
q13757 | Bean.getReferences | train | public List<BeanId> getReferences() {
if (references == null) {
return new ArrayList<>();
}
ArrayList<BeanId> result = new ArrayList<>();
for (List<BeanId> b : references.values()) {
if (b != null) {
result.addAll(b);
}
}
return result;
} | java | {
"resource": ""
} |
/**
 * Returns the first id of the named reference property, or null if the
 * property is absent, cleared, or empty.
 */
q13758 | Bean.getFirstReference | train | public BeanId getFirstReference(final String propertyName) {
    final List<BeanId> references = getReference(propertyName);
    if (references == null || references.isEmpty()) {
        return null;
    }
    return references.get(0);
} | java | {
"resource": ""
} |
/**
 * Looks up the CacheManager service, if one is registered.
 *
 * @return the manager, or {@code Optional.absent()} when none is registered
 */
q13759 | CacheManager.lookup | train | public static Optional<CacheManager> lookup() {
    // fromNullable collapses the explicit null-check / of / absent dance
    return Optional.fromNullable(lookup.lookup(CacheManager.class));
} | java | {
"resource": ""
} |
/**
 * RFC 3986 / OAuth-style percent encoding: URL-encodes as UTF-8, then fixes
 * the three places where URLEncoder's form-encoding differs (space, '*', '~').
 *
 * @throws AuthException if UTF-8 encoding is unavailable (cannot happen on
 *         a conforming JVM)
 */
q13760 | AuthUtils.percentEncode | train | public static String percentEncode(String source) throws AuthException {
try {
return URLEncoder.encode(source, "UTF-8")
.replace("+", "%20")
.replace("*", "%2A")
.replace("%7E", "~");
} catch (UnsupportedEncodingException ex) {
throw new AuthException("cannot encode value '" + source + "'", ex);
}
} | java | {
"resource": ""
} |
/**
 * Decodes a percent-encoded string as UTF-8.
 *
 * @throws AuthException if UTF-8 decoding is unavailable (cannot happen on
 *         a conforming JVM)
 */
q13761 | AuthUtils.percentDecode | train | public static String percentDecode(String source) throws AuthException {
try {
return URLDecoder.decode(source, "UTF-8");
} catch (java.io.UnsupportedEncodingException ex) {
throw new AuthException("cannot decode value '" + source + "'", ex);
}
} | java | {
"resource": ""
} |
/**
 * Wraps {@code original} with temp-file backing when a TempFileContext list
 * is available; otherwise returns it unchanged and logs a warn-once notice
 * that large buffers will stay on the heap.
 */
q13762 | TempFileContext.wrapTempFileList | train | public static <T> T wrapTempFileList(T original, com.aoindustries.io.TempFileList tempFileList, Wrapper<T> wrapper) {
if(tempFileList != null) {
return wrapper.call(original, tempFileList);
} else {
// Warn once
synchronized(tempFileWarningLock) {
if(!tempFileWarned) {
if(logger.isLoggable(Level.WARNING)) {
logger.log(
Level.WARNING,
"TempFileContext not initialized: refusing to automatically create temp files for large buffers. "
+ "Additional heap space may be used for large requests. "
+ "Please add the " + TempFileContext.class.getName() + " filter to your web.xml file.",
new Throwable("Stack Trace")
);
}
tempFileWarned = true;
}
}
return original;
}
} | java | {
"resource": ""
} |
/**
 * Returns every public method of {@code classObj} (including inherited
 * ones, per Class.getMethods) whose name equals {@code methodName} — i.e.
 * all overload signatures for that name.
 *
 * @param classObj   the class to inspect
 * @param methodName the exact method name to match
 * @return the matching methods, possibly empty, in getMethods() order
 */
q13763 | MethodLister.listMethods | train | public List<Method> listMethods( final Class<?> classObj,
    final String methodName )
{
    final List<Method> methodSignatures = new ArrayList<Method>();
    // enhanced-for replaces the original index loop; order is preserved
    for ( final Method method : classObj.getMethods() )
    {
        if ( method.getName().equals( methodName ) )
        {
            methodSignatures.add( method );
        }
    }
    return methodSignatures;
} | java | {
"resource": ""
} |
/**
 * Computes the raw (pre-activation) score for every class: dot product of
 * the input with each class weight vector plus that class's bias. No
 * sigmoid is applied here (see the public predict(Tuple)).
 *
 * @param x the feature vector
 * @return class index to raw score
 */
q13764 | PerceptronClassifier.predict | train | private Map<Integer, Double> predict(final double[] x) {
Map<Integer, Double> result = new HashMap<>();
for (int i = 0; i < model.weights.length; i++) {
double y = VectorUtils.dotProduct(x, model.weights[i]);
y += model.bias[i];
result.put(i, y);
}
return result;
} | java | {
"resource": ""
} |
q13764 | PerceptronClassifier.onlineTrain | train | public void onlineTrain(final double[] x, final int labelIndex) {
    // Single perceptron update step: if the highest-scoring label is not the
    // gold label, reinforce the gold label's weights/bias and penalize the
    // wrongly predicted label's.
    Map<Integer, Double> result = predict(x);
    Map.Entry<Integer, Double> maxResult = result.entrySet().stream()
            .max(Map.Entry.comparingByValue())   // idiomatic comparator
            .orElse(null);
    if (maxResult == null) {
        // No classes configured (empty weight matrix): the original code would
        // have thrown a NullPointerException here; nothing to update.
        return;
    }
    if (maxResult.getKey() != labelIndex) {
        double e_correction_d = 1;
        model.weights[labelIndex] = reweight(x, model.weights[labelIndex], e_correction_d);
        model.bias[labelIndex] = e_correction_d;
        double w_correction_d = -1;
        model.weights[maxResult.getKey()] = reweight(x, model.weights[maxResult.getKey()], w_correction_d);
        model.bias[maxResult.getKey()] = w_correction_d;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("New bias: " + Arrays.toString(model.bias));
        LOG.debug("New weight: " + Arrays.stream(model.weights).map(Arrays::toString).reduce((wi, wii) -> wi + ", " + wii).get());
    }
} | java | {
"resource": ""
} |
q13765 | PerceptronClassifier.predict | train | @Override
public Map<String, Double> predict(Tuple predict) {
    // Score each label index, then map the index to its label string and
    // squash the raw score through a sigmoid (only here, not during training).
    Map<Integer, Double> indexResult = predict(predict.vector.getVector());
    return indexResult.entrySet().stream()
            .collect(Collectors.toMap(
                    e -> model.labelIndexer.getLabel(e.getKey()),
                    e -> VectorUtils.sigmoid.apply(e.getValue())));
} | java | {
"resource": ""
} |
q13766 | TrimFilterWriter.isTrimEnabled | train | private boolean isTrimEnabled() {
	// Decides whether whitespace trimming applies to the response's current
	// content type (HTML/XHTML/XML families, or no type set). The last answer
	// is cached keyed on the content-type String instance.
	String contentType = response.getContentType();
	// If the contentType is the same string (by identity), return the previously determined value.
	// This assumes the same string instance is returned by the response when content type not changed between calls.
	// NOTE: the != below is a deliberate identity comparison, not a bug.
	if(contentType!=isTrimEnabledCacheContentType) {
		isTrimEnabledCacheResult =
			contentType==null
			|| contentType.equals("application/xhtml+xml")
			|| contentType.startsWith("application/xhtml+xml;")
			|| contentType.equals("text/html")
			|| contentType.startsWith("text/html;")
			|| contentType.equals("application/xml")
			|| contentType.startsWith("application/xml;")
			|| contentType.equals("text/xml")
			|| contentType.startsWith("text/xml;")
		;
		isTrimEnabledCacheContentType = contentType;
	}
	return isTrimEnabledCacheResult;
} | java | {
"resource": ""
} |
q13767 | TrimFilterOutputStream.processChar | train | private boolean processChar(char c) {
	// State machine over the character stream. Returns true when the character
	// should be written through, false when it is trimmed. Inside <textarea>
	// or <pre> everything passes through unmodified; outside, leading
	// whitespace on each line is suppressed. readCharMatchCount /
	// preReadCharMatchCount track partial matches of the open/close tag text
	// (both lower- and upper-case variants are checked at each position).
	if(inTextArea) {
		// Pass-through mode: only watch for the closing </textarea> tag.
		if(
			c==TrimFilterWriter.textarea_close[readCharMatchCount]
			|| c==TrimFilterWriter.TEXTAREA_CLOSE[readCharMatchCount]
		) {
			readCharMatchCount++;
			if(readCharMatchCount>=TrimFilterWriter.textarea_close.length) {
				inTextArea=false;
				readCharMatchCount=0;
			}
		} else {
			readCharMatchCount=0;
		}
		return true;
	} else if(inPre) {
		// Pass-through mode: only watch for the closing </pre> tag.
		if(
			c==TrimFilterWriter.pre_close[preReadCharMatchCount]
			|| c==TrimFilterWriter.PRE_CLOSE[preReadCharMatchCount]
		) {
			preReadCharMatchCount++;
			if(preReadCharMatchCount>=TrimFilterWriter.pre_close.length) {
				inPre=false;
				preReadCharMatchCount=0;
			}
		} else {
			preReadCharMatchCount=0;
		}
		return true;
	} else {
		// Trimming mode: whitespace is dropped while atLineStart.
		if(c=='\r') {
			readCharMatchCount = 0;
			preReadCharMatchCount = 0;
			// Carriage return only output when no longer at the beginning of the line
			return !atLineStart;
		} else if(c=='\n') {
			readCharMatchCount = 0;
			preReadCharMatchCount = 0;
			// Newline only output when no longer at the beginning of the line
			if(!atLineStart) {
				atLineStart = true;
				return true;
			} else {
				return false;
			}
		} else if(c==' ' || c=='\t') {
			readCharMatchCount = 0;
			preReadCharMatchCount = 0;
			// Space and tab only output when no longer at the beginning of the line
			return !atLineStart;
		} else {
			// Any other character ends the line-start run and may advance a
			// partial match of the opening <textarea> or <pre> tag.
			atLineStart = false;
			if(
				c==TrimFilterWriter.textarea[readCharMatchCount]
				|| c==TrimFilterWriter.TEXTAREA[readCharMatchCount]
			) {
				readCharMatchCount++;
				if(readCharMatchCount>=TrimFilterWriter.textarea.length) {
					inTextArea=true;
					readCharMatchCount=0;
				}
			} else {
				readCharMatchCount=0;
			}
			if(
				c==TrimFilterWriter.pre[preReadCharMatchCount]
				|| c==TrimFilterWriter.PRE[preReadCharMatchCount]
			) {
				preReadCharMatchCount++;
				if(preReadCharMatchCount>=TrimFilterWriter.pre.length) {
					inPre=true;
					preReadCharMatchCount=0;
				}
			} else {
				preReadCharMatchCount=0;
			}
			return true;
		}
	}
} | java | {
"resource": ""
} |
q13768 | AtomicBitflags.set | train | public int set( final int flags )
{
    // Lock-free retry loop: OR the requested bits into the flag word and
    // return the value that was observed before the update.
    while ( true )
    {
        final int observed = _flags.get();
        final int updated = observed | flags;
        if ( _flags.compareAndSet( observed, updated ) )
        {
            return observed;
        }
    }
} | java | {
"resource": ""
} |
q13769 | AtomicBitflags.unset | train | public int unset( final int flags )
{
    // Lock-free retry loop: clear the requested bits from the flag word and
    // return the value that was observed before the update.
    while ( true )
    {
        final int observed = _flags.get();
        final int updated = observed & ~flags;
        if ( _flags.compareAndSet( observed, updated ) )
        {
            return observed;
        }
    }
} | java | {
"resource": ""
} |
q13770 | AtomicBitflags.change | train | public int change( final int add,
                   final int remove )
{
    // Lock-free retry loop: atomically set the "add" bits and clear the
    // "remove" bits in one step, returning the previously observed value.
    // Note that bits present in both masks end up cleared.
    while ( true )
    {
        final int observed = _flags.get();
        final int updated = ( observed | add ) & ~remove;
        if ( _flags.compareAndSet( observed, updated ) )
        {
            return observed;
        }
    }
} | java | {
"resource": ""
} |
q13771 | Response.message | train | public static <E> String message(Response<E> response) {
    // Null-safe accessor: empty string when the response or its message is absent.
    if (response == null) {
        return StringUtils.EMPTY;
    }
    final String message = response.getMessage();
    return message == null ? StringUtils.EMPTY : message;
} | java | {
"resource": ""
} |
q13772 | StanfordParserUtils.convertTreeBankToCoNLLX | train | public static DTree convertTreeBankToCoNLLX(final String constituentTree) {
    // Parse the Penn-Treebank string, extract typed dependencies, lemmatize,
    // and assemble the CoNLL-X style dependency tree.
    Tree parse = Tree.valueOf(constituentTree);
    SemanticHeadFinder headFinder = new SemanticHeadFinder(false); // keep copula verbs as head
    Collection<TypedDependency> dependencies =
            new EnglishGrammaticalStructure(parse, any -> true, headFinder).typedDependencies();
    List<CoreLabel> taggedTokens = parse.taggedLabeledYield();
    StanfordParser.tagLemma(taggedTokens);
    return StanfordTreeBuilder.generate(taggedTokens, dependencies, null);
} | java | {
"resource": ""
} |
q13773 | LocaleFilter.addLocale | train | private String addLocale(Locale locale, String url, String encodedParamName, String encoding) {
	// Appends the locale as a query parameter to url, unless the URL is for a
	// non-localized resource type or already carries the parameter. The anchor
	// (#fragment) is preserved untouched.
	// Split the anchor
	int poundPos = url.lastIndexOf('#');
	String beforeAnchor;
	String anchor;
	if(poundPos==-1) {
		beforeAnchor = url;
		anchor = null;
	} else {
		anchor = url.substring(poundPos);
		beforeAnchor = url.substring(0, poundPos);
	}
	// Only add for non-excluded file types
	if(isLocalizedPath(beforeAnchor)) {
		int questionPos = beforeAnchor.lastIndexOf('?');
		// Only rewrite a URL that does not already contain a paramName parameter.
		// The two checks cover the first-parameter ("?name=") and
		// subsequent-parameter ("&name=") positions.
		if(
			questionPos == -1
			|| (
				!beforeAnchor.startsWith("?"+encodedParamName+"=", questionPos)
				&& beforeAnchor.indexOf("&"+encodedParamName+"=", questionPos + 1) == -1
			)
		) {
			try {
				beforeAnchor += (questionPos == -1 ? '?' : '&') + encodedParamName + '=' + URLEncoder.encode(toLocaleString(locale), encoding);
			} catch(UnsupportedEncodingException e) {
				// Should never happen with standard supported encoding
				throw new WrappedException(e);
			}
		}
		return
			(anchor != null)
			? (beforeAnchor + anchor)
			: beforeAnchor
		;
	} else {
		// Unmodified
		return url;
	}
} | java | {
"resource": ""
} |
q13774 | LocaleFilter.getEnabledLocales | train | public static Map<String,Locale> getEnabledLocales(ServletRequest request) {
    // The filter stores the enabled-locale map as a request attribute; a
    // missing attribute means the request never passed through LocaleFilter.
    Object attribute = request.getAttribute(ENABLED_LOCALES_REQUEST_ATTRIBUTE_KEY);
    if(attribute == null) {
        throw new IllegalStateException("Not in request filtered by LocaleFilter, unable to get enabled locales.");
    }
    @SuppressWarnings("unchecked")
    Map<String,Locale> enabledLocales = (Map<String,Locale>)attribute;
    return enabledLocales;
} | java | {
"resource": ""
} |
q13775 | LocaleFilter.isLocalizedPath | train | protected boolean isLocalizedPath(String url) {
    // Strip any query string, then reject (return false for) static resource
    // extensions that never carry locale-specific content.
    int questionPos = url.lastIndexOf('?');
    String lowerPath = (questionPos == -1 ? url : url.substring(0, questionPos)).toLowerCase(Locale.ROOT);
    // Extension list matches SessionResponseWrapper and NoSessionFilter.
    final String[] skippedExtensions = {
        ".bmp", ".css", ".exe", ".gif", ".ico", ".jpeg",
        ".jpg", ".js", ".png", ".svg", ".txt", ".zip"
    };
    for (String extension : skippedExtensions) {
        if (lowerPath.endsWith(extension)) {
            return false;
        }
    }
    return true;
} | java | {
"resource": ""
} |
q13776 | LocaleFilter.toLocaleString | train | protected String toLocaleString(Locale locale) {
    // Renders the locale as "language", "language-country", or
    // "language-country-variant"; an empty language yields "".
    String language = locale.getLanguage();
    if (language.isEmpty()) {
        return "";
    }
    String country = locale.getCountry();
    if (country.isEmpty()) {
        return language;
    }
    StringBuilder result = new StringBuilder(language).append('-').append(country);
    String variant = locale.getVariant();
    if (!variant.isEmpty()) {
        result.append('-').append(variant);
    }
    return result.toString();
} | java | {
"resource": ""
} |
q13777 | LibSVMClassifier.loadModel | train | @Override
public void loadModel(InputStream modelIs) {
    // Loads a zipped model: the ".model" entry holds the libsvm model and the
    // ".lbindexer" entry holds the serialized label indexer. The input stream
    // is buffered fully into memory so the zip can be scanned twice.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        IOUtils.copy(modelIs, baos);
    } catch (IOException e) {
        LOG.error("Load model err.", e);
    }
    // First pass: locate and load the svm model entry.
    InputStream isForSVMLoad = new ByteArrayInputStream(baos.toByteArray());
    try (ZipInputStream zipInputStream = new ZipInputStream(isForSVMLoad)) {
        ZipEntry entry;
        while ((entry = zipInputStream.getNextEntry()) != null) {
            if (entry.getName().endsWith(".model")) {
                // Reader intentionally not closed here: closing it would close
                // the underlying ZipInputStream mid-scan.
                BufferedReader br = new BufferedReader(new InputStreamReader(zipInputStream, Charset.defaultCharset()));
                this.model = svm.svm_load_model(br);
            }
        }
    } catch (IOException e) {
        // Previously swallowed silently ("Do Nothing"); surface the failure.
        LOG.error("Err in load svm model", e);
    }
    // Second pass: locate and load the label indexer entry. A fresh stream is
    // used instead of reassigning the method parameter.
    InputStream isForIndexerLoad = new ByteArrayInputStream(baos.toByteArray());
    try (ZipInputStream zipInputStream = new ZipInputStream(isForIndexerLoad)) {
        ZipEntry entry;
        while ((entry = zipInputStream.getNextEntry()) != null) {
            if (entry.getName().endsWith(".lbindexer")) {
                String lbIndexer = IOUtils.toString(zipInputStream, Charset.defaultCharset());
                this.labelIndexer = new LabelIndexer(new ArrayList<>());
                this.labelIndexer.readFromSerializedString(lbIndexer);
            }
        }
    } catch (IOException e) {
        LOG.error("Err in load LabelIndexer", e);
    }
} | java | {
"resource": ""
} |
q13778 | NewEncodingUtils.getJavaScriptUnicodeEscapeString | train | static String getJavaScriptUnicodeEscapeString(char ch) {
int chInt = (int)ch;
if(chInt>=ENCODE_RANGE_1_START && chInt<ENCODE_RANGE_1_END) {
return javaScriptUnicodeEscapeStrings1[chInt - ENCODE_RANGE_1_START];
}
if(chInt>=ENCODE_RANGE_2_START && chInt<ENCODE_RANGE_2_END) {
return javaScriptUnicodeEscapeStrings2[chInt - ENCODE_RANGE_2_START];
}
if(chInt>=ENCODE_RANGE_3_START && chInt<ENCODE_RANGE_3_END) {
return javaScriptUnicodeEscapeStrings3[chInt - ENCODE_RANGE_3_START];
}
// No encoding needed
return null;
} | java | {
"resource": ""
} |
q13779 | NaiveBayesClassifier.predict | train | @Override
public Map<String, Double> predict(Tuple predict) {
    // Gaussian Naive Bayes posterior per label: prior * product of per-feature
    // Gaussian likelihoods, normalized by the total evidence.
    Map<Integer, Double> labelProb = new HashMap<>();
    for (Integer labelIndex : model.labelIndexer.getIndexSet()) {
        double likelihood = 1.0D;
        for (int i = 0; i < predict.vector.getVector().length; i++) {
            double fi = predict.vector.getVector()[i];
            likelihood = likelihood * VectorUtils.gaussianPDF(model.meanVectors[labelIndex][i], model.varianceVectors[labelIndex][i], fi);
        }
        double posterior = model.labelPrior.get(labelIndex) * likelihood; // prior*likelihood, numerator of the posterior
        labelProb.put(labelIndex, posterior);
    }
    if (labelProb.isEmpty()) {
        // Replaces the original -1 double sentinel + == comparison with an
        // explicit emptiness check.
        LOG.error("Evidence is Empty!");
        return new HashMap<>();
    }
    double evidence = labelProb.values().stream().mapToDouble(Double::doubleValue).sum();
    // Normalize by the evidence (denominator of the posterior). replaceAll
    // avoids the original pattern of put()-ing into the map while iterating
    // its own entry set.
    labelProb.replaceAll((labelIndex, posterior) -> posterior / evidence);
    Map<String, Double> result = model.labelIndexer.convertMapKey(labelProb);
    if (predict.label == null || predict.label.isEmpty()) { // Just for write to predict tuple.
        predict.label = result.entrySet().stream().max((e1, e2) -> e1.getValue().compareTo(e2.getValue())).map(Entry::getKey).orElse(StringUtils.EMPTY);
    }
    return result;
} | java | {
"resource": ""
} |
q13780 | NaiveBayesClassifier.splitData | train | public static void splitData(final String originalTrainingDataFile) {
	// Iteratively partitions the training file into self-consistent
	// (".aligned") and misclassified (".wrong") subsets: retrain on the kept
	// tuples, move misclassified ones out, move back any that a retrained
	// model now gets right, until the kept set stops changing.
	List<Tuple> trainingData = NaiveBayesClassifier.readTrainingData(originalTrainingDataFile, "\\s");
	List<Tuple> wrongData = new ArrayList<>();
	int lastTrainingDataSize;
	int iterCount = 0;
	do {
		System.out.println("Iteration:\t" + (++iterCount));
		lastTrainingDataSize = trainingData.size();
		NaiveBayesClassifier nbc = new NaiveBayesClassifier();
		nbc.train(trainingData);
		// Pass 1: evict tuples the current model misclassifies.
		Iterator<Tuple> trainingDataIter = trainingData.iterator();
		while (trainingDataIter.hasNext()) {
			Tuple t = trainingDataIter.next();
			String actual = nbc.predictLabel(t);
			if (!t.label.equals(actual) && !t.label.equals("1")) { // preserve 1 since too few.
				wrongData.add(t);
				trainingDataIter.remove();
			}
		}
		// Pass 2: readmit previously evicted tuples the model now predicts correctly.
		Iterator<Tuple> wrongDataIter = wrongData.iterator();
		while (wrongDataIter.hasNext()) {
			Tuple t = wrongDataIter.next();
			String actual = nbc.predictLabel(t);
			if (t.label.equals(actual)) {
				trainingData.add(t);
				wrongDataIter.remove();
			}
		}
	} while (trainingData.size() != lastTrainingDataSize);
	writeToFile(trainingData, originalTrainingDataFile + ".aligned");
	writeToFile(wrongData, originalTrainingDataFile + ".wrong");
} | java | {
"resource": ""
} |
q13781 | AbstractMultipartUtility.addFilePart | train | public void addFilePart(final String fieldName, final InputStream stream, final String contentType)
	throws IOException
{
	// Convenience overload: delegates to the four-argument form with no file name.
	addFilePart(fieldName, stream, null, contentType);
} | java | {
"resource": ""
} |
q13782 | AbstractMultipartUtility.addFilePart | train | public void addFilePart(final String fieldName, final URL urlToUploadFile)
    throws IOException
{
    // Delegates to the stream overload. No file name is supplied; the content
    // type is guessed from the URL's name.
    final String guessedContentType =
            URLConnection.guessContentTypeFromName(urlToUploadFile.toString());
    addFilePart(fieldName, urlToUploadFile.openStream(), null, guessedContentType);
} | java | {
"resource": ""
} |
q13783 | AbstractMultipartUtility.addHeaderField | train | public void addHeaderField(final String name, final String value)
{
    // Emit a single "Name: value" header line and flush it to the stream.
    writer.append(name).append(": ").append(value).append(LINE_FEED);
    writer.flush();
} | java | {
"resource": ""
} |
q13784 | AbstractMultipartUtility.finish | train | public HttpResponse finish() throws IOException
{
    // Terminate the multipart body with the closing boundary marker, then
    // complete the request; the writer is closed even if doFinish() throws.
    writer.append("--").append(boundary).append("--").append(LINE_FEED);
    writer.flush();
    try {
        return doFinish();
    } finally {
        writer.close();
    }
} | java | {
"resource": ""
} |
q13785 | Speedometer.update | train | public double update(final double units) {
	// Records "units" of work and returns the smoothed speed in units/second.
	// All state is guarded by "lock"; time is accumulated in microseconds.
	final double speed;
	lock.lock();
	try {
		final long currentTime = System.nanoTime();
		final long timeDifference = (currentTime - lastUpdateTime) / C1; // nanoseconds to micros
		if (timeDifference >= averagingPeriod) {
			// Gap longer than a full averaging window: the window restarts and
			// the new batch alone defines the speed.
			speed = units / averagingPeriod;
			cachedSpeed = speed;
			lastUpdateTime = currentTime;
			elapsedTime = ZERO_TIME;
			quantity = ZERO_UNITS;
		} else {
			if (timeDifference > ZERO_TIME) {
				lastUpdateTime = currentTime;
				elapsedTime += timeDifference;
			}
			if (units != ZERO_UNITS) {
				quantity += units;
			}
			if (elapsedTime >= averagingPeriod) {
				// A full window has elapsed: compute its average and reset.
				speed = quantity / elapsedTime;
				cachedSpeed = speed;
				elapsedTime = ZERO_TIME;
				quantity = ZERO_UNITS;
			} else {
				// Partial window: blend the cached speed (weighted by the
				// remaining window) with the quantity accumulated so far.
				speed = (cachedSpeed * (averagingPeriod - elapsedTime) + quantity) / averagingPeriod;
			}
		}
	} finally {
		lock.unlock();
	}
	return speed * C0; // units per micro to units per second
} | java | {
"resource": ""
} |
q13786 | JsonUtil.toBean | train | public static <T> Optional<T> toBean(String json, Class<T> clazz) {
	// Deserializes json into an instance of clazz. Returns an empty Optional
	// on blank input or any parse failure; failures are logged, never thrown.
	if (StringUtils.isBlank(json)) {
		log.warn("json is blank. ");
		return Optional.empty();
	}
	try {
		// NOTE(review): this mutates the shared OBJECT_MAPPER on every call;
		// configuring it once at initialization would avoid repeated global
		// mutation — verify thread-safety expectations before changing.
		OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
		return Optional.of(OBJECT_MAPPER.readValue(json, clazz));
	} catch (Exception e) {
		log.error(e.getMessage(), e);
		return Optional.empty();
	}
} | java | {
"resource": ""
} |
q13787 | JsonUtil.toJson | train | public static <T> String toJson(T t) {
    // Serializes t to a JSON string; null input or any serialization failure
    // yields the empty string (failures are logged, never thrown).
    if (t == null) {
        log.warn("t is blank. ");
        return "";
    }
    try {
        return OBJECT_MAPPER.writeValueAsString(t);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        return "";
    }
} | java | {
"resource": ""
} |
q13788 | RESTWebService.parse | train | @RequestMapping(produces = MediaType.APPLICATION_JSON_VALUE, value = "/parse", method = RequestMethod.GET)
public String parse(@RequestParam("sentence") String sentence, HttpServletRequest request) {
    // Parses the sentence into a dependency tree, persists the tree and its
    // nodes, and returns the root serialized as a JSON array.
    if (sentence == null || sentence.trim().isEmpty()) {
        return StringUtils.EMPTY;
    }
    sentence = sentence.trim();
    LOGGER.info("Parse [" + sentence + "]");
    DTree dependencyTree = PARSER.parse(sentence);
    DTreeEntity treeEntity = new DTreeEntity(dependencyTree, "SAMPLE_AUTHOR");
    dNodeEntityRepository.save(treeEntity.dNodeEntities);
    dTreeEntityRepository.save(treeEntity);
    return "[" + toJSON(dependencyTree.get(0)) + "]";
} | java | {
"resource": ""
} |
q13789 | AttributeUtils.resolveValue | train | public static <T> T resolveValue(ValueExpression expression, Class<T> type, ELContext elContext) {
    // A null expression resolves to null; otherwise evaluate and cast to type.
    return expression == null ? null : type.cast(expression.getValue(elContext));
} | java | {
"resource": ""
} |
q13790 | AttributeUtils.resolveValue | train | public static <T> T resolveValue(Object value, Class<T> type, ELContext elContext) {
    // Nulls pass through; deferred EL expressions are evaluated first; any
    // other value is cast directly to the requested type.
    if (value == null) {
        return null;
    }
    if (value instanceof ValueExpression) {
        return resolveValue((ValueExpression) value, type, elContext);
    }
    return type.cast(value);
} | java | {
"resource": ""
} |
q13791 | MediaValidator.getMediaValidator | train | public static MediaValidator getMediaValidator(MediaType contentType, Writer out) throws MediaException {
	// Returns a validator for contentType wrapping "out". An already-wrapping
	// validator is reused when it accepts the requested type as valid input.
	// If the existing out is already validating for this type, use it.
	// This occurs when one validation validates to a set of characters that are a subset of the requested validator.
	// For example, a URL is always valid TEXT.
	if(out instanceof MediaValidator) {
		MediaValidator inputValidator = (MediaValidator)out;
		if(inputValidator.isValidatingMediaInputType(contentType)) return inputValidator;
	}
	// Add filter if needed for the given type
	switch(contentType) {
		case JAVASCRIPT:
		case JSON:
		case LD_JSON:
			// All three JavaScript-family types share one validator, keyed by type.
			return new JavaScriptValidator(out, contentType);
		case SH:
			return new ShValidator(out);
		case MYSQL:
			return new MysqlValidator(out);
		case PSQL:
			return new PsqlValidator(out);
		case TEXT:
			return new TextValidator(out);
		case URL:
			return new UrlValidator(out);
		case XHTML:
			return new XhtmlValidator(out);
		case XHTML_ATTRIBUTE:
			return new XhtmlAttributeValidator(out);
		default:
			// Unrecognized type: fail loudly rather than pass content unvalidated.
			throw new MediaException(ApplicationResources.accessor.getMessage("MediaValidator.unableToFindValidator", contentType.getContentType()));
	}
} | java | {
"resource": ""
} |
q13792 | AutoGitView.getAllowRobots | train | @Override
public boolean getAllowRobots(ServletContext servletContext, HttpServletRequest request, HttpServletResponse response, Page page) {
	// This view is never exposed to search-engine robots.
	return false;
} | java | {
"resource": ""
} |
q13793 | LabelIndexer.convertMapKey | train | public Map<String, Double> convertMapKey(Map<Integer, Double> probs) {
    // Translate each index key to its label string, preserving the values.
    Map<String, Double> stringKeyProb = new HashMap<>();
    for (Map.Entry<Integer, Double> entry : probs.entrySet()) {
        stringKeyProb.put(getLabel(entry.getKey()), entry.getValue());
    }
    return stringKeyProb;
} | java | {
"resource": ""
} |
q13794 | TeiUtils.validateMediaType | train | public static List<ValidationMessage> validateMediaType(TagData data, List<ValidationMessage> messages) {
	// Validates the tag's "type" attribute at translation time. Request-time
	// values and already-resolved MediaType instances are skipped; otherwise
	// the string must resolve to a known media type or a message is appended.
	Object o = data.getAttribute("type");
	if(
		o != null
		&& o != TagData.REQUEST_TIME_VALUE
		&& !(o instanceof MediaType)
	) {
		String type = Coercion.toString(o);
		try {
			// First allow shortcuts (matching enum names)
			MediaType mediaType = MediaType.getMediaTypeByName(type);
			if(mediaType == null) {
				mediaType = MediaType.getMediaTypeForContentType(type);
			}
			// Value is OK
		} catch(MediaException err) {
			messages = MinimalList.add(
				messages,
				new ValidationMessage(
					data.getId(),
					err.getMessage()
				)
			);
		}
	}
	return messages;
} | java | {
"resource": ""
} |
q13795 | TeiUtils.validateScope | train | public static List<ValidationMessage> validateScope(TagData data, List<ValidationMessage> messages) {
	// Validates the tag's "scope" attribute at translation time. Request-time
	// values are skipped; otherwise the string must name a valid JSP scope or
	// a validation message is appended (mirrors validateMediaType).
	Object o = data.getAttribute("scope");
	if(
		o != null
		&& o != TagData.REQUEST_TIME_VALUE
	) {
		String scope = Coercion.toString(o);
		try {
			Scope.getScopeId(scope);
			// Value is OK
		} catch(JspTagException err) {
			messages = MinimalList.add(
				messages,
				new ValidationMessage(
					data.getId(),
					err.getMessage()
				)
			);
		}
	}
	return messages;
} | java | {
"resource": ""
} |
q13796 | GeoDistance.distanceKms | train | public static double distanceKms(BigDecimal lat1, BigDecimal lng1, BigDecimal lat2, BigDecimal lng2 ){
    // Build the two coordinates, then measure the distance between them.
    final GeoCoordinate from = new GeoCoordinate(lat1, lng1);
    final GeoCoordinate to = new GeoCoordinate(lat2, lng2);
    return from.distanceTo(to);
} | java | {
"resource": ""
} |
q13797 | ObjectBuilder.map | train | public static <K, V> MapBuilder<K, V> map(Map<K, V> instance) {
    // Wrap the given map in a fluent builder.
    final MapBuilder<K, V> builder = new MapBuilder<>(instance);
    return builder;
} | java | {
"resource": ""
} |
q13798 | CrossValidation.run | train | public void run(final List<Tuple> data) {
    // n-fold cross validation over the given data.
    // Fixes versus the original: (1) the shuffled copy is now actually used
    // for fold slicing (previously the unshuffled "data" list was sliced, so
    // shuffling had no effect); (2) fold windows start at i*chunkSize instead
    // of i (previously all folds overlapped at the head of the list); (3) the
    // dropped-tail count is data.size() % nfold, the tuples left over after
    // nfold full chunks (previously computed modulo chunkSize).
    List<Tuple> dataCopy = new ArrayList<>(data);
    this.labels = data.parallelStream().map(x -> x.label).collect(Collectors.toSet());
    if (shuffleData) {
        Collections.shuffle(dataCopy);
    }
    int chunkSize = data.size() / nfold;
    int remainder = data.size() % nfold;
    for (int i = data.size() - 1; i > data.size() - 1 - remainder; i--) {
        LOG.info("Dropping the tail id: " + dataCopy.get(i).id);
    }
    for (int i = 0; i < nfold; i++) {
        System.err.println("Cross validation round " + (i + 1) + "/" + nfold);
        int start = i * chunkSize;
        int end = start + chunkSize;
        List<Tuple> testing = new ArrayList<>(dataCopy.subList(start, end));
        List<Tuple> training = new ArrayList<>(dataCopy.subList(0, start));
        training.addAll(dataCopy.subList(end, dataCopy.size()));
        eval(training, testing, i);
    }
} | java | {
"resource": ""
} |
q13799 | CrossValidation.eval | train | private void eval(List<Tuple> training, List<Tuple> testing, int nfold) {
    // Train on the training fold, then score each test tuple against the
    // classifier's highest-probability label.
    classifier.train(training);
    for (Tuple tuple : testing) {
        String actual = classifier.predict(tuple).entrySet().stream()
                .max(Map.Entry.comparingByValue())
                .map(Map.Entry::getKey)
                .orElse(StringUtils.EMPTY);
        updateScore(tuple, actual, nfold);
    }
} | java | {
} | java | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.