_id
stringlengths
2
7
title
stringlengths
3
140
partition
stringclasses
3 values
text
stringlengths
73
34.1k
language
stringclasses
1 value
meta_information
dict
q9400
QueryReferenceBroker.hasNullifiedFK
train
/**
 * Checks whether every foreign-key value represents SQL NULL for its
 * corresponding field descriptor. A zero-length value array yields true.
 */
private boolean hasNullifiedFK(FieldDescriptor[] fkFieldDescriptors, Object[] fkValues)
{
    for (int idx = 0; idx < fkValues.length; idx++)
    {
        // A single non-null FK value is enough to answer 'no'.
        if (!pb.serviceBrokerHelper().representsNull(fkFieldDescriptors[idx], fkValues[idx]))
        {
            return false;
        }
    }
    return true;
}
java
{ "resource": "" }
q9401
QueryReferenceBroker.getFKQuery
train
private Query getFKQuery(Object obj, ClassDescriptor cld, CollectionDescriptor cds) { Query fkQuery; QueryByCriteria fkQueryCrit; if (cds.isMtoNRelation()) { fkQueryCrit = getFKQueryMtoN(obj, cld, cds); } else { fkQueryCrit = getFKQuery1toN(obj, cld, cds); } // check if collection must be ordered if (!cds.getOrderBy().isEmpty()) { Iterator iter = cds.getOrderBy().iterator(); while (iter.hasNext()) { fkQueryCrit.addOrderBy((FieldHelper)iter.next()); } } // BRJ: customize the query if (cds.getQueryCustomizer() != null) { fkQuery = cds.getQueryCustomizer().customizeQuery(obj, pb, cds, fkQueryCrit); } else { fkQuery = fkQueryCrit; } return fkQuery; }
java
{ "resource": "" }
q9402
QueryReferenceBroker.getPKQuery
train
/**
 * Builds a criteria query matching exactly the primary key of the given
 * identity, against the identity's top-level class.
 */
public Query getPKQuery(Identity oid)
{
    ClassDescriptor cld = pb.getClassDescriptor(oid.getObjectsTopLevelClass());
    FieldDescriptor[] pkFields = cld.getPkFields();
    Object[] pkValues = oid.getPrimaryKeyValues();

    // One equality term per PK field; values are positionally aligned.
    Criteria crit = new Criteria();
    for (int idx = 0; idx < pkFields.length; idx++)
    {
        crit.addEqualTo(pkFields[idx].getAttributeName(), pkValues[idx]);
    }
    return QueryFactory.newQuery(cld.getClassOfObject(), crit);
}
java
{ "resource": "" }
q9403
QueryReferenceBroker.retrieveCollections
train
/**
 * Retrieves all collection attributes of newObj without using proxies.
 */
public void retrieveCollections(Object newObj, ClassDescriptor cld, boolean forced) throws PersistenceBrokerException
{
    // Delegate with the proxy flag switched off.
    doRetrieveCollections(newObj, cld, forced, false);
}
java
{ "resource": "" }
q9404
QueryReferenceBroker.retrieveProxyCollections
train
/**
 * Retrieves all collection attributes of newObj as proxies.
 */
public void retrieveProxyCollections(Object newObj, ClassDescriptor cld, boolean forced) throws PersistenceBrokerException
{
    // Delegate with the proxy flag switched on.
    doRetrieveCollections(newObj, cld, forced, true);
}
java
{ "resource": "" }
q9405
QueryReferenceBroker.removePrefetchingListeners
train
/**
 * Detaches and forgets all registered prefetching listeners.
 * Safe to call when no listener list has been created yet.
 */
public void removePrefetchingListeners()
{
    if (prefetchingListeners == null)
    {
        return;
    }
    for (Iterator it = prefetchingListeners.iterator(); it.hasNext(); )
    {
        ((PBPrefetchingListener) it.next()).removeThisListener();
    }
    prefetchingListeners.clear();
}
java
{ "resource": "" }
q9406
InfinispanCacheFactory.init
train
/**
 * Initializes the Infinispan cache manager (from the configured XML file
 * or a programmatic default), registers the cache listener, and builds
 * the per-layer cache configurations.
 */
@PostConstruct
protected void init() throws IOException
{
    // base configuration from XML file
    if (null != configurationFile)
    {
        log.debug("Get base configuration from {}", configurationFile);
        manager = new DefaultCacheManager(configurationFile);
    }
    else
    {
        GlobalConfigurationBuilder builder = new GlobalConfigurationBuilder();
        // allow several cache managers in the same JVM to register JMX beans
        builder.globalJmxStatistics().allowDuplicateDomains(true);
        manager = new DefaultCacheManager(builder.build());
    }
    if (listener == null)
    {
        listener = new InfinispanCacheListener();
    }
    manager.addListener(listener);

    // cache for caching the cache configurations (hmmm, sounds a bit strange)
    Map<String, Map<CacheCategory, CacheService>> cacheCache =
            new HashMap<String, Map<CacheCategory, CacheService>>();

    // build default configuration
    if (null != defaultConfiguration)
    {
        setCaches(cacheCache, null, defaultConfiguration);
    }
    // build layer specific configurations
    for (Layer layer : layerMap.values())
    {
        CacheInfo ci = configurationService.getLayerExtraInfo(layer.getLayerInfo(), CacheInfo.class);
        if (null != ci)
        {
            setCaches(cacheCache, layer, ci);
        }
    }
}
java
{ "resource": "" }
q9407
SearchFilter.ConvertBinaryOperator
train
protected static String ConvertBinaryOperator(int oper) { // Convert the operator into the proper string String oper_string; switch (oper) { default: case EQUAL: oper_string = "="; break; case LIKE: oper_string = "LIKE"; break; case NOT_EQUAL: oper_string = "!="; break; case LESS_THAN: oper_string = "<"; break; case GREATER_THAN: oper_string = ">"; break; case GREATER_EQUAL: oper_string = ">="; break; case LESS_EQUAL: oper_string = "<="; break; } return oper_string; }
java
{ "resource": "" }
q9408
PointWriter.writeObject
train
/**
 * Writes a point as a VML shape element whose "adj" attribute carries
 * the formatted coordinates as "x,y".
 */
public void writeObject(Object o, GraphicsDocument document, boolean asChild) throws RenderException
{
    document.writeElement("vml:shape", asChild);
    Point point = (Point) o;
    StringBuilder adj = new StringBuilder();
    adj.append(document.getFormatter().format(point.getX()));
    adj.append(',');
    adj.append(document.getFormatter().format(point.getY()));
    document.writeAttribute("adj", adj.toString());
}
java
{ "resource": "" }
q9409
DatabaseMetaDataTreeModel.setStatusBarMessage
train
public void setStatusBarMessage(final String message) { // Guaranteed to return a non-null array Object[] listeners = listenerList.getListenerList(); // Process the listeners last to first, notifying // those that are interested in this event for (int i = listeners.length-2; i>=0; i-=2) { if (listeners[i]==StatusMessageListener.class) { ((StatusMessageListener)listeners[i+1]).statusMessageReceived(message); } } }
java
{ "resource": "" }
q9410
DatabaseMetaDataTreeModel.reportSqlError
train
/**
 * Reports an SQLException and its whole chain of linked exceptions to
 * System.err, then prints the stack trace of the head exception.
 *
 * Fix: the loop previously appended the head exception's error code and
 * message on every iteration (it read 'sqlEx' instead of 'currentSqlEx'),
 * so chained exceptions were walked but never actually reported.
 */
public void reportSqlError(String message, java.sql.SQLException sqlEx)
{
    StringBuffer strBufMessages = new StringBuffer();
    java.sql.SQLException currentSqlEx = sqlEx;
    do
    {
        // Report the CURRENT link of the chain, not always the head.
        strBufMessages.append("\n" + currentSqlEx.getErrorCode() + ":" + currentSqlEx.getMessage());
        currentSqlEx = currentSqlEx.getNextException();
    }
    while (currentSqlEx != null);
    System.err.println(message + strBufMessages.toString());
    sqlEx.printStackTrace();
}
java
{ "resource": "" }
q9411
MetadataObjectCopyStrategy.copy
train
/**
 * Creates a copy of the given object via the shared clone helper.
 * A fresh identity map is supplied per invocation so that objects
 * referenced multiple times are cloned only once within one copy.
 */
public Object copy(final Object obj, final PersistenceBroker broker)
{
    return clone(obj, IdentityMapFactory.getIdentityMap(), broker);
}
java
{ "resource": "" }
q9412
SqlBasedRsIterator.getObjectFromResultSet
train
/**
 * Materializes the object for the current ResultSet row, falling back
 * to Identity-based loading when direct field mapping fails.
 */
protected Object getObjectFromResultSet() throws PersistenceBrokerException
{
    try
    {
        // if all primitive attributes of the object are contained in the ResultSet
        // the fast direct mapping can be used
        return super.getObjectFromResultSet();
    }
    // if the full loading failed we assume that at least PK attributes are contained
    // in the ResultSet and perform a slower Identity based loading...
    // This may of course also fail and can throw another PersistenceBrokerException
    catch (PersistenceBrokerException e)
    {
        Identity oid = getIdentityFromResultSet();
        return getBroker().getObjectByIdentity(oid);
    }
}
java
{ "resource": "" }
q9413
DataSet.createInsertionSql
train
/**
 * Writes one INSERT statement per registered bean to the given writer,
 * separated by newlines (no trailing newline after the last statement).
 */
public void createInsertionSql(Database model, Platform platform, Writer writer) throws IOException
{
    Iterator it = _beans.iterator();
    while (it.hasNext())
    {
        writer.write(platform.getInsertSql(model, (DynaBean) it.next()));
        if (it.hasNext())
        {
            writer.write("\n");
        }
    }
}
java
{ "resource": "" }
q9414
DataSet.insert
train
/**
 * Inserts all registered beans, either one-by-one (batchSize <= 1) or in
 * batches of at most batchSize beans.
 *
 * Fix: the final batch may be smaller than batchSize; the original
 * subList(startIdx, startIdx + batchSize) call threw
 * IndexOutOfBoundsException whenever the bean count was not an exact
 * multiple of batchSize. The end index is now clamped to the list size.
 */
public void insert(Platform platform, Database model, int batchSize) throws SQLException
{
    if (batchSize <= 1)
    {
        for (Iterator it = _beans.iterator(); it.hasNext();)
        {
            platform.insert(model, (DynaBean) it.next());
        }
    }
    else
    {
        for (int startIdx = 0; startIdx < _beans.size(); startIdx += batchSize)
        {
            // Clamp so the last (possibly partial) batch stays in range.
            int endIdx = Math.min(startIdx + batchSize, _beans.size());
            platform.insert(model, _beans.subList(startIdx, endIdx));
        }
    }
}
java
{ "resource": "" }
q9415
GeonamesGeocoderService.connect
train
/**
 * Opens a connection to the Geonames service for the given relative URL,
 * with explicit connect/read timeouts and user agent.
 */
private InputStream connect(String url) throws IOException
{
    final URLConnection connection = new URL(URL_BASE + url).openConnection();
    connection.setConnectTimeout(CONNECT_TIMEOUT);
    connection.setReadTimeout(READ_TIMEOUT);
    connection.setRequestProperty("User-Agent", USER_AGENT);
    return connection.getInputStream();
}
java
{ "resource": "" }
q9416
PersistenceBrokerImpl.abortTransaction
train
/**
 * Rolls back the running PB transaction: fires the rollback events,
 * resets broker/tx state, and performs the JDBC rollback only when the
 * connection manager is still in a local transaction. Does nothing when
 * no transaction is in progress.
 */
public synchronized void abortTransaction() throws TransactionNotInProgressException
{
    if(isInTransaction())
    {
        fireBrokerEvent(BEFORE_ROLLBACK_EVENT);
        // Broker state is reset before touching the connection, so the
        // broker stays usable even if the JDBC rollback below fails.
        setInTransaction(false);
        clearRegistrationLists();
        referencesBroker.removePrefetchingListeners();
        /*
        arminw:
        check if we in local tx, before do local rollback
        Necessary, because ConnectionManager may do a rollback by itself
        or in managed environments the used connection is already be closed
        */
        if(connectionManager.isInLocalTransaction()) this.connectionManager.localRollback();
        fireBrokerEvent(AFTER_ROLLBACK_EVENT);
    }
}
java
{ "resource": "" }
q9417
PersistenceBrokerImpl.delete
train
/**
 * Deletes obj (and, unless ignoreReferences, its referenced objects).
 * When tx-checking is enabled and no PB transaction is running, an error
 * with a stack trace is logged but the deletion still proceeds.
 */
public void delete(Object obj, boolean ignoreReferences) throws PersistenceBrokerException
{
    if(isTxCheck() && !isInTransaction())
    {
        if(logger.isEnabledFor(Logger.ERROR))
        {
            String msg = "No running PB-tx found. Please, only delete objects in context of a PB-transaction" +
                    " to avoid side-effects - e.g. when rollback of complex objects.";
            try
            {
                // Thrown and caught locally only to capture a stack trace
                // pointing at the offending caller.
                throw new Exception("** Delete object without active PersistenceBroker transaction **");
            }
            catch(Exception e)
            {
                logger.error(msg, e);
            }
        }
    }
    try
    {
        doDelete(obj, ignoreReferences);
    }
    finally
    {
        // The recursion-guard list tracks only one top-level delete call.
        markedForDelete.clear();
    }
}
java
{ "resource": "" }
q9418
PersistenceBrokerImpl.doDelete
train
/**
 * Internal delete: resolves proxies, guards against recursion via the
 * markedForDelete list, validates the primary key, fires BEFORE/AFTER
 * delete events and delegates the actual deletion to performDeletion.
 */
private void doDelete(Object obj, boolean ignoreReferences) throws PersistenceBrokerException
{
    //logger.info("DELETING " + obj);
    // object is not null
    if (obj != null)
    {
        obj = getProxyFactory().getRealObject(obj);
        /**
         * Kuali Foundation modification -- 8/24/2007
         */
        if ( obj == null ) return;
        /**
         * End of Kuali Foundation modification
         */
        /**
         * MBAIRD
         * 1. if we are marked for delete already, avoid recursing on this object
         *
         * arminw:
         * use object instead Identity object in markedForDelete List,
         * because using objects we get a better performance. I can't find
         * side-effects in doing so.
         */
        if (markedForDelete.contains(obj))
        {
            return;
        }

        ClassDescriptor cld = getClassDescriptor(obj.getClass());
        //BRJ: check for valid pk
        if (!serviceBrokerHelper().assertValidPkForDelete(cld, obj))
        {
            String msg = "Cannot delete object without valid PKs. " + obj;
            logger.error(msg);
            return;
        }

        /**
         * MBAIRD
         * 2. register object in markedForDelete map.
         */
        markedForDelete.add(obj);
        Identity oid = serviceIdentity().buildIdentity(cld, obj);

        // Invoke events on PersistenceBrokerAware instances and listeners
        BEFORE_DELETE_EVENT.setTarget(obj);
        fireBrokerEvent(BEFORE_DELETE_EVENT);
        BEFORE_DELETE_EVENT.setTarget(null);

        // now perform deletion
        performDeletion(cld, obj, oid, ignoreReferences);

        // Invoke events on PersistenceBrokerAware instances and listeners
        AFTER_DELETE_EVENT.setTarget(obj);
        fireBrokerEvent(AFTER_DELETE_EVENT);
        AFTER_DELETE_EVENT.setTarget(null);

        // let the connection manager to execute batch
        connectionManager.executeBatchIfNecessary();
    }
}
java
{ "resource": "" }
q9419
PersistenceBrokerImpl.deleteByQuery
train
/**
 * Deletes all objects matching the query. SQL queries run as-is;
 * Identity queries are first converted to a PK criteria query. For
 * extents the delete is repeated per concrete subclass, touching each
 * physical table only once.
 */
private void deleteByQuery(Query query, ClassDescriptor cld) throws PersistenceBrokerException
{
    if (logger.isDebugEnabled())
    {
        logger.debug("deleteByQuery " + cld.getClassNameOfObject() + ", " + query);
    }

    if (query instanceof QueryBySQL)
    {
        String sql = ((QueryBySQL) query).getSql();
        this.dbAccess.executeUpdateSQL(sql, cld);
    }
    else
    {
        // if query is Identity based transform it to a criteria based query first
        if (query instanceof QueryByIdentity)
        {
            QueryByIdentity qbi = (QueryByIdentity) query;
            Object oid = qbi.getExampleObject();
            // make sure it's an Identity
            if (!(oid instanceof Identity))
            {
                oid = serviceIdentity().buildIdentity(oid);
            }
            query = referencesBroker.getPKQuery((Identity) oid);
        }

        if (!cld.isInterface())
        {
            this.dbAccess.executeDelete(query, cld);
        }

        // if class is an extent, we have to delete all extent classes too
        String lastUsedTable = cld.getFullTableName();
        if (cld.isExtent())
        {
            Iterator extents = getDescriptorRepository().getAllConcreteSubclassDescriptors(cld).iterator();
            while (extents.hasNext())
            {
                ClassDescriptor extCld = (ClassDescriptor) extents.next();
                // read same table only once
                if (!extCld.getFullTableName().equals(lastUsedTable))
                {
                    lastUsedTable = extCld.getFullTableName();
                    this.dbAccess.executeDelete(query, extCld);
                }
            }
        }
    }
}
java
{ "resource": "" }
q9420
PersistenceBrokerImpl.store
train
/**
 * Stores obj, deciding between INSERT and UPDATE: a null PK field means
 * insert; otherwise a cache lookup and a database existence check decide.
 */
public void store(Object obj) throws PersistenceBrokerException
{
    obj = extractObjectToStore(obj);
    // only do something if obj != null
    if(obj == null) return;

    ClassDescriptor cld = getClassDescriptor(obj.getClass());
    /*
    if one of the PK fields was null, we assume the objects
    was new and needs insert
    */
    boolean insert = serviceBrokerHelper().hasNullPKField(cld, obj);
    Identity oid = serviceIdentity().buildIdentity(cld, obj);
    /*
    if PK values are set, lookup cache or db to see whether object
    needs insert or update
    */
    if (!insert)
    {
        insert = objectCache.lookup(oid) == null
                && !serviceBrokerHelper().doesExist(cld, oid, obj);
    }
    store(obj, oid, cld, insert);
}
java
{ "resource": "" }
q9421
PersistenceBrokerImpl.store
train
/**
 * Convenience overload: store without ignoring references.
 */
protected void store(Object obj, Identity oid, ClassDescriptor cld, boolean insert)
{
    // Last argument is the ignoreReferences flag of the full overload.
    store(obj, oid, cld, insert, false);
}
java
{ "resource": "" }
q9422
PersistenceBrokerImpl.link
train
public void link(Object targetObject, ClassDescriptor cld, ObjectReferenceDescriptor rds, Object referencedObject, boolean insert) { // MBAIRD: we have 'disassociated' this object from the referenced object, // the object represented by the reference descriptor is now null, so set // the fk in the target object to null. // arminw: if an insert was done and ref object was null, we should allow // to pass FK fields of main object (maybe only the FK fields are set) if (referencedObject == null) { /* arminw: if update we set FK fields to 'null', because reference was disassociated We do nothing on insert, maybe only the FK fields of main object (without materialization of the reference object) are set by the user */ if(!insert) { unlinkFK(targetObject, cld, rds); } } else { setFKField(targetObject, cld, rds, referencedObject); } }
java
{ "resource": "" }
q9423
PersistenceBrokerImpl.unlinkFK
train
/**
 * Clears the FK fields of targetObject belonging to the given reference
 * descriptor.
 */
public void unlinkFK(Object targetObject, ClassDescriptor cld, ObjectReferenceDescriptor rds)
{
    // Passing null as the referenced object makes setFKField write nulls.
    setFKField(targetObject, cld, rds, null);
}
java
{ "resource": "" }
q9424
PersistenceBrokerImpl.linkOneToOne
train
/**
 * Links (and stores) the 1:1 reference described by rds on obj.
 * NOTE(review): the 'insert' parameter is ignored here -- a literal
 * 'true' is passed as the last argument, while the sibling
 * linkOneToMany/linkMtoN methods forward their 'insert' flag. Confirm
 * this asymmetry is intentional before changing it.
 */
public void linkOneToOne(Object obj, ClassDescriptor cld, ObjectReferenceDescriptor rds, boolean insert)
{
    storeAndLinkOneToOne(true, obj, cld, rds, true);
}
java
{ "resource": "" }
q9425
PersistenceBrokerImpl.linkOneToMany
train
/**
 * Links (and stores) the 1:n collection described by cod on obj.
 */
public void linkOneToMany(Object obj, CollectionDescriptor cod, boolean insert)
{
    // Read the current collection value from the persistent field.
    final Object referenced = cod.getPersistentField().get(obj);
    storeAndLinkOneToMany(true, obj, cod, referenced, insert);
}
java
{ "resource": "" }
q9426
PersistenceBrokerImpl.linkMtoN
train
/**
 * Links (and stores) the m:n collection described by cod on obj.
 */
public void linkMtoN(Object obj, CollectionDescriptor cod, boolean insert)
{
    // Read the current collection value from the persistent field.
    final Object referenced = cod.getPersistentField().get(obj);
    storeAndLinkMtoN(true, obj, cod, referenced, insert);
}
java
{ "resource": "" }
q9427
PersistenceBrokerImpl.retrieveReference
train
/**
 * Materializes the reference or collection attribute named
 * pAttributeName on pInstance. The instance is temporarily placed in the
 * local materialization cache to break circular references and removed
 * afterwards so insert/update state detection is not confused.
 *
 * @throws PersistenceBrokerException if no such attribute is mapped.
 */
public void retrieveReference(Object pInstance, String pAttributeName) throws PersistenceBrokerException
{
    if (logger.isDebugEnabled())
    {
        logger.debug("Retrieving reference named ["+pAttributeName+"] on object of type ["+
                pInstance.getClass().getName()+"]");
    }
    ClassDescriptor cld = getClassDescriptor(pInstance.getClass());
    CollectionDescriptor cod = cld.getCollectionDescriptorByName(pAttributeName);

    getInternalCache().enableMaterializationCache();
    // to avoid problems with circular references, locally cache the current object instance
    Identity oid = serviceIdentity().buildIdentity(pInstance);
    boolean needLocalRemove = false;
    if(getInternalCache().doLocalLookup(oid) == null)
    {
        getInternalCache().doInternalCache(oid, pInstance, MaterializationCache.TYPE_TEMP);
        needLocalRemove = true;
    }
    try
    {
        if (cod != null)
        {
            // attribute is a 1:n or m:n collection
            referencesBroker.retrieveCollection(pInstance, cld, cod, true);
        }
        else
        {
            ObjectReferenceDescriptor ord = cld.getObjectReferenceDescriptorByName(pAttributeName);
            if (ord != null)
            {
                // attribute is a 1:1 reference
                referencesBroker.retrieveReference(pInstance, cld, ord, true);
            }
            else
            {
                throw new PersistenceBrokerException("did not find attribute " + pAttributeName +
                        " for class " + pInstance.getClass().getName());
            }
        }
        // do locally remove the object to avoid problems with object state detection (insert/update),
        // because objects found in the cache detected as 'old' means 'update'
        if(needLocalRemove) getInternalCache().doLocalRemove(oid);
        getInternalCache().disableMaterializationCache();
    }
    catch(RuntimeException e)
    {
        getInternalCache().doLocalClear();
        throw e;
    }
}
java
{ "resource": "" }
q9428
PersistenceBrokerImpl.getCollectionByQuery
train
/**
 * Retrieves the objects matching query into an instance of
 * collectionClass.
 * NOTE(review): the literal 'false' third argument is presumably the
 * lazy/proxy flag of the delegate -- confirm against
 * QueryReferenceBroker.getCollectionByQuery.
 */
public ManageableCollection getCollectionByQuery(Class collectionClass, Query query) throws PersistenceBrokerException
{
    return referencesBroker.getCollectionByQuery(collectionClass, query, false);
}
java
{ "resource": "" }
q9429
PersistenceBrokerImpl.getIteratorFromQuery
train
/**
 * Creates a result-set iterator for the query, wrapped in a
 * PagingIterator when the query restricts the result range.
 */
protected OJBIterator getIteratorFromQuery(Query query, ClassDescriptor cld) throws PersistenceBrokerException
{
    OJBIterator iter = getRsIteratorFromQuery(query, cld, RsIteratorFactoryImpl.getInstance());
    if (query.usePaging())
    {
        iter = new PagingIterator(iter, query.getStartAtIndex(), query.getEndAtIndex());
    }
    return iter;
}
java
{ "resource": "" }
q9430
PersistenceBrokerImpl.doGetObjectByIdentity
train
/**
 * Looks up the object for the given identity: served from the cache if
 * present (refreshed when the ClassDescriptor demands it), otherwise
 * loaded from the database. Fires the AFTER_LOOKUP event either way;
 * may return null.
 */
public Object doGetObjectByIdentity(Identity id) throws PersistenceBrokerException
{
    if (logger.isDebugEnabled()) logger.debug("getObjectByIdentity " + id);

    // check if object is present in ObjectCache:
    Object obj = objectCache.lookup(id);
    // only perform a db lookup if necessary (object not cached yet)
    if (obj == null)
    {
        obj = getDBObject(id);
    }
    else
    {
        ClassDescriptor cld = getClassDescriptor(obj.getClass());
        // if specified in the ClassDescriptor the instance must be refreshed
        if (cld.isAlwaysRefresh())
        {
            refreshInstance(obj, id, cld);
        }
        // now refresh all references
        checkRefreshRelationships(obj, id, cld);
    }

    // Invoke events on PersistenceBrokerAware instances and listeners
    AFTER_LOOKUP_EVENT.setTarget(obj);
    fireBrokerEvent(AFTER_LOOKUP_EVENT);
    AFTER_LOOKUP_EVENT.setTarget(null);

    //logger.info("RETRIEVING object " + obj);
    return obj;
}
java
{ "resource": "" }
q9431
PersistenceBrokerImpl.refreshInstance
train
private void refreshInstance(Object cachedInstance, Identity oid, ClassDescriptor cld) { // read in fresh copy from the db, but do not cache it Object freshInstance = getPlainDBObject(cld, oid); // update all primitive typed attributes FieldDescriptor[] fields = cld.getFieldDescriptions(); FieldDescriptor fmd; PersistentField fld; for (int i = 0; i < fields.length; i++) { fmd = fields[i]; fld = fmd.getPersistentField(); fld.set(cachedInstance, fld.get(freshInstance)); } }
java
{ "resource": "" }
q9432
PersistenceBrokerImpl.getObjectByQuery
train
/**
 * Returns the first non-null object matching the query, or null.
 * Identity-based queries short-circuit to an identity lookup; all other
 * queries are iterated with an OJB iterator whose database resources
 * are always released.
 */
public Object getObjectByQuery(Query query) throws PersistenceBrokerException
{
    Object result = null;
    if (query instanceof QueryByIdentity)
    {
        // example obj may be an entity or an Identity
        Object obj = query.getExampleObject();
        if (obj instanceof Identity)
        {
            Identity oid = (Identity) obj;
            result = getObjectByIdentity(oid);
        }
        else
        {
            // TODO: This workaround doesn't allow 'null' for PK fields
            if (!serviceBrokerHelper().hasNullPKField(getClassDescriptor(obj.getClass()), obj))
            {
                Identity oid = serviceIdentity().buildIdentity(obj);
                result = getObjectByIdentity(oid);
            }
        }
    }
    else
    {
        Class itemClass = query.getSearchClass();
        ClassDescriptor cld = getClassDescriptor(itemClass);
        /*
        use OJB intern Iterator, thus we are able to close used
        resources instantly
        */
        OJBIterator it = getIteratorFromQuery(query, cld);
        /*
        arminw: patch by Andre Clute, instead of taking the first found result
        try to get the first found none null result.
        He wrote:
        I have a situation where an item with a certain criteria is in my
        database twice -- once deleted, and then a non-deleted version of it.
        When I do a PB.getObjectByQuery(), the RsIterator get's both results
        from the database, but the first row is the deleted row, so my RowReader
        filters it out, and do not get the right result.
        */
        try
        {
            while (result==null && it.hasNext())
            {
                result = it.next();
            }
        }
        // make sure that we close the used resources
        finally
        {
            if(it != null) it.releaseDbResources();
        }
    }
    return result;
}
java
{ "resource": "" }
q9433
PersistenceBrokerImpl.getPKEnumerationByQuery
train
/**
 * Returns an enumeration of primary-key instances (of primaryKeyClass)
 * for all objects matching the query.
 */
public Enumeration getPKEnumerationByQuery(Class primaryKeyClass, Query query) throws PersistenceBrokerException
{
    if (logger.isDebugEnabled())
    {
        logger.debug("getPKEnumerationByQuery " + query);
    }
    query.setFetchSize(1);
    final ClassDescriptor cld = getClassDescriptor(query.getSearchClass());
    return new PkEnumeration(query, cld, primaryKeyClass, this);
}
java
{ "resource": "" }
q9434
PersistenceBrokerImpl.store
train
/**
 * Stores obj using the caller-supplied modification state instead of
 * auto-detecting insert/update. When mod requires neither, only the 1:n
 * and m:n associations are stored.
 */
public void store(Object obj, ObjectModification mod) throws PersistenceBrokerException
{
    obj = extractObjectToStore(obj);
    // null for unmaterialized Proxy
    if (obj == null)
    {
        return;
    }

    ClassDescriptor cld = getClassDescriptor(obj.getClass());
    // this call ensures that all autoincremented primary key attributes are filled
    Identity oid = serviceIdentity().buildIdentity(cld, obj);
    // select flag for insert / update selection by checking the ObjectModification
    if (mod.needsInsert())
    {
        store(obj, oid, cld, true);
    }
    else if (mod.needsUpdate())
    {
        store(obj, oid, cld, false);
    }
    /*
    arminw
    TODO: Why we need this behaviour? What about 1:1 relations?
    */
    else
    {
        // just store 1:n and m:n associations
        storeCollections(obj, cld, mod.needsInsert());
    }
}
java
{ "resource": "" }
q9435
PersistenceBrokerImpl.storeToDb
train
/**
 * Writes obj to the database: links/stores 1:1 references, validates the
 * PK values, recurses over mapped super classes (multi-table
 * inheritance), performs the INSERT or UPDATE, re-caches the object and
 * finally stores 1:n / m:n associations unless ignoreReferences is set.
 */
private void storeToDb(Object obj, ClassDescriptor cld, Identity oid, boolean insert, boolean ignoreReferences)
{
    // 1. link and store 1:1 references
    storeReferences(obj, cld, insert, ignoreReferences);

    Object[] pkValues = oid.getPrimaryKeyValues();
    if (!serviceBrokerHelper().assertValidPksForStore(cld.getPkFields(), pkValues))
    {
        // BRJ: fk values may be part of pk, but the are not known during
        // creation of Identity. so we have to get them here
        pkValues = serviceBrokerHelper().getKeyValues(cld, obj);
        if (!serviceBrokerHelper().assertValidPksForStore(cld.getPkFields(), pkValues))
        {
            String append = insert ? " on insert" : " on update" ;
            throw new PersistenceBrokerException("assertValidPkFields failed for Object of type: "
                    + cld.getClassNameOfObject() + append);
        }
    }

    // get super class cld then store it with the object
    /*
    now for multiple table inheritance
    1. store super classes, topmost parent first
    2. go down through heirarchy until current class
    3. todo: store to full extent?

    // arminw: TODO: The extend-attribute feature dosn't work, should we remove this stuff?
    This if-clause will go up the inheritance heirarchy to store all the super classes.
    The id for the top most super class will be the id for all the subclasses too
    */
    if(cld.getSuperClass() != null)
    {
        ClassDescriptor superCld = getDescriptorRepository().getDescriptorFor(cld.getSuperClass());
        storeToDb(obj, superCld, oid, insert);
        // arminw: why this?? I comment out this section
        // storeCollections(obj, cld.getCollectionDescriptors(), insert);
    }

    // 2. store primitive typed attributes (Or is THIS step 3 ?)
    // if obj not present in db use INSERT
    if (insert)
    {
        dbAccess.executeInsert(cld, obj);
        if(oid.isTransient())
        {
            // Create a new Identity based on the current set of primary key values.
            oid = serviceIdentity().buildIdentity(cld, obj);
        }
    }
    // else use UPDATE
    else
    {
        try
        {
            dbAccess.executeUpdate(cld, obj);
        }
        catch(OptimisticLockException e)
        {
            // ensure that the outdated object be removed from cache
            objectCache.remove(oid);
            throw e;
        }
    }
    // cache object for symmetry with getObjectByXXX()
    // Add the object to the cache.
    objectCache.doInternalCache(oid, obj, ObjectCacheInternal.TYPE_WRITE);
    // 3. store 1:n and m:n associations
    if(!ignoreReferences) storeCollections(obj, cld, insert);
}
java
{ "resource": "" }
q9436
PersistenceBrokerImpl.getReportQueryIteratorByQuery
train
/**
 * Creates a report-query iterator for the query's search class.
 */
public Iterator getReportQueryIteratorByQuery(Query query) throws PersistenceBrokerException
{
    final ClassDescriptor descriptor = getClassDescriptor(query.getSearchClass());
    return getReportQueryIteratorFromQuery(query, descriptor);
}
java
{ "resource": "" }
q9437
PersistenceBrokerImpl.getRsIteratorFromQuery
train
/**
 * Creates the appropriate iterator for the query: a plain RsIterator for
 * SQL queries or non-extent classes, otherwise a ChainingIterator with
 * one RsIterator per concrete subclass table (each table only once).
 * NOTE(review): setFetchSize(1) is applied unconditionally -- confirm
 * this is the intended JDBC fetch-size hint.
 */
private OJBIterator getRsIteratorFromQuery(Query query, ClassDescriptor cld, RsIteratorFactory factory)
        throws PersistenceBrokerException
{
    query.setFetchSize(1);
    if (query instanceof QueryBySQL)
    {
        if(logger.isDebugEnabled()) logger.debug("Creating SQL-RsIterator for class ["+cld.getClassNameOfObject()+"]");
        return factory.createRsIterator((QueryBySQL) query, cld, this);
    }

    if (!cld.isExtent() || !query.getWithExtents())
    {
        // no extents just use the plain vanilla RsIterator
        if(logger.isDebugEnabled()) logger.debug("Creating RsIterator for class ["+cld.getClassNameOfObject()+"]");
        return factory.createRsIterator(query, cld, this);
    }

    if(logger.isDebugEnabled()) logger.debug("Creating ChainingIterator for class ["+cld.getClassNameOfObject()+"]");

    ChainingIterator chainingIter = new ChainingIterator();

    // BRJ: add base class iterator
    if (!cld.isInterface())
    {
        if(logger.isDebugEnabled()) logger.debug("Adding RsIterator for class ["+cld.getClassNameOfObject()+"] to ChainingIterator");
        chainingIter.addIterator(factory.createRsIterator(query, cld, this));
    }

    Iterator extents = getDescriptorRepository().getAllConcreteSubclassDescriptors(cld).iterator();
    while (extents.hasNext())
    {
        ClassDescriptor extCld = (ClassDescriptor) extents.next();

        // read same table only once
        if (chainingIter.containsIteratorForTable(extCld.getFullTableName()))
        {
            if(logger.isDebugEnabled()) logger.debug("Skipping class ["+extCld.getClassNameOfObject()+"]");
        }
        else
        {
            if(logger.isDebugEnabled()) logger.debug("Adding RsIterator of class ["+extCld.getClassNameOfObject()+"] to ChainingIterator");
            // add the iterator to the chaining iterator.
            chainingIter.addIterator(factory.createRsIterator(query, extCld, this));
        }
    }

    return chainingIter;
}
java
{ "resource": "" }
q9438
PersistenceBrokerImpl.getReportQueryIteratorFromQuery
train
/**
 * Creates a report result-set iterator for the query, wrapped in a
 * PagingIterator when the query restricts the result range.
 */
private OJBIterator getReportQueryIteratorFromQuery(Query query, ClassDescriptor cld) throws PersistenceBrokerException
{
    OJBIterator iter = getRsIteratorFromQuery(query, cld, ReportRsIteratorFactoryImpl.getInstance());
    if (query.usePaging())
    {
        iter = new PagingIterator(iter, query.getStartAtIndex(), query.getEndAtIndex());
    }
    return iter;
}
java
{ "resource": "" }
q9439
DataUtils.getModuleName
train
/**
 * Extracts the module name from a "name:version" module id.
 * Ids without a ':' are returned unchanged.
 */
public static String getModuleName(final String moduleId)
{
    final int separator = moduleId.indexOf(':');
    return separator < 0 ? moduleId : moduleId.substring(0, separator);
}
java
{ "resource": "" }
q9440
DataUtils.getModuleVersion
train
/**
 * Extracts the version from a "name:version" module id (the segment
 * after the LAST ':'). Ids without a ':' are returned unchanged.
 */
public static String getModuleVersion(final String moduleId)
{
    final int separator = moduleId.lastIndexOf(':');
    return separator < 0 ? moduleId : moduleId.substring(separator + 1);
}
java
{ "resource": "" }
q9441
DataUtils.getGroupId
train
/**
 * Extracts the group id (segment before the first ':') from a GAVC
 * string. Strings without a ':' are returned unchanged.
 */
public static String getGroupId(final String gavc)
{
    final int separator = gavc.indexOf(':');
    return separator < 0 ? gavc : gavc.substring(0, separator);
}
java
{ "resource": "" }
q9442
DataUtils.getAllSubmodules
train
/**
 * Flattens the submodule tree of the given module: all direct
 * submodules first, followed by each submodule's descendants
 * (depth-first, recursive).
 */
public static List<DbModule> getAllSubmodules(final DbModule module)
{
    final List<DbModule> flattened = new ArrayList<DbModule>(module.getSubmodules());
    for (final DbModule child : module.getSubmodules())
    {
        flattened.addAll(getAllSubmodules(child));
    }
    return flattened;
}
java
{ "resource": "" }
q9443
ScopeHandler.init
train
/**
 * Initializes the scope flags from the request's query parameters.
 * Each flag is only overwritten when its parameter is present; absent
 * parameters keep the current defaults.
 */
public void init(final MultivaluedMap<String, String> queryParameters)
{
    final String compileParam = queryParameters.getFirst(ServerAPI.SCOPE_COMPILE_PARAM);
    final String providedParam = queryParameters.getFirst(ServerAPI.SCOPE_PROVIDED_PARAM);
    final String runtimeParam = queryParameters.getFirst(ServerAPI.SCOPE_RUNTIME_PARAM);
    final String testParam = queryParameters.getFirst(ServerAPI.SCOPE_TEST_PARAM);

    if (compileParam != null)
    {
        this.scopeComp = Boolean.valueOf(compileParam);
    }
    if (providedParam != null)
    {
        this.scopePro = Boolean.valueOf(providedParam);
    }
    if (runtimeParam != null)
    {
        this.scopeRun = Boolean.valueOf(runtimeParam);
    }
    if (testParam != null)
    {
        this.scopeTest = Boolean.valueOf(testParam);
    }
}
java
{ "resource": "" }
q9444
PBCapsule.obtainBroker
train
private PersistenceBroker obtainBroker() { PersistenceBroker _broker; try { if (pbKey == null) { //throw new OJBRuntimeException("Not possible to do action, cause no tx runnning and no PBKey is set"); log.warn("No tx runnning and PBKey is null, try to use the default PB"); _broker = PersistenceBrokerFactory.defaultPersistenceBroker(); } else { _broker = PersistenceBrokerFactory.createPersistenceBroker(pbKey); } } catch (PBFactoryException e) { log.error("Could not obtain PB for PBKey " + pbKey, e); throw new OJBRuntimeException("Unexpected micro-kernel exception", e); } return _broker; }
java
{ "resource": "" }
q9445
SequenceManagerHighLowImpl.addSequence
train
/**
 * Registers the given sequence under its name in the per-database map.
 * NOTE(review): unlike removeSequence, this method mutates the shared
 * sequencesDBMap without synchronization -- confirm callers hold an
 * appropriate lock.
 */
private void addSequence(String sequenceName, HighLowSequence seq)
{
    // lookup the sequence map for calling DB
    String jcdAlias = getBrokerForClass()
            .serviceConnectionManager().getConnectionDescriptor().getJcdAlias();

    Map mapForDB = (Map) sequencesDBMap.get(jcdAlias);
    if(mapForDB == null)
    {
        mapForDB = new HashMap();
    }
    mapForDB.put(sequenceName, seq);
    sequencesDBMap.put(jcdAlias, mapForDB);
}
java
{ "resource": "" }
q9446
SequenceManagerHighLowImpl.removeSequence
train
/**
 * Removes the named sequence from the map belonging to the current
 * database connection, synchronizing on the class while mutating it.
 */
protected void removeSequence(String sequenceName)
{
    // lookup the sequence map for calling DB
    Map mapForDB = (Map) sequencesDBMap.get(getBrokerForClass()
            .serviceConnectionManager().getConnectionDescriptor().getJcdAlias());
    if(mapForDB != null)
    {
        synchronized(SequenceManagerHighLowImpl.class)
        {
            mapForDB.remove(sequenceName);
        }
    }
}
java
{ "resource": "" }
q9447
JdbcConnectionDescriptor.getPBKey
train
/**
 * Lazily builds and memoizes the PBKey from the JCD alias and
 * credentials of this connection descriptor.
 */
public PBKey getPBKey()
{
    if (pbKey == null)
    {
        pbKey = new PBKey(getJcdAlias(), getUserName(), getPassWord());
    }
    return pbKey;
}
java
{ "resource": "" }
q9448
JdbcConnectionDescriptor.setJdbcLevel
train
/**
 * Parses the JDBC level from a string; null or non-numeric input falls
 * back to the default level 2.0 (logged at info level).
 */
public void setJdbcLevel(String jdbcLevel)
{
    if (jdbcLevel == null)
    {
        setJdbcLevel(2.0);
        logger.info("Specified JDBC level was null, used default jdbc level of 2.0 ");
        return;
    }
    try
    {
        setJdbcLevel(Double.parseDouble(jdbcLevel));
    }
    catch (NumberFormatException nfe)
    {
        setJdbcLevel(2.0);
        logger.info("Specified JDBC level was not numeric (Value=" + jdbcLevel + "), used default jdbc level of 2.0 ");
    }
}
java
{ "resource": "" }
q9449
LockManagerDefaultImpl.checkWrite
train
/**
 * Checks whether the given transaction may write obj, delegating to the
 * lock strategy configured for the object's class.
 */
public synchronized boolean checkWrite(TransactionImpl tx, Object obj)
{
    if (log.isDebugEnabled())
    {
        log.debug("LM.checkWrite(tx-" + tx.getGUID() + ", " + new Identity(obj, tx.getBroker()).toString() + ")");
    }
    final LockStrategy strategy = LockStrategyFactory.getStrategyFor(obj);
    return strategy.checkWrite(tx, obj);
}
java
{ "resource": "" }
q9450
StateOldDelete.checkpoint
train
/**
 * Checkpoint in the OLD-DELETE state: executes the delete and moves the
 * envelope into the transient state.
 */
public void checkpoint(ObjectEnvelope mod) throws org.apache.ojb.broker.PersistenceBrokerException
{
    mod.doDelete();
    mod.setModificationState(StateTransient.getInstance());
}
java
{ "resource": "" }
q9451
StateNewDirty.checkpoint
train
/**
 * Checkpoint in the NEW-DIRTY state: executes the insert and moves the
 * envelope into the old-clean state.
 */
public void checkpoint(ObjectEnvelope mod) throws PersistenceBrokerException
{
    mod.doInsert();
    mod.setModificationState(StateOldClean.getInstance());
}
java
{ "resource": "" }
q9452
ModuleHandler.getModuleVersions
train
/**
 * Returns all stored versions of the named module.
 *
 * @throws WebApplicationException 404 when no version exists (i.e. the
 *         module itself is unknown).
 */
public List<String> getModuleVersions(final String name, final FiltersHolder filters)
{
    final List<String> versions = repositoryHandler.getModuleVersions(name, filters);
    if (versions.isEmpty())
    {
        final Response notFound = Response.status(Response.Status.NOT_FOUND)
                .entity("Module " + name + " does not exist.").build();
        throw new WebApplicationException(notFound);
    }
    return versions;
}
java
{ "resource": "" }
q9453
ModuleHandler.getModule
train
/** Looks up a module by id. Responds with HTTP 404 (as a WebApplicationException) when the module does not exist; never returns null. */
public DbModule getModule(final String moduleId) { final DbModule dbModule = repositoryHandler.getModule(moduleId); if (dbModule == null) { throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND) .entity("Module " + moduleId + " does not exist.").build()); } return dbModule; }
java
{ "resource": "" }
q9454
ModuleHandler.deleteModule
train
/** Deletes the module with the given id together with all of its artifacts. Throws a 404 WebApplicationException (via getModule) when the module does not exist. */
public void deleteModule(final String moduleId) { final DbModule module = getModule(moduleId); repositoryHandler.deleteModule(module.getId()); for (final String gavc : DataUtils.getAllArtifacts(module)) { repositoryHandler.deleteArtifact(gavc); } }
java
{ "resource": "" }
q9455
ModuleHandler.getModuleLicenses
train
/** Collects the licenses of every artifact belonging to the module. The returned list may contain duplicates when several artifacts share a license. Throws a 404 WebApplicationException (via getModule) when the module does not exist. */
public List<DbLicense> getModuleLicenses(final String moduleId, final LicenseMatcher licenseMatcher) { final DbModule module = getModule(moduleId); final List<DbLicense> licenses = new ArrayList<>(); final FiltersHolder filters = new FiltersHolder(); final ArtifactHandler artifactHandler = new ArtifactHandler(repositoryHandler, licenseMatcher); for (final String gavc : DataUtils.getAllArtifacts(module)) { licenses.addAll(artifactHandler.getArtifactLicenses(gavc, filters)); } return licenses; }
java
{ "resource": "" }
q9456
ModuleHandler.promoteModule
train
/** Promotes the module with the given id: marks each of its artifacts as promoted (storing them individually) and then promotes the module itself. Not atomic — a failure mid-loop leaves some artifacts promoted. */
public void promoteModule(final String moduleId) { final DbModule module = getModule(moduleId); for (final String gavc : DataUtils.getAllArtifacts(module)) { final DbArtifact artifact = repositoryHandler.getArtifact(gavc); artifact.setPromoted(true); repositoryHandler.store(artifact); } repositoryHandler.promoteModule(module); }
java
{ "resource": "" }
q9457
LoggerFactoryImpl.getLogger
train
/** Returns a (cached) Logger for the given name. On first request the logger is created and configured; on any failure the boot logger is reassigned, used as fallback and the error logged. NOTE(review): createLoggerInstance already calls configure(), so the logger appears to be configured twice — confirm whether the second configure(conf) call is intentional. */
public Logger getLogger(String loggerName) { Logger logger; //lookup in the cache first logger = (Logger) cache.get(loggerName); if(logger == null) { try { // get the configuration (not from the configurator because this is independent) logger = createLoggerInstance(loggerName); if(getBootLogger().isDebugEnabled()) { getBootLogger().debug("Using logger class '" + (getConfiguration() != null ? getConfiguration().getLoggerClass() : null) + "' for " + loggerName); } // configure the logger getBootLogger().debug("Initializing logger instance " + loggerName); logger.configure(conf); } catch(Throwable t) { // do reassign check and signal logger creation failure reassignBootLogger(true); logger = getBootLogger(); getBootLogger().error("[" + this.getClass().getName() + "] Could not initialize logger " + (conf != null ? conf.getLoggerClass() : null), t); } //cache it so we can get it faster the next time cache.put(loggerName, logger); // do reassign check reassignBootLogger(false); } return logger; }
java
{ "resource": "" }
q9458
LoggerFactoryImpl.createLoggerInstance
train
/** Instantiates the configured logger class via its (String name) constructor and configures it. Throws on any reflection or configuration failure. */
private Logger createLoggerInstance(String loggerName) throws Exception { Class loggerClass = getConfiguration().getLoggerClass(); Logger log = (Logger) ClassHelper.newInstance(loggerClass, String.class, loggerName); log.configure(getConfiguration()); return log; }
java
{ "resource": "" }
q9459
PersistentFieldBase.getFieldRecursive
train
/** Resolves a declared field by name, walking up the superclass chain. Rethrows NoSuchFieldException once the top of the hierarchy (Object, a null superclass, or an interface) is reached without a match. */
private Field getFieldRecursive(Class c, String name) throws NoSuchFieldException { try { return c.getDeclaredField(name); } catch (NoSuchFieldException e) { // if field could not be found in the inheritance hierarchy, signal error if ((c == Object.class) || (c.getSuperclass() == null) || c.isInterface()) { throw e; } // if field could not be found in class c try in superclass else { return getFieldRecursive(c.getSuperclass(), name); } } }
java
{ "resource": "" }
q9460
PersistentFieldBase.buildErrorSetMsg
train
/**
 * Builds a multi-line diagnostic message describing a failed attempt to set a
 * field value via reflection: the target object's class, the field's
 * name/type/declaring class, and the class and string form of the value that
 * could not be assigned.
 *
 * @param obj    the target object whose field was being set (may be null)
 * @param value  the value that could not be assigned (may be null)
 * @param aField the reflective field being set (may be null)
 * @return a human-readable error description
 */
protected String buildErrorSetMsg(Object obj, Object value, Field aField)
{
    String eol = SystemUtils.LINE_SEPARATOR;
    StringBuffer buf = new StringBuffer();
    buf
        .append(eol + "[try to set 'object value' in 'target object'")
        .append(eol + "target obj class: " + (obj != null ? obj.getClass().getName() : null))
        .append(eol + "target field name: " + (aField != null ? aField.getName() : null))
        .append(eol + "target field type: " + (aField != null ? aField.getType() : null))
        .append(eol + "target field declared in: "
                + (aField != null ? aField.getDeclaringClass().getName() : null))
        .append(eol + "object value class: " + (value != null ? value.getClass().getName() : null))
        // string concatenation already renders null as "null", so the former
        // "(value != null ? value : null)" conditional was a no-op
        .append(eol + "object value: " + value)
        .append(eol + "]");
    return buf.toString();
}
java
{ "resource": "" }
q9461
PersistenceBrokerFactoryBaseImpl.createNewBrokerInstance
train
/** Creates, configures and wraps (with interceptors) a new PersistenceBroker instance for the given PBKey. Rejects a null key and a key unknown to the metadata configuration; wraps creation failures in PBFactoryException. The instance counter is only incremented when INFO logging is enabled (as noted inline, used for testing). */
protected PersistenceBrokerInternal createNewBrokerInstance(PBKey key) throws PBFactoryException { if (key == null) throw new PBFactoryException("Could not create new broker with PBkey argument 'null'"); // check if the given key really exists if (MetadataManager.getInstance().connectionRepository().getDescriptor(key) == null) { throw new PBFactoryException("Given PBKey " + key + " does not match in metadata configuration"); } if (log.isEnabledFor(Logger.INFO)) { // only count created instances when INFO-Log-Level log.info("Create new PB instance for PBKey " + key + ", already created persistence broker instances: " + instanceCount); // useful for testing ++this.instanceCount; } PersistenceBrokerInternal instance = null; Class[] types = {PBKey.class, PersistenceBrokerFactoryIF.class}; Object[] args = {key, this}; try { instance = (PersistenceBrokerInternal) ClassHelper.newInstance(implementationClass, types, args); OjbConfigurator.getInstance().configure(instance); instance = (PersistenceBrokerInternal) InterceptorFactory.getInstance().createInterceptorFor(instance); } catch (Exception e) { log.error("Creation of a new PB instance failed", e); throw new PBFactoryException("Creation of a new PB instance failed", e); } return instance; }
java
{ "resource": "" }
q9462
ActionAddClassDescriptor.actionPerformed
train
/** Swing action handler: dumps the event details to stdout (debug output), then creates a new ClassDescriptor with a placeholder table name and attaches it to the repository root node. */
public void actionPerformed(java.awt.event.ActionEvent e) { System.out.println("Action Command: " + e.getActionCommand()); System.out.println("Action Params : " + e.paramString()); System.out.println("Action Source : " + e.getSource()); System.out.println("Action SrcCls : " + e.getSource().getClass().getName()); org.apache.ojb.broker.metadata.ClassDescriptor cld = new org.apache.ojb.broker.metadata.ClassDescriptor(rootNode.getRepository()); // cld.setClassNameOfObject("New Class"); cld.setTableName("New Table"); rootNode.addClassDescriptor(cld); }
java
{ "resource": "" }
q9463
LayerAuthentication.getMethod
train
/** Maps this layer's authenticationMethod to the equivalent ProxyAuthenticationMethod constant; returns null for any unmapped value. */
@Override public ProxyAuthenticationMethod getMethod() { switch (authenticationMethod) { case BASIC: return ProxyAuthenticationMethod.BASIC; case DIGEST: return ProxyAuthenticationMethod.DIGEST; case URL: return ProxyAuthenticationMethod.URL; default: return null; } }
java
{ "resource": "" }
q9464
MetaTinyTypes.metaFor
train
/** Finds the registered MetaTinyType whose isMetaOf() accepts the candidate class. Throws IllegalArgumentException (handling a null candidate in the message) when no meta matches. */
public static <T> MetaTinyType<T> metaFor(Class<?> candidate) { for (MetaTinyType meta : metas) { if (meta.isMetaOf(candidate)) { return meta; } } throw new IllegalArgumentException(String.format("not a tinytype: %s", candidate == null ? "null" : candidate.getCanonicalName())); }
java
{ "resource": "" }
q9465
VectorLayerInfo.getNamedStyleInfo
train
/**
 * Looks up the style configuration with the given name.
 *
 * @param name the style name to search for
 * @return the matching {@link NamedStyleInfo}, or null when no style carries
 *         that name
 */
public NamedStyleInfo getNamedStyleInfo(String name) {
	NamedStyleInfo match = null;
	for (NamedStyleInfo candidate : namedStyleInfos) {
		if (candidate.getName().equals(name)) {
			match = candidate;
			break;
		}
	}
	return match;
}
java
{ "resource": "" }
q9466
AbstractFoundationLoggingMarker.scanClassPathForFormattingAnnotations
train
/** Scans the com.nds and com.cisco packages for classes annotated with @DefaultFormat and, for each one implementing FoundationLoggingMarker, asynchronously generates (and caches in markersMap) a formatter class; non-implementing annotated classes are logged as errors. NOTE(review): the fixed 30-second sleep before shutdown() looks like a crude wait for the worker pool — confirm whether awaitTermination (see the commented-out block) was the intended mechanism. */
public static void scanClassPathForFormattingAnnotations() { ExecutorService executorService = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2); // scan classpath and filter out classes that don't begin with "com.nds" Reflections reflections = new Reflections("com.nds","com.cisco"); Set<Class<?>> annotated = reflections.getTypesAnnotatedWith(DefaultFormat.class); // Reflections ciscoReflections = new Reflections("com.cisco"); // // annotated.addAll(ciscoReflections.getTypesAnnotatedWith(DefaultFormat.class)); for (Class<?> markerClass : annotated) { // if the marker class is indeed implementing FoundationLoggingMarker // interface if (FoundationLoggingMarker.class.isAssignableFrom(markerClass)) { final Class<? extends FoundationLoggingMarker> clazz = (Class<? extends FoundationLoggingMarker>) markerClass; executorService.execute(new Runnable() { @Override public void run() { if (markersMap.get(clazz) == null) { try { // generate formatter class for this marker // class generateAndUpdateFormatterInMap(clazz); } catch (Exception e) { LOGGER.trace("problem generating formatter class from static scan method. error is: " + e.toString()); } } } }); } else {// if marker class does not implement FoundationLoggingMarker // interface, log ERROR // verify the LOGGER was initialized. It might not be as this // Method is called in a static block if (LOGGER == null) { LOGGER = LoggerFactory.getLogger(AbstractFoundationLoggingMarker.class); } LOGGER.error("Formatter annotations should only appear on foundationLoggingMarker implementations"); } } try { TimeUnit.SECONDS.sleep(30); } catch (InterruptedException e) { LOGGER.trace(e.toString(), e); } executorService.shutdown(); // try { // executorService.awaitTermination(15, TimeUnit.SECONDS); // } catch (InterruptedException e) { // LOGGER.error("creation of formatters has been interrupted"); // } }
java
{ "resource": "" }
q9467
FoundationHierarchyEventListener.addAppenderEvent
train
/** Log4j hierarchy hook invoked when an appender is added: applies the default layout, then — for FoundationFileRollingAppender or plain TimeAndSizeRollingAppender instances — configures archiving, registers the roll-event listener, applies the Foundation_ROLL_ON_STARTUP property (default true) and re-activates the appender; finally wires async support for any non-AsyncAppender. */
public void addAppenderEvent(final Category cat, final Appender appender) { updateDefaultLayout(appender); if (appender instanceof FoundationFileRollingAppender) { final FoundationFileRollingAppender timeSizeRollingAppender = (FoundationFileRollingAppender) appender; // update the appender with default vales such as logging pattern, file size etc. //updateDefaultTimeAndSizeRollingAppender(timeSizeRollingAppender); // read teh proeprties and determine if archiving should be enabled. updateArchivingSupport(timeSizeRollingAppender); // by default add the rolling file listener to enable application // state. timeSizeRollingAppender.setFileRollEventListener(FoundationRollEventListener.class.getName()); boolean rollOnStartup = true; if (FoundationLogger.log4jConfigProps != null && FoundationLogger.log4jConfigProps.containsKey(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString())) { rollOnStartup = Boolean.valueOf(FoundationLogger.log4jConfigProps.getProperty(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString())); } timeSizeRollingAppender.setRollOnStartup(rollOnStartup); // refresh the appender timeSizeRollingAppender.activateOptions(); // timeSizeRollingAppender.setOriginalLayout(); //So application state will not make any problems }else if(!(appender instanceof FoundationFileRollingAppender) && (appender instanceof TimeAndSizeRollingAppender)){ //TimeAndSizeRollingAppender final TimeAndSizeRollingAppender timeSizeRollingAppender = (TimeAndSizeRollingAppender) appender; // update the appender with default vales such as logging pattern, file size etc. updateDefaultTimeAndSizeRollingAppender(timeSizeRollingAppender); // read teh proeprties and determine if archiving should be enabled. updateArchivingSupport(timeSizeRollingAppender); // by default add the rolling file listener to enable application // state. 
timeSizeRollingAppender.setFileRollEventListener(FoundationRollEventListener.class.getName()); boolean rollOnStartup = true; if (FoundationLogger.log4jConfigProps != null && FoundationLogger.log4jConfigProps.containsKey(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString())) { rollOnStartup = Boolean.valueOf(FoundationLogger.log4jConfigProps.getProperty(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString())); } timeSizeRollingAppender.setRollOnStartup(rollOnStartup); // refresh the appender timeSizeRollingAppender.activateOptions(); // timeSizeRollingAppender.setOriginalLayout(); } if ( ! (appender instanceof org.apache.log4j.AsyncAppender)) initiateAsyncSupport(appender); }
java
{ "resource": "" }
q9468
FoundationHierarchyEventListener.updateDefaultTimeAndSizeRollingAppender
train
/** Applies defaults to a rolling appender: a default date pattern when none is set, the per-appender MaxFileSize property (falling back to the Foundation default), and the per-appender MaxRollFileCount property (falling back to 100). NOTE(review): despite the method name, the parameter type is FoundationFileRollingAppender — confirm which appender type this is meant for. */
private void updateDefaultTimeAndSizeRollingAppender(final FoundationFileRollingAppender appender) { if (appender.getDatePattern().trim().length() == 0) { appender.setDatePattern(FoundationLoggerConstants.DEFAULT_DATE_PATTERN.toString()); } String maxFileSizeKey = "log4j.appender."+appender.getName()+".MaxFileSize"; appender.setMaxFileSize(FoundationLogger.log4jConfigProps.getProperty(maxFileSizeKey, FoundationLoggerConstants.Foundation_MAX_FILE_SIZE.toString())); // if (appender.getMaxFileSize() == null || appender.getMaxFileSize().equals(FoundationLoggerConstants.DEFAULT_FILE_SIZE.toString())) { // appender.setMaxFileSize(FoundationLoggerConstants.Foundation_MAX_FILE_SIZE.toString()); // } String maxRollCountKey = "log4j.appender."+appender.getName()+".MaxRollFileCount"; appender.setMaxRollFileCount(Integer.parseInt(FoundationLogger.log4jConfigProps.getProperty(maxRollCountKey,"100"))); }
java
{ "resource": "" }
q9469
FileRollEvent.dispatchToAppender
train
/** Sends the given message back to the source appender wrapped in a fresh FileRollEvent copy (events are treated as immutable); a no-op when the source appender is gone. */
final void dispatchToAppender(final String message) { // dispatch a copy, since events should be treated as being immutable final FoundationFileRollingAppender appender = this.getSource(); if (appender != null) { appender.append(new FileRollEvent(this, message)); } }
java
{ "resource": "" }
q9470
FileRollEvent.dispatchToAppender
train
/** Sends a custom LoggingEvent back to the source appender, wrapped in a FileRollEvent to avoid the recursion bug noted inline; a no-op when the source appender is gone. */
final void dispatchToAppender(final LoggingEvent customLoggingEvent) { // wrap the LoggingEvent in a FileRollEvent to prevent recursion bug final FoundationFileRollingAppender appender = this.getSource(); if (appender != null) { appender.append(new FileRollEvent(customLoggingEvent, this)); } }
java
{ "resource": "" }
q9471
SqlExistStatement.getStatement
train
/** Lazily builds and caches a SELECT statement of the form "SELECT &lt;first-pk-column&gt; FROM &lt;table&gt; WHERE ..." used to test row existence. Throws OJBRuntimeException when the class descriptor declares no primary-key fields. Not synchronized; concurrent first callers may each build the (identical) SQL. */
public String getStatement() { if(sql == null) { StringBuffer stmt = new StringBuffer(128); ClassDescriptor cld = getClassDescriptor(); FieldDescriptor[] fieldDescriptors = cld.getPkFields(); if(fieldDescriptors == null || fieldDescriptors.length == 0) { throw new OJBRuntimeException("No PK fields defined in metadata for " + cld.getClassNameOfObject()); } FieldDescriptor field = fieldDescriptors[0]; stmt.append(SELECT); stmt.append(field.getColumnName()); stmt.append(FROM); stmt.append(cld.getFullTableName()); appendWhereClause(cld, false, stmt); sql = stmt.toString(); } return sql; }
java
{ "resource": "" }
q9472
FieldDescriptor.getComparator
train
/**
 * Returns a Comparator ordering FieldDescriptor instances by ascending
 * column number (getColNo()). Both arguments are cast to FieldDescriptor.
 *
 * @return a new comparator instance
 */
public static Comparator getComparator()
{
    return new Comparator()
    {
        public int compare(Object o1, Object o2)
        {
            int colNo1 = ((FieldDescriptor) o1).getColNo();
            int colNo2 = ((FieldDescriptor) o2).getColNo();
            // explicit three-way comparison; avoids subtraction overflow
            return colNo1 < colNo2 ? -1 : (colNo1 > colNo2 ? 1 : 0);
        }
    };
}
java
{ "resource": "" }
q9473
FieldDescriptor.setFieldConversionClassName
train
/** Instantiates the named FieldConversion class via its default constructor and installs it as this field's conversion; wraps any instantiation failure in a MetadataException. */
public void setFieldConversionClassName(String fieldConversionClassName) { try { this.fieldConversion = (FieldConversion) ClassHelper.newInstance(fieldConversionClassName); } catch (Exception e) { throw new MetadataException( "Could not instantiate FieldConversion class using default constructor", e); } }
java
{ "resource": "" }
q9474
TorqueDBHandling.setConnection
train
/** Stores the connection descriptor and resolves its DBMS name (lower-cased) to the corresponding Torque target database. Throws PlatformException for unsupported DBMS; when the target database changes, clears the cached creation and init scripts. */
public void setConnection(JdbcConnectionDescriptor jcd) throws PlatformException { _jcd = jcd; String targetDatabase = (String)_dbmsToTorqueDb.get(_jcd.getDbms().toLowerCase()); if (targetDatabase == null) { throw new PlatformException("Database "+_jcd.getDbms()+" is not supported by torque"); } if (!targetDatabase.equals(_targetDatabase)) { _targetDatabase = targetDatabase; _creationScript = null; _initScripts.clear(); } }
java
{ "resource": "" }
q9475
TorqueDBHandling.writeSchemata
train
/** Writes all cached (compressed) Torque schemata into the given directory and returns their file names as a comma-separated include list (order follows the HashMap's key iteration, i.e. unspecified). */
private String writeSchemata(File dir) throws IOException { writeCompressedTexts(dir, _torqueSchemata); StringBuffer includes = new StringBuffer(); for (Iterator it = _torqueSchemata.keySet().iterator(); it.hasNext();) { includes.append((String)it.next()); if (it.hasNext()) { includes.append(","); } } return includes.toString(); }
java
{ "resource": "" }
q9476
TorqueDBHandling.createDB
train
/** Creates the database by writing the (lazily created) creation script into a temporary "schemas" working directory and executing it with Ant's SQLExec task (autocommit on, onerror=continue) against the DB-creation URL; the temp directory is removed on success. NOTE(review): on failure only the script file is deleted (with an NPE guard), not the temp directory — compare with initDB, which deletes the whole output dir; confirm whether this asymmetry is intentional. */
public void createDB() throws PlatformException { if (_creationScript == null) { createCreationScript(); } Project project = new Project(); TorqueDataModelTask modelTask = new TorqueDataModelTask(); File tmpDir = null; File scriptFile = null; try { tmpDir = new File(getWorkDir(), "schemas"); tmpDir.mkdir(); scriptFile = new File(tmpDir, CREATION_SCRIPT_NAME); writeCompressedText(scriptFile, _creationScript); project.setBasedir(tmpDir.getAbsolutePath()); // we use the ant task 'sql' to perform the creation script SQLExec sqlTask = new SQLExec(); SQLExec.OnError onError = new SQLExec.OnError(); onError.setValue("continue"); sqlTask.setProject(project); sqlTask.setAutocommit(true); sqlTask.setDriver(_jcd.getDriver()); sqlTask.setOnerror(onError); sqlTask.setUserid(_jcd.getUserName()); sqlTask.setPassword(_jcd.getPassWord() == null ? "" : _jcd.getPassWord()); sqlTask.setUrl(getDBCreationUrl()); sqlTask.setSrc(scriptFile); sqlTask.execute(); deleteDir(tmpDir); } catch (Exception ex) { // clean-up if ((tmpDir != null) && tmpDir.exists()) { try { scriptFile.delete(); } catch (NullPointerException e) { LoggerFactory.getLogger(this.getClass()).error("NPE While deleting scriptFile [" + scriptFile.getName() + "]", e); } } throw new PlatformException(ex); } }
java
{ "resource": "" }
q9477
TorqueDBHandling.initDB
train
/** Initializes the database schema: writes the (lazily created) init scripts into a temporary "sql" working directory and executes them with the Torque SQL task (autocommit on, onerror=continue) against the DB-manipulation URL. The output directory is removed on success and on failure; failures are wrapped in PlatformException. */
public void initDB() throws PlatformException { if (_initScripts.isEmpty()) { createInitScripts(); } Project project = new Project(); TorqueSQLTask sqlTask = new TorqueSQLTask(); File outputDir = null; try { outputDir = new File(getWorkDir(), "sql"); outputDir.mkdir(); writeCompressedTexts(outputDir, _initScripts); project.setBasedir(outputDir.getAbsolutePath()); // executing the generated sql, but this time with a torque task TorqueSQLExec sqlExec = new TorqueSQLExec(); TorqueSQLExec.OnError onError = new TorqueSQLExec.OnError(); sqlExec.setProject(project); onError.setValue("continue"); sqlExec.setAutocommit(true); sqlExec.setDriver(_jcd.getDriver()); sqlExec.setOnerror(onError); sqlExec.setUserid(_jcd.getUserName()); sqlExec.setPassword(_jcd.getPassWord() == null ? "" : _jcd.getPassWord()); sqlExec.setUrl(getDBManipulationUrl()); sqlExec.setSrcDir(outputDir.getAbsolutePath()); sqlExec.setSqlDbMap(SQL_DB_MAP_NAME); sqlExec.execute(); deleteDir(outputDir); } catch (Exception ex) { // clean-up if (outputDir != null) { deleteDir(outputDir); } throw new PlatformException(ex); } }
java
{ "resource": "" }
q9478
TorqueDBHandling.getDBManipulationUrl
train
/**
 * Assembles the JDBC URL used for schema manipulation from the connection
 * descriptor, in the form "protocol:subprotocol:dbalias".
 *
 * @return the JDBC URL for DB manipulation
 */
protected String getDBManipulationUrl()
{
    JdbcConnectionDescriptor descriptor = getConnection();
    StringBuffer url = new StringBuffer();
    url.append(descriptor.getProtocol()).append(':');
    url.append(descriptor.getSubProtocol()).append(':');
    url.append(descriptor.getDbAlias());
    return url.toString();
}
java
{ "resource": "" }
q9479
TorqueDBHandling.readStreamCompressed
train
/**
 * Reads the given text stream line by line and returns its content
 * GZIP-compressed, normalizing every line terminator to '\n'.
 * All streams are closed in a finally block so they are released even when
 * reading or writing fails (the original version leaked them on exception).
 *
 * @param stream the stream to read; closed before this method returns
 * @return the GZIP-compressed content
 * @throws IOException on read or write failure
 */
private byte[] readStreamCompressed(InputStream stream) throws IOException
{
    ByteArrayOutputStream bao = new ByteArrayOutputStream();
    GZIPOutputStream gos = new GZIPOutputStream(bao);
    OutputStreamWriter output = new OutputStreamWriter(gos);
    BufferedReader input = new BufferedReader(new InputStreamReader(stream));
    try
    {
        String line;
        while ((line = input.readLine()) != null)
        {
            output.write(line);
            output.write('\n');
        }
    }
    finally
    {
        // closing the reader also closes the wrapped source stream;
        // closing the writer flushes and finishes the GZIP stream chain
        input.close();
        output.close();
    }
    return bao.toByteArray();
}
java
{ "resource": "" }
q9480
TorqueDBHandling.readTextsCompressed
train
/** Reads every regular file in the given directory (non-recursively, subdirectories are skipped) and stores its compressed content in the results map keyed by file name. A no-op when dir does not exist or is not a directory. */
private void readTextsCompressed(File dir, HashMap results) throws IOException { if (dir.exists() && dir.isDirectory()) { File[] files = dir.listFiles(); for (int idx = 0; idx < files.length; idx++) { if (files[idx].isDirectory()) { continue; } results.put(files[idx].getName(), readTextCompressed(files[idx])); } } }
java
{ "resource": "" }
q9481
TorqueDBHandling.writeCompressedText
train
/**
 * Decompresses the given GZIP content and writes it line by line to the
 * target file, normalizing every line terminator to '\n'.
 * Reader and writer are closed in a finally block so they are released even
 * when an I/O error occurs mid-copy (the original version leaked them on
 * exception). ByteArrayInputStream needs no separate close (no-op).
 *
 * @param file              the destination file (overwritten)
 * @param compressedContent the GZIP-compressed text to write
 * @throws IOException on decompression or write failure
 */
private void writeCompressedText(File file, byte[] compressedContent) throws IOException
{
    GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(compressedContent));
    BufferedReader input = new BufferedReader(new InputStreamReader(gis));
    BufferedWriter output = new BufferedWriter(new FileWriter(file));
    try
    {
        String line;
        while ((line = input.readLine()) != null)
        {
            output.write(line);
            output.write('\n');
        }
    }
    finally
    {
        // closing the reader also closes the GZIP stream chain
        input.close();
        output.close();
    }
}
java
{ "resource": "" }
q9482
TorqueDBHandling.writeCompressedTexts
train
/**
 * Writes every (file name -> compressed content) pair in the map as a file
 * inside the given directory.
 * Iterates over entries instead of keys to avoid a second map lookup per
 * file; Map.Entry is referenced fully qualified so no new import is needed.
 *
 * @param dir      the target directory
 * @param contents map of file name (String) to compressed content (byte[])
 * @throws IOException on write failure
 */
private void writeCompressedTexts(File dir, HashMap contents) throws IOException
{
    for (Iterator it = contents.entrySet().iterator(); it.hasNext();)
    {
        java.util.Map.Entry entry = (java.util.Map.Entry)it.next();
        writeCompressedText(new File(dir, (String)entry.getKey()), (byte[])entry.getValue());
    }
}
java
{ "resource": "" }
q9483
TorqueDBHandling.setWorkDir
train
/**
 * Sets the working directory used for generated files.
 *
 * @param dir path of the directory; must exist and be readable and writable
 * @throws IOException when the directory is missing or not accessible
 */
public void setWorkDir(String dir) throws IOException
{
    File candidate = new File(dir);
    boolean accessible = candidate.exists() && candidate.canWrite() && candidate.canRead();
    if (!accessible)
    {
        throw new IOException("Cannot access directory "+dir);
    }
    _workDir = candidate;
}
java
{ "resource": "" }
q9484
TorqueDBHandling.getWorkDir
train
/** Lazily determines the working directory: when none was set, creates (and deletes) a temp file and uses its parent directory, falling back to "." for an empty path. NOTE(review): the substring relies on the temp path containing a File.separatorChar — a path without one would make lastIndexOf return -1 and substring throw; temp files normally always have a separator, confirm. */
private File getWorkDir() throws IOException { if (_workDir == null) { File dummy = File.createTempFile("dummy", ".log"); String workDir = dummy.getPath().substring(0, dummy.getPath().lastIndexOf(File.separatorChar)); if ((workDir == null) || (workDir.length() == 0)) { workDir = "."; } dummy.delete(); _workDir = new File(workDir); } return _workDir; }
java
{ "resource": "" }
q9485
TorqueDBHandling.deleteDir
train
/**
 * Recursively deletes the given directory and all of its contents.
 * A no-op when dir does not exist or is not a directory. listFiles() is
 * null-checked because it can return null when the directory vanishes or
 * becomes unreadable between the exists() check and the listing (the
 * original would have thrown a NullPointerException in that case); the
 * directory itself is still deleted afterwards.
 *
 * @param dir the directory to remove
 */
private void deleteDir(File dir)
{
    if (dir.exists() && dir.isDirectory())
    {
        File[] files = dir.listFiles();
        if (files != null)
        {
            for (int idx = 0; idx < files.length; idx++)
            {
                if (!files[idx].exists())
                {
                    continue;
                }
                if (files[idx].isDirectory())
                {
                    deleteDir(files[idx]);
                }
                else
                {
                    files[idx].delete();
                }
            }
        }
        dir.delete();
    }
}
java
{ "resource": "" }
q9486
Sequoia.getModuleGraph
train
/** REST endpoint (GET /graph/{name}/{version}, JSON) returning the dependency graph of the given module version. Responds 406 when name or version is null; query parameters initialize the FiltersHolder used by the graph handler. */
@GET @Produces(MediaType.APPLICATION_JSON) @Path("/graph/{name}/{version}") public Response getModuleGraph(@PathParam("name") final String moduleName, @PathParam("version") final String moduleVersion, @Context final UriInfo uriInfo){ LOG.info("Dependency Checker got a get module graph export request."); if(moduleName == null || moduleVersion == null){ return Response.serverError().status(HttpStatus.NOT_ACCEPTABLE_406).build(); } final FiltersHolder filters = new FiltersHolder(); filters.init(uriInfo.getQueryParameters()); final String moduleId = DbModule.generateID(moduleName, moduleVersion); final AbstractGraph moduleGraph = getGraphsHandler(filters).getModuleGraph(moduleId); return Response.ok(moduleGraph).build(); }
java
{ "resource": "" }
q9487
GeoToolsLayer.update
train
/** Updates an existing feature in the underlying GeoTools store: synchronizes the transaction, then writes all attribute values and the geometry (via two modifyFeatures calls) selected by a FID filter on the feature's id. I/O failures mark the feature model unusable and are wrapped in LayerException; a non-SimpleFeatureStore source is rejected with CREATE_OR_UPDATE_NOT_IMPLEMENTED. */
void update(Object feature) throws LayerException { SimpleFeatureSource source = getFeatureSource(); if (source instanceof SimpleFeatureStore) { SimpleFeatureStore store = (SimpleFeatureStore) source; String featureId = getFeatureModel().getId(feature); Filter filter = filterService.createFidFilter(new String[] { featureId }); transactionSynchronization.synchTransaction(store); List<Name> names = new ArrayList<Name>(); Map<String, Attribute> attrMap = getFeatureModel().getAttributes(feature); List<Object> values = new ArrayList<Object>(); for (Map.Entry<String, Attribute> entry : attrMap.entrySet()) { String name = entry.getKey(); names.add(store.getSchema().getDescriptor(name).getName()); values.add(entry.getValue().getValue()); } try { store.modifyFeatures(names.toArray(new Name[names.size()]), values.toArray(), filter); store.modifyFeatures(store.getSchema().getGeometryDescriptor().getName(), getFeatureModel() .getGeometry(feature), filter); log.debug("Updated feature {} in {}", featureId, getFeatureSourceName()); } catch (IOException ioe) { featureModelUsable = false; throw new LayerException(ioe, ExceptionCode.LAYER_MODEL_IO_EXCEPTION); } } else { log.error("Don't know how to create or update " + getFeatureSourceName() + ", class " + source.getClass().getName() + " does not implement SimpleFeatureStore"); throw new LayerException(ExceptionCode.CREATE_OR_UPDATE_NOT_IMPLEMENTED, getFeatureSourceName(), source .getClass().getName()); } }
java
{ "resource": "" }
q9488
FoundationLoggingPatternConverter.format
train
/** Formats the logging event by running each pattern converter in sequence, letting the matching pattern field pad/align the text each converter appended (using the buffer position recorded before the converter ran). */
@Override public void format(final StringBuffer sbuf, final LoggingEvent event) { for (int i = 0; i < patternConverters.length; i++) { final int startField = sbuf.length(); patternConverters[i].format(event, sbuf); patternFields[i].format(startField, sbuf); } }
java
{ "resource": "" }
q9489
RepositoryDataTask.readSingleSchemaFile
train
/** Reads one Torque schema file into a Database model. Logs an error and returns null when the path is not a file or not readable; wraps read failures in a BuildException. */
private Database readSingleSchemaFile(DatabaseIO reader, File schemaFile) { Database model = null; if (!schemaFile.isFile()) { log("Path "+schemaFile.getAbsolutePath()+" does not denote a schema file", Project.MSG_ERR); } else if (!schemaFile.canRead()) { log("Could not read schema file "+schemaFile.getAbsolutePath(), Project.MSG_ERR); } else { try { model = reader.read(schemaFile); log("Read schema file "+schemaFile.getAbsolutePath(), Project.MSG_INFO); } catch (Exception ex) { throw new BuildException("Could not read schema file "+schemaFile.getAbsolutePath()+": "+ex.getLocalizedMessage(), ex); } } return model; }
java
{ "resource": "" }
q9490
RepositoryDataTask.initOJB
train
/** Bootstraps OJB for the Ant task: resolves the OJB.properties file (explicit attribute or ./OJB.properties), optionally merges an explicit repository file, otherwise — when nothing was auto-loaded — resolves the repository path from the properties file (also trying relative to the properties file's directory) and merges it; finally derives the default PBKey from the default connection descriptor when unset. All failures surface as BuildException. NOTE(review): the FileInputStream used to load the properties is never closed — a small resource leak worth fixing. */
private MetadataManager initOJB() { try { if (_ojbPropertiesFile == null) { _ojbPropertiesFile = new File("OJB.properties"); if (!_ojbPropertiesFile.exists()) { throw new BuildException("Could not find OJB.properties, please specify it via the ojbpropertiesfile attribute"); } } else { if (!_ojbPropertiesFile.exists()) { throw new BuildException("Could not load the specified OJB properties file "+_ojbPropertiesFile); } log("Using properties file "+_ojbPropertiesFile.getAbsolutePath(), Project.MSG_INFO); System.setProperty("OJB.properties", _ojbPropertiesFile.getAbsolutePath()); } MetadataManager metadataManager = MetadataManager.getInstance(); RepositoryPersistor persistor = new RepositoryPersistor(); if (_repositoryFile != null) { if (!_repositoryFile.exists()) { throw new BuildException("Could not load the specified repository file "+_repositoryFile); } log("Loading repository file "+_repositoryFile.getAbsolutePath(), Project.MSG_INFO); // this will load the info from the specified repository file // and merge it with the existing info (if it has been loaded) metadataManager.mergeConnectionRepository(persistor.readConnectionRepository(_repositoryFile.getAbsolutePath())); metadataManager.mergeDescriptorRepository(persistor.readDescriptorRepository(_repositoryFile.getAbsolutePath())); } else if (metadataManager.connectionRepository().getAllDescriptor().isEmpty() && metadataManager.getGlobalRepository().getDescriptorTable().isEmpty()) { // Seems nothing was loaded, probably because we're not starting in the directory // that the properties file is in, and the repository file path is relative // So lets try to resolve this path and load the repository info manually Properties props = new Properties(); props.load(new FileInputStream(_ojbPropertiesFile)); String repositoryPath = props.getProperty("repositoryFile", "repository.xml"); File repositoryFile = new File(repositoryPath); if (!repositoryFile.exists()) { repositoryFile = new 
File(_ojbPropertiesFile.getParentFile(), repositoryPath); } metadataManager.mergeConnectionRepository(persistor.readConnectionRepository(repositoryFile.getAbsolutePath())); metadataManager.mergeDescriptorRepository(persistor.readDescriptorRepository(repositoryFile.getAbsolutePath())); } // we might have to determine the default pb key ourselves if (metadataManager.getDefaultPBKey() == null) { for (Iterator it = metadataManager.connectionRepository().getAllDescriptor().iterator(); it.hasNext();) { JdbcConnectionDescriptor descriptor = (JdbcConnectionDescriptor)it.next(); if (descriptor.isDefaultConnection()) { metadataManager.setDefaultPBKey(new PBKey(descriptor.getJcdAlias(), descriptor.getUserName(), descriptor.getPassWord())); break; } } } return metadataManager; } catch (Exception ex) { if (ex instanceof BuildException) { throw (BuildException)ex; } else { throw new BuildException(ex); } } }
java
{ "resource": "" }
q9491
PrintServiceImpl.putDocument
train
/**
 * Stores the document under a freshly generated random UUID key.
 *
 * @param document the document to register
 * @return the generated key, to be used for later retrieval/removal
 */
public String putDocument(Document document) {
	String documentKey = UUID.randomUUID().toString();
	documentMap.put(documentKey, document);
	return documentKey;
}
java
{ "resource": "" }
q9492
PrintServiceImpl.removeDocument
train
/**
 * Removes and returns the document stored under the given key.
 * Uses a single remove() call instead of containsKey()+remove(), so the
 * existence check and the removal cannot race with a concurrent caller
 * (assumes null documents are never stored — putDocument generates the key
 * itself and stores whatever document it is given).
 *
 * @param key the key returned by putDocument()
 * @return the removed document
 * @throws PrintingException with DOCUMENT_NOT_FOUND when no document is
 *         stored under the key
 */
public Document removeDocument(String key) throws PrintingException {
	Document document = documentMap.remove(key);
	if (document == null) {
		throw new PrintingException(PrintingException.DOCUMENT_NOT_FOUND, key);
	}
	return document;
}
java
{ "resource": "" }
q9493
SqlSelectByPkStatement.buildQuery
train
/** Builds a query-by-criteria template for selecting by primary key: one equal-to criterion per PK field, each with a null placeholder value (presumably bound later — confirm against the caller). */
private static Query buildQuery(ClassDescriptor cld) { FieldDescriptor[] pkFields = cld.getPkFields(); Criteria crit = new Criteria(); for(int i = 0; i < pkFields.length; i++) { crit.addEqualTo(pkFields[i].getAttributeName(), null); } return new QueryByCriteria(cld.getClassOfObject(), crit); }
java
{ "resource": "" }
q9494
GeomajasException.getMessage
train
/** Returns the localized message for this exception. When a cause exists, appends the translated "ROOT_CAUSE" label and the cause's message (recursively localized when the cause is itself a GeomajasException); otherwise returns just the short message. */
public String getMessage(Locale locale) { if (getCause() != null) { String message = getShortMessage(locale) + ", " + translate("ROOT_CAUSE", locale) + " "; if (getCause() instanceof GeomajasException) { return message + ((GeomajasException) getCause()).getMessage(locale); } return message + getCause().getMessage(); } else { return getShortMessage(locale); } }
java
{ "resource": "" }
q9495
GeomajasException.getShortMessage
train
/** Builds the localized short message: translates the exception code, then substitutes each message parameter — "$${i}" placeholders get a translated parameter (falling back to toString, then "[null]"), "${i}" placeholders get the verbatim parameter; parameters without any placeholder are appended in parentheses. */
public String getShortMessage(Locale locale) { String message; message = translate(Integer.toString(exceptionCode), locale); if (message != null && msgParameters != null && msgParameters.length > 0) { for (int i = 0; i < msgParameters.length; i++) { boolean isIncluded = false; String needTranslationParam = "$${" + i + "}"; if (message.contains(needTranslationParam)) { String translation = translate(msgParameters[i], locale); if (null == translation && null != msgParameters[i]) { translation = msgParameters[i].toString(); } if (null == translation) { translation = "[null]"; } message = message.replace(needTranslationParam, translation); isIncluded = true; } String verbatimParam = "${" + i + "}"; String rs = null == msgParameters[i] ? "[null]" : msgParameters[i].toString(); if (message.contains(verbatimParam)) { message = message.replace(verbatimParam, rs); isIncluded = true; } if (!isIncluded) { message = message + " (" + rs + ")"; // NOSONAR replace/contains makes StringBuilder use difficult } } } return message; }
java
{ "resource": "" }
q9496
RasterDirectLayer.toDirectColorModel
train
/** Converts the given rendered image to a 4-byte ABGR direct color model by color-converting it into a fresh BufferedImage, wrapped as a PlanarImage. */
public PlanarImage toDirectColorModel(RenderedImage img) { BufferedImage dest = new BufferedImage(img.getWidth(), img.getHeight(), BufferedImage.TYPE_4BYTE_ABGR); BufferedImage source = new BufferedImage(img.getColorModel(), (WritableRaster) img.getData(), img .getColorModel().isAlphaPremultiplied(), null); ColorConvertOp op = new ColorConvertOp(null); op.filter(source, dest); return PlanarImage.wrapRenderedImage(dest); }
java
{ "resource": "" }
q9497
OTMJCAManagedConnectionFactory.createManagedConnection
train
/** JCA SPI: acquires an OTM connection for the PBKey carried in the request info and wraps it in a new OTMJCAManagedConnection; ResourceExceptions are rethrown as OTMConnectionRuntimeException. The Subject argument is unused. */
public ManagedConnection createManagedConnection(Subject subject, ConnectionRequestInfo info) { Util.log("In OTMJCAManagedConnectionFactory.createManagedConnection"); try { Kit kit = getKit(); PBKey key = ((OTMConnectionRequestInfo) info).getPbKey(); OTMConnection connection = kit.acquireConnection(key); return new OTMJCAManagedConnection(this, connection, key); } catch (ResourceException e) { throw new OTMConnectionRuntimeException(e.getMessage()); } }
java
{ "resource": "" }
q9498
SinglePageDocument.render
train
/** Renders this document to the output stream in the requested format and resolution, lazily preparing (laying out into the internal buffer) on first use. Any failure is wrapped in PrintingException(DOCUMENT_RENDER_PROBLEM). */
public void render(OutputStream outputStream, Format format, int dpi) throws PrintingException { try { if (baos == null) { prepare(); } writeDocument(outputStream, format, dpi); } catch (Exception e) { // NOSONAR throw new PrintingException(e, PrintingException.DOCUMENT_RENDER_PROBLEM); } }
java
{ "resource": "" }
q9499
SinglePageDocument.prepare
train
/** Lays out and renders the page into the internal byte buffer as a PDF: opens an iText document sized to the page bounds, runs a first sizing pass, and — when the requested width or height was 0 — restarts with a new document sized to the (rounded-up) computed bounds; then performs the layout and render passes, adds the resulting image and closes the document. Each call resets the buffer. */
private void prepare() throws IOException, DocumentException, PrintingException { if (baos == null) { baos = new ByteArrayOutputStream(); // let it grow as much as needed } baos.reset(); boolean resize = false; if (page.getConstraint().getWidth() == 0 || page.getConstraint().getHeight() == 0) { resize = true; } // Create a document in the requested ISO scale. Document document = new Document(page.getBounds(), 0, 0, 0, 0); PdfWriter writer; writer = PdfWriter.getInstance(document, baos); // Render in correct colors for transparent rasters writer.setRgbTransparencyBlending(true); // The mapView is not scaled to the document, we assume the mapView // has the right ratio. // Write document title and metadata document.open(); PdfContext context = new PdfContext(writer); context.initSize(page.getBounds()); // first pass of all children to calculate size page.calculateSize(context); if (resize) { // we now know the bounds of the document // round 'm up and restart with a new document int width = (int) Math.ceil(page.getBounds().getWidth()); int height = (int) Math.ceil(page.getBounds().getHeight()); page.getConstraint().setWidth(width); page.getConstraint().setHeight(height); document = new Document(new Rectangle(width, height), 0, 0, 0, 0); writer = PdfWriter.getInstance(document, baos); // Render in correct colors for transparent rasters writer.setRgbTransparencyBlending(true); document.open(); baos.reset(); context = new PdfContext(writer); context.initSize(page.getBounds()); } // int compressionLevel = writer.getCompressionLevel(); // For testing // writer.setCompressionLevel(0); // Actual drawing document.addTitle("Geomajas"); // second pass to layout page.layout(context); // finally render (uses baos) page.render(context); document.add(context.getImage()); // Now close the document document.close(); }
java
{ "resource": "" }