index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/FortressResponseInterceptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.jaxrs.interceptor.JAXRSOutInterceptor;
import org.apache.cxf.message.Message;
import org.apache.cxf.message.MessageContentsList;
import org.apache.cxf.phase.AbstractPhaseInterceptor;
import org.apache.cxf.phase.Phase;
import org.apache.directory.fortress.core.model.FortResponse;
/**
* Interceptor to set the HTTP Status code based on the value present in FortResponse.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class FortressResponseInterceptor extends AbstractPhaseInterceptor<Message>
{
    /**
     * Default constructor. Registers this interceptor in the MARSHAL phase,
     * just before the JAX-RS out interceptor serializes the payload.
     */
    public FortressResponseInterceptor()
    {
        super(Phase.MARSHAL);
        addBefore(JAXRSOutInterceptor.class.getName());
    }

    /**
     * Copies the HTTP status carried inside an outbound {@link FortResponse}
     * payload onto the exchange, so the client receives that status code.
     *
     * @param message the current CXF message
     * @throws Fault never thrown by this implementation
     */
    @Override
    public void handleMessage(Message message) throws Fault
    {
        if ( !isOutbound( message ) )
        {
            return;
        }
        MessageContentsList contents = MessageContentsList.getContentsList( message );
        if ( ( contents == null ) || contents.isEmpty() )
        {
            return;
        }
        Object payload = contents.get( 0 );
        if ( payload instanceof FortResponse )
        {
            message.getExchange().put( Message.RESPONSE_CODE, ( ( FortResponse ) payload ).getHttpStatus() );
        }
    }

    /**
     * Tells whether the given message travels in the outbound direction,
     * i.e. it is the exchange's out message or its out fault message.
     *
     * @param message the message to test
     * @return true when the message is outbound
     */
    private boolean isOutbound( Message message )
    {
        return ( message == message.getExchange().getOutMessage() )
            || ( message == message.getExchange().getOutFaultMessage() );
    }
}
| 1,200 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/AccessMgrImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.directory.fortress.core.AccessMgr;
import org.apache.directory.fortress.core.AccessMgrFactory;
import org.apache.directory.fortress.core.GlobalErrIds;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Set;
/**
* Utility for Fortress Rest Server. This class is thread safe.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
class AccessMgrImpl extends AbstractMgrImpl
{
    /** A logger for this class */
    private static final Logger LOG = LoggerFactory.getLogger( AccessMgrImpl.class.getName() );

    /** A flag for trusted sessions */
    private static final boolean TRUSTED = true;

    /** A flag for untrusted sessions */
    private static final boolean UNTRUSTED = false;

    /**
     * ************************************************************************************************************************************
     * BEGIN ACCESSMGR
     * **************************************************************************************************************************************
     */

    /**
     * Authenticates the userId/password carried in the request's {@link User} entity.
     * On success the resulting {@link Session} is set on the response along with
     * {@link GlobalErrIds#NO_ERROR}; on failure the error fields are populated instead.
     *
     * @param request carries a {@link User} entity holding the credentials to verify
     * @return response holding the authenticated session, or error details on failure
     */
    /* No qualifier */ FortResponse authenticate( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            User inUser = (User) request.getEntity();
            Session outSession = accessMgr.authenticate( inUser.getUserId(), inUser.getPassword() );
            response.setSession( outSession );
            response.setErrorCode( GlobalErrIds.NO_ERROR );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Creates an untrusted session
     *
     * @param request The request We want to create a session for
     * @return The created response
     */
    /* no qualifier*/ FortResponse createSession( FortRequest request )
    {
        return createSession( request, UNTRUSTED );
    }

    /**
     * Creates a trusted session
     *
     * @param request The request We want to create a session for
     * @return The created response
     */
    /* no qualifier*/ FortResponse createSessionTrusted( FortRequest request )
    {
        return createSession( request, TRUSTED );
    }

    /**
     * Creates a group-type trusted session
     *
     * @param request The request We want to create a session for; its entity is a {@link Group}
     * @return The created response carrying the new session, or error details on failure
     */
    /* no qualifier*/ FortResponse createGroupSession( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            Group inGroup = (Group) request.getEntity();
            Session outSession = accessMgr.createSession( inGroup );
            response.setSession( outSession );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Creates a session, trusted or untrusted
     *
     * @param request The request We want to create a session for; its entity is a {@link User}
     * @param trusted Is the session trusted or not
     * @return The created response carrying the new session, or error details on failure
     */
    private FortResponse createSession( FortRequest request, boolean trusted )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            User inUser = (User) request.getEntity();
            Session outSession = accessMgr.createSession( inUser, trusted );
            response.setSession( outSession );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Perform user RBAC authorization: checks whether the caller's session may
     * perform the {@link Permission} carried in the request entity.  The (possibly
     * updated) session and the boolean outcome are both placed on the response.
     *
     * @param request The {@link FortRequest} we have to check
     * @return a {@link FortResponse} containing the response
     */
    /* no qualifier*/ FortResponse checkAccess( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            Permission perm = (Permission)request.getEntity();
            // Force the RBAC (non-admin) permission space for this check.
            perm.setAdmin( false );
            Session session = request.getSession();
            boolean result = accessMgr.checkAccess( session, perm );
            response.setSession( session );
            response.setAuthorized( result );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Combined session-creation and RBAC authorization check: verifies in one call
     * whether the {@link User} (entity2) holds the {@link Permission} (entity).
     * Only the boolean outcome is returned; no session is placed on the response.
     *
     * @param request The {@link FortRequest} we have to check; isFlag selects a trusted check
     * @return a {@link FortResponse} containing the response
     */
    /* no qualifier*/ FortResponse createSessionCheckAccess( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            Permission perm = (Permission)request.getEntity();
            // Force the RBAC (non-admin) permission space for this check.
            perm.setAdmin( false );
            User user = (User) request.getEntity2();
            boolean isTrusted = request.getIsFlag();
            boolean result = accessMgr.checkAccess( user, perm, isTrusted );
            response.setAuthorized( result );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Perform user ROLE check: determines whether the {@link User} (entity2) is in
     * the {@link Role} (entity).  Only the boolean outcome is returned.
     *
     * @param request The {@link FortRequest} we have to check; isFlag selects a trusted check
     * @return a {@link FortResponse} containing the response
     */
    /* no qualifier*/ FortResponse isUserInRole( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            Role role = (Role)request.getEntity();
            User user = (User) request.getEntity2();
            boolean isTrusted = request.getIsFlag();
            boolean result = accessMgr.isUserInRole( user, role, isTrusted );
            response.setAuthorized( result );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Returns all {@link Permission}s granted to the caller's session as the
     * response entity list, along with the session itself.
     *
     * @param request carries the session to inspect
     * @return response holding the session's permissions, or error details on failure
     */
    /* No qualifier */ FortResponse sessionPermissions( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            Session session = request.getSession();
            List<Permission> perms = accessMgr.sessionPermissions( session );
            response.setSession( session );
            response.setEntities( perms );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Returns the {@link UserRole}s activated in the caller's session as the
     * response entity list, along with the session itself.
     *
     * @param request carries the session to inspect
     * @return response holding the session's roles, or error details on failure
     */
    /* No qualifier */ FortResponse sessionRoles( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            Session session = request.getSession();
            List<UserRole> roles = accessMgr.sessionRoles( session );
            response.setEntities( roles );
            response.setSession( session );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Returns the set of authorized role names for the caller's session in the
     * response value set, along with the session itself.
     *
     * @param request carries the session to inspect
     * @return response holding the authorized role names, or error details on failure
     */
    /* No qualifier */ FortResponse authorizedSessionRoles( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            Session session = request.getSession();
            Set<String> roles = accessMgr.authorizedRoles( session );
            response.setValueSet( roles );
            response.setSession( session );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Activates the {@link UserRole} carried in the request entity within the
     * caller's session, then returns the mutated session.
     *
     * @param request carries the role to activate and the session to mutate
     * @return response holding the updated session, or error details on failure
     */
    /* No qualifier */ FortResponse addActiveRole( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            UserRole uRole = (UserRole)request.getEntity();
            Session session = request.getSession();
            accessMgr.addActiveRole( session, uRole );
            response.setSession( session );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Deactivates the {@link UserRole} carried in the request entity within the
     * caller's session, then returns the mutated session.
     *
     * @param request carries the role to deactivate and the session to mutate
     * @return response holding the updated session, or error details on failure
     */
    /* No qualifier */ FortResponse dropActiveRole( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            UserRole uRole = (UserRole)request.getEntity();
            Session session = request.getSession();
            accessMgr.dropActiveRole( session, uRole );
            response.setSession( session );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Resolves the userId bound to the caller's session and returns it wrapped
     * in a new {@link User} entity on the response.
     *
     * @param request carries the session to inspect
     * @return response holding a {@link User} with the session's userId, or error details on failure
     */
    /* No qualifier */ FortResponse getUserId( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            Session session = request.getSession();
            String userId = accessMgr.getUserId( session );
            User outUser = new User( userId );
            response.setSession( session );
            response.setEntity( outUser );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Resolves the full {@link User} entity bound to the caller's session and
     * returns it on the response.
     *
     * @param request carries the session to inspect
     * @return response holding the session's {@link User}, or error details on failure
     */
    /* No qualifier */ FortResponse getUser( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AccessMgr accessMgr = AccessMgrFactory.createInstance( request.getContextId() );
            Session session = request.getSession();
            User outUser = accessMgr.getUser( session );
            response.setSession( session );
            response.setEntity( outUser );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }
}
| 1,201 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/AuditMgrImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.directory.fortress.core.AuditMgr;
import org.apache.directory.fortress.core.AuditMgrFactory;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.AuthZ;
import org.apache.directory.fortress.core.model.Bind;
import org.apache.directory.fortress.core.model.Mod;
import org.apache.directory.fortress.core.model.UserAudit;
import org.apache.directory.fortress.core.model.FortRequest;
import org.apache.directory.fortress.core.model.FortResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
* Utility for Fortress Rest Server. This class is thread safe.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
class AuditMgrImpl extends AbstractMgrImpl
{
    /** Logger used to report security errors raised by the audit manager. */
    private static final Logger LOG = LoggerFactory.getLogger( AuditMgrImpl.class.getName() );

    /**
     * ************************************************************************************************************************************
     * BEGIN AUDIT
     * **************************************************************************************************************************************
     */

    /**
     * Searches the audit trail for authentication (bind) events matching the
     * {@link UserAudit} criteria carried in the request entity.
     *
     * @param request carries the search criteria and the admin session
     * @return response holding the matching {@link Bind} records, or error details on failure
     */
    /* No qualifier */ FortResponse searchBinds(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            UserAudit criteria = (UserAudit) request.getEntity();
            List<Bind> records = createAuditMgr( request ).searchBinds( criteria );
            response.setEntities( records );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Retrieves the authorization events recorded for the user identified by the
     * {@link UserAudit} criteria carried in the request entity.
     *
     * @param request carries the search criteria and the admin session
     * @return response holding the matching {@link AuthZ} records, or error details on failure
     */
    /* No qualifier */ FortResponse getUserAuthZs(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            UserAudit criteria = (UserAudit) request.getEntity();
            List<AuthZ> records = createAuditMgr( request ).getUserAuthZs( criteria );
            response.setEntities( records );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Searches the audit trail for authorization events matching the
     * {@link UserAudit} criteria carried in the request entity.
     *
     * @param request carries the search criteria and the admin session
     * @return response holding the matching {@link AuthZ} records, or error details on failure
     */
    /* No qualifier */ FortResponse searchAuthZs(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            UserAudit criteria = (UserAudit) request.getEntity();
            List<AuthZ> records = createAuditMgr( request ).searchAuthZs( criteria );
            response.setEntities( records );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Searches the audit trail for user session events matching the
     * {@link UserAudit} criteria carried in the request entity.
     *
     * @param request carries the search criteria and the admin session
     * @return response holding the matching {@link Mod} records, or error details on failure
     */
    /* No qualifier */ FortResponse searchUserSessions(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            UserAudit criteria = (UserAudit) request.getEntity();
            List<Mod> records = createAuditMgr( request ).searchUserSessions( criteria );
            response.setEntities( records );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Searches the audit trail for administrative modification events matching
     * the {@link UserAudit} criteria carried in the request entity.
     *
     * @param request carries the search criteria and the admin session
     * @return response holding the matching {@link Mod} records, or error details on failure
     */
    /* No qualifier */ FortResponse searchAdminMods(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            UserAudit criteria = (UserAudit) request.getEntity();
            List<Mod> records = createAuditMgr( request ).searchAdminMods( criteria );
            response.setEntities( records );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Searches the audit trail for failed authentication attempts matching the
     * {@link UserAudit} criteria carried in the request entity.
     *
     * @param request carries the search criteria and the admin session
     * @return response holding the matching {@link AuthZ} records, or error details on failure
     */
    /* No qualifier */ FortResponse searchInvalidUsers(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            UserAudit criteria = (UserAudit) request.getEntity();
            List<AuthZ> records = createAuditMgr( request ).searchInvalidUsers( criteria );
            response.setEntities( records );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Builds an {@link AuditMgr} instance for the request's context and applies
     * the caller's admin session to it, mirroring the per-method setup each
     * public operation performs.
     *
     * @param request carries the context id and the admin session
     * @return a ready-to-use audit manager
     * @throws SecurityException when the manager cannot be created or the session is rejected
     */
    private AuditMgr createAuditMgr( FortRequest request ) throws SecurityException
    {
        AuditMgr auditMgr = AuditMgrFactory.createInstance( request.getContextId() );
        auditMgr.setAdmin( request.getSession() );
        return auditMgr;
    }
}
| 1,202 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/PswdPolicyMgrImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.directory.fortress.core.PwPolicyMgr;
import org.apache.directory.fortress.core.PwPolicyMgrFactory;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.PwPolicy;
import org.apache.directory.fortress.core.model.FortRequest;
import org.apache.directory.fortress.core.model.FortResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
* Utility for Fortress Rest Server. This class is thread safe.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
class PswdPolicyMgrImpl extends AbstractMgrImpl
{
    /** A logger for this class */
    private static final Logger LOG = LoggerFactory.getLogger( PswdPolicyMgrImpl.class.getName() );

    /**
     * ************************************************************************************************************************************
     * BEGIN PSWDPOLICYMGR
     * **************************************************************************************************************************************
     */

    /**
     * Adds the {@link PwPolicy} carried in the request entity.
     *
     * @param request carries the policy to add and the admin session
     * @return response echoing the added policy, or error details on failure
     */
    /* No qualifier */ FortResponse addPolicy( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            PwPolicy inPolicy = (PwPolicy) request.getEntity();
            PwPolicyMgr policyMgr = PwPolicyMgrFactory.createInstance( request.getContextId() );
            policyMgr.setAdmin( request.getSession() );
            policyMgr.add( inPolicy );
            response.setEntity( inPolicy );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Updates the {@link PwPolicy} carried in the request entity.
     *
     * @param request carries the policy to update and the admin session
     * @return response echoing the updated policy, or error details on failure
     */
    /* No qualifier */ FortResponse updatePolicy( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            PwPolicy inPolicy = (PwPolicy) request.getEntity();
            PwPolicyMgr policyMgr = PwPolicyMgrFactory.createInstance( request.getContextId() );
            policyMgr.setAdmin( request.getSession() );
            policyMgr.update( inPolicy );
            response.setEntity( inPolicy );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Deletes the {@link PwPolicy} carried in the request entity.
     *
     * @param request carries the policy to delete and the admin session
     * @return response echoing the deleted policy, or error details on failure
     */
    /* No qualifier */ FortResponse deletePolicy( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            PwPolicy inPolicy = (PwPolicy) request.getEntity();
            PwPolicyMgr policyMgr = PwPolicyMgrFactory.createInstance( request.getContextId() );
            policyMgr.setAdmin( request.getSession() );
            policyMgr.delete( inPolicy );
            response.setEntity( inPolicy );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Reads the policy whose name matches the {@link PwPolicy} entity in the request.
     *
     * @param request carries the policy name to read and the admin session
     * @return response holding the policy that was read, or error details on failure
     */
    /* No qualifier */ FortResponse readPolicy( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            PwPolicy inPolicy = (PwPolicy) request.getEntity();
            PwPolicyMgr policyMgr = PwPolicyMgrFactory.createInstance( request.getContextId() );
            policyMgr.setAdmin( request.getSession() );
            PwPolicy outPolicy = policyMgr.read( inPolicy.getName() );
            response.setEntity( outPolicy );
        }
        catch ( SecurityException se )
        {
            // FIX: route through createError like every other method in this class,
            // so the failure is logged and the response is populated consistently.
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Searches for policies whose names match the {@link PwPolicy} entity in the request.
     *
     * @param request carries the policy name pattern to search and the admin session
     * @return response holding the matching policies, or error details on failure
     */
    /* No qualifier */ FortResponse searchPolicy( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            PwPolicy inPolicy = (PwPolicy) request.getEntity();
            PwPolicyMgr policyMgr = PwPolicyMgrFactory.createInstance( request.getContextId() );
            policyMgr.setAdmin( request.getSession() );
            List<PwPolicy> policyList = policyMgr.search( inPolicy.getName() );
            response.setEntities( policyList );
        }
        catch ( SecurityException se )
        {
            // FIX: route through createError like every other method in this class,
            // so the failure is logged and the response is populated consistently.
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Points the user identified by the request value at the policy named in the
     * request entity.
     *
     * @param request carries the userId (value), the policy name (entity) and the admin session
     * @return response with no entity on success, or error details on failure
     */
    /* No qualifier */ FortResponse updateUserPolicy( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            PwPolicy inPolicy = (PwPolicy) request.getEntity();
            PwPolicyMgr policyMgr = PwPolicyMgrFactory.createInstance( request.getContextId() );
            policyMgr.setAdmin( request.getSession() );
            String userId = request.getValue();
            policyMgr.updateUserPolicy( userId, inPolicy.getName() );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Removes the password policy assignment from the user identified by the
     * request value.
     *
     * @param request carries the userId (value) and the admin session
     * @return response with no entity on success, or error details on failure
     */
    /* No qualifier */ FortResponse deleteUserPolicy( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            PwPolicyMgr policyMgr = PwPolicyMgrFactory.createInstance( request.getContextId() );
            policyMgr.setAdmin( request.getSession() );
            String userId = request.getValue();
            policyMgr.deletePasswordPolicy( userId );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }
}
| 1,203 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/SecUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import jakarta.servlet.http.HttpServletRequest;
import org.apache.directory.fortress.core.GlobalErrIds;
import org.apache.directory.fortress.core.GlobalIds;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.FortRequest;
import org.apache.directory.fortress.core.model.FortResponse;
import org.apache.directory.fortress.core.model.Session;
import org.apache.directory.fortress.core.util.Config;
import org.apache.directory.fortress.realm.J2eePolicyMgr;
import org.apache.directory.fortress.realm.J2eePolicyMgrFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Grab the Apache Fortress (RBAC) session from Tomcat container via the HttpServletRequest interface. This class is thread safe.
*
*/
public class SecUtils
{
    private static final Logger LOG = LoggerFactory.getLogger( SecUtils.class.getName() );

    // Shared policy manager used to deserialize the RBAC session from the container
    // principal.  Remains null when the static initializer fails; initializeSession
    // guards against that case explicitly.
    private static J2eePolicyMgr j2eePolicyMgr;

    static
    {
        try
        {
            j2eePolicyMgr = J2eePolicyMgrFactory.createInstance();
        }
        catch (SecurityException se)
        {
            String error = "initializeSession caught SecurityException in static block=" + se.getMessage();
            // Pass the throwable so the stack trace (root cause) is not lost.
            LOG.warn( error, se );
        }
    }

    /**
     * default constructor
     */
    public SecUtils()
    {
    }

    /**
     * Use Apache Fortress Realm interface to load the RBAC session via a standard interface.
     *
     * @param fortRequest Used to carry the session and other data.
     * @param httpRequest Used to get the security principal.
     * @return Response will contain the RBAC session object (if found) or a system error if a problem in the get. If arbac02 isn't enabled, it will return a NULL.
     */
    static FortResponse initializeSession(FortRequest fortRequest, HttpServletRequest httpRequest)
    {
        FortResponse fortResponse = null;
        // Have the fortress arbac02 runtime checks been enabled?
        if (Config.getInstance().getBoolean(GlobalIds.IS_ARBAC02))
        {
            if (httpRequest == null)
            {
                // Improper container config.
                fortResponse = createError( GlobalErrIds.REST_NULL_HTTP_REQ_ERR, "initializeSession detected null HTTP Request", 403);
            }
            else if (j2eePolicyMgr == null)
            {
                // Static initialization failed earlier; fail fast with a proper error
                // instead of throwing an NPE on the deserialize call below.
                fortResponse = createError( GlobalErrIds.USER_SESS_NULL, "initializeSession can't get a Security Session: J2eePolicyMgr unavailable.", 403);
            }
            else if (httpRequest.getUserPrincipal() == null)
            {
                // No authenticated principal on the request; nothing to deserialize.
                fortResponse = createError( GlobalErrIds.USER_SESS_NULL, "initializeSession couldn't find a security principal on the HTTP Request.", 403);
            }
            else
            {
                try
                {
                    // Get the security principal from the runtime.
                    String szPrincipal = httpRequest.getUserPrincipal().toString();
                    // This has to happen before it can be used by Fortress.
                    Session realmSession = j2eePolicyMgr.deserialize(szPrincipal);
                    if (realmSession != null)
                    {
                        // The RBAC Session successfully grabbed from the container.
                        fortRequest.setSession(realmSession);
                    }
                    else
                    {
                        fortResponse = createError( GlobalErrIds.USER_SESS_NULL, "initializeSession couldn't get a Security Session.", 403);
                    }
                }
                catch (SecurityException se)
                {
                    // A problem deserializing the security principal.
                    fortResponse = createError( se.getErrorId(), "initializeSession caught SecurityException=" + se.getMessage(), se.getHttpStatus());
                }
            }
        }
        return fortResponse;
    }

    /**
     * Builds an error response with the supplied code, message and HTTP status,
     * logging the message at INFO level.
     *
     * @param errId the Fortress error code
     * @param errMsg human-readable error description
     * @param hCode the HTTP status to return to the caller
     * @return a populated error {@link FortResponse}
     */
    private static FortResponse createError(int errId, String errMsg, int hCode)
    {
        FortResponse fortResponse = new FortResponse();
        fortResponse.setErrorCode(errId);
        fortResponse.setErrorMessage(errMsg);
        fortResponse.setHttpStatus(hCode);
        LOG.info(errMsg);
        return fortResponse;
    }
}
| 1,204 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/FortressServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import jakarta.annotation.security.RolesAllowed;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.core.Context;
import org.apache.directory.fortress.core.GlobalErrIds;
import org.apache.directory.fortress.core.model.*;
import org.apache.directory.fortress.core.rest.HttpIds;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
/**
* Implementation for Fortress Rest Service methods forwards to delegate. This class is thread safe.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
@Service("fortressService")
public class FortressServiceImpl implements FortressService
{
private static final Logger LOG = LoggerFactory.getLogger( FortressServiceImpl.class.getName() );
// Instantiate the implementation classes where the actual work is done:
private final ReviewMgrImpl reviewMgrImpl = new ReviewMgrImpl();
private final AdminMgrImpl adminMgrImpl = new AdminMgrImpl();
private final PswdPolicyMgrImpl pswdPolicyMgrImpl = new PswdPolicyMgrImpl();
private final DelegatedAccessMgrImpl delegatedAccessMgrImpl = new DelegatedAccessMgrImpl();
private final DelegatedReviewMgrImpl delegatedReviewMgrImpl = new DelegatedReviewMgrImpl();
private final DelegatedAdminMgrImpl delegatedAdminMgrImpl = new DelegatedAdminMgrImpl();
private final AccessMgrImpl accessMgrImpl = new AccessMgrImpl();
private final AuditMgrImpl auditMgrImpl = new AuditMgrImpl();
private final ConfigMgrImpl configMgrImpl = new ConfigMgrImpl();
private final GroupMgrImpl groupMgrImpl = new GroupMgrImpl();
// These are the allowed roles for the Fortress Rest services:
private static final String SUPER_USER = "fortress-rest-super-user";
private static final String ACCESS_MGR_USER = "fortress-rest-access-user";
private static final String ADMIN_MGR_USER = "fortress-rest-admin-user";
private static final String REVIEW_MGR_USER = "fortress-rest-review-user";
private static final String DELEGATED_ACCESS_MGR_USER = "fortress-rest-delaccess-user";
private static final String DELEGATED_ADMIN_MGR_USER = "fortress-rest-deladmin-user";
private static final String DELEGATED_REVIEW_MGR_USER = "fortress-rest-delreview-user";
private static final String PASSWORD_MGR_USER = "fortress-rest-pwmgr-user";
private static final String AUDIT_MGR_USER = "fortress-rest-audit-user";
private static final String CONFIG_MGR_USER = "fortress-rest-config-user";
@Context
private HttpServletRequest httpRequest;
/**
* ************************************************************************************************************************************
* BEGIN ADMINMGR
* **************************************************************************************************************************************
*/
    /**
     * Default constructor.
     */
    public FortressServiceImpl()
    {
    }
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_ADD + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addUser( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_DELETE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deleteUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deleteUser( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_DISABLE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse disableUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.disableUser( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_UPDATE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse updateUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.updateUser( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_CHGPW + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse changePassword( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.changePassword( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_LOCK + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse lockUserAccount( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.lockUserAccount( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_UNLOCK + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse unlockUserAccount( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.unlockUserAccount( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_RESET + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse resetPassword( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.resetPassword( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_ADD + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addRole( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addRole( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_DELETE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deleteRole( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deleteRole( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_UPDATE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse updateRole( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.updateRole( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_ASGN + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse assignUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.assignUser( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_DEASGN + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deassignUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deassignUser( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_ADD + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addPermission( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addPermission( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_UPDATE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse updatePermission( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.updatePermission( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_DELETE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deletePermission( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deletePermission( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.OBJ_ADD + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addPermObj( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addPermObj( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.OBJ_UPDATE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse updatePermObj( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.updatePermObj( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.OBJ_DELETE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deletePermObj( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deletePermObj( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_GRANT + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse grant( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.grant( request, this );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_REVOKE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse revoke( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.revoke( request, this );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_GRANT + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse grantUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.grantUser( request, this );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_REVOKE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse revokeUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.revokeUser( request, this );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_DESC + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addDescendant( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addDescendant( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_ASC + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addAscendant( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addAscendant( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_ADDINHERIT + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addInheritance( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addInheritance( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_DELINHERIT + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deleteInheritance( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deleteInheritance( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_ADD + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse createSsdSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.createSsdSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_UPDATE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse updateSsdSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.updateSsdSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_ADD_MEMBER + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addSsdRoleMember( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addSsdRoleMember( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_DEL_MEMBER + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deleteSsdRoleMember( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deleteSsdRoleMember( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_DELETE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deleteSsdSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deleteSsdSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_CARD_UPDATE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse setSsdSetCardinality( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.setSsdSetCardinality( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_ADD + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse createDsdSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.createDsdSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_UPDATE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse updateDsdSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.updateDsdSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_ADD_MEMBER + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addDsdRoleMember( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addDsdRoleMember( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_DEL_MEMBER + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deleteDsdRoleMember( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deleteDsdRoleMember( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_DELETE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deleteDsdSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deleteDsdSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_CARD_UPDATE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse setDsdSetCardinality( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.setDsdSetCardinality( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_ADD_CONSTRAINT + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addRoleConstraint( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addRoleConstraint( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_DELETE_CONSTRAINT + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse removeRoleConstraint( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.removeRoleConstraint( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_DELETE_CONSTRAINT_ID + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse removeRoleConstraintWid( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.removeRoleConstraintWid( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_ADD_ATTRIBUTE_SET + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addPermissionAttributeSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addPermissionAttributeSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_DELETE_ATTRIBUTE_SET + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deletePermissionAttributeSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.deletePermissionAttributeSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_ADD_PERM_ATTRIBUTE_TO_SET + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addPermissionAttributeToSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.addPermissionAttributeToSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_DELETE_PERM_ATTRIBUTE_TO_SET + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse removePermissionAttributeFromSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.removePermissionAttributeFromSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_UPDATE_PERM_ATTRIBUTE_IN_SET + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse updatePermissionAttributeInSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.updatePermissionAttributeInSet( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_ENABLE_CONSTRAINT + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse enableRoleConstraint( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.enableRoleConstraint( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_DISABLE_CONSTRAINT + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse disableRoleConstraint( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = adminMgrImpl.disableRoleConstraint( request );
return response;
}
/**
* ************************************************************************************************************************************
* BEGIN REVIEWMGR
* **************************************************************************************************************************************
*/
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_READ + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse readPermission( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.readPermission( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.OBJ_READ + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse readPermObj( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.readPermObj( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_SEARCH + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse findPermissions( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.findPermissions( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_OBJ_SEARCH + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse findPermsByObj( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.findObjPermissions( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_SEARCH_ANY + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse findAnyPermissions( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.findAnyPermissions( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.OBJ_SEARCH + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse findPermObjs( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.findPermObjs( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_READ + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse readRole( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.readRole( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_SEARCH + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse findRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.findRoles( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_READ + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse readUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.readUserM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_SEARCH + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse findUsers( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.findUsersM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_ASGNED + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse assignedUsers( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.assignedUsersM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_ASGNED_CONSTRAINTS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse assignedUsersConstraints( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.assignedUsersConstraints( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_ASGNED_CONSTRAINTS_KEY + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse assignedUsersConstraintsKey( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.assignedUsersConstraintsKey( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_ASGNED + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse assignedRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.assignedRolesM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_AUTHZED + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse authorizedUsers( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.authorizedUsersM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_AUTHZED + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse authorizedRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.authorizedRoleM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_ROLES + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse permissionRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.permissionRolesM( request );
return response;
}
@POST
@Path("/" + HttpIds.ROLE_FIND_CONSTRAINTS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse findRoleConstraints( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.findRoleConstraintsM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ROLE_PERMS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse rolePermissions( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.rolePermissionsM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_PERMS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse userPermissions( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.userPermissionsM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_ROLES_AUTHZED + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse authorizedPermissionRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.authorizedPermissionRolesM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_USERS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse permissionUsers( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.permissionUsersM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PERM_USERS_AUTHZED + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse authorizedPermissionUsers( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.authorizedPermissionUsersM( request );
return response;
}
@POST
@Path("/" + HttpIds.PERM_READ_PERM_ATTRIBUTE_SET + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse readPermAttributeSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.readPermAttributeSetM( request );
return response;
}
@POST
@Path("/" + HttpIds.ROLE_PERM_ATTR_SETS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse rolePermissionAttributeSets( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.rolePermissionAttributeSetsM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_ROLE_SETS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse ssdRoleSets( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.ssdRoleSetsM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_READ + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse ssdRoleSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.ssdRoleSetM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_ROLES + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse ssdRoleSetRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.ssdRoleSetRolesM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_CARD + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse ssdRoleSetCardinality( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.ssdRoleSetCardinalityM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.SSD_SETS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse ssdSets( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.ssdSetsM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_ROLE_SETS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse dsdRoleSets( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.dsdRoleSetsM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_READ + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse dsdRoleSet( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.dsdRoleSetM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_ROLES + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse dsdRoleSetRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.dsdRoleSetRolesM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_CARD + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse dsdRoleSetCardinality( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.dsdRoleSetCardinalityM( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.DSD_SETS + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse dsdSets( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = reviewMgrImpl.dsdSetsM( request );
return response;
}
/**
* ************************************************************************************************************************************
* BEGIN ACCESSMGR
* **************************************************************************************************************************************
*/
    // NOTE(review): the ACCESSMGR operations below delegate directly, without the
    // SecUtils.initializeSession pre-check used by the admin/review operations above —
    // presumably because these endpoints create or operate on RBAC sessions themselves;
    // confirm before changing.
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_AUTHN + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse authenticate( FortRequest request )
    {
        return accessMgrImpl.authenticate( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_CREATE + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse createSession( FortRequest request )
    {
        return accessMgrImpl.createSession( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_CREATE_TRUSTED + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse createSessionTrusted( FortRequest request )
    {
        return accessMgrImpl.createSessionTrusted( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_CREATE_GROUP_SESSION + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse createGroupSession(FortRequest request )
    {
        return accessMgrImpl.createGroupSession( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_AUTHZ + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse checkAccess( FortRequest request )
    {
        return accessMgrImpl.checkAccess( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_CHECK + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse createSessionCheckAccess( FortRequest request )
    {
        return accessMgrImpl.createSessionCheckAccess( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_CHECK_ROLE + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse isUserInRole( FortRequest request )
    {
        return accessMgrImpl.isUserInRole( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_PERMS + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse sessionPermissions( FortRequest request )
    {
        return accessMgrImpl.sessionPermissions( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_ROLES + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse sessionRoles( FortRequest request )
    {
        return accessMgrImpl.sessionRoles( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_AUTHZ_ROLES + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse authorizedSessionRoles( FortRequest request )
    {
        return accessMgrImpl.authorizedSessionRoles( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_ADD + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse addActiveRole( FortRequest request )
    {
        return accessMgrImpl.addActiveRole( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_DROP + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse dropActiveRole( FortRequest request )
    {
        return accessMgrImpl.dropActiveRole( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_USERID + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse getUserId( FortRequest request )
    {
        return accessMgrImpl.getUserId( request );
    }
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.RBAC_USER + "/")
    @RolesAllowed({SUPER_USER, ACCESS_MGR_USER})
    @Override
    public FortResponse getUser( FortRequest request )
    {
        return accessMgrImpl.getUser( request );
    }
/**
* ************************************************************************************************************************************
* BEGIN DELEGATEDADMINMGR
* **************************************************************************************************************************************
*/
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_ADD + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse addAdminRole( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.addAdminRole( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_DELETE + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse deleteAdminRole( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.deleteAdminRole( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_UPDATE + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse updateAdminRole( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.updateAdminRole( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_ASGN + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse assignAdminUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.assignAdminUser( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_DEASGN + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse deassignAdminUser( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.deassignAdminUser( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_DESC + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse addAdminDescendant( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.addAdminDescendant( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_ASC + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse addAdminAscendant( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.addAdminAscendant( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_ADDINHERIT + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse addAdminInheritance( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.addAdminInheritance( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_DELINHERIT + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse deleteAdminInheritance( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.deleteAdminInheritance( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ORG_ADD + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse addOrg( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.addOrg( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ORG_UPDATE + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse updateOrg( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.updateOrg( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ORG_DELETE + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse deleteOrg( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.deleteOrg( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ORG_DESC + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse addOrgDescendant( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.addOrgDescendant( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ORG_ASC + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse addOrgAscendant( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.addOrgAscendant( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ORG_ADDINHERIT + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse addOrgInheritance( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.addOrgInheritance( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ORG_DELINHERIT + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ADMIN_MGR_USER})
@Override
public FortResponse deleteOrgInheritance( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedAdminMgrImpl.deleteOrgInheritance( request );
return response;
}
/**
* ************************************************************************************************************************************
* BEGIN DELEGATEDREVIEWMGR
* **************************************************************************************************************************************
*/
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_READ + "/")
@RolesAllowed({SUPER_USER, DELEGATED_REVIEW_MGR_USER})
@Override
public FortResponse readAdminRole( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedReviewMgrImpl.readAdminRole( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_SEARCH + "/")
@RolesAllowed({SUPER_USER, DELEGATED_REVIEW_MGR_USER})
@Override
public FortResponse findAdminRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedReviewMgrImpl.findAdminRoles( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ARLE_ASGNED + "/")
@RolesAllowed({SUPER_USER, DELEGATED_REVIEW_MGR_USER})
@Override
public FortResponse assignedAdminRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedReviewMgrImpl.assignedAdminRoles( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.USER_ASGNED_ADMIN + "/")
@RolesAllowed({SUPER_USER, DELEGATED_REVIEW_MGR_USER})
@Override
public FortResponse assignedAdminUsers( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedReviewMgrImpl.assignedAdminUsers( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ORG_READ + "/")
@RolesAllowed({SUPER_USER, DELEGATED_REVIEW_MGR_USER})
@Override
public FortResponse readOrg( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedReviewMgrImpl.readOrg( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ORG_SEARCH + "/")
@RolesAllowed({SUPER_USER, DELEGATED_REVIEW_MGR_USER})
@Override
public FortResponse searchOrg( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = delegatedReviewMgrImpl.searchOrg( request );
return response;
}
/**
* ************************************************************************************************************************************
* BEGIN DELEGATEDACCESSMGR
* **************************************************************************************************************************************
*/
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ADMIN_ASSIGN + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
@Override
public FortResponse canAssign( FortRequest request )
{
return delegatedAccessMgrImpl.canAssign( request );
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ADMIN_DEASSIGN + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
@Override
public FortResponse canDeassign( FortRequest request )
{
return delegatedAccessMgrImpl.canDeassign( request );
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ADMIN_GRANT + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
@Override
public FortResponse canGrant( FortRequest request )
{
return delegatedAccessMgrImpl.canGrant( request );
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ADMIN_REVOKE + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
@Override
public FortResponse canRevoke( FortRequest request )
{
return delegatedAccessMgrImpl.canRevoke( request );
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ADMIN_AUTHZ + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
@Override
public FortResponse checkAdminAccess( FortRequest request )
{
return delegatedAccessMgrImpl.checkAdminAccess( request );
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ADMIN_ADD + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
@Override
public FortResponse addActiveAdminRole( FortRequest request )
{
return delegatedAccessMgrImpl.addActiveAdminRole( request );
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ADMIN_DROP + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
@Override
public FortResponse dropActiveAdminRole( FortRequest request )
{
return delegatedAccessMgrImpl.dropActiveAdminRole( request );
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ADMIN_ROLES + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
@Override
public FortResponse sessionAdminRoles( FortRequest request )
{
return delegatedAccessMgrImpl.sessionAdminRoles( request );
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.ADMIN_PERMS + "/")
@RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
@Override
public FortResponse sessionAdminPermissions( FortRequest request )
{
return delegatedAccessMgrImpl.sessionAdminPermissions( request );
}
    /**
     * {@inheritDoc}
     */
    @POST
    @Path("/" + HttpIds.ADMIN_AUTHZ_ROLES + "/")
    @RolesAllowed({SUPER_USER, DELEGATED_ACCESS_MGR_USER})
    @Override
    public FortResponse authorizedSessionAdminRoles( FortRequest request )
    {
        // NOTE(review): this delegates to authorizedSessionRoles (no 'Admin' in the delegate name),
        // unlike every sibling here which calls the same-named method on its impl. Presumably the
        // delegated access mgr impl resolves authorized ADMIN roles internally — confirm before renaming.
        return delegatedAccessMgrImpl.authorizedSessionRoles( request );
    }
/**
* ************************************************************************************************************************************
* BEGIN PSWDPOLICYMGR
* **************************************************************************************************************************************
*/
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PSWD_ADD + "/")
@RolesAllowed({SUPER_USER, PASSWORD_MGR_USER})
@Override
public FortResponse addPolicy( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = pswdPolicyMgrImpl.addPolicy( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PSWD_UPDATE + "/")
@RolesAllowed({SUPER_USER, PASSWORD_MGR_USER})
@Override
public FortResponse updatePolicy( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = pswdPolicyMgrImpl.updatePolicy( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PSWD_DELETE + "/")
@RolesAllowed({SUPER_USER, PASSWORD_MGR_USER})
@Override
public FortResponse deletePolicy( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = pswdPolicyMgrImpl.deletePolicy( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PSWD_READ + "/")
@RolesAllowed({SUPER_USER, PASSWORD_MGR_USER})
@Override
public FortResponse readPolicy( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = pswdPolicyMgrImpl.readPolicy( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PSWD_SEARCH + "/")
@RolesAllowed({SUPER_USER, PASSWORD_MGR_USER})
@Override
public FortResponse searchPolicy( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = pswdPolicyMgrImpl.searchPolicy( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PSWD_USER_ADD + "/")
@RolesAllowed({SUPER_USER, PASSWORD_MGR_USER})
@Override
public FortResponse updateUserPolicy( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = pswdPolicyMgrImpl.updateUserPolicy( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.PSWD_USER_DELETE + "/")
@RolesAllowed({SUPER_USER, PASSWORD_MGR_USER})
@Override
public FortResponse deleteUserPolicy( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = pswdPolicyMgrImpl.deleteUserPolicy( request );
return response;
}
/**
* ************************************************************************************************************************************
* BEGIN AUDIT MGR
* **************************************************************************************************************************************
*/
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.AUDIT_BINDS + "/")
@RolesAllowed({SUPER_USER, AUDIT_MGR_USER})
@Override
public FortResponse searchBinds( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = auditMgrImpl.searchBinds( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.AUDIT_UAUTHZS + "/")
@RolesAllowed({SUPER_USER, AUDIT_MGR_USER})
@Override
public FortResponse getUserAuthZs( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = auditMgrImpl.getUserAuthZs( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.AUDIT_AUTHZS + "/")
@RolesAllowed({SUPER_USER, AUDIT_MGR_USER})
@Override
public FortResponse searchAuthZs( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = auditMgrImpl.searchAuthZs( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.AUDIT_SESSIONS + "/")
@RolesAllowed({SUPER_USER, AUDIT_MGR_USER})
@Override
public FortResponse searchUserSessions( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = auditMgrImpl.searchUserSessions( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.AUDIT_MODS + "/")
@RolesAllowed({SUPER_USER, AUDIT_MGR_USER})
@Override
public FortResponse searchAdminMods( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = auditMgrImpl.searchAdminMods( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.AUDIT_INVLD + "/")
@RolesAllowed({SUPER_USER, AUDIT_MGR_USER})
@Override
public FortResponse searchInvalidUsers( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = auditMgrImpl.searchInvalidUsers( request );
return response;
}
/**
* ************************************************************************************************************************************
* BEGIN CONFIGMGR
* **************************************************************************************************************************************
*/
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.CFG_ADD + "/")
@RolesAllowed({SUPER_USER, CONFIG_MGR_USER})
@Override
public FortResponse addConfig( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = configMgrImpl.addConfig( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.CFG_UPDATE + "/")
@RolesAllowed({SUPER_USER, CONFIG_MGR_USER})
@Override
public FortResponse updateConfig( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = configMgrImpl.updateConfig( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.CFG_DELETE + "/")
@RolesAllowed({SUPER_USER, CONFIG_MGR_USER})
@Override
public FortResponse deleteConfig( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = configMgrImpl.deleteConfig( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.CFG_READ + "/")
@RolesAllowed({SUPER_USER, CONFIG_MGR_USER})
@Override
public FortResponse readConfig( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = configMgrImpl.readConfig( request );
return response;
}
/**
* ************************************************************************************************************************************
* BEGIN GROUPMGR
* **************************************************************************************************************************************
*/
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.GROUP_READ + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse readGroup( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = groupMgrImpl.readGroup( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.GROUP_ADD + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse addGroup( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = groupMgrImpl.addGroup( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.GROUP_DELETE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse deleteGroup( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = groupMgrImpl.deleteGroup( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.GROUP_UPDATE + "/")
@RolesAllowed({SUPER_USER, ADMIN_MGR_USER})
@Override
public FortResponse updateGroup( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = groupMgrImpl.updateGroup( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.GROUP_ROLE_ASGNED + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse assignedGroupRoles( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = groupMgrImpl.assignedRoles( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.GROUP_ASGNED + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse assignedGroups( FortRequest request )
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = groupMgrImpl.assignedGroups( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.GROUP_ASGN + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse assignGroup(FortRequest request)
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = groupMgrImpl.assignGroup( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/" + HttpIds.GROUP_DEASGN + "/")
@RolesAllowed({SUPER_USER, REVIEW_MGR_USER})
@Override
public FortResponse deassignGroup(FortRequest request)
{
FortResponse response = SecUtils.initializeSession(request, httpRequest);
if( response == null )
response = groupMgrImpl.deassignGroup( request );
return response;
}
/**
* {@inheritDoc}
*/
@POST
@Path("/{any : .*}")
@RolesAllowed(
{
SUPER_USER,
ACCESS_MGR_USER,
ADMIN_MGR_USER,
REVIEW_MGR_USER,
DELEGATED_ACCESS_MGR_USER,
DELEGATED_ADMIN_MGR_USER,
DELEGATED_REVIEW_MGR_USER,
PASSWORD_MGR_USER,
AUDIT_MGR_USER,
CONFIG_MGR_USER
} )
@Override
public FortResponse invalid(FortRequest request)
{
String szError = "Could not find a matching service. HTTP request URI:" + httpRequest.getRequestURI() + ". User: " + httpRequest.getRemoteUser();
LOG.warn( szError );
FortResponse response = new FortResponse();
response.setErrorCode( GlobalErrIds.REST_NOT_FOUND_ERR );
response.setErrorMessage( szError );
return response;
}
}
| 1,205 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/AbstractMgrImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.slf4j.Logger;
import org.apache.directory.fortress.core.GlobalErrIds;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.FortResponse;
/**
* An abstract class containing some methods shared by all the implementations.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
abstract class AbstractMgrImpl
{
    /**
     * Record a caught {@link SecurityException} on the response and log it.
     * Copies the Fortress error id, message and HTTP status onto the response so the
     * interceptor layer can surface them to the REST client.
     *
     * @param response The {@link FortResponse} instance in which we will store the error code, message and HTTP status
     * @param log The logger of the concrete manager implementation (so the log line is attributed correctly)
     * @param se The security exception that was caught
     */
    protected void createError( FortResponse response, Logger log, SecurityException se )
    {
        // Parameterized logging (SLF4J idiom) instead of eager string concatenation; output is identical.
        log.info( "Caught {}", se.toString() );
        response.setErrorCode( se.getErrorId() );
        response.setErrorMessage( se.getMessage() );
        response.setHttpStatus( se.getHttpStatus() );
    }
    /**
     * Creates a {@link FortResponse} instance where the error code is set with a default value.
     *
     * @return The created instance, pre-populated with {@link GlobalErrIds#NO_ERROR}
     */
    protected FortResponse createResponse()
    {
        FortResponse response = new FortResponse();
        response.setErrorCode( GlobalErrIds.NO_ERROR );
        return response;
    }
}
| 1,206 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/JacksonFieldOnlyMapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.directory.fortress.core.model.FortEntity;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* This class is used to marshall/unmarshall subtypes of {@link FortEntity} using only the fields.
* This mapper ignores all the getter and setters.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class JacksonFieldOnlyMapper extends ObjectMapper
{
    /**
     * Builds a mapper that (de)serializes exclusively through fields.
     */
    public JacksonFieldOnlyMapper()
    {
        super();
        // Expose every field regardless of its access modifier...
        setVisibility( PropertyAccessor.FIELD, Visibility.ANY );
        // ...and switch off all accessor-based property detection so getters/setters are never used.
        setVisibility( PropertyAccessor.GETTER, Visibility.NONE );
        setVisibility( PropertyAccessor.IS_GETTER, Visibility.NONE );
        setVisibility( PropertyAccessor.SETTER, Visibility.NONE );
    }
}
| 1,207 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/ConfigMgrImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.directory.fortress.core.ConfigMgr;
import org.apache.directory.fortress.core.ConfigMgrFactory;
import org.apache.directory.fortress.core.model.Configuration;
import org.apache.directory.fortress.core.model.Props;
import org.apache.directory.fortress.core.model.FortRequest;
import org.apache.directory.fortress.core.model.FortResponse;
import org.apache.directory.fortress.core.rest.RestUtils;
import org.apache.directory.fortress.core.SecurityException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Properties;
/**
* Utility for Fortress Rest Server. This class is thread safe.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
class ConfigMgrImpl extends AbstractMgrImpl
{
    private static final Logger LOG = LoggerFactory.getLogger( ConfigMgrImpl.class.getName() );
    /**
     * Add a new {@link Configuration} node; the entity on the request carries the configuration to create.
     *
     * @param request the inbound request whose entity is the {@link Configuration} to add
     * @return a response carrying the stored {@link Configuration}, or the error details on failure
     */
    /* No qualifier */ FortResponse addConfig(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            ConfigMgr configMgr = ConfigMgrFactory.createInstance();
            Configuration inCfg = (Configuration)request.getEntity();
            Configuration outCfg = configMgr.add( inCfg );
            if ( outCfg != null )
            {
                response.setEntity( outCfg );
            }
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }
    /**
     * Update an existing {@link Configuration} node; the entity on the request carries the new values.
     *
     * @param request the inbound request whose entity is the {@link Configuration} to update
     * @return a response carrying the updated {@link Configuration}, or the error details on failure
     */
    /* No qualifier */ FortResponse updateConfig(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            ConfigMgr configMgr = ConfigMgrFactory.createInstance();
            Configuration inCfg = (Configuration)request.getEntity();
            Configuration outCfg = configMgr.update( inCfg );
            if ( outCfg != null )
            {
                response.setEntity( outCfg );
            }
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }
    /**
     * Delete a configuration node named by {@code request.getValue()}. When the request carries no
     * entity the whole node is removed; otherwise only the supplied {@link Props} are deleted from it.
     *
     * @param request the inbound request; value = config node name, optional entity = {@link Props} to remove
     * @return a response with the default (no-error) code, or the error details on failure
     */
    /* No qualifier */ FortResponse deleteConfig(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            ConfigMgr configMgr = ConfigMgrFactory.createInstance();
            if ( request.getEntity() == null )
            {
                // No entity supplied: drop the entire configuration node.
                configMgr.delete( request.getValue() );
            }
            else
            {
                // Entity supplied: remove only these properties from the node.
                Properties inProperties = RestUtils.getProperties( (Props)request.getEntity() );
                configMgr.delete( request.getValue(), inProperties );
            }
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }
    /**
     * Read the configuration node named by {@code request.getValue()}.
     *
     * @param request the inbound request; value = config node name to read
     * @return a response carrying the {@link Configuration} that was read, or the error details on failure
     */
    /* No qualifier */ FortResponse readConfig(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            ConfigMgr configMgr = ConfigMgrFactory.createInstance();
            Configuration outCfg = configMgr.read( request.getValue() );
            if ( outCfg != null )
            {
                response.setEntity( outCfg );
            }
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }
}
| 1,208 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/SecurityOutFaultInterceptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.interceptor.security.AccessDeniedException;
import org.apache.cxf.message.Message;
import org.apache.cxf.phase.AbstractPhaseInterceptor;
import org.apache.cxf.phase.Phase;
import org.apache.cxf.transport.http.AbstractHTTPDestination;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
//import javax.servlet.http.HttpServletResponse;
import jakarta.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * CXF out-fault interceptor for the Fortress Rest Server.  Maps the fault's cause
 * onto an HTTP status code (500 for non-security errors, 403 for access-denied,
 * 401 for other security violations), writes the exception text to the response
 * body, and aborts the remaining interceptor chain.  This class is thread safe.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
public class SecurityOutFaultInterceptor extends AbstractPhaseInterceptor<Message>
{
    private static final Logger LOG = LoggerFactory.getLogger( SecurityOutFaultInterceptor.class.getName() );

    /**
     * default constructor
     */
    public SecurityOutFaultInterceptor()
    {
        super( Phase.PRE_STREAM );
    }

    /**
     * Translate the fault into an HTTP status and response body.
     *
     * @param message the CXF out-fault message being processed
     * @throws Fault per the interceptor contract (not thrown by this implementation)
     */
    public void handleMessage( Message message ) throws Fault
    {
        Fault fault = (Fault) message.getContent( Exception.class );
        // A Fault may carry no cause; fall back to the fault itself to avoid an NPE below.
        Throwable ex = fault.getCause() != null ? fault.getCause() : fault;
        HttpServletResponse response = (HttpServletResponse) message.getExchange().getInMessage()
            .get( AbstractHTTPDestination.HTTP_RESPONSE );

        // Not a security violation:
        if ( !(ex instanceof SecurityException) )
        {
            LOG.warn( "SecurityOutFaultInterceptor caught exception: {}", ex );
            response.setStatus( HttpServletResponse.SC_INTERNAL_SERVER_ERROR );
        }
        // Security violation:
        else
        {
            int status = ex instanceof AccessDeniedException
                ? HttpServletResponse.SC_FORBIDDEN
                : HttpServletResponse.SC_UNAUTHORIZED;
            response.setStatus( status );
            LOG.warn( "SecurityOutFaultInterceptor caught security violation: {}", ex );
        }
        try
        {
            // Throwable.getMessage() may be null; getBytes() on it would throw an NPE.
            String body = ex.getMessage() != null ? ex.getMessage() : ex.toString();
            response.getOutputStream().write( body.getBytes() );
            response.getOutputStream().flush();
        }
        catch ( IOException iex )
        {
            LOG.warn( "SecurityOutFaultInterceptor caught IOException: {}", iex.toString() );
        }
        // We have produced the response ourselves; stop further out-chain processing.
        message.getInterceptorChain().abort();
    }
}
| 1,209 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/DelegatedAdminMgrImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.directory.fortress.core.DelAdminMgr;
import org.apache.directory.fortress.core.DelAdminMgrFactory;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.AdminRole;
import org.apache.directory.fortress.core.model.AdminRoleRelationship;
import org.apache.directory.fortress.core.model.OrgUnit;
import org.apache.directory.fortress.core.model.OrgUnitRelationship;
import org.apache.directory.fortress.core.model.UserAdminRole;
import org.apache.directory.fortress.core.model.FortRequest;
import org.apache.directory.fortress.core.model.FortResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Delegated-administration (ARBAC) services for the Fortress Rest Server.
 * Each method creates a {@link DelAdminMgr} scoped to the request's context,
 * applies the caller's admin session, performs the operation, and packages
 * the result (or error data) in a {@link FortResponse}.
 * This class is thread safe.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
class DelegatedAdminMgrImpl extends AbstractMgrImpl
{
    /** A logger for this class */
    private static final Logger LOG = LoggerFactory.getLogger( DelegatedAdminMgrImpl.class.getName() );

    /**
     * ************************************************************************************************************************************
     * BEGIN DELEGATEDADMINMGR
     * **************************************************************************************************************************************
     */

    /**
     * Add a new {@link AdminRole}; returns the stored role in the response entity.
     */
    /* No qualifier */ FortResponse addAdminRole( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AdminRole inRole = (AdminRole) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            AdminRole retRole = delegatedAdminMgr.addRole( inRole );
            response.setEntity(retRole);
        }
        catch ( SecurityException se )
        {
            // Use the shared error handler for consistency with the other methods in this class:
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Delete an {@link AdminRole}; echoes the input role in the response entity.
     */
    /* No qualifier */ FortResponse deleteAdminRole( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AdminRole inRole = (AdminRole) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            delegatedAdminMgr.deleteRole( inRole );
            response.setEntity(inRole);
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Update an existing {@link AdminRole}; returns the updated role in the response entity.
     */
    /* No qualifier */ FortResponse updateAdminRole( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AdminRole inRole = (AdminRole) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            AdminRole retRole = delegatedAdminMgr.updateRole( inRole );
            response.setEntity(retRole);
        }
        catch ( SecurityException se )
        {
            // Use the shared error handler for consistency with the other methods in this class:
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Assign a user to an admin role via the supplied {@link UserAdminRole}.
     */
    /* No qualifier */ FortResponse assignAdminUser( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            UserAdminRole inRole = (UserAdminRole) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            delegatedAdminMgr.assignUser( inRole );
            response.setEntity(inRole);
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Remove a user from an admin role via the supplied {@link UserAdminRole}.
     */
    /* No qualifier */ FortResponse deassignAdminUser( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            UserAdminRole inRole = (UserAdminRole) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            delegatedAdminMgr.deassignUser( inRole );
            response.setEntity(inRole);
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Add a child admin role beneath an existing parent (descendant edge).
     */
    /* No qualifier */ FortResponse addAdminDescendant( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AdminRoleRelationship relationship = (AdminRoleRelationship) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            delegatedAdminMgr.addDescendant( relationship.getParent(), relationship.getChild() );
            response.setEntity( relationship );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Add a parent admin role above an existing child (ascendant edge).
     */
    /* No qualifier */ FortResponse addAdminAscendant( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AdminRoleRelationship relationship = (AdminRoleRelationship) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            // Note the argument order: addAscendant takes (child, parent).
            delegatedAdminMgr.addAscendant( relationship.getChild(), relationship.getParent() );
            response.setEntity( relationship );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Add an inheritance edge between two existing admin roles.
     */
    /* No qualifier */ FortResponse addAdminInheritance( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AdminRoleRelationship relationship = (AdminRoleRelationship) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            delegatedAdminMgr.addInheritance( relationship.getParent(), relationship.getChild() );
            response.setEntity( relationship );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Remove an inheritance edge between two admin roles.
     */
    /* No qualifier */ FortResponse deleteAdminInheritance( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            AdminRoleRelationship relationship = (AdminRoleRelationship) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            delegatedAdminMgr.deleteInheritance( relationship.getParent(), relationship.getChild() );
            response.setEntity( relationship );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Add a new {@link OrgUnit}; returns the stored entity in the response.
     */
    /* No qualifier */ FortResponse addOrg( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            OrgUnit inOrg = (OrgUnit) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            OrgUnit retOrg = delegatedAdminMgr.add( inOrg );
            response.setEntity(retOrg);
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Update an existing {@link OrgUnit}; returns the updated entity in the response.
     */
    /* No qualifier */ FortResponse updateOrg( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            OrgUnit inOrg = (OrgUnit) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            OrgUnit retOrg = delegatedAdminMgr.update( inOrg );
            response.setEntity(retOrg);
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Delete an {@link OrgUnit}; returns the deleted entity in the response.
     */
    /* No qualifier */ FortResponse deleteOrg( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            OrgUnit inOrg = (OrgUnit) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            OrgUnit retOrg = delegatedAdminMgr.delete( inOrg );
            response.setEntity(retOrg);
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Add a child org unit beneath an existing parent (descendant edge).
     */
    /* No qualifier */ FortResponse addOrgDescendant( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            OrgUnitRelationship relationship = (OrgUnitRelationship) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            delegatedAdminMgr.addDescendant( relationship.getParent(), relationship.getChild() );
            response.setEntity( relationship );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Add a parent org unit above an existing child (ascendant edge).
     */
    /* No qualifier */ FortResponse addOrgAscendant( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            OrgUnitRelationship relationship = (OrgUnitRelationship) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            // Note the argument order: addAscendant takes (child, parent).
            delegatedAdminMgr.addAscendant( relationship.getChild(), relationship.getParent() );
            response.setEntity( relationship );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Add an inheritance edge between two existing org units.
     */
    /* No qualifier */ FortResponse addOrgInheritance( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            OrgUnitRelationship relationship = (OrgUnitRelationship) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            delegatedAdminMgr.addInheritance(relationship.getParent(), relationship.getChild());
            response.setEntity( relationship );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * Remove an inheritance edge between two org units.
     */
    /* No qualifier */ FortResponse deleteOrgInheritance( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            OrgUnitRelationship relationship = (OrgUnitRelationship) request.getEntity();
            DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
            delegatedAdminMgr.setAdmin( request.getSession() );
            delegatedAdminMgr.deleteInheritance( relationship.getParent(), relationship.getChild() );
            response.setEntity( relationship );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }
}
| 1,210 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/GroupMgrImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.commons.lang.StringUtils;
import org.apache.directory.fortress.core.*;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/**
 * Group management services for the Fortress Rest Server.  Every operation
 * delegates to a context-scoped {@link GroupMgr} under the caller's admin
 * session and packages the outcome inside a {@link FortResponse}.
 * This class is thread safe.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
class GroupMgrImpl extends AbstractMgrImpl
{
    /** A logger for this class */
    private static final Logger LOG = LoggerFactory.getLogger( GroupMgrImpl.class.getName() );

    /** Persist a new group; the stored copy is returned in the response entity. */
    /* No qualifier */ FortResponse addGroup( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            GroupMgr mgr = GroupMgrFactory.createInstance( request.getContextId() );
            mgr.setAdmin( request.getSession() );
            response.setEntity( mgr.add( (Group) request.getEntity() ) );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /** Read a group entry; the full entry is returned in the response entity. */
    /* No qualifier */ FortResponse readGroup( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            GroupMgr mgr = GroupMgrFactory.createInstance( request.getContextId() );
            mgr.setAdmin( request.getSession() );
            response.setEntity( mgr.read( (Group) request.getEntity() ) );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /** Delete a group; the entry as it existed before removal is returned. */
    /* No qualifier */ FortResponse deleteGroup( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            GroupMgr mgr = GroupMgrFactory.createInstance( request.getContextId() );
            mgr.setAdmin( request.getSession() );
            Group target = (Group) request.getEntity();
            // Read the full entry first so the caller receives the state that was removed.
            Group removed = mgr.read( target );
            mgr.delete( target );
            response.setEntity( removed );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /** Update a group; the updated entry is returned in the response entity. */
    /* No qualifier */ FortResponse updateGroup( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            GroupMgr mgr = GroupMgrFactory.createInstance( request.getContextId() );
            mgr.setAdmin( request.getSession() );
            response.setEntity( mgr.update( (Group) request.getEntity() ) );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /** List the groups a given role is a member of. */
    /* No qualifier */ FortResponse assignedGroups( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            GroupMgr mgr = GroupMgrFactory.createInstance( request.getContextId() );
            mgr.setAdmin( request.getSession() );
            List<Group> matches = mgr.roleGroups( (Role) request.getEntity() );
            response.setEntities( matches );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /**
     * List the roles attached to a group.  Two lookup modes are supported:
     * by name (request value set) returning role-name strings, or by entity
     * returning the full {@link UserRole} list.
     */
    /* No qualifier */ FortResponse assignedRoles( FortRequest request )
    {
        FortResponse response = createResponse();
        try
        {
            GroupMgr mgr = GroupMgrFactory.createInstance( request.getContextId() );
            mgr.setAdmin( request.getSession() );
            String groupName = request.getValue();
            if ( StringUtils.isNotEmpty( groupName ) )
            {
                // Name-based lookup: member names are only roles for ROLE-typed groups.
                Group match = mgr.read( new Group( groupName ) );
                List<String> roleNames = new ArrayList<>();
                if ( Group.Type.ROLE.equals( match.getType() ) )
                {
                    roleNames = match.getMembers();
                }
                response.setValues( roleNames );
            }
            else
            {
                // Entity-based lookup: return the group's full UserRole list.
                response.setEntities( mgr.groupRoles( (Group) request.getEntity() ) );
            }
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /** Add a member (carried in the request's value field) to a group. */
    /* No qualifier */ FortResponse assignGroup(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            GroupMgr mgr = GroupMgrFactory.createInstance( request.getContextId() );
            mgr.setAdmin( request.getSession() );
            // No entity is echoed back for this operation.
            mgr.assign( (Group) request.getEntity(), request.getValue() );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }

    /** Remove a member (carried in the request's value field) from a group. */
    /* No qualifier */ FortResponse deassignGroup(FortRequest request)
    {
        FortResponse response = createResponse();
        try
        {
            GroupMgr mgr = GroupMgrFactory.createInstance( request.getContextId() );
            mgr.setAdmin( request.getSession() );
            // No entity is echoed back for this operation.
            mgr.deassign( (Group) request.getEntity(), request.getValue() );
        }
        catch ( SecurityException se )
        {
            createError( response, LOG, se );
        }
        return response;
    }
}
| 1,211 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/AdminMgrImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.directory.fortress.core.*;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utility for Fortress Rest Server. This class is thread safe.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
class AdminMgrImpl extends AbstractMgrImpl
{
/** A logger for this class */
private static final Logger LOG = LoggerFactory.getLogger( AdminMgrImpl.class.getName() );
/* No qualifier */ FortResponse addUser( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User inUser = (User)request.getEntity();
User outUser = adminMgr.addUser( inUser );
response.setEntity( outUser );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deleteUser( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User inUser = (User)request.getEntity();
adminMgr.deleteUser( inUser );
response.setEntity( inUser );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse disableUser( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User inUser = (User)request.getEntity();
adminMgr.disableUser( inUser );
response.setEntity( inUser );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse updateUser( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
User outUser = adminMgr.updateUser( inUser );
response.setEntity( outUser );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse changePassword( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
adminMgr.changePassword( inUser, inUser.getNewPassword() );
response.setEntity( inUser );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse lockUserAccount( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
adminMgr.lockUserAccount( inUser );
response.setEntity( inUser );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse unlockUserAccount( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
adminMgr.unlockUserAccount( inUser );
response.setEntity( inUser );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse resetPassword( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
adminMgr.resetPassword( inUser, inUser.getNewPassword() );
response.setEntity( inUser );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addRole( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
Role outRole = adminMgr.addRole( inRole );
response.setEntity( outRole );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deleteRole( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
adminMgr.deleteRole( inRole );
response.setEntity( inRole );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse updateRole( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
Role outRole = adminMgr.updateRole( inRole );
response.setEntity( outRole );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse assignUser( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
UserRole inRole = (UserRole) request.getEntity();
adminMgr.assignUser( inRole );
response.setEntity( inRole );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deassignUser( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
UserRole inRole = (UserRole) request.getEntity();
adminMgr.deassignUser( inRole );
response.setEntity( inRole );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addPermission( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Permission inPerm = (Permission) request.getEntity();
Permission outPerm = adminMgr.addPermission( inPerm );
response.setEntity( outPerm );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse updatePermission( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Permission inPerm = (Permission) request.getEntity();
Permission outPerm = adminMgr.updatePermission( inPerm );
response.setEntity( outPerm );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deletePermission( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Permission inPerm = (Permission) request.getEntity();
adminMgr.deletePermission( inPerm );
response.setEntity( inPerm );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addPermObj( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
PermObj inObj = (PermObj) request.getEntity();
PermObj outObj = adminMgr.addPermObj( inObj );
response.setEntity( outObj );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse updatePermObj( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
PermObj inObj = (PermObj) request.getEntity();
PermObj outObj = adminMgr.updatePermObj( inObj );
response.setEntity( outObj );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deletePermObj( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
PermObj inObj = (PermObj) request.getEntity();
adminMgr.deletePermObj( inObj );
response.setEntity( inObj );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
private void grantPerm( FortRequest request ) throws SecurityException
{
PermGrant permGrant = (PermGrant) request.getEntity();
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Role role = new Role( permGrant.getRoleNm() );
Permission perm = new Permission( permGrant.getObjName(), permGrant.getOpName(), permGrant.getObjId() );
perm.setAdmin( false );
adminMgr.grantPermission( perm, role );
}
private void grantAdminPerm( FortRequest request ) throws SecurityException
{
PermGrant permGrant = (PermGrant) request.getEntity();
DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
delegatedAdminMgr.setAdmin( request.getSession() );
AdminRole role = new AdminRole( permGrant.getRoleNm() );
Permission perm = new Permission( permGrant.getObjName(), permGrant.getOpName(), permGrant.getObjId() );
perm.setAdmin( true );
delegatedAdminMgr.grantPermission( perm, role );
}
private void revokePerm( FortRequest request ) throws SecurityException
{
PermGrant permGrant = (PermGrant) request.getEntity();
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Role role = new Role( permGrant.getRoleNm() );
Permission perm = new Permission( permGrant.getObjName(), permGrant.getOpName(), permGrant.getObjId() );
perm.setAdmin( false );
adminMgr.revokePermission( perm, role );
}
private void revokeAdminPerm( FortRequest request ) throws SecurityException
{
PermGrant permGrant = (PermGrant) request.getEntity();
DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
delegatedAdminMgr.setAdmin( request.getSession() );
AdminRole role = new AdminRole( permGrant.getRoleNm() );
Permission perm = new Permission( permGrant.getObjName(), permGrant.getOpName(), permGrant.getObjId() );
perm.setAdmin( true );
delegatedAdminMgr.revokePermission( perm, role );
}
private void grantUserPerm( FortRequest request ) throws SecurityException
{
PermGrant permGrant = (PermGrant) request.getEntity();
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User user = new User( permGrant.getUserId() );
Permission perm = new Permission( permGrant.getObjName(), permGrant.getOpName(), permGrant.getObjId() );
perm.setAdmin( false );
adminMgr.grantPermission( perm, user );
}
private void grantAdminUserPerm( FortRequest request ) throws SecurityException
{
PermGrant permGrant = (PermGrant) request.getEntity();
DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
delegatedAdminMgr.setAdmin( request.getSession() );
User user = new User( permGrant.getUserId() );
Permission perm = new Permission( permGrant.getObjName(), permGrant.getOpName(), permGrant.getObjId() );
perm.setAdmin( true );
delegatedAdminMgr.grantPermission( perm, user );
}
private void revokeUserPerm( FortRequest request ) throws SecurityException
{
PermGrant permGrant = (PermGrant) request.getEntity();
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
User user = new User( permGrant.getUserId() );
Permission perm = new Permission( permGrant.getObjName(), permGrant.getOpName(), permGrant.getObjId() );
perm.setAdmin( false );
adminMgr.revokePermission( perm, user );
}
private void revokeAdminUserPerm( FortRequest request ) throws SecurityException
{
PermGrant permGrant = (PermGrant) request.getEntity();
DelAdminMgr delegatedAdminMgr = DelAdminMgrFactory.createInstance( request.getContextId() );
delegatedAdminMgr.setAdmin( request.getSession() );
User user = new User( permGrant.getUserId() );
Permission perm = new Permission( permGrant.getObjName(), permGrant.getOpName(), permGrant.getObjId() );
perm.setAdmin( true );
delegatedAdminMgr.revokePermission( perm, user );
}
/* No qualifier */ FortResponse grant(FortRequest request, FortressServiceImpl fortressService)
{
FortResponse response = createResponse();
try
{
PermGrant permGrant = (PermGrant) request.getEntity();
if ( permGrant.isAdmin() )
{
grantAdminPerm( request );
}
else
{
grantPerm( request );
}
response.setEntity(permGrant);
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse revoke(FortRequest request, FortressServiceImpl fortressService)
{
FortResponse response = createResponse();
try
{
PermGrant permGrant = (PermGrant) request.getEntity();
if (permGrant.isAdmin())
{
revokeAdminPerm( request );
}
else
{
revokePerm( request );
}
response.setEntity( permGrant );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse grantUser(FortRequest request, FortressServiceImpl fortressService)
{
FortResponse response = createResponse();
try
{
PermGrant permGrant = (PermGrant) request.getEntity();
if ( permGrant.isAdmin() )
{
grantAdminUserPerm( request );
}
else
{
grantUserPerm( request );
}
response.setEntity( permGrant );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse revokeUser(FortRequest request, FortressServiceImpl fortressService)
{
FortResponse response = createResponse();
try
{
PermGrant permGrant = (PermGrant) request.getEntity();
if ( permGrant.isAdmin() )
{
revokeAdminUserPerm( request );
}
else
{
revokeUserPerm( request );
}
response.setEntity( permGrant );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addDescendant( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
RoleRelationship relationship = (RoleRelationship) request.getEntity();
adminMgr.addDescendant(relationship.getParent(), relationship.getChild());
response.setEntity( relationship );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addAscendant( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
RoleRelationship relationship = (RoleRelationship) request.getEntity();
adminMgr.addAscendant(relationship.getChild(), relationship.getParent());
response.setEntity( relationship );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addInheritance( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
RoleRelationship relationship = (RoleRelationship) request.getEntity();
adminMgr.addInheritance(relationship.getParent(), relationship.getChild());
response.setEntity( relationship );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deleteInheritance( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
RoleRelationship relationship = (RoleRelationship) request.getEntity();
adminMgr.deleteInheritance( relationship.getParent(), relationship.getChild() );
response.setEntity( relationship );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse createSsdSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = adminMgr.createSsdSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse updateSsdSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = adminMgr.updateSsdSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addSsdRoleMember( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
Role role = new Role( request.getValue() );
SDSet outSet = adminMgr.addSsdRoleMember( inSet, role );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deleteSsdRoleMember( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
Role role = new Role( request.getValue() );
SDSet outSet = adminMgr.deleteSsdRoleMember( inSet, role );
response.setEntity(outSet);
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deleteSsdSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = adminMgr.deleteSsdSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse setSsdSetCardinality( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = adminMgr.setSsdSetCardinality( inSet, inSet.getCardinality() );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse createDsdSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = adminMgr.createDsdSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse updateDsdSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = adminMgr.updateDsdSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addDsdRoleMember( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
Role role = new Role(request.getValue());
SDSet outSet = adminMgr.addDsdRoleMember( inSet, role );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deleteDsdRoleMember( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
Role role = new Role(request.getValue());
SDSet outSet = adminMgr.deleteDsdRoleMember( inSet, role );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deleteDsdSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = adminMgr.deleteDsdSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse setDsdSetCardinality( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = adminMgr.setDsdSetCardinality( inSet, inSet.getCardinality() );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addRoleConstraint( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
UserRole inRole = (UserRole) request.getEntity();
RoleConstraint inConstraint = (RoleConstraint) request.getEntity2();
RoleConstraint outRole = adminMgr.addRoleConstraint( inRole, inConstraint );
response.setEntity( outRole );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse removeRoleConstraint( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
UserRole inRole = (UserRole) request.getEntity();
RoleConstraint inConstraint = (RoleConstraint) request.getEntity2();
adminMgr.removeRoleConstraint( inRole, inConstraint );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse removeRoleConstraintWid( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
UserRole inRole = (UserRole) request.getEntity();
String szConstraintId = request.getValue();
adminMgr.removeRoleConstraint( inRole, szConstraintId );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse enableRoleConstraint( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
RoleConstraint inConstraint = (RoleConstraint) request.getEntity2();
adminMgr.enableRoleConstraint( inRole, inConstraint );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse disableRoleConstraint( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
RoleConstraint inConstraint = (RoleConstraint) request.getEntity2();
adminMgr.disableRoleConstraint( inRole, inConstraint );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addPermissionAttributeToSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
PermissionAttribute inAttr = (PermissionAttribute) request.getEntity();
String attrName = request.getValue();
PermissionAttribute outAttr = adminMgr.addPermissionAttributeToSet( inAttr, attrName );
response.setEntity( outAttr );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse updatePermissionAttributeInSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
PermissionAttribute inAttr = (PermissionAttribute) request.getEntity();
String attrName = request.getValue();
boolean isReplace = request.getIsFlag();
adminMgr.updatePermissionAttributeInSet( inAttr, attrName, isReplace );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse removePermissionAttributeFromSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
PermissionAttribute inAttr = (PermissionAttribute) request.getEntity();
String attrName = request.getValue();
adminMgr.removePermissionAttributeFromSet( inAttr, attrName );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addPermissionAttributeSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
PermissionAttributeSet inSet = (PermissionAttributeSet) request.getEntity();
PermissionAttributeSet outSet = adminMgr.addPermissionAttributeSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse deletePermissionAttributeSet( FortRequest request )
{
FortResponse response = createResponse();
try
{
AdminMgr adminMgr = AdminMgrFactory.createInstance( request.getContextId() );
adminMgr.setAdmin( request.getSession() );
PermissionAttributeSet inSet = (PermissionAttributeSet) request.getEntity();
adminMgr.deletePermissionAttributeSet( inSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.commons.lang.StringUtils;
import org.apache.directory.fortress.core.ReviewMgr;
import org.apache.directory.fortress.core.ReviewMgrFactory;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.OrgUnit;
import org.apache.directory.fortress.core.model.PermObj;
import org.apache.directory.fortress.core.model.Permission;
import org.apache.directory.fortress.core.model.PermissionAttributeSet;
import org.apache.directory.fortress.core.model.Role;
import org.apache.directory.fortress.core.model.RoleConstraint;
import org.apache.directory.fortress.core.model.SDSet;
import org.apache.directory.fortress.core.model.User;
import org.apache.directory.fortress.core.model.UserRole;
import org.apache.directory.fortress.core.model.FortRequest;
import org.apache.directory.fortress.core.model.FortResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Set;
/**
* Utility for Fortress Rest Server. This class is thread safe.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
class ReviewMgrImpl extends AbstractMgrImpl
{
/** A logger for this class */
private static final Logger LOG = LoggerFactory.getLogger( ReviewMgrImpl.class.getName() );
/* No qualifier */ FortResponse readPermission( FortRequest request )
{
FortResponse response = createResponse();
try
{
Permission inPerm = (Permission) request.getEntity();
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Permission retPerm = reviewMgr.readPermission( inPerm );
response.setEntity( retPerm );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse readPermObj( FortRequest request )
{
FortResponse response = createResponse();
try
{
PermObj inObj = (PermObj) request.getEntity();
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
PermObj retObj = reviewMgr.readPermObj( inObj );
response.setEntity( retObj );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse findPermissions( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Permission inPerm = (Permission) request.getEntity();
List<Permission> perms = reviewMgr.findPermissions( inPerm );
response.setEntities( perms );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse findObjPermissions( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
PermObj inObj = (PermObj) request.getEntity();
List<Permission> perms = reviewMgr.findPermsByObj( inObj );
response.setEntities( perms );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse findAnyPermissions( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Permission inPerm = (Permission) request.getEntity();
List<Permission> perms = reviewMgr.findAnyPermissions( inPerm );
response.setEntities( perms );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse findPermObjs( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
PermObj inObj = (PermObj) request.getEntity();
List<PermObj> objs = null;
if ( StringUtils.isNotEmpty( inObj.getOu() ) )
{
objs = reviewMgr.findPermObjs( new OrgUnit( inObj.getOu(), OrgUnit.Type.PERM ) );
}
else
{
objs = reviewMgr.findPermObjs( inObj );
}
response.setEntities(objs);
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse readRole( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
Role outRole = reviewMgr.readRole( inRole );
response.setEntity( outRole );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse findRoles( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
String searchValue = request.getValue();
if ( request.getLimit() != null )
{
List<String> retRoles = reviewMgr.findRoles( searchValue, request.getLimit() );
response.setValues( retRoles );
}
else
{
List<Role> roles = reviewMgr.findRoles( searchValue );
response.setEntities( roles );
}
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse readUserM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
User outUser = reviewMgr.readUser( inUser );
response.setEntity( outUser );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse findUsersM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
if ( request.getLimit() != null )
{
List<String> retUsers = reviewMgr.findUsers( inUser, request.getLimit() );
response.setValues( retUsers );
}
else
{
List<User> retUsers;
if ( StringUtils.isNotEmpty( inUser.getOu() ) )
{
retUsers = reviewMgr.findUsers( new OrgUnit( inUser.getOu(), OrgUnit.Type.USER ) );
}
else
{
retUsers = reviewMgr.findUsers( inUser );
}
response.setEntities( retUsers );
}
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse assignedUsersM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
if ( request.getLimit() != null )
{
List<String> retUsers = reviewMgr.assignedUsers( inRole, request.getLimit() );
response.setValues( retUsers );
}
else
{
List<User> users = reviewMgr.assignedUsers( inRole );
response.setEntities( users );
}
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse assignedUsersConstraints( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
RoleConstraint inConstraint = (RoleConstraint) request.getEntity2();
List<User> users = reviewMgr.assignedUsers( inRole, inConstraint );
response.setEntities( users );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse assignedUsersConstraintsKey( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
RoleConstraint inConstraint = (RoleConstraint) request.getEntity2();
List<UserRole> uRoles = reviewMgr.assignedUsers( inRole, inConstraint.getType(), inConstraint.getKey() );
response.setEntities( uRoles );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse assignedRolesM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
if ( StringUtils.isNotEmpty( request.getValue() ) )
{
String userId = request.getValue();
List<String> retRoles = reviewMgr.assignedRoles( userId );
response.setValues( retRoles );
}
else
{
User inUser = (User) request.getEntity();
List<UserRole> uRoles = reviewMgr.assignedRoles( inUser );
response.setEntities( uRoles );
}
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse authorizedUsersM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
List<User> users = reviewMgr.authorizedUsers( inRole );
response.setEntities( users );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse authorizedRoleM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
Set<String> outSet = reviewMgr.authorizedRoles( inUser );
response.setValueSet( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse permissionRolesM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Permission inPerm = (Permission) request.getEntity();
List<String> outList = reviewMgr.permissionRoles( inPerm );
response.setValues( outList );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse authorizedPermissionRolesM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Permission inPerm = (Permission) request.getEntity();
Set<String> outSet = reviewMgr.authorizedPermissionRoles( inPerm );
response.setValueSet( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse permissionUsersM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
Permission inPerm = (Permission) request.getEntity();
List<String> outList = reviewMgr.permissionUsers( inPerm );
response.setValues( outList );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse authorizedPermissionUsersM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
Permission inPerm = (Permission) request.getEntity();
Set<String> outSet = reviewMgr.authorizedPermissionUsers( inPerm );
response.setValueSet( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse userPermissionsM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
List<Permission> perms = reviewMgr.userPermissions( inUser );
response.setEntities( perms );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse rolePermissionsM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
boolean noInheritance = request.getIsFlag();
List<Permission> perms = reviewMgr.rolePermissions( inRole, noInheritance );
response.setEntities( perms );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse ssdRoleSetsM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
List<SDSet> outSets = reviewMgr.ssdRoleSets( inRole );
response.setEntities( outSets );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse ssdRoleSetM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = reviewMgr.ssdRoleSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse ssdRoleSetRolesM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
SDSet inSet = (SDSet) request.getEntity();
Set<String> outSet = reviewMgr.ssdRoleSetRoles( inSet );
response.setValueSet( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse ssdRoleSetCardinalityM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
SDSet inSet = (SDSet) request.getEntity();
int cardinality = reviewMgr.ssdRoleSetCardinality( inSet );
inSet.setCardinality( cardinality );
response.setEntity( inSet );
}
catch ( SecurityException se )
{
LOG.info( "Caught " + se );
}
return response;
}
/* No qualifier */ FortResponse ssdSetsM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
SDSet inSdSet = (SDSet) request.getEntity();
List<SDSet> outSets = reviewMgr.ssdSets( inSdSet );
response.setEntities( outSets );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse dsdRoleSetsM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
List<SDSet> outSets = reviewMgr.dsdRoleSets( inRole );
response.setEntities( outSets );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse dsdRoleSetM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
SDSet inSet = (SDSet) request.getEntity();
SDSet outSet = reviewMgr.dsdRoleSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse dsdRoleSetRolesM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
SDSet inSet = (SDSet) request.getEntity();
Set<String> outSet = reviewMgr.dsdRoleSetRoles( inSet );
response.setValueSet( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse dsdRoleSetCardinalityM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
SDSet inSet = (SDSet) request.getEntity();
int cardinality = reviewMgr.dsdRoleSetCardinality( inSet );
inSet.setCardinality( cardinality );
response.setEntity( inSet );
}
catch ( SecurityException se )
{
LOG.info( "Caught " + se );
}
return response;
}
/* No qualifier */ FortResponse dsdSetsM( FortRequest request )
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
SDSet inSdSet = (SDSet) request.getEntity();
List<SDSet> outSets = reviewMgr.dsdSets( inSdSet );
response.setEntities( outSets );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse findRoleConstraintsM(FortRequest request)
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
User inUser = (User) request.getEntity();
Permission inPerm = (Permission) request.getEntity2();
RoleConstraint.RCType inType = RoleConstraint.RCType.valueOf( request.getValue() );
List<RoleConstraint> outConstraints = reviewMgr.findRoleConstraints( inUser, inPerm, inType );
response.setEntities( outConstraints );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse rolePermissionAttributeSetsM(FortRequest request)
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
Role inRole = (Role) request.getEntity();
boolean isReplace = request.getIsFlag();
List<PermissionAttributeSet> retAttrSets = reviewMgr.rolePermissionAttributeSets( inRole, isReplace );
response.setEntities( retAttrSets );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse readPermAttributeSetM(FortRequest request)
{
FortResponse response = createResponse();
try
{
ReviewMgr reviewMgr = ReviewMgrFactory.createInstance( request.getContextId() );
reviewMgr.setAdmin( request.getSession() );
PermissionAttributeSet inSet = (PermissionAttributeSet) request.getEntity();
PermissionAttributeSet outSet = reviewMgr.readPermAttributeSet( inSet );
response.setEntity( outSet );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
}
| 1,213 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/FortressService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.directory.fortress.core.FinderException;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.ValidationException;
import org.apache.directory.fortress.core.model.FortRequest;
import org.apache.directory.fortress.core.model.FortResponse;
import org.apache.directory.fortress.core.model.Group;
import org.apache.directory.fortress.core.model.UserRole;
import jakarta.annotation.security.RolesAllowed;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
/**
* Interface for Fortress Rest Service methods.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public interface FortressService
{
//------------ AdminMgr -----------------------------------------------------------------------------------------------
/**
* This command creates a new RBAC user. The command is valid only if the new user is
* not already a member of the USERS data set. The USER data set is updated. The new user
* does not own any session at the time of its creation.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User}
* object
* </li>
* </ul>
*
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User required parameters</h5>
* </li>
* <li>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* <li>{@link org.apache.directory.fortress.core.model.User#password} - used to authenticate the User</li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#ou} - contains the name of an already existing
* User OU node
* </li>
* </ul>
* </li>
* </ul>
* </li>
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User optional parameters</h5>
* </li>
* <li>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#pwPolicy} - contains the name of an already existing
* OpenLDAP password policy node
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#cn} - maps to INetOrgPerson common name
* attribute
* </li>
* <li>{@link org.apache.directory.fortress.core.model.User#sn} - maps to INetOrgPerson surname attribute</li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#description} - maps to INetOrgPerson description
* attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#phones} * - multi-occurring attribute maps to
* organizationalPerson telephoneNumber attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#mobiles} * - multi-occurring attribute maps to
* INetOrgPerson mobile attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#emails} * - multi-occurring attribute maps to
* INetOrgPerson mail attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#address} * - multi-occurring attribute maps to
* organizationalPerson postalAddress, st, l, postalCode, postOfficeBox attributes
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#beginTime} - HHMM - determines begin hour user
* may activate session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#endTime} - HHMM - determines end hour user may
* activate session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#beginDate} - YYYYMMDD - determines date when user
* may sign on
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#endDate} - YYYYMMDD - indicates latest date user
* may sign on
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#beginLockDate} - YYYYMMDD - determines beginning
* of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#endLockDate} - YYYYMMDD - determines end of enforced
* inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#dayMask} - 1234567, 1 = Sunday, 2 = Monday, etc -
* specifies which day of user may sign on
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#timeout} - number in seconds of session inactivity
* time allowed
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#props} * - multi-occurring attribute contains
* property key and values are separated with a ':'. e.g. mykey1:myvalue1
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#roles} * - multi-occurring attribute contains the
* name of already existing role to assign to user
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#adminRoles} * - multi-occurring attribute contains
* the name of already existing adminRole to assign to user
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* optional parameters
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addUser( FortRequest request );
/**
* This command deletes an existing user from the RBAC database. The command is valid
* if and only if the user to be deleted is a member of the USERS data set. The USERS and
* UA data sets and the assigned_users function are updated.
* This method performs a "hard" delete. It completely removes all data associated with this user from the directory.
* User entity must exist in directory prior to making this call else exception will be thrown.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deleteUser( FortRequest request );
/**
* This command deletes an existing user from the RBAC database. The command is valid
* if and only if the user to be deleted is a member of the USERS data set. The USERS and
* UA data sets and the assigned_users function are updated.
* Method performs a "soft" delete. It performs the following:
* <ul>
* <li>sets the user status to "deleted"</li>
* <li>deassigns all roles from the user</li>
* <li>locks the user's password in LDAP</li>
* <li>revokes all perms that have been granted to user entity.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse disableUser( FortRequest request );
/**
* This method performs an update on User entity in directory. Prior to making this call the entity must exist in
* directory.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* </ul>
* </li>
* <li>
* <h5>User optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#password} - used to authenticate the User</li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#ou} - contains the name of an already existing User
* OU node
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#pwPolicy} - contains the name of an already existing
* OpenLDAP password policy node
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#cn} - maps to INetOrgPerson common name attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#sn} - maps to INetOrgPerson surname attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#description} - maps to INetOrgPerson description
* attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#phones} * - multi-occurring attribute maps to
* organizationalPerson telephoneNumber attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#mobiles} * - multi-occurring attribute maps to
* INetOrgPerson mobile attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#emails} * - multi-occurring attribute maps to
* INetOrgPerson mail attribute
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#address} * - multi-occurring attribute maps to
* organizationalPerson postalAddress, st, l, postalCode, postOfficeBox attributes
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#beginTime} - HHMM - determines begin hour user may
* activate session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#endTime} - HHMM - determines end hour user may
* activate session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#beginDate} - YYYYMMDD - determines date when user
* may sign on
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#endDate} - YYYYMMDD - indicates latest date user
* may sign on
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#beginLockDate} - YYYYMMDD - determines beginning
* of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#endLockDate} - YYYYMMDD - determines end of enforced
* inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#dayMask} - 1234567, 1 = Sunday, 2 = Monday, etc -
* specifies which day of user may sign on
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#timeout} - number in seconds of session inactivity
* time allowed
* </li>
 * <li>
 * {@link org.apache.directory.fortress.core.model.User#props} * - multi-occurring attribute contains property
 * key and values are separated with a ':'. e.g. mykey1:myvalue1
 * </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#roles} * - multi-occurring attribute contains the name
* of already existing role to assign to user
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#adminRoles} * - multi-occurring attribute contains the
* name of already existing adminRole to assign to user
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse updateUser( FortRequest request );
/**
* Method will change user's password. This method will evaluate user's password policies.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to
* {@link org.apache.directory.fortress.core.model.User} object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* <li>{@link org.apache.directory.fortress.core.model.User#password} - contains the User's old password</li>
* <li>newPassword - contains the User's new password</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse changePassword( FortRequest request );
/**
* Method will lock user's password which will prevent the user from authenticating with directory.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse lockUserAccount( FortRequest request );
/**
* Method will unlock user's password which will enable user to authenticate with directory.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse unlockUserAccount( FortRequest request );
/**
* Method will reset user's password which will require user to change password before successful authentication
* with directory.
* This method will not evaluate password policies on the new user password as it must be changed before use.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* <li>newPassword - contains the User's new password</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse resetPassword( FortRequest request );
/**
* This command creates a new role. The command is valid if and only if the new role is not
* already a member of the ROLES data set. The ROLES data set is updated.
* Initially, no user or permission is assigned to the new role.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Role required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role to
* be created.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Role optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#description} - maps to description attribute on
* organizationalRole object class
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#beginTime} - HHMM - determines begin hour role may
* be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#endTime} - HHMM - determines end hour role may be
* activated into user's RBAC session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#beginDate} - YYYYMMDD - determines date when role
* may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#endDate} - YYYYMMDD - indicates latest date role
* may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#beginLockDate} - YYYYMMDD - determines beginning
* of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#endLockDate} - YYYYMMDD - determines end of enforced
* inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#dayMask} - 1234567, 1 = Sunday, 2 = Monday, etc -
* specifies which day role may be activated into user's RBAC session
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addRole( FortRequest request );
/**
* This command deletes an existing role from the RBAC database. The command is valid
* if and only if the role to be deleted is a member of the ROLES data set. This command will
* also deassign role from all users.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Role required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role
* to be removed.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>Role optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deleteRole( FortRequest request );
/**
* Method will update a Role entity in the directory. The role must exist in role container prior to this call. *
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Role required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role to be
* updated.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Role optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#description} - maps to description attribute
* on organizationalRole object class
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#beginTime} - HHMM - determines begin hour role
* may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#endTime} - HHMM - determines end hour role may
* be activated into user's RBAC session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#beginDate} - YYYYMMDD - determines date when role
* may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#endDate} - YYYYMMDD - indicates latest date role
* may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#beginLockDate} - YYYYMMDD - determines beginning
* of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#endLockDate} - YYYYMMDD - determines end of
* enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#dayMask} - 1234567, 1 = Sunday, 2 = Monday, etc -
* specifies which day role may be activated into user's RBAC session
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>{@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints</li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse updateRole( FortRequest request );
/**
* This command assigns a user to a role.
* <ul>
* <li> The command is valid if and only if:
* <li> The user is a member of the USERS data set
* <li> The role is a member of the ROLES data set
* <li> The user is not already assigned to the role
* <li> The SSD constraints are satisfied after assignment.
* </ul>
* Successful completion of this op, the following occurs:
* <ul>
* <li>
* User entity (resides in people container) has role assignment added to aux object class attached to actual user
* record.
* </li>
* <li> Role entity (resides in role container) has userId added as role occupant.</li>
* <li> (optional) Temporal constraints may be associated with <code>ftUserAttrs</code> aux object class based on:</li>
* <li>
* <ul>
* <li> timeout - number in seconds of session inactivity time allowed.</li>
* <li> beginDate - YYYYMMDD - determines date when role may be activated.</li>
 * <li> endDate - YYYYMMDD - indicates latest date role may be activated.</li>
 * <li> beginLockDate - YYYYMMDD - determines beginning of enforced inactive status</li>
 * <li> endLockDate - YYYYMMDD - determines end of enforced inactive status.</li>
* <li> beginTime - HHMM - determines begin hour role may be activated in user's session.</li>
* <li> endTime - HHMM - determines end hour role may be activated in user's session.*</li>
* <li> dayMask - 1234567, 1 = Sunday, 2 = Monday, etc - specifies which day of week role may be activated.</li>
* </ul>
* </li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserRole}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>UserRole required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#name} - contains the name for already existing
* Role to be assigned
* </li>
* <li>{@link org.apache.directory.fortress.core.model.UserRole#userId} - contains the userId for existing User</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>UserRole optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#beginTime} - HHMM - determines begin hour role
* may be activated into user's RBAC session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#endTime} - HHMM - determines end hour role may
* be activated into user's RBAC session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#beginDate} - YYYYMMDD - determines date when
* role may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#endDate} - YYYYMMDD - indicates latest date role
* may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#beginLockDate} - YYYYMMDD - determines beginning
* of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#endLockDate} - YYYYMMDD - determines end of
* enforced
* inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#dayMask} - 1234567, 1 = Sunday, 2 = Monday, etc -
* specifies which day role may be activated into user's RBAC session
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will enforce
* ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse assignUser( FortRequest request );
/**
* This command deletes the assignment of the User from the Role entities. The command is
* valid if and only if the user is a member of the USERS data set, the role is a member of
* the ROLES data set, and the user is assigned to the role.
* Any sessions that currently have this role activated will not be effected.
* Successful completion includes:
* User entity in USER data set has role assignment removed.
* Role entity in ROLE data set has userId removed as role occupant.
* (optional) Temporal constraints will be removed from user aux object if set prior to call.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserRole}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>UserRole required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#name} - contains the name for already existing
* Role to be deassigned
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#userId} - contains the userId for existing
* User
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deassignUser( FortRequest request );
/**
* This method will add permission operation to an existing permission object which resides under
* {@code ou=Permissions,ou=RBAC,dc=yourHostName,dc=com} container in directory information tree.
* The perm operation entity may have {@link org.apache.directory.fortress.core.model.Role} or
* {@link org.apache.directory.fortress.core.model.User} associations. The target
* {@link org.apache.directory.fortress.core.model.Permission} must not exist prior to calling.
* A Fortress Permission instance exists in a hierarchical, one-many relationship between its parent and itself
* as stored in ldap tree: ({@link org.apache.directory.fortress.core.model.PermObj}*->
* {@link org.apache.directory.fortress.core.model.Permission}).
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Permission required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing object
* being targeted for the permission add
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of new permission
* operation being added
* </li>
* </ul>
* <h5>Permission optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#roles} * - multi occurring attribute contains
* RBAC Roles that permission operation is being granted to
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#users} * - multi occurring attribute contains
* Users that permission operation is being granted to
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#props} * - multi-occurring property key and
* values are separated with a ':'. e.g. mykey1:myvalue1
* </li>
* <li>{@link org.apache.directory.fortress.core.model.Permission#type} - any safe text</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will enforce
* ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addPermission( FortRequest request );
/**
* This method will update permission operation pre-existing in target directory under
* {@code ou=Permissions,ou=RBAC,dc=yourHostName,dc=com} container in directory information tree.
* The perm operation entity may also contain {@link org.apache.directory.fortress.core.model.Role}
* or {@link org.apache.directory.fortress.core.model.User} associations to add or remove using this function.
* The perm operation must exist before making this call. Only non-null attributes will be updated.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Permission required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing object
* being targeted for the permission update
* </li>
* <li>
 * {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing permission
 * operation being updated
* </li>
* </ul>
* <h5>Permission optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#roles} * - multi occurring attribute contains
* RBAC Roles that permission operation is being granted to
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#users} * - multi occurring attribute contains
* Users that permission operation is being granted to
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#props} * - multi-occurring property key and
* values are separated with a ':'. e.g. mykey1:myvalue1
* </li>
* <li>{@link org.apache.directory.fortress.core.model.Permission#type} - any safe text</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse updatePermission( FortRequest request );
/**
* This method will remove permission operation entity from permission object. A Fortress permission is
* (object->operation).
* The perm operation must exist before making this call.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Permission required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing
* object being targeted for the permission removal
* </li>
* <li>
 * {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing permission
 * operation being deleted
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deletePermission( FortRequest request );
/**
* This method will add permission object to perms container in directory. The perm object must not exist before
* making this call. A {@link org.apache.directory.fortress.core.model.PermObj} instance exists in a hierarchical,
* one-many relationship between itself and children as stored in ldap tree:
* ({@link org.apache.directory.fortress.core.model.PermObj}*->
* {@link org.apache.directory.fortress.core.model.Permission}).
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermObj}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>PermObj required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PermObj#objName} - contains the name of new object being
* added
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PermObj#ou} - contains the name of an existing PERMS
* OrgUnit this object is associated with
* </li>
* </ul>
* <h5>PermObj optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.PermObj#description} - any safe text</li>
* <li>{@link org.apache.directory.fortress.core.model.PermObj#type} - contains any safe text</li>
* <li>
* {@link org.apache.directory.fortress.core.model.PermObj#props} * - multi-occurring property key and
* values are separated with a ':'. e.g. mykey1:myvalue1
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will enforce
* ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addPermObj( FortRequest request );
/**
* This method will update permission object in perms container in directory. The perm object must exist before making
* this call.
* A {@link org.apache.directory.fortress.core.model.PermObj} instance exists in a hierarchical, one-many relationship
* between itself and children as stored in ldap tree: ({@link org.apache.directory.fortress.core.model.PermObj}*->
* {@link org.apache.directory.fortress.core.model.Permission}).
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermObj}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>PermObj required parameters</h5>
* <ul>
* <li>
 * {@link org.apache.directory.fortress.core.model.PermObj#objName} - contains the name of existing object
 * being updated
* </li>
* </ul>
* <h5>PermObj optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PermObj#ou} - contains the name of an existing PERMS
* OrgUnit this object is associated with
* </li>
* <li>{@link org.apache.directory.fortress.core.model.PermObj#description} - any safe text</li>
* <li>{@link org.apache.directory.fortress.core.model.PermObj#type} - contains any safe text</li>
* <li>
* {@link org.apache.directory.fortress.core.model.PermObj#props} * -
* multi-occurring property key and values are separated with a ':'. e.g. mykey1:myvalue1
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service
* will enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse updatePermObj( FortRequest request );
/**
* This method will remove permission object to perms container in directory. This method will also remove
* in associated permission objects that are attached to this object.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermObj}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>PermObj required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PermObj#objName} - contains the name of new object
* being removed
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h5>optional parameters</h5>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deletePermObj( FortRequest request );
/**
* This command grants a role the permission to perform an operation on an object to a role.
* The command is implemented by granting permission by setting the access control list of
* the object involved.
* The command is valid if and only if the pair (operation, object) represents a permission,
* and the role is a member of the ROLES data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermGrant}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>PermGrant required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#objName} - contains the object name</li>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#opName} - contains the operation name</li>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#roleNm} - contains the role name</li>
* </ul>
* <h5>PermGrant optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#objId} - contains the object id</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse grant( FortRequest request );
/**
* This command revokes the permission to perform an operation on an object from the set
* of permissions assigned to a role. The command is implemented by setting the access control
* list of the object involved.
* The command is valid if and only if the pair (operation, object) represents a permission,
* the role is a member of the ROLES data set, and the permission is assigned to that role.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermGrant}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>PermGrant required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#objName} - contains the object name</li>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#opName} - contains the operation name</li>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#roleNm} - contains the role name</li>
* </ul>
* <h5>PermGrant optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#objId} - contains the object id</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse revoke( FortRequest request );
/**
* This command grants a user the permission to perform an operation on an object to a role.
* The command is implemented by granting permission by setting the access control list of
* the object involved.
* The command is valid if and only if the pair (operation, object) represents a permission,
* and the user is a member of the USERS data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermGrant}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>PermGrant required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#objName} - contains the object name</li>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#opName} - contains the operation name</li>
* <li>
* {@link org.apache.directory.fortress.core.model.PermGrant#userId} - contains the userId for existing User
* </li>
* </ul>
* <h5>PermGrant optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#objId} - contains the object id</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse grantUser( FortRequest request );
/**
* This command revokes the permission to perform an operation on an object from the set
* of permissions assigned to a user. The command is implemented by setting the access control
* list of the object involved.
* The command is valid if and only if the pair (operation, object) represents a permission,
* the user is a member of the USERS data set, and the permission is assigned to that user.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermGrant}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>PermGrant required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#objName} - contains the object name</li>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#opName} - contains the operation name</li>
* <li>
* {@link org.apache.directory.fortress.core.model.PermGrant#userId} - contains the userId for existing User
* </li>
* </ul>
* <h5>PermGrant optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.PermGrant#objId} - contains the object id</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse revokeUser( FortRequest request );
/**
 * This command creates a new role childRole, and inserts it in the role hierarchy as an immediate descendant of
* the existing role parentRole.
* <p>
* The command is valid if and only if:
* <ul>
* <li> The childRole is not a member of the ROLES data set.
* <li> The parentRole is a member of the ROLES data set.
* </ul>
* This method:
* <ul>
* <li> Adds new role.
* <li> Assigns role relationship between new childRole and pre-existing parentRole.
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to
* {@link org.apache.directory.fortress.core.model.RoleRelationship} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>RoleRelationship required parameters</h5>
* <ul>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#name} - contains the name of
* existing parent role
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#name} - contains the name of new
* child role
* </li>
* </ul>
* <h5>optional parameters {@link org.apache.directory.fortress.core.model.RoleRelationship#child}</h5>
* <ul>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#description} - maps to description
* attribute on organizationalRole object class for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#beginTime} - HHMM - determines
* begin hour role may be activated into user's RBAC session for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#endTime} - HHMM - determines end
* hour role may be activated into user's RBAC session for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#beginDate} - YYYYMMDD - determines
* date when role may be activated into user's RBAC session for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#endDate} - YYYYMMDD - indicates
* latest date role may be activated into user's RBAC session for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#beginLockDate} - YYYYMMDD -
* determines beginning of enforced inactive status for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#endLockDate} - YYYYMMDD -
* determines end of enforced inactive status for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#dayMask} - 1234567, 1 = Sunday,
* 2 = Monday, etc - specifies which day role may be activated into user's RBAC session for new child
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addDescendant( FortRequest request );
/**
 * This command creates a new role parentRole, and inserts it in the role hierarchy as an immediate ascendant of
* the existing role childRole.
* <p>
* The command is valid if and only if:
* <ul>
* <li> The parentRole is not a member of the ROLES data set.
* <li> The childRole is a member of the ROLES data set.
* </ul>
* <p>
* This method:
* <ul>
* <li> Adds new role.
* <li> Assigns role relationship between new parentRole and pre-existing childRole.
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to
* {@link org.apache.directory.fortress.core.model.RoleRelationship} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>RoleRelationship required parameters</h5>
* <ul>
* <li>childRole - {@code org.apache.directory.fortress.core.model.RoleRelationship#child#name} - contains
* the name of existing child Role</li>
* <li>parentRole - {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#name} - contains
* the name of new Role to be parent</li>
* </ul>
* <h5>optional parameters {@link org.apache.directory.fortress.core.model.RoleRelationship#parent}</h5>
* <ul>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#description} - maps to
* description attribute on organizationalRole object class for new parent
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#beginTime} - HHMM - determines
* begin hour role may be activated into user's RBAC session for new parent
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#endTime} - HHMM - determines
* end hour role may be activated into user's RBAC session for new parent
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#beginDate} - YYYYMMDD -
* determines date when role may be activated into user's RBAC session for new parent
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#endDate} - YYYYMMDD - indicates
* latest date role may be activated into user's RBAC session for new parent
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#beginLockDate} - YYYYMMDD -
* determines beginning of enforced inactive status for new parent
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#endLockDate} - YYYYMMDD -
* determines end of enforced inactive status for new parent
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#dayMask} - 1234567, 1 = Sunday,
* 2 = Monday, etc - specifies which day role may be activated into user's RBAC session for new parent
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
 * <li>
 * {@link FortRequest#session} - contains a reference to administrative session and if included service will
 * enforce ARBAC constraints
 * </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addAscendant( FortRequest request );
/**
 * This command establishes a new immediate inheritance relationship parentRole &lt;&lt;-- childRole between existing
* roles parentRole, childRole.
* <p>
* The command is valid if and only if:
* <ul>
* <li> The parentRole and childRole are members of the ROLES data set.
* <li> The parentRole is not an immediate ascendant of childRole.
* <li> The childRole does not properly inherit parentRole (in order to avoid cycle creation).
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to
* {@link org.apache.directory.fortress.core.model.RoleRelationship} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>RoleRelationship required parameters</h5>
* <ul>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#name} - contains the name of
* existing role to be parent
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#name} - contains the name of
* existing role to be child
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addInheritance( FortRequest request );
/**
* This command deletes an existing immediate inheritance relationship parentRole <<-- childRole.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The roles parentRole and childRole are members of the ROLES data set.</li>
* <li>The parentRole is an immediate ascendant of childRole.</li>
* <li>
* The new inheritance relation is computed as the reflexive-transitive closure of the immediate inheritance
* relation resulted after deleting the relationship parentRole <<-- childRole.
* </li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to
* {@link org.apache.directory.fortress.core.model.RoleRelationship} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>RoleRelationship required parameters</h5>
* <ul>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#name} - contains the name of existing
* Role to remove parent relationship
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#name} - contains the name of existing
* Role to remove child relationship
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will enforce
* ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deleteInheritance( FortRequest request );
/**
* This command creates a named SSD set of roles and sets the cardinality n of its subsets
* that cannot have common users.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The name of the SSD set is not already in use.</li>
* <li>All the roles in the SSD set are members of the ROLES data set.</li>
* <li>
* n is a natural number greater than or equal to 2 and less than or equal to the cardinality of the SSD role set.
* </li>
* <li>The SSD constraint for the new role set is satisfied.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of new SSD role set
* to be added
* </li>
* </ul>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#members} * - multi-occurring attribute contains
* the RBAC Role names to be added to this set
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#cardinality} - default is 2 which is one more than
* maximum number of Roles that may be assigned to User from a particular set
* </li>
* <li>{@link org.apache.directory.fortress.core.model.SDSet#description} - contains any safe text</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse createSsdSet( FortRequest request );
/**
* This command updates existing SSD set of roles and sets the cardinality n of its subsets
* that cannot have common users.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The name of the SSD set exists in directory.</li>
* <li>All the roles in the SSD set are members of the ROLES data set.</li>
* <li>
* n is a natural number greater than or equal to 2 and less than or equal to the cardinality of the SSD role set.
* </li>
* <li>The SSD constraint for the new role set is satisfied.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing SSD role
* set to be modified
* </li>
* </ul>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#members} * - multi-occurring attribute contains the
* RBAC Role names to be added to this set
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#cardinality} - default is 2 which is one more than
* maximum number of Roles that may be assigned to User from a particular set
* </li>
* <li>{@link org.apache.directory.fortress.core.model.SDSet#description} - contains any safe text</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse updateSsdSet( FortRequest request );
/**
* This command adds a role to a named SSD set of roles. The cardinality associated with the role set remains unchanged.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The SSD role set exists.</li>
* <li>The role to be added is a member of the ROLES data set but not of a member of the SSD role set.</li>
* <li>The SSD constraint is satisfied after the addition of the role to the SSD role set.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the Role name to add as member to SSD set</li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing SSD role
* set targeted for update
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse addSsdRoleMember( FortRequest request );
/**
* This command removes a role from a named SSD set of roles. The cardinality associated with the role set remains
* unchanged.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The SSD role set exists.</li>
* <li>The role to be removed is a member of the SSD role set.</li>
* <li>The cardinality associated with the SSD role set is less than the number of elements of the SSD role set.</li>
* </ul>
* Note that the SSD constraint should be satisfied after the removal of the role from the SSD role set.
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the Role name to remove as member to SSD set</li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing SSD role set
* targeted for update
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will enforce
* ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse deleteSsdRoleMember( FortRequest request );
/**
* This command deletes a SSD role set completely. The command is valid if and only if the SSD role set exists.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing SSD role
* set targeted for removal
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse deleteSsdSet( FortRequest request );
/**
* This command sets the cardinality associated with a given SSD role set.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The SSD role set exists.</li>
* <li>
* The new cardinality is a natural number greater than or equal to 2 and less than or equal to the number of
* elements of the SSD role set.
* </li>
* <li>The SSD constraint is satisfied after setting the new cardinality.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing SSD role set targeted
* for update
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#cardinality} - contains new cardinality setting
* for SSD
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse setSsdSetCardinality( FortRequest request );
/**
* This command creates a named DSD set of roles and sets the cardinality n of its subsets
* that cannot have common users.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The name of the DSD set is not already in use.</li>
* <li>All the roles in the DSD set are members of the ROLES data set.</li>
* <li>
* n is a natural number greater than or equal to 2 and less than or equal to the cardinality of the DSD role set.
* </li>
* <li>The DSD constraint for the new role set is satisfied.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of new DSD role set to
* be added
* </li>
* </ul>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#members} * - multi-occurring attribute contains
* the RBAC Role names to be added to this set
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#cardinality} - default is 2 which is one more
* than maximum number of Roles that may be assigned to User from a particular set
* </li>
* <li>{@link org.apache.directory.fortress.core.model.SDSet#description} - contains any safe text</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse createDsdSet( FortRequest request );
/**
* This command updates existing DSD set of roles and sets the cardinality n of its subsets
* that cannot have common users.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The name of the DSD set exists in directory.</li>
* <li>All the roles in the DSD set are members of the ROLES data set.</li>
* <li>
* n is a natural number greater than or equal to 2 and less than or equal to the cardinality of the DSD role set.
* </li>
* <li>The DSD constraint for the new role set is satisfied.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
 *             {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing DSD
 *             role set to be modified
* </li>
* </ul>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#members} * - multi-occurring attribute contains
* the RBAC Role names to be added to this set
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#cardinality} - default is 2 which is one more
* than maximum number of Roles that may be assigned to User from a particular set
* </li>
* <li>{@link org.apache.directory.fortress.core.model.SDSet#description} - contains any safe text</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will enforce
* ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse updateDsdSet( FortRequest request );
/**
* This command adds a role to a named DSD set of roles. The cardinality associated with the role set remains unchanged.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The DSD role set exists.</li>
* <li>The role to be added is a member of the ROLES data set but not of a member of the DSD role set.</li>
* <li>The DSD constraint is satisfied after the addition of the role to the DSD role set.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the Role name to add as member to DSD set</li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing DSD role
* set targeted for update
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse addDsdRoleMember( FortRequest request );
/**
* This command removes a role from a named DSD set of roles. The cardinality associated with the role set remains
* unchanged.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The DSD role set exists.</li>
* <li>The role to be removed is a member of the DSD role set.</li>
* <li>The cardinality associated with the DSD role set is less than the number of elements of the DSD role set.</li>
* </ul>
* Note that the DSD constraint should be satisfied after the removal of the role from the DSD role set.
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the Role name to remove as member to DSD set</li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing DSD role set
* targeted for update
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse deleteDsdRoleMember( FortRequest request );
/**
* This command deletes a DSD role set completely. The command is valid if and only if the DSD role set exists.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing DSD role
* set targeted for removal
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse deleteDsdSet( FortRequest request );
/**
* This command sets the cardinality associated with a given DSD role set.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The DSD role set exists.</li>
* <li>
* The new cardinality is a natural number greater than or equal to 2 and less than or equal to the number of
* elements of the DSD role set.
* </li>
* <li>The DSD constraint is satisfied after setting the new cardinality.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing DSD role set
* targeted for update
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#cardinality} - contains new cardinality setting for
* DSD
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.SDSet}
*/
FortResponse setDsdSetCardinality( FortRequest request );
/**
 * This command enables a role to be constrained by attributes.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The role exists.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Role required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role to
* be created.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <ul>
* <li>
* {@link FortRequest#entity2} - contains a reference to {@link org.apache.directory.fortress.core.model.RoleConstraint} object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Role required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.RoleConstraint#key} - contains the name of the constraint being set onto role.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity}
*/
FortResponse enableRoleConstraint( FortRequest request );
/**
 * This command removes a role from being constrained by attributes.
* <p>
* The command is valid if and only if:
* <ul>
* <li>The role exists.</li>
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Role required parameters</h5>
* <ul>
* <li>
 *             {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role to be removed.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <ul>
* <li>
* {@link FortRequest#entity2} - contains a reference to {@link org.apache.directory.fortress.core.model.RoleConstraint} object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Role required parameters</h5>
* <ul>
* <li>
 *             {@link org.apache.directory.fortress.core.model.RoleConstraint#key} - contains the name of the constraint being removed from the role.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity}
*/
FortResponse disableRoleConstraint( FortRequest request );
//------------ ReviewMgr ----------------------------------------------------------------------------------------------
/**
* This method returns a matching permission entity to caller.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Permission} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing
* object being targeted
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing
* permission operation
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.Permission}
*/
FortResponse readPermission( FortRequest request );
/**
* Method reads permission object from perm container in directory.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermObj}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.PermObj} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PermObj#objName} - contains the name of existing object
* being targeted
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.PermObj}
*/
FortResponse readPermObj( FortRequest request );
/**
* Method returns a list of type Permission that match the perm object search string.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Permission} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains one or more leading
* characters of existing object being targeted
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains one or more leading
* characters of existing permission operation
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.Permission}
*/
FortResponse findPermissions( FortRequest request );
/**
* Method returns Permission operations for the provided permission object.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermObj}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
 *         <h5>{@link org.apache.directory.fortress.core.model.PermObj} required parameters</h5>
 *         <ul>
 *           <li>
 *             {@link org.apache.directory.fortress.core.model.PermObj#objName} - contains the name of the existing
 *             object whose permission operations are returned
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.Permission}
*/
FortResponse findPermsByObj( FortRequest request );
/**
* Method returns a list of type Permission that match any part of either
* {@link org.apache.directory.fortress.core.model.Permission#objName} or
* {@link org.apache.directory.fortress.core.model.Permission#opName} search strings.
* This method differs from findPermissions in that any permission that matches any part of the perm obj or any part
* of the perm op will be returned in result set (uses substring string matching).
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Permission} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains one or more substring
* characters of existing object being targeted
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains one or more substring
* characters of existing permission operation
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.Permission}
*/
FortResponse findAnyPermissions( FortRequest request );
/**
* Method returns a list of type Permission that match the perm object search string.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PermObj}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.PermObj} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PermObj#objName} - contains one or more characters of
* existing object being targeted
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.PermObj}
*/
FortResponse findPermObjs( FortRequest request );
/**
* Method reads Role entity from the role container in directory.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Role} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role to read.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.Role}
*/
FortResponse readRole( FortRequest request );
/**
* Method will return a list of type Role matching all or part of
* {@link org.apache.directory.fortress.core.model.Role#name}.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#value} - contains all or some of the chars corresponding to role entities stored in directory.
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.Role}
*/
FortResponse findRoles( FortRequest request );
/**
* Method returns matching User entity that is contained within the people container in the directory.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#userId} - contains the userId associated with the
* User object targeted for read.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.User}
*/
FortResponse readUser( FortRequest request );
/**
* Return a list of type User of all users in the people container that match all or part of the
* {@link org.apache.directory.fortress.core.model.User#userId} or
* {@link org.apache.directory.fortress.core.model.User#ou} fields passed in User entity.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#userId} - contains all or some leading chars that
* match userId(s) stored in the directory.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#ou} - contains one or more characters of org unit
* associated with existing object(s) being targeted
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.User}
*/
FortResponse findUsers( FortRequest request );
/**
* This method returns the data set of all users who are assigned the given role. This searches the User data set for
* Role relationship. This method does NOT search for hierarchical RBAC Roles relationships.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Role} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role
* targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.User}
*/
FortResponse assignedUsers( FortRequest request );
/**
* This method returns the data set of all users who are assigned the given role constraint. This searches the User data set for
* RoleConstraint relationship. This method does NOT search for hierarchical RBAC Roles relationships.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.RoleConstraint} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.RoleConstraint} required parameters</h5>
* <ul>
* <li>
 *             {@link org.apache.directory.fortress.core.model.RoleConstraint#key} - contains the constraint key,
 *             {@link org.apache.directory.fortress.core.model.RoleConstraint#value} - contains the constraint value
 *             targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.User}
*/
FortResponse assignedUsersConstraints( FortRequest request );
/**
* This method returns the data set of all users who are assigned the given role constraint. This searches the User data set for
* RoleConstraint relationship. This method does NOT search for hierarchical RBAC Roles relationships.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.RoleConstraint} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.RoleConstraint} required parameters</h5>
* <ul>
* <li>
 *             {@link org.apache.directory.fortress.core.model.Role#name} - contains the name of the Role,
 *             {@link org.apache.directory.fortress.core.model.RoleConstraint#type} - contains the RoleConstraint type,
 *             {@link org.apache.directory.fortress.core.model.RoleConstraint#key} - contains the constraint key
 *             targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.UserRole}
*/
FortResponse assignedUsersConstraintsKey( FortRequest request );
/**
* This function returns the set of roles assigned to a given user. The function is valid if and
* only if the user is a member of the USERS data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#userId} - contains the userId associated with
* the User object targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of
* type {@link org.apache.directory.fortress.core.model.UserRole}
*/
FortResponse assignedRoles( FortRequest request );
/**
* This function returns the set of users authorized to a given role, i.e., the users that are assigned to a role that
* inherits the given role. The function is valid if and only if the given role is a member of the ROLES data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Role} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role
* targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.User}
*/
FortResponse authorizedUsers( FortRequest request );
/**
* This function returns the set of roles authorized for a given user. The function is valid if
* and only if the user is a member of the USERS data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#userId} - contains the userId associated with the
* User object targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#values} contains a reference to a List of type
* String containing the User's authorized role names.
*/
FortResponse authorizedRoles( FortRequest request );
/**
* Return a list of type String of all roles that have granted a particular permission.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Permission} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing
* object being targeted
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing
* permission operation
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#values} contains a reference to a List of type
* String containing role names that permission has been granted to.
*/
FortResponse permissionRoles( FortRequest request );
/**
* This function returns the set of all permissions (op, obj), granted to or inherited by a
* given role. The function is valid if and only if the role is a member of the ROLES data
* set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Role} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role
* targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of
* type {@link org.apache.directory.fortress.core.model.Permission} containing permissions for role.
*/
FortResponse rolePermissions( FortRequest request );
/**
* This function returns the set of permissions a given user gets through his/her authorized
* roles. The function is valid if and only if the user is a member of the USERS data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#userId} - contains the userId associated with the
* User object targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of
* type {@link org.apache.directory.fortress.core.model.Permission} containing permissions for user.
*/
FortResponse userPermissions( FortRequest request );
/**
* Return all role names that have been authorized for a given permission. This will process role hierarchies to
* determine set of all Roles who have access to a given permission.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Permission} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing
* object being targeted
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing
* permission operation
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#values} contains a reference to a List of type
* String containing role names that permission has been granted to.
*/
FortResponse authorizedPermissionRoles( FortRequest request );
/**
* Return all userIds that have been granted (directly) a particular permission. This will not consider assigned
* or authorized Roles.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Permission} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing
* object being targeted
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing
* permission operation
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#values} contains a reference to a List of type
* String containing userIds that permission has been granted to.
*/
FortResponse permissionUsers( FortRequest request );
/**
 * Return all userIds that have been authorized for a given permission. This will process role hierarchies to determine
 * set of all Users who have access to a given permission.
 * <h4>required parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
 * entity
 * </li>
 * </ul>
 * <ul style="list-style-type:none">
 * <li>
 * <ul style="list-style-type:none">
 * <li>
 * <h5>{@link org.apache.directory.fortress.core.model.Permission} required parameters</h5>
 * <ul>
 * <li>
 * {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing
 * object being targeted
 * </li>
 * <li>
 * {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing
 * permission operation
 * </li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * </ul>
 * <h4>optional parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#session} - contains a reference to administrative session and if included service will
 * enforce ARBAC constraints
 * </li>
 * </ul>
 *
 * @param request contains a reference to {@code FortRequest}
 * @return reference to {@code FortResponse}, {@link FortResponse#values} contains a reference to a List of type
 * String containing userIds that permission is authorized for.
 */
FortResponse authorizedPermissionUsers( FortRequest request );
/**
* This function returns the list of all SSD role sets that have a particular Role as member or Role's
* parent as a member. If the Role parameter is left blank, function will return all SSD role sets.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Role} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role
* targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.SDSet} containing all matching SSD sets.
*/
FortResponse ssdRoleSets( FortRequest request );
/**
* This function returns the SSD data set that matches a particular set name.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing object
* being targeted
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to an object of type
* {@link org.apache.directory.fortress.core.model.SDSet} containing matching SSD set.
*/
FortResponse ssdRoleSet( FortRequest request );
/**
* This function returns the set of roles of a SSD role set. The function is valid if and only if the
* role set exists.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing object
* being targeted
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#values} contains a reference to a List of type
* String containing all member roles of SSD set.
*/
FortResponse ssdRoleSetRoles( FortRequest request );
/**
 * This function returns the cardinality associated with a SSD role set. The function is valid if and only if the
 * role set exists.
 * <h4>required parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
 * with {@link org.apache.directory.fortress.core.model.SDSet#name} set to the name of the existing SSD set being
 * targeted
 * </li>
 * </ul>
 *
 * @param request contains a reference to {@code FortRequest}
 * @return reference to {@code FortResponse}, {@link FortResponse#entity} contains the cardinality.
 */
FortResponse ssdRoleSetCardinality( FortRequest request );
/**
* This function returns the list of all SSD sets that have a particular SSD set name.
* If the parameter is left blank, function will return all SSD sets.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name to use for the search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.SDSet} containing all matching SSD sets.
*/
FortResponse ssdSets( FortRequest request );
/**
* This function returns the list of all DSD role sets that have a particular Role as member or Role's
* parent as a member. If the Role parameter is left blank, function will return all DSD role sets.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Role} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role
* targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.SDSet} containing all matching DSD sets.
*/
FortResponse dsdRoleSets( FortRequest request );
/**
* This function returns the DSD data set that matches a particular set name.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing object being
* targeted
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to an object of type
* {@link org.apache.directory.fortress.core.model.SDSet} containing matching DSD set.
*/
FortResponse dsdRoleSet( FortRequest request );
/**
* This function returns the set of roles of a DSD role set. The function is valid if and only if the
* role set exists.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name of existing object being
* targeted
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#values} contains a reference to a List of type
* String containing all member roles of DSD set.
*/
FortResponse dsdRoleSetRoles( FortRequest request );
/**
 * This function returns the cardinality associated with a DSD role set. The function is valid if and only if the
 * role set exists.
 * <h4>required parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
 * with {@link org.apache.directory.fortress.core.model.SDSet#name} set to the name of the existing DSD set being
 * targeted
 * </li>
 * </ul>
 *
 * @param request contains a reference to {@code FortRequest}
 * @return reference to {@code FortResponse}, {@link FortResponse#entity} contains the cardinality.
 */
FortResponse dsdRoleSetCardinality( FortRequest request );
/**
* This function returns the list of all DSD sets that have a particular DSD set name.
* If the parameter is left blank, function will return all DSD sets.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.SDSet} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.SDSet} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.SDSet#name} - contains the name to use for the search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.SDSet} containing all matching DSD sets.
*/
FortResponse dsdSets( FortRequest request );
//------------ AccessMgr ----------------------------------------------------------------------------------------------
/**
* Perform user authentication only. It does not activate RBAC roles in session but will evaluate
* password policies.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* <li>{@link org.apache.directory.fortress.core.model.User#password} - used to authenticate the User</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#session} object will be returned if authentication
* successful. This will not contain user's roles.
*/
FortResponse authenticate( FortRequest request );
/**
* Perform user authentication {@link org.apache.directory.fortress.core.model.User#password} and role activations.<br>
* This method must be called once per user prior to calling other methods within this class.
* The successful result is {@link org.apache.directory.fortress.core.model.Session} that contains target user's RBAC
* {@link org.apache.directory.fortress.core.model.User#roles} and Admin role
* {@link org.apache.directory.fortress.core.model.User#adminRoles}.<br>
* In addition to checking user password validity it will apply configured password policy checks
 * {@link org.apache.directory.fortress.core.model.User#pwPolicy}.<br>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* <li>{@link org.apache.directory.fortress.core.model.User#password} - used to authenticate the User</li>
* </ul>
* <h5>User optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#roles} * - multi-occurring attribute contains the
* names of assigned RBAC roles targeted for activation into Session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#adminRoles} * - multi-occurring attribute contains
* the names of assigned ARBAC roles targeted for activation into Session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#props} collection of name value pairs collected on
* behalf of User during signon. For example hostname:myservername or ip:192.168.1.99
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>This API will...</h4>
* <ul>
* <li>authenticate user password.</li>
* <li>perform <a href="http://www.openldap.org/">OpenLDAP</a>
* <a href="http://tools.ietf.org/html/draft-behera-ldap-password-policy-10">password policy evaluation</a>.
* </li>
* <li>
* fail for any user who is locked by OpenLDAP's policies
* {@link org.apache.directory.fortress.core.model.User#isLocked()}.
* </li>
* <li>
* evaluate temporal {@link org.apache.directory.fortress.core.model.Constraint}(s) on
* {@link org.apache.directory.fortress.core.model.User}, {@link org.apache.directory.fortress.core.model.UserRole}
* and {@link org.apache.directory.fortress.core.model.UserAdminRole} entities.
* </li>
* <li>
* process selective role activations into User RBAC Session
* {@link org.apache.directory.fortress.core.model.User#roles}.
* </li>
* <li>
* check Dynamic Separation of Duties {@link org.apache.directory.fortress.core.impl.DSDChecker#validate} on
* {@link org.apache.directory.fortress.core.model.User#roles}.
* </li>
* <li>
* process selective administrative role activations {@link org.apache.directory.fortress.core.model.User#adminRoles}.
* </li>
* <li>
* return a {@link org.apache.directory.fortress.core.model.Session} containing
* {@link org.apache.directory.fortress.core.model.Session#getUser()},
* {@link org.apache.directory.fortress.core.model.Session#getRoles()} and (if admin user)
* {@link org.apache.directory.fortress.core.model.Session#getAdminRoles()} if everything checks out good.
* </li>
* <li>
* return a checked exception that will be {@link org.apache.directory.fortress.core.SecurityException}
* or its derivation.
* </li>
* <li>return a {@link org.apache.directory.fortress.core.SecurityException} for system failures.</li>
* <li>
* return a {@link org.apache.directory.fortress.core.PasswordException} for authentication and password policy
* violations.
* </li>
* <li>return a {@link org.apache.directory.fortress.core.ValidationException} for data validation errors.</li>
* <li>return a {@link org.apache.directory.fortress.core.FinderException} if User id not found.</li>
* <li>(optionally) store parms passed in by client for audit trail purposes.</li>
* </ul>
* <h4>The function is valid if and only if:</h4>
* <ul>
* <li>the user is a member of the USERS data set</li>
* <li>the password is supplied (unless trusted).</li>
* <li>the (optional) active role set is a subset of the roles authorized for that user.</li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#session} object will contain authentication result
* code {@link org.apache.directory.fortress.core.model.Session#errorId}, RBAC role activations
* {@link org.apache.directory.fortress.core.model.Session#getRoles()}, Admin Role activations
* {@link org.apache.directory.fortress.core.model.Session#getAdminRoles()},OpenLDAP pw policy codes
* {@link org.apache.directory.fortress.core.model.Session#warnings},
* {@link org.apache.directory.fortress.core.model.Session#expirationSeconds},
* {@link org.apache.directory.fortress.core.model.Session#graceLogins} and more.
*/
FortResponse createSession( FortRequest request );
/**
* This service accepts userId for validation and returns RBAC session. This service will not check the password nor
* perform password policy validations.<br>
* The successful result is {@link org.apache.directory.fortress.core.model.Session} that contains target user's
* RBAC {@link org.apache.directory.fortress.core.model.User#roles} and Admin role
* {@link org.apache.directory.fortress.core.model.User#adminRoles}.<br>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* </ul>
* <h5>User optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#roles} * - multi-occurring attribute contains
* the names of assigned RBAC roles targeted for activation into Session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#adminRoles} * - multi-occurring attribute contains
* the names of assigned ARBAC roles targeted for activation into Session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#props} collection of name value pairs collected
* on behalf of User during signon. For example hostname:myservername or ip:192.168.1.99
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>This API will...</h4>
* <ul>
* <li>
* fail for any user who is locked by OpenLDAP's policies
* {@link org.apache.directory.fortress.core.model.User#isLocked()}.
* </li>
* <li>
* evaluate temporal {@link org.apache.directory.fortress.core.model.Constraint}(s) on
* {@link org.apache.directory.fortress.core.model.User}, {@link org.apache.directory.fortress.core.model.UserRole}
* and {@link org.apache.directory.fortress.core.model.UserAdminRole} entities.
* </li>
* <li>
* process selective role activations into User RBAC Session
* {@link org.apache.directory.fortress.core.model.User#roles}.
* </li>
* <li>
* check Dynamic Separation of Duties {@link org.apache.directory.fortress.core.impl.DSDChecker#validate} on
* {@link org.apache.directory.fortress.core.model.User#roles}.
* </li>
* <li>
* process selective administrative role activations
* {@link org.apache.directory.fortress.core.model.User#adminRoles}.
* </li>
* <li>
* return a {@link org.apache.directory.fortress.core.model.Session} containing
* {@link org.apache.directory.fortress.core.model.Session#getUser()},
* {@link org.apache.directory.fortress.core.model.Session#getRoles()} and (if admin user)
* {@link org.apache.directory.fortress.core.model.Session#getAdminRoles()} if everything checks out good.
* </li>
* <li>
* return a checked exception that will be {@link org.apache.directory.fortress.core.SecurityException} or
* its derivation.
* </li>
* <li>return a {@link org.apache.directory.fortress.core.SecurityException} for system failures.</li>
* <li>return a {@link org.apache.directory.fortress.core.ValidationException} for data validation errors.</li>
* <li>return a {@link org.apache.directory.fortress.core.FinderException} if User id not found.</li>
* <li>(optionally) store parms passed in by client for audit trail purposes.</li>
* </ul>
* <h4>The function is valid if and only if:</h4>
* <ul>
* <li> the user is a member of the USERS data set</li>
* <li> the (optional) active role set is a subset of the roles authorized for that user.</li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#session} object will contain authentication result
* code {@link org.apache.directory.fortress.core.model.Session#errorId}, RBAC role activations
* {@link org.apache.directory.fortress.core.model.Session#getRoles()}, Admin Role activations
* {@link org.apache.directory.fortress.core.model.Session#getAdminRoles()},OpenLDAP pw policy codes
* {@link org.apache.directory.fortress.core.model.Session#warnings},
* {@link org.apache.directory.fortress.core.model.Session#expirationSeconds},
* {@link org.apache.directory.fortress.core.model.Session#graceLogins} and more.
*/
FortResponse createSessionTrusted( FortRequest request );
/**
* Perform group {@link Group} role activations {@link Group#members}.<br>
* Group sessions are always trusted. <br>
* This method must be called once per group prior to calling other methods within this class.
* The successful result is {@link org.apache.directory.fortress.core.model.Session} that contains target group's RBAC
* {@link Group#members}
* <h4>This API will...</h4>
* <ul>
* <li>
* fail for any non-existing group
* </li>
* <li>
 * evaluate temporal {@link org.apache.directory.fortress.core.model.Constraint}(s) on member {@link UserRole} entities.
 * </li>
 * <li>process selective role activations into Group RBAC Session {@link Group#roles}.</li>
* <li>
* check Dynamic Separation of Duties {@link org.apache.directory.fortress.core.impl.DSDChecker#validate(
* org.apache.directory.fortress.core.model.Session,
* org.apache.directory.fortress.core.model.Constraint,
* org.apache.directory.fortress.core.util.time.Time,
* org.apache.directory.fortress.core.util.VUtil.ConstraintType)} on
* {@link org.apache.directory.fortress.core.model.User#roles}.
* </li>
* <li>
* return a {@link org.apache.directory.fortress.core.model.Session} containing
* {@link org.apache.directory.fortress.core.model.Session#getGroup()},
* {@link org.apache.directory.fortress.core.model.Session#getRoles()}
* </li>
* <li>throw a checked exception that will be {@link SecurityException} or its derivation.</li>
* <li>throw a {@link SecurityException} for system failures.</li>
* <li>throw a {@link ValidationException} for data validation errors.</li>
* <li>throw a {@link FinderException} if Group name not found.</li>
* </ul>
* <h4>
* The function is valid if and only if:
* </h4>
* <ul>
* <li> the group is a member of the GROUPS data set</li>
* <li> the (optional) active role set is a subset of the roles authorized for that group.</li>
* </ul>
* <h4>
* The following attributes may be set when calling this method
* </h4>
* <ul>
* <li>{@link Group#name} - required</li>
* <li>
* {@link org.apache.directory.fortress.core.model.Group#members} contains a list of RBAC role names authorized for group
* and targeted for activation within this session. Default is all authorized RBAC roles will be activated into this
* Session.
* </li>
* </ul>
* <h4>
* Notes:
* </h4>
* <ul>
* <li> roles that violate Dynamic Separation of Duty Relationships will not be activated into session.
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* (optional), optional {@link Group#type}, optional
* @return reference to {@code FortResponse}, {@link FortResponse#session} object will contain authentication result
* {@link org.apache.directory.fortress.core.model.Session#errorId},
* RBAC role activations {@link org.apache.directory.fortress.core.model.Session#getRoles()},
* OpenLDAP pw policy codes {@link org.apache.directory.fortress.core.model.Session#warnings},
* {@link org.apache.directory.fortress.core.model.Session#expirationSeconds},
* {@link org.apache.directory.fortress.core.model.Session#graceLogins} and more.
*/
FortResponse createGroupSession(FortRequest request );
/**
* Perform user RBAC authorization. This function returns a Boolean value meaning whether the subject of a given
* session is allowed or not to perform a given operation on a given object. The function is valid if and
* only if the session is a valid Fortress session, the object is a member of the OBJS data set,
* and the operation is a member of the OPS data set. The session's subject has the permission
* to perform the operation on that object if and only if that permission is assigned to (at least)
* one of the session's active roles. This implementation will verify the roles or userId correspond
* to the subject's active roles are registered in the object's access control list.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* entity
* </li>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Permission} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing
* object being targeted
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing
* permission operation
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#isAuthorized} boolean will be 'true' if User
* authorized, otherwise 'false'. Updated {@link FortResponse#session} will be included in response as well.
*/
FortResponse checkAccess( FortRequest request );
/**
* Combine createSession and checkAccess into a single method.
* This function returns a Boolean value meaning whether the User is allowed or not to perform a given operation on a given object.
* The function is valid if and only if the user is a valid Fortress user, the object is a member of the OBJS data set,
* and the operation is a member of the OPS data set. The user has the permission
* to perform the operation on that object if and only if that permission is assigned to (at least)
* one of the session's active roles. This implementation will verify the roles or userId correspond
* to the user's active roles are registered in the object's access control list.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Permission}
* entity
* </li>
* <li>
* {@link FortRequest#entity2} - contains a reference to User object containing userId.
* </li>
* <li>
* {@link FortRequest#isFlag} - boolean value if true, password check will not be performed.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Permission} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing
* object being targeted
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing
* permission operation
* </li>
* </ul>
* </li>
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* <li>{@link org.apache.directory.fortress.core.model.User#password} - used to authenticate the User</li>
* </ul>
* <h5>User optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#roles} * - multi-occurring attribute contains the
* names of assigned RBAC roles targeted for activation into Session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#props} collection of name value pairs collected on
* behalf of User during signon. For example locale:east
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#isAuthorized} boolean will be 'true' if User
* authorized, otherwise 'false'.
*/
FortResponse createSessionCheckAccess( FortRequest request );
/**
* Combine createSession and a role check into a single method.
* This function returns a Boolean value meaning whether the User has a particular role.
* The function is valid if and only if the user is a valid Fortress user and the role is a member of the ROLES data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role}
* entity
* </li>
* <li>
* {@link FortRequest#entity2} - contains a reference to User object containing userId.
* </li>
* <li>
* {@link FortRequest#isFlag} - boolean value if true, password check will not be performed.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Role} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name of existing
* role being targeted for check.
* </li>
* </ul>
* </li>
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.User#userId} - maps to INetOrgPerson uid</li>
* <li>{@link org.apache.directory.fortress.core.model.User#password} - used to authenticate the User</li>
* </ul>
* <h5>User optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#roles} * - multi-occurring attribute contains the
* names of assigned RBAC roles targeted for activation into Session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.User#props} collection of name value pairs collected on
* behalf of User during signon. For example locale:east
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#isAuthorized} boolean will be 'true' if User
 * authorized, otherwise 'false'.
*/
FortResponse isUserInRole( FortRequest request );
/**
* This function returns the permissions of the session, i.e., the permissions assigned
* to its authorized roles. The function is valid if and only if the session is a valid Fortress session.
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.</li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} containing a List of type
* {@link org.apache.directory.fortress.core.model.Permission}. Updated {@link FortResponse#session} will be included
* in response as well.
*/
FortResponse sessionPermissions( FortRequest request );
/**
* This function returns the active roles associated with a session. The function is valid if
* and only if the session is a valid Fortress session.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} containing a List of type
* {@link org.apache.directory.fortress.core.model.UserRole}. Updated {@link FortResponse#session} will be included
* in response as well.
*/
FortResponse sessionRoles( FortRequest request );
/**
* This function returns the authorized roles associated with a session based on hierarchical relationships. The
* function is valid if and only if the session is a valid Fortress session.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#valueSet} containing a Set of type String containing
* role names authorized for User. Updated {@link FortResponse#session} will be included in response as well.
*/
FortResponse authorizedSessionRoles( FortRequest request );
/**
* This function adds a role as an active role of a session whose owner is a given user.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserRole}
* entity.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserRole} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#name} - contains the Role name targeted for
* activation into User's session
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* The function is valid if and only if:
* <ul>
* <li>the user is a member of the USERS data set</li>
* <li>the role is a member of the ROLES data set</li>
* <li>the role inclusion does not violate Dynamic Separation of Duty Relationships</li>
* <li>the session is a valid Fortress session</li>
* <li>the user is authorized to that role</li>
* <li>the session is owned by that user.</li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, Updated {@link FortResponse#session} will be included in response.
*/
FortResponse addActiveRole( FortRequest request );
/**
* This function deletes a role from the active role set of a session owned by a given user.
* The function is valid if and only if the user is a member of the USERS data set, the
* session object contains a valid Fortress session, the session is owned by the user,
* and the role is an active role of that session.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserRole}
* entity.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserRole} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#name} - contains the Role name targeted for
* removal from User's session
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, Updated {@link FortResponse#session} will be included in response.
*/
FortResponse dropActiveRole( FortRequest request );
/**
* This function returns the userId value that is contained within the session object.
* The function is valid if and only if the session object contains a valid Fortress session.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains reference to
* {@link org.apache.directory.fortress.core.model.User#userId} only.
*/
FortResponse getUserId( FortRequest request );
/**
* This function returns the user object that is contained within the session object.
* The function is valid if and only if the session object contains a valid Fortress session.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains reference to
* {@link org.apache.directory.fortress.core.model.User}.
*/
FortResponse getUser( FortRequest request );
//------------ DelegatedAdminMgr --------------------------------------------------------------------------------------
/**
* This command creates a new admin role. The command is valid if and only if the new admin role is not
* already a member of the ADMIN ROLES data set. The ADMIN ROLES data set is updated.
* Initially, no user or permission is assigned to the new role.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.AdminRole}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>AdminRole required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#name} - contains the name of the new AdminRole
* being targeted for addition to LDAP
* </li>
* </ul>
* <h5>AdminRole optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.AdminRole#description} - contains any safe text</li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#osPs} * - multi-occurring attribute used to
* set associations to existing PERMS OrgUnits
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#osUs} * - multi-occurring attribute used to
* set associations to existing USERS OrgUnits
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginRange} - contains the name of an existing
* RBAC Role that represents the lowest role in hierarchy that administrator (whoever has this AdminRole
* activated) controls
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endRange} - contains the name of an existing
* RBAC Role that represents that highest role in hierarchy that administrator may control
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginInclusive} - if 'true' the RBAC Role
 * specified in beginRange is also controlled by the possessor of this AdminRole
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endInclusive} - if 'true' the RBAC Role
 * specified in endRange is also controlled by the administrator
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginTime} - HHMM - determines begin hour
* adminRole may be activated into user's ARBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endTime} - HHMM - determines end hour adminRole
* may be activated into user's ARBAC session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginDate} - YYYYMMDD - determines date when
* adminRole may be activated into user's ARBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endDate} - YYYYMMDD - indicates latest date
* adminRole may be activated into user's ARBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginLockDate} - YYYYMMDD - determines
* beginning of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endLockDate} - YYYYMMDD - determines end
* of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#dayMask} - 1234567, 1 = Sunday, 2 = Monday,
* etc - specifies which day role may be activated into user's ARBAC session
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to a
* {@link org.apache.directory.fortress.core.model.AdminRole}.
*/
FortResponse addAdminRole( FortRequest request );
/**
* This command deletes an existing admin role from the ARBAC database. The command is valid
* if and only if the role to be deleted is a member of the ADMIN ROLES data set. This command will
* also deassign role from all users.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.AdminRole}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>AdminRole required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#name} - contains the name of the new AdminRole
* being targeted for removal from LDAP
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to a
* {@link org.apache.directory.fortress.core.model.AdminRole}.
*/
FortResponse deleteAdminRole( FortRequest request );
/**
 * Method will update an AdminRole entity in the directory. The role must exist in directory prior to this call.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.AdminRole}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>AdminRole required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#name} - contains the name of the new AdminRole
* being targeted for update to LDAP
* </li>
* </ul>
* <h5>AdminRole optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.AdminRole#description} - contains any safe text</li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#osPs} * - multi-occurring attribute used to set
* associations to existing PERMS OrgUnits
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#osUs} * - multi-occurring attribute used to set
* associations to existing USERS OrgUnits
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginRange} - contains the name of an existing
* RBAC Role that represents the lowest role in hierarchy that administrator (whoever has this AdminRole
* activated) controls
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endRange} - contains the name of an existing
* RBAC Role that represents that highest role in hierarchy that administrator may control
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginInclusive} - if 'true' the RBAC Role
 * specified in beginRange is also controlled by the possessor of this AdminRole
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endInclusive} - if 'true' the RBAC Role
 * specified in endRange is also controlled by the administrator
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginTime} - HHMM - determines begin hour
* adminRole may be activated into user's ARBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endTime} - HHMM - determines end hour
* adminRole may be activated into user's ARBAC session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginDate} - YYYYMMDD - determines date
* when adminRole may be activated into user's ARBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endDate} - YYYYMMDD - indicates latest date
* adminRole may be activated into user's ARBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#beginLockDate} - YYYYMMDD - determines
* beginning of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#endLockDate} - YYYYMMDD - determines end
* of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.AdminRole#dayMask} - 1234567, 1 = Sunday, 2 = Monday,
* etc - specifies which day role may be activated into user's ARBAC session
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to a
* {@link org.apache.directory.fortress.core.model.AdminRole}.
*/
FortResponse updateAdminRole( FortRequest request );
/**
* This command assigns a user to an administrative role.
* <ul>
* <li> The command is valid if and only if:
* <li> The user is a member of the USERS data set
* <li> The role is a member of the ADMIN ROLES data set
* <li> The user is not already assigned to the admin role
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserAdminRole}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>UserAdminRole required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#name} - contains the name for already
* existing AdminRole to be assigned
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#userId} - contains the userId for
* existing User
* </li>
* </ul>
* <h5>UserAdminRole optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#beginTime} - HHMM - determines begin
* hour AdminRole may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#endTime} - HHMM - determines end hour
* AdminRole may be activated into user's RBAC session.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#beginDate} - YYYYMMDD - determines date
* when AdminRole may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#endDate} - YYYYMMDD - indicates latest
* date AdminRole may be activated into user's RBAC session
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#beginLockDate} - YYYYMMDD - determines
* beginning of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#endLockDate} - YYYYMMDD - determines
* end of enforced inactive status
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#dayMask} - 1234567, 1 = Sunday,
* 2 = Monday, etc - specifies which day role may be activated into user's ARBAC session
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
* <p>
* Successful completion of this op, the following occurs:
* <ul>
* <li>
* User entity (resides in people container) has role assignment added to aux object class attached to actual
* user record.
* </li>
* <li> AdminRole entity (resides in adminRole container) has userId added as role occupant.</li>
* <li> (optional) Temporal constraints may be associated with <code>ftUserAttrs</code> aux object class based on:</li>
* <li>
* <ul>
* <li> timeout - number in seconds of session inactivity time allowed.</li>
* <li> beginDate - YYYYMMDD - determines date when role may be activated.</li>
 * <li> endDate - YYYYMMDD - indicates latest date role may be activated.</li>
* <li> beginLockDate - YYYYMMDD - determines beginning of enforced inactive status</li>
 * <li> endLockDate - YYYYMMDD - determines end of enforced inactive status.</li>
* <li> beginTime - HHMM - determines begin hour role may be activated in user's session.</li>
* <li> endTime - HHMM - determines end hour role may be activated in user's session.*</li>
* <li> dayMask - 1234567, 1 = Sunday, 2 = Monday, etc - specifies which day of week role may be activated.</li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse assignAdminUser( FortRequest request );
/**
* This method removes assigned admin role from user entity. Both user and admin role entities must exist and have
* role relationship before calling this method.
* <p>
* Successful completion :<br>
* del Role to User assignment in User data set<br>
* AND<br>
* User to Role assignment in Admin Role data set.<br>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserAdminRole}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>UserAdminRole required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#name} - contains the name for already
* existing AdminRole to be deassigned
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAdminRole#userId} - contains the userId for existing
* User
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deassignAdminUser( FortRequest request );
/**
* This commands creates a new role childRole, and inserts it in the role hierarchy as an immediate descendant of
* the existing role parentRole. The command is valid if and only if childRole is not a member of the ADMINROLES data
* set, and parentRole is a member of the ADMINROLES data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to
* {@link org.apache.directory.fortress.core.model.RoleRelationship} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>RoleRelationship required parameters</h5>
* <ul>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#name} - contains the name of
* existing parent role
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#name} - contains the name of new
* child role
* </li>
* </ul>
* <h5>optional parameters {@code org.apache.directory.fortress.core.model.RoleRelationship#child}</h5>
* <ul>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#description} - maps to description
* attribute on organizationalRole object class for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#beginTime} - HHMM - determines
* begin hour role may be activated into user's RBAC session for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#endTime} - HHMM - determines end
* hour role may be activated into user's RBAC session for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#beginDate} - YYYYMMDD - determines
* date when role may be activated into user's RBAC session for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#endDate} - YYYYMMDD - indicates
* latest date role may be activated into user's RBAC session for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#beginLockDate} - YYYYMMDD -
* determines beginning of enforced inactive status for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#endLockDate} - YYYYMMDD -
* determines end of enforced inactive status for new child
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RoleRelationship#child#dayMask} - 1234567, 1 = Sunday,
* 2 = Monday, etc - specifies which day role may be activated into user's RBAC session for new child
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
* The command is valid if and only if:
* <ul>
* <li>The childRole is not a member of the ADMINROLES data set.</li>
* <li>The parentRole is a member of the ADMINROLES data set.</li>
* </ul>
* <p>
* This method:
* <ul>
* <li>Adds new adminRole.</li>
* <li>Assigns role relationship between new childRole and pre-existing parentRole.</li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addAdminDescendant( FortRequest request );
    /**
     * This command creates a new role parentRole, and inserts it in the role hierarchy as an immediate ascendant of
     * the existing role childRole. The command is valid if and only if parentRole is not a member of the ADMINROLES data set,
     * and childRole is a member of the ADMINROLES data set.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to
     * {@link org.apache.directory.fortress.core.model.RoleRelationship} entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>RoleRelationship required parameters</h5>
     * <ul>
     * <li>
     * childRole - {@code org.apache.directory.fortress.core.model.RoleRelationship#child#name} - contains the
     * name of existing child AdminRole</li>
     * <li>
     * parentRole - {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#name} - contains the
     * name of new AdminRole to be parent</li>
     * </ul>
     * <h5>optional parameters {@link org.apache.directory.fortress.core.model.RoleRelationship#parent}</h5>
     * <ul>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#description} - maps to description
     * attribute on organizationalRole object class for new parent
     * </li>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#beginTime} - HHMM - determines
     * begin hour role may be activated into user's RBAC session for new parent
     * </li>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#endTime} - HHMM - determines end
     * hour role may be activated into user's RBAC session for new parent
     * </li>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#beginDate} - YYYYMMDD -
     * determines date when role may be activated into user's RBAC session for new parent
     * </li>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#endDate} - YYYYMMDD - indicates
     * latest date role may be activated into user's RBAC session for new parent
     * </li>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#beginLockDate} - YYYYMMDD -
     * determines beginning of enforced inactive status for new parent
     * </li>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#endLockDate} - YYYYMMDD -
     * determines end of enforced inactive status for new parent
     * </li>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#dayMask} - 1234567, 1 = Sunday,
     * 2 = Monday, etc - specifies which day role may be activated into user's RBAC session for new parent
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     * The command is valid if and only if:
     * <ul>
     * <li>The parentRole is not a member of the ADMINROLES data set.</li>
     * <li>The childRole is a member of the ADMINROLES data set.</li>
     * </ul>
     * This method:
     * <ul>
     * <li>Adds new adminRole.</li>
     * <li>Assigns role relationship between new parentRole and pre-existing childRole.</li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse addAdminAscendant( FortRequest request );
    /**
     * This command establishes a new immediate inheritance relationship parentRole &lt;&lt;-- childRole between existing
     * roles parentRole, childRole. The command is valid if and only if parentRole and childRole are members of the
     * ADMINROLES data set, parentRole is not an immediate ascendant of childRole, and childRole does not properly
     * inherit parentRole (in order to avoid cycle creation).
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to
     * {@link org.apache.directory.fortress.core.model.RoleRelationship} entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>RoleRelationship required parameters</h5>
     * <ul>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#name} - contains the name
     * of existing AdminRole to be parent
     * </li>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#child#name} - contains the name of
     * existing AdminRole to be child
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     * The command is valid if and only if:
     * <ul>
     * <li>The parentRole and childRole are members of the ADMINROLES data set.</li>
     * <li>The parentRole is not an immediate ascendant of childRole.</li>
     * <li>The childRole does not properly inherit parentRole (in order to avoid cycle creation).</li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse addAdminInheritance( FortRequest request );
    /**
     * This command deletes an existing immediate inheritance relationship parentRole &lt;&lt;-- childRole. The command is
     * valid if and only if the adminRoles parentRole and childRole are members of the ADMINROLES data set, and parentRole
     * is an immediate ascendant of childRole. The new inheritance relation is computed as the reflexive-transitive
     * closure of the immediate inheritance relation resulting after deleting the relationship parentRole &lt;&lt;-- childRole.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to
     * {@link org.apache.directory.fortress.core.model.RoleRelationship} entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>RoleRelationship required parameters</h5>
     * <ul>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#parent#name} - contains the name of
     * existing Role to remove parent relationship
     * </li>
     * <li>
     * {@code org.apache.directory.fortress.core.model.RoleRelationship#child#name} - contains the name of
     * existing Role to remove child relationship
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     * The command is valid if and only if:
     * <ul>
     * <li>The roles parentRole and childRole are members of the ADMINROLES data set.</li>
     * <li>The parentRole is an immediate ascendant of childRole.</li>
     * <li>
     * The new inheritance relation is computed as the reflexive-transitive closure of the immediate inheritance
     * relation resulting after deleting the relationship parentRole &lt;&lt;-- childRole.
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse deleteAdminInheritance( FortRequest request );
    /**
     * This command adds a new OrgUnit entity to the OrgUnit dataset. The OrgUnit can be either User or Perm and is
     * set by setting type attribute.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.OrgUnit}
     * object
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>OrgUnit required parameters</h5>
     * <ul>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#name} - contains the name of new USERS or
     * PERMS OrgUnit to be added
     * </li>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#type} - contains the type of OU:
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#USER} or
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#PERM}
     * </li>
     * </ul>
     * <h5>OrgUnit optional parameters</h5>
     * <ul>
     * <li>{@link org.apache.directory.fortress.core.model.OrgUnit#description} - contains any safe text</li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse addOrg( FortRequest request );
    /**
     * This command updates an existing OrgUnit entity in the OrgUnit dataset. The OrgUnit can be either User or Perm and is
     * set by setting type attribute.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.OrgUnit}
     * object
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>OrgUnit required parameters</h5>
     * <ul>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#name} - contains the name of USERS or PERMS
     * OrgUnit to be updated
     * </li>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#type} - contains the type of OU:
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#USER} or
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#PERM}
     * </li>
     * </ul>
     * <h5>OrgUnit optional parameters</h5>
     * <ul>
     * <li>{@link org.apache.directory.fortress.core.model.OrgUnit#description} - contains any safe text</li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse updateOrg( FortRequest request );
    /**
     * This command deletes an existing OrgUnit entity from the OrgUnit dataset. The OrgUnit can be either User or Perm and is
     * set by setting type attribute.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.OrgUnit}
     * object
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>OrgUnit required parameters</h5>
     * <ul>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#name} - contains the name of USERS or
     * PERMS OrgUnit to be removed
     * </li>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#type} - contains the type of OU:
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#USER} or
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#PERM}
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse deleteOrg( FortRequest request );
    /**
     * This command creates a new orgunit child, and inserts it in the orgunit hierarchy as an immediate descendant of
     * the existing orgunit parent.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to
     * {@link org.apache.directory.fortress.core.model.OrgUnitRelationship} entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>OrgUnitRelationship required parameters</h5>
     * <ul>
     * <li>
     * parent - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#parent#name} -
     * contains the name of existing OrgUnit to be parent
     * </li>
     * <li>
     * parent - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#parent#type} -
     * contains the type of OrgUnit targeted: {@link org.apache.directory.fortress.core.model.OrgUnit.Type#USER}
     * or {@link org.apache.directory.fortress.core.model.OrgUnit.Type#PERM}
     * </li>
     * <li>
     * child - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#child#name} -
     * contains the name of new OrgUnit to be child
     * </li>
     * </ul>
     * <h5>optional parameters {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#child}</h5>
     * <ul>
     * <li>
     * child - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#child#description} - maps
     * to description attribute on organizationalUnit object class for new child
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     * The command is valid if and only if:
     * <ul>
     * <li>The child orgunit is not a member of the ORGUNITS data set.</li>
     * <li>The parent orgunit is a member of the ORGUNITS data set.</li>
     * </ul>
     * This method:
     * <ul>
     * <li>Adds new orgunit.</li>
     * <li>Assigns orgunit relationship between new child and pre-existing parent.</li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse addOrgDescendant( FortRequest request );
    /**
     * This command creates a new orgunit parent, and inserts it in the orgunit hierarchy as an immediate ascendant of
     * the existing child orgunit.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to
     * {@link org.apache.directory.fortress.core.model.OrgUnitRelationship} entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>OrgUnitRelationship required parameters</h5>
     * <ul>
     * <li>
     * child - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#child#name} - contains
     * the name of existing OrgUnit to be child
     * </li>
     * <li>
     * child - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#child#type} - contains
     * the type of OrgUnit targeted: {@link org.apache.directory.fortress.core.model.OrgUnit.Type#USER} or
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#PERM}
     * </li>
     * <li>
     * parent - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#parent#name} - contains
     * the name of new OrgUnit to be parent
     * </li>
     * </ul>
     * <h5>optional parameters {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#parent}</h5>
     * <ul>
     * <li>
     * parent - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#parent#description} -
     * maps to description attribute on organizationalUnit object class for new parent
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     * The command is valid if and only if:
     * <ul>
     * <li>The parent is not a member of the ORGUNITS data set.</li>
     * <li>The child is a member of the ORGUNITS data set.</li>
     * </ul>
     * This method:
     * <ul>
     * <li>Adds new orgunit.</li>
     * <li>Assigns orgunit relationship between new parent and pre-existing child.</li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse addOrgAscendant( FortRequest request );
    /**
     * This command establishes a new immediate inheritance relationship with parent orgunit &lt;&lt;-- child orgunit
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to
     * {@link org.apache.directory.fortress.core.model.OrgUnitRelationship} entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>OrgUnitRelationship required parameters</h5>
     * <ul>
     * <li>
     * parent - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#parent#name} - contains the
     * name of existing OrgUnit to be parent
     * </li>
     * <li>
     * parent - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#parent#type} - contains the
     * type of OrgUnit targeted: {@link org.apache.directory.fortress.core.model.OrgUnit.Type#USER} or
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#PERM}
     * </li>
     * <li>
     * child - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#child#name} - contains the
     * name of new OrgUnit to be child
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     * The command is valid if and only if:
     * <ul>
     * <li>The parent and child are members of the ORGUNITS data set.</li>
     * <li>The parent is not an immediate ascendant of child.</li>
     * <li>The child does not properly inherit parent (in order to avoid cycle creation).</li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse addOrgInheritance( FortRequest request );
    /**
     * This command deletes an existing immediate inheritance relationship parent &lt;&lt;-- child.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to
     * {@link org.apache.directory.fortress.core.model.OrgUnitRelationship} entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>OrgUnitRelationship required parameters</h5>
     * <ul>
     * <li>
     * parent - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#parent#name} - contains the
     * name of existing OrgUnit to remove as parent
     * </li>
     * <li>
     * parent - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#parent#type} - contains the
     * type of OrgUnit targeted: {@link org.apache.directory.fortress.core.model.OrgUnit.Type#USER} or
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#PERM}
     * </li>
     * <li>
     * child - {@code org.apache.directory.fortress.core.model.OrgUnitRelationship#child#name} - contains the
     * name of existing OrgUnit to remove as child
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     * The command is valid if and only if:
     * <ul>
     * <li>The orgunits parent and child are members of the ORGUNITS data set.</li>
     * <li>The parent is an immediate ascendant of child.</li>
     * <li>
     * The new inheritance relation is computed as the reflexive-transitive closure of the immediate inheritance
     * relation resulting after deleting the relationship parent &lt;&lt;-- child.
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}
     */
    FortResponse deleteOrgInheritance( FortRequest request );
    //------------ DelegatedReviewMgr -------------------------------------------------------------------------------------
    /**
     * Method reads an Admin Role entity from the admin role container in the directory.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.AdminRole}
     * entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>{@link org.apache.directory.fortress.core.model.AdminRole} required parameters</h5>
     * <ul>
     * <li>
     * {@link org.apache.directory.fortress.core.model.AdminRole#name} - contains the name of the AdminRole
     * being targeted for read
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
     * {@link org.apache.directory.fortress.core.model.AdminRole}
     */
    FortResponse readAdminRole( FortRequest request );
    /**
     * Method will return a list of type AdminRole matching all or part of
     * {@link org.apache.directory.fortress.core.model.AdminRole#name}.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#value} - contains all or some of the chars corresponding to adminRole entities stored
     * in the directory.
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
     * {@link org.apache.directory.fortress.core.model.AdminRole}
     */
    FortResponse findAdminRoles( FortRequest request );
    /**
     * This function returns the set of adminRoles assigned to a given user. The function is valid if and
     * only if the user is a member of the USERS data set.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.User} entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>{@link org.apache.directory.fortress.core.model.User} required parameters</h5>
     * <ul>
     * <li>
     * {@link org.apache.directory.fortress.core.model.User#userId} - contains the userId associated with
     * the User object targeted for search.
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
     * {@link org.apache.directory.fortress.core.model.UserAdminRole}
     */
    FortResponse assignedAdminRoles( FortRequest request );
    /**
     * This method returns the data set of all users who are assigned the given admin role. This searches the User data set
     * for the AdminRole relationship. This method does NOT search for hierarchical AdminRoles relationships.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.AdminRole}
     * entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>{@link org.apache.directory.fortress.core.model.AdminRole} required parameters</h5>
     * <ul>
     * <li>
     * {@link org.apache.directory.fortress.core.model.AdminRole#name} - contains the name to use for the
     * AdminRole targeted for search.
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
     * {@link org.apache.directory.fortress.core.model.User}
     */
    FortResponse assignedAdminUsers( FortRequest request );
    /**
     * This command reads an existing OrgUnit entity from the OrgUnit dataset. The OrgUnit can be either User or Perm and is
     * set by setting type attribute.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.OrgUnit}
     * entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>{@link org.apache.directory.fortress.core.model.OrgUnit} required parameters</h5>
     * <ul>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#name} - contains the name associated with the
     * OrgUnit object targeted for search.
     * </li>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#type} - contains the type of OU:
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#USER} or
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#PERM}
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
     * {@link org.apache.directory.fortress.core.model.OrgUnit}
     */
    FortResponse readOrg( FortRequest request );
    /**
     * This command searches existing OrgUnit entities in the OrgUnit dataset. The OrgUnit can be either User or Perm and is
     * set by setting type parameter on API.
     * <h4>required parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.OrgUnit}
     * entity
     * </li>
     * </ul>
     * <ul style="list-style-type:none">
     * <li>
     * <ul style="list-style-type:none">
     * <li>
     * <h5>{@link org.apache.directory.fortress.core.model.OrgUnit} required parameters</h5>
     * <ul>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#name} - contains some or all of the chars
     * associated with the OrgUnit objects targeted for search.
     * </li>
     * <li>
     * {@link org.apache.directory.fortress.core.model.OrgUnit#type} - contains the type of OU:
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#USER} or
     * {@link org.apache.directory.fortress.core.model.OrgUnit.Type#PERM}
     * </li>
     * </ul>
     * </li>
     * </ul>
     * </li>
     * </ul>
     * <h4>optional parameters</h4>
     * <ul>
     * <li>
     * {@link FortRequest#session} - contains a reference to administrative session and if included service will
     * enforce ARBAC constraints
     * </li>
     * </ul>
     *
     * @param request contains a reference to {@code FortRequest}
     * @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
     * {@link org.apache.directory.fortress.core.model.OrgUnit}
     */
    FortResponse searchOrg( FortRequest request );
//------------ DelegatedAccessMgr -------------------------------------------------------------------------------------
/**
* This function will determine if the user contains an AdminRole that is authorized assignment control over
* User-Role Assignment (URA). This adheres to the ARBAC02 functional specification for can-assign URA.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserRole}
* entity.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserRole} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#userId} - contains the userId targeted for
* operation
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#name} - contains the Role name targeted for
* operation.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#isAuthorized} boolean will be 'true' if
* User authorized, otherwise 'false'. Updated {@link FortResponse#session} will be included in response as well.
*/
FortResponse canAssign( FortRequest request );
/**
* This function will determine if the user contains an AdminRole that is authorized revoke control over
* User-Role Assignment (URA). This adheres to the ARBAC02 functional specification for can-revoke URA.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserRole}
* entity.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserRole} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#userId} - contains the userId targeted for
* operation
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#name} - contains the Role name targeted for
* operation.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#isAuthorized} boolean will be 'true' if User
* authorized, otherwise 'false'. Updated {@link FortResponse#session} will be included in response as well.
*/
FortResponse canDeassign( FortRequest request );
/**
* This function will determine if the user contains an AdminRole that is authorized assignment control over
* Permission-Role Assignment (PRA). This adheres to the ARBAC02 functional specification for can-assign-p PRA.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.RolePerm}
* entity.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.RolePerm} required parameters</h5>
* <ul>
* <li>
* {@code org.apache.directory.fortress.core.model.RolePerm#perm#objectName} - contains the permission
* object name targeted for operation
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RolePerm#perm#opName} - contains the permission operation
* name targeted
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RolePerm#role#name} - contains the Role name targeted for
* operation.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#isAuthorized} boolean will be 'true' if User
* authorized, otherwise 'false'. Updated {@link FortResponse#session} will be included in response as well.
*/
FortResponse canGrant( FortRequest request );
/**
* This function will determine if the user contains an AdminRole that is authorized revoke control over
* Permission-Role Assignment (PRA). This adheres to the ARBAC02 functional specification for can-revoke-p PRA.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.RolePerm}
* entity.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.RolePerm} required parameters</h5>
* <ul>
* <li>
* {@code org.apache.directory.fortress.core.model.RolePerm#perm#objectName} - contains the permission
* object name targeted for operation
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RolePerm#perm#opName} - contains the permission operation
* name targeted
* </li>
* <li>
* {@code org.apache.directory.fortress.core.model.RolePerm#role#name} - contains the Role name targeted
* for operation.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#isAuthorized} boolean will be 'true' if User
* authorized, otherwise 'false'. Updated {@link FortResponse#session} will be included in response as well.
*/
FortResponse canRevoke( FortRequest request );
/**
* This function returns a Boolean value meaning whether the subject of a given session is
* allowed or not to perform a given operation on a given object. The function is valid if and
* only if the session is a valid Fortress session, the object is a member of the OBJS data set,
* and the operation is a member of the OPS data set. The session's subject has the permission
* to perform the operation on that object if and only if that permission is assigned to (at least)
* one of the session's active roles. This implementation will verify the roles or userId correspond
* to the subject's active roles are registered in the object's access control list.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to admin
* {@link org.apache.directory.fortress.core.model.Permission} entity
* </li>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Permission} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#objName} - contains the name of existing
* admin object being targeted
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Permission#opName} - contains the name of existing admin
* permission operation
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#isAuthorized} boolean will be 'true' if User
* authorized, otherwise 'false'. Updated {@link FortResponse#session} will be included in response as well.
*/
FortResponse checkAdminAccess( FortRequest request );
/**
 * This function adds an AdminRole as an active role of a session whose owner is a given user.
 * <h4>required parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
 * {@link FortressServiceImpl#createSession} method before use in this service.
 * </li>
 * <li>
 * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserAdminRole}
 * entity.
 * </li>
 * </ul>
 * <ul style="list-style-type:none">
 * <li>
 * <ul style="list-style-type:none">
 * <li>
 * <h5>{@link org.apache.directory.fortress.core.model.UserAdminRole} required parameters</h5>
 * <ul>
 * <li>
 * {@link org.apache.directory.fortress.core.model.UserAdminRole#name} - contains the AdminRole name targeted for
 * activation into User's session
 * </li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * </ul>
 * The function is valid if and only if:
 * <ul>
 * <li>the user is a member of the USERS data set</li>
 * <li>the AdminRole is a member of the ADMINROLES data set</li>
 * <li>the session is a valid Fortress session</li>
 * <li>the user is authorized to that AdminRole</li>
 * <li>the session is owned by that user.</li>
 * </ul>
 *
 * @param request contains a reference to {@code FortRequest}
 * @return reference to {@code FortResponse}, Updated {@link FortResponse#session} will be included in response.
 */
FortResponse addActiveAdminRole( FortRequest request );
/**
 * This function deletes an AdminRole from the active role set of a session owned by a given user.
 * The function is valid if and only if the user is a member of the USERS data set, the
 * session object contains a valid Fortress session, the session is owned by the user,
 * and the AdminRole is an active role of that session.
 * <h4>required parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
 * {@link FortressServiceImpl#createSession} method before use in this service.
 * </li>
 * <li>
 * {@link FortRequest#entity} - contains a reference to
 * {@link org.apache.directory.fortress.core.model.UserAdminRole} entity.
 * </li>
 * </ul>
 * <ul style="list-style-type:none">
 * <li>
 * <ul style="list-style-type:none">
 * <li>
 * <h5>{@link org.apache.directory.fortress.core.model.UserAdminRole} required parameters</h5>
 * <ul>
 * <li>
 * {@link org.apache.directory.fortress.core.model.UserAdminRole#name} - contains the AdminRole name
 * targeted for removal from User's session
 * </li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * </ul>
 *
 * @param request contains a reference to {@code FortRequest}
 * @return reference to {@code FortResponse}, Updated {@link FortResponse#session} will be included in response.
 */
FortResponse dropActiveAdminRole( FortRequest request );
/**
* This function returns the active admin roles associated with a session. The function is valid if
* and only if the session is a valid Fortress session.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's RBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} containing a List of type
* {@link org.apache.directory.fortress.core.model.UserAdminRole}. Updated {@link FortResponse#session} will
* be included in response as well.
*/
FortResponse sessionAdminRoles( FortRequest request );
/**
* This function returns the ARBAC (administrative) permissions of the session, i.e., the admin permissions assigned
* to its authorized admin roles. The function is valid if and only if the session is a valid Fortress session.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's ARBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} containing a List of type
* {@link org.apache.directory.fortress.core.model.Permission}. Updated {@link FortResponse#session} will
* be included in response as well.
*/
FortResponse sessionAdminPermissions( FortRequest request );
/**
* This function returns the authorized ARBAC (administrative) roles associated with a session based on hierarchical
* relationships. The function is valid if
* and only if the session is a valid Fortress session.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to User's ARBAC session that is created by calling
* {@link FortressServiceImpl#createSession} method before use in this service.
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#valueSet} containing a Set of type String
* containing role names authorized for User. Updated {@link FortResponse#session} will be included in response as well.
*/
FortResponse authorizedSessionAdminRoles( FortRequest request );
//------------ PswdPolicyMgr ------------------------------------------------------------------------------------------
/**
* This method will add a new policy entry to the POLICIES data set. This command is valid
* if and only if the policy entry is not already present in the POLICIES data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PwPolicy}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.PwPolicy} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#name} - Maps to name attribute of pwdPolicy
* object class being added.
* </li>
* </ul>
* <h5>{@link org.apache.directory.fortress.core.model.PwPolicy} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#minAge} - This attribute holds the number of
* seconds that must elapse between modifications to the password. If this attribute is not present, 0
* seconds is assumed.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#maxAge} - This attribute holds the number of
* seconds after which a modified password will expire. If this attribute is not present, or if the value
* is 0 the password does not expire. If not 0, the value must be greater than or equal to the value of the
* pwdMinAge.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#inHistory} - This attribute specifies the maximum
* number of used passwords stored in the pwdHistory attribute. If this attribute is not present, or if the
* value is 0, used passwords are not stored in the pwdHistory attribute and thus may be reused.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#minLength} - When quality checking is enabled,
* this attribute holds the minimum number of characters that must be used in a password. If this attribute
* is not present, no minimum password length will be enforced. If the server is unable to check the length
* (due to a hashed password or otherwise), the server will, depending on the value of the pwdCheckQuality
* attribute, either accept the password without checking it ('0' or '1') or refuse it ('2').
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#expireWarning} - This attribute specifies the
* maximum number of seconds before a password is due to expire that expiration warning messages will be
* returned to an authenticating user. If this attribute is not present, or if the value is 0 no warnings
* will be returned. If not 0, the value must be smaller than the value of the pwdMaxAge attribute.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#graceLoginLimit} - This attribute specifies the
* number of times an expired password can be used to authenticate. If this attribute is not present or if
* the value is 0, authentication will fail.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#lockout} - This attribute indicates, when its
* value is "TRUE", that the password may not be used to authenticate after a specified number of
* consecutive failed bind attempts. The maximum number of consecutive failed bind attempts is specified
* in pwdMaxFailure. If this attribute is not present, or if the value is "FALSE", the password may be used
* to authenticate when the number of failed bind attempts has been reached.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#lockoutDuration} - This attribute holds the
* number of seconds that the password cannot be used to authenticate due to too many failed bind attempts.
* If this attribute is not present, or if the value is 0 the password cannot be used to authenticate until
* reset by a password administrator.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#maxFailure} - This attribute specifies the number
* of consecutive failed bind attempts after which the password may not be used to authenticate.<br>
* If this attribute is not present, or if the value is 0, this policy is not checked, and the value of
* pwdLockout will be ignored.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#failureCountInterval} - This attribute holds the
* number of seconds after which the password failures are purged from the failure counter, even though no
* successful authentication occurred. If this attribute is not present, or if its value is 0, the failure
* counter is only reset by a successful authentication.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#mustChange} - This attribute specifies with a
* value of "TRUE" that users must change their passwords when they first bind to the directory after a
* password is set or reset by a password administrator. If this attribute is not present, or if the value
* is "FALSE", users are not required to change their password upon binding after the password
* administrator sets or resets the password. This attribute is not set due to any actions specified by
* this document, it is typically set by a password administrator after resetting a user's password.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#allowUserChange} - This attribute indicates
* whether users can change their own passwords, although the change operation is still subject to access
* control. If this attribute is not present, a value of "TRUE" is assumed. This attribute is intended
* to be used in the absence of an access control mechanism.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#safeModify} - This attribute specifies whether
* or not the existing password must be sent along with the new password when being changed. If this
* attribute is not present, a "FALSE" value is assumed.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#checkQuality} - This attribute indicates how
* the password quality will be verified while being modified or added. If this attribute is not present,
* or if the value is '0', quality checking will not be enforced. A value of '1' indicates that the server
* will check the quality, and if the server is unable to check it (due to a hashed password or other
* reasons) it will be accepted. A value of '2' indicates that the server will check the quality, and if
* the server is unable to verify it, it will return an error refusing the password.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#attribute} - This holds the name of the attribute
* to which the password policy is applied. For example, the password policy may be applied to the
* userPassword attribute
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addPolicy( FortRequest request );
/**
 * This method will update an existing policy entry in the POLICIES data set. This command is valid
 * if and only if the policy entry is already present in the POLICIES data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PwPolicy}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.PwPolicy} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#name} - Maps to name attribute of pwdPolicy
* object class being updated.
* </li>
* </ul>
* <h5>{@link org.apache.directory.fortress.core.model.PwPolicy} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#minAge} - This attribute holds the number of
* seconds that must elapse between modifications to the password. If this attribute is not present, 0
* seconds is assumed.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#maxAge} - This attribute holds the number of
* seconds after which a modified password will expire. If this attribute is not present, or if the value
* is 0 the password does not expire. If not 0, the value must be greater than or equal to the value of the
* pwdMinAge.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#inHistory} - This attribute specifies the
* maximum number of used passwords stored in the pwdHistory attribute. If this attribute is not present, or
* if the value is 0, used passwords are not stored in the pwdHistory attribute and thus may be reused.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#minLength} - When quality checking is enabled,
* this attribute holds the minimum number of characters that must be used in a password. If this attribute
* is not present, no minimum password length will be enforced. If the server is unable to check the length
* (due to a hashed password or otherwise), the server will, depending on the value of the pwdCheckQuality
* attribute, either accept the password without checking it ('0' or '1') or refuse it ('2').
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#expireWarning} - This attribute specifies the
* maximum number of seconds before a password is due to expire that expiration warning messages will be
* returned to an authenticating user. If this attribute is not present, or if the value is 0 no warnings
* will be returned. If not 0, the value must be smaller than the value of the pwdMaxAge attribute.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#graceLoginLimit} - This attribute specifies
* the number of times an expired password can be used to authenticate. If this attribute is not present
* or if the value is 0, authentication will fail.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#lockout} - This attribute indicates, when its
* value is "TRUE", that the password may not be used to authenticate after a specified number of
* consecutive failed bind attempts. The maximum number of consecutive failed bind attempts is specified
* in pwdMaxFailure. If this attribute is not present, or if the value is "FALSE", the password may be
* used to authenticate when the number of failed bind attempts has been reached.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#lockoutDuration} - This attribute holds the
* number of seconds that the password cannot be used to authenticate due to too many failed bind attempts.
* If this attribute is not present, or if the value is 0 the password cannot be used to authenticate until
* reset by a password administrator.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#maxFailure} - This attribute specifies the number
* of consecutive failed bind attempts after which the password may not be used to authenticate.
* If this attribute is not present, or if the value is 0, this policy is not checked, and the value of
* pwdLockout will be ignored.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#failureCountInterval} - This attribute holds the
* number of seconds after which the password failures are purged from the failure counter, even though no
* successful authentication occurred. If this attribute is not present, or if its value is 0, the failure
* counter is only reset by a successful authentication.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#mustChange} - This attribute specifies with a
* value of "TRUE" that users must change their passwords when they first bind to the directory after a
* password is set or reset by a password administrator. If this attribute is not present, or if the value
* is "FALSE", users are not required to change their password upon binding after the password administrator
* sets or resets the password. This attribute is not set due to any actions specified by this document, it
* is typically set by a password administrator after resetting a user's password.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#allowUserChange} - This attribute indicates
* whether users can change their own passwords, although the change operation is still subject to access
* control. If this attribute is not present, a value of "TRUE" is assumed. This attribute is intended to
* be used in the absence of an access control mechanism.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#safeModify} - This attribute specifies whether
* or not the existing password must be sent along with the new password when being changed. If this
* attribute is not present, a "FALSE" value is assumed.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#checkQuality} - This attribute indicates how
* the password quality will be verified while being modified or added. If this attribute is not present,
* or if the value is '0', quality checking will not be enforced. A value of '1' indicates that the server
* will check the quality, and if the server is unable to check it (due to a hashed password or other
* reasons) it will be accepted. A value of '2' indicates that the server will check the quality, and if
* the server is unable to verify it, it will return an error refusing the password.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#attribute} - This holds the name of the attribute
* to which the password policy is applied. For example, the password policy may be applied to the
* userPassword attribute
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse updatePolicy( FortRequest request );
/**
 * This method will delete an existing policy entry from the POLICIES data set. This command is valid
 * if and only if the policy entry is already present in the POLICIES data set. Existing users that
 * are assigned this policy will be removed from association.
 * <h4>required parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PwPolicy}
 * object
 * </li>
 * </ul>
 * <ul style="list-style-type:none">
 * <li>
 * <ul style="list-style-type:none">
 * <li>
 * <h5>{@link org.apache.directory.fortress.core.model.PwPolicy} required parameters</h5>
 * <ul>
 * <li>
 * {@link org.apache.directory.fortress.core.model.PwPolicy#name} - Maps to name attribute of pwdPolicy
 * object class being removed.
 * </li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * </ul>
 * <h4>optional parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#session} - contains a reference to administrative session and if included service will
 * enforce ARBAC constraints
 * </li>
 * </ul>
 *
 * @param request contains a reference to {@code FortRequest}
 * @return reference to {@code FortResponse}
 */
FortResponse deletePolicy( FortRequest request );
/**
* This method will return the password policy entity to the caller. This command is valid
* if and only if the policy entry is present in the POLICIES data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PwPolicy}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.PwPolicy} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#name} - contains the name of existing object
* being targeted
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.PwPolicy}
*/
FortResponse readPolicy( FortRequest request );
/**
 * This method will return a list of all password policy entities that match a particular search string.
 * This command will return an empty list if no matching entries are found.
 * <h4>required parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PwPolicy}
 * entity
 * </li>
 * </ul>
 * <ul style="list-style-type:none">
 * <li>
 * <ul style="list-style-type:none">
 * <li>
 * <h5>{@link org.apache.directory.fortress.core.model.PwPolicy} required parameters</h5>
 * <ul>
 * <li>
 * {@link org.apache.directory.fortress.core.model.PwPolicy#name} - contains the name of existing object
 * being targeted
 * </li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * </ul>
 * <h4>optional parameters</h4>
 * <ul>
 * <li>
 * {@link FortRequest#session} - contains a reference to administrative session and if included service will
 * enforce ARBAC constraints
 * </li>
 * </ul>
 *
 * @param request contains a reference to {@code FortRequest}
 * @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to List of type
 * {@link org.apache.directory.fortress.core.model.PwPolicy}
 */
FortResponse searchPolicy( FortRequest request );
/**
* This method will associate a user entity with a password policy entity. This function is valid
* if and only if the user is a member of the USERS data set and the policyName refers to a
* policy that is a member of the PWPOLICIES data set.
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the userId targeted for update</li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.PwPolicy}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.PwPolicy} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.PwPolicy#name} - Maps to name attribute of pwdPolicy
* object class targeted for assignment.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse updateUserPolicy( FortRequest request );
/**
* This method will remove the pw policy assignment from a user entity. This function is valid
* if and only if the user is a member of the USERS data set and the policy attribute is assigned.
* Removal of pw policy assignment will revert the user's policy to use the global default for OpenLDAP
* instance that contains user.
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the userId targeted for removal of policy assignment</li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service
* will enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deleteUserPolicy( FortRequest request );
    //------------ AuditMgr -----------------------------------------------------------------------------------------------
/**
* This method returns a list of authentication audit events for a particular user
* {@link org.apache.directory.fortress.core.model.UserAudit#userId}, and given timestamp field
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate}.<BR>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserAudit}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserAudit} optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.UserAudit#userId} - contains the target userId</li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate} - contains the date in which to
* begin search
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#failedOnly} - if set to 'true', return only
* failed authorization events
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to List of type
* {@link org.apache.directory.fortress.core.model.Bind}
*/
FortResponse searchBinds( FortRequest request );
/**
* This method returns a list of authorization events for a particular user
* {@link org.apache.directory.fortress.core.model.UserAudit#userId} and given timestamp field
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate}.<BR>
* Method also can discriminate between all events or failed only by setting
* {@link org.apache.directory.fortress.core.model.UserAudit#failedOnly}.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserAudit}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserAudit} optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.UserAudit#userId} - contains the target userId</li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate} - contains the date in which to
* begin search
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#failedOnly} - if set to 'true', return only
* failed authorization events
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to List of type
* {@link org.apache.directory.fortress.core.model.AuthZ}
*/
FortResponse getUserAuthZs( FortRequest request );
/**
* This method returns a list of authorization events for a particular user
* {@link org.apache.directory.fortress.core.model.UserAudit#userId}, object
* {@link org.apache.directory.fortress.core.model.UserAudit#objName}, and given timestamp field
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate}.<BR>
* Method also can discriminate between all events or failed only by setting flag
     * {@link org.apache.directory.fortress.core.model.UserAudit#failedOnly}.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserAudit}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserAudit} optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.UserAudit#userId} - contains the target userId</li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#objName} - contains the object (authorization
* resource) name
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to List of type
* {@link org.apache.directory.fortress.core.model.AuthZ}
*/
FortResponse searchAuthZs( FortRequest request );
/**
* This method returns a list of sessions created for a given user
* {@link org.apache.directory.fortress.core.model.UserAudit#userId}, and timestamp
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate}.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserAudit}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserAudit} required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.UserAudit#userId} - contains the target userId</li>
* </ul>
* <h5>{@link org.apache.directory.fortress.core.model.UserAudit} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate} - contains the date in which to
* begin search
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to List of type
* {@link org.apache.directory.fortress.core.model.Mod}
*/
FortResponse searchUserSessions( FortRequest request );
/**
* This method returns a list of admin operations events for a particular entity
* {@link org.apache.directory.fortress.core.model.UserAudit#dn}, object
* {@link org.apache.directory.fortress.core.model.UserAudit#objName} and timestamp
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate}. If the internal
* userId {@link org.apache.directory.fortress.core.model.UserAudit#internalUserId} is set it will limit search by that
* field.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserAudit}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserAudit} optional parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#dn} - contains the LDAP distinguished name for
* the updated object. For example if caller wants to find out what changes were made to John Doe's user
* object this would be 'uid=jdoe,ou=People,dc=example,dc=com'
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#objName} - contains the object (authorization
* resource) name corresponding to the event. For example if caller wants to return events where User object
* was modified, this would be 'updateUser'
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#internalUserId} - maps to the internalUserId
* of user who changed the record in LDAP. This maps to
* {@link org.apache.directory.fortress.core.model.User#internalId}.
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate} - contains the date in which to
* begin search
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#endDate} - contains the date in which to end
* search
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to List of type
* {@link org.apache.directory.fortress.core.model.Mod}
*/
FortResponse searchAdminMods( FortRequest request );
/**
* This method returns a list of failed authentication attempts on behalf of an invalid identity
* {@link org.apache.directory.fortress.core.model.UserAudit#userId}, and given timestamp
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate}. If the
* {@link org.apache.directory.fortress.core.model.UserAudit#failedOnly} is true it will return only authentication
* attempts made with invalid userId. This event represents either User incorrectly entering userId during signon or
* possible fraudulent logon attempt by hostile agent.
* <p>
* This event is generated when Fortress looks up User record prior to LDAP bind operation.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserAudit}
* entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.UserAudit} optional parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.UserAudit#userId} - contains the target userId</li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#beginDate} - contains the date in which to
* begin search
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserAudit#failedOnly} - if set to 'true', return only
* failed authorization events
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to List of type
* {@link org.apache.directory.fortress.core.model.AuthZ}
*/
FortResponse searchInvalidUsers( FortRequest request );
//------------ ConfigMgr ----------------------------------------------------------------------------------------------
/**
* Create a new configuration node with given name and properties. The name is required. If node already exists,
* a {@link org.apache.directory.fortress.core.SecurityException} with error
* {@link org.apache.directory.fortress.core.GlobalErrIds#FT_CONFIG_ALREADY_EXISTS} will be thrown.
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the name to call the new configuration node</li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Props}
* object
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addConfig( FortRequest request );
/**
* Update existing configuration node with additional properties, or, replace existing properties. The name is
* required. If node does not exist, a {@link org.apache.directory.fortress.core.SecurityException} with error
* {@link org.apache.directory.fortress.core.GlobalErrIds#FT_CONFIG_NOT_FOUND} will be thrown.
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the name of existing configuration node targeted for update</li>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Props}
* object
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse updateConfig( FortRequest request );
/**
* This service will either completely remove named configuration node from the directory or specified
* properties depending on the arguments passed in.
* <p style="font-size:1.5em; color:red;">
* If properties are not passed in along with the name, this method will remove the configuration node completely from
* directory.<br>
* Care should be taken during execution to ensure target name is correct and permanent removal of all parameters located
* there is intended. There is no 'undo' for this operation.
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the name of existing configuration node targeted for removal</li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Props}
* object. If this argument is passed service will remove only the properties listed
* </li>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will enforce
* ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deleteConfig( FortRequest request );
/**
* Read an existing configuration node with given name and return to caller. The name is required. If node doesn't
* exist, a {@link org.apache.directory.fortress.core.SecurityException} with error
* {@link org.apache.directory.fortress.core.GlobalErrIds#FT_CONFIG_NOT_FOUND} will be thrown.
* <h4>required parameters</h4>
* <ul>
* <li>{@link FortRequest#value} - contains the name to call the new configuration node</li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to List of type
* {@link org.apache.directory.fortress.core.model.Props}
*/
FortResponse readConfig( FortRequest request );
//----------------------- GroupMgr -----------------------------------------
/**
* This command creates a new group. The command is valid only if the new group is
* not already a member of the GROUPS data set. The GROUP data set is updated. The new group
* does not own any session at the time of its creation.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Group}
* object
* </li>
* </ul>
*
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Group required parameters</h5>
* </li>
* <li>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.Group#name} - group name </li>
* <li>{@link org.apache.directory.fortress.core.model.Group#type} - either ROLE or USER group </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Group#protocol} - protocol
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Group#members} - multi-occurring contains the dn(s)
* of Group members, either Roles or Users
* </li>
* </ul>
* </li>
* </ul>
* </li>
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Group optional parameters</h5>
* </li>
* </ul>
* </li>
* </ul>
* optional parameters
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addGroup( FortRequest request );
/**
* Method returns matching Group entity that is contained within the GROUPS container in the directory.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Group} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Group} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.User#name} - contains the name associated with the
* Group object targeted for read.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entity} contains a reference to
* {@link org.apache.directory.fortress.core.model.Group}
*/
FortResponse readGroup( FortRequest request );
/**
* This command deletes an existing Group from the database. The command is valid
* if and only if the Group to be deleted is a member of the GROUPS data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Group}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>User required parameters</h5>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.Group#name} - name of the Group
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deleteGroup( FortRequest request );
/**
* This command update an existing Group.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Group}
* object
* </li>
* </ul>
*
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Group required parameters</h5>
* </li>
* <li>
* <ul>
* <li>{@link org.apache.directory.fortress.core.model.Group#name} - group name </li>
* </ul>
* </li>
* </ul>
* </li>
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>Group optional parameters</h5>
* </li>
* <li>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Group#members} - multi-occurring contains the dn(s)
* of Group members, either Roles or Users
* </li>
* <li>{@link org.apache.directory.fortress.core.model.Group#type} - either ROLE or USER group </li>
* <li>
* {@link org.apache.directory.fortress.core.model.Group#protocol} - protocol
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* optional parameters
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse updateGroup( FortRequest request );
/**
* This method returns the data set of all groups who are assigned the given role. This searches the Groups data set
* for Role relationship. This method does NOT search for hierarchical RBAC Roles relationships.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Role} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Role} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Role#name} - contains the name to use for the Role
* targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of type
* {@link org.apache.directory.fortress.core.model.Group}
*/
FortResponse assignedGroups( FortRequest request );
/**
* This function returns the set of roles assigned to a given group. The function is valid if and
* only if the group is a member of the USERS data set.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.Group} entity
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>{@link org.apache.directory.fortress.core.model.Group} required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.Group#name} - contains the name associated with
* the Group object targeted for search.
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
* <h4>optional parameters</h4>
* <ul>
* <li>
* {@link FortRequest#session} - contains a reference to administrative session and if included service will
* enforce ARBAC constraints
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}, {@link FortResponse#entities} contains a reference to a List of
* type {@link org.apache.directory.fortress.core.model.UserRole}
*/
FortResponse assignedGroupRoles( FortRequest request );
/**
* This command assigns a group to a role.
* <ul>
* <li> The command is valid if and only if:
* <li> The group is a member of the GROUPS data set
* <li> The role is a member of the ROLES data set
* <li> The group is not already assigned to the role
* </ul>
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserRole}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>UserRole required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#name} - contains the name for already existing
* Role to be assigned
* </li>
* <li>{@link org.apache.directory.fortress.core.model.UserRole#userId} - contains the group name for
* existing Group</li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse assignGroup(FortRequest request);
/**
* This command deletes the assignment of the User from the Role entities. The command is
* valid if and only if the group is a member of the GROUPS data set, the role is a member of
* the ROLES data set, the group is assigned to the role and group have at least one role assigned.
* Any sessions that currently have this role activated will not be effected.
* Successful completion includes:
* Group entity in GROUP data set has role assignment removed.
* <h4>required parameters</h4>
* <ul>
* <li>
* {@link FortRequest#entity} - contains a reference to {@link org.apache.directory.fortress.core.model.UserRole}
* object
* </li>
* </ul>
* <ul style="list-style-type:none">
* <li>
* <ul style="list-style-type:none">
* <li>
* <h5>UserRole required parameters</h5>
* <ul>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#name} - contains the name for already existing
* Role to be deassigned
* </li>
* <li>
* {@link org.apache.directory.fortress.core.model.UserRole#userId} - contains the group name for existing
* Group
* </li>
* </ul>
* </li>
* </ul>
* </li>
* </ul>
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deassignGroup(FortRequest request);
/**
* This method adds a roleConstraint (ftRC) to the user ldap entry.
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addRoleConstraint( FortRequest request );
/**
     * This method removes a roleConstraint (ftRC) from the user ldap entry.
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse removeRoleConstraint( FortRequest request );
/**
     * This method removes a roleConstraint (ftRC) from the user ldap entry, referenced by its id
     * (the by-id variant of removeRoleConstraint).
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse removeRoleConstraintWid( FortRequest request );
/**
     * This method will create a new permission attribute set object that resides under the
* {@code ou=Constraints,ou=RBAC,dc=yourHostName,dc=com} container in directory information tree.
* The attribute set may contain 0 to many {@link org.apache.directory.fortress.core.model.PermissionAttribute}
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addPermissionAttributeSet( FortRequest request );
/**
* This method will delete a permission attribute set object.
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse deletePermissionAttributeSet( FortRequest request );
/**
* This method adds a permission attribute (ftPA) to a permission attribute set.
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse addPermissionAttributeToSet( FortRequest request );
/**
* This method updates a permission attribute (ftPA) on a permission attribute set.
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse updatePermissionAttributeInSet( FortRequest request );
/**
* This method removed a permission attribute (ftPA) from an existing permission attribute set.
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse removePermissionAttributeFromSet( FortRequest request );
/**
* Find all of the role constraints for the given user and permission attribute set.
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse findRoleConstraints( FortRequest request );
/**
* This function returns all the permission attribute set (which contain 0 to many permission attributes)
* for a given role. The function is valid if and only if the role is a member of the ROLES data
* set.
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse readPermAttributeSet( FortRequest request );
/**
* This function returns all the permission attribute set (which contain 0 to many permission attributes)
* for a given role. The function is valid if and only if the role is a member of the ROLES data
* set.
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
public FortResponse rolePermissionAttributeSets( FortRequest request );
/**
* If matching jax-rs service was not found, the client will be returned a response with an error generated by this method.
*
* @param request contains a reference to {@code FortRequest}
* @return reference to {@code FortResponse}
*/
FortResponse invalid( FortRequest request );
}
| 1,214 |
0 | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress | Create_ds/directory-fortress-enmasse/src/main/java/org/apache/directory/fortress/rest/DelegatedAccessMgrImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.rest;
import org.apache.directory.fortress.core.DelAccessMgr;
import org.apache.directory.fortress.core.DelAccessMgrFactory;
import org.apache.directory.fortress.core.SecurityException;
import org.apache.directory.fortress.core.model.RolePerm;
import org.apache.directory.fortress.core.model.UserAdminRole;
import org.apache.directory.fortress.core.model.Permission;
import org.apache.directory.fortress.core.model.Role;
import org.apache.directory.fortress.core.model.Session;
import org.apache.directory.fortress.core.model.User;
import org.apache.directory.fortress.core.model.UserRole;
import org.apache.directory.fortress.core.model.FortRequest;
import org.apache.directory.fortress.core.model.FortResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Set;
/**
* Utility for Fortress Rest Server. This class is thread safe.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
class DelegatedAccessMgrImpl extends AbstractMgrImpl
{
/** A logger for this class */
private static final Logger LOG = LoggerFactory.getLogger( DelegatedAccessMgrImpl.class.getName() );
/**
* ************************************************************************************************************************************
* BEGIN DELEGATEDACCESSMGR
* **************************************************************************************************************************************
*/
/* No qualifier */ FortResponse canAssign(FortRequest request)
{
FortResponse response = createResponse();
try
{
UserRole uRole = (UserRole) request.getEntity();
Session session = request.getSession();
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
boolean result = accessMgr.canAssign( session, new User( uRole.getUserId() ), new Role( uRole.getName() ) );
response.setSession( session );
response.setAuthorized( result );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse canDeassign(FortRequest request)
{
FortResponse response = createResponse();
try
{
UserRole uRole = (UserRole) request.getEntity();
Session session = request.getSession();
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
boolean result = accessMgr.canDeassign( session, new User( uRole.getUserId() ), new Role( uRole.getName() ) );
response.setSession( session );
response.setAuthorized( result );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse canGrant(FortRequest request)
{
FortResponse response = createResponse();
try
{
RolePerm context = (RolePerm) request.getEntity();
Session session = request.getSession();
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
boolean result = accessMgr.canGrant( session, new Role( context.getRole().getName() ), context.getPerm() );
response.setSession( session );
response.setAuthorized( result );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse canRevoke(FortRequest request)
{
FortResponse response = createResponse();
try
{
RolePerm context = (RolePerm) request.getEntity();
Session session = request.getSession();
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
boolean result = accessMgr.canRevoke( session, new Role( context.getRole().getName() ), context.getPerm() );
response.setSession( session );
response.setAuthorized( result );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
public FortResponse checkAdminAccess(FortRequest request)
{
FortResponse response = createResponse();
try
{
Permission perm = (Permission) request.getEntity();
Session session = request.getSession();
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
perm.setAdmin( true );
boolean result = accessMgr.checkAccess( session, perm );
response.setSession( session );
response.setAuthorized( result );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse addActiveAdminRole(FortRequest request)
{
FortResponse response = createResponse();
try
{
UserAdminRole uAdminRole = (UserAdminRole) request.getEntity();
Session session = request.getSession();
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
accessMgr.addActiveRole( session, uAdminRole );
response.setSession( session );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse dropActiveAdminRole(FortRequest request)
{
FortResponse response = createResponse();
try
{
UserAdminRole uAdminRole = (UserAdminRole) request.getEntity();
Session session = request.getSession();
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
accessMgr.dropActiveRole( session, uAdminRole );
response.setSession( session );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse sessionAdminRoles(FortRequest request)
{
FortResponse response = createResponse();
try
{
Session session = request.getSession();
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
List<UserAdminRole> roles = accessMgr.sessionAdminRoles( session );
response.setEntities( roles );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse sessionAdminPermissions(FortRequest request)
{
FortResponse response = createResponse();
try
{
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
Session session = request.getSession();
List<Permission> perms = accessMgr.sessionPermissions( session );
response.setSession( session );
response.setEntities( perms );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
/* No qualifier */ FortResponse authorizedSessionRoles(FortRequest request)
{
FortResponse response = createResponse();
try
{
DelAccessMgr accessMgr = DelAccessMgrFactory.createInstance( request.getContextId() );
Session session = request.getSession();
Set<String> roles = accessMgr.authorizedAdminRoles( session );
response.setValueSet( roles );
response.setSession( session );
}
catch ( SecurityException se )
{
createError( response, LOG, se );
}
return response;
}
}
| 1,215 |
0 | Create_ds | Create_ds/gobblin/FlowTriggerHandlerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.service.modules.orchestration;
import java.util.Properties;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.service.modules.scheduler.GobblinServiceJobScheduler;
import org.junit.Assert;
import org.quartz.JobDataMap;
import org.testng.annotations.Test;
public class FlowTriggerHandlerTest {
  String newCronExpression = "0 0 0 ? * * 2024";
  long newEventToRevisit = 123L;
  long newEventToTrigger = 456L;

  /**
   * Provides an input with all three values (cronExpression, reminderTimestamp, originalEventTime) set in the map
   * Properties and checks that they are updated properly
   */
  @Test
  public void testUpdatePropsInJobDataMap() {
    JobDataMap oldJobDataMap = new JobDataMap();
    Properties originalProperties = new Properties();
    originalProperties.setProperty(ConfigurationKeys.JOB_SCHEDULE_KEY, "0 0 0 ? * * 2050");
    originalProperties.setProperty(ConfigurationKeys.SCHEDULER_EVENT_TO_REVISIT_TIMESTAMP_MILLIS_KEY, "0");
    originalProperties.setProperty(ConfigurationKeys.SCHEDULER_EVENT_TO_TRIGGER_TIMESTAMP_MILLIS_KEY, "1");
    oldJobDataMap.put(GobblinServiceJobScheduler.PROPERTIES_KEY, originalProperties);
    JobDataMap newJobDataMap = FlowTriggerHandler.updatePropsInJobDataMap(oldJobDataMap, newCronExpression,
        newEventToRevisit, newEventToTrigger);
    assertUpdatedProperties(newJobDataMap);
  }

  /**
   * Provides input with an empty Properties object and checks that the three values in question are set.
   */
  @Test
  public void testSetPropsInJobDataMap() {
    JobDataMap oldJobDataMap = new JobDataMap();
    Properties originalProperties = new Properties();
    oldJobDataMap.put(GobblinServiceJobScheduler.PROPERTIES_KEY, originalProperties);
    JobDataMap newJobDataMap = FlowTriggerHandler.updatePropsInJobDataMap(oldJobDataMap, newCronExpression,
        newEventToRevisit, newEventToTrigger);
    assertUpdatedProperties(newJobDataMap);
  }

  /**
   * Shared verification: the returned JobDataMap's Properties must carry the new cron expression and both
   * event timestamps. (NOTE(review): this TestNG class uses JUnit's {@code Assert}, whose argument order is
   * (expected, actual) — kept as-is since the imports are outside this class.)
   */
  private void assertUpdatedProperties(JobDataMap newJobDataMap) {
    Properties newProperties = (Properties) newJobDataMap.get(GobblinServiceJobScheduler.PROPERTIES_KEY);
    Assert.assertEquals(newCronExpression, newProperties.getProperty(ConfigurationKeys.JOB_SCHEDULE_KEY));
    Assert.assertEquals(String.valueOf(newEventToRevisit),
        newProperties.getProperty(ConfigurationKeys.SCHEDULER_EVENT_TO_REVISIT_TIMESTAMP_MILLIS_KEY));
    Assert.assertEquals(String.valueOf(newEventToTrigger),
        newProperties.getProperty(ConfigurationKeys.SCHEDULER_EVENT_TO_TRIGGER_TIMESTAMP_MILLIS_KEY));
  }
}
| 1,216 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/hadoop | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/hadoop/fs/FileSystemTestUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
public class FileSystemTestUtils {
  /**
   * Registers {@code fs} in Hadoop's {@link FileSystem} cache so later lookups for {@code uri} with
   * {@code conf} return it (useful for injecting mock file systems in tests).
   * This class lives in the {@code org.apache.hadoop.fs} package solely to reach the
   * package-private {@code FileSystem.addFileSystemForTesting} method.
   *
   * @param uri the file-system URI to register under
   * @param conf the Hadoop configuration used as part of the cache key
   * @param fs the (possibly mock) file-system instance to serve for this URI
   * @throws IOException if the underlying registration fails
   */
  public static void addFileSystemForTest(URI uri, Configuration conf, FileSystem fs) throws IOException {
    FileSystem.addFileSystemForTesting(uri, conf, fs);
  }
}
| 1,217 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/test/TestDataPublisher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test;
import java.io.IOException;
import java.util.Collection;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.publisher.DataPublisher;
import org.apache.gobblin.publisher.SingleTaskDataPublisher;
/**
* An implementation of {@link DataPublisher} for integration test.
*
* <p>
* This is a dummy implementation that exists purely to make
* integration test work.
* </p>
*/
public class TestDataPublisher extends SingleTaskDataPublisher {
  /** Passes the state through to {@link SingleTaskDataPublisher}; no additional setup. */
  public TestDataPublisher(State state) {
    super(state);
  }
  /** No-op: this stub needs no initialization. */
  @Override
  public void initialize() throws IOException {
    // Do nothing
  }
  /** No-op: this stub holds no resources to release. */
  @Override
  public void close() throws IOException {
    // Do nothing
  }
  /** No-op: data "publishing" is intentionally skipped in integration tests. */
  @Override
  public void publishData(Collection<? extends WorkUnitState> tasks) throws IOException {
    // Do nothing
  }
  /** No-op: metadata "publishing" is intentionally skipped in integration tests. */
  @Override
  public void publishMetadata(Collection<? extends WorkUnitState> tasks) throws IOException {
    // Do nothing
  }
  /** No-op single-task variant; see {@link #publishData(Collection)}. */
  @Override
  public void publishData(WorkUnitState state) throws IOException {
    // Do nothing
  }
  /** No-op single-task variant; see {@link #publishMetadata(Collection)}. */
  @Override
  public void publishMetadata(WorkUnitState state) throws IOException {
    // Do nothing
  }
}
| 1,218 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/test/TestConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test;
import java.lang.reflect.Type;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.reflect.TypeToken;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.SingleRecordIterable;
import org.apache.gobblin.converter.ToAvroConverterBase;
/**
* An extension to {@link ToAvroConverterBase} for integration test.
*
* @author Yinan Li
*/
public class TestConverter extends ToAvroConverterBase<String, String> {
  private static final Gson GSON = new Gson();
  // Each input record is expected to be a flat JSON object of key-value pairs.
  private static final Type FIELD_ENTRY_TYPE = new TypeToken<Map<String, Object>>() {}.getType();

  /** Parses the input schema string as an Avro {@link Schema}. */
  @Override
  public Schema convertSchema(String schema, WorkUnitState workUnit) {
    Schema.Parser parser = new Schema.Parser();
    return parser.parse(schema);
  }

  /** Deserializes the JSON record and copies every field into a new Avro {@link GenericRecord}. */
  @Override
  public Iterable<GenericRecord> convertRecord(Schema schema, String inputRecord, WorkUnitState workUnit) {
    JsonElement parsed = GSON.fromJson(inputRecord, JsonElement.class);
    Map<String, Object> fieldMap = GSON.fromJson(parsed, FIELD_ENTRY_TYPE);
    GenericRecord avroRecord = new GenericData.Record(schema);
    fieldMap.forEach(avroRecord::put);
    return new SingleRecordIterable<GenericRecord>(avroRecord);
  }
}
| 1,219 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/test/TestExtractor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test;
import java.io.IOException;
import java.net.URI;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.mapred.FsInput;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.extractor.Extractor;
/**
* An implementation of {@link Extractor} for integration test.
*
* @author Yinan Li
*/
public class TestExtractor implements Extractor<String, String> {
  private static final Logger LOG = LoggerFactory.getLogger(TestExtractor.class);
  // Avro schema describing the test records read from the source file.
  private static final String AVRO_SCHEMA = "{\"namespace\": \"example.avro\",\n" +
      " \"type\": \"record\",\n" +
      " \"name\": \"User\",\n" +
      " \"fields\": [\n" +
      " {\"name\": \"name\", \"type\": \"string\"},\n" +
      " {\"name\": \"favorite_number\", \"type\": \"int\"},\n" +
      " {\"name\": \"favorite_color\", \"type\": \"string\"}\n" +
      " ]\n" +
      "}";
  public static final int TOTAL_RECORDS = 1000;

  // Null when the source file could not be opened; readRecord() then reports end-of-stream.
  private DataFileReader<GenericRecord> dataFileReader;

  /** Opens an Avro reader over the source file named in the work unit's {@code source.file} property. */
  public TestExtractor(WorkUnitState workUnitState) {
    Schema avroSchema = new Schema.Parser().parse(AVRO_SCHEMA);
    Path sourceFile = new Path(workUnitState.getWorkunit().getProp(TestSource.SOURCE_FILE_KEY));
    LOG.info("Reading from source file " + sourceFile);
    DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(avroSchema);
    try {
      String fsUri = workUnitState.getProp(ConfigurationKeys.FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI);
      FileSystem fs = FileSystem.get(URI.create(fsUri), new Configuration());
      sourceFile = new Path(fs.makeQualified(sourceFile).toUri().getRawPath());
      this.dataFileReader =
          new DataFileReader<GenericRecord>(new FsInput(sourceFile, new Configuration()), datumReader);
    } catch (IOException ioe) {
      // Leave dataFileReader null so readRecord() yields no records.
      LOG.error("Failed to read the source file " + sourceFile, ioe);
    }
  }

  /** @return the Avro schema (as a JSON string) of the records this extractor emits */
  @Override
  public String getSchema() {
    return AVRO_SCHEMA;
  }

  /** @return the next record rendered as a string, or {@code null} once exhausted (or never opened) */
  @Override
  public String readRecord(@Deprecated String reuse) throws IOException {
    DataFileReader<GenericRecord> reader = this.dataFileReader;
    if (reader != null && reader.hasNext()) {
      return reader.next().toString();
    }
    return null;
  }

  /** Closes the underlying reader; close failures are deliberately ignored in this test stub. */
  @Override
  public void close() {
    try {
      this.dataFileReader.close();
    } catch (IOException ignored) {
      // Best-effort close for test code.
    }
  }

  @Override
  public long getExpectedRecordCount() {
    return TOTAL_RECORDS;
  }

  /** Watermarks are not tracked by this test extractor. */
  @Override
  public long getHighWatermark() {
    return 0;
  }
}
| 1,220 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/test/TestConverter2.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test;
import java.lang.reflect.Type;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.reflect.TypeToken;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.Converter;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SingleRecordIterable;
import org.apache.gobblin.fork.CopyNotSupportedException;
import org.apache.gobblin.fork.CopyableGenericRecord;
import org.apache.gobblin.fork.CopyableSchema;
/**
* An implementation of {@link Converter} for tests related to the
* {@link org.apache.gobblin.fork.ForkOperator}.
*
* @author Yinan Li
*/
@SuppressWarnings("unused")
public class TestConverter2 extends Converter<String, CopyableSchema, String, CopyableGenericRecord> {
private static final Gson GSON = new Gson();
// Expect the input JSON string to be key-value pairs
private static final Type FIELD_ENTRY_TYPE = new TypeToken<Map<String, Object>>() {
}.getType();
@Override
public CopyableSchema convertSchema(String schema, WorkUnitState workUnit) {
return new CopyableSchema(new Schema.Parser().parse(schema));
}
@Override
public Iterable<CopyableGenericRecord> convertRecord(CopyableSchema schema, String inputRecord,
WorkUnitState workUnit)
throws DataConversionException {
JsonElement element = GSON.fromJson(inputRecord, JsonElement.class);
Map<String, Object> fields = GSON.fromJson(element, FIELD_ENTRY_TYPE);
try {
Schema avroSchema = schema.copy();
GenericRecord record = new GenericData.Record(avroSchema);
for (Map.Entry<String, Object> entry : fields.entrySet()) {
if (entry.getValue() instanceof Double) {
// Gson reads the integers in the input Json documents as doubles, so we have
// to convert doubles to integers here as the Avro schema specifies integers.
record.put(entry.getKey(), ((Double) entry.getValue()).intValue());
} else {
record.put(entry.getKey(), entry.getValue());
}
}
return new SingleRecordIterable<CopyableGenericRecord>(new CopyableGenericRecord(record));
} catch (CopyNotSupportedException cnse) {
throw new DataConversionException(cnse);
}
}
}
| 1,221 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/test/TestSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test;
import java.util.List;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.Source;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.extractor.extract.AbstractSource;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.Extract.TableType;
import org.apache.gobblin.source.workunit.MultiWorkUnit;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
* An implementation of {@link Source} for integration test.
*
* @author Yinan Li
*/
public class TestSource extends AbstractSource<String, String> {
  static final String SOURCE_FILE_LIST_KEY = "source.files";
  static final String SOURCE_FILE_KEY = "source.file";
  private static final Splitter SPLITTER = Splitter.on(",").omitEmptyStrings().trimResults();

  /**
   * Builds one work unit per configured source file, alternating the even/odd-indexed files
   * between two snapshot extracts. Optionally marks even-indexed units as skipped and/or bundles
   * everything into a single {@link MultiWorkUnit}.
   */
  @Override
  public List<WorkUnit> getWorkunits(SourceState state) {
    String nameSpace = state.getProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY);
    Extract evenExtract = createExtract(TableType.SNAPSHOT_ONLY, nameSpace, "TestTable1");
    Extract oddExtract = createExtract(TableType.SNAPSHOT_ONLY, nameSpace, "TestTable2");
    List<String> sourceFiles = SPLITTER.splitToList(state.getProp(SOURCE_FILE_LIST_KEY));
    boolean skipEvenWorkUnits = state.getPropAsBoolean(ConfigurationKeys.WORK_UNIT_SKIP_KEY, false);
    List<WorkUnit> workUnits = Lists.newArrayList();
    for (int i = 0; i < sourceFiles.size(); i++) {
      boolean evenIndex = (i % 2 == 0);
      WorkUnit workUnit = WorkUnit.create(evenIndex ? evenExtract : oddExtract);
      workUnit.setProp(SOURCE_FILE_KEY, sourceFiles.get(i));
      if (skipEvenWorkUnits && evenIndex) {
        workUnit.setProp(ConfigurationKeys.WORK_UNIT_SKIP_KEY, true);
      }
      workUnits.add(workUnit);
    }
    if (state.getPropAsBoolean("use.multiworkunit", false)) {
      // Replace the individual units with a single container holding them all.
      MultiWorkUnit multiWorkUnit = MultiWorkUnit.createEmpty();
      multiWorkUnit.addWorkUnits(workUnits);
      workUnits.clear();
      workUnits.add(multiWorkUnit);
    }
    return workUnits;
  }

  @Override
  public Extractor<String, String> getExtractor(WorkUnitState state) {
    return new TestExtractor(state);
  }

  /** No shutdown work is needed for this test source. */
  @Override
  public void shutdown(SourceState state) {
    // Do nothing
  }
}
| 1,222 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/test/matchers/service | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/test/matchers/service/monitoring/JobStatusMatch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test.matchers.service.monitoring;
import java.util.Objects;

import lombok.AllArgsConstructor;
import lombok.RequiredArgsConstructor;
import lombok.ToString;

import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;

import org.apache.gobblin.service.monitoring.JobStatus;
import org.apache.gobblin.service.monitoring.JobStatusRetriever;
/** {@link org.hamcrest.Matcher} for {@link org.apache.gobblin.service.monitoring.JobStatus} */
@AllArgsConstructor(staticName = "ofTagged")
@RequiredArgsConstructor(staticName = "of")
@ToString
public class JobStatusMatch extends TypeSafeMatcher<JobStatus> {
  private final String flowGroup;
  private final String flowName;
  private final long flowExecutionId;
  private final String jobGroup;
  private final String jobName;
  private final long jobExecutionId;
  private final String eventName;
  // Optional; only set via the `ofTagged` factory. May legitimately be null.
  private String jobTag;

  /** relative identification: acquire/share the `flowGroup`, `flowName`, and `flowExecutionId` of whichever dependent {@link #upon} */
  @AllArgsConstructor(staticName = "ofTagged")
  @RequiredArgsConstructor(staticName = "of")
  @ToString
  public static class Dependent {
    private final String jobGroup;
    private final String jobName;
    private final long jobExecutionId;
    private final String eventName;
    private String jobTag;

    /** Binds this dependent's job-level fields to the flow identity of {@code fsm}. */
    public JobStatusMatch upon(FlowStatusMatch fsm) {
      return JobStatusMatch.ofTagged(fsm.getFlowGroup(), fsm.getFlowName(), fsm.getFlowExecutionId(), jobGroup, jobName, jobExecutionId, eventName, jobTag);
    }
  }

  /** supplements {@link #of} and {@link #ofTagged} factories, to simplify matching of "flow-level" `JobStatus` */
  public static JobStatusMatch ofFlowLevelStatus(String flowGroup, String flowName, long flowExecutionId, String eventName) {
    return of(flowGroup, flowName, flowExecutionId, JobStatusRetriever.NA_KEY, JobStatusRetriever.NA_KEY, 0L, eventName);
  }

  @Override
  public void describeTo(final Description description) {
    description.appendText("matches JobStatus of `" + toString() + "`");
  }

  /** Field-by-field equality check; `jobTag` is compared null-safely since it may be absent on either side. */
  @Override
  public boolean matchesSafely(JobStatus jobStatus) {
    return jobStatus.getFlowGroup().equals(flowGroup)
        && jobStatus.getFlowName().equals(flowName)
        && jobStatus.getFlowExecutionId() == flowExecutionId
        && jobStatus.getJobGroup().equals(jobGroup)
        && jobStatus.getJobName().equals(jobName)
        && jobStatus.getJobExecutionId() == jobExecutionId
        && jobStatus.getEventName().equals(eventName)
        && Objects.equals(jobStatus.getJobTag(), jobTag);
  }
}
| 1,223 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/test/matchers/service | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/test/matchers/service/monitoring/FlowStatusMatch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test.matchers.service.monitoring;
import java.util.List;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.MatcherAssert;
import org.hamcrest.TypeSafeMatcher;
import org.hamcrest.collection.IsIterableContainingInOrder;
import com.google.api.client.util.Lists;
import com.google.common.collect.Iterables;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
import org.apache.gobblin.service.ExecutionStatus;
import org.apache.gobblin.service.monitoring.FlowStatus;
import org.apache.gobblin.service.monitoring.JobStatus;
/** {@link org.hamcrest.Matcher} for {@link org.apache.gobblin.service.monitoring.FlowStatus} */
@AllArgsConstructor(staticName = "withDependentJobStatuses")
@RequiredArgsConstructor(staticName = "of")
@ToString
public class FlowStatusMatch extends TypeSafeMatcher<FlowStatus> {
  @Getter
  private final String flowGroup;
  @Getter
  private final String flowName;
  @Getter
  private final long flowExecutionId;
  private final ExecutionStatus execStatus;
  // Optional; only set via `withDependentJobStatuses`. May be null (no job-level expectations).
  private List<JobStatusMatch.Dependent> jsmDependents;

  @Override
  public void describeTo(final Description description) {
    description.appendText("matches FlowStatus of `" + toString() + "`");
  }

  /**
   * Matches on the flow identity fields, then asserts the flow's job statuses appear in order:
   * first the flow-level status, followed by each declared dependent (bound to this flow's identity).
   */
  @Override
  public boolean matchesSafely(FlowStatus flowStatus) {
    JobStatusMatch flowJobStatusMatch = JobStatusMatch.ofFlowLevelStatus(flowGroup, flowName, flowExecutionId, execStatus.name());
    // Use the already-imported Lists factory rather than a fully-qualified java.util.ArrayList.
    List<Matcher<? super JobStatus>> matchers = Lists.newArrayList();
    matchers.add(flowJobStatusMatch);
    if (jsmDependents != null) {
      jsmDependents.stream().map(dependent -> dependent.upon(this)).forEach(matchers::add);
    }
    return flowStatus.getFlowGroup().equals(flowGroup)
        && flowStatus.getFlowName().equals(flowName)
        && flowStatus.getFlowExecutionId() == flowExecutionId
        && assertOrderedJobStatuses(flowStatus, Iterables.toArray(matchers, Matcher.class));
  }

  /** Asserts (throws on mismatch) that the flow's job statuses match {@code matchers} in order. */
  @SafeVarargs
  private static boolean assertOrderedJobStatuses(FlowStatus flowStatus, Matcher<? super JobStatus>... matchers) {
    MatcherAssert.assertThat(Lists.newArrayList(flowStatus.getJobStatusIterator()),
        IsIterableContainingInOrder.contains(matchers));
    return true; // NOTE: exception thrown in case of error
  }
}
| 1,224 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/util/TemplateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.util.Collection;
import java.util.Properties;
import org.apache.commons.io.FileUtils;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.job_catalog.PackagedTemplatesJobCatalogDecorator;
import org.apache.gobblin.runtime.template.ResourceBasedJobTemplate;
/**
* Testing the functions for reading template merging template with user-specified attributes.
* 1. Reading the template configuration, testing size or something
* 2. Testing the required attributes result.
*/
@Test(groups = {"gobblin.runtime"})
public class TemplateTest {
  // Temp directory holding the user-specified attribute file; deleted in tearDown().
  private File jobConfigDir;
  // User-specified attributes merged against the packaged template under test.
  private Properties userProp;

  @BeforeClass
  public void setUp()
      throws IOException, URISyntaxException {
    // Creating userCustomized stuff
    this.jobConfigDir =
        Files.createTempDirectory(String.format("gobblin-test_%s_job-conf", this.getClass().getSimpleName())).toFile();
    FileUtils.forceDeleteOnExit(this.jobConfigDir);

    // User specified file content:
    this.userProp = new Properties();
    userProp.setProperty("a", "1");
    userProp.setProperty("templated0", "2");
    userProp.setProperty("required0", "r0");
    userProp.setProperty("required1", "r1");
    userProp.setProperty("required2", "r2");
    userProp.setProperty("job.template", "templates/test.template");

    // User specified file's name : /[jobConfigDirName]/user.attr
    // try-with-resources ensures the writer is flushed and closed even if store() throws
    // (previously the FileWriter was leaked).
    try (FileWriter writer = new FileWriter(new File(this.jobConfigDir, "user.attr"))) {
      userProp.store(writer, "");
    }
  }

  /** The template's required-attribute list should be exposed exactly as declared. */
  @Test
  public void testRequiredAttrList() throws Exception {
    Properties jobProps = this.userProp;
    Collection<String> requiredConfigList = ResourceBasedJobTemplate.forURI(new URI(
        jobProps.getProperty(ConfigurationKeys.JOB_TEMPLATE_PATH)), new PackagedTemplatesJobCatalogDecorator())
        .getRequiredConfigList();
    Assert.assertEquals(requiredConfigList.size(), 3);
    Assert.assertTrue(requiredConfigList.contains("required0"));
    Assert.assertTrue(requiredConfigList.contains("required1"));
    Assert.assertTrue(requiredConfigList.contains("required2"));
  }

  // Testing the resolving of userCustomized attributes and template is correct.
  @Test
  public void testResolvingConfig()
      throws Exception {
    Config jobProps = ConfigFactory.parseProperties(this.userProp);
    Assert.assertEquals(ConfigUtils.configToProperties(jobProps).size(), 6);
    jobProps = ResourceBasedJobTemplate.forResourcePath(jobProps.getString(ConfigurationKeys.JOB_TEMPLATE_PATH),
        new PackagedTemplatesJobCatalogDecorator())
        .getResolvedConfig(jobProps);
    // Remove job.template in userSpecified file and gobblin.template.required_attributes in template
    Assert.assertEquals(ConfigUtils.configToProperties(jobProps).size(), 8);

    Properties targetResolvedJobProps = new Properties();
    targetResolvedJobProps.setProperty("a", "1");
    targetResolvedJobProps.setProperty("templated0", "2");
    targetResolvedJobProps.setProperty("templated1", "y");
    targetResolvedJobProps.setProperty("required0", "r0");
    targetResolvedJobProps.setProperty("required1", "r1");
    targetResolvedJobProps.setProperty("required2", "r2");
    targetResolvedJobProps.setProperty(ConfigurationKeys.JOB_TEMPLATE_PATH, "templates/test.template");
    targetResolvedJobProps.setProperty(ConfigurationKeys.REQUIRED_ATRRIBUTES_LIST, "required0,required1,required2");

    Assert.assertEquals(targetResolvedJobProps, ConfigUtils.configToProperties(jobProps));
  }

  @AfterClass
  public void tearDown()
      throws IOException {
    if (this.jobConfigDir != null) {
      FileUtils.forceDelete(this.jobConfigDir);
    }
  }
}
| 1,225 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/util/ReflectivePredicateEvaluatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import org.junit.Assert;
import org.testng.annotations.Test;
import lombok.Data;
public class ReflectivePredicateEvaluatorTest {
@Test
public void simpleTest() throws Exception {
ReflectivePredicateEvaluator evaluator = new ReflectivePredicateEvaluator(
"SELECT anInt = 1 FROM myInterface", MyInterface.class);
Assert.assertTrue(evaluator.evaluate(new MyImplementation(1, "foo")));
Assert.assertFalse(evaluator.evaluate(new MyImplementation(2, "foo")));
Assert.assertTrue(evaluator.evaluate("SELECT anInt = 1 OR aString = 'foo' FROM myInterface",
new MyImplementation(1, "bar")));
Assert.assertTrue(evaluator.evaluate("SELECT anInt = 1 OR aString = 'foo' FROM myInterface",
new MyImplementation(2, "foo")));
Assert.assertFalse(evaluator.evaluate("SELECT anInt = 1 OR aString = 'foo' FROM myInterface",
new MyImplementation(2, "bar")));
}
@Test
public void testWithAggregations() throws Exception {
ReflectivePredicateEvaluator evaluator = new ReflectivePredicateEvaluator(
"SELECT sum(anInt) = 5 FROM myInterface", MyInterface.class);
Assert.assertFalse(evaluator.evaluate(new MyImplementation(1, "foo")));
Assert.assertTrue(evaluator.evaluate(new MyImplementation(1, "foo"), new MyImplementation(4, "foo")));
Assert.assertFalse(evaluator.evaluate(new MyImplementation(2, "foo"), new MyImplementation(4, "foo")));
}
@Test
public void testWithAggregationsAndFilter() throws Exception {
ReflectivePredicateEvaluator evaluator = new ReflectivePredicateEvaluator(
"SELECT sum(anInt) = 5 FROM myInterface WHERE aString = 'foo'", MyInterface.class);
Assert.assertFalse(evaluator.evaluate(new MyImplementation(1, "foo")));
Assert.assertTrue(evaluator.evaluate(new MyImplementation(1, "foo"), new MyImplementation(4, "foo"), new MyImplementation(4, "bar")));
Assert.assertFalse(evaluator.evaluate(new MyImplementation(1, "foo"), new MyImplementation(4, "foo"), new MyImplementation(4, "foo")));
}
@Test
public void testMultipleInterfaces() throws Exception {
ReflectivePredicateEvaluator evaluator = new ReflectivePredicateEvaluator(
"SELECT true = ALL (SELECT sum(anInt) = 2 AS satisfied FROM myInterface UNION SELECT sum(anInt) = 3 AS satisfied FROM myInterface2)",
MyInterface.class, MyInterface2.class);
Assert.assertFalse(evaluator.evaluate(new MyImplementation(2, "foo")));
Assert.assertTrue(evaluator.evaluate(new MyImplementation(2, "foo"), new MyImplementation2(3)));
Assert.assertTrue(evaluator.evaluate(new MyImplementation(1, "foo"), new MyImplementation2(3), new MyImplementation(1, "foo")));
}
@Test
public void testMultipleOutputs() throws Exception {
ReflectivePredicateEvaluator evaluator =
new ReflectivePredicateEvaluator("SELECT anInt = 1 FROM myInterface", MyInterface.class);
Assert.assertTrue(evaluator.evaluate(new MyImplementation(1, "bar"), new MyImplementation(1, "foo")));
Assert.assertFalse(evaluator.evaluate(new MyImplementation(1, "bar"), new MyImplementation(2, "foo")));
}
@Test
public void testInvalidSQL() throws Exception {
try {
ReflectivePredicateEvaluator evaluator =
new ReflectivePredicateEvaluator("SELECT anInt FROM myInterface", MyInterface.class);
Assert.fail();
} catch (IllegalArgumentException exc) {
// Expected
}
}
@Test
public void testNoOutputs() throws Exception {
try {
ReflectivePredicateEvaluator evaluator =
new ReflectivePredicateEvaluator("SELECT anInt = 1 FROM myInterface WHERE aString = 'foo'",
MyInterface.class);
evaluator.evaluate(new MyImplementation(1, "bar"));
Assert.fail();
} catch (IllegalArgumentException exc) {
// Expected
}
}
private interface MyInterface {
int getAnInt();
String getAString();
}
@Data
private static class MyImplementation implements MyInterface {
private final int anInt;
private final String aString;
}
private interface MyInterface2 {
int getAnInt();
}
@Data
private static class MyImplementation2 implements MyInterface2 {
private final int anInt;
}
}
| 1,226 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/util/SchedulerUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.runtime.job_spec.JobSpecResolver;
import org.apache.gobblin.util.filesystem.PathAlterationListener;
import org.apache.gobblin.util.filesystem.PathAlterationListenerAdaptor;
import org.apache.gobblin.util.filesystem.PathAlterationObserverScheduler;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.Semaphore;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
/**
* Unit tests for {@link SchedulerUtils}.
* Note that the entities involved in unit tests
* are only from local file system (Compatible with java.io.File Class)
*/
@Test(groups = {"gobblin.util"})
public class SchedulerUtilsTest {
  // For general type of File system
  private File jobConfigDir;
  private File subDir1;
  private File subDir11;
  private File subDir2;

  @BeforeClass
  public void setUp()
      throws IOException {
    this.jobConfigDir = java.nio.file.Files.createTempDirectory(
        String.format("gobblin-test_%s_job-conf", this.getClass().getSimpleName())).toFile();
    FileUtils.forceDeleteOnExit(this.jobConfigDir);
    this.subDir1 = new File(this.jobConfigDir, "test1");
    this.subDir11 = new File(this.subDir1, "test11");
    this.subDir2 = new File(this.jobConfigDir, "test2");
    this.subDir1.mkdirs();
    this.subDir11.mkdirs();
    this.subDir2.mkdirs();

    Properties rootProps = new Properties();
    rootProps.setProperty("k1", "a1");
    rootProps.setProperty("k2", "a2");
    // test-job-conf-dir/root.properties
    storeProperties(rootProps, new File(this.jobConfigDir, "root.properties"));

    Properties props1 = new Properties();
    props1.setProperty("k1", "b1");
    props1.setProperty("k3", "a3");
    // test-job-conf-dir/test1/test.properties
    storeProperties(props1, new File(this.subDir1, "test.properties"));

    Properties jobProps1 = new Properties();
    jobProps1.setProperty("k1", "c1");
    jobProps1.setProperty("k3", "b3");
    jobProps1.setProperty("k6", "a6");
    // test-job-conf-dir/test1/test11.pull
    storeProperties(jobProps1, new File(this.subDir1, "test11.pull"));

    Properties jobProps2 = new Properties();
    jobProps2.setProperty("k7", "a7");
    // test-job-conf-dir/test1/test12.PULL
    storeProperties(jobProps2, new File(this.subDir1, "test12.PULL"));

    Properties jobProps3 = new Properties();
    jobProps3.setProperty("k1", "d1");
    jobProps3.setProperty("k8", "a8");
    jobProps3.setProperty("k9", "${k8}");
    // test-job-conf-dir/test1/test11/test111.pull
    storeProperties(jobProps3, new File(this.subDir11, "test111.pull"));

    Properties props2 = new Properties();
    props2.setProperty("k2", "b2");
    props2.setProperty("k5", "a5");
    // test-job-conf-dir/test2/test.properties
    storeProperties(props2, new File(this.subDir2, "test.PROPERTIES"));

    Properties jobProps4 = new Properties();
    jobProps4.setProperty("k5", "b5");
    // test-job-conf-dir/test2/test21.PULL
    storeProperties(jobProps4, new File(this.subDir2, "test21.PULL"));
  }

  /** Stores {@code props} to {@code file}, closing the writer even if store() fails (the
   *  previous inline {@code new FileWriter(...)} calls were never closed). */
  private static void storeProperties(Properties props, File file)
      throws IOException {
    try (FileWriter writer = new FileWriter(file)) {
      props.store(writer, "");
    }
  }

  /** Loads every job config under the root dir and checks the ancestor-property inheritance. */
  @Test
  public void testloadGenericJobConfigs()
      throws ConfigurationException, IOException {
    Properties properties = new Properties();
    properties.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, this.jobConfigDir.getAbsolutePath());
    List<Properties> jobConfigs = SchedulerUtils.loadGenericJobConfigs(properties, JobSpecResolver.mock());
    Assert.assertEquals(jobConfigs.size(), 4);

    // test-job-conf-dir/test1/test11/test111.pull
    Properties jobProps1 = getJobConfigForFile(jobConfigs, "test111.pull");
    Assert.assertEquals(jobProps1.stringPropertyNames().size(), 7);
    Assert.assertTrue(jobProps1.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY) || jobProps1.containsKey(
        ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY));
    Assert.assertTrue(jobProps1.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
    Assert.assertEquals(jobProps1.getProperty("k1"), "d1");
    Assert.assertEquals(jobProps1.getProperty("k2"), "a2");
    Assert.assertEquals(jobProps1.getProperty("k3"), "a3");
    Assert.assertEquals(jobProps1.getProperty("k8"), "a8");
    Assert.assertEquals(jobProps1.getProperty("k9"), "a8");

    // test-job-conf-dir/test1/test11.pull
    Properties jobProps2 = getJobConfigForFile(jobConfigs, "test11.pull");
    Assert.assertEquals(jobProps2.stringPropertyNames().size(), 6);
    // Fixed copy-paste bug: the second operand previously checked jobProps1 instead of jobProps2.
    Assert.assertTrue(jobProps2.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY) || jobProps2.containsKey(
        ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY));
    Assert.assertTrue(jobProps2.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
    Assert.assertEquals(jobProps2.getProperty("k1"), "c1");
    Assert.assertEquals(jobProps2.getProperty("k2"), "a2");
    Assert.assertEquals(jobProps2.getProperty("k3"), "b3");
    Assert.assertEquals(jobProps2.getProperty("k6"), "a6");

    // test-job-conf-dir/test1/test12.PULL
    Properties jobProps3 = getJobConfigForFile(jobConfigs, "test12.PULL");
    Assert.assertEquals(jobProps3.stringPropertyNames().size(), 6);
    // Fixed copy-paste bug: the second operand previously checked jobProps1 instead of jobProps3.
    Assert.assertTrue(jobProps3.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY) || jobProps3.containsKey(
        ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY));
    Assert.assertTrue(jobProps3.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
    Assert.assertEquals(jobProps3.getProperty("k1"), "b1");
    Assert.assertEquals(jobProps3.getProperty("k2"), "a2");
    Assert.assertEquals(jobProps3.getProperty("k3"), "a3");
    Assert.assertEquals(jobProps3.getProperty("k7"), "a7");

    // test-job-conf-dir/test2/test21.PULL
    Properties jobProps4 = getJobConfigForFile(jobConfigs, "test21.PULL");
    Assert.assertEquals(jobProps4.stringPropertyNames().size(), 5);
    // Fixed copy-paste bug: the second operand previously checked jobProps1 instead of jobProps4.
    Assert.assertTrue(jobProps4.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY) || jobProps4.containsKey(
        ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY));
    Assert.assertTrue(jobProps4.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
    Assert.assertEquals(jobProps4.getProperty("k1"), "a1");
    Assert.assertEquals(jobProps4.getProperty("k2"), "b2");
    Assert.assertEquals(jobProps4.getProperty("k5"), "b5");
  }

  /** Loading scoped to a common-props file should only pick up configs under its directory. */
  @Test
  public void testLoadJobConfigsForCommonPropsFile()
      throws ConfigurationException, IOException {
    Path commonPropsPath = new Path(this.subDir1.getAbsolutePath() + "/test.properties");

    Properties properties = new Properties();
    properties.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, this.jobConfigDir.getAbsolutePath());
    List<Properties> jobConfigs = SchedulerUtils.loadGenericJobConfigs(properties, commonPropsPath,
        new Path(this.jobConfigDir.getAbsolutePath()), JobSpecResolver.mock());
    Assert.assertEquals(jobConfigs.size(), 3);

    // test-job-conf-dir/test1/test11/test111.pull
    Properties jobProps1 = getJobConfigForFile(jobConfigs, "test111.pull");
    Assert.assertEquals(jobProps1.stringPropertyNames().size(), 7);
    Assert.assertEquals(jobProps1.getProperty("k1"), "d1");
    Assert.assertEquals(jobProps1.getProperty("k2"), "a2");
    Assert.assertEquals(jobProps1.getProperty("k3"), "a3");
    Assert.assertEquals(jobProps1.getProperty("k8"), "a8");
    Assert.assertEquals(jobProps1.getProperty("k9"), "a8");

    // test-job-conf-dir/test1/test11.pull
    Properties jobProps2 = getJobConfigForFile(jobConfigs, "test11.pull");
    Assert.assertEquals(jobProps2.stringPropertyNames().size(), 6);
    Assert.assertEquals(jobProps2.getProperty("k1"), "c1");
    Assert.assertEquals(jobProps2.getProperty("k2"), "a2");
    Assert.assertEquals(jobProps2.getProperty("k3"), "b3");
    Assert.assertEquals(jobProps2.getProperty("k6"), "a6");

    // test-job-conf-dir/test1/test12.PULL
    Properties jobProps3 = getJobConfigForFile(jobConfigs, "test12.PULL");
    Assert.assertEquals(jobProps3.stringPropertyNames().size(), 6);
    Assert.assertEquals(jobProps3.getProperty("k1"), "b1");
    Assert.assertEquals(jobProps3.getProperty("k2"), "a2");
    Assert.assertEquals(jobProps3.getProperty("k3"), "a3");
    Assert.assertEquals(jobProps3.getProperty("k7"), "a7");
  }

  /** Loading a single job config file should still resolve ancestor properties. */
  @Test
  public void testloadGenericJobConfig()
      throws ConfigurationException, IOException {
    Path jobConfigPath = new Path(this.subDir11.getAbsolutePath(), "test111.pull");

    Properties properties = new Properties();
    properties.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, this.jobConfigDir.getAbsolutePath());
    Properties jobProps =
        SchedulerUtils.loadGenericJobConfig(properties, jobConfigPath, new Path(this.jobConfigDir.getAbsolutePath()),
            JobSpecResolver.builder(ConfigFactory.empty()).build());

    Assert.assertEquals(jobProps.stringPropertyNames().size(), 7);
    Assert.assertTrue(jobProps.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY) || jobProps.containsKey(
        ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY));
    Assert.assertTrue(jobProps.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
    Assert.assertEquals(jobProps.getProperty("k1"), "d1");
    Assert.assertEquals(jobProps.getProperty("k2"), "a2");
    Assert.assertEquals(jobProps.getProperty("k3"), "a3");
    Assert.assertEquals(jobProps.getProperty("k8"), "a8");
    Assert.assertEquals(jobProps.getProperty("k9"), "a8");
  }

  /** Creates/touches files under the watched dir and waits (via semaphore) for the callbacks. */
  @Test(dependsOnMethods = {"testLoadJobConfigsForCommonPropsFile", "testloadGenericJobConfig"})
  public void testPathAlterationObserver()
      throws Exception {
    PathAlterationObserverScheduler monitor = new PathAlterationObserverScheduler(1000);
    final Set<Path> fileAltered = Sets.newHashSet();
    final Semaphore semaphore = new Semaphore(0);
    PathAlterationListener listener = new PathAlterationListenerAdaptor() {
      @Override
      public void onFileCreate(Path path) {
        fileAltered.add(path);
        semaphore.release();
      }

      @Override
      public void onFileChange(Path path) {
        fileAltered.add(path);
        semaphore.release();
      }
    };

    SchedulerUtils.addPathAlterationObserver(monitor, listener, new Path(this.jobConfigDir.getPath()));
    try {
      monitor.start();
      // Give the monitor some time to start
      Thread.sleep(1000);

      File jobConfigFile = new File(this.subDir11, "test111.pull");
      Files.touch(jobConfigFile);

      File commonPropsFile = new File(this.subDir1, "test.properties");
      Files.touch(commonPropsFile);

      File newJobConfigFile = new File(this.subDir11, "test112.pull");
      Files.append("k1=v1", newJobConfigFile, ConfigurationKeys.DEFAULT_CHARSET_ENCODING);

      // Block until all three events (2 changes + 1 create) have been observed.
      semaphore.acquire(3);
      Assert.assertEquals(fileAltered.size(), 3);
      Assert.assertTrue(fileAltered.contains(new Path("file:" + jobConfigFile)));
      Assert.assertTrue(fileAltered.contains(new Path("file:" + commonPropsFile)));
      Assert.assertTrue(fileAltered.contains(new Path("file:" + newJobConfigFile)));
    } finally {
      monitor.stop();
    }
  }

  /** A pull file referencing a template should resolve through all three load entry points. */
  @Test
  public void testTemplateLoad() throws Exception {
    Path path = new Path(getClass().getClassLoader().getResource("schedulerUtilsTest").getFile());

    Properties pullFile =
        SchedulerUtils.loadGenericJobConfig(new Properties(), new Path(path, "templated.pull"), path,
            JobSpecResolver.builder(ConfigFactory.empty()).build());
    Assert.assertEquals(pullFile.getProperty("gobblin.dataset.pattern"), "pattern");
    Assert.assertEquals(pullFile.getProperty("job.name"), "GobblinDatabaseCopyTest");

    List<Properties> jobConfigs = SchedulerUtils.loadGenericJobConfigs(new Properties(), new Path(path, "templated.pull"),
        path, JobSpecResolver.mock());
    Properties pullFile2 = getJobConfigForFile(jobConfigs, "templated.pull");
    Assert.assertEquals(pullFile2.getProperty("gobblin.dataset.pattern"), "pattern");
    Assert.assertEquals(pullFile2.getProperty("job.name"), "GobblinDatabaseCopyTest");

    Properties props = new Properties();
    props.put(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, path.toString());
    List<Properties> jobConfigs3 = SchedulerUtils.loadGenericJobConfigs(props, JobSpecResolver.mock());
    Properties pullFile3 = getJobConfigForFile(jobConfigs3, "templated.pull");
    Assert.assertEquals(pullFile3.getProperty("gobblin.dataset.pattern"), "pattern");
    Assert.assertEquals(pullFile3.getProperty("job.name"), "GobblinDatabaseCopyTest");
  }

  @AfterClass
  public void tearDown()
      throws IOException {
    if (this.jobConfigDir != null) {
      FileUtils.forceDelete(this.jobConfigDir);
    }
  }

  /**
   * Returns the job config whose file path ends with {@code fileName}, or {@code null} if no
   * config in the list matches (callers dereference the result, so a miss fails the test as NPE).
   */
  private Properties getJobConfigForFile(List<Properties> jobConfigs, String fileName) {
    for (Properties jobConfig : jobConfigs) {
      if (jobConfig.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY).endsWith(fileName)) {
        return jobConfig;
      }
    }
    return null;
  }
}
| 1,227 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/source/extractor/filebased/TextFileBasedSourceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.filebased;
import java.io.File;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Charsets;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.embedded.EmbeddedGobblin;
import org.apache.gobblin.writer.test.GobblinTestEventBusWriter;
import org.apache.gobblin.writer.test.TestingEventBusAsserter;
import org.apache.gobblin.writer.test.TestingEventBuses;
public class TextFileBasedSourceTest {

  /** End-to-end run of the text file based source: new files are pulled, replaced files re-pulled. */
  @Test(enabled=false, groups = { "disabledOnCI" })
  public void test() throws Exception {
    File stateStoreDir = Files.createTempDir();
    stateStoreDir.deleteOnExit();
    File dataDir = Files.createTempDir();
    dataDir.deleteOnExit();

    String eventBusId = UUID.randomUUID().toString();
    TestingEventBusAsserter asserter = new TestingEventBusAsserter(eventBusId);

    EmbeddedGobblin gobblin = new EmbeddedGobblin().setTemplate("resource:///templates/textFileBasedSourceTest.template")
        .setConfiguration(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY, dataDir.getAbsolutePath())
        .setConfiguration(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, stateStoreDir.getAbsolutePath())
        .setConfiguration(GobblinTestEventBusWriter.FULL_EVENTBUSID_KEY, eventBusId)
        .setConfiguration(ConfigurationKeys.STATE_STORE_ENABLED, "true");

    Files.write("record1\nrecord2\nrecord3", new File(dataDir, "file1"), Charsets.UTF_8);
    Files.write("record4\nrecord5", new File(dataDir, "file2"), Charsets.UTF_8);

    gobblin.run();

    Set<Object> events = drainEventValues(asserter);
    Assert.assertEquals(events, Sets.newHashSet("record1", "record2", "record3", "record4", "record5"));
    asserter.clear();

    // should only pull new files
    Files.write("record6\nrecord7", new File(dataDir, "file3"), Charsets.UTF_8);

    gobblin.run();

    events = drainEventValues(asserter);
    Assert.assertEquals(events, Sets.newHashSet("record6", "record7"));
    asserter.clear();

    // if we replace old file, it should repull that file
    Assert.assertTrue(new File(dataDir, "file2").delete());

    // Some systems don't provide modtime so gobblin can't keep of changed files.
    // run gobblin once with file2 deleted to update the snapshot
    gobblin.run();
    events = drainEventValues(asserter);
    Assert.assertTrue(events.isEmpty());
    asserter.clear();

    Files.write("record8\nrecord9", new File(dataDir, "file2"), Charsets.UTF_8);

    gobblin.run();

    events = drainEventValues(asserter);
    Assert.assertEquals(events, Sets.newHashSet("record8", "record9"));
    asserter.clear();
  }

  /** Verifies SOURCE_FILEBASED_MAX_FILES_PER_RUN caps how many files each run consumes. */
  @Test (enabled=false)
  public void testFileLimit() throws Exception {
    File stateStoreDir = Files.createTempDir();
    stateStoreDir.deleteOnExit();
    File dataDir = Files.createTempDir();
    dataDir.deleteOnExit();

    String eventBusId = UUID.randomUUID().toString();
    TestingEventBusAsserter asserter = new TestingEventBusAsserter(eventBusId);

    EmbeddedGobblin gobblin = new EmbeddedGobblin().setTemplate("resource:///templates/textFileBasedSourceTest.template")
        .setConfiguration(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY, dataDir.getAbsolutePath())
        .setConfiguration(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, stateStoreDir.getAbsolutePath())
        .setConfiguration(GobblinTestEventBusWriter.FULL_EVENTBUSID_KEY, eventBusId)
        .setConfiguration(ConfigurationKeys.STATE_STORE_ENABLED, "true")
        .setConfiguration(ConfigurationKeys.SOURCE_FILEBASED_MAX_FILES_PER_RUN, "2");

    Files.write("record1\nrecord2\nrecord3", new File(dataDir, "file1"), Charsets.UTF_8);
    Files.write("record4\nrecord5", new File(dataDir, "file2"), Charsets.UTF_8);
    Files.write("record6\nrecord7", new File(dataDir, "file3"), Charsets.UTF_8);

    gobblin.run();

    // should only pull first 2 files
    Set<Object> events = drainEventValues(asserter);
    Assert.assertEquals(events, Sets.newHashSet("record1", "record2", "record3", "record4", "record5"));
    asserter.clear();

    gobblin.run();

    events = drainEventValues(asserter);
    Assert.assertEquals(events, Sets.newHashSet("record6", "record7"));
    asserter.clear();
  }

  /** Collects the values of all events currently captured by {@code asserter}
   *  (previously this stream pipeline was duplicated six times across the tests). */
  private static Set<Object> drainEventValues(TestingEventBusAsserter asserter) {
    return asserter.getEvents().stream().map(TestingEventBuses.Event::getValue).collect(Collectors.toSet());
  }
}
| 1,228 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/JobContextTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
import java.util.Queue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.gobblin.util.JobLauncherUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.slf4j.Logger;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Queues;
import com.google.common.io.Files;
import org.apache.gobblin.commit.DeliverySemantics;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.util.Either;
import org.apache.gobblin.util.Id;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class JobContextTest {
@Test
public void testNonParallelCommit()
throws Exception {
Properties jobProps = new Properties();
jobProps.setProperty(ConfigurationKeys.JOB_NAME_KEY, "test");
jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, "job_id_12345");
jobProps.setProperty(ConfigurationKeys.METRICS_ENABLED_KEY, "false");
Map<String, JobState.DatasetState> datasetStateMap = Maps.newHashMap();
for (int i = 0; i < 2; i++) {
datasetStateMap.put(Integer.toString(i), new JobState.DatasetState());
}
final BlockingQueue<ControllableCallable<Void>> callables = Queues.newLinkedBlockingQueue();
final JobContext jobContext =
new ControllableCommitJobContext(jobProps, log, datasetStateMap, new Predicate<String>() {
@Override
public boolean apply(@Nullable String input) {
return true;
}
}, callables);
ExecutorService executorService = Executors.newSingleThreadExecutor();
Future future = executorService.submit(new Runnable() {
@Override
public void run() {
try {
jobContext.commit();
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
});
// Not parallelized, should only one commit running
ControllableCallable<Void> callable = callables.poll(1, TimeUnit.SECONDS);
Assert.assertNotNull(callable);
Assert.assertNull(callables.poll(200, TimeUnit.MILLISECONDS));
// unblock first commit, should see a second commit
callable.unblock();
callable = callables.poll(1, TimeUnit.SECONDS);
Assert.assertNotNull(callable);
Assert.assertNull(callables.poll(200, TimeUnit.MILLISECONDS));
Assert.assertFalse(future.isDone());
// unblock second commit, commit should complete
callable.unblock();
future.get(1, TimeUnit.SECONDS);
Assert.assertEquals(jobContext.getJobState().getState(), JobState.RunningState.COMMITTED);
}
@Test
public void testParallelCommit()
throws Exception {
Properties jobProps = new Properties();
jobProps.setProperty(ConfigurationKeys.JOB_NAME_KEY, "test");
jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, "job_id_12345");
jobProps.setProperty(ConfigurationKeys.METRICS_ENABLED_KEY, "false");
jobProps.setProperty(ConfigurationKeys.PARALLELIZE_DATASET_COMMIT, "true");
Map<String, JobState.DatasetState> datasetStateMap = Maps.newHashMap();
for (int i = 0; i < 5; i++) {
datasetStateMap.put(Integer.toString(i), new JobState.DatasetState());
}
final BlockingQueue<ControllableCallable<Void>> callables = Queues.newLinkedBlockingQueue();
final JobContext jobContext =
new ControllableCommitJobContext(jobProps, log, datasetStateMap, new Predicate<String>() {
@Override
public boolean apply(@Nullable String input) {
return true;
}
}, callables);
ExecutorService executorService = Executors.newSingleThreadExecutor();
Future future = executorService.submit(new Runnable() {
@Override
public void run() {
try {
jobContext.commit();
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
});
// Parallelized, should be able to get all 5 commits running
Queue<ControllableCallable<Void>> drainedCallables = Lists.newLinkedList();
Assert.assertEquals(Queues.drain(callables, drainedCallables, 5, 1, TimeUnit.SECONDS), 5);
Assert.assertFalse(future.isDone());
// unblock all commits
for (ControllableCallable<Void> callable : drainedCallables) {
callable.unblock();
}
// check that future is done
future.get(1, TimeUnit.SECONDS);
// check that no more commits were added
Assert.assertTrue(callables.isEmpty());
Assert.assertEquals(jobContext.getJobState().getState(), JobState.RunningState.COMMITTED);
}
/**
 * Verifies that when one of several dataset commits fails, all commits are still attempted
 * and the job as a whole ends in the {@link JobState.RunningState#FAILED} state.
 */
@Test
public void testSingleExceptionSemantics()
    throws Exception {
  Properties jobProps = new Properties();
  jobProps.setProperty(ConfigurationKeys.JOB_NAME_KEY, "test");
  jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, "job_id_12345");
  jobProps.setProperty(ConfigurationKeys.METRICS_ENABLED_KEY, "false");
  Map<String, JobState.DatasetState> datasetStateMap = Maps.newHashMap();
  for (int i = 0; i < 3; i++) {
    datasetStateMap.put(Integer.toString(i), new JobState.DatasetState());
  }
  final BlockingQueue<ControllableCallable<Void>> callables = Queues.newLinkedBlockingQueue();
  // There are three datasets, "0", "1", and "2"; the middle one is set up to fail.
  final JobContext jobContext =
      new ControllableCommitJobContext(jobProps, log, datasetStateMap, new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String input) {
          return !input.equals("1");
        }
      }, callables);
  ExecutorService executorService = Executors.newSingleThreadExecutor();
  try {
    Future<?> future = executorService.submit(new Runnable() {
      @Override
      public void run() {
        try {
          jobContext.commit();
        } catch (IOException ioe) {
          throw new RuntimeException(ioe);
        }
      }
    });
    // All three commits should be run (even though the second one fails).
    callables.poll(1, TimeUnit.SECONDS).unblock();
    callables.poll(1, TimeUnit.SECONDS).unblock();
    callables.poll(1, TimeUnit.SECONDS).unblock();
    try {
      // The future must complete exceptionally because one dataset commit failed.
      future.get(1, TimeUnit.SECONDS);
      Assert.fail();
    } catch (ExecutionException ee) {
      // expected: the commit failure propagates through the future
    }
    // The job as a whole is marked failed.
    Assert.assertEquals(jobContext.getJobState().getState(), JobState.RunningState.FAILED);
  } finally {
    // Fix: shut the executor down so the test does not leak its worker thread.
    executorService.shutdownNow();
  }
}
/**
 * Verifies that {@link JobLauncherUtils#cleanUpOldJobData} removes old job directories of the
 * current job name (as well as the current run's own dir, since staging/output were not provided),
 * while leaving other jobs' directories untouched.
 */
@Test
public void testCleanUpOldJobData() throws Exception {
  String rootPath = Files.createTempDir().getAbsolutePath();
  final String JOB_PREFIX = Id.Job.PREFIX;
  final String JOB_NAME1 = "GobblinKafka";
  final String JOB_NAME2 = "GobblinBrooklin";
  // Two timestamps for the same job name: the smaller one is the "old" run to be cleaned up.
  final long timestamp1 = 1505774129247L;
  final long timestamp2 = 1505774129248L;
  final Joiner JOINER = Joiner.on(Id.SEPARATOR).skipNulls();
  Object[] oldJob1 = new Object[]{JOB_PREFIX, JOB_NAME1, timestamp1};
  Object[] oldJob2 = new Object[]{JOB_PREFIX, JOB_NAME2, timestamp1};
  Object[] currentJob = new Object[]{JOB_PREFIX, JOB_NAME1, timestamp2};
  Path currentJobPath = new Path(JobContext.getJobDir(rootPath, JOB_NAME1), JOINER.join(currentJob));
  Path oldJobPath1 = new Path(JobContext.getJobDir(rootPath, JOB_NAME1), JOINER.join(oldJob1));
  Path oldJobPath2 = new Path(JobContext.getJobDir(rootPath, JOB_NAME2), JOINER.join(oldJob2));
  Path stagingPath = new Path(currentJobPath, "task-staging");
  Path outputPath = new Path(currentJobPath, "task-output");
  FileSystem fs = FileSystem.getLocal(new Configuration());
  fs.mkdirs(currentJobPath);
  fs.mkdirs(oldJobPath1);
  fs.mkdirs(oldJobPath2);
  log.info("Created : {} {} {}", currentJobPath, oldJobPath1, oldJobPath2);
  gobblin.runtime.JobState jobState = new gobblin.runtime.JobState();
  jobState.setProp(ConfigurationKeys.WRITER_STAGING_DIR, stagingPath.toString());
  jobState.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, outputPath.toString());
  JobContext jobContext = mock(JobContext.class);
  when(jobContext.getStagingDirProvided()).thenReturn(false);
  when(jobContext.getOutputDirProvided()).thenReturn(false);
  // Fix: Path.getName() already returns a String, so the .toString() call was redundant.
  when(jobContext.getJobId()).thenReturn(currentJobPath.getName());
  JobLauncherUtils.cleanUpOldJobData(jobState, log, jobContext.getStagingDirProvided(), jobContext.getOutputDirProvided());
  // The old run of JOB_NAME1 is removed, the unrelated JOB_NAME2 run survives.
  Assert.assertFalse(fs.exists(oldJobPath1));
  Assert.assertTrue(fs.exists(oldJobPath2));
  Assert.assertFalse(fs.exists(currentJobPath));
}
/**
 * Verifies that {@code getDatasetStateFailures()} sums the per-dataset job-failure counters
 * (1 failure on dataset "0" plus 2 failures on dataset "1" = 3).
 */
@Test
public void testDatasetStateFailure() throws Exception {
  Properties jobProps = new Properties();
  jobProps.setProperty(ConfigurationKeys.JOB_NAME_KEY, "test");
  jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, "job_id_12345");
  jobProps.setProperty(ConfigurationKeys.METRICS_ENABLED_KEY, "false");
  Map<String, JobState.DatasetState> datasetStateMap = Maps.newHashMap();
  JobState.DatasetState failingDatasetState = new JobState.DatasetState("DatasetState", "DatasetState-1");
  // mark dataset state as a failing job
  failingDatasetState.incrementJobFailures();
  JobState.DatasetState failingDatasetState2 = new JobState.DatasetState("DatasetState2", "DatasetState-2");
  failingDatasetState2.incrementJobFailures();
  failingDatasetState2.incrementJobFailures();
  datasetStateMap.put("0", failingDatasetState);
  datasetStateMap.put("1", failingDatasetState2);
  final BlockingQueue<ControllableCallable<Void>> callables = Queues.newLinkedBlockingQueue();
  final JobContext jobContext =
      new ControllableCommitJobContext(jobProps, log, datasetStateMap, new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String input) {
          return !input.equals("1");
        }
      }, callables);
  ExecutorService executorService = Executors.newSingleThreadExecutor();
  try {
    // Fix: the returned Future was never used, so don't capture it.
    executorService.submit(new Runnable() {
      @Override
      public void run() {
        try {
          jobContext.commit();
        } catch (IOException ioe) {
          throw new RuntimeException(ioe);
        }
      }
    });
    callables.poll(1, TimeUnit.SECONDS).unblock();
    callables.poll(1, TimeUnit.SECONDS).unblock();
    // when checking the number of failures, this should detect the failing dataset states
    Assert.assertEquals(jobContext.getDatasetStateFailures(), 3);
  } finally {
    // Fix: shut the executor down so the test does not leak its worker thread.
    executorService.shutdownNow();
  }
}
/**
 * Verifies that {@code getDatasetStateFailures()} is zero when there are no dataset states at all.
 */
@Test
public void testNoDatasetStates() throws Exception {
  Properties jobProps = new Properties();
  jobProps.setProperty(ConfigurationKeys.JOB_NAME_KEY, "test");
  jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, "job_id_12345");
  jobProps.setProperty(ConfigurationKeys.METRICS_ENABLED_KEY, "false");
  Map<String, JobState.DatasetState> datasetStateMap = Maps.newHashMap();
  final BlockingQueue<ControllableCallable<Void>> callables = Queues.newLinkedBlockingQueue();
  final JobContext jobContext =
      new ControllableCommitJobContext(jobProps, log, datasetStateMap, new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String input) {
          return !input.equals("1");
        }
      }, callables);
  ExecutorService executorService = Executors.newSingleThreadExecutor();
  try {
    // Fix: the returned Future was never used, so don't capture it.
    executorService.submit(new Runnable() {
      @Override
      public void run() {
        try {
          jobContext.commit();
        } catch (IOException ioe) {
          throw new RuntimeException(ioe);
        }
      }
    });
    // With an empty dataset-state map there is nothing to fail, so the count must be zero.
    Assert.assertEquals(jobContext.getDatasetStateFailures(), 0);
  } finally {
    // Fix: shut the executor down so the test does not leak its worker thread.
    executorService.shutdownNow();
  }
}
/**
* A {@link Callable} that blocks until a different thread calls {@link #unblock()}.
*/
private class ControllableCallable<T> implements Callable<T> {
private final BlockingQueue<Boolean> queue;
private final Either<T, Exception> toReturn;
private final String name;
public ControllableCallable(Either<T, Exception> toReturn, String name) {
this.queue = Queues.newArrayBlockingQueue(1);
this.queue.add(true);
this.toReturn = toReturn;
this.name = name;
}
public void unblock() {
if (!this.queue.isEmpty()) {
this.queue.poll();
}
}
@Override
public T call()
throws Exception {
this.queue.put(false);
if (this.toReturn instanceof Either.Left) {
return ((Either.Left<T, Exception>) this.toReturn).getLeft();
} else {
throw ((Either.Right<T, Exception>) this.toReturn).getRight();
}
}
}
/**
 * A {@link DummyJobContext} whose dataset commits are {@link ControllableCallable}s, letting the
 * test decide when each dataset commit actually runs and whether it succeeds.
 */
private class ControllableCommitJobContext extends DummyJobContext {
  // Decides, per dataset URN, whether that dataset's commit succeeds (true) or throws.
  private final Predicate<String> successPredicate;
  // Receives every created commit callable so the test can unblock them one by one.
  private final Queue<ControllableCallable<Void>> callablesQueue;
  public ControllableCommitJobContext(Properties jobProps, Logger logger,
      Map<String, JobState.DatasetState> datasetStateMap, Predicate<String> successPredicate,
      Queue<ControllableCallable<Void>> callablesQueue)
      throws Exception {
    super(jobProps, logger, datasetStateMap);
    this.successPredicate = successPredicate;
    this.callablesQueue = callablesQueue;
  }
  /**
   * Returns a gated commit callable instead of a real dataset commit: it completes normally when
   * {@link #successPredicate} accepts the URN and throws a {@link RuntimeException} otherwise.
   * Every created callable is also published to {@link #callablesQueue} for the test to unblock.
   */
  @Override
  protected Callable<Void> createSafeDatasetCommit(boolean shouldCommitDataInJob, boolean isJobCancelled,
      DeliverySemantics deliverySemantics, String datasetUrn, JobState.DatasetState datasetState,
      boolean isMultithreaded, JobContext jobContext) {
    ControllableCallable<Void> callable;
    if (this.successPredicate.apply(datasetUrn)) {
      callable = new ControllableCallable<>(Either.<Void, Exception>left(null), datasetUrn);
    } else {
      callable = new ControllableCallable<>(Either.<Void, Exception>right(new RuntimeException("Fail!")), datasetUrn);
    }
    this.callablesQueue.add(callable);
    return callable;
  }
}
}
| 1,229 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/TaskContinuousTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.fork.IdentityForkOperator;
import org.apache.gobblin.publisher.TaskPublisher;
import org.apache.gobblin.qualitychecker.row.RowLevelPolicyChecker;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicyChecker;
import org.apache.gobblin.runtime.util.TaskMetrics;
import org.apache.gobblin.source.extractor.CheckpointableWatermark;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.DefaultCheckpointableWatermark;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.extractor.StreamingExtractor;
import org.apache.gobblin.source.extractor.extract.LongWatermark;
import org.apache.gobblin.stream.RecordEnvelope;
import org.apache.gobblin.util.ExecutorsUtils;
import org.apache.gobblin.util.TestUtils;
import org.apache.gobblin.writer.DataWriter;
import org.apache.gobblin.writer.WatermarkAwareWriter;
import org.apache.gobblin.writer.WatermarkStorage;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@Slf4j
@Test
public class TaskContinuousTest {
/**
 * A {@link StreamingExtractor} that produces exactly one record (at watermark index 0 under
 * source "default") and then returns {@code null} on every subsequent read.
 */
private static class OneRecordExtractor implements StreamingExtractor<Object, String> {
  // Number of records emitted so far; also used to validate committed watermarks.
  private long index = 0L;
  private final String record;
  private volatile boolean closed = false;
  public OneRecordExtractor(String record) {
    this.record = record;
  }
  @Override
  public Object getSchema()
      throws IOException {
    return null;
  }
  @Override
  public RecordEnvelope<String> readRecordEnvelope()
      throws DataRecordException, IOException {
    if (!this.closed) {
      // Emit the single record only once; afterwards signal end-of-stream with null.
      if (index == 0) {
        RecordEnvelope<String> recordEnvelope =
            new RecordEnvelope<>(record, new DefaultCheckpointableWatermark("default", new LongWatermark(index)));
        log.debug("Returning record with index {}", index);
        index++;
        return recordEnvelope;
      } else {
        return null;
      }
    } else {
      log.info("Extractor has been closed, returning null");
      return null;
    }
  }
  @Override
  public long getExpectedRecordCount() {
    return this.index;
  }
  @Override
  public long getHighWatermark() {
    return -1;
  }
  @Override
  public void close()
      throws IOException {
    this.closed = true;
  }
  /**
   * Checks the committed watermarks against the number of records emitted so far.
   * With {@code exact}, the watermark must equal {@code index - 1} (last emitted record);
   * otherwise it only needs to lag behind {@code index}. An empty map trivially passes.
   */
  public boolean validateWatermarks(boolean exact, Map<String, CheckpointableWatermark> watermarkMap) {
    if (!watermarkMap.isEmpty()) {
      // watermark must be <= the index
      LongWatermark longWatermark = (LongWatermark) watermarkMap.values().iterator().next().getWatermark();
      if (exact) {
        // Fix: route diagnostic output through the test logger instead of System.out.
        log.info("{}:{}", index - 1, longWatermark.getValue());
        return ((index - 1) == longWatermark.getValue());
      } else {
        return (index > longWatermark.getValue());
      }
    }
    return true;
  }
  @Override
  public void start(WatermarkStorage watermarkStorage) {
  }
}
/**
 * A {@link StreamingExtractor} that produces an unbounded stream of records (one every
 * {@code sleepTimeInMillis}) with monotonically increasing watermarks under source "default",
 * until it is closed.
 */
private static class ContinuousExtractor implements StreamingExtractor<Object, String> {
  // Number of records emitted so far; also the value of the next record and its watermark.
  private long index = 0L;
  private final long sleepTimeInMillis;
  private volatile boolean closed = false;
  public ContinuousExtractor(long sleepTimeInMillis) {
    this.sleepTimeInMillis = sleepTimeInMillis;
  }
  @Override
  public Object getSchema()
      throws IOException {
    return null;
  }
  @Override
  public RecordEnvelope<String> readRecordEnvelope()
      throws DataRecordException, IOException {
    if (!this.closed) {
      // Simulate per-record extraction latency.
      try {
        Thread.sleep(this.sleepTimeInMillis);
      } catch (InterruptedException e) {
        Throwables.propagate(e);
      }
      String record = index + "";
      RecordEnvelope<String> recordEnvelope =
          new RecordEnvelope<>(record, new DefaultCheckpointableWatermark("default", new LongWatermark(index)));
      index++;
      return recordEnvelope;
    } else {
      log.info("Extractor has been closed, returning null");
      return null;
    }
  }
  @Override
  public long getExpectedRecordCount() {
    return this.index;
  }
  @Override
  public long getHighWatermark() {
    return -1;
  }
  @Override
  public void close()
      throws IOException {
    this.closed = true;
  }
  /**
   * Checks the committed watermarks against the number of records emitted so far.
   * With {@code exact}, the watermark must equal {@code index - 1} (last emitted record);
   * otherwise it only needs to lag behind {@code index}. An empty map trivially passes.
   */
  public boolean validateWatermarks(boolean exact, Map<String, CheckpointableWatermark> watermarkMap) {
    if (!watermarkMap.isEmpty()) {
      // watermark must be <= the index
      LongWatermark longWatermark = (LongWatermark) watermarkMap.values().iterator().next().getWatermark();
      if (exact) {
        // Fix: route diagnostic output through the test logger instead of System.out.
        log.info("{}:{}", index - 1, longWatermark.getValue());
        return ((index - 1) == longWatermark.getValue());
      } else {
        return (index > longWatermark.getValue());
      }
    }
    return true;
  }
  @Override
  public void start(WatermarkStorage watermarkStorage) {
  }
  @Override
  public void shutdown() throws JobShutdownException {
    try {
      this.close();
    } catch (Exception e) {
      // NOTE(review): the underlying cause 'e' is discarded here; if JobShutdownException
      // supports a (String, Throwable) constructor, chain the cause — TODO confirm.
      throw new JobShutdownException("Failed to close extractor during shutdown");
    }
  }
}
class MockWatermarkStorage implements WatermarkStorage {
private ConcurrentHashMap<String, CheckpointableWatermark> watermarks = new ConcurrentHashMap<>();
@Override
public void commitWatermarks(Iterable<CheckpointableWatermark> watermarks)
throws IOException {
for (CheckpointableWatermark watermark: watermarks) {
this.watermarks.put(watermark.getSource(), watermark);
}
}
@Override
public Map<String, CheckpointableWatermark> getCommittedWatermarks(
Class<? extends CheckpointableWatermark> watermarkClass, Iterable<String> sourcePartitions)
throws IOException {
// make a copy
return new HashMap<>(this.watermarks);
}
}
/**
 * Test that a streaming task will work correctly when the extractor only produces one record.
 * Verifies both the committed watermark and that the single record reaches the writer.
 * @throws Exception
 */
@Test
public void testContinuousTaskOneRecord()
    throws Exception {
  // Exercise both the synchronous and asynchronous task execution models.
  for (Boolean taskExecutionSync : new Boolean[]{true, false}) {
    ArrayList<Object> recordCollector = new ArrayList<>(100);
    String testRecord = "hello";
    OneRecordExtractor oneRecordExtractor = new OneRecordExtractor(testRecord);
    TaskContext mockTaskContext = getMockTaskContext(recordCollector, oneRecordExtractor, taskExecutionSync,
        Integer.MAX_VALUE);
    // Create a mock TaskPublisher (re-stubbed on top of getMockTaskContext's default stub)
    TaskPublisher mockTaskPublisher = mock(TaskPublisher.class);
    when(mockTaskPublisher.canPublish()).thenReturn(TaskPublisher.PublisherState.SUCCESS);
    when(mockTaskContext.getTaskPublisher(any(TaskState.class), any())).thenReturn(mockTaskPublisher);
    // Create a mock TaskStateTracker
    TaskStateTracker mockTaskStateTracker = mock(TaskStateTracker.class);
    // Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
    TaskExecutor taskExecutor = new TaskExecutor(new Properties());
    // Create the Task
    Task task = new Task(mockTaskContext, mockTaskStateTracker, taskExecutor, Optional.<CountDownLatch>absent());
    ScheduledExecutorService taskRunner = new ScheduledThreadPoolExecutor(1, ExecutorsUtils.newThreadFactory(Optional.of(log)));
    taskRunner.execute(task);
    // Let the task run for 2 seconds, checking committed watermarks each second
    int sleepIterations = 2;
    int currentIteration = 0;
    WatermarkStorage mockWatermarkStorage = mockTaskContext.getWatermarkStorage();
    Map<String, CheckpointableWatermark> externalWatermarkStorage;
    while (currentIteration < sleepIterations) {
      Thread.sleep(1000);
      currentIteration++;
      externalWatermarkStorage =
          mockWatermarkStorage.getCommittedWatermarks(CheckpointableWatermark.class, ImmutableList.of("default"));
      if (!externalWatermarkStorage.isEmpty()) {
        for (CheckpointableWatermark watermark : externalWatermarkStorage.values()) {
          log.info("Observed committed watermark: {}", watermark);
        }
        log.info("Task progress: {}", task.getProgress());
        // Ensure that watermarks seem reasonable at each step
        Assert.assertTrue(oneRecordExtractor.validateWatermarks(false, externalWatermarkStorage));
      }
    }
    // Let's try to shutdown the task
    task.shutdown();
    log.info("Shutting down task now");
    boolean success = task.awaitShutdown(3000);
    Assert.assertTrue(success, "Task should shutdown in 3 seconds");
    log.info("Task done waiting to shutdown {}", success);
    // Fix: the watermark is committed under source "default" (see OneRecordExtractor), not "0".
    // MockWatermarkStorage ignores the partition filter, so behavior is unchanged, but the
    // request now matches the source actually committed.
    externalWatermarkStorage =
        mockWatermarkStorage.getCommittedWatermarks(CheckpointableWatermark.class, ImmutableList.of("default"));
    // Ensure that committed watermarks match exactly the input rows because we shutdown in an orderly manner.
    Assert.assertTrue(oneRecordExtractor.validateWatermarks(true, externalWatermarkStorage));
    // Ensure that the record made it to the writer correctly
    Assert.assertEquals(recordCollector.size(), 1);
    Assert.assertEquals(recordCollector.get(0), testRecord);
    task.commit();
    Assert.assertTrue(mockTaskContext.getTaskState().getWorkingState() == WorkUnitState.WorkingState.SUCCESSFUL);
    // Shutdown the executor
    taskRunner.shutdown();
    taskRunner.awaitTermination(100, TimeUnit.MILLISECONDS);
  }
}
/**
 * Builds a mock {@link TaskContext} wired for streaming tests.
 *
 * @param recordCollector list the test writer appends every written record to
 * @param mockExtractor the extractor the task will read from
 * @param taskExecutionSync whether the task uses the synchronous execution model
 * @param errorAtCount record count at which the test writer starts throwing IOException
 *        (use {@code Integer.MAX_VALUE} for a writer that never fails)
 * @return a mock TaskContext backed by a real RowLevelPolicyChecker, a {@link MockWatermarkStorage},
 *         an always-successful TaskPublisher, and a {@link TestStreamingDataWriterBuilder}
 */
private TaskContext getMockTaskContext(ArrayList<Object> recordCollector,
    Extractor mockExtractor, Boolean taskExecutionSync, int errorAtCount)
    throws Exception {
  TaskState taskState = getStreamingTaskState(taskExecutionSync);
  // Create a real (not mock) RowLevelPolicyChecker on the local filesystem
  RowLevelPolicyChecker mockRowLevelPolicyChecker =
      new RowLevelPolicyChecker(Lists.newArrayList(), "stateId", FileSystem.getLocal(new Configuration()));
  WatermarkStorage mockWatermarkStorage = new MockWatermarkStorage();
  // Create a mock TaskPublisher that always allows publishing
  TaskPublisher mockTaskPublisher = mock(TaskPublisher.class);
  when(mockTaskPublisher.canPublish()).thenReturn(TaskPublisher.PublisherState.SUCCESS);
  // Create a mock TaskContext and stub every accessor the Task touches
  TaskContext mockTaskContext = mock(TaskContext.class);
  when(mockTaskContext.getTaskMetrics()).thenReturn(TaskMetrics.get(taskState));
  when(mockTaskContext.getExtractor()).thenReturn(mockExtractor);
  when(mockTaskContext.getRawSourceExtractor()).thenReturn(mockExtractor);
  when(mockTaskContext.getWatermarkStorage()).thenReturn(mockWatermarkStorage);
  when(mockTaskContext.getForkOperator()).thenReturn(new IdentityForkOperator());
  when(mockTaskContext.getTaskState()).thenReturn(taskState);
  when(mockTaskContext.getTaskPublisher(any(TaskState.class), any()))
      .thenReturn(mockTaskPublisher);
  when(mockTaskContext.getRowLevelPolicyChecker()).thenReturn(mockRowLevelPolicyChecker);
  when(mockTaskContext.getRowLevelPolicyChecker(anyInt())).thenReturn(mockRowLevelPolicyChecker);
  when(mockTaskContext.getTaskLevelPolicyChecker(any(TaskState.class), anyInt())).thenReturn(mock(TaskLevelPolicyChecker.class));
  when(mockTaskContext.getDataWriterBuilder(anyInt(), anyInt())).thenReturn(new TestStreamingDataWriterBuilder(recordCollector,
      errorAtCount));
  return mockTaskContext;
}
/**
 * Builds a {@link TaskState} configured for streaming execution.
 *
 * @param taskExecutionSync whether the task should use the synchronous execution model
 * @return a TaskState with metrics disabled, streaming mode enabled, and fixed job/task ids
 */
private TaskState getStreamingTaskState(Boolean taskExecutionSync) {
  // Start from a generic test WorkUnitState and tag it with a task key.
  WorkUnitState workUnitState = TestUtils.createTestWorkUnitState();
  workUnitState.setProp(ConfigurationKeys.TASK_KEY_KEY, "1234");

  TaskState streamingState = new TaskState(workUnitState);
  streamingState.setProp(ConfigurationKeys.METRICS_ENABLED_KEY, Boolean.FALSE.toString());
  streamingState.setProp(TaskConfigurationKeys.TASK_EXECUTION_MODE, ExecutionModel.STREAMING.name());
  streamingState.setProp(ConfigurationKeys.TASK_SYNCHRONOUS_EXECUTION_MODEL_KEY, taskExecutionSync.toString());
  streamingState.setJobId("1234");
  streamingState.setTaskId("testContinuousTaskId");
  return streamingState;
}
/**
 * Test that a streaming task will work correctly when an extractor is continuously producing records
 * No converters
 * Identity fork
 * One writer
 * @throws Exception
 */
@Test
public void testContinuousTask()
    throws Exception {
  // Exercise both the synchronous and asynchronous task execution models.
  for (Boolean taskExecutionSync : new Boolean[]{true, false}) {
    ArrayList<Object> recordCollector = new ArrayList<>(100);
    long perRecordExtractLatencyMillis = 1000; // 1 second per record
    ContinuousExtractor continuousExtractor = new ContinuousExtractor(perRecordExtractLatencyMillis);
    TaskContext mockTaskContext = getMockTaskContext(recordCollector, continuousExtractor, taskExecutionSync,
        Integer.MAX_VALUE);
    // Create a mock TaskStateTracker
    TaskStateTracker mockTaskStateTracker = mock(TaskStateTracker.class);
    // Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
    TaskExecutor taskExecutor = new TaskExecutor(new Properties());
    // Create the Task
    Task task = new Task(mockTaskContext, mockTaskStateTracker, taskExecutor, Optional.<CountDownLatch>absent());
    ScheduledExecutorService taskRunner = new ScheduledThreadPoolExecutor(1, ExecutorsUtils.newThreadFactory(Optional.of(log)));
    taskRunner.execute(task);
    // Let the task run for 10 seconds, checking committed watermarks each second
    int sleepIterations = 10;
    int currentIteration = 0;
    while (currentIteration < sleepIterations) {
      Thread.sleep(1000);
      currentIteration++;
      Map<String, CheckpointableWatermark> externalWatermarkStorage = mockTaskContext.getWatermarkStorage()
          .getCommittedWatermarks(CheckpointableWatermark.class, ImmutableList.of("default"));
      if (!externalWatermarkStorage.isEmpty()) {
        for (CheckpointableWatermark watermark : externalWatermarkStorage.values()) {
          log.info("Observed committed watermark: {}", watermark);
        }
        log.info("Task progress: {}", task.getProgress());
        // Ensure that watermarks seem reasonable at each step
        Assert.assertTrue(continuousExtractor.validateWatermarks(false, externalWatermarkStorage));
      }
    }
    // Let's try to shutdown the task
    task.shutdown();
    log.info("Shutting down task now");
    boolean success = task.awaitShutdown(30000);
    // Fix: the timeout above is 30000 ms, so the failure message must say 30 seconds (was "3 seconds").
    Assert.assertTrue(success, "Task should shutdown in 30 seconds");
    log.info("Task done waiting to shutdown {}", success);
    // Ensure that committed watermarks match exactly the input rows because we shutdown in an orderly manner.
    Assert.assertTrue(continuousExtractor.validateWatermarks(true, mockTaskContext.getWatermarkStorage()
        .getCommittedWatermarks(CheckpointableWatermark.class, ImmutableList.of("default"))));
    task.commit();
    Assert.assertTrue(mockTaskContext.getTaskState().getWorkingState() == WorkUnitState.WorkingState.SUCCESSFUL);
    // Shutdown the executor
    taskRunner.shutdown();
    taskRunner.awaitTermination(100, TimeUnit.MILLISECONDS);
  }
}
/**
 * Test that a streaming task will work correctly when an extractor is continuously producing records and encounters
 * an error in the writer.
 *
 * The task should exit in a failed state.
 *
 * No converters
 * Identity fork
 * One writer
 * @throws Exception
 */
@Test
public void testContinuousTaskError()
    throws Exception {
  // Exercise both the synchronous and asynchronous task execution models.
  for (Boolean taskExecutionSync: new Boolean[]{true, false}) {
    ArrayList<Object> recordCollector = new ArrayList<>(100);
    long perRecordExtractLatencyMillis = 1000; // 1 second per record
    ContinuousExtractor continuousExtractor = new ContinuousExtractor(perRecordExtractLatencyMillis);
    // errorAtCount = 5: the test writer throws IOException once it has seen 5 records.
    TaskContext mockTaskContext = getMockTaskContext(recordCollector, continuousExtractor, taskExecutionSync, 5);
    // Create a mock TaskStateTracker
    TaskStateTracker mockTaskStateTracker = mock(TaskStateTracker.class);
    // Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
    TaskExecutor taskExecutor = new TaskExecutor(new Properties());
    // Create the Task
    Task task = new Task(mockTaskContext, mockTaskStateTracker, taskExecutor, Optional.<CountDownLatch>absent());
    ScheduledExecutorService taskRunner = new ScheduledThreadPoolExecutor(1, ExecutorsUtils.newThreadFactory(Optional.of(log)));
    taskRunner.execute(task);
    // Let the task run for 10 seconds; it will hit the writer error after ~5 records.
    int sleepIterations = 10;
    int currentIteration = 0;
    while (currentIteration < sleepIterations) {
      Thread.sleep(1000);
      currentIteration++;
      Map<String, CheckpointableWatermark> externalWatermarkStorage = mockTaskContext.getWatermarkStorage()
          .getCommittedWatermarks(CheckpointableWatermark.class, ImmutableList.of("default"));
      if (!externalWatermarkStorage.isEmpty()) {
        for (CheckpointableWatermark watermark : externalWatermarkStorage.values()) {
          log.info("Observed committed watermark: {}", watermark);
        }
        log.info("Task progress: {}", task.getProgress());
        // Ensure that watermarks seem reasonable at each step
        Assert.assertTrue(continuousExtractor.validateWatermarks(false, externalWatermarkStorage));
      }
    }
    boolean success = task.awaitShutdown(30000);
    Assert.assertTrue(success, "Task should shutdown in 30 seconds");
    log.info("Task done waiting to shutdown {}", success);
    // Shutdown on error, so don't check for the exact watermark
    Assert.assertTrue(continuousExtractor.validateWatermarks(false, mockTaskContext.getWatermarkStorage()
        .getCommittedWatermarks(CheckpointableWatermark.class, ImmutableList.of("default"))));
    task.commit();
    // Task should be in failed state from the injected writer error
    // (TestStreamingDataWriterBuilder throws IOException at errorAtCount, not the extractor).
    Assert.assertEquals(mockTaskContext.getTaskState().getWorkingState(), WorkUnitState.WorkingState.FAILED);
    // Shutdown the executor
    taskRunner.shutdown();
    taskRunner.awaitTermination(100, TimeUnit.MILLISECONDS);
  }
}
/**
 * Builds a watermark-aware test writer that appends every record to a shared collector list and
 * throws {@link IOException} once a configurable record count is reached (for error-path tests).
 */
private class TestStreamingDataWriterBuilder extends org.apache.gobblin.writer.DataWriterBuilder {
  // Records written successfully are appended here for the test to inspect.
  private final List<Object> _recordCollector;
  // Writing the _errorAtCount-th record (and any after it) throws IOException.
  private final int _errorAtCount;
  private int _recordCount = 0;
  TestStreamingDataWriterBuilder(List<Object> recordCollector, int errorAtCount) {
    _recordCollector = recordCollector;
    _errorAtCount = errorAtCount;
  }
  @Override
  public DataWriter build()
      throws IOException {
    return new WatermarkAwareWriter<Object>() {
      // Most recent watermark seen; updated on every successful write.
      private AtomicReference<CheckpointableWatermark> lastWatermark = new AtomicReference<>(null);
      // The single watermark source this writer accepts; set on first write.
      private AtomicReference<String> source = new AtomicReference<>(null);
      @Override
      public boolean isWatermarkCapable() {
        return true;
      }
      @Override
      public void writeEnvelope(RecordEnvelope<Object> recordEnvelope)
          throws IOException {
        // Count first so the error fires before the record is collected or acked.
        _recordCount++;
        if (_recordCount >= _errorAtCount) {
          throw new IOException("Errored after record count " + _errorAtCount);
        }
        _recordCollector.add(recordEnvelope.getRecord());
        String source = recordEnvelope.getWatermark().getSource();
        // Enforce the single-source assumption: every record must share the first source seen.
        if (this.source.get() != null) {
          if (!source.equals(this.source.get())) {
            throw new RuntimeException("This writer only supports a single source");
          }
        }
        this.lastWatermark.set(recordEnvelope.getWatermark());
        recordEnvelope.ack();
        this.source.set(source);
      }
      @Override
      public void commit()
          throws IOException {
      }
      @Override
      public void cleanup()
          throws IOException {
      }
      @Override
      public long recordsWritten() {
        return 0;
      }
      @Override
      public long bytesWritten()
          throws IOException {
        return 0;
      }
      @Override
      public void close()
          throws IOException {
      }
    };
  }
}
}
| 1,230 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/MultiEventMetadataGeneratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.util.Map;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.gobblin.metrics.event.EventName;
import org.apache.gobblin.runtime.api.EventMetadataGenerator;
import org.apache.gobblin.runtime.api.MultiEventMetadataGenerator;
public class MultiEventMetadataGeneratorTest {
/**
 * Verifies that metadata from multiple generators is merged and that, on duplicate keys,
 * the later generator in the list overrides the earlier one.
 */
@Test
public void testInstantiate() {
  JobContext mockJobContext = Mockito.mock(JobContext.class);
  MultiEventMetadataGenerator generator = new MultiEventMetadataGenerator(ImmutableList.of(
      "org.apache.gobblin.runtime.MultiEventMetadataGeneratorTest$DummyEventMetadataGenerator",
      "org.apache.gobblin.runtime.MultiEventMetadataGeneratorTest$DummyEventMetadataGenerator2"));
  EventName jobCompleteEvent = EventName.getEnumFromEventId("JobCompleteTimer");
  Map<String, String> mergedMetadata = generator.getMetadata(mockJobContext, jobCompleteEvent);
  // Three distinct keys survive the merge; "dummyKey12" is overwritten by the second generator.
  Assert.assertEquals(mergedMetadata.size(), 3);
  Assert.assertEquals(mergedMetadata.get("dummyKey11"), "dummyValue11");
  Assert.assertEquals(mergedMetadata.get("dummyKey12"), "dummyValue22");
  Assert.assertEquals(mergedMetadata.get("dummyKey21"), "dummyValue21");
}
/** Test generator contributing keys "dummyKey11" and "dummyKey12". */
public static class DummyEventMetadataGenerator implements EventMetadataGenerator {
  @Override
  public Map<String, String> getMetadata(JobContext jobContext, EventName eventName) {
    // Keys/values must stay in sync with the assertions in testInstantiate().
    return ImmutableMap.<String, String>builder()
        .put("dummyKey11", "dummyValue11")
        .put("dummyKey12", "dummyValue12")
        .build();
  }
}
/** Test generator contributing "dummyKey21" and an overriding value for "dummyKey12". */
public static class DummyEventMetadataGenerator2 implements EventMetadataGenerator {
  @Override
  public Map<String, String> getMetadata(JobContext jobContext, EventName eventName) {
    // Keys/values must stay in sync with the assertions in testInstantiate().
    return ImmutableMap.<String, String>builder()
        .put("dummyKey21", "dummyValue21")
        .put("dummyKey12", "dummyValue22")
        .build();
  }
}
} | 1,231 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/LocalJobLauncherTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import com.codahale.metrics.Metric;
import java.io.FileReader;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metastore.FsStateStore;
import org.apache.gobblin.metastore.StateStore;
import org.apache.gobblin.metastore.testing.ITestMetastoreDatabase;
import org.apache.gobblin.metastore.testing.TestMetastoreDatabaseFactory;
import org.apache.gobblin.runtime.local.LocalJobLauncher;
import org.apache.gobblin.runtime.metrics.ServiceGobblinJobMetricReporter;
import org.apache.gobblin.util.JobLauncherUtils;
import org.apache.gobblin.util.limiter.BaseLimiterType;
import org.apache.gobblin.util.limiter.DefaultLimiterFactory;
import org.apache.gobblin.writer.Destination;
import org.apache.gobblin.writer.WriterOutputFormat;
import org.junit.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.io.Closer;
import static org.apache.gobblin.runtime.AbstractJobLauncher.GOBBLIN_JOB_MULTI_TEMPLATE_KEY;
import static org.apache.gobblin.runtime.AbstractJobLauncher.GOBBLIN_JOB_TEMPLATE_KEY;
/**
* Unit test for {@link LocalJobLauncher}.
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.runtime.local" })
public class LocalJobLauncherTest {
private Properties launcherProps;
private JobLauncherTestHelper jobLauncherTestHelper;
private ITestMetastoreDatabase testMetastoreDatabase;
@BeforeClass
public void startUp() throws Exception {
  testMetastoreDatabase = TestMetastoreDatabaseFactory.get();
  this.launcherProps = new Properties();
  // Fix: the original passed an anonymous FileReader into Properties.load(),
  // leaking the file handle. try-with-resources closes it deterministically.
  try (FileReader propsReader = new FileReader("gobblin-test/resource/gobblin.test.properties")) {
    this.launcherProps.load(propsReader);
  }
  this.launcherProps.setProperty(ConfigurationKeys.JOB_HISTORY_STORE_ENABLED_KEY, "true");
  this.launcherProps.setProperty(ConfigurationKeys.METRICS_ENABLED_KEY, "true");
  this.launcherProps.setProperty(ConfigurationKeys.METRICS_REPORTING_FILE_ENABLED_KEY, "false");
  // Point the job history store at the per-run embedded test database.
  this.launcherProps.setProperty(ConfigurationKeys.JOB_HISTORY_STORE_URL_KEY, testMetastoreDatabase.getJdbcUrl());
  StateStore<JobState.DatasetState> datasetStateStore =
      new FsStateStore<>(this.launcherProps.getProperty(ConfigurationKeys.STATE_STORE_FS_URI_KEY),
          this.launcherProps.getProperty(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY), JobState.DatasetState.class);
  this.jobLauncherTestHelper = new JobLauncherTestHelper(this.launcherProps, datasetStateStore);
}
@Test
public void testLaunchJob() throws Exception {
  Properties props = loadJobProps();
  // Suffix the job name so this test's state store is isolated from other tests.
  String jobName = props.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testLaunchJob";
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName);
  try {
    this.jobLauncherTestHelper.runTest(props);
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobName);
  }
}
@Test
public void testJobTemplateResolutionInAbstractLauncher() throws Exception {
  Properties jobProps = loadJobProps();
  String jobId = JobLauncherUtils.newJobId("beforeResolution");
  jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
  jobProps.setProperty("job.name", "beforeResolution");
  jobProps.setProperty(GOBBLIN_JOB_TEMPLATE_KEY, "resource:///templates/distcp-ng.template");
  JobContext jobContext = dummyJobContextInitHelper(jobProps);
  // Resolution succeeded iff:
  // 1) user-supplied config ("job.name") is NOT overwritten by the template, and
  // 2) keys absent from the user config ("distcp.persist.dir") ARE populated by it.
  // Fix: removed a leftover System.out.println debug dump of the job state.
  Assert.assertEquals(jobContext.getJobState().getProp("job.name"), "beforeResolution");
  Assert.assertEquals(jobContext.getJobState().getProp("distcp.persist.dir"), "/tmp/distcp-persist-dir");
}
@Test
// NOTE(review): method name has a typo ("Resoluion"); left unchanged because test
// method names can be referenced by suite configs. Also note this class imports
// org.junit.Assert rather than org.testng.Assert — the (actual, expected) argument
// order used below follows TestNG's convention, so JUnit failure messages here
// would swap "expected"/"actual" labels. Worth unifying on org.testng.Assert.
public void testMultipleJobTemplatesResoluion() throws Exception {
Properties jobProps = loadJobProps();
// Job Name shouldn't be overwritten by any templates and the precedence of template is lower than job configuration.
String jobId = JobLauncherUtils.newJobId("beforeResolution");
jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
jobProps.setProperty("job.name", "beforeResolution");
jobProps.setProperty(GOBBLIN_JOB_MULTI_TEMPLATE_KEY,
"resource:///templates/test.template,resource:///templates/test-overwrite.template");
JobContext jobContext = dummyJobContextInitHelper(jobProps);
// Verifying multi-resolution happens and it respect the precedency.
// With test.template listed first, test-overwrite.template's values win for
// keys set by both ("x_x"/"y_y"), i.e. later templates override earlier ones.
Assert.assertEquals(jobContext.getJobState().getProp("job.name"), "beforeResolution");
Assert.assertEquals(jobContext.getJobState().getProp("templated0"), "x_x");
Assert.assertEquals(jobContext.getJobState().getProp("templated1"), "y_y");
// Verifying the order of template list matters.
// Reversing the list flips the winners back to test.template's "x"/"y".
jobProps.setProperty(GOBBLIN_JOB_MULTI_TEMPLATE_KEY,
"resource:///templates/test-overwrite.template,resource:///templates/test.template");
jobContext = dummyJobContextInitHelper(jobProps);
Assert.assertEquals(jobContext.getJobState().getProp("job.name"), "beforeResolution");
Assert.assertEquals(jobContext.getJobState().getProp("templated0"), "x");
Assert.assertEquals(jobContext.getJobState().getProp("templated1"), "y");
// Verify multi-resolution with inheritance.
jobProps.setProperty(GOBBLIN_JOB_MULTI_TEMPLATE_KEY,
"resource:///templates/test-multitemplate-with-inheritance.template,resource:///templates/test.template");
jobContext = dummyJobContextInitHelper(jobProps);
Assert.assertEquals(jobContext.getJobState().getProp("templated0"), "x");
Assert.assertEquals(jobContext.getJobState().getProp("templated1"), "y");
Assert.assertEquals(jobContext.getJobState().getProp("job.name"), "beforeResolution");
// Picked an distcp-specific configuration that there's no default value being set in jobConf.
Assert.assertEquals(jobContext.getJobState().getProp("distcp.persist.dir"), "/tmp/distcp-persist-dir");
}
/**
 * Initializes a {@link JobContext} by constructing (and immediately closing) a
 * {@link JobLauncher}; used to exercise job-template resolution without running a job.
 *
 * @param jobProps job-level configuration, including any template keys to resolve
 * @return the resolved {@link JobContext} taken from the launcher
 * @throws Exception if launcher construction or context resolution fails
 */
private JobContext dummyJobContextInitHelper(Properties jobProps) throws Exception {
  // Fix: removed a dead local ("JobContext jobContext = null;") that was assigned
  // and never read. Closer still guarantees the launcher is closed on all paths.
  Closer closer = Closer.create();
  try {
    JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps));
    return ((AbstractJobLauncher) jobLauncher).getJobContext();
  } finally {
    closer.close();
  }
}
@Test
public void testLaunchJobWithPullLimit() throws Exception {
  final int limit = 10;
  Properties props = loadJobProps();
  String jobName = props.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testLaunchJobWithPullLimit";
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName);
  // Enable count-based extract throttling so the helper can verify the cap.
  props.setProperty(ConfigurationKeys.EXTRACT_LIMIT_ENABLED_KEY, Boolean.TRUE.toString());
  props.setProperty(DefaultLimiterFactory.EXTRACT_LIMIT_TYPE_KEY, BaseLimiterType.COUNT_BASED.toString());
  props.setProperty(DefaultLimiterFactory.EXTRACT_LIMIT_COUNT_LIMIT_KEY, Integer.toString(limit));
  try {
    this.jobLauncherTestHelper.runTestWithPullLimit(props, limit);
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobName);
  }
}
@Test
public void testLaunchJobWithMultiWorkUnit() throws Exception {
  Properties props = loadJobProps();
  String jobName = props.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testLaunchJobWithMultiWorkUnit";
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName);
  // Ask the test source to pack work units into a MultiWorkUnit.
  props.setProperty("use.multiworkunit", Boolean.TRUE.toString());
  try {
    this.jobLauncherTestHelper.runTest(props);
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobName);
  }
}
@Test
public void testLaunchJobWithTaskTimestamps() throws Exception {
  Properties jobProps = loadJobProps();
  // Fix: the original appended "-testLaunchJob", colliding with testLaunchJob's
  // state-store name; every other test here uses a suffix unique to itself.
  jobProps.setProperty(ConfigurationKeys.JOB_NAME_KEY,
      jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testLaunchJobWithTaskTimestamps");
  // Have the writer embed a task timestamp into output paths.
  jobProps.setProperty(ConfigurationKeys.WRITER_ADD_TASK_TIMESTAMP, "true");
  try {
    this.jobLauncherTestHelper.runTest(jobProps);
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY));
  }
}
@Test(groups = {"ignore"})
public void testCancelJob() throws Exception {
  // Excluded from the default run via the "ignore" group; exercises mid-run cancellation.
  Properties props = loadJobProps();
  this.jobLauncherTestHelper.runTestWithCancellation(props);
}
@Test
public void testLaunchJobWithFork() throws Exception {
  Properties props = loadJobProps();
  String jobName = props.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testLaunchJobWithFork";
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName);
  props.setProperty(ConfigurationKeys.CONVERTER_CLASSES_KEY, "org.apache.gobblin.test.TestConverter2");
  props.setProperty(ConfigurationKeys.FORK_BRANCHES_KEY, "2");
  // Both fork branches get an identical configuration; set the per-branch
  // (".0"/".1") keys in one pass instead of duplicating every line.
  for (int branch = 0; branch < 2; branch++) {
    String suffix = "." + branch;
    props.setProperty(ConfigurationKeys.ROW_LEVEL_POLICY_LIST + suffix,
        "org.apache.gobblin.policies.schema.SchemaRowCheckPolicy");
    props.setProperty(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE + suffix, "OPTIONAL");
    props.setProperty(ConfigurationKeys.TASK_LEVEL_POLICY_LIST + suffix,
        "org.apache.gobblin.policies.count.RowCountPolicy,org.apache.gobblin.policies.schema.SchemaCompatibilityPolicy");
    props.setProperty(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE + suffix, "OPTIONAL,OPTIONAL");
    props.setProperty(ConfigurationKeys.WRITER_OUTPUT_FORMAT_KEY + suffix, WriterOutputFormat.AVRO.name());
    props.setProperty(ConfigurationKeys.WRITER_DESTINATION_TYPE_KEY + suffix,
        Destination.DestinationType.HDFS.name());
    props.setProperty(ConfigurationKeys.WRITER_STAGING_DIR + suffix,
        props.getProperty(ConfigurationKeys.WRITER_STAGING_DIR));
    props.setProperty(ConfigurationKeys.WRITER_OUTPUT_DIR + suffix,
        props.getProperty(ConfigurationKeys.WRITER_OUTPUT_DIR));
    props.setProperty(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR + suffix,
        props.getProperty(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR));
  }
  try {
    this.jobLauncherTestHelper.runTestWithFork(props);
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobName);
  }
}
@Test
public void testLaunchJobWithMultipleDatasets() throws Exception {
  Properties props = loadJobProps();
  String jobName = props.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testLaunchJobWithMultipleDatasets";
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName);
  try {
    this.jobLauncherTestHelper.runTestWithMultipleDatasets(props);
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobName);
  }
}
@Test
public void testLaunchJobWithCommitSuccessfulTasksPolicy() throws Exception {
  Properties props = loadJobProps();
  String jobName =
      props.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testLaunchJobWithCommitSuccessfulTasksPolicy";
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName);
  try {
    this.jobLauncherTestHelper.runTestWithCommitSuccessfulTasksPolicy(props);
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobName);
  }
}
@Test
public void testLaunchJobWithMultipleDatasetsAndFaultyExtractor() throws Exception {
  Properties props = loadJobProps();
  String jobName =
      props.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testLaunchJobWithMultipleDatasetsAndFaultyExtractor";
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName);
  try {
    // Second argument false: do NOT use the partial-commit policy.
    this.jobLauncherTestHelper.runTestWithMultipleDatasetsAndFaultyExtractor(props, false);
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobName);
  }
}
@Test
public void testLaunchJobWithMultipleDatasetsAndFaultyExtractorAndPartialCommitPolicy() throws Exception {
  Properties props = loadJobProps();
  String jobName = props.getProperty(ConfigurationKeys.JOB_NAME_KEY)
      + "-testLaunchJobWithMultipleDatasetsAndFaultyExtractorAndPartialCommitPolicy";
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName);
  try {
    // Second argument true: enable the partial-commit policy.
    this.jobLauncherTestHelper.runTestWithMultipleDatasetsAndFaultyExtractor(props, true);
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobName);
  }
}
@Test
public void testLaunchJobWithDefaultMetricsReporter() throws Exception {
  Properties props = loadJobProps();
  String jobName = props.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testDefaultMetricsReporter";
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, jobName);
  try {
    JobContext jobContext = this.jobLauncherTestHelper.runTest(props);
    // The default reporter registers a work-unit-creation timer keyed by the job name.
    Map<String, Metric> metrics =
        jobContext.getJobMetricsOptional().get().getMetricContext().getMetrics();
    Assert.assertTrue(
        metrics.containsKey("JobMetrics.WorkUnitsCreationTimer.GobblinTest1-testDefaultMetricsReporter"));
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(jobName);
  }
}
@Test
public void testLaunchJobWithServiceMetricsReporter() throws Exception {
  Properties props = loadJobProps();
  // Service-style metric names are built from flow group/name and edge id, so a
  // fixed composite job name plus matching flow properties are required.
  props.setProperty(ConfigurationKeys.GOBBLIN_OUTPUT_JOB_LEVEL_METRICS, "true");
  props.setProperty(ConfigurationKeys.JOB_METRICS_REPORTER_CLASS_KEY,
      ServiceGobblinJobMetricReporter.class.getName());
  props.setProperty(ConfigurationKeys.JOB_NAME_KEY, "FlowName_FlowGroup_JobName_EdgeId_Hash");
  props.setProperty(ConfigurationKeys.FLOW_GROUP_KEY, "FlowGroup");
  props.setProperty(ConfigurationKeys.FLOW_NAME_KEY, "FlowName");
  props.setProperty("flow.edge.id", "EdgeId");
  try {
    JobContext jobContext = this.jobLauncherTestHelper.runTest(props);
    Map<String, Metric> metrics =
        jobContext.getJobMetricsOptional().get().getMetricContext().getMetrics();
    Assert.assertTrue(metrics.containsKey("GobblinService.FlowGroup.FlowName.EdgeId.WorkUnitsCreated"));
    Assert.assertTrue(metrics.containsKey("GobblinService.FlowGroup.FlowName.EdgeId.WorkUnitsCreationTimer"));
  } finally {
    this.jobLauncherTestHelper.deleteStateStore(props.getProperty(ConfigurationKeys.JOB_NAME_KEY));
  }
}
@AfterClass(alwaysRun = true)
public void tearDown() throws IOException {
  // Null when startUp failed before the database was created.
  ITestMetastoreDatabase db = this.testMetastoreDatabase;
  if (db == null) {
    return;
  }
  db.close();
}
/**
 * Loads the GobblinTest1 pull-file configuration, layers the launcher properties
 * on top, and points the test source at the bundled Avro input files.
 *
 * @return a fresh, fully-populated {@link Properties} for one test job
 * @throws IOException if the pull file cannot be read
 */
private Properties loadJobProps() throws IOException {
  Properties jobProps = new Properties();
  // Fix: the original leaked the FileReader by never closing it.
  try (FileReader pullFileReader = new FileReader("gobblin-test/resource/job-conf/GobblinTest1.pull")) {
    jobProps.load(pullFileReader);
  }
  jobProps.putAll(this.launcherProps);
  jobProps.setProperty(JobLauncherTestHelper.SOURCE_FILE_LIST_KEY,
      "gobblin-test/resource/source/test.avro.0," + "gobblin-test/resource/source/test.avro.1,"
          + "gobblin-test/resource/source/test.avro.2," + "gobblin-test/resource/source/test.avro.3");
  return jobProps;
}
} | 1,232 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/GobblinMultiTaskAttemptTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.util.List;
import java.util.Properties;
import org.junit.Assert;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.SharedResourcesBrokerImpl;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.gobblin_scopes.JobScopeInstance;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.source.workunit.WorkUnit;
import static org.apache.gobblin.util.retry.RetryerFactory.RETRY_TIME_OUT_MS;
@Slf4j
public class GobblinMultiTaskAttemptTest {
private GobblinMultiTaskAttempt taskAttempt;
private TaskExecutor taskExecutorMock;
private SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker;
@BeforeClass
public void setup() {
  // Build a GLOBAL-scope broker and derive a job-scoped sub-broker from it.
  SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory
      .createDefaultTopLevelBroker(ConfigFactory.empty(), GobblinScopeTypes.GLOBAL.defaultScopeInstance());
  this.jobBroker =
      topBroker.newSubscopedBuilder(new JobScopeInstance("testJob", "job123")).build();
  // The executor is mocked: these tests never run real tasks through it.
  this.taskExecutorMock = Mockito.mock(TaskExecutor.class);
}
@Test
// Verifies that a failure during task creation surfaces as TaskCreationException.
// NOTE(review): this class imports org.junit.Assert inside a TestNG test;
// consider unifying on org.testng.Assert for consistent failure reporting.
public void testRunWithTaskCreationFailure()
throws Exception {
// Preparing Instance of TaskAttempt with designed failure on task creation
WorkUnit tmpWU = WorkUnit.createEmpty();
// Put necessary attributes in workunit
tmpWU.setProp(ConfigurationKeys.TASK_ID_KEY, "task_test");
List<WorkUnit> workUnit = ImmutableList.of(tmpWU);
JobState jobState = new JobState();
// Limit the number of times of retry in task-creation.
jobState.setProp(RETRY_TIME_OUT_MS, 1000);
TaskStateTracker stateTrackerMock = Mockito.mock(TaskStateTracker.class);
taskAttempt =
new GobblinMultiTaskAttempt(workUnit.iterator(), "testJob", jobState, stateTrackerMock, taskExecutorMock,
Optional.absent(), Optional.absent(), jobBroker);
try {
// This attempt will automatically fail due to missing required config in
// org.apache.gobblin.runtime.TaskContext.getSource
taskAttempt.run();
} catch (Exception e) {
// Expected path: the exception type is checked, then the test exits early.
Assert.assertTrue(e instanceof TaskCreationException);
return;
}
// Should never reach here.
Assert.fail();
}
@Test
// Verifies that a failure while registering the task with the state tracker
// (simulated by DummyTestStateTracker.registerNewTask throwing) is also
// reported as a TaskCreationException.
public void testRunWithTaskStatsTrackerNotScheduledFailure()
throws Exception {
TaskStateTracker stateTracker = new DummyTestStateTracker(new Properties(), log);
// Preparing Instance of TaskAttempt with designed failure on task creation
WorkUnit tmpWU = WorkUnit.createEmpty();
// Put necessary attributes in workunit
tmpWU.setProp(ConfigurationKeys.TASK_ID_KEY, "task_test");
List<WorkUnit> workUnit = ImmutableList.of(tmpWU);
JobState jobState = new JobState();
// Limit the number of times of retry in task-creation.
jobState.setProp(RETRY_TIME_OUT_MS, 1000);
// Unlike testRunWithTaskCreationFailure, a valid source class is supplied so the
// failure happens at tracker registration rather than source instantiation.
jobState.setProp(ConfigurationKeys.SOURCE_CLASS_KEY, DatasetStateStoreTest.DummySource.class.getName());
taskAttempt = new GobblinMultiTaskAttempt(workUnit.iterator(), "testJob", jobState, stateTracker, taskExecutorMock,
Optional.absent(), Optional.absent(), jobBroker);
try {
// This attempt will automatically fail since the registerNewTask call will directly throw RuntimeException
// as a way to simulate the case when scheduling reporter is rejected.
taskAttempt.run();
} catch (Exception e) {
Assert.assertTrue(e instanceof TaskCreationException);
return;
}
// Should never reach here.
Assert.fail();
}
@Test
// Verifies that shutting down the attempt BEFORE any tasks are created makes the
// subsequent run a no-op: zero tasks created and the stopped flag set.
public void testRunAfterCancellation() throws Exception {
WorkUnit tmpWU = WorkUnit.createEmpty();
// Put necessary attributes in workunit
tmpWU.setProp(ConfigurationKeys.TASK_ID_KEY, "task_test");
List<WorkUnit> workUnit = ImmutableList.of(tmpWU);
JobState jobState = new JobState();
// Limit the number of times of retry in task-creation.
jobState.setProp(RETRY_TIME_OUT_MS, 1000);
jobState.setProp(ConfigurationKeys.SOURCE_CLASS_KEY, DatasetStateStoreTest.DummySource.class.getName());
TaskStateTracker stateTrackerMock = Mockito.mock(TaskStateTracker.class);
taskAttempt =
new GobblinMultiTaskAttempt(workUnit.iterator(), "testJob1", jobState, stateTrackerMock, taskExecutorMock,
Optional.absent(), Optional.absent(), jobBroker);
//Call shutdown() before creation of underlying Gobblin tasks.
taskAttempt.shutdownTasks();
taskAttempt.runAndOptionallyCommitTaskAttempt(GobblinMultiTaskAttempt.CommitPolicy.IMMEDIATE);
Assert.assertEquals(taskAttempt.getNumTasksCreated(), 0);
Assert.assertTrue(taskAttempt.getStopped().get());
}
/**
 * Test-only {@link AbstractTaskStateTracker} whose {@code registerNewTask} always
 * throws, simulating a rejected scheduling reporter during task registration.
 */
public static class DummyTestStateTracker extends AbstractTaskStateTracker {
public DummyTestStateTracker(Properties properties, Logger logger) {
super(properties, logger);
}
@Override
public void registerNewTask(Task task) {
// Deliberate failure: lets tests assert how GobblinMultiTaskAttempt reacts
// when the tracker cannot accept a new task.
throw new RuntimeException("Failing registering new task on purpose");
}
@Override
public void onTaskRunCompletion(Task task) {
task.markTaskCompletion();
}
@Override
public void onTaskCommitCompletion(Task task) {
// No-op: commit-completion bookkeeping is irrelevant to these tests.
}
}
} | 1,233 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/JobBrokerInjectionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.util.Queue;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Sets;
import com.typesafe.config.ConfigFactory;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.apache.gobblin.broker.ResourceInstance;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.iface.ConfigView;
import org.apache.gobblin.broker.iface.NoSuchScopeException;
import org.apache.gobblin.broker.iface.NotConfiguredException;
import org.apache.gobblin.broker.iface.ScopedConfigView;
import org.apache.gobblin.broker.iface.SharedResourceFactory;
import org.apache.gobblin.broker.iface.SharedResourceKey;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.Converter;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
import org.apache.gobblin.converter.SingleRecordIterable;
import org.apache.gobblin.runtime.api.JobExecutionDriver;
import org.apache.gobblin.runtime.api.JobExecutionResult;
import org.apache.gobblin.runtime.api.JobLifecycleListener;
import org.apache.gobblin.runtime.api.JobSpec;
import org.apache.gobblin.runtime.instance.StandardGobblinInstanceDriver;
import org.apache.gobblin.runtime.instance.StandardGobblinInstanceLauncher;
import org.apache.gobblin.runtime.std.DefaultJobLifecycleListenerImpl;
import org.apache.gobblin.runtime.std.FilteredJobLifecycleListener;
import org.apache.gobblin.runtime.std.JobSpecFilter;
import org.apache.gobblin.writer.test.GobblinTestEventBusWriter;
import org.apache.gobblin.writer.test.TestingEventBusAsserter;
import org.apache.gobblin.writer.test.TestingEventBuses;
public class JobBrokerInjectionTest {
@Test
// End-to-end check that broker-shared objects have the expected lifetimes:
// instance-scoped objects are shared across jobs, job-scoped objects are shared
// across tasks of one job, and task-scoped objects are unique per task.
public void testBrokerIsAcquiredAndShared() throws Exception {
StandardGobblinInstanceLauncher.Builder instanceLauncherBuilder =
StandardGobblinInstanceLauncher.builder()
.withInstanceName("testSubmitToJobCatalog");
instanceLauncherBuilder.driver();
StandardGobblinInstanceLauncher instanceLauncher =
instanceLauncherBuilder.build();
instanceLauncher.startAsync();
instanceLauncher.awaitRunning(5, TimeUnit.SECONDS);
JobSpec js1 = JobSpec.builder()
.withConfig(ConfigFactory.parseResources("brokerTest/SimpleHelloWorldJob.jobconf"))
.build();
final String eventBusId = js1.getConfig().resolve().getString(GobblinTestEventBusWriter.FULL_EVENTBUSID_KEY);
TestingEventBusAsserter asserter = new TestingEventBusAsserter(eventBusId);
final StandardGobblinInstanceDriver instance =
(StandardGobblinInstanceDriver)instanceLauncher.getDriver();
// Capacity-1 queue: the lifecycle listener hands the launched driver to the test thread.
final ArrayBlockingQueue<JobExecutionDriver> jobDrivers = new ArrayBlockingQueue<>(1);
JobLifecycleListener js1Listener = new FilteredJobLifecycleListener(
JobSpecFilter.eqJobSpecURI(js1.getUri()),
new DefaultJobLifecycleListenerImpl(instance.getLog()) {
@Override public void onJobLaunch(JobExecutionDriver jobDriver) {
super.onJobLaunch(jobDriver);
try {
jobDrivers.offer(jobDriver, 5, TimeUnit.SECONDS);
} catch (InterruptedException e) {
instance.getLog().error("Offer interrupted.");
}
}
});
instance.registerWeakJobLifecycleListener(js1Listener);
instance.getMutableJobCatalog().put(js1);
JobExecutionDriver jobDriver = jobDrivers.poll(10, TimeUnit.SECONDS);
Assert.assertNotNull(jobDriver);
// NOTE(review): 100000 seconds (~28h) is likely a typo for a shorter timeout —
// the second run below waits only 10 seconds. Confirm and align.
JobExecutionResult jobResult = jobDriver.get(100000, TimeUnit.SECONDS);
Assert.assertTrue(jobResult.isSuccessful());
Queue<TestingEventBuses.Event> events = asserter.getEvents();
Set<Long> seenInstanceObjectIds = Sets.newHashSet();
Set<Long> seenJobObjectIds = Sets.newHashSet();
Set<Long> seenTaskObjectIds = Sets.newHashSet();
for (TestingEventBuses.Event event : events) {
MyRecord record = (MyRecord) event.getValue();
seenInstanceObjectIds.add(record.getInstanceSharedObjectId());
seenJobObjectIds.add(record.getJobSharedObjectId());
seenTaskObjectIds.add(record.getTaskSharedObjectId());
}
// Should see same instance and job id (only 1 id in the set), but 5 different task ids for each task
Assert.assertEquals(seenInstanceObjectIds.size(), 1);
Assert.assertEquals(seenJobObjectIds.size(), 1);
Assert.assertEquals(seenTaskObjectIds.size(), 5);
asserter.clear();
// Re-run the same spec: the instance-scoped object must be reused while the
// job-scoped object (and all task-scoped objects) must be new.
instance.getMutableJobCatalog().remove(js1.getUri());
instance.getMutableJobCatalog().put(js1);
jobDriver = jobDrivers.poll(10, TimeUnit.SECONDS);
Assert.assertNotNull(jobDriver);
jobResult = jobDriver.get(10, TimeUnit.SECONDS);
Assert.assertTrue(jobResult.isSuccessful());
events = asserter.getEvents();
for (TestingEventBuses.Event event : events) {
MyRecord record = (MyRecord) event.getValue();
seenInstanceObjectIds.add(record.getInstanceSharedObjectId());
seenJobObjectIds.add(record.getJobSharedObjectId());
seenTaskObjectIds.add(record.getTaskSharedObjectId());
}
// A different job should produce a new shared object id
Assert.assertEquals(seenInstanceObjectIds.size(), 1);
Assert.assertEquals(seenJobObjectIds.size(), 2);
Assert.assertEquals(seenTaskObjectIds.size(), 10);
}
/**
 * Converter that acquires the same shared object at INSTANCE, JOB and TASK broker
 * scopes during schema conversion, then emits their ids in every output record so
 * the test can verify sharing semantics per scope.
 */
public static class JobBrokerConverter extends Converter<String, String, String, MyRecord> {
private MySharedObject instanceSharedObject;
private MySharedObject jobSharedObject;
private MySharedObject taskSharedObject;
@Override
public String convertSchema(String inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
// Both checked broker exceptions are wrapped in RuntimeException: either one
// indicates a broken test fixture, not a recoverable conversion failure.
try {
try {
this.instanceSharedObject = workUnit.getTaskBroker()
.getSharedResourceAtScope(new MySharedObjectFactory(), new MySharedKey(), GobblinScopeTypes.INSTANCE);
this.jobSharedObject = workUnit.getTaskBroker()
.getSharedResourceAtScope(new MySharedObjectFactory(), new MySharedKey(), GobblinScopeTypes.JOB);
this.taskSharedObject = workUnit.getTaskBroker()
.getSharedResourceAtScope(new MySharedObjectFactory(), new MySharedKey(), GobblinScopeTypes.TASK);
} catch (NoSuchScopeException nsse) {
throw new RuntimeException(nsse);
}
// Schema passes through unchanged; this converter only records broker ids.
return inputSchema;
} catch (NotConfiguredException nce) {
throw new RuntimeException(nce);
}
}
@Override
public Iterable<MyRecord> convertRecord(String outputSchema, String inputRecord, WorkUnitState workUnit)
throws DataConversionException {
// One output record per input, carrying the three scope-object ids.
return new SingleRecordIterable<>(new MyRecord(this.taskSharedObject.id, this.jobSharedObject.id, this.instanceSharedObject.id));
}
}
/** Immutable value emitted per converted record; carries the broker-object ids seen at each scope. */
@Data
public static class MyRecord {
private final long taskSharedObjectId;
private final long jobSharedObjectId;
private final long instanceSharedObjectId;
}
/** Shared resource stand-in: a random id fixed at construction distinguishes one instance from another. */
public static class MySharedObject {
private final long id = new Random().nextLong();
}
/** Factory that creates a fresh {@link MySharedObject} per broker scope; auto-scope is the requesting broker's own scope. */
public static class MySharedObjectFactory implements SharedResourceFactory<MySharedObject, MySharedKey, GobblinScopeTypes> {
@Override
public String getName() {
return MySharedObjectFactory.class.getSimpleName();
}
@Override
public ResourceInstance<MySharedObject> createResource(SharedResourcesBroker<GobblinScopeTypes> broker,
ScopedConfigView<GobblinScopeTypes, MySharedKey> config) {
// New object per (scope, key); the broker caches it so later lookups share it.
return new ResourceInstance<>(new MySharedObject());
}
@Override
public GobblinScopeTypes getAutoScope(SharedResourcesBroker<GobblinScopeTypes> broker, ConfigView<GobblinScopeTypes, MySharedKey> config) {
return broker.selfScope().getType();
}
}
/**
 * Field-less shared-resource key: with no fields, Lombok's equals/hashCode make all
 * instances of this type equal, so every lookup in a scope resolves to one resource.
 */
@EqualsAndHashCode
public static class MySharedKey implements SharedResourceKey {
@Override
public String toConfigurationKey() {
return "key";
}
}
}
| 1,234 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/JobExecutionEventSubmitterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.verify;
import java.util.Map;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import org.mockito.Mockito;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.metrics.event.EventSubmitter;
import org.apache.gobblin.rest.LauncherTypeEnum;
@Test(groups = {"gobblin.runtime"})
public class JobExecutionEventSubmitterTest {
private EventSubmitter mockEventSubmitter;
private JobExecutionEventSubmitter jobExecutionEventSubmitter;
@BeforeClass
public void setUp() {
  // The submitter under test wraps a mocked EventSubmitter so interactions can be verified.
  EventSubmitter submitter = mock(EventSubmitter.class);
  this.mockEventSubmitter = submitter;
  this.jobExecutionEventSubmitter = new JobExecutionEventSubmitter(submitter);
}
@Test
// Smoke test: given a successful job with two successful task states, at least one
// event must be submitted. Only the interaction is verified, not event content.
public void testSubmitJobExecutionEvents() {
JobState mockJobState = mock(JobState.class, Mockito.RETURNS_SMART_NULLS);
when(mockJobState.getState()).thenReturn(JobState.RunningState.SUCCESSFUL);
when(mockJobState.getLauncherType()).thenReturn(LauncherTypeEnum.$UNKNOWN);
when(mockJobState.getTrackingURL()).thenReturn(Optional.<String> absent());
TaskState mockTaskState1 = createMockTaskState();
TaskState mockTaskState2 = createMockTaskState();
when(mockJobState.getTaskStates()).thenReturn(Lists.newArrayList(mockTaskState1, mockTaskState2));
this.jobExecutionEventSubmitter.submitJobExecutionEvents(mockJobState);
// any(Map.class) is raw here and triggers an unchecked warning; acceptable in a test.
verify(this.mockEventSubmitter, atLeastOnce()).submit(any(String.class), any(Map.class));
}
private TaskState createMockTaskState() {
TaskState taskState = mock(TaskState.class, Mockito.RETURNS_SMART_NULLS);
when(taskState.getWorkingState()).thenReturn(WorkUnitState.WorkingState.SUCCESSFUL);
when(taskState.getTaskFailureException()).thenReturn(Optional.<String> absent());
return taskState;
}
}
| 1,235 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/JobStateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.Closer;
import org.apache.gobblin.rest.JobExecutionInfo;
import org.apache.gobblin.rest.TaskExecutionInfo;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
* Unit test for {@link JobState}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.runtime"})
public class JobStateTest {
  // State under test; populated in testSetAndGet and reused by the dependent tests.
  private JobState jobState;
  // Captured when testSetAndGet runs; later time assertions are relative to it.
  private long startTime;

  @BeforeClass
  public void setUp() {
    this.jobState = new JobState("TestJob", "TestJob-1");
  }

  /**
   * Populates the job state with job-level metadata and three committed task
   * states, then verifies every value via {@link #doAsserts}.
   */
  @Test
  public void testSetAndGet() {
    this.jobState.setId(this.jobState.getJobId());
    this.startTime = System.currentTimeMillis();
    this.jobState.setStartTime(this.startTime);
    this.jobState.setEndTime(this.startTime + 1000);
    this.jobState.setDuration(1000);
    this.jobState.setState(JobState.RunningState.COMMITTED);
    this.jobState.setTaskCount(3);
    this.jobState.setProp("foo", "bar");
    for (int i = 0; i < 3; i++) {
      WorkUnit workUnit = WorkUnit.createEmpty();
      // "common*" values are identical across all three work units, "spec*"
      // values differ per work unit; all start out as spec properties here.
      Properties initialProps = new Properties();
      initialProps.setProperty("common1", "1");
      initialProps.setProperty("common2", "2");
      initialProps.setProperty("spec1", String.valueOf(i));
      initialProps.setProperty("spec2", "spec" + i);
      workUnit.setProps(new Properties(), initialProps);
      WorkUnitState workUnitState = new WorkUnitState(workUnit);
      workUnitState.setProp(ConfigurationKeys.JOB_ID_KEY, "TestJob-1");
      workUnitState.setProp(ConfigurationKeys.TASK_ID_KEY, "TestTask-" + i);
      workUnitState.setProp(ConfigurationKeys.DATASET_URN_KEY, "TestDataset" + i);
      TaskState taskState = new TaskState(workUnitState);
      taskState.setTaskId("TestTask-" + i);
      taskState.setId(taskState.getTaskId());
      taskState.setStartTime(this.startTime);
      taskState.setEndTime(this.startTime + 1000);
      taskState.setTaskDuration(1000);
      taskState.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
      taskState.setProp("foo", "bar");
      this.jobState.addTaskState(taskState);
    }
    doAsserts(this.jobState, true, true);
  }

  /**
   * Round-trips the job state through Writable serialization
   * ({@link JobState#write} / {@link JobState#readFields}) and re-checks it.
   */
  @Test(dependsOnMethods = {"testSetAndGet"})
  public void testSerDe()
      throws IOException {
    Closer closer = Closer.create();
    try {
      ByteArrayOutputStream baos = closer.register(new ByteArrayOutputStream());
      DataOutputStream dos = closer.register(new DataOutputStream(baos));
      this.jobState.write(dos);
      ByteArrayInputStream bais = closer.register((new ByteArrayInputStream(baos.toByteArray())));
      DataInputStream dis = closer.register((new DataInputStream(bais)));
      JobState newJobState = new JobState();
      newJobState.readFields(dis);
      // initial == false: after deserialization the common/spec property split
      // differs from the freshly built state (see doAsserts).
      doAsserts(newJobState, true, false);
    } catch (Throwable t) {
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }
  }

  /**
   * Verifies job-level values and, optionally, task and dataset states.
   *
   * @param jobState the state to verify
   * @param considerTaskStates whether task states and dataset states are also checked
   * @param initial true for the freshly built state (all four properties live in
   *        the work unit's spec properties); false after a serialization round
   *        trip, where the two shared "common*" properties have been factored
   *        into the work unit's common properties
   */
  private void doAsserts(JobState jobState, boolean considerTaskStates, boolean initial) {
    Assert.assertEquals(jobState.getJobName(), "TestJob");
    Assert.assertEquals(jobState.getJobId(), "TestJob-1");
    Assert.assertEquals(jobState.getId(), "TestJob-1");
    Assert.assertEquals(jobState.getStartTime(), this.startTime);
    Assert.assertEquals(jobState.getEndTime(), this.startTime + 1000);
    Assert.assertEquals(jobState.getDuration(), 1000);
    Assert.assertEquals(jobState.getState(), JobState.RunningState.COMMITTED);
    Assert.assertEquals(jobState.getTaskCount(), 3);
    Assert.assertEquals(jobState.getProp("foo"), "bar");
    if (!considerTaskStates) {
      return;
    }
    List<String> taskStateIds = Lists.newArrayList();
    for (int i = 0; i < jobState.getCompletedTasks(); i++) {
      TaskState taskState = jobState.getTaskStates().get(i);
      Assert.assertEquals(taskState.getJobId(), "TestJob-1");
      Assert.assertEquals(taskState.getStartTime(), this.startTime);
      Assert.assertEquals(taskState.getEndTime(), this.startTime + 1000);
      Assert.assertEquals(taskState.getTaskDuration(), 1000);
      Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
      Assert.assertTrue(taskState.getProp(ConfigurationKeys.DATASET_URN_KEY).startsWith("TestDataset"));
      Assert.assertEquals(taskState.getProp("foo"), "bar");
      if (initial) {
        // Freshly built: everything was set as spec properties.
        Assert.assertEquals(taskState.getWorkunit().getCommonProperties().size(), 0);
        Assert.assertEquals(taskState.getWorkunit().getSpecProperties().size(), 4);
        Assert.assertEquals(taskState.getProp("common1"), "1");
        Assert.assertEquals(taskState.getProp("common2"), "2");
        Assert.assertEquals(taskState.getProp("spec1"), String.valueOf(i));
        Assert.assertEquals(taskState.getProp("spec2"), "spec" + i);
      } else {
        // After ser/de: shared properties moved to common, but reads via
        // getProp still resolve every key to the same values.
        Assert.assertEquals(taskState.getWorkunit().getCommonProperties().size(), 2);
        Assert.assertEquals(taskState.getWorkunit().getCommonProperties().getProperty("common1"), "1");
        Assert.assertEquals(taskState.getWorkunit().getCommonProperties().getProperty("common2"), "2");
        Assert.assertEquals(taskState.getWorkunit().getSpecProperties().size(), 2);
        Assert.assertEquals(taskState.getWorkunit().getSpecProperties().getProperty("spec1"), String.valueOf(i));
        Assert.assertEquals(taskState.getWorkunit().getSpecProperties().getProperty("spec2"), "spec" + i);
        Assert.assertEquals(taskState.getProp("common1"), "1");
        Assert.assertEquals(taskState.getProp("common2"), "2");
        Assert.assertEquals(taskState.getProp("spec1"), String.valueOf(i));
        Assert.assertEquals(taskState.getProp("spec2"), "spec" + i);
      }
      taskStateIds.add(taskState.getTaskId());
    }
    Collections.sort(taskStateIds);
    Assert.assertEquals(taskStateIds, Lists.newArrayList("TestTask-0", "TestTask-1", "TestTask-2"));
    // Each task declared its own dataset URN, so there is one dataset state per task.
    Set<String> sortedDatasetUrns = Sets.newTreeSet(jobState.createDatasetStatesByUrns().keySet());
    Assert.assertEquals(sortedDatasetUrns.size(), jobState.getCompletedTasks());
    Map<String, JobState.DatasetState> datasetStatesByUrns = jobState.createDatasetStatesByUrns();
    int index = 0;
    for (String dataSetUrn : sortedDatasetUrns) {
      Assert.assertEquals(dataSetUrn, "TestDataset" + index);
      List<TaskState> taskStates = datasetStatesByUrns.get(dataSetUrn).getTaskStates();
      Assert.assertEquals(taskStates.size(), 1);
      Assert.assertEquals(taskStates.get(0).getTaskId(), "TestTask-" + index);
      index++;
    }
  }

  /**
   * Verifies the conversion of the job state into the REST model
   * {@link JobExecutionInfo}, including its nested {@link TaskExecutionInfo}s.
   */
  @Test(dependsOnMethods = {"testSetAndGet"})
  public void testToJobExecutionInfo() {
    JobExecutionInfo jobExecutionInfo = this.jobState.toJobExecutionInfo();
    Assert.assertEquals(jobExecutionInfo.getJobName(), "TestJob");
    Assert.assertEquals(jobExecutionInfo.getJobId(), "TestJob-1");
    Assert.assertEquals(jobExecutionInfo.getStartTime().longValue(), this.startTime);
    Assert.assertEquals(jobExecutionInfo.getEndTime().longValue(), this.startTime + 1000);
    Assert.assertEquals(jobExecutionInfo.getDuration().longValue(), 1000L);
    Assert.assertEquals(jobExecutionInfo.getState().name(), JobState.RunningState.COMMITTED.name());
    Assert.assertEquals(jobExecutionInfo.getLaunchedTasks().intValue(), 3);
    Assert.assertEquals(jobExecutionInfo.getCompletedTasks().intValue(), 3);
    Assert.assertEquals(jobExecutionInfo.getJobProperties().get("foo"), "bar");
    List<String> taskStateIds = Lists.newArrayList();
    for (TaskExecutionInfo taskExecutionInfo : jobExecutionInfo.getTaskExecutions()) {
      Assert.assertEquals(taskExecutionInfo.getJobId(), "TestJob-1");
      Assert.assertEquals(taskExecutionInfo.getStartTime().longValue(), this.startTime);
      Assert.assertEquals(taskExecutionInfo.getEndTime().longValue(), this.startTime + 1000);
      Assert.assertEquals(taskExecutionInfo.getDuration().longValue(), 1000);
      Assert.assertEquals(taskExecutionInfo.getState().name(), WorkUnitState.WorkingState.COMMITTED.name());
      Assert.assertEquals(taskExecutionInfo.getTaskProperties().get("foo"), "bar");
      taskStateIds.add(taskExecutionInfo.getTaskId());
    }
    Collections.sort(taskStateIds);
    Assert.assertEquals(taskStateIds, Lists.newArrayList("TestTask-0", "TestTask-1", "TestTask-2"));
  }
}
| 1,236 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/LimiterStopEventTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.source.extractor.limiter.LimiterConfigurationKeys;
import org.apache.gobblin.util.limiter.Limiter;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.workunit.WorkUnit;
import junit.framework.Assert;
import org.mockito.Mockito;
import org.testng.annotations.Test;
import java.lang.reflect.Method;
import java.util.Properties;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for the limiter-stop metadata produced by
 * {@link LimitingExtractorDecorator}: only work-unit properties whose keys
 * match (exactly or by prefix) the configured
 * {@link LimiterConfigurationKeys#LIMITER_REPORT_KEY_LIST} should be reported.
 */
@Test(groups = {"gobblin.runtime"})
public class LimiterStopEventTest {

  /**
   * Builds a {@link LimitingExtractorDecorator} backed by Mockito stubs whose
   * work unit exposes the given {@code properties}. Extracted from the three
   * test cases below, which previously duplicated this wiring verbatim.
   *
   * @param properties work-unit properties, including the limiter report key list
   * @return a decorator ready for metadata extraction
   */
  private LimitingExtractorDecorator<String, String> createDecorator(Properties properties)
      throws InterruptedException {
    Extractor extractor = mock(Extractor.class);
    Limiter limiter = mock(Limiter.class);
    TaskState taskState = mock(TaskState.class);
    WorkUnit workUnit = mock(WorkUnit.class);
    Mockito.when(taskState.getWorkunit()).thenReturn(workUnit);
    Mockito.when(taskState.getJobId()).thenReturn("123");
    Mockito.when(taskState.getTaskAttemptId()).thenReturn(Optional.of("555"));
    Mockito.when(taskState.getTaskId()).thenReturn("888");
    Mockito.when(limiter.acquirePermits(1)).thenReturn(null);
    Mockito.when(taskState.getProp(ConfigurationKeys.DATASET_URN_KEY, ConfigurationKeys.DEFAULT_DATASET_URN))
        .thenReturn("file://xyz");
    Mockito.when(workUnit.getProperties()).thenReturn(properties);
    return new LimitingExtractorDecorator<>(extractor, limiter, taskState);
  }

  /**
   * Reflectively invokes the private {@code getLimiterStopMetadata()} method.
   * Reflection failures propagate to the caller so that TestNG reports the
   * root cause (the original tests swallowed it via {@code Assert.fail()}).
   */
  @SuppressWarnings("unchecked")
  private ImmutableMap<String, String> getLimiterStopMetadata(LimitingExtractorDecorator<String, String> decorator)
      throws Exception {
    Method method = LimitingExtractorDecorator.class.getDeclaredMethod("getLimiterStopMetadata");
    method.setAccessible(true);
    return (ImmutableMap<String, String>) method.invoke(decorator);
  }

  /** Exact matches of configured keys are reported; unrelated keys are not. */
  @Test
  public void testGetLimiterStopMetadataCase0() throws Exception {
    String key1 = "topic";
    String key2 = "partition.id";
    String key3 = "others";
    Properties properties = new Properties();
    properties.setProperty(LimiterConfigurationKeys.LIMITER_REPORT_KEY_LIST, Joiner.on(',').join(key1, key2));
    properties.setProperty(key1, "1111");
    properties.setProperty(key2, "1111");
    ImmutableMap<String, String> metaData = getLimiterStopMetadata(createDecorator(properties));
    Assert.assertTrue(metaData.containsKey(key1));
    Assert.assertTrue(metaData.containsKey(key2));
    Assert.assertFalse(metaData.containsKey(key3));
  }

  /**
   * Keys derived from a configured key (e.g. "partition.id.0") are reported,
   * while a shorter, non-matching key ("partition") is not.
   */
  @Test
  public void testGetLimiterStopMetadataCase1() throws Exception {
    String key1 = "topic";
    String key2 = "partition.id";
    String subKey1 = key2 + ".0";
    String subKey2 = key2 + ".1";
    String subKey3 = key2 + ".2";
    String subKey4 = key2 + ".3";
    String subKey5 = "partition";
    Properties properties = new Properties();
    properties.setProperty(LimiterConfigurationKeys.LIMITER_REPORT_KEY_LIST, Joiner.on(',').join(key1, key2));
    properties.setProperty(subKey1, "1111");
    properties.setProperty(subKey2, "1111");
    properties.setProperty(subKey3, "1111");
    properties.setProperty(subKey4, "1111");
    ImmutableMap<String, String> metaData = getLimiterStopMetadata(createDecorator(properties));
    Assert.assertTrue(metaData.containsKey(subKey1));
    Assert.assertTrue(metaData.containsKey(subKey2));
    Assert.assertTrue(metaData.containsKey(subKey3));
    Assert.assertTrue(metaData.containsKey(subKey4));
    Assert.assertFalse(metaData.containsKey(subKey5));
  }

  /** Arbitrary suffixes appended to a configured key prefix are still reported. */
  @Test
  public void testGetLimiterStopMetadataCase3() throws Exception {
    String key1 = "topic";
    String key2 = "partition.id";
    String subKey1 = key2 + "....";
    String subKey2 = key2 + "##fjpaierbng;";
    String subKey3 = key2 + "x[n sdf";
    String subKey4 = key2 + "";
    Properties properties = new Properties();
    properties.setProperty(LimiterConfigurationKeys.LIMITER_REPORT_KEY_LIST, Joiner.on(',').join(key1, key2));
    properties.setProperty(subKey1, "1111");
    properties.setProperty(subKey2, "1111");
    properties.setProperty(subKey3, "1111");
    properties.setProperty(subKey4, "1111");
    properties.setProperty(key1, "1111");
    properties.setProperty(key2, "1111");
    ImmutableMap<String, String> metaData = getLimiterStopMetadata(createDecorator(properties));
    Assert.assertTrue(metaData.containsKey(key1));
    Assert.assertTrue(metaData.containsKey(key2));
    Assert.assertTrue(metaData.containsKey(subKey1));
    Assert.assertTrue(metaData.containsKey(subKey2));
    Assert.assertTrue(metaData.containsKey(subKey3));
    Assert.assertTrue(metaData.containsKey(subKey4));
  }
}
| 1,237 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/TestWorkUnitStreamSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.util.Iterator;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.junit.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.runtime.api.JobExecutionDriver;
import org.apache.gobblin.runtime.api.JobExecutionResult;
import org.apache.gobblin.runtime.embedded.EmbeddedGobblin;
import org.apache.gobblin.source.WorkUnitStreamSource;
import org.apache.gobblin.source.workunit.BasicWorkUnitStream;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.source.workunit.WorkUnitStream;
import org.apache.gobblin.task.EventBusPublishingTaskFactory;
import org.apache.gobblin.writer.test.TestingEventBuses;
@Slf4j
public class TestWorkUnitStreamSource {
  /**
   * This test uses a slow source to verify that we can stream work units through the local job launcher, with
   * available units being processed eagerly even if not all work units are available yet.
   */
  @Test
  public void test() throws Exception {
    // Each run gets its own event bus id so concurrent tests cannot interfere.
    String eventBusId = UUID.randomUUID().toString();
    MyListener listener = new MyListener();
    EventBus eventBus = TestingEventBuses.getEventBus(eventBusId);
    eventBus.register(listener);
    EmbeddedGobblin embeddedGobblin = new EmbeddedGobblin("testStreamedSource")
        .setConfiguration(EventBusPublishingTaskFactory.EVENTBUS_ID_KEY, eventBusId)
        .setConfiguration(ConfigurationKeys.SOURCE_CLASS_KEY, MySource.class.getName())
        .setConfiguration(EventBusPublishingTaskFactory.Source.NUM_TASKS_KEY, "5");
    JobExecutionDriver driver = embeddedGobblin.runAsync();
    // Wait until the source's work-unit iterator has been constructed.
    if (!listener.iteratorReady.tryAcquire(2, TimeUnit.SECONDS)) {
      throw new RuntimeException("Failed to get start signal.");
    }
    // No work unit released yet -> no task may have run.
    Assert.assertFalse(listener.tasksRun.tryAcquire(50, TimeUnit.MILLISECONDS));
    // Release work units one at a time: exactly one task should run per release,
    // proving tasks are processed eagerly as units become available.
    eventBus.post(new MySource.NextWorkUnit());
    Assert.assertTrue(listener.tasksRun.tryAcquire(500, TimeUnit.MILLISECONDS));
    Assert.assertFalse(listener.tasksRun.tryAcquire(50, TimeUnit.MILLISECONDS));
    eventBus.post(new MySource.NextWorkUnit());
    Assert.assertTrue(listener.tasksRun.tryAcquire(500, TimeUnit.MILLISECONDS));
    Assert.assertFalse(listener.tasksRun.tryAcquire(50, TimeUnit.MILLISECONDS));
    // Release the remaining three units and wait for the job to finish.
    eventBus.post(new MySource.NextWorkUnit());
    eventBus.post(new MySource.NextWorkUnit());
    eventBus.post(new MySource.NextWorkUnit());
    JobExecutionResult result = driver.get(5, TimeUnit.SECONDS);
    Assert.assertTrue(result.isSuccessful());
    // All five tasks (ids 0..4) must have emitted run, commit, and publish events.
    SetMultimap<String, Integer> eventsSeen = listener.getEventsSeenMap();
    Set<Integer> expected = Sets.newHashSet(0, 1, 2, 3, 4);
    Assert.assertEquals(eventsSeen.get(EventBusPublishingTaskFactory.RUN_EVENT), expected);
    Assert.assertEquals(eventsSeen.get(EventBusPublishingTaskFactory.COMMIT_EVENT), expected);
    Assert.assertEquals(eventsSeen.get(EventBusPublishingTaskFactory.PUBLISH_EVENT), expected);
  }

  /** Listens on the test event bus and counts iterator-ready and task-run signals. */
  public static class MyListener extends EventBusPublishingTaskFactory.EventListener {
    // Released once when the source's work-unit iterator is constructed.
    private Semaphore iteratorReady = new Semaphore(0);
    // Released once per task RUN event.
    private Semaphore tasksRun = new Semaphore(0);

    @Subscribe
    public void iteratorReadyProcess(MySource.IteratorReady event) {
      this.iteratorReady.release();
    }

    @Subscribe
    public void taskRun(EventBusPublishingTaskFactory.Event event) {
      if (event.getType().equals(EventBusPublishingTaskFactory.RUN_EVENT)) {
        this.tasksRun.release();
      }
    }
  }

  /** A source whose work-unit iterator blocks until a {@link NextWorkUnit} event is posted. */
  public static class MySource extends EventBusPublishingTaskFactory.Source implements WorkUnitStreamSource<String, String> {
    @Override
    public WorkUnitStream getWorkunitStream(SourceState state) {
      int numTasks = state.getPropAsInt(NUM_TASKS_KEY);
      String eventBusId = state.getProp(EventBusPublishingTaskFactory.EVENTBUS_ID_KEY);
      EventBus eventBus = TestingEventBuses.getEventBus(eventBusId);
      return new BasicWorkUnitStream.Builder(new WorkUnitIterator(eventBus, eventBusId, numTasks)).build();
    }

    /** Emits up to {@code maxWus} work units, releasing one per {@link NextWorkUnit} event. */
    private class WorkUnitIterator implements Iterator<WorkUnit> {
      // One permit is released per NextWorkUnit event received on the bus.
      private final Semaphore semaphore = new Semaphore(0);
      private final EventBus eventBus;
      private final String eventBusId;
      private final int maxWus;
      private int currentWus;
      // True when hasNext() already consumed a permit but next() has not been called yet,
      // so repeated hasNext() calls do not consume extra permits.
      private boolean promisedNext = false;

      public WorkUnitIterator(EventBus eventBus, String eventBusId, int maxWus) {
        this.eventBus = eventBus;
        this.eventBusId = eventBusId;
        this.maxWus = maxWus;
        this.currentWus = 0;
        this.eventBus.register(this);
        // Tell the test that the iterator is ready to be driven.
        this.eventBus.post(new IteratorReady());
      }

      @Override
      public boolean hasNext() {
        if (this.promisedNext) {
          return true;
        }
        if (this.currentWus >= this.maxWus) {
          return false;
        }
        try {
          // Block until the test releases the next work unit, or give up after 5s.
          if (!this.semaphore.tryAcquire(5, TimeUnit.SECONDS)) {
            log.error("Failed to receive signal to emit next work unit.", new RuntimeException());
            return false;
          }
          this.promisedNext = true;
          return true;
        } catch (InterruptedException ie) {
          throw new RuntimeException(ie);
        }
      }

      @Override
      public WorkUnit next() {
        if (!hasNext()) {
          throw new IllegalStateException();
        }
        this.currentWus++;
        this.promisedNext = false;
        return MySource.this.createWorkUnit(this.currentWus - 1, this.eventBusId);
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException();
      }

      @Subscribe
      public void subscribe(NextWorkUnit nwu) {
        this.semaphore.release();
      }
    }

    /** Event posted by the test to release one more work unit. */
    public static class NextWorkUnit {}

    /** Event posted by the iterator once it is constructed and registered. */
    public static class IteratorReady {}
  }
}
| 1,238 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/MysqlDatasetStateStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.base.Predicates;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.gobblin.config.ConfigBuilder;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.metastore.DatasetStateStore;
import org.apache.gobblin.metastore.MysqlStateStore;
import org.apache.gobblin.metastore.StateStore;
import org.apache.gobblin.metastore.testing.ITestMetastoreDatabase;
import org.apache.gobblin.metastore.testing.TestMetastoreDatabaseFactory;
import org.apache.gobblin.util.ClassAliasResolver;
/**
* Unit tests for {@link MysqlDatasetStateStore}.
**/
@Test(groups = { "gobblin.runtime" })
public class MysqlDatasetStateStoreTest {
private static final String TEST_STATE_STORE = "TestStateStore";
private static final String TEST_JOB_NAME = "TestJob";
private static final String TEST_JOB_NAME_LOWER = "testjob";
private static final String TEST_JOB_NAME2 = "TestJob2";
private static final String TEST_JOB_ID = "TestJob1";
private static final String TEST_TASK_ID_PREFIX = "TestTask-";
private static final String TEST_DATASET_URN = "TestDataset";
private static final String TEST_DATASET_URN_LOWER = "testdataset";
private static final String TEST_DATASET_URN2 = "TestDataset2";
private StateStore<JobState> dbJobStateStore;
private DatasetStateStore<JobState.DatasetState> dbDatasetStateStore;
private long startTime = System.currentTimeMillis();
private ITestMetastoreDatabase testMetastoreDatabase;
private static final String TEST_USER = "testUser";
private static final String TEST_PASSWORD = "testPassword";
@BeforeClass
public void setUp() throws Exception {
  // Bring up a test MySQL instance shared by every test in this class.
  testMetastoreDatabase = TestMetastoreDatabaseFactory.get();
  String jdbcUrl = testMetastoreDatabase.getJdbcUrl();
  ConfigBuilder configBuilder = ConfigBuilder.create();
  HikariDataSource dataSource = new HikariDataSource();
  dataSource.setDriverClassName(ConfigurationKeys.DEFAULT_STATE_STORE_DB_JDBC_DRIVER);
  dataSource.setAutoCommit(false);
  dataSource.setJdbcUrl(jdbcUrl);
  dataSource.setUsername(TEST_USER);
  dataSource.setPassword(TEST_PASSWORD);
  // Job-level state store created directly against the data source.
  dbJobStateStore = new MysqlStateStore<>(dataSource, TEST_STATE_STORE, false, JobState.class);
  configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_URL_KEY, jdbcUrl);
  configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_USER_KEY, TEST_USER);
  configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY, TEST_PASSWORD);
  // Dataset state store resolved through its "mysql" class alias, exercising the
  // same factory path production code uses.
  ClassAliasResolver<DatasetStateStore.Factory> resolver =
      new ClassAliasResolver<>(DatasetStateStore.Factory.class);
  DatasetStateStore.Factory stateStoreFactory =
      resolver.resolveClass("mysql").newInstance();
  dbDatasetStateStore = stateStoreFactory.createStateStore(configBuilder.build());
  // clear data that may have been left behind by a prior test run
  dbJobStateStore.delete(TEST_JOB_NAME);
  dbDatasetStateStore.delete(TEST_JOB_NAME);
  dbJobStateStore.delete(TEST_JOB_NAME2);
  dbDatasetStateStore.delete(TEST_JOB_NAME2);
}
/**
 * Persists a committed job state (with three committed task states) under three
 * store names: the canonical job name, a lowercase-colliding variant, and a
 * second job name used later by the store-name listing test.
 */
@Test
public void testPersistJobState() throws IOException {
  JobState jobState = new JobState(TEST_JOB_NAME, TEST_JOB_ID);
  jobState.setId(TEST_JOB_ID);
  jobState.setProp("foo", "bar");
  jobState.setState(JobState.RunningState.COMMITTED);
  jobState.setStartTime(this.startTime);
  jobState.setEndTime(this.startTime + 1000);
  jobState.setDuration(1000);
  for (int i = 0; i < 3; i++) {
    TaskState taskState = new TaskState();
    taskState.setJobId(TEST_JOB_ID);
    taskState.setTaskId(TEST_TASK_ID_PREFIX + i);
    taskState.setId(TEST_TASK_ID_PREFIX + i);
    taskState.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
    jobState.addTaskState(taskState);
  }
  dbJobStateStore.put(TEST_JOB_NAME,
      MysqlDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX + MysqlDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX,
      jobState);
  // for testing a colliding lowercase job name
  jobState.setJobName(TEST_JOB_NAME_LOWER);
  jobState.setProp("lower", "case");
  dbJobStateStore.put(TEST_JOB_NAME_LOWER,
      MysqlDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX + MysqlDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX,
      jobState);
  // second job name for testing getting store names in a later test case
  jobState.setJobName(TEST_JOB_NAME2);
  dbJobStateStore.put(TEST_JOB_NAME2,
      MysqlDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX + MysqlDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX,
      jobState);
}
/**
 * Reads back the job states written by {@link #testPersistJobState()} and
 * verifies job-level fields, all three task states, and that the lowercase
 * store name resolved to its own (case-sensitive) record.
 */
@Test(dependsOnMethods = "testPersistJobState")
public void testGetJobState() throws IOException {
  // Reference the static suffix constants through the class, matching
  // testPersistJobState, instead of through the dbDatasetStateStore instance.
  JobState jobState = dbJobStateStore.get(TEST_JOB_NAME,
      MysqlDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX + MysqlDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX,
      TEST_JOB_ID);
  Assert.assertEquals(jobState.getId(), TEST_JOB_ID);
  Assert.assertEquals(jobState.getJobName(), TEST_JOB_NAME);
  Assert.assertEquals(jobState.getJobId(), TEST_JOB_ID);
  Assert.assertEquals(jobState.getProp("foo"), "bar");
  // The "lower" property was only added to the lowercase-named record.
  Assert.assertNotEquals(jobState.getProp("lower"), "case");
  Assert.assertEquals(jobState.getState(), JobState.RunningState.COMMITTED);
  Assert.assertEquals(jobState.getStartTime(), this.startTime);
  Assert.assertEquals(jobState.getEndTime(), this.startTime + 1000);
  Assert.assertEquals(jobState.getDuration(), 1000);
  Assert.assertEquals(jobState.getCompletedTasks(), 3);
  for (int i = 0; i < jobState.getCompletedTasks(); i++) {
    TaskState taskState = jobState.getTaskStates().get(i);
    Assert.assertEquals(taskState.getJobId(), TEST_JOB_ID);
    Assert.assertEquals(taskState.getTaskId(), TEST_TASK_ID_PREFIX + i);
    Assert.assertEquals(taskState.getId(), TEST_TASK_ID_PREFIX + i);
    Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
  }
  // Store names are case-sensitive: the lowercase record carries the extra property.
  jobState = dbJobStateStore.get(TEST_JOB_NAME_LOWER,
      MysqlDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX + MysqlDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX,
      TEST_JOB_ID);
  Assert.assertEquals(jobState.getId(), TEST_JOB_ID);
  Assert.assertEquals(jobState.getJobName(), TEST_JOB_NAME_LOWER);
  Assert.assertEquals(jobState.getJobId(), TEST_JOB_ID);
  Assert.assertEquals(jobState.getProp("foo"), "bar");
  Assert.assertEquals(jobState.getProp("lower"), "case");
}
/**
 * Persists committed dataset states under four URNs: the canonical dataset,
 * a second dataset (to test multi-dataset retrieval), a lowercase-colliding
 * URN (to test case sensitivity), and the second dataset again under a second
 * job name (for the store-name listing test). Durations differ per record so
 * later reads can tell them apart.
 */
@Test(dependsOnMethods = "testGetJobState")
public void testPersistDatasetState() throws IOException {
  JobState.DatasetState datasetState = new JobState.DatasetState(TEST_JOB_NAME, TEST_JOB_ID);
  datasetState.setDatasetUrn(TEST_DATASET_URN);
  datasetState.setState(JobState.RunningState.COMMITTED);
  datasetState.setId(TEST_DATASET_URN);
  datasetState.setStartTime(this.startTime);
  datasetState.setEndTime(this.startTime + 1000);
  datasetState.setDuration(1000);
  for (int i = 0; i < 3; i++) {
    TaskState taskState = new TaskState();
    taskState.setJobId(TEST_JOB_ID);
    taskState.setTaskId(TEST_TASK_ID_PREFIX + i);
    taskState.setId(TEST_TASK_ID_PREFIX + i);
    taskState.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
    datasetState.addTaskState(taskState);
  }
  dbDatasetStateStore.persistDatasetState(TEST_DATASET_URN, datasetState);
  // persist a second dataset state to test that retrieval of multiple dataset states works
  datasetState.setDatasetUrn(TEST_DATASET_URN2);
  datasetState.setId(TEST_DATASET_URN2);
  datasetState.setDuration(2000);
  dbDatasetStateStore.persistDatasetState(TEST_DATASET_URN2, datasetState);
  // persist a colliding lowercase dataset state to test that retrieval is case sensitive
  datasetState.setDatasetUrn(TEST_DATASET_URN_LOWER);
  datasetState.setId(TEST_DATASET_URN_LOWER);
  datasetState.setDuration(3000);
  dbDatasetStateStore.persistDatasetState(TEST_DATASET_URN_LOWER, datasetState);
  // second job name for testing getting store names in a later test case
  datasetState.setJobName(TEST_JOB_NAME2);
  dbDatasetStateStore.persistDatasetState(TEST_DATASET_URN2, datasetState);
}
@Test(dependsOnMethods = "testPersistDatasetState")
public void testGetDatasetState() throws IOException {
JobState.DatasetState datasetState =
dbDatasetStateStore.getLatestDatasetState(TEST_JOB_NAME, TEST_DATASET_URN);
Assert.assertEquals(datasetState.getId(), TEST_DATASET_URN);
Assert.assertEquals(datasetState.getDatasetUrn(), TEST_DATASET_URN);
Assert.assertEquals(datasetState.getJobName(), TEST_JOB_NAME);
Assert.assertEquals(datasetState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
Assert.assertEquals(datasetState.getStartTime(), this.startTime);
Assert.assertEquals(datasetState.getEndTime(), this.startTime + 1000);
Assert.assertEquals(datasetState.getDuration(), 1000);
Assert.assertEquals(datasetState.getCompletedTasks(), 3);
for (int i = 0; i < datasetState.getCompletedTasks(); i++) {
TaskState taskState = datasetState.getTaskStates().get(i);
Assert.assertEquals(taskState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(taskState.getTaskId(), TEST_TASK_ID_PREFIX + i);
Assert.assertEquals(taskState.getId(), TEST_TASK_ID_PREFIX + i);
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
}
}
@Test(dependsOnMethods = { "testGetDatasetState" })
public void testGetStoreNames() throws IOException {
List<String> storeNames = this.dbJobStateStore.getStoreNames(Predicates.alwaysTrue());
Collections.sort(storeNames);
Assert.assertTrue(storeNames.size() == 3);
Assert.assertEquals(storeNames.get(0), TEST_JOB_NAME);
Assert.assertEquals(storeNames.get(1), TEST_JOB_NAME2);
Assert.assertEquals(storeNames.get(2), TEST_JOB_NAME_LOWER);
storeNames = this.dbDatasetStateStore.getStoreNames(Predicates.alwaysTrue());
Collections.sort(storeNames);
Assert.assertTrue(storeNames.size() == 2);
Assert.assertEquals(storeNames.get(0), TEST_JOB_NAME);
Assert.assertEquals(storeNames.get(1), TEST_JOB_NAME2);
}
@Test(dependsOnMethods = "testGetStoreNames")
public void testGetPreviousDatasetStatesByUrns() throws IOException {
Map<String, JobState.DatasetState> datasetStatesByUrns =
dbDatasetStateStore.getLatestDatasetStatesByUrns(TEST_JOB_NAME);
Assert.assertEquals(datasetStatesByUrns.size(), 3);
JobState.DatasetState datasetState = datasetStatesByUrns.get(TEST_DATASET_URN);
Assert.assertEquals(datasetState.getId(), TEST_DATASET_URN);
Assert.assertEquals(datasetState.getDatasetUrn(), TEST_DATASET_URN);
Assert.assertEquals(datasetState.getJobName(), TEST_JOB_NAME);
Assert.assertEquals(datasetState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
Assert.assertEquals(datasetState.getStartTime(), this.startTime);
Assert.assertEquals(datasetState.getEndTime(), this.startTime + 1000);
Assert.assertEquals(datasetState.getDuration(), 1000);
datasetState = datasetStatesByUrns.get(TEST_DATASET_URN2);
Assert.assertEquals(datasetState.getId(), TEST_DATASET_URN2);
Assert.assertEquals(datasetState.getDatasetUrn(), TEST_DATASET_URN2);
Assert.assertEquals(datasetState.getJobName(), TEST_JOB_NAME);
Assert.assertEquals(datasetState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
Assert.assertEquals(datasetState.getStartTime(), this.startTime);
Assert.assertEquals(datasetState.getEndTime(), this.startTime + 1000);
Assert.assertEquals(datasetState.getDuration(), 2000);
datasetState = datasetStatesByUrns.get(TEST_DATASET_URN_LOWER);
Assert.assertEquals(datasetState.getId(), TEST_DATASET_URN_LOWER);
Assert.assertEquals(datasetState.getDatasetUrn(), TEST_DATASET_URN_LOWER);
Assert.assertEquals(datasetState.getJobName(), TEST_JOB_NAME);
Assert.assertEquals(datasetState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
Assert.assertEquals(datasetState.getStartTime(), this.startTime);
Assert.assertEquals(datasetState.getEndTime(), this.startTime + 1000);
Assert.assertEquals(datasetState.getDuration(), 3000);
}
@Test(dependsOnMethods = "testGetPreviousDatasetStatesByUrns")
public void testDeleteJobState() throws IOException {
JobState jobState = dbJobStateStore.get(TEST_JOB_NAME,
dbDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX + dbDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX,
TEST_JOB_ID);
Assert.assertNotNull(jobState);
Assert.assertEquals(jobState.getJobId(), TEST_JOB_ID);
dbJobStateStore.delete(TEST_JOB_NAME);
jobState = dbJobStateStore.get(TEST_JOB_NAME,
dbDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX + dbDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX,
TEST_JOB_ID);
Assert.assertNull(jobState);
}
@Test(dependsOnMethods = "testGetPreviousDatasetStatesByUrns")
public void testDeleteDatasetJobState() throws IOException {
JobState.DatasetState datasetState = dbDatasetStateStore.get(TEST_JOB_NAME,
TEST_DATASET_URN + "-" + dbDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX +
dbDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX, TEST_DATASET_URN);
Assert.assertNotNull(datasetState);
Assert.assertEquals(datasetState.getJobId(), TEST_JOB_ID);
dbDatasetStateStore.delete(TEST_JOB_NAME);
datasetState = dbDatasetStateStore.get(TEST_JOB_NAME,
TEST_DATASET_URN + "-" + dbDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX +
dbDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX, TEST_DATASET_URN);
Assert.assertNull(datasetState);
}
/**
 * Removes state written under the primary test job name, regardless of which tests ran.
 */
@AfterClass
public void tearDown() throws IOException {
  dbJobStateStore.delete(TEST_JOB_NAME);
  dbDatasetStateStore.delete(TEST_JOB_NAME);
}
}
| 1,239 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/TaskContextTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.StringReader;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.test.TestSource;
import org.apache.gobblin.writer.Destination;
import org.apache.gobblin.writer.WriterOutputFormat;
import org.apache.gobblin.util.JobLauncherUtils;
/**
* Unit tests for {@link TaskContext}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.runtime"})
public class TaskContextTest {
private static final String TEST_JOB_CONFIG = "job.name=GobblinTest1\n" +
"job.group=Test\n" +
"job.description=Test Gobblin job 1\n" +
"job.schedule=0 0/1 * * * ?\n" +
"source.class=org.apache.gobblin.test.TestSource\n" +
"workunit.namespace=test\n" +
"workunit.table=test\n" +
"writer.destination.type=HDFS\n" +
"writer.output.format=AVRO\n" +
"writer.fs.uri=file://localhost/\n" +
"writer.staging.dir=test/staging\n" +
"writer.output.dir=test/output\n" +
"writer.file.name=test.avro";
private TaskContext taskContext;
@BeforeClass
public void setUp()
throws Exception {
WorkUnit workUnit = WorkUnit.createEmpty();
Properties properties = new Properties();
properties.load(new StringReader(TEST_JOB_CONFIG));
workUnit.addAll(properties);
workUnit.setProp(ConfigurationKeys.JOB_ID_KEY, JobLauncherUtils.newJobId("GobblinTest1"));
workUnit.setProp(ConfigurationKeys.TASK_ID_KEY,
JobLauncherUtils.newTaskId(workUnit.getProp(ConfigurationKeys.JOB_ID_KEY), 0));
this.taskContext = new TaskContext(new WorkUnitState(workUnit));
}
@Test
public void testOtherMethods() {
Assert.assertTrue(this.taskContext.getSource() instanceof TestSource);
Assert.assertEquals(this.taskContext.getStatusReportingInterval(),
ConfigurationKeys.DEFAULT_TASK_STATUS_REPORT_INTERVAL_IN_MS);
Assert.assertEquals(this.taskContext.getDestinationType(1, 0), Destination.DestinationType.HDFS);
Assert.assertEquals(this.taskContext.getWriterOutputFormat(1, 0), WriterOutputFormat.AVRO);
Assert.assertTrue(this.taskContext.getConverters().isEmpty());
}
}
| 1,240 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/FsDatasetStateStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.apache.gobblin.metastore.DatasetStateStore;
import org.apache.gobblin.metastore.metadata.DatasetStateStoreEntryManager;
import org.apache.gobblin.metastore.predicates.DatasetPredicate;
import org.apache.gobblin.metastore.predicates.StateStorePredicate;
import org.apache.gobblin.metastore.predicates.StoreNamePredicate;
import org.apache.gobblin.runtime.metastore.filesystem.FsDatasetStateStoreEntryManager;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.metastore.FsStateStore;
import org.apache.gobblin.metastore.StateStore;
import com.google.common.io.Files;
/**
* Unit tests for {@link FsDatasetStateStore}.
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.runtime" })
public class FsDatasetStateStoreTest {
private static final String TEST_JOB_NAME = "TestJob";
private static final String TEST_JOB_ID = "TestJob1";
private static final String TEST_TASK_ID_PREFIX = "TestTask-";
private static final String TEST_DATASET_URN = "TestDataset";
private StateStore<JobState> fsJobStateStore;
private FsDatasetStateStore fsDatasetStateStore;
private long startTime = System.currentTimeMillis();
@BeforeClass
public void setUp() throws IOException {
this.fsJobStateStore = new FsStateStore<>(ConfigurationKeys.LOCAL_FS_URI,
FsDatasetStateStoreTest.class.getSimpleName(), JobState.class);
this.fsDatasetStateStore =
new FsDatasetStateStore(ConfigurationKeys.LOCAL_FS_URI, FsDatasetStateStoreTest.class.getSimpleName());
// clear data that may have been left behind by a prior test run
this.fsDatasetStateStore.delete(TEST_JOB_NAME);
}
@Test
public void testPersistJobState() throws IOException {
JobState jobState = new JobState(TEST_JOB_NAME, TEST_JOB_ID);
jobState.setId(TEST_JOB_ID);
jobState.setProp("foo", "bar");
jobState.setState(JobState.RunningState.COMMITTED);
jobState.setStartTime(this.startTime);
jobState.setEndTime(this.startTime + 1000);
jobState.setDuration(1000);
for (int i = 0; i < 3; i++) {
TaskState taskState = new TaskState();
taskState.setJobId(TEST_JOB_ID);
taskState.setTaskId(TEST_TASK_ID_PREFIX + i);
taskState.setId(TEST_TASK_ID_PREFIX + i);
taskState.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
jobState.addTaskState(taskState);
}
this.fsJobStateStore.put(TEST_JOB_NAME,
FsDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX + FsDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX,
jobState);
}
@Test(dependsOnMethods = "testPersistJobState")
public void testGetJobState() throws IOException {
JobState jobState = this.fsDatasetStateStore.get(TEST_JOB_NAME,
FsDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX + FsDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX,
TEST_JOB_ID);
Assert.assertEquals(jobState.getJobName(), TEST_JOB_NAME);
Assert.assertEquals(jobState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(jobState.getState(), JobState.RunningState.COMMITTED);
Assert.assertEquals(jobState.getStartTime(), this.startTime);
Assert.assertEquals(jobState.getEndTime(), this.startTime + 1000);
Assert.assertEquals(jobState.getDuration(), 1000);
Assert.assertEquals(jobState.getCompletedTasks(), 3);
for (int i = 0; i < jobState.getCompletedTasks(); i++) {
TaskState taskState = jobState.getTaskStates().get(i);
Assert.assertEquals(taskState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(taskState.getTaskId(), TEST_TASK_ID_PREFIX + i);
Assert.assertEquals(taskState.getId(), TEST_TASK_ID_PREFIX + i);
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
}
}
@Test(dependsOnMethods = "testGetJobState")
public void testPersistDatasetState() throws IOException {
JobState.DatasetState datasetState = new JobState.DatasetState(TEST_JOB_NAME, TEST_JOB_ID);
datasetState.setDatasetUrn(TEST_DATASET_URN);
datasetState.setState(JobState.RunningState.COMMITTED);
datasetState.setId(TEST_DATASET_URN);
datasetState.setStartTime(this.startTime);
datasetState.setEndTime(this.startTime + 1000);
datasetState.setDuration(1000);
for (int i = 0; i < 3; i++) {
TaskState taskState = new TaskState();
taskState.setJobId(TEST_JOB_ID);
taskState.setTaskId(TEST_TASK_ID_PREFIX + i);
taskState.setId(TEST_TASK_ID_PREFIX + i);
taskState.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
datasetState.addTaskState(taskState);
}
this.fsDatasetStateStore.persistDatasetState(TEST_DATASET_URN, datasetState);
}
@Test(dependsOnMethods = "testPersistDatasetState")
public void testGetDatasetState() throws IOException {
JobState.DatasetState datasetState =
this.fsDatasetStateStore.getLatestDatasetState(TEST_JOB_NAME, TEST_DATASET_URN);
Assert.assertEquals(datasetState.getDatasetUrn(), TEST_DATASET_URN);
Assert.assertEquals(datasetState.getJobName(), TEST_JOB_NAME);
Assert.assertEquals(datasetState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
Assert.assertEquals(datasetState.getStartTime(), this.startTime);
Assert.assertEquals(datasetState.getEndTime(), this.startTime + 1000);
Assert.assertEquals(datasetState.getDuration(), 1000);
Assert.assertEquals(datasetState.getCompletedTasks(), 3);
for (int i = 0; i < datasetState.getCompletedTasks(); i++) {
TaskState taskState = datasetState.getTaskStates().get(i);
Assert.assertEquals(taskState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(taskState.getTaskId(), TEST_TASK_ID_PREFIX + i);
Assert.assertEquals(taskState.getId(), TEST_TASK_ID_PREFIX + i);
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
}
}
@Test(dependsOnMethods = "testGetDatasetState")
public void testGetPreviousDatasetStatesByUrns() throws IOException {
Map<String, JobState.DatasetState> datasetStatesByUrns =
this.fsDatasetStateStore.getLatestDatasetStatesByUrns(TEST_JOB_NAME);
Assert.assertEquals(datasetStatesByUrns.size(), 1);
JobState.DatasetState datasetState = datasetStatesByUrns.get(TEST_DATASET_URN);
Assert.assertEquals(datasetState.getDatasetUrn(), TEST_DATASET_URN);
Assert.assertEquals(datasetState.getJobName(), TEST_JOB_NAME);
Assert.assertEquals(datasetState.getJobId(), TEST_JOB_ID);
Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
Assert.assertEquals(datasetState.getStartTime(), this.startTime);
Assert.assertEquals(datasetState.getEndTime(), this.startTime + 1000);
Assert.assertEquals(datasetState.getDuration(), 1000);
}
/**
* Loading previous statestore without apache package name.
*
* Specifically the example used here is the state store generated from previous gobblin-kafka version without
* changing the package name into apache-intialized.
*
* Should pass the test even the class name doesn't match given the change in
* @throws IOException
*/
@Test
public void testGetPreviousDatasetStatesByUrnsNoApache() throws IOException{
String JOB_NAME_FOR_INCOMPATIBLE_STATE_STORE = "test_failing_job";
FsDatasetStateStore _fsDatasetStateStore =
new FsDatasetStateStore(ConfigurationKeys.LOCAL_FS_URI,
"gobblin-runtime/src/test/resources/datasetState");
try {
Map<String, JobState.DatasetState> datasetStatesByUrns =
_fsDatasetStateStore.getLatestDatasetStatesByUrns(JOB_NAME_FOR_INCOMPATIBLE_STATE_STORE);
} catch (RuntimeException re){
Assert.fail("Loading of state store should not fail.");
}
}
@Test
public void testGetMetadataForTables() throws Exception {
File tmpDir = Files.createTempDir();
tmpDir.deleteOnExit();
FsDatasetStateStore store = new FsDatasetStateStore(FileSystem.getLocal(new Configuration()), tmpDir.getAbsolutePath());
JobState.DatasetState dataset2State = new JobState.DatasetState("job1", "job1_id2");
dataset2State.setDatasetUrn("dataset2");
dataset2State.setId("dataset2");
TaskState taskState = new TaskState();
taskState.setJobId("job1_id2");
taskState.setTaskId("task123");
taskState.setProp("key", "value");
dataset2State.addTaskState(taskState);
store.persistDatasetState("dataset1", new JobState.DatasetState("job1", "job1_id1"));
store.persistDatasetState("dataset1", new JobState.DatasetState("job1", "job1_id2"));
store.persistDatasetState("dataset2", dataset2State);
store.persistDatasetState("dataset1", new JobState.DatasetState("job2", "job2_id1"));
store.persistDatasetState("", new JobState.DatasetState("job3", "job3_id1"));
List<FsDatasetStateStoreEntryManager> metadataList = store.getMetadataForTables(new StateStorePredicate(x -> true));
// 5 explicitly stored states, plus 4 current links, one per job-dataset
Assert.assertEquals(metadataList.size(), 9);
metadataList = store.getMetadataForTables(new StoreNamePredicate("job1", x-> true));
// 3 explicitly stored states, plus 2 current links, one per dataset
Assert.assertEquals(metadataList.size(), 5);
metadataList = store.getMetadataForTables(new DatasetPredicate("job1", "dataset1", x -> true));
Assert.assertEquals(metadataList.size(), 3);
metadataList = store.getMetadataForTables(new DatasetPredicate("job1", "dataset2", meta ->
((DatasetStateStoreEntryManager) meta).getStateId().equals(DatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX)
));
Assert.assertEquals(metadataList.size(), 1);
DatasetStateStoreEntryManager metadata = metadataList.get(0);
Assert.assertEquals(metadata.getStoreName(), "job1");
Assert.assertEquals(metadata.getSanitizedDatasetUrn(), "dataset2");
Assert.assertEquals(metadata.getStateId(), DatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX);
Assert.assertEquals(metadata.getDatasetStateStore(), store);
JobState.DatasetState readState = (JobState.DatasetState) metadata.readState();
TaskState readTaskState = readState.getTaskStates().get(0);
Assert.assertEquals(readTaskState.getProp("key"), "value");
metadata.delete();
// verify it got deleted
metadataList = store.getMetadataForTables(new DatasetPredicate("job1", "dataset2", meta ->
((DatasetStateStoreEntryManager) meta).getStateId().equals(DatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX)
));
Assert.assertTrue(metadataList.isEmpty());
}
@AfterClass
public void tearDown() throws IOException {
FileSystem fs = FileSystem.getLocal(new Configuration(false));
Path rootDir = new Path(FsDatasetStateStoreTest.class.getSimpleName());
if (fs.exists(rootDir)) {
fs.delete(rootDir, true);
}
}
}
| 1,241 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/TaskStateCollectorServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
import org.apache.gobblin.metrics.event.EventSubmitter;
import org.apache.gobblin.service.ServiceConfigKeys;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.Maps;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metastore.FsStateStore;
import org.apache.gobblin.runtime.troubleshooter.InMemoryIssueRepository;
import org.apache.gobblin.util.JobLauncherUtils;
import static org.mockito.Mockito.*;
/**
* Unit tests for {@link TaskStateCollectorService}.
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.runtime" })
public class TaskStateCollectorServiceTest {
private static final String JOB_NAME = "TestJob";
private static final String JOB_ID = JobLauncherUtils.newJobId(JOB_NAME);
private static final String TASK_ID_0 = JobLauncherUtils.newTaskId(JOB_ID, 0);
private static final String TASK_ID_1 = JobLauncherUtils.newTaskId(JOB_ID, 1);
private static final String WORK_UNIT_SIZE = "20";
private final Path outputTaskStateDir = new Path(TaskStateCollectorServiceTest.class.getSimpleName());
private FileSystem localFs;
private FsStateStore<TaskState> taskStateStore;
private TaskStateCollectorService taskStateCollectorService;
private final JobState jobState = new JobState();
private final EventBus eventBus = new EventBus();
private EventSubmitter mockEventSubmitter;
private final Map<String, TaskState> taskStateMap = Maps.newHashMap();
@BeforeClass
public void setUp() throws Exception {
this.localFs = FileSystem.getLocal(new Configuration());
this.localFs.mkdirs(this.outputTaskStateDir);
this.taskStateStore = new FsStateStore<>(this.localFs, this.outputTaskStateDir.toUri().getPath(), TaskState.class);
this.mockEventSubmitter = mock(EventSubmitter.class);
this.taskStateCollectorService = new TaskStateCollectorService(new Properties(), this.jobState, this.eventBus,
this.mockEventSubmitter,this.taskStateStore, new Path(this.outputTaskStateDir, JOB_ID),
new InMemoryIssueRepository());
this.eventBus.register(this);
}
@Test
public void testPutIntoTaskStateStore() throws IOException {
TaskState taskState1 = new TaskState();
taskState1.setJobId(JOB_ID);
taskState1.setTaskId(TASK_ID_0);
taskState1.setProp(ServiceConfigKeys.WORK_UNIT_SIZE, WORK_UNIT_SIZE);
this.taskStateStore.put(JOB_ID, TASK_ID_0 + AbstractJobLauncher.TASK_STATE_STORE_TABLE_SUFFIX, taskState1);
TaskState taskState2 = new TaskState();
taskState2.setJobId(JOB_ID);
taskState2.setTaskId(TASK_ID_1);
taskState2.setProp(ServiceConfigKeys.WORK_UNIT_SIZE, WORK_UNIT_SIZE);
this.taskStateStore.put(JOB_ID, TASK_ID_1 + AbstractJobLauncher.TASK_STATE_STORE_TABLE_SUFFIX, taskState2);
}
@Test(dependsOnMethods = "testPutIntoTaskStateStore")
public void testCollectOutputTaskStates() throws Exception {
this.taskStateCollectorService.runOneIteration();
Assert.assertEquals(this.jobState.getTaskStates().size(), 2);
Assert.assertEquals(this.taskStateMap.size(), 2);
Assert.assertEquals(this.taskStateMap.get(TASK_ID_0).getJobId(), JOB_ID);
Assert.assertEquals(this.taskStateMap.get(TASK_ID_0).getTaskId(), TASK_ID_0);
Assert.assertEquals(this.taskStateMap.get(TASK_ID_1).getJobId(), JOB_ID);
Assert.assertEquals(this.taskStateMap.get(TASK_ID_1).getTaskId(), TASK_ID_1);
}
@Test
public void testHandlerResolution() throws Exception{
Properties props = new Properties();
props.setProperty(ConfigurationKeys.TASK_STATE_COLLECTOR_HANDLER_CLASS, "hivereg");
TaskStateCollectorService taskStateCollectorServiceHive = new TaskStateCollectorService(props, this.jobState, this.eventBus,
this.mockEventSubmitter, this.taskStateStore, new Path(this.outputTaskStateDir, JOB_ID + "_prime"),
new InMemoryIssueRepository());
Assert.assertEquals(taskStateCollectorServiceHive.getOptionalTaskCollectorHandler().get().getClass().getName(),
"org.apache.gobblin.runtime.HiveRegTaskStateCollectorServiceHandlerImpl");
taskStateCollectorServiceHive.shutDown();
return;
}
@AfterClass
public void tearDown() throws IOException {
if (this.localFs.exists(this.outputTaskStateDir)) {
this.localFs.delete(this.outputTaskStateDir, true);
}
}
@Subscribe
@Test(enabled = false)
public void handleNewOutputTaskStateEvent(NewTaskCompletionEvent newOutputTaskStateEvent) {
for (TaskState taskState : newOutputTaskStateEvent.getTaskStates()) {
this.taskStateMap.put(taskState.getTaskId(), taskState);
}
}
}
| 1,242 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/TaskStateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.io.Closer;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.rest.TaskExecutionInfo;
import org.apache.gobblin.runtime.troubleshooter.Issue;
import org.apache.gobblin.runtime.troubleshooter.IssueSeverity;
/**
* Unit tests for {@link TaskState}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.runtime"})
public class TaskStateTest {
private TaskState taskState;
private long startTime;
@BeforeClass
public void setUp() {
WorkUnitState workUnitState = new WorkUnitState();
workUnitState.setProp(ConfigurationKeys.JOB_ID_KEY, "Job-1");
workUnitState.setProp(ConfigurationKeys.TASK_ID_KEY, "Task-1");
this.taskState = new TaskState(workUnitState);
}
@Test
public void testSetAndGet() {
this.taskState.setId("Task-1");
this.taskState.setHighWaterMark(2000);
this.startTime = System.currentTimeMillis();
this.taskState.setStartTime(this.startTime);
this.taskState.setEndTime(this.startTime + 1000);
this.taskState.setTaskDuration(1000);
this.taskState.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
this.taskState.setProp("foo", "bar");
Assert.assertEquals(this.taskState.getJobId(), "Job-1");
Assert.assertEquals(this.taskState.getTaskId(), "Task-1");
Assert.assertEquals(this.taskState.getId(), "Task-1");
Assert.assertEquals(this.taskState.getHighWaterMark(), 2000);
Assert.assertEquals(this.taskState.getStartTime(), this.startTime);
Assert.assertEquals(this.taskState.getEndTime(), this.startTime + 1000);
Assert.assertEquals(this.taskState.getTaskDuration(), 1000);
Assert.assertEquals(this.taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
Assert.assertEquals(this.taskState.getProp("foo"), "bar");
}
@Test(dependsOnMethods = {"testSetAndGet"})
public void testSerDe()
throws IOException {
Closer closer = Closer.create();
try {
ByteArrayOutputStream baos = closer.register(new ByteArrayOutputStream());
DataOutputStream dos = closer.register(new DataOutputStream(baos));
this.taskState.write(dos);
ByteArrayInputStream bais = closer.register((new ByteArrayInputStream(baos.toByteArray())));
DataInputStream dis = closer.register((new DataInputStream(bais)));
TaskState newTaskState = new TaskState();
newTaskState.readFields(dis);
Assert.assertEquals(newTaskState.getJobId(), "Job-1");
Assert.assertEquals(newTaskState.getTaskId(), "Task-1");
Assert.assertEquals(this.taskState.getHighWaterMark(), 2000);
Assert.assertEquals(newTaskState.getStartTime(), this.startTime);
Assert.assertEquals(newTaskState.getEndTime(), this.startTime + 1000);
Assert.assertEquals(newTaskState.getTaskDuration(), 1000);
Assert.assertEquals(newTaskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
Assert.assertEquals(newTaskState.getProp("foo"), "bar");
} catch (Throwable t) {
throw closer.rethrow(t);
} finally {
closer.close();
}
}
@Test(dependsOnMethods = {"testSetAndGet"})
public void testToTaskExecutionInfo() {
TaskExecutionInfo taskExecutionInfo = this.taskState.toTaskExecutionInfo();
Assert.assertEquals(taskExecutionInfo.getJobId(), "Job-1");
Assert.assertEquals(taskExecutionInfo.getTaskId(), "Task-1");
Assert.assertEquals(taskExecutionInfo.getHighWatermark().longValue(), 2000L);
Assert.assertEquals(taskExecutionInfo.getStartTime().longValue(), this.startTime);
Assert.assertEquals(taskExecutionInfo.getEndTime().longValue(), this.startTime + 1000);
Assert.assertEquals(taskExecutionInfo.getDuration().longValue(), 1000L);
Assert.assertEquals(taskExecutionInfo.getState().name(), WorkUnitState.WorkingState.COMMITTED.name());
Assert.assertEquals(taskExecutionInfo.getTaskProperties().get("foo"), "bar");
}
@Test
public void testIssueSerialization() {
TaskState state = new TaskState(new WorkUnitState());
ArrayList<Issue> issues = new ArrayList<>();
issues.add(Issue.builder().summary("test issue 1").code("test").build());
HashMap<String, String> testProperties = new HashMap<String, String>() {{
put("testKey", "test value %'\"");
}};
issues.add(
Issue.builder().summary("test issue 2").code("test2").time(ZonedDateTime.now()).severity(IssueSeverity.ERROR)
.properties(testProperties).build());
state.setTaskIssues(issues);
List<Issue> deserializedIssues = state.getTaskIssues();
Assert.assertEquals(deserializedIssues, issues);
Assert.assertNotSame(deserializedIssues, issues);
}
}
| 1,243 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/DummyJobContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.slf4j.Logger;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.commit.CommitSequenceStore;
import org.apache.gobblin.commit.DeliverySemantics;
import org.apache.gobblin.metastore.JobHistoryStore;
import org.apache.gobblin.runtime.troubleshooter.InMemoryIssueRepository;
import org.apache.gobblin.source.Source;
/**
 * A {@link JobContext} test double: every persistence- or source-related hook is
 * stubbed out so tests can drive job logic without touching real state stores,
 * history stores, or sources.
 */
public class DummyJobContext extends JobContext {

  /** Canned dataset states returned verbatim by {@link #computeDatasetStatesByUrns()}. */
  private final Map<String, JobState.DatasetState> datasetStateMap;

  public DummyJobContext(Properties jobProps, Logger logger, Map<String, JobState.DatasetState> datasetStateMap)
      throws Exception {
    super(jobProps, logger,
        SharedResourcesBrokerFactory.createDefaultTopLevelBroker(ConfigFactory.empty(),
            GobblinScopeTypes.GLOBAL.defaultScopeInstance()),
        new InMemoryIssueRepository());
    this.datasetStateMap = datasetStateMap;
  }

  @Override
  protected FsDatasetStateStore createStateStore(Config config)
      throws IOException {
    // A no-op store on the local file system: nothing is ever actually persisted.
    return new NoopDatasetStateStore(FileSystem.getLocal(new Configuration()), "");
  }

  @Override
  protected Optional<JobHistoryStore> createJobHistoryStore(Properties jobProps) {
    // No job history in tests.
    return Optional.absent();
  }

  @Override
  protected Optional<CommitSequenceStore> createCommitSequenceStore()
      throws IOException {
    // No commit sequences in tests.
    return Optional.absent();
  }

  @Override
  protected Source<?, ?> createSource(Properties jobProps)
      throws ClassNotFoundException, InstantiationException, IllegalAccessException {
    // Tests using this context never pull from a real source.
    return null;
  }

  @Override
  protected void setTaskStagingAndOutputDirs() {
    // Intentionally a no-op: no staging/output directories are needed.
  }

  @Override
  protected Callable<Void> createSafeDatasetCommit(boolean shouldCommitDataInJob, boolean isJobCancelled,
      DeliverySemantics deliverySemantics, String datasetUrn, JobState.DatasetState datasetState,
      boolean isMultithreaded, JobContext jobContext) {
    // A commit that does nothing; tests only require the callable to complete.
    return () -> null;
  }

  @Override
  protected Map<String, JobState.DatasetState> computeDatasetStatesByUrns() {
    return this.datasetStateMap;
  }
}
| 1,244 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/BoundedBlockingRecordQueueTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.Lists;
/**
 * Unit tests for {@link BoundedBlockingRecordQueue}: put/get ordering across threads,
 * stats counters, and metric registration. The tests are order-dependent
 * ({@code dependsOnMethods}) because the stats counters accumulate across tests.
 *
 * @author Yinan Li
 */
@Test(groups = { "gobblin.runtime" })
public class BoundedBlockingRecordQueueTest {

  private static final String METRIC_NAME_PREFIX = "test";

  private BoundedBlockingRecordQueue<Integer> boundedBlockingRecordQueue;

  @BeforeClass
  public void setUp() {
    // Capacity 2 forces the producer to block; a 1s timeout makes get() on an
    // empty queue return null instead of blocking forever.
    this.boundedBlockingRecordQueue = BoundedBlockingRecordQueue.<Integer>newBuilder()
        .hasCapacity(2)
        .useTimeout(1000)
        .useTimeoutTimeUnit(TimeUnit.MILLISECONDS)
        .collectStats()
        .build();
  }

  /**
   * A producer thread puts six records while a consumer thread drains six; the
   * consumed order must equal the produced order, and a further get() must time out.
   */
  @Test
  public void testPutAndGet() throws InterruptedException {
    final List<Integer> produced = Lists.newArrayList();
    final List<Integer> consumed = Lists.newArrayList();

    Thread producer = new Thread(() -> {
      for (int record = 0; record < 6; record++) {
        try {
          this.boundedBlockingRecordQueue.put(record);
          produced.add(record);
        } catch (InterruptedException ie) {
          throw new RuntimeException(ie);
        }
      }
    });

    Thread consumer = new Thread(() -> {
      try {
        for (int i = 0; i < 6; i++) {
          consumed.add(this.boundedBlockingRecordQueue.get());
        }
      } catch (InterruptedException ie) {
        throw new RuntimeException(ie);
      }
    });

    producer.start();
    consumer.start();
    producer.join();
    consumer.join();

    Assert.assertEquals(produced, consumed);
    // Queue is now empty, so this get() hits the timeout and yields null.
    Assert.assertNull(this.boundedBlockingRecordQueue.get());
  }

  /**
   * Counters carried over from {@link #testPutAndGet()}: 7 get attempts
   * (6 successful + 1 timed-out) and 6 put attempts; two more puts fill the queue.
   */
  @Test(dependsOnMethods = "testPutAndGet")
  public void testQueueStats() throws InterruptedException {
    BoundedBlockingRecordQueue<Integer>.QueueStats stats = this.boundedBlockingRecordQueue.stats().get();

    Assert.assertEquals(stats.queueSize(), 0);
    Assert.assertEquals(stats.fillRatio(), 0d);
    Assert.assertEquals(stats.getAttemptCount(), 7);
    Assert.assertEquals(stats.putAttemptCount(), 6);

    this.boundedBlockingRecordQueue.put(0);
    this.boundedBlockingRecordQueue.put(1);
    Assert.assertEquals(stats.queueSize(), 2);
    Assert.assertEquals(stats.fillRatio(), 1d);
    Assert.assertEquals(stats.getAttemptCount(), 7);
    Assert.assertEquals(stats.putAttemptCount(), 8);
  }

  /** Registering the stats must expose two gauges and two meters reflecting the counters above. */
  @Test(dependsOnMethods = "testQueueStats")
  public void testRegisterAll() {
    MetricRegistry metricRegistry = new MetricRegistry();
    this.boundedBlockingRecordQueue.stats().get().registerAll(metricRegistry, METRIC_NAME_PREFIX);

    @SuppressWarnings("rawtypes")
    Map<String, Gauge> gauges = metricRegistry.getGauges();
    Assert.assertEquals(gauges.size(), 2);
    Assert.assertEquals(gauges
        .get(MetricRegistry.name(METRIC_NAME_PREFIX, BoundedBlockingRecordQueue.QueueStats.QUEUE_SIZE)).getValue(), 2);
    Assert.assertEquals(gauges
        .get(MetricRegistry.name(METRIC_NAME_PREFIX, BoundedBlockingRecordQueue.QueueStats.FILL_RATIO)).getValue(), 1d);

    Assert.assertEquals(metricRegistry.getMeters().size(), 2);
    Assert.assertEquals(metricRegistry
        .meter(MetricRegistry.name(METRIC_NAME_PREFIX, BoundedBlockingRecordQueue.QueueStats.GET_ATTEMPT_RATE))
        .getCount(), 7);
    Assert.assertEquals(metricRegistry
        .meter(MetricRegistry.name(METRIC_NAME_PREFIX, BoundedBlockingRecordQueue.QueueStats.PUT_ATTEMPT_RATE))
        .getCount(), 8);
  }

  @AfterClass
  public void tearDown() throws InterruptedException {
    this.boundedBlockingRecordQueue.clear();
    // After clearing, a get() must time out and return null.
    Assert.assertNull(this.boundedBlockingRecordQueue.get());
  }
}
| 1,245 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/JobListenersTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import com.google.common.collect.Lists;
import org.testng.annotations.Test;
import org.apache.gobblin.runtime.listeners.CloseableJobListener;
import org.apache.gobblin.runtime.listeners.JobListener;
import org.apache.gobblin.runtime.listeners.JobListeners;
@Test(groups = {"gobblin.runtime"})
public class JobListenersTest {

  /**
   * Checks that the parallel composite listener fans out completion and cancellation
   * callbacks to every delegate exactly once and then closes cleanly.
   */
  @Test
  public void testParallelJobListener()
      throws Exception {
    JobContext jobContext = mock(JobContext.class);
    JobListener listenerOne = mock(JobListener.class);
    JobListener listenerTwo = mock(JobListener.class);

    // try-with-resources closes the composite listener after the callbacks fire.
    try (CloseableJobListener composite =
        JobListeners.parallelJobListener(Lists.newArrayList(listenerOne, listenerTwo))) {
      composite.onJobCompletion(jobContext);
      composite.onJobCancellation(jobContext);
    }

    verify(listenerOne, times(1)).onJobCompletion(jobContext);
    verify(listenerOne, times(1)).onJobCancellation(jobContext);
    verify(listenerTwo, times(1)).onJobCompletion(jobContext);
    verify(listenerTwo, times(1)).onJobCancellation(jobContext);
  }
}
| 1,246 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/TestRecordStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.Flushable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import io.reactivex.Flowable;
import lombok.AllArgsConstructor;
import org.apache.gobblin.ack.Ackable;
import org.apache.gobblin.ack.BasicAckableForTesting;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.Converter;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
import org.apache.gobblin.fork.IdentityForkOperator;
import org.apache.gobblin.metadata.GlobalMetadata;
import org.apache.gobblin.publisher.TaskPublisher;
import org.apache.gobblin.qualitychecker.row.RowLevelPolicyChecker;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicyChecker;
import org.apache.gobblin.records.ControlMessageHandler;
import org.apache.gobblin.records.FlushControlMessageHandler;
import org.apache.gobblin.records.RecordStreamProcessor;
import org.apache.gobblin.records.RecordStreamWithMetadata;
import org.apache.gobblin.runtime.util.TaskMetrics;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.stream.ControlMessage;
import org.apache.gobblin.stream.ControlMessageInjector;
import org.apache.gobblin.stream.FlushControlMessage;
import org.apache.gobblin.stream.MetadataUpdateControlMessage;
import org.apache.gobblin.stream.RecordEnvelope;
import org.apache.gobblin.stream.StreamEntity;
import org.apache.gobblin.writer.DataWriter;
import org.apache.gobblin.writer.DataWriterBuilder;
import static org.mockito.Mockito.*;
/**
 * Tests for the streaming (record-stream) execution model of Gobblin.
 *
 * <p>Verifies that records and {@link ControlMessage}s flow in order through converters,
 * {@link RecordStreamProcessor}s, and writers; that flush and metadata-update messages
 * invoke the right handlers; and that ack callbacks fire for every stream entity.
 */
public class TestRecordStream {

  /** Records and generic control messages must both reach converter and writer handlers, in order. */
  @Test
  public void testControlMessages() throws Exception {
    MyExtractor extractor = new MyExtractor(new StreamEntity[]{new RecordEnvelope<>("a"),
        new BasicTestControlMessage("1"), new RecordEnvelope<>("b"), new BasicTestControlMessage("2")});
    MyConverter converter = new MyConverter();
    MyDataWriter writer = new MyDataWriter();

    Task task = setupTask(extractor, writer, converter);

    task.run();
    task.commit();
    Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);

    Assert.assertEquals(converter.records, Lists.newArrayList("a", "b"));
    Assert.assertEquals(converter.messages, Lists.newArrayList(new BasicTestControlMessage("1"), new BasicTestControlMessage("2")));

    Assert.assertEquals(writer.records, Lists.newArrayList("a", "b"));
    Assert.assertEquals(writer.messages, Lists.newArrayList(new BasicTestControlMessage("1"), new BasicTestControlMessage("2")));
  }

  /** {@link FlushControlMessage}s should trigger the writer's flush handler instead of being written as records. */
  @Test
  public void testFlushControlMessages() throws Exception {
    MyExtractor extractor = new MyExtractor(new StreamEntity[]{new RecordEnvelope<>("a"),
        FlushControlMessage.builder().flushReason("flush1").build(), new RecordEnvelope<>("b"),
        FlushControlMessage.builder().flushReason("flush2").build()});
    MyConverter converter = new MyConverter();
    MyFlushDataWriter writer = new MyFlushDataWriter();

    Task task = setupTask(extractor, writer, converter);

    task.run();
    task.commit();
    Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);

    Assert.assertEquals(converter.records, Lists.newArrayList("a", "b"));
    Assert.assertEquals(converter.messages, Lists.newArrayList(
        FlushControlMessage.builder().flushReason("flush1").build(),
        FlushControlMessage.builder().flushReason("flush2").build()));

    Assert.assertEquals(writer.records, Lists.newArrayList("a", "b"));
    Assert.assertEquals(writer.flush_messages, Lists.newArrayList("flush called", "flush called"));
  }

  /**
   * A flush whose reason contains "Fail" makes {@link MyFlushControlMessageHandler} throw;
   * the failing flush must be nacked and the task must end in the FAILED state.
   */
  @Test
  public void testFlushFailure() throws Exception {
    FlushAckable flushAckable1 = new FlushAckable();
    FlushAckable flushAckable2 = new FlushAckable();

    MyExtractor extractor = new MyExtractor(new StreamEntity[]{new RecordEnvelope<>("a"),
        FlushControlMessage.builder().flushReason("flush1").build().addCallBack(flushAckable1), new RecordEnvelope<>("b"),
        FlushControlMessage.builder().flushReason("flushFail1").build().addCallBack(flushAckable2)});
    MyConverter converter = new MyConverter();
    MyFlushDataWriter writer = new MyFlushDataWriter();

    Task task = setupTask(extractor, writer, converter);

    task.run();

    // first flush should succeed, but second one should fail
    Throwable error = flushAckable1.waitForAck();
    Assert.assertNull(error);

    error = flushAckable2.waitForAck();
    Assert.assertNotNull(error);

    task.commit();
    Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.FAILED);

    Assert.assertEquals(converter.records, Lists.newArrayList("a", "b"));
    Assert.assertEquals(converter.messages, Lists.newArrayList(
        FlushControlMessage.builder().flushReason("flush1").build(),
        FlushControlMessage.builder().flushReason("flushFail1").build()));

    Assert.assertEquals(writer.records, Lists.newArrayList("a", "b"));
    // Only the first flush reaches the writer; the second throws before flushing.
    Assert.assertEquals(writer.flush_messages, Lists.newArrayList("flush called"));
  }

  /**
   * Test of metadata update control messages that signal the converters to change schemas
   * @throws Exception
   */
  @Test
  public void testMetadataUpdateControlMessages() throws Exception {
    MyExtractor extractor = new MyExtractor(new StreamEntity[]{new RecordEnvelope<>("a"),
        new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()), new RecordEnvelope<>("b"),
        new MetadataUpdateControlMessage(GlobalMetadata.<String>builder().schema("Schema2").build())});
    SchemaAppendConverter converter = new SchemaAppendConverter();
    MyDataWriter writer = new MyDataWriter();

    Task task = setupTask(extractor, writer, converter);

    task.run();
    task.commit();
    Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);

    // "a" is converted under the initial "schema"; "b" under "Schema1" set by the first update.
    Assert.assertEquals(converter.records, Lists.newArrayList("a:schema", "b:Schema1"));
    Assert.assertEquals(converter.messages,
        Lists.newArrayList(new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()),
            new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())));

    Assert.assertEquals(writer.records, Lists.newArrayList("a:schema", "b:Schema1"));
    Assert.assertEquals(writer.messages, Lists.newArrayList(new MetadataUpdateControlMessage<>(
        GlobalMetadata.<String>builder().schema("Schema1").build()),
        new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())));
  }

  /**
   * Test with the converter configured in the list of {@link RecordStreamProcessor}s.
   * @throws Exception
   */
  @Test
  public void testMetadataUpdateWithStreamProcessors() throws Exception {
    MyExtractor extractor = new MyExtractor(new StreamEntity[]{new RecordEnvelope<>("a"),
        new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()), new RecordEnvelope<>("b"),
        new MetadataUpdateControlMessage(GlobalMetadata.<String>builder().schema("Schema2").build())});
    SchemaAppendConverter converter = new SchemaAppendConverter();
    MyDataWriter writer = new MyDataWriter();

    // Converter is passed as a stream processor (second list) rather than a classic converter.
    Task task = setupTask(extractor, writer, Collections.EMPTY_LIST, Lists.newArrayList(converter));

    task.run();
    task.commit();
    Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);

    Assert.assertEquals(converter.records, Lists.newArrayList("a:schema", "b:Schema1"));
    Assert.assertEquals(converter.messages,
        Lists.newArrayList(new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema1").build()),
            new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())));

    Assert.assertEquals(writer.records, Lists.newArrayList("a:schema", "b:Schema1"));
    Assert.assertEquals(writer.messages, Lists.newArrayList(new MetadataUpdateControlMessage<>(
        GlobalMetadata.<String>builder().schema("Schema1").build()),
        new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("Schema2").build())));
  }

  /**
   * Test the injection of {@link ControlMessage}s
   * @throws Exception
   */
  @Test
  public void testInjectedControlMessages() throws Exception {
    // Records are "schema:value"; the injector emits a MetadataUpdateControlMessage
    // whenever the schema prefix changes, which the append converter then applies.
    MyExtractor extractor = new MyExtractor(new StreamEntity[]{new RecordEnvelope<>("schema:a"),
        new RecordEnvelope<>("schema:b"), new RecordEnvelope<>("schema1:c"), new RecordEnvelope<>("schema2:d")});
    SchemaChangeDetectionInjector injector = new SchemaChangeDetectionInjector();
    SchemaAppendConverter converter = new SchemaAppendConverter();
    MyDataWriter writer = new MyDataWriterWithSchemaCheck();

    Task task = setupTask(extractor, writer, Collections.EMPTY_LIST,
        Lists.newArrayList(injector, converter));

    task.run();
    task.commit();
    Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);

    Assert.assertEquals(converter.records, Lists.newArrayList("a:schema", "b:schema", "c:schema1", "d:schema2"));
    Assert.assertEquals(converter.messages,
        Lists.newArrayList(new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("schema1").build()),
            new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("schema2").build())));

    Assert.assertEquals(writer.records, Lists.newArrayList("a:schema", "b:schema", "c:schema1", "d:schema2"));
    Assert.assertEquals(writer.messages, Lists.newArrayList(new MetadataUpdateControlMessage<>(
        GlobalMetadata.<String>builder().schema("schema1").build()),
        new MetadataUpdateControlMessage<>(GlobalMetadata.<String>builder().schema("schema2").build())));
  }

  /** Every stream entity (records and control messages) must be acked exactly once on success. */
  @Test
  public void testAcks() throws Exception {
    StreamEntity[] entities = new StreamEntity[]{new RecordEnvelope<>("a"),
        new BasicTestControlMessage("1"), new RecordEnvelope<>("b"), new BasicTestControlMessage("2")};

    BasicAckableForTesting ackable = new BasicAckableForTesting();
    // Attach the same counting ackable to every entity so we can verify all 4 acks.
    for (int i = 0; i < entities.length; i++) {
      entities[i].addCallBack(ackable);
    }

    MyExtractor extractor = new MyExtractor(entities);
    MyConverter converter = new MyConverter();
    MyDataWriter writer = new MyDataWriter();

    // Create a TaskState
    TaskState taskState = getEmptyTestTaskState("testRetryTaskId");
    taskState.setProp(ConfigurationKeys.TASK_SYNCHRONOUS_EXECUTION_MODEL_KEY, false);
    // Create a mock TaskContext
    TaskContext mockTaskContext = mock(TaskContext.class);
    when(mockTaskContext.getExtractor()).thenReturn(extractor);
    when(mockTaskContext.getForkOperator()).thenReturn(new IdentityForkOperator());
    when(mockTaskContext.getTaskState()).thenReturn(taskState);
    when(mockTaskContext.getConverters()).thenReturn(Lists.newArrayList(converter));
    when(mockTaskContext.getTaskLevelPolicyChecker(any(TaskState.class), anyInt()))
        .thenReturn(mock(TaskLevelPolicyChecker.class));
    when(mockTaskContext.getRowLevelPolicyChecker()).
        thenReturn(new RowLevelPolicyChecker(Lists.newArrayList(), "ss", FileSystem.getLocal(new Configuration())));
    when(mockTaskContext.getRowLevelPolicyChecker(anyInt())).
        thenReturn(new RowLevelPolicyChecker(Lists.newArrayList(), "ss", FileSystem.getLocal(new Configuration())));
    when(mockTaskContext.getDataWriterBuilder(anyInt(), anyInt())).thenReturn(writer);

    // Create a mock TaskPublisher
    TaskPublisher mockTaskPublisher = mock(TaskPublisher.class);
    when(mockTaskPublisher.canPublish()).thenReturn(TaskPublisher.PublisherState.SUCCESS);
    when(mockTaskContext.getTaskPublisher(any(TaskState.class), any()))
        .thenReturn(mockTaskPublisher);

    // Create a mock TaskStateTracker
    TaskStateTracker mockTaskStateTracker = mock(TaskStateTracker.class);

    // Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
    TaskExecutor taskExecutor = new TaskExecutor(new Properties());

    // Create the Task
    Task realTask = new Task(mockTaskContext, mockTaskStateTracker, taskExecutor, Optional.<CountDownLatch> absent());
    // Spy so the event submission (which needs infrastructure we don't mock) is suppressed.
    Task task = spy(realTask);
    doNothing().when(task).submitTaskCommittedEvent();

    task.run();
    task.commit();
    Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);

    Assert.assertEquals(ackable.acked, 4);
  }

  /** Builds a minimal {@link TaskState} with metrics disabled, keyed by the given task id. */
  TaskState getEmptyTestTaskState(String taskId) {
    // Create a TaskState
    WorkUnit workUnit = WorkUnit.create(
        new Extract(Extract.TableType.SNAPSHOT_ONLY, this.getClass().getName(), this.getClass().getSimpleName()));
    workUnit.setProp(ConfigurationKeys.TASK_KEY_KEY, "taskKey");
    TaskState taskState = new TaskState(new WorkUnitState(workUnit));
    taskState.setProp(ConfigurationKeys.METRICS_ENABLED_KEY, Boolean.toString(false));
    taskState.setTaskId(taskId);
    taskState.setJobId("1234");
    return taskState;
  }

  /** Convenience overload: single converter, no stream processors. */
  private Task setupTask(Extractor extractor, DataWriterBuilder writer, Converter converter) throws Exception {
    return setupTask(extractor, writer, Lists.newArrayList(converter), Collections.EMPTY_LIST);
  }

  /**
   * Wires a runnable (spied) {@link Task} around the given extractor, writer, converters, and
   * stream processors, with all other collaborators mocked; the task-committed event is suppressed.
   */
  private Task setupTask(Extractor extractor, DataWriterBuilder writer, List<Converter<?,?,?,?>> converters,
      List<RecordStreamProcessor<?,?,?,?>> recordStreamProcessors) throws Exception {
    // Create a TaskState
    TaskState taskState = getEmptyTestTaskState("testRetryTaskId");
    taskState.setProp(ConfigurationKeys.TASK_SYNCHRONOUS_EXECUTION_MODEL_KEY, false);
    // Create a mock TaskContext
    TaskContext mockTaskContext = mock(TaskContext.class);
    when(mockTaskContext.getExtractor()).thenReturn(extractor);
    when(mockTaskContext.getForkOperator()).thenReturn(new IdentityForkOperator());
    when(mockTaskContext.getTaskState()).thenReturn(taskState);
    when(mockTaskContext.getConverters()).thenReturn(converters);
    when(mockTaskContext.getRecordStreamProcessors()).thenReturn(recordStreamProcessors);
    when(mockTaskContext.getTaskLevelPolicyChecker(any(TaskState.class), anyInt()))
        .thenReturn(mock(TaskLevelPolicyChecker.class));
    when(mockTaskContext.getRowLevelPolicyChecker()).
        thenReturn(new RowLevelPolicyChecker(Lists.newArrayList(), "ss", FileSystem.getLocal(new Configuration())));
    when(mockTaskContext.getRowLevelPolicyChecker(anyInt())).
        thenReturn(new RowLevelPolicyChecker(Lists.newArrayList(), "ss", FileSystem.getLocal(new Configuration())));
    when(mockTaskContext.getDataWriterBuilder(anyInt(), anyInt())).thenReturn(writer);
    when(mockTaskContext.getTaskMetrics()).thenReturn(TaskMetrics.get(taskState));

    // Create a mock TaskPublisher
    TaskPublisher mockTaskPublisher = mock(TaskPublisher.class);
    when(mockTaskPublisher.canPublish()).thenReturn(TaskPublisher.PublisherState.SUCCESS);
    when(mockTaskContext.getTaskPublisher(any(TaskState.class), any()))
        .thenReturn(mockTaskPublisher);

    // Create a mock TaskStateTracker
    TaskStateTracker mockTaskStateTracker = mock(TaskStateTracker.class);

    // Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
    TaskExecutor taskExecutor = new TaskExecutor(new Properties());

    // Create the Task
    Task realTask = new Task(mockTaskContext, mockTaskStateTracker, taskExecutor, Optional.<CountDownLatch> absent());
    Task task = spy(realTask);
    doNothing().when(task).submitTaskCommittedEvent();
    return task;
  }

  /** Extractor stub that replays a fixed array of stream entities under a constant "schema". */
  @AllArgsConstructor
  static class MyExtractor implements Extractor<String, String> {
    private final StreamEntity<String>[] stream;

    @Override
    public String getSchema() throws IOException {
      return "schema";
    }

    @Override
    public long getExpectedRecordCount() {
      return 0;
    }

    @Override
    public long getHighWatermark() {
      return 0;
    }

    @Override
    public void close() throws IOException {
    }

    @Override
    public RecordStreamWithMetadata<String, String> recordStream(AtomicBoolean shutdownRequest) throws IOException {
      return new RecordStreamWithMetadata<>(Flowable.fromArray(this.stream),
          GlobalMetadata.<String>builder().schema("schema").build());
    }
  }

  /**
   * Writer stub that is its own builder; records writes into {@link #records} and
   * control messages into {@link #messages}, and captures the schema at build time.
   */
  static class MyDataWriter extends DataWriterBuilder<String, String> implements DataWriter<String> {
    protected List<String> records = new ArrayList<>();
    protected List<ControlMessage<String>> messages = new ArrayList<>();
    // Schema in effect when build() was called; checked by MyDataWriterWithSchemaCheck.
    protected String writerSchema;

    @Override
    public void write(String record) throws IOException {
      this.records.add(record);
    }

    @Override
    public ControlMessageHandler getMessageHandler() {
      return messages::add;
    }

    @Override
    public void commit() throws IOException {}

    @Override
    public void cleanup() throws IOException {}

    @Override
    public long recordsWritten() {
      return 0;
    }

    @Override
    public long bytesWritten() throws IOException {
      return 0;
    }

    @Override
    public DataWriter<String> build() throws IOException {
      this.writerSchema = this.schema;
      return this;
    }

    @Override
    public void close() throws IOException {}
  }

  /** Identity converter that records every record and control message it sees. */
  static class MyConverter extends Converter<String, String, String, String> {
    private List<String> records = new ArrayList<>();
    private List<ControlMessage<String>> messages = new ArrayList<>();

    @Override
    public String convertSchema(String inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
      return "schema";
    }

    @Override
    public Iterable<String> convertRecord(String outputSchema, String inputRecord, WorkUnitState workUnit)
        throws DataConversionException {
      records.add(inputRecord);
      return Lists.newArrayList(inputRecord);
    }

    @Override
    public ControlMessageHandler getMessageHandler() {
      return messages::add;
    }
  }

  /**
   * Converter that appends the output schema string to the record string
   */
  static class SchemaAppendConverter extends Converter<String, String, String, String> {
    private List<String> records = new ArrayList<>();
    private List<ControlMessage<String>> messages = new ArrayList<>();

    @Override
    public String convertSchema(String inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
      return inputSchema;
    }

    @Override
    public Iterable<String> convertRecord(String outputSchema, String inputRecord, WorkUnitState workUnit)
        throws DataConversionException {
      // Strip any "schema:" prefix, then tag the value with the current output schema.
      String inputWithoutSchema = inputRecord.substring(inputRecord.indexOf(":") + 1);
      String outputRecord = inputWithoutSchema + ":" + outputSchema;
      records.add(outputRecord);
      return Lists.newArrayList(outputRecord);
    }

    @Override
    public ControlMessageHandler getMessageHandler() {
      return messages::add;
    }
  }

  /**
   * Input to this {@link RecordStreamProcessor} is a string of the form "schema:value".
   * It will inject a {@link MetadataUpdateControlMessage} when a schema change is detected.
   */
  static class SchemaChangeDetectionInjector extends ControlMessageInjector<String, String> {
    private List<String> records = new ArrayList<>();
    private List<ControlMessage<String>> messages = new ArrayList<>();
    // Latest metadata seen; its schema is compared against each record's prefix.
    private GlobalMetadata<String> globalMetadata;

    public Iterable<String> convertRecord(String outputSchema, String inputRecord, WorkUnitState workUnitState)
        throws DataConversionException {
      String outputRecord = inputRecord.split(":")[1];
      records.add(outputRecord);
      return Lists.newArrayList(outputRecord);
    }

    @Override
    protected void setInputGlobalMetadata(GlobalMetadata<String> inputGlobalMetadata, WorkUnitState workUnitState) {
      this.globalMetadata = inputGlobalMetadata;
    }

    @Override
    public Iterable<ControlMessage<String>> injectControlMessagesBefore(RecordEnvelope<String> inputRecordEnvelope,
        WorkUnitState workUnitState) {
      // Emit a metadata update ahead of the record when its schema prefix differs
      // from the schema currently in effect.
      String recordSchema = inputRecordEnvelope.getRecord().split(":")[0];

      if (!recordSchema.equals(this.globalMetadata.getSchema())) {
        return Lists.newArrayList(new MetadataUpdateControlMessage<>(
            GlobalMetadata.<String>builder().schema(recordSchema).build()));
      }

      return null;
    }

    @Override
    public Iterable<ControlMessage<String>> injectControlMessagesAfter(RecordEnvelope<String> inputRecordEnvelope,
        WorkUnitState workUnitState) {
      return null;
    }

    @Override
    public ControlMessageHandler getMessageHandler() {
      return messages::add;
    }
  }

  /** Flush handler that fails (throws) when the flush reason contains "Fail", for testing nacks. */
  static class MyFlushControlMessageHandler extends FlushControlMessageHandler {
    public MyFlushControlMessageHandler(Flushable flushable) {
      super(flushable);
    }

    @Override
    public void handleMessage(ControlMessage message) {
      if (message instanceof FlushControlMessage) {
        if (((FlushControlMessage) message).getFlushReason().contains("Fail")) {
          throw new RuntimeException("Flush failed: " + ((FlushControlMessage) message).getFlushReason());
        }
        try {
          flushable.flush();
        } catch (IOException e) {
          throw new RuntimeException("Could not flush when handling FlushControlMessage", e);
        }
      }
    }
  }

  /**
   * {@link Ackable} for waiting for the flush control message to be processed
   */
  private static class FlushAckable implements Ackable {
    private Throwable error;
    private final CountDownLatch processed;

    public FlushAckable() {
      this.processed = new CountDownLatch(1);
    }

    @Override
    public void ack() {
      this.processed.countDown();
    }

    @Override
    public void nack(Throwable error) {
      this.error = error;
      this.processed.countDown();
    }

    /**
     * Wait for ack
     * @return any error encountered
     */
    public Throwable waitForAck() {
      try {
        this.processed.await();
        return this.error;
      } catch (InterruptedException e) {
        throw new RuntimeException("interrupted while waiting for ack");
      }
    }
  }

  /** Writer whose control-message handler flushes; flush invocations are recorded in {@link #flush_messages}. */
  static class MyFlushDataWriter extends MyDataWriter {
    private List<String> flush_messages = new ArrayList<>();

    @Override
    public ControlMessageHandler getMessageHandler() {
      return new MyFlushControlMessageHandler(this);
    }

    @Override
    public void flush() throws IOException {
      flush_messages.add("flush called");
    }
  }

  /** Writer that additionally asserts each record's schema suffix matches the schema seen at build time. */
  static class MyDataWriterWithSchemaCheck extends MyDataWriter {
    @Override
    public void write(String record) throws IOException {
      super.write(record);
      Assert.assertEquals(this.writerSchema, record.split(":")[1]);
    }
  }
}
| 1,247 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/TaskTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.fork.ForkOperator;
import org.apache.gobblin.fork.IdentityForkOperator;
import org.apache.gobblin.publisher.TaskPublisher;
import org.apache.gobblin.qualitychecker.row.RowLevelPolicyCheckResults;
import org.apache.gobblin.qualitychecker.row.RowLevelPolicyChecker;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicyChecker;
import org.apache.gobblin.runtime.util.TaskMetrics;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.testing.AssertWithBackoff;
import org.apache.gobblin.writer.DataWriter;
import org.apache.gobblin.writer.DataWriterBuilder;
import static org.mockito.Mockito.*;
/**
* Integration tests for {@link Task}.
*/
@Test
@Slf4j
public class TaskTest {
/**
 * Builds a minimal {@link TaskState} wrapping a fresh snapshot-only {@link WorkUnit},
 * with metrics disabled and a fixed job id, as the starting point for the tests below.
 *
 * @param taskId task id to assign to the returned state
 */
TaskState getEmptyTestTaskState(String taskId) {
// Create a TaskState
WorkUnit workUnit = WorkUnit.create(
new Extract(Extract.TableType.SNAPSHOT_ONLY, this.getClass().getName(), this.getClass().getSimpleName()));
workUnit.setProp(ConfigurationKeys.TASK_KEY_KEY, "taskKey");
TaskState taskState = new TaskState(new WorkUnitState(workUnit));
// Disable metrics so tests do not register/emit task metrics.
taskState.setProp(ConfigurationKeys.METRICS_ENABLED_KEY, Boolean.toString(false));
taskState.setTaskId(taskId);
taskState.setJobId("1234");
return taskState;
}
/**
 * Data provider yielding one state override per task execution model, so each test runs
 * once in synchronous mode and once in streaming (asynchronous) mode.
 */
@DataProvider(name = "stateOverrides")
public Object[][] createTestsForDifferentExecutionModes() {
  State synchronousMode = new State();
  synchronousMode.setProp(ConfigurationKeys.TASK_SYNCHRONOUS_EXECUTION_MODEL_KEY, true);
  State streamingMode = new State();
  streamingMode.setProp(ConfigurationKeys.TASK_SYNCHRONOUS_EXECUTION_MODEL_KEY, false);
  return new Object[][] { { synchronousMode }, { streamingMode } };
}
/**
* Check if a {@link WorkUnitState.WorkingState} of a {@link Task} is set properly after a {@link Task} fails once,
* but then is successful the next time.
*/
@Test(dataProvider = "stateOverrides")
public void testRetryTask(State overrides) throws Exception {
// Create a TaskState
TaskState taskState = getEmptyTestTaskState("testRetryTaskId");
taskState.addAll(overrides);
// Create a mock TaskContext
TaskContext mockTaskContext = mock(TaskContext.class);
when(mockTaskContext.getTaskMetrics()).thenReturn(TaskMetrics.get(taskState));
// FailOnceExtractor throws on the very first read only, so run #1 fails and run #2 succeeds.
when(mockTaskContext.getExtractor()).thenReturn(new FailOnceExtractor());
when(mockTaskContext.getForkOperator()).thenReturn(new IdentityForkOperator());
when(mockTaskContext.getTaskState()).thenReturn(taskState);
when(mockTaskContext.getTaskLevelPolicyChecker(any(TaskState.class), anyInt()))
.thenReturn(mock(TaskLevelPolicyChecker.class));
when(mockTaskContext.getRowLevelPolicyChecker()).
thenReturn(new RowLevelPolicyChecker(Lists.newArrayList(), "ss", FileSystem.getLocal(new Configuration())));
when(mockTaskContext.getRowLevelPolicyChecker(anyInt())).
thenReturn(new RowLevelPolicyChecker(Lists.newArrayList(), "ss", FileSystem.getLocal(new Configuration())));
// Create a mock TaskPublisher
TaskPublisher mockTaskPublisher = mock(TaskPublisher.class);
when(mockTaskPublisher.canPublish()).thenReturn(TaskPublisher.PublisherState.SUCCESS);
when(mockTaskContext.getTaskPublisher(any(TaskState.class), any()))
.thenReturn(mockTaskPublisher);
// Create a mock TaskStateTracker
TaskStateTracker mockTaskStateTracker = mock(TaskStateTracker.class);
// Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
TaskExecutor taskExecutor = new TaskExecutor(new Properties());
// Create the Task
Task realTask = new Task(mockTaskContext, mockTaskStateTracker, taskExecutor, Optional.<CountDownLatch> absent());
Task task = spy(realTask);
// Event submission is not under test; stub it out on the spy.
doNothing().when(task).submitTaskCommittedEvent();
// The first run of the Task should fail
task.run();
task.commit();
Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.FAILED);
// The second run of the Task should succeed
task.run();
task.commit();
Assert.assertEquals(task.getTaskState().getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);
}
/**
 * Builds a mock {@link TaskContext} wired with the given extractor and fork operator, plus one
 * {@link RecordCollectingWriterBuilder} per fork that appends written records into the matching
 * list of {@code writerCollectors}. Row-level policies are stubbed to pass every record.
 */
private TaskContext getMockTaskContext(TaskState taskState, Extractor mockExtractor,
ArrayList<ArrayList<Object>> writerCollectors, ForkOperator mockForkOperator)
throws Exception {
// One writer collector list per fork branch.
int numForks = writerCollectors.size();
// Create a mock RowLevelPolicyChecker
RowLevelPolicyChecker mockRowLevelPolicyChecker =
spy(new RowLevelPolicyChecker(Lists.newArrayList(), "ss", FileSystem.getLocal(new Configuration())));
when(mockRowLevelPolicyChecker.executePolicies(any(Object.class), any(RowLevelPolicyCheckResults.class)))
.thenReturn(true);
when(mockRowLevelPolicyChecker.getFinalState()).thenReturn(new State());
// Create a mock TaskPublisher
TaskPublisher mockTaskPublisher = mock(TaskPublisher.class);
when(mockTaskPublisher.canPublish()).thenReturn(TaskPublisher.PublisherState.SUCCESS);
// Create a mock TaskContext
TaskContext mockTaskContext = mock(TaskContext.class);
when(mockTaskContext.getExtractor()).thenReturn(mockExtractor);
when(mockTaskContext.getRawSourceExtractor()).thenReturn(mockExtractor);
when(mockTaskContext.getForkOperator()).thenReturn(mockForkOperator);
when(mockTaskContext.getTaskState()).thenReturn(taskState);
when(mockTaskContext.getTaskPublisher(any(TaskState.class), any()))
.thenReturn(mockTaskPublisher);
when(mockTaskContext.getRowLevelPolicyChecker()).thenReturn(mockRowLevelPolicyChecker);
when(mockTaskContext.getRowLevelPolicyChecker(anyInt())).thenReturn(mockRowLevelPolicyChecker);
when(mockTaskContext.getTaskLevelPolicyChecker(any(TaskState.class), anyInt())).thenReturn(mock(TaskLevelPolicyChecker.class));
// Wire a record-collecting writer builder for each fork branch.
for (int i =0; i < numForks; ++i) {
when(mockTaskContext.getDataWriterBuilder(numForks, i)).thenReturn(new RecordCollectingWriterBuilder(writerCollectors.get(i)));
}
return mockTaskContext;
}
/**
* Test that forks work correctly when the operator picks one outgoing fork
*/
@Test(dataProvider = "stateOverrides")
public void testForkCorrectnessRoundRobin(State overrides)
    throws Exception {
  // Create a TaskState
  TaskState taskState = getEmptyTestTaskState("testForkTaskId");
  taskState.addAll(overrides);
  int numRecords = 9;
  int numForks = 3;
  ForkOperator mockForkOperator = new RoundRobinForkOperator(numForks);
  // The following code depends on exact multiples
  Assert.assertTrue(numRecords % numForks == 0);
  ArrayList<ArrayList<Object>> recordCollectors = runTaskAndGetResults(taskState, numRecords, numForks, mockForkOperator);
  // Check that we got the right records in the collectors.
  // Under round-robin distribution record r goes to fork (r % numForks), so the j-th record
  // collected by fork f is record (j * numForks + f). The previous expectation used
  // (j * recordsPerFork + forkNumber), which only matched because recordsPerFork happened to
  // equal numForks (both 3) in this configuration.
  int recordsPerFork = numRecords / numForks;
  for (int forkNumber = 0; forkNumber < numForks; ++forkNumber) {
    ArrayList<Object> forkRecords = recordCollectors.get(forkNumber);
    Assert.assertEquals(forkRecords.size(), recordsPerFork);
    for (int j = 0; j < recordsPerFork; ++j) {
      Object forkRecord = forkRecords.get(j);
      Assert.assertEquals((String) forkRecord, "" + (j * numForks + forkNumber));
    }
  }
}
/**
* Test that forks work correctly when the operator picks all outgoing forks
*/
@Test(dataProvider = "stateOverrides")
public void testForkCorrectnessIdentity(State overrides)
throws Exception {
// Create a TaskState
TaskState taskState = getEmptyTestTaskState("testForkTaskId");
taskState.addAll(overrides);
int numRecords = 100;
int numForks = 5;
// Identity Fork Operator looks for number of forks in work unit state.
taskState.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, "" + numForks);
ForkOperator mockForkOperator = new IdentityForkOperator();
ArrayList<ArrayList<Object>> recordCollectors = runTaskAndGetResults(taskState, numRecords, numForks, mockForkOperator);
// Check that we got the right records in the collectors:
// the identity operator sends every record to every fork, in order.
int recordsPerFork = numRecords;
for (int forkNumber=0; forkNumber < numForks; ++ forkNumber) {
ArrayList<Object> forkRecords = recordCollectors.get(forkNumber);
Assert.assertEquals(forkRecords.size(), recordsPerFork);
for (int j=0; j < recordsPerFork; ++j) {
Object forkRecord = forkRecords.get(j);
Assert.assertEquals((String) forkRecord, "" + j);
}
}
}
/**
* Test that forks work correctly when the operator picks a subset of outgoing forks
*/
@Test(dataProvider = "stateOverrides")
public void testForkCorrectnessSubset(State overrides)
throws Exception {
// Create a TaskState
TaskState taskState = getEmptyTestTaskState("testForkTaskId");
taskState.addAll(overrides);
int numRecords = 20;
int numForks = 5;
int subset = 2;
ForkOperator mockForkOperator = new SubsetForkOperator(numForks, subset);
ArrayList<ArrayList<Object>> recordCollectors = runTaskAndGetResults(taskState, numRecords, numForks, mockForkOperator);
log.info("Records collected: {}", recordCollectors);
// Check that we got the right records in the collectors.
// The subset operator picks a random subset of forks per record, so we cannot assert which
// forks got which record; instead verify the total count and that each record reached
// exactly `subset` distinct forks.
int totalRecordsExpected = numRecords * subset;
int totalRecordsFound = 0;
// Map from record value to the list of fork numbers that received it.
HashMap<String, ArrayList<Integer>> recordsMap = new HashMap<>();
for (int forkNumber=0; forkNumber < numForks; ++ forkNumber) {
ArrayList<Object> forkRecords = recordCollectors.get(forkNumber);
for (Object forkRecord: forkRecords) {
String recordAsString = (String) forkRecord;
totalRecordsFound++;
if (recordsMap.containsKey(recordAsString)) {
recordsMap.get(recordAsString).add(forkNumber);
} else {
ArrayList<Integer> forksFound = new ArrayList<>();
forksFound.add(forkNumber);
recordsMap.put(recordAsString, forksFound);
}
}
}
Assert.assertEquals(totalRecordsFound, totalRecordsExpected, "Total records");
for (Map.Entry<String, ArrayList<Integer>> recordForks: recordsMap.entrySet()) {
Assert.assertEquals(recordForks.getValue().size(), subset);
}
}
/**
 * Runs a {@link Task} built from a {@link StringExtractor} producing {@code numRecords} records
 * through the given fork operator, then returns one list of collected records per fork.
 */
private ArrayList<ArrayList<Object>> runTaskAndGetResults(TaskState taskState, int numRecords, int numForks,
    ForkOperator mockForkOperator)
    throws Exception {
  ArrayList<ArrayList<Object>> recordCollectors = new ArrayList<>(numForks);
  for (int fork = 0; fork < numForks; fork++) {
    recordCollectors.add(new ArrayList<>());
  }
  TaskContext mockTaskContext =
      getMockTaskContext(taskState, new StringExtractor(numRecords), recordCollectors, mockForkOperator);
  // A real TaskExecutor is required so each Fork runs in its own thread.
  TaskStateTracker mockTaskStateTracker = mock(TaskStateTracker.class);
  TaskExecutor taskExecutor = new TaskExecutor(new Properties());
  Task task = new Task(mockTaskContext, mockTaskStateTracker, taskExecutor, Optional.<CountDownLatch>absent());
  // Run and commit
  task.run();
  task.commit();
  return recordCollectors;
}
/**
* Test the addition of a task timestamp to the file name
*/
@Test
public void testTimestampInFilename()
throws Exception {
// Create a TaskState
TaskState taskState = getEmptyTestTaskState("testTimestampInFilename");
// Provide an explicit task start time so the timestamp in the writer id is deterministic.
taskState.setProp(ConfigurationKeys.TASK_START_TIME_MILLIS_KEY, "12345");
taskState.setProp(ConfigurationKeys.WRITER_ADD_TASK_TIMESTAMP, "true");
int numRecords = 1;
int numForks = 1;
ForkOperator mockForkOperator = new RoundRobinForkOperator(numForks);
ArrayList<ArrayList<Object>> recordCollectors = new ArrayList<>(numForks);
for (int i=0; i < numForks; ++i) {
recordCollectors.add(new ArrayList<>());
}
TaskContext mockTaskContext = getMockTaskContext(taskState,
new StringExtractor(numRecords), recordCollectors, mockForkOperator);
// Create a mock TaskStateTracker
TaskStateTracker mockTaskStateTracker = mock(TaskStateTracker.class);
// Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
TaskExecutor taskExecutor = new TaskExecutor(new Properties());
// Create the Task
Task task = new Task(mockTaskContext, mockTaskStateTracker, taskExecutor, Optional.<CountDownLatch>absent());
// Run and commit
task.run();
task.commit();
DataWriterBuilder writerBuilder = mockTaskContext.getDataWriterBuilder(numForks, 0);
// writer id should have the expected name with the timestamp
Assert.assertEquals(writerBuilder.getWriterId(), "testTimestampInFilename_12345_0");
}
/**
* Test the addition of a task timestamp to the file name fails if the task start time is not present
*/
@Test(expectedExceptions = {ExecutionException.class, NullPointerException.class})
public void testTimestampInFilenameError()
throws Exception {
// Create a TaskState
TaskState taskState = getEmptyTestTaskState("testTimestampInFilenameError");
// Timestamp requested but TASK_START_TIME_MILLIS_KEY deliberately NOT set:
// building the writer id is expected to fail.
taskState.setProp(ConfigurationKeys.WRITER_ADD_TASK_TIMESTAMP, "true");
int numRecords = 1;
int numForks = 1;
ForkOperator mockForkOperator = new RoundRobinForkOperator(numForks);
ArrayList<ArrayList<Object>> recordCollectors = new ArrayList<>(numForks);
for (int i=0; i < numForks; ++i) {
recordCollectors.add(new ArrayList<>());
}
TaskContext mockTaskContext = getMockTaskContext(taskState,
new StringExtractor(numRecords), recordCollectors, mockForkOperator);
// Create a mock TaskStateTracker
TaskStateTracker mockTaskStateTracker = mock(TaskStateTracker.class);
// Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
TaskExecutor taskExecutor = new TaskExecutor(new Properties());
// Create the Task
Task task = new Task(mockTaskContext, mockTaskStateTracker, taskExecutor, Optional.<CountDownLatch>absent());
// Run and commit
task.run();
task.commit();
}
/**
* A test that calls {@link Task#cancel()} while {@link Task#run()} is executing. Ensures that the countdown latch
* is decremented and TaskState is set to FAILED.
* @throws Exception
*/
@Test
public void testTaskCancelBeforeCompletion()
throws Exception {
// Create a TaskState
TaskState taskState = getEmptyTestTaskState("testCancelBeforeCompletion");
// numRecords == -1 makes StringExtractor unbounded, so the task keeps running until cancelled.
int numRecords = -1;
int numForks = 1;
ForkOperator mockForkOperator = new RoundRobinForkOperator(numForks);
ArrayList<ArrayList<Object>> recordCollectors = new ArrayList<>(numForks);
for (int i=0; i < numForks; ++i) {
recordCollectors.add(new ArrayList<>());
}
TaskContext mockTaskContext = getMockTaskContext(taskState,
new StringExtractor(numRecords), recordCollectors, mockForkOperator);
// Create a dummy TaskStateTracker
TaskStateTracker dummyTaskStateTracker = new GobblinMultiTaskAttemptTest.DummyTestStateTracker(new Properties(), log);
// Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
TaskExecutor taskExecutor = new TaskExecutor(new Properties());
CountUpAndDownLatch countDownLatch = new CountUpAndDownLatch(0);
// DelayedFailureTask sleeps 1s before failing, so this test can observe the RUNNING state
// in the window between cancel() and the eventual failure handling.
Task task = new DelayedFailureTask(mockTaskContext, dummyTaskStateTracker, taskExecutor, Optional.of(countDownLatch));
//Increment the countDownLatch to signal a new task creation.
countDownLatch.countUp();
ExecutorService executorService = Executors.newSingleThreadExecutor();
Future taskFuture = executorService.submit(new Thread(() -> task.run()));
task.setTaskFuture(taskFuture);
//Wait for task to enter RUNNING state
AssertWithBackoff.create().maxSleepMs(10).timeoutMs(1000).backoffFactor(1)
.assertTrue(input -> task.getWorkingState() == WorkUnitState.WorkingState.RUNNING,
"Waiting for task to enter RUNNING state");
Assert.assertEquals(countDownLatch.getCount(), 1);
task.shutdown();
//Ensure task is still RUNNING, since shutdown() is a NO-OP and the extractor should continue.
Assert.assertEquals(countDownLatch.getCount(), 1);
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.RUNNING);
//Call task cancel
task.cancel();
//Ensure task is still RUNNING immediately after cancel() due to the delay introduced in task failure handling.
Assert.assertEquals(countDownLatch.getCount(), 1);
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.RUNNING);
//Ensure countDownLatch is eventually counted down to 0
AssertWithBackoff.create().maxSleepMs(100).timeoutMs(5000).backoffFactor(1)
.assertTrue(input -> countDownLatch.getCount() == 0, "Waiting for the task to complete.");
//Ensure the TaskState is set to FAILED
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.FAILED);
}
/**
* A test that calls {@link Task#cancel()} after {@link Task#run()} is completed. In this case the cancel() method should
* be a NO-OP and should leave the task state unchanged.
* @throws Exception
*/
@Test
public void testTaskCancelAfterCompletion()
throws Exception {
// Create a TaskState
TaskState taskState = getEmptyTestTaskState("testCancelAfterCompletion");
// Unbounded extractor, but constructed with shouldIgnoreShutdown == false below, so
// shutdown() actually stops record production and lets the task finish on its own.
int numRecords = -1;
int numForks = 1;
ForkOperator mockForkOperator = new RoundRobinForkOperator(numForks);
ArrayList<ArrayList<Object>> recordCollectors = new ArrayList<>(numForks);
for (int i=0; i < numForks; ++i) {
recordCollectors.add(new ArrayList<>());
}
TaskContext mockTaskContext = getMockTaskContext(taskState,
new StringExtractor(numRecords, false), recordCollectors, mockForkOperator);
// Create a dummy TaskStateTracker
TaskStateTracker dummyTaskStateTracker = new GobblinMultiTaskAttemptTest.DummyTestStateTracker(new Properties(), log);
// Create a TaskExecutor - a real TaskExecutor must be created so a Fork is run in a separate thread
TaskExecutor taskExecutor = new TaskExecutor(new Properties());
CountUpAndDownLatch countDownLatch = new CountUpAndDownLatch(0);
// Create the Task
Task task = new Task(mockTaskContext, dummyTaskStateTracker, taskExecutor, Optional.of(countDownLatch));
//Increment the countDownLatch to signal a new task creation.
countDownLatch.countUp();
ExecutorService executorService = Executors.newSingleThreadExecutor();
Future taskFuture = executorService.submit(new Thread(() -> task.run()));
task.setTaskFuture(taskFuture);
//Wait for task to enter RUNNING state
AssertWithBackoff.create().maxSleepMs(10).timeoutMs(1000).backoffFactor(1)
.assertTrue(input -> task.getWorkingState() == WorkUnitState.WorkingState.RUNNING,
"Waiting for task to enter RUNNING state");
Assert.assertEquals(countDownLatch.getCount(), 1);
task.shutdown();
//Ensure countDownLatch is counted down to 0 i.e. task is done.
AssertWithBackoff.create().maxSleepMs(100).timeoutMs(5000).backoffFactor(1)
.assertTrue(input -> countDownLatch.getCount() == 0, "Waiting for the task to complete.");
//Ensure the TaskState is RUNNING
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.RUNNING);
//Call task cancel
task.cancel();
//Ensure the TaskState is unchanged on cancel()
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.RUNNING);
//Ensure task state is successful on commit()
task.commit();
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.SUCCESSFUL);
}
/**
* An implementation of {@link Extractor} that throws an {@link IOException} during the invocation of
* {@link #readRecord(Object)}.
*/
private static class FailOnceExtractor implements Extractor<Object, Object> {
  // True once the injected failure has been thrown. Renamed from UPPER_SNAKE (reserved for
  // constants) since this is a mutable instance field.
  private final AtomicBoolean hasFailed = new AtomicBoolean();

  @Override
  public Object getSchema() {
    return null;
  }

  /**
   * Throws on the very first invocation only; every later call returns null (end of data).
   */
  @Override
  public Object readRecord(@Deprecated Object reuse) throws IOException {
    // compareAndSet makes the check-then-flip atomic, unlike the previous get()/set() pair.
    if (hasFailed.compareAndSet(false, true)) {
      throw new IOException("Injected failure");
    }
    return null;
  }

  @Override
  public long getExpectedRecordCount() {
    return -1;
  }

  @Override
  public long getHighWatermark() {
    return -1;
  }

  @Override
  public void close() {
    // Do nothing
  }
}
/**
 * Extractor producing the records "0", "1", ... Bounded to {@code numRecords} records, or
 * unbounded when constructed with -1; an unbounded extractor only stops when shutdown is
 * requested AND the extractor was built with {@code shouldIgnoreShutdown == false}.
 */
private static class StringExtractor implements Extractor<Object, String> {
//Num records to extract. If set to -1, it is treated as an unbounded extractor.
private final int _numRecords;
// Index of the last record emitted; starts at -1 (nothing emitted yet).
private int _currentRecord;
// When true (the default), shutdown() is a no-op and extraction continues.
private boolean _shouldIgnoreShutdown = true;
// Set by shutdown() (only when honored) to stop readRecord from producing further records.
private AtomicBoolean _shutdownRequested = new AtomicBoolean(false);
public StringExtractor(int numRecords) {
this(numRecords, true);
}
public StringExtractor(int numRecords, boolean shouldIgnoreShutdown) {
_numRecords = numRecords;
_currentRecord = -1;
_shouldIgnoreShutdown = shouldIgnoreShutdown;
}
@Override
public Object getSchema() {
return "";
}
@Override
public String readRecord(@Deprecated String reuse) {
// Emit the next record unless shutdown was honored or the bounded count is exhausted.
if (!_shutdownRequested.get() && (_numRecords == -1 || _currentRecord < _numRecords-1)) {
_currentRecord++;
return "" + _currentRecord;
} else {
return null;
}
}
@Override
public long getExpectedRecordCount() {
return _numRecords;
}
@Override
public long getHighWatermark() {
return -1;
}
@Override
public void close() {
}
@Override
public void shutdown() {
if (!this._shouldIgnoreShutdown) {
this._shutdownRequested.set(true);
}
}
}
/**
 * Fork operator that sends the schema to every fork but routes each data record to exactly
 * one fork, cycling through the forks in order (record r goes to fork r % numForks).
 */
private static class RoundRobinForkOperator implements ForkOperator<Object, Object> {
private final int _numForks;
// Schema fork mask: always all-true.
private final Boolean[] _forkedSchemas;
// Record fork mask: exactly one entry true per record.
private final Boolean[] _forkedRecords;
// Index of the fork that received the previous record.
private int _lastForkTaken;
public RoundRobinForkOperator(int numForks) {
_numForks = numForks;
_forkedSchemas = new Boolean[_numForks];
_forkedRecords = new Boolean[_numForks];
// Start at the last fork so the first record goes to fork 0.
_lastForkTaken = _numForks-1;
for (int i=0; i < _numForks; ++i) {
_forkedSchemas[i] = Boolean.TRUE;
_forkedRecords[i] = Boolean.FALSE;
}
}
@Override
public void init(WorkUnitState workUnitState) {
}
@Override
public int getBranches(WorkUnitState workUnitState) {
return _numForks;
}
@Override
public List<Boolean> forkSchema(WorkUnitState workUnitState, Object input) {
return Arrays.asList(_forkedSchemas);
}
@Override
public List<Boolean> forkDataRecord(WorkUnitState workUnitState, Object input) {
// Clear the previous fork and advance to the next one.
_forkedRecords[_lastForkTaken] = Boolean.FALSE;
_lastForkTaken = (_lastForkTaken+1)%_numForks;
_forkedRecords[_lastForkTaken] = Boolean.TRUE;
return Arrays.asList(_forkedRecords);
}
@Override
public void close() {
}
}
/**
 * Fork operator that sends the schema to every fork but routes each data record to a random
 * subset of exactly {@code subsetSize} distinct forks.
 */
private static class SubsetForkOperator implements ForkOperator<Object, Object> {
private final int _numForks;
// Number of distinct forks each record is routed to.
private final int _subsetSize;
private final Boolean[] _forkedSchemas;
private final Boolean[] _forkedRecords;
private final Random _random;
public SubsetForkOperator(int numForks, int subsetSize) {
Preconditions.checkArgument(subsetSize >=0 && subsetSize <= numForks,
"Subset size should be in range [0, numForks]");
_numForks = numForks;
_subsetSize = subsetSize;
_forkedSchemas = new Boolean[_numForks];
_forkedRecords = new Boolean[_numForks];
_random = new Random();
for (int i=0; i < _numForks; ++i) {
_forkedSchemas[i] = Boolean.TRUE;
_forkedRecords[i] = Boolean.FALSE;
}
}
@Override
public void init(WorkUnitState workUnitState) {
}
@Override
public int getBranches(WorkUnitState workUnitState) {
return _numForks;
}
@Override
public List<Boolean> forkSchema(WorkUnitState workUnitState, Object input) {
return Arrays.asList(_forkedSchemas);
}
@Override
public List<Boolean> forkDataRecord(WorkUnitState workUnitState, Object input) {
// Reset the mask, then pick _subsetSize distinct forks at random.
for (int i=0; i < _numForks; ++i) {
_forkedRecords[i] = Boolean.FALSE;
}
// Really lazy way of getting a random subset, not intended for production use
int chosenRecords = 0;
while (chosenRecords != _subsetSize) {
int index = _random.nextInt(_numForks);
if (!_forkedRecords[index]) {
_forkedRecords[index] = Boolean.TRUE;
chosenRecords++;
}
}
return Arrays.asList(_forkedRecords);
}
@Override
public void close() {
}
}
private class RecordCollectingWriterBuilder extends DataWriterBuilder {
private final ArrayList<Object> _recordSink;
public RecordCollectingWriterBuilder(ArrayList<Object> objects) {
super();
_recordSink = objects;
}
@Override
public DataWriter build() {
return new DataWriter() {
@Override
public void write(Object record) {
_recordSink.add(record);
}
@Override
public void commit() {
}
@Override
public void cleanup() {
}
@Override
public long recordsWritten() {
return _recordSink.size();
}
@Override
public long bytesWritten() {
return -1;
}
@Override
public void close() {
}
};
}
}
/**
* An extension of {@link Task} that introduces a fixed delay on encountering an exception.
*/
private static class DelayedFailureTask extends Task {
public DelayedFailureTask(TaskContext context, TaskStateTracker taskStateTracker, TaskExecutor taskExecutor,
Optional<CountDownLatch> countDownLatch) {
super(context, taskStateTracker, taskExecutor, countDownLatch);
}
@Override
protected void failTask(Throwable t) {
try {
Thread.sleep(1000);
super.failTask(t);
} catch (InterruptedException e) {
log.error("Encountered exception: {}", e);
}
}
}
}
| 1,248 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/DatasetStateStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.FileReader;
import java.io.IOException;
import java.util.List;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.metastore.FsStateStore;
import org.apache.gobblin.metastore.StateStore;
import org.apache.gobblin.runtime.local.LocalJobLauncher;
import org.apache.gobblin.source.Source;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.extractor.extract.AbstractSource;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.writer.DataWriter;
import org.apache.gobblin.writer.DataWriterBuilder;
/**
* Unit tests around the state store.
*
* <p>
* This test uses the {@link LocalJobLauncher} to launch and run a dummy job and checks the
* state store between runs of the dummy job to make sure important things like watermarks
* are carried over properly between runs.
* </p>
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.runtime" })
public class DatasetStateStoreTest {
private static final String JOB_NAME = DatasetStateStoreTest.class.getSimpleName();
private static final String NAMESPACE = "TestNamespace";
private static final String TABLE = "TestTable";
// Marker property set by DummySource/DummyExtractor and verified after each run.
private static final String FOO = "foo";
private static final String BAR = "bar";
// Index of a work unit within a run; used to compute expected watermarks.
private static final String WORK_UNIT_INDEX_KEY = "work.unit.index";
private static final String LAST_READ_RECORD_KEY = "last.read.record";
// State store under test, shared across the ordered test methods.
private StateStore<JobState.DatasetState> datasetStateStore;
private Properties jobConfig = new Properties();
/**
 * Loads the shared test properties, builds the FS-backed dataset state store, removes any
 * leftover state from earlier runs, and assembles the job config pointing at the dummy
 * source and writer used by the three ordered launch tests.
 */
@BeforeClass
public void setUp() throws Exception {
Properties properties = new Properties();
try (FileReader fr = new FileReader("gobblin-test/resource/gobblin.test.properties")) {
properties.load(fr);
}
this.datasetStateStore = new FsStateStore<>(
properties.getProperty(ConfigurationKeys.STATE_STORE_FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI),
properties.getProperty(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY), JobState.DatasetState.class);
// clear data that might be there from a prior run
this.datasetStateStore.delete(JOB_NAME);
this.jobConfig.putAll(properties);
this.jobConfig.setProperty(ConfigurationKeys.JOB_NAME_KEY, JOB_NAME);
this.jobConfig.setProperty(ConfigurationKeys.SOURCE_CLASS_KEY, DummySource.class.getName());
this.jobConfig.setProperty(ConfigurationKeys.WRITER_BUILDER_CLASS, DummyDataWriterBuilder.class.getName());
}
// First run: DummySource initializes watermarks from scratch; verify state after run 1.
@Test
public void testLaunchFirstJob() throws Exception {
try (JobLauncher launcher = new LocalJobLauncher(this.jobConfig)) {
launcher.launchJob(null);
}
verifyJobState(1);
}
// Second run: watermarks must be carried over from the state persisted by the first run.
@Test(dependsOnMethods = "testLaunchFirstJob")
public void testLaunchSecondJob() throws Exception {
try (JobLauncher launcher = new LocalJobLauncher(this.jobConfig)) {
launcher.launchJob(null);
}
verifyJobState(2);
}
// Third run: watermarks continue advancing from the state persisted by the second run.
@Test(dependsOnMethods = "testLaunchSecondJob")
public void testLaunchThirdJob() throws Exception {
try (JobLauncher launcher = new LocalJobLauncher(this.jobConfig)) {
launcher.launchJob(null);
}
verifyJobState(3);
}
// Remove the state written by this test class so later runs start clean.
@AfterClass
public void tearDown() throws IOException {
this.datasetStateStore.delete(JOB_NAME);
}
/**
 * Verifies the persisted dataset state after the {@code run}-th job run: the job committed,
 * every task committed with the marker property set, and the low/high watermarks advanced by
 * NUM_WORK_UNITS * NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR per run (matching DummySource below).
 *
 * @param run 1-based index of the job run just completed
 */
private void verifyJobState(int run) throws IOException {
List<JobState.DatasetState> datasetStateList = this.datasetStateStore.getAll(JOB_NAME, "current.jst");
Assert.assertEquals(datasetStateList.size(), 1);
JobState jobState = datasetStateList.get(0);
Assert.assertEquals(jobState.getState(), JobState.RunningState.COMMITTED);
Assert.assertEquals(jobState.getTaskStates().size(), DummySource.NUM_WORK_UNITS);
for (TaskState taskState : jobState.getTaskStates()) {
Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
Assert.assertEquals(taskState.getProp(FOO), BAR);
// Check if the low watermark is properly kept track of
int expectedLowWatermark =
(run - 1) * DummySource.NUM_WORK_UNITS * DummySource.NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR
+ taskState.getPropAsInt(WORK_UNIT_INDEX_KEY) * DummySource.NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR + 1;
Assert.assertEquals(taskState.getPropAsInt(ConfigurationKeys.WORK_UNIT_LOW_WATER_MARK_KEY), expectedLowWatermark);
// Check if the high watermark is properly kept track of
int expectedHighWatermark = expectedLowWatermark + DummySource.NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR - 1;
Assert.assertEquals(taskState.getPropAsInt(ConfigurationKeys.WORK_UNIT_HIGH_WATER_MARK_KEY),
expectedHighWatermark);
Assert.assertEquals(taskState.getPropAsInt(LAST_READ_RECORD_KEY), expectedHighWatermark);
}
}
/**
* A dummy implementation of {@link Source}.
*/
public static class DummySource extends AbstractSource<String, Integer> {
private static final int NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR = 1000;
private static final int NUM_WORK_UNITS = 5;
/**
 * Creates fresh work units on the first run; on later runs, advances each previous work
 * unit's watermarks by NUM_WORK_UNITS * NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR so the test
 * can verify watermark carry-over between runs.
 */
@Override
public List<WorkUnit> getWorkunits(SourceState sourceState) {
// Marker property checked by verifyJobState.
sourceState.setProp(FOO, BAR);
if (Iterables.isEmpty(sourceState.getPreviousWorkUnitStates())) {
return initializeWorkUnits();
}
List<WorkUnit> workUnits = Lists.newArrayList();
for (WorkUnitState workUnitState : sourceState.getPreviousWorkUnitStates()) {
WorkUnit workUnit = WorkUnit.create(createExtract(Extract.TableType.SNAPSHOT_ONLY, NAMESPACE, TABLE));
workUnit.setLowWaterMark(workUnitState.getPropAsInt(ConfigurationKeys.WORK_UNIT_LOW_WATER_MARK_KEY)
+ NUM_WORK_UNITS * NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR);
workUnit.setHighWaterMark(workUnitState.getPropAsInt(ConfigurationKeys.WORK_UNIT_HIGH_WATER_MARK_KEY)
+ NUM_WORK_UNITS * NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR);
// Preserve the work unit index so expected watermarks can be recomputed per work unit.
workUnit.setProp(WORK_UNIT_INDEX_KEY, workUnitState.getPropAsInt(WORK_UNIT_INDEX_KEY));
workUnits.add(workUnit);
}
return workUnits;
}
@Override
public Extractor<String, Integer> getExtractor(WorkUnitState state) throws IOException {
return new DummyExtractor(state);
}
@Override
public void shutdown(SourceState state) {
// Nothing to do
}
// Builds the first-run work units: work unit i covers records
// [i * N + 1, (i + 1) * N] where N = NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR.
private List<WorkUnit> initializeWorkUnits() {
List<WorkUnit> workUnits = Lists.newArrayList();
for (int i = 0; i < NUM_WORK_UNITS; i++) {
WorkUnit workUnit = WorkUnit.create(createExtract(Extract.TableType.SNAPSHOT_ONLY, NAMESPACE, TABLE));
workUnit.setLowWaterMark(i * NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR + 1);
workUnit.setHighWaterMark((i + 1) * NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR);
workUnit.setProp(WORK_UNIT_INDEX_KEY, i);
workUnits.add(workUnit);
}
return workUnits;
}
}
/**
* A dummy implementation of {@link Extractor}.
*/
private static class DummyExtractor implements Extractor<String, Integer> {
private final WorkUnitState workUnitState;
private int current;
DummyExtractor(WorkUnitState workUnitState) {
this.workUnitState = workUnitState;
workUnitState.setProp(FOO, BAR);
this.current = Integer.parseInt(this.workUnitState.getProp(ConfigurationKeys.WORK_UNIT_LOW_WATER_MARK_KEY));
}
@Override
public String getSchema() {
return "";
}
@Override
public Integer readRecord(Integer reuse) throws DataRecordException, IOException {
if (this.current > this.workUnitState.getPropAsInt(ConfigurationKeys.WORK_UNIT_HIGH_WATER_MARK_KEY)) {
return null;
}
this.workUnitState.setProp(LAST_READ_RECORD_KEY, this.current);
return this.current++;
}
@Override
public long getExpectedRecordCount() {
return DummySource.NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR;
}
@Override
public long getHighWatermark() {
return this.workUnitState.getHighWaterMark();
}
@Override
public void close() throws IOException {
// Nothing to do
}
}
  /**
   * A dummy implementation of {@link DataWriterBuilder} to work with {@link DummySource}.
   */
  public static class DummyDataWriterBuilder extends DataWriterBuilder<String, Integer> {
    @Override
    public DataWriter<Integer> build() throws IOException {
      // Always hands out a no-op writer; the test only exercises state bookkeeping.
      return new DummyDataWriter();
    }
  }
  /**
   * A dummy implementation of {@link DataWriter} to work with {@link DummySource}.
   * All write/commit/cleanup operations are no-ops; only the reported counters matter.
   */
  private static class DummyDataWriter implements DataWriter<Integer> {
    @Override
    public void write(Integer record) throws IOException {
      // Nothing to do
    }
    @Override
    public void commit() throws IOException {
      // Nothing to do
    }
    @Override
    public void cleanup() throws IOException {
      // Nothing to do
    }
    @Override
    public long recordsWritten() {
      // Report the full batch as written so task-level record counts line up with the source.
      return DummySource.NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR;
    }
    @Override
    public long bytesWritten() throws IOException {
      // 4 bytes per Integer record — presumably an approximation; TODO confirm intended units.
      return DummySource.NUM_RECORDS_TO_EXTRACT_PER_EXTRACTOR * 4;
    }
    @Override
    public void close() throws IOException {
      // Nothing to do
    }
  }
}
| 1,249 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/JobLauncherTestHelper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import com.google.common.io.Closer;
import com.google.gson.reflect.TypeToken;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.metastore.StateStore;
import org.apache.gobblin.metrics.GobblinTrackingEvent;
import org.apache.gobblin.metrics.test.MetricsAssert;
import org.apache.gobblin.runtime.JobState.DatasetState;
import org.apache.gobblin.runtime.util.GsonUtils;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.test.TestExtractor;
import org.apache.gobblin.test.TestSource;
import org.apache.gobblin.util.ClusterNameTags;
import org.apache.gobblin.util.JobLauncherUtils;
/**
 * Base class for {@link JobLauncher} unit tests.
 *
 * @author Yinan Li
 */
public class JobLauncherTestHelper {

  public static final String SOURCE_FILE_LIST_KEY = "source.files";
  public static final String DYNAMIC_KEY1 = "DynamicKey1";
  public static final String DYNAMIC_VALUE1 = "DynamicValue1";

  // Matches writer output paths of the form part.task_<id>_<taskTs>_<n>_<jobTs>_<n>.avro,
  // capturing the task timestamp (group 1) and the job timestamp (group 2).
  // Compiled once instead of once per task-state iteration.
  private static final Pattern TASK_TIMESTAMP_PATTERN =
      Pattern.compile(".*part.task_.*_(\\d+)_\\d+_(\\d+)_\\d+.avro");

  private final StateStore<JobState.DatasetState> datasetStateStore;
  private final Properties launcherProps;

  public JobLauncherTestHelper(Properties launcherProps, StateStore<JobState.DatasetState> datasetStateStore) {
    this.launcherProps = launcherProps;
    this.datasetStateStore = datasetStateStore;
  }

  /**
   * Runs a job with the given job properties.
   * The job will go through the planning, writing, and commit stage of the Gobblin Job.
   * @param jobProps job configuration
   * @return the {@link JobContext} of the completed job
   * @throws Exception on launch or verification failure
   */
  public JobContext runTest(Properties jobProps) throws Exception {
    String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
    String jobId = JobLauncherUtils.newJobId(jobName);
    jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
    JobContext jobContext = null;
    MetricsAssert metricsAssert = null;
    Closer closer = Closer.create();
    try {
      JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps));
      jobContext = ((AbstractJobLauncher) jobLauncher).getJobContext();
      metricsAssert = new MetricsAssert(jobContext.getJobMetricsOptional().get().getMetricContext());
      jobLauncher.launchJob(null);
    } finally {
      closer.close();
    }
    List<GobblinTrackingEvent> events = metricsAssert.getEvents();
    Assert.assertTrue(events.stream().anyMatch(e -> e.getName().equals("JobStartTimer")));
    Assert.assertTrue(jobContext.getJobMetricsOptional().isPresent());
    String jobMetricContextTags = jobContext.getJobMetricsOptional().get().getMetricContext().getTags().toString();
    Assert.assertTrue(jobMetricContextTags.contains(ClusterNameTags.CLUSTER_IDENTIFIER_TAG_NAME),
        ClusterNameTags.CLUSTER_IDENTIFIER_TAG_NAME + " tag missing in job metric context tags.");
    Assert.assertTrue(events.stream().anyMatch(e -> e.getName().equals("JobCommitTimer")));
    List<JobState.DatasetState> datasetStateList = this.datasetStateStore.getAll(jobName, sanitizeJobNameForDatasetStore(jobId) + ".jst");
    DatasetState datasetState = datasetStateList.get(0);
    Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
    Assert.assertEquals(datasetState.getJobFailures(), 0);
    int skippedWorkunits = 0;
    int totalRecords = 0;
    for (TaskState taskState : datasetState.getTaskStates()) {
      if (Boolean.parseBoolean(jobProps.getProperty(ConfigurationKeys.WORK_UNIT_SKIP_KEY, Boolean.FALSE.toString()))
          && taskState.getWorkingState() == WorkUnitState.WorkingState.SKIPPED) {
        skippedWorkunits++;
        continue;
      }
      Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
      Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.WRITER_RECORDS_WRITTEN),
          TestExtractor.TOTAL_RECORDS);
      totalRecords += TestExtractor.TOTAL_RECORDS;
      // if the addition of the task timestamp is configured then validate that the file name has the expected format
      if (Boolean.valueOf(taskState.getProp(ConfigurationKeys.WRITER_ADD_TASK_TIMESTAMP, "false"))) {
        String value = taskState.getProp(ConfigurationKeys.WRITER_FINAL_OUTPUT_FILE_PATHS);
        Matcher m = TASK_TIMESTAMP_PATTERN.matcher(value);
        long timeBuffer = 5 * 60 * 1000;
        if (!m.matches()) {
          Assert.fail("no matches for " + value);
        }
        long currentTime = System.currentTimeMillis();
        // Both timestamps must fall within the recent past (5-minute buffer).
        Assert.assertTrue(Long.parseLong(m.group(1)) > currentTime - timeBuffer);
        Assert.assertTrue(Long.parseLong(m.group(1)) < currentTime);
        // the task time should be after the job time
        Assert.assertTrue(Long.parseLong(m.group(1)) < Long.parseLong(m.group(2)));
        Assert.assertTrue(Long.parseLong(m.group(2)) > currentTime - timeBuffer);
        Assert.assertTrue(Long.parseLong(m.group(2)) < currentTime);
      }
    }
    Optional<GobblinTrackingEvent>
        summaryEvent = events.stream().filter(e -> e.getName().equals("JobSummaryTimer")).findFirst();
    // An Optional is never null; assert presence instead of non-nullness so a missing
    // JobSummaryTimer event fails the assertion rather than the subsequent get().
    Assert.assertTrue(summaryEvent.isPresent(), "JobSummaryTimer event not found");
    Assert.assertTrue(summaryEvent.get().getMetadata().containsKey("datasetTaskSummaries"));
    Type datasetTaskSummaryType = new TypeToken<ArrayList<DatasetTaskSummary>>(){}.getType();
    List<DatasetTaskSummary> datasetTaskSummaries =
        GsonUtils.GSON_WITH_DATE_HANDLING.fromJson(summaryEvent.get().getMetadata().get("datasetTaskSummaries"), datasetTaskSummaryType);
    Assert.assertEquals(datasetTaskSummaries.size(), 1);
    Assert.assertEquals(datasetTaskSummaries.get(0).getRecordsWritten(), totalRecords);
    if (Boolean.parseBoolean(jobProps.getProperty(ConfigurationKeys.WORK_UNIT_SKIP_KEY,
        Boolean.FALSE.toString()))) {
      Assert.assertEquals(skippedWorkunits, 2);
      Assert.assertEquals(datasetState.getCompletedTasks(), 2);
    } else {
      Assert.assertEquals(skippedWorkunits, 0);
      Assert.assertEquals(datasetState.getCompletedTasks(), 4);
    }
    return jobContext;
  }

  /**
   * Runs a job and asserts that the configured pull limit was honored by every task.
   */
  public void runTestWithPullLimit(Properties jobProps, long limit) throws Exception {
    String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
    // newJobId already returns a String; the redundant toString() was removed.
    String jobId = JobLauncherUtils.newJobId(jobName);
    jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
    Closer closer = Closer.create();
    try {
      JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps));
      jobLauncher.launchJob(null);
    } finally {
      closer.close();
    }
    List<JobState.DatasetState> datasetStateList = this.datasetStateStore.getAll(jobName, sanitizeJobNameForDatasetStore(jobId) + ".jst");
    DatasetState datasetState = datasetStateList.get(0);
    Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
    Assert.assertEquals(datasetState.getCompletedTasks(), 4);
    Assert.assertEquals(datasetState.getJobFailures(), 0);
    for (TaskState taskState : datasetState.getTaskStates()) {
      Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
      Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.EXTRACTOR_ROWS_EXTRACTED), limit);
      Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.WRITER_ROWS_WRITTEN), limit);
    }
  }

  /**
   * Runs a job, cancels it from a background thread, and asserts that no dataset state
   * was persisted for the cancelled run.
   */
  public void runTestWithCancellation(final Properties jobProps) throws Exception {
    String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
    String jobId = JobLauncherUtils.newJobId(jobName);
    jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
    Closer closer = Closer.create();
    try {
      final JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps));
      final AtomicBoolean isCancelled = new AtomicBoolean(false);
      // This thread will cancel the job after some time
      Thread thread = new Thread(new Runnable() {
        @Override
        public void run() {
          try {
            Thread.sleep(500);
            jobLauncher.cancelJob(null);
            isCancelled.set(true);
          } catch (Exception je) {
            // Ignored
          }
        }
      });
      thread.start();
      jobLauncher.launchJob(null);
      Assert.assertTrue(isCancelled.get());
    } finally {
      closer.close();
    }
    List<JobState.DatasetState> datasetStateList = this.datasetStateStore.getAll(jobName, sanitizeJobNameForDatasetStore(jobId) + ".jst");
    Assert.assertTrue(datasetStateList.isEmpty());
  }

  /**
   * Runs a job with a two-branch fork and asserts that both fork outputs were published
   * with the expected record counts.
   */
  public void runTestWithFork(Properties jobProps) throws Exception {
    String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
    String jobId = JobLauncherUtils.newJobId(jobName);
    jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
    try (JobLauncher jobLauncher = JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps)) {
      jobLauncher.launchJob(null);
    }
    List<JobState.DatasetState> datasetStateList = this.datasetStateStore.getAll(jobName, sanitizeJobNameForDatasetStore(jobId) + ".jst");
    DatasetState datasetState = datasetStateList.get(0);
    Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
    Assert.assertEquals(datasetState.getCompletedTasks(), 4);
    Assert.assertEquals(datasetState.getJobFailures(), 0);
    FileSystem lfs = FileSystem.getLocal(new Configuration());
    for (TaskState taskState : datasetState.getTaskStates()) {
      Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
      Path path = new Path(this.launcherProps.getProperty(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR),
          new Path(taskState.getExtract().getOutputFilePath(), "fork_0"));
      Assert.assertTrue(lfs.exists(path));
      Assert.assertEquals(lfs.listStatus(path).length, 2);
      Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.WRITER_RECORDS_WRITTEN + ".0"),
          TestExtractor.TOTAL_RECORDS);
      path = new Path(this.launcherProps.getProperty(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR),
          new Path(taskState.getExtract().getOutputFilePath(), "fork_1"));
      Assert.assertTrue(lfs.exists(path));
      Assert.assertEquals(lfs.listStatus(path).length, 2);
      Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.WRITER_RECORDS_WRITTEN + ".1"),
          TestExtractor.TOTAL_RECORDS);
    }
  }

  /**
   * Runs a job whose source assigns a distinct dataset URN per work unit and asserts
   * each dataset committed independently.
   */
  public void runTestWithMultipleDatasets(Properties jobProps) throws Exception {
    String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
    String jobId = JobLauncherUtils.newJobId(jobName);
    jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
    jobProps.setProperty(ConfigurationKeys.SOURCE_CLASS_KEY, MultiDatasetTestSource.class.getName());
    Closer closer = Closer.create();
    try {
      JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps));
      jobLauncher.launchJob(null);
    } finally {
      closer.close();
    }
    for (int i = 0; i < 4; i++) {
      List<JobState.DatasetState> datasetStateList =
          this.datasetStateStore.getAll(jobName, "Dataset" + i + "-current.jst");
      DatasetState datasetState = datasetStateList.get(0);
      Assert.assertEquals(datasetState.getDatasetUrn(), "Dataset" + i);
      Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
      Assert.assertEquals(datasetState.getCompletedTasks(), 1);
      Assert.assertEquals(datasetState.getJobFailures(), 0);
      for (TaskState taskState : datasetState.getTaskStates()) {
        Assert.assertEquals(taskState.getProp(ConfigurationKeys.DATASET_URN_KEY), "Dataset" + i);
        Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
        Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.WRITER_RECORDS_WRITTEN),
            TestExtractor.TOTAL_RECORDS);
      }
    }
  }

  /**
   * Test when a test with the matching suffix is skipped.
   * @param jobProps job properties
   * @param skippedTaskSuffix the suffix for the task that is skipped
   */
  public void runTestWithSkippedTask(Properties jobProps, String skippedTaskSuffix) throws Exception {
    String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
    String jobId = JobLauncherUtils.newJobId(jobName);
    jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
    jobProps.setProperty(ConfigurationKeys.PUBLISH_DATA_AT_JOB_LEVEL, Boolean.FALSE.toString());
    jobProps.setProperty(ConfigurationKeys.JOB_COMMIT_POLICY_KEY, "successful");
    jobProps.setProperty(ConfigurationKeys.MAX_TASK_RETRIES_KEY, "0");
    Closer closer = Closer.create();
    try {
      JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps));
      jobLauncher.launchJob(null);
    } finally {
      closer.close();
    }
    List<JobState.DatasetState> datasetStateList =
        this.datasetStateStore.getAll(jobName, sanitizeJobNameForDatasetStore(jobId) + ".jst");
    JobState jobState = datasetStateList.get(0);
    Assert.assertEquals(jobState.getState(), JobState.RunningState.COMMITTED);
    // one task is skipped out of 4
    Assert.assertEquals(jobState.getCompletedTasks(), 3);
    for (TaskState taskState : jobState.getTaskStates()) {
      if (taskState.getTaskId().endsWith(skippedTaskSuffix)) {
        Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.PENDING);
      } else {
        Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
        Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.WRITER_RECORDS_WRITTEN),
            TestExtractor.TOTAL_RECORDS);
      }
    }
  }

  /**
   * Runs a job with one deliberately failing task under the "successful" commit policy and
   * asserts the remaining tasks still committed.
   */
  public void runTestWithCommitSuccessfulTasksPolicy(Properties jobProps) throws Exception {
    String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
    String jobId = JobLauncherUtils.newJobId(jobName);
    jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
    jobProps.setProperty(ConfigurationKeys.PUBLISH_DATA_AT_JOB_LEVEL, Boolean.FALSE.toString());
    jobProps.setProperty(ConfigurationKeys.JOB_COMMIT_POLICY_KEY, "successful");
    jobProps.setProperty(ConfigurationKeys.SOURCE_CLASS_KEY, TestSourceWithFaultyExtractor.class.getName());
    jobProps.setProperty(ConfigurationKeys.MAX_TASK_RETRIES_KEY, "0");
    Closer closer = Closer.create();
    try {
      JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps));
      jobLauncher.launchJob(null);
    } finally {
      closer.close();
    }
    List<JobState.DatasetState> datasetStateList = this.datasetStateStore.getAll(jobName, sanitizeJobNameForDatasetStore(jobId) + ".jst");
    JobState jobState = datasetStateList.get(0);
    Assert.assertEquals(jobState.getState(), JobState.RunningState.COMMITTED);
    Assert.assertEquals(jobState.getCompletedTasks(), 4);
    for (TaskState taskState : jobState.getTaskStates()) {
      if (taskState.getTaskId().endsWith("0")) {
        Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.FAILED);
      } else {
        Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
        Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.WRITER_RECORDS_WRITTEN),
            TestExtractor.TOTAL_RECORDS);
      }
    }
  }

  /**
   * Runs a multi-dataset job in which dataset 0's extractor always fails, and verifies the
   * resulting per-dataset states under either the partial or the default commit policy.
   */
  public void runTestWithMultipleDatasetsAndFaultyExtractor(Properties jobProps, boolean usePartialCommitPolicy)
      throws Exception {
    String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
    String jobId = JobLauncherUtils.newJobId(jobName);
    jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);
    jobProps.setProperty(ConfigurationKeys.SOURCE_CLASS_KEY, MultiDatasetTestSourceWithFaultyExtractor.class.getName());
    jobProps.setProperty(ConfigurationKeys.MAX_TASK_RETRIES_KEY, "0");
    if (usePartialCommitPolicy) {
      jobProps.setProperty(ConfigurationKeys.JOB_COMMIT_POLICY_KEY, "partial");
    }
    Closer closer = Closer.create();
    try {
      JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps));
      jobLauncher.launchJob(null);
    } catch (JobException je) {
      // JobException is expected
    } finally {
      closer.close();
    }
    if (usePartialCommitPolicy) {
      List<JobState.DatasetState> datasetStateList = this.datasetStateStore.getAll(jobName, "Dataset0-current.jst");
      JobState.DatasetState datasetState = datasetStateList.get(0);
      Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
      Assert.assertEquals(datasetState.getTaskCount(), 1);
      TaskState taskState = datasetState.getTaskStates().get(0);
      Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.FAILED);
    } else {
      // Task 0 should have failed
      Assert.assertTrue(this.datasetStateStore.getAll(jobName, "Dataset0-current.jst").isEmpty());
    }
    for (int i = 1; i < 4; i++) {
      List<JobState.DatasetState> datasetStateList =
          this.datasetStateStore.getAll(jobName, "Dataset" + i + "-current.jst");
      JobState.DatasetState datasetState = datasetStateList.get(0);
      Assert.assertEquals(datasetState.getDatasetUrn(), "Dataset" + i);
      Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
      Assert.assertEquals(datasetState.getCompletedTasks(), 1);
      for (TaskState taskState : datasetState.getTaskStates()) {
        Assert.assertEquals(taskState.getProp(ConfigurationKeys.DATASET_URN_KEY), "Dataset" + i);
        Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
      }
    }
  }

  /** Deletes all persisted states under the given store name. */
  public void deleteStateStore(String storeName) throws IOException {
    this.datasetStateStore.delete(storeName);
  }

  /** {@link TestSource} variant that assigns each work unit its own dataset URN. */
  public static class MultiDatasetTestSource extends TestSource {
    @Override
    public List<WorkUnit> getWorkunits(SourceState state) {
      List<WorkUnit> workUnits = super.getWorkunits(state);
      for (int i = 0; i < workUnits.size(); i++) {
        workUnits.get(i).setProp(ConfigurationKeys.DATASET_URN_KEY, "Dataset" + i);
      }
      return workUnits;
    }
  }

  /** {@link MultiDatasetTestSource} whose dataset 0 gets an always-failing extractor. */
  public static class MultiDatasetTestSourceWithFaultyExtractor extends MultiDatasetTestSource {
    @Override
    public Extractor<String, String> getExtractor(WorkUnitState workUnitState) {
      Extractor<String, String> extractor = super.getExtractor(workUnitState);
      if (workUnitState.getProp(ConfigurationKeys.DATASET_URN_KEY).endsWith("0")) {
        return new FaultyExtractor(workUnitState);
      }
      return extractor;
    }
  }

  /** Extractor that throws an {@link IOException} on every read to simulate task failure. */
  public static class FaultyExtractor extends TestExtractor {
    public FaultyExtractor(WorkUnitState workUnitState) {
      super(workUnitState);
    }

    @Override
    public String readRecord(@Deprecated String reuse) throws IOException {
      throw new IOException("Injected failure");
    }
  }

  /** {@link TestSource} whose task 0 gets an always-failing extractor. */
  public static class TestSourceWithFaultyExtractor extends TestSource {
    @Override
    public Extractor<String, String> getExtractor(WorkUnitState workUnitState) {
      Extractor<String, String> extractor = super.getExtractor(workUnitState);
      if (((TaskState) workUnitState).getTaskId().endsWith("0")) {
        return new FaultyExtractor(workUnitState);
      }
      return extractor;
    }
  }

  /**
   * Replaces characters illegal in dataset state store names ('-' and '/') with '_'.
   */
  private String sanitizeJobNameForDatasetStore(String jobId) {
    return jobId.replaceAll("[-/]", "_");
  }
}
| 1,250 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/CountUpAndDownLatchTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.Test;
public class CountUpAndDownLatchTest {

  @Test
  public void test() throws Exception {
    // NOTE: the previous version created an ExecutorService that was never used
    // (no tasks were ever submitted); that dead code has been removed.
    CountUpAndDownLatch countUpAndDownLatch = new CountUpAndDownLatch(1);
    // Initial count of 1: the latch must not open yet.
    Assert.assertFalse(countUpAndDownLatch.await(50, TimeUnit.MILLISECONDS));
    countUpAndDownLatch.countUp();
    Assert.assertFalse(countUpAndDownLatch.await(50, TimeUnit.MILLISECONDS));
    countUpAndDownLatch.countDown();
    Assert.assertFalse(countUpAndDownLatch.await(50, TimeUnit.MILLISECONDS));
    countUpAndDownLatch.countDown();
    Assert.assertTrue(countUpAndDownLatch.await(1, TimeUnit.SECONDS));
    // count-up again
    countUpAndDownLatch.countUp();
    // verify we will wait even though the latch was at 0 before
    Assert.assertFalse(countUpAndDownLatch.await(50, TimeUnit.MILLISECONDS));
    countUpAndDownLatch.countDown();
    Assert.assertTrue(countUpAndDownLatch.await(1, TimeUnit.SECONDS));
  }
}
| 1,251 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/MultiConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.Converter;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.EmptyIterable;
import org.apache.gobblin.converter.SchemaConversionException;
import org.apache.gobblin.converter.SingleRecordIterable;
import org.apache.gobblin.test.TestConverter;
/**
* Unit tests for {@link MultiConverter}.
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.runtime" })
public class MultiConverterTest {
  // Avro schema (as a JSON string) shared by all conversion tests in this class.
  private static final String TEST_SCHEMA = "{\"namespace\": \"example.avro\",\n" + " \"type\": \"record\",\n"
      + " \"name\": \"User\",\n" + " \"fields\": [\n" + " {\"name\": \"name\", \"type\": \"string\"},\n"
      + " {\"name\": \"favorite_number\", \"type\": \"int\"},\n"
      + " {\"name\": \"favorite_color\", \"type\": \"string\"}\n" + " ]\n" + "}";
  // A single JSON record conforming to TEST_SCHEMA, used as the conversion input.
  private static final String TEST_RECORD = "{\"fields\": {" + "\"name\": \"Alyssa\", " + "\"favorite_number\": 256, "
      + "\"favorite_color\": \"yellow\"" + "}" + "}";
@Test
public void testConversion() throws Exception {
MultiConverter multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new IdentityConverter(),
new TestConverter()));
WorkUnitState workUnitState = new WorkUnitState();
Schema schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
Iterable<Object> convertedRecordIterable = multiConverter.convertRecord(schema, TEST_RECORD, workUnitState);
Assert.assertEquals(Iterables.size(convertedRecordIterable), 1);
checkConvertedAvroData(schema, (GenericRecord) convertedRecordIterable.iterator().next());
}
@Test
public void testConversionWithMultiplicity() throws Exception {
MultiConverter multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new MultiIdentityConverter(2),
new MultiIdentityConverter(2), new TestConverter()));
WorkUnitState workUnitState = new WorkUnitState();
Schema schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
Iterable<Object> convertedRecordIterable = multiConverter.convertRecord(schema, TEST_RECORD, workUnitState);
Assert.assertEquals(Iterables.size(convertedRecordIterable), 4);
for (Object record : convertedRecordIterable) {
checkConvertedAvroData(schema, (GenericRecord) record);
}
}
/**
* Combines {@link MultiIdentityConverter()} with {@link AlternatingConverter()}
* @throws Exception
*/
@Test
public void testConversionWithMultiplicityAndAlternating() throws Exception {
MultiConverter multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new MultiIdentityConverter(6),
new AlternatingConverter(4), new TestConverter()));
WorkUnitState workUnitState = new WorkUnitState();
Schema schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
Iterable<Object> convertedRecordIterable = multiConverter.convertRecord(schema, TEST_RECORD, workUnitState);
Assert.assertEquals(Iterables.size(convertedRecordIterable), 10);
for (Object record : convertedRecordIterable) {
checkConvertedAvroData(schema, (GenericRecord) record);
}
multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new MultiIdentityConverter(6),
new AlternatingConverter(4), new MultiIdentityConverter(4), new TestConverter()));
workUnitState = new WorkUnitState();
schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
convertedRecordIterable = multiConverter.convertRecord(schema, TEST_RECORD, workUnitState);
Assert.assertEquals(Iterables.size(convertedRecordIterable), 40);
for (Object record : convertedRecordIterable) {
checkConvertedAvroData(schema, (GenericRecord) record);
}
}
/**
* Combines {@link MultiIdentityConverter()} with {@link OneOrEmptyConverter()}
* @throws Exception
*/
@Test
public void testConversionWithMultiplicityAndOneOrEmpty() throws Exception {
MultiConverter multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new MultiIdentityConverter(20),
new OneOrEmptyConverter(1), new TestConverter()));
WorkUnitState workUnitState = new WorkUnitState();
Schema schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
Iterable<Object> convertedRecordIterable = multiConverter.convertRecord(schema, TEST_RECORD, workUnitState);
Assert.assertEquals(Iterables.size(convertedRecordIterable), 20);
for (Object record : convertedRecordIterable) {
checkConvertedAvroData(schema, (GenericRecord) record);
}
multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new MultiIdentityConverter(20),
new OneOrEmptyConverter(10), new TestConverter()));
workUnitState = new WorkUnitState();
schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
convertedRecordIterable = multiConverter.convertRecord(schema, TEST_RECORD, workUnitState);
Assert.assertEquals(Iterables.size(convertedRecordIterable), 2);
for (Object record : convertedRecordIterable) {
checkConvertedAvroData(schema, (GenericRecord) record);
}
multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new MultiIdentityConverter(20),
new OneOrEmptyConverter(10), new MultiIdentityConverter(10), new TestConverter()));
workUnitState = new WorkUnitState();
schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
convertedRecordIterable = multiConverter.convertRecord(schema, TEST_RECORD, workUnitState);
Assert.assertEquals(Iterables.size(convertedRecordIterable), 20);
for (Object record : convertedRecordIterable) {
checkConvertedAvroData(schema, (GenericRecord) record);
}
}
@Test
public void testConversionWithEmptyConverter() throws Exception {
WorkUnitState workUnitState = new WorkUnitState();
MultiConverter multiConverter =
new MultiConverter(Lists.newArrayList(new EmptyConverter(), new SchemaSimplificationConverter(),
new TestConverter()));
Schema schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
Assert.assertFalse(multiConverter.convertRecord(schema, TEST_RECORD, workUnitState).iterator().hasNext());
multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new EmptyConverter(),
new TestConverter()));
schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
Assert.assertFalse(multiConverter.convertRecord(schema, TEST_RECORD, workUnitState).iterator().hasNext());
multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new TestConverter(),
new EmptyConverter()));
schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
Assert.assertFalse(multiConverter.convertRecord(schema, TEST_RECORD, workUnitState).iterator().hasNext());
multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new MultiIdentityConverter(5),
new TestConverter(), new EmptyConverter()));
schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
Assert.assertFalse(multiConverter.convertRecord(schema, TEST_RECORD, workUnitState).iterator().hasNext());
multiConverter =
new MultiConverter(Lists.newArrayList(new SchemaSimplificationConverter(), new EmptyConverter(),
new MultiIdentityConverter(5), new TestConverter()));
schema = (Schema) multiConverter.convertSchema(TEST_SCHEMA, workUnitState);
Assert.assertFalse(multiConverter.convertRecord(schema, TEST_RECORD, workUnitState).iterator().hasNext());
}
@Test
public void testConversionWithoutConverters() throws Exception {
MultiConverter multiConverter =
new MultiConverter(
Lists.<Converter<? extends Object, ? extends Object, ? extends Object, ? extends Object>> newArrayList());
WorkUnitState workUnitState = new WorkUnitState();
Assert.assertEquals(TEST_SCHEMA, multiConverter.convertSchema(TEST_SCHEMA, workUnitState));
Assert.assertEquals(TEST_RECORD, multiConverter.convertRecord(TEST_SCHEMA, TEST_RECORD, workUnitState).iterator()
.next());
}
private void checkConvertedAvroData(Schema schema, GenericRecord record) {
Assert.assertEquals(schema.getNamespace(), "example.avro");
Assert.assertEquals(schema.getType(), Schema.Type.RECORD);
Assert.assertEquals(schema.getName(), "User");
Assert.assertEquals(schema.getFields().size(), 3);
Schema.Field nameField = schema.getField("name");
Assert.assertEquals(nameField.name(), "name");
Assert.assertEquals(nameField.schema().getType(), Schema.Type.STRING);
Schema.Field favNumberField = schema.getField("favorite_number");
Assert.assertEquals(favNumberField.name(), "favorite_number");
Assert.assertEquals(favNumberField.schema().getType(), Schema.Type.INT);
Schema.Field favColorField = schema.getField("favorite_color");
Assert.assertEquals(favColorField.name(), "favorite_color");
Assert.assertEquals(favColorField.schema().getType(), Schema.Type.STRING);
Assert.assertEquals(record.get("name"), "Alyssa");
Assert.assertEquals(record.get("favorite_number"), 256d);
Assert.assertEquals(record.get("favorite_color"), "yellow");
}
/**
* A {@link Converter} that simplifies the input data records.
*/
private static class SchemaSimplificationConverter extends Converter<String, String, String, String> {
private static final Gson GSON = new Gson();
@Override
public String convertSchema(String inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
return inputSchema;
}
@Override
public Iterable<String> convertRecord(String outputSchema, String inputRecord, WorkUnitState workUnit)
throws DataConversionException {
JsonElement element = GSON.fromJson(inputRecord, JsonObject.class).get("fields");
return new SingleRecordIterable<String>(element.getAsJsonObject().toString());
}
}
/**
* A {@link Converter} that returns the input schema and data records as they are.
*/
private static class IdentityConverter extends Converter<Object, Object, Object, Object> {
@Override
public Object convertSchema(Object inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
return inputSchema;
}
@Override
public Iterable<Object> convertRecord(Object outputSchema, Object inputRecord, WorkUnitState workUnit)
throws DataConversionException {
return new SingleRecordIterable<Object>(inputRecord);
}
}
/**
* A {@link Converter} that returns the input schema and data records as they are but with a given multiplicity.
*/
private static class MultiIdentityConverter extends Converter<Object, Object, Object, Object> {
private final int multiplicity;
public MultiIdentityConverter(int multiplicity) {
this.multiplicity = multiplicity;
}
@Override
public Object convertSchema(Object inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
return inputSchema;
}
@Override
public Iterable<Object> convertRecord(Object outputSchema, Object inputRecord, WorkUnitState workUnit)
throws DataConversionException {
List<Object> records = Lists.newArrayList();
for (int i = 0; i < this.multiplicity; i++) {
records.add(inputRecord);
}
return records;
}
}
  /**
   * A {@link Converter} that returns no converted data record.
   */
  private static class EmptyConverter extends Converter<Object, Object, Object, Object> {
    @Override
    public Object convertSchema(Object inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
      // The schema passes through unchanged; only the record stream is emptied.
      return inputSchema;
    }
    @Override
    public Iterable<Object> convertRecord(Object outputSchema, Object inputRecord, WorkUnitState workUnit)
        throws DataConversionException {
      // Drop every record: converters downstream in the chain receive no input.
      return new EmptyIterable<Object>();
    }
  }
/**
* A {@link Converter} which will alternate between returning a {@link EmptyIterable()},
* a {@link SingleRecordIterable()}, or a {@link List()}. The number of records {@link List()} is controlled by the
* multiplicity config in the constructor, similar to {@link MultiIdentityConverter()}.
*/
private static class AlternatingConverter extends Converter<Object, Object, Object, Object> {
private int executionCount = 0;
private final int multiplicity;
public AlternatingConverter(int multiplicity) {
this.multiplicity = multiplicity;
}
@Override
public Object convertSchema(Object inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
return inputSchema;
}
@Override
public Iterable<Object> convertRecord(Object outputSchema, Object inputRecord, WorkUnitState workUnit)
throws DataConversionException {
this.executionCount++;
if (this.executionCount > 3) {
this.executionCount = 1;
}
if (this.executionCount == 1) {
return new EmptyIterable<Object>();
} else if (this.executionCount == 2) {
return new SingleRecordIterable<Object>(inputRecord);
} else if (this.executionCount == 3) {
List<Object> records = Lists.newArrayList();
for (int i = 0; i < this.multiplicity; i++) {
records.add(inputRecord);
}
return records;
} else {
throw new DataConversionException("Execution count must always be 1, 2, or 3");
}
}
}
/**
* A {@link Converter} which will return a {@link SingleRecordIterable()} every "x" number of calls to
* convertRecord. Every other time it will return an {@link EmptyIterable()}
*/
private static class OneOrEmptyConverter extends Converter<Object, Object, Object, Object> {
private int executionCount = 0;
private final int recordNum;
/**
*
* @param recordNum is the frequency at which a {@link SingleRecordIterable()} will be returned. This iterable will
* be a simple wrapped of the input record.
*/
public OneOrEmptyConverter(int recordNum) {
this.recordNum = recordNum;
}
@Override
public Object convertSchema(Object inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
return inputSchema;
}
@Override
public Iterable<Object> convertRecord(Object outputSchema, Object inputRecord, WorkUnitState workUnit)
throws DataConversionException {
if (this.executionCount % this.recordNum == 0) {
this.executionCount++;
return new SingleRecordIterable<Object>(inputRecord);
} else {
this.executionCount++;
return new EmptyIterable<Object>();
}
}
}
}
| 1,252 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/commit/CommitSequenceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.commit;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.commit.CommitSequence;
import org.apache.gobblin.commit.FsRenameCommitStep;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.runtime.JobState.DatasetState;
/**
 * Tests for {@link CommitSequence}.
 *
 * The sequence under test performs two cross-directory file renames followed by a
 * dataset-state commit, all staged on the local file system under {@code ROOT_DIR}.
 *
 * @author Ziyang Liu
 */
@Test(groups = { "gobblin.runtime.commit" })
public class CommitSequenceTest {
  private static final String ROOT_DIR = "commit-sequence-test";
  // Local file system used to stage the files moved by the commit sequence.
  private FileSystem fs;
  // Built in setUp(); executed and verified in testExecute().
  private CommitSequence sequence;
  @BeforeClass
  public void setUp() throws IOException {
    this.fs = FileSystem.getLocal(new Configuration());
    // Start from a clean slate in case a previous run left files behind.
    this.fs.delete(new Path(ROOT_DIR), true);
    Path storeRootDir = new Path(ROOT_DIR, "store");
    // Layout: dir1/file1 and dir2/file2 exist; the sequence swaps them across dirs.
    Path dir1 = new Path(ROOT_DIR, "dir1");
    Path dir2 = new Path(ROOT_DIR, "dir2");
    this.fs.mkdirs(dir1);
    this.fs.mkdirs(dir2);
    Path src1 = new Path(dir1, "file1");
    Path src2 = new Path(dir2, "file2");
    Path dst1 = new Path(dir2, "file1");
    Path dst2 = new Path(dir1, "file2");
    this.fs.createNewFile(src1);
    this.fs.createNewFile(src2);
    // Dataset state to be persisted by the final DatasetStateCommitStep.
    DatasetState ds = new DatasetState("job-name", "job-id");
    ds.setDatasetUrn("urn");
    ds.setNoJobFailure();
    State state = new State();
    state.setProp(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, storeRootDir.toString());
    // Two rename steps followed by a dataset-state commit step.
    this.sequence = new CommitSequence.Builder().withJobName("testjob").withDatasetUrn("testurn")
        .beginStep(FsRenameCommitStep.Builder.class).from(src1).to(dst1).withProps(state).endStep()
        .beginStep(FsRenameCommitStep.Builder.class).from(src2).to(dst2).withProps(state).endStep()
        .beginStep(DatasetStateCommitStep.Builder.class).withDatasetUrn("urn").withDatasetState(ds).withProps(state)
        .endStep().build();
  }
  @AfterClass
  public void tearDown() throws IOException {
    // Remove everything the test staged or the sequence produced.
    this.fs.delete(new Path(ROOT_DIR), true);
  }
  @Test
  public void testExecute() throws IOException {
    this.sequence.execute();
    // The two renames moved each file into the other directory.
    Assert.assertTrue(this.fs.exists(new Path(ROOT_DIR, "dir1/file2")));
    Assert.assertTrue(this.fs.exists(new Path(ROOT_DIR, "dir2/file1")));
    // The dataset-state step wrote both the versioned and the "current" state file.
    Assert.assertTrue(this.fs.exists(new Path(ROOT_DIR, "store/job-name/urn-job_id.jst")));
    Assert.assertTrue(this.fs.exists(new Path(ROOT_DIR, "store/job-name/urn-current.jst")));
  }
}
| 1,253 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/commit/FsCommitSequenceStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.commit;
import java.io.IOException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.gson.Gson;
import org.apache.gobblin.commit.CommitSequence;
import org.apache.gobblin.commit.CommitStep;
import org.apache.gobblin.commit.FsRenameCommitStep;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.runtime.JobState.DatasetState;
import org.apache.gobblin.util.io.GsonInterfaceAdapter;
/**
 * Tests for {@link FsCommitSequenceStore}.
 *
 * Persists a {@link CommitSequence} to a local-FS-backed store and reads it back,
 * comparing the round-tripped sequence via its Gson JSON representation.
 *
 * @author Ziyang Liu
 */
public class FsCommitSequenceStoreTest {
  // Gson instance aware of the CommitStep interface hierarchy, for JSON comparison.
  private static final Gson GSON = GsonInterfaceAdapter.getGson(CommitStep.class);
  private final String jobName = "test-job";
  // Deliberately the empty string: exercises the store's handling of a blank URN.
  private final String datasetUrn = StringUtils.EMPTY;
  private FsCommitSequenceStore store;
  // Sequence written in testPut() and read back in testGet().
  private CommitSequence sequence;
  @BeforeClass
  public void setUp() throws IOException {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    this.store = new FsCommitSequenceStore(fs, new Path("commit-sequence-store-test"));
    State props = new State();
    props.setId("propsId");
    props.setProp("prop1", "valueOfProp1");
    props.setProp("prop2", "valueOfProp2");
    DatasetState datasetState = new DatasetState();
    datasetState.setDatasetUrn(this.datasetUrn);
    datasetState.incrementJobFailures();
    // One rename step plus one dataset-state step; the paths need not exist since
    // the sequence is only serialized, never executed, in this test.
    this.sequence = new CommitSequence.Builder().withJobName("testjob").withDatasetUrn("testurn")
        .beginStep(FsRenameCommitStep.Builder.class).from(new Path("/ab/cd")).to(new Path("/ef/gh")).withProps(props)
        .endStep().beginStep(DatasetStateCommitStep.Builder.class).withDatasetUrn(this.datasetUrn)
        .withDatasetState(datasetState).withProps(props).endStep().build();
  }
  @AfterClass
  public void tearDown() throws IOException {
    this.store.delete(this.jobName);
  }
  @Test
  public void testPut() throws IOException {
    // Clear any pre-existing entry so the first put below starts from a known state.
    tearDown();
    this.store.put(this.jobName, this.datasetUrn, this.sequence);
    Assert.assertTrue(this.store.exists(this.jobName, this.datasetUrn));
    // A second put for the same (job, urn) must be rejected.
    try {
      this.store.put(this.jobName, this.datasetUrn, this.sequence);
      Assert.fail();
    } catch (IOException e) {
      // Expected to catch IOException
    }
  }
  @Test(dependsOnMethods = { "testPut" })
  public void testGet() throws IOException {
    Optional<CommitSequence> sequence2 = this.store.get(this.jobName, this.datasetUrn);
    Assert.assertTrue(sequence2.isPresent());
    // Compare via JSON trees since CommitSequence has no usable equals().
    Assert.assertEquals(GSON.toJsonTree(sequence2.get()), GSON.toJsonTree(this.sequence));
  }
}
| 1,254 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/metrics/GobblinJobMetricReporterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.metrics;
import com.codahale.metrics.Metric;
import java.io.FileReader;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metastore.FsStateStore;
import org.apache.gobblin.metastore.StateStore;
import org.apache.gobblin.metastore.testing.ITestMetastoreDatabase;
import org.apache.gobblin.metastore.testing.TestMetastoreDatabaseFactory;
import org.apache.gobblin.runtime.JobContext;
import org.apache.gobblin.runtime.JobLauncherTestHelper;
import org.apache.gobblin.runtime.JobState;
import org.junit.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
 * Unit test for {@link GobblinJobMetricReporter and its implementation classes}.
 *
 * Runs a real local Gobblin job via {@link JobLauncherTestHelper} and inspects the
 * job's metric context for the metrics each reporter implementation is expected to
 * register.
 */
@Test(groups = { "gobblin.runtime.local" })
public class GobblinJobMetricReporterTest {
  // Base launcher properties shared by every test job; see startUp().
  private Properties launcherProps;
  private JobLauncherTestHelper jobLauncherTestHelper;
  // Embedded database backing the job history store for the duration of the class.
  private ITestMetastoreDatabase testMetastoreDatabase;
  @BeforeClass
  public void startUp() throws Exception {
    testMetastoreDatabase = TestMetastoreDatabaseFactory.get();
    this.launcherProps = new Properties();
    this.launcherProps.load(new FileReader("gobblin-test/resource/gobblin.test.properties"));
    // Enable metrics but disable file reporting so nothing is written to disk.
    this.launcherProps.setProperty(ConfigurationKeys.JOB_HISTORY_STORE_ENABLED_KEY, "true");
    this.launcherProps.setProperty(ConfigurationKeys.METRICS_ENABLED_KEY, "true");
    this.launcherProps.setProperty(ConfigurationKeys.METRICS_REPORTING_FILE_ENABLED_KEY, "false");
    this.launcherProps.setProperty(ConfigurationKeys.JOB_HISTORY_STORE_URL_KEY, testMetastoreDatabase.getJdbcUrl());
    StateStore<JobState.DatasetState> datasetStateStore =
        new FsStateStore<>(this.launcherProps.getProperty(ConfigurationKeys.STATE_STORE_FS_URI_KEY),
            this.launcherProps.getProperty(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY), JobState.DatasetState.class);
    this.jobLauncherTestHelper = new JobLauncherTestHelper(this.launcherProps, datasetStateStore);
  }
  // NOTE(review): assertions below use org.junit.Assert inside TestNG tests; it works
  // but is inconsistent with the TestNG annotations — consider org.testng.Assert.
  @Test
  public void testLaunchJobWithDefaultMetricsReporter() throws Exception {
    Properties jobProps = loadJobProps();
    // Unique job name so this test's state store entries do not collide with others.
    jobProps.setProperty(ConfigurationKeys.JOB_NAME_KEY,
        jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY) + "-testDefaultMetricsReporter");
    try {
      JobContext jobContext = this.jobLauncherTestHelper.runTest(jobProps);
      Map<String, Metric> metrics = jobContext.getJobMetricsOptional().get().getMetricContext().getMetrics();
      // The default reporter registers a per-job work-unit creation timer.
      Assert.assertTrue(metrics.containsKey("JobMetrics.WorkUnitsCreationTimer.GobblinTest1-testDefaultMetricsReporter"));
    } finally {
      this.jobLauncherTestHelper.deleteStateStore(jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY));
    }
  }
  @Test
  public void testLaunchJobWithServiceMetricsReporter() throws Exception {
    Properties jobProps = loadJobProps();
    jobProps.setProperty(ConfigurationKeys.GOBBLIN_OUTPUT_JOB_LEVEL_METRICS, "true");
    jobProps.setProperty(ConfigurationKeys.JOB_METRICS_REPORTER_CLASS_KEY, ServiceGobblinJobMetricReporter.class.getName());
    // The service reporter derives metric names from the flow group/name and edge id.
    jobProps.setProperty(ConfigurationKeys.JOB_NAME_KEY, "FlowName_FlowGroup_JobName_EdgeId_Hash");
    jobProps.setProperty(ConfigurationKeys.FLOW_GROUP_KEY, "FlowGroup");
    jobProps.setProperty(ConfigurationKeys.FLOW_NAME_KEY, "FlowName");
    jobProps.setProperty("flow.edge.id", "EdgeId");
    try {
      JobContext jobContext = this.jobLauncherTestHelper.runTest(jobProps);
      Map<String, Metric> metrics = jobContext.getJobMetricsOptional().get().getMetricContext().getMetrics();
      Assert.assertTrue(metrics.containsKey("GobblinService.FlowGroup.FlowName.EdgeId.WorkUnitsCreated"));
      Assert.assertTrue(metrics.containsKey("GobblinService.FlowGroup.FlowName.EdgeId.WorkUnitsCreationTimer"));
    } finally {
      this.jobLauncherTestHelper.deleteStateStore(jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY));
    }
  }
  @AfterClass(alwaysRun = true)
  public void tearDown() throws IOException {
    if (testMetastoreDatabase != null) {
      testMetastoreDatabase.close();
    }
  }
  // Loads the shared test job configuration and points it at the bundled Avro fixtures.
  private Properties loadJobProps() throws IOException {
    Properties jobProps = new Properties();
    jobProps.load(new FileReader("gobblin-test/resource/job-conf/GobblinTest1.pull"));
    jobProps.putAll(this.launcherProps);
    jobProps.setProperty(JobLauncherTestHelper.SOURCE_FILE_LIST_KEY,
        "gobblin-test/resource/source/test.avro.0," + "gobblin-test/resource/source/test.avro.1,"
            + "gobblin-test/resource/source/test.avro.2," + "gobblin-test/resource/source/test.avro.3");
    return jobProps;
  }
} | 1,255 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/locks/FileBasedJobLockFactoryManagerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.locks;
import java.io.File;
import org.apache.hadoop.fs.LocalFileSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.io.Files;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.runtime.api.JobSpec;
/**
 * Unit tests for {@link FileBasedJobLockFactoryManager}
 */
public class FileBasedJobLockFactoryManagerTest {
  @Test
  public void testGetFactoryConfig() {
    FileBasedJobLockFactoryManager mgr = new FileBasedJobLockFactoryManager();
    // No config at all: the factory config scoped under CONFIG_PREFIX is empty.
    Config sysConfig1 = ConfigFactory.empty();
    Assert.assertTrue(mgr.getFactoryConfig(sysConfig1).isEmpty());
    // Unrelated properties outside CONFIG_PREFIX must not leak into the factory config.
    Config sysConfig2 = sysConfig1.withValue("some.prop", ConfigValueFactory.fromAnyRef("test"));
    Assert.assertTrue(mgr.getFactoryConfig(sysConfig2).isEmpty());
    // A property under CONFIG_PREFIX is surfaced with the prefix stripped.
    Config sysConfig3 =
        sysConfig2.withValue(FileBasedJobLockFactoryManager.CONFIG_PREFIX + "." + FileBasedJobLockFactory.LOCK_DIR_CONFIG,
            ConfigValueFactory.fromAnyRef("/tmp"));
    Config factoryCfg3 = mgr.getFactoryConfig(sysConfig3);
    Assert.assertEquals(factoryCfg3.getString(FileBasedJobLockFactory.LOCK_DIR_CONFIG), "/tmp");
  }
  @Test
  public void testGetJobLockFactory() throws Exception {
    final Logger log = LoggerFactory.getLogger("FileBasedJobLockFactoryManagerTest.testGetJobLockFactory");
    FileBasedJobLockFactoryManager mgr = new FileBasedJobLockFactoryManager();
    // Create an instance with default configs
    Config sysConfig1 = ConfigFactory.empty();
    FileBasedJobLockFactory factory1 = mgr.getJobLockFactory(sysConfig1, Optional.of(log));
    // With no lock dir configured, the factory creates a temp dir on the local FS.
    Assert.assertTrue(factory1.getLockFileDir().toString().startsWith(FileBasedJobLockFactory.DEFAULT_LOCK_DIR_PREFIX));
    Assert.assertTrue(factory1.getFs() instanceof LocalFileSystem);
    Assert.assertTrue(factory1.getFs().exists(factory1.getLockFileDir()));
    JobSpec js1 = JobSpec.builder("gobblin-test:job1").build();
    FileBasedJobLock lock11 = factory1.getJobLock(js1);
    // Lock file names are derived from the job name; locks must be acquirable once.
    Assert.assertTrue(lock11.getLockFile().getName().startsWith(FileBasedJobLockFactory.getJobName(js1)));
    Assert.assertTrue(lock11.tryLock());
    lock11.unlock();
    // Lock dir should be deleted after close()
    factory1.close();
    Assert.assertFalse(factory1.getFs().exists(factory1.getLockFileDir()));
    // Create an instance with pre-existing lock dir
    File lockDir = Files.createTempDir();
    Assert.assertTrue(lockDir.exists());
    Config sysConfig2 = ConfigFactory.empty()
        .withValue(FileBasedJobLockFactoryManager.CONFIG_PREFIX + "." + FileBasedJobLockFactory.LOCK_DIR_CONFIG,
            ConfigValueFactory.fromAnyRef(lockDir.getAbsolutePath()))
    ;
    FileBasedJobLockFactory factory2 = mgr.getJobLockFactory(sysConfig2, Optional.of(log));
    Assert.assertEquals(factory2.getLockFileDir().toString(), lockDir.getAbsolutePath());
    Assert.assertTrue(factory2.getFs() instanceof LocalFileSystem);
    Assert.assertTrue(factory2.getFs().exists(factory2.getLockFileDir()));
    // Lock dir should not be removed on close
    factory2.close();
    Assert.assertTrue(factory2.getFs().exists(factory2.getLockFileDir()));
  }
}
| 1,256 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/locks/JobLockTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.locks;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import com.google.common.io.Closer;
import org.apache.log4j.Level;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
/**
 * Base for all {@link JobLock} unit tests.
 *
 * Subclasses provide a concrete lock via {@link #getJobLock()}; the handshake test
 * below then verifies mutual exclusion between two threads sharing that lock.
 *
 * @author Joel Baranick
 */
@Test(groups = {"gobblin.runtime"})
public abstract class JobLockTest {
  @AfterClass
  public void tearDown() throws IOException {
    ZookeeperBasedJobLock.shutdownCuratorFramework();
  }
  // Supplies the concrete JobLock implementation under test.
  protected abstract JobLock getJobLock() throws JobLockException, IOException;
  // Picked up as a test via the class-level @Test annotation. Two threads alternate
  // acquiring/releasing the shared lock, each verifying that tryLock() fails while
  // the other holds it. Steps are sequenced with a ReentrantLock and two Conditions;
  // the CountDownLatch confirms both threads completed all their checks.
  public void testLocalJobLock() throws Exception {
    Closer closer = Closer.create();
    try {
      // Set to true or false to enable debug logging in the threads
      final AtomicBoolean debugEnabled = new AtomicBoolean(true);
      final JobLock lock = closer.register(getJobLock());
      final CountDownLatch numTestsToPass = new CountDownLatch(2);
      // Coordinates the step-by-step handshake between the two worker threads.
      final Lock stepsLock = new ReentrantLock();
      final AtomicBoolean thread1Locked = new AtomicBoolean(false);
      final AtomicBoolean thread2Locked = new AtomicBoolean(false);
      final Condition thread1Done = stepsLock.newCondition();
      final Condition thread2Done = stepsLock.newCondition();
      Thread thread1 = new Thread(new Runnable() {
        @Override
        public void run() {
          final Logger log = LoggerFactory.getLogger("testLocalJobLock.thread1");
          if (debugEnabled.get()) {
            org.apache.log4j.Logger.getLogger(log.getName()).setLevel(Level.DEBUG);
          }
          try {
            stepsLock.lock();
            try {
              log.debug("Acquire the lock");
              Assert.assertTrue(lock.tryLock());
              thread1Locked.set(true);
              log.debug("Notify thread2 to check the lock");
              thread1Done.signal();
              log.debug("Wait for thread2 to check the lock");
              thread2Done.await();
              log.debug("Release the file lock");
              lock.unlock();
              thread1Locked.set(false);
              log.debug("Notify and wait for thread2 to acquired the lock");
              thread1Done.signal();
              // Loop guards against spurious wakeups before thread2 holds the lock.
              while (!thread2Locked.get()) thread2Done.await();
              Assert.assertFalse(lock.tryLock());
              log.debug("Notify thread2 that we are done with the check");
              thread1Done.signal();
            } finally {
              stepsLock.unlock();
            }
            numTestsToPass.countDown();
          } catch (Exception e) {
            log.error("error: " + e, e);
          }
        }
      }, "testLocalJobLock.thread1");
      thread1.setDaemon(true);
      thread1.start();
      Thread thread2 = new Thread(new Runnable() {
        @Override
        public void run() {
          final Logger log = LoggerFactory.getLogger("testLocalJobLock.thread2");
          if (debugEnabled.get()) {
            org.apache.log4j.Logger.getLogger(log.getName()).setLevel(Level.DEBUG);
          }
          try {
            stepsLock.lock();
            try {
              log.debug("Wait for thread1 to acquire the lock and verify we can't acquire it.");
              while (!thread1Locked.get()) thread1Done.await();
              Assert.assertFalse(lock.tryLock());
              log.debug("Notify thread1 that we are done with the check.");
              thread2Done.signal();
              log.debug("Wait for thread1 to release the lock and try to acquire it.");
              while (thread1Locked.get()) thread1Done.await();
              Assert.assertTrue(lock.tryLock());
              thread2Locked.set(true);
              thread2Done.signal();
              log.debug("Wait for thread1 to check the lock");
              thread1Done.await();
              //clean up the file lock
              lock.unlock();
            } finally {
              stepsLock.unlock();
            }
            // NOTE(review): unlock() is called a second time here after the one above;
            // this relies on the implementation tolerating unlock of an unheld lock —
            // confirm whether the duplicate call is intentional.
            lock.unlock();
            numTestsToPass.countDown();
          } catch (Exception e) {
            log.error("error: " + e, e);
          }
        }
      }, "testLocalJobLock.thread2");
      thread2.setDaemon(true);
      thread2.start();
      //Wait for some time for the threads to die.
      Assert.assertTrue(numTestsToPass.await(30, TimeUnit.SECONDS));
    } finally {
      closer.close();
    }
  }
}
| 1,257 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/locks/ZookeeperBasedJobLockTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.locks;
import java.io.IOException;
import java.util.Properties;
import org.apache.curator.test.TestingServer;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
/**
 * Unit test for {@link ZookeeperBasedJobLock}.
 *
 * Runs the shared {@link JobLockTest} handshake against a lock backed by an embedded
 * Curator {@link TestingServer}.
 *
 * @author Joel Baranick
 */
@Test(groups = {"gobblin.runtime"})
public class ZookeeperBasedJobLockTest extends JobLockTest {
  private TestingServer testingServer;
  @BeforeClass
  public void setUp() throws Exception {
    // Passing -1 lets the embedded ZooKeeper test server pick a free port.
    this.testingServer = new TestingServer(-1);
  }
  @Override
  protected JobLock getJobLock() throws JobLockException, IOException {
    Properties lockProperties = new Properties();
    // A unique job name per run keeps lock znodes from colliding across test runs.
    lockProperties.setProperty(ConfigurationKeys.JOB_NAME_KEY,
        "ZookeeperBasedJobLockTest-" + System.currentTimeMillis());
    // Point the lock implementation at the embedded ZooKeeper server.
    lockProperties.setProperty(ZookeeperBasedJobLock.CONNECTION_STRING, testingServer.getConnectString());
    // Short retry and acquire settings keep failed lock attempts from stalling the suite.
    lockProperties.setProperty(ZookeeperBasedJobLock.MAX_RETRY_COUNT, "1");
    lockProperties.setProperty(ZookeeperBasedJobLock.RETRY_BACKOFF_SECONDS, "1");
    lockProperties.setProperty(ZookeeperBasedJobLock.LOCKS_ACQUIRE_TIMEOUT_MILLISECONDS, "1000");
    lockProperties.setProperty(ZookeeperBasedJobLock.SESSION_TIMEOUT_SECONDS, "180");
    lockProperties.setProperty(ZookeeperBasedJobLock.CONNECTION_TIMEOUT_SECONDS, "30");
    ZookeeperBasedJobLock zookeeperLock = new ZookeeperBasedJobLock(lockProperties);
    zookeeperLock.setEventListener(new JobLockEventListener());
    return zookeeperLock;
  }
  @Override
  @AfterClass
  public void tearDown() throws IOException {
    if (this.testingServer != null) {
      this.testingServer.close();
    }
  }
}
| 1,258 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/locks/FileBasedJobLockTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.locks;
import java.io.IOException;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.BasicConfigurator;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
/**
* Unit test for {@link FileBasedJobLock}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.runtime"})
public class FileBasedJobLockTest extends JobLockTest {
  /** Local file system used to host the lock directory. */
  private FileSystem localFs;
  /** Directory under which lock files are created, removed again in tearDown. */
  private Path lockDir;

  @BeforeClass
  public void setUp() throws IOException {
    BasicConfigurator.configure();
    this.localFs = FileSystem.getLocal(new Configuration());
    this.lockDir = new Path("MRJobLockTest");
    if (!this.localFs.exists(this.lockDir)) {
      this.localFs.mkdirs(this.lockDir);
    }
  }

  /**
   * Builds a {@link FileBasedJobLock} rooted at the test lock directory,
   * with a timestamped job name so runs never collide.
   */
  @Override
  protected JobLock getJobLock() throws JobLockException {
    Properties props = new Properties();
    props.setProperty(ConfigurationKeys.JOB_NAME_KEY, "FileBasedJobLockTest-" + System.currentTimeMillis());
    props.setProperty(FileBasedJobLock.JOB_LOCK_DIR, this.lockDir.getName());
    return new FileBasedJobLock(props);
  }

  @Override
  @AfterClass
  public void tearDown() throws IOException {
    // Recursively delete the lock directory if it is still present.
    if (this.localFs.exists(this.lockDir)) {
      this.localFs.delete(this.lockDir, true);
    }
  }
}
| 1,259 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/locks/LegacyJobLockFactoryManagerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.locks;
import java.io.IOException;
import java.util.Properties;
import org.apache.curator.test.TestingServer;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import com.google.common.io.Closer;
import org.apache.gobblin.configuration.ConfigurationKeys;
@Test(groups = {"gobblin.runtime"})
public class LegacyJobLockFactoryManagerTest {

  @AfterClass
  public void tearDown() throws IOException {
    // Shared curator framework is static; shut it down so other suites start clean.
    ZookeeperBasedJobLock.shutdownCuratorFramework();
  }

  /** A null properties argument must be rejected eagerly. */
  @Test(expectedExceptions = { NullPointerException.class })
  public void testNullProperties_ThrowsException() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      closer.register(LegacyJobLockFactoryManager.getJobLock(null, new JobLockEventListener()));
    } finally {
      closer.close();
    }
  }

  /** A null event listener argument must be rejected eagerly. */
  @Test(expectedExceptions = { NullPointerException.class })
  public void testNullListener_ThrowsException() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      closer.register(LegacyJobLockFactoryManager.getJobLock(new Properties(), null));
    } finally {
      closer.close();
    }
  }

  /**
   * When {@code JOB_LOCK_TYPE} is absent the factory should fall back to
   * {@link FileBasedJobLock}.
   *
   * NOTE(review): the original test set JOB_LOCK_TYPE explicitly, which made it a
   * duplicate of {@link #testGetFileBasedJobLock()} and left the "missing type"
   * path untested; the property is now intentionally omitted.
   */
  @Test
  public void testMissingJobLockType_ResultsIn_FileBasedJobLock() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      Properties properties = new Properties();
      properties.setProperty(ConfigurationKeys.FS_URI_KEY, "file:///");
      properties.setProperty(FileBasedJobLock.JOB_LOCK_DIR, "JobLockFactoryTest");
      properties.setProperty(ConfigurationKeys.JOB_NAME_KEY, "JobLockFactoryTest-" + System.currentTimeMillis());
      // JOB_LOCK_TYPE deliberately not set: exercise the default fallback.
      JobLock jobLock = closer.register(LegacyJobLockFactoryManager.getJobLock(properties, new JobLockEventListener()));
      MatcherAssert.assertThat(jobLock, Matchers.instanceOf(FileBasedJobLock.class));
    } finally {
      closer.close();
    }
  }

  /** An unknown lock-type class name must surface as a {@link JobLockException}. */
  @Test(expectedExceptions = { JobLockException.class })
  public void testInvalidJobLockType_ThrowsException() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      Properties properties = new Properties();
      properties.setProperty(ConfigurationKeys.JOB_LOCK_TYPE, "ThisIsATest");
      // Expected to throw; the dead assertion that followed in the original was unreachable.
      closer.register(LegacyJobLockFactoryManager.getJobLock(properties, new JobLockEventListener()));
    } finally {
      closer.close();
    }
  }

  /** An explicit FileBasedJobLock type yields a FileBasedJobLock instance. */
  @Test
  public void testGetFileBasedJobLock() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      Properties properties = new Properties();
      properties.setProperty(ConfigurationKeys.FS_URI_KEY, "file:///");
      properties.setProperty(FileBasedJobLock.JOB_LOCK_DIR, "JobLockFactoryTest");
      properties.setProperty(ConfigurationKeys.JOB_NAME_KEY, "JobLockFactoryTest-" + System.currentTimeMillis());
      properties.setProperty(ConfigurationKeys.JOB_LOCK_TYPE, FileBasedJobLock.class.getName());
      JobLock jobLock = closer.register(LegacyJobLockFactoryManager.getJobLock(properties, new JobLockEventListener()));
      MatcherAssert.assertThat(jobLock, Matchers.instanceOf(FileBasedJobLock.class));
    } finally {
      closer.close();
    }
  }

  /** An explicit ZookeeperBasedJobLock type yields a Zookeeper-backed lock. */
  @Test
  public void testGetZookeeperBasedJobLock() throws Exception {
    Closer closer = Closer.create();
    try {
      TestingServer testingServer = closer.register(new TestingServer(-1));
      Properties properties = new Properties();
      properties.setProperty(ConfigurationKeys.JOB_NAME_KEY, "JobLockFactoryTest-" + System.currentTimeMillis());
      properties.setProperty(ConfigurationKeys.JOB_LOCK_TYPE, ZookeeperBasedJobLock.class.getName());
      properties.setProperty(ZookeeperBasedJobLock.CONNECTION_STRING, testingServer.getConnectString());
      properties.setProperty(ZookeeperBasedJobLock.MAX_RETRY_COUNT, "1");
      properties.setProperty(ZookeeperBasedJobLock.LOCKS_ACQUIRE_TIMEOUT_MILLISECONDS, "1000");
      properties.setProperty(ZookeeperBasedJobLock.RETRY_BACKOFF_SECONDS, "1");
      properties.setProperty(ZookeeperBasedJobLock.SESSION_TIMEOUT_SECONDS, "180");
      properties.setProperty(ZookeeperBasedJobLock.CONNECTION_TIMEOUT_SECONDS, "30");
      JobLock jobLock = closer.register(LegacyJobLockFactoryManager.getJobLock(properties, new JobLockEventListener()));
      MatcherAssert.assertThat(jobLock, Matchers.instanceOf(ZookeeperBasedJobLock.class));
    } finally {
      closer.close();
    }
  }
}
| 1,260 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/fork/ForkTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.fork;
import java.io.IOException;
import lombok.Getter;
import lombok.Setter;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.runtime.ExecutionModel;
import org.apache.gobblin.runtime.TaskContext;
import org.apache.gobblin.writer.DataWriter;
import org.apache.gobblin.writer.DataWriterBuilder;
import org.apache.gobblin.writer.RetryWriter;
import org.junit.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
/**
 * Tests that a {@link Fork} honors {@code FORK_CLOSE_WRITER_ON_COMPLETION}:
 * the writer's close count after {@code run()} should be 1 when the flag is
 * "true" and 0 when it is "false".
 */
public class ForkTest {
  // Supplies (flag value, expected close count) pairs to the test method.
  @DataProvider(name = "closeConfigProvider")
  public static Object[][] closeConfigProvider() {
    // {close on done, expected count}
    return new Object[][]{{"true", 1}, {"false", 0}};
  }
  @Test(dataProvider = "closeConfigProvider")
  public void TestForCloseWriterTrue(String closeConfig, int expectedCloseCount) throws Exception {
    WorkUnitState wus = new WorkUnitState();
    wus.setProp(ConfigurationKeys.FORK_CLOSE_WRITER_ON_COMPLETION, closeConfig);
    wus.setProp(ConfigurationKeys.JOB_ID_KEY, "job2");
    wus.setProp(ConfigurationKeys.TASK_ID_KEY, "task1");
    // Retry wrapper disabled so the fork talks to DummyWriter directly.
    wus.setProp(RetryWriter.RETRY_WRITER_ENABLED, "false");
    // Eager init so getWriter() below is non-null before run().
    wus.setProp(ConfigurationKeys.WRITER_EAGER_INITIALIZATION_KEY, "true");
    wus.setProp(ConfigurationKeys.WRITER_BUILDER_CLASS, DummyDataWriterBuilder.class.getName());
    TaskContext taskContext = new TaskContext(wus);
    Fork testFork = new TestFork(taskContext, null, 0, 0, ExecutionModel.BATCH);
    Assert.assertNotNull(testFork.getWriter());
    testFork.run();
    // DummyDataWriterBuilder.getWriter() is thread-local, so this reads the
    // same writer instance the fork used on this thread.
    Assert.assertEquals(expectedCloseCount, DummyDataWriterBuilder.getWriter().getCloseCount());
  }
  // Minimal Fork subclass: no record processing, accepts every record.
  private static class TestFork extends Fork {
    public TestFork(TaskContext taskContext, Object schema, int branches, int index, ExecutionModel executionModel)
        throws Exception {
      super(taskContext, schema, branches, index, executionModel);
    }
    @Override
    protected void processRecords() throws IOException, DataConversionException {
    }
    @Override
    protected boolean putRecordImpl(Object record) throws InterruptedException {
      return true;
    }
  }
  // Builder that hands out one DummyWriter per thread so the test can inspect
  // the exact writer instance the fork closed.
  public static class DummyDataWriterBuilder extends DataWriterBuilder<String, Integer> {
    private static ThreadLocal<DummyWriter> myThreadLocal = ThreadLocal.withInitial(() -> new DummyWriter());
    @Override
    public DataWriter<Integer> build() throws IOException {
      // Reset the counter so each built writer starts from a clean slate.
      getWriter().setCloseCount(0);
      return getWriter();
    }
    public static DummyWriter getWriter() {
      return myThreadLocal.get();
    }
  }
  // No-op writer that only records how many times close() was invoked.
  private static class DummyWriter implements DataWriter<Integer> {
    @Getter
    @Setter
    private int closeCount = 0;
    DummyWriter() {
    }
    @Override
    public void write(Integer record) throws IOException {
    }
    @Override
    public void commit() throws IOException {
    }
    @Override
    public void cleanup() throws IOException {
    }
    @Override
    public long recordsWritten() {
      return 0;
    }
    @Override
    public long bytesWritten() throws IOException {
      return 0;
    }
    @Override
    public void close() throws IOException {
      this.closeCount++;
    }
  }
}
| 1,261 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_spec/JobSpecResolverTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_spec;
import java.net.URI;
import org.apache.gobblin.runtime.api.JobCatalogWithTemplates;
import org.apache.gobblin.runtime.api.JobSpec;
import org.apache.gobblin.runtime.api.JobTemplate;
import org.apache.gobblin.runtime.api.SecureJobTemplate;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
/**
 * Tests for {@link JobSpecResolver}: template resolution merges template config
 * into the user config, and a resolution action can veto insecure templates.
 */
public class JobSpecResolverTest {
  /** Resolving a spec against a template should merge template keys over user config. */
  @Test
  public void test() throws Exception {
    Config sysConfig = ConfigFactory.empty();
    JobTemplate jobTemplate = Mockito.mock(JobTemplate.class);
    // Stubbed template: overlays "template.value" -> "foo" onto the user config.
    Mockito.when(jobTemplate.getResolvedConfig(Mockito.any(Config.class))).thenAnswer(i -> {
      Config userConfig = (Config) i.getArguments()[0];
      return ConfigFactory.parseMap(ImmutableMap.of("template.value", "foo")).withFallback(userConfig);
    });
    JobCatalogWithTemplates catalog = Mockito.mock(JobCatalogWithTemplates.class);
    Mockito.when(catalog.getTemplate(Mockito.eq(URI.create("my://template")))).thenReturn(jobTemplate);
    JobSpecResolver resolver = JobSpecResolver.builder(sysConfig).jobCatalog(catalog).build();
    JobSpec jobSpec = JobSpec.builder()
        .withConfig(ConfigFactory.parseMap(ImmutableMap.of("key", "value")))
        .withTemplate(URI.create("my://template")).build();
    ResolvedJobSpec resolvedJobSpec = resolver.resolveJobSpec(jobSpec);
    Assert.assertEquals(resolvedJobSpec.getOriginalJobSpec(), jobSpec);
    // Both the user key and the template-injected key must survive resolution.
    Assert.assertEquals(resolvedJobSpec.getConfig().entrySet().size(), 2);
    Assert.assertEquals(resolvedJobSpec.getConfig().getString("key"), "value");
    Assert.assertEquals(resolvedJobSpec.getConfig().getString("template.value"), "foo");
  }
  /** A SecureTemplateEnforcer action should reject insecure templates and pass secure ones. */
  @Test
  public void testWithResolutionAction() throws Exception {
    Config sysConfig = ConfigFactory.empty();
    // Identity template flagged as NOT secure.
    SecureJobTemplate insecureTemplate = Mockito.mock(SecureJobTemplate.class);
    Mockito.when(insecureTemplate.getResolvedConfig(Mockito.any(Config.class))).thenAnswer(i -> (Config) i.getArguments()[0]);
    Mockito.when(insecureTemplate.isSecure()).thenReturn(false);
    // Identity template flagged as secure.
    SecureJobTemplate secureTemplate = Mockito.mock(SecureJobTemplate.class);
    Mockito.when(secureTemplate.getResolvedConfig(Mockito.any(Config.class))).thenAnswer(i -> (Config) i.getArguments()[0]);
    Mockito.when(secureTemplate.isSecure()).thenReturn(true);
    JobCatalogWithTemplates catalog = Mockito.mock(JobCatalogWithTemplates.class);
    Mockito.when(catalog.getTemplate(Mockito.eq(URI.create("my://template.insecure")))).thenReturn(insecureTemplate);
    Mockito.when(catalog.getTemplate(Mockito.eq(URI.create("my://template.secure")))).thenReturn(secureTemplate);
    JobSpecResolver resolver = JobSpecResolver.builder(sysConfig).jobCatalog(catalog)
        // This resolution action should block any resolution that does not use a secure template
        .jobResolutionAction(new SecureTemplateEnforcer()).build();
    JobSpec jobSpec = JobSpec.builder()
        .withConfig(ConfigFactory.parseMap(ImmutableMap.of("key", "value")))
        .withTemplate(URI.create("my://template.insecure")).build();
    // Insecure template -> resolution must fail.
    Assert.expectThrows(JobTemplate.TemplateException.class, () -> resolver.resolveJobSpec(jobSpec));
    JobSpec jobSpec2 = JobSpec.builder()
        .withConfig(ConfigFactory.parseMap(ImmutableMap.of("key", "value")))
        .withTemplate(URI.create("my://template.secure")).build();
    // Secure template -> resolution succeeds with the user config intact.
    ResolvedJobSpec resolvedJobSpec = resolver.resolveJobSpec(jobSpec2);
    Assert.assertEquals(resolvedJobSpec.getOriginalJobSpec(), jobSpec2);
    Assert.assertEquals(resolvedJobSpec.getConfig().entrySet().size(), 1);
    Assert.assertEquals(resolvedJobSpec.getConfig().getString("key"), "value");
  }
}
| 1,262 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/util/MultiWorkUnitUnpackingIteratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.util;
import com.google.common.base.Function;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import org.apache.gobblin.source.workunit.MultiWorkUnit;
import org.apache.gobblin.source.workunit.WorkUnit;
import java.util.List;
import javax.annotation.Nullable;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests that {@link MultiWorkUnitUnpackingIterator} flattens nested
 * {@link MultiWorkUnit}s into a single ordered sequence of plain work units.
 */
public class MultiWorkUnitUnpackingIteratorTest {
  public static final String WORK_UNIT_NAME = "work.unit.name";

  @Test
  public void test() {
    List<WorkUnit> workUnitList = Lists.newArrayList();
    workUnitList.add(createWorkUnit("wu1", "wu2"));
    workUnitList.add(createWorkUnit("wu3"));
    workUnitList.add(createWorkUnit("wu4", "wu5", "wu6"));
    // Unpack and project each work unit to its name; a lambda replaces the
    // original anonymous Guava Function (the codebase already uses Java 8 lambdas).
    List<String> names = Lists.newArrayList(Iterators.transform(
        new MultiWorkUnitUnpackingIterator(workUnitList.iterator()),
        input -> input.getProp(WORK_UNIT_NAME)));
    // Expect wu1..wu6 in order, i.e. nesting is flattened without reordering.
    Assert.assertEquals(names.size(), 6);
    for (int i = 0; i < 6; i++) {
      Assert.assertEquals(names.get(i), "wu" + (i + 1));
    }
  }

  /**
   * Builds a plain {@link WorkUnit} for a single name, or a {@link MultiWorkUnit}
   * wrapping one child per name when several names are given.
   */
  private WorkUnit createWorkUnit(String... names) {
    if (names.length == 1) {
      WorkUnit workUnit = new WorkUnit();
      workUnit.setProp(WORK_UNIT_NAME, names[0]);
      return workUnit;
    }
    MultiWorkUnit mwu = new MultiWorkUnit();
    for (String name : names) {
      mwu.addWorkUnit(createWorkUnit(name));
    }
    return mwu;
  }
}
| 1,263 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/util/ExceptionCleanupUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.util;
import java.io.IOException;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
@Test(groups = {"gobblin.runtime"})
public class ExceptionCleanupUtilsTest {
@Test
public void canRemoveEmptyWrapper() {
Exception exception = new IOException(new IllegalArgumentException("root cause"));
Throwable rootCause = ExceptionCleanupUtils.removeEmptyWrappers(exception);
assertEquals(rootCause.getClass(), IllegalArgumentException.class);
}
@Test
public void canRemoveMultipleEmptyWrappers() {
Exception exception = new IOException(new IOException(new IllegalArgumentException("root cause")));
Throwable unwrapped = ExceptionCleanupUtils.removeEmptyWrappers(exception);
assertEquals(unwrapped.getClass(), IllegalArgumentException.class);
}
@Test
public void willNotRemoveExceptionWithMessage() {
Exception exception = new IOException("test message", new IllegalArgumentException("root cause"));
Throwable unwrapped = ExceptionCleanupUtils.removeEmptyWrappers(exception);
assertEquals(unwrapped.getClass(), IOException.class);
}
} | 1,264 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/util/JobStateToJsonConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.util;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.stream.JsonReader;
import org.apache.gobblin.configuration.ConfigurationKeys;
/**
 * Tests for {@link JobStateToJsonConverter}: converting a stored job state to
 * JSON with {@code keepConfig} enabled must retain the "properties" section at
 * both the job level and in every task state.
 */
@Test(groups = { "gobblin.runtime" })
public class JobStateToJsonConverterTest {
  private final String PROPERTIES = "properties";
  private final String TASK_STATES = "task states";
  private final String TEST_JOB = "TestJob";
  private final String TEST_STORE = "store/";

  /**
   * Parses the converter output and asserts that the job-level and per-task
   * "properties" sections are present. Shared by both tests below.
   */
  private void assertJobAndTaskPropertiesPresent(StringWriter stringWriter) {
    JsonObject json = new JsonParser().parse(new JsonReader(new StringReader(stringWriter.toString()))).getAsJsonObject();
    Assert.assertNotNull(json.get(PROPERTIES));
    for (JsonElement taskState : json.get(TASK_STATES).getAsJsonArray()) {
      Assert.assertNotNull(taskState.getAsJsonObject().get(PROPERTIES));
    }
  }

  // NOTE(review): a stale comment previously said this test was "disabled", but it
  // is active; it depends on a current.jst fixture in the test store.
  @Test
  public void testJsonKeepConfig()
      throws IOException {
    String stateStorePath = getClass().getClassLoader().getResource(TEST_STORE).getPath();
    boolean keepConfig = true;
    // Store path given explicitly; properties are empty.
    JobStateToJsonConverter converter = new JobStateToJsonConverter(new Properties(), stateStorePath, keepConfig);
    StringWriter stringWriter = new StringWriter();
    converter.convert(TEST_JOB, stringWriter);
    assertJobAndTaskPropertiesPresent(stringWriter);
  }

  @Test
  public void testJsonKeepConfigWithoutStoreUrl()
      throws IOException {
    String stateStorePath = getClass().getClassLoader().getResource(TEST_STORE).getPath();
    Properties properties = new Properties();
    // Store path supplied via properties instead of the explicit URL argument.
    properties.setProperty(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, stateStorePath);
    boolean keepConfig = true;
    JobStateToJsonConverter converter = new JobStateToJsonConverter(properties, null, keepConfig);
    StringWriter stringWriter = new StringWriter();
    converter.convert(TEST_JOB, stringWriter);
    assertJobAndTaskPropertiesPresent(stringWriter);
  }
}
| 1,265 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/util/TaskMetricsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.util;
import java.util.ArrayList;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.runtime.JobState;
import org.apache.gobblin.runtime.TaskState;
import org.apache.gobblin.source.workunit.WorkUnit;
@Test(groups = {"gobblin.runtime"})
public class TaskMetricsTest {

  /**
   * A task's metric context should be parented by the metric context of the
   * job the task belongs to.
   */
  @Test
  public void testTaskGetMetrics() {
    String jobName = "jobName";
    String jobId = "job_456";
    String taskId = "task_456";

    // Register job-level metrics first so the task can find its parent.
    JobMetrics jobMetrics = JobMetrics.get(new JobState(jobName, jobId));

    // Build a TaskState carrying the same job/task ids.
    State taskProps = new State();
    taskProps.setProp(ConfigurationKeys.JOB_ID_KEY, jobId);
    taskProps.setProp(ConfigurationKeys.TASK_ID_KEY, taskId);
    SourceState sourceState = new SourceState(taskProps, new ArrayList<WorkUnitState>());
    TaskState taskState = new TaskState(new WorkUnitState(new WorkUnit(sourceState, null)));

    TaskMetrics taskMetrics = new TaskMetrics(taskState);
    Assert.assertNotNull(taskMetrics.getMetricContext());
    Assert.assertTrue(taskMetrics.getMetricContext().getParent().isPresent());
    Assert.assertEquals(taskMetrics.getMetricContext().getParent().get(), jobMetrics.getMetricContext());
  }
}
| 1,266 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/util/CalculateTotalWorkUnitSizeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.util;
import java.util.ArrayList;
import java.util.List;
import org.apache.gobblin.runtime.AbstractJobLauncher;
import org.apache.gobblin.service.ServiceConfigKeys;
import org.apache.gobblin.source.workunit.BasicWorkUnitStream;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.source.workunit.WorkUnitStream;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link AbstractJobLauncher#sumWorkUnitsSizes}: the launcher should
 * report the sum of the {@code WORK_UNIT_SIZE} property across a work-unit stream.
 */
public class CalculateTotalWorkUnitSizeTest {

  /** Sizes 1..10 should sum to 55. */
  @Test
  public void testSummingWorkUnitsIncreasingSize() throws Exception {
    List<WorkUnit> workUnits = new ArrayList<>();
    for (int i = 1; i < 11; i++) {
      WorkUnit workUnit = new WorkUnit();
      workUnit.setProp(ServiceConfigKeys.WORK_UNIT_SIZE, i);
      workUnits.add(workUnit);
    }
    WorkUnitStream workUnitStream = new BasicWorkUnitStream.Builder(workUnits).build();
    long totalBytesToCopy = AbstractJobLauncher.sumWorkUnitsSizes(workUnitStream);
    // 1 + 2 + ... + 10 = 55
    Assert.assertEquals(totalBytesToCopy, 55);
  }

  /** Arithmetic progression 3 + 5i for i in [0, 10) should sum to 255. */
  @Test
  public void testSummingWorkUnitsArithmeticSum() throws Exception {
    List<WorkUnit> workUnits = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
      WorkUnit workUnit = new WorkUnit();
      workUnit.setProp(ServiceConfigKeys.WORK_UNIT_SIZE, 3 + 5 * i);
      workUnits.add(workUnit);
    }
    WorkUnitStream workUnitStream = new BasicWorkUnitStream.Builder(workUnits).build();
    long totalBytesToCopy = AbstractJobLauncher.sumWorkUnitsSizes(workUnitStream);
    // 10 * 3 + 5 * (0 + 1 + ... + 9) = 30 + 225 = 255
    Assert.assertEquals(totalBytesToCopy, 255);
  }
}
| 1,267 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/util/JobMetricsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.util;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metrics.GobblinMetrics;
import org.apache.gobblin.metrics.GobblinMetricsRegistry;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.Tag;
import org.apache.gobblin.metrics.event.JobEvent;
import org.apache.gobblin.runtime.JobState;
@Test(groups = { "gobblin.runtime" })
public class JobMetricsTest {

  /**
   * Verifies that {@link JobMetrics#get} caches instances by id: repeated lookups
   * return the same metric context, and removing the instance from the registry
   * forces a fresh context to be created.
   */
  @Test
  public void testJobMetricsGet() {
    String jobName = "testJob";
    String jobId = "job_123";
    JobState jobState = new JobState(jobName, jobId);
    JobMetrics jobMetrics = JobMetrics.get(jobState);
    Assert.assertNotNull(jobMetrics.getMetricContext());

    Map<String, ?> tagMap = jobMetrics.getMetricContext().getTagMap();
    // Capture the generated context id/name so later lookups can be compared.
    String contextId = tagMap.get(MetricContext.METRIC_CONTEXT_ID_TAG_NAME).toString();
    String contextName = tagMap.get(MetricContext.METRIC_CONTEXT_NAME_TAG_NAME).toString();
    Assert.assertEquals(tagMap.size(), 4);
    Assert.assertEquals(tagMap.get(JobEvent.METADATA_JOB_ID), jobId);
    Assert.assertEquals(tagMap.get(JobEvent.METADATA_JOB_NAME), jobName);

    // should get the original jobMetrics, can check by the id
    JobMetrics jobMetrics1 = JobMetrics.get(jobName + "_", jobId);
    Assert.assertNotNull(jobMetrics1.getMetricContext());
    tagMap = jobMetrics1.getMetricContext().getTagMap();
    // BUG FIX: assert on the freshly fetched tag map; the original re-checked a
    // stale 'tags' list captured from the first context, which tested nothing new.
    Assert.assertEquals(tagMap.size(), 4);
    Assert.assertEquals(tagMap.get(MetricContext.METRIC_CONTEXT_ID_TAG_NAME), contextId);
    Assert.assertEquals(tagMap.get(MetricContext.METRIC_CONTEXT_NAME_TAG_NAME), contextName);

    // remove original jobMetrics, should create a new one
    GobblinMetricsRegistry.getInstance().remove(jobMetrics.getId());
    JobMetrics jobMetrics2 = JobMetrics.get(jobName + "_", jobId);
    Assert.assertNotNull(jobMetrics2.getMetricContext());
    tagMap = jobMetrics2.getMetricContext().getTagMap();
    Assert.assertEquals(tagMap.size(), 4);
    // A new context must have a different generated id and name.
    Assert.assertNotEquals(tagMap.get(MetricContext.METRIC_CONTEXT_ID_TAG_NAME), contextId);
    Assert.assertNotEquals(tagMap.get(MetricContext.METRIC_CONTEXT_NAME_TAG_NAME), contextName);
  }

  /** Custom tags can be attached via either a Properties object or a State. */
  @Test
  public void testCustomTags() {
    Properties testProperties = new Properties();
    Tag<String> expectedPropertyTag = new Tag<>("key1", "value1");
    GobblinMetrics.addCustomTagToProperties(testProperties, expectedPropertyTag);
    State testState = new State(testProperties);
    List<Tag<?>> tags = GobblinMetrics.getCustomTagsFromState(testState);
    Assert.assertEquals(Iterables.getFirst(tags, null), expectedPropertyTag);

    Tag<String> expectedStateTag = new Tag<>("key2", "value2");
    GobblinMetrics.addCustomTagToState(testState, expectedStateTag);
    tags = GobblinMetrics.getCustomTagsFromState(testState);
    Assert.assertTrue(tags.containsAll(ImmutableList.of(expectedPropertyTag, expectedStateTag)));
  }
}
| 1,268 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/template/StaticJobTemplateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.template;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.gobblin.runtime.api.JobTemplate;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.api.JobCatalogWithTemplates;
public class StaticJobTemplateTest {
@Test
public void test() throws Exception {
Map<String, String> confMap = Maps.newHashMap();
confMap.put("key1", "value1");
confMap.put(ConfigurationKeys.REQUIRED_ATRRIBUTES_LIST, "required1,required2");
confMap.put(StaticJobTemplate.SUPER_TEMPLATE_KEY, "template2");
JobCatalogWithTemplates catalog = Mockito.mock(JobCatalogWithTemplates.class);
Mockito.when(catalog.getTemplate(new URI("template2"))).thenAnswer(
new InheritingJobTemplateTest.TestTemplateAnswer(
Lists.<URI>newArrayList(), ImmutableMap.of("key2", "value2"),
Lists.<String>newArrayList(), catalog));
StaticJobTemplate template =
new StaticJobTemplate(new URI("template"), "1", "desc", ConfigFactory.parseMap(confMap), catalog);
Assert.assertEquals(template.getSuperTemplates().size(), 1);
Assert.assertEquals(template.getSuperTemplates().iterator().next().getUri(), new URI("template2"));
Collection<String> required = template.getRequiredConfigList();
Assert.assertEquals(required.size(), 2);
Assert.assertTrue(required.contains("required1"));
Assert.assertTrue(required.contains("required2"));
Config rawTemplate = template.getRawTemplateConfig();
Assert.assertEquals(rawTemplate.getString("key1"), "value1");
Assert.assertEquals(rawTemplate.getString("key2"), "value2");
Config resolved = template.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required1", "r1", "required2", "r2")));
Assert.assertEquals(resolved.getString("key1"), "value1");
Assert.assertEquals(resolved.getString("key2"), "value2");
Assert.assertEquals(resolved.getString("required1"), "r1");
Assert.assertEquals(resolved.getString("required2"), "r2");
}
@Test
public void testMultipleTemplates() throws Exception {
Map<String, String> confMap = Maps.newHashMap();
confMap.put("key", "value");
InheritingJobTemplateTest.TestTemplate
template1 = new InheritingJobTemplateTest.TestTemplate(new URI("template1"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key1", "value1"),
ImmutableList.of());
InheritingJobTemplateTest.TestTemplate
template2 = new InheritingJobTemplateTest.TestTemplate(new URI("template2"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key2", "value2"),
ImmutableList.of());
List<JobTemplate> templateList = new ArrayList<>();
templateList.add(template1);
templateList.add(template2);
StaticJobTemplate template =
new StaticJobTemplate(new URI("template"), "1", "desc", ConfigFactory.parseMap(confMap), templateList);
Config resolved = template.getResolvedConfig(ConfigFactory.empty());
Assert.assertEquals(resolved.getString("key"), "value");
Assert.assertEquals(resolved.getString("key1"), "value1");
Assert.assertEquals(resolved.getString("key2"), "value2");
}
@Test
public void testSecure() throws Exception {
Map<String, Object> confMap = Maps.newHashMap();
confMap.put("nonOverridableKey", "value1");
confMap.put("overridableKey", "value1");
confMap.put(StaticJobTemplate.IS_SECURE_KEY, true);
confMap.put(StaticJobTemplate.SECURE_OVERRIDABLE_PROPERTIES_KEYS, "overridableKey, overridableKey2");
StaticJobTemplate template = new StaticJobTemplate(URI.create("my://template"), "1", "desc", ConfigFactory.parseMap(confMap), (JobCatalogWithTemplates) null);
Config userConfig = ConfigFactory.parseMap(ImmutableMap.of(
"overridableKey", "override",
"overridableKey2", "override2",
"nonOverridableKey", "override",
"somethingElse", "override"));
Config resolved = template.getResolvedConfig(userConfig);
Assert.assertEquals(resolved.entrySet().size(), 5);
Assert.assertEquals(resolved.getString("nonOverridableKey"), "value1");
Assert.assertEquals(resolved.getString("overridableKey"), "override");
Assert.assertEquals(resolved.getString("overridableKey2"), "override2");
Assert.assertFalse(resolved.hasPath("somethingElse"));
}
} | 1,269 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/template/InheritingJobTemplateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.template;
import java.net.URI;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import lombok.AllArgsConstructor;
import org.apache.gobblin.runtime.api.JobCatalogWithTemplates;
import org.apache.gobblin.runtime.api.JobTemplate;
import org.apache.gobblin.runtime.api.SpecNotFoundException;
/**
 * Unit tests for {@link InheritingJobTemplate}: single and multiple super templates,
 * inheritance graphs that contain a cycle, and required keys satisfied by a super template.
 */
public class InheritingJobTemplateTest {

  /** A child template inherits both raw config and required keys from its single parent. */
  @Test
  public void testSimpleInheritance() throws Exception {
    TestTemplate template1 = new TestTemplate(new URI("template1"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key1", "value1"),
        Lists.newArrayList("required"));
    TestTemplate template2 = new TestTemplate(new URI("template2"), Lists.<JobTemplate>newArrayList(template1), ImmutableMap.of("key2", "value2"),
        Lists.newArrayList("required2"));

    // Required keys are the union of the child's and the parent's.
    Collection<String> required = template2.getRequiredConfigList();
    Assert.assertEquals(required.size(), 2);
    Assert.assertTrue(required.contains("required"));
    Assert.assertTrue(required.contains("required2"));

    // Raw template config merges both levels.
    Config rawTemplate = template2.getRawTemplateConfig();
    Assert.assertEquals(rawTemplate.getString("key1"), "value1");
    Assert.assertEquals(rawTemplate.getString("key2"), "value2");

    Config resolved = template2.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required", "r1", "required2", "r2")));
    Assert.assertEquals(resolved.getString("key1"), "value1");
    Assert.assertEquals(resolved.getString("key2"), "value2");
    Assert.assertEquals(resolved.getString("required"), "r1");
    Assert.assertEquals(resolved.getString("required2"), "r2");

    try {
      // should throw exception because missing required property
      resolved = template2.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required", "r1")));
      Assert.fail();
    } catch (JobTemplate.TemplateException te) {
      // expected
    }
  }

  /** A template with two parents merges config and required keys from both of them. */
  @Test
  public void testMultiInheritance() throws Exception {
    TestTemplate template1 = new TestTemplate(new URI("template1"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key1", "value1"),
        Lists.newArrayList("required"));
    TestTemplate template2 = new TestTemplate(new URI("template2"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key2", "value2"),
        Lists.newArrayList("required2"));
    TestTemplate inheriting = new TestTemplate(new URI("inheriting"), Lists.<JobTemplate>newArrayList(template1, template2), ImmutableMap.<String, String>of(),
        Lists.<String>newArrayList());

    Collection<String> required = inheriting.getRequiredConfigList();
    Assert.assertEquals(required.size(), 2);
    Assert.assertTrue(required.contains("required"));
    Assert.assertTrue(required.contains("required2"));

    Config rawTemplate = inheriting.getRawTemplateConfig();
    Assert.assertEquals(rawTemplate.getString("key1"), "value1");
    Assert.assertEquals(rawTemplate.getString("key2"), "value2");

    Config resolved = inheriting.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required", "r1", "required2", "r2")));
    Assert.assertEquals(resolved.getString("key1"), "value1");
    Assert.assertEquals(resolved.getString("key2"), "value2");
    Assert.assertEquals(resolved.getString("required"), "r1");
    Assert.assertEquals(resolved.getString("required2"), "r2");
  }

  /**
   * Resolution when the catalog-based inheritance graph contains a cycle:
   * template1 -> template2 -> template3 -> template1. Each template's config and required
   * keys must still be collected exactly once despite the loop.
   */
  @Test
  public void testLoopInheritance() throws Exception {
    JobCatalogWithTemplates catalog = Mockito.mock(JobCatalogWithTemplates.class);
    Mockito.when(catalog.getTemplate(new URI("template2"))).thenAnswer(
        new TestTemplateAnswer(Lists.newArrayList(new URI("template3")), ImmutableMap.of("key2", "value2"),
            Lists.<String>newArrayList(), catalog));
    Mockito.when(catalog.getTemplate(new URI("template3"))).thenAnswer(
        new TestTemplateAnswer(Lists.newArrayList(new URI("template1")), ImmutableMap.of("key3", "value3"),
            Lists.newArrayList("required3"), catalog));

    TestTemplate template = new TestTemplate(new URI("template1"), Lists.newArrayList(new URI("template2")),
        ImmutableMap.of("key1", "value1"), Lists.newArrayList("required"), catalog);

    Collection<String> required = template.getRequiredConfigList();
    Assert.assertEquals(required.size(), 2);
    Assert.assertTrue(required.contains("required"));
    Assert.assertTrue(required.contains("required3"));

    Config rawTemplate = template.getRawTemplateConfig();
    Assert.assertEquals(rawTemplate.getString("key1"), "value1");
    Assert.assertEquals(rawTemplate.getString("key2"), "value2");
    Assert.assertEquals(rawTemplate.getString("key3"), "value3");

    Config resolved = template.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required", "r1", "required3", "r3")));
    Assert.assertEquals(resolved.getString("key1"), "value1");
    Assert.assertEquals(resolved.getString("key2"), "value2");
    Assert.assertEquals(resolved.getString("key3"), "value3");
    Assert.assertEquals(resolved.getString("required"), "r1");
    Assert.assertEquals(resolved.getString("required3"), "r3");
  }

  /** A parent's required key that is supplied by the child's raw config is no longer required. */
  @Test
  public void testSatisfySuperTemplateRequirements() throws Exception {
    TestTemplate template1 = new TestTemplate(new URI("template1"), Lists.<JobTemplate>newArrayList(), ImmutableMap.of("key1", "value1"),
        Lists.newArrayList("required"));
    // The child's raw config provides "required", satisfying the parent's requirement.
    TestTemplate template2 = new TestTemplate(new URI("template2"), Lists.<JobTemplate>newArrayList(template1), ImmutableMap.of("required", "r1"),
        Lists.newArrayList("required2"));

    Collection<String> required = template2.getRequiredConfigList();
    Assert.assertEquals(required.size(), 1);
    Assert.assertTrue(required.contains("required2"));

    Config rawTemplate = template2.getRawTemplateConfig();
    Assert.assertEquals(rawTemplate.getString("key1"), "value1");
    Assert.assertEquals(rawTemplate.getString("required"), "r1");

    Config resolved = template2.getResolvedConfig(ConfigFactory.parseMap(ImmutableMap.of("required2", "r2")));
    Assert.assertEquals(resolved.getString("key1"), "value1");
    Assert.assertEquals(resolved.getString("required"), "r1");
    Assert.assertEquals(resolved.getString("required2"), "r2");
  }

  /** Mockito {@link Answer} that builds a {@link TestTemplate} for the URI being looked up. */
  @AllArgsConstructor
  public static class TestTemplateAnswer implements Answer<JobTemplate> {
    private final List<URI> superTemplateUris;
    private final Map<String,String> rawTemplate;
    private final List<String> required;
    private final JobCatalogWithTemplates catalog;

    @Override
    public JobTemplate answer(InvocationOnMock invocation)
        throws Throwable {
      // Argument 0 of JobCatalogWithTemplates.getTemplate(URI) is the requested template URI.
      return new TestTemplate((URI) invocation.getArguments()[0],
          this.superTemplateUris, this.rawTemplate, this.required, this.catalog);
    }
  }

  /** Minimal concrete {@link InheritingJobTemplate} backed by in-memory maps and lists. */
  public static class TestTemplate extends InheritingJobTemplate {
    private final URI uri;
    // This template's own key/value pairs (before merging with super templates).
    private final Map<String,String> rawTemplate;
    // This template's own required keys (before merging with super templates).
    private final List<String> required;
    // NOTE(review): never assigned anywhere, so getDependencies() always returns null —
    // confirm this is intentional for these tests.
    private Collection<String> dependencies;

    /** Builds a template whose super templates are resolved lazily from {@code catalog} by URI. */
    public TestTemplate(URI uri, List<URI> superTemplateUris, Map<String, String> rawTemplate, List<String> required,
        JobCatalogWithTemplates catalog) throws SpecNotFoundException, TemplateException {
      super(superTemplateUris, catalog);
      this.uri = uri;
      this.rawTemplate = rawTemplate;
      this.required = required;
    }

    /** Builds a template from already-instantiated super templates. */
    public TestTemplate(URI uri, List<JobTemplate> superTemplates, Map<String, String> rawTemplate, List<String> required) {
      super(superTemplates);
      this.uri = uri;
      this.rawTemplate = rawTemplate;
      this.required = required;
    }

    @Override
    public URI getUri() {
      return this.uri;
    }

    @Override
    public String getVersion() {
      return "1";
    }

    @Override
    public String getDescription() {
      return "description";
    }

    @Override
    protected Config getLocalRawTemplate() {
      return ConfigFactory.parseMap(this.rawTemplate);
    }

    @Override
    protected Collection<String> getLocallyRequiredConfigList() {
      return this.required;
    }

    @Override
    protected Config getLocallyResolvedConfig(Config userConfig) throws TemplateException {
      // Reject resolution if any locally required key is absent from the user config.
      for (String required : this.required) {
        if (!userConfig.hasPath(required)) {
          throw new TemplateException("Missing required property " + required);
        }
      }
      // User-supplied values win; the raw template fills in the rest.
      return userConfig.withFallback(getLocalRawTemplate());
    }

    @Override
    public Collection<String> getDependencies() {
      return this.dependencies;
    }
  }
}
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/template/ResourceBasedJobTemplateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.template;
import java.util.Collection;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
/**
 * Unit test for {@link ResourceBasedJobTemplate}: loads a template from the classpath and
 * checks its required keys, raw config, and resolution against user-supplied values.
 */
public class ResourceBasedJobTemplateTest {

  @Test
  public void test() throws Exception {
    ResourceBasedJobTemplate template = ResourceBasedJobTemplate.forResourcePath("templates/test.template", null);

    // The test template declares exactly three required keys: required0..required2.
    Collection<String> requiredKeys = template.getRequiredConfigList();
    Assert.assertEquals(requiredKeys.size(), 3);
    for (int i = 0; i <= 2; i++) {
      Assert.assertTrue(requiredKeys.contains("required" + i));
    }

    Config raw = template.getRawTemplateConfig();
    Assert.assertEquals(raw.getString("templated0"), "x");
    Assert.assertEquals(raw.getString("templated1"), "y");

    // Resolution keeps template values and merges in the required user values.
    Config resolved = template.getResolvedConfig(
        ConfigFactory.parseMap(ImmutableMap.of("required0", "r0", "required1", "r1", "required2", "r2")));
    Assert.assertEquals(resolved.getString("templated0"), "x");
    Assert.assertEquals(resolved.getString("required0"), "r0");
  }
}
| 1,271 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/spec_serde/FlowSpecSerializationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.spec_serde;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.gobblin.config.ConfigBuilder;
import org.apache.gobblin.runtime.api.FlowSpec;
/**
 * Round-trip serialization tests for {@link FlowSpec} using the custom Gson
 * serializer/deserializer pair, with and without child specs.
 */
public class FlowSpecSerializationTest {
  // Gson with the FlowSpec serializer and deserializer registered. Built exactly once here;
  // previously it was also rebuilt redundantly in setUp(), which this fix removes.
  private final Gson gson = new GsonBuilder()
      .registerTypeAdapter(new TypeToken<FlowSpec>() {}.getType(), new FlowSpecSerializer())
      .registerTypeAdapter(new TypeToken<FlowSpec>() {}.getType(), new FlowSpecDeserializer())
      .create();

  private FlowSpec flowSpec1;
  private FlowSpec flowSpec2;
  private FlowSpec flowSpec3;

  /** Builds the fixture specs: two leaf specs and one spec with templates and children. */
  @BeforeClass
  public void setUp() throws URISyntaxException {
    flowSpec1 = FlowSpec.builder("flowspec1").withVersion("version1").withDescription("description1")
        .withConfig(ConfigBuilder.create().addPrimitive("key1", "value1").build()).build();
    flowSpec2 = FlowSpec.builder("flowspec2").withVersion("version2").withDescription("description2")
        .withConfig(ConfigBuilder.create().addPrimitive("key2", "value2").build()).build();
    flowSpec3 = FlowSpec.builder("flowspec3").withVersion("version3").withDescription("description3")
        .withConfig(ConfigBuilder.create().addPrimitive("key3", "value3").build())
        .withTemplates(Arrays.asList(new URI("template1"), new URI("template2")))
        .withChildSpecs(Arrays.asList(flowSpec1, flowSpec2)).build();
  }

  /** A leaf spec survives a serialize/deserialize round trip unchanged. */
  @Test
  public void testSerializeWithNoChildren() {
    String json = gson.toJson(flowSpec1);
    Assert.assertEquals(gson.fromJson(json, FlowSpec.class), flowSpec1);
  }

  /** A spec with templates and child specs survives a round trip unchanged. */
  @Test
  public void testSerializeWithChildren() {
    String json = gson.toJson(flowSpec3);
    Assert.assertEquals(gson.fromJson(json, FlowSpec.class), flowSpec3);
  }
}
| 1,272 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/scheduler/TestImmediateJobSpecScheduler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.scheduler;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import org.apache.gobblin.runtime.api.JobSpec;
import org.apache.gobblin.runtime.api.JobSpecSchedule;
import org.apache.gobblin.runtime.api.JobSpecSchedulerListener;
/**
* Unit tests for {@link ImmediateJobSpecScheduler}
*
*/
public class TestImmediateJobSpecScheduler {

  /**
   * Schedules several jobs on an {@link ImmediateJobSpecScheduler} and verifies that each is
   * triggered right away, that scheduling a spec with an already-scheduled URI replaces the
   * old schedule, and that listener callbacks fire for every schedule/trigger/unschedule.
   */
  @Test
  public void testSchedule() throws Exception {
    final Logger log = LoggerFactory.getLogger(getClass().getName() + ".testSimpleFlow");
    final Optional<Logger> logOpt = Optional.of(log);
    ImmediateJobSpecScheduler scheduler = new ImmediateJobSpecScheduler(logOpt);
    JobSpecSchedulerListener listener = mock(JobSpecSchedulerListener.class);
    scheduler.registerWeakJobSpecSchedulerListener(listener);

    // Each of the 4 scheduled specs should run exactly once.
    final CountDownLatch expectedCallCount = new CountDownLatch(4);
    Runnable r = new Runnable() {
      @Override public void run() {
        expectedCallCount.countDown();
      }
    };

    JobSpec js1 = JobSpec.builder("test.job1").build();
    JobSpec js2 = JobSpec.builder("test.job2").build();
    JobSpec js3 = JobSpec.builder("test.job3").build();
    // Same URI as js1 but a newer version: scheduling it replaces js1's schedule.
    JobSpec js1_2 = JobSpec.builder("test.job1").withVersion("2").build();

    JobSpecSchedule jss1 = scheduler.scheduleJob(js1, r);
    Assert.assertEquals(scheduler.getSchedules().size(), 1);
    Assert.assertEquals(jss1.getJobSpec(), js1);
    Assert.assertTrue(jss1.getNextRunTimeMillis().isPresent());
    // Immediate scheduling: the next run time is "now" or earlier by the time we look.
    // (Stray duplicate semicolons on these two assertions were removed.)
    Assert.assertTrue(jss1.getNextRunTimeMillis().get().longValue() <= System.currentTimeMillis());

    JobSpecSchedule jss2 = scheduler.scheduleJob(js2, r);
    Assert.assertEquals(scheduler.getSchedules().size(), 2);
    Assert.assertEquals(jss2.getJobSpec(), js2);
    Assert.assertTrue(jss2.getNextRunTimeMillis().isPresent());
    Assert.assertTrue(jss2.getNextRunTimeMillis().get().longValue() <= System.currentTimeMillis());

    JobSpecSchedule jss3 = scheduler.scheduleJob(js3, r);
    Assert.assertEquals(scheduler.getSchedules().size(), 3);
    Assert.assertEquals(jss3.getJobSpec(), js3);

    // Re-scheduling the same URI replaces, not adds: count stays at 3.
    JobSpecSchedule jss1_2 = scheduler.scheduleJob(js1_2, r);
    Assert.assertEquals(scheduler.getSchedules().size(), 3);
    Assert.assertEquals(jss1_2.getJobSpec(), js1_2);

    Assert.assertTrue(expectedCallCount.await(100, TimeUnit.MILLISECONDS));

    scheduler.unscheduleJob(js1.getUri());
    Assert.assertEquals(scheduler.getSchedules().size(), 2);
    // Unscheduling an absent URI is a no-op.
    scheduler.unscheduleJob(js1.getUri());
    Assert.assertEquals(scheduler.getSchedules().size(), 2);

    verify(listener).onJobScheduled(Mockito.eq(jss1));
    verify(listener).onJobTriggered(Mockito.eq(js1));
    verify(listener).onJobScheduled(Mockito.eq(jss2));
    verify(listener).onJobTriggered(Mockito.eq(js2));
    verify(listener).onJobScheduled(Mockito.eq(jss3));
    verify(listener).onJobTriggered(Mockito.eq(js3));
    verify(listener).onJobUnscheduled(Mockito.eq(jss1));
    verify(listener).onJobScheduled(Mockito.eq(jss1_2));
    verify(listener).onJobTriggered(Mockito.eq(js1_2));
    verify(listener, Mockito.times(1)).onJobUnscheduled(Mockito.eq(jss1_2));
  }
}
| 1,273 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/scheduler/TestQuartzJobSpecScheduler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.scheduler;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.api.JobSpec;
import org.apache.gobblin.runtime.api.JobSpecSchedule;
import org.apache.gobblin.runtime.scheduler.QuartzJobSpecScheduler.QuartzJobSchedule;
/**
* Unit tests for {@link QuartzJobSpecScheduler}.
*/
public class TestQuartzJobSpecScheduler {

  /**
   * End-to-end scheduling test: runs cron-driven jobs on an in-memory (RAMJobStore) Quartz
   * scheduler, verifies they fire, replaces a schedule by re-scheduling the same URI, and
   * verifies that unscheduling stops further runs.
   *
   * <p>NOTE(review): this test depends on real wall-clock waits and sleeps, so it is
   * inherently sensitive to slow or loaded machines.
   */
  @Test public void testSchedule() throws Exception {
    final Logger log = LoggerFactory.getLogger(getClass().getName() + ".testSchedule");
    // Minimal Quartz config: named instance, 10 worker threads, in-memory job store.
    Config quartzCfg = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put("org.quartz.scheduler.instanceName", "TestQuartzJobSpecScheduler.testSchedule")
        .put("org.quartz.threadPool.threadCount", "10")
        .put("org.quartz.jobStore.class", "org.quartz.simpl.RAMJobStore")
        .build());
    QuartzJobSpecScheduler scheduler = new QuartzJobSpecScheduler(log, quartzCfg);
    scheduler.startAsync();
    scheduler.awaitRunning(10, TimeUnit.SECONDS);
    Assert.assertTrue(scheduler._scheduler.getScheduler().isStarted());
    // Each triggered job offers its JobSpec here so the test can observe firings in order.
    final ArrayBlockingQueue<JobSpec> expectedCalls = new ArrayBlockingQueue<>(100);
    try {
      // Quartz cron "0/5 * * * * ?": fires every 5 seconds starting at second 0.
      Config jobCfg1 = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
          .put(ConfigurationKeys.JOB_SCHEDULE_KEY, "0/5 * * * * ?")
          .build());
      // Quartz cron "3/5 * * * * ?": every 5 seconds starting at second 3, offset from job1.
      Config jobCfg2 = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
          .put(ConfigurationKeys.JOB_SCHEDULE_KEY, "3/5 * * * * ?")
          .build());
      final JobSpec js1 = JobSpec.builder("test.job1").withConfig(jobCfg1).build();
      final JobSpec js2 = JobSpec.builder("test.job2").withConfig(jobCfg2).build();
      // Same URI as js1 but a new version: scheduling it should replace js1's schedule.
      final JobSpec js1_2 = JobSpec.builder("test.job1").withConfig(jobCfg1).withVersion("2").build();
      JobSpecSchedule jss1 = scheduler.scheduleJob(js1, new Runnable() {
        @Override public void run() {
          expectedCalls.offer(js1);
        }
      });
      Assert.assertEquals(scheduler.getSchedules().size(), 1);
      Assert.assertEquals(jss1.getJobSpec(), js1);
      Assert.assertTrue(jss1 instanceof QuartzJobSchedule);
      QuartzJobSchedule qjss1 = (QuartzJobSchedule)jss1;
      // The Quartz job detail and trigger must be registered and still able to fire.
      Assert.assertNotNull(scheduler._scheduler.getScheduler().getJobDetail(qjss1.getQuartzTrigger().getJobKey()));
      Assert.assertNotNull(scheduler._scheduler.getScheduler().getTrigger(qjss1.getQuartzTrigger().getKey()));
      Assert.assertTrue(qjss1.getQuartzTrigger().mayFireAgain());
      // Wait for the next run
      JobSpec expJs1 = expectedCalls.poll(6000, TimeUnit.MILLISECONDS);
      Assert.assertEquals(expJs1, js1);
      // Wait for the next run
      expJs1 = expectedCalls.poll(6000, TimeUnit.MILLISECONDS);
      Assert.assertEquals(expJs1, js1);
      // Schedule another job
      JobSpecSchedule jss2 = scheduler.scheduleJob(js2, new Runnable() {
        @Override public void run() {
          expectedCalls.offer(js2);
        }
      });
      Assert.assertEquals(scheduler.getSchedules().size(), 2);
      Assert.assertEquals(jss2.getJobSpec(), js2);
      // Wait for the next run -- we should get js2
      JobSpec expJs2 = expectedCalls.poll(6000, TimeUnit.MILLISECONDS);
      Assert.assertEquals(expJs2, js2);
      // Wait for the next run -- we should get js1
      expJs1 = expectedCalls.poll(6000, TimeUnit.MILLISECONDS);
      Assert.assertEquals(expJs1, js1);
      // Wait for the next run -- we should get js2
      expJs2 = expectedCalls.poll(6000, TimeUnit.MILLISECONDS);
      log.info("Found call: " + expJs2);
      Assert.assertEquals(expJs2, js2);
      // Update the first job
      QuartzJobSchedule qjss1_2 = (QuartzJobSchedule)scheduler.scheduleJob(js1_2, new Runnable() {
        @Override public void run() {
          expectedCalls.offer(js1_2);
        }
      });
      // Replacing js1's schedule keeps the count at 2.
      Assert.assertEquals(scheduler.getSchedules().size(), 2);
      // Wait for 5 seconds -- we should see at least 2 runs of js1_2 and js2
      // NOTE(review): the actual sleep below is 15 seconds, not 5 as the comment above says.
      Thread.sleep(15000);
      int js1_2_cnt = 0;
      int js2_cnt = 0;
      for (JobSpec nextJs: expectedCalls){
        log.info("Found call: " + nextJs);
        if (js1_2.equals(nextJs)) {
          ++js1_2_cnt;
        }
        else if (js2.equals(nextJs)) {
          ++js2_cnt;
        }
        else {
          // After js1 was replaced, only js1_2 and js2 may fire.
          Assert.fail("Unexpected job spec: " + nextJs);
        }
      }
      Assert.assertTrue(js1_2_cnt >= 2, "js1_2_cnt=" + js1_2_cnt);
      Assert.assertTrue(js2_cnt >= 2, "js2_cnt=" + js2_cnt);
      scheduler.unscheduleJob(js1_2.getUri());
      Assert.assertEquals(scheduler.getSchedules().size(), 1);
      // The underlying Quartz job must be gone after unscheduling.
      Assert.assertFalse(scheduler._scheduler.getScheduler().checkExists(qjss1_2.getQuartzTrigger().getJobKey()));
      // Flush calls
      Thread.sleep(1000);
      expectedCalls.clear();
      // All subsequent calls should be for js2
      for (int i = 0; i < 2; ++i){
        JobSpec nextJs = expectedCalls.poll(12000, TimeUnit.MILLISECONDS);
        Assert.assertEquals(nextJs, js2);
      }
    }
    finally {
      scheduler.stopAsync();
      scheduler.awaitTerminated(10, TimeUnit.SECONDS);
    }
    // make sure there are no more calls
    // we may have to drain at most one call due to race conditions
    if (null != expectedCalls.poll(2100, TimeUnit.MILLISECONDS)) {
      Assert.assertNull(expectedCalls.poll(3000, TimeUnit.MILLISECONDS));
    }
  }
}
| 1,274 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/std/TestJobSpecFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.std;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.runtime.api.JobSpec;
/**
* Unit tests for {@link JobSpecFilter}
*/
public class TestJobSpecFilter {

  /** Verifies filtering by URI alone, and by URI and version together. */
  @Test public void testUriAndVersion() {
    JobSpec job1Version1 = JobSpec.builder("gobblin:/test/job1").withVersion("1").build();
    JobSpec job1Version2 = JobSpec.builder("gobblin:/test/job1").withVersion("2").build();
    JobSpec job2Version1 = JobSpec.builder("gobblin:/test/job2").withVersion("1").build();
    JobSpec job2Version2 = JobSpec.builder("gobblin:/test/job2").withVersion("2").build();

    // A URI-only filter accepts every version of the matching URI and nothing else.
    JobSpecFilter uriOnlyFilter = JobSpecFilter.eqJobSpecURI("gobblin:/test/job1");
    Assert.assertTrue(uriOnlyFilter.apply(job1Version1));
    Assert.assertTrue(uriOnlyFilter.apply(job1Version2));
    Assert.assertFalse(uriOnlyFilter.apply(job2Version1));
    Assert.assertFalse(uriOnlyFilter.apply(job2Version2));

    // A URI+version filter accepts only the exact combination.
    JobSpecFilter uriAndVersionFilter =
        JobSpecFilter.builder().eqURI("gobblin:/test/job2").eqVersion("2").build();
    Assert.assertFalse(uriAndVersionFilter.apply(job1Version1));
    Assert.assertFalse(uriAndVersionFilter.apply(job1Version2));
    Assert.assertFalse(uriAndVersionFilter.apply(job2Version1));
    Assert.assertTrue(uriAndVersionFilter.apply(job2Version2));
  }
}
| 1,275 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/std/TestFilteredJobLifecycleListener.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.std;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.JobState.RunningState;
import org.apache.gobblin.runtime.api.JobExecutionState;
import org.apache.gobblin.runtime.api.JobExecutionStateListener;
import org.apache.gobblin.runtime.api.JobLifecycleListener;
import org.apache.gobblin.runtime.api.JobSpec;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
/**
* Unit tests for {@link FilteredJobLifecycleListener}
*/
public class TestFilteredJobLifecycleListener {

  /**
   * Fires every lifecycle/state event for two versions of the same job URI through a
   * {@link FilteredJobLifecycleListener} whose filter accepts only version "2", then verifies
   * the delegate listener saw all events for the matching version and none for the other.
   */
  @Test public void testSimple() {
    Config config = ConfigFactory.empty()
        .withValue(ConfigurationKeys.JOB_NAME_KEY, ConfigValueFactory.fromAnyRef("myJob"));
    JobSpec js1_1 = JobSpec.builder("gobblin:/testSimple/job1").withVersion("1").withConfig(config).build();
    JobSpec js1_2 = JobSpec.builder("gobblin:/testSimple/job1").withVersion("2").withConfig(config).build();

    JobLifecycleListener mockListener = mock(JobLifecycleListener.class);
    // Only events for gobblin:/testSimple/job1 at version "2" should reach mockListener.
    FilteredJobLifecycleListener testListener =
        new FilteredJobLifecycleListener(JobSpecFilter.builder()
             .eqURI("gobblin:/testSimple/job1").eqVersion("2").build(),
            mockListener);

    JobExecutionState jss1_1 = new JobExecutionState(js1_1,
        JobExecutionUpdatable.createFromJobSpec(js1_1),
        Optional.<JobExecutionStateListener>absent());
    JobExecutionState jss1_2 = new JobExecutionState(js1_2,
        JobExecutionUpdatable.createFromJobSpec(js1_2),
        Optional.<JobExecutionStateListener>absent());

    // Version "1" events: all of these should be filtered out.
    testListener.onAddJob(js1_1);
    testListener.onDeleteJob(js1_1.getUri(), js1_1.getVersion());
    testListener.onUpdateJob(js1_1);  // fixed: stray second semicolon removed
    testListener.onStatusChange(jss1_1, RunningState.PENDING, RunningState.RUNNING);
    testListener.onStageTransition(jss1_1, "Stage1", "Stage2");
    testListener.onMetadataChange(jss1_1, "metaKey", "value1", "value2");

    // Version "2" events: all of these should pass through to the delegate.
    testListener.onAddJob(js1_2);
    testListener.onDeleteJob(js1_2.getUri(), js1_2.getVersion());
    testListener.onUpdateJob(js1_2);
    testListener.onStatusChange(jss1_2, RunningState.RUNNING, RunningState.SUCCESSFUL);
    testListener.onStageTransition(jss1_2, "Stage1", "Stage2");
    testListener.onMetadataChange(jss1_2, "metaKey", "value1", "value2");

    // The delegate must have received exactly the version "2" events...
    verify(mockListener).onAddJob(eq(js1_2));
    verify(mockListener).onDeleteJob(eq(js1_2.getUri()),
        eq(js1_2.getVersion()));
    verify(mockListener).onUpdateJob(eq(js1_2));
    verify(mockListener).onStatusChange(eq(jss1_2), eq(RunningState.RUNNING),
        eq(RunningState.SUCCESSFUL));
    verify(mockListener).onStageTransition(eq(jss1_2), eq("Stage1"), eq("Stage2"));
    verify(mockListener).onMetadataChange(eq(jss1_2), eq("metaKey"), eq("value1"), eq("value2"));

    // ...and none of the version "1" events.
    verify(mockListener, never()).onAddJob(eq(js1_1));
    verify(mockListener, never()).onDeleteJob(eq(js1_1.getUri()), eq(js1_1.getVersion()));
    verify(mockListener, never()).onUpdateJob(eq(js1_1));
    verify(mockListener, never()).onStatusChange(eq(jss1_1), eq(RunningState.RUNNING),
        eq(RunningState.SUCCESSFUL));
    verify(mockListener, never()).onStatusChange(eq(jss1_1), eq(RunningState.PENDING),
        eq(RunningState.RUNNING));
    verify(mockListener, never()).onStageTransition(eq(jss1_1), eq("Stage1"), eq("Stage2"));
    verify(mockListener, never()).onMetadataChange(eq(jss1_1), eq("metaKey"), eq("value1"), eq("value2"));
  }
}
| 1,276 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/std/TestJobExecutionStateListeners.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.std;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.JobState.RunningState;
import org.apache.gobblin.runtime.api.JobExecutionState;
import org.apache.gobblin.runtime.api.JobExecutionStateListener;
import org.apache.gobblin.runtime.api.JobSpec;
/**
* Unit tests for {@link JobExecutionStateListeners}.
*/
public class TestJobExecutionStateListeners {

  /**
   * Registers listeners at different points in a job execution's lifetime and verifies that
   * each one receives exactly the events raised while it was registered.
   */
  @Test
  public void testHappyPath() {
    final Logger log = LoggerFactory.getLogger(getClass() + ".testHappyPath");
    JobExecutionStateListeners dispatcher = new JobExecutionStateListeners(log);

    JobSpec jobSpec = JobSpec.builder("gobblin:test/job")
        .withConfig(ConfigFactory.empty()
            .withValue(ConfigurationKeys.JOB_NAME_KEY, ConfigValueFactory.fromAnyRef("myJob")))
        .build();
    JobExecutionUpdatable jobExecution = JobExecutionUpdatable.createFromJobSpec(jobSpec);

    JobExecutionStateListener earlyListener = Mockito.mock(JobExecutionStateListener.class);
    JobExecutionStateListener transientListener = Mockito.mock(JobExecutionStateListener.class);
    JobExecutionStateListener lateListener = Mockito.mock(JobExecutionStateListener.class);

    // Only earlyListener is registered before the two status transitions below.
    dispatcher.registerStateListener(earlyListener);
    JobExecutionState state =
        new JobExecutionState(jobSpec, jobExecution, Optional.<JobExecutionStateListener>of(dispatcher));
    state.setRunningState(RunningState.PENDING);
    state.setRunningState(RunningState.RUNNING);

    // The other two listeners join in time for the stage transition...
    dispatcher.registerStateListener(transientListener);
    dispatcher.registerStateListener(lateListener);
    state.setStage("Stage1");

    // ...but transientListener is removed again before the metadata change fires.
    dispatcher.unregisterStateListener(transientListener);
    dispatcher.onMetadataChange(state, "key", "oldValue", "newValue");

    // earlyListener saw everything: both status changes, the stage change, the metadata change.
    Mockito.verify(earlyListener).onStatusChange(Mockito.eq(state),
        Mockito.eq((RunningState) null), Mockito.eq(RunningState.PENDING));
    Mockito.verify(earlyListener).onStatusChange(Mockito.eq(state),
        Mockito.eq(RunningState.PENDING), Mockito.eq(RunningState.RUNNING));
    Mockito.verify(earlyListener).onStageTransition(Mockito.eq(state),
        Mockito.eq(JobExecutionState.UKNOWN_STAGE), Mockito.eq("Stage1"));
    Mockito.verify(earlyListener).onMetadataChange(Mockito.eq(state),
        Mockito.eq("key"), Mockito.eq("oldValue"), Mockito.eq("newValue"));

    // transientListener saw only the stage transition (registered after, removed before the rest).
    Mockito.verify(transientListener).onStageTransition(Mockito.eq(state),
        Mockito.eq(JobExecutionState.UKNOWN_STAGE), Mockito.eq("Stage1"));

    // lateListener saw the stage transition and the metadata change.
    Mockito.verify(lateListener).onStageTransition(Mockito.eq(state),
        Mockito.eq(JobExecutionState.UKNOWN_STAGE), Mockito.eq("Stage1"));
    Mockito.verify(lateListener).onMetadataChange(Mockito.eq(state),
        Mockito.eq("key"), Mockito.eq("oldValue"), Mockito.eq("newValue"));

    Mockito.verifyNoMoreInteractions(earlyListener);
    Mockito.verifyNoMoreInteractions(transientListener);
    Mockito.verifyNoMoreInteractions(lateListener);
  }
}
| 1,277 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/std/TestJobLifecycleListenersList.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.std;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Test;
import org.apache.gobblin.runtime.api.JobCatalogListenersContainer;
import org.apache.gobblin.runtime.api.JobExecutionDriver;
import org.apache.gobblin.runtime.api.JobExecutionState;
import org.apache.gobblin.runtime.api.JobLifecycleListener;
import org.apache.gobblin.runtime.api.JobSpecSchedulerListenersContainer;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
/**
* Unit tests for {@link JobLifecycleListenersList}
*/
public class TestJobLifecycleListenersList {

  /**
   * Verifies that strong and weak listener registrations are forwarded to the underlying
   * catalog/scheduler containers and that dispatched events reach the registered listeners.
   */
  @Test public void testHappyPath() {
    Logger log = LoggerFactory.getLogger("testHappyPath");
    JobCatalogListenersContainer catalogContainer = mock(JobCatalogListenersContainer.class);
    JobSpecSchedulerListenersContainer schedulerContainer =
        mock(JobSpecSchedulerListenersContainer.class);
    JobExecutionDriver driver = mock(JobExecutionDriver.class);
    JobExecutionState executionState = mock(JobExecutionState.class);
    JobLifecycleListener strongListener = mock(JobLifecycleListener.class);
    JobLifecycleListener weakListener = mock(JobLifecycleListener.class);

    JobLifecycleListenersList dispatcher =
        new JobLifecycleListenersList(catalogContainer, schedulerContainer, log);

    // strongListener is registered before the launch; weakListener joins only afterwards.
    dispatcher.registerJobLifecycleListener(strongListener);
    dispatcher.onJobLaunch(driver);
    dispatcher.registerWeakJobLifecycleListener(weakListener);
    dispatcher.onMetadataChange(executionState, "key", "oldValue", "newValue");

    // Strong registration propagates to both containers.
    verify(catalogContainer).addListener(eq(strongListener));
    verify(schedulerContainer).registerJobSpecSchedulerListener(eq(strongListener));

    // Only the listener registered at launch time saw the launch event.
    verify(strongListener).onJobLaunch(eq(driver));
    verify(weakListener, never()).onJobLaunch(eq(driver));

    // Weak registration propagates to both containers as weak registrations.
    verify(catalogContainer).registerWeakJobCatalogListener(eq(weakListener));
    verify(schedulerContainer).registerWeakJobSpecSchedulerListener(eq(weakListener));

    // Both listeners were registered by the time the metadata change fired.
    verify(strongListener).onMetadataChange(eq(executionState), eq("key"), eq("oldValue"), eq("newValue"));
    verify(weakListener).onMetadataChange(eq(executionState), eq("key"), eq("oldValue"), eq("newValue"));
  }
}
| 1,278 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/std/TestDefaultConfigurableImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.std;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
/**
* Unit tests for {@link DefaultConfigurableImpl}
*/
public class TestDefaultConfigurableImpl {

  /** Properties fed in should be readable back through the typed {@code Config} accessors. */
  @Test
  public void testFromProperties() {
    Properties sourceProps = new Properties();
    sourceProps.put("a1", "a_value");
    sourceProps.put("a2.b", "1");
    sourceProps.put("a2.c.d", "12.34");
    sourceProps.put("a2.c.d2", "true");

    Config resultConfig = DefaultConfigurableImpl.createFromProperties(sourceProps).getConfig();
    Assert.assertEquals(resultConfig.getString("a1"), "a_value");
    Assert.assertEquals(resultConfig.getLong("a2.b"), 1L);
    Assert.assertEquals(resultConfig.getDouble("a2.c.d"), 12.34);
    Assert.assertTrue(resultConfig.getBoolean("a2.c.d2"));
  }

  /** A {@code Config} fed in should round-trip to string-valued {@code Properties}. */
  @Test
  public void testFromConfig() {
    Config sourceConfig = ConfigFactory.empty()
        .withValue("a1", ConfigValueFactory.fromAnyRef("some_string"))
        .withValue("a2.b", ConfigValueFactory.fromAnyRef(-1))
        .withValue("a2.c.d", ConfigValueFactory.fromAnyRef(1.2))
        .withValue("a2.e.f", ConfigValueFactory.fromAnyRef(true));

    Properties resultProps = DefaultConfigurableImpl.createFromConfig(sourceConfig).getConfigAsProperties();
    Assert.assertEquals(resultProps.getProperty("a1"), "some_string");
    Assert.assertEquals(resultProps.getProperty("a2.b"), "-1");
    Assert.assertEquals(resultProps.getProperty("a2.c.d"), "1.2");
    Assert.assertEquals(resultProps.getProperty("a2.e.f"), "true");
  }
}
| 1,279 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/spec_store/MysqlSpecStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.spec_store;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang3.ArrayUtils;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.Iterators;
import com.typesafe.config.Config;
import org.apache.gobblin.config.ConfigBuilder;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metastore.testing.ITestMetastoreDatabase;
import org.apache.gobblin.metastore.testing.TestMetastoreDatabaseFactory;
import org.apache.gobblin.runtime.api.FlowSpec;
import org.apache.gobblin.runtime.api.FlowSpecSearchObject;
import org.apache.gobblin.runtime.api.Spec;
import org.apache.gobblin.runtime.api.SpecSerDe;
import org.apache.gobblin.runtime.api.SpecSerDeException;
import org.apache.gobblin.runtime.spec_serde.GsonFlowSpecSerDe;
import org.apache.gobblin.service.FlowId;
import static org.apache.gobblin.service.ServiceConfigKeys.FLOW_DESTINATION_IDENTIFIER_KEY;
import static org.apache.gobblin.service.ServiceConfigKeys.FLOW_SOURCE_IDENTIFIER_KEY;
/**
 * Integration-style tests for {@link MysqlSpecStore}, backed by an embedded test MySQL database.
 * Several tests are chained via {@code dependsOnMethods} and rely on the cumulative contents
 * (row counts) left behind by earlier tests, so execution order matters.
 */
public class MysqlSpecStoreTest {
// Credentials and table name for the embedded test database.
private static final String USER = "testUser";
private static final String PASSWORD = "testPassword";
private static final String TABLE = "spec_store";
// Store under test, plus a variant that simulates rows written by an older table schema.
private MysqlSpecStore specStore;
private MysqlSpecStore oldSpecStore;
// NOTE(review): the FlowId values ("fg*" passed as flow *name*, "fn*" as flow *group*) look
// swapped relative to the FLOW_GROUP_KEY/FLOW_NAME_KEY values set in the configs in setUp()
// below — harmless for these tests, but confirm whether that is intended.
private final URI uri1 = FlowSpec.Utils.createFlowSpecUri(new FlowId().setFlowName("fg1").setFlowGroup("fn1"));
private final URI uri2 = FlowSpec.Utils.createFlowSpecUri(new FlowId().setFlowName("fg2").setFlowGroup("fn2"));
private final URI uri3 = FlowSpec.Utils.createFlowSpecUri(new FlowId().setFlowName("fg3").setFlowGroup("fn3"));
private final URI uri4 = FlowSpec.Utils.createFlowSpecUri(new FlowId().setFlowName("fg4").setFlowGroup("fn4"));
private FlowSpec flowSpec1, flowSpec2, flowSpec3, flowSpec4;
// Explicit constructor exists only to declare the checked exception thrown by the
// field initializers above (createFlowSpecUri may throw URISyntaxException).
public MysqlSpecStoreTest()
throws URISyntaxException { // (based on `uri1` and other initializations just above)
}
/**
 * Creates the two spec stores against a fresh embedded DB and builds the four FlowSpec
 * fixtures. flowSpec3 is later serialized corrupted (see {@link TestSpecSerDe});
 * flowSpec4 is the only one with an owning group and without the "filter.this.flow" key.
 */
@BeforeClass
public void setUp() throws Exception {
ITestMetastoreDatabase testDb = TestMetastoreDatabaseFactory.get();
Config config = ConfigBuilder.create()
.addPrimitive(ConfigurationKeys.STATE_STORE_DB_URL_KEY, testDb.getJdbcUrl())
.addPrimitive(ConfigurationKeys.STATE_STORE_DB_USER_KEY, USER)
.addPrimitive(ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY, PASSWORD)
.addPrimitive(ConfigurationKeys.STATE_STORE_DB_TABLE_KEY, TABLE)
.build();
this.specStore = new MysqlSpecStore(config, new TestSpecSerDe());
this.oldSpecStore = new OldSpecStore(config, new TestSpecSerDe());
flowSpec1 = FlowSpec.builder(this.uri1)
.withConfig(ConfigBuilder.create()
.addPrimitive("key", "value")
.addPrimitive("key3", "value3")
.addPrimitive("filter.this.flow", true)
.addPrimitive("config.with.dot", "value4")
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg1")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn1").build())
.withDescription("Test flow spec")
.withVersion("Test version")
.build();
flowSpec2 = FlowSpec.builder(this.uri2)
.withConfig(ConfigBuilder.create().addPrimitive("converter", "value1,value2,value3")
.addPrimitive("key3", "value3")
.addPrimitive("filter.this.flow", true)
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg2")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn2").build())
.withDescription("Test flow spec 2")
.withVersion("Test version 2")
.build();
flowSpec3 = FlowSpec.builder(this.uri3)
.withConfig(ConfigBuilder.create().addPrimitive("key3", "value3")
.addPrimitive("filter.this.flow", true)
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg3")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn3").build())
.withDescription("Test flow spec 3")
.withVersion("Test version 3")
.build();
flowSpec4 = FlowSpec.builder(this.uri4)
.withConfig(ConfigBuilder.create().addPrimitive("key4", "value4")
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg4")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn4")
.addPrimitive(ConfigurationKeys.FLOW_OWNING_GROUP_KEY, "owningGroup4").build())
.withDescription("Test flow spec 4")
.withVersion("Test version 4")
.build();
}
@Test(expectedExceptions = IOException.class)
public void testSpecSearch() throws Exception {
// empty FlowSpecSearchObject should throw an error
FlowSpecSearchObject flowSpecSearchObject = FlowSpecSearchObject.builder().build();
flowSpecSearchObject.augmentBaseGetStatement("SELECT * FROM Dummy WHERE ");
}
// Inserts flowSpec1, flowSpec2 and flowSpec4; later tests depend on exactly these 3 rows.
@Test
public void testAddSpec() throws Exception {
this.specStore.addSpec(this.flowSpec1);
this.specStore.addSpec(this.flowSpec2);
this.specStore.addSpec(this.flowSpec4);
Assert.assertEquals(this.specStore.getSize(), 3);
Assert.assertTrue(this.specStore.exists(this.uri1));
Assert.assertTrue(this.specStore.exists(this.uri2));
Assert.assertTrue(this.specStore.exists(this.uri4));
Assert.assertFalse(this.specStore.exists(URI.create("dummy")));
}
// Exercises point lookup, full listing, URI iteration, and the various search criteria
// (flow group/name, property filters with and without values, dotted keys, owning group).
@Test (dependsOnMethods = "testAddSpec")
public void testGetSpec() throws Exception {
FlowSpec result = (FlowSpec) this.specStore.getSpec(this.uri1);
removeModificationTimestampFromSpecs(result);
Assert.assertEquals(result, this.flowSpec1);
Collection<Spec> specs = this.specStore.getSpecs();
Assert.assertEquals(specs.size(), 3);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Iterator<URI> uris = this.specStore.getSpecURIs();
Assert.assertTrue(Iterators.contains(uris, this.uri1));
Assert.assertTrue(Iterators.contains(uris, this.uri2));
FlowSpecSearchObject flowSpecSearchObject = FlowSpecSearchObject.builder().flowGroup("fg1").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec1));
flowSpecSearchObject = FlowSpecSearchObject.builder().flowName("fn2").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec2));
// Mismatched group/name combination matches nothing.
flowSpecSearchObject = FlowSpecSearchObject.builder().flowName("fg1").flowGroup("fn2").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 0);
flowSpecSearchObject = FlowSpecSearchObject.builder().propertyFilter("key=value").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec1));
// "converter=value2" matches a value inside the comma-separated list of flowSpec2.
flowSpecSearchObject = FlowSpecSearchObject.builder().propertyFilter("converter=value2").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec2));
// A key-only filter (no "=value") matches every spec that has that key.
flowSpecSearchObject = FlowSpecSearchObject.builder().propertyFilter("key3").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 2);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
flowSpecSearchObject = FlowSpecSearchObject.builder().propertyFilter("config.with.dot=value4").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec1));
flowSpecSearchObject = FlowSpecSearchObject.builder().owningGroup("owningGroup4").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec4));
}
// Inserts two additional specs tagged "dr" and verifies tag-based URI retrieval.
@Test (dependsOnMethods = "testGetSpec")
public void testGetSpecWithTag() throws Exception {
//Creating and inserting flowspecs with tags
URI uri5 = URI.create("flowspec5");
FlowSpec flowSpec5 = FlowSpec.builder(uri5)
.withConfig(ConfigBuilder.create()
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg5")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn5")
.addPrimitive("key5", "value5").build())
.withDescription("Test flow spec 5")
.withVersion("Test version 5")
.build();
URI uri6 = URI.create("flowspec6");
FlowSpec flowSpec6 = FlowSpec.builder(uri6)
.withConfig(ConfigBuilder.create()
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg6")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn6")
.addPrimitive("key6", "value6").build())
.withDescription("Test flow spec 6")
.withVersion("Test version 6")
.build();
this.specStore.addSpec(flowSpec5, "dr");
this.specStore.addSpec(flowSpec6, "dr");
Assert.assertTrue(this.specStore.exists(uri5));
Assert.assertTrue(this.specStore.exists(uri6));
List<URI> result = new ArrayList<>();
this.specStore.getSpecURIsWithTag("dr").forEachRemaining(result::add);
Assert.assertEquals(result.size(), 2);
}
@Test (dependsOnMethods = "testGetSpec")
public void testGetFilterSpecPaginate() throws Exception {
/**
 * Sorted order of the specStore configurations is flowSpec1, flowSpec2.
 * flowSpec3 is not included as it is a 'corrupted' flowspec
 * flowSpec4 is not included as it doesn't have the 'filter.this.flow' property
 * Start is the offset of the first configuration to return
 * Count is the total number of configurations to return
 * PropertyFilter is the property to filter by
 */
// Start of 0 and count of 1 means start from index 0, and return one configuration only
FlowSpecSearchObject flowSpecSearchObject = FlowSpecSearchObject.builder().start(0).count(1).propertyFilter("filter.this.flow").build();
Collection<Spec> specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertFalse(specs.contains(this.flowSpec2));
Assert.assertFalse(specs.contains(this.flowSpec4));
// Start of 1 and count of 1 means start from index 1, and return one configuration only
flowSpecSearchObject = FlowSpecSearchObject.builder().start(1).count(1).propertyFilter("filter.this.flow").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertFalse(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertFalse(specs.contains(this.flowSpec4));
/**
 * Start of 0 and count of 5 means start from index 0, and return five configuration only
 * Total of 3 flowSpecs in the DB, but flowSpec4 doesn't have 'filter.this.flow' filter so only returns 2 flowSpecs
 * flowSpec1 and flowSpec2 match all the criteria
 */
flowSpecSearchObject = FlowSpecSearchObject.builder().start(0).count(5).propertyFilter("filter.this.flow").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 2);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertFalse(specs.contains(this.flowSpec4));
}
/**
 * This method is used for testing to remove the `modification_timestamp` key from the FlowSpec config field for
 * only for testing purposes because the equality check between FlowSpec created for testing and retrieved from the
 * store will not be equal. In practice, we would never encounter this issue as we only compare specs obtained from
 * the store with the key mentioned.
 */
public static void removeModificationTimestampFromSpecs(Spec spec) {
((FlowSpec) spec).getConfigAsProperties().remove(FlowSpec.MODIFICATION_TIME_KEY);
}
@Test (dependsOnMethods = "testGetSpec")
public void testGetAllSpecPaginate() throws Exception {
/**
 * Sorted order of the specStore configurations is flowSpec1, flowSpec2, flowSpec4
 */
// Return all flowSpecs from index 0 to 9. Total of 3 flowSpecs only so return all 3 flowSpecs
Collection<Spec> specs = this.specStore.getSpecsPaginated(0,10);
specs.forEach(spec -> removeModificationTimestampFromSpecs(spec));
Assert.assertEquals(specs.size(), 3);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertTrue(specs.contains(this.flowSpec4));
// Return all flowSpecs using the default get all specs function. Testing default functionality of returning everything
specs = this.specStore.getSpecs();
specs.forEach(spec -> removeModificationTimestampFromSpecs(spec));
Assert.assertEquals(specs.size(), 3);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertTrue(specs.contains(this.flowSpec4));
// Return all flowSpecs from index 0 to 2 - 1. Total of 3 flowSpecs, only return first two.
specs = this.specStore.getSpecsPaginated(0,2);
specs.forEach(spec -> removeModificationTimestampFromSpecs(spec));
Assert.assertEquals(specs.size(), 2);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertFalse(specs.contains(this.flowSpec4));
// Return 0 flowSpecs when batch size is 0.
specs = this.specStore.getSpecsPaginated(2,0);
Assert.assertEquals(specs.size(), 0);
// Return 0 flowSpecs when start offset is past the end
specs = this.specStore.getSpecsPaginated(3,1);
Assert.assertEquals(specs.size(), 0);
// Check that we throw an error for incorrect inputs
Assert.assertThrows(IllegalArgumentException.class, () -> this.specStore.getSpecsPaginated(-1, 2));
Assert.assertThrows(IllegalArgumentException.class, () -> this.specStore.getSpecsPaginated(2, -4));
}
// flowSpec3 is corrupted during serialization by TestSpecSerDe, so addSpec must fail.
@Test (expectedExceptions = {IOException.class})
public void testGetCorruptedSpec() throws Exception {
this.specStore.addSpec(this.flowSpec3);
}
// Size 5 here = the 3 specs from testAddSpec plus the 2 tagged specs from testGetSpecWithTag.
@Test (dependsOnMethods = "testGetSpecWithTag")
public void testDeleteSpec() throws Exception {
Assert.assertEquals(this.specStore.getSize(), 5);
this.specStore.deleteSpec(this.uri1);
Assert.assertEquals(this.specStore.getSize(), 4);
Assert.assertFalse(this.specStore.exists(this.uri1));
}
// Writes a row through OldSpecStore (spec_json column left NULL) and verifies the
// current store can still deserialize it from the legacy column.
@Test (dependsOnMethods = "testDeleteSpec")
public void testReadOldColumn() throws Exception {
this.oldSpecStore.addSpec(this.flowSpec1);
FlowSpec spec = (FlowSpec) this.specStore.getSpec(this.uri1);
removeModificationTimestampFromSpecs(spec);
Assert.assertEquals(spec, this.flowSpec1);
}
/**
 * A {@link MysqlSpecStore} which does not write into the new spec_json column
 * to simulate behavior of a table with old data.
 */
public static class OldSpecStore extends MysqlSpecStore {
public OldSpecStore(Config config, SpecSerDe specSerDe) throws IOException {
super(config, specSerDe);
}
@Override
public void addSpec(Spec spec, String tagValue) throws IOException {
try (Connection connection = this.dataSource.getConnection();
PreparedStatement statement = connection.prepareStatement(this.sqlStatements.insertStatement)) {
this.sqlStatements.completeInsertPreparedStatement(statement, spec, tagValue);
// Overwrite the 4th parameter (spec_json) with NULL to mimic pre-migration rows.
statement.setString(4, null);
statement.executeUpdate();
connection.commit();
} catch (SQLException | SpecSerDeException e) {
throw new IOException(e);
}
}
}
/**
 * SerDe that deliberately corrupts the serialized bytes of the spec at {@code uri3}
 * (by reversing them) so tests can exercise the deserialization-failure path.
 */
public class TestSpecSerDe extends GsonFlowSpecSerDe {
@Override
public byte[] serialize(Spec spec) throws SpecSerDeException {
byte[] bytes = super.serialize(spec);
// Reverse bytes to simulate corrupted Spec
if (spec.getUri().equals(uri3)) {
ArrayUtils.reverse(bytes);
}
return bytes;
}
}
}
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/spec_store/MysqlSpecStoreWithUpdateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.spec_store;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang3.ArrayUtils;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.Iterators;
import com.typesafe.config.Config;
import org.apache.gobblin.config.ConfigBuilder;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metastore.testing.ITestMetastoreDatabase;
import org.apache.gobblin.metastore.testing.TestMetastoreDatabaseFactory;
import org.apache.gobblin.runtime.api.FlowSpec;
import org.apache.gobblin.runtime.api.FlowSpecSearchObject;
import org.apache.gobblin.runtime.api.Spec;
import org.apache.gobblin.runtime.api.SpecSerDe;
import org.apache.gobblin.runtime.api.SpecSerDeException;
import org.apache.gobblin.runtime.spec_serde.GsonFlowSpecSerDe;
import org.apache.gobblin.service.FlowId;
import static org.apache.gobblin.service.ServiceConfigKeys.FLOW_DESTINATION_IDENTIFIER_KEY;
import static org.apache.gobblin.service.ServiceConfigKeys.FLOW_SOURCE_IDENTIFIER_KEY;
public class MysqlSpecStoreWithUpdateTest {
private static final String USER = "testUser";
private static final String PASSWORD = "testPassword";
private static final String TABLE = "spec_store";
private MysqlSpecStoreWithUpdate specStore;
private MysqlSpecStore oldSpecStore;
private final URI uri1 = FlowSpec.Utils.createFlowSpecUri(new FlowId().setFlowName("fg1").setFlowGroup("fn1"));
private final URI uri2 = FlowSpec.Utils.createFlowSpecUri(new FlowId().setFlowName("fg2").setFlowGroup("fn2"));
private final URI uri3 = FlowSpec.Utils.createFlowSpecUri(new FlowId().setFlowName("fg3").setFlowGroup("fn3"));
private final URI uri4 = FlowSpec.Utils.createFlowSpecUri(new FlowId().setFlowName("fg4").setFlowGroup("fn4"));
private FlowSpec flowSpec1, flowSpec2, flowSpec3, flowSpec4, flowSpec4_update;
public MysqlSpecStoreWithUpdateTest()
throws URISyntaxException { // (based on `uri1` and other initializations just above)
}
@BeforeClass
public void setUp() throws Exception {
ITestMetastoreDatabase testDb = TestMetastoreDatabaseFactory.get();
Config config = ConfigBuilder.create()
.addPrimitive(ConfigurationKeys.STATE_STORE_DB_URL_KEY, testDb.getJdbcUrl())
.addPrimitive(ConfigurationKeys.STATE_STORE_DB_USER_KEY, USER)
.addPrimitive(ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY, PASSWORD)
.addPrimitive(ConfigurationKeys.STATE_STORE_DB_TABLE_KEY, TABLE)
.build();
this.specStore = new MysqlSpecStoreWithUpdate(config, new TestSpecSerDe());
this.oldSpecStore = new OldSpecStore(config, new TestSpecSerDe());
flowSpec1 = FlowSpec.builder(this.uri1)
.withConfig(ConfigBuilder.create()
.addPrimitive("key", "value")
.addPrimitive("key3", "value3")
.addPrimitive("filter.this.flow", true)
.addPrimitive("config.with.dot", "value4")
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg1")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn1").build())
.withDescription("Test flow spec")
.withVersion("Test version")
.build();
flowSpec2 = FlowSpec.builder(this.uri2)
.withConfig(ConfigBuilder.create().addPrimitive("converter", "value1,value2,value3")
.addPrimitive("key3", "value3")
.addPrimitive("filter.this.flow", true)
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg2")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn2").build())
.withDescription("Test flow spec 2")
.withVersion("Test version 2")
.build();
flowSpec3 = FlowSpec.builder(this.uri3)
.withConfig(ConfigBuilder.create().addPrimitive("key3", "value3")
.addPrimitive("filter.this.flow", true)
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg3")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn3").build())
.withDescription("Test flow spec 3")
.withVersion("Test version 3")
.build();
flowSpec4 = FlowSpec.builder(this.uri4)
.withConfig(ConfigBuilder.create().addPrimitive("key4", "value4")
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg4")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn4")
.addPrimitive(ConfigurationKeys.FLOW_OWNING_GROUP_KEY, "owningGroup4").build())
.withDescription("Test flow spec 4")
.withVersion("Test version 4")
.build();
flowSpec4_update = FlowSpec.builder(this.uri4)
.withConfig(ConfigBuilder.create().addPrimitive("key4", "value4_update")
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg4")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn4")
.addPrimitive(ConfigurationKeys.FLOW_OWNING_GROUP_KEY, "owningGroup4").build())
.withDescription("Test flow spec 4")
.withVersion("Test version 4")
.build();
}
@Test(expectedExceptions = IOException.class)
public void testSpecSearch() throws Exception {
// empty FlowSpecSearchObject should throw an error
FlowSpecSearchObject flowSpecSearchObject = FlowSpecSearchObject.builder().build();
flowSpecSearchObject.augmentBaseGetStatement("SELECT * FROM Dummy WHERE ");
}
@Test
public void testAddSpec() throws Exception {
this.specStore.addSpec(this.flowSpec1);
this.specStore.addSpec(this.flowSpec2);
this.specStore.addSpec(this.flowSpec4);
Assert.assertEquals(this.specStore.getSize(), 3);
Assert.assertTrue(this.specStore.exists(this.uri1));
Assert.assertTrue(this.specStore.exists(this.uri2));
Assert.assertTrue(this.specStore.exists(this.uri4));
Assert.expectThrows(Exception.class, () -> this.specStore.addSpec(this.flowSpec1));
Assert.assertFalse(this.specStore.exists(URI.create("dummy")));
}
@Test (dependsOnMethods = "testAddSpec")
public void testGetSpec() throws Exception {
FlowSpec result = (FlowSpec) this.specStore.getSpec(this.uri1);
MysqlSpecStoreTest.removeModificationTimestampFromSpecs(result);
Assert.assertEquals(result, this.flowSpec1);
Collection<Spec> specs = this.specStore.getSpecs();
Assert.assertEquals(specs.size(), 3);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Iterator<URI> uris = this.specStore.getSpecURIs();
Assert.assertTrue(Iterators.contains(uris, this.uri1));
Assert.assertTrue(Iterators.contains(uris, this.uri2));
FlowSpecSearchObject flowSpecSearchObject = FlowSpecSearchObject.builder().flowGroup("fg1").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec1));
flowSpecSearchObject = FlowSpecSearchObject.builder().flowName("fn2").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec2));
flowSpecSearchObject = FlowSpecSearchObject.builder().flowName("fg1").flowGroup("fn2").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 0);
flowSpecSearchObject = FlowSpecSearchObject.builder().propertyFilter("key=value").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec1));
flowSpecSearchObject = FlowSpecSearchObject.builder().propertyFilter("converter=value2").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec2));
flowSpecSearchObject = FlowSpecSearchObject.builder().propertyFilter("key3").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 2);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
flowSpecSearchObject = FlowSpecSearchObject.builder().propertyFilter("config.with.dot=value4").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec1));
flowSpecSearchObject = FlowSpecSearchObject.builder().owningGroup("owningGroup4").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec4));
}
@Test (dependsOnMethods = "testGetSpec")
public void testGetSpecWithTag() throws Exception {
//Creating and inserting flowspecs with tags
URI uri5 = URI.create("flowspec5");
FlowSpec flowSpec5 = FlowSpec.builder(uri5)
.withConfig(ConfigBuilder.create()
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg5")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn5")
.addPrimitive("key5", "value5").build())
.withDescription("Test flow spec 5")
.withVersion("Test version 5")
.build();
URI uri6 = URI.create("flowspec6");
FlowSpec flowSpec6 = FlowSpec.builder(uri6)
.withConfig(ConfigBuilder.create()
.addPrimitive(FLOW_SOURCE_IDENTIFIER_KEY, "source")
.addPrimitive(FLOW_DESTINATION_IDENTIFIER_KEY, "destination")
.addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "fg6")
.addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "fn6")
.addPrimitive("key6", "value6").build())
.withDescription("Test flow spec 6")
.withVersion("Test version 6")
.build();
this.specStore.addSpec(flowSpec5, "dr");
this.specStore.addSpec(flowSpec6, "dr");
Assert.assertTrue(this.specStore.exists(uri5));
Assert.assertTrue(this.specStore.exists(uri6));
List<URI> result = new ArrayList<>();
this.specStore.getSpecURIsWithTag("dr").forEachRemaining(result::add);
Assert.assertEquals(result.size(), 2);
}
@Test (dependsOnMethods = "testGetSpec")
public void testGetFilterSpecPaginate() throws Exception {
/**
* Sorted order of the specStore configurations is flowSpec1, flowSpec2.
* flowSpec3 is not included as it is a 'corrupted' flowspec
* flowSpec4 is not included as it doesn't have the 'filter.this.flow' property
* Start is the offset of the first configuration to return
* Count is the total number of configurations to return
* PropertyFilter is the property to filter by
*/
// Start of 0 and count of 1 means start from index 0, and return one configuration only
FlowSpecSearchObject flowSpecSearchObject = FlowSpecSearchObject.builder().start(0).count(1).propertyFilter("filter.this.flow").build();
Collection<Spec> specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertFalse(specs.contains(this.flowSpec2));
Assert.assertFalse(specs.contains(this.flowSpec4));
// Start of 1 and count of 1 means start from index 1, and return one configuration only
flowSpecSearchObject = FlowSpecSearchObject.builder().start(1).count(1).propertyFilter("filter.this.flow").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 1);
Assert.assertFalse(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertFalse(specs.contains(this.flowSpec4));
/**
* Start of 0 and count of 5 means start from index 0, and return five configuration only
* Total of 3 flowSpecs in the DB, but flowSpec4 doesn't have 'filter.this.flow' filter so only returns 2 flowSpecs
* flowSpec1 and flowSpec2 match all the criteria
*/
flowSpecSearchObject = FlowSpecSearchObject.builder().start(0).count(5).propertyFilter("filter.this.flow").build();
specs = this.specStore.getSpecs(flowSpecSearchObject);
Assert.assertEquals(specs.size(), 2);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertFalse(specs.contains(this.flowSpec4));
}
@Test (dependsOnMethods = "testGetSpec")
public void testUpdate() throws Exception{
long version = System.currentTimeMillis() /1000;
this.specStore.updateSpec(this.flowSpec4_update);
FlowSpec spec = (FlowSpec) this.specStore.getSpec(this.uri4);
MysqlSpecStoreTest.removeModificationTimestampFromSpecs(spec);
Assert.assertEquals(spec, flowSpec4_update);
Assert.expectThrows(IOException.class, () -> this.specStore.updateSpec(flowSpec4, version));
}
@Test (dependsOnMethods = "testGetSpec")
public void testGetAllSpecPaginate() throws Exception {
/**
* Sorted order of the specStore configurations is flowSpec1, flowSpec2, flowSpec4
*/
// Return all flowSpecs from index 0 to 9. Total of 3 flowSpecs only so return all 3 flowSpecs
Collection<Spec> specs = this.specStore.getSpecsPaginated(0,10);
specs.forEach(spec -> MysqlSpecStoreTest.removeModificationTimestampFromSpecs(spec));
for (Spec spec: specs) {
System.out.println("test" + spec.getUri());
}
Assert.assertEquals(specs.size(), 3);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertTrue(specs.contains(this.flowSpec4));
// Return all flowSpecs using the default get all specs function. Testing default functionality of returning everything
specs = this.specStore.getSpecs();
Assert.assertEquals(specs.size(), 3);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertTrue(specs.contains(this.flowSpec4));
// Return all flowSpecs of index [0, 2). Total of 3 flowSpecs, only return first two.
specs = this.specStore.getSpecsPaginated(0,2);
specs.forEach(spec -> MysqlSpecStoreTest.removeModificationTimestampFromSpecs(spec));
Assert.assertEquals(specs.size(), 2);
Assert.assertTrue(specs.contains(this.flowSpec1));
Assert.assertTrue(specs.contains(this.flowSpec2));
Assert.assertFalse(specs.contains(this.flowSpec4));
// Check that we throw an error for incorrect inputs
Assert.assertThrows(IllegalArgumentException.class, () -> this.specStore.getSpecsPaginated(-1, -4));
}
@Test (expectedExceptions = {IOException.class})
public void testGetCorruptedSpec() throws Exception {
this.specStore.addSpec(this.flowSpec3);
}
@Test (dependsOnMethods = "testGetSpecWithTag")
public void testDeleteSpec() throws Exception {
Assert.assertEquals(this.specStore.getSize(), 5);
this.specStore.deleteSpec(this.uri1);
Assert.assertEquals(this.specStore.getSize(), 4);
Assert.assertFalse(this.specStore.exists(this.uri1));
}
@Test (dependsOnMethods = "testDeleteSpec")
public void testReadOldColumn() throws Exception {
this.oldSpecStore.addSpec(this.flowSpec1);
FlowSpec spec = (FlowSpec) this.specStore.getSpec(this.uri1);
MysqlSpecStoreTest.removeModificationTimestampFromSpecs(spec);
Assert.assertEquals(spec, this.flowSpec1);
}
/**
* A {@link MysqlSpecStore} which does not write into the new spec_json column
* to simulate behavior of a table with old data.
*/
public static class OldSpecStore extends MysqlSpecStore {
public OldSpecStore(Config config, SpecSerDe specSerDe) throws IOException {
super(config, specSerDe);
}
@Override
public void addSpec(Spec spec, String tagValue) throws IOException {
try (Connection connection = this.dataSource.getConnection();
PreparedStatement statement = connection.prepareStatement(this.sqlStatements.insertStatement)) {
this.sqlStatements.completeInsertPreparedStatement(statement, spec, tagValue);
statement.setString(4, null);
statement.executeUpdate();
connection.commit();
} catch (SQLException | SpecSerDeException e) {
throw new IOException(e);
}
}
}
public class TestSpecSerDe extends GsonFlowSpecSerDe {
@Override
public byte[] serialize(Spec spec) throws SpecSerDeException {
byte[] bytes = super.serialize(spec);
// Reverse bytes to simulate corrupted Spec
if (spec.getUri().equals(uri3)) {
ArrayUtils.reverse(bytes);
}
return bytes;
}
}
} | 1,281 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/spec_store/MysqlBaseSpecStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.spec_store;
import com.google.common.collect.Iterators;
import com.typesafe.config.Config;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.gobblin.config.ConfigBuilder;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metastore.testing.ITestMetastoreDatabase;
import org.apache.gobblin.metastore.testing.TestMetastoreDatabaseFactory;
import org.apache.gobblin.runtime.api.TopologySpec;
import org.apache.gobblin.runtime.api.FlowSpecSearchObject;
import org.apache.gobblin.runtime.api.Spec;
import org.apache.gobblin.runtime.spec_executorInstance.MockedSpecExecutor;
import org.apache.gobblin.runtime.spec_serde.JavaSpecSerDe;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
public class MysqlBaseSpecStoreTest {
private static final String USER = "testUser";
private static final String PASSWORD = "testPassword";
private static final String TABLE = "base_spec_store";
private MysqlBaseSpecStore specStore;
private final URI uri1 = new URI(new TopologySpec.Builder().getDefaultTopologyCatalogURI().toString() + "1");
private final URI uri2 = new URI(new TopologySpec.Builder().getDefaultTopologyCatalogURI().toString() + "2");
private TopologySpec topoSpec1, topoSpec2;
public MysqlBaseSpecStoreTest()
throws URISyntaxException { // (based on `uri1` and other initializations just above)
}
@BeforeClass
public void setUp() throws Exception {
ITestMetastoreDatabase testDb = TestMetastoreDatabaseFactory.get();
// prefix keys to demonstrate disambiguation mechanism used to side-step intentially-sabatoged non-prefixed, 'fallback'
Config config = ConfigBuilder.create()
.addPrimitive(ConfigurationKeys.STATE_STORE_DB_URL_KEY, " SABATOGE! !" + testDb.getJdbcUrl())
.addPrimitive(MysqlBaseSpecStore.CONFIG_PREFIX + "." + ConfigurationKeys.STATE_STORE_DB_URL_KEY, testDb.getJdbcUrl())
.addPrimitive(MysqlBaseSpecStore.CONFIG_PREFIX + "." + ConfigurationKeys.STATE_STORE_DB_USER_KEY, USER)
.addPrimitive(MysqlBaseSpecStore.CONFIG_PREFIX + "." + ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY, PASSWORD)
.addPrimitive(MysqlBaseSpecStore.CONFIG_PREFIX + "." + ConfigurationKeys.STATE_STORE_DB_TABLE_KEY, TABLE)
.build();
this.specStore = new MysqlBaseSpecStore(config, new JavaSpecSerDe());
topoSpec1 = new TopologySpec.Builder(this.uri1)
.withConfig(ConfigBuilder.create()
.addPrimitive("key", "value")
.addPrimitive("key3", "value3")
.addPrimitive("config.with.dot", "value4").build())
.withDescription("Test1")
.withVersion("Test version")
.withSpecExecutor(MockedSpecExecutor.createDummySpecExecutor(new URI("execA")))
.build();
topoSpec2 = new TopologySpec.Builder(this.uri2)
.withConfig(ConfigBuilder.create().addPrimitive("converter", "value1,value2,value3")
.addPrimitive("key3", "value3").build())
.withDescription("Test2")
.withVersion("Test version 2")
.withSpecExecutor(MockedSpecExecutor.createDummySpecExecutor(new URI("execB")))
.build();
}
@Test(expectedExceptions = UnsupportedOperationException.class)
public void testSpecSearchUnsupported() throws Exception {
FlowSpecSearchObject flowSpecSearchObject = FlowSpecSearchObject.builder().build();
Collection<Spec> specs = this.specStore.getSpecs(flowSpecSearchObject);
}
@Test
public void testAddSpec() throws Exception {
this.specStore.addSpec(this.topoSpec1);
this.specStore.addSpec(this.topoSpec2);
Assert.assertEquals(this.specStore.getSize(), 2);
Assert.assertTrue(this.specStore.exists(this.uri1));
Assert.assertTrue(this.specStore.exists(this.uri2));
Assert.assertFalse(this.specStore.exists(URI.create("dummy")));
}
@Test (dependsOnMethods = "testAddSpec")
public void testGetSpec() throws Exception {
TopologySpec result = (TopologySpec) this.specStore.getSpec(this.uri1);
Assert.assertEquals(result, this.topoSpec1);
Collection<Spec> specs = this.specStore.getSpecs();
Assert.assertEquals(specs.size(), 2);
Assert.assertTrue(specs.contains(this.topoSpec1));
Assert.assertTrue(specs.contains(this.topoSpec1));
Iterator<URI> uris = this.specStore.getSpecURIs();
Assert.assertTrue(Iterators.contains(uris, this.uri1));
Assert.assertTrue(Iterators.contains(uris, this.uri2));
}
@Test (dependsOnMethods = "testGetSpec")
public void testGetSpecWithTag() throws Exception {
//Creating and inserting specs with tags
URI uri5 = URI.create("topospec5");
TopologySpec topoSpec5 = new TopologySpec.Builder(uri5)
.withConfig(ConfigBuilder.create()
.addPrimitive("key5", "value5").build())
.withDescription("Test5")
.withVersion("Test version 5")
.withSpecExecutor(MockedSpecExecutor.createDummySpecExecutor(new URI("execE")))
.build();
URI uri6 = URI.create("topospec6");
TopologySpec topoSpec6 = new TopologySpec.Builder(uri6)
.withConfig(ConfigBuilder.create()
.addPrimitive("key6", "value6").build())
.withDescription("Test6")
.withVersion("Test version 6")
.withSpecExecutor(MockedSpecExecutor.createDummySpecExecutor(new URI("execF")))
.build();
this.specStore.addSpec(topoSpec5, "dr");
this.specStore.addSpec(topoSpec6, "dr");
Assert.assertTrue(this.specStore.exists(uri5));
Assert.assertTrue(this.specStore.exists(uri6));
List<URI> result = new ArrayList<>();
this.specStore.getSpecURIsWithTag("dr").forEachRemaining(result::add);
Assert.assertEquals(result.size(), 2);
}
@Test (dependsOnMethods = "testGetSpecWithTag")
public void testDeleteSpec() throws Exception {
Assert.assertEquals(this.specStore.getSize(), 4);
this.specStore.deleteSpec(this.uri1);
Assert.assertEquals(this.specStore.getSize(), 3);
Assert.assertFalse(this.specStore.exists(this.uri1));
}
} | 1,282 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/spec_store/FSSpecStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.spec_store;
import com.google.common.io.ByteStreams;
import com.google.common.io.Files;
import com.typesafe.config.Config;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.gobblin.runtime.api.Spec;
import org.apache.gobblin.runtime.api.SpecSerDe;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import static org.apache.gobblin.runtime.spec_catalog.FlowCatalogTest.*;
/**
 * Tests for {@link FSSpecStore}: path/URI round-tripping, robustness to specs that
 * fail deserialization, and spec-URI enumeration on a local filesystem directory.
 */
public class FSSpecStoreTest {

  /** Round-trips a URI through getPathForURI/getURIFromPath under a given root. */
  @Test
  public void testPathConversion() throws Exception {
    Properties properties = new Properties();
    File tmpDir = Files.createTempDir();
    properties.setProperty(FSSpecStore.SPECSTORE_FS_DIR_KEY, tmpDir.getAbsolutePath());
    SpecSerDe specSerDe = Mockito.mock(SpecSerDe.class);
    FSSpecStore fsSpecStore = new FSSpecStore(ConfigUtils.propertiesToConfig(properties), specSerDe);
    Path rootPath = new Path("/a/b/c");
    URI uri = URI.create("ddd");
    Assert.assertEquals(fsSpecStore.getURIFromPath(fsSpecStore.getPathForURI(rootPath, uri, ""), rootPath), uri);
  }

  /**
   * Make sure that when one spec fails to be deserialized, the rest of the specs in
   * the specStore can still be taken care of.
   */
  @Test
  public void testGetSpecRobustness() throws Exception {
    File specDir = Files.createTempDir();

    Properties properties = new Properties();
    properties.setProperty(FSSpecStore.SPECSTORE_FS_DIR_KEY, specDir.getAbsolutePath());
    SpecSerDe serde = Mockito.mock(SpecSerDe.class);
    TestFsSpecStore fsSpecStore = new TestFsSpecStore(ConfigUtils.propertiesToConfig(properties), serde);

    // Version is specified as 0,1,2
    // "fail" and "serDeFail" in a file name trigger the failure paths in
    // TestFsSpecStore.readSpecFromFile below.
    File specFileFail = new File(specDir, "spec_fail");
    Assert.assertTrue(specFileFail.createNewFile());
    File specFile1 = new File(specDir, "spec0");
    Assert.assertTrue(specFile1.createNewFile());
    File specFile2 = new File(specDir, "spec1");
    Assert.assertTrue(specFile2.createNewFile());
    File specFile3 = new File(specDir, "serDeFail");
    Assert.assertTrue(specFile3.createNewFile());
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Assert.assertEquals(fs.getFileStatus(new Path(specFile3.getAbsolutePath())).getLen(), 0);

    Collection<Spec> specList = fsSpecStore.getSpecs();
    // The fail and serDe datasets wouldn't survive
    Assert.assertEquals(specList.size(), 2);
    for (Spec spec: specList) {
      Assert.assertFalse(spec.getDescription().contains("spec_fail"));
      Assert.assertFalse(spec.getDescription().contains("serDeFail"));
    }
  }

  /**
   * FSSpecStore variant whose readSpecFromFile deliberately fails for file names
   * containing "fail" (plain IOException) or "serDeFail" (deserialization error),
   * and returns a fresh flow spec otherwise.
   */
  class TestFsSpecStore extends FSSpecStore {
    public TestFsSpecStore(Config sysConfig, SpecSerDe specSerDe) throws IOException {
      super(sysConfig, specSerDe);
    }

    @Override
    protected Spec readSpecFromFile(Path path) throws IOException {
      if (path.getName().contains("fail")) {
        throw new IOException("Mean to fail in the test");
      } else if (path.getName().contains("serDeFail")) {
        // Simulate a SerDe failure: deserializing an empty file raises an exception.
        FSDataInputStream fis = fs.open(path);
        SerializationUtils.deserialize(ByteStreams.toByteArray(fis));

        // This line should never be reached since we generate SerDe Exception on purpose.
        Assert.fail();
        return null;
      }
      else return initFlowSpec(Files.createTempDir().getAbsolutePath());
    }
  }

  /** Verifies that getSpecURIs/getSize reflect spec files present in the store directory. */
  @Test
  public void testGetSpecURI() throws Exception {
    File specDir = Files.createTempDir();
    Properties properties = new Properties();
    properties.setProperty(FSSpecStore.SPECSTORE_FS_DIR_KEY, specDir.getAbsolutePath());
    SpecSerDe serde = Mockito.mock(SpecSerDe.class);
    FSSpecStore fsSpecStore = new FSSpecStore(ConfigUtils.propertiesToConfig(properties), serde);

    URI specURI0 = URI.create("spec0");
    URI specURI1 = URI.create("spec1");
    URI specURI2 = URI.create("spec2");

    File specFile1 = new File(specDir, "spec0");
    Assert.assertTrue(specFile1.createNewFile());
    File specFile2 = new File(specDir, "spec1");
    Assert.assertTrue(specFile2.createNewFile());
    File specFile3 = new File(specDir, "spec2");
    Assert.assertTrue(specFile3.createNewFile());

    fsSpecStore.exists(specURI0);
    fsSpecStore.exists(specURI1);
    fsSpecStore.exists(specURI2);

    Iterator<URI> it = fsSpecStore.getSpecURIs();
    int count = 0;
    List<URI> result = new ArrayList<>();
    while (it.hasNext()) {
      count += 1 ;
      result.add(it.next());
    }
    Assert.assertEquals(count, 3);
    Assert.assertEquals(fsSpecStore.getSize(), 3);
    Assert.assertTrue(result.contains(specURI0));
    Assert.assertTrue(result.contains(specURI1));
    Assert.assertTrue(result.contains(specURI2));
  }
}
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/cli/JobStateStoreCliTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.cli;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.gobblin.config.ConfigBuilder;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.metastore.DatasetStateStore;
import org.apache.gobblin.metastore.MysqlStateStore;
import org.apache.gobblin.metastore.StateStore;
import org.apache.gobblin.metastore.testing.ITestMetastoreDatabase;
import org.apache.gobblin.metastore.testing.TestMetastoreDatabaseFactory;
import org.apache.gobblin.runtime.JobState;
import org.apache.gobblin.runtime.MysqlDatasetStateStore;
import org.apache.gobblin.runtime.TaskState;
import org.apache.gobblin.util.ClassAliasResolver;
import org.apache.gobblin.util.ConfigUtils;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@Test(groups = { "gobblin.runtime" })
public class JobStateStoreCliTest {
  private static final String TEST_STATE_STORE = "TestStateStore";
  private static final String TEST_JOB_NAME = "TestJob";
  private static final String TEST_JOB_NAME2 = "TestJob2";
  private static final String TEST_JOB_NAME3 = "TestJob3";
  private static final String TEST_JOB_ID = "TestJob1";
  private static final String TEST_TASK_ID_PREFIX = "TestTask-";
  private static final String TEST_USER = "testUser";
  private static final String TEST_PASSWORD = "testPassword";

  // State-store key under which every seeded job state is written and read back.
  // Using a single constant (instead of re-spelling the concatenation at each
  // call site, sometimes via an instance reference) guarantees put() and get()
  // always agree on the key.
  private static final String STATE_KEY =
      MysqlDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX
          + MysqlDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX;

  private StateStore<JobState> dbJobStateStore;
  // Kept so that the "mysql" DatasetStateStore factory path is exercised in setUp;
  // all reads in the tests go through dbJobStateStore.
  private DatasetStateStore<JobState.DatasetState> dbDatasetStateStore;
  private long startTime = System.currentTimeMillis();
  // Path of the temp properties file holding the state-store connection config for the CLI.
  private String configPath;
  private ITestMetastoreDatabase testMetastoreDatabase;
  File deleteFile;

  /**
   * Spins up an embedded metastore database, writes its connection settings to a
   * temp config file (read later by the CLI via {@code -sc}), and seeds three
   * committed job states under {@link #TEST_JOB_NAME}, {@link #TEST_JOB_NAME2}
   * and {@link #TEST_JOB_NAME3}.
   */
  @BeforeClass
  public void setUp() throws Exception {
    testMetastoreDatabase = TestMetastoreDatabaseFactory.get();
    String jdbcUrl = testMetastoreDatabase.getJdbcUrl();
    ConfigBuilder configBuilder = ConfigBuilder.create();
    HikariDataSource dataSource = new HikariDataSource();
    dataSource.setDriverClassName(ConfigurationKeys.DEFAULT_STATE_STORE_DB_JDBC_DRIVER);
    dataSource.setAutoCommit(false);
    dataSource.setJdbcUrl(jdbcUrl);
    dataSource.setUsername(TEST_USER);
    dataSource.setPassword(TEST_PASSWORD);
    dbJobStateStore = new MysqlStateStore<>(dataSource, TEST_STATE_STORE, false, JobState.class);
    configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_URL_KEY, jdbcUrl);
    configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_USER_KEY, TEST_USER);
    configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY, TEST_PASSWORD);
    configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_TYPE_KEY, "mysql");
    configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_TABLE_KEY, TEST_STATE_STORE);
    // store the config into a temp file to be read by cli
    File configFile = File.createTempFile("config.properties", null);
    configFile.deleteOnExit();
    configPath = configFile.getPath();
    // try-with-resources: the previous version leaked the stream if store() threw
    try (OutputStream output = new FileOutputStream(configPath)) {
      ConfigUtils.configToProperties(configBuilder.build()).store(output, "");
    }
    ClassAliasResolver<DatasetStateStore.Factory> resolver =
        new ClassAliasResolver<>(DatasetStateStore.Factory.class);
    DatasetStateStore.Factory stateStoreFactory =
        resolver.resolveClass("mysql").newInstance();
    dbDatasetStateStore = stateStoreFactory.createStateStore(configBuilder.build());
    // clear data that may have been left behind by a prior test run
    dbJobStateStore.delete(TEST_JOB_NAME);
    dbJobStateStore.delete(TEST_JOB_NAME2);
    dbJobStateStore.delete(TEST_JOB_NAME3); // fix: was missing, leaving stale state behind
    JobState jobState = new JobState(TEST_JOB_NAME, TEST_JOB_ID);
    jobState.setId(TEST_JOB_ID);
    jobState.setProp("foo", "bar");
    jobState.setState(JobState.RunningState.COMMITTED);
    jobState.setStartTime(this.startTime);
    jobState.setEndTime(this.startTime + 1000);
    jobState.setDuration(1000);
    for (int i = 0; i < 3; i++) {
      TaskState taskState = new TaskState();
      taskState.setJobId(TEST_JOB_ID);
      taskState.setTaskId(TEST_TASK_ID_PREFIX + i);
      taskState.setId(TEST_TASK_ID_PREFIX + i);
      taskState.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
      jobState.addTaskState(taskState);
    }
    // seed the same state under three distinct job names
    dbJobStateStore.put(TEST_JOB_NAME, STATE_KEY, jobState);
    jobState.setJobName(TEST_JOB_NAME2);
    dbJobStateStore.put(TEST_JOB_NAME2, STATE_KEY, jobState);
    jobState.setJobName(TEST_JOB_NAME3);
    dbJobStateStore.put(TEST_JOB_NAME3, STATE_KEY, jobState);
  }

  /**
   * Bulk delete ({@code -bd <file>}): jobs listed one-per-line in the file are
   * removed from the store; jobs not listed survive.
   */
  @Test
  public void testClBulkDelete() throws Exception {
    String deleteFileText = TEST_JOB_NAME + "\n" + TEST_JOB_NAME2;
    deleteFile = File.createTempFile("deleteFile.txt", null);
    deleteFile.deleteOnExit();
    // try-with-resources: the previous version leaked the stream if write() threw
    try (FileOutputStream outputStream = new FileOutputStream(deleteFile.getPath())) {
      outputStream.write(deleteFileText.getBytes());
    }
    JobStateStoreCLI cli = new JobStateStoreCLI();
    String[] args = {"job-state-store", "-sc", configPath, "-bd", deleteFile.getPath()};
    cli.run(args);
    JobState jobState = dbJobStateStore.get(TEST_JOB_NAME, STATE_KEY, TEST_JOB_ID);
    JobState jobState2 = dbJobStateStore.get(TEST_JOB_NAME2, STATE_KEY, TEST_JOB_ID);
    JobState jobState3 = dbJobStateStore.get(TEST_JOB_NAME3, STATE_KEY, TEST_JOB_ID);
    Assert.assertNull(jobState);
    Assert.assertNull(jobState2);
    Assert.assertNotNull(jobState3);
  }

  /**
   * Single delete ({@code -d -n <job>}): only the named job is removed.
   * Runs after the bulk-delete test so only TEST_JOB_NAME3 remains.
   */
  @Test(dependsOnMethods = "testClBulkDelete")
  public void testCliDeleteSingle() throws Exception {
    JobStateStoreCLI cli = new JobStateStoreCLI();
    String[] args = {"job-state-store", "-sc", configPath, "-d", "-n", TEST_JOB_NAME3};
    cli.run(args);
    JobState jobState = dbJobStateStore.get(TEST_JOB_NAME3, STATE_KEY, TEST_JOB_ID);
    Assert.assertNull(jobState);
  }
}
| 1,284 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/PackagedTemplatesJobCatalogDecoratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_catalog;
import java.net.URI;
import java.util.Collection;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.typesafe.config.Config;
import org.apache.gobblin.runtime.api.JobCatalogWithTemplates;
import org.apache.gobblin.runtime.api.JobTemplate;
import org.apache.gobblin.runtime.api.SpecNotFoundException;
import org.apache.gobblin.runtime.template.ResourceBasedJobTemplate;
public class PackagedTemplatesJobCatalogDecoratorTest {

  /**
   * Exercises the three URI schemes understood by the decorator: "class" (loads a
   * template class by FQCN), "resource" (loads a template from the classpath), and
   * anything else (delegated to the underlying catalog).
   */
  @Test
  public void test() throws Exception {
    JobCatalogWithTemplates fallback = Mockito.mock(JobCatalogWithTemplates.class);
    JobCatalogWithTemplates decorator = new PackagedTemplatesJobCatalogDecorator(fallback);

    // "class://<fqcn>" resolves to an instance of the named template class.
    URI classUri =
        new URI(PackagedTemplatesJobCatalogDecorator.CLASS + "://" + TestTemplate.class.getName());
    JobTemplate fromClass = decorator.getTemplate(classUri);
    Assert.assertEquals(fromClass.getClass(), TestTemplate.class);

    // An unknown class name is surfaced as a missing spec.
    expectSpecNotFound(decorator,
        new URI(PackagedTemplatesJobCatalogDecorator.CLASS + "://" + "non.existing.class"));

    // "resource:///<path>" loads a resource-backed template from the classpath.
    JobTemplate fromResource =
        decorator.getTemplate(new URI(PackagedTemplatesJobCatalogDecorator.RESOURCE + ":///templates/test.template"));
    Assert.assertEquals(fromResource.getClass(), ResourceBasedJobTemplate.class);
    Assert.assertEquals(fromResource.getRequiredConfigList().size(), 3);

    // Unrecognized schemes are delegated to the underlying catalog.
    URI unknownSchemeUri = new URI("scheme:///templates/test.template");
    expectSpecNotFound(decorator, unknownSchemeUri);
    Mockito.verify(fallback).getTemplate(unknownSchemeUri);
  }

  /** Asserts that looking up {@code templateUri} fails with {@link SpecNotFoundException}. */
  private static void expectSpecNotFound(JobCatalogWithTemplates catalog, URI templateUri)
      throws Exception {
    try {
      catalog.getTemplate(templateUri);
      Assert.fail();
    } catch (SpecNotFoundException expected) {
      // expected outcome
    }
  }

  /**
   * Minimal {@link JobTemplate} whose accessors all return null; it exists only so
   * the "class" scheme has a concrete class to instantiate.
   */
  public static class TestTemplate implements JobTemplate {
    @Override
    public URI getUri() {
      return null;
    }

    @Override
    public String getVersion() {
      return null;
    }

    @Override
    public String getDescription() {
      return null;
    }

    @Override
    public Config getRawTemplateConfig() {
      return null;
    }

    @Override
    public Collection<String> getRequiredConfigList() {
      return null;
    }

    @Override
    public Config getResolvedConfig(Config userConfig) {
      return null;
    }

    @Override
    public Collection<String> getDependencies() {
      return null;
    }
  }
}
| 1,285 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/TestInMemoryJobCatalog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_catalog;
import java.net.URI;
import java.util.concurrent.TimeUnit;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.base.Predicates;
import org.apache.gobblin.instrumented.GobblinMetricsKeys;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.test.MetricsAssert;
import org.apache.gobblin.runtime.api.JobCatalog;
import org.apache.gobblin.runtime.api.JobCatalogListener;
import org.apache.gobblin.runtime.api.JobSpec;
/** Unit tests for {@link InMemoryJobCatalog} */
public class TestInMemoryJobCatalog {

  /**
   * Verifies that a listener receives add/update/delete callbacks only while it is
   * registered: puts before addListener and removes after removeListener must not
   * notify, and removing an unknown URI triggers no callback at all.
   */
  @Test
  public void testCallbacks()
      throws Exception {
    InMemoryJobCatalog cat = new InMemoryJobCatalog();
    cat.startAsync();
    cat.awaitRunning(1, TimeUnit.SECONDS);
    JobCatalogListener l = Mockito.mock(JobCatalogListener.class);
    JobSpec js1_1 = JobSpec.builder("test:job1").withVersion("1").build();
    JobSpec js1_2 = JobSpec.builder("test:job1").withVersion("2").build();
    JobSpec js1_3 = JobSpec.builder("test:job1").withVersion("3").build();
    JobSpec js2 = JobSpec.builder("test:job2").withVersion("1").build();
    cat.put(js1_1);        // before addListener: no callback expected
    cat.addListener(l);    // listener is told about pre-existing js1_1 via onAddJob
    cat.put(js1_2);
    cat.put(js2);
    cat.put(js1_3);
    cat.remove(js2.getUri());
    cat.remove(new URI("test:dummy_job")); // unknown URI: no callback expected
    cat.removeListener(l);
    cat.remove(js1_3.getUri()); // after removeListener: no callback expected
    Mockito.verify(l).onAddJob(Mockito.eq(js1_1));
    Mockito.verify(l).onUpdateJob(Mockito.eq(js1_2));
    Mockito.verify(l).onAddJob(Mockito.eq(js2));
    Mockito.verify(l).onUpdateJob(Mockito.eq(js1_3));
    Mockito.verify(l).onDeleteJob(Mockito.eq(js2.getUri()), Mockito.eq(js2.getVersion()));
    Mockito.verifyNoMoreInteractions(l);
    cat.stopAsync();
    cat.awaitTerminated(1, TimeUnit.SECONDS);
  }

  /**
   * Verifies the catalog's standard metrics (active-job gauge plus add/update/delete
   * counters) and the GTE tracking events emitted for each add/update/delete.
   * The repeated assertion boilerplate is factored into
   * {@link #assertMetricCounts} and {@link #assertTrackingEvent}.
   */
  @SuppressWarnings("unchecked")
  @Test
  public void testMetrics() throws Exception {
    final Logger log = LoggerFactory.getLogger(getClass().getSimpleName() +".testMetrics");
    InMemoryJobCatalog cat = new InMemoryJobCatalog(Optional.of(log),
        Optional.<MetricContext>absent(), true);
    cat.startAsync();
    cat.awaitRunning(1, TimeUnit.SECONDS);
    MetricsAssert ma = new MetricsAssert(cat.getMetricContext());
    JobSpec js1_1 = JobSpec.builder("test:job1").withVersion("1").build();
    JobSpec js1_2 = JobSpec.builder("test:job1").withVersion("2").build();
    JobSpec js1_3 = JobSpec.builder("test:job1").withVersion("3").build();
    JobSpec js2 = JobSpec.builder("test:job2").withVersion("1").build();

    cat.put(js1_1); // first put of job1: counted as an add
    assertMetricCounts(cat, 1, 1, 0, 0);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_ADDED_OPERATION_TYPE, js1_1);

    cat.put(js1_2); // same URI again: counted as an update, active count unchanged
    assertMetricCounts(cat, 1, 1, 1, 0);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_UPDATED_OPERATION_TYPE, js1_2);

    cat.put(js2); // new URI: second add
    assertMetricCounts(cat, 2, 2, 1, 0);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_ADDED_OPERATION_TYPE, js2);

    cat.put(js1_3); // job1 again: second update
    assertMetricCounts(cat, 2, 2, 2, 0);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_UPDATED_OPERATION_TYPE, js1_3);

    cat.remove(js2.getUri()); // first delete
    assertMetricCounts(cat, 1, 2, 2, 1);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_DELETED_OPERATION_TYPE, js2);

    cat.remove(new URI("test:dummy_job")); // unknown URI: no counter changes
    assertMetricCounts(cat, 1, 2, 2, 1);

    cat.remove(js1_3.getUri()); // second delete empties the catalog
    assertMetricCounts(cat, 0, 2, 2, 2);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_DELETED_OPERATION_TYPE, js1_3);

    cat.stopAsync();
    cat.awaitTerminated(1, TimeUnit.SECONDS);
  }

  /** Asserts the catalog's active-job gauge and add/update/delete counter values. */
  private static void assertMetricCounts(InMemoryJobCatalog cat, int numActiveJobs,
      long totalAdds, long totalUpdates, long totalDeletes) {
    Assert.assertEquals(cat.getMetrics().getNumActiveJobs().getValue().intValue(), numActiveJobs);
    Assert.assertEquals(cat.getMetrics().getTotalAddCalls().getValue().longValue(), totalAdds);
    Assert.assertEquals(cat.getMetrics().getTotalUpdateCalls().getValue().longValue(), totalUpdates);
    Assert.assertEquals(cat.getMetrics().getTotalDeleteCalls().getValue().longValue(), totalDeletes);
  }

  /**
   * Waits up to 100ms for a tracking event matching the given operation type and
   * the URI/version of {@code spec}.
   */
  @SuppressWarnings("unchecked")
  private static void assertTrackingEvent(MetricsAssert ma, String operationType, JobSpec spec)
      throws Exception {
    ma.assertEvent(Predicates.and(
        MetricsAssert.eqEventNamespace(JobCatalog.class.getName()),
        MetricsAssert.eqEventName(JobCatalog.StandardMetrics.TRACKING_EVENT_NAME),
        MetricsAssert.eqEventMetdata(GobblinMetricsKeys.OPERATION_TYPE_META, operationType),
        MetricsAssert.eqEventMetdata(GobblinMetricsKeys.JOB_SPEC_URI_META, spec.getUri().toString()),
        MetricsAssert.eqEventMetdata(GobblinMetricsKeys.JOB_SPEC_VERSION_META, spec.getVersion())
        ),
        100, TimeUnit.MILLISECONDS);
  }
}
| 1,286 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/FSJobCatalogHelperTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_catalog;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.Semaphore;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.api.JobSpec;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.gobblin.util.PullFileLoader;
import org.apache.gobblin.util.filesystem.PathAlterationListener;
import org.apache.gobblin.util.filesystem.PathAlterationListenerAdaptor;
import org.apache.gobblin.util.filesystem.PathAlterationObserver;
import org.apache.gobblin.util.filesystem.PathAlterationObserverScheduler;
/**
 * Inherits the original tests for loading configurations from .properties files.
* The testing folder structure is:
* /root
* - root.properties
* /test1
* - test11.pull
* - test12.pull
* /test11
* - test111.pull
* /test2
* - test.properties
* - test21.pull
 * The new testing routine for JobSpec is to create a JobSpec (simulating the result
 * of an external JobSpecMonitor), persist it, reload it from the file system, and
 * compare it with the original JobSpec.
*
*/
@Test(enabled=false, groups = {"gobblin.runtime"})
public class FSJobCatalogHelperTest {
  // For general type of File system
  private File jobConfigDir;
  private File subDir1;
  private File subDir11;
  private File subDir2;
  private Config sysConfig;
  private PullFileLoader loader;
  private ImmutableFSJobCatalog.JobSpecConverter converter;

  /**
   * Builds the temp job-config directory tree described in the class comment and
   * populates it with the .pull/.properties files consumed by the load tests.
   */
  @BeforeClass
  public void setUp()
      throws IOException {
    this.jobConfigDir = java.nio.file.Files.createTempDirectory(
        String.format("gobblin-test_%s_job-conf", this.getClass().getSimpleName())).toFile();
    FileUtils.forceDeleteOnExit(this.jobConfigDir);
    this.subDir1 = new File(this.jobConfigDir, "test1");
    this.subDir11 = new File(this.subDir1, "test11");
    this.subDir2 = new File(this.jobConfigDir, "test2");
    this.subDir1.mkdirs();
    this.subDir11.mkdirs();
    this.subDir2.mkdirs();
    this.sysConfig = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, this.jobConfigDir.getAbsolutePath())
        .build());
    ImmutableFSJobCatalog.ConfigAccessor cfgAccess =
        new ImmutableFSJobCatalog.ConfigAccessor(this.sysConfig);
    this.loader = new PullFileLoader(new Path(jobConfigDir.toURI()), FileSystem.get(new Configuration()),
        cfgAccess.getJobConfigurationFileExtensions(),
        PullFileLoader.DEFAULT_HOCON_PULL_FILE_EXTENSIONS);
    this.converter = new ImmutableFSJobCatalog.JobSpecConverter(new Path(this.jobConfigDir.toURI()), Optional.of(
        FSJobCatalog.CONF_EXTENSION));
    Properties rootProps = new Properties();
    rootProps.setProperty("k1", "a1");
    rootProps.setProperty("k2", "a2");
    // test-job-conf-dir/root.properties
    storeProperties(rootProps, new File(this.jobConfigDir, "root.properties"));
    Properties jobProps1 = new Properties();
    jobProps1.setProperty("k1", "c1");
    jobProps1.setProperty("k3", "b3");
    jobProps1.setProperty("k6", "a6");
    // test-job-conf-dir/test1/test11.pull
    storeProperties(jobProps1, new File(this.subDir1, "test11.pull"));
    Properties jobProps2 = new Properties();
    jobProps2.setProperty("k7", "a7");
    // test-job-conf-dir/test1/test12.PULL
    storeProperties(jobProps2, new File(this.subDir1, "test12.PULL"));
    Properties jobProps3 = new Properties();
    jobProps3.setProperty("k1", "d1");
    jobProps3.setProperty("k8", "a8");
    jobProps3.setProperty("k9", "${k8}");
    // test-job-conf-dir/test1/test11/test111.pull
    storeProperties(jobProps3, new File(this.subDir11, "test111.pull"));
    Properties props2 = new Properties();
    props2.setProperty("k2", "b2");
    props2.setProperty("k5", "a5");
    // test-job-conf-dir/test2/test.PROPERTIES (comment previously said test.properties)
    storeProperties(props2, new File(this.subDir2, "test.PROPERTIES"));
    Properties jobProps4 = new Properties();
    jobProps4.setProperty("k5", "b5");
    // test-job-conf-dir/test2/test21.PULL
    storeProperties(jobProps4, new File(this.subDir2, "test21.PULL"));
  }

  /**
   * Stores {@code props} into {@code file}, closing the writer even when store()
   * throws. The previous code leaked one FileWriter per file written.
   */
  private static void storeProperties(Properties props, File file) throws IOException {
    try (FileWriter writer = new FileWriter(file)) {
      props.store(writer, "");
    }
  }

  // This test doesn't delete framework attributes and
  /**
   * Loads all pull files recursively and verifies per-file resolution: variable
   * substitution (k9=${k8}), overrides of root properties, and the retained
   * framework keys (JOB_CONFIG_FILE_GENERAL_PATH_KEY / JOB_CONFIG_FILE_PATH_KEY).
   */
  @Test (enabled=false)
  public void testloadGenericJobConfigs()
      throws ConfigurationException, IOException, URISyntaxException {
    List<JobSpec> jobSpecs = Lists.transform(
        Lists.newArrayList(loader.loadPullFilesRecursively(loader.getRootDirectory(), this.sysConfig, false)),
        this.converter);
    List<Properties> jobConfigs = convertJobSpecList2PropList(jobSpecs);
    Assert.assertEquals(jobConfigs.size(), 4);
    // test-job-conf-dir/test1/test11/test111.pull
    Properties jobProps1 = getJobConfigForFile(jobConfigs, "test111.pull");
    //5 is consisting of three attributes, plus ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY
    // which is on purpose to keep
    // plus ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY, which is not necessary to convert into JobSpec
    // but keep it here to avoid NullPointer exception and validation purpose for testing.
    Assert.assertEquals(jobProps1.stringPropertyNames().size(), 5);
    Assert.assertTrue(jobProps1.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
    Assert.assertEquals(jobProps1.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY),
        this.jobConfigDir.getAbsolutePath());
    Assert.assertEquals(jobProps1.getProperty("k1"), "d1");
    Assert.assertEquals(jobProps1.getProperty("k8"), "a8");
    Assert.assertEquals(jobProps1.getProperty("k9"), "a8");
    // test-job-conf-dir/test1/test11.pull
    Properties jobProps2 = getJobConfigForFile(jobConfigs, "test11.pull");
    Assert.assertEquals(jobProps2.stringPropertyNames().size(), 5);
    Assert.assertTrue(jobProps2.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
    Assert.assertEquals(jobProps2.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY),
        this.jobConfigDir.getAbsolutePath());
    Assert.assertEquals(jobProps2.getProperty("k1"), "c1");
    Assert.assertEquals(jobProps2.getProperty("k3"), "b3");
    Assert.assertEquals(jobProps2.getProperty("k6"), "a6");
    // test-job-conf-dir/test1/test12.PULL
    Properties jobProps3 = getJobConfigForFile(jobConfigs, "test12.PULL");
    Assert.assertEquals(jobProps3.stringPropertyNames().size(), 3);
    Assert.assertTrue(jobProps3.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
    Assert.assertEquals(jobProps3.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY),
        this.jobConfigDir.getAbsolutePath());
    Assert.assertEquals(jobProps3.getProperty("k7"), "a7");
    // test-job-conf-dir/test2/test21.PULL
    Properties jobProps4 = getJobConfigForFile(jobConfigs, "test21.PULL");
    Assert.assertEquals(jobProps4.stringPropertyNames().size(), 3);
    Assert.assertTrue(jobProps4.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY));
    Assert.assertEquals(jobProps4.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY),
        this.jobConfigDir.getAbsolutePath());
    Assert.assertEquals(jobProps4.getProperty("k5"), "b5");
  }

  /**
   * Loads a single pull file and verifies the same resolution rules as the
   * recursive variant, for test1/test11/test111.pull only.
   */
  @Test(enabled=false, dependsOnMethods = {"testloadGenericJobConfigs"})
  public void testloadGenericJobConfig()
      throws ConfigurationException, IOException {
    Path jobConfigPath = new Path(this.subDir11.getAbsolutePath(), "test111.pull");
    Properties jobProps =
        ConfigUtils.configToProperties(loader.loadPullFile(jobConfigPath, this.sysConfig, false));
    Assert.assertEquals(jobProps.stringPropertyNames().size(), 5);
    Assert.assertEquals(jobProps.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY),
        this.jobConfigDir.getAbsolutePath());
    Assert.assertEquals(jobProps.getProperty("k1"), "d1");
    Assert.assertEquals(jobProps.getProperty("k8"), "a8");
    Assert.assertEquals(jobProps.getProperty("k9"), "a8");
  }

  /**
   * Verifies that the path-alteration scheduler reports both a touched existing
   * file and a newly created file under the watched directory. A semaphore is
   * used so the test waits only as long as the events actually take.
   */
  @Test(enabled=false, dependsOnMethods = {"testloadGenericJobConfig"})
  public void testPathAlterationObserver()
      throws Exception {
    PathAlterationObserverScheduler detector = new PathAlterationObserverScheduler(1000);
    final Set<Path> fileAltered = Sets.newHashSet();
    final Semaphore semaphore = new Semaphore(0);
    PathAlterationListener listener = new PathAlterationListenerAdaptor() {
      @Override
      public void onFileCreate(Path path) {
        fileAltered.add(path);
        semaphore.release();
      }

      @Override
      public void onFileChange(Path path) {
        fileAltered.add(path);
        semaphore.release();
      }
    };
    detector.addPathAlterationObserver(listener, Optional.<PathAlterationObserver>absent(),
        new Path(this.jobConfigDir.getPath()));
    try {
      detector.start();
      // Give the monitor some time to start
      Thread.sleep(1000);
      File jobConfigFile = new File(this.subDir11, "test111.pull");
      Files.touch(jobConfigFile);
      File newJobConfigFile = new File(this.subDir11, "test112.pull");
      Files.append("k1=v1", newJobConfigFile, ConfigurationKeys.DEFAULT_CHARSET_ENCODING);
      // block until both the change and the create event have fired
      semaphore.acquire(2);
      Assert.assertEquals(fileAltered.size(), 2);
      Assert.assertTrue(fileAltered.contains(new Path("file:" + jobConfigFile)));
      Assert.assertTrue(fileAltered.contains(new Path("file:" + newJobConfigFile)));
    } finally {
      detector.stop();
    }
  }

  /** Removes the temp job-config directory created in {@link #setUp()}. */
  @AfterClass
  public void tearDown()
      throws IOException {
    if (this.jobConfigDir != null) {
      FileUtils.forceDelete(this.jobConfigDir);
    }
  }

  /**
   * Returns the loaded config whose JOB_CONFIG_FILE_PATH_KEY ends with
   * {@code fileName}, or null if none matches.
   */
  private Properties getJobConfigForFile(List<Properties> jobConfigs, String fileName) {
    for (Properties jobConfig : jobConfigs) {
      if (jobConfig.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY).endsWith(fileName)) {
        return jobConfig;
      }
    }
    return null;
  }

  /**
   * Converts JobSpecs back to Properties for comparison. Each JobSpec in this
   * test is assumed to carry either properties or a config.
   * @param jobConfigs JobSpecs produced by the loader/converter
   * @return one Properties object per JobSpec, with the spec URI re-attached
   *         under JOB_CONFIG_FILE_PATH_KEY for lookup by file name
   */
  private List<Properties> convertJobSpecList2PropList(List<JobSpec> jobConfigs) {
    List<Properties> result = Lists.newArrayList();
    for (JobSpec js : jobConfigs) {
      Properties propToBeAdded;
      if (js.getConfigAsProperties() != null) {
        propToBeAdded = js.getConfigAsProperties();
      } else {
        propToBeAdded = ConfigUtils.configToProperties(js.getConfig());
      }
      // For the testing purpose, added it back when doing the comparison.
      propToBeAdded.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY, js.getUri().toString());
      result.add(propToBeAdded);
    }
    return result;
  }
}
| 1,287 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/TestMutableCachingJobCatalog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_catalog;
import java.net.URI;
import java.util.concurrent.TimeUnit;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import org.apache.gobblin.runtime.api.JobCatalogListener;
import org.apache.gobblin.runtime.api.JobSpec;
import org.apache.gobblin.runtime.api.JobSpecNotFoundException;
/** Unit tests for {@link CachingJobCatalog} and {@link MutableCachingJobCatalog} */
public class TestMutableCachingJobCatalog {

  /**
   * End-to-end check of the read-through cache: changes made directly on the
   * backing catalog are visible through the cache, writes made through the cache
   * propagate back, and listeners fire only while registered.
   */
  @Test
  public void test() throws Exception {
    InMemoryJobCatalog delegate =
        new InMemoryJobCatalog(Optional.<Logger>of(LoggerFactory.getLogger("baseCat")));
    delegate.startAsync();
    delegate.awaitRunning(2, TimeUnit.SECONDS);

    MutableCachingJobCatalog cache =
        new MutableCachingJobCatalog(delegate, Optional.<Logger>of(LoggerFactory.getLogger("cachedCat")));
    JobCatalogListener listener = Mockito.mock(JobCatalogListener.class);
    cache.addListener(listener);
    cache.startAsync();
    cache.awaitRunning(10, TimeUnit.SECONDS);

    JobSpec specV1 = JobSpec.builder("test:job1").withVersion("1").build();
    JobSpec specV2 = JobSpec.builder("test:job1").withVersion("2").build();
    JobSpec specV3 = JobSpec.builder("test:job1").withVersion("3").build();
    URI specUri = new URI("test:job1");

    // A spec added to the delegate becomes visible through the cache.
    delegate.put(specV1);
    Assert.assertEquals(cache.getJobSpec(new URI("test:job1")), specV1);

    // An update in the delegate is reflected by the cache.
    delegate.put(specV2);
    Assert.assertEquals(cache.getJobSpec(specUri), specV2);

    // Removing from the delegate makes cache lookups fail with the right URI.
    delegate.remove(specUri);
    JobSpecNotFoundException missingFromCache = null;
    try {
      cache.getJobSpec(specUri);
    } catch (JobSpecNotFoundException e) {
      missingFromCache = e;
    }
    Assert.assertNotNull(missingFromCache, "Expected JobSpecNotFoundException");
    Assert.assertEquals(missingFromCache.getMissingJobSpecURI(), specUri);

    // Detach the listener before the write-through phase.
    cache.removeListener(listener);

    // A put through the cache is visible both via the cache and the delegate.
    cache.put(specV3);
    Assert.assertEquals(cache.getJobSpec(specUri), specV3);
    Assert.assertEquals(delegate.getJobSpec(specUri), specV3);

    // A remove through the cache propagates to both sides.
    cache.remove(specUri);
    JobSpecNotFoundException missingAfterRemove = null;
    try {
      cache.getJobSpec(specUri);
    } catch (JobSpecNotFoundException e) {
      missingAfterRemove = e;
    }
    Assert.assertNotNull(missingAfterRemove, "Expected JobSpecNotFoundException");
    Assert.assertEquals(missingAfterRemove.getMissingJobSpecURI(), specUri);

    JobSpecNotFoundException missingFromDelegate = null;
    try {
      delegate.getJobSpec(specUri);
    } catch (JobSpecNotFoundException e) {
      missingFromDelegate = e;
    }
    Assert.assertNotNull(missingFromDelegate, "Expected JobSpecNotFoundException");
    Assert.assertEquals(missingFromDelegate.getMissingJobSpecURI(), specUri);

    // Only the events that happened while the listener was attached were delivered.
    Mockito.verify(listener).onAddJob(Mockito.eq(specV1));
    Mockito.verify(listener).onUpdateJob(Mockito.eq(specV2));
    Mockito.verify(listener).onDeleteJob(Mockito.eq(specV2.getUri()), Mockito.eq(specV2.getVersion()));
    Mockito.verifyNoMoreInteractions(listener);

    cache.stopAsync();
    cache.awaitTerminated(10, TimeUnit.SECONDS);
    delegate.stopAsync();
    delegate.awaitTerminated(2, TimeUnit.SECONDS);
  }
}
| 1,288 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/TestNonObservingFSJobCatalog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_catalog;
import java.io.File;
import java.io.PrintWriter;
import java.net.URI;
import java.util.Hashtable;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.fs.Path;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.config.ConfigBuilder;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.api.JobCatalogListener;
import org.apache.gobblin.runtime.api.JobSpec;
import org.apache.gobblin.runtime.job_spec.ResolvedJobSpec;
import org.apache.gobblin.util.ConfigUtils;
/**
* Test interaction between (Mutable)NonObservingFsJobCatalog and its listeners.
* Inherit the testing routine for InMemoryJobCatalog.
*/
public class TestNonObservingFSJobCatalog {
  // Temporary job-conf directory created per test run; deleted in a finally block.
  private File jobConfigDir;
  private Path jobConfigDirPath;

  /**
   * Verifies that add/update/delete operations on a {@link NonObservingFSJobCatalog}
   * are reported to registered {@link JobCatalogListener}s, and that a job referencing
   * a template resolves the template's parameters.
   */
  @Test
  public void testCallbacks()
      throws Exception {
    this.jobConfigDir = java.nio.file.Files.createTempDirectory(
        String.format("gobblin-test_%s_job-conf", this.getClass().getSimpleName())).toFile();
    this.jobConfigDirPath = new Path(this.jobConfigDir.getPath());
    try {
      // Seed the template file that test_job3 resolves against.
      try (PrintWriter printWriter = new PrintWriter(new Path(jobConfigDirPath, "job3.template").toString(), "UTF-8")) {
        printWriter.println("param1 = value1");
        printWriter.println("param2 = value2");
      }

      Properties properties = new Properties();
      properties.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, this.jobConfigDir.getPath());
      NonObservingFSJobCatalog cat = new NonObservingFSJobCatalog(ConfigUtils.propertiesToConfig(properties));
      cat.startAsync();
      cat.awaitRunning(10, TimeUnit.SECONDS);

      // Mirror of the catalog contents as reported through listener callbacks.
      final Map<URI, JobSpec> specs = new Hashtable<>();
      JobCatalogListener l = Mockito.mock(JobCatalogListener.class);
      // onAddJob and onUpdateJob record identically, so share one Answer instead of
      // duplicating the anonymous class (the original contained two copies of it).
      Answer<Void> recordSpec = new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation)
            throws Throwable {
          JobSpec spec = (JobSpec) invocation.getArguments()[0];
          specs.put(spec.getUri(), spec);
          return null;
        }
      };
      Mockito.doAnswer(recordSpec).when(l).onAddJob(Mockito.any(JobSpec.class));
      Mockito.doAnswer(recordSpec).when(l).onUpdateJob(Mockito.any(JobSpec.class));
      Mockito.doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation)
            throws Throwable {
          URI uri = (URI) invocation.getArguments()[0];
          specs.remove(uri);
          return null;
        }
      }).when(l).onDeleteJob(Mockito.any(URI.class), Mockito.anyString());

      JobSpec js1_1 = JobSpec.builder("test_job1").withVersion("1").build();
      JobSpec js1_2 = JobSpec.builder("test_job1").withVersion("2").build();
      JobSpec js2 = JobSpec.builder("test_job2").withVersion("1").build();
      JobSpec js3 = JobSpec.builder("test_job3").withVersion("1").withTemplate(new URI("FS:///job3.template"))
          .withConfig(ConfigBuilder.create().addPrimitive("job.template", "FS:///job3.template").build()).build();
      cat.addListener(l);

      // Add: the listener must observe a spec whose config is a superset of what was put.
      cat.put(js1_1);
      Assert.assertTrue(specs.containsKey(js1_1.getUri()));
      JobSpec js1_1_notified = specs.get(js1_1.getUri());
      Assert.assertTrue(ConfigUtils.verifySubset(js1_1_notified.getConfig(), js1_1.getConfig()));
      Assert.assertEquals(js1_1.getVersion(), js1_1_notified.getVersion());

      // Update of the same URI with a new version.
      cat.put(js1_2);
      Assert.assertTrue(specs.containsKey(js1_2.getUri()));
      JobSpec js1_2_notified = specs.get(js1_2.getUri());
      Assert.assertTrue(ConfigUtils.verifySubset(js1_2_notified.getConfig(), js1_2.getConfig()));
      Assert.assertEquals(js1_2.getVersion(), js1_2_notified.getVersion());

      cat.put(js2);
      Assert.assertTrue(specs.containsKey(js2.getUri()));
      JobSpec js2_notified = specs.get(js2.getUri());
      Assert.assertTrue(ConfigUtils.verifySubset(js2_notified.getConfig(), js2.getConfig()));
      Assert.assertEquals(js2.getVersion(), js2_notified.getVersion());

      // Delete must remove the entry from the listener's view.
      cat.remove(js2.getUri());
      Assert.assertFalse(specs.containsKey(js2.getUri()));

      // Job with a template: the resolved spec must expose the template's parameters.
      cat.put(js3);
      Assert.assertTrue(specs.containsKey(js3.getUri()));
      JobSpec js3_notified = specs.get(js3.getUri());
      Assert.assertTrue(ConfigUtils.verifySubset(js3_notified.getConfig(), js3.getConfig()));
      Assert.assertEquals(js3.getVersion(), js3_notified.getVersion());
      ResolvedJobSpec js3_resolved = new ResolvedJobSpec(js3_notified, cat);
      Assert.assertEquals(js3_resolved.getConfig().getString("param1"), "value1");
      Assert.assertEquals(js3_resolved.getConfig().getString("param2"), "value2");

      cat.stopAsync();
      cat.awaitTerminated(10, TimeUnit.SECONDS);
    } finally {
      // The original leaked the temp directory; delete it so repeated runs don't accumulate files.
      deleteRecursively(this.jobConfigDir);
    }
  }

  /** Recursively deletes {@code dir} (children first); no-op when {@code dir} is null or absent. */
  private static void deleteRecursively(File dir) throws java.io.IOException {
    if (dir == null || !dir.exists()) {
      return;
    }
    try (java.util.stream.Stream<java.nio.file.Path> paths = java.nio.file.Files.walk(dir.toPath())) {
      paths.sorted(java.util.Comparator.reverseOrder()).forEach(p -> p.toFile().delete());
    }
  }
}
| 1,289 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/TestFSJobCatalog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_catalog;
import java.io.File;
import java.io.PrintWriter;
import java.net.URI;
import java.util.Hashtable;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.fs.Path;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.config.ConfigBuilder;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.api.JobCatalogListener;
import org.apache.gobblin.runtime.api.JobSpec;
import org.apache.gobblin.runtime.job_spec.ResolvedJobSpec;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.gobblin.util.filesystem.PathAlterationObserver;
import static org.mockito.Mockito.any;
/**
* Test interaction between (Mutable)FsJobCatalog and its listeners.
* Inherit the testing routine for InMemoryJobCatalog.
*/
public class TestFSJobCatalog {
  // Temporary directory holding the job config files the catalog watches.
  private File jobConfigDir;
  private Path jobConfigDirPath;

  /**
   * Exercises add/update/delete on an observing {@link FSJobCatalog} and checks that each
   * operation is reported to a registered {@link JobCatalogListener}. The
   * {@link PathAlterationObserver} is driven manually via checkAndNotify() so the test does
   * not depend on the catalog's polling schedule.
   * NOTE(review): the temp directory created here is never deleted — assumed acceptable for
   * this test environment, but worth confirming.
   */
  @Test
  public void testCallbacks()
      throws Exception {
    this.jobConfigDir = java.nio.file.Files.createTempDirectory(
        String.format("gobblin-test_%s_job-conf", this.getClass().getSimpleName())).toFile();
    this.jobConfigDirPath = new Path(this.jobConfigDir.getPath());
    // Seed the template file that test_job3 references through the "job.template" key.
    try (PrintWriter printWriter = new PrintWriter(new Path(jobConfigDirPath, "job3.template").toString(), "UTF-8")) {
      printWriter.println("param1 = value1");
      printWriter.println("param2 = value2");
    }
    Properties properties = new Properties();
    properties.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, this.jobConfigDir.getPath());
    PathAlterationObserver observer = new PathAlterationObserver(this.jobConfigDirPath);
    /* Exposed the observer so that checkAndNotify can be manually invoked. */
    FSJobCatalog cat = new FSJobCatalog(ConfigUtils.propertiesToConfig(properties), observer);
    cat.startAsync();
    cat.awaitRunning(10, TimeUnit.SECONDS);
    // Mirror of the catalog contents as reported through listener callbacks, keyed by job URI.
    final Map<URI, JobSpec> specs = new Hashtable<>();
    JobCatalogListener l = Mockito.mock(JobCatalogListener.class);
    // Record added specs into the mirror map.
    Mockito.doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation)
          throws Throwable {
        JobSpec spec = (JobSpec) invocation.getArguments()[0];
        specs.put(spec.getUri(), spec);
        return null;
      }
    }).when(l).onAddJob(any(JobSpec.class));
    // Record updated specs (overwrites the previous version for the same URI).
    Mockito.doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation)
          throws Throwable {
        JobSpec spec = (JobSpec) invocation.getArguments()[0];
        specs.put(spec.getUri(), spec);
        return null;
      }
    }).when(l).onUpdateJob(any(JobSpec.class));
    // Remove deleted specs from the mirror map.
    Mockito.doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation)
          throws Throwable {
        URI uri = (URI) invocation.getArguments()[0];
        specs.remove(uri);
        return null;
      }
    }).when(l).onDeleteJob(any(URI.class), any());
    JobSpec js1_1 = JobSpec.builder("test_job1").withVersion("1").build();
    JobSpec js1_2 = JobSpec.builder("test_job1").withVersion("2").build();
    JobSpec js2 = JobSpec.builder("test_job2").withVersion("1").build();
    JobSpec js3 = JobSpec.builder("test_job3").withVersion("1").withTemplate(new URI("FS:///job3.template"))
        .withConfig(ConfigBuilder.create().addPrimitive("job.template", "FS:///job3.template").build()).build();
    cat.addListener(l);
    observer.initialize();
    cat.put(js1_1);
    // enough time for file creation.
    observer.checkAndNotify();
    Assert.assertTrue(specs.containsKey(js1_1.getUri()));
    JobSpec js1_1_notified = specs.get(js1_1.getUri());
    // The catalog may decorate the spec's config, so only require the put config as a subset.
    Assert.assertTrue(ConfigUtils.verifySubset(js1_1_notified.getConfig(), js1_1.getConfig()));
    Assert.assertEquals(js1_1.getVersion(), js1_1_notified.getVersion());
    // Linux system has too large granularity for the modification time.
    Thread.sleep(1000)
;
    cat.put(js1_2);
    // enough time for file replacement.
    observer.checkAndNotify();
    Assert.assertTrue(specs.containsKey(js1_2.getUri()));
    JobSpec js1_2_notified = specs.get(js1_2.getUri());
    Assert.assertTrue(ConfigUtils.verifySubset(js1_2_notified.getConfig(), js1_2.getConfig()));
    Assert.assertEquals(js1_2.getVersion(), js1_2_notified.getVersion());
    Thread.sleep(1000);
    cat.put(js2);
    observer.checkAndNotify();
    Assert.assertTrue(specs.containsKey(js2.getUri()));
    JobSpec js2_notified = specs.get(js2.getUri());
    Assert.assertTrue(ConfigUtils.verifySubset(js2_notified.getConfig(), js2.getConfig()));
    Assert.assertEquals(js2.getVersion(), js2_notified.getVersion());
    Thread.sleep(1000);
    cat.remove(js2.getUri());
    // enough time for file deletion.
    observer.checkAndNotify();
    Assert.assertFalse(specs.containsKey(js2.getUri()));
    Thread.sleep(1000);
    cat.put(js3);
    observer.checkAndNotify();
    Assert.assertTrue(specs.containsKey(js3.getUri()));
    JobSpec js3_notified = specs.get(js3.getUri());
    Assert.assertTrue(ConfigUtils.verifySubset(js3_notified.getConfig(), js3.getConfig()));
    Assert.assertEquals(js3.getVersion(), js3_notified.getVersion());
    // Resolving against the catalog must pull in the template's parameters.
    ResolvedJobSpec js3_resolved = new ResolvedJobSpec(js3_notified, cat);
    Assert.assertEquals(js3_resolved.getConfig().getString("param1"), "value1");
    Assert.assertEquals(js3_resolved.getConfig().getString("param2"), "value2");
    cat.stopAsync();
    cat.awaitTerminated(10, TimeUnit.SECONDS);
  }
}
| 1,290 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/TestMysqlJobCatalog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_catalog;
import com.google.common.base.Predicates;
import com.typesafe.config.Config;
import java.net.URI;
import java.util.concurrent.TimeUnit;
import org.apache.gobblin.config.ConfigBuilder;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.instrumented.GobblinMetricsKeys;
import org.apache.gobblin.metastore.testing.ITestMetastoreDatabase;
import org.apache.gobblin.metastore.testing.TestMetastoreDatabaseFactory;
import org.apache.gobblin.metrics.test.MetricsAssert;
import org.apache.gobblin.runtime.api.JobCatalog;
import org.apache.gobblin.runtime.api.JobCatalogListener;
import org.apache.gobblin.runtime.api.JobSpec;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/** Verify {@link MysqlJobCatalog} [modeled on {@link TestInMemoryJobCatalog}] */
public class TestMysqlJobCatalog {
  private static final String USER = "testUser";
  private static final String PASSWORD = "testPassword";
  private static final String TABLE = "job_catalog";

  // Catalog under test; recreated per test method by setUp().
  private MysqlJobCatalog cat;

  /** create a new DB/`JobCatalog` for each test, so they're completely independent */
  @BeforeMethod
  public void setUp() throws Exception {
    ITestMetastoreDatabase testDb = TestMetastoreDatabaseFactory.get();
    Config config = ConfigBuilder.create()
        .addPrimitive(ConfigurationKeys.METRICS_ENABLED_KEY, "true")
        .addPrimitive(MysqlJobCatalog.DB_CONFIG_PREFIX + "." + ConfigurationKeys.STATE_STORE_DB_URL_KEY, testDb.getJdbcUrl())
        .addPrimitive(MysqlJobCatalog.DB_CONFIG_PREFIX + "." + ConfigurationKeys.STATE_STORE_DB_USER_KEY, USER)
        .addPrimitive(MysqlJobCatalog.DB_CONFIG_PREFIX + "." + ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY, PASSWORD)
        .addPrimitive(MysqlJobCatalog.DB_CONFIG_PREFIX + "." + ConfigurationKeys.STATE_STORE_DB_TABLE_KEY, TABLE)
        .build();
    this.cat = new MysqlJobCatalog(config);
  }

  /**
   * Verifies that listener callbacks fire for put/remove, that only the latest version of a
   * job is retained, and that no callbacks fire after the listener is removed or for a
   * non-existent job URI.
   */
  @Test
  public void testCallbacks() throws Exception {
    cat.startAsync();
    cat.awaitRunning(1, TimeUnit.SECONDS);
    JobCatalogListener l = Mockito.mock(JobCatalogListener.class);

    JobSpec js1_1 = JobSpec.builder("test:job1").withVersion("1").build();
    JobSpec js1_2 = JobSpec.builder("test:job1").withVersion("2").build();
    JobSpec js1_3 = JobSpec.builder("test:job1").withVersion("3").build();
    JobSpec js2 = JobSpec.builder("test:job2").withVersion("1").build();

    // Listener is added after js1_1 exists, so js1_1 arrives via the initial onAddJob.
    cat.put(js1_1);
    cat.addListener(l);
    cat.put(js1_2);
    cat.put(js2);
    cat.put(js1_3);

    // Latest put for a URI wins.
    JobSpec js1_latest_version = cat.getJobSpec(js1_1.getUri());
    Assert.assertEquals(js1_3, js1_latest_version);

    cat.remove(js2.getUri());
    cat.remove(new URI("test:dummy_job")); // doesn't exist: won't be found, so expect no callback
    cat.removeListener(l);
    cat.remove(js1_3.getUri()); // after removeListener: no callback expected

    Mockito.verify(l).onAddJob(Mockito.eq(js1_1));
    Mockito.verify(l).onUpdateJob(Mockito.eq(js1_2));
    Mockito.verify(l).onAddJob(Mockito.eq(js2));
    Mockito.verify(l).onUpdateJob(Mockito.eq(js1_3));
    Mockito.verify(l).onDeleteJob(Mockito.eq(js2.getUri()), Mockito.eq(js2.getVersion()));
    Mockito.verifyNoMoreInteractions(l);

    cat.stopAsync();
    cat.awaitTerminated(1, TimeUnit.SECONDS);
  }

  /**
   * Verifies the catalog's gauges (active jobs, add/update/delete counts) and the tracking
   * events emitted for each put/remove. The repeated per-operation assertions are factored
   * into {@link #assertCatalogCounts} and {@link #assertTrackingEvent}.
   */
  @Test
  public void testMetrics() throws Exception {
    cat.startAsync();
    cat.awaitRunning(1, TimeUnit.SECONDS);
    MetricsAssert ma = new MetricsAssert(cat.getMetricContext());

    JobSpec js1_1 = JobSpec.builder("test:job1").withVersion("1").build();
    JobSpec js1_2 = JobSpec.builder("test:job1").withVersion("2").build();
    JobSpec js1_3 = JobSpec.builder("test:job1").withVersion("3").build();
    JobSpec js2 = JobSpec.builder("test:job2").withVersion("1").build();

    cat.put(js1_1);
    assertCatalogCounts(1, 1, 0, 0);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_ADDED_OPERATION_TYPE, js1_1);

    cat.put(js1_2);
    assertCatalogCounts(1, 1, 1, 0);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_UPDATED_OPERATION_TYPE, js1_2);

    cat.put(js2);
    assertCatalogCounts(2, 2, 1, 0);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_ADDED_OPERATION_TYPE, js2);

    cat.put(js1_3);
    assertCatalogCounts(2, 2, 2, 0);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_UPDATED_OPERATION_TYPE, js1_3);

    cat.remove(js2.getUri());
    assertCatalogCounts(1, 2, 2, 1);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_DELETED_OPERATION_TYPE, js2);

    // Removing a non-existent job must not change any counter.
    cat.remove(new URI("test:dummy_job"));
    assertCatalogCounts(1, 2, 2, 1);

    cat.remove(js1_3.getUri());
    assertCatalogCounts(0, 2, 2, 2);
    assertTrackingEvent(ma, JobCatalog.StandardMetrics.JOB_DELETED_OPERATION_TYPE, js1_3);

    cat.stopAsync();
    cat.awaitTerminated(1, TimeUnit.SECONDS);
  }

  /** Asserts the catalog's four standard gauges/counters all at once. */
  private void assertCatalogCounts(int numActiveJobs, long totalAdds, long totalUpdates, long totalDeletes) {
    Assert.assertEquals(cat.getMetrics().getNumActiveJobs().getValue().intValue(), numActiveJobs);
    Assert.assertEquals(cat.getMetrics().getTotalAddCalls().getValue().longValue(), totalAdds);
    Assert.assertEquals(cat.getMetrics().getTotalUpdateCalls().getValue().longValue(), totalUpdates);
    Assert.assertEquals(cat.getMetrics().getTotalDeleteCalls().getValue().longValue(), totalDeletes);
  }

  /**
   * Asserts that a catalog tracking event with the given {@code operationType} for
   * {@code spec} (matched on URI and version) arrives within 100ms.
   */
  private void assertTrackingEvent(MetricsAssert ma, String operationType, JobSpec spec)
      throws Exception {
    ma.assertEvent(Predicates.and(
        MetricsAssert.eqEventNamespace(JobCatalog.class.getName()),
        MetricsAssert.eqEventName(JobCatalog.StandardMetrics.TRACKING_EVENT_NAME),
        MetricsAssert.eqEventMetdata(GobblinMetricsKeys.OPERATION_TYPE_META, operationType),
        MetricsAssert.eqEventMetdata(GobblinMetricsKeys.JOB_SPEC_URI_META, spec.getUri().toString()),
        MetricsAssert.eqEventMetdata(GobblinMetricsKeys.JOB_SPEC_VERSION_META, spec.getVersion())
        ),
        100, TimeUnit.MILLISECONDS);
  }
}
| 1,291 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/TestJobCatalogListenersList.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_catalog;
import org.mockito.Mockito;
import org.testng.annotations.Test;
import org.apache.gobblin.runtime.api.JobCatalogListener;
import org.apache.gobblin.runtime.api.JobSpec;
/** Unit tests for {@link JobCatalogListenersList} */
public class TestJobCatalogListenersList {

  /**
   * Verifies that every registered listener receives every callback, even when some
   * listeners throw: a failure in one listener must not prevent delivery to the others.
   */
  @Test
  public void testCalls() {
    JobCatalogListenersList listeners = new JobCatalogListenersList();

    JobSpec firstJobV1 = JobSpec.builder("test:job1").build();
    JobSpec firstJobV2 = JobSpec.builder("test:job1").withVersion("2").build();
    JobSpec secondJob = JobSpec.builder("test:job2").build();

    // Each mock is rigged to throw from a different callback.
    JobCatalogListener failsOnDelete = Mockito.mock(JobCatalogListener.class);
    Mockito.doThrow(new RuntimeException("injected l1 failure")).when(failsOnDelete)
        .onDeleteJob(Mockito.eq(secondJob.getUri()), Mockito.eq(secondJob.getVersion()));
    JobCatalogListener failsOnUpdate = Mockito.mock(JobCatalogListener.class);
    Mockito.doThrow(new RuntimeException("injected l2 failure")).when(failsOnUpdate).onUpdateJob(Mockito.eq(firstJobV2));
    JobCatalogListener failsOnAdd = Mockito.mock(JobCatalogListener.class);
    Mockito.doThrow(new RuntimeException("injected l3 failure")).when(failsOnAdd).onAddJob(Mockito.eq(secondJob));

    listeners.addListener(failsOnDelete);
    listeners.addListener(failsOnUpdate);
    listeners.addListener(failsOnAdd);

    listeners.onAddJob(firstJobV1);
    listeners.onAddJob(secondJob);
    listeners.onUpdateJob(firstJobV2);
    listeners.onDeleteJob(secondJob.getUri(), secondJob.getVersion());

    // Every listener must have seen all four callbacks despite the injected failures.
    for (JobCatalogListener listener : new JobCatalogListener[]{failsOnDelete, failsOnUpdate, failsOnAdd}) {
      Mockito.verify(listener).onAddJob(Mockito.eq(firstJobV1));
      Mockito.verify(listener).onAddJob(Mockito.eq(secondJob));
      Mockito.verify(listener).onUpdateJob(Mockito.eq(firstJobV2));
      Mockito.verify(listener).onDeleteJob(Mockito.eq(secondJob.getUri()), Mockito.eq(secondJob.getVersion()));
    }
    Mockito.verifyNoMoreInteractions(failsOnDelete, failsOnUpdate, failsOnAdd);
  }
}
| 1,292 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/job_catalog/TestImmutableFSJobCatalog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.job_catalog;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.Assert.ThrowingRunnable;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
/**
* Unit tests for {@link ImmutableFSJobCatalog}
*/
public class TestImmutableFSJobCatalog {

  /**
   * Checks {@link ImmutableFSJobCatalog.ConfigAccessor} construction from the two supported
   * directory keys, and that a config with neither key is rejected.
   */
  @Test
  public void testConfigAccessor() throws Exception {
    // Built from the general-path key: plain path, default polling interval.
    Config generalPathConfig = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, "/tmp")
        .build());
    ImmutableFSJobCatalog.ConfigAccessor generalPathAccessor =
        new ImmutableFSJobCatalog.ConfigAccessor(generalPathConfig);
    Assert.assertEquals(generalPathAccessor.getJobConfDir(), "/tmp");
    Assert.assertEquals(generalPathAccessor.getJobConfDirPath(), new Path("/tmp"));
    Assert.assertEquals(generalPathAccessor.getJobConfDirFileSystem().getClass(),
        FileSystem.get(new Configuration()).getClass());
    Assert.assertEquals(generalPathAccessor.getPollingInterval(),
        ConfigurationKeys.DEFAULT_JOB_CONFIG_FILE_MONITOR_POLLING_INTERVAL);

    // Built from the legacy dir key: path gets URI-qualified, polling interval is explicit.
    Config legacyDirConfig = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY, "/tmp2")
        .put(ConfigurationKeys.JOB_CONFIG_FILE_MONITOR_POLLING_INTERVAL_KEY, 100)
        .build());
    ImmutableFSJobCatalog.ConfigAccessor legacyDirAccessor =
        new ImmutableFSJobCatalog.ConfigAccessor(legacyDirConfig);
    Assert.assertEquals(legacyDirAccessor.getJobConfDir(), "file:///tmp2");
    Assert.assertEquals(legacyDirAccessor.getJobConfDirPath(), new Path("file:///tmp2"));
    Assert.assertTrue(legacyDirAccessor.getJobConfDirFileSystem() instanceof LocalFileSystem);
    Assert.assertEquals(legacyDirAccessor.getPollingInterval(), 100);

    // Neither directory key present: construction must throw.
    Assert.assertThrows(() -> new ImmutableFSJobCatalog.ConfigAccessor(ConfigFactory.empty()));
  }
}
| 1,293 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/troubleshooter/JobIssueEventHandlerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.troubleshooter;
import org.testng.annotations.Test;
import org.apache.gobblin.metrics.event.TimingEvent;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
public class JobIssueEventHandlerTest {

  /**
   * Verifies that an issue event flowing through {@link JobIssueEventHandler} is stored
   * in the issue repository.
   */
  @Test
  public void canHandleIssue()
      throws Exception {
    MultiContextIssueRepository repository = mock(MultiContextIssueRepository.class);
    JobIssueEventHandler handler = new JobIssueEventHandler(repository, true);

    IssueEventBuilder builder = new IssueEventBuilder("TestJob");
    builder.setIssue(getTestIssue("test issue", "code1"));
    builder.addMetadata(TimingEvent.FlowEventConstants.FLOW_GROUP_FIELD, "test-group");
    builder.addMetadata(TimingEvent.FlowEventConstants.FLOW_NAME_FIELD, "test-flow");
    builder.addMetadata(TimingEvent.FlowEventConstants.FLOW_EXECUTION_ID_FIELD, "1234");
    builder.addMetadata(TimingEvent.FlowEventConstants.JOB_NAME_FIELD, "test-job");

    handler.processEvent(builder.build());

    // The handler must forward the parsed issue to the repository.
    verify(repository).put(any(), (Issue) any());
  }

  /** Builds a minimal {@link Issue} with only a summary and a code. */
  private Issue getTestIssue(String summary, String code) {
    return Issue.builder().summary(summary).code(code).build();
  }
}
| 1,294 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/troubleshooter/AutomaticTroubleshooterConfigTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.troubleshooter;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
@Test
public class AutomaticTroubleshooterConfigTest {
public void willHaveDefaultValuesInBuilder() {
AutomaticTroubleshooterConfig config = AutomaticTroubleshooterConfig.builder().disableEventReporting(true).build();
assertFalse(config.isDisabled());
assertTrue(config.isDisableEventReporting());
assertEquals(100, config.getInMemoryRepositoryMaxSize());
}
} | 1,295 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/troubleshooter/IssueEventBuilderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.troubleshooter;
import java.time.ZonedDateTime;
import java.util.HashMap;
import org.testng.annotations.Test;
import org.apache.gobblin.metrics.GobblinTrackingEvent;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
public class IssueEventBuilderTest {
@Test
public void canSerializeIssue() {
IssueEventBuilder eventBuilder = new IssueEventBuilder(IssueEventBuilder.JOB_ISSUE);
HashMap<String, String> testProperties = new HashMap<String, String>() {{
put("testKey", "test value %'\"");
}};
Issue issue =
Issue.builder().summary("test summary").details("test details").time(ZonedDateTime.now()).code("Code1")
.severity(IssueSeverity.ERROR).exceptionClass("com.TestException").properties(testProperties).build();
eventBuilder.setIssue(issue);
GobblinTrackingEvent trackingEvent = eventBuilder.build();
assertTrue(IssueEventBuilder.isIssueEvent(trackingEvent));
IssueEventBuilder deserializedEventBuilder = IssueEventBuilder.fromEvent(trackingEvent);
assertNotNull(deserializedEventBuilder);
assertEquals("Code1", deserializedEventBuilder.getIssue().getCode());
assertEquals("Code1", IssueEventBuilder.getIssueFromEvent(trackingEvent).getCode());
}
} | 1,296 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/troubleshooter/InMemoryIssueRepositoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.troubleshooter;
import java.util.List;
import org.testng.annotations.Test;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertTrue;
public class InMemoryIssueRepositoryTest {
@Test
public void canPutIssue()
throws Exception {
InMemoryIssueRepository repository = new InMemoryIssueRepository();
Issue testIssue = getTestIssue("first", "code1");
repository.put(testIssue);
List<Issue> issues = repository.getAll();
assertEquals(1, issues.size());
assertEquals(testIssue, issues.get(0));
}
@Test
public void canPutMultipleIssues()
throws Exception {
InMemoryIssueRepository repository = new InMemoryIssueRepository();
repository.put(getTestIssue("first", "code1"));
repository.put(getTestIssue("second", "code2"));
repository.put(getTestIssue("third", "code3"));
List<Issue> issues = repository.getAll();
assertEquals(3, issues.size());
assertTrue(issues.stream().anyMatch(i -> i.getCode().equals("code2")));
}
@Test
public void canRemoveIssue()
throws Exception {
InMemoryIssueRepository repository = new InMemoryIssueRepository();
repository.put(getTestIssue("first", "code1"));
repository.put(getTestIssue("second", "code2"));
repository.put(getTestIssue("third", "code3"));
List<Issue> issues = repository.getAll();
assertEquals(3, issues.size());
repository.remove("code2");
issues = repository.getAll();
assertEquals(2, issues.size());
}
@Test
public void canDeduplicateIssues()
throws Exception {
InMemoryIssueRepository repository = new InMemoryIssueRepository();
repository.put(getTestIssue("first", "code1"));
repository.put(getTestIssue("second", "code2"));
repository.put(getTestIssue("second-2", "code2"));
repository.put(getTestIssue("second-3", "code2"));
List<Issue> issues = repository.getAll();
assertEquals(2, issues.size());
assertTrue(issues.stream().anyMatch(i -> i.getCode().equals("code1")));
assertTrue(issues.stream().anyMatch(i -> i.getCode().equals("code2")));
}
@Test
public void willIgnoreOverflowIssues()
throws Exception {
InMemoryIssueRepository repository = new InMemoryIssueRepository(50);
for (int i = 0; i < 100; i++) {
repository.put(getTestIssue("issue " + i, "code" + i));
}
assertEquals(50, repository.getAll().size());
}
@Test
public void willPreserveIssueInsertionOrder()
throws Exception {
int issueCount = 50;
InMemoryIssueRepository repository = new InMemoryIssueRepository(issueCount * 2);
for (int i = 0; i < issueCount; i++) {
repository.put(getTestIssue("issue " + i, String.valueOf(i)));
}
List<Issue> retrievedIssues = repository.getAll();
for (int i = 0; i < issueCount; i++) {
assertEquals(String.valueOf(i), retrievedIssues.get(i).getCode());
}
}
private Issue getTestIssue(String summary, String code) {
return Issue.builder().summary(summary).code(code).build();
}
} | 1,297 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/troubleshooter/InMemoryMultiContextIssueRepositoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.troubleshooter;
import java.util.List;
import org.testng.annotations.Test;
import static org.testng.AssertJUnit.assertEquals;
@Test
public class InMemoryMultiContextIssueRepositoryTest extends MultiContextIssueRepositoryTest {

  /**
   * Fills the repository with twice as many contexts as it is configured to
   * hold, then checks that only the most recent {@code jobCapacity} contexts
   * retain their issues while the earlier ones come back empty.
   */
  @Test
  public void willHaveCapacityLimit()
      throws Exception {
    int jobCount = 100;
    int jobCapacity = 50;

    MultiContextIssueRepository repository = new InMemoryMultiContextIssueRepository(
        InMemoryMultiContextIssueRepository.Configuration.builder()
            // Was hard-coded to 50, which could silently drift from jobCapacity.
            .maxContextCount(jobCapacity)
            .maxIssuesPerContext(10)
            .build());

    for (int j = 0; j < jobCount; j++) {
      repository.put("job" + j, getTestIssue("issue 1", "code1"));
    }

    for (int j = 0; j < jobCount; j++) {
      List<Issue> retrievedIssues = repository.getAll("job" + j);
      if (j < jobCapacity) {
        // Earlier contexts are expected to have been evicted once the
        // context count exceeded the capacity, so they return an empty list.
        assertEquals(0, retrievedIssues.size());
      } else {
        assertEquals(1, retrievedIssues.size());
      }
    }
  }

  /** Supplies the in-memory implementation to the shared base-class tests. */
  @Override
  protected MultiContextIssueRepository getRepository() {
    return new InMemoryMultiContextIssueRepository();
  }
}
| 1,298 |
0 | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-runtime/src/test/java/org/apache/gobblin/runtime/troubleshooter/MultiContextIssueRepositoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.troubleshooter;
import java.util.List;
import org.testng.annotations.Test;
import org.apache.gobblin.service.ServiceConfigKeys;
import static org.testng.Assert.assertEquals;
/**
 * Shared contract tests for {@link MultiContextIssueRepository} implementations.
 * Subclasses provide the concrete repository via {@link #getRepository()}.
 */
public abstract class MultiContextIssueRepositoryTest {

  /** A stored issue is retrievable from the same context. */
  @Test
  public void canPutIssue()
      throws Exception {
    MultiContextIssueRepository repository = getRepository();
    Issue testIssue = getTestIssue("first", "code1");

    repository.put("job1", testIssue);

    List<Issue> issues = repository.getAll("job1");
    // TestNG's Assert.assertEquals takes (actual, expected); the original mixed
    // JUnit-style (expected, actual) in, which inverts failure messages.
    assertEquals(issues.size(), 1);
    assertEquals(issues.get(0), testIssue);
  }

  /** Issues with distinct codes accumulate within one context. */
  @Test
  public void canPutMultipleJobIssue()
      throws Exception {
    MultiContextIssueRepository repository = getRepository();
    repository.put("job1", getTestIssue("first", "code1"));
    repository.put("job1", getTestIssue("second", "code2"));

    assertEquals(repository.getAll("job1").size(), 2);
  }

  /** Issues stored under different contexts are isolated from each other. */
  @Test
  public void canWorkWithMultipleJobs()
      throws Exception {
    MultiContextIssueRepository repository = getRepository();
    Issue job1Issue1 = getTestIssue("first", "code1");
    Issue job2Issue1 = getTestIssue("first", "code1");
    Issue job2Issue2 = getTestIssue("second", "code2");

    repository.put("job1", job1Issue1);
    repository.put("job2", job2Issue1);
    repository.put("job2", job2Issue2);

    assertEquals(repository.getAll("job1").size(), 1);
    // The original asserted job2's size twice in a row; once is enough.
    assertEquals(repository.getAll("job2").size(), 2);
    assertEquals(repository.getAll("job1").get(0), job1Issue1);
  }

  /** Removing by code deletes only the targeted issue within that context. */
  @Test
  public void canRemoveIssue()
      throws Exception {
    MultiContextIssueRepository repository = getRepository();
    repository.put("job1", getTestIssue("first", "code1"));
    repository.put("job1", getTestIssue("second", "code2"));

    repository.remove("job1", "code1");

    List<Issue> issues = repository.getAll("job1");
    assertEquals(issues.size(), 1);
    assertEquals(issues.get(0).getCode(), "code2");
  }

  /** {@code getAll} returns each context's issues in insertion order. */
  @Test
  public void willPreserveIssueInsertionOrder()
      throws Exception {
    int jobCount = 10;
    int issueCount = ServiceConfigKeys.DEFAULT_MEMORY_ISSUE_REPO_MAX_ISSUE_PER_CONTEXT;

    MultiContextIssueRepository repository = getRepository();
    for (int j = 0; j < jobCount; j++) {
      for (int i = 0; i < issueCount; i++) {
        repository.put("job" + j, getTestIssue("issue " + i, String.valueOf(i)));
      }
    }

    for (int j = 0; j < jobCount; j++) {
      List<Issue> retrievedIssues = repository.getAll("job" + j);
      assertEquals(retrievedIssues.size(), issueCount);
      for (int i = 0; i < issueCount; i++) {
        assertEquals(retrievedIssues.get(i).getCode(), String.valueOf(i));
      }
    }
  }

  /** Subclasses supply the concrete repository implementation under test. */
  protected abstract MultiContextIssueRepository getRepository();

  /** Builds a minimal issue with only the fields these tests inspect. */
  protected Issue getTestIssue(String summary, String code) {
    return Issue.builder().summary(summary).code(code).build();
  }
}
| 1,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.