comment stringlengths 1 45k | method_body stringlengths 23 281k | target_code stringlengths 0 5.16k | method_body_after stringlengths 12 281k | context_before stringlengths 8 543k | context_after stringlengths 8 543k |
|---|---|---|---|---|---|
Would be easier to read if you called this "applicationsNotRedeployed". | boolean redeployAllApplications(Duration maxDuration) throws InterruptedException {
Instant end = Instant.now().plus(maxDuration);
Set<ApplicationId> applicationIds = listApplications();
do {
applicationIds = redeployApplications(applicationIds);
} while ( ! applicationIds.isEmpty() && Instant.now().isBefore(end));
if ( ! applicationIds.isEmpty()) {
log.log(LogLevel.ERROR, "Redeploying applications not finished after " + maxDuration +
", exiting, applications that failed redeployment: " + applicationIds);
return false;
}
return true;
} | Set<ApplicationId> applicationIds = listApplications(); | boolean redeployAllApplications(Duration maxDuration) throws InterruptedException {
Instant end = Instant.now().plus(maxDuration);
Set<ApplicationId> applicationIds = listApplications();
do {
applicationIds = redeployApplications(applicationIds);
} while ( ! applicationIds.isEmpty() && Instant.now().isBefore(end));
if ( ! applicationIds.isEmpty()) {
log.log(LogLevel.ERROR, "Redeploying applications not finished after " + maxDuration +
", exiting, applications that failed redeployment: " + applicationIds);
return false;
}
return true;
} | class ApplicationRepository implements com.yahoo.config.provision.Deployer {
private static final Logger log = Logger.getLogger(ApplicationRepository.class.getName());
private final TenantRepository tenantRepository;
private final Optional<Provisioner> hostProvisioner;
private final ConfigConvergenceChecker convergeChecker;
private final HttpProxy httpProxy;
private final Clock clock;
private final DeployLogger logger = new SilentDeployLogger();
private final ConfigserverConfig configserverConfig;
private final Environment environment;
private final FileDistributionStatus fileDistributionStatus;
@Inject
public ApplicationRepository(TenantRepository tenantRepository,
HostProvisionerProvider hostProvisionerProvider,
ConfigConvergenceChecker configConvergenceChecker,
HttpProxy httpProxy,
ConfigserverConfig configserverConfig) {
this(tenantRepository, hostProvisionerProvider.getHostProvisioner(),
configConvergenceChecker, httpProxy, configserverConfig, Clock.systemUTC(), new FileDistributionStatus());
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock) {
this(tenantRepository, hostProvisioner, clock, new ConfigserverConfig(new ConfigserverConfig.Builder()));
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock,
ConfigserverConfig configserverConfig) {
this(tenantRepository, Optional.of(hostProvisioner), new ConfigConvergenceChecker(), new HttpProxy(new SimpleHttpFetcher()),
configserverConfig, clock, new FileDistributionStatus());
}
private ApplicationRepository(TenantRepository tenantRepository,
Optional<Provisioner> hostProvisioner,
ConfigConvergenceChecker configConvergenceChecker,
HttpProxy httpProxy,
ConfigserverConfig configserverConfig,
Clock clock,
FileDistributionStatus fileDistributionStatus) {
this.tenantRepository = tenantRepository;
this.hostProvisioner = hostProvisioner;
this.convergeChecker = configConvergenceChecker;
this.httpProxy = httpProxy;
this.clock = clock;
this.configserverConfig = configserverConfig;
this.environment = Environment.from(configserverConfig.environment());
this.fileDistributionStatus = fileDistributionStatus;
}
public PrepareResult prepare(Tenant tenant, long sessionId, PrepareParams prepareParams, Instant now) {
validateThatLocalSessionIsNotActive(tenant, sessionId);
LocalSession session = getLocalSession(tenant, sessionId);
ApplicationId applicationId = prepareParams.getApplicationId();
Optional<ApplicationSet> currentActiveApplicationSet = getCurrentActiveApplicationSet(tenant, applicationId);
Slime deployLog = createDeployLog();
DeployLogger logger = new DeployHandlerLogger(deployLog.get().setArray("log"), prepareParams.isVerbose(), applicationId);
ConfigChangeActions actions = session.prepare(logger, prepareParams, currentActiveApplicationSet, tenant.getPath(), now);
logConfigChangeActions(actions, logger);
log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Session " + sessionId + " prepared successfully. ");
return new PrepareResult(sessionId, actions, deployLog);
}
public PrepareResult prepareAndActivate(Tenant tenant, long sessionId, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
PrepareResult result = prepare(tenant, sessionId, prepareParams, now);
activate(tenant, sessionId, prepareParams.getTimeoutBudget(), ignoreLockFailure, ignoreSessionStaleFailure);
return result;
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams) {
return deploy(in, prepareParams, false, false, clock.instant());
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
File tempDir = Files.createTempDir();
PrepareResult prepareResult;
try {
prepareResult = deploy(decompressApplication(in, tempDir), prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
} finally {
cleanupTempDirectory(tempDir);
}
return prepareResult;
}
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams) {
return deploy(applicationPackage, prepareParams, false, false, Instant.now());
}
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
ApplicationId applicationId = prepareParams.getApplicationId();
long sessionId = createSession(applicationId, prepareParams.getTimeoutBudget(), applicationPackage);
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
return prepareAndActivate(tenant, sessionId, prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application) {
return deployFromLocalActive(application, Duration.ofSeconds(configserverConfig.zookeeper().barrierTimeout()).plus(Duration.ofSeconds(5)));
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @param timeout the timeout to use for each individual deployment operation
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application, Duration timeout) {
Tenant tenant = tenantRepository.getTenant(application.tenant());
if (tenant == null) return Optional.empty();
LocalSession activeSession = getActiveSession(tenant, application);
if (activeSession == null) return Optional.empty();
TimeoutBudget timeoutBudget = new TimeoutBudget(clock, timeout);
LocalSession newSession = tenant.getSessionFactory().createSessionFromExisting(activeSession, logger, true, timeoutBudget);
tenant.getLocalSessionRepo().addSession(newSession);
Version version = decideVersion(application, environment, newSession.getVespaVersion());
return Optional.of(Deployment.unprepared(newSession, this, hostProvisioner, tenant, timeout, clock,
false /* don't validate as this is already deployed */, version));
}
public ApplicationId activate(Tenant tenant,
long sessionId,
TimeoutBudget timeoutBudget,
boolean ignoreLockFailure,
boolean ignoreSessionStaleFailure) {
LocalSession localSession = getLocalSession(tenant, sessionId);
Deployment deployment = deployFromPreparedSession(localSession, tenant, timeoutBudget.timeLeft());
deployment.setIgnoreLockFailure(ignoreLockFailure);
deployment.setIgnoreSessionStaleFailure(ignoreSessionStaleFailure);
deployment.activate();
return localSession.getApplicationId();
}
private Deployment deployFromPreparedSession(LocalSession session, Tenant tenant, Duration timeout) {
return Deployment.prepared(session, this, hostProvisioner, tenant, timeout, clock);
}
/**
* Removes a previously deployed application
*
* @return true if the application was found and removed, false if it was not present
* @throws RuntimeException if the remove transaction fails. This method is exception safe.
*/
public boolean remove(ApplicationId applicationId) {
Optional<Tenant> owner = Optional.ofNullable(tenantRepository.getTenant(applicationId.tenant()));
if ( ! owner.isPresent()) return false;
TenantApplications tenantApplications = owner.get().getApplicationRepo();
if ( ! tenantApplications.listApplications().contains(applicationId)) return false;
long sessionId = tenantApplications.getSessionIdForApplication(applicationId);
LocalSessionRepo localSessionRepo = owner.get().getLocalSessionRepo();
LocalSession session = localSessionRepo.getSession(sessionId);
if (session == null) return false;
NestedTransaction transaction = new NestedTransaction();
localSessionRepo.removeSession(session.getSessionId(), transaction);
session.delete(transaction);
transaction.add(new Rotations(owner.get().getCurator(), owner.get().getPath()).delete(applicationId));
transaction.add(tenantApplications.deleteApplication(applicationId));
hostProvisioner.ifPresent(provisioner -> provisioner.remove(transaction, applicationId));
transaction.onCommitted(() -> log.log(LogLevel.INFO, "Deleted " + applicationId));
transaction.commit();
return true;
}
public HttpResponse clusterControllerStatusPage(ApplicationId applicationId, String hostName, String pathSuffix) {
String relativePath = "clustercontroller-status/" + pathSuffix;
return httpProxy.get(getApplication(applicationId), hostName, "container-clustercontroller", relativePath);
}
public Long getApplicationGeneration(ApplicationId applicationId) {
return getApplication(applicationId).getApplicationGeneration();
}
public void restart(ApplicationId applicationId, HostFilter hostFilter) {
hostProvisioner.ifPresent(provisioner -> provisioner.restart(applicationId, hostFilter));
}
public HttpResponse filedistributionStatus(ApplicationId applicationId, Duration timeout) {
return fileDistributionStatus.status(getApplication(applicationId), timeout);
}
public Set<String> deleteUnusedFiledistributionReferences(File fileReferencesPath, boolean deleteFromDisk) {
if (!fileReferencesPath.isDirectory()) throw new RuntimeException(fileReferencesPath + " is not a directory");
Set<String> fileReferencesInUse = new HashSet<>();
Set<ApplicationId> applicationIds = listApplications();
applicationIds.forEach(applicationId -> fileReferencesInUse.addAll(getApplication(applicationId).getModel().fileReferences()
.stream()
.map(FileReference::value)
.collect(Collectors.toSet())));
log.log(LogLevel.DEBUG, "File references in use : " + fileReferencesInUse);
Set<String> fileReferencesOnDisk = new HashSet<>();
File[] filesOnDisk = fileReferencesPath.listFiles();
if (filesOnDisk != null)
fileReferencesOnDisk.addAll(Arrays.stream(filesOnDisk).map(File::getName).collect(Collectors.toSet()));
log.log(LogLevel.DEBUG, "File references on disk (in " + fileReferencesPath + "): " + fileReferencesOnDisk);
Instant instant = Instant.now().minus(Duration.ofDays(14));
Set<String> fileReferencesToDelete = fileReferencesOnDisk
.stream()
.filter(fileReference -> ! fileReferencesInUse.contains(fileReference))
.filter(fileReference -> isFileLastModifiedBefore(new File(fileReferencesPath, fileReference), instant))
.collect(Collectors.toSet());
if (deleteFromDisk && fileReferencesToDelete.size() > 0) {
log.log(LogLevel.INFO, "Will delete file references not in use: " + fileReferencesToDelete);
fileReferencesToDelete.forEach(fileReference -> {
File file = new File(fileReferencesPath, fileReference);
if ( ! IOUtils.recursiveDeleteDir(file))
log.log(LogLevel.WARNING, "Could not delete " + file.getAbsolutePath());
});
}
return fileReferencesToDelete;
}
public ApplicationFile getApplicationFileFromSession(TenantName tenantName, long sessionId, String path, LocalSession.Mode mode) {
Tenant tenant = tenantRepository.getTenant(tenantName);
return getLocalSession(tenant, sessionId).getApplicationFile(Path.fromString(path), mode);
}
private Application getApplication(ApplicationId applicationId) {
try {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
long sessionId = getSessionIdForApplication(tenant, applicationId);
RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId, 0);
return session.ensureApplicationLoaded().getForVersionOrLatest(Optional.empty(), clock.instant());
} catch (Exception e) {
log.log(LogLevel.WARNING, "Failed getting application for '" + applicationId + "'", e);
throw e;
}
}
private Set<ApplicationId> listApplications() {
return tenantRepository.getAllTenants().stream()
.flatMap(tenant -> tenant.getApplicationRepo().listApplications().stream())
.collect(Collectors.toSet());
}
private boolean isFileLastModifiedBefore(File fileReference, Instant instant) {
BasicFileAttributes fileAttributes;
try {
fileAttributes = readAttributes(fileReference.toPath(), BasicFileAttributes.class);
return fileAttributes.lastModifiedTime().toInstant().isBefore(instant);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public HttpResponse checkServiceForConfigConvergence(ApplicationId applicationId, String hostAndPort, URI uri) {
return convergeChecker.checkService(getApplication(applicationId), hostAndPort, uri);
}
public HttpResponse servicesToCheckForConfigConvergence(ApplicationId applicationId, URI uri) {
return convergeChecker.servicesToCheck(getApplication(applicationId), uri);
}
/**
* Gets the active Session for the given application id.
*
* @return the active session, or null if there is no active session for the given application id.
*/
public LocalSession getActiveSession(ApplicationId applicationId) {
return getActiveSession(tenantRepository.getTenant(applicationId.tenant()), applicationId);
}
public long getSessionIdForApplication(Tenant tenant, ApplicationId applicationId) {
return tenant.getApplicationRepo().getSessionIdForApplication(applicationId);
}
public void validateThatRemoteSessionIsNotActive(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if (Session.Status.ACTIVATE.equals(session.getStatus())) {
throw new IllegalStateException("Session is active: " + sessionId);
}
}
public void validateThatRemoteSessionIsPrepared(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if ( ! Session.Status.PREPARE.equals(session.getStatus()))
throw new IllegalStateException("Session not prepared: " + sessionId);
}
public long createSessionFromExisting(ApplicationId applicationId,
DeployLogger logger,
boolean internalRedeploy,
TimeoutBudget timeoutBudget) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession fromSession = getExistingSession(tenant, applicationId);
LocalSession session = sessionFactory.createSessionFromExisting(fromSession, logger, internalRedeploy, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, InputStream in, String contentType) {
File tempDir = Files.createTempDir();
long sessionId;
try {
sessionId = createSession(applicationId, timeoutBudget, decompressApplication(in, contentType, tempDir));
} finally {
cleanupTempDirectory(tempDir);
}
return sessionId;
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, File applicationDirectory) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession session = sessionFactory.createSession(applicationDirectory, applicationId, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
public Set<TenantName> deleteUnusedTenants(Duration ttlForUnusedTenant, Instant now) {
return tenantRepository.getAllTenantNames().stream()
.filter(tenantName -> activeApplications(tenantName).isEmpty())
.filter(tenantName -> !tenantName.equals(TenantName.defaultName()))
.filter(tenantName -> !tenantName.equals(TenantRepository.HOSTED_VESPA_TENANT))
.filter(tenantName -> tenantRepository.getTenant(tenantName).getCreatedTime().isBefore(now.minus(ttlForUnusedTenant)))
.peek(tenantRepository::deleteTenant)
.collect(Collectors.toSet());
}
public void deleteTenant(TenantName tenantName) {
List<ApplicationId> activeApplications = activeApplications(tenantName);
if (activeApplications.isEmpty())
tenantRepository.deleteTenant(tenantName);
else
throw new IllegalArgumentException("Cannot delete tenant '" + tenantName + "', it has active applications: " + activeApplications);
}
private List<ApplicationId> activeApplications(TenantName tenantName) {
return tenantRepository.getTenant(tenantName).getApplicationRepo().listApplications();
}
public Tenant verifyTenantAndApplication(ApplicationId applicationId) {
TenantName tenantName = applicationId.tenant();
if (!tenantRepository.checkThatTenantExists(tenantName)) {
throw new IllegalArgumentException("Tenant " + tenantName + " was not found.");
}
Tenant tenant = tenantRepository.getTenant(tenantName);
List<ApplicationId> applicationIds = listApplicationIds(tenant);
if (!applicationIds.contains(applicationId)) {
throw new IllegalArgumentException("No such application id: " + applicationId);
}
return tenant;
}
public ApplicationMetaData getMetadataFromSession(Tenant tenant, long sessionId) {
return getLocalSession(tenant, sessionId).getMetaData();
}
ConfigserverConfig configserverConfig() {
return configserverConfig;
}
private void validateThatLocalSessionIsNotActive(Tenant tenant, long sessionId) {
LocalSession session = getLocalSession(tenant, sessionId);
if (Session.Status.ACTIVATE.equals(session.getStatus())) {
throw new IllegalStateException("Session is active: " + sessionId);
}
}
private LocalSession getLocalSession(Tenant tenant, long sessionId) {
LocalSession session = tenant.getLocalSessionRepo().getSession(sessionId);
if (session == null) throw new NotFoundException("Session " + sessionId + " was not found");
return session;
}
private RemoteSession getRemoteSession(Tenant tenant, long sessionId) {
RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId);
if (session == null) throw new NotFoundException("Session " + sessionId + " was not found");
return session;
}
private Optional<ApplicationSet> getCurrentActiveApplicationSet(Tenant tenant, ApplicationId appId) {
Optional<ApplicationSet> currentActiveApplicationSet = Optional.empty();
TenantApplications applicationRepo = tenant.getApplicationRepo();
try {
long currentActiveSessionId = applicationRepo.getSessionIdForApplication(appId);
RemoteSession currentActiveSession = getRemoteSession(tenant, currentActiveSessionId);
if (currentActiveSession != null) {
currentActiveApplicationSet = Optional.ofNullable(currentActiveSession.ensureApplicationLoaded());
}
} catch (IllegalArgumentException e) {
}
return currentActiveApplicationSet;
}
private File decompressApplication(InputStream in, String contentType, File tempDir) {
try (CompressedApplicationInputStream application =
CompressedApplicationInputStream.createFromCompressedStream(in, contentType)) {
return decompressApplication(application, tempDir);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to decompress data in body", e);
}
}
private File decompressApplication(CompressedApplicationInputStream in, File tempDir) {
try {
return in.decompress(tempDir);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to decompress stream", e);
}
}
private List<ApplicationId> listApplicationIds(Tenant tenant) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
return applicationRepo.listApplications();
}
private void cleanupTempDirectory(File tempDir) {
logger.log(LogLevel.DEBUG, "Deleting tmp dir '" + tempDir + "'");
if (!IOUtils.recursiveDeleteDir(tempDir)) {
logger.log(LogLevel.WARNING, "Not able to delete tmp dir '" + tempDir + "'");
}
}
private Set<ApplicationId> redeployApplications(Set<ApplicationId> applicationIds) throws InterruptedException {
ExecutorService executor = Executors.newFixedThreadPool(configserverConfig.numParallelTenantLoaders(),
new DaemonThreadFactory("redeploy apps"));
Map<ApplicationId, Future<?>> futures = new HashMap<>();
Set<ApplicationId> failedDeployments = new HashSet<>();
applicationIds.forEach(appId -> deployFromLocalActive(appId).ifPresent(
deployment -> futures.put(appId, executor.submit(deployment::activate))));
for (Map.Entry<ApplicationId, Future<?>> f : futures.entrySet()) {
try {
f.getValue().get();
} catch (ExecutionException e) {
ApplicationId app = f.getKey();
log.log(LogLevel.WARNING, "Redeploying " + app + " failed, will retry");
failedDeployments.add(app);
}
}
executor.shutdown();
executor.awaitTermination(365, TimeUnit.DAYS);
return failedDeployments;
}
private LocalSession getExistingSession(Tenant tenant, ApplicationId applicationId) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
return getLocalSession(tenant, applicationRepo.getSessionIdForApplication(applicationId));
}
private LocalSession getActiveSession(Tenant tenant, ApplicationId applicationId) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
if (applicationRepo.listApplications().contains(applicationId)) {
return tenant.getLocalSessionRepo().getSession(applicationRepo.getSessionIdForApplication(applicationId));
}
return null;
}
private static void logConfigChangeActions(ConfigChangeActions actions, DeployLogger logger) {
RestartActions restartActions = actions.getRestartActions();
if ( ! restartActions.isEmpty()) {
logger.log(Level.WARNING, "Change(s) between active and new application that require restart:\n" +
restartActions.format());
}
RefeedActions refeedActions = actions.getRefeedActions();
if ( ! refeedActions.isEmpty()) {
boolean allAllowed = refeedActions.getEntries().stream().allMatch(RefeedActions.Entry::allowed);
logger.log(allAllowed ? Level.INFO : Level.WARNING,
"Change(s) between active and new application that may require re-feed:\n" +
refeedActions.format());
}
}
/** Returns version to use when deploying application in given environment */
static Version decideVersion(ApplicationId application, Environment environment, Version targetVersion) {
if (environment.isManuallyDeployed() &&
!"hosted-vespa".equals(application.tenant().value())) {
return Vtag.currentVersion;
}
return targetVersion;
}
public Slime createDeployLog() {
Slime deployLog = new Slime();
deployLog.setObject();
return deployLog;
}
} | class ApplicationRepository implements com.yahoo.config.provision.Deployer {
private static final Logger log = Logger.getLogger(ApplicationRepository.class.getName());
private final TenantRepository tenantRepository;
private final Optional<Provisioner> hostProvisioner;
private final ConfigConvergenceChecker convergeChecker;
private final HttpProxy httpProxy;
private final Clock clock;
private final DeployLogger logger = new SilentDeployLogger();
private final ConfigserverConfig configserverConfig;
private final Environment environment;
private final FileDistributionStatus fileDistributionStatus;
@Inject
public ApplicationRepository(TenantRepository tenantRepository,
HostProvisionerProvider hostProvisionerProvider,
ConfigConvergenceChecker configConvergenceChecker,
HttpProxy httpProxy,
ConfigserverConfig configserverConfig) {
this(tenantRepository, hostProvisionerProvider.getHostProvisioner(),
configConvergenceChecker, httpProxy, configserverConfig, Clock.systemUTC(), new FileDistributionStatus());
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock) {
this(tenantRepository, hostProvisioner, clock, new ConfigserverConfig(new ConfigserverConfig.Builder()));
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock,
ConfigserverConfig configserverConfig) {
this(tenantRepository, Optional.of(hostProvisioner), new ConfigConvergenceChecker(), new HttpProxy(new SimpleHttpFetcher()),
configserverConfig, clock, new FileDistributionStatus());
}
private ApplicationRepository(TenantRepository tenantRepository,
Optional<Provisioner> hostProvisioner,
ConfigConvergenceChecker configConvergenceChecker,
HttpProxy httpProxy,
ConfigserverConfig configserverConfig,
Clock clock,
FileDistributionStatus fileDistributionStatus) {
this.tenantRepository = tenantRepository;
this.hostProvisioner = hostProvisioner;
this.convergeChecker = configConvergenceChecker;
this.httpProxy = httpProxy;
this.clock = clock;
this.configserverConfig = configserverConfig;
this.environment = Environment.from(configserverConfig.environment());
this.fileDistributionStatus = fileDistributionStatus;
}
public PrepareResult prepare(Tenant tenant, long sessionId, PrepareParams prepareParams, Instant now) {
validateThatLocalSessionIsNotActive(tenant, sessionId);
LocalSession session = getLocalSession(tenant, sessionId);
ApplicationId applicationId = prepareParams.getApplicationId();
Optional<ApplicationSet> currentActiveApplicationSet = getCurrentActiveApplicationSet(tenant, applicationId);
Slime deployLog = createDeployLog();
DeployLogger logger = new DeployHandlerLogger(deployLog.get().setArray("log"), prepareParams.isVerbose(), applicationId);
ConfigChangeActions actions = session.prepare(logger, prepareParams, currentActiveApplicationSet, tenant.getPath(), now);
logConfigChangeActions(actions, logger);
log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Session " + sessionId + " prepared successfully. ");
return new PrepareResult(sessionId, actions, deployLog);
}
public PrepareResult prepareAndActivate(Tenant tenant, long sessionId, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
PrepareResult result = prepare(tenant, sessionId, prepareParams, now);
activate(tenant, sessionId, prepareParams.getTimeoutBudget(), ignoreLockFailure, ignoreSessionStaleFailure);
return result;
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams) {
return deploy(in, prepareParams, false, false, clock.instant());
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
File tempDir = Files.createTempDir();
PrepareResult prepareResult;
try {
prepareResult = deploy(decompressApplication(in, tempDir), prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
} finally {
cleanupTempDirectory(tempDir);
}
return prepareResult;
}
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams) {
return deploy(applicationPackage, prepareParams, false, false, Instant.now());
}
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
ApplicationId applicationId = prepareParams.getApplicationId();
long sessionId = createSession(applicationId, prepareParams.getTimeoutBudget(), applicationPackage);
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
return prepareAndActivate(tenant, sessionId, prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application) {
return deployFromLocalActive(application, Duration.ofSeconds(configserverConfig.zookeeper().barrierTimeout()).plus(Duration.ofSeconds(5)));
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @param timeout the timeout to use for each individual deployment operation
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application, Duration timeout) {
Tenant tenant = tenantRepository.getTenant(application.tenant());
if (tenant == null) return Optional.empty();
LocalSession activeSession = getActiveSession(tenant, application);
if (activeSession == null) return Optional.empty();
TimeoutBudget timeoutBudget = new TimeoutBudget(clock, timeout);
LocalSession newSession = tenant.getSessionFactory().createSessionFromExisting(activeSession, logger, true, timeoutBudget);
tenant.getLocalSessionRepo().addSession(newSession);
Version version = decideVersion(application, environment, newSession.getVespaVersion());
return Optional.of(Deployment.unprepared(newSession, this, hostProvisioner, tenant, timeout, clock,
false /* don't validate as this is already deployed */, version));
}
public ApplicationId activate(Tenant tenant,
long sessionId,
TimeoutBudget timeoutBudget,
boolean ignoreLockFailure,
boolean ignoreSessionStaleFailure) {
LocalSession localSession = getLocalSession(tenant, sessionId);
Deployment deployment = deployFromPreparedSession(localSession, tenant, timeoutBudget.timeLeft());
deployment.setIgnoreLockFailure(ignoreLockFailure);
deployment.setIgnoreSessionStaleFailure(ignoreSessionStaleFailure);
deployment.activate();
return localSession.getApplicationId();
}
private Deployment deployFromPreparedSession(LocalSession session, Tenant tenant, Duration timeout) {
return Deployment.prepared(session, this, hostProvisioner, tenant, timeout, clock);
}
/**
 * Removes a previously deployed application.
 *
 * @return true if the application was found and removed, false if it was not present
 * @throws RuntimeException if the remove transaction fails. This method is exception safe.
 */
public boolean remove(ApplicationId applicationId) {
    // Plain null check instead of Optional.ofNullable + repeated get(): Optional is for return
    // types, and the repeated owner.get() calls obscured the logic
    Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
    if (tenant == null) return false;

    TenantApplications tenantApplications = tenant.getApplicationRepo();
    if ( ! tenantApplications.listApplications().contains(applicationId)) return false;

    long sessionId = tenantApplications.getSessionIdForApplication(applicationId);
    LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
    LocalSession session = localSessionRepo.getSession(sessionId);
    if (session == null) return false;

    // All removals participate in one transaction so the application is deleted atomically
    NestedTransaction transaction = new NestedTransaction();
    localSessionRepo.removeSession(session.getSessionId(), transaction);
    session.delete(transaction);
    transaction.add(new Rotations(tenant.getCurator(), tenant.getPath()).delete(applicationId));
    transaction.add(tenantApplications.deleteApplication(applicationId));
    hostProvisioner.ifPresent(provisioner -> provisioner.remove(transaction, applicationId));
    transaction.onCommitted(() -> log.log(LogLevel.INFO, "Deleted " + applicationId));
    transaction.commit();
    return true;
}
/** Proxies a status page request to the container-clustercontroller service on the given host. */
public HttpResponse clusterControllerStatusPage(ApplicationId applicationId, String hostName, String pathSuffix) {
    return httpProxy.get(getApplication(applicationId), hostName, "container-clustercontroller",
                         "clustercontroller-status/" + pathSuffix);
}
/** Returns the config generation of the given active application. */
public Long getApplicationGeneration(ApplicationId applicationId) {
    return getApplication(applicationId).getApplicationGeneration();
}
/** Asks the host provisioner (if any) to restart the services of the application matched by the filter. */
public void restart(ApplicationId applicationId, HostFilter hostFilter) {
    hostProvisioner.ifPresent(provisioner -> provisioner.restart(applicationId, hostFilter));
}
/** Returns file distribution status for the given application. */
public HttpResponse filedistributionStatus(ApplicationId applicationId, Duration timeout) {
    return fileDistributionStatus.status(getApplication(applicationId), timeout);
}
/**
 * Finds file references on disk that are not referenced by any deployed application and have not
 * been modified for 14 days, and optionally deletes them.
 *
 * @param fileReferencesPath directory containing the file references
 * @param deleteFromDisk     if true, delete the unused references; if false, only report them
 * @return the set of unused file reference names (deleted, if requested)
 */
public Set<String> deleteUnusedFiledistributionReferences(File fileReferencesPath, boolean deleteFromDisk) {
    if ( ! fileReferencesPath.isDirectory()) throw new RuntimeException(fileReferencesPath + " is not a directory");

    // flatMap instead of forEach + addAll: builds the in-use set without side effects
    Set<String> fileReferencesInUse = listApplications().stream()
            .flatMap(applicationId -> getApplication(applicationId).getModel().fileReferences().stream())
            .map(FileReference::value)
            .collect(Collectors.toSet());
    log.log(LogLevel.DEBUG, "File references in use : " + fileReferencesInUse);

    Set<String> fileReferencesOnDisk = new HashSet<>();
    File[] filesOnDisk = fileReferencesPath.listFiles();
    if (filesOnDisk != null)
        fileReferencesOnDisk.addAll(Arrays.stream(filesOnDisk).map(File::getName).collect(Collectors.toSet()));
    log.log(LogLevel.DEBUG, "File references on disk (in " + fileReferencesPath + "): " + fileReferencesOnDisk);

    // Only consider references untouched for 14 days — presumably to avoid deleting references
    // belonging to in-flight deployments; confirm before changing this window
    Instant cutoff = Instant.now().minus(Duration.ofDays(14));
    Set<String> fileReferencesToDelete = fileReferencesOnDisk
            .stream()
            .filter(fileReference -> ! fileReferencesInUse.contains(fileReference))
            .filter(fileReference -> isFileLastModifiedBefore(new File(fileReferencesPath, fileReference), cutoff))
            .collect(Collectors.toSet());
    if (deleteFromDisk && ! fileReferencesToDelete.isEmpty()) {
        log.log(LogLevel.INFO, "Will delete file references not in use: " + fileReferencesToDelete);
        fileReferencesToDelete.forEach(fileReference -> {
            File file = new File(fileReferencesPath, fileReference);
            if ( ! IOUtils.recursiveDeleteDir(file))
                log.log(LogLevel.WARNING, "Could not delete " + file.getAbsolutePath());
        });
    }
    return fileReferencesToDelete;
}
/** Returns the application file at the given path within the session's application package. */
public ApplicationFile getApplicationFileFromSession(TenantName tenantName, long sessionId, String path, LocalSession.Mode mode) {
    LocalSession session = getLocalSession(tenantRepository.getTenant(tenantName), sessionId);
    return session.getApplicationFile(Path.fromString(path), mode);
}
/** Looks up the active application for the given id, logging a warning and rethrowing on failure. */
private Application getApplication(ApplicationId applicationId) {
    try {
        Tenant owner = tenantRepository.getTenant(applicationId.tenant());
        long activeSessionId = getSessionIdForApplication(owner, applicationId);
        RemoteSession activeSession = owner.getRemoteSessionRepo().getSession(activeSessionId, 0);
        return activeSession.ensureApplicationLoaded().getForVersionOrLatest(Optional.empty(), clock.instant());
    } catch (Exception e) {
        log.log(LogLevel.WARNING, "Failed getting application for '" + applicationId + "'", e);
        throw e;
    }
}
/** Returns the ids of all applications across all tenants. */
private Set<ApplicationId> listApplications() {
    Set<ApplicationId> applicationIds = new HashSet<>();
    for (Tenant tenant : tenantRepository.getAllTenants())
        applicationIds.addAll(tenant.getApplicationRepo().listApplications());
    return applicationIds;
}
/** Returns whether the given file was last modified before the given instant. */
private boolean isFileLastModifiedBefore(File fileReference, Instant instant) {
    try {
        BasicFileAttributes attributes = readAttributes(fileReference.toPath(), BasicFileAttributes.class);
        return attributes.lastModifiedTime().toInstant().isBefore(instant);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
/** Checks config convergence for a single service of the given application. */
public HttpResponse checkServiceForConfigConvergence(ApplicationId applicationId, String hostAndPort, URI uri) {
    return convergeChecker.checkService(getApplication(applicationId), hostAndPort, uri);
}
/** Lists the services of the given application that should be checked for config convergence. */
public HttpResponse servicesToCheckForConfigConvergence(ApplicationId applicationId, URI uri) {
    return convergeChecker.servicesToCheck(getApplication(applicationId), uri);
}
/**
 * Gets the active Session for the given application id.
 *
 * @return the active session, or null if there is no active session for the given application id
 *         on this node
 */
public LocalSession getActiveSession(ApplicationId applicationId) {
    return getActiveSession(tenantRepository.getTenant(applicationId.tenant()), applicationId);
}
/** Returns the id of the session currently associated with the given application. */
public long getSessionIdForApplication(Tenant tenant, ApplicationId applicationId) {
    return tenant.getApplicationRepo().getSessionIdForApplication(applicationId);
}
/** Throws IllegalStateException if the remote session with the given id is active. */
public void validateThatRemoteSessionIsNotActive(Tenant tenant, long sessionId) {
    RemoteSession session = getRemoteSession(tenant, sessionId);
    if (Session.Status.ACTIVATE.equals(session.getStatus()))
        throw new IllegalStateException("Session is active: " + sessionId);
}
/** Throws IllegalStateException unless the remote session with the given id is prepared. */
public void validateThatRemoteSessionIsPrepared(Tenant tenant, long sessionId) {
    RemoteSession session = getRemoteSession(tenant, sessionId);
    if ( ! Session.Status.PREPARE.equals(session.getStatus())) {
        throw new IllegalStateException("Session not prepared: " + sessionId);
    }
}
/**
 * Creates a new session from the session currently associated with the given application.
 *
 * @return the id of the new session
 */
public long createSessionFromExisting(ApplicationId applicationId,
                                      DeployLogger logger,
                                      boolean internalRedeploy,
                                      TimeoutBudget timeoutBudget) {
    Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
    LocalSession existing = getExistingSession(tenant, applicationId);
    LocalSession newSession = tenant.getSessionFactory()
                                    .createSessionFromExisting(existing, logger, internalRedeploy, timeoutBudget);
    tenant.getLocalSessionRepo().addSession(newSession);
    return newSession.getSessionId();
}
/**
 * Creates a new session from a compressed application package stream.
 *
 * @return the id of the new session
 */
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, InputStream in, String contentType) {
    File tempDir = Files.createTempDir();
    try {
        return createSession(applicationId, timeoutBudget, decompressApplication(in, contentType, tempDir));
    } finally {
        // Always remove the temporary decompression directory, also on failure
        cleanupTempDirectory(tempDir);
    }
}
/**
 * Creates a new session from an application package directory.
 *
 * @return the id of the new session
 */
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, File applicationDirectory) {
    Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
    LocalSession session = tenant.getSessionFactory().createSession(applicationDirectory, applicationId, timeoutBudget);
    tenant.getLocalSessionRepo().addSession(session);
    return session.getSessionId();
}
/**
 * Deletes tenants that have no active applications, are older than the given TTL, and are not
 * the built-in default or hosted-vespa tenants.
 *
 * @return the tenants that were deleted
 */
public Set<TenantName> deleteUnusedTenants(Duration ttlForUnusedTenant, Instant now) {
    // Collect first, then delete: peek() in the original performed deletion as a stream
    // side effect, which is a misuse of peek (debugging aid per its javadoc)
    Set<TenantName> tenantsToDelete = tenantRepository.getAllTenantNames().stream()
            .filter(tenantName -> activeApplications(tenantName).isEmpty())
            .filter(tenantName -> ! tenantName.equals(TenantName.defaultName()))
            .filter(tenantName -> ! tenantName.equals(TenantRepository.HOSTED_VESPA_TENANT))
            .filter(tenantName -> tenantRepository.getTenant(tenantName).getCreatedTime().isBefore(now.minus(ttlForUnusedTenant)))
            .collect(Collectors.toSet());
    tenantsToDelete.forEach(tenantRepository::deleteTenant);
    return tenantsToDelete;
}
/** Deletes the given tenant, failing if it still has active applications. */
public void deleteTenant(TenantName tenantName) {
    List<ApplicationId> activeApplications = activeApplications(tenantName);
    if ( ! activeApplications.isEmpty())
        throw new IllegalArgumentException("Cannot delete tenant '" + tenantName + "', it has active applications: " + activeApplications);
    tenantRepository.deleteTenant(tenantName);
}
/** Returns the ids of the applications currently registered for the given tenant. */
private List<ApplicationId> activeApplications(TenantName tenantName) {
    return tenantRepository.getTenant(tenantName).getApplicationRepo().listApplications();
}
/**
 * Verifies that both the tenant and the application of the given id exist.
 *
 * @return the tenant owning the application
 * @throws IllegalArgumentException if the tenant or the application does not exist
 */
public Tenant verifyTenantAndApplication(ApplicationId applicationId) {
    TenantName tenantName = applicationId.tenant();
    if ( ! tenantRepository.checkThatTenantExists(tenantName))
        throw new IllegalArgumentException("Tenant " + tenantName + " was not found.");
    Tenant tenant = tenantRepository.getTenant(tenantName);
    if ( ! listApplicationIds(tenant).contains(applicationId))
        throw new IllegalArgumentException("No such application id: " + applicationId);
    return tenant;
}
/** Returns the application metadata stored with the given session. */
public ApplicationMetaData getMetadataFromSession(Tenant tenant, long sessionId) {
    return getLocalSession(tenant, sessionId).getMetaData();
}
/** Returns the configserver config this repository was created with. Package-private, for internal use. */
ConfigserverConfig configserverConfig() {
    return configserverConfig;
}
/** Throws IllegalStateException if the local session with the given id is active. */
private void validateThatLocalSessionIsNotActive(Tenant tenant, long sessionId) {
    LocalSession session = getLocalSession(tenant, sessionId);
    if (Session.Status.ACTIVATE.equals(session.getStatus()))
        throw new IllegalStateException("Session is active: " + sessionId);
}
/** Returns the local session with the given id, throwing NotFoundException if absent. */
private LocalSession getLocalSession(Tenant tenant, long sessionId) {
    LocalSession session = tenant.getLocalSessionRepo().getSession(sessionId);
    if (session == null)
        throw new NotFoundException("Session " + sessionId + " was not found");
    return session;
}
/** Returns the remote session with the given id, throwing NotFoundException if absent. */
private RemoteSession getRemoteSession(Tenant tenant, long sessionId) {
    RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId);
    if (session == null)
        throw new NotFoundException("Session " + sessionId + " was not found");
    return session;
}
/**
 * Returns the currently active application set for the given application, or empty if the
 * application has no active session.
 */
private Optional<ApplicationSet> getCurrentActiveApplicationSet(Tenant tenant, ApplicationId appId) {
    Optional<ApplicationSet> currentActiveApplicationSet = Optional.empty();
    TenantApplications applicationRepo = tenant.getApplicationRepo();
    try {
        long currentActiveSessionId = applicationRepo.getSessionIdForApplication(appId);
        RemoteSession currentActiveSession = getRemoteSession(tenant, currentActiveSessionId);
        if (currentActiveSession != null) {
            currentActiveApplicationSet = Optional.ofNullable(currentActiveSession.ensureApplicationLoaded());
        }
    } catch (IllegalArgumentException e) {
        // Deliberately ignored: presumably thrown when the application has no active session,
        // in which case empty is the correct result — NOTE(review): confirm no other cause
        // of IllegalArgumentException can be silently masked here
    }
    return currentActiveApplicationSet;
}
/** Decompresses an application package stream of the given content type into tempDir. */
private File decompressApplication(InputStream in, String contentType, File tempDir) {
    try (CompressedApplicationInputStream application =
                 CompressedApplicationInputStream.createFromCompressedStream(in, contentType)) {
        return decompressApplication(application, tempDir);
    } catch (IOException e) {
        throw new IllegalArgumentException("Unable to decompress data in body", e);
    }
}
/** Decompresses the given application package stream into tempDir. */
private File decompressApplication(CompressedApplicationInputStream in, File tempDir) {
    try {
        return in.decompress(tempDir);
    } catch (IOException e) {
        throw new IllegalArgumentException("Unable to decompress stream", e);
    }
}
/** Returns the ids of all applications belonging to the given tenant. */
private List<ApplicationId> listApplicationIds(Tenant tenant) {
    return tenant.getApplicationRepo().listApplications();
}
/** Deletes the given temporary directory, logging a warning if deletion fails. */
private void cleanupTempDirectory(File tempDir) {
    logger.log(LogLevel.DEBUG, "Deleting tmp dir '" + tempDir + "'");
    if ( ! IOUtils.recursiveDeleteDir(tempDir))
        logger.log(LogLevel.WARNING, "Not able to delete tmp dir '" + tempDir + "'");
}
/**
 * Redeploys (activates a new session for) each of the given applications in parallel.
 *
 * @return the set of applications whose redeployment failed; these can be retried
 * @throws InterruptedException if interrupted while waiting for deployments to finish
 */
private Set<ApplicationId> redeployApplications(Set<ApplicationId> applicationIds) throws InterruptedException {
    ExecutorService executor = Executors.newFixedThreadPool(configserverConfig.numParallelTenantLoaders(),
                                                            new DaemonThreadFactory("redeploy apps"));
    // Track the future per application so failures can be reported per application
    Map<ApplicationId, Future<?>> futures = new HashMap<>();
    Set<ApplicationId> failedDeployments = new HashSet<>();
    applicationIds.forEach(appId -> deployFromLocalActive(appId).ifPresent(
            deployment -> futures.put(appId, executor.submit(deployment::activate))));
    for (Map.Entry<ApplicationId, Future<?>> f : futures.entrySet()) {
        try {
            f.getValue().get();
        } catch (ExecutionException e) {
            ApplicationId app = f.getKey();
            // Pass the exception to the log call so the cause of the failure is not lost
            log.log(LogLevel.WARNING, "Redeploying " + app + " failed, will retry", e);
            failedDeployments.add(app);
        }
    }
    executor.shutdown();
    executor.awaitTermination(365, TimeUnit.DAYS); // All futures are already done; should return immediately
    return failedDeployments;
}
/** Returns the session currently associated with the given application, throwing if none exists. */
private LocalSession getExistingSession(Tenant tenant, ApplicationId applicationId) {
    TenantApplications applicationRepo = tenant.getApplicationRepo();
    return getLocalSession(tenant, applicationRepo.getSessionIdForApplication(applicationId));
}
/** Returns the active session for the given application, or null if it is not active here. */
private LocalSession getActiveSession(Tenant tenant, ApplicationId applicationId) {
    TenantApplications applicationRepo = tenant.getApplicationRepo();
    if ( ! applicationRepo.listApplications().contains(applicationId)) return null;
    return tenant.getLocalSessionRepo().getSession(applicationRepo.getSessionIdForApplication(applicationId));
}
/** Logs the restart and re-feed actions resulting from config changes, if any. */
private static void logConfigChangeActions(ConfigChangeActions actions, DeployLogger logger) {
    RestartActions restartActions = actions.getRestartActions();
    if ( ! restartActions.isEmpty())
        logger.log(Level.WARNING, "Change(s) between active and new application that require restart:\n" +
                                  restartActions.format());

    RefeedActions refeedActions = actions.getRefeedActions();
    if ( ! refeedActions.isEmpty()) {
        boolean allAllowed = refeedActions.getEntries().stream().allMatch(RefeedActions.Entry::allowed);
        Level level = allAllowed ? Level.INFO : Level.WARNING;
        logger.log(level, "Change(s) between active and new application that may require re-feed:\n" +
                          refeedActions.format());
    }
}
/** Returns the Vespa version to use when deploying the given application in the given environment. */
static Version decideVersion(ApplicationId application, Environment environment, Version targetVersion) {
    boolean isHostedVespaTenant = "hosted-vespa".equals(application.tenant().value());
    if (environment.isManuallyDeployed() && ! isHostedVespaTenant)
        return Vtag.currentVersion;
    return targetVersion;
}
/** Creates an empty Slime with a top-level object, used for accumulating deploy log output. */
public Slime createDeployLog() {
    Slime deployLog = new Slime();
    deployLog.setObject();
    return deployLog;
}
}
Agreed, will fix later | boolean redeployAllApplications(Duration maxDuration) throws InterruptedException {
Instant end = Instant.now().plus(maxDuration);
Set<ApplicationId> applicationIds = listApplications();
do {
applicationIds = redeployApplications(applicationIds);
} while ( ! applicationIds.isEmpty() && Instant.now().isBefore(end));
if ( ! applicationIds.isEmpty()) {
log.log(LogLevel.ERROR, "Redeploying applications not finished after " + maxDuration +
", exiting, applications that failed redeployment: " + applicationIds);
return false;
}
return true;
} | Set<ApplicationId> applicationIds = listApplications(); | boolean redeployAllApplications(Duration maxDuration) throws InterruptedException {
Instant end = Instant.now().plus(maxDuration);
Set<ApplicationId> applicationIds = listApplications();
do {
applicationIds = redeployApplications(applicationIds);
} while ( ! applicationIds.isEmpty() && Instant.now().isBefore(end));
if ( ! applicationIds.isEmpty()) {
log.log(LogLevel.ERROR, "Redeploying applications not finished after " + maxDuration +
", exiting, applications that failed redeployment: " + applicationIds);
return false;
}
return true;
}

class ApplicationRepository implements com.yahoo.config.provision.Deployer {
private static final Logger log = Logger.getLogger(ApplicationRepository.class.getName());

private final TenantRepository tenantRepository;
private final Optional<Provisioner> hostProvisioner;  // empty when this config server does not provision hosts
private final ConfigConvergenceChecker convergeChecker;
private final HttpProxy httpProxy;
private final Clock clock;  // injected so time-dependent behavior is testable
private final DeployLogger logger = new SilentDeployLogger();
private final ConfigserverConfig configserverConfig;
private final Environment environment;
private final FileDistributionStatus fileDistributionStatus;
/** Constructor used by the dependency injection framework. */
@Inject
public ApplicationRepository(TenantRepository tenantRepository,
                             HostProvisionerProvider hostProvisionerProvider,
                             ConfigConvergenceChecker configConvergenceChecker,
                             HttpProxy httpProxy,
                             ConfigserverConfig configserverConfig) {
    this(tenantRepository, hostProvisionerProvider.getHostProvisioner(),
         configConvergenceChecker, httpProxy, configserverConfig, Clock.systemUTC(), new FileDistributionStatus());
}
/** Convenience constructor using a default configserver config. */
public ApplicationRepository(TenantRepository tenantRepository,
                             Provisioner hostProvisioner,
                             Clock clock) {
    this(tenantRepository, hostProvisioner, clock, new ConfigserverConfig(new ConfigserverConfig.Builder()));
}
/** Convenience constructor with an explicit configserver config. */
public ApplicationRepository(TenantRepository tenantRepository,
                             Provisioner hostProvisioner,
                             Clock clock,
                             ConfigserverConfig configserverConfig) {
    this(tenantRepository, Optional.of(hostProvisioner), new ConfigConvergenceChecker(), new HttpProxy(new SimpleHttpFetcher()),
         configserverConfig, clock, new FileDistributionStatus());
}
/** All other constructors delegate here. */
private ApplicationRepository(TenantRepository tenantRepository,
                              Optional<Provisioner> hostProvisioner,
                              ConfigConvergenceChecker configConvergenceChecker,
                              HttpProxy httpProxy,
                              ConfigserverConfig configserverConfig,
                              Clock clock,
                              FileDistributionStatus fileDistributionStatus) {
    this.tenantRepository = tenantRepository;
    this.hostProvisioner = hostProvisioner;
    this.convergeChecker = configConvergenceChecker;
    this.httpProxy = httpProxy;
    this.clock = clock;
    this.configserverConfig = configserverConfig;
    this.environment = Environment.from(configserverConfig.environment());
    this.fileDistributionStatus = fileDistributionStatus;
}
/**
 * Prepares the given session for activation.
 *
 * @return the result of preparation, including the deploy log and required config change actions
 */
public PrepareResult prepare(Tenant tenant, long sessionId, PrepareParams prepareParams, Instant now) {
    validateThatLocalSessionIsNotActive(tenant, sessionId);
    LocalSession session = getLocalSession(tenant, sessionId);
    ApplicationId applicationId = prepareParams.getApplicationId();
    Optional<ApplicationSet> currentActiveApplicationSet = getCurrentActiveApplicationSet(tenant, applicationId);
    Slime deployLog = createDeployLog();
    // Named 'deployLogger' to avoid shadowing the 'logger' field of this class
    DeployLogger deployLogger = new DeployHandlerLogger(deployLog.get().setArray("log"), prepareParams.isVerbose(), applicationId);
    ConfigChangeActions actions = session.prepare(deployLogger, prepareParams, currentActiveApplicationSet, tenant.getPath(), now);
    logConfigChangeActions(actions, deployLogger);
    log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Session " + sessionId + " prepared successfully. ");
    return new PrepareResult(sessionId, actions, deployLog);
}
/** Prepares the session, then activates it using the prepare params' timeout budget. */
public PrepareResult prepareAndActivate(Tenant tenant, long sessionId, PrepareParams prepareParams,
                                        boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
    PrepareResult result = prepare(tenant, sessionId, prepareParams, now);
    activate(tenant, sessionId, prepareParams.getTimeoutBudget(), ignoreLockFailure, ignoreSessionStaleFailure);
    return result;
}
/** Deploys a compressed application package with default failure handling, using the injected clock for "now". */
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams) {
    return deploy(in, prepareParams, false, false, clock.instant());
}
/** Decompresses the application package into a temporary directory and deploys it, cleaning up afterwards. */
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams,
                            boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
    File tempDir = Files.createTempDir();
    try {
        return deploy(decompressApplication(in, tempDir), prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
    } finally {
        // Always remove the temporary decompression directory, also on failure
        cleanupTempDirectory(tempDir);
    }
}
/**
 * Deploys an application package directory with default failure handling.
 * Uses the injected clock for "now" (was Instant.now()), consistent with the stream-based
 * overload above and keeping time-dependent behavior testable.
 */
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams) {
    return deploy(applicationPackage, prepareParams, false, false, clock.instant());
}
/** Creates a session from the application package directory, then prepares and activates it. */
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams,
                            boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
    ApplicationId applicationId = prepareParams.getApplicationId();
    long sessionId = createSession(applicationId, prepareParams.getTimeoutBudget(), applicationPackage);
    Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
    return prepareAndActivate(tenant, sessionId, prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
}
/**
 * Creates a new deployment from the active application, if available.
 * This is used for system internal redeployments, not on application package changes.
 *
 * @param application the active application to be redeployed
 * @return a new deployment from the local active, or empty if a local active application
 *         was not present for this id (meaning it either is not active or active on another
 *         node in the config server cluster)
 */
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application) {
    // Default timeout: the ZooKeeper barrier timeout plus a 5 second margin
    return deployFromLocalActive(application, Duration.ofSeconds(configserverConfig.zookeeper().barrierTimeout()).plus(Duration.ofSeconds(5)));
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @param timeout the timeout to use for each individual deployment operation
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application, Duration timeout) {
Tenant tenant = tenantRepository.getTenant(application.tenant());
if (tenant == null) return Optional.empty();
LocalSession activeSession = getActiveSession(tenant, application);
if (activeSession == null) return Optional.empty();
TimeoutBudget timeoutBudget = new TimeoutBudget(clock, timeout);
LocalSession newSession = tenant.getSessionFactory().createSessionFromExisting(activeSession, logger, true, timeoutBudget);
tenant.getLocalSessionRepo().addSession(newSession);
Version version = decideVersion(application, environment, newSession.getVespaVersion());
return Optional.of(Deployment.unprepared(newSession, this, hostProvisioner, tenant, timeout, clock,
false /* don't validate as this is already deployed */, version));
}
public ApplicationId activate(Tenant tenant,
long sessionId,
TimeoutBudget timeoutBudget,
boolean ignoreLockFailure,
boolean ignoreSessionStaleFailure) {
LocalSession localSession = getLocalSession(tenant, sessionId);
Deployment deployment = deployFromPreparedSession(localSession, tenant, timeoutBudget.timeLeft());
deployment.setIgnoreLockFailure(ignoreLockFailure);
deployment.setIgnoreSessionStaleFailure(ignoreSessionStaleFailure);
deployment.activate();
return localSession.getApplicationId();
}
private Deployment deployFromPreparedSession(LocalSession session, Tenant tenant, Duration timeout) {
return Deployment.prepared(session, this, hostProvisioner, tenant, timeout, clock);
}
/**
 * Removes a previously deployed application.
 *
 * @return true if the application was found and removed, false if it was not present
 * @throws RuntimeException if the remove transaction fails. This method is exception safe.
 */
public boolean remove(ApplicationId applicationId) {
    // Plain null check instead of Optional.ofNullable + repeated get(): Optional is for return
    // types, and the repeated owner.get() calls obscured the logic
    Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
    if (tenant == null) return false;

    TenantApplications tenantApplications = tenant.getApplicationRepo();
    if ( ! tenantApplications.listApplications().contains(applicationId)) return false;

    long sessionId = tenantApplications.getSessionIdForApplication(applicationId);
    LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
    LocalSession session = localSessionRepo.getSession(sessionId);
    if (session == null) return false;

    // All removals participate in one transaction so the application is deleted atomically
    NestedTransaction transaction = new NestedTransaction();
    localSessionRepo.removeSession(session.getSessionId(), transaction);
    session.delete(transaction);
    transaction.add(new Rotations(tenant.getCurator(), tenant.getPath()).delete(applicationId));
    transaction.add(tenantApplications.deleteApplication(applicationId));
    hostProvisioner.ifPresent(provisioner -> provisioner.remove(transaction, applicationId));
    transaction.onCommitted(() -> log.log(LogLevel.INFO, "Deleted " + applicationId));
    transaction.commit();
    return true;
}
public HttpResponse clusterControllerStatusPage(ApplicationId applicationId, String hostName, String pathSuffix) {
String relativePath = "clustercontroller-status/" + pathSuffix;
return httpProxy.get(getApplication(applicationId), hostName, "container-clustercontroller", relativePath);
}
public Long getApplicationGeneration(ApplicationId applicationId) {
return getApplication(applicationId).getApplicationGeneration();
}
public void restart(ApplicationId applicationId, HostFilter hostFilter) {
hostProvisioner.ifPresent(provisioner -> provisioner.restart(applicationId, hostFilter));
}
public HttpResponse filedistributionStatus(ApplicationId applicationId, Duration timeout) {
return fileDistributionStatus.status(getApplication(applicationId), timeout);
}
/**
 * Finds file references on disk that are not referenced by any deployed application and have not
 * been modified for 14 days, and optionally deletes them.
 *
 * @param fileReferencesPath directory containing the file references
 * @param deleteFromDisk     if true, delete the unused references; if false, only report them
 * @return the set of unused file reference names (deleted, if requested)
 */
public Set<String> deleteUnusedFiledistributionReferences(File fileReferencesPath, boolean deleteFromDisk) {
    if ( ! fileReferencesPath.isDirectory()) throw new RuntimeException(fileReferencesPath + " is not a directory");

    // flatMap instead of forEach + addAll: builds the in-use set without side effects
    Set<String> fileReferencesInUse = listApplications().stream()
            .flatMap(applicationId -> getApplication(applicationId).getModel().fileReferences().stream())
            .map(FileReference::value)
            .collect(Collectors.toSet());
    log.log(LogLevel.DEBUG, "File references in use : " + fileReferencesInUse);

    Set<String> fileReferencesOnDisk = new HashSet<>();
    File[] filesOnDisk = fileReferencesPath.listFiles();
    if (filesOnDisk != null)
        fileReferencesOnDisk.addAll(Arrays.stream(filesOnDisk).map(File::getName).collect(Collectors.toSet()));
    log.log(LogLevel.DEBUG, "File references on disk (in " + fileReferencesPath + "): " + fileReferencesOnDisk);

    // Only consider references untouched for 14 days — presumably to avoid deleting references
    // belonging to in-flight deployments; confirm before changing this window
    Instant cutoff = Instant.now().minus(Duration.ofDays(14));
    Set<String> fileReferencesToDelete = fileReferencesOnDisk
            .stream()
            .filter(fileReference -> ! fileReferencesInUse.contains(fileReference))
            .filter(fileReference -> isFileLastModifiedBefore(new File(fileReferencesPath, fileReference), cutoff))
            .collect(Collectors.toSet());
    if (deleteFromDisk && ! fileReferencesToDelete.isEmpty()) {
        log.log(LogLevel.INFO, "Will delete file references not in use: " + fileReferencesToDelete);
        fileReferencesToDelete.forEach(fileReference -> {
            File file = new File(fileReferencesPath, fileReference);
            if ( ! IOUtils.recursiveDeleteDir(file))
                log.log(LogLevel.WARNING, "Could not delete " + file.getAbsolutePath());
        });
    }
    return fileReferencesToDelete;
}
public ApplicationFile getApplicationFileFromSession(TenantName tenantName, long sessionId, String path, LocalSession.Mode mode) {
Tenant tenant = tenantRepository.getTenant(tenantName);
return getLocalSession(tenant, sessionId).getApplicationFile(Path.fromString(path), mode);
}
private Application getApplication(ApplicationId applicationId) {
try {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
long sessionId = getSessionIdForApplication(tenant, applicationId);
RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId, 0);
return session.ensureApplicationLoaded().getForVersionOrLatest(Optional.empty(), clock.instant());
} catch (Exception e) {
log.log(LogLevel.WARNING, "Failed getting application for '" + applicationId + "'", e);
throw e;
}
}
private Set<ApplicationId> listApplications() {
return tenantRepository.getAllTenants().stream()
.flatMap(tenant -> tenant.getApplicationRepo().listApplications().stream())
.collect(Collectors.toSet());
}
private boolean isFileLastModifiedBefore(File fileReference, Instant instant) {
BasicFileAttributes fileAttributes;
try {
fileAttributes = readAttributes(fileReference.toPath(), BasicFileAttributes.class);
return fileAttributes.lastModifiedTime().toInstant().isBefore(instant);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public HttpResponse checkServiceForConfigConvergence(ApplicationId applicationId, String hostAndPort, URI uri) {
return convergeChecker.checkService(getApplication(applicationId), hostAndPort, uri);
}
public HttpResponse servicesToCheckForConfigConvergence(ApplicationId applicationId, URI uri) {
return convergeChecker.servicesToCheck(getApplication(applicationId), uri);
}
/**
* Gets the active Session for the given application id.
*
* @return the active session, or null if there is no active session for the given application id.
*/
public LocalSession getActiveSession(ApplicationId applicationId) {
return getActiveSession(tenantRepository.getTenant(applicationId.tenant()), applicationId);
}
public long getSessionIdForApplication(Tenant tenant, ApplicationId applicationId) {
return tenant.getApplicationRepo().getSessionIdForApplication(applicationId);
}
public void validateThatRemoteSessionIsNotActive(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if (Session.Status.ACTIVATE.equals(session.getStatus())) {
throw new IllegalStateException("Session is active: " + sessionId);
}
}
public void validateThatRemoteSessionIsPrepared(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if ( ! Session.Status.PREPARE.equals(session.getStatus()))
throw new IllegalStateException("Session not prepared: " + sessionId);
}
public long createSessionFromExisting(ApplicationId applicationId,
DeployLogger logger,
boolean internalRedeploy,
TimeoutBudget timeoutBudget) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession fromSession = getExistingSession(tenant, applicationId);
LocalSession session = sessionFactory.createSessionFromExisting(fromSession, logger, internalRedeploy, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, InputStream in, String contentType) {
File tempDir = Files.createTempDir();
long sessionId;
try {
sessionId = createSession(applicationId, timeoutBudget, decompressApplication(in, contentType, tempDir));
} finally {
cleanupTempDirectory(tempDir);
}
return sessionId;
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, File applicationDirectory) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession session = sessionFactory.createSession(applicationDirectory, applicationId, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
/**
 * Deletes tenants that have no active applications, are older than the given TTL, and are not
 * the built-in default or hosted-vespa tenants.
 *
 * @return the tenants that were deleted
 */
public Set<TenantName> deleteUnusedTenants(Duration ttlForUnusedTenant, Instant now) {
    // Collect first, then delete: peek() in the original performed deletion as a stream
    // side effect, which is a misuse of peek (debugging aid per its javadoc)
    Set<TenantName> tenantsToDelete = tenantRepository.getAllTenantNames().stream()
            .filter(tenantName -> activeApplications(tenantName).isEmpty())
            .filter(tenantName -> ! tenantName.equals(TenantName.defaultName()))
            .filter(tenantName -> ! tenantName.equals(TenantRepository.HOSTED_VESPA_TENANT))
            .filter(tenantName -> tenantRepository.getTenant(tenantName).getCreatedTime().isBefore(now.minus(ttlForUnusedTenant)))
            .collect(Collectors.toSet());
    tenantsToDelete.forEach(tenantRepository::deleteTenant);
    return tenantsToDelete;
}
public void deleteTenant(TenantName tenantName) {
List<ApplicationId> activeApplications = activeApplications(tenantName);
if (activeApplications.isEmpty())
tenantRepository.deleteTenant(tenantName);
else
throw new IllegalArgumentException("Cannot delete tenant '" + tenantName + "', it has active applications: " + activeApplications);
}
/** Returns the ids of the applications the given tenant currently has. */
private List<ApplicationId> activeApplications(TenantName tenantName) {
    return tenantRepository.getTenant(tenantName).getApplicationRepo().listApplications();
}
/**
 * Verifies that the application's tenant exists and that the application is
 * registered with it.
 *
 * @return the tenant owning the application
 * @throws IllegalArgumentException if the tenant or the application is unknown
 */
public Tenant verifyTenantAndApplication(ApplicationId applicationId) {
    TenantName tenantName = applicationId.tenant();
    if ( ! tenantRepository.checkThatTenantExists(tenantName))
        throw new IllegalArgumentException("Tenant " + tenantName + " was not found.");

    Tenant tenant = tenantRepository.getTenant(tenantName);
    if ( ! listApplicationIds(tenant).contains(applicationId))
        throw new IllegalArgumentException("No such application id: " + applicationId);

    return tenant;
}
/** Returns the application metadata stored with the given session. */
public ApplicationMetaData getMetadataFromSession(Tenant tenant, long sessionId) {
    return getLocalSession(tenant, sessionId).getMetaData();
}
/** Returns the configuration of this config server (package-private accessor). */
ConfigserverConfig configserverConfig() {
    return configserverConfig;
}
/** Throws IllegalStateException if the given local session is in the ACTIVATE state. */
private void validateThatLocalSessionIsNotActive(Tenant tenant, long sessionId) {
    if (Session.Status.ACTIVATE.equals(getLocalSession(tenant, sessionId).getStatus()))
        throw new IllegalStateException("Session is active: " + sessionId);
}
/**
 * Returns the local session with the given id.
 *
 * @throws NotFoundException if no such session exists
 */
private LocalSession getLocalSession(Tenant tenant, long sessionId) {
    LocalSession localSession = tenant.getLocalSessionRepo().getSession(sessionId);
    if (localSession == null)
        throw new NotFoundException("Session " + sessionId + " was not found");

    return localSession;
}
/**
 * Returns the remote session with the given id.
 *
 * @throws NotFoundException if no such session exists
 */
private RemoteSession getRemoteSession(Tenant tenant, long sessionId) {
    RemoteSession remoteSession = tenant.getRemoteSessionRepo().getSession(sessionId);
    if (remoteSession == null)
        throw new NotFoundException("Session " + sessionId + " was not found");

    return remoteSession;
}
/**
 * Returns the application set of the currently active session for the given
 * application, or empty if the application has no active session known here.
 */
private Optional<ApplicationSet> getCurrentActiveApplicationSet(Tenant tenant, ApplicationId appId) {
    Optional<ApplicationSet> currentActiveApplicationSet = Optional.empty();
    TenantApplications applicationRepo = tenant.getApplicationRepo();
    try {
        long currentActiveSessionId = applicationRepo.getSessionIdForApplication(appId);
        RemoteSession currentActiveSession = getRemoteSession(tenant, currentActiveSessionId);
        if (currentActiveSession != null) {
            currentActiveApplicationSet = Optional.ofNullable(currentActiveSession.ensureApplicationLoaded());
        }
    } catch (IllegalArgumentException e) {
        // Intentionally ignored: no session id is stored for the application,
        // meaning it is not active — fall through and return empty.
    }
    return currentActiveApplicationSet;
}
/**
 * Unpacks a compressed application package stream of the given content type into tempDir.
 *
 * @throws IllegalArgumentException if the stream cannot be decompressed
 */
private File decompressApplication(InputStream in, String contentType, File tempDir) {
    try (CompressedApplicationInputStream compressed =
                 CompressedApplicationInputStream.createFromCompressedStream(in, contentType)) {
        return decompressApplication(compressed, tempDir);
    } catch (IOException e) {
        throw new IllegalArgumentException("Unable to decompress data in body", e);
    }
}
/**
 * Unpacks an already-opened compressed application stream into tempDir.
 *
 * @throws IllegalArgumentException if decompression fails
 */
private File decompressApplication(CompressedApplicationInputStream in, File tempDir) {
    try {
        return in.decompress(tempDir);
    }
    catch (IOException e) {
        throw new IllegalArgumentException("Unable to decompress stream", e);
    }
}
/** Returns the ids of all applications registered with the given tenant. */
private List<ApplicationId> listApplicationIds(Tenant tenant) {
    return tenant.getApplicationRepo().listApplications();
}
/** Recursively deletes the given temp directory, logging a warning if deletion fails. */
private void cleanupTempDirectory(File tempDir) {
    logger.log(LogLevel.DEBUG, "Deleting tmp dir '" + tempDir + "'");
    if ( ! IOUtils.recursiveDeleteDir(tempDir))
        logger.log(LogLevel.WARNING, "Not able to delete tmp dir '" + tempDir + "'");
}
/**
 * Redeploys the given applications in parallel and waits for all to finish.
 * Only applications with a local active session on this node are redeployed.
 *
 * @param applicationIds the applications to redeploy
 * @return the applications whose redeployment failed (empty if all succeeded)
 * @throws InterruptedException if interrupted while waiting for the deployments
 */
private Set<ApplicationId> redeployApplications(Set<ApplicationId> applicationIds) throws InterruptedException {
    ExecutorService executor = Executors.newFixedThreadPool(configserverConfig.numParallelTenantLoaders(),
                                                            new DaemonThreadFactory("redeploy apps"));
    try {
        Map<ApplicationId, Future<?>> futures = new HashMap<>();
        applicationIds.forEach(appId -> deployFromLocalActive(appId).ifPresent(
                deployment -> futures.put(appId, executor.submit(deployment::activate))));

        Set<ApplicationId> failedDeployments = new HashSet<>();
        for (Map.Entry<ApplicationId, Future<?>> f : futures.entrySet()) {
            try {
                f.getValue().get();
            } catch (ExecutionException e) {
                ApplicationId app = f.getKey();
                // Include the cause, so the reason for the failure is not lost
                log.log(LogLevel.WARNING, "Redeploying " + app + " failed, will retry", e);
                failedDeployments.add(app);
            }
        }
        return failedDeployments;
    } finally {
        // Always shut the pool down — also when interrupted while waiting above —
        // so we never leak the redeploy threads.
        executor.shutdown();
        executor.awaitTermination(365, TimeUnit.DAYS);
    }
}
/** Returns the local session currently registered for the given application. */
private LocalSession getExistingSession(Tenant tenant, ApplicationId applicationId) {
    return getLocalSession(tenant, tenant.getApplicationRepo().getSessionIdForApplication(applicationId));
}
/**
 * Returns the active local session for the given application,
 * or null if the application is not registered with the tenant.
 */
private LocalSession getActiveSession(Tenant tenant, ApplicationId applicationId) {
    TenantApplications applicationRepo = tenant.getApplicationRepo();
    if ( ! applicationRepo.listApplications().contains(applicationId)) return null;

    return tenant.getLocalSessionRepo().getSession(applicationRepo.getSessionIdForApplication(applicationId));
}
/** Logs required restart and re-feed actions resulting from a prepare. */
private static void logConfigChangeActions(ConfigChangeActions actions, DeployLogger logger) {
    RestartActions restartActions = actions.getRestartActions();
    if ( ! restartActions.isEmpty())
        logger.log(Level.WARNING, "Change(s) between active and new application that require restart:\n" +
                                  restartActions.format());

    RefeedActions refeedActions = actions.getRefeedActions();
    if ( ! refeedActions.isEmpty()) {
        // Warn only when some re-feed action is not allowed
        Level level = refeedActions.getEntries().stream().allMatch(RefeedActions.Entry::allowed)
                      ? Level.INFO
                      : Level.WARNING;
        logger.log(level, "Change(s) between active and new application that may require re-feed:\n" +
                          refeedActions.format());
    }
}
/** Returns version to use when deploying application in given environment */
static Version decideVersion(ApplicationId application, Environment environment, Version targetVersion) {
    // Manual deployments get this config server's own version,
    // except for the hosted-vespa tenant which always uses the target version
    boolean hostedVespaTenant = "hosted-vespa".equals(application.tenant().value());
    return (environment.isManuallyDeployed() && ! hostedVespaTenant) ? Vtag.currentVersion : targetVersion;
}
/** Returns a new, empty deploy log with an object root. */
public Slime createDeployLog() {
    Slime slime = new Slime();
    slime.setObject();
    return slime;
}
} | class ApplicationRepository implements com.yahoo.config.provision.Deployer {
private static final Logger log = Logger.getLogger(ApplicationRepository.class.getName());
private final TenantRepository tenantRepository;
private final Optional<Provisioner> hostProvisioner;
private final ConfigConvergenceChecker convergeChecker;
private final HttpProxy httpProxy;
private final Clock clock;
private final DeployLogger logger = new SilentDeployLogger();
private final ConfigserverConfig configserverConfig;
private final Environment environment;
private final FileDistributionStatus fileDistributionStatus;
/**
 * Constructor used by the injection framework. Delegates to the master
 * constructor with the system UTC clock and a fresh FileDistributionStatus.
 */
@Inject
public ApplicationRepository(TenantRepository tenantRepository,
                             HostProvisionerProvider hostProvisionerProvider,
                             ConfigConvergenceChecker configConvergenceChecker,
                             HttpProxy httpProxy,
                             ConfigserverConfig configserverConfig) {
    this(tenantRepository, hostProvisionerProvider.getHostProvisioner(),
         configConvergenceChecker, httpProxy, configserverConfig, Clock.systemUTC(), new FileDistributionStatus());
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock) {
this(tenantRepository, hostProvisioner, clock, new ConfigserverConfig(new ConfigserverConfig.Builder()));
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock,
ConfigserverConfig configserverConfig) {
this(tenantRepository, Optional.of(hostProvisioner), new ConfigConvergenceChecker(), new HttpProxy(new SimpleHttpFetcher()),
configserverConfig, clock, new FileDistributionStatus());
}
/** Master constructor that all other constructors delegate to. */
private ApplicationRepository(TenantRepository tenantRepository,
                              Optional<Provisioner> hostProvisioner,
                              ConfigConvergenceChecker configConvergenceChecker,
                              HttpProxy httpProxy,
                              ConfigserverConfig configserverConfig,
                              Clock clock,
                              FileDistributionStatus fileDistributionStatus) {
    this.tenantRepository = tenantRepository;
    this.hostProvisioner = hostProvisioner;
    this.convergeChecker = configConvergenceChecker;
    this.httpProxy = httpProxy;
    this.clock = clock;
    this.configserverConfig = configserverConfig;
    // Environment is derived from config rather than injected separately
    this.environment = Environment.from(configserverConfig.environment());
    this.fileDistributionStatus = fileDistributionStatus;
}
/**
 * Prepares the given session and logs the resulting config change actions.
 *
 * @param tenant the tenant owning the session
 * @param sessionId the session to prepare; must not be the active session
 * @param prepareParams parameters controlling the preparation
 * @param now the current time
 * @return the session id, config change actions and deploy log of the preparation
 */
public PrepareResult prepare(Tenant tenant, long sessionId, PrepareParams prepareParams, Instant now) {
    validateThatLocalSessionIsNotActive(tenant, sessionId);
    LocalSession session = getLocalSession(tenant, sessionId);
    ApplicationId applicationId = prepareParams.getApplicationId();
    Optional<ApplicationSet> currentActiveApplicationSet = getCurrentActiveApplicationSet(tenant, applicationId);
    Slime deployLog = createDeployLog();
    // The deploy log is exposed to the caller via the returned PrepareResult
    DeployLogger logger = new DeployHandlerLogger(deployLog.get().setArray("log"), prepareParams.isVerbose(), applicationId);
    ConfigChangeActions actions = session.prepare(logger, prepareParams, currentActiveApplicationSet, tenant.getPath(), now);
    logConfigChangeActions(actions, logger);
    log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Session " + sessionId + " prepared successfully. ");
    return new PrepareResult(sessionId, actions, deployLog);
}
/** Prepares the given session, then activates it. */
public PrepareResult prepareAndActivate(Tenant tenant, long sessionId, PrepareParams prepareParams,
                                        boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
    PrepareResult prepareResult = prepare(tenant, sessionId, prepareParams, now);
    activate(tenant, sessionId, prepareParams.getTimeoutBudget(), ignoreLockFailure, ignoreSessionStaleFailure);
    return prepareResult;
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams) {
return deploy(in, prepareParams, false, false, clock.instant());
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
File tempDir = Files.createTempDir();
PrepareResult prepareResult;
try {
prepareResult = deploy(decompressApplication(in, tempDir), prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
} finally {
cleanupTempDirectory(tempDir);
}
return prepareResult;
}
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams) {
return deploy(applicationPackage, prepareParams, false, false, Instant.now());
}
/**
 * Deploys an application package from a directory: creates a session from it,
 * then prepares and activates that session.
 *
 * @return the result of preparing the created session
 */
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams,
                            boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
    ApplicationId applicationId = prepareParams.getApplicationId();
    long sessionId = createSession(applicationId, prepareParams.getTimeoutBudget(), applicationPackage);
    Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
    return prepareAndActivate(tenant, sessionId, prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application) {
return deployFromLocalActive(application, Duration.ofSeconds(configserverConfig.zookeeper().barrierTimeout()).plus(Duration.ofSeconds(5)));
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @param timeout the timeout to use for each individual deployment operation
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application, Duration timeout) {
    Tenant tenant = tenantRepository.getTenant(application.tenant());
    if (tenant == null) return Optional.empty();
    LocalSession activeSession = getActiveSession(tenant, application);
    if (activeSession == null) return Optional.empty();
    TimeoutBudget timeoutBudget = new TimeoutBudget(clock, timeout);
    // Clone the active session into a new one, flagged as an internal redeploy
    LocalSession newSession = tenant.getSessionFactory().createSessionFromExisting(activeSession, logger, true, timeoutBudget);
    tenant.getLocalSessionRepo().addSession(newSession);
    Version version = decideVersion(application, environment, newSession.getVespaVersion());
    return Optional.of(Deployment.unprepared(newSession, this, hostProvisioner, tenant, timeout, clock,
                                             false /* don't validate as this is already deployed */, version));
}
/**
 * Activates a previously prepared session.
 *
 * @return the id of the application that was activated
 */
public ApplicationId activate(Tenant tenant,
                              long sessionId,
                              TimeoutBudget timeoutBudget,
                              boolean ignoreLockFailure,
                              boolean ignoreSessionStaleFailure) {
    LocalSession localSession = getLocalSession(tenant, sessionId);
    Deployment deployment = deployFromPreparedSession(localSession, tenant, timeoutBudget.timeLeft());
    deployment.setIgnoreLockFailure(ignoreLockFailure);
    deployment.setIgnoreSessionStaleFailure(ignoreSessionStaleFailure);
    deployment.activate();
    return localSession.getApplicationId();
}
private Deployment deployFromPreparedSession(LocalSession session, Tenant tenant, Duration timeout) {
return Deployment.prepared(session, this, hostProvisioner, tenant, timeout, clock);
}
/**
* Removes a previously deployed application
*
* @return true if the application was found and removed, false if it was not present
* @throws RuntimeException if the remove transaction fails. This method is exception safe.
*/
public boolean remove(ApplicationId applicationId) {
    Optional<Tenant> owner = Optional.ofNullable(tenantRepository.getTenant(applicationId.tenant()));
    if ( ! owner.isPresent()) return false;
    TenantApplications tenantApplications = owner.get().getApplicationRepo();
    if ( ! tenantApplications.listApplications().contains(applicationId)) return false;
    long sessionId = tenantApplications.getSessionIdForApplication(applicationId);
    LocalSessionRepo localSessionRepo = owner.get().getLocalSessionRepo();
    LocalSession session = localSessionRepo.getSession(sessionId);
    if (session == null) return false;
    // All removals are collected into one transaction so the operation is atomic:
    // local session, session data, rotations, the application entry, and
    // (when a provisioner is configured) the provisioned hosts.
    NestedTransaction transaction = new NestedTransaction();
    localSessionRepo.removeSession(session.getSessionId(), transaction);
    session.delete(transaction);
    transaction.add(new Rotations(owner.get().getCurator(), owner.get().getPath()).delete(applicationId));
    transaction.add(tenantApplications.deleteApplication(applicationId));
    hostProvisioner.ifPresent(provisioner -> provisioner.remove(transaction, applicationId));
    // Log only once the whole transaction has committed successfully
    transaction.onCommitted(() -> log.log(LogLevel.INFO, "Deleted " + applicationId));
    transaction.commit();
    return true;
}
public HttpResponse clusterControllerStatusPage(ApplicationId applicationId, String hostName, String pathSuffix) {
String relativePath = "clustercontroller-status/" + pathSuffix;
return httpProxy.get(getApplication(applicationId), hostName, "container-clustercontroller", relativePath);
}
public Long getApplicationGeneration(ApplicationId applicationId) {
return getApplication(applicationId).getApplicationGeneration();
}
public void restart(ApplicationId applicationId, HostFilter hostFilter) {
hostProvisioner.ifPresent(provisioner -> provisioner.restart(applicationId, hostFilter));
}
public HttpResponse filedistributionStatus(ApplicationId applicationId, Duration timeout) {
return fileDistributionStatus.status(getApplication(applicationId), timeout);
}
/**
 * Finds file distribution references on disk that are not referenced by the
 * model of any deployed application and have not been modified for 14 days,
 * optionally deleting them.
 *
 * @param fileReferencesPath directory holding the file references
 * @param deleteFromDisk if true, also delete the unused references from disk
 * @return the file references found to be unused (whether or not deleted)
 * @throws RuntimeException if fileReferencesPath is not a directory
 */
public Set<String> deleteUnusedFiledistributionReferences(File fileReferencesPath, boolean deleteFromDisk) {
    if (!fileReferencesPath.isDirectory()) throw new RuntimeException(fileReferencesPath + " is not a directory");

    // Collect every file reference used by the model of each deployed application
    Set<String> fileReferencesInUse = new HashSet<>();
    Set<ApplicationId> applicationIds = listApplications();
    applicationIds.forEach(applicationId -> fileReferencesInUse.addAll(getApplication(applicationId).getModel().fileReferences()
                                                                               .stream()
                                                                               .map(FileReference::value)
                                                                               .collect(Collectors.toSet())));
    log.log(LogLevel.DEBUG, "File references in use : " + fileReferencesInUse);

    Set<String> fileReferencesOnDisk = new HashSet<>();
    File[] filesOnDisk = fileReferencesPath.listFiles();
    if (filesOnDisk != null)
        fileReferencesOnDisk.addAll(Arrays.stream(filesOnDisk).map(File::getName).collect(Collectors.toSet()));
    log.log(LogLevel.DEBUG, "File references on disk (in " + fileReferencesPath + "): " + fileReferencesOnDisk);

    // Deletion candidates: on disk, not in use, and untouched for the last 14 days
    Instant instant = Instant.now().minus(Duration.ofDays(14));
    Set<String> fileReferencesToDelete = fileReferencesOnDisk
            .stream()
            .filter(fileReference -> ! fileReferencesInUse.contains(fileReference))
            .filter(fileReference -> isFileLastModifiedBefore(new File(fileReferencesPath, fileReference), instant))
            .collect(Collectors.toSet());
    if (deleteFromDisk && ! fileReferencesToDelete.isEmpty()) {
        log.log(LogLevel.INFO, "Will delete file references not in use: " + fileReferencesToDelete);
        fileReferencesToDelete.forEach(fileReference -> {
            File file = new File(fileReferencesPath, fileReference);
            if ( ! IOUtils.recursiveDeleteDir(file))
                log.log(LogLevel.WARNING, "Could not delete " + file.getAbsolutePath());
        });
    }
    return fileReferencesToDelete;
}
public ApplicationFile getApplicationFileFromSession(TenantName tenantName, long sessionId, String path, LocalSession.Mode mode) {
Tenant tenant = tenantRepository.getTenant(tenantName);
return getLocalSession(tenant, sessionId).getApplicationFile(Path.fromString(path), mode);
}
/**
 * Returns the active application with the given id.
 * Failures are logged with their cause before being rethrown to the caller.
 */
private Application getApplication(ApplicationId applicationId) {
    try {
        Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
        long sessionId = getSessionIdForApplication(tenant, applicationId);
        RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId, 0);
        return session.ensureApplicationLoaded().getForVersionOrLatest(Optional.empty(), clock.instant());
    } catch (Exception e) {
        // Broad catch at this boundary: log with stack trace, then rethrow unchanged
        log.log(LogLevel.WARNING, "Failed getting application for '" + applicationId + "'", e);
        throw e;
    }
}
/** Returns the ids of all applications across all tenants. */
private Set<ApplicationId> listApplications() {
    Set<ApplicationId> applications = new HashSet<>();
    for (Tenant tenant : tenantRepository.getAllTenants())
        applications.addAll(tenant.getApplicationRepo().listApplications());
    return applications;
}
/** Returns whether the given file was last modified before the given instant. */
private boolean isFileLastModifiedBefore(File fileReference, Instant instant) {
    try {
        BasicFileAttributes attributes = readAttributes(fileReference.toPath(), BasicFileAttributes.class);
        return attributes.lastModifiedTime().toInstant().isBefore(instant);
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
public HttpResponse checkServiceForConfigConvergence(ApplicationId applicationId, String hostAndPort, URI uri) {
return convergeChecker.checkService(getApplication(applicationId), hostAndPort, uri);
}
public HttpResponse servicesToCheckForConfigConvergence(ApplicationId applicationId, URI uri) {
return convergeChecker.servicesToCheck(getApplication(applicationId), uri);
}
/**
* Gets the active Session for the given application id.
*
* @return the active session, or null if there is no active session for the given application id.
*/
public LocalSession getActiveSession(ApplicationId applicationId) {
return getActiveSession(tenantRepository.getTenant(applicationId.tenant()), applicationId);
}
public long getSessionIdForApplication(Tenant tenant, ApplicationId applicationId) {
return tenant.getApplicationRepo().getSessionIdForApplication(applicationId);
}
public void validateThatRemoteSessionIsNotActive(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if (Session.Status.ACTIVATE.equals(session.getStatus())) {
throw new IllegalStateException("Session is active: " + sessionId);
}
}
public void validateThatRemoteSessionIsPrepared(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if ( ! Session.Status.PREPARE.equals(session.getStatus()))
throw new IllegalStateException("Session not prepared: " + sessionId);
}
public long createSessionFromExisting(ApplicationId applicationId,
DeployLogger logger,
boolean internalRedeploy,
TimeoutBudget timeoutBudget) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession fromSession = getExistingSession(tenant, applicationId);
LocalSession session = sessionFactory.createSessionFromExisting(fromSession, logger, internalRedeploy, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, InputStream in, String contentType) {
File tempDir = Files.createTempDir();
long sessionId;
try {
sessionId = createSession(applicationId, timeoutBudget, decompressApplication(in, contentType, tempDir));
} finally {
cleanupTempDirectory(tempDir);
}
return sessionId;
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, File applicationDirectory) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession session = sessionFactory.createSession(applicationDirectory, applicationId, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
/**
 * Deletes tenants that have no active applications and were created before
 * {@code now - ttlForUnusedTenant}. The default tenant and the hosted-vespa
 * tenant are never deleted.
 *
 * @param ttlForUnusedTenant how long an unused tenant is kept before deletion
 * @param now the current time
 * @return the names of the tenants that were deleted
 */
public Set<TenantName> deleteUnusedTenants(Duration ttlForUnusedTenant, Instant now) {
    // Collect candidates first, then delete. Performing the deletion inside
    // Stream.peek() is fragile: peek exists mainly for debugging and its action
    // may be elided by the stream implementation.
    Set<TenantName> tenantsToDelete = tenantRepository.getAllTenantNames().stream()
            .filter(tenantName -> activeApplications(tenantName).isEmpty())
            .filter(tenantName -> !tenantName.equals(TenantName.defaultName()))
            .filter(tenantName -> !tenantName.equals(TenantRepository.HOSTED_VESPA_TENANT))
            .filter(tenantName -> tenantRepository.getTenant(tenantName).getCreatedTime().isBefore(now.minus(ttlForUnusedTenant)))
            .collect(Collectors.toSet());
    tenantsToDelete.forEach(tenantRepository::deleteTenant);
    return tenantsToDelete;
}
public void deleteTenant(TenantName tenantName) {
List<ApplicationId> activeApplications = activeApplications(tenantName);
if (activeApplications.isEmpty())
tenantRepository.deleteTenant(tenantName);
else
throw new IllegalArgumentException("Cannot delete tenant '" + tenantName + "', it has active applications: " + activeApplications);
}
private List<ApplicationId> activeApplications(TenantName tenantName) {
return tenantRepository.getTenant(tenantName).getApplicationRepo().listApplications();
}
public Tenant verifyTenantAndApplication(ApplicationId applicationId) {
TenantName tenantName = applicationId.tenant();
if (!tenantRepository.checkThatTenantExists(tenantName)) {
throw new IllegalArgumentException("Tenant " + tenantName + " was not found.");
}
Tenant tenant = tenantRepository.getTenant(tenantName);
List<ApplicationId> applicationIds = listApplicationIds(tenant);
if (!applicationIds.contains(applicationId)) {
throw new IllegalArgumentException("No such application id: " + applicationId);
}
return tenant;
}
public ApplicationMetaData getMetadataFromSession(Tenant tenant, long sessionId) {
return getLocalSession(tenant, sessionId).getMetaData();
}
ConfigserverConfig configserverConfig() {
return configserverConfig;
}
private void validateThatLocalSessionIsNotActive(Tenant tenant, long sessionId) {
LocalSession session = getLocalSession(tenant, sessionId);
if (Session.Status.ACTIVATE.equals(session.getStatus())) {
throw new IllegalStateException("Session is active: " + sessionId);
}
}
private LocalSession getLocalSession(Tenant tenant, long sessionId) {
LocalSession session = tenant.getLocalSessionRepo().getSession(sessionId);
if (session == null) throw new NotFoundException("Session " + sessionId + " was not found");
return session;
}
private RemoteSession getRemoteSession(Tenant tenant, long sessionId) {
RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId);
if (session == null) throw new NotFoundException("Session " + sessionId + " was not found");
return session;
}
private Optional<ApplicationSet> getCurrentActiveApplicationSet(Tenant tenant, ApplicationId appId) {
Optional<ApplicationSet> currentActiveApplicationSet = Optional.empty();
TenantApplications applicationRepo = tenant.getApplicationRepo();
try {
long currentActiveSessionId = applicationRepo.getSessionIdForApplication(appId);
RemoteSession currentActiveSession = getRemoteSession(tenant, currentActiveSessionId);
if (currentActiveSession != null) {
currentActiveApplicationSet = Optional.ofNullable(currentActiveSession.ensureApplicationLoaded());
}
} catch (IllegalArgumentException e) {
}
return currentActiveApplicationSet;
}
private File decompressApplication(InputStream in, String contentType, File tempDir) {
try (CompressedApplicationInputStream application =
CompressedApplicationInputStream.createFromCompressedStream(in, contentType)) {
return decompressApplication(application, tempDir);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to decompress data in body", e);
}
}
private File decompressApplication(CompressedApplicationInputStream in, File tempDir) {
try {
return in.decompress(tempDir);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to decompress stream", e);
}
}
private List<ApplicationId> listApplicationIds(Tenant tenant) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
return applicationRepo.listApplications();
}
private void cleanupTempDirectory(File tempDir) {
logger.log(LogLevel.DEBUG, "Deleting tmp dir '" + tempDir + "'");
if (!IOUtils.recursiveDeleteDir(tempDir)) {
logger.log(LogLevel.WARNING, "Not able to delete tmp dir '" + tempDir + "'");
}
}
/**
 * Redeploys the given applications in parallel and waits for all to finish.
 * Only applications with a local active session on this node are redeployed.
 *
 * @param applicationIds the applications to redeploy
 * @return the applications whose redeployment failed (empty if all succeeded)
 * @throws InterruptedException if interrupted while waiting for the deployments
 */
private Set<ApplicationId> redeployApplications(Set<ApplicationId> applicationIds) throws InterruptedException {
    ExecutorService executor = Executors.newFixedThreadPool(configserverConfig.numParallelTenantLoaders(),
                                                            new DaemonThreadFactory("redeploy apps"));
    try {
        Map<ApplicationId, Future<?>> futures = new HashMap<>();
        applicationIds.forEach(appId -> deployFromLocalActive(appId).ifPresent(
                deployment -> futures.put(appId, executor.submit(deployment::activate))));

        Set<ApplicationId> failedDeployments = new HashSet<>();
        for (Map.Entry<ApplicationId, Future<?>> f : futures.entrySet()) {
            try {
                f.getValue().get();
            } catch (ExecutionException e) {
                ApplicationId app = f.getKey();
                // Include the cause, so the reason for the failure is not lost
                log.log(LogLevel.WARNING, "Redeploying " + app + " failed, will retry", e);
                failedDeployments.add(app);
            }
        }
        return failedDeployments;
    } finally {
        // Always shut the pool down — also when interrupted while waiting above —
        // so we never leak the redeploy threads.
        executor.shutdown();
        executor.awaitTermination(365, TimeUnit.DAYS);
    }
}
private LocalSession getExistingSession(Tenant tenant, ApplicationId applicationId) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
return getLocalSession(tenant, applicationRepo.getSessionIdForApplication(applicationId));
}
private LocalSession getActiveSession(Tenant tenant, ApplicationId applicationId) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
if (applicationRepo.listApplications().contains(applicationId)) {
return tenant.getLocalSessionRepo().getSession(applicationRepo.getSessionIdForApplication(applicationId));
}
return null;
}
private static void logConfigChangeActions(ConfigChangeActions actions, DeployLogger logger) {
RestartActions restartActions = actions.getRestartActions();
if ( ! restartActions.isEmpty()) {
logger.log(Level.WARNING, "Change(s) between active and new application that require restart:\n" +
restartActions.format());
}
RefeedActions refeedActions = actions.getRefeedActions();
if ( ! refeedActions.isEmpty()) {
boolean allAllowed = refeedActions.getEntries().stream().allMatch(RefeedActions.Entry::allowed);
logger.log(allAllowed ? Level.INFO : Level.WARNING,
"Change(s) between active and new application that may require re-feed:\n" +
refeedActions.format());
}
}
/** Returns version to use when deploying application in given environment */
static Version decideVersion(ApplicationId application, Environment environment, Version targetVersion) {
if (environment.isManuallyDeployed() &&
!"hosted-vespa".equals(application.tenant().value())) {
return Vtag.currentVersion;
}
return targetVersion;
}
/** Returns an empty Slime with a root object set, ready to receive deploy log entries. */
public Slime createDeployLog() {
    Slime slime = new Slime();
    slime.setObject();
    return slime;
}
} |
Aww yiss, this is what I like to type into the URL bar 😱 | public void testRequestMapping() throws Exception {
JSONObject json = new JSONObject();
json.put("yql", "select * from sources * where sddocname contains \"blog_post\" limit 0 | all(group(date) max(3) order(-count())each(output(count())));");
json.put("hits", 10.0);
json.put("offset", 5);
json.put("queryProfile", "foo");
json.put("nocache", false);
json.put("groupingSessionCache", false);
json.put("searchChain", "exceptionInPlugin");
json.put("timeout", 0);
json.put("tracelevel", 1);
json.put("trace.timestamps", false);
JSONObject model = new JSONObject();
model.put("defaultIndex", 1);
model.put("encoding", "json");
model.put("filter", "default");
model.put("language", "en");
model.put("queryString", "abc");
model.put("restrict", "_doc,json,xml");
model.put("searchPath", "node1");
model.put("sources", "source1,source2");
model.put("type", "yql");
json.put("model", model);
JSONObject ranking = new JSONObject();
ranking.put("location", "123789.89123N;128123W");
ranking.put("features", "none");
ranking.put("listFeatures", false);
ranking.put("profile", "1");
ranking.put("properties", "default");
ranking.put("sorting", "desc");
ranking.put("freshness", "0.05");
ranking.put("queryCache", false);
JSONObject matchPhase = new JSONObject();
matchPhase.put("maxHits", "100");
matchPhase.put("attribute", "title");
matchPhase.put("ascending", true);
JSONObject diversity = new JSONObject();
diversity.put("attribute", "title");
diversity.put("minGroups", 1);
matchPhase.put("diversity", diversity);
ranking.put("matchPhase", matchPhase);
json.put("ranking", ranking);
JSONObject presentation = new JSONObject();
presentation.put("bolding", true);
presentation.put("format", "json");
presentation.put("summary", "none");
presentation.put("template", "json");
presentation.put("timing", false);
json.put("presentation", presentation);
JSONObject grouping = new JSONObject();
grouping.put("select", "_all");
grouping.put("collapsefield", "none");
grouping.put("collapsesize", 2);
grouping.put("collapse.summary", "default");
json.put("grouping", grouping);
JSONObject pos = new JSONObject();
pos.put("ll", "1263123N;1231.9W");
pos.put("radius", "71234m");
pos.put("bb", "1237123W;123218N");
pos.put("attribute", "default");
json.put("pos", pos);
JSONObject streaming = new JSONObject();
streaming.put("userid", 123);
streaming.put("groupname", "abc");
streaming.put("selection", "none");
streaming.put("priority", 10);
streaming.put("maxbucketspervisitor", 5);
json.put("streaming", streaming);
JSONObject rules = new JSONObject();
rules.put("off", false);
rules.put("rulebase", "default");
json.put("rules", rules);
json.put("recall", "none");
json.put("user", 123);
json.put("nocachewrite", false);
json.put("hitcountestimate", true);
json.put("metrics.ignore", false);
Inspector inspector = SlimeUtils.jsonToSlime(json.toString().getBytes("utf-8")).get();
Map<String, String> map = new HashMap<>();
createRequestMapping(inspector, map, "");
String url = uri + "&model.sources=source1%2Csource2&select=_all&model.language=en&presentation.timing=false&pos.attribute=default&pos.radius=71234m&model.searchPath=node1&nocachewrite=false&ranking.matchPhase.maxHits=100&presentation.summary=none" +
"&nocache=false&model.type=yql&collapse.summary=default&ranking.matchPhase.diversity.minGroups=1&ranking.location=123789.89123N%3B128123W&ranking.queryCache=false&offset=5&streaming.groupname=abc&groupingSessionCache=false" +
"&presentation.template=json&rules.off=false&ranking.properties=default&searchChain=exceptionInPlugin&pos.ll=1263123N%3B1231.9W&ranking.sorting=desc&ranking.matchPhase.ascending=true&ranking.features=none&hitcountestimate=true" +
"&model.filter=default&metrics.ignore=false&collapsefield=none&ranking.profile=1&rules.rulebase=default&model.defaultIndex=1&tracelevel=1&ranking.listFeatures=false&timeout=0&presentation.format=json" +
"&yql=select+%2A+from+sources+%2A+where+sddocname+contains+%22blog_post%22+limit+0+%7C+all%28group%28date%29+max%283%29+order%28-count%28%29%29each%28output%28count%28%29%29%29%29%3B&recall=none&streaming.maxbucketspervisitor=5" +
"&queryProfile=foo&presentation.bolding=true&model.encoding=json&model.queryString=abc&streaming.selection=none&trace.timestamps=false&collapsesize=2&streaming.priority=10&ranking.matchPhase.diversity.attribute=title" +
"&ranking.matchPhase.attribute=title&hits=10&streaming.userid=123&pos.bb=1237123W%3B123218N&model.restrict=_doc%2Cjson%2Cxml&ranking.freshness=0.05&user=123";
final HttpRequest request = HttpRequest.createTestRequest(url, GET);
Map<String, String> propertyMap = request.propertyMap();
assertEquals("Should have same mapping for properties", map, propertyMap);
} | "&ranking.matchPhase.attribute=title&hits=10&streaming.userid=123&pos.bb=1237123W%3B123218N&model.restrict=_doc%2Cjson%2Cxml&ranking.freshness=0.05&user=123"; | public void testRequestMapping() throws Exception {
JSONObject json = new JSONObject();
json.put("yql", "select * from sources * where sddocname contains \"blog_post\" limit 0 | all(group(date) max(3) order(-count())each(output(count())));");
json.put("hits", 10.0);
json.put("offset", 5);
json.put("queryProfile", "foo");
json.put("nocache", false);
json.put("groupingSessionCache", false);
json.put("searchChain", "exceptionInPlugin");
json.put("timeout", 0);
json.put("select", "_all");
JSONObject model = new JSONObject();
model.put("defaultIndex", 1);
model.put("encoding", "json");
model.put("filter", "default");
model.put("language", "en");
model.put("queryString", "abc");
model.put("restrict", "_doc,json,xml");
model.put("searchPath", "node1");
model.put("sources", "source1,source2");
model.put("type", "yql");
json.put("model", model);
JSONObject ranking = new JSONObject();
ranking.put("location", "123789.89123N;128123W");
ranking.put("features", "none");
ranking.put("listFeatures", false);
ranking.put("profile", "1");
ranking.put("properties", "default");
ranking.put("sorting", "desc");
ranking.put("freshness", "0.05");
ranking.put("queryCache", false);
JSONObject matchPhase = new JSONObject();
matchPhase.put("maxHits", "100");
matchPhase.put("attribute", "title");
matchPhase.put("ascending", true);
JSONObject diversity = new JSONObject();
diversity.put("attribute", "title");
diversity.put("minGroups", 1);
matchPhase.put("diversity", diversity);
ranking.put("matchPhase", matchPhase);
json.put("ranking", ranking);
JSONObject presentation = new JSONObject();
presentation.put("bolding", true);
presentation.put("format", "json");
presentation.put("summary", "none");
presentation.put("template", "json");
presentation.put("timing", false);
json.put("presentation", presentation);
JSONObject collapse = new JSONObject();
collapse.put("field", "none");
collapse.put("size", 2);
collapse.put("summary", "default");
json.put("collapse", collapse);
JSONObject trace = new JSONObject();
trace.put("level", 1);
trace.put("timestamps", false);
trace.put("rules", "none");
json.put("trace", trace);
JSONObject pos = new JSONObject();
pos.put("ll", "1263123N;1231.9W");
pos.put("radius", "71234m");
pos.put("bb", "1237123W;123218N");
pos.put("attribute", "default");
json.put("pos", pos);
JSONObject streaming = new JSONObject();
streaming.put("userid", 123);
streaming.put("groupname", "abc");
streaming.put("selection", "none");
streaming.put("priority", 10);
streaming.put("maxbucketspervisitor", 5);
json.put("streaming", streaming);
JSONObject rules = new JSONObject();
rules.put("off", false);
rules.put("rulebase", "default");
json.put("rules", rules);
JSONObject metrics = new JSONObject();
metrics.put("ignore", "_all");
json.put("metrics", metrics);
json.put("recall", "none");
json.put("user", 123);
json.put("nocachewrite", false);
json.put("hitcountestimate", true);
Inspector inspector = SlimeUtils.jsonToSlime(json.toString().getBytes("utf-8")).get();
Map<String, String> map = new HashMap<>();
searchHandler.createRequestMapping(inspector, map, "");
String url = uri + "&model.sources=source1%2Csource2&select=_all&model.language=en&presentation.timing=false&pos.attribute=default&pos.radius=71234m&model.searchPath=node1&nocachewrite=false&ranking.matchPhase.maxHits=100&presentation.summary=none" +
"&nocache=false&model.type=yql&collapse.summary=default&ranking.matchPhase.diversity.minGroups=1&ranking.location=123789.89123N%3B128123W&ranking.queryCache=false&offset=5&streaming.groupname=abc&groupingSessionCache=false" +
"&presentation.template=json&trace.rules=none&rules.off=false&ranking.properties=default&searchChain=exceptionInPlugin&pos.ll=1263123N%3B1231.9W&ranking.sorting=desc&ranking.matchPhase.ascending=true&ranking.features=none&hitcountestimate=true" +
"&model.filter=default&metrics.ignore=_all&collapse.field=none&ranking.profile=1&rules.rulebase=default&model.defaultIndex=1&trace.level=1&ranking.listFeatures=false&timeout=0&presentation.format=json" +
"&yql=select+%2A+from+sources+%2A+where+sddocname+contains+%22blog_post%22+limit+0+%7C+all%28group%28date%29+max%283%29+order%28-count%28%29%29each%28output%28count%28%29%29%29%29%3B&recall=none&streaming.maxbucketspervisitor=5" +
"&queryProfile=foo&presentation.bolding=true&model.encoding=json&model.queryString=abc&streaming.selection=none&trace.timestamps=false&collapse.size=2&streaming.priority=10&ranking.matchPhase.diversity.attribute=title" +
"&ranking.matchPhase.attribute=title&hits=10&streaming.userid=123&pos.bb=1237123W%3B123218N&model.restrict=_doc%2Cjson%2Cxml&ranking.freshness=0.05&user=123";
final HttpRequest request = HttpRequest.createTestRequest(url, GET);
Map<String, String> propertyMap = request.propertyMap();
assertEquals("Should have same mapping for properties", map, propertyMap);
} | class JSONSearchHandlerTestCase {
private static final String testDir = "src/test/java/com/yahoo/search/handler/test/config";
private static final String myHostnameHeader = "my-hostname-header";
private static final String selfHostname = HostName.getLocalhost();
private static String tempDir = "";
private static String configId = null;
private static final String uri = "http:
private static final String JSON_CONTENT_TYPE = "application/json";
@Rule
public TemporaryFolder tempfolder = new TemporaryFolder();
private RequestHandlerTestDriver driver = null;
private HandlersConfigurerTestWrapper configurer = null;
private SearchHandler searchHandler;
@Before
public void startUp() throws IOException {
File cfgDir = tempfolder.newFolder("SearchHandlerTestCase");
tempDir = cfgDir.getAbsolutePath();
configId = "dir:" + tempDir;
IOUtils.copyDirectory(new File(testDir), cfgDir, 1);
generateComponentsConfigForActive();
configurer = new HandlersConfigurerTestWrapper(new Container(), configId);
searchHandler = (SearchHandler)configurer.getRequestHandlerRegistry().getComponent(SearchHandler.class.getName());
driver = new RequestHandlerTestDriver(searchHandler);
}
@After
public void shutDown() {
if (configurer != null) configurer.shutdown();
if (driver != null) driver.close();
}
/**
 * Regenerates the components config files (chains.cfg, handlers.cfg,
 * components.cfg) in the active temp config directory, so a subsequent
 * reloadConfig() picks up the current handler/chain setup.
 *
 * @throws IOException if the config files cannot be written
 */
private void generateComponentsConfigForActive() throws IOException {
    File activeConfig = new File(tempDir);
    SearchChainConfigurerTestCase.
            createComponentsConfig(new File(activeConfig, "chains.cfg").getPath(),
                                   new File(activeConfig, "handlers.cfg").getPath(),
                                   new File(activeConfig, "components.cfg").getPath());
}
/**
 * Returns the SearchHandler component currently registered with the given
 * configurer's request handler registry (a new instance after each
 * reloadConfig()).
 */
private SearchHandler fetchSearchHandler(HandlersConfigurerTestWrapper configurer) {
    return (SearchHandler) configurer.getRequestHandlerRegistry().getComponent(SearchHandler.class.getName());
}
@Test
public void testFailing() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "test");
json.put("searchChain", "classLoadingError");
assertTrue(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE).readAll().contains("NoClassDefFoundError"));
}
@Test
public synchronized void testPluginError() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "test");
json.put("searchChain", "exceptionInPlugin");
assertTrue(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE).readAll().contains("NullPointerException"));
}
@Test
public synchronized void testWorkingReconfiguration() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
assertJsonResult(json, driver);
IOUtils.copyDirectory(new File(testDir, "handlers2"), new File(tempDir), 1);
generateComponentsConfigForActive();
configurer.reloadConfig();
SearchHandler newSearchHandler = fetchSearchHandler(configurer);
assertNotSame("Have a new instance of the search handler", searchHandler, newSearchHandler);
assertNotNull("Have the new search chain", fetchSearchHandler(configurer).getSearchChainRegistry().getChain("hello"));
assertNull("Don't have the new search chain", fetchSearchHandler(configurer).getSearchChainRegistry().getChain("classLoadingError"));
try (RequestHandlerTestDriver newDriver = new RequestHandlerTestDriver(searchHandler)) {
assertJsonResult(json, newDriver);
}
}
@Test
public void testInvalidYqlQuery() throws Exception {
IOUtils.copyDirectory(new File(testDir, "config_yql"), new File(tempDir), 1);
generateComponentsConfigForActive();
configurer.reloadConfig();
SearchHandler newSearchHandler = fetchSearchHandler(configurer);
assertTrue("Do I have a new instance of the search handler?", searchHandler != newSearchHandler);
try (RequestHandlerTestDriver newDriver = new RequestHandlerTestDriver(newSearchHandler)) {
JSONObject json = new JSONObject();
json.put("yql", "select * from foo where bar > 1453501295");
RequestHandlerTestDriver.MockResponseHandler responseHandler = newDriver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE);
responseHandler.readAll();
assertThat(responseHandler.getStatus(), is(400));
}
}
@Test
public void testInvalidQueryParamWithQueryProfile() throws Exception {
try (RequestHandlerTestDriver newDriver = driverWithConfig("config_invalid_param")) {
testInvalidQueryParam(newDriver);
}
}
private void testInvalidQueryParam(final RequestHandlerTestDriver testDriver) throws Exception{
JSONObject json = new JSONObject();
json.put("query", "status_code:0");
json.put("hits", 20);
json.put("offset", -20);
RequestHandlerTestDriver.MockResponseHandler responseHandler =
testDriver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE);
String response = responseHandler.readAll();
assertThat(responseHandler.getStatus(), is(400));
assertThat(response, containsString("offset"));
assertThat(response, containsString("\"code\":" + com.yahoo.container.protect.Error.INVALID_QUERY_PARAMETER.code));
}
@Test
public void testNormalResultJsonAliasRendering() throws Exception {
JSONObject json = new JSONObject();
json.put("format", "json");
json.put("query", "abc");
assertJsonResult(json, driver);
}
@Test
public void testNullQuery() throws Exception {
JSONObject json = new JSONObject();
json.put("format", "xml");
assertEquals("<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n" +
"<result total-hit-count=\"0\">\n" +
" <hit relevancy=\"1.0\">\n" +
" <field name=\"relevancy\">1.0</field>\n" +
" <field name=\"uri\">testHit</field>\n" +
" </hit>\n" +
"</result>\n", driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE).readAll());
}
@Test
public void testWebServiceStatus() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "web_service_status_code");
RequestHandlerTestDriver.MockResponseHandler responseHandler =
driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE);
String response = responseHandler.readAll();
assertThat(responseHandler.getStatus(), is(406));
assertThat(response, containsString("\"code\":" + 406));
}
@Test
public void testNormalResultImplicitDefaultRendering() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
assertJsonResult(json, driver);
}
@Test
public void testNormalResultExplicitDefaultRendering() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "default");
assertJsonResult(json, driver);
}
@Test
public void testNormalResultXmlAliasRendering() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "xml");
assertXmlResult(json, driver);
}
@Test
public void testNormalResultExplicitDefaultRenderingFullRendererName1() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "DefaultRenderer");
assertXmlResult(json, driver);
}
@Test
public void testNormalResultExplicitDefaultRenderingFullRendererName2() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "JsonRenderer");
assertJsonResult(json, driver);
}
@Test
public void testResultLegacyTiledFormat() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "tiled");
assertTiledResult(json, driver);
}
@Test
public void testResultLegacyPageFormat() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "page");
assertPageResult(json, driver);
}
private static final String xmlResult =
"<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n" +
"<result total-hit-count=\"0\">\n" +
" <hit relevancy=\"1.0\">\n" +
" <field name=\"relevancy\">1.0</field>\n" +
" <field name=\"uri\">testHit</field>\n" +
" </hit>\n" +
"</result>\n";
private void assertXmlResult(JSONObject json, RequestHandlerTestDriver driver) throws Exception {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), xmlResult);
}
private static final String jsonResult = "{\"root\":{"
+ "\"id\":\"toplevel\",\"relevance\":1.0,\"fields\":{\"totalCount\":0},"
+ "\"children\":["
+ "{\"id\":\"testHit\",\"relevance\":1.0,\"fields\":{\"uri\":\"testHit\"}}"
+ "]}}";
private void assertJsonResult(JSONObject json, RequestHandlerTestDriver driver) throws Exception {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), jsonResult);
}
private static final String tiledResult =
"<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n" +
"<result version=\"1.0\">\n" +
"\n" +
" <hit relevance=\"1.0\">\n" +
" <id>testHit</id>\n" +
" <uri>testHit</uri>\n" +
" </hit>\n" +
"\n" +
"</result>\n";
private void assertTiledResult(JSONObject json, RequestHandlerTestDriver driver) throws Exception {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), tiledResult);
}
private static final String pageResult =
"<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n" +
"<page version=\"1.0\">\n" +
"\n" +
" <content>\n" +
" <hit relevance=\"1.0\">\n" +
" <id>testHit</id>\n" +
" <uri>testHit</uri>\n" +
" </hit>\n" +
" </content>\n" +
"\n" +
"</page>\n";
private void assertPageResult(JSONObject json, RequestHandlerTestDriver driver) throws Exception {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), pageResult);
}
/**
 * Asserts that the response completed with status 200, rendered exactly the
 * expected body, and carries this host's name in the hostname header.
 */
private void assertOkResult(RequestHandlerTestDriver.MockResponseHandler response, String expected) {
    String body = response.readAll();
    assertEquals(expected, body);
    assertEquals(200, response.getStatus());
    String reportedHostname = response.getResponse().headers().get(myHostnameHeader).get(0);
    assertEquals(selfHostname, reportedHostname);
}
/**
 * Reconfigures the handlers from the given config subdirectory and returns a
 * test driver wrapping the resulting new SearchHandler instance.
 * The caller is responsible for closing the returned driver.
 *
 * @param configDirectory subdirectory of testDir to copy into the active temp config dir
 */
private RequestHandlerTestDriver driverWithConfig(String configDirectory) throws Exception {
    IOUtils.copyDirectory(new File(testDir, configDirectory), new File(tempDir), 1);
    generateComponentsConfigForActive();
    configurer.reloadConfig();
    SearchHandler newSearchHandler = fetchSearchHandler(configurer);
    // Reload must have produced a fresh handler instance
    assertTrue("Do I have a new instance of the search handler?", searchHandler != newSearchHandler);
    return new RequestHandlerTestDriver(newSearchHandler);
}
/**
 * Recursively flattens a Slime JSON structure into dotted property keys,
 * mirroring how the search handler maps a JSON request body to query
 * properties (e.g. {"model": {"type": "yql"}} becomes "model.type" -> "yql").
 *
 * @param inspector the Slime node to traverse (expected to be an object)
 * @param map       receives the flattened key -> value pairs
 * @param parent    dotted prefix accumulated so far ("" at the top level)
 */
private void createRequestMapping(Inspector inspector, Map<String, String> map, String parent){
    inspector.traverse((ObjectTraverser) (key, value) -> {
        String qualifiedKey = parent + key;
        switch (value.type()) {
            case BOOL:
                map.put(qualifiedKey, Boolean.toString(value.asBool()));
                break;
            case DOUBLE:
                map.put(qualifiedKey, Double.toString(value.asDouble()));
                break;
            case LONG:
                map.put(qualifiedKey, Long.toString(value.asLong()));
                break;
            case STRING:
                map.put(qualifiedKey , value.asString());
                break;
            case OBJECT:
                // Special case: children of "grouping" map to top-level keys
                // (no "grouping." prefix); other objects recurse with the
                // dotted prefix. OBJECT is the last case, so the missing break
                // in the grouping branch just falls out of the switch.
                if (key.equals("grouping")) {
                    createRequestMapping(value, map, "");
                } else {
                    createRequestMapping(value, map, qualifiedKey+".");
                    break;
                }
        }
    });
}
@Test
} | class JSONSearchHandlerTestCase {
private static final String testDir = "src/test/java/com/yahoo/search/handler/test/config";
private static final String myHostnameHeader = "my-hostname-header";
private static final String selfHostname = HostName.getLocalhost();
private static String tempDir = "";
private static String configId = null;
private static final String uri = "http:
private static final String JSON_CONTENT_TYPE = "application/json";
@Rule
public TemporaryFolder tempfolder = new TemporaryFolder();
private RequestHandlerTestDriver driver = null;
private HandlersConfigurerTestWrapper configurer = null;
private SearchHandler searchHandler;
@Before
public void startUp() throws IOException {
File cfgDir = tempfolder.newFolder("SearchHandlerTestCase");
tempDir = cfgDir.getAbsolutePath();
configId = "dir:" + tempDir;
IOUtils.copyDirectory(new File(testDir), cfgDir, 1);
generateComponentsConfigForActive();
configurer = new HandlersConfigurerTestWrapper(new Container(), configId);
searchHandler = (SearchHandler)configurer.getRequestHandlerRegistry().getComponent(SearchHandler.class.getName());
driver = new RequestHandlerTestDriver(searchHandler);
}
@After
public void shutDown() {
if (configurer != null) configurer.shutdown();
if (driver != null) driver.close();
}
private void generateComponentsConfigForActive() throws IOException {
File activeConfig = new File(tempDir);
SearchChainConfigurerTestCase.
createComponentsConfig(new File(activeConfig, "chains.cfg").getPath(),
new File(activeConfig, "handlers.cfg").getPath(),
new File(activeConfig, "components.cfg").getPath());
}
private SearchHandler fetchSearchHandler(HandlersConfigurerTestWrapper configurer) {
return (SearchHandler) configurer.getRequestHandlerRegistry().getComponent(SearchHandler.class.getName());
}
@Test
public void testBadJSON() throws Exception{
String json = "Not a valid JSON-string";
RequestHandlerTestDriver.MockResponseHandler responseHandler = driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json, JSON_CONTENT_TYPE);
String response = responseHandler.readAll();
assertThat(responseHandler.getStatus(), is(400));
assertThat(response, containsString("errors"));
assertThat(response, containsString("\"code\":" + Error.ILLEGAL_QUERY.code));
}
@Test
public void testFailing() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "test");
json.put("searchChain", "classLoadingError");
assertTrue(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE).readAll().contains("NoClassDefFoundError"));
}
@Test
public synchronized void testPluginError() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "test");
json.put("searchChain", "exceptionInPlugin");
assertTrue(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE).readAll().contains("NullPointerException"));
}
@Test
public synchronized void testWorkingReconfiguration() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
assertJsonResult(json, driver);
IOUtils.copyDirectory(new File(testDir, "handlers2"), new File(tempDir), 1);
generateComponentsConfigForActive();
configurer.reloadConfig();
SearchHandler newSearchHandler = fetchSearchHandler(configurer);
assertNotSame("Have a new instance of the search handler", searchHandler, newSearchHandler);
assertNotNull("Have the new search chain", fetchSearchHandler(configurer).getSearchChainRegistry().getChain("hello"));
assertNull("Don't have the new search chain", fetchSearchHandler(configurer).getSearchChainRegistry().getChain("classLoadingError"));
try (RequestHandlerTestDriver newDriver = new RequestHandlerTestDriver(searchHandler)) {
assertJsonResult(json, newDriver);
}
}
@Test
public void testInvalidYqlQuery() throws Exception {
IOUtils.copyDirectory(new File(testDir, "config_yql"), new File(tempDir), 1);
generateComponentsConfigForActive();
configurer.reloadConfig();
SearchHandler newSearchHandler = fetchSearchHandler(configurer);
assertTrue("Do I have a new instance of the search handler?", searchHandler != newSearchHandler);
try (RequestHandlerTestDriver newDriver = new RequestHandlerTestDriver(newSearchHandler)) {
JSONObject json = new JSONObject();
json.put("yql", "select * from foo where bar > 1453501295");
RequestHandlerTestDriver.MockResponseHandler responseHandler = newDriver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE);
responseHandler.readAll();
assertThat(responseHandler.getStatus(), is(400));
}
}
@Test
public void testInvalidQueryParamWithQueryProfile() throws Exception {
try (RequestHandlerTestDriver newDriver = driverWithConfig("config_invalid_param")) {
testInvalidQueryParam(newDriver);
}
}
private void testInvalidQueryParam(final RequestHandlerTestDriver testDriver) throws Exception{
JSONObject json = new JSONObject();
json.put("query", "status_code:0");
json.put("hits", 20);
json.put("offset", -20);
RequestHandlerTestDriver.MockResponseHandler responseHandler =
testDriver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE);
String response = responseHandler.readAll();
assertThat(responseHandler.getStatus(), is(400));
assertThat(response, containsString("offset"));
assertThat(response, containsString("\"code\":" + com.yahoo.container.protect.Error.INVALID_QUERY_PARAMETER.code));
}
@Test
public void testNormalResultJsonAliasRendering() throws Exception {
JSONObject json = new JSONObject();
json.put("format", "json");
json.put("query", "abc");
assertJsonResult(json, driver);
}
@Test
public void testNullQuery() throws Exception {
JSONObject json = new JSONObject();
json.put("format", "xml");
assertEquals("<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n" +
"<result total-hit-count=\"0\">\n" +
" <hit relevancy=\"1.0\">\n" +
" <field name=\"relevancy\">1.0</field>\n" +
" <field name=\"uri\">testHit</field>\n" +
" </hit>\n" +
"</result>\n", driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE).readAll());
}
@Test
public void testWebServiceStatus() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "web_service_status_code");
RequestHandlerTestDriver.MockResponseHandler responseHandler =
driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE);
String response = responseHandler.readAll();
assertThat(responseHandler.getStatus(), is(406));
assertThat(response, containsString("\"code\":" + 406));
}
@Test
public void testNormalResultImplicitDefaultRendering() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
assertJsonResult(json, driver);
}
@Test
public void testNormalResultExplicitDefaultRendering() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "default");
assertJsonResult(json, driver);
}
@Test
public void testNormalResultXmlAliasRendering() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "xml");
assertXmlResult(json, driver);
}
@Test
public void testNormalResultExplicitDefaultRenderingFullRendererName1() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "DefaultRenderer");
assertXmlResult(json, driver);
}
@Test
public void testNormalResultExplicitDefaultRenderingFullRendererName2() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "JsonRenderer");
assertJsonResult(json, driver);
}
@Test
public void testResultLegacyTiledFormat() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "tiled");
assertTiledResult(json, driver);
}
@Test
public void testResultLegacyPageFormat() throws Exception {
JSONObject json = new JSONObject();
json.put("query", "abc");
json.put("format", "page");
assertPageResult(json, driver);
}
private static final String xmlResult =
"<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n" +
"<result total-hit-count=\"0\">\n" +
" <hit relevancy=\"1.0\">\n" +
" <field name=\"relevancy\">1.0</field>\n" +
" <field name=\"uri\">testHit</field>\n" +
" </hit>\n" +
"</result>\n";
private void assertXmlResult(JSONObject json, RequestHandlerTestDriver driver) throws Exception {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), xmlResult);
}
private static final String jsonResult = "{\"root\":{"
+ "\"id\":\"toplevel\",\"relevance\":1.0,\"fields\":{\"totalCount\":0},"
+ "\"children\":["
+ "{\"id\":\"testHit\",\"relevance\":1.0,\"fields\":{\"uri\":\"testHit\"}}"
+ "]}}";
private void assertJsonResult(JSONObject json, RequestHandlerTestDriver driver) throws Exception {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), jsonResult);
}
private static final String tiledResult =
"<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n" +
"<result version=\"1.0\">\n" +
"\n" +
" <hit relevance=\"1.0\">\n" +
" <id>testHit</id>\n" +
" <uri>testHit</uri>\n" +
" </hit>\n" +
"\n" +
"</result>\n";
private void assertTiledResult(JSONObject json, RequestHandlerTestDriver driver) throws Exception {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), tiledResult);
}
private static final String pageResult =
"<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n" +
"<page version=\"1.0\">\n" +
"\n" +
" <content>\n" +
" <hit relevance=\"1.0\">\n" +
" <id>testHit</id>\n" +
" <uri>testHit</uri>\n" +
" </hit>\n" +
" </content>\n" +
"\n" +
"</page>\n";
private void assertPageResult(JSONObject json, RequestHandlerTestDriver driver) throws Exception {
assertOkResult(driver.sendRequest(uri, com.yahoo.jdisc.http.HttpRequest.Method.POST, json.toString(), JSON_CONTENT_TYPE), pageResult);
}
private void assertOkResult(RequestHandlerTestDriver.MockResponseHandler response, String expected) {
assertEquals(expected, response.readAll());
assertEquals(200, response.getStatus());
assertEquals(selfHostname, response.getResponse().headers().get(myHostnameHeader).get(0));
}
private RequestHandlerTestDriver driverWithConfig(String configDirectory) throws Exception {
IOUtils.copyDirectory(new File(testDir, configDirectory), new File(tempDir), 1);
generateComponentsConfigForActive();
configurer.reloadConfig();
SearchHandler newSearchHandler = fetchSearchHandler(configurer);
assertTrue("Do I have a new instance of the search handler?", searchHandler != newSearchHandler);
return new RequestHandlerTestDriver(newSearchHandler);
}
@Test
} |
Preferable if you let the above constructor call `this(request, request.propertyMap(), queryProfile)`. | public Query(HttpRequest request, Map<String, String> requestMap, CompiledQueryProfile queryProfile) {
super(new QueryPropertyAliases(propertyAliases));
this.httpRequest = request;
init(requestMap, queryProfile);
} | init(requestMap, queryProfile); | public Query(HttpRequest request, Map<String, String> requestMap, CompiledQueryProfile queryProfile) {
super(new QueryPropertyAliases(propertyAliases));
this.httpRequest = request;
init(requestMap, queryProfile);
} | class Query extends com.yahoo.processing.Request implements Cloneable {
/** The type of the query */
public enum Type {
ALL(0,"all"),
ANY(1,"any"),
PHRASE(2,"phrase"),
ADVANCED(3,"adv"),
WEB(4,"web"),
PROGRAMMATIC(5, "prog"),
YQL(6, "yql");
private final int intValue;
private final String stringValue;
Type(int intValue,String stringValue) {
this.intValue = intValue;
this.stringValue = stringValue;
}
/** Converts a type argument value into a query type */
public static Type getType(String typeString) {
for (Type type:Type.values())
if (type.stringValue.equals(typeString))
return type;
return ALL;
}
public int asInt() { return intValue; }
public String toString() { return stringValue; }
}
/** The offset from the most relevant hits found from this query */
private int offset = 0;
/** The number of hits to return */
private int hits = 10;
/** The query context level, 0 means no tracing */
private int traceLevel = 0;
private static final long dumpTimeout = (6 * 60 * 1000);
private static final long defaultTimeout = 5000;
/** The timeout of the query, in milliseconds */
private long timeout = defaultTimeout;
/** Whether this query is forbidden to access cached information */
private boolean noCache = false;
/** Whether or not grouping should use a session cache */
private boolean groupingSessionCache = false;
/** The synchronous view of the JDisc request causing this query */
private final HttpRequest httpRequest;
/** The context, or null if there is no context */
private QueryContext context = null;
/** Used for downstream session caches */
private UniqueRequestId requestId = null;
/** The ranking requested in this query */
private Ranking ranking = new Ranking(this);
/** The query query and/or query program declaration */
private Model model = new Model(this);
/** How results of this query should be presented */
private Presentation presentation = new Presentation(this);
private static Logger log = Logger.getLogger(Query.class.getName());
/** The time this query was created */
private long startTime;
public static final CompoundName OFFSET = new CompoundName("offset");
public static final CompoundName HITS = new CompoundName("hits");
public static final CompoundName SEARCH_CHAIN = new CompoundName("searchChain");
public static final CompoundName TRACE_LEVEL = new CompoundName("traceLevel");
public static final CompoundName NO_CACHE = new CompoundName("noCache");
public static final CompoundName GROUPING_SESSION_CACHE = new CompoundName("groupingSessionCache");
public static final CompoundName TIMEOUT = new CompoundName("timeout");
private static QueryProfileType argumentType;
static {
argumentType = new QueryProfileType("native");
argumentType.setBuiltin(true);
argumentType.addField(new FieldDescription(OFFSET.toString(), "integer", "offset start"));
argumentType.addField(new FieldDescription(HITS.toString(), "integer", "hits count"));
argumentType.addField(new FieldDescription(SEARCH_CHAIN.toString(), "string"));
argumentType.addField(new FieldDescription(TRACE_LEVEL.toString(), "integer", "tracelevel"));
argumentType.addField(new FieldDescription(NO_CACHE.toString(), "boolean", "nocache"));
argumentType.addField(new FieldDescription(GROUPING_SESSION_CACHE.toString(), "boolean", "groupingSessionCache"));
argumentType.addField(new FieldDescription(TIMEOUT.toString(), "string", "timeout"));
argumentType.addField(new FieldDescription(FederationSearcher.SOURCENAME.toString(),"string"));
argumentType.addField(new FieldDescription(FederationSearcher.PROVIDERNAME.toString(),"string"));
argumentType.addField(new FieldDescription(Presentation.PRESENTATION, new QueryProfileFieldType(Presentation.getArgumentType())));
argumentType.addField(new FieldDescription(Ranking.RANKING, new QueryProfileFieldType(Ranking.getArgumentType())));
argumentType.addField(new FieldDescription(Model.MODEL, new QueryProfileFieldType(Model.getArgumentType())));
argumentType.freeze();
}
public static QueryProfileType getArgumentType() { return argumentType; }
/** The aliases of query properties */
private static Map<String,CompoundName> propertyAliases;
static {
Map<String,CompoundName> propertyAliasesBuilder = new HashMap<>();
addAliases(Query.getArgumentType(), propertyAliasesBuilder);
addAliases(Ranking.getArgumentType(), propertyAliasesBuilder);
addAliases(Model.getArgumentType(), propertyAliasesBuilder);
addAliases(Presentation.getArgumentType(), propertyAliasesBuilder);
propertyAliases = ImmutableMap.copyOf(propertyAliasesBuilder);
}
private static void addAliases(QueryProfileType arguments, Map<String, CompoundName> aliases) {
String prefix = getPrefix(arguments);
for (FieldDescription field : arguments.fields().values()) {
for (String alias : field.getAliases())
aliases.put(alias, new CompoundName(prefix+field.getName()));
}
}
private static String getPrefix(QueryProfileType type) {
if (type.getId().getName().equals("native")) return "";
return type.getId().getName() + ".";
}
public static void addNativeQueryProfileTypesTo(QueryProfileTypeRegistry registry) {
registry.register(Query.getArgumentType().unfrozen());
registry.register(Ranking.getArgumentType().unfrozen());
registry.register(Model.getArgumentType().unfrozen());
registry.register(Presentation.getArgumentType().unfrozen());
registry.register(DefaultProperties.argumentType.unfrozen());
}
/** Returns an unmodifiable list of all the native properties under a Query */
public static final List<CompoundName> nativeProperties =
ImmutableList.copyOf(namesUnder(CompoundName.empty, Query.getArgumentType()));
private static List<CompoundName> namesUnder(CompoundName prefix, QueryProfileType type) {
if ( type == null) return Collections.emptyList();
List<CompoundName> names = new ArrayList<>();
for (Map.Entry<String, FieldDescription> field : type.fields().entrySet()) {
if (field.getValue().getType() instanceof QueryProfileFieldType) {
names.addAll(namesUnder(prefix.append(field.getKey()),
((QueryProfileFieldType) field.getValue().getType()).getQueryProfileType()));
}
else {
names.add(prefix.append(field.getKey()));
}
}
return names;
}
/**
* Constructs an empty (null) query
*/
public Query() {
this("");
}
/**
* Construct a query from a string formatted in the http style, e.g <code>?query=test&offset=10&hits=13</code>
* The query must be uri encoded.
*/
public Query(String query) {
this(query, null);
}
/**
* Creates a query from a request
*
* @param request the HTTP request from which this is created
*/
public Query(HttpRequest request) {
this(request, null);
}
/**
* Construct a query from a string formatted in the http style, e.g <code>?query=test&offset=10&hits=13</code>
* The query must be uri encoded.
*/
public Query(String query, CompiledQueryProfile queryProfile) {
this(HttpRequest.createTestRequest(query, com.yahoo.jdisc.http.HttpRequest.Method.GET), queryProfile);
}
/**
* Creates a query from a request
*
* @param request the HTTP request from which this is created
* @param queryProfile the query profile to use for this query, or null if none.
*/
public Query(HttpRequest request, CompiledQueryProfile queryProfile) {
super(new QueryPropertyAliases(propertyAliases));
this.httpRequest = request;
init(request.propertyMap(), queryProfile);
}
/**
* Creates a query from a request containing a JSON-query.
*
* @param request the HTTP request from which this is created.
* @param requestMap the property map of the query.
* @param queryProfile the query profile to use for this query, or null if none.
*/
private void init(Map<String, String> requestMap, CompiledQueryProfile queryProfile) {
startTime = System.currentTimeMillis();
if (queryProfile != null) {
Properties queryProfileProperties = new QueryProfileProperties(queryProfile);
properties().chain(queryProfileProperties);
setPropertiesFromRequestMap(requestMap, properties());
properties().chain(new QueryProperties(this, queryProfile.getRegistry())).
chain(new ModelObjectMap()).
chain(new RequestContextProperties(requestMap)).
chain(queryProfileProperties).
chain(new DefaultProperties());
setFieldsFrom(queryProfileProperties, requestMap);
}
else {
properties().
chain(new QueryProperties(this, CompiledQueryProfileRegistry.empty)).
chain(new PropertyMap()).
chain(new DefaultProperties());
setPropertiesFromRequestMap(requestMap, properties());
}
properties().setParentQuery(this);
traceProperties();
}
public Query(Query query) {
this(query, query.getStartTime());
}
private Query(Query query, long startTime) {
super(query.properties().clone());
this.startTime = startTime;
this.httpRequest = query.httpRequest;
query.copyPropertiesTo(this);
}
/**
* Creates a new query from another query, but with time sensitive fields reset.
*/
public static Query createNewQuery(Query query) {
return new Query(query, System.currentTimeMillis());
}
/**
* Calls properties().set on each value in the given properties which is declared in this query or
* one of its dependent objects. This will ensure the appropriate setters are called on this and all
* dependent objects for the appropriate subset of the given property values
*/
private void setFieldsFrom(Properties properties, Map<String,String> context) {
setFrom(properties,Query.getArgumentType(), context);
setFrom(properties,Model.getArgumentType(), context);
setFrom(properties,Presentation.getArgumentType(), context);
setFrom(properties,Ranking.getArgumentType(), context);
}
/**
* For each field in the given query profile type, take the corresponding value from originalProperties
* (if any) set it to properties().
*/
private void setFrom(Properties originalProperties,QueryProfileType arguments,Map<String,String> context) {
String prefix = getPrefix(arguments);
for (FieldDescription field : arguments.fields().values()) {
String fullName = prefix + field.getName();
if (field.getType() == FieldType.genericQueryProfileType) {
for (Map.Entry<String, Object> entry : originalProperties.listProperties(fullName,context).entrySet()) {
try {
properties().set(fullName + "." + entry.getKey(), entry.getValue(), context);
} catch (IllegalArgumentException e) {
throw new QueryException("Invalid request parameter", e);
}
}
} else {
Object value=originalProperties.get(fullName,context);
if (value!=null) {
try {
properties().set(fullName,value,context);
} catch (IllegalArgumentException e) {
throw new QueryException("Invalid request parameter", e);
}
}
}
}
}
/** Calls properties.set on all entries in requestMap */
private void setPropertiesFromRequestMap(Map<String, String> requestMap, Properties properties) {
for (Map.Entry<String, String> entry : requestMap.entrySet()) {
try {
if (entry.getKey().equals("queryProfile")) continue;
properties.set(entry.getKey(), entry.getValue(), requestMap);
}
catch (IllegalArgumentException e) {
throw new QueryException("Invalid request parameter", e);
}
}
}
/** Returns the properties of this query. The properties are modifiable */
@Override
public Properties properties() { return (Properties)super.properties(); }
/**
* Traces how properties was resolved and from where. Done after the fact to avoid special handling
* of tracelevel, which is the property deciding whether this needs to be done
*/
private void traceProperties() {
if (traceLevel == 0) return;
CompiledQueryProfile profile=null;
QueryProfileProperties profileProperties=properties().getInstance(QueryProfileProperties.class);
if (profileProperties!=null)
profile=profileProperties.getQueryProfile();
if (profile==null)
trace("No query profile is used", false, 1);
else
trace("Using " + profile.toString(), false, 1);
if (traceLevel < 4) return;
StringBuilder b = new StringBuilder("Resolved properties:\n");
Set<String> mentioned = new HashSet<>();
for (Map.Entry<String,String> requestProperty : requestProperties().entrySet() ) {
Object resolvedValue = properties().get(requestProperty.getKey(), requestProperties());
if (resolvedValue == null && requestProperty.getKey().equals("queryProfile"))
resolvedValue = requestProperty.getValue();
b.append(requestProperty.getKey());
b.append("=");
b.append(String.valueOf(resolvedValue));
b.append(" (");
if (profile != null && ! profile.isOverridable(new CompoundName(requestProperty.getKey()), requestProperties()))
b.append("value from query profile - unoverridable, ignoring request value");
else
b.append("value from request");
b.append(")\n");
mentioned.add(requestProperty.getKey());
}
if (profile!=null) {
appendQueryProfileProperties(profile,mentioned,b);
}
trace(b.toString(),false,4);
}
private Map<String, String> requestProperties() {
return httpRequest.propertyMap();
}
private void appendQueryProfileProperties(CompiledQueryProfile profile,Set<String> mentioned,StringBuilder b) {
for (Map.Entry<String,Object> property : profile.listValues("",requestProperties()).entrySet()) {
if ( ! mentioned.contains(property.getKey()))
b.append(property.getKey() + "=" + property.getValue() + " (value from query profile)<br/>\n");
}
}
/**
* Validates this query
*
* @return the reason if it is invalid, null if it is valid
*/
public String validate() {
QueryProfileProperties queryProfileProperties = properties().getInstance(QueryProfileProperties.class);
if (queryProfileProperties == null) return null;
StringBuilder missingName = new StringBuilder();
if ( ! queryProfileProperties.isComplete(missingName, httpRequest.propertyMap()))
return "Incomplete query: Parameter '" + missingName + "' is mandatory in " +
queryProfileProperties.getQueryProfile() + " but is not set";
else
return null;
}
/** Returns the time (in milliseconds since epoch) when this query was started */
public long getStartTime() { return startTime; }
/** Returns the time (in milliseconds) since the query was started/created */
public long getDurationTime() {
return System.currentTimeMillis() - startTime;
}
/**
* Get the appropriate timeout for the query.
*
* @return timeout in milliseconds
**/
public long getTimeLeft() {
return getTimeout() - getDurationTime();
}
public boolean requestHasProperty(String name) {
return httpRequest.hasProperty(name);
}
/**
* Returns the number of milliseconds to wait for a response from a search backend
* before timing it out. Default is 5000.
* <p>
* Note: If Ranking.RANKFEATURES is turned on, this is hardcoded to 6 minutes.
*
* @return timeout in milliseconds.
*/
public long getTimeout() {
return properties().getBoolean(Ranking.RANKFEATURES, false) ? dumpTimeout : timeout;
}
/**
* Sets the number of milliseconds to wait for a response from a search backend
* before time out. Default is 5000.
*/
public void setTimeout(long timeout) {
if (timeout > 1000000000 || timeout < 0)
throw new IllegalArgumentException("'timeout' must be positive and smaller than 1000000000 ms but was " + timeout);
this.timeout = timeout;
}
/**
* Sets timeout from a string which will be parsed as a
*/
public void setTimeout(String timeoutString) {
setTimeout(ParameterParser.asMilliSeconds(timeoutString, timeout));
}
/**
* Resets the start time of the query. This will ensure that the query will run
* for the same amount of time as a newly created query.
*/
public void resetTimeout() { this.startTime = System.currentTimeMillis(); }
/**
* Sets the context level of this query, 0 means no tracing
* Higher numbers means increasingly more tracing
*/
public void setTraceLevel(int traceLevel) { this.traceLevel = traceLevel; }
/**
* Returns the context level of this query, 0 means no tracing
* Higher numbers means increasingly more tracing
*/
public int getTraceLevel() { return traceLevel; }
/**
* Returns the context level of this query, 0 means no tracing
* Higher numbers means increasingly more tracing
*/
public final boolean isTraceable(int level) { return traceLevel >= level; }
/** Returns whether this query should never be served from a cache. Default is false */
public boolean getNoCache() { return noCache; }
/** Sets whether this query should never be server from a cache. Default is false */
public void setNoCache(boolean noCache) { this.noCache = noCache; }
/** Returns whether this query should use the grouping session cache. Default is false */
public boolean getGroupingSessionCache() { return groupingSessionCache; }
/** Sets whether this query should use the grouping session cache. Default is false */
public void setGroupingSessionCache(boolean groupingSessionCache) { this.groupingSessionCache = groupingSessionCache; }
/**
* Returns the offset from the most relevant hits requested by the submitter
* of this query.
* Default is 0 - to return the most relevant hits
*/
public int getOffset() { return offset; }
/**
* Returns the number of hits requested by the submitter of this query.
* The default is 10.
*/
public int getHits() { return hits; }
/**
* Sets the number of hits requested. If hits is less than 0, an
* IllegalArgumentException is thrown. Default number of hits is 10.
*/
public void setHits(int hits) {
if (hits < 0)
throw new IllegalArgumentException("Must be a positive number");
this.hits = hits;
}
/**
* Set the hit offset. Can not be less than 0. Default is 0.
*/
public void setOffset(int offset) {
if (offset < 0)
throw new IllegalArgumentException("Must be a positive number");
this.offset = offset;
}
/** Convenience method to set both the offset and the number of hits to return */
public void setWindow(int offset,int hits) {
setOffset(offset);
setHits(hits);
}
/**
* This is ignored - compression is controlled at the network level.
*
* @deprecated this is ignored
*/
@Deprecated
public void setCompress(boolean ignored) { }
/**
* Returns false.
*
* @deprecated this always returns false
*/
@Deprecated
public boolean getCompress() { return false; }
/** Returns a string describing this query */
@Override
public String toString() {
String queryTree;
try {
queryTree = model.getQueryTree().toString();
} catch (Exception e) {
queryTree = "[Could not parse user input: " + model.getQueryString() + "]";
}
return "query '" + queryTree + "'";
}
/** Returns a string describing this query in more detail */
public String toDetailString() {
String queryTree;
try {
queryTree = model.getQueryTree().toString();
} catch (Exception e) {
queryTree = "Could not parse user input: " + model.getQueryString();
}
return "query=[" + queryTree + "]" + " offset=" + getOffset() + " hits=" + getHits() + "]";
}
/**
* Encodes this query onto the given buffer
*
* @param buffer The buffer to encode the query to
* @return the number of encoded items
*/
public int encode(ByteBuffer buffer) {
return model.getQueryTree().encode(buffer);
}
/**
* Adds a context message to this query and to the info log,
* if the context level of the query is sufficiently high.
* The context information will be carried over to the result at creation.
* The message parameter will be included <i>with</i> XML escaping.
*
* @param message the message to add
* @param traceLevel the context level of the message, this method will do nothing
* if the traceLevel of the query is lower than this value
*/
public void trace(String message, int traceLevel) {
trace(message, false, traceLevel);
}
/**
* Adds a trace message to this query
* if the trace level of the query is sufficiently high.
*
* @param message the message to add
* @param includeQuery true to append the query root stringValue
* at the end of the message
* @param traceLevel the context level of the message, this method will do nothing
* if the traceLevel of the query is lower than this value
*/
public void trace(String message, boolean includeQuery, int traceLevel) {
if ( ! isTraceable(traceLevel)) return;
if (includeQuery)
message += ": [" + queryTreeText() + "]";
log.log(LogLevel.DEBUG,message);
getContext(true).trace(message, 0);
}
/**
* Adds a trace message to this query
* if the trace level of the query is sufficiently high.
*
* @param includeQuery true to append the query root stringValue at the end of the message
* @param traceLevel the context level of the message, this method will do nothing
* if the traceLevel of the query is lower than this value
* @param messages the messages whose toStrings will be concatenated into the trace message.
* Concatenation will only happen if the trace level is sufficiently high.
*/
public void trace(boolean includeQuery, int traceLevel, Object... messages) {
if ( ! isTraceable(traceLevel)) return;
StringBuilder concatenated = new StringBuilder();
for (Object message : messages)
concatenated.append(String.valueOf(message));
trace(concatenated.toString(), includeQuery, traceLevel);
}
/**
* Set the context information for another query to be part of this query's
* context information. This is to be used if creating fresh query objects as
* part of a plug-in's execution. The query should be attached before it is
* used, in case an exception causes premature termination. This is enforced
* by an IllegalStateException. In other words, intended use is create the
* new query, and attach the context to the invoking query as soon as the new
* query is properly initialized.
*
* <p>
* This method will always set the argument query's context level to the context
* level of this query.
*
* @param query
* The query which should be traced as a part of this query.
* @throws IllegalStateException
* If the query given as argument already has context
* information.
*/
public void attachContext(Query query) throws IllegalStateException {
query.setTraceLevel(getTraceLevel());
if (context == null) {
return;
}
if (query.getContext(false) != null) {
throw new IllegalStateException("Query to attach already has context information stored.");
}
query.context = context;
}
private String queryTreeText() {
QueryTree root = getModel().getQueryTree();
if (getTraceLevel() < 2)
return root.toString();
if (getTraceLevel() < 6)
return yqlRepresentation();
else
return "\n" + yqlRepresentation() + "\n" + new TextualQueryRepresentation(root.getRoot()) + "\n";
}
/**
* Serialize this query as YQL+. This method will never throw exceptions,
* but instead return a human readable error message if a problem occured
* serializing the query. Hits and offset information will be included if
* different from default, while linguistics metadata are not added.
*
* @return a valid YQL+ query string or a human readable error message
* @see Query
*/
public String yqlRepresentation() {
try {
return yqlRepresentation(null, true);
} catch (NullItemException e) {
return "Query currently a placeholder, NullItem encountered.";
} catch (RuntimeException e) {
return "Failed serializing query as YQL+, please file a ticket including the query causing this: "
+ Exceptions.toMessageString(e);
}
}
private void commaSeparated(StringBuilder yql, Set<String> fields) {
int initLen = yql.length();
for (String field : fields) {
if (yql.length() > initLen) {
yql.append(", ");
}
yql.append(field);
}
}
/**
* Serialize this query as YQL+. This will create a string representation
* which should always be legal YQL+. If a problem occurs, a
* RuntimeException is thrown.
*
* @param segmenterVersion
* linguistics metadata used in federation, set to null if the
* annotation is not necessary
* @param includeHitsAndOffset
* whether to include hits and offset parameters converted to a
* offset/limit slice
* @return a valid YQL+ query string
* @throws RuntimeException if there is a problem serializing the query tree
*/
public String yqlRepresentation(@Nullable Tuple2<String, Version> segmenterVersion, boolean includeHitsAndOffset) {
String q = VespaSerializer.serialize(this);
Set<String> sources = getModel().getSources();
Set<String> fields = getPresentation().getSummaryFields();
StringBuilder yql = new StringBuilder("select ");
if (fields.isEmpty()) {
yql.append('*');
} else {
commaSeparated(yql, fields);
}
yql.append(" from ");
if (sources.isEmpty()) {
yql.append("sources *");
} else {
if (sources.size() > 1) {
yql.append("sources ");
}
commaSeparated(yql, sources);
}
yql.append(" where ");
if (segmenterVersion != null) {
yql.append("[{\"segmenter\": {\"version\": \"")
.append(segmenterVersion.second.toString())
.append("\", \"backend\": \"")
.append(segmenterVersion.first).append("\"}}](");
}
yql.append(q);
if (segmenterVersion != null) {
yql.append(')');
}
if (getRanking().getSorting() != null && getRanking().getSorting().fieldOrders().size() > 0) {
serializeSorting(yql);
}
if (includeHitsAndOffset) {
if (getOffset() != 0) {
yql.append(" limit ")
.append(Integer.toString(getHits() + getOffset()))
.append(" offset ")
.append(Integer.toString(getOffset()));
} else if (getHits() != 10) {
yql.append(" limit ").append(Integer.toString(getHits()));
}
}
if (getTimeout() != 5000L) {
yql.append(" timeout ").append(Long.toString(getTimeout()));
}
yql.append(';');
return yql.toString();
}
private void serializeSorting(StringBuilder yql) {
yql.append(" order by ");
int initLen = yql.length();
for (FieldOrder f : getRanking().getSorting().fieldOrders()) {
if (yql.length() > initLen) {
yql.append(", ");
}
final Class<? extends AttributeSorter> sorterType = f.getSorter()
.getClass();
if (sorterType == Sorting.RawSorter.class) {
yql.append("[{\"").append(YqlParser.SORTING_FUNCTION)
.append("\": \"").append(Sorting.RAW).append("\"}]");
} else if (sorterType == Sorting.LowerCaseSorter.class) {
yql.append("[{\"").append(YqlParser.SORTING_FUNCTION)
.append("\": \"").append(Sorting.LOWERCASE)
.append("\"}]");
} else if (sorterType == Sorting.UcaSorter.class) {
Sorting.UcaSorter uca = (Sorting.UcaSorter) f.getSorter();
String ucaLocale = uca.getLocale();
Sorting.UcaSorter.Strength ucaStrength = uca.getStrength();
yql.append("[{\"").append(YqlParser.SORTING_FUNCTION)
.append("\": \"").append(Sorting.UCA).append("\"");
if (ucaLocale != null) {
yql.append(", \"").append(YqlParser.SORTING_LOCALE)
.append("\": \"").append(ucaLocale).append('"');
}
if (ucaStrength != Sorting.UcaSorter.Strength.UNDEFINED) {
yql.append(", \"").append(YqlParser.SORTING_STRENGTH)
.append("\": \"").append(ucaStrength.name())
.append('"');
}
yql.append("}]");
}
yql.append(f.getFieldName());
if (f.getSortOrder() == Order.DESCENDING) {
yql.append(" desc");
}
}
}
/** Returns the context of this query, possibly creating it if missing. Returns the context, or null */
public QueryContext getContext(boolean create) {
if (context==null && create)
context=new QueryContext(getTraceLevel(),this);
return context;
}
/** Returns a hash of this query based on (some of) its content. */
@Override
public int hashCode() {
return ranking.hashCode()+3*presentation.hashCode()+5* model.hashCode()+ 11*offset+ 13*hits;
}
/** Returns whether the given query is equal to this */
@Override
public boolean equals(Object other) {
if (this==other) return true;
if ( ! (other instanceof Query)) return false;
Query q = (Query) other;
if (getOffset() != q.getOffset()) return false;
if (getHits() != q.getHits()) return false;
if ( ! getPresentation().equals(q.getPresentation())) return false;
if ( ! getRanking().equals(q.getRanking())) return false;
if ( ! getModel().equals(q.getModel())) return false;
return true;
}
/** Returns a clone of this query */
@Override
public Query clone() {
Query clone = (Query) super.clone();
copyPropertiesTo(clone);
return clone;
}
private void copyPropertiesTo(Query clone) {
clone.model = model.cloneFor(clone);
clone.ranking = (Ranking) ranking.clone();
clone.presentation = (Presentation) presentation.clone();
clone.context = getContext(true).cloneFor(clone);
clone.properties().setParentQuery(clone);
assert (clone.properties().getParentQuery() == clone);
clone.setTraceLevel(getTraceLevel());
clone.setHits(getHits());
clone.setOffset(getOffset());
clone.setNoCache(getNoCache());
clone.setGroupingSessionCache(getGroupingSessionCache());
clone.requestId = null;
}
/** Returns the presentation to be used for this query, never null */
public Presentation getPresentation() { return presentation; }
/** Returns the ranking to be used for this query, never null */
public Ranking getRanking() { return ranking; }
/** Returns the query representation model to be used for this query, never null */
public Model getModel() { return model; }
/**
* Return the HTTP request which caused this query. This will never be null
* when running with queries from the network.
*/
public HttpRequest getHttpRequest() { return httpRequest; }
/**
* Returns the unique and stable session id of this query.
*
* @param create if true this is created if not already set
* @return the session id of this query, or null if not set and create is false
*/
public SessionId getSessionId(boolean create) {
if (requestId == null && ! create) return null;
if (requestId == null && create) {
requestId = UniqueRequestId.next();
}
return new SessionId(requestId, getRanking().getProfile());
}
public boolean hasEncodableProperties() {
if ( ! ranking.getProperties().isEmpty()) return true;
if ( ! ranking.getFeatures().isEmpty()) return true;
if ( ranking.getFreshness() != null) return true;
if ( model.getSearchPath() != null) return true;
if ( model.getDocumentDb() != null) return true;
if ( presentation.getHighlight() != null && ! presentation.getHighlight().getHighlightItems().isEmpty()) return true;
return false;
}
/**
* Encodes properties of this query.
*
* @param buffer the buffer to encode to
* @param encodeQueryData true to encode all properties, false to only include session information, not actual query data
* @return the encoded length
*/
public int encodeAsProperties(ByteBuffer buffer, boolean encodeQueryData) {
if (! hasEncodableProperties()) return 0;
int start = buffer.position();
int mapCountPosition = buffer.position();
buffer.putInt(0);
int mapCount = 0;
mapCount += ranking.getProperties().encode(buffer, encodeQueryData);
if (encodeQueryData) mapCount += ranking.getFeatures().encode(buffer);
if (encodeQueryData && presentation.getHighlight() != null) mapCount += MapEncoder.encodeStringMultiMap(Highlight.HIGHLIGHTTERMS, presentation.getHighlight().getHighlightTerms(), buffer);
if (encodeQueryData) mapCount += MapEncoder.encodeSingleValue("model", "searchpath", model.getSearchPath(), buffer);
mapCount += MapEncoder.encodeSingleValue(DocumentDatabase.MATCH_PROPERTY, DocumentDatabase.SEARCH_DOC_TYPE_KEY, model.getDocumentDb(), buffer);
mapCount += MapEncoder.encodeMap("caches", createCacheSettingMap(), buffer);
buffer.putInt(mapCountPosition, mapCount);
return buffer.position() - start;
}
private Map<String, Boolean> createCacheSettingMap() {
if (getGroupingSessionCache() && ranking.getQueryCache()) {
Map<String, Boolean> cacheSettingMap = new HashMap<>();
cacheSettingMap.put("grouping", true);
cacheSettingMap.put("query", true);
return cacheSettingMap;
}
if (getGroupingSessionCache())
return Collections.singletonMap("grouping", true);
if (ranking.getQueryCache())
return Collections.singletonMap("query", true);
return Collections.<String,Boolean>emptyMap();
}
/**
* Prepares this for binary serialization.
* <p>
* This must be invoked after all changes have been made to this query before it is passed
* on to a receiving backend. Calling it is somewhat expensive, so it should only happen once.
* If a prepared query is cloned, it stays prepared.
*/
public void prepare() {
getModel().prepare(getRanking());
getPresentation().prepare();
getRanking().prepare();
}
} | class Query extends com.yahoo.processing.Request implements Cloneable {
/** The type of the query */
public enum Type {
ALL(0,"all"),
ANY(1,"any"),
PHRASE(2,"phrase"),
ADVANCED(3,"adv"),
WEB(4,"web"),
PROGRAMMATIC(5, "prog"),
YQL(6, "yql");
private final int intValue;
private final String stringValue;
Type(int intValue,String stringValue) {
this.intValue = intValue;
this.stringValue = stringValue;
}
/** Converts a type argument value into a query type */
public static Type getType(String typeString) {
for (Type type:Type.values())
if (type.stringValue.equals(typeString))
return type;
return ALL;
}
public int asInt() { return intValue; }
public String toString() { return stringValue; }
}
/** The offset from the most relevant hits found from this query */
private int offset = 0;

/** The number of hits to return */
private int hits = 10;

/** The query context level, 0 means no tracing */
private int traceLevel = 0;

// Timeout used when rank feature dumping is on, see getTimeout(): 6 minutes
private static final long dumpTimeout = (6 * 60 * 1000);
private static final long defaultTimeout = 5000;

/** The timeout of the query, in milliseconds */
private long timeout = defaultTimeout;

/** Whether this query is forbidden to access cached information */
private boolean noCache = false;

/** Whether or not grouping should use a session cache */
private boolean groupingSessionCache = false;

/** The synchronous view of the JDisc request causing this query */
private final HttpRequest httpRequest;

/** The context, or null if there is no context */
private QueryContext context = null;

/** Used for downstream session caches */
private UniqueRequestId requestId = null;

/** The ranking requested in this query */
private Ranking ranking = new Ranking(this);

/** The query query and/or query program declaration */
private Model model = new Model(this);

/** How results of this query should be presented */
private Presentation presentation = new Presentation(this);

// NOTE(review): should be static final by convention
private static Logger log = Logger.getLogger(Query.class.getName());

/** The time this query was created */
private long startTime;

// Compound names of the native query properties, used by the argument type below
public static final CompoundName OFFSET = new CompoundName("offset");
public static final CompoundName HITS = new CompoundName("hits");
public static final CompoundName SEARCH_CHAIN = new CompoundName("searchChain");
public static final CompoundName TRACE_LEVEL = new CompoundName("traceLevel");
public static final CompoundName NO_CACHE = new CompoundName("noCache");
public static final CompoundName GROUPING_SESSION_CACHE = new CompoundName("groupingSessionCache");
public static final CompoundName TIMEOUT = new CompoundName("timeout");

// The built-in ("native") query profile type of a query, initialized below
private static QueryProfileType argumentType;
// Defines the built-in ("native") top-level query properties and their types; used both for
// validation/conversion of request parameters and for deriving the alias map below.
static {
    argumentType = new QueryProfileType("native");
    argumentType.setBuiltin(true);
    argumentType.addField(new FieldDescription(OFFSET.toString(), "integer", "offset start"));
    argumentType.addField(new FieldDescription(HITS.toString(), "integer", "hits count"));
    argumentType.addField(new FieldDescription(SEARCH_CHAIN.toString(), "string"));
    argumentType.addField(new FieldDescription(TRACE_LEVEL.toString(), "integer", "tracelevel"));
    argumentType.addField(new FieldDescription(NO_CACHE.toString(), "boolean", "nocache"));
    argumentType.addField(new FieldDescription(GROUPING_SESSION_CACHE.toString(), "boolean", "groupingSessionCache"));
    argumentType.addField(new FieldDescription(TIMEOUT.toString(), "string", "timeout"));
    argumentType.addField(new FieldDescription(FederationSearcher.SOURCENAME.toString(),"string"));
    argumentType.addField(new FieldDescription(FederationSearcher.PROVIDERNAME.toString(),"string"));
    // Nested component types: presentation, ranking and model each declare their own fields
    argumentType.addField(new FieldDescription(Presentation.PRESENTATION, new QueryProfileFieldType(Presentation.getArgumentType())));
    argumentType.addField(new FieldDescription(Ranking.RANKING, new QueryProfileFieldType(Ranking.getArgumentType())));
    argumentType.addField(new FieldDescription(Model.MODEL, new QueryProfileFieldType(Model.getArgumentType())));
    argumentType.freeze(); // immutable from here on
}

/** Returns the built-in query profile type of a query */
public static QueryProfileType getArgumentType() { return argumentType; }
/** The aliases of query properties: maps each declared field alias to the field's full compound name */
private static Map<String,CompoundName> propertyAliases;
static {
    // Built once from the frozen argument types, then published as an immutable map
    Map<String,CompoundName> propertyAliasesBuilder = new HashMap<>();
    addAliases(Query.getArgumentType(), propertyAliasesBuilder);
    addAliases(Ranking.getArgumentType(), propertyAliasesBuilder);
    addAliases(Model.getArgumentType(), propertyAliasesBuilder);
    addAliases(Presentation.getArgumentType(), propertyAliasesBuilder);
    propertyAliases = ImmutableMap.copyOf(propertyAliasesBuilder);
}
/** Registers, for every aliased field of the given type, a mapping from the alias to the field's full name. */
private static void addAliases(QueryProfileType arguments, Map<String, CompoundName> aliases) {
    String prefix = getPrefix(arguments);
    for (FieldDescription field : arguments.fields().values()) {
        String fullName = prefix + field.getName();
        for (String alias : field.getAliases())
            aliases.put(alias, new CompoundName(fullName));
    }
}
/** Returns the property name prefix of a type: empty for the native (top level) type, "typeName." otherwise. */
private static String getPrefix(QueryProfileType type) {
    String typeName = type.getId().getName();
    return typeName.equals("native") ? "" : typeName + ".";
}
/**
 * Registers unfrozen copies of all the native query profile types with the given registry,
 * so applications can extend the built-in types.
 */
public static void addNativeQueryProfileTypesTo(QueryProfileTypeRegistry registry) {
    registry.register(Query.getArgumentType().unfrozen());
    registry.register(Ranking.getArgumentType().unfrozen());
    registry.register(Model.getArgumentType().unfrozen());
    registry.register(Presentation.getArgumentType().unfrozen());
    registry.register(DefaultProperties.argumentType.unfrozen());
}
/** Returns an unmodifiable list of all the native properties under a Query */
public static final List<CompoundName> nativeProperties =
        ImmutableList.copyOf(namesUnder(CompoundName.empty, Query.getArgumentType()));

/** Recursively collects the full compound names of all fields of a type and its nested profile types. */
private static List<CompoundName> namesUnder(CompoundName prefix, QueryProfileType type) {
    if (type == null) return Collections.emptyList(); // incomplete type graph
    List<CompoundName> collected = new ArrayList<>();
    for (Map.Entry<String, FieldDescription> entry : type.fields().entrySet()) {
        CompoundName fieldName = prefix.append(entry.getKey());
        FieldType fieldType = entry.getValue().getType();
        if (fieldType instanceof QueryProfileFieldType)
            collected.addAll(namesUnder(fieldName, ((QueryProfileFieldType) fieldType).getQueryProfileType()));
        else
            collected.add(fieldName);
    }
    return collected;
}
/**
 * Constructs an empty (null) query
 */
public Query() {
    this("");
}

/**
 * Construct a query from a string formatted in the http style, e.g <code>?query=test&amp;offset=10&amp;hits=13</code>
 * The query must be uri encoded.
 *
 * @param query the uri-encoded query string
 */
public Query(String query) {
    this(query, null);
}

/**
 * Creates a query from a request
 *
 * @param request the HTTP request from which this is created
 */
public Query(HttpRequest request) {
    this(request, null);
}

/**
 * Construct a query from a string formatted in the http style, e.g <code>?query=test&amp;offset=10&amp;hits=13</code>
 * The query must be uri encoded.
 *
 * @param query the uri-encoded query string
 * @param queryProfile the query profile to use for this query, or null if none
 */
public Query(String query, CompiledQueryProfile queryProfile) {
    // Wraps the string in a synthetic GET request so all queries take the same code path
    this(HttpRequest.createTestRequest(query, com.yahoo.jdisc.http.HttpRequest.Method.GET), queryProfile);
}

/**
 * Creates a query from a request
 *
 * @param request the HTTP request from which this is created
 * @param queryProfile the query profile to use for this query, or null if none.
 */
public Query(HttpRequest request, CompiledQueryProfile queryProfile) {
    this(request, request.propertyMap(), queryProfile);
}
/**
 * Shared initialization called from the constructors: records the start time, builds the
 * property chain (query profile backed or plain) and assigns the request parameters to it.
 *
 * @param requestMap the property map of the query
 * @param queryProfile the query profile to use for this query, or null if none
 */
private void init(Map<String, String> requestMap, CompiledQueryProfile queryProfile) {
    startTime = System.currentTimeMillis();
    if (queryProfile != null) {
        // First chain only the profile so request values are validated/converted against it,
        // then rebuild the full chain with the profile near the bottom.
        // NOTE(review): the chain order appears significant — confirm before changing it.
        Properties queryProfileProperties = new QueryProfileProperties(queryProfile);
        properties().chain(queryProfileProperties);
        setPropertiesFromRequestMap(requestMap, properties());
        properties().chain(new QueryProperties(this, queryProfile.getRegistry())).
                chain(new ModelObjectMap()).
                chain(new RequestContextProperties(requestMap)).
                chain(queryProfileProperties).
                chain(new DefaultProperties());
        // Push the resolved values into this query and its component objects via their setters
        setFieldsFrom(queryProfileProperties, requestMap);
    }
    else {
        properties().
                chain(new QueryProperties(this, CompiledQueryProfileRegistry.empty)).
                chain(new PropertyMap()).
                chain(new DefaultProperties());
        setPropertiesFromRequestMap(requestMap, properties());
    }
    properties().setParentQuery(this);
    traceProperties();
}
/** Creates a query which is a copy of the given query, retaining its start time */
public Query(Query query) {
    this(query, query.getStartTime());
}

/** Copy constructor used both for plain copies and for copies with a fresh start time */
private Query(Query query, long startTime) {
    super(query.properties().clone());
    this.startTime = startTime;
    this.httpRequest = query.httpRequest;
    query.copyPropertiesTo(this);
}

/**
 * Creates a new query from another query, but with time sensitive fields reset.
 */
public static Query createNewQuery(Query query) {
    return new Query(query, System.currentTimeMillis());
}
/**
 * Calls properties().set on each value in the given properties which is declared in this query or
 * one of its dependent objects. This will ensure the appropriate setters are called on this and all
 * dependent objects for the appropriate subset of the given property values
 */
private void setFieldsFrom(Properties properties, Map<String,String> context) {
    // One pass per native argument type; each call only touches that type's declared fields
    setFrom(properties,Query.getArgumentType(), context);
    setFrom(properties,Model.getArgumentType(), context);
    setFrom(properties,Presentation.getArgumentType(), context);
    setFrom(properties,Ranking.getArgumentType(), context);
}
/**
 * For each field in the given query profile type, takes the corresponding value from
 * originalProperties (if any) and sets it on properties(), so the appropriate setters
 * are invoked on this query and its dependent objects.
 */
private void setFrom(Properties originalProperties, QueryProfileType arguments, Map<String, String> context) {
    String prefix = getPrefix(arguments);
    for (FieldDescription field : arguments.fields().values()) {
        String fullName = prefix + field.getName();
        if (field.getType() == FieldType.genericQueryProfileType) {
            // Generic map field: copy every nested entry individually
            for (Map.Entry<String, Object> entry : originalProperties.listProperties(fullName, context).entrySet())
                setOrRethrow(fullName + "." + entry.getKey(), entry.getValue(), context);
        }
        else {
            Object value = originalProperties.get(fullName, context);
            if (value != null)
                setOrRethrow(fullName, value, context);
        }
    }
}

/** Sets a single property value, translating IllegalArgumentException into QueryException. */
private void setOrRethrow(String name, Object value, Map<String, String> context) {
    try {
        properties().set(name, value, context);
    }
    catch (IllegalArgumentException e) {
        throw new QueryException("Invalid request parameter", e);
    }
}
/** Assigns every request parameter (except the query profile selector itself) to the given properties. */
private void setPropertiesFromRequestMap(Map<String, String> requestMap, Properties properties) {
    for (Map.Entry<String, String> parameter : requestMap.entrySet()) {
        if (parameter.getKey().equals("queryProfile")) continue; // consumed when resolving the profile
        try {
            properties.set(parameter.getKey(), parameter.getValue(), requestMap);
        }
        catch (IllegalArgumentException e) {
            throw new QueryException("Invalid request parameter", e);
        }
    }
}
/** Returns the properties of this query. The properties are modifiable */
@Override
public Properties properties() { return (Properties)super.properties(); }

/**
 * Traces how properties was resolved and from where. Done after the fact to avoid special handling
 * of tracelevel, which is the property deciding whether this needs to be done
 */
private void traceProperties() {
    if (traceLevel == 0) return;
    CompiledQueryProfile profile=null;
    QueryProfileProperties profileProperties=properties().getInstance(QueryProfileProperties.class);
    if (profileProperties!=null)
        profile=profileProperties.getQueryProfile();

    if (profile==null)
        trace("No query profile is used", false, 1);
    else
        trace("Using " + profile.toString(), false, 1);

    if (traceLevel < 4) return;
    // Level 4+: list every request property together with where its final value came from
    StringBuilder b = new StringBuilder("Resolved properties:\n");
    Set<String> mentioned = new HashSet<>();
    for (Map.Entry<String,String> requestProperty : requestProperties().entrySet() ) {
        Object resolvedValue = properties().get(requestProperty.getKey(), requestProperties());
        if (resolvedValue == null && requestProperty.getKey().equals("queryProfile"))
            resolvedValue = requestProperty.getValue(); // the selector itself resolves to null
        b.append(requestProperty.getKey());
        b.append("=");
        b.append(String.valueOf(resolvedValue));
        b.append(" (");
        if (profile != null && ! profile.isOverridable(new CompoundName(requestProperty.getKey()), requestProperties()))
            b.append("value from query profile - unoverridable, ignoring request value");
        else
            b.append("value from request");
        b.append(")\n");
        mentioned.add(requestProperty.getKey());
    }
    if (profile!=null) {
        // Also list profile values not overridden by the request
        appendQueryProfileProperties(profile,mentioned,b);
    }
    trace(b.toString(),false,4);
}

/** Returns the raw property map of the HTTP request causing this query */
private Map<String, String> requestProperties() {
    return httpRequest.propertyMap();
}

/** Appends all query profile values not already mentioned (i.e not set by the request) to b */
private void appendQueryProfileProperties(CompiledQueryProfile profile,Set<String> mentioned,StringBuilder b) {
    for (Map.Entry<String,Object> property : profile.listValues("",requestProperties()).entrySet()) {
        if ( ! mentioned.contains(property.getKey()))
            b.append(property.getKey() + "=" + property.getValue() + " (value from query profile)<br/>\n");
    }
}
/**
 * Validates this query
 *
 * @return the reason if it is invalid, null if it is valid
 */
public String validate() {
    QueryProfileProperties queryProfileProperties = properties().getInstance(QueryProfileProperties.class);
    if (queryProfileProperties == null) return null; // no profile in use, nothing to check against
    StringBuilder missingName = new StringBuilder();
    if (queryProfileProperties.isComplete(missingName, httpRequest.propertyMap()))
        return null;
    return "Incomplete query: Parameter '" + missingName + "' is mandatory in " +
           queryProfileProperties.getQueryProfile() + " but is not set";
}
/** Returns the time (in milliseconds since epoch) when this query was started */
public long getStartTime() { return startTime; }

/** Returns the time (in milliseconds) since the query was started/created */
public long getDurationTime() {
    return System.currentTimeMillis() - startTime;
}

/**
 * Returns the remaining allowed duration of this query.
 * May be negative if the deadline has already passed.
 *
 * @return the remaining time in milliseconds
 **/
public long getTimeLeft() {
    return getTimeout() - getDurationTime();
}

/** Returns whether the given property was explicitly present in the HTTP request causing this query */
public boolean requestHasProperty(String name) {
    return httpRequest.hasProperty(name);
}
/**
 * Returns the number of milliseconds to wait for a response from a search backend
 * before timing it out. Default is 5000.
 * <p>
 * Note: If Ranking.RANKFEATURES is turned on, this is hardcoded to 6 minutes.
 *
 * @return timeout in milliseconds
 */
public long getTimeout() {
    // Rank feature dumps are expensive, so they always get the large fixed timeout
    return properties().getBoolean(Ranking.RANKFEATURES, false) ? dumpTimeout : timeout;
}

/**
 * Sets the number of milliseconds to wait for a response from a search backend
 * before time out. Default is 5000.
 *
 * @param timeout the timeout in milliseconds, between 0 and 1000000000, both inclusive
 * @throws IllegalArgumentException if timeout is negative or larger than 1000000000
 */
public void setTimeout(long timeout) {
    if (timeout > 1000000000 || timeout < 0)
        // Message fixed: 0 and 1000000000 are accepted, so "positive and smaller than" was wrong
        throw new IllegalArgumentException("'timeout' must be non-negative and no larger than 1000000000 ms but was " + timeout);
    this.timeout = timeout;
}

/**
 * Sets the timeout from a string: Parsed by ParameterParser.asMilliSeconds, with the
 * current timeout value as the fallback if the string supplies no value.
 */
public void setTimeout(String timeoutString) {
    setTimeout(ParameterParser.asMilliSeconds(timeoutString, timeout));
}

/**
 * Resets the start time of the query. This will ensure that the query will run
 * for the same amount of time as a newly created query.
 */
public void resetTimeout() { this.startTime = System.currentTimeMillis(); }
/**
 * Sets the context level of this query, 0 means no tracing
 * Higher numbers means increasingly more tracing
 */
public void setTraceLevel(int traceLevel) { this.traceLevel = traceLevel; }

/**
 * Returns the context level of this query, 0 means no tracing
 * Higher numbers means increasingly more tracing
 */
public int getTraceLevel() { return traceLevel; }

/** Returns whether a trace message at the given level would be recorded, i.e whether level is at most the trace level */
public final boolean isTraceable(int level) { return traceLevel >= level; }

/** Returns whether this query should never be served from a cache. Default is false */
public boolean getNoCache() { return noCache; }

/** Sets whether this query should never be served from a cache. Default is false */
public void setNoCache(boolean noCache) { this.noCache = noCache; }

/** Returns whether this query should use the grouping session cache. Default is false */
public boolean getGroupingSessionCache() { return groupingSessionCache; }

/** Sets whether this query should use the grouping session cache. Default is false */
public void setGroupingSessionCache(boolean groupingSessionCache) { this.groupingSessionCache = groupingSessionCache; }
/**
 * Returns the offset from the most relevant hits requested by the submitter
 * of this query.
 * Default is 0 - to return the most relevant hits
 */
public int getOffset() { return offset; }

/**
 * Returns the number of hits requested by the submitter of this query.
 * The default is 10.
 */
public int getHits() { return hits; }

/**
 * Sets the number of hits requested. Default number of hits is 10.
 *
 * @param hits the number of hits to return, a non-negative number
 * @throws IllegalArgumentException if hits is negative
 */
public void setHits(int hits) {
    if (hits < 0)
        // Include the offending value to make the failure diagnosable
        throw new IllegalArgumentException("'hits' must be a non-negative number, was " + hits);
    this.hits = hits;
}

/**
 * Sets the hit offset. Default is 0.
 *
 * @param offset the offset of the first hit to return, a non-negative number
 * @throws IllegalArgumentException if offset is negative
 */
public void setOffset(int offset) {
    if (offset < 0)
        throw new IllegalArgumentException("'offset' must be a non-negative number, was " + offset);
    this.offset = offset;
}

/** Convenience method to set both the offset and the number of hits to return */
public void setWindow(int offset, int hits) {
    setOffset(offset);
    setHits(hits);
}
/**
 * This is ignored - compression is controlled at the network level.
 *
 * @deprecated this is ignored
 */
@Deprecated
public void setCompress(boolean ignored) { }

/**
 * Returns false.
 *
 * @deprecated this always returns false
 */
@Deprecated
// NOTE(review): both compress accessors are no-ops kept for API compatibility;
// candidates for removal in the next major version
public boolean getCompress() { return false; }
/** Returns a string describing this query */
@Override
public String toString() {
    String queryTree;
    try {
        queryTree = model.getQueryTree().toString();
    } catch (Exception e) {
        // Rendering must never throw, so fall back to the raw user input
        queryTree = "[Could not parse user input: " + model.getQueryString() + "]";
    }
    return "query '" + queryTree + "'";
}

/** Returns a string describing this query in more detail */
public String toDetailString() {
    String queryTree;
    try {
        queryTree = model.getQueryTree().toString();
    } catch (Exception e) {
        queryTree = "Could not parse user input: " + model.getQueryString();
    }
    // Fixed an unbalanced trailing ']' which used to be appended after the hits count
    return "query=[" + queryTree + "] offset=" + getOffset() + " hits=" + getHits();
}
/**
 * Encodes this query onto the given buffer
 *
 * @param buffer The buffer to encode the query to
 * @return the number of encoded items
 */
public int encode(ByteBuffer buffer) {
    return model.getQueryTree().encode(buffer);
}

/**
 * Adds a context message to this query and to the info log,
 * if the context level of the query is sufficiently high.
 * The context information will be carried over to the result at creation.
 * The message parameter will be included <i>with</i> XML escaping.
 *
 * @param message the message to add
 * @param traceLevel the context level of the message, this method will do nothing
 *        if the traceLevel of the query is lower than this value
 */
public void trace(String message, int traceLevel) {
    trace(message, false, traceLevel);
}

/**
 * Adds a trace message to this query
 * if the trace level of the query is sufficiently high.
 *
 * @param message the message to add
 * @param includeQuery true to append the query root stringValue
 *        at the end of the message
 * @param traceLevel the context level of the message, this method will do nothing
 *        if the traceLevel of the query is lower than this value
 */
public void trace(String message, boolean includeQuery, int traceLevel) {
    if ( ! isTraceable(traceLevel)) return;
    if (includeQuery)
        message += ": [" + queryTreeText() + "]";
    // Mirrored to the debug log in addition to the query context
    log.log(LogLevel.DEBUG,message);
    getContext(true).trace(message, 0);
}
/**
 * Adds a trace message to this query
 * if the trace level of the query is sufficiently high.
 *
 * @param includeQuery true to append the query root stringValue at the end of the message
 * @param traceLevel the context level of the message, this method will do nothing
 *        if the traceLevel of the query is lower than this value
 * @param messages the messages whose toStrings will be concatenated into the trace message.
 *        Concatenation will only happen if the trace level is sufficiently high.
 */
public void trace(boolean includeQuery, int traceLevel, Object... messages) {
    if ( ! isTraceable(traceLevel)) return; // skip the concatenation cost when not tracing
    StringBuilder message = new StringBuilder();
    for (Object part : messages)
        message.append(part); // append(Object) uses String.valueOf, so nulls become "null"
    trace(message.toString(), includeQuery, traceLevel);
}
/**
 * Set the context information for another query to be part of this query's
 * context information. This is to be used if creating fresh query objects as
 * part of a plug-in's execution. The query should be attached before it is
 * used, in case an exception causes premature termination. This is enforced
 * by an IllegalStateException. In other words, intended use is create the
 * new query, and attach the context to the invoking query as soon as the new
 * query is properly initialized.
 *
 * <p>
 * This method will always set the argument query's context level to the context
 * level of this query.
 *
 * @param query
 *            The query which should be traced as a part of this query.
 * @throws IllegalStateException
 *             If the query given as argument already has context
 *             information.
 */
public void attachContext(Query query) throws IllegalStateException {
    query.setTraceLevel(getTraceLevel());
    if (context == null) {
        // This query has no context of its own yet, so there is nothing to share
        return;
    }
    if (query.getContext(false) != null) {
        throw new IllegalStateException("Query to attach already has context information stored.");
    }
    // The two queries now share one context object
    query.context = context;
}
/** Renders the query tree for trace messages, with more detail at higher trace levels. */
private String queryTreeText() {
    QueryTree root = getModel().getQueryTree();
    int level = getTraceLevel();
    if (level >= 6)
        return "\n" + yqlRepresentation() + "\n" + new TextualQueryRepresentation(root.getRoot()) + "\n";
    if (level >= 2)
        return yqlRepresentation();
    return root.toString();
}
/**
 * Serialize this query as YQL+. This method will never throw exceptions,
 * but instead return a human readable error message if a problem occurred
 * serializing the query. Hits and offset information will be included if
 * different from default, while linguistics metadata are not added.
 *
 * @return a valid YQL+ query string or a human readable error message
 * @see Query
 */
public String yqlRepresentation() {
    try {
        return yqlRepresentation(null, true);
    } catch (NullItemException e) {
        // The tree contains a NullItem placeholder, i.e it is not a complete query yet
        return "Query currently a placeholder, NullItem encountered.";
    } catch (RuntimeException e) {
        return "Failed serializing query as YQL+, please file a ticket including the query causing this: "
                + Exceptions.toMessageString(e);
    }
}
/** Appends the given fields to yql, separated by ", " (in the set's iteration order). */
private void commaSeparated(StringBuilder yql, Set<String> fields) {
    // Replaces a hand-rolled join loop; String.join preserves iteration order and
    // produces the identical output
    yql.append(String.join(", ", fields));
}
/**
 * Serialize this query as YQL+. This will create a string representation
 * which should always be legal YQL+. If a problem occurs, a
 * RuntimeException is thrown.
 *
 * @param segmenterVersion
 *            linguistics metadata used in federation, set to null if the
 *            annotation is not necessary
 * @param includeHitsAndOffset
 *            whether to include hits and offset parameters converted to a
 *            offset/limit slice
 * @return a valid YQL+ query string
 * @throws RuntimeException if there is a problem serializing the query tree
 */
public String yqlRepresentation(@Nullable Tuple2<String, Version> segmenterVersion, boolean includeHitsAndOffset) {
    String q = VespaSerializer.serialize(this);
    Set<String> sources = getModel().getSources();
    Set<String> fields = getPresentation().getSummaryFields();
    StringBuilder yql = new StringBuilder("select ");
    if (fields.isEmpty()) {
        yql.append('*');
    } else {
        commaSeparated(yql, fields);
    }
    yql.append(" from ");
    if (sources.isEmpty()) {
        yql.append("sources *");
    } else {
        if (sources.size() > 1) {
            // A single source needs no "sources" keyword
            yql.append("sources ");
        }
        commaSeparated(yql, sources);
    }
    yql.append(" where ");
    if (segmenterVersion != null) {
        // Annotate the where clause with the segmenter used, for federation
        yql.append("[{\"segmenter\": {\"version\": \"")
                .append(segmenterVersion.second.toString())
                .append("\", \"backend\": \"")
                .append(segmenterVersion.first).append("\"}}](");
    }
    yql.append(q);
    if (segmenterVersion != null) {
        yql.append(')');
    }
    if (getRanking().getSorting() != null && getRanking().getSorting().fieldOrders().size() > 0) {
        serializeSorting(yql);
    }
    if (includeHitsAndOffset) {
        if (getOffset() != 0) {
            yql.append(" limit ")
                    .append(Integer.toString(getHits() + getOffset()))
                    .append(" offset ")
                    .append(Integer.toString(getOffset()));
        } else if (getHits() != 10) { // 10 is the default hit count, see the hits field
            yql.append(" limit ").append(Integer.toString(getHits()));
        }
    }
    // Use the shared constant rather than repeating the default timeout literal (was 5000L)
    if (getTimeout() != defaultTimeout) {
        yql.append(" timeout ").append(Long.toString(getTimeout()));
    }
    yql.append(';');
    return yql.toString();
}
/** Appends an "order by" clause for this query's sort specification to the given YQL+ buffer. */
private void serializeSorting(StringBuilder yql) {
    yql.append(" order by ");
    int initLen = yql.length();
    for (FieldOrder f : getRanking().getSorting().fieldOrders()) {
        if (yql.length() > initLen) {
            yql.append(", ");
        }
        // Non-default sorters are expressed as a function annotation before the field name
        final Class<? extends AttributeSorter> sorterType = f.getSorter()
                .getClass();
        if (sorterType == Sorting.RawSorter.class) {
            yql.append("[{\"").append(YqlParser.SORTING_FUNCTION)
                    .append("\": \"").append(Sorting.RAW).append("\"}]");
        } else if (sorterType == Sorting.LowerCaseSorter.class) {
            yql.append("[{\"").append(YqlParser.SORTING_FUNCTION)
                    .append("\": \"").append(Sorting.LOWERCASE)
                    .append("\"}]");
        } else if (sorterType == Sorting.UcaSorter.class) {
            // UCA sorting optionally carries a locale and a strength
            Sorting.UcaSorter uca = (Sorting.UcaSorter) f.getSorter();
            String ucaLocale = uca.getLocale();
            Sorting.UcaSorter.Strength ucaStrength = uca.getStrength();
            yql.append("[{\"").append(YqlParser.SORTING_FUNCTION)
                    .append("\": \"").append(Sorting.UCA).append("\"");
            if (ucaLocale != null) {
                yql.append(", \"").append(YqlParser.SORTING_LOCALE)
                        .append("\": \"").append(ucaLocale).append('"');
            }
            if (ucaStrength != Sorting.UcaSorter.Strength.UNDEFINED) {
                yql.append(", \"").append(YqlParser.SORTING_STRENGTH)
                        .append("\": \"").append(ucaStrength.name())
                        .append('"');
            }
            yql.append("}]");
        }
        yql.append(f.getFieldName());
        if (f.getSortOrder() == Order.DESCENDING) {
            yql.append(" desc"); // ascending is the default and is left implicit
        }
    }
}
/** Returns the context of this query, possibly creating it if missing. Returns the context, or null */
public QueryContext getContext(boolean create) {
    if (context==null && create)
        context=new QueryContext(getTraceLevel(),this);
    return context;
}

/** Returns a hash of this query based on (some of) its content. */
@Override
public int hashCode() {
    // NOTE(review): based on mutable state — must not be used as a map key while
    // the query may still be modified
    return ranking.hashCode()+3*presentation.hashCode()+5* model.hashCode()+ 11*offset+ 13*hits;
}

/** Returns whether the given query is equal to this */
@Override
public boolean equals(Object other) {
    if (this==other) return true;
    if ( ! (other instanceof Query)) return false;
    Query q = (Query) other;
    if (getOffset() != q.getOffset()) return false;
    if (getHits() != q.getHits()) return false;
    if ( ! getPresentation().equals(q.getPresentation())) return false;
    if ( ! getRanking().equals(q.getRanking())) return false;
    if ( ! getModel().equals(q.getModel())) return false;
    return true;
}

/** Returns a clone of this query */
@Override
public Query clone() {
    Query clone = (Query) super.clone();
    copyPropertiesTo(clone);
    return clone;
}

/** Deep-copies the component objects and trace context of this into the given clone. */
private void copyPropertiesTo(Query clone) {
    clone.model = model.cloneFor(clone);
    clone.ranking = (Ranking) ranking.clone();
    clone.presentation = (Presentation) presentation.clone();
    clone.context = getContext(true).cloneFor(clone);

    // Re-point the cloned property chain to its new owner
    clone.properties().setParentQuery(clone);
    assert (clone.properties().getParentQuery() == clone);

    clone.setTraceLevel(getTraceLevel());
    clone.setHits(getHits());
    clone.setOffset(getOffset());
    clone.setNoCache(getNoCache());
    clone.setGroupingSessionCache(getGroupingSessionCache());
    clone.requestId = null; // the clone must get its own session id, see getSessionId
}

/** Returns the presentation to be used for this query, never null */
public Presentation getPresentation() { return presentation; }

/** Returns the ranking to be used for this query, never null */
public Ranking getRanking() { return ranking; }

/** Returns the query representation model to be used for this query, never null */
public Model getModel() { return model; }

/**
 * Return the HTTP request which caused this query. This will never be null
 * when running with queries from the network.
 */
public HttpRequest getHttpRequest() { return httpRequest; }
/**
 * Returns the unique and stable session id of this query.
 *
 * @param create if true this is created if not already set
 * @return the session id of this query, or null if not set and create is false
 */
public SessionId getSessionId(boolean create) {
    if (requestId == null) {
        if ( ! create) return null;
        requestId = UniqueRequestId.next(); // assigned lazily, once per query
    }
    return new SessionId(requestId, getRanking().getProfile());
}
/** Returns whether this query has any properties which encodeAsProperties would transfer to the backend. */
public boolean hasEncodableProperties() {
    return ! ranking.getProperties().isEmpty()
        || ! ranking.getFeatures().isEmpty()
        || ranking.getFreshness() != null
        || model.getSearchPath() != null
        || model.getDocumentDb() != null
        || (presentation.getHighlight() != null && ! presentation.getHighlight().getHighlightItems().isEmpty());
}
/**
 * Encodes properties of this query.
 *
 * @param buffer the buffer to encode to
 * @param encodeQueryData true to encode all properties, false to only include session information, not actual query data
 * @return the encoded length
 */
public int encodeAsProperties(ByteBuffer buffer, boolean encodeQueryData) {
    if (! hasEncodableProperties()) return 0;
    int start = buffer.position();
    // Reserve space for the map count; it is patched in once all maps are written
    int mapCountPosition = buffer.position();
    buffer.putInt(0);
    int mapCount = 0;
    mapCount += ranking.getProperties().encode(buffer, encodeQueryData);
    if (encodeQueryData) mapCount += ranking.getFeatures().encode(buffer);
    if (encodeQueryData && presentation.getHighlight() != null) mapCount += MapEncoder.encodeStringMultiMap(Highlight.HIGHLIGHTTERMS, presentation.getHighlight().getHighlightTerms(), buffer);
    if (encodeQueryData) mapCount += MapEncoder.encodeSingleValue("model", "searchpath", model.getSearchPath(), buffer);
    // Document db and cache settings are always sent, also when only session data is wanted
    mapCount += MapEncoder.encodeSingleValue(DocumentDatabase.MATCH_PROPERTY, DocumentDatabase.SEARCH_DOC_TYPE_KEY, model.getDocumentDb(), buffer);
    mapCount += MapEncoder.encodeMap("caches", createCacheSettingMap(), buffer);
    buffer.putInt(mapCountPosition, mapCount);
    return buffer.position() - start;
}
/** Builds the cache-control flags sent to the backend: grouping session cache and ranking query cache. */
private Map<String, Boolean> createCacheSettingMap() {
    boolean groupingCache = getGroupingSessionCache();
    boolean queryCache = ranking.getQueryCache();
    if (groupingCache && queryCache) {
        Map<String, Boolean> settings = new HashMap<>();
        settings.put("grouping", true);
        settings.put("query", true);
        return settings;
    }
    if (groupingCache)
        return Collections.singletonMap("grouping", true);
    if (queryCache)
        return Collections.singletonMap("query", true);
    return Collections.emptyMap();
}
/**
 * Prepares this for binary serialization.
 * <p>
 * This must be invoked after all changes have been made to this query before it is passed
 * on to a receiving backend. Calling it is somewhat expensive, so it should only happen once.
 * If a prepared query is cloned, it stays prepared.
 */
public void prepare() {
    // Each subcomponent flattens its state into an encodable form.
    // NOTE(review): the model is prepared with the ranking, so this ordering looks
    // significant — confirm before reordering these calls.
    getModel().prepare(getRanking());
    getPresentation().prepare();
    getRanking().prepare();
}
} |
`(com.yahoo.io.)IOUtils.readBytes(request.getData(), 1 << 20)` to fill that array ;) | private HttpSearchResponse handleBody(HttpRequest request) throws JSONException {
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector = null;
try {
InputStream inputStream = request.getData();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
byte[] buffer = new byte[0xFFFF];
for (int len = inputStream.read(buffer); len != -1; len = inputStream.read(buffer)) {
outputStream.write(buffer, 0, len);
}
inspector = SlimeUtils.jsonToSlime(outputStream.toByteArray()).get();
} catch (IOException e) { e.printStackTrace();
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | outputStream.write(buffer, 0, len); | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
Probably throw a `new RuntimeException(e)`, or something. | private HttpSearchResponse handleBody(HttpRequest request) throws JSONException {
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector = null;
try {
InputStream inputStream = request.getData();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
byte[] buffer = new byte[0xFFFF];
for (int len = inputStream.read(buffer); len != -1; len = inputStream.read(buffer)) {
outputStream.write(buffer, 0, len);
}
inspector = SlimeUtils.jsonToSlime(outputStream.toByteArray()).get();
} catch (IOException e) { e.printStackTrace();
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | } | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
Another thing on logging: Never log on errors which are caused by the query. (Doing that makes the log unusable and worst case allows clients to DOS the system) | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector = null;
try {
byte[] byteArray = IOUtils.readBytes(request.getData(), 1 << 20);
inspector = SlimeUtils.jsonToSlime(byteArray).get();
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException("Could not resolve JSON-query");
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | e.printStackTrace(); | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
Yes, this struck my mind on the way home yesterday, as well :p So, a separate exception which indicates an error in the JSON is good, but catch it and handle it the same way as the generic RuntimeExceptions, just with a different reply, like you had initially: 400 bad request, with the error included. | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector = null;
try {
byte[] byteArray = IOUtils.readBytes(request.getData(), 1 << 20);
inspector = SlimeUtils.jsonToSlime(byteArray).get();
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException("Could not resolve JSON-query");
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | e.printStackTrace(); | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
This particular line is what throws on bad JSON, so it needs to be in a separate try-catch if you are to separate the two problems. | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector;
try {
byte[] byteArray = IOUtils.readBytes(request.getData(), 1 << 20);
inspector = SlimeUtils.jsonToSlime(byteArray).get();
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException("Problem with reading from input-stream", e);
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | inspector = SlimeUtils.jsonToSlime(byteArray).get(); | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
Great, now you just need a unit test to see that this turned out right ;) | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector;
try {
byte[] byteArray = IOUtils.readBytes(request.getData(), 1 << 20);
inspector = SlimeUtils.jsonToSlime(byteArray).get();
if (inspector.field("error_message").valid()){
throw new QueryException("Illegal query: "+inspector.field("error_message").asString() + ", at: "+ new String(inspector.field("offending_input").asData(), StandardCharsets.UTF_8));
}
} catch (IOException e) {
throw new RuntimeException("Problem with reading from input-stream", e);
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | throw new QueryException("Illegal query: "+inspector.field("error_message").asString() + ", at: "+ new String(inspector.field("offending_input").asData(), StandardCharsets.UTF_8)); | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
👍 | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector;
try {
byte[] byteArray = IOUtils.readBytes(request.getData(), 1 << 20);
inspector = SlimeUtils.jsonToSlime(byteArray).get();
if (inspector.field("error_message").valid()){
throw new QueryException("Illegal query: "+inspector.field("error_message").asString() + ", at: "+ new String(inspector.field("offending_input").asData(), StandardCharsets.UTF_8));
}
} catch (IOException e) {
throw new RuntimeException("Problem with reading from input-stream", e);
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | throw new QueryException("Illegal query: "+inspector.field("error_message").asString() + ", at: "+ new String(inspector.field("offending_input").asData(), StandardCharsets.UTF_8)); | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
Could you separate out this whole thing into a separate method which returns the query? (E.g queryFromRequest(HttpRequest). This method is already too long, and that you need a separate comment for the block indicated you should. | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector;
try {
byte[] byteArray = IOUtils.readBytes(request.getData(), 1 << 20);
inspector = SlimeUtils.jsonToSlime(byteArray).get();
if (inspector.field("error_message").valid()){
throw new QueryException("Illegal query: "+inspector.field("error_message").asString() + ", at: "+ new String(inspector.field("offending_input").asData(), StandardCharsets.UTF_8));
}
} catch (IOException e) {
throw new RuntimeException("Problem with reading from input-stream", e);
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | Query query; | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
Yes. I created the method queryFromRequest(HttpRequest, CompiledQueryProfile), which returns the query. | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector;
try {
byte[] byteArray = IOUtils.readBytes(request.getData(), 1 << 20);
inspector = SlimeUtils.jsonToSlime(byteArray).get();
if (inspector.field("error_message").valid()){
throw new QueryException("Illegal query: "+inspector.field("error_message").asString() + ", at: "+ new String(inspector.field("offending_input").asData(), StandardCharsets.UTF_8));
}
} catch (IOException e) {
throw new RuntimeException("Problem with reading from input-stream", e);
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | Query query; | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
👍 feel free to merge ... | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query;
if (request.getMethod() == com.yahoo.jdisc.http.HttpRequest.Method.POST && request.getHeader(com.yahoo.jdisc.http.HttpHeaders.Names.CONTENT_TYPE).equals(JSON_CONTENT_TYPE)) {
Inspector inspector;
try {
byte[] byteArray = IOUtils.readBytes(request.getData(), 1 << 20);
inspector = SlimeUtils.jsonToSlime(byteArray).get();
if (inspector.field("error_message").valid()){
throw new QueryException("Illegal query: "+inspector.field("error_message").asString() + ", at: "+ new String(inspector.field("offending_input").asData(), StandardCharsets.UTF_8));
}
} catch (IOException e) {
throw new RuntimeException("Problem with reading from input-stream", e);
}
Map<String, String> requestMap = new HashMap<>();
createRequestMapping(inspector, requestMap, "");
query = new Query(request, requestMap, queryProfile);
} else {
query = new Query(request, queryProfile);
}
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | Query query; | private HttpSearchResponse handleBody(HttpRequest request){
String queryProfileName = request.getProperty("queryProfile");
CompiledQueryProfile queryProfile = queryProfileRegistry.findQueryProfile(queryProfileName);
boolean benchmarkOutput = VespaHeaders.benchmarkOutput(request);
Query query = queryFromRequest(request, queryProfile);
boolean benchmarkCoverage = VespaHeaders.benchmarkCoverage(benchmarkOutput, request.getJDiscRequest().headers());
String invalidReason = query.validate();
Chain<Searcher> searchChain = null;
String searchChainName = null;
if (invalidReason == null) {
Tuple2<String, Chain<Searcher>> nameAndChain = resolveChain(query.properties().getString(Query.SEARCH_CHAIN));
searchChainName = nameAndChain.first;
searchChain = nameAndChain.second;
}
Result result;
if (invalidReason != null) {
result = new Result(query, ErrorMessage.createIllegalQuery(invalidReason));
} else if (queryProfile == null && queryProfileName != null) {
result = new Result(
query,
ErrorMessage.createIllegalQuery("Could not resolve query profile '" + queryProfileName + "'"));
} else if (searchChain == null) {
result = new Result(
query,
ErrorMessage.createInvalidQueryParameter("No search chain named '" + searchChainName + "' was found"));
} else {
String pathAndQuery = UriTools.rawRequest(request.getUri());
result = search(pathAndQuery, query, searchChain, searchChainRegistry);
}
Renderer renderer;
if (result.getTemplating().usesDefaultTemplate()) {
renderer = toRendererCopy(query.getPresentation().getRenderer());
result.getTemplating().setRenderer(renderer);
}
else {
renderer = perRenderingCopy(result.getTemplating().getRenderer());
}
HttpSearchResponse response = new HttpSearchResponse(getHttpResponseStatus(request, result),
result, query, renderer);
if (hostResponseHeaderKey.isPresent())
response.headers().add(hostResponseHeaderKey.get(), selfHostname);
if (benchmarkOutput)
VespaHeaders.benchmarkOutput(response.headers(), benchmarkCoverage, response.getTiming(),
response.getHitCounts(), getErrors(result), response.getCoverage());
return response;
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} | class MeanConnections implements Callback {
@Override
public void run(Handle h, boolean firstTime) {
if (firstTime) {
metric.set(SEARCH_CONNECTIONS, 0.0d, null);
return;
}
Value v = (Value) h;
metric.set(SEARCH_CONNECTIONS, v.getMean(), null);
}
} |
I'm wondering if we should explicitly use utf-8 as the charset here. Not doing so seems to invoke the "platform's default character set" which is _likely_ to be utf-8, but not guaranteed. @bratseth what are the best practices here? | public String toJson() {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
JsonWriter writer = new JsonWriter(buffer);
writer.write(this);
return buffer.toString();
} | return buffer.toString(); | public String toJson() {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
JsonWriter writer = new JsonWriter(buffer);
writer.write(this);
try {
return buffer.toString("UTF-8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
} | class Document extends StructuredFieldValue {
public static final int classId = registerClass(Ids.document + 3, Document.class);
public static final short SERIALIZED_VERSION = 8;
private DocumentId docId;
private Struct header;
private Struct body;
private Long lastModified = null;
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, String id) {
this(docType, new DocumentId(id));
}
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, DocumentId id) {
super(docType);
setNewType(docType);
internalSetId(id, docType);
}
/**
* Creates a document that is a shallow copy of another.
*
* @param doc The document to copy.
*/
public Document(Document doc) {
this(doc.getDataType(), doc.getId());
header = doc.header;
body = doc.body;
lastModified = doc.lastModified;
}
/**
*
* @param reader The deserializer to use for creating this document
*/
public Document(DocumentReader reader) {
super(null);
reader.read(this);
}
public DocumentId getId() { return docId; }
public void setId(DocumentId id) { internalSetId(id, getDataType()); }
private void internalSetId(DocumentId id, DocumentType docType) {
if (id != null && id.hasDocType() && docType != null && !id.getDocType().equals(docType.getName())) {
throw new IllegalArgumentException("Trying to set a document id (type " + id.getDocType() +
") that doesn't match the document type (" + getDataType().getName() + ").");
}
docId = id;
}
public Struct getHeader() { return header; }
public Struct getBody() { return body; }
@Override
public void assign(Object o) {
throw new IllegalArgumentException("Assign not implemented for " + getClass() + " objects");
}
@Override
public Document clone() {
Document doc = (Document) super.clone();
doc.docId = docId.clone();
doc.header = header.clone();
doc.body = body.clone();
return doc;
}
private void setNewType(DocumentType type) {
header = type.getHeaderType().createFieldValue();
body = type.getBodyType().createFieldValue();
}
public void setDataType(DataType type) {
if (docId != null && docId.hasDocType() && !docId.getDocType().equals(type.getName())) {
throw new IllegalArgumentException("Trying to set a document type (" + type.getName() +
") that doesn't match the document id (" + docId + ").");
}
super.setDataType(type);
setNewType((DocumentType)type);
}
public int getSerializedSize() throws SerializationException {
DocumentSerializer data = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
data.write(this);
return data.getBuf().position();
}
/**
* This is an approximation of serialized size. We just set it to 4096 as a definition of a medium document.
* @return Approximate size of document (4096)
*/
public final int getApproxSize() { return 4096; }
public void serialize(OutputStream out) throws SerializationException {
DocumentSerializer writer = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
writer.write(this);
GrowableByteBuffer data = writer.getBuf();
byte[] array;
if (data.hasArray()) {
array = data.array();
} else {
array = new byte[data.position()];
int endPos = data.position();
data.position(0);
data.get(array);
data.position(endPos);
}
try {
out.write(array, 0, data.position());
} catch (IOException ioe) {
throw new SerializationException(ioe);
}
}
public static Document createDocument(DocumentReader buffer) {
return new Document(buffer);
}
@Override
public Field getField(String fieldName) {
Field field = header.getField(fieldName);
if (field == null) {
field = body.getField(fieldName);
}
if (field == null) {
for(DocumentType parent : getDataType().getInheritedTypes()) {
field = parent.getField(fieldName);
if (field != null) {
break;
}
}
}
return field;
}
@Override
public FieldValue getFieldValue(Field field) {
if (field.isHeader()) {
return header.getFieldValue(field);
} else {
return body.getFieldValue(field);
}
}
@Override
protected void doSetFieldValue(Field field, FieldValue value) {
if (field.isHeader()) {
header.setFieldValue(field, value);
} else {
body.setFieldValue(field, value);
}
}
@Override
public FieldValue removeFieldValue(Field field) {
if (field.isHeader()) {
return header.removeFieldValue(field);
} else {
return body.removeFieldValue(field);
}
}
@Override
public void clear() {
header.clear();
body.clear();
}
@Override
public Iterator<Map.Entry<Field, FieldValue>> iterator() {
return new Iterator<Map.Entry<Field, FieldValue>>() {
private Iterator<Map.Entry<Field, FieldValue>> headerIt = header.iterator();
private Iterator<Map.Entry<Field, FieldValue>> bodyIt = body.iterator();
public boolean hasNext() {
if (headerIt != null) {
if (headerIt.hasNext()) {
return true;
} else {
headerIt = null;
}
}
return bodyIt.hasNext();
}
public Map.Entry<Field, FieldValue> next() {
return (headerIt == null ? bodyIt.next() : headerIt.next());
}
public void remove() {
if (headerIt == null) {
bodyIt.remove();
} else {
headerIt.remove();
}
}
};
}
public String toString() {
return "document '" + String.valueOf(docId) + "' of type '" + getDataType().getName() + "'";
}
public String toXML(String indent) {
XmlStream xml = new XmlStream();
xml.setIndent(indent);
xml.beginTag("document");
printXml(xml);
xml.endTag();
return xml.toString();
}
/**
* Get XML representation of the document root and its children, contained
* within a <document></document> tag.
* @return XML representation of document
*/
public String toXml() {
return toXML(" ");
}
public void printXml(XmlStream xml) {
XmlSerializationHelper.printDocumentXml(this, xml);
}
/**
* Get JSON representation of the document root and its children contained in a JSON object
* @return JSON representation of document
*/
/** Returns true if the argument is a document which has the same set of values */
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (!(o instanceof Document)) return false;
Document other = (Document) o;
return (super.equals(o) && docId.equals(other.docId) &&
header.equals(other.header) && body.equals(other.body));
}
@Override
public int hashCode() {
return 31 * super.hashCode() + (docId != null ? docId.hashCode() : 0);
}
/**
* Returns the last modified time of this Document, when stored in persistent storage. This is typically set by the
* library that retrieves the Document from persistent storage.
*
* This variable doesn't really belong in document. It is used when retrieving docblocks of documents to be able to
* see when documents was last modified in VDS, without having to add modified times separate in the API.
*
* NOTE: This is a transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @return the last modified time of this Document (in milliseconds), or null if unset
*/
public Long getLastModified() {
return lastModified;
}
/**
* Sets the last modified time of this Document. This is typically set by the library that retrieves the
* Document from persistent storage, and should not be set by arbitrary clients. NOTE: This is a
* transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @param lastModified the last modified time of this Document (in milliseconds)
*/
public void setLastModified(Long lastModified) {
this.lastModified = lastModified;
}
public void onSerialize(Serializer data) throws SerializationException {
serialize((DocumentWriter)data);
}
@SuppressWarnings("deprecation")
public void serializeHeader(Serializer data) throws SerializationException {
if (data instanceof DocumentWriter) {
if (data instanceof com.yahoo.document.serialization.VespaDocumentSerializer42) {
((com.yahoo.document.serialization.VespaDocumentSerializer42)data).setHeaderOnly(true);
}
serialize((DocumentWriter)data);
} else if (data instanceof BufferSerializer) {
serialize(DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf(), true));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer(), true);
serialize(fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
public void serializeBody(Serializer data) throws SerializationException {
if (getBody().getFieldCount() > 0) {
if (data instanceof FieldWriter) {
getBody().serialize(new Field("body", getBody().getDataType()), (FieldWriter) data);
} else if (data instanceof BufferSerializer) {
getBody().serialize(new Field("body", getBody().getDataType()), DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf()));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer());
getBody().serialize(new Field("body", getBody().getDataType()), fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
}
@Override
public DocumentType getDataType() {
return (DocumentType)super.getDataType();
}
@Override
public int getFieldCount() {
return header.getFieldCount() + body.getFieldCount();
}
public void serialize(DocumentWriter writer) {
writer.write(this);
}
public void deserialize(DocumentReader reader) {
reader.read(this);
}
@Override
public void serialize(Field field, FieldWriter writer) {
writer.write(field, this);
}
/* (non-Javadoc)
* @see com.yahoo.document.datatypes.FieldValue
*/
@Override
public void deserialize(Field field, FieldReader reader) {
reader.read(field, this);
}
@Override
public int compareTo(FieldValue fieldValue) {
int comp = super.compareTo(fieldValue);
if (comp != 0) {
return comp;
}
Document otherValue = (Document) fieldValue;
comp = getId().compareTo(otherValue.getId());
if (comp != 0) {
return comp;
}
comp = header.compareTo(otherValue.header);
if (comp != 0) {
return comp;
}
comp = body.compareTo(otherValue.body);
return comp;
}
} | class Document extends StructuredFieldValue {
public static final int classId = registerClass(Ids.document + 3, Document.class);
public static final short SERIALIZED_VERSION = 8;
private DocumentId docId;
private Struct header;
private Struct body;
private Long lastModified = null;
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, String id) {
this(docType, new DocumentId(id));
}
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, DocumentId id) {
super(docType);
setNewType(docType);
internalSetId(id, docType);
}
/**
* Creates a document that is a shallow copy of another.
*
* @param doc The document to copy.
*/
public Document(Document doc) {
this(doc.getDataType(), doc.getId());
header = doc.header;
body = doc.body;
lastModified = doc.lastModified;
}
/**
*
* @param reader The deserializer to use for creating this document
*/
public Document(DocumentReader reader) {
super(null);
reader.read(this);
}
public DocumentId getId() { return docId; }
public void setId(DocumentId id) { internalSetId(id, getDataType()); }
private void internalSetId(DocumentId id, DocumentType docType) {
if (id != null && id.hasDocType() && docType != null && !id.getDocType().equals(docType.getName())) {
throw new IllegalArgumentException("Trying to set a document id (type " + id.getDocType() +
") that doesn't match the document type (" + getDataType().getName() + ").");
}
docId = id;
}
public Struct getHeader() { return header; }
public Struct getBody() { return body; }
@Override
public void assign(Object o) {
throw new IllegalArgumentException("Assign not implemented for " + getClass() + " objects");
}
@Override
public Document clone() {
Document doc = (Document) super.clone();
doc.docId = docId.clone();
doc.header = header.clone();
doc.body = body.clone();
return doc;
}
private void setNewType(DocumentType type) {
header = type.getHeaderType().createFieldValue();
body = type.getBodyType().createFieldValue();
}
public void setDataType(DataType type) {
if (docId != null && docId.hasDocType() && !docId.getDocType().equals(type.getName())) {
throw new IllegalArgumentException("Trying to set a document type (" + type.getName() +
") that doesn't match the document id (" + docId + ").");
}
super.setDataType(type);
setNewType((DocumentType)type);
}
public int getSerializedSize() throws SerializationException {
DocumentSerializer data = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
data.write(this);
return data.getBuf().position();
}
/**
* This is an approximation of serialized size. We just set it to 4096 as a definition of a medium document.
* @return Approximate size of document (4096)
*/
public final int getApproxSize() { return 4096; }
public void serialize(OutputStream out) throws SerializationException {
DocumentSerializer writer = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
writer.write(this);
GrowableByteBuffer data = writer.getBuf();
byte[] array;
if (data.hasArray()) {
array = data.array();
} else {
array = new byte[data.position()];
int endPos = data.position();
data.position(0);
data.get(array);
data.position(endPos);
}
try {
out.write(array, 0, data.position());
} catch (IOException ioe) {
throw new SerializationException(ioe);
}
}
public static Document createDocument(DocumentReader buffer) {
return new Document(buffer);
}
@Override
public Field getField(String fieldName) {
Field field = header.getField(fieldName);
if (field == null) {
field = body.getField(fieldName);
}
if (field == null) {
for(DocumentType parent : getDataType().getInheritedTypes()) {
field = parent.getField(fieldName);
if (field != null) {
break;
}
}
}
return field;
}
@Override
public FieldValue getFieldValue(Field field) {
if (field.isHeader()) {
return header.getFieldValue(field);
} else {
return body.getFieldValue(field);
}
}
@Override
protected void doSetFieldValue(Field field, FieldValue value) {
if (field.isHeader()) {
header.setFieldValue(field, value);
} else {
body.setFieldValue(field, value);
}
}
@Override
public FieldValue removeFieldValue(Field field) {
if (field.isHeader()) {
return header.removeFieldValue(field);
} else {
return body.removeFieldValue(field);
}
}
@Override
public void clear() {
header.clear();
body.clear();
}
@Override
public Iterator<Map.Entry<Field, FieldValue>> iterator() {
return new Iterator<Map.Entry<Field, FieldValue>>() {
private Iterator<Map.Entry<Field, FieldValue>> headerIt = header.iterator();
private Iterator<Map.Entry<Field, FieldValue>> bodyIt = body.iterator();
public boolean hasNext() {
if (headerIt != null) {
if (headerIt.hasNext()) {
return true;
} else {
headerIt = null;
}
}
return bodyIt.hasNext();
}
public Map.Entry<Field, FieldValue> next() {
return (headerIt == null ? bodyIt.next() : headerIt.next());
}
public void remove() {
if (headerIt == null) {
bodyIt.remove();
} else {
headerIt.remove();
}
}
};
}
public String toString() {
return "document '" + String.valueOf(docId) + "' of type '" + getDataType().getName() + "'";
}
public String toXML(String indent) {
XmlStream xml = new XmlStream();
xml.setIndent(indent);
xml.beginTag("document");
printXml(xml);
xml.endTag();
return xml.toString();
}
/**
* Get XML representation of the document root and its children, contained
* within a <document></document> tag.
* @return XML representation of document
*/
public String toXml() {
return toXML(" ");
}
public void printXml(XmlStream xml) {
XmlSerializationHelper.printDocumentXml(this, xml);
}
/**
* Get JSON representation of the document root and its children contained in a JSON object
* @return JSON representation of document
*/
/** Returns true if the argument is a document which has the same set of values */
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (!(o instanceof Document)) return false;
Document other = (Document) o;
return (super.equals(o) && docId.equals(other.docId) &&
header.equals(other.header) && body.equals(other.body));
}
@Override
public int hashCode() {
return 31 * super.hashCode() + (docId != null ? docId.hashCode() : 0);
}
/**
* Returns the last modified time of this Document, when stored in persistent storage. This is typically set by the
* library that retrieves the Document from persistent storage.
*
* This variable doesn't really belong in document. It is used when retrieving docblocks of documents to be able to
* see when documents was last modified in VDS, without having to add modified times separate in the API.
*
* NOTE: This is a transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @return the last modified time of this Document (in milliseconds), or null if unset
*/
public Long getLastModified() {
return lastModified;
}
/**
* Sets the last modified time of this Document. This is typically set by the library that retrieves the
* Document from persistent storage, and should not be set by arbitrary clients. NOTE: This is a
* transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @param lastModified the last modified time of this Document (in milliseconds)
*/
public void setLastModified(Long lastModified) {
this.lastModified = lastModified;
}
public void onSerialize(Serializer data) throws SerializationException {
serialize((DocumentWriter)data);
}
@SuppressWarnings("deprecation")
public void serializeHeader(Serializer data) throws SerializationException {
if (data instanceof DocumentWriter) {
if (data instanceof com.yahoo.document.serialization.VespaDocumentSerializer42) {
((com.yahoo.document.serialization.VespaDocumentSerializer42)data).setHeaderOnly(true);
}
serialize((DocumentWriter)data);
} else if (data instanceof BufferSerializer) {
serialize(DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf(), true));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer(), true);
serialize(fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
public void serializeBody(Serializer data) throws SerializationException {
if (getBody().getFieldCount() > 0) {
if (data instanceof FieldWriter) {
getBody().serialize(new Field("body", getBody().getDataType()), (FieldWriter) data);
} else if (data instanceof BufferSerializer) {
getBody().serialize(new Field("body", getBody().getDataType()), DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf()));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer());
getBody().serialize(new Field("body", getBody().getDataType()), fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
}
@Override
public DocumentType getDataType() {
return (DocumentType)super.getDataType();
}
@Override
public int getFieldCount() {
return header.getFieldCount() + body.getFieldCount();
}
public void serialize(DocumentWriter writer) {
writer.write(this);
}
public void deserialize(DocumentReader reader) {
reader.read(this);
}
@Override
public void serialize(Field field, FieldWriter writer) {
writer.write(field, this);
}
/* (non-Javadoc)
* @see com.yahoo.document.datatypes.FieldValue
*/
@Override
public void deserialize(Field field, FieldReader reader) {
reader.read(field, this);
}
@Override
public int compareTo(FieldValue fieldValue) {
int comp = super.compareTo(fieldValue);
if (comp != 0) {
return comp;
}
Document otherValue = (Document) fieldValue;
comp = getId().compareTo(otherValue.getId());
if (comp != 0) {
return comp;
}
comp = header.compareTo(otherValue.header);
if (comp != 0) {
return comp;
}
comp = body.compareTo(otherValue.body);
return comp;
}
} |
Yes, you are right: Always send charset explicitly. | public String toJson() {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
JsonWriter writer = new JsonWriter(buffer);
writer.write(this);
return buffer.toString();
} | return buffer.toString(); | public String toJson() {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
JsonWriter writer = new JsonWriter(buffer);
writer.write(this);
try {
return buffer.toString("UTF-8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
} | class Document extends StructuredFieldValue {
public static final int classId = registerClass(Ids.document + 3, Document.class);
public static final short SERIALIZED_VERSION = 8;
private DocumentId docId;
private Struct header;
private Struct body;
private Long lastModified = null;
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, String id) {
this(docType, new DocumentId(id));
}
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, DocumentId id) {
super(docType);
setNewType(docType);
internalSetId(id, docType);
}
/**
* Creates a document that is a shallow copy of another.
*
* @param doc The document to copy.
*/
public Document(Document doc) {
this(doc.getDataType(), doc.getId());
header = doc.header;
body = doc.body;
lastModified = doc.lastModified;
}
/**
*
* @param reader The deserializer to use for creating this document
*/
public Document(DocumentReader reader) {
super(null);
reader.read(this);
}
public DocumentId getId() { return docId; }
public void setId(DocumentId id) { internalSetId(id, getDataType()); }
private void internalSetId(DocumentId id, DocumentType docType) {
if (id != null && id.hasDocType() && docType != null && !id.getDocType().equals(docType.getName())) {
throw new IllegalArgumentException("Trying to set a document id (type " + id.getDocType() +
") that doesn't match the document type (" + getDataType().getName() + ").");
}
docId = id;
}
public Struct getHeader() { return header; }
public Struct getBody() { return body; }
@Override
public void assign(Object o) {
throw new IllegalArgumentException("Assign not implemented for " + getClass() + " objects");
}
@Override
public Document clone() {
Document doc = (Document) super.clone();
doc.docId = docId.clone();
doc.header = header.clone();
doc.body = body.clone();
return doc;
}
private void setNewType(DocumentType type) {
header = type.getHeaderType().createFieldValue();
body = type.getBodyType().createFieldValue();
}
public void setDataType(DataType type) {
if (docId != null && docId.hasDocType() && !docId.getDocType().equals(type.getName())) {
throw new IllegalArgumentException("Trying to set a document type (" + type.getName() +
") that doesn't match the document id (" + docId + ").");
}
super.setDataType(type);
setNewType((DocumentType)type);
}
public int getSerializedSize() throws SerializationException {
DocumentSerializer data = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
data.write(this);
return data.getBuf().position();
}
/**
* This is an approximation of serialized size. We just set it to 4096 as a definition of a medium document.
* @return Approximate size of document (4096)
*/
public final int getApproxSize() { return 4096; }
public void serialize(OutputStream out) throws SerializationException {
DocumentSerializer writer = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
writer.write(this);
GrowableByteBuffer data = writer.getBuf();
byte[] array;
if (data.hasArray()) {
array = data.array();
} else {
array = new byte[data.position()];
int endPos = data.position();
data.position(0);
data.get(array);
data.position(endPos);
}
try {
out.write(array, 0, data.position());
} catch (IOException ioe) {
throw new SerializationException(ioe);
}
}
public static Document createDocument(DocumentReader buffer) {
return new Document(buffer);
}
@Override
public Field getField(String fieldName) {
Field field = header.getField(fieldName);
if (field == null) {
field = body.getField(fieldName);
}
if (field == null) {
for(DocumentType parent : getDataType().getInheritedTypes()) {
field = parent.getField(fieldName);
if (field != null) {
break;
}
}
}
return field;
}
@Override
public FieldValue getFieldValue(Field field) {
if (field.isHeader()) {
return header.getFieldValue(field);
} else {
return body.getFieldValue(field);
}
}
@Override
protected void doSetFieldValue(Field field, FieldValue value) {
if (field.isHeader()) {
header.setFieldValue(field, value);
} else {
body.setFieldValue(field, value);
}
}
@Override
public FieldValue removeFieldValue(Field field) {
if (field.isHeader()) {
return header.removeFieldValue(field);
} else {
return body.removeFieldValue(field);
}
}
@Override
public void clear() {
header.clear();
body.clear();
}
@Override
public Iterator<Map.Entry<Field, FieldValue>> iterator() {
return new Iterator<Map.Entry<Field, FieldValue>>() {
private Iterator<Map.Entry<Field, FieldValue>> headerIt = header.iterator();
private Iterator<Map.Entry<Field, FieldValue>> bodyIt = body.iterator();
public boolean hasNext() {
if (headerIt != null) {
if (headerIt.hasNext()) {
return true;
} else {
headerIt = null;
}
}
return bodyIt.hasNext();
}
public Map.Entry<Field, FieldValue> next() {
return (headerIt == null ? bodyIt.next() : headerIt.next());
}
public void remove() {
if (headerIt == null) {
bodyIt.remove();
} else {
headerIt.remove();
}
}
};
}
public String toString() {
return "document '" + String.valueOf(docId) + "' of type '" + getDataType().getName() + "'";
}
public String toXML(String indent) {
XmlStream xml = new XmlStream();
xml.setIndent(indent);
xml.beginTag("document");
printXml(xml);
xml.endTag();
return xml.toString();
}
/**
* Get XML representation of the document root and its children, contained
* within a <document></document> tag.
* @return XML representation of document
*/
public String toXml() {
return toXML(" ");
}
public void printXml(XmlStream xml) {
XmlSerializationHelper.printDocumentXml(this, xml);
}
/**
* Get JSON representation of the document root and its children contained in a JSON object
* @return JSON representation of document
*/
/** Returns true if the argument is a document which has the same set of values */
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (!(o instanceof Document)) return false;
Document other = (Document) o;
return (super.equals(o) && docId.equals(other.docId) &&
header.equals(other.header) && body.equals(other.body));
}
@Override
public int hashCode() {
return 31 * super.hashCode() + (docId != null ? docId.hashCode() : 0);
}
/**
* Returns the last modified time of this Document, when stored in persistent storage. This is typically set by the
* library that retrieves the Document from persistent storage.
*
* This variable doesn't really belong in document. It is used when retrieving docblocks of documents to be able to
* see when documents was last modified in VDS, without having to add modified times separate in the API.
*
* NOTE: This is a transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @return the last modified time of this Document (in milliseconds), or null if unset
*/
public Long getLastModified() {
return lastModified;
}
/**
* Sets the last modified time of this Document. This is typically set by the library that retrieves the
* Document from persistent storage, and should not be set by arbitrary clients. NOTE: This is a
* transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @param lastModified the last modified time of this Document (in milliseconds)
*/
public void setLastModified(Long lastModified) {
this.lastModified = lastModified;
}
public void onSerialize(Serializer data) throws SerializationException {
serialize((DocumentWriter)data);
}
@SuppressWarnings("deprecation")
public void serializeHeader(Serializer data) throws SerializationException {
if (data instanceof DocumentWriter) {
if (data instanceof com.yahoo.document.serialization.VespaDocumentSerializer42) {
((com.yahoo.document.serialization.VespaDocumentSerializer42)data).setHeaderOnly(true);
}
serialize((DocumentWriter)data);
} else if (data instanceof BufferSerializer) {
serialize(DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf(), true));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer(), true);
serialize(fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
public void serializeBody(Serializer data) throws SerializationException {
if (getBody().getFieldCount() > 0) {
if (data instanceof FieldWriter) {
getBody().serialize(new Field("body", getBody().getDataType()), (FieldWriter) data);
} else if (data instanceof BufferSerializer) {
getBody().serialize(new Field("body", getBody().getDataType()), DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf()));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer());
getBody().serialize(new Field("body", getBody().getDataType()), fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
}
@Override
public DocumentType getDataType() {
return (DocumentType)super.getDataType();
}
@Override
public int getFieldCount() {
return header.getFieldCount() + body.getFieldCount();
}
public void serialize(DocumentWriter writer) {
writer.write(this);
}
public void deserialize(DocumentReader reader) {
reader.read(this);
}
@Override
public void serialize(Field field, FieldWriter writer) {
writer.write(field, this);
}
/* (non-Javadoc)
* @see com.yahoo.document.datatypes.FieldValue
*/
@Override
public void deserialize(Field field, FieldReader reader) {
reader.read(field, this);
}
@Override
public int compareTo(FieldValue fieldValue) {
int comp = super.compareTo(fieldValue);
if (comp != 0) {
return comp;
}
Document otherValue = (Document) fieldValue;
comp = getId().compareTo(otherValue.getId());
if (comp != 0) {
return comp;
}
comp = header.compareTo(otherValue.header);
if (comp != 0) {
return comp;
}
comp = body.compareTo(otherValue.body);
return comp;
}
} | class Document extends StructuredFieldValue {
public static final int classId = registerClass(Ids.document + 3, Document.class);
public static final short SERIALIZED_VERSION = 8;
private DocumentId docId;
private Struct header;
private Struct body;
private Long lastModified = null;
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, String id) {
this(docType, new DocumentId(id));
}
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, DocumentId id) {
super(docType);
setNewType(docType);
internalSetId(id, docType);
}
/**
* Creates a document that is a shallow copy of another.
*
* @param doc The document to copy.
*/
public Document(Document doc) {
this(doc.getDataType(), doc.getId());
header = doc.header;
body = doc.body;
lastModified = doc.lastModified;
}
/**
*
* @param reader The deserializer to use for creating this document
*/
public Document(DocumentReader reader) {
super(null);
reader.read(this);
}
public DocumentId getId() { return docId; }
public void setId(DocumentId id) { internalSetId(id, getDataType()); }
private void internalSetId(DocumentId id, DocumentType docType) {
if (id != null && id.hasDocType() && docType != null && !id.getDocType().equals(docType.getName())) {
throw new IllegalArgumentException("Trying to set a document id (type " + id.getDocType() +
") that doesn't match the document type (" + getDataType().getName() + ").");
}
docId = id;
}
public Struct getHeader() { return header; }
public Struct getBody() { return body; }
@Override
public void assign(Object o) {
throw new IllegalArgumentException("Assign not implemented for " + getClass() + " objects");
}
@Override
public Document clone() {
Document doc = (Document) super.clone();
doc.docId = docId.clone();
doc.header = header.clone();
doc.body = body.clone();
return doc;
}
private void setNewType(DocumentType type) {
header = type.getHeaderType().createFieldValue();
body = type.getBodyType().createFieldValue();
}
public void setDataType(DataType type) {
if (docId != null && docId.hasDocType() && !docId.getDocType().equals(type.getName())) {
throw new IllegalArgumentException("Trying to set a document type (" + type.getName() +
") that doesn't match the document id (" + docId + ").");
}
super.setDataType(type);
setNewType((DocumentType)type);
}
public int getSerializedSize() throws SerializationException {
DocumentSerializer data = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
data.write(this);
return data.getBuf().position();
}
/**
* This is an approximation of serialized size. We just set it to 4096 as a definition of a medium document.
* @return Approximate size of document (4096)
*/
public final int getApproxSize() { return 4096; }
public void serialize(OutputStream out) throws SerializationException {
DocumentSerializer writer = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
writer.write(this);
GrowableByteBuffer data = writer.getBuf();
byte[] array;
if (data.hasArray()) {
array = data.array();
} else {
array = new byte[data.position()];
int endPos = data.position();
data.position(0);
data.get(array);
data.position(endPos);
}
try {
out.write(array, 0, data.position());
} catch (IOException ioe) {
throw new SerializationException(ioe);
}
}
public static Document createDocument(DocumentReader buffer) {
return new Document(buffer);
}
@Override
public Field getField(String fieldName) {
Field field = header.getField(fieldName);
if (field == null) {
field = body.getField(fieldName);
}
if (field == null) {
for(DocumentType parent : getDataType().getInheritedTypes()) {
field = parent.getField(fieldName);
if (field != null) {
break;
}
}
}
return field;
}
@Override
public FieldValue getFieldValue(Field field) {
if (field.isHeader()) {
return header.getFieldValue(field);
} else {
return body.getFieldValue(field);
}
}
@Override
protected void doSetFieldValue(Field field, FieldValue value) {
if (field.isHeader()) {
header.setFieldValue(field, value);
} else {
body.setFieldValue(field, value);
}
}
@Override
public FieldValue removeFieldValue(Field field) {
if (field.isHeader()) {
return header.removeFieldValue(field);
} else {
return body.removeFieldValue(field);
}
}
@Override
public void clear() {
header.clear();
body.clear();
}
@Override
public Iterator<Map.Entry<Field, FieldValue>> iterator() {
return new Iterator<Map.Entry<Field, FieldValue>>() {
private Iterator<Map.Entry<Field, FieldValue>> headerIt = header.iterator();
private Iterator<Map.Entry<Field, FieldValue>> bodyIt = body.iterator();
public boolean hasNext() {
if (headerIt != null) {
if (headerIt.hasNext()) {
return true;
} else {
headerIt = null;
}
}
return bodyIt.hasNext();
}
public Map.Entry<Field, FieldValue> next() {
return (headerIt == null ? bodyIt.next() : headerIt.next());
}
public void remove() {
if (headerIt == null) {
bodyIt.remove();
} else {
headerIt.remove();
}
}
};
}
public String toString() {
return "document '" + String.valueOf(docId) + "' of type '" + getDataType().getName() + "'";
}
public String toXML(String indent) {
XmlStream xml = new XmlStream();
xml.setIndent(indent);
xml.beginTag("document");
printXml(xml);
xml.endTag();
return xml.toString();
}
/**
* Get XML representation of the document root and its children, contained
* within a <document></document> tag.
* @return XML representation of document
*/
public String toXml() {
return toXML(" ");
}
public void printXml(XmlStream xml) {
XmlSerializationHelper.printDocumentXml(this, xml);
}
/**
* Get JSON representation of the document root and its children contained in a JSON object
* @return JSON representation of document
*/
/** Returns true if the argument is a document which has the same set of values */
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (!(o instanceof Document)) return false;
Document other = (Document) o;
return (super.equals(o) && docId.equals(other.docId) &&
header.equals(other.header) && body.equals(other.body));
}
@Override
public int hashCode() {
return 31 * super.hashCode() + (docId != null ? docId.hashCode() : 0);
}
/**
* Returns the last modified time of this Document, when stored in persistent storage. This is typically set by the
* library that retrieves the Document from persistent storage.
*
* This variable doesn't really belong in document. It is used when retrieving docblocks of documents to be able to
* see when documents was last modified in VDS, without having to add modified times separate in the API.
*
* NOTE: This is a transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @return the last modified time of this Document (in milliseconds), or null if unset
*/
public Long getLastModified() {
return lastModified;
}
/**
* Sets the last modified time of this Document. This is typically set by the library that retrieves the
* Document from persistent storage, and should not be set by arbitrary clients. NOTE: This is a
* transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @param lastModified the last modified time of this Document (in milliseconds)
*/
public void setLastModified(Long lastModified) {
this.lastModified = lastModified;
}
public void onSerialize(Serializer data) throws SerializationException {
serialize((DocumentWriter)data);
}
@SuppressWarnings("deprecation")
public void serializeHeader(Serializer data) throws SerializationException {
if (data instanceof DocumentWriter) {
if (data instanceof com.yahoo.document.serialization.VespaDocumentSerializer42) {
((com.yahoo.document.serialization.VespaDocumentSerializer42)data).setHeaderOnly(true);
}
serialize((DocumentWriter)data);
} else if (data instanceof BufferSerializer) {
serialize(DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf(), true));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer(), true);
serialize(fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
public void serializeBody(Serializer data) throws SerializationException {
if (getBody().getFieldCount() > 0) {
if (data instanceof FieldWriter) {
getBody().serialize(new Field("body", getBody().getDataType()), (FieldWriter) data);
} else if (data instanceof BufferSerializer) {
getBody().serialize(new Field("body", getBody().getDataType()), DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf()));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer());
getBody().serialize(new Field("body", getBody().getDataType()), fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
}
@Override
public DocumentType getDataType() {
return (DocumentType)super.getDataType();
}
@Override
public int getFieldCount() {
return header.getFieldCount() + body.getFieldCount();
}
public void serialize(DocumentWriter writer) {
writer.write(this);
}
public void deserialize(DocumentReader reader) {
reader.read(this);
}
@Override
public void serialize(Field field, FieldWriter writer) {
writer.write(field, this);
}
/* (non-Javadoc)
* @see com.yahoo.document.datatypes.FieldValue
*/
@Override
public void deserialize(Field field, FieldReader reader) {
reader.read(field, this);
}
@Override
public int compareTo(FieldValue fieldValue) {
int comp = super.compareTo(fieldValue);
if (comp != 0) {
return comp;
}
Document otherValue = (Document) fieldValue;
comp = getId().compareTo(otherValue.getId());
if (comp != 0) {
return comp;
}
comp = header.compareTo(otherValue.header);
if (comp != 0) {
return comp;
}
comp = body.compareTo(otherValue.body);
return comp;
}
} |
Good point; fixed in next commit. If only that interface allowed for a Charset object instead of a String lookup with an impossible exception.. | public String toJson() {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
JsonWriter writer = new JsonWriter(buffer);
writer.write(this);
return buffer.toString();
} | return buffer.toString(); | public String toJson() {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
JsonWriter writer = new JsonWriter(buffer);
writer.write(this);
try {
return buffer.toString("UTF-8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
} | class Document extends StructuredFieldValue {
public static final int classId = registerClass(Ids.document + 3, Document.class);
public static final short SERIALIZED_VERSION = 8;
private DocumentId docId;
private Struct header;
private Struct body;
private Long lastModified = null;
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, String id) {
this(docType, new DocumentId(id));
}
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, DocumentId id) {
super(docType);
setNewType(docType);
internalSetId(id, docType);
}
/**
* Creates a document that is a shallow copy of another.
*
* @param doc The document to copy.
*/
public Document(Document doc) {
this(doc.getDataType(), doc.getId());
header = doc.header;
body = doc.body;
lastModified = doc.lastModified;
}
/**
*
* @param reader The deserializer to use for creating this document
*/
public Document(DocumentReader reader) {
super(null);
reader.read(this);
}
public DocumentId getId() { return docId; }
public void setId(DocumentId id) { internalSetId(id, getDataType()); }
/**
 * Sets the document id after verifying that any doc type embedded in the id matches
 * the given document type. A null id is accepted and simply clears the current id.
 *
 * @throws IllegalArgumentException if the id names a different document type
 */
private void internalSetId(DocumentId id, DocumentType docType) {
    if (id != null && id.hasDocType() && docType != null && !id.getDocType().equals(docType.getName())) {
        // Report the type actually compared against (the docType argument); the previous
        // message read getDataType(), which is not necessarily the value being validated.
        throw new IllegalArgumentException("Trying to set a document id (type " + id.getDocType() +
                                           ") that doesn't match the document type (" + docType.getName() + ").");
    }
    docId = id;
}
public Struct getHeader() { return header; }
public Struct getBody() { return body; }
@Override
public void assign(Object o) {
throw new IllegalArgumentException("Assign not implemented for " + getClass() + " objects");
}
/** Returns a deep copy of this document: id, header and body are all cloned. */
@Override
public Document clone() {
    Document doc = (Document) super.clone();
    // docId may legitimately be null (internalSetId accepts null, e.g. mid-deserialization),
    // so guard the clone call instead of NPE-ing.
    doc.docId = (docId != null) ? docId.clone() : null;
    doc.header = header.clone();
    doc.body = body.clone();
    return doc;
}
private void setNewType(DocumentType type) {
header = type.getHeaderType().createFieldValue();
body = type.getBodyType().createFieldValue();
}
public void setDataType(DataType type) {
if (docId != null && docId.hasDocType() && !docId.getDocType().equals(type.getName())) {
throw new IllegalArgumentException("Trying to set a document type (" + type.getName() +
") that doesn't match the document id (" + docId + ").");
}
super.setDataType(type);
setNewType((DocumentType)type);
}
public int getSerializedSize() throws SerializationException {
DocumentSerializer data = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
data.write(this);
return data.getBuf().position();
}
/**
* This is an approximation of serialized size. We just set it to 4096 as a definition of a medium document.
* @return Approximate size of document (4096)
*/
public final int getApproxSize() { return 4096; }
public void serialize(OutputStream out) throws SerializationException {
DocumentSerializer writer = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
writer.write(this);
GrowableByteBuffer data = writer.getBuf();
byte[] array;
if (data.hasArray()) {
array = data.array();
} else {
array = new byte[data.position()];
int endPos = data.position();
data.position(0);
data.get(array);
data.position(endPos);
}
try {
out.write(array, 0, data.position());
} catch (IOException ioe) {
throw new SerializationException(ioe);
}
}
public static Document createDocument(DocumentReader buffer) {
return new Document(buffer);
}
@Override
public Field getField(String fieldName) {
Field field = header.getField(fieldName);
if (field == null) {
field = body.getField(fieldName);
}
if (field == null) {
for(DocumentType parent : getDataType().getInheritedTypes()) {
field = parent.getField(fieldName);
if (field != null) {
break;
}
}
}
return field;
}
@Override
public FieldValue getFieldValue(Field field) {
if (field.isHeader()) {
return header.getFieldValue(field);
} else {
return body.getFieldValue(field);
}
}
@Override
protected void doSetFieldValue(Field field, FieldValue value) {
if (field.isHeader()) {
header.setFieldValue(field, value);
} else {
body.setFieldValue(field, value);
}
}
@Override
public FieldValue removeFieldValue(Field field) {
if (field.isHeader()) {
return header.removeFieldValue(field);
} else {
return body.removeFieldValue(field);
}
}
@Override
public void clear() {
header.clear();
body.clear();
}
@Override
public Iterator<Map.Entry<Field, FieldValue>> iterator() {
return new Iterator<Map.Entry<Field, FieldValue>>() {
private Iterator<Map.Entry<Field, FieldValue>> headerIt = header.iterator();
private Iterator<Map.Entry<Field, FieldValue>> bodyIt = body.iterator();
public boolean hasNext() {
if (headerIt != null) {
if (headerIt.hasNext()) {
return true;
} else {
headerIt = null;
}
}
return bodyIt.hasNext();
}
public Map.Entry<Field, FieldValue> next() {
return (headerIt == null ? bodyIt.next() : headerIt.next());
}
public void remove() {
if (headerIt == null) {
bodyIt.remove();
} else {
headerIt.remove();
}
}
};
}
public String toString() {
return "document '" + String.valueOf(docId) + "' of type '" + getDataType().getName() + "'";
}
public String toXML(String indent) {
XmlStream xml = new XmlStream();
xml.setIndent(indent);
xml.beginTag("document");
printXml(xml);
xml.endTag();
return xml.toString();
}
/**
* Get XML representation of the document root and its children, contained
* within a <document></document> tag.
* @return XML representation of document
*/
public String toXml() {
return toXML(" ");
}
public void printXml(XmlStream xml) {
XmlSerializationHelper.printDocumentXml(this, xml);
}
/** Returns true if the argument is a document with the same id and the same set of values. */
@Override
public boolean equals(Object o) {
    if (o == this) return true;
    if (!(o instanceof Document)) return false;
    Document other = (Document) o;
    // docId is compared null-safely, consistent with hashCode() which permits a null id.
    return super.equals(o)
           && (docId == null ? other.docId == null : docId.equals(other.docId))
           && header.equals(other.header)
           && body.equals(other.body);
}
// Combines the struct-level hash with the id hash; docId may be null and is guarded here.
// NOTE(review): equals() dereferences docId without a null check, which is inconsistent
// with this guard — confirm and align.
@Override
public int hashCode() {
return 31 * super.hashCode() + (docId != null ? docId.hashCode() : 0);
}
/**
* Returns the last modified time of this Document, when stored in persistent storage. This is typically set by the
* library that retrieves the Document from persistent storage.
*
* This variable doesn't really belong in document. It is used when retrieving docblocks of documents to be able to
* see when documents was last modified in VDS, without having to add modified times separate in the API.
*
* NOTE: This is a transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @return the last modified time of this Document (in milliseconds), or null if unset
*/
public Long getLastModified() {
return lastModified;
}
/**
* Sets the last modified time of this Document. This is typically set by the library that retrieves the
* Document from persistent storage, and should not be set by arbitrary clients. NOTE: This is a
* transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @param lastModified the last modified time of this Document (in milliseconds)
*/
public void setLastModified(Long lastModified) {
this.lastModified = lastModified;
}
public void onSerialize(Serializer data) throws SerializationException {
serialize((DocumentWriter)data);
}
@SuppressWarnings("deprecation")
public void serializeHeader(Serializer data) throws SerializationException {
if (data instanceof DocumentWriter) {
if (data instanceof com.yahoo.document.serialization.VespaDocumentSerializer42) {
((com.yahoo.document.serialization.VespaDocumentSerializer42)data).setHeaderOnly(true);
}
serialize((DocumentWriter)data);
} else if (data instanceof BufferSerializer) {
serialize(DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf(), true));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer(), true);
serialize(fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
public void serializeBody(Serializer data) throws SerializationException {
if (getBody().getFieldCount() > 0) {
if (data instanceof FieldWriter) {
getBody().serialize(new Field("body", getBody().getDataType()), (FieldWriter) data);
} else if (data instanceof BufferSerializer) {
getBody().serialize(new Field("body", getBody().getDataType()), DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf()));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer());
getBody().serialize(new Field("body", getBody().getDataType()), fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
}
@Override
public DocumentType getDataType() {
return (DocumentType)super.getDataType();
}
@Override
public int getFieldCount() {
return header.getFieldCount() + body.getFieldCount();
}
public void serialize(DocumentWriter writer) {
writer.write(this);
}
public void deserialize(DocumentReader reader) {
reader.read(this);
}
@Override
public void serialize(Field field, FieldWriter writer) {
writer.write(field, this);
}
/* (non-Javadoc)
* @see com.yahoo.document.datatypes.FieldValue
*/
@Override
public void deserialize(Field field, FieldReader reader) {
reader.read(field, this);
}
@Override
public int compareTo(FieldValue fieldValue) {
int comp = super.compareTo(fieldValue);
if (comp != 0) {
return comp;
}
Document otherValue = (Document) fieldValue;
comp = getId().compareTo(otherValue.getId());
if (comp != 0) {
return comp;
}
comp = header.compareTo(otherValue.header);
if (comp != 0) {
return comp;
}
comp = body.compareTo(otherValue.body);
return comp;
}
} | class Document extends StructuredFieldValue {
public static final int classId = registerClass(Ids.document + 3, Document.class);
public static final short SERIALIZED_VERSION = 8;
private DocumentId docId;
private Struct header;
private Struct body;
private Long lastModified = null;
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, String id) {
this(docType, new DocumentId(id));
}
/**
* Create a document with the given document type and identifier.
* @param docType DocumentType to use for creation
* @param id The id for this document
*/
public Document(DocumentType docType, DocumentId id) {
super(docType);
setNewType(docType);
internalSetId(id, docType);
}
/**
* Creates a document that is a shallow copy of another.
*
* @param doc The document to copy.
*/
public Document(Document doc) {
this(doc.getDataType(), doc.getId());
header = doc.header;
body = doc.body;
lastModified = doc.lastModified;
}
/**
*
* @param reader The deserializer to use for creating this document
*/
public Document(DocumentReader reader) {
super(null);
reader.read(this);
}
public DocumentId getId() { return docId; }
public void setId(DocumentId id) { internalSetId(id, getDataType()); }
/**
 * Sets the document id after verifying that any doc type embedded in the id matches
 * the given document type. A null id is accepted and simply clears the current id.
 *
 * @throws IllegalArgumentException if the id names a different document type
 */
private void internalSetId(DocumentId id, DocumentType docType) {
    if (id != null && id.hasDocType() && docType != null && !id.getDocType().equals(docType.getName())) {
        // Report the type actually compared against (the docType argument); the previous
        // message read getDataType(), which is not necessarily the value being validated.
        throw new IllegalArgumentException("Trying to set a document id (type " + id.getDocType() +
                                           ") that doesn't match the document type (" + docType.getName() + ").");
    }
    docId = id;
}
public Struct getHeader() { return header; }
public Struct getBody() { return body; }
@Override
public void assign(Object o) {
throw new IllegalArgumentException("Assign not implemented for " + getClass() + " objects");
}
/** Returns a deep copy of this document: id, header and body are all cloned. */
@Override
public Document clone() {
    Document doc = (Document) super.clone();
    // docId may legitimately be null (internalSetId accepts null, e.g. mid-deserialization),
    // so guard the clone call instead of NPE-ing.
    doc.docId = (docId != null) ? docId.clone() : null;
    doc.header = header.clone();
    doc.body = body.clone();
    return doc;
}
private void setNewType(DocumentType type) {
header = type.getHeaderType().createFieldValue();
body = type.getBodyType().createFieldValue();
}
public void setDataType(DataType type) {
if (docId != null && docId.hasDocType() && !docId.getDocType().equals(type.getName())) {
throw new IllegalArgumentException("Trying to set a document type (" + type.getName() +
") that doesn't match the document id (" + docId + ").");
}
super.setDataType(type);
setNewType((DocumentType)type);
}
public int getSerializedSize() throws SerializationException {
DocumentSerializer data = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
data.write(this);
return data.getBuf().position();
}
/**
* This is an approximation of serialized size. We just set it to 4096 as a definition of a medium document.
* @return Approximate size of document (4096)
*/
public final int getApproxSize() { return 4096; }
public void serialize(OutputStream out) throws SerializationException {
DocumentSerializer writer = DocumentSerializerFactory.create42(new GrowableByteBuffer(64 * 1024, 2.0f));
writer.write(this);
GrowableByteBuffer data = writer.getBuf();
byte[] array;
if (data.hasArray()) {
array = data.array();
} else {
array = new byte[data.position()];
int endPos = data.position();
data.position(0);
data.get(array);
data.position(endPos);
}
try {
out.write(array, 0, data.position());
} catch (IOException ioe) {
throw new SerializationException(ioe);
}
}
public static Document createDocument(DocumentReader buffer) {
return new Document(buffer);
}
@Override
public Field getField(String fieldName) {
Field field = header.getField(fieldName);
if (field == null) {
field = body.getField(fieldName);
}
if (field == null) {
for(DocumentType parent : getDataType().getInheritedTypes()) {
field = parent.getField(fieldName);
if (field != null) {
break;
}
}
}
return field;
}
@Override
public FieldValue getFieldValue(Field field) {
if (field.isHeader()) {
return header.getFieldValue(field);
} else {
return body.getFieldValue(field);
}
}
@Override
protected void doSetFieldValue(Field field, FieldValue value) {
if (field.isHeader()) {
header.setFieldValue(field, value);
} else {
body.setFieldValue(field, value);
}
}
@Override
public FieldValue removeFieldValue(Field field) {
if (field.isHeader()) {
return header.removeFieldValue(field);
} else {
return body.removeFieldValue(field);
}
}
@Override
public void clear() {
header.clear();
body.clear();
}
@Override
public Iterator<Map.Entry<Field, FieldValue>> iterator() {
return new Iterator<Map.Entry<Field, FieldValue>>() {
private Iterator<Map.Entry<Field, FieldValue>> headerIt = header.iterator();
private Iterator<Map.Entry<Field, FieldValue>> bodyIt = body.iterator();
public boolean hasNext() {
if (headerIt != null) {
if (headerIt.hasNext()) {
return true;
} else {
headerIt = null;
}
}
return bodyIt.hasNext();
}
public Map.Entry<Field, FieldValue> next() {
return (headerIt == null ? bodyIt.next() : headerIt.next());
}
public void remove() {
if (headerIt == null) {
bodyIt.remove();
} else {
headerIt.remove();
}
}
};
}
public String toString() {
return "document '" + String.valueOf(docId) + "' of type '" + getDataType().getName() + "'";
}
public String toXML(String indent) {
XmlStream xml = new XmlStream();
xml.setIndent(indent);
xml.beginTag("document");
printXml(xml);
xml.endTag();
return xml.toString();
}
/**
* Get XML representation of the document root and its children, contained
* within a <document></document> tag.
* @return XML representation of document
*/
public String toXml() {
return toXML(" ");
}
public void printXml(XmlStream xml) {
XmlSerializationHelper.printDocumentXml(this, xml);
}
/** Returns true if the argument is a document with the same id and the same set of values. */
@Override
public boolean equals(Object o) {
    if (o == this) return true;
    if (!(o instanceof Document)) return false;
    Document other = (Document) o;
    // docId is compared null-safely, consistent with hashCode() which permits a null id.
    return super.equals(o)
           && (docId == null ? other.docId == null : docId.equals(other.docId))
           && header.equals(other.header)
           && body.equals(other.body);
}
// Combines the struct-level hash with the id hash; docId may be null and is guarded here.
// NOTE(review): equals() dereferences docId without a null check, which is inconsistent
// with this guard — confirm and align.
@Override
public int hashCode() {
return 31 * super.hashCode() + (docId != null ? docId.hashCode() : 0);
}
/**
* Returns the last modified time of this Document, when stored in persistent storage. This is typically set by the
* library that retrieves the Document from persistent storage.
*
* This variable doesn't really belong in document. It is used when retrieving docblocks of documents to be able to
* see when documents was last modified in VDS, without having to add modified times separate in the API.
*
* NOTE: This is a transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @return the last modified time of this Document (in milliseconds), or null if unset
*/
public Long getLastModified() {
return lastModified;
}
/**
* Sets the last modified time of this Document. This is typically set by the library that retrieves the
* Document from persistent storage, and should not be set by arbitrary clients. NOTE: This is a
* transient field, and will not be serialized with a Document (will be null after deserialization).
*
* @param lastModified the last modified time of this Document (in milliseconds)
*/
public void setLastModified(Long lastModified) {
this.lastModified = lastModified;
}
public void onSerialize(Serializer data) throws SerializationException {
serialize((DocumentWriter)data);
}
@SuppressWarnings("deprecation")
public void serializeHeader(Serializer data) throws SerializationException {
if (data instanceof DocumentWriter) {
if (data instanceof com.yahoo.document.serialization.VespaDocumentSerializer42) {
((com.yahoo.document.serialization.VespaDocumentSerializer42)data).setHeaderOnly(true);
}
serialize((DocumentWriter)data);
} else if (data instanceof BufferSerializer) {
serialize(DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf(), true));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer(), true);
serialize(fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
public void serializeBody(Serializer data) throws SerializationException {
if (getBody().getFieldCount() > 0) {
if (data instanceof FieldWriter) {
getBody().serialize(new Field("body", getBody().getDataType()), (FieldWriter) data);
} else if (data instanceof BufferSerializer) {
getBody().serialize(new Field("body", getBody().getDataType()), DocumentSerializerFactory.create42(((BufferSerializer) data).getBuf()));
} else {
DocumentSerializer fw = DocumentSerializerFactory.create42(new GrowableByteBuffer());
getBody().serialize(new Field("body", getBody().getDataType()), fw);
data.put(null, fw.getBuf().getByteBuffer());
}
}
}
@Override
public DocumentType getDataType() {
return (DocumentType)super.getDataType();
}
@Override
public int getFieldCount() {
return header.getFieldCount() + body.getFieldCount();
}
public void serialize(DocumentWriter writer) {
writer.write(this);
}
public void deserialize(DocumentReader reader) {
reader.read(this);
}
@Override
public void serialize(Field field, FieldWriter writer) {
writer.write(field, this);
}
/* (non-Javadoc)
* @see com.yahoo.document.datatypes.FieldValue
*/
@Override
public void deserialize(Field field, FieldReader reader) {
reader.read(field, this);
}
@Override
public int compareTo(FieldValue fieldValue) {
int comp = super.compareTo(fieldValue);
if (comp != 0) {
return comp;
}
Document otherValue = (Document) fieldValue;
comp = getId().compareTo(otherValue.getId());
if (comp != 0) {
return comp;
}
comp = header.compareTo(otherValue.header);
if (comp != 0) {
return comp;
}
comp = body.compareTo(otherValue.body);
return comp;
}
} |
Please move the `http.status.xxx.rate` metrics here as well, as they are related. I put them in getQrserverMetrics by mistake. | private static Set<Metric> getContainerMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("handled.requests.count", "handled.requests"));
metrics.add(new Metric("handled.latency.average"));
metrics.add(new Metric("handled.latency.max"));
metrics.add(new Metric("serverRejectedRequests.rate"));
metrics.add(new Metric("serverRejectedRequests.count"));
metrics.add(new Metric("serverThreadPoolSize.average"));
metrics.add(new Metric("serverThreadPoolSize.min"));
metrics.add(new Metric("serverThreadPoolSize.max"));
metrics.add(new Metric("serverThreadPoolSize.rate"));
metrics.add(new Metric("serverThreadPoolSize.count"));
metrics.add(new Metric("serverThreadPoolSize.last"));
metrics.add(new Metric("serverActiveThreads.average"));
metrics.add(new Metric("serverActiveThreads.min"));
metrics.add(new Metric("serverActiveThreads.max"));
metrics.add(new Metric("serverActiveThreads.rate"));
metrics.add(new Metric("serverActiveThreads.count"));
metrics.add(new Metric("serverActiveThreads.last"));
metrics.add(new Metric("httpapi_latency.average"));
metrics.add(new Metric("httpapi_pending.average"));
metrics.add(new Metric("httpapi_num_operations.rate"));
metrics.add(new Metric("httpapi_num_updates.rate"));
metrics.add(new Metric("httpapi_num_removes.rate"));
metrics.add(new Metric("httpapi_num_puts.rate"));
metrics.add(new Metric("httpapi_succeeded.rate"));
metrics.add(new Metric("httpapi_failed.rate"));
metrics.add(new Metric("mem.heap.total.average"));
metrics.add(new Metric("mem.heap.free.average"));
metrics.add(new Metric("mem.heap.used.average"));
metrics.add(new Metric("jdisc.memory_mappings.max"));
metrics.add(new Metric("jdisc.open_file_descriptors.max"));
metrics.add(new Metric("jdisc.gc.count.average"));
metrics.add(new Metric("jdisc.gc.count.max"));
metrics.add(new Metric("jdisc.gc.count.last"));
metrics.add(new Metric("jdisc.gc.ms.average"));
metrics.add(new Metric("jdisc.gc.ms.max"));
metrics.add(new Metric("jdisc.gc.ms.last"));
metrics.add(new Metric("jdisc.deactivated_containers.total.last"));
metrics.add(new Metric("jdisc.deactivated_containers.with_retained_refs.last"));
metrics.add(new Metric("athenz-tenant-cert.expiry.seconds.last", "athenz-tenant-cert.expiry.seconds"));
metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate"));
return metrics;
} | metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate")); | private static Set<Metric> getContainerMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("handled.requests.count", "handled.requests"));
metrics.add(new Metric("handled.latency.average"));
metrics.add(new Metric("handled.latency.max"));
metrics.add(new Metric("serverRejectedRequests.rate"));
metrics.add(new Metric("serverRejectedRequests.count"));
metrics.add(new Metric("serverThreadPoolSize.average"));
metrics.add(new Metric("serverThreadPoolSize.min"));
metrics.add(new Metric("serverThreadPoolSize.max"));
metrics.add(new Metric("serverThreadPoolSize.rate"));
metrics.add(new Metric("serverThreadPoolSize.count"));
metrics.add(new Metric("serverThreadPoolSize.last"));
metrics.add(new Metric("serverActiveThreads.average"));
metrics.add(new Metric("serverActiveThreads.min"));
metrics.add(new Metric("serverActiveThreads.max"));
metrics.add(new Metric("serverActiveThreads.rate"));
metrics.add(new Metric("serverActiveThreads.count"));
metrics.add(new Metric("serverActiveThreads.last"));
metrics.add(new Metric("httpapi_latency.average"));
metrics.add(new Metric("httpapi_pending.average"));
metrics.add(new Metric("httpapi_num_operations.rate"));
metrics.add(new Metric("httpapi_num_updates.rate"));
metrics.add(new Metric("httpapi_num_removes.rate"));
metrics.add(new Metric("httpapi_num_puts.rate"));
metrics.add(new Metric("httpapi_succeeded.rate"));
metrics.add(new Metric("httpapi_failed.rate"));
metrics.add(new Metric("mem.heap.total.average"));
metrics.add(new Metric("mem.heap.free.average"));
metrics.add(new Metric("mem.heap.used.average"));
metrics.add(new Metric("jdisc.memory_mappings.max"));
metrics.add(new Metric("jdisc.open_file_descriptors.max"));
metrics.add(new Metric("jdisc.gc.count.average"));
metrics.add(new Metric("jdisc.gc.count.max"));
metrics.add(new Metric("jdisc.gc.count.last"));
metrics.add(new Metric("jdisc.gc.ms.average"));
metrics.add(new Metric("jdisc.gc.ms.max"));
metrics.add(new Metric("jdisc.gc.ms.last"));
metrics.add(new Metric("jdisc.deactivated_containers.total.last"));
metrics.add(new Metric("jdisc.deactivated_containers.with_retained_refs.last"));
metrics.add(new Metric("athenz-tenant-cert.expiry.seconds.last", "athenz-tenant-cert.expiry.seconds"));
metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate"));
metrics.add(new Metric("http.status.1xx.rate"));
metrics.add(new Metric("http.status.2xx.rate"));
metrics.add(new Metric("http.status.3xx.rate"));
metrics.add(new Metric("http.status.4xx.rate"));
metrics.add(new Metric("http.status.5xx.rate"));
return metrics;
} | class VespaMetricSet {
// The full "vespa" metric set: the union of all service metric groups below,
// layered on top of the default Vespa metric set.
public static final MetricSet vespaMetricSet = new MetricSet("vespa",
getVespaMetrics(),
singleton(defaultVespaMetricSet));
// Aggregates every per-service metric group into one unmodifiable set.
private static Set<Metric> getVespaMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.addAll(getSearchNodeMetrics());
metrics.addAll(getStorageMetrics());
metrics.addAll(getDocprocMetrics());
metrics.addAll(getClusterControllerMetrics());
metrics.addAll(getQrserverMetrics());
metrics.addAll(getContainerMetrics());
metrics.addAll(getConfigServerMetrics());
metrics.addAll(getSentinelMetrics());
metrics.addAll(getOtherMetrics());
return Collections.unmodifiableSet(metrics);
}
// Metrics from the config sentinel (service supervision: restarts and uptime).
private static Set<Metric> getSentinelMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("sentinel.restarts.count"));
metrics.add(new Metric("sentinel.totalRestarts.last"));
metrics.add(new Metric("sentinel.uptime.last", "sentinel.uptime"));
metrics.add(new Metric("sentinel.running.count"));
metrics.add(new Metric("sentinel.running.last"));
return metrics;
}
// Miscellaneous infrastructure metrics (slobrok, logd) that fit no other group.
private static Set<Metric> getOtherMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("slobrok.heartbeats.failed.count", "slobrok.heartbeats.failed"));
metrics.add(new Metric("logd.processed.lines.count", "logd.processed.lines"));
return metrics;
}
// Metrics from the config server (request rates, latency, cache and session state).
private static Set<Metric> getConfigServerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("configserver.requests.count", "configserver.requests"));
metrics.add(new Metric("configserver.failedRequests.count", "configserver.failedRequests"));
metrics.add(new Metric("configserver.latency.average", "configserver.latency"));
metrics.add(new Metric("configserver.cacheConfigElems.last", "configserver.cacheConfigElems"));
metrics.add(new Metric("configserver.cacheChecksumElems.last", "configserver.cacheChecksumElems"));
metrics.add(new Metric("configserver.hosts.last", "configserver.hosts"));
metrics.add(new Metric("configserver.delayedResponses.count", "configserver.delayedResponses"));
metrics.add(new Metric("configserver.sessionChangeErrors.count", "configserver.sessionChangeErrors"));
return metrics;
}
// Metrics from the cluster controller (content node state counts and mastership).
private static Set<Metric> getClusterControllerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("cluster-controller.down.count.last"));
metrics.add(new Metric("cluster-controller.initializing.count.last"));
metrics.add(new Metric("cluster-controller.maintenance.count.last"));
metrics.add(new Metric("cluster-controller.retired.count.last"));
metrics.add(new Metric("cluster-controller.stopping.count.last"));
metrics.add(new Metric("cluster-controller.up.count.last"));
metrics.add(new Metric("cluster-controller.cluster-state-change.count", "content.cluster-controller.cluster-state-change.count"));
metrics.add(new Metric("cluster-controller.is-master.last"));
metrics.add(new Metric("cluster-controller.node-event.count"));
return metrics;
}
// Metrics from document processing containers.
private static Set<Metric> getDocprocMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("documents_processed.rate", "documents_processed"));
return metrics;
}
// Metrics specific to query-serving containers (qrserver): query rates, latencies,
// result quality and per-category error rates.
private static Set<Metric> getQrserverMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("peak_qps.max", "peak_qps"));
metrics.add(new Metric("search_connections.average", "search_connections"));
metrics.add(new Metric("active_queries.average", "active_queries"));
metrics.add(new Metric("feed.latency.average"));
metrics.add(new Metric("queries.rate", "queries"));
metrics.add(new Metric("query_latency.average", "mean_query_latency"));
metrics.add(new Metric("query_latency.max", "max_query_latency"));
metrics.add(new Metric("query_latency.95percentile", "95p_query_latency"));
metrics.add(new Metric("query_latency.99percentile", "99p_query_latency"));
metrics.add(new Metric("failed_queries.rate", "failed_queries"));
metrics.add(new Metric("degraded_queries.rate", "degraded_queries"));
metrics.add(new Metric("hits_per_query.average", "hits_per_query"));
metrics.add(new Metric("documents_covered.count"));
metrics.add(new Metric("documents_total.count"));
metrics.add(new Metric("totalhits_per_query.average", "totalhits_per_query"));
metrics.add(new Metric("empty_results.rate", "empty_results"));
metrics.add(new Metric("requestsOverQuota.rate"));
metrics.add(new Metric("requestsOverQuota.count"));
metrics.add(new Metric("error.timeout.rate","error.timeout"));
metrics.add(new Metric("error.backends_oos.rate","error.backends_oos"));
metrics.add(new Metric("error.plugin_failure.rate","error.plugin_failure"));
metrics.add(new Metric("error.backend_communication_error.rate","error.backend_communication_error"));
metrics.add(new Metric("error.empty_document_summaries.rate","error.empty_document_summaries"));
metrics.add(new Metric("error.invalid_query_parameter.rate","error.invalid_query_parameter"));
metrics.add(new Metric("error.internal_server_error.rate", "error.internal_server_error"));
metrics.add(new Metric("error.misconfigured_server.rate","error.misconfigured_server"));
metrics.add(new Metric("error.invalid_query_transformation.rate","error.invalid_query_transformation"));
metrics.add(new Metric("error.result_with_errors.rate","error.result_with_errors"));
metrics.add(new Metric("error.unspecified.rate","error.unspecified"));
metrics.add(new Metric("error.unhandled_exception.rate","error.unhandled_exception"));
// NOTE(review): the http.status.*.rate metrics below are generic container metrics and
// were added here by mistake; they also belong in getContainerMetrics(). Left in place
// here for backward compatibility of the qrserver set — confirm before removing.
metrics.add(new Metric("http.status.1xx.rate"));
metrics.add(new Metric("http.status.2xx.rate"));
metrics.add(new Metric("http.status.3xx.rate"));
metrics.add(new Metric("http.status.4xx.rate"));
metrics.add(new Metric("http.status.5xx.rate"));
return metrics;
}
private static Set<Metric> getSearchNodeMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("proton.numstoreddocs.last", "documents_total"));
metrics.add(new Metric("proton.numindexeddocs.last", "documents_ready"));
metrics.add(new Metric("proton.numactivedocs.last", "documents_active"));
metrics.add(new Metric("proton.numremoveddocs.last", "documents_removed"));
metrics.add(new Metric("proton.docsinmemory.last", "documents_inmemory"));
metrics.add(new Metric("proton.diskusage.last", "diskusage"));
metrics.add(new Metric("proton.memoryusage.max", "content.proton.memoryusage.max"));
metrics.add(new Metric("proton.transport.query.count.rate", "query_requests"));
metrics.add(new Metric("proton.docsum.docs.rate", "document_requests"));
metrics.add(new Metric("proton.docsum.latency.average", "content.proton.transport.docsum.latency.average"));
metrics.add(new Metric("proton.transport.query.latency.average", "query_latency"));
metrics.add(new Metric("content.proton.documentdb.job.total.average"));
metrics.add(new Metric("content.proton.documentdb.job.attribute_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.memory_index_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.disk_index_fusion.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.bucket_move.average"));
metrics.add(new Metric("content.proton.documentdb.job.lid_space_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.removed_documents_prune.average"));
metrics.add(new Metric("content.proton.documentdb.threading_service.master.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.summary.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_inverter.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_writer.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.attribute_field_writer.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.resource_usage.disk.average"));
metrics.add(new Metric("content.proton.resource_usage.disk_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_mappings.max"));
metrics.add(new Metric("content.proton.resource_usage.open_file_descriptors.max"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.enum_store.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.multi_value.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.last"));
metrics.add(new Metric("content.proton.transactionlog.entries.average"));
metrics.add(new Metric("content.proton.transactionlog.disk_usage.average"));
metrics.add(new Metric("content.proton.transactionlog.replay_time.last"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.matching.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.average"));
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.average"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.average"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.average"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.rate"));
return metrics;
}
private static Set<Metric> getStorageMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("vds.datastored.alldisks.docs.average","docs"));
metrics.add(new Metric("vds.datastored.alldisks.bytes.average","bytes"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.average","visitorlifetime"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.average","visitorqueuewait"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate","put"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate","remove"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate","get"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate","update"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.average","diskqueuesize"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.average","diskqueuewait"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.average"));
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.average"));
metrics.add(new Metric("vds.visitor.allthreads.created.sum.rate","visit"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.splitbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.joinbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.setbucketstates.count.rate"));
metrics.add(new Metric("vds.filestor.spi.put.success.average"));
metrics.add(new Metric("vds.filestor.spi.remove.success.average"));
metrics.add(new Metric("vds.filestor.spi.update.success.average"));
metrics.add(new Metric("vds.filestor.spi.deleteBucket.success.average"));
metrics.add(new Metric("vds.filestor.spi.get.success.average"));
metrics.add(new Metric("vds.filestor.spi.iterate.success.average"));
metrics.add(new Metric("vds.filestor.spi.put.success.rate"));
metrics.add(new Metric("vds.filestor.spi.remove.success.rate"));
metrics.add(new Metric("vds.filestor.spi.update.success.rate"));
metrics.add(new Metric("vds.filestor.spi.deleteBucket.success.rate"));
metrics.add(new Metric("vds.filestor.spi.get.success.rate"));
metrics.add(new Metric("vds.filestor.spi.iterate.success.rate"));
metrics.add(new Metric("vds.idealstate.buckets_rechecking.average"));
metrics.add(new Metric("vds.idealstate.idealstate_diff.average"));
metrics.add(new Metric("vds.idealstate.buckets_toofewcopies.average"));
metrics.add(new Metric("vds.idealstate.buckets_toomanycopies.average"));
metrics.add(new Metric("vds.idealstate.buckets.average"));
metrics.add(new Metric("vds.idealstate.buckets_notrusted.average"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_ok.rate","deleteok"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_failed.rate","deletefailed"));
metrics.add(new Metric("vds.idealstate.delete_bucket.pending.average","deletepending"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_ok.rate","mergeok"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_failed.rate","mergefailed"));
metrics.add(new Metric("vds.idealstate.merge_bucket.pending.average","mergepending"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_ok.rate","splitok"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_failed.rate","splitfailed"));
metrics.add(new Metric("vds.idealstate.split_bucket.pending.average","splitpending"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_ok.rate","joinok"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_failed.rate","joinfailed"));
metrics.add(new Metric("vds.idealstate.join_bucket.pending.average","joinpending"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.average"));
metrics.add(new Metric("vds.distributor.puts.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.average"));
metrics.add(new Metric("vds.distributor.removes.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.average"));
metrics.add(new Metric("vds.distributor.updates.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.latency.average"));
metrics.add(new Metric("vds.distributor.removelocations.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.average"));
metrics.add(new Metric("vds.distributor.gets.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.average"));
metrics.add(new Metric("vds.distributor.visitor.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.docsstored.average"));
metrics.add(new Metric("vds.distributor.bytesstored.average"));
metrics.add(new Metric("vds.bouncer.clock_skew_aborts.count"));
return metrics;
}
} | class VespaMetricSet {
    // The "vespa" metric set: all metrics collected by getVespaMetrics(),
    // with defaultVespaMetricSet as its single child set.
    public static final MetricSet vespaMetricSet = new MetricSet("vespa",
            getVespaMetrics(),
            singleton(defaultVespaMetricSet));
private static Set<Metric> getVespaMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.addAll(getSearchNodeMetrics());
metrics.addAll(getStorageMetrics());
metrics.addAll(getDocprocMetrics());
metrics.addAll(getClusterControllerMetrics());
metrics.addAll(getQrserverMetrics());
metrics.addAll(getContainerMetrics());
metrics.addAll(getConfigServerMetrics());
metrics.addAll(getSentinelMetrics());
metrics.addAll(getOtherMetrics());
return Collections.unmodifiableSet(metrics);
}
private static Set<Metric> getSentinelMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("sentinel.restarts.count"));
metrics.add(new Metric("sentinel.totalRestarts.last"));
metrics.add(new Metric("sentinel.uptime.last", "sentinel.uptime"));
metrics.add(new Metric("sentinel.running.count"));
metrics.add(new Metric("sentinel.running.last"));
return metrics;
}
private static Set<Metric> getOtherMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("slobrok.heartbeats.failed.count", "slobrok.heartbeats.failed"));
metrics.add(new Metric("logd.processed.lines.count", "logd.processed.lines"));
return metrics;
}
private static Set<Metric> getConfigServerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("configserver.requests.count", "configserver.requests"));
metrics.add(new Metric("configserver.failedRequests.count", "configserver.failedRequests"));
metrics.add(new Metric("configserver.latency.average", "configserver.latency"));
metrics.add(new Metric("configserver.cacheConfigElems.last", "configserver.cacheConfigElems"));
metrics.add(new Metric("configserver.cacheChecksumElems.last", "configserver.cacheChecksumElems"));
metrics.add(new Metric("configserver.hosts.last", "configserver.hosts"));
metrics.add(new Metric("configserver.delayedResponses.count", "configserver.delayedResponses"));
metrics.add(new Metric("configserver.sessionChangeErrors.count", "configserver.sessionChangeErrors"));
return metrics;
}
private static Set<Metric> getClusterControllerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("cluster-controller.down.count.last"));
metrics.add(new Metric("cluster-controller.initializing.count.last"));
metrics.add(new Metric("cluster-controller.maintenance.count.last"));
metrics.add(new Metric("cluster-controller.retired.count.last"));
metrics.add(new Metric("cluster-controller.stopping.count.last"));
metrics.add(new Metric("cluster-controller.up.count.last"));
metrics.add(new Metric("cluster-controller.cluster-state-change.count", "content.cluster-controller.cluster-state-change.count"));
metrics.add(new Metric("cluster-controller.is-master.last"));
metrics.add(new Metric("cluster-controller.node-event.count"));
return metrics;
}
private static Set<Metric> getDocprocMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("documents_processed.rate", "documents_processed"));
return metrics;
}
private static Set<Metric> getQrserverMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("peak_qps.max", "peak_qps"));
metrics.add(new Metric("search_connections.average", "search_connections"));
metrics.add(new Metric("active_queries.average", "active_queries"));
metrics.add(new Metric("feed.latency.average"));
metrics.add(new Metric("queries.rate", "queries"));
metrics.add(new Metric("query_latency.average", "mean_query_latency"));
metrics.add(new Metric("query_latency.max", "max_query_latency"));
metrics.add(new Metric("query_latency.95percentile", "95p_query_latency"));
metrics.add(new Metric("query_latency.99percentile", "99p_query_latency"));
metrics.add(new Metric("failed_queries.rate", "failed_queries"));
metrics.add(new Metric("degraded_queries.rate", "degraded_queries"));
metrics.add(new Metric("hits_per_query.average", "hits_per_query"));
metrics.add(new Metric("documents_covered.count"));
metrics.add(new Metric("documents_total.count"));
metrics.add(new Metric("totalhits_per_query.average", "totalhits_per_query"));
metrics.add(new Metric("empty_results.rate", "empty_results"));
metrics.add(new Metric("requestsOverQuota.rate"));
metrics.add(new Metric("requestsOverQuota.count"));
metrics.add(new Metric("error.timeout.rate","error.timeout"));
metrics.add(new Metric("error.backends_oos.rate","error.backends_oos"));
metrics.add(new Metric("error.plugin_failure.rate","error.plugin_failure"));
metrics.add(new Metric("error.backend_communication_error.rate","error.backend_communication_error"));
metrics.add(new Metric("error.empty_document_summaries.rate","error.empty_document_summaries"));
metrics.add(new Metric("error.invalid_query_parameter.rate","error.invalid_query_parameter"));
metrics.add(new Metric("error.internal_server_error.rate", "error.internal_server_error"));
metrics.add(new Metric("error.misconfigured_server.rate","error.misconfigured_server"));
metrics.add(new Metric("error.invalid_query_transformation.rate","error.invalid_query_transformation"));
metrics.add(new Metric("error.result_with_errors.rate","error.result_with_errors"));
metrics.add(new Metric("error.unspecified.rate","error.unspecified"));
metrics.add(new Metric("error.unhandled_exception.rate","error.unhandled_exception"));
return metrics;
}
private static Set<Metric> getSearchNodeMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("proton.numstoreddocs.last", "documents_total"));
metrics.add(new Metric("proton.numindexeddocs.last", "documents_ready"));
metrics.add(new Metric("proton.numactivedocs.last", "documents_active"));
metrics.add(new Metric("proton.numremoveddocs.last", "documents_removed"));
metrics.add(new Metric("proton.docsinmemory.last", "documents_inmemory"));
metrics.add(new Metric("proton.diskusage.last", "diskusage"));
metrics.add(new Metric("proton.memoryusage.max", "content.proton.memoryusage.max"));
metrics.add(new Metric("proton.transport.query.count.rate", "query_requests"));
metrics.add(new Metric("proton.docsum.docs.rate", "document_requests"));
metrics.add(new Metric("proton.docsum.latency.average", "content.proton.transport.docsum.latency.average"));
metrics.add(new Metric("proton.transport.query.latency.average", "query_latency"));
metrics.add(new Metric("content.proton.documentdb.job.total.average"));
metrics.add(new Metric("content.proton.documentdb.job.attribute_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.memory_index_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.disk_index_fusion.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.bucket_move.average"));
metrics.add(new Metric("content.proton.documentdb.job.lid_space_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.removed_documents_prune.average"));
metrics.add(new Metric("content.proton.documentdb.threading_service.master.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.summary.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_inverter.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_writer.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.attribute_field_writer.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.resource_usage.disk.average"));
metrics.add(new Metric("content.proton.resource_usage.disk_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_mappings.max"));
metrics.add(new Metric("content.proton.resource_usage.open_file_descriptors.max"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.enum_store.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.multi_value.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.last"));
metrics.add(new Metric("content.proton.transactionlog.entries.average"));
metrics.add(new Metric("content.proton.transactionlog.disk_usage.average"));
metrics.add(new Metric("content.proton.transactionlog.replay_time.last"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.matching.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.average"));
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.average"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.average"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.average"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.rate"));
return metrics;
}
/**
 * Metrics from the storage layer: storage-node file storage ("vds.filestor"),
 * visiting, ideal-state maintenance and the distributors.
 * Where a second constructor argument is present it is the metric's output name.
 * NOTE(review): LinkedHashSet preserves insertion order — presumably consumers
 * rely on that ordering; confirm before reordering entries.
 */
private static Set<Metric> getStorageMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// Stored document volume (all disks combined).
metrics.add(new Metric("vds.datastored.alldisks.docs.average","docs"));
metrics.add(new Metric("vds.datastored.alldisks.bytes.average","bytes"));
// Visitor lifetime and queue wait.
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.average","visitorlifetime"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.average","visitorqueuewait"));
// File-storage operation rates and queueing.
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate","put"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate","remove"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate","get"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate","update"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.average","diskqueuesize"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.average","diskqueuewait"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.average"));
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.average"));
metrics.add(new Metric("vds.visitor.allthreads.created.sum.rate","visit"));
// File-storage operation latencies and bucket maintenance operations.
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.splitbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.joinbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.setbucketstates.count.rate"));
// vds.filestor.spi.* success metrics (average and rate) per operation type.
metrics.add(new Metric("vds.filestor.spi.put.success.average"));
metrics.add(new Metric("vds.filestor.spi.remove.success.average"));
metrics.add(new Metric("vds.filestor.spi.update.success.average"));
metrics.add(new Metric("vds.filestor.spi.deleteBucket.success.average"));
metrics.add(new Metric("vds.filestor.spi.get.success.average"));
metrics.add(new Metric("vds.filestor.spi.iterate.success.average"));
metrics.add(new Metric("vds.filestor.spi.put.success.rate"));
metrics.add(new Metric("vds.filestor.spi.remove.success.rate"));
metrics.add(new Metric("vds.filestor.spi.update.success.rate"));
metrics.add(new Metric("vds.filestor.spi.deleteBucket.success.rate"));
metrics.add(new Metric("vds.filestor.spi.get.success.rate"));
metrics.add(new Metric("vds.filestor.spi.iterate.success.rate"));
// Ideal-state convergence: bucket counts plus delete/merge/split/join
// operations, each with done_ok / done_failed / pending.
metrics.add(new Metric("vds.idealstate.buckets_rechecking.average"));
metrics.add(new Metric("vds.idealstate.idealstate_diff.average"));
metrics.add(new Metric("vds.idealstate.buckets_toofewcopies.average"));
metrics.add(new Metric("vds.idealstate.buckets_toomanycopies.average"));
metrics.add(new Metric("vds.idealstate.buckets.average"));
metrics.add(new Metric("vds.idealstate.buckets_notrusted.average"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_ok.rate","deleteok"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_failed.rate","deletefailed"));
metrics.add(new Metric("vds.idealstate.delete_bucket.pending.average","deletepending"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_ok.rate","mergeok"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_failed.rate","mergefailed"));
metrics.add(new Metric("vds.idealstate.merge_bucket.pending.average","mergepending"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_ok.rate","splitok"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_failed.rate","splitfailed"));
metrics.add(new Metric("vds.idealstate.split_bucket.pending.average","splitpending"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_ok.rate","joinok"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_failed.rate","joinfailed"));
metrics.add(new Metric("vds.idealstate.join_bucket.pending.average","joinpending"));
// Distributor operation latency, success and failure rates, plus stored volume.
metrics.add(new Metric("vds.distributor.puts.sum.latency.average"));
metrics.add(new Metric("vds.distributor.puts.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.average"));
metrics.add(new Metric("vds.distributor.removes.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.average"));
metrics.add(new Metric("vds.distributor.updates.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.latency.average"));
metrics.add(new Metric("vds.distributor.removelocations.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.average"));
metrics.add(new Metric("vds.distributor.gets.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.average"));
metrics.add(new Metric("vds.distributor.visitor.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.docsstored.average"));
metrics.add(new Metric("vds.distributor.bytesstored.average"));
// Requests aborted because of node clock skew.
metrics.add(new Metric("vds.bouncer.clock_skew_aborts.count"));
return metrics;
}
} |
Fixed - PTAL | private static Set<Metric> getContainerMetrics() {
// Container (jdisc) metrics: request handling, thread pools, the HTTP
// document API, JVM memory/GC and container lifecycle.
// NOTE(review): the stray "Fixed - PTAL | " prefix on the line above looks
// like a dataset/review artifact, not Java — confirm against the original file.
Set<Metric> metrics = new LinkedHashSet<>();
// Request handling and rejections.
metrics.add(new Metric("handled.requests.count", "handled.requests"));
metrics.add(new Metric("handled.latency.average"));
metrics.add(new Metric("handled.latency.max"));
metrics.add(new Metric("serverRejectedRequests.rate"));
metrics.add(new Metric("serverRejectedRequests.count"));
// Server thread pool size and active threads.
metrics.add(new Metric("serverThreadPoolSize.average"));
metrics.add(new Metric("serverThreadPoolSize.min"));
metrics.add(new Metric("serverThreadPoolSize.max"));
metrics.add(new Metric("serverThreadPoolSize.rate"));
metrics.add(new Metric("serverThreadPoolSize.count"));
metrics.add(new Metric("serverThreadPoolSize.last"));
metrics.add(new Metric("serverActiveThreads.average"));
metrics.add(new Metric("serverActiveThreads.min"));
metrics.add(new Metric("serverActiveThreads.max"));
metrics.add(new Metric("serverActiveThreads.rate"));
metrics.add(new Metric("serverActiveThreads.count"));
metrics.add(new Metric("serverActiveThreads.last"));
// HTTP document API operations.
metrics.add(new Metric("httpapi_latency.average"));
metrics.add(new Metric("httpapi_pending.average"));
metrics.add(new Metric("httpapi_num_operations.rate"));
metrics.add(new Metric("httpapi_num_updates.rate"));
metrics.add(new Metric("httpapi_num_removes.rate"));
metrics.add(new Metric("httpapi_num_puts.rate"));
metrics.add(new Metric("httpapi_succeeded.rate"));
metrics.add(new Metric("httpapi_failed.rate"));
// JVM heap, memory mappings, file descriptors and garbage collection.
metrics.add(new Metric("mem.heap.total.average"));
metrics.add(new Metric("mem.heap.free.average"));
metrics.add(new Metric("mem.heap.used.average"));
metrics.add(new Metric("jdisc.memory_mappings.max"));
metrics.add(new Metric("jdisc.open_file_descriptors.max"));
metrics.add(new Metric("jdisc.gc.count.average"));
metrics.add(new Metric("jdisc.gc.count.max"));
metrics.add(new Metric("jdisc.gc.count.last"));
metrics.add(new Metric("jdisc.gc.ms.average"));
metrics.add(new Metric("jdisc.gc.ms.max"));
metrics.add(new Metric("jdisc.gc.ms.last"));
// Container deactivation, certificate expiry and connection teardown.
metrics.add(new Metric("jdisc.deactivated_containers.total.last"));
metrics.add(new Metric("jdisc.deactivated_containers.with_retained_refs.last"));
metrics.add(new Metric("athenz-tenant-cert.expiry.seconds.last", "athenz-tenant-cert.expiry.seconds"));
metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate"));
return metrics;
} | metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate")); | private static Set<Metric> getContainerMetrics() {
// Container (jdisc) metrics — this copy additionally exports the
// http.status.{1xx..5xx} response-code rates at the end.
// NOTE(review): the "|"-separated fragments on the boundary lines look like
// dataset cell separators, not Java — confirm against the original file.
Set<Metric> metrics = new LinkedHashSet<>();
// Request handling and rejections.
metrics.add(new Metric("handled.requests.count", "handled.requests"));
metrics.add(new Metric("handled.latency.average"));
metrics.add(new Metric("handled.latency.max"));
metrics.add(new Metric("serverRejectedRequests.rate"));
metrics.add(new Metric("serverRejectedRequests.count"));
// Server thread pool size and active threads.
metrics.add(new Metric("serverThreadPoolSize.average"));
metrics.add(new Metric("serverThreadPoolSize.min"));
metrics.add(new Metric("serverThreadPoolSize.max"));
metrics.add(new Metric("serverThreadPoolSize.rate"));
metrics.add(new Metric("serverThreadPoolSize.count"));
metrics.add(new Metric("serverThreadPoolSize.last"));
metrics.add(new Metric("serverActiveThreads.average"));
metrics.add(new Metric("serverActiveThreads.min"));
metrics.add(new Metric("serverActiveThreads.max"));
metrics.add(new Metric("serverActiveThreads.rate"));
metrics.add(new Metric("serverActiveThreads.count"));
metrics.add(new Metric("serverActiveThreads.last"));
// HTTP document API operations.
metrics.add(new Metric("httpapi_latency.average"));
metrics.add(new Metric("httpapi_pending.average"));
metrics.add(new Metric("httpapi_num_operations.rate"));
metrics.add(new Metric("httpapi_num_updates.rate"));
metrics.add(new Metric("httpapi_num_removes.rate"));
metrics.add(new Metric("httpapi_num_puts.rate"));
metrics.add(new Metric("httpapi_succeeded.rate"));
metrics.add(new Metric("httpapi_failed.rate"));
// JVM heap, memory mappings, file descriptors and garbage collection.
metrics.add(new Metric("mem.heap.total.average"));
metrics.add(new Metric("mem.heap.free.average"));
metrics.add(new Metric("mem.heap.used.average"));
metrics.add(new Metric("jdisc.memory_mappings.max"));
metrics.add(new Metric("jdisc.open_file_descriptors.max"));
metrics.add(new Metric("jdisc.gc.count.average"));
metrics.add(new Metric("jdisc.gc.count.max"));
metrics.add(new Metric("jdisc.gc.count.last"));
metrics.add(new Metric("jdisc.gc.ms.average"));
metrics.add(new Metric("jdisc.gc.ms.max"));
metrics.add(new Metric("jdisc.gc.ms.last"));
// Container deactivation, certificate expiry and connection teardown.
metrics.add(new Metric("jdisc.deactivated_containers.total.last"));
metrics.add(new Metric("jdisc.deactivated_containers.with_retained_refs.last"));
metrics.add(new Metric("athenz-tenant-cert.expiry.seconds.last", "athenz-tenant-cert.expiry.seconds"));
metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate"));
// HTTP response status classes.
metrics.add(new Metric("http.status.1xx.rate"));
metrics.add(new Metric("http.status.2xx.rate"));
metrics.add(new Metric("http.status.3xx.rate"));
metrics.add(new Metric("http.status.4xx.rate"));
metrics.add(new Metric("http.status.5xx.rate"));
return metrics;
} | class VespaMetricSet {
// The public "vespa" metric set: all per-service metrics collected below,
// with the default Vespa metric set included as a child set.
public static final MetricSet vespaMetricSet = new MetricSet("vespa",
getVespaMetrics(),
singleton(defaultVespaMetricSet));
private static Set<Metric> getVespaMetrics() {
    // Collect every per-service metric group into a single insertion-ordered
    // set, then freeze it. Seeding the LinkedHashSet with the first group
    // keeps the original ordering: search node, storage, docproc, cluster
    // controller, qrserver, container, config server, sentinel, misc.
    Set<Metric> all = new LinkedHashSet<>(getSearchNodeMetrics());
    all.addAll(getStorageMetrics());
    all.addAll(getDocprocMetrics());
    all.addAll(getClusterControllerMetrics());
    all.addAll(getQrserverMetrics());
    all.addAll(getContainerMetrics());
    all.addAll(getConfigServerMetrics());
    all.addAll(getSentinelMetrics());
    all.addAll(getOtherMetrics());
    return Collections.unmodifiableSet(all);
}
private static Set<Metric> getSentinelMetrics() {
    // Sentinel (service starter) metrics. Each row is either {metric name} or
    // {metric name, output name}; iteration preserves the declared order.
    String[][] specs = {
            {"sentinel.restarts.count"},
            {"sentinel.totalRestarts.last"},
            {"sentinel.uptime.last", "sentinel.uptime"},
            {"sentinel.running.count"},
            {"sentinel.running.last"},
    };
    Set<Metric> metrics = new LinkedHashSet<>();
    for (String[] spec : specs)
        metrics.add(spec.length == 2 ? new Metric(spec[0], spec[1]) : new Metric(spec[0]));
    return metrics;
}
private static Set<Metric> getOtherMetrics() {
    // Miscellaneous infrastructure metrics (slobrok, logd). Each ".count"
    // metric is also published under its name without the ".count" suffix.
    Set<Metric> metrics = new LinkedHashSet<>();
    for (String base : new String[]{"slobrok.heartbeats.failed", "logd.processed.lines"})
        metrics.add(new Metric(base + ".count", base));
    return metrics;
}
private static Set<Metric> getConfigServerMetrics() {
    // Config server metrics. Every metric is exported both under its full name
    // and under the name with the trailing aggregation suffix
    // (".count"/".average"/".last") stripped, exactly as in the original
    // explicit (name, outputName) pairs.
    Set<Metric> metrics = new LinkedHashSet<>();
    for (String name : new String[]{
            "configserver.requests.count",
            "configserver.failedRequests.count",
            "configserver.latency.average",
            "configserver.cacheConfigElems.last",
            "configserver.cacheChecksumElems.last",
            "configserver.hosts.last",
            "configserver.delayedResponses.count",
            "configserver.sessionChangeErrors.count"}) {
        metrics.add(new Metric(name, name.substring(0, name.lastIndexOf('.'))));
    }
    return metrics;
}
private static Set<Metric> getClusterControllerMetrics() {
    Set<Metric> metrics = new LinkedHashSet<>();
    // Per-state node counts reported by the cluster controller, in the
    // original declaration order.
    for (String state : new String[]{"down", "initializing", "maintenance", "retired", "stopping", "up"})
        metrics.add(new Metric("cluster-controller." + state + ".count.last"));
    // State changes are re-published under a "content."-prefixed output name.
    metrics.add(new Metric("cluster-controller.cluster-state-change.count",
                           "content.cluster-controller.cluster-state-change.count"));
    metrics.add(new Metric("cluster-controller.is-master.last"));
    metrics.add(new Metric("cluster-controller.node-event.count"));
    return metrics;
}
private static Set<Metric> getDocprocMetrics() {
    // Document processing exposes a single throughput metric, published as
    // "documents_processed".
    return new LinkedHashSet<>(
            Collections.singleton(new Metric("documents_processed.rate", "documents_processed")));
}
/**
 * Metrics from the query-serving container (qrserver): query volume and
 * latency, result quality, per-error-class rates and HTTP status classes.
 * Second constructor argument, where present, is the output name.
 */
private static Set<Metric> getQrserverMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// Query load and connections.
metrics.add(new Metric("peak_qps.max", "peak_qps"));
metrics.add(new Metric("search_connections.average", "search_connections"));
metrics.add(new Metric("active_queries.average", "active_queries"));
metrics.add(new Metric("feed.latency.average"));
// Query latency distribution.
metrics.add(new Metric("queries.rate", "queries"));
metrics.add(new Metric("query_latency.average", "mean_query_latency"));
metrics.add(new Metric("query_latency.max", "max_query_latency"));
metrics.add(new Metric("query_latency.95percentile", "95p_query_latency"));
metrics.add(new Metric("query_latency.99percentile", "99p_query_latency"));
// Result quality and coverage.
metrics.add(new Metric("failed_queries.rate", "failed_queries"));
metrics.add(new Metric("degraded_queries.rate", "degraded_queries"));
metrics.add(new Metric("hits_per_query.average", "hits_per_query"));
metrics.add(new Metric("documents_covered.count"));
metrics.add(new Metric("documents_total.count"));
metrics.add(new Metric("totalhits_per_query.average", "totalhits_per_query"));
metrics.add(new Metric("empty_results.rate", "empty_results"));
metrics.add(new Metric("requestsOverQuota.rate"));
metrics.add(new Metric("requestsOverQuota.count"));
// Per-error-class rates, each also published without the ".rate" suffix.
metrics.add(new Metric("error.timeout.rate","error.timeout"));
metrics.add(new Metric("error.backends_oos.rate","error.backends_oos"));
metrics.add(new Metric("error.plugin_failure.rate","error.plugin_failure"));
metrics.add(new Metric("error.backend_communication_error.rate","error.backend_communication_error"));
metrics.add(new Metric("error.empty_document_summaries.rate","error.empty_document_summaries"));
metrics.add(new Metric("error.invalid_query_parameter.rate","error.invalid_query_parameter"));
metrics.add(new Metric("error.internal_server_error.rate", "error.internal_server_error"));
metrics.add(new Metric("error.misconfigured_server.rate","error.misconfigured_server"));
metrics.add(new Metric("error.invalid_query_transformation.rate","error.invalid_query_transformation"));
metrics.add(new Metric("error.result_with_errors.rate","error.result_with_errors"));
metrics.add(new Metric("error.unspecified.rate","error.unspecified"));
metrics.add(new Metric("error.unhandled_exception.rate","error.unhandled_exception"));
// HTTP response status classes.
metrics.add(new Metric("http.status.1xx.rate"));
metrics.add(new Metric("http.status.2xx.rate"));
metrics.add(new Metric("http.status.3xx.rate"));
metrics.add(new Metric("http.status.4xx.rate"));
metrics.add(new Metric("http.status.5xx.rate"));
return metrics;
}
/**
 * Metrics from the search node (proton): document counts, maintenance jobs,
 * threading services, lid space, resource usage, transaction log, document
 * store / attribute / index memory usage, and query matching.
 * Second constructor argument, where present, is the output name.
 */
private static Set<Metric> getSearchNodeMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// Document counts per readiness state.
metrics.add(new Metric("proton.numstoreddocs.last", "documents_total"));
metrics.add(new Metric("proton.numindexeddocs.last", "documents_ready"));
metrics.add(new Metric("proton.numactivedocs.last", "documents_active"));
metrics.add(new Metric("proton.numremoveddocs.last", "documents_removed"));
metrics.add(new Metric("proton.docsinmemory.last", "documents_inmemory"));
// Node-level disk/memory usage and request/latency summaries.
metrics.add(new Metric("proton.diskusage.last", "diskusage"));
metrics.add(new Metric("proton.memoryusage.max", "content.proton.memoryusage.max"));
metrics.add(new Metric("proton.transport.query.count.rate", "query_requests"));
metrics.add(new Metric("proton.docsum.docs.rate", "document_requests"));
metrics.add(new Metric("proton.docsum.latency.average", "content.proton.transport.docsum.latency.average"));
metrics.add(new Metric("proton.transport.query.latency.average", "query_latency"));
// Background maintenance job load.
metrics.add(new Metric("content.proton.documentdb.job.total.average"));
metrics.add(new Metric("content.proton.documentdb.job.attribute_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.memory_index_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.disk_index_fusion.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.bucket_move.average"));
metrics.add(new Metric("content.proton.documentdb.job.lid_space_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.removed_documents_prune.average"));
// Per-executor pending-task high-water marks.
metrics.add(new Metric("content.proton.documentdb.threading_service.master.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.summary.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_inverter.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_writer.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.attribute_field_writer.maxpending.last"));
// Local document id (lid) space bloat/fragmentation/limit per sub-db.
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_limit.last"));
// Node resource usage (feed-block inputs).
metrics.add(new Metric("content.proton.resource_usage.disk.average"));
metrics.add(new Metric("content.proton.resource_usage.disk_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_mappings.max"));
metrics.add(new Metric("content.proton.resource_usage.open_file_descriptors.max"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.enum_store.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.multi_value.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.last"));
// Transaction log size and replay time.
metrics.add(new Metric("content.proton.transactionlog.entries.average"));
metrics.add(new Metric("content.proton.transactionlog.disk_usage.average"));
metrics.add(new Metric("content.proton.transactionlog.replay_time.last"));
// Document store disk/memory usage for the ready sub-db.
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.onhold_bytes.average"));
// Document store disk/memory usage for the notready sub-db.
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.onhold_bytes.average"));
// Document store disk/memory usage for the removed sub-db.
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.onhold_bytes.average"));
// Attribute memory usage (ready and notready sub-dbs).
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.onhold_bytes.average"));
// Index memory usage.
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.onhold_bytes.average"));
// Query matching, overall and per rank profile.
metrics.add(new Metric("content.proton.documentdb.matching.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.average"));
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.average"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.average"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.average"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.rate"));
return metrics;
}
/**
 * Metrics from the storage layer (duplicate copy of getStorageMetrics):
 * file storage, visiting, ideal-state maintenance and distributors.
 * Where a second constructor argument is present it is the metric's output name.
 */
private static Set<Metric> getStorageMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// Stored document volume and visitor lifetime/queueing.
metrics.add(new Metric("vds.datastored.alldisks.docs.average","docs"));
metrics.add(new Metric("vds.datastored.alldisks.bytes.average","bytes"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.average","visitorlifetime"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.average","visitorqueuewait"));
// File-storage operation rates, queueing and latencies.
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate","put"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate","remove"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate","get"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate","update"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.average","diskqueuesize"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.average","diskqueuewait"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.average"));
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.average"));
metrics.add(new Metric("vds.visitor.allthreads.created.sum.rate","visit"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.splitbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.joinbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.setbucketstates.count.rate"));
// vds.filestor.spi.* success metrics (average and rate) per operation type.
metrics.add(new Metric("vds.filestor.spi.put.success.average"));
metrics.add(new Metric("vds.filestor.spi.remove.success.average"));
metrics.add(new Metric("vds.filestor.spi.update.success.average"));
metrics.add(new Metric("vds.filestor.spi.deleteBucket.success.average"));
metrics.add(new Metric("vds.filestor.spi.get.success.average"));
metrics.add(new Metric("vds.filestor.spi.iterate.success.average"));
metrics.add(new Metric("vds.filestor.spi.put.success.rate"));
metrics.add(new Metric("vds.filestor.spi.remove.success.rate"));
metrics.add(new Metric("vds.filestor.spi.update.success.rate"));
metrics.add(new Metric("vds.filestor.spi.deleteBucket.success.rate"));
metrics.add(new Metric("vds.filestor.spi.get.success.rate"));
metrics.add(new Metric("vds.filestor.spi.iterate.success.rate"));
// Ideal-state bucket counts and delete/merge/split/join operation outcomes.
metrics.add(new Metric("vds.idealstate.buckets_rechecking.average"));
metrics.add(new Metric("vds.idealstate.idealstate_diff.average"));
metrics.add(new Metric("vds.idealstate.buckets_toofewcopies.average"));
metrics.add(new Metric("vds.idealstate.buckets_toomanycopies.average"));
metrics.add(new Metric("vds.idealstate.buckets.average"));
metrics.add(new Metric("vds.idealstate.buckets_notrusted.average"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_ok.rate","deleteok"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_failed.rate","deletefailed"));
metrics.add(new Metric("vds.idealstate.delete_bucket.pending.average","deletepending"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_ok.rate","mergeok"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_failed.rate","mergefailed"));
metrics.add(new Metric("vds.idealstate.merge_bucket.pending.average","mergepending"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_ok.rate","splitok"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_failed.rate","splitfailed"));
metrics.add(new Metric("vds.idealstate.split_bucket.pending.average","splitpending"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_ok.rate","joinok"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_failed.rate","joinfailed"));
metrics.add(new Metric("vds.idealstate.join_bucket.pending.average","joinpending"));
// Distributor operation latency/success/failure and stored volume.
metrics.add(new Metric("vds.distributor.puts.sum.latency.average"));
metrics.add(new Metric("vds.distributor.puts.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.average"));
metrics.add(new Metric("vds.distributor.removes.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.average"));
metrics.add(new Metric("vds.distributor.updates.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.latency.average"));
metrics.add(new Metric("vds.distributor.removelocations.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.average"));
metrics.add(new Metric("vds.distributor.gets.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.average"));
metrics.add(new Metric("vds.distributor.visitor.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.docsstored.average"));
metrics.add(new Metric("vds.distributor.bytesstored.average"));
// Requests aborted because of node clock skew.
metrics.add(new Metric("vds.bouncer.clock_skew_aborts.count"));
return metrics;
}
} | class VespaMetricSet {
// The public "vespa" metric set: all per-service metrics collected below,
// with the default Vespa metric set included as a child set.
public static final MetricSet vespaMetricSet = new MetricSet("vespa",
getVespaMetrics(),
singleton(defaultVespaMetricSet));
// Collects every per-service metric group into one insertion-ordered set and
// returns it as an unmodifiable view.
private static Set<Metric> getVespaMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.addAll(getSearchNodeMetrics());
metrics.addAll(getStorageMetrics());
metrics.addAll(getDocprocMetrics());
metrics.addAll(getClusterControllerMetrics());
metrics.addAll(getQrserverMetrics());
metrics.addAll(getContainerMetrics());
metrics.addAll(getConfigServerMetrics());
metrics.addAll(getSentinelMetrics());
metrics.addAll(getOtherMetrics());
return Collections.unmodifiableSet(metrics);
}
// Sentinel (service starter) metrics: restarts, uptime and running services.
private static Set<Metric> getSentinelMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("sentinel.restarts.count"));
metrics.add(new Metric("sentinel.totalRestarts.last"));
metrics.add(new Metric("sentinel.uptime.last", "sentinel.uptime"));
metrics.add(new Metric("sentinel.running.count"));
metrics.add(new Metric("sentinel.running.last"));
return metrics;
}
// Miscellaneous infrastructure metrics (slobrok heartbeats, logd lines).
private static Set<Metric> getOtherMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("slobrok.heartbeats.failed.count", "slobrok.heartbeats.failed"));
metrics.add(new Metric("logd.processed.lines.count", "logd.processed.lines"));
return metrics;
}
private static Set<Metric> getConfigServerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("configserver.requests.count", "configserver.requests"));
metrics.add(new Metric("configserver.failedRequests.count", "configserver.failedRequests"));
metrics.add(new Metric("configserver.latency.average", "configserver.latency"));
metrics.add(new Metric("configserver.cacheConfigElems.last", "configserver.cacheConfigElems"));
metrics.add(new Metric("configserver.cacheChecksumElems.last", "configserver.cacheChecksumElems"));
metrics.add(new Metric("configserver.hosts.last", "configserver.hosts"));
metrics.add(new Metric("configserver.delayedResponses.count", "configserver.delayedResponses"));
metrics.add(new Metric("configserver.sessionChangeErrors.count", "configserver.sessionChangeErrors"));
return metrics;
}
private static Set<Metric> getClusterControllerMetrics() {
Set<Metric> metrics =new LinkedHashSet<>();
metrics.add(new Metric("cluster-controller.down.count.last"));
metrics.add(new Metric("cluster-controller.initializing.count.last"));
metrics.add(new Metric("cluster-controller.maintenance.count.last"));
metrics.add(new Metric("cluster-controller.retired.count.last"));
metrics.add(new Metric("cluster-controller.stopping.count.last"));
metrics.add(new Metric("cluster-controller.up.count.last"));
metrics.add(new Metric("cluster-controller.cluster-state-change.count", "content.cluster-controller.cluster-state-change.count"));
metrics.add(new Metric("cluster-controller.is-master.last"));
metrics.add(new Metric("cluster-controller.node-event.count"));
return metrics;
}
private static Set<Metric> getDocprocMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("documents_processed.rate", "documents_processed"));
return metrics;
}
private static Set<Metric> getQrserverMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("peak_qps.max", "peak_qps"));
metrics.add(new Metric("search_connections.average", "search_connections"));
metrics.add(new Metric("active_queries.average", "active_queries"));
metrics.add(new Metric("feed.latency.average"));
metrics.add(new Metric("queries.rate", "queries"));
metrics.add(new Metric("query_latency.average", "mean_query_latency"));
metrics.add(new Metric("query_latency.max", "max_query_latency"));
metrics.add(new Metric("query_latency.95percentile", "95p_query_latency"));
metrics.add(new Metric("query_latency.99percentile", "99p_query_latency"));
metrics.add(new Metric("failed_queries.rate", "failed_queries"));
metrics.add(new Metric("degraded_queries.rate", "degraded_queries"));
metrics.add(new Metric("hits_per_query.average", "hits_per_query"));
metrics.add(new Metric("documents_covered.count"));
metrics.add(new Metric("documents_total.count"));
metrics.add(new Metric("totalhits_per_query.average", "totalhits_per_query"));
metrics.add(new Metric("empty_results.rate", "empty_results"));
metrics.add(new Metric("requestsOverQuota.rate"));
metrics.add(new Metric("requestsOverQuota.count"));
metrics.add(new Metric("error.timeout.rate","error.timeout"));
metrics.add(new Metric("error.backends_oos.rate","error.backends_oos"));
metrics.add(new Metric("error.plugin_failure.rate","error.plugin_failure"));
metrics.add(new Metric("error.backend_communication_error.rate","error.backend_communication_error"));
metrics.add(new Metric("error.empty_document_summaries.rate","error.empty_document_summaries"));
metrics.add(new Metric("error.invalid_query_parameter.rate","error.invalid_query_parameter"));
metrics.add(new Metric("error.internal_server_error.rate", "error.internal_server_error"));
metrics.add(new Metric("error.misconfigured_server.rate","error.misconfigured_server"));
metrics.add(new Metric("error.invalid_query_transformation.rate","error.invalid_query_transformation"));
metrics.add(new Metric("error.result_with_errors.rate","error.result_with_errors"));
metrics.add(new Metric("error.unspecified.rate","error.unspecified"));
metrics.add(new Metric("error.unhandled_exception.rate","error.unhandled_exception"));
return metrics;
}
private static Set<Metric> getSearchNodeMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("proton.numstoreddocs.last", "documents_total"));
metrics.add(new Metric("proton.numindexeddocs.last", "documents_ready"));
metrics.add(new Metric("proton.numactivedocs.last", "documents_active"));
metrics.add(new Metric("proton.numremoveddocs.last", "documents_removed"));
metrics.add(new Metric("proton.docsinmemory.last", "documents_inmemory"));
metrics.add(new Metric("proton.diskusage.last", "diskusage"));
metrics.add(new Metric("proton.memoryusage.max", "content.proton.memoryusage.max"));
metrics.add(new Metric("proton.transport.query.count.rate", "query_requests"));
metrics.add(new Metric("proton.docsum.docs.rate", "document_requests"));
metrics.add(new Metric("proton.docsum.latency.average", "content.proton.transport.docsum.latency.average"));
metrics.add(new Metric("proton.transport.query.latency.average", "query_latency"));
metrics.add(new Metric("content.proton.documentdb.job.total.average"));
metrics.add(new Metric("content.proton.documentdb.job.attribute_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.memory_index_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.disk_index_fusion.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.bucket_move.average"));
metrics.add(new Metric("content.proton.documentdb.job.lid_space_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.removed_documents_prune.average"));
metrics.add(new Metric("content.proton.documentdb.threading_service.master.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.summary.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_inverter.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_writer.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.attribute_field_writer.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.resource_usage.disk.average"));
metrics.add(new Metric("content.proton.resource_usage.disk_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_mappings.max"));
metrics.add(new Metric("content.proton.resource_usage.open_file_descriptors.max"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.enum_store.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.multi_value.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.last"));
metrics.add(new Metric("content.proton.transactionlog.entries.average"));
metrics.add(new Metric("content.proton.transactionlog.disk_usage.average"));
metrics.add(new Metric("content.proton.transactionlog.replay_time.last"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.matching.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.average"));
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.average"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.average"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.average"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.rate"));
return metrics;
}
private static Set<Metric> getStorageMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("vds.datastored.alldisks.docs.average","docs"));
metrics.add(new Metric("vds.datastored.alldisks.bytes.average","bytes"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.average","visitorlifetime"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.average","visitorqueuewait"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate","put"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate","remove"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate","get"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate","update"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.average","diskqueuesize"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.average","diskqueuewait"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.average"));
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.average"));
metrics.add(new Metric("vds.visitor.allthreads.created.sum.rate","visit"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.splitbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.joinbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.average"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.setbucketstates.count.rate"));
metrics.add(new Metric("vds.filestor.spi.put.success.average"));
metrics.add(new Metric("vds.filestor.spi.remove.success.average"));
metrics.add(new Metric("vds.filestor.spi.update.success.average"));
metrics.add(new Metric("vds.filestor.spi.deleteBucket.success.average"));
metrics.add(new Metric("vds.filestor.spi.get.success.average"));
metrics.add(new Metric("vds.filestor.spi.iterate.success.average"));
metrics.add(new Metric("vds.filestor.spi.put.success.rate"));
metrics.add(new Metric("vds.filestor.spi.remove.success.rate"));
metrics.add(new Metric("vds.filestor.spi.update.success.rate"));
metrics.add(new Metric("vds.filestor.spi.deleteBucket.success.rate"));
metrics.add(new Metric("vds.filestor.spi.get.success.rate"));
metrics.add(new Metric("vds.filestor.spi.iterate.success.rate"));
metrics.add(new Metric("vds.idealstate.buckets_rechecking.average"));
metrics.add(new Metric("vds.idealstate.idealstate_diff.average"));
metrics.add(new Metric("vds.idealstate.buckets_toofewcopies.average"));
metrics.add(new Metric("vds.idealstate.buckets_toomanycopies.average"));
metrics.add(new Metric("vds.idealstate.buckets.average"));
metrics.add(new Metric("vds.idealstate.buckets_notrusted.average"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_ok.rate","deleteok"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_failed.rate","deletefailed"));
metrics.add(new Metric("vds.idealstate.delete_bucket.pending.average","deletepending"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_ok.rate","mergeok"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_failed.rate","mergefailed"));
metrics.add(new Metric("vds.idealstate.merge_bucket.pending.average","mergepending"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_ok.rate","splitok"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_failed.rate","splitfailed"));
metrics.add(new Metric("vds.idealstate.split_bucket.pending.average","splitpending"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_ok.rate","joinok"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_failed.rate","joinfailed"));
metrics.add(new Metric("vds.idealstate.join_bucket.pending.average","joinpending"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.average"));
metrics.add(new Metric("vds.distributor.puts.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.average"));
metrics.add(new Metric("vds.distributor.removes.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.average"));
metrics.add(new Metric("vds.distributor.updates.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.latency.average"));
metrics.add(new Metric("vds.distributor.removelocations.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.average"));
metrics.add(new Metric("vds.distributor.gets.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.average"));
metrics.add(new Metric("vds.distributor.visitor.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.docsstored.average"));
metrics.add(new Metric("vds.distributor.bytesstored.average"));
metrics.add(new Metric("vds.bouncer.clock_skew_aborts.count"));
return metrics;
}
} |
Please add a comment why this must be called after handleRequest | private ServletRequestReader handleRequest() throws IOException {
HttpRequest jdiscRequest = HttpRequestFactory.newJDiscRequest(jDiscContext.container, jettyRequest);
ContentChannel requestContentChannel;
try (ResourceReference ref = References.fromResource(jdiscRequest)) {
HttpRequestFactory.copyHeaders(jettyRequest, jdiscRequest);
requestContentChannel = requestHandler.handleRequest(jdiscRequest, servletResponseController.responseHandler);
metricReporter.setBindingMatch(jdiscRequest.getBindingMatch());
}
ServletInputStream servletInputStream = jettyRequest.getInputStream();
ServletRequestReader servletRequestReader =
new ServletRequestReader(
servletInputStream,
requestContentChannel,
jDiscContext.janitor,
metricReporter);
servletInputStream.setReadListener(servletRequestReader);
return servletRequestReader;
} | metricReporter.setBindingMatch(jdiscRequest.getBindingMatch()); | private ServletRequestReader handleRequest() throws IOException {
HttpRequest jdiscRequest = HttpRequestFactory.newJDiscRequest(jDiscContext.container, jettyRequest);
ContentChannel requestContentChannel;
try (ResourceReference ref = References.fromResource(jdiscRequest)) {
HttpRequestFactory.copyHeaders(jettyRequest, jdiscRequest);
requestContentChannel = requestHandler.handleRequest(jdiscRequest, servletResponseController.responseHandler);
metricReporter.setBindingMatch(jdiscRequest.getBindingMatch());
}
ServletInputStream servletInputStream = jettyRequest.getInputStream();
ServletRequestReader servletRequestReader =
new ServletRequestReader(
servletInputStream,
requestContentChannel,
jDiscContext.janitor,
metricReporter);
servletInputStream.setReadListener(servletRequestReader);
return servletRequestReader;
} | class HttpRequestDispatch {
private static final Logger log = Logger.getLogger(HttpRequestDispatch.class.getName());
private final static String CHARSET_ANNOTATION = ";charset=";
private final JDiscContext jDiscContext;
private final AsyncContext async;
private final Request jettyRequest;
private final ServletResponseController servletResponseController;
private final RequestHandler requestHandler;
private final MetricReporter metricReporter;
public HttpRequestDispatch(JDiscContext jDiscContext,
AccessLogEntry accessLogEntry,
Map<String, Object> requestMetricDimensions,
HttpServletRequest servletRequest,
HttpServletResponse servletResponse) throws IOException {
this.jDiscContext = jDiscContext;
requestHandler = newRequestHandler(jDiscContext, accessLogEntry, servletRequest);
this.jettyRequest = (Request) servletRequest;
this.metricReporter = new MetricReporter(jDiscContext.metric, requestMetricDimensions, jettyRequest.getTimeStamp());
honourMaxKeepAliveRequests();
this.servletResponseController = new ServletResponseController(
servletRequest,
servletResponse,
jDiscContext.janitor,
metricReporter,
jDiscContext.developerMode());
this.async = servletRequest.startAsync();
async.setTimeout(0);
metricReporter.uriLength(jettyRequest.getOriginalURI().length());
}
public void dispatch() throws IOException {
ServletRequestReader servletRequestReader;
try {
servletRequestReader = handleRequest();
} catch (Throwable throwable) {
servletResponseController.trySendError(throwable);
servletResponseController.finishedFuture().whenComplete((result, exception) ->
completeRequestCallback.accept(null, throwable));
return;
}
try {
onError(servletRequestReader.finishedFuture,
servletResponseController::trySendError);
onError(servletResponseController.finishedFuture(),
servletRequestReader::onError);
CompletableFuture.allOf(servletRequestReader.finishedFuture, servletResponseController.finishedFuture())
.whenComplete(completeRequestCallback);
} catch (Throwable throwable) {
log.log(Level.WARNING, "Failed registering finished listeners.", throwable);
}
}
private void honourMaxKeepAliveRequests() {
if (jDiscContext.serverConfig.maxKeepAliveRequests() > 0) {
HttpConnection connection = getConnection(jettyRequest);
if (connection.getMessagesIn() >= jDiscContext.serverConfig.maxKeepAliveRequests()) {
connection.getGenerator().setPersistent(false);
}
}
}
private BiConsumer<Void, Throwable> completeRequestCallback;
{
AtomicBoolean completeRequestCalled = new AtomicBoolean(false);
HttpRequestDispatch parent = this;
completeRequestCallback = (result, error) -> {
boolean alreadyCalled = completeRequestCalled.getAndSet(true);
if (alreadyCalled) {
AssertionError e = new AssertionError("completeRequest called more than once");
log.log(Level.WARNING, "Assertion failed.", e);
throw e;
}
boolean reportedError = false;
parent.metricReporter.contentSize((int) parent.jettyRequest.getContentRead());
if (error != null) {
if (error instanceof CompletionException && error.getCause() instanceof EofException) {
log.log(Level.FINE,
error,
() -> "Network connection was unexpectedly terminated: " + parent.jettyRequest.getRequestURI());
parent.metricReporter.prematurelyClosed();
} else if (!(error instanceof OverloadException || error instanceof BindingNotFoundException)) {
log.log(Level.WARNING, "Request failed: " + parent.jettyRequest.getRequestURI(), error);
}
reportedError = true;
parent.metricReporter.failedResponse();
} else {
parent.metricReporter.successfulResponse();
}
try {
parent.async.complete();
log.finest(() -> "Request completed successfully: " + parent.jettyRequest.getRequestURI());
} catch (Throwable throwable) {
Level level = reportedError ? Level.FINE: Level.WARNING;
log.log(level, "async.complete failed", throwable);
}
};
}
@SuppressWarnings("try")
private static void onError(CompletableFuture<?> future, Consumer<Throwable> errorHandler) {
future.whenComplete((result, exception) -> {
if (exception != null) {
errorHandler.accept(exception);
}
});
}
ContentChannel handleRequestFilterResponse(Response response) {
try {
jettyRequest.getInputStream().close();
ContentChannel responseContentChannel = servletResponseController.responseHandler.handleResponse(response);
servletResponseController.finishedFuture().whenComplete(completeRequestCallback);
return responseContentChannel;
} catch (IOException e) {
throw throwUnchecked(e);
}
}
private static RequestHandler newRequestHandler(JDiscContext context,
AccessLogEntry accessLogEntry,
HttpServletRequest servletRequest) {
RequestHandler requestHandler = wrapHandlerIfFormPost(
new FilteringRequestHandler(context.requestFilters, context.responseFilters),
servletRequest, context.serverConfig.removeRawPostBodyForWwwUrlEncodedPost());
return new AccessLoggingRequestHandler(requestHandler, accessLogEntry);
}
private static RequestHandler wrapHandlerIfFormPost(RequestHandler requestHandler,
HttpServletRequest servletRequest,
boolean removeBodyForFormPost) {
if (!servletRequest.getMethod().equals("POST")) {
return requestHandler;
}
String contentType = servletRequest.getHeader(HttpHeaders.Names.CONTENT_TYPE);
if (contentType == null) {
return requestHandler;
}
if (!contentType.startsWith(APPLICATION_X_WWW_FORM_URLENCODED)) {
return requestHandler;
}
return new FormPostRequestHandler(requestHandler, getCharsetName(contentType), removeBodyForFormPost);
}
private static String getCharsetName(String contentType) {
if (!contentType.startsWith(CHARSET_ANNOTATION, APPLICATION_X_WWW_FORM_URLENCODED.length())) {
return StandardCharsets.UTF_8.name();
}
return contentType.substring(APPLICATION_X_WWW_FORM_URLENCODED.length() + CHARSET_ANNOTATION.length());
}
} | class HttpRequestDispatch {
private static final Logger log = Logger.getLogger(HttpRequestDispatch.class.getName());
private final static String CHARSET_ANNOTATION = ";charset=";
private final JDiscContext jDiscContext;
private final AsyncContext async;
private final Request jettyRequest;
private final ServletResponseController servletResponseController;
private final RequestHandler requestHandler;
private final MetricReporter metricReporter;
public HttpRequestDispatch(JDiscContext jDiscContext,
AccessLogEntry accessLogEntry,
Map<String, Object> requestMetricDimensions,
HttpServletRequest servletRequest,
HttpServletResponse servletResponse) throws IOException {
this.jDiscContext = jDiscContext;
requestHandler = newRequestHandler(jDiscContext, accessLogEntry, servletRequest);
this.jettyRequest = (Request) servletRequest;
this.metricReporter = new MetricReporter(jDiscContext.metric, requestMetricDimensions, jettyRequest.getTimeStamp());
honourMaxKeepAliveRequests();
this.servletResponseController = new ServletResponseController(
servletRequest,
servletResponse,
jDiscContext.janitor,
metricReporter,
jDiscContext.developerMode());
this.async = servletRequest.startAsync();
async.setTimeout(0);
metricReporter.uriLength(jettyRequest.getOriginalURI().length());
}
public void dispatch() throws IOException {
ServletRequestReader servletRequestReader;
try {
servletRequestReader = handleRequest();
} catch (Throwable throwable) {
servletResponseController.trySendError(throwable);
servletResponseController.finishedFuture().whenComplete((result, exception) ->
completeRequestCallback.accept(null, throwable));
return;
}
try {
onError(servletRequestReader.finishedFuture,
servletResponseController::trySendError);
onError(servletResponseController.finishedFuture(),
servletRequestReader::onError);
CompletableFuture.allOf(servletRequestReader.finishedFuture, servletResponseController.finishedFuture())
.whenComplete(completeRequestCallback);
} catch (Throwable throwable) {
log.log(Level.WARNING, "Failed registering finished listeners.", throwable);
}
}
private void honourMaxKeepAliveRequests() {
if (jDiscContext.serverConfig.maxKeepAliveRequests() > 0) {
HttpConnection connection = getConnection(jettyRequest);
if (connection.getMessagesIn() >= jDiscContext.serverConfig.maxKeepAliveRequests()) {
connection.getGenerator().setPersistent(false);
}
}
}
private BiConsumer<Void, Throwable> completeRequestCallback;
{
AtomicBoolean completeRequestCalled = new AtomicBoolean(false);
HttpRequestDispatch parent = this;
completeRequestCallback = (result, error) -> {
boolean alreadyCalled = completeRequestCalled.getAndSet(true);
if (alreadyCalled) {
AssertionError e = new AssertionError("completeRequest called more than once");
log.log(Level.WARNING, "Assertion failed.", e);
throw e;
}
boolean reportedError = false;
parent.metricReporter.contentSize((int) parent.jettyRequest.getContentRead());
if (error != null) {
if (error instanceof CompletionException && error.getCause() instanceof EofException) {
log.log(Level.FINE,
error,
() -> "Network connection was unexpectedly terminated: " + parent.jettyRequest.getRequestURI());
parent.metricReporter.prematurelyClosed();
} else if (!(error instanceof OverloadException || error instanceof BindingNotFoundException)) {
log.log(Level.WARNING, "Request failed: " + parent.jettyRequest.getRequestURI(), error);
}
reportedError = true;
parent.metricReporter.failedResponse();
} else {
parent.metricReporter.successfulResponse();
}
try {
parent.async.complete();
log.finest(() -> "Request completed successfully: " + parent.jettyRequest.getRequestURI());
} catch (Throwable throwable) {
Level level = reportedError ? Level.FINE: Level.WARNING;
log.log(level, "async.complete failed", throwable);
}
};
}
@SuppressWarnings("try")
private static void onError(CompletableFuture<?> future, Consumer<Throwable> errorHandler) {
future.whenComplete((result, exception) -> {
if (exception != null) {
errorHandler.accept(exception);
}
});
}
ContentChannel handleRequestFilterResponse(Response response) {
try {
jettyRequest.getInputStream().close();
ContentChannel responseContentChannel = servletResponseController.responseHandler.handleResponse(response);
servletResponseController.finishedFuture().whenComplete(completeRequestCallback);
return responseContentChannel;
} catch (IOException e) {
throw throwUnchecked(e);
}
}
private static RequestHandler newRequestHandler(JDiscContext context,
AccessLogEntry accessLogEntry,
HttpServletRequest servletRequest) {
RequestHandler requestHandler = wrapHandlerIfFormPost(
new FilteringRequestHandler(context.requestFilters, context.responseFilters),
servletRequest, context.serverConfig.removeRawPostBodyForWwwUrlEncodedPost());
return new AccessLoggingRequestHandler(requestHandler, accessLogEntry);
}
private static RequestHandler wrapHandlerIfFormPost(RequestHandler requestHandler,
HttpServletRequest servletRequest,
boolean removeBodyForFormPost) {
if (!servletRequest.getMethod().equals("POST")) {
return requestHandler;
}
String contentType = servletRequest.getHeader(HttpHeaders.Names.CONTENT_TYPE);
if (contentType == null) {
return requestHandler;
}
if (!contentType.startsWith(APPLICATION_X_WWW_FORM_URLENCODED)) {
return requestHandler;
}
return new FormPostRequestHandler(requestHandler, getCharsetName(contentType), removeBodyForFormPost);
}
private static String getCharsetName(String contentType) {
if (!contentType.startsWith(CHARSET_ANNOTATION, APPLICATION_X_WWW_FORM_URLENCODED.length())) {
return StandardCharsets.UTF_8.name();
}
return contentType.substring(APPLICATION_X_WWW_FORM_URLENCODED.length() + CHARSET_ANNOTATION.length());
}
} |
Please keep the static import. | public void testInvalidPath() throws Exception{
assertResponse("/querybuilder/invalid_filepath", "{\"error-code\":\"NOT_FOUND\",\"message\":\"Nothing at path","application/json; charset=UTF-8", 404);
}
private void assertResponse(String path, String expectedStartString, String expectedContentType, int expectedStatusCode) throws IOException {
assertResponse(Request.Method.GET, path, expectedStartString,expectedContentType, expectedStatusCode);
}
private void assertResponse(Request.Method method, String path, String expectedStartString, String expectedContentType, int expectedStatusCode) throws IOException {
Response response = container.handleRequest(new Request("http:
Assert.assertEquals("Status code", expectedStatusCode, response.getStatus());
Assert.assertEquals(expectedContentType, response.getHeaders().getFirst("Content-Type"));
if(expectedStartString != null){
Assert.assertTrue(response.getBodyAsString().startsWith(expectedStartString));
}
}
private String servicesXml() {
return "<jdisc version='1.0'>\n" +
" <handler id='com.yahoo.search.query.gui.GUIHandler'>\n" +
" <binding>http:
" </handler>\n" +
" <http>\n" +
" <server id='default' port='8080'/>\n" +
" </http>\n" +
"</jdisc>";
}
} | Assert.assertTrue(response.getBodyAsString().startsWith(expectedStartString)); | public void testInvalidPath() throws Exception{
assertResponse("/querybuilder/invalid_filepath", "{\"error-code\":\"NOT_FOUND\",\"message\":\"Nothing at path","application/json; charset=UTF-8", 404);
}
private void assertResponse(String path, String expectedStartString, String expectedContentType, int expectedStatusCode) throws IOException {
assertResponse(Request.Method.GET, path, expectedStartString,expectedContentType, expectedStatusCode);
}
private void assertResponse(Request.Method method, String path, String expectedStartString, String expectedContentType, int expectedStatusCode) throws IOException {
Response response = container.handleRequest(new Request("http:
assertEquals("Status code", expectedStatusCode, response.getStatus());
assertEquals(expectedContentType, response.getHeaders().getFirst("Content-Type"));
if(expectedStartString != null){
assertTrue(response.getBodyAsString().startsWith(expectedStartString));
}
}
private String servicesXml() {
return "<jdisc version='1.0'>\n" +
" <handler id='com.yahoo.search.query.gui.GUIHandler'>\n" +
" <binding>http:
" </handler>\n" +
" <http>\n" +
" <server id='default' port='8080'/>\n" +
" </http>\n" +
"</jdisc>";
}
} | class GUIHandlerTest {
private JDisc container;
@Before
public void startContainer() {
container = JDisc.fromServicesXml(servicesXml(), Networking.enable);
}
@After
public void stopContainer() {
/*
try {
Thread.sleep(100_000);
} catch (InterruptedException e) {
e.printStackTrace();
}*/
container.close();
}
@Test
public void testRequest() throws Exception {
assertResponse("/querybuilder/", "<!-- Copyright 2018 Yahoo Holdings.","text/html; charset=UTF-8", 200);
}
@Test
public void testContentTypes() throws Exception{
assertResponse("/querybuilder/_includes/css/vespa.css", "/**","text/css; charset=UTF-8", 200);
assertResponse("/querybuilder/js/agency.js", "/*!","application/javascript; charset=UTF-8", 200);
assertResponse("/querybuilder/img/reload.svg", "<?xml","image/svg+xml; charset=UTF-8", 200);
assertResponse("/querybuilder/img/Vespa-V2.png", null,"image/png; charset=UTF-8", 200);
}
@Test | class GUIHandlerTest {
private JDisc container;
@Before
public void startContainer() {
container = JDisc.fromServicesXml(servicesXml(), Networking.disable);
}
@After
public void stopContainer() {
/*
try {
Thread.sleep(100_000);
} catch (InterruptedException e) {
e.printStackTrace();
}*/
container.close();
}
@Test
public void testRequest() throws Exception {
assertResponse("/querybuilder/", "<!-- Copyright 2018 Yahoo Holdings.","text/html; charset=UTF-8", 200);
}
@Test
public void testContentTypes() throws Exception{
assertResponse("/querybuilder/_includes/css/vespa.css", "/**","text/css; charset=UTF-8", 200);
assertResponse("/querybuilder/js/agency.js", "/*!","application/javascript; charset=UTF-8", 200);
assertResponse("/querybuilder/img/reload.svg", "<?xml","image/svg+xml; charset=UTF-8", 200);
assertResponse("/querybuilder/img/Vespa-V2.png", null,"image/png; charset=UTF-8", 200);
}
@Test |
The code is a little easier to read if white-spaces are used consistently: `} else if (...) {` | public String getContentType() {
if (path.endsWith(".css")) {
return "text/css";
} else if (path.endsWith(".js")) {
return "application/javascript";
} else if (path.endsWith(".html")) {
return "text/html";
}else if (path.endsWith(".php")) {
return "text/php";
}else if (path.endsWith(".svg")) {
return "image/svg+xml";
}else if (path.endsWith(".eot")) {
return "application/vnd.ms-fontobject";
}else if (path.endsWith(".ttf")) {
return "font/ttf";
}else if (path.endsWith(".woff")) {
return "font/woff";
}else if (path.endsWith(".woff2")) {
return "font/woff2";
}else if (path.endsWith(".otf")) {
return "font/otf";
}else if (path.endsWith(".png")) {
return "image/png";
}else if (path.endsWith(".xml")) {
return "application/xml";
}else if (path.endsWith(".ico")) {
return "image/x-icon";
}else if (path.endsWith(".json")) {
return "application/json";
}else if (path.endsWith(".ttf")) {
return "font/ttf";
}
return "text/html";
} | }else if (path.endsWith(".php")) { | public String getContentType() {
if (path.endsWith(".css")) {
return "text/css";
} else if (path.endsWith(".js")) {
return "application/javascript";
} else if (path.endsWith(".html")) {
return "text/html";
} else if (path.endsWith(".php")) {
return "text/php";
} else if (path.endsWith(".svg")) {
return "image/svg+xml";
} else if (path.endsWith(".eot")) {
return "application/vnd.ms-fontobject";
} else if (path.endsWith(".ttf")) {
return "font/ttf";
} else if (path.endsWith(".woff")) {
return "font/woff";
} else if (path.endsWith(".woff2")) {
return "font/woff2";
} else if (path.endsWith(".otf")) {
return "font/otf";
} else if (path.endsWith(".png")) {
return "image/png";
} else if (path.endsWith(".xml")) {
return "application/xml";
} else if (path.endsWith(".ico")) {
return "image/x-icon";
} else if (path.endsWith(".json")) {
return "application/json";
} else if (path.endsWith(".ttf")) {
return "font/ttf";
}
return "text/html";
} | class FileResponse extends HttpResponse {
private final String path;
public FileResponse(String relativePath) {
super(200);
this.path = relativePath;
}
@Override
public void render(OutputStream out) throws IOException {
InputStream is = GUIHandler.class.getClassLoader().getResourceAsStream("gui/"+this.path);
byte[] buf = new byte[1024];
int numRead;
while ( (numRead = is.read(buf) ) >= 0) {
out.write(buf, 0, numRead);
}
}
@Override
} | class FileResponse extends HttpResponse {
private final String path;
public FileResponse(String relativePath) {
super(200);
this.path = relativePath;
}
@Override
public void render(OutputStream out) throws IOException {
InputStream is = GUIHandler.class.getClassLoader().getResourceAsStream("gui/"+this.path);
byte[] buf = new byte[1024];
int numRead;
while ( (numRead = is.read(buf) ) >= 0) {
out.write(buf, 0, numRead);
}
}
@Override
} |
I agree. I will go through it and correct it. | public String getContentType() {
if (path.endsWith(".css")) {
return "text/css";
} else if (path.endsWith(".js")) {
return "application/javascript";
} else if (path.endsWith(".html")) {
return "text/html";
}else if (path.endsWith(".php")) {
return "text/php";
}else if (path.endsWith(".svg")) {
return "image/svg+xml";
}else if (path.endsWith(".eot")) {
return "application/vnd.ms-fontobject";
}else if (path.endsWith(".ttf")) {
return "font/ttf";
}else if (path.endsWith(".woff")) {
return "font/woff";
}else if (path.endsWith(".woff2")) {
return "font/woff2";
}else if (path.endsWith(".otf")) {
return "font/otf";
}else if (path.endsWith(".png")) {
return "image/png";
}else if (path.endsWith(".xml")) {
return "application/xml";
}else if (path.endsWith(".ico")) {
return "image/x-icon";
}else if (path.endsWith(".json")) {
return "application/json";
}else if (path.endsWith(".ttf")) {
return "font/ttf";
}
return "text/html";
} | }else if (path.endsWith(".php")) { | public String getContentType() {
if (path.endsWith(".css")) {
return "text/css";
} else if (path.endsWith(".js")) {
return "application/javascript";
} else if (path.endsWith(".html")) {
return "text/html";
} else if (path.endsWith(".php")) {
return "text/php";
} else if (path.endsWith(".svg")) {
return "image/svg+xml";
} else if (path.endsWith(".eot")) {
return "application/vnd.ms-fontobject";
} else if (path.endsWith(".ttf")) {
return "font/ttf";
} else if (path.endsWith(".woff")) {
return "font/woff";
} else if (path.endsWith(".woff2")) {
return "font/woff2";
} else if (path.endsWith(".otf")) {
return "font/otf";
} else if (path.endsWith(".png")) {
return "image/png";
} else if (path.endsWith(".xml")) {
return "application/xml";
} else if (path.endsWith(".ico")) {
return "image/x-icon";
} else if (path.endsWith(".json")) {
return "application/json";
} else if (path.endsWith(".ttf")) {
return "font/ttf";
}
return "text/html";
} | class FileResponse extends HttpResponse {
private final String path;
public FileResponse(String relativePath) {
super(200);
this.path = relativePath;
}
@Override
public void render(OutputStream out) throws IOException {
InputStream is = GUIHandler.class.getClassLoader().getResourceAsStream("gui/"+this.path);
byte[] buf = new byte[1024];
int numRead;
while ( (numRead = is.read(buf) ) >= 0) {
out.write(buf, 0, numRead);
}
}
@Override
} | class FileResponse extends HttpResponse {
private final String path;
public FileResponse(String relativePath) {
super(200);
this.path = relativePath;
}
@Override
public void render(OutputStream out) throws IOException {
InputStream is = GUIHandler.class.getClassLoader().getResourceAsStream("gui/"+this.path);
byte[] buf = new byte[1024];
int numRead;
while ( (numRead = is.read(buf) ) >= 0) {
out.write(buf, 0, numRead);
}
}
@Override
} |
i don't understand where this port actually comes from... | List<Configserver> getConfigServersFromSpec(AbstractConfigProducer parent) {
List<Configserver> configservers = new ArrayList<>();
for (ConfigServerSpec spec : configServerSpecs) {
HostSystem hostSystem = parent.getHostSystem();
HostResource host = new HostResource(Host.createConfigServerHost(hostSystem, spec.getHostName()));
hostSystem.addBoundHost(host);
Configserver configserver = new Configserver(parent, spec.getHostName(), spec.getConfigServerPort());
configserver.setHostResource(host);
configserver.setBasePort(configserver.getWantedPort());
configserver.initService();
configservers.add(configserver);
}
return configservers;
} | Configserver configserver = new Configserver(parent, spec.getHostName(), spec.getConfigServerPort()); | List<Configserver> getConfigServersFromSpec(AbstractConfigProducer parent) {
List<Configserver> configservers = new ArrayList<>();
for (ConfigServerSpec spec : configServerSpecs) {
HostSystem hostSystem = parent.getHostSystem();
HostResource host = new HostResource(Host.createConfigServerHost(hostSystem, spec.getHostName()));
hostSystem.addBoundHost(host);
Configserver configserver = new Configserver(parent, spec.getHostName(), spec.getConfigServerPort());
configserver.setHostResource(host);
configserver.setBasePort(configserver.getWantedPort());
configserver.initService();
configservers.add(configserver);
}
return configservers;
} | class DomAdminBuilderBase extends VespaDomBuilder.DomConfigProducerBuilder<Admin> {
private static final int DEFAULT_INTERVAL = 1;
private static final String DEFAULT_CLUSTER_NAME = "vespa";
private final ApplicationType applicationType;
protected final List<ConfigServerSpec> configServerSpecs;
private final FileRegistry fileRegistry;
protected final boolean multitenant;
DomAdminBuilderBase(ApplicationType applicationType, FileRegistry fileRegistry, boolean multitenant,
List<ConfigServerSpec> configServerSpecs) {
this.applicationType = applicationType;
this.fileRegistry = fileRegistry;
this.multitenant = multitenant;
this.configServerSpecs = configServerSpecs;
}
@Override
protected Admin doBuild(AbstractConfigProducer parent, Element adminElement) {
Monitoring monitoring = getMonitoring(getChildWithFallback(adminElement, "monitoring", "yamas"));
Metrics metrics = new MetricsBuilder(applicationType, predefinedMetricSets)
.buildMetrics(XML.getChild(adminElement, "metrics"));
Map<String, MetricsConsumer> legacyMetricsConsumers = DomMetricBuilderHelper
.buildMetricsConsumers(XML.getChild(adminElement, "metric-consumers"));
FileDistributionConfigProducer fileDistributionConfigProducer = getFileDistributionConfigProducer(parent);
Admin admin = new Admin(parent, monitoring, metrics, legacyMetricsConsumers, multitenant, fileDistributionConfigProducer);
admin.setApplicationType(applicationType);
doBuildAdmin(admin, adminElement);
new ModelConfigProvider(admin);
return admin;
}
private FileDistributionConfigProducer getFileDistributionConfigProducer(AbstractConfigProducer parent) {
return new FileDistributionConfigProducer(parent, fileRegistry, configServerSpecs);
}
private Element getChildWithFallback(Element parent, String childName, String alternativeChildName) {
Element child = XML.getChild(parent, childName);
if (child != null) return child;
return XML.getChild(parent, alternativeChildName);
}
protected abstract void doBuildAdmin(Admin admin, Element adminE);
private Monitoring getMonitoring(Element monitoringElement) {
if (monitoringElement == null) return new DefaultMonitoring(DEFAULT_CLUSTER_NAME, DEFAULT_INTERVAL);
Integer minutes = getMonitoringInterval(monitoringElement);
if (minutes == null)
minutes = DEFAULT_INTERVAL;
return new DefaultMonitoring(monitoringElement.getAttribute("systemname"), minutes);
}
private Integer getMonitoringInterval(Element monitoringE) {
Integer minutes = null;
String seconds = monitoringE.getAttribute("interval").trim();
if ( ! seconds.isEmpty()) {
minutes = Integer.parseInt(seconds) / 60;
if (!(minutes == 1 || minutes == 5)) {
throw new IllegalArgumentException("The only allowed values for 'interval' attribute in '" + monitoringE.getTagName() +
"' element is 60 or 300.");
}
}
return minutes;
}
void addLogForwarders(ModelElement logForwardingElement, Admin admin) {
if (logForwardingElement == null) return;
for (ModelElement e : logForwardingElement.getChildren("splunk")) {
LogForwarder.Config cfg = LogForwarder.cfg()
.withSplunkHome(e.getStringAttribute("splunk-home"))
.withDeploymentServer(e.getStringAttribute("deployment-server"))
.withClientName(e.getStringAttribute("client-name"));
admin.setLogForwarderConfig(cfg);
}
}
} | class DomAdminBuilderBase extends VespaDomBuilder.DomConfigProducerBuilder<Admin> {
private static final int DEFAULT_INTERVAL = 1;
private static final String DEFAULT_CLUSTER_NAME = "vespa";
private final ApplicationType applicationType;
protected final List<ConfigServerSpec> configServerSpecs;
private final FileRegistry fileRegistry;
protected final boolean multitenant;
DomAdminBuilderBase(ApplicationType applicationType, FileRegistry fileRegistry, boolean multitenant,
List<ConfigServerSpec> configServerSpecs) {
this.applicationType = applicationType;
this.fileRegistry = fileRegistry;
this.multitenant = multitenant;
this.configServerSpecs = configServerSpecs;
}
@Override
protected Admin doBuild(AbstractConfigProducer parent, Element adminElement) {
Monitoring monitoring = getMonitoring(getChildWithFallback(adminElement, "monitoring", "yamas"));
Metrics metrics = new MetricsBuilder(applicationType, predefinedMetricSets)
.buildMetrics(XML.getChild(adminElement, "metrics"));
Map<String, MetricsConsumer> legacyMetricsConsumers = DomMetricBuilderHelper
.buildMetricsConsumers(XML.getChild(adminElement, "metric-consumers"));
FileDistributionConfigProducer fileDistributionConfigProducer = getFileDistributionConfigProducer(parent);
Admin admin = new Admin(parent, monitoring, metrics, legacyMetricsConsumers, multitenant, fileDistributionConfigProducer);
admin.setApplicationType(applicationType);
doBuildAdmin(admin, adminElement);
new ModelConfigProvider(admin);
return admin;
}
private FileDistributionConfigProducer getFileDistributionConfigProducer(AbstractConfigProducer parent) {
return new FileDistributionConfigProducer(parent, fileRegistry, configServerSpecs);
}
private Element getChildWithFallback(Element parent, String childName, String alternativeChildName) {
Element child = XML.getChild(parent, childName);
if (child != null) return child;
return XML.getChild(parent, alternativeChildName);
}
protected abstract void doBuildAdmin(Admin admin, Element adminE);
private Monitoring getMonitoring(Element monitoringElement) {
if (monitoringElement == null) return new DefaultMonitoring(DEFAULT_CLUSTER_NAME, DEFAULT_INTERVAL);
Integer minutes = getMonitoringInterval(monitoringElement);
if (minutes == null)
minutes = DEFAULT_INTERVAL;
return new DefaultMonitoring(monitoringElement.getAttribute("systemname"), minutes);
}
private Integer getMonitoringInterval(Element monitoringE) {
Integer minutes = null;
String seconds = monitoringE.getAttribute("interval").trim();
if ( ! seconds.isEmpty()) {
minutes = Integer.parseInt(seconds) / 60;
if (!(minutes == 1 || minutes == 5)) {
throw new IllegalArgumentException("The only allowed values for 'interval' attribute in '" + monitoringE.getTagName() +
"' element is 60 or 300.");
}
}
return minutes;
}
void addLogForwarders(ModelElement logForwardingElement, Admin admin) {
if (logForwardingElement == null) return;
for (ModelElement e : logForwardingElement.getChildren("splunk")) {
LogForwarder.Config cfg = LogForwarder.cfg()
.withSplunkHome(e.getStringAttribute("splunk-home"))
.withDeploymentServer(e.getStringAttribute("deployment-server"))
.withClientName(e.getStringAttribute("client-name"));
admin.setLogForwarderConfig(cfg);
}
}
} |
did you mean size() > 0 here? | public ConfigserverBuilder(int i, List<ConfigServerSpec> configServerSpec) {
this.i = i;
Objects.requireNonNull(configServerSpec);
if (configServerSpec.size() > 1)
this.rpcPort = configServerSpec.get(0).getConfigServerPort();
else
this.rpcPort = Configserver.defaultRpcPort;
} | if (configServerSpec.size() > 1) | public ConfigserverBuilder(int i, List<ConfigServerSpec> configServerSpec) {
this.i = i;
Objects.requireNonNull(configServerSpec);
if (configServerSpec.size() > 0)
this.rpcPort = configServerSpec.get(0).getConfigServerPort();
else
this.rpcPort = Configserver.defaultRpcPort;
} | class ConfigserverBuilder extends DomConfigProducerBuilder<Configserver> {
private final int i;
private final int rpcPort;
@Override
protected Configserver doBuild(AbstractConfigProducer parent, Element spec) {
return new Configserver(parent, "configserver." + i, rpcPort);
}
} | class ConfigserverBuilder extends DomConfigProducerBuilder<Configserver> {
private final int i;
private final int rpcPort;
@Override
protected Configserver doBuild(AbstractConfigProducer parent, Element spec) {
return new Configserver(parent, "configserver." + i, rpcPort);
}
} |
Yes, thanks, fixed | public ConfigserverBuilder(int i, List<ConfigServerSpec> configServerSpec) {
this.i = i;
Objects.requireNonNull(configServerSpec);
if (configServerSpec.size() > 1)
this.rpcPort = configServerSpec.get(0).getConfigServerPort();
else
this.rpcPort = Configserver.defaultRpcPort;
} | if (configServerSpec.size() > 1) | public ConfigserverBuilder(int i, List<ConfigServerSpec> configServerSpec) {
this.i = i;
Objects.requireNonNull(configServerSpec);
if (configServerSpec.size() > 0)
this.rpcPort = configServerSpec.get(0).getConfigServerPort();
else
this.rpcPort = Configserver.defaultRpcPort;
} | class ConfigserverBuilder extends DomConfigProducerBuilder<Configserver> {
private final int i;
private final int rpcPort;
@Override
protected Configserver doBuild(AbstractConfigProducer parent, Element spec) {
return new Configserver(parent, "configserver." + i, rpcPort);
}
} | class ConfigserverBuilder extends DomConfigProducerBuilder<Configserver> {
private final int i;
private final int rpcPort;
@Override
protected Configserver doBuild(AbstractConfigProducer parent, Element spec) {
return new Configserver(parent, "configserver." + i, rpcPort);
}
} |
I think this should be 90 | private Defaults() {
vespaHome = findVespaHome();
vespaUser = findVespaUser();
vespaHost = findVespaHostname();
vespaWebServicePort = findWebServicePort(8080);
vespaPortBase = findVespaPortBase(19000);
vespaPortConfigServerRpc = findConfigServerPort(vespaPortBase + 70);
vespaPortConfigServerHttp = vespaPortConfigServerRpc + 1;
vespaPortConfigProxyRpc = findConfigProxyPort(vespaPortBase + 91);
} | vespaPortConfigProxyRpc = findConfigProxyPort(vespaPortBase + 91); | private Defaults() {
vespaHome = findVespaHome("/opt/vespa");
vespaUser = findVespaUser("vespa");
vespaHost = findVespaHostname("localhost");
vespaWebServicePort = findWebServicePort(8080);
vespaPortBase = findVespaPortBase(19000);
vespaPortConfigServerRpc = findConfigServerPort(vespaPortBase + 70);
vespaPortConfigServerHttp = vespaPortConfigServerRpc + 1;
vespaPortConfigProxyRpc = findConfigProxyPort(vespaPortBase + 90);
} | class Defaults {
private static final Logger log = Logger.getLogger(Defaults.class.getName());
private static final Defaults defaults = new Defaults();
private final String vespaHome;
private final String vespaUser;
private final String vespaHost;
private final int vespaWebServicePort;
private final int vespaPortBase;
private final int vespaPortConfigServerRpc;
private final int vespaPortConfigServerHttp;
private final int vespaPortConfigProxyRpc;
static private String findVespaHome() {
Optional<String> vespaHomeEnv = Optional.ofNullable(System.getenv("VESPA_HOME"));
if ( ! vespaHomeEnv.isPresent() || vespaHomeEnv.get().trim().isEmpty()) {
log.info("VESPA_HOME not set, using /opt/vespa");
return "/opt/vespa";
}
String vespaHome = vespaHomeEnv.get().trim();
if (vespaHome.endsWith("/")) {
int sz = vespaHome.length() - 1;
vespaHome = vespaHome.substring(0, sz);
}
return vespaHome;
}
static private String findVespaHostname() {
Optional<String> vespaHostEnv = Optional.ofNullable(System.getenv("VESPA_HOSTNAME"));
if (vespaHostEnv.isPresent() && ! vespaHostEnv.get().trim().isEmpty()) {
return vespaHostEnv.get().trim();
}
return "localhost";
}
static private String findVespaUser() {
Optional<String> vespaUserEnv = Optional.ofNullable(System.getenv("VESPA_USER"));
if (! vespaUserEnv.isPresent()) {
log.fine("VESPA_USER not set, using vespa");
return "vespa";
}
return vespaUserEnv.get().trim();
}
static private int findPort(String varName, int defaultPort) {
Optional<String> port = Optional.ofNullable(System.getenv(varName));
if ( ! port.isPresent() || port.get().trim().isEmpty()) {
log.fine("" + varName + " not set, using " + defaultPort);
return defaultPort;
}
try {
return Integer.parseInt(port.get());
} catch (NumberFormatException e) {
throw new IllegalArgumentException("must be an integer, was '" +
port.get() + "'");
}
}
static private int findVespaPortBase(int defaultPort) {
return findPort("VESPA_PORT_BASE", defaultPort);
}
static private int findConfigServerPort(int defaultPort) {
return findPort("port_configserver_rpc", defaultPort);
}
static private int findConfigProxyPort(int defaultPort) {
return findPort("port_configproxy_rpc", defaultPort);
}
static private int findWebServicePort(int defaultPort) {
return findPort("VESPA_WEB_SERVICE_PORT", defaultPort);
}
/**
* Get the username to own directories, files and processes
* @return the vespa user name
**/
public String vespaUser() { return vespaUser; }
/**
* Compute the host name that identifies myself.
* Detection of the hostname is now done before starting any Vespa
* programs and provided in the environment variable VESPA_HOSTNAME;
* if that variable isn't set a default of "localhost" is always returned.
* @return the vespa host name
**/
public String vespaHostname() { return vespaHost; }
/**
* Returns the path to the root under which Vespa should read and write files.
* Will not end with a "/".
* @return the vespa home directory
*/
public String vespaHome() { return vespaHome; }
/**
* Returns an absolute path produced by prepending vespaHome to the argument if it is relative.
* If the path starts by "/" (absolute) or "./" (explicitly relative - useful for tests),
* it is returned as-is.
*
* @param path the path to prepend vespaHome to unless it is absolute
* @return the given path string with the root path given from
* vespaHome() prepended, unless the given path is absolute, in which
* case it is be returned as-is
*/
public String underVespaHome(String path) {
if (path.startsWith("/")) return path;
if (path.startsWith("./")) return path;
return vespaHome() + "/" + path;
}
/**
* Returns the port number where Vespa web services should be available.
*
* @return the vespa webservice port
*/
public int vespaWebServicePort() { return vespaWebServicePort; }
/**
* Returns the base for port numbers where the Vespa services should listen.
*
* @return the vespa base number for ports
*/
public int vespaPortBase() { return vespaPortBase; }
/** @return port number used by cloud config server (for its RPC protocol) */
public int vespaConfigServerRpcPort() { return vespaPortConfigServerRpc; }
/** @return port number used by cloud config server (REST api on HTTP) */
public int vespaConfigServerHttpPort() { return vespaPortConfigServerHttp; }
/** @return port number used by config proxy server (RPC protocol) */
public int vespaConfigProxyRpcPort() { return vespaPortConfigProxyRpc; }
/** Returns the defaults of this runtime environment */
public static Defaults getDefaults() { return defaults; }
} | class Defaults {
private static final Logger log = Logger.getLogger(Defaults.class.getName());
private static final Defaults defaults = new Defaults();
private final String vespaHome;
private final String vespaUser;
private final String vespaHost;
private final int vespaWebServicePort;
private final int vespaPortBase;
private final int vespaPortConfigServerRpc;
private final int vespaPortConfigServerHttp;
private final int vespaPortConfigProxyRpc;
static private String findVespaHome(String defHome) {
Optional<String> vespaHomeEnv = Optional.ofNullable(System.getenv("VESPA_HOME"));
if ( ! vespaHomeEnv.isPresent() || vespaHomeEnv.get().trim().isEmpty()) {
log.info("VESPA_HOME not set, using " + defHome);
return defHome;
}
String vespaHome = vespaHomeEnv.get().trim();
if (vespaHome.endsWith("/")) {
int sz = vespaHome.length() - 1;
vespaHome = vespaHome.substring(0, sz);
}
return vespaHome;
}
static private String findVespaHostname(String defHost) {
Optional<String> vespaHostEnv = Optional.ofNullable(System.getenv("VESPA_HOSTNAME"));
if (vespaHostEnv.isPresent() && ! vespaHostEnv.get().trim().isEmpty()) {
return vespaHostEnv.get().trim();
}
return defHost;
}
static private String findVespaUser(String defUser) {
Optional<String> vespaUserEnv = Optional.ofNullable(System.getenv("VESPA_USER"));
if (! vespaUserEnv.isPresent()) {
log.fine("VESPA_USER not set, using "+defUser);
return defUser;
}
return vespaUserEnv.get().trim();
}
static private int findPort(String varName, int defaultPort) {
Optional<String> port = Optional.ofNullable(System.getenv(varName));
if ( ! port.isPresent() || port.get().trim().isEmpty()) {
log.fine("" + varName + " not set, using " + defaultPort);
return defaultPort;
}
try {
return Integer.parseInt(port.get());
} catch (NumberFormatException e) {
throw new IllegalArgumentException("must be an integer, was '" +
port.get() + "'");
}
}
static private int findVespaPortBase(int defaultPort) {
return findPort("VESPA_PORT_BASE", defaultPort);
}
static private int findConfigServerPort(int defaultPort) {
return findPort("port_configserver_rpc", defaultPort);
}
static private int findConfigProxyPort(int defaultPort) {
return findPort("port_configproxy_rpc", defaultPort);
}
static private int findWebServicePort(int defaultPort) {
return findPort("VESPA_WEB_SERVICE_PORT", defaultPort);
}
/**
* Get the username to own directories, files and processes
* @return the vespa user name
**/
public String vespaUser() { return vespaUser; }
/**
* Compute the host name that identifies myself.
* Detection of the hostname is now done before starting any Vespa
* programs and provided in the environment variable VESPA_HOSTNAME;
* if that variable isn't set a default of "localhost" is always returned.
* @return the vespa host name
**/
public String vespaHostname() { return vespaHost; }
/**
* Returns the path to the root under which Vespa should read and write files.
* Will not end with a "/".
* @return the vespa home directory
*/
public String vespaHome() { return vespaHome; }
/**
* Returns an absolute path produced by prepending vespaHome to the argument if it is relative.
* If the path starts by "/" (absolute) or "./" (explicitly relative - useful for tests),
* it is returned as-is.
*
* @param path the path to prepend vespaHome to unless it is absolute
* @return the given path string with the root path given from
* vespaHome() prepended, unless the given path is absolute, in which
* case it is be returned as-is
*/
public String underVespaHome(String path) {
if (path.startsWith("/")) return path;
if (path.startsWith("./")) return path;
return vespaHome() + "/" + path;
}
/**
* Returns the port number where Vespa web services should be available.
*
* @return the vespa webservice port
*/
public int vespaWebServicePort() { return vespaWebServicePort; }
/**
* Returns the base for port numbers where the Vespa services should listen.
*
* @return the vespa base number for ports
*/
public int vespaPortBase() { return vespaPortBase; }
/** @return port number used by cloud config server (for its RPC protocol) */
public int vespaConfigServerRpcPort() { return vespaPortConfigServerRpc; }
/** @return port number used by cloud config server (REST api on HTTP) */
public int vespaConfigServerHttpPort() { return vespaPortConfigServerHttp; }
/** @return port number used by config proxy server (RPC protocol) */
public int vespaConfigProxyRpcPort() { return vespaPortConfigProxyRpc; }
/** Returns the defaults of this runtime environment */
public static Defaults getDefaults() { return defaults; }
} |
right, no idea how i ended up with 91 here... | private Defaults() {
vespaHome = findVespaHome();
vespaUser = findVespaUser();
vespaHost = findVespaHostname();
vespaWebServicePort = findWebServicePort(8080);
vespaPortBase = findVespaPortBase(19000);
vespaPortConfigServerRpc = findConfigServerPort(vespaPortBase + 70);
vespaPortConfigServerHttp = vespaPortConfigServerRpc + 1;
vespaPortConfigProxyRpc = findConfigProxyPort(vespaPortBase + 91);
} | vespaPortConfigProxyRpc = findConfigProxyPort(vespaPortBase + 91); | private Defaults() {
vespaHome = findVespaHome("/opt/vespa");
vespaUser = findVespaUser("vespa");
vespaHost = findVespaHostname("localhost");
vespaWebServicePort = findWebServicePort(8080);
vespaPortBase = findVespaPortBase(19000);
vespaPortConfigServerRpc = findConfigServerPort(vespaPortBase + 70);
vespaPortConfigServerHttp = vespaPortConfigServerRpc + 1;
vespaPortConfigProxyRpc = findConfigProxyPort(vespaPortBase + 90);
} | class Defaults {
private static final Logger log = Logger.getLogger(Defaults.class.getName());
private static final Defaults defaults = new Defaults();
private final String vespaHome;
private final String vespaUser;
private final String vespaHost;
private final int vespaWebServicePort;
private final int vespaPortBase;
private final int vespaPortConfigServerRpc;
private final int vespaPortConfigServerHttp;
private final int vespaPortConfigProxyRpc;
static private String findVespaHome() {
Optional<String> vespaHomeEnv = Optional.ofNullable(System.getenv("VESPA_HOME"));
if ( ! vespaHomeEnv.isPresent() || vespaHomeEnv.get().trim().isEmpty()) {
log.info("VESPA_HOME not set, using /opt/vespa");
return "/opt/vespa";
}
String vespaHome = vespaHomeEnv.get().trim();
if (vespaHome.endsWith("/")) {
int sz = vespaHome.length() - 1;
vespaHome = vespaHome.substring(0, sz);
}
return vespaHome;
}
static private String findVespaHostname() {
Optional<String> vespaHostEnv = Optional.ofNullable(System.getenv("VESPA_HOSTNAME"));
if (vespaHostEnv.isPresent() && ! vespaHostEnv.get().trim().isEmpty()) {
return vespaHostEnv.get().trim();
}
return "localhost";
}
/**
 * Returns the Vespa user name from VESPA_USER, defaulting to "vespa".
 * A blank value is treated the same as an unset variable, for consistency
 * with findVespaHome() and findVespaHostname() above.
 */
static private String findVespaUser() {
    Optional<String> vespaUserEnv = Optional.ofNullable(System.getenv("VESPA_USER"));
    if ( ! vespaUserEnv.isPresent() || vespaUserEnv.get().trim().isEmpty()) {
        log.fine("VESPA_USER not set, using vespa");
        return "vespa";
    }
    return vespaUserEnv.get().trim();
}
/**
 * Reads a port number from the environment variable named varName.
 *
 * @param varName name of the environment variable to read
 * @param defaultPort port to fall back to when the variable is unset or blank
 * @return the configured port, or defaultPort
 * @throws IllegalArgumentException if the variable is set to something that is not an integer
 */
static private int findPort(String varName, int defaultPort) {
    Optional<String> port = Optional.ofNullable(System.getenv(varName));
    if ( ! port.isPresent() || port.get().trim().isEmpty()) {
        log.fine("" + varName + " not set, using " + defaultPort);
        return defaultPort;
    }
    // Trim before parsing: the blank check above already tolerates surrounding
    // whitespace, so a value like "19000 " should parse rather than throw.
    String value = port.get().trim();
    try {
        return Integer.parseInt(value);
    } catch (NumberFormatException e) {
        // Name the offending variable and keep the cause for diagnosis.
        throw new IllegalArgumentException(varName + " must be an integer, was '" + value + "'", e);
    }
}
static private int findVespaPortBase(int defaultPort) {
return findPort("VESPA_PORT_BASE", defaultPort);
}
static private int findConfigServerPort(int defaultPort) {
return findPort("port_configserver_rpc", defaultPort);
}
static private int findConfigProxyPort(int defaultPort) {
return findPort("port_configproxy_rpc", defaultPort);
}
static private int findWebServicePort(int defaultPort) {
return findPort("VESPA_WEB_SERVICE_PORT", defaultPort);
}
/**
* Get the username to own directories, files and processes
* @return the vespa user name
**/
public String vespaUser() { return vespaUser; }
/**
* Compute the host name that identifies myself.
* Detection of the hostname is now done before starting any Vespa
* programs and provided in the environment variable VESPA_HOSTNAME;
* if that variable isn't set a default of "localhost" is always returned.
* @return the vespa host name
**/
public String vespaHostname() { return vespaHost; }
/**
* Returns the path to the root under which Vespa should read and write files.
* Will not end with a "/".
* @return the vespa home directory
*/
public String vespaHome() { return vespaHome; }
/**
* Returns an absolute path produced by prepending vespaHome to the argument if it is relative.
* If the path starts by "/" (absolute) or "./" (explicitly relative - useful for tests),
* it is returned as-is.
*
* @param path the path to prepend vespaHome to unless it is absolute
* @return the given path string with the root path given from
* vespaHome() prepended, unless the given path is absolute, in which
* case it is be returned as-is
*/
public String underVespaHome(String path) {
    // Absolute ("/...") and explicitly relative ("./...") paths pass through untouched.
    boolean passThrough = path.startsWith("/") || path.startsWith("./");
    return passThrough ? path : vespaHome() + "/" + path;
}
/**
* Returns the port number where Vespa web services should be available.
*
* @return the vespa webservice port
*/
public int vespaWebServicePort() { return vespaWebServicePort; }
/**
* Returns the base for port numbers where the Vespa services should listen.
*
* @return the vespa base number for ports
*/
public int vespaPortBase() { return vespaPortBase; }
/** @return port number used by cloud config server (for its RPC protocol) */
public int vespaConfigServerRpcPort() { return vespaPortConfigServerRpc; }
/** @return port number used by cloud config server (REST api on HTTP) */
public int vespaConfigServerHttpPort() { return vespaPortConfigServerHttp; }
/** @return port number used by config proxy server (RPC protocol) */
public int vespaConfigProxyRpcPort() { return vespaPortConfigProxyRpc; }
/** Returns the defaults of this runtime environment */
public static Defaults getDefaults() { return defaults; }
} | class Defaults {
private static final Logger log = Logger.getLogger(Defaults.class.getName());
private static final Defaults defaults = new Defaults();
private final String vespaHome;
private final String vespaUser;
private final String vespaHost;
private final int vespaWebServicePort;
private final int vespaPortBase;
private final int vespaPortConfigServerRpc;
private final int vespaPortConfigServerHttp;
private final int vespaPortConfigProxyRpc;
/** Resolves the Vespa home directory from VESPA_HOME, falling back to defHome. Never ends with "/". */
static private String findVespaHome(String defHome) {
    String env = System.getenv("VESPA_HOME");
    if (env == null || env.trim().isEmpty()) {
        log.info("VESPA_HOME not set, using " + defHome);
        return defHome;
    }
    String home = env.trim();
    // Strip a single trailing slash (only one, as before).
    return home.endsWith("/") ? home.substring(0, home.length() - 1) : home;
}
/** Returns the host name from VESPA_HOSTNAME, or defHost when the variable is unset or blank. */
static private String findVespaHostname(String defHost) {
    return Optional.ofNullable(System.getenv("VESPA_HOSTNAME"))
            .map(String::trim)
            .filter(host -> ! host.isEmpty())
            .orElse(defHost);
}
/**
 * Returns the Vespa user name from VESPA_USER, defaulting to defUser.
 * A blank value is treated the same as an unset variable, for consistency
 * with findVespaHome() and findVespaHostname() above.
 */
static private String findVespaUser(String defUser) {
    Optional<String> vespaUserEnv = Optional.ofNullable(System.getenv("VESPA_USER"));
    if ( ! vespaUserEnv.isPresent() || vespaUserEnv.get().trim().isEmpty()) {
        log.fine("VESPA_USER not set, using " + defUser);
        return defUser;
    }
    return vespaUserEnv.get().trim();
}
/**
 * Reads a port number from the environment variable named varName.
 *
 * @param varName name of the environment variable to read
 * @param defaultPort port to fall back to when the variable is unset or blank
 * @return the configured port, or defaultPort
 * @throws IllegalArgumentException if the variable is set to something that is not an integer
 */
static private int findPort(String varName, int defaultPort) {
    Optional<String> port = Optional.ofNullable(System.getenv(varName));
    if ( ! port.isPresent() || port.get().trim().isEmpty()) {
        log.fine("" + varName + " not set, using " + defaultPort);
        return defaultPort;
    }
    // Trim before parsing: the blank check above already tolerates surrounding
    // whitespace, so a value like "19000 " should parse rather than throw.
    String value = port.get().trim();
    try {
        return Integer.parseInt(value);
    } catch (NumberFormatException e) {
        // Name the offending variable and keep the cause for diagnosis.
        throw new IllegalArgumentException(varName + " must be an integer, was '" + value + "'", e);
    }
}
static private int findVespaPortBase(int defaultPort) {
return findPort("VESPA_PORT_BASE", defaultPort);
}
static private int findConfigServerPort(int defaultPort) {
return findPort("port_configserver_rpc", defaultPort);
}
static private int findConfigProxyPort(int defaultPort) {
return findPort("port_configproxy_rpc", defaultPort);
}
static private int findWebServicePort(int defaultPort) {
return findPort("VESPA_WEB_SERVICE_PORT", defaultPort);
}
/**
* Get the username to own directories, files and processes
* @return the vespa user name
**/
public String vespaUser() { return vespaUser; }
/**
* Compute the host name that identifies myself.
* Detection of the hostname is now done before starting any Vespa
* programs and provided in the environment variable VESPA_HOSTNAME;
* if that variable isn't set a default of "localhost" is always returned.
* @return the vespa host name
**/
public String vespaHostname() { return vespaHost; }
/**
* Returns the path to the root under which Vespa should read and write files.
* Will not end with a "/".
* @return the vespa home directory
*/
public String vespaHome() { return vespaHome; }
/**
* Returns an absolute path produced by prepending vespaHome to the argument if it is relative.
* If the path starts by "/" (absolute) or "./" (explicitly relative - useful for tests),
* it is returned as-is.
*
* @param path the path to prepend vespaHome to unless it is absolute
* @return the given path string with the root path given from
* vespaHome() prepended, unless the given path is absolute, in which
* case it is be returned as-is
*/
public String underVespaHome(String path) {
    if (path.startsWith("/") || path.startsWith("./")) {
        return path;  // already absolute, or explicitly relative (useful for tests)
    }
    return vespaHome() + "/" + path;
}
/**
* Returns the port number where Vespa web services should be available.
*
* @return the vespa webservice port
*/
public int vespaWebServicePort() { return vespaWebServicePort; }
/**
* Returns the base for port numbers where the Vespa services should listen.
*
* @return the vespa base number for ports
*/
public int vespaPortBase() { return vespaPortBase; }
/** @return port number used by cloud config server (for its RPC protocol) */
public int vespaConfigServerRpcPort() { return vespaPortConfigServerRpc; }
/** @return port number used by cloud config server (REST api on HTTP) */
public int vespaConfigServerHttpPort() { return vespaPortConfigServerHttp; }
/** @return port number used by config proxy server (RPC protocol) */
public int vespaConfigProxyRpcPort() { return vespaPortConfigProxyRpc; }
/** Returns the defaults of this runtime environment */
public static Defaults getDefaults() { return defaults; }
} |
What is the default value of minTimeBetweenRedeployments ? | protected boolean waitInitially() {
return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments));
} | return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments)); | protected boolean waitInitially() {
return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments));
} | class PeriodicApplicationMaintainer extends ApplicationMaintainer {
private final Duration minTimeBetweenRedeployments;
private final Instant start;
/**
 * Creates the maintainer and records the construction time.
 *
 * @param minTimeBetweenRedeployments minimum time that must pass between two
 *        redeployments of the same application (see canDeployNow)
 */
public PeriodicApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository,
Duration interval, Duration minTimeBetweenRedeployments, JobControl jobControl) {
super(deployer, nodeRepository, interval, jobControl);
this.minTimeBetweenRedeployments = minTimeBetweenRedeployments;
// Construction time — presumably consumed by waitInitially() (not visible in this
// class body) to hold off redeployments right after start-up; TODO confirm.
this.start = Instant.now();
}
@Override
protected boolean canDeployNow(ApplicationId application) {
// Deployable once at least minTimeBetweenRedeployments has passed since the last
// deployment; never-deployed applications report Instant.EPOCH (see
// getLastDeployTime) and are therefore always deployable.
return getLastDeployTime(application).isBefore(nodeRepository().clock().instant().minus(minTimeBetweenRedeployments));
}
@Override
protected Set<ApplicationId> applicationsNeedingMaintenance() {
    // During the initial wait period nothing is eligible yet.
    if (waitInitially()) return new HashSet<>();
    // Pick at most one application: the one deployed least recently.
    Optional<ApplicationId> candidate = nodesNeedingMaintenance().stream()
            .map(node -> node.allocation().get().owner())  // assumes active nodes always carry an allocation — TODO confirm
            .filter(this::shouldBeDeployedOnThisServer)
            .min(Comparator.comparing(this::getLastDeployTime));
    candidate.ifPresent(id -> log.log(LogLevel.INFO, id + " will be deployed, last deploy time " +
            getLastDeployTime(id)));
    // Mutable set, as in the original implementation.
    Set<ApplicationId> result = new HashSet<>();
    candidate.ifPresent(result::add);
    return result;
}
// Last deploy time known to the deployer, or Instant.EPOCH when the application
// has never been deployed (making it sort first / always deployable).
private Instant getLastDeployTime(ApplicationId application) {
return deployer().lastDeployTime(application).orElse(Instant.EPOCH);
}
// An application is considered handled by this server when the local deployer
// knows a last deploy time for it.
// NOTE(review): deploy-time presence is used as a proxy for ownership — confirm
// this holds across config servers.
protected boolean shouldBeDeployedOnThisServer(ApplicationId application) {
return deployer().lastDeployTime(application).isPresent();
}
@Override
protected List<Node> nodesNeedingMaintenance() {
// All active nodes are candidates; narrowing to a single application happens
// in applicationsNeedingMaintenance().
return nodeRepository().getNodes(Node.State.active);
}
} | class PeriodicApplicationMaintainer extends ApplicationMaintainer {
private final Duration minTimeBetweenRedeployments;
private final Instant start;
public PeriodicApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository,
Duration interval, Duration minTimeBetweenRedeployments, JobControl jobControl) {
super(deployer, nodeRepository, interval, jobControl);
this.minTimeBetweenRedeployments = minTimeBetweenRedeployments;
this.start = Instant.now();
}
@Override
protected boolean canDeployNow(ApplicationId application) {
return getLastDeployTime(application).isBefore(nodeRepository().clock().instant().minus(minTimeBetweenRedeployments));
}
@Override
protected Set<ApplicationId> applicationsNeedingMaintenance() {
if (waitInitially()) return new HashSet<>();
Optional<ApplicationId> app = (nodesNeedingMaintenance().stream()
.map(node -> node.allocation().get().owner())
.filter(this::shouldBeDeployedOnThisServer)
.min(Comparator.comparing(this::getLastDeployTime)));
app.ifPresent(applicationId -> log.log(LogLevel.INFO, applicationId + " will be deployed, last deploy time " +
getLastDeployTime(applicationId)));
return app.map(applicationId -> new HashSet<>(Collections.singletonList(applicationId))).orElseGet(HashSet::new);
}
private Instant getLastDeployTime(ApplicationId application) {
return deployer().lastDeployTime(application).orElse(Instant.EPOCH);
}
protected boolean shouldBeDeployedOnThisServer(ApplicationId application) {
return deployer().lastDeployTime(application).isPresent();
}
@Override
protected List<Node> nodesNeedingMaintenance() {
return nodeRepository().getNodes(Node.State.active);
}
} |
30 minutes | protected boolean waitInitially() {
return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments));
} | return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments)); | protected boolean waitInitially() {
return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments));
} | class PeriodicApplicationMaintainer extends ApplicationMaintainer {
private final Duration minTimeBetweenRedeployments;
private final Instant start;
public PeriodicApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository,
Duration interval, Duration minTimeBetweenRedeployments, JobControl jobControl) {
super(deployer, nodeRepository, interval, jobControl);
this.minTimeBetweenRedeployments = minTimeBetweenRedeployments;
this.start = Instant.now();
}
@Override
protected boolean canDeployNow(ApplicationId application) {
return getLastDeployTime(application).isBefore(nodeRepository().clock().instant().minus(minTimeBetweenRedeployments));
}
@Override
protected Set<ApplicationId> applicationsNeedingMaintenance() {
if (waitInitially()) return new HashSet<>();
Optional<ApplicationId> app = (nodesNeedingMaintenance().stream()
.map(node -> node.allocation().get().owner())
.filter(this::shouldBeDeployedOnThisServer)
.min(Comparator.comparing(this::getLastDeployTime)));
app.ifPresent(applicationId -> log.log(LogLevel.INFO, applicationId + " will be deployed, last deploy time " +
getLastDeployTime(applicationId)));
return app.map(applicationId -> new HashSet<>(Collections.singletonList(applicationId))).orElseGet(HashSet::new);
}
private Instant getLastDeployTime(ApplicationId application) {
return deployer().lastDeployTime(application).orElse(Instant.EPOCH);
}
protected boolean shouldBeDeployedOnThisServer(ApplicationId application) {
return deployer().lastDeployTime(application).isPresent();
}
@Override
protected List<Node> nodesNeedingMaintenance() {
return nodeRepository().getNodes(Node.State.active);
}
} | class PeriodicApplicationMaintainer extends ApplicationMaintainer {
private final Duration minTimeBetweenRedeployments;
private final Instant start;
public PeriodicApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository,
Duration interval, Duration minTimeBetweenRedeployments, JobControl jobControl) {
super(deployer, nodeRepository, interval, jobControl);
this.minTimeBetweenRedeployments = minTimeBetweenRedeployments;
this.start = Instant.now();
}
@Override
protected boolean canDeployNow(ApplicationId application) {
return getLastDeployTime(application).isBefore(nodeRepository().clock().instant().minus(minTimeBetweenRedeployments));
}
@Override
protected Set<ApplicationId> applicationsNeedingMaintenance() {
if (waitInitially()) return new HashSet<>();
Optional<ApplicationId> app = (nodesNeedingMaintenance().stream()
.map(node -> node.allocation().get().owner())
.filter(this::shouldBeDeployedOnThisServer)
.min(Comparator.comparing(this::getLastDeployTime)));
app.ifPresent(applicationId -> log.log(LogLevel.INFO, applicationId + " will be deployed, last deploy time " +
getLastDeployTime(applicationId)));
return app.map(applicationId -> new HashSet<>(Collections.singletonList(applicationId))).orElseGet(HashSet::new);
}
private Instant getLastDeployTime(ApplicationId application) {
return deployer().lastDeployTime(application).orElse(Instant.EPOCH);
}
protected boolean shouldBeDeployedOnThisServer(ApplicationId application) {
return deployer().lastDeployTime(application).isPresent();
}
@Override
protected List<Node> nodesNeedingMaintenance() {
return nodeRepository().getNodes(Node.State.active);
}
} |
It can't figure when it is safe ? Is it a bit long ? Perhaps 2 minutes is sufficient ? Will it cause issues for tests ? | protected boolean waitInitially() {
return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments));
} | return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments)); | protected boolean waitInitially() {
return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments));
} | class PeriodicApplicationMaintainer extends ApplicationMaintainer {
private final Duration minTimeBetweenRedeployments;
private final Instant start;
public PeriodicApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository,
Duration interval, Duration minTimeBetweenRedeployments, JobControl jobControl) {
super(deployer, nodeRepository, interval, jobControl);
this.minTimeBetweenRedeployments = minTimeBetweenRedeployments;
this.start = Instant.now();
}
@Override
protected boolean canDeployNow(ApplicationId application) {
return getLastDeployTime(application).isBefore(nodeRepository().clock().instant().minus(minTimeBetweenRedeployments));
}
@Override
protected Set<ApplicationId> applicationsNeedingMaintenance() {
if (waitInitially()) return new HashSet<>();
Optional<ApplicationId> app = (nodesNeedingMaintenance().stream()
.map(node -> node.allocation().get().owner())
.filter(this::shouldBeDeployedOnThisServer)
.min(Comparator.comparing(this::getLastDeployTime)));
app.ifPresent(applicationId -> log.log(LogLevel.INFO, applicationId + " will be deployed, last deploy time " +
getLastDeployTime(applicationId)));
return app.map(applicationId -> new HashSet<>(Collections.singletonList(applicationId))).orElseGet(HashSet::new);
}
private Instant getLastDeployTime(ApplicationId application) {
return deployer().lastDeployTime(application).orElse(Instant.EPOCH);
}
protected boolean shouldBeDeployedOnThisServer(ApplicationId application) {
return deployer().lastDeployTime(application).isPresent();
}
@Override
protected List<Node> nodesNeedingMaintenance() {
return nodeRepository().getNodes(Node.State.active);
}
} | class PeriodicApplicationMaintainer extends ApplicationMaintainer {
private final Duration minTimeBetweenRedeployments;
private final Instant start;
public PeriodicApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository,
Duration interval, Duration minTimeBetweenRedeployments, JobControl jobControl) {
super(deployer, nodeRepository, interval, jobControl);
this.minTimeBetweenRedeployments = minTimeBetweenRedeployments;
this.start = Instant.now();
}
@Override
protected boolean canDeployNow(ApplicationId application) {
return getLastDeployTime(application).isBefore(nodeRepository().clock().instant().minus(minTimeBetweenRedeployments));
}
@Override
protected Set<ApplicationId> applicationsNeedingMaintenance() {
if (waitInitially()) return new HashSet<>();
Optional<ApplicationId> app = (nodesNeedingMaintenance().stream()
.map(node -> node.allocation().get().owner())
.filter(this::shouldBeDeployedOnThisServer)
.min(Comparator.comparing(this::getLastDeployTime)));
app.ifPresent(applicationId -> log.log(LogLevel.INFO, applicationId + " will be deployed, last deploy time " +
getLastDeployTime(applicationId)));
return app.map(applicationId -> new HashSet<>(Collections.singletonList(applicationId))).orElseGet(HashSet::new);
}
private Instant getLastDeployTime(ApplicationId application) {
return deployer().lastDeployTime(application).orElse(Instant.EPOCH);
}
protected boolean shouldBeDeployedOnThisServer(ApplicationId application) {
return deployer().lastDeployTime(application).isPresent();
}
@Override
protected List<Node> nodesNeedingMaintenance() {
return nodeRepository().getNodes(Node.State.active);
}
} |
Discussed offline. I'll look into if the maintainer can ask if bootstrap is finished instead. | protected boolean waitInitially() {
return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments));
} | return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments)); | protected boolean waitInitially() {
return Instant.now().isBefore(start.plus(minTimeBetweenRedeployments));
} | class PeriodicApplicationMaintainer extends ApplicationMaintainer {
private final Duration minTimeBetweenRedeployments;
private final Instant start;
public PeriodicApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository,
Duration interval, Duration minTimeBetweenRedeployments, JobControl jobControl) {
super(deployer, nodeRepository, interval, jobControl);
this.minTimeBetweenRedeployments = minTimeBetweenRedeployments;
this.start = Instant.now();
}
@Override
protected boolean canDeployNow(ApplicationId application) {
return getLastDeployTime(application).isBefore(nodeRepository().clock().instant().minus(minTimeBetweenRedeployments));
}
@Override
protected Set<ApplicationId> applicationsNeedingMaintenance() {
if (waitInitially()) return new HashSet<>();
Optional<ApplicationId> app = (nodesNeedingMaintenance().stream()
.map(node -> node.allocation().get().owner())
.filter(this::shouldBeDeployedOnThisServer)
.min(Comparator.comparing(this::getLastDeployTime)));
app.ifPresent(applicationId -> log.log(LogLevel.INFO, applicationId + " will be deployed, last deploy time " +
getLastDeployTime(applicationId)));
return app.map(applicationId -> new HashSet<>(Collections.singletonList(applicationId))).orElseGet(HashSet::new);
}
private Instant getLastDeployTime(ApplicationId application) {
return deployer().lastDeployTime(application).orElse(Instant.EPOCH);
}
protected boolean shouldBeDeployedOnThisServer(ApplicationId application) {
return deployer().lastDeployTime(application).isPresent();
}
@Override
protected List<Node> nodesNeedingMaintenance() {
return nodeRepository().getNodes(Node.State.active);
}
} | class PeriodicApplicationMaintainer extends ApplicationMaintainer {
private final Duration minTimeBetweenRedeployments;
private final Instant start;
public PeriodicApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository,
Duration interval, Duration minTimeBetweenRedeployments, JobControl jobControl) {
super(deployer, nodeRepository, interval, jobControl);
this.minTimeBetweenRedeployments = minTimeBetweenRedeployments;
this.start = Instant.now();
}
@Override
protected boolean canDeployNow(ApplicationId application) {
return getLastDeployTime(application).isBefore(nodeRepository().clock().instant().minus(minTimeBetweenRedeployments));
}
@Override
protected Set<ApplicationId> applicationsNeedingMaintenance() {
if (waitInitially()) return new HashSet<>();
Optional<ApplicationId> app = (nodesNeedingMaintenance().stream()
.map(node -> node.allocation().get().owner())
.filter(this::shouldBeDeployedOnThisServer)
.min(Comparator.comparing(this::getLastDeployTime)));
app.ifPresent(applicationId -> log.log(LogLevel.INFO, applicationId + " will be deployed, last deploy time " +
getLastDeployTime(applicationId)));
return app.map(applicationId -> new HashSet<>(Collections.singletonList(applicationId))).orElseGet(HashSet::new);
}
private Instant getLastDeployTime(ApplicationId application) {
return deployer().lastDeployTime(application).orElse(Instant.EPOCH);
}
protected boolean shouldBeDeployedOnThisServer(ApplicationId application) {
return deployer().lastDeployTime(application).isPresent();
}
@Override
protected List<Node> nodesNeedingMaintenance() {
return nodeRepository().getNodes(Node.State.active);
}
} |
Output also the attempted type? | public DocumentUpdate prune(Document doc) {
if ( ! fieldPathUpdates.isEmpty()) return this;
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc + " must have same type as update, which is type " + documentType);
}
for (Iterator<FieldUpdate> iter = fieldUpdates.iterator(); iter.hasNext();) {
FieldUpdate update = iter.next();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && (currentValue.compareTo(last.getValue()) == 0)) {
iter.remove();
}
}
}
}
return this;
} | "Document " + doc + " must have same type as update, which is type " + documentType); | public DocumentUpdate prune(Document doc) {
verifyType(doc);
for (Iterator<FieldUpdate> iter = fieldUpdates.iterator(); iter.hasNext();) {
FieldUpdate update = iter.next();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && (currentValue.compareTo(last.getValue()) == 0)) {
iter.remove();
}
}
}
}
return this;
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private List<FieldUpdate> fieldUpdates;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new ArrayList<FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, List<FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
/**
* Applies this document update.
*
* @param doc the document to apply the update to
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate applyTo(Document doc) {
    if (!documentType.equals(doc.getDataType())) {
        // Include the document's actual type so a mismatch can be diagnosed
        // from the exception message alone.
        throw new IllegalArgumentException(
                "Document " + doc + " with type " + doc.getDataType() +
                " must have same type as update, which is type " + documentType);
    }
    // Apply plain field updates first, then field path updates, in insertion order.
    for (FieldUpdate fieldUpdate : fieldUpdates) {
        fieldUpdate.applyTo(doc);
    }
    for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
        fieldPathUpdate.applyTo(doc);
    }
    return this;
}
/**
* Will prune away any field update that will not modify any field in the document.
* @param doc
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
return fieldUpdates.set(index, upd);
}
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdate(field.getName());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
    // First update whose field carries the given name, or null when absent.
    return fieldUpdates.stream()
            .filter(update -> update.getField().getName().equals(fieldName))
            .findFirst()
            .orElse(null);
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller. The list may not be unmodifiable.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(List<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
this.fieldUpdates = fieldUpdates;
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
    String fieldName = update.getField().getName();
    if (!documentType.hasField(fieldName)) {
        throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" +
                                           fieldName + "'.");
    }
    FieldUpdate existing = getFieldUpdate(fieldName);
    if (existing == update) return this;  // this exact instance is already registered; nothing to do
    if (existing == null) {
        fieldUpdates.add(update);
    } else {
        existing.addAll(update);          // merge into the update already present for this field
    }
    return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.add(fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
// A null argument is a no-op rather than an error.
if (update == null) {
return;
}
// Refuse to merge updates addressed to a different document or document type.
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
// addFieldUpdate merges into any existing update for the same field.
for (FieldUpdate fieldUpd : update.fieldUpdates) {
addFieldUpdate(fieldUpd);
}
// Field path updates are appended as-is; no merging is attempted.
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate removeFieldUpdate(int index) {
return fieldUpdates.remove(index);
}
/**
 * Returns the document type of this document update.
 * (Returns the same field as {@code getDocumentType()}.)
 *
 * @return the document type of this document update
 */
public DocumentType getType() {
return documentType;
}
/** Serializes this update into the given buffer using the 4.2 serialization format (DocumentSerializerFactory.create42). */
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
/** Serializes this update through the given writer (visitor-style double dispatch: the writer formats this object). */
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
/**
 * Indicates whether the given object is a DocumentUpdate with the same document id, document type,
 * field updates, field path updates and create-if-non-existent flag as this.
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof DocumentUpdate)) return false;
    DocumentUpdate that = (DocumentUpdate) o;
    // Null-safe comparisons: docId and documentType may be null while deserializing (see reader constructor).
    if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
    if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
    if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
        return false;
    if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
    // Use the already-cast 'that' rather than re-casting 'o' (previous code cast o a second time).
    return getCreateIfNonExistent() == that.getCreateIfNonExistent();
}
/** Combines the same fields as equals(), except the create-if-non-existent flag. */
@Override
public int hashCode() {
    int hash = (docId == null) ? 0 : docId.hashCode();
    hash = 31 * hash + ((fieldUpdates == null) ? 0 : fieldUpdates.hashCode());
    hash = 31 * hash + ((fieldPathUpdates == null) ? 0 : fieldPathUpdates.hashCode());
    hash = 31 * hash + ((documentType == null) ? 0 : documentType.hashCode());
    return hash;
}
/** Returns a human-readable rendering of this update: id, create flag, field updates and any field path updates. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("update of document '").append(docId).append("': ");
    sb.append("create-if-non-existent=").append(createIfNonExistent.orElse(false)).append(": ");
    sb.append("[");
    Iterator<FieldUpdate> it = fieldUpdates.iterator();
    while (it.hasNext()) {
        sb.append(it.next());
        if (it.hasNext()) {
            sb.append(", ");
        }
    }
    sb.append("]");
    if (!fieldPathUpdates.isEmpty()) {
        sb.append(" [ ");
        for (FieldPathUpdate pathUpdate : fieldPathUpdates) {
            sb.append(pathUpdate).append(" ");
        }
        sb.append(" ]");
    }
    return sb.toString();
}
/** Returns an iterator over the field path updates of this (note: not over the field updates). */
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
 * Returns whether this document update contains no field updates and no field path updates.
 *
 * @return true if this update is empty
 */
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
 * Sets whether this update should create the document it updates if that document does not exist.
 * In this case an empty document is created before the update is applied.
 *
 * @since 5.17
 * @param value whether the document it updates should be created
 */
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
 * Gets whether this update should create the document it updates if that document does not exist.
 * Defaults to false when the flag has never been set explicitly.
 *
 * @since 5.17
 * @return whether the document it updates should be created
 */
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
/** Returns the create-if-non-existent flag as set, or an empty Optional if it was never set explicitly. */
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6; // class id used by the serialization format
private DocumentId docId; // id of the document this update applies to; null while deserializing
private List<FieldUpdate> fieldUpdates; // updates addressing whole fields
private List<FieldPathUpdate> fieldPathUpdates; // updates addressing paths within fields
private DocumentType documentType; // the document type this update is valid for; null while deserializing
private Optional<Boolean> createIfNonExistent = Optional.empty(); // empty means "not set"; read as false
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new ArrayList<FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, List<FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
/** Throws IllegalArgumentException unless the given document has exactly this update's document type. */
private void verifyType(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc.getId() + " with type " + doc.getDataType() + " must have same type as update, which is type " + documentType);
}
}
/**
 * Applies this document update.
 *
 * @param doc the document to apply the update to
 * @return a reference to itself
 * @throws IllegalArgumentException if the document does not have the same document type as this update
 */
public DocumentUpdate applyTo(Document doc) {
    verifyType(doc);
    // Field updates are applied first, then field path updates - same order as they were added.
    fieldUpdates.forEach(update -> update.applyTo(doc));
    fieldPathUpdates.forEach(update -> update.applyTo(doc));
    return this;
}
/**
* Prune away any field update that will not modify any field in the document.
* @param doc document to check against
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
return fieldUpdates.set(index, upd);
}
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdate(field.getName());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
 * Returns the update for a field name.
 *
 * @param fieldName the field name to return the update of
 * @return the update for the field, or null if that field has no update in this
 */
public FieldUpdate getFieldUpdate(String fieldName) {
    return fieldUpdates.stream()
                       .filter(update -> update.getField().getName().equals(fieldName))
                       .findFirst()
                       .orElse(null);
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller. The list may not be unmodifiable.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(List<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
this.fieldUpdates = fieldUpdates;
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
 * Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
 * for the named field, the content of the given FieldUpdate is added to the existing one.
 *
 * @param update the FieldUpdate to add to this DocumentUpdate
 * @return this, to allow chaining
 * @throws IllegalArgumentException if the {@link DocumentType} of this DocumentUpdate does not have a corresponding
 *                                  field
 */
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
    String fieldName = update.getField().getName();
    if (!documentType.hasField(fieldName)) {
        throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" +
                                           fieldName + "'.");
    }
    FieldUpdate existing = getFieldUpdate(fieldName);
    if (existing == update) return this; // the exact same instance is already registered; nothing to do
    if (existing == null) {
        fieldUpdates.add(update);
    } else {
        existing.addAll(update); // merge into the update already present for this field
    }
    return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.add(fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate removeFieldUpdate(int index) {
return fieldUpdates.remove(index);
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof DocumentUpdate)) return false;
DocumentUpdate that = (DocumentUpdate) o;
if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
return false;
if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
if (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;
return true;
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder string = new StringBuilder();
string.append("update of document '");
string.append(docId);
string.append("': ");
string.append("create-if-non-existent=");
string.append(createIfNonExistent.orElse(false));
string.append(": ");
string.append("[");
for (Iterator<FieldUpdate> i = fieldUpdates.iterator(); i.hasNext();) {
FieldUpdate fieldUpdate = i.next();
string.append(fieldUpdate);
if (i.hasNext()) {
string.append(", ");
}
}
string.append("]");
if (fieldPathUpdates.size() > 0) {
string.append(" [ ");
for (FieldPathUpdate up : fieldPathUpdates) {
string.append(up.toString() + " ");
}
string.append(" ]");
}
return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} |
Since this only prunes `FieldUpdates`, do we need to look at field path updates at all? E.g. if an update contains 2 field updates and 1 field path update, is there any harm in potentially pruning away both field updates and leaving the single field path update? Maybe there are some consistency issues that this avoids when this is used as part of an update-proxying docproc. If we keep this special case, would be nice to have a test for it. | public DocumentUpdate prune(Document doc) {
if ( ! fieldPathUpdates.isEmpty()) return this;
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc + " must have same type as update, which is type " + documentType);
}
for (Iterator<FieldUpdate> iter = fieldUpdates.iterator(); iter.hasNext();) {
FieldUpdate update = iter.next();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && (currentValue.compareTo(last.getValue()) == 0)) {
iter.remove();
}
}
}
}
return this;
} | if ( ! fieldPathUpdates.isEmpty()) return this; | public DocumentUpdate prune(Document doc) {
verifyType(doc);
for (Iterator<FieldUpdate> iter = fieldUpdates.iterator(); iter.hasNext();) {
FieldUpdate update = iter.next();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && (currentValue.compareTo(last.getValue()) == 0)) {
iter.remove();
}
}
}
}
return this;
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private List<FieldUpdate> fieldUpdates;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new ArrayList<FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, List<FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
/**
 * Applies this document update.
 *
 * @param doc the document to apply the update to
 * @return a reference to itself
 * @throws IllegalArgumentException if the document does not have the same document type as this update
 */
public DocumentUpdate applyTo(Document doc) {
// Reject documents of a different type before mutating anything.
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc + " must have same type as update, which is type " + documentType);
}
// Field updates are applied first, then field path updates - same order as they were added.
for (FieldUpdate fieldUpdate : fieldUpdates) {
fieldUpdate.applyTo(doc);
}
for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
fieldPathUpdate.applyTo(doc);
}
return this;
}
/**
 * Prunes away any field update that would not modify any field in the document.
 * @param doc the document to check the field updates against
 * @return a reference to itself
 * @throws IllegalArgumentException if the document does not have the same document type as this update
 */
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
return fieldUpdates.set(index, upd);
}
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdate(field.getName());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
for (FieldUpdate fieldUpdate : fieldUpdates) {
if (fieldUpdate.getField().getName().equals(fieldName)) {
return fieldUpdate;
}
}
return null;
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller. The list may not be unmodifiable.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(List<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
this.fieldUpdates = fieldUpdates;
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
String fieldName = update.getField().getName();
if (!documentType.hasField(fieldName)) {
throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" +
fieldName + "'.");
}
FieldUpdate prevUpdate = getFieldUpdate(fieldName);
if (prevUpdate != update) {
if (prevUpdate != null) {
prevUpdate.addAll(update);
} else {
fieldUpdates.add(update);
}
}
return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.add(fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate removeFieldUpdate(int index) {
return fieldUpdates.remove(index);
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
/**
 * Indicates whether the given object is a DocumentUpdate with the same document id, document type,
 * field updates, field path updates and create-if-non-existent flag as this.
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof DocumentUpdate)) return false;
    DocumentUpdate that = (DocumentUpdate) o;
    // Null-safe comparisons: docId and documentType may be null while deserializing (see reader constructor).
    if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
    if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
    if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
        return false;
    if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
    // Use the already-cast 'that' rather than re-casting 'o' (previous code cast o a second time).
    return getCreateIfNonExistent() == that.getCreateIfNonExistent();
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder string = new StringBuilder();
string.append("update of document '");
string.append(docId);
string.append("': ");
string.append("create-if-non-existent=");
string.append(createIfNonExistent.orElse(false));
string.append(": ");
string.append("[");
for (Iterator<FieldUpdate> i = fieldUpdates.iterator(); i.hasNext();) {
FieldUpdate fieldUpdate = i.next();
string.append(fieldUpdate);
if (i.hasNext()) {
string.append(", ");
}
}
string.append("]");
if (fieldPathUpdates.size() > 0) {
string.append(" [ ");
for (FieldPathUpdate up : fieldPathUpdates) {
string.append(up.toString() + " ");
}
string.append(" ]");
}
return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private List<FieldUpdate> fieldUpdates;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new ArrayList<FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, List<FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
private void verifyType(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc.getId() + " with type " + doc.getDataType() + " must have same type as update, which is type " + documentType);
}
}
/**
* Applies this document update.
*
* @param doc the document to apply the update to
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate applyTo(Document doc) {
verifyType(doc);
for (FieldUpdate fieldUpdate : fieldUpdates) {
fieldUpdate.applyTo(doc);
}
for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
fieldPathUpdate.applyTo(doc);
}
return this;
}
/**
* Prune away any field update that will not modify any field in the document.
* @param doc document to check against
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
return fieldUpdates.set(index, upd);
}
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdate(field.getName());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
for (FieldUpdate fieldUpdate : fieldUpdates) {
if (fieldUpdate.getField().getName().equals(fieldName)) {
return fieldUpdate;
}
}
return null;
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller. The list may not be unmodifiable.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(List<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
this.fieldUpdates = fieldUpdates;
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
String fieldName = update.getField().getName();
if (!documentType.hasField(fieldName)) {
throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" +
fieldName + "'.");
}
FieldUpdate prevUpdate = getFieldUpdate(fieldName);
if (prevUpdate != update) {
if (prevUpdate != null) {
prevUpdate.addAll(update);
} else {
fieldUpdates.add(update);
}
}
return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.add(fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate removeFieldUpdate(int index) {
return fieldUpdates.remove(index);
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof DocumentUpdate)) return false;
DocumentUpdate that = (DocumentUpdate) o;
if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
return false;
if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
if (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;
return true;
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder string = new StringBuilder();
string.append("update of document '");
string.append(docId);
string.append("': ");
string.append("create-if-non-existent=");
string.append(createIfNonExistent.orElse(false));
string.append(": ");
string.append("[");
for (Iterator<FieldUpdate> i = fieldUpdates.iterator(); i.hasNext();) {
FieldUpdate fieldUpdate = i.next();
string.append(fieldUpdate);
if (i.hasNext()) {
string.append(", ");
}
}
string.append("]");
if (fieldPathUpdates.size() > 0) {
string.append(" [ ");
for (FieldPathUpdate up : fieldPathUpdates) {
string.append(up.toString() + " ");
}
string.append(" ]");
}
return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} |
Fixed and deduplicated. | public DocumentUpdate prune(Document doc) {
if ( ! fieldPathUpdates.isEmpty()) return this;
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc + " must have same type as update, which is type " + documentType);
}
for (Iterator<FieldUpdate> iter = fieldUpdates.iterator(); iter.hasNext();) {
FieldUpdate update = iter.next();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && (currentValue.compareTo(last.getValue()) == 0)) {
iter.remove();
}
}
}
}
return this;
} | "Document " + doc + " must have same type as update, which is type " + documentType); | public DocumentUpdate prune(Document doc) {
verifyType(doc);
for (Iterator<FieldUpdate> iter = fieldUpdates.iterator(); iter.hasNext();) {
FieldUpdate update = iter.next();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && (currentValue.compareTo(last.getValue()) == 0)) {
iter.remove();
}
}
}
}
return this;
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private List<FieldUpdate> fieldUpdates;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new ArrayList<FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, List<FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
/**
* Applies this document update.
*
* @param doc the document to apply the update to
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate applyTo(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc + " must have same type as update, which is type " + documentType);
}
for (FieldUpdate fieldUpdate : fieldUpdates) {
fieldUpdate.applyTo(doc);
}
for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
fieldPathUpdate.applyTo(doc);
}
return this;
}
/**
* Will prune away any field update that will not modify any field in the document.
* @param doc
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
return fieldUpdates.set(index, upd);
}
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdate(field.getName());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
for (FieldUpdate fieldUpdate : fieldUpdates) {
if (fieldUpdate.getField().getName().equals(fieldName)) {
return fieldUpdate;
}
}
return null;
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller. The list may not be unmodifiable.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(List<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
this.fieldUpdates = fieldUpdates;
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
String fieldName = update.getField().getName();
if (!documentType.hasField(fieldName)) {
throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" +
fieldName + "'.");
}
FieldUpdate prevUpdate = getFieldUpdate(fieldName);
if (prevUpdate != update) {
if (prevUpdate != null) {
prevUpdate.addAll(update);
} else {
fieldUpdates.add(update);
}
}
return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.add(fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate removeFieldUpdate(int index) {
return fieldUpdates.remove(index);
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof DocumentUpdate)) return false;
DocumentUpdate that = (DocumentUpdate) o;
if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
return false;
if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
if (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;
return true;
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder string = new StringBuilder();
string.append("update of document '");
string.append(docId);
string.append("': ");
string.append("create-if-non-existent=");
string.append(createIfNonExistent.orElse(false));
string.append(": ");
string.append("[");
for (Iterator<FieldUpdate> i = fieldUpdates.iterator(); i.hasNext();) {
FieldUpdate fieldUpdate = i.next();
string.append(fieldUpdate);
if (i.hasNext()) {
string.append(", ");
}
}
string.append("]");
if (fieldPathUpdates.size() > 0) {
string.append(" [ ");
for (FieldPathUpdate up : fieldPathUpdates) {
string.append(up.toString() + " ");
}
string.append(" ]");
}
return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private List<FieldUpdate> fieldUpdates;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new ArrayList<FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, List<FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
private void verifyType(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc.getId() + " with type " + doc.getDataType() + " must have same type as update, which is type " + documentType);
}
}
/**
* Applies this document update.
*
* @param doc the document to apply the update to
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate applyTo(Document doc) {
verifyType(doc);
for (FieldUpdate fieldUpdate : fieldUpdates) {
fieldUpdate.applyTo(doc);
}
for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
fieldPathUpdate.applyTo(doc);
}
return this;
}
/**
* Prune away any field update that will not modify any field in the document.
* @param doc document to check against
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
return fieldUpdates.set(index, upd);
}
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdate(field.getName());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
for (FieldUpdate fieldUpdate : fieldUpdates) {
if (fieldUpdate.getField().getName().equals(fieldName)) {
return fieldUpdate;
}
}
return null;
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller. The list may not be unmodifiable.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(List<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
this.fieldUpdates = fieldUpdates;
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
String fieldName = update.getField().getName();
if (!documentType.hasField(fieldName)) {
throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" +
fieldName + "'.");
}
FieldUpdate prevUpdate = getFieldUpdate(fieldName);
if (prevUpdate != update) {
if (prevUpdate != null) {
prevUpdate.addAll(update);
} else {
fieldUpdates.add(update);
}
}
return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.add(fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate removeFieldUpdate(int index) {
return fieldUpdates.remove(index);
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof DocumentUpdate)) return false;
DocumentUpdate that = (DocumentUpdate) o;
if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
return false;
if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
if (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;
return true;
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder string = new StringBuilder();
string.append("update of document '");
string.append(docId);
string.append("': ");
string.append("create-if-non-existent=");
string.append(createIfNonExistent.orElse(false));
string.append(": ");
string.append("[");
for (Iterator<FieldUpdate> i = fieldUpdates.iterator(); i.hasNext();) {
FieldUpdate fieldUpdate = i.next();
string.append(fieldUpdate);
if (i.hasNext()) {
string.append(", ");
}
}
string.append("]");
if (fieldPathUpdates.size() > 0) {
string.append(" [ ");
for (FieldPathUpdate up : fieldPathUpdates) {
string.append(up.toString() + " ");
}
string.append(" ]");
}
return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} |
You are right, removed restriction. | public DocumentUpdate prune(Document doc) {
if ( ! fieldPathUpdates.isEmpty()) return this;
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc + " must have same type as update, which is type " + documentType);
}
for (Iterator<FieldUpdate> iter = fieldUpdates.iterator(); iter.hasNext();) {
FieldUpdate update = iter.next();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && (currentValue.compareTo(last.getValue()) == 0)) {
iter.remove();
}
}
}
}
return this;
} | if ( ! fieldPathUpdates.isEmpty()) return this; | public DocumentUpdate prune(Document doc) {
verifyType(doc);
for (Iterator<FieldUpdate> iter = fieldUpdates.iterator(); iter.hasNext();) {
FieldUpdate update = iter.next();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && (currentValue.compareTo(last.getValue()) == 0)) {
iter.remove();
}
}
}
}
return this;
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private List<FieldUpdate> fieldUpdates;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new ArrayList<FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, List<FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
/**
* Applies this document update.
*
* @param doc the document to apply the update to
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate applyTo(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc + " must have same type as update, which is type " + documentType);
}
for (FieldUpdate fieldUpdate : fieldUpdates) {
fieldUpdate.applyTo(doc);
}
for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
fieldPathUpdate.applyTo(doc);
}
return this;
}
/**
* Will prune away any field update that will not modify any field in the document.
* @param doc
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
return fieldUpdates.set(index, upd);
}
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdate(field.getName());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
for (FieldUpdate fieldUpdate : fieldUpdates) {
if (fieldUpdate.getField().getName().equals(fieldName)) {
return fieldUpdate;
}
}
return null;
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller. The list may not be unmodifiable.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(List<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
this.fieldUpdates = fieldUpdates;
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
String fieldName = update.getField().getName();
if (!documentType.hasField(fieldName)) {
throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" +
fieldName + "'.");
}
FieldUpdate prevUpdate = getFieldUpdate(fieldName);
if (prevUpdate != update) {
if (prevUpdate != null) {
prevUpdate.addAll(update);
} else {
fieldUpdates.add(update);
}
}
return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.add(fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate removeFieldUpdate(int index) {
return fieldUpdates.remove(index);
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof DocumentUpdate)) return false;
DocumentUpdate that = (DocumentUpdate) o;
if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
return false;
if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
if (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;
return true;
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder string = new StringBuilder();
string.append("update of document '");
string.append(docId);
string.append("': ");
string.append("create-if-non-existent=");
string.append(createIfNonExistent.orElse(false));
string.append(": ");
string.append("[");
for (Iterator<FieldUpdate> i = fieldUpdates.iterator(); i.hasNext();) {
FieldUpdate fieldUpdate = i.next();
string.append(fieldUpdate);
if (i.hasNext()) {
string.append(", ");
}
}
string.append("]");
if (fieldPathUpdates.size() > 0) {
string.append(" [ ");
for (FieldPathUpdate up : fieldPathUpdates) {
string.append(up.toString() + " ");
}
string.append(" ]");
}
return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private List<FieldUpdate> fieldUpdates;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new ArrayList<FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, List<FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
private void verifyType(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc.getId() + " with type " + doc.getDataType() + " must have same type as update, which is type " + documentType);
}
}
/**
* Applies this document update.
*
* @param doc the document to apply the update to
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate applyTo(Document doc) {
verifyType(doc);
for (FieldUpdate fieldUpdate : fieldUpdates) {
fieldUpdate.applyTo(doc);
}
for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
fieldPathUpdate.applyTo(doc);
}
return this;
}
/**
* Prune away any field update that will not modify any field in the document.
* @param doc document to check against
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
return fieldUpdates.set(index, upd);
}
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdate(field.getName());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
for (FieldUpdate fieldUpdate : fieldUpdates) {
if (fieldUpdate.getField().getName().equals(fieldName)) {
return fieldUpdate;
}
}
return null;
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller. The list may not be unmodifiable.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(List<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
this.fieldUpdates = fieldUpdates;
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
String fieldName = update.getField().getName();
if (!documentType.hasField(fieldName)) {
throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" +
fieldName + "'.");
}
FieldUpdate prevUpdate = getFieldUpdate(fieldName);
if (prevUpdate != update) {
if (prevUpdate != null) {
prevUpdate.addAll(update);
} else {
fieldUpdates.add(update);
}
}
return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.add(fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
public FieldUpdate removeFieldUpdate(int index) {
return fieldUpdates.remove(index);
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof DocumentUpdate)) return false;
DocumentUpdate that = (DocumentUpdate) o;
if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
return false;
if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
if (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;
return true;
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder string = new StringBuilder();
string.append("update of document '");
string.append(docId);
string.append("': ");
string.append("create-if-non-existent=");
string.append(createIfNonExistent.orElse(false));
string.append(": ");
string.append("[");
for (Iterator<FieldUpdate> i = fieldUpdates.iterator(); i.hasNext();) {
FieldUpdate fieldUpdate = i.next();
string.append(fieldUpdate);
if (i.hasNext()) {
string.append(", ");
}
}
string.append("]");
if (fieldPathUpdates.size() > 0) {
string.append(" [ ");
for (FieldPathUpdate up : fieldPathUpdates) {
string.append(up.toString() + " ");
}
string.append(" ]");
}
return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} |
Unnecessary code duplication | public SimpleLinguistics(SimpleLinguisticsConfig config) {
CharacterClasses characterClasses = new CharacterClasses();
this.normalizer = new SimpleNormalizer();
this.transformer = new SimpleTransformer();
this.detector = new SimpleDetector(config.detector());
this.characterClasses = new CharacterClasses();
this.gramSplitter = new GramSplitter(characterClasses);
} | CharacterClasses characterClasses = new CharacterClasses(); | public SimpleLinguistics(SimpleLinguisticsConfig config) {
CharacterClasses characterClasses = new CharacterClasses();
this.normalizer = new SimpleNormalizer();
this.transformer = new SimpleTransformer();
this.detector = new SimpleDetector(config.detector());
this.characterClasses = new CharacterClasses();
this.gramSplitter = new GramSplitter(characterClasses);
} | class SimpleLinguistics implements Linguistics {
private final Normalizer normalizer;
private final Transformer transformer;
private final Detector detector;
private final CharacterClasses characterClasses;
private final GramSplitter gramSplitter;
@Inject
public SimpleLinguistics() {
CharacterClasses characterClasses = new CharacterClasses();
this.normalizer = new SimpleNormalizer();
this.transformer = new SimpleTransformer();
this.detector = new SimpleDetector();
this.characterClasses = new CharacterClasses();
this.gramSplitter = new GramSplitter(characterClasses);
}
@Override
public Stemmer getStemmer() { return new StemmerImpl(getTokenizer()); }
@Override
public Tokenizer getTokenizer() { return new SimpleTokenizer(normalizer, transformer); }
@Override
public Normalizer getNormalizer() { return normalizer; }
@Override
public Transformer getTransformer() { return transformer; }
@Override
public Segmenter getSegmenter() { return new SegmenterImpl(getTokenizer()); }
@Override
public Detector getDetector() { return detector; }
@Override
public GramSplitter getGramSplitter() { return gramSplitter; }
@Override
public CharacterClasses getCharacterClasses() { return characterClasses; }
@Override
public Tuple2<String, Version> getVersion(Component component) {
return new Tuple2<>("yahoo", new Version(1, 0));
}
} | class SimpleLinguistics implements Linguistics {
private final Normalizer normalizer;
private final Transformer transformer;
private final Detector detector;
private final CharacterClasses characterClasses;
private final GramSplitter gramSplitter;
@Inject
public SimpleLinguistics() {
CharacterClasses characterClasses = new CharacterClasses();
this.normalizer = new SimpleNormalizer();
this.transformer = new SimpleTransformer();
this.detector = new SimpleDetector();
this.characterClasses = new CharacterClasses();
this.gramSplitter = new GramSplitter(characterClasses);
}
@Override
public Stemmer getStemmer() { return new StemmerImpl(getTokenizer()); }
@Override
public Tokenizer getTokenizer() { return new SimpleTokenizer(normalizer, transformer); }
@Override
public Normalizer getNormalizer() { return normalizer; }
@Override
public Transformer getTransformer() { return transformer; }
@Override
public Segmenter getSegmenter() { return new SegmenterImpl(getTokenizer()); }
@Override
public Detector getDetector() { return detector; }
@Override
public GramSplitter getGramSplitter() { return gramSplitter; }
@Override
public CharacterClasses getCharacterClasses() { return characterClasses; }
@Override
public Tuple2<String, Version> getVersion(Component component) {
return new Tuple2<>("yahoo", new Version(1, 0));
}
} |
The `GramSplitter` takes a `CharacterClasses` instance, and I only want a single instance of the latter type. That's why there is code duplication, otherwise I had to resolve to static factory methods. | public SimpleLinguistics(SimpleLinguisticsConfig config) {
CharacterClasses characterClasses = new CharacterClasses();
this.normalizer = new SimpleNormalizer();
this.transformer = new SimpleTransformer();
this.detector = new SimpleDetector(config.detector());
this.characterClasses = new CharacterClasses();
this.gramSplitter = new GramSplitter(characterClasses);
} | CharacterClasses characterClasses = new CharacterClasses(); | public SimpleLinguistics(SimpleLinguisticsConfig config) {
CharacterClasses characterClasses = new CharacterClasses();
this.normalizer = new SimpleNormalizer();
this.transformer = new SimpleTransformer();
this.detector = new SimpleDetector(config.detector());
this.characterClasses = new CharacterClasses();
this.gramSplitter = new GramSplitter(characterClasses);
} | class SimpleLinguistics implements Linguistics {
private final Normalizer normalizer;
private final Transformer transformer;
private final Detector detector;
private final CharacterClasses characterClasses;
private final GramSplitter gramSplitter;
@Inject
public SimpleLinguistics() {
CharacterClasses characterClasses = new CharacterClasses();
this.normalizer = new SimpleNormalizer();
this.transformer = new SimpleTransformer();
this.detector = new SimpleDetector();
this.characterClasses = new CharacterClasses();
this.gramSplitter = new GramSplitter(characterClasses);
}
@Override
public Stemmer getStemmer() { return new StemmerImpl(getTokenizer()); }
@Override
public Tokenizer getTokenizer() { return new SimpleTokenizer(normalizer, transformer); }
@Override
public Normalizer getNormalizer() { return normalizer; }
@Override
public Transformer getTransformer() { return transformer; }
@Override
public Segmenter getSegmenter() { return new SegmenterImpl(getTokenizer()); }
@Override
public Detector getDetector() { return detector; }
@Override
public GramSplitter getGramSplitter() { return gramSplitter; }
@Override
public CharacterClasses getCharacterClasses() { return characterClasses; }
@Override
public Tuple2<String, Version> getVersion(Component component) {
return new Tuple2<>("yahoo", new Version(1, 0));
}
} | class SimpleLinguistics implements Linguistics {
private final Normalizer normalizer;
private final Transformer transformer;
private final Detector detector;
private final CharacterClasses characterClasses;
private final GramSplitter gramSplitter;
@Inject
public SimpleLinguistics() {
CharacterClasses characterClasses = new CharacterClasses();
this.normalizer = new SimpleNormalizer();
this.transformer = new SimpleTransformer();
this.detector = new SimpleDetector();
this.characterClasses = new CharacterClasses();
this.gramSplitter = new GramSplitter(characterClasses);
}
@Override
public Stemmer getStemmer() { return new StemmerImpl(getTokenizer()); }
@Override
public Tokenizer getTokenizer() { return new SimpleTokenizer(normalizer, transformer); }
@Override
public Normalizer getNormalizer() { return normalizer; }
@Override
public Transformer getTransformer() { return transformer; }
@Override
public Segmenter getSegmenter() { return new SegmenterImpl(getTokenizer()); }
@Override
public Detector getDetector() { return detector; }
@Override
public GramSplitter getGramSplitter() { return gramSplitter; }
@Override
public CharacterClasses getCharacterClasses() { return characterClasses; }
@Override
public Tuple2<String, Version> getVersion(Component component) {
return new Tuple2<>("yahoo", new Version(1, 0));
}
} |
OPTIONS and HEAD are now reported as _unknown_. Why are they not separate enum values? | private HttpMethod getMethod(Request request) {
switch (request.getMethod()) {
case "GET":
return HttpMethod.GET;
case "PATCH":
return HttpMethod.PATCH;
case "POST":
return HttpMethod.POST;
case "PUT":
return HttpMethod.PUT;
case "DELETE":
return HttpMethod.REMOVE;
default:
return HttpMethod.UNKNOWN;
}
} | return HttpMethod.UNKNOWN; | private HttpMethod getMethod(Request request) {
switch (request.getMethod()) {
case "GET":
return HttpMethod.GET;
case "PATCH":
return HttpMethod.PATCH;
case "POST":
return HttpMethod.POST;
case "PUT":
return HttpMethod.PUT;
case "DELETE":
return HttpMethod.DELETE;
case "OPTIONS":
return HttpMethod.OPTIONS;
case "HEAD":
return HttpMethod.HEAD;
default:
return HttpMethod.OTHER;
}
} | class HttpResponseStatisticsCollector extends HandlerWrapper implements Graceful {
// Pending graceful-shutdown callback; null until shutdown() has been invoked.
private final AtomicReference<FutureCallback> shutdown = new AtomicReference<>();
// First axis of the statistics table. DELETE requests are recorded as REMOVE
// and unlisted methods as UNKNOWN (per the getMethod mapping in this class).
public static enum HttpMethod {
GET, PATCH, POST, PUT, REMOVE, UNKNOWN
}
// Second axis: one bucket per response status century (1xx..5xx).
private static final String[] HTTP_RESPONSE_GROUPS = { Metrics.RESPONSES_1XX, Metrics.RESPONSES_2XX, Metrics.RESPONSES_3XX,
Metrics.RESPONSES_4XX, Metrics.RESPONSES_5XX };
// Requests currently being processed; shutdown completes when this reaches 0.
private final AtomicLong inFlight = new AtomicLong();
// Counters indexed by [HttpMethod.ordinal()][status group].
private final LongAdder statistics[][];
/** Allocates the full method x status-group counter matrix up front. */
public HttpResponseStatisticsCollector() {
super();
statistics = new LongAdder[HttpMethod.values().length][];
for (int method = 0; method < statistics.length; method++) {
statistics[method] = new LongAdder[HTTP_RESPONSE_GROUPS.length];
for (int group = 0; group < HTTP_RESPONSE_GROUPS.length; group++) {
statistics[method][group] = new LongAdder();
}
}
}
// Observes suspended (async) requests so they are counted when they finally complete.
private final AsyncListener completionWatcher = new AsyncListener() {
@Override
public void onTimeout(AsyncEvent event) throws IOException {
}
@Override
public void onStartAsync(AsyncEvent event) throws IOException {
// Re-register on each new async cycle so onComplete still reaches us.
event.getAsyncContext().addListener(this);
}
@Override
public void onError(AsyncEvent event) throws IOException {
}
@Override
public void onComplete(AsyncEvent event) throws IOException {
HttpChannelState state = ((AsyncContextEvent) event).getHttpChannelState();
Request request = state.getBaseRequest();
observeEndOfRequest(request, null);
}
};
/**
 * Forwards to the wrapped handler while tracking the in-flight count and,
 * once the request finishes (synchronously here, or later via
 * completionWatcher for async requests), records its statistics.
 * While a shutdown is pending, unhandled requests are answered with 503.
 */
@Override
public void handle(String path, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
inFlight.incrementAndGet();
try {
Handler handler = getHandler();
if (handler != null && shutdown.get() == null && isStarted()) {
handler.handle(path, baseRequest, request, response);
} else if (!baseRequest.isHandled()) {
baseRequest.setHandled(true);
response.sendError(HttpStatus.SERVICE_UNAVAILABLE_503);
}
} finally {
HttpChannelState state = baseRequest.getHttpChannelState();
if (state.isSuspended()) {
if (state.isInitial()) {
// Async request: defer accounting to the completion listener.
state.addListener(completionWatcher);
}
} else if (state.isInitial()) {
observeEndOfRequest(baseRequest, response);
}
}
}
/**
 * Records the finished request in the statistics table; when a shutdown is
 * pending and this was the last in-flight request, completes the shutdown
 * callback. flushableResponse is null on the async-completion path.
 */
private void observeEndOfRequest(Request request, HttpServletResponse flushableResponse) throws IOException {
int group = groupIndex(request);
if (group >= 0) {
HttpMethod method = getMethod(request);
statistics[method.ordinal()][group].increment();
}
long live = inFlight.decrementAndGet();
FutureCallback shutdownCb = shutdown.get();
if (shutdownCb != null) {
if (flushableResponse != null) {
flushableResponse.flushBuffer();
}
if (live == 0) {
shutdownCb.succeeded();
}
}
}
/**
 * Returns the index into HTTP_RESPONSE_GROUPS for this request's response
 * status (0 = 1xx ... 4 = 5xx), or -1 when the status is outside the tracked
 * range. Requests that were never handled are counted in bucket 3 (4xx).
 */
private int groupIndex(Request request) {
    if (request.isHandled()) {
        int index = (request.getResponse().getStatus() / 100) - 1;
        // Fix: bound the index by the number of status groups, inclusively.
        // The original compared against statistics.length (the number of HTTP
        // methods, 6) with '>', so a status >= 600 produced index 5 and an
        // out-of-bounds access on the group axis in observeEndOfRequest.
        if (index < 0 || index >= HTTP_RESPONSE_GROUPS.length) {
            return -1;
        } else {
            return index;
        }
    } else {
        return 3; // unhandled -> presumably served as an error -> 4xx bucket (TODO confirm)
    }
}
/**
 * Snapshots and atomically resets every counter, returning them keyed first
 * by HTTP method name and then by response status group.
 */
public Map<String, Map<String, Long>> takeStatisticsByMethod() {
    Map<String, Map<String, Long>> byMethod = new HashMap<>();
    for (HttpMethod method : HttpMethod.values()) {
        LongAdder[] row = statistics[method.ordinal()];
        Map<String, Long> byGroup = new HashMap<>();
        for (int i = 0; i < row.length; i++) {
            byGroup.put(HTTP_RESPONSE_GROUPS[i], row[i].sumThenReset());
        }
        byMethod.put(method.toString(), byGroup);
    }
    return byMethod;
}
/** Clears any stale shutdown callback before the handler starts accepting traffic. */
@Override
protected void doStart() throws Exception {
shutdown.set(null);
super.doStart();
}
/**
 * Stops the handler; if a graceful shutdown was initiated but has not
 * finished, fails its callback with a TimeoutException.
 */
@Override
protected void doStop() throws Exception {
    super.doStop();
    FutureCallback shutdownCb = shutdown.get();
    // Fix: the original tested the (final, never-null) AtomicReference field
    // 'shutdown' instead of the callback it holds, so stopping without a
    // preceding shutdown() call threw NullPointerException on isDone().
    if (shutdownCb != null && !shutdownCb.isDone()) {
        shutdownCb.failed(new TimeoutException());
    }
}
/**
 * Initiates graceful shutdown: installs a callback (keeping an already
 * installed one, via compareAndSet) and completes it immediately when no
 * requests are in flight.
 */
@Override
public Future<Void> shutdown() {
FutureCallback shutdownCb = new FutureCallback(false);
shutdown.compareAndSet(null, shutdownCb);
// Re-read so concurrent callers all return the single winning callback.
shutdownCb = shutdown.get();
if (inFlight.get() == 0) {
shutdownCb.succeeded();
}
return shutdownCb;
}
} | class HttpResponseStatisticsCollector extends HandlerWrapper implements Graceful {
// Pending graceful-shutdown callback; null until shutdown() has been invoked.
private final AtomicReference<FutureCallback> shutdown = new AtomicReference<>();
// First axis of the statistics table; every common method has its own bucket,
// anything else is counted as OTHER.
public static enum HttpMethod {
GET, PATCH, POST, PUT, DELETE, OPTIONS, HEAD, OTHER
}
// Second axis: one bucket per response status century (1xx..5xx).
private static final String[] HTTP_RESPONSE_GROUPS = { Metrics.RESPONSES_1XX, Metrics.RESPONSES_2XX, Metrics.RESPONSES_3XX,
Metrics.RESPONSES_4XX, Metrics.RESPONSES_5XX };
// Requests currently being processed; shutdown completes when this reaches 0.
private final AtomicLong inFlight = new AtomicLong();
// Counters indexed by [HttpMethod.ordinal()][status group].
private final LongAdder statistics[][];
/** Allocates the full method x status-group counter matrix up front. */
public HttpResponseStatisticsCollector() {
super();
statistics = new LongAdder[HttpMethod.values().length][];
for (int method = 0; method < statistics.length; method++) {
statistics[method] = new LongAdder[HTTP_RESPONSE_GROUPS.length];
for (int group = 0; group < HTTP_RESPONSE_GROUPS.length; group++) {
statistics[method][group] = new LongAdder();
}
}
}
// Observes suspended (async) requests so they are counted when they finally complete.
private final AsyncListener completionWatcher = new AsyncListener() {
@Override
public void onTimeout(AsyncEvent event) throws IOException {
}
@Override
public void onStartAsync(AsyncEvent event) throws IOException {
// Re-register on each new async cycle so onComplete still reaches us.
event.getAsyncContext().addListener(this);
}
@Override
public void onError(AsyncEvent event) throws IOException {
}
@Override
public void onComplete(AsyncEvent event) throws IOException {
HttpChannelState state = ((AsyncContextEvent) event).getHttpChannelState();
Request request = state.getBaseRequest();
observeEndOfRequest(request, null);
}
};
/**
 * Forwards to the wrapped handler while tracking the in-flight count and,
 * once the request finishes (synchronously or via completionWatcher for
 * async requests), records its statistics. While a shutdown is pending,
 * unhandled requests are answered with 503.
 */
@Override
public void handle(String path, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
inFlight.incrementAndGet();
/* The control flow logic here is mostly a copy from org.eclipse.jetty.server.handler.StatisticsHandler.handle(..) */
try {
Handler handler = getHandler();
if (handler != null && shutdown.get() == null && isStarted()) {
handler.handle(path, baseRequest, request, response);
} else if (!baseRequest.isHandled()) {
baseRequest.setHandled(true);
response.sendError(HttpStatus.SERVICE_UNAVAILABLE_503);
}
} finally {
HttpChannelState state = baseRequest.getHttpChannelState();
if (state.isSuspended()) {
if (state.isInitial()) {
// Async request: defer accounting to the completion listener.
state.addListener(completionWatcher);
}
} else if (state.isInitial()) {
observeEndOfRequest(baseRequest, response);
}
}
}
/**
 * Records the finished request in the statistics table; when a shutdown is
 * pending and this was the last in-flight request, completes the shutdown
 * callback. flushableResponse is null on the async-completion path.
 */
private void observeEndOfRequest(Request request, HttpServletResponse flushableResponse) throws IOException {
int group = groupIndex(request);
if (group >= 0) {
HttpMethod method = getMethod(request);
statistics[method.ordinal()][group].increment();
}
long live = inFlight.decrementAndGet();
FutureCallback shutdownCb = shutdown.get();
if (shutdownCb != null) {
if (flushableResponse != null) {
flushableResponse.flushBuffer();
}
if (live == 0) {
shutdownCb.succeeded();
}
}
}
/**
 * Returns the index into HTTP_RESPONSE_GROUPS for this request's response
 * status (0 = 1xx ... 4 = 5xx), or -1 when the status is outside the tracked
 * range. Requests that were never handled are counted in bucket 3 (4xx).
 */
private int groupIndex(Request request) {
    if (request.isHandled()) {
        int index = (request.getResponse().getStatus() / 100) - 1;
        // Fix: bound the index by the number of status groups, inclusively.
        // The original compared against statistics.length (the number of HTTP
        // methods) with '>', so a status >= 600 produced an out-of-bounds
        // index on the group axis in observeEndOfRequest.
        if (index < 0 || index >= HTTP_RESPONSE_GROUPS.length) {
            return -1;
        } else {
            return index;
        }
    } else {
        return 3; // unhandled -> presumably served as an error -> 4xx bucket (TODO confirm)
    }
}
/**
 * Snapshots and atomically resets every counter, keyed by HTTP method name
 * and then by response status group.
 */
public Map<String, Map<String, Long>> takeStatisticsByMethod() {
Map<String, Map<String, Long>> ret = new HashMap<>();
for (HttpMethod method : HttpMethod.values()) {
int methodIndex = method.ordinal();
Map<String, Long> methodStats = new HashMap<>();
ret.put(method.toString(), methodStats);
for (int group = 0; group < HTTP_RESPONSE_GROUPS.length; group++) {
long value = statistics[methodIndex][group].sumThenReset();
methodStats.put(HTTP_RESPONSE_GROUPS[group], value);
}
}
return ret;
}
/** Clears any stale shutdown callback before the handler starts accepting traffic. */
@Override
protected void doStart() throws Exception {
shutdown.set(null);
super.doStart();
}
/**
 * Stops the handler; if a graceful shutdown was initiated but has not
 * finished, fails its callback with a TimeoutException.
 */
@Override
protected void doStop() throws Exception {
    super.doStop();
    FutureCallback shutdownCb = shutdown.get();
    // Fix: the original tested the (final, never-null) AtomicReference field
    // 'shutdown' instead of the callback it holds, so stopping without a
    // preceding shutdown() call threw NullPointerException on isDone().
    if (shutdownCb != null && !shutdownCb.isDone()) {
        shutdownCb.failed(new TimeoutException());
    }
}
/**
 * Initiates graceful shutdown: installs a callback (keeping an already
 * installed one, via compareAndSet) and completes it immediately when no
 * requests are in flight.
 */
@Override
public Future<Void> shutdown() {
/* This shutdown callback logic is a copy from org.eclipse.jetty.server.handler.StatisticsHandler */
FutureCallback shutdownCb = new FutureCallback(false);
shutdown.compareAndSet(null, shutdownCb);
// Re-read so concurrent callers all return the single winning callback.
shutdownCb = shutdown.get();
if (inFlight.get() == 0) {
shutdownCb.succeeded();
}
return shutdownCb;
}
} |
Oversight based on the request for the feature. I added those and renamed `UNKNOWN` to `OTHER` to better reflect the status. | private HttpMethod getMethod(Request request) {
// Maps the servlet method string onto the statistics enum. HTTP DELETE is
// recorded under the REMOVE bucket, and any method not listed here
// (e.g. OPTIONS, HEAD) collapses into UNKNOWN.
switch (request.getMethod()) {
case "GET":
return HttpMethod.GET;
case "PATCH":
return HttpMethod.PATCH;
case "POST":
return HttpMethod.POST;
case "PUT":
return HttpMethod.PUT;
case "DELETE":
return HttpMethod.REMOVE;
default:
return HttpMethod.UNKNOWN;
}
} | return HttpMethod.UNKNOWN; | private HttpMethod getMethod(Request request) {
// Maps the servlet method string onto the statistics enum. Every common
// method has a dedicated bucket; anything else is counted as OTHER.
switch (request.getMethod()) {
case "GET":
return HttpMethod.GET;
case "PATCH":
return HttpMethod.PATCH;
case "POST":
return HttpMethod.POST;
case "PUT":
return HttpMethod.PUT;
case "DELETE":
return HttpMethod.DELETE;
case "OPTIONS":
return HttpMethod.OPTIONS;
case "HEAD":
return HttpMethod.HEAD;
default:
return HttpMethod.OTHER;
}
} | class HttpResponseStatisticsCollector extends HandlerWrapper implements Graceful {
// Pending graceful-shutdown callback; null until shutdown() has been invoked.
private final AtomicReference<FutureCallback> shutdown = new AtomicReference<>();
// First axis of the statistics table; DELETE is counted as REMOVE, unlisted
// methods as UNKNOWN.
public static enum HttpMethod {
GET, PATCH, POST, PUT, REMOVE, UNKNOWN
}
// Second axis: one bucket per response status century (1xx..5xx).
private static final String[] HTTP_RESPONSE_GROUPS = { Metrics.RESPONSES_1XX, Metrics.RESPONSES_2XX, Metrics.RESPONSES_3XX,
Metrics.RESPONSES_4XX, Metrics.RESPONSES_5XX };
// Requests currently being processed; shutdown completes when this reaches 0.
private final AtomicLong inFlight = new AtomicLong();
// Counters indexed by [HttpMethod.ordinal()][status group].
private final LongAdder statistics[][];
/** Allocates the full method x status-group counter matrix up front. */
public HttpResponseStatisticsCollector() {
super();
statistics = new LongAdder[HttpMethod.values().length][];
for (int method = 0; method < statistics.length; method++) {
statistics[method] = new LongAdder[HTTP_RESPONSE_GROUPS.length];
for (int group = 0; group < HTTP_RESPONSE_GROUPS.length; group++) {
statistics[method][group] = new LongAdder();
}
}
}
// Observes suspended (async) requests so they are counted when they complete.
private final AsyncListener completionWatcher = new AsyncListener() {
@Override
public void onTimeout(AsyncEvent event) throws IOException {
}
@Override
public void onStartAsync(AsyncEvent event) throws IOException {
// Re-register on each new async cycle so onComplete still reaches us.
event.getAsyncContext().addListener(this);
}
@Override
public void onError(AsyncEvent event) throws IOException {
}
@Override
public void onComplete(AsyncEvent event) throws IOException {
HttpChannelState state = ((AsyncContextEvent) event).getHttpChannelState();
Request request = state.getBaseRequest();
observeEndOfRequest(request, null);
}
};
/**
 * Forwards to the wrapped handler while tracking in-flight count and request
 * statistics; rejects unhandled requests with 503 while a shutdown is pending.
 */
@Override
public void handle(String path, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
inFlight.incrementAndGet();
try {
Handler handler = getHandler();
if (handler != null && shutdown.get() == null && isStarted()) {
handler.handle(path, baseRequest, request, response);
} else if (!baseRequest.isHandled()) {
baseRequest.setHandled(true);
response.sendError(HttpStatus.SERVICE_UNAVAILABLE_503);
}
} finally {
HttpChannelState state = baseRequest.getHttpChannelState();
if (state.isSuspended()) {
if (state.isInitial()) {
// Async request: defer accounting to the completion listener.
state.addListener(completionWatcher);
}
} else if (state.isInitial()) {
observeEndOfRequest(baseRequest, response);
}
}
}
/**
 * Records the finished request; when a shutdown is pending and this was the
 * last in-flight request, completes the shutdown callback.
 */
private void observeEndOfRequest(Request request, HttpServletResponse flushableResponse) throws IOException {
int group = groupIndex(request);
if (group >= 0) {
HttpMethod method = getMethod(request);
statistics[method.ordinal()][group].increment();
}
long live = inFlight.decrementAndGet();
FutureCallback shutdownCb = shutdown.get();
if (shutdownCb != null) {
if (flushableResponse != null) {
flushableResponse.flushBuffer();
}
if (live == 0) {
shutdownCb.succeeded();
}
}
}
// Maps the response status to a HTTP_RESPONSE_GROUPS index, or -1 if untracked.
private int groupIndex(Request request) {
if (request.isHandled()) {
int index = (request.getResponse().getStatus() / 100) - 1;
// NOTE(review): bound should be index >= HTTP_RESPONSE_GROUPS.length;
// comparing against statistics.length (the method count) with '>' lets a
// status >= 600 produce index 5 and overflow the group axis.
if (index < 0 || index > statistics.length) {
return -1;
} else {
return index;
}
} else {
return 3;
}
}
/** Snapshots and resets all counters, keyed by method name then status group. */
public Map<String, Map<String, Long>> takeStatisticsByMethod() {
Map<String, Map<String, Long>> ret = new HashMap<>();
for (HttpMethod method : HttpMethod.values()) {
int methodIndex = method.ordinal();
Map<String, Long> methodStats = new HashMap<>();
ret.put(method.toString(), methodStats);
for (int group = 0; group < HTTP_RESPONSE_GROUPS.length; group++) {
long value = statistics[methodIndex][group].sumThenReset();
methodStats.put(HTTP_RESPONSE_GROUPS[group], value);
}
}
return ret;
}
/** Clears any stale shutdown callback before accepting traffic. */
@Override
protected void doStart() throws Exception {
shutdown.set(null);
super.doStart();
}
/** Stops the handler, failing an unfinished shutdown callback with a timeout. */
@Override
protected void doStop() throws Exception {
super.doStop();
FutureCallback shutdownCb = shutdown.get();
// NOTE(review): this tests the never-null AtomicReference field 'shutdown'
// instead of 'shutdownCb'; stopping without a prior shutdown() call NPEs
// on shutdownCb.isDone().
if (shutdown != null && !shutdownCb.isDone()) {
shutdownCb.failed(new TimeoutException());
}
}
/** Initiates graceful shutdown; completes immediately when nothing is in flight. */
@Override
public Future<Void> shutdown() {
FutureCallback shutdownCb = new FutureCallback(false);
shutdown.compareAndSet(null, shutdownCb);
shutdownCb = shutdown.get();
if (inFlight.get() == 0) {
shutdownCb.succeeded();
}
return shutdownCb;
}
} | class HttpResponseStatisticsCollector extends HandlerWrapper implements Graceful {
// Pending graceful-shutdown callback; null until shutdown() has been invoked.
private final AtomicReference<FutureCallback> shutdown = new AtomicReference<>();
// First axis of the statistics table; unlisted methods are counted as OTHER.
public static enum HttpMethod {
GET, PATCH, POST, PUT, DELETE, OPTIONS, HEAD, OTHER
}
// Second axis: one bucket per response status century (1xx..5xx).
private static final String[] HTTP_RESPONSE_GROUPS = { Metrics.RESPONSES_1XX, Metrics.RESPONSES_2XX, Metrics.RESPONSES_3XX,
Metrics.RESPONSES_4XX, Metrics.RESPONSES_5XX };
// Requests currently being processed; shutdown completes when this reaches 0.
private final AtomicLong inFlight = new AtomicLong();
// Counters indexed by [HttpMethod.ordinal()][status group].
private final LongAdder statistics[][];
/** Allocates the full method x status-group counter matrix up front. */
public HttpResponseStatisticsCollector() {
super();
statistics = new LongAdder[HttpMethod.values().length][];
for (int method = 0; method < statistics.length; method++) {
statistics[method] = new LongAdder[HTTP_RESPONSE_GROUPS.length];
for (int group = 0; group < HTTP_RESPONSE_GROUPS.length; group++) {
statistics[method][group] = new LongAdder();
}
}
}
// Observes suspended (async) requests so they are counted when they complete.
private final AsyncListener completionWatcher = new AsyncListener() {
@Override
public void onTimeout(AsyncEvent event) throws IOException {
}
@Override
public void onStartAsync(AsyncEvent event) throws IOException {
// Re-register on each new async cycle so onComplete still reaches us.
event.getAsyncContext().addListener(this);
}
@Override
public void onError(AsyncEvent event) throws IOException {
}
@Override
public void onComplete(AsyncEvent event) throws IOException {
HttpChannelState state = ((AsyncContextEvent) event).getHttpChannelState();
Request request = state.getBaseRequest();
observeEndOfRequest(request, null);
}
};
/**
 * Forwards to the wrapped handler while tracking in-flight count and request
 * statistics; rejects unhandled requests with 503 while a shutdown is pending.
 */
@Override
public void handle(String path, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
inFlight.incrementAndGet();
/* The control flow logic here is mostly a copy from org.eclipse.jetty.server.handler.StatisticsHandler.handle(..) */
try {
Handler handler = getHandler();
if (handler != null && shutdown.get() == null && isStarted()) {
handler.handle(path, baseRequest, request, response);
} else if (!baseRequest.isHandled()) {
baseRequest.setHandled(true);
response.sendError(HttpStatus.SERVICE_UNAVAILABLE_503);
}
} finally {
HttpChannelState state = baseRequest.getHttpChannelState();
if (state.isSuspended()) {
if (state.isInitial()) {
// Async request: defer accounting to the completion listener.
state.addListener(completionWatcher);
}
} else if (state.isInitial()) {
observeEndOfRequest(baseRequest, response);
}
}
}
/**
 * Records the finished request; when a shutdown is pending and this was the
 * last in-flight request, completes the shutdown callback.
 */
private void observeEndOfRequest(Request request, HttpServletResponse flushableResponse) throws IOException {
int group = groupIndex(request);
if (group >= 0) {
HttpMethod method = getMethod(request);
statistics[method.ordinal()][group].increment();
}
long live = inFlight.decrementAndGet();
FutureCallback shutdownCb = shutdown.get();
if (shutdownCb != null) {
if (flushableResponse != null) {
flushableResponse.flushBuffer();
}
if (live == 0) {
shutdownCb.succeeded();
}
}
}
// Maps the response status to a HTTP_RESPONSE_GROUPS index, or -1 if untracked.
private int groupIndex(Request request) {
if (request.isHandled()) {
int index = (request.getResponse().getStatus() / 100) - 1;
// NOTE(review): bound should be index >= HTTP_RESPONSE_GROUPS.length;
// comparing against statistics.length (the method count) with '>' lets a
// status >= 600 overflow the group axis.
if (index < 0 || index > statistics.length) {
return -1;
} else {
return index;
}
} else {
return 3;
}
}
/** Snapshots and resets all counters, keyed by method name then status group. */
public Map<String, Map<String, Long>> takeStatisticsByMethod() {
Map<String, Map<String, Long>> ret = new HashMap<>();
for (HttpMethod method : HttpMethod.values()) {
int methodIndex = method.ordinal();
Map<String, Long> methodStats = new HashMap<>();
ret.put(method.toString(), methodStats);
for (int group = 0; group < HTTP_RESPONSE_GROUPS.length; group++) {
long value = statistics[methodIndex][group].sumThenReset();
methodStats.put(HTTP_RESPONSE_GROUPS[group], value);
}
}
return ret;
}
/** Clears any stale shutdown callback before accepting traffic. */
@Override
protected void doStart() throws Exception {
shutdown.set(null);
super.doStart();
}
/** Stops the handler, failing an unfinished shutdown callback with a timeout. */
@Override
protected void doStop() throws Exception {
super.doStop();
FutureCallback shutdownCb = shutdown.get();
// NOTE(review): this tests the never-null AtomicReference field 'shutdown'
// instead of 'shutdownCb'; stopping without a prior shutdown() call NPEs
// on shutdownCb.isDone().
if (shutdown != null && !shutdownCb.isDone()) {
shutdownCb.failed(new TimeoutException());
}
}
/** Initiates graceful shutdown; completes immediately when nothing is in flight. */
@Override
public Future<Void> shutdown() {
/* This shutdown callback logic is a copy from org.eclipse.jetty.server.handler.StatisticsHandler */
FutureCallback shutdownCb = new FutureCallback(false);
shutdown.compareAndSet(null, shutdownCb);
shutdownCb = shutdown.get();
if (inFlight.get() == 0) {
shutdownCb.succeeded();
}
return shutdownCb;
}
} |
Not needed: Since PROP_REQUEST was private, there is no chance that anybody is already using this property name - it is not part of the API of properties. | public void set(CompoundName key, Object value, Map<String,String> context) {
// Dispatches a property write to the matching strongly-typed query API
// (model.*, ranking.*, presentation.*, select.*, legacy aliases, top-level
// query settings), falling back to the parent property chain otherwise.
try {
// model.*: query-string, parsing and routing settings.
if (key.size()==2 && key.first().equals(Model.MODEL)) {
Model model = query.getModel();
if (key.last().equals(Model.QUERY_STRING))
model.setQueryString(asString(value, ""));
else if (key.last().equals(Model.TYPE))
model.setType(asString(value, "ANY"));
else if (key.last().equals(Model.FILTER))
model.setFilter(asString(value, ""));
else if (key.last().equals(Model.DEFAULT_INDEX))
model.setDefaultIndex(asString(value, ""));
else if (key.last().equals(Model.LANGUAGE))
model.setLanguage(asString(value, ""));
else if (key.last().equals(Model.ENCODING))
model.setEncoding(asString(value,""));
else if (key.last().equals(Model.SEARCH_PATH))
model.setSearchPath(asString(value,""));
else if (key.last().equals(Model.SOURCES))
model.setSources(asString(value,""));
else if (key.last().equals(Model.RESTRICT))
model.setRestrict(asString(value,""));
else
throwIllegalParameter(key.last(),Model.MODEL);
}
// ranking.* and its nested matchPhase/diversity/softtimeout/matching scopes.
else if (key.first().equals(Ranking.RANKING)) {
Ranking ranking = query.getRanking();
if (key.size()==2) {
if (key.last().equals(Ranking.LOCATION))
ranking.setLocation(asString(value,""));
else if (key.last().equals(Ranking.PROFILE))
ranking.setProfile(asString(value,""));
else if (key.last().equals(Ranking.SORTING))
ranking.setSorting(asString(value,""));
else if (key.last().equals(Ranking.FRESHNESS))
ranking.setFreshness(asString(value, ""));
else if (key.last().equals(Ranking.QUERYCACHE))
ranking.setQueryCache(asBoolean(value, false));
else if (key.last().equals(Ranking.LIST_FEATURES))
ranking.setListFeatures(asBoolean(value,false));
}
else if (key.size()>=3 && key.get(1).equals(Ranking.MATCH_PHASE)) {
if (key.size() == 3) {
MatchPhase matchPhase = ranking.getMatchPhase();
if (key.last().equals(MatchPhase.ATTRIBUTE)) {
matchPhase.setAttribute(asString(value, null));
} else if (key.last().equals(MatchPhase.ASCENDING)) {
matchPhase.setAscending(asBoolean(value, false));
} else if (key.last().equals(MatchPhase.MAX_HITS)) {
matchPhase.setMaxHits(asLong(value, null));
} else if (key.last().equals(MatchPhase.MAX_FILTER_COVERAGE)) {
matchPhase.setMaxFilterCoverage(asDouble(value, 0.2));
}
} else if (key.size() > 3 && key.get(2).equals(Ranking.DIVERSITY)) {
Diversity diversity = ranking.getMatchPhase().getDiversity();
if (key.last().equals(Diversity.ATTRIBUTE)) {
diversity.setAttribute(asString(value, null));
} else if (key.last().equals(Diversity.MINGROUPS)) {
diversity.setMinGroups(asLong(value, null));
} else if ((key.size() > 4) && key.get(3).equals(Diversity.CUTOFF)) {
if (key.last().equals(Diversity.FACTOR)) {
diversity.setCutoffFactor(asDouble(value, 10.0));
} else if (key.last().equals(Diversity.STRATEGY)) {
diversity.setCutoffStrategy(asString(value, "loose"));
}
}
}
}
else if (key.size() == 3 && key.get(1).equals(Ranking.SOFTTIMEOUT)) {
SoftTimeout soft = ranking.getSoftTimeout();
if (key.last().equals(SoftTimeout.ENABLE)) soft.setEnable(asBoolean(value, false));
if (key.last().equals(SoftTimeout.FACTOR)) soft.setFactor(asDouble(value, 0.50));
if (key.last().equals(SoftTimeout.TAILCOST)) soft.setTailcost(asDouble(value, 0.10));
}
else if (key.size() == 3 && key.get(1).equals(Ranking.MATCHING)) {
Matching matching = ranking.getMatching();
if (key.last().equals(Matching.TERMWISELIMIT)) matching.setTermwiselimit(asDouble(value, 1.0));
if (key.last().equals(Matching.NUMTHREADSPERSEARCH)) matching.setNumThreadsPerSearch(asInteger(value, 1));
if (key.last().equals(Matching.NUMSEARCHPARTITIIONS)) matching.setNumSearchPartitions(asInteger(value, 1));
if (key.last().equals(Matching.MINHITSPERTHREAD)) matching.setMinHitsPerThread(asInteger(value, 0));
}
// ranking.features.* / ranking.properties.*: values are converted to the
// type declared in the corresponding query-profile type, if any.
else if (key.size()>2) {
String restKey = key.rest().rest().toString();
if (key.get(1).equals(Ranking.FEATURES))
setRankingFeature(query, restKey, toSpecifiedType(restKey, value, profileRegistry.getTypeRegistry().getComponent("features")));
else if (key.get(1).equals(Ranking.PROPERTIES))
ranking.getProperties().put(restKey, toSpecifiedType(restKey, value, profileRegistry.getTypeRegistry().getComponent("properties")));
else
throwIllegalParameter(key.rest().toString(),Ranking.RANKING);
}
}
// presentation.*: result rendering settings; REPORT_COVERAGE is accepted
// but deliberately ignored.
else if (key.size()==2 && key.first().equals(Presentation.PRESENTATION)) {
if (key.last().equals(Presentation.BOLDING))
query.getPresentation().setBolding(asBoolean(value, true));
else if (key.last().equals(Presentation.SUMMARY))
query.getPresentation().setSummary(asString(value, ""));
else if (key.last().equals(Presentation.FORMAT))
query.getPresentation().setFormat(asString(value,""));
else if (key.last().equals(Presentation.TIMING))
query.getPresentation().setTiming(asBoolean(value, true));
else if (key.last().equals(Presentation.SUMMARY_FIELDS))
query.getPresentation().setSummaryFields(asString(value,""));
else if ( ! key.last().equals(Presentation.REPORT_COVERAGE))
throwIllegalParameter(key.last(), Presentation.PRESENTATION);
}
else if (key.size()==2 && key.first().equals(Select.SELECT)) {
if (key.last().equals(Select.WHERE)){
query.getSelect().setWhereString(asString(value, ""));
} else if (key.last().equals(Select.GROUPING)) {
query.getSelect().setGroupingString(asString(value, ""));
}
}
// Legacy aliases for ranking features/properties.
else if (key.first().equals("rankfeature") || key.first().equals("featureoverride") ) {
setRankingFeature(query, key.rest().toString(), toSpecifiedType(key.rest().toString(), value, profileRegistry.getTypeRegistry().getComponent("features")));
} else if (key.first().equals("rankproperty")) {
query.getRanking().getProperties().put(key.rest().toString(), toSpecifiedType(key.rest().toString(), value, profileRegistry.getTypeRegistry().getComponent("properties")));
} else if (key.size()==1) {
if (key.equals(Query.HITS))
query.setHits(asInteger(value,10));
else if (key.equals(Query.OFFSET))
query.setOffset(asInteger(value,0));
else if (key.equals(Query.TRACE_LEVEL))
query.setTraceLevel(asInteger(value,0));
else if (key.equals(Query.TIMEOUT))
query.setTimeout(value.toString());
else if (key.equals(Query.NO_CACHE))
query.setNoCache(asBoolean(value,false));
else if (key.equals(Query.GROUPING_SESSION_CACHE))
query.setGroupingSessionCache(asBoolean(value, false));
else
super.set(key,value,context);
} else if (key.toString().equals(GroupingRequest.PROP_REQUEST)) {
query.getSelect().setGrouping((List<GroupingRequest>) value);
}
else
super.set(key,value,context);
}
catch (Exception e) {
// NOTE(review): e.getMessage() may be null for some exceptions, which
// would itself throw NPE here -- TODO confirm and guard if needed.
if (e.getMessage().startsWith("Could not set"))
throw e;
else
throw new IllegalArgumentException("Could not set '" + key + "' to '" + value + "'", e);
}
} | } else if (key.toString().equals(GroupingRequest.PROP_REQUEST)) { | public void set(CompoundName key, Object value, Map<String,String> context) {
// Dispatches a property write to the matching strongly-typed query API
// (model.*, ranking.*, presentation.*, select.*, legacy aliases, top-level
// query settings), falling back to the parent property chain otherwise.
try {
// model.*: query-string, parsing and routing settings.
if (key.size()==2 && key.first().equals(Model.MODEL)) {
Model model = query.getModel();
if (key.last().equals(Model.QUERY_STRING))
model.setQueryString(asString(value, ""));
else if (key.last().equals(Model.TYPE))
model.setType(asString(value, "ANY"));
else if (key.last().equals(Model.FILTER))
model.setFilter(asString(value, ""));
else if (key.last().equals(Model.DEFAULT_INDEX))
model.setDefaultIndex(asString(value, ""));
else if (key.last().equals(Model.LANGUAGE))
model.setLanguage(asString(value, ""));
else if (key.last().equals(Model.ENCODING))
model.setEncoding(asString(value,""));
else if (key.last().equals(Model.SEARCH_PATH))
model.setSearchPath(asString(value,""));
else if (key.last().equals(Model.SOURCES))
model.setSources(asString(value,""));
else if (key.last().equals(Model.RESTRICT))
model.setRestrict(asString(value,""));
else
throwIllegalParameter(key.last(),Model.MODEL);
}
// ranking.* and its nested matchPhase/diversity/softtimeout/matching scopes.
else if (key.first().equals(Ranking.RANKING)) {
Ranking ranking = query.getRanking();
if (key.size()==2) {
if (key.last().equals(Ranking.LOCATION))
ranking.setLocation(asString(value,""));
else if (key.last().equals(Ranking.PROFILE))
ranking.setProfile(asString(value,""));
else if (key.last().equals(Ranking.SORTING))
ranking.setSorting(asString(value,""));
else if (key.last().equals(Ranking.FRESHNESS))
ranking.setFreshness(asString(value, ""));
else if (key.last().equals(Ranking.QUERYCACHE))
ranking.setQueryCache(asBoolean(value, false));
else if (key.last().equals(Ranking.LIST_FEATURES))
ranking.setListFeatures(asBoolean(value,false));
}
else if (key.size()>=3 && key.get(1).equals(Ranking.MATCH_PHASE)) {
if (key.size() == 3) {
MatchPhase matchPhase = ranking.getMatchPhase();
if (key.last().equals(MatchPhase.ATTRIBUTE)) {
matchPhase.setAttribute(asString(value, null));
} else if (key.last().equals(MatchPhase.ASCENDING)) {
matchPhase.setAscending(asBoolean(value, false));
} else if (key.last().equals(MatchPhase.MAX_HITS)) {
matchPhase.setMaxHits(asLong(value, null));
} else if (key.last().equals(MatchPhase.MAX_FILTER_COVERAGE)) {
matchPhase.setMaxFilterCoverage(asDouble(value, 0.2));
}
} else if (key.size() > 3 && key.get(2).equals(Ranking.DIVERSITY)) {
Diversity diversity = ranking.getMatchPhase().getDiversity();
if (key.last().equals(Diversity.ATTRIBUTE)) {
diversity.setAttribute(asString(value, null));
} else if (key.last().equals(Diversity.MINGROUPS)) {
diversity.setMinGroups(asLong(value, null));
} else if ((key.size() > 4) && key.get(3).equals(Diversity.CUTOFF)) {
if (key.last().equals(Diversity.FACTOR)) {
diversity.setCutoffFactor(asDouble(value, 10.0));
} else if (key.last().equals(Diversity.STRATEGY)) {
diversity.setCutoffStrategy(asString(value, "loose"));
}
}
}
}
else if (key.size() == 3 && key.get(1).equals(Ranking.SOFTTIMEOUT)) {
SoftTimeout soft = ranking.getSoftTimeout();
if (key.last().equals(SoftTimeout.ENABLE)) soft.setEnable(asBoolean(value, false));
if (key.last().equals(SoftTimeout.FACTOR)) soft.setFactor(asDouble(value, 0.50));
if (key.last().equals(SoftTimeout.TAILCOST)) soft.setTailcost(asDouble(value, 0.10));
}
else if (key.size() == 3 && key.get(1).equals(Ranking.MATCHING)) {
Matching matching = ranking.getMatching();
if (key.last().equals(Matching.TERMWISELIMIT)) matching.setTermwiselimit(asDouble(value, 1.0));
if (key.last().equals(Matching.NUMTHREADSPERSEARCH)) matching.setNumThreadsPerSearch(asInteger(value, 1));
if (key.last().equals(Matching.NUMSEARCHPARTITIIONS)) matching.setNumSearchPartitions(asInteger(value, 1));
if (key.last().equals(Matching.MINHITSPERTHREAD)) matching.setMinHitsPerThread(asInteger(value, 0));
}
// ranking.features.* / ranking.properties.*: values are converted to the
// type declared in the corresponding query-profile type, if any.
else if (key.size()>2) {
String restKey = key.rest().rest().toString();
if (key.get(1).equals(Ranking.FEATURES))
setRankingFeature(query, restKey, toSpecifiedType(restKey, value, profileRegistry.getTypeRegistry().getComponent("features")));
else if (key.get(1).equals(Ranking.PROPERTIES))
ranking.getProperties().put(restKey, toSpecifiedType(restKey, value, profileRegistry.getTypeRegistry().getComponent("properties")));
else
throwIllegalParameter(key.rest().toString(),Ranking.RANKING);
}
}
// presentation.*: result rendering settings; REPORT_COVERAGE is accepted
// but deliberately ignored.
else if (key.size()==2 && key.first().equals(Presentation.PRESENTATION)) {
if (key.last().equals(Presentation.BOLDING))
query.getPresentation().setBolding(asBoolean(value, true));
else if (key.last().equals(Presentation.SUMMARY))
query.getPresentation().setSummary(asString(value, ""));
else if (key.last().equals(Presentation.FORMAT))
query.getPresentation().setFormat(asString(value,""));
else if (key.last().equals(Presentation.TIMING))
query.getPresentation().setTiming(asBoolean(value, true));
else if (key.last().equals(Presentation.SUMMARY_FIELDS))
query.getPresentation().setSummaryFields(asString(value,""));
else if ( ! key.last().equals(Presentation.REPORT_COVERAGE))
throwIllegalParameter(key.last(), Presentation.PRESENTATION);
}
else if (key.size()==2 && key.first().equals(Select.SELECT)) {
if (key.last().equals(Select.WHERE)){
query.getSelect().setWhereString(asString(value, ""));
} else if (key.last().equals(Select.GROUPING)) {
query.getSelect().setGroupingString(asString(value, ""));
}
}
// Legacy aliases for ranking features/properties.
else if (key.first().equals("rankfeature") || key.first().equals("featureoverride") ) {
setRankingFeature(query, key.rest().toString(), toSpecifiedType(key.rest().toString(), value, profileRegistry.getTypeRegistry().getComponent("features")));
} else if (key.first().equals("rankproperty")) {
query.getRanking().getProperties().put(key.rest().toString(), toSpecifiedType(key.rest().toString(), value, profileRegistry.getTypeRegistry().getComponent("properties")));
} else if (key.size()==1) {
if (key.equals(Query.HITS))
query.setHits(asInteger(value,10));
else if (key.equals(Query.OFFSET))
query.setOffset(asInteger(value,0));
else if (key.equals(Query.TRACE_LEVEL))
query.setTraceLevel(asInteger(value,0));
else if (key.equals(Query.TIMEOUT))
query.setTimeout(value.toString());
else if (key.equals(Query.NO_CACHE))
query.setNoCache(asBoolean(value,false));
else if (key.equals(Query.GROUPING_SESSION_CACHE))
query.setGroupingSessionCache(asBoolean(value, false));
else
super.set(key,value,context);
} else
super.set(key,value,context);
}
catch (Exception e) {
// NOTE(review): e.getMessage() may be null for some exceptions, which
// would itself throw NPE here -- TODO confirm and guard if needed.
if (e.getMessage().startsWith("Could not set"))
throw e;
else
throw new IllegalArgumentException("Could not set '" + key + "' to '" + value + "'", e);
}
} | class QueryProperties extends Properties {
/**
 * The native query properties, one entry per property handled by get/set below.
 * TODO: Remove on Vespa 7
 * @deprecated use Query.nativeProperties
 */
@Deprecated
public static final CompoundName[] PER_SOURCE_QUERY_PROPERTIES =
Query.nativeProperties.toArray(new CompoundName[] {});
// The query whose state backs these properties; replaced via setParentQuery.
private Query query;
// Registry used to convert feature/property values to their declared types.
private final CompiledQueryProfileRegistry profileRegistry;
public QueryProperties(Query query, CompiledQueryProfileRegistry profileRegistry) {
this.query = query;
this.profileRegistry = profileRegistry;
}
/** Rebinds these properties to another query and propagates to the parent chain. */
public void setParentQuery(Query query) {
this.query=query;
super.setParentQuery(query);
}
/**
 * Reads a property from the matching strongly-typed query API (mirroring the
 * dispatch in set); falls through to the parent property chain for keys not
 * handled natively.
 */
@SuppressWarnings("deprecation")
@Override
public Object get(CompoundName key, Map<String,String> context,
com.yahoo.processing.request.Properties substitution) {
if (key.size() == 2 && key.first().equals(Model.MODEL)) {
Model model = query.getModel();
if (key.last().equals(Model.QUERY_STRING)) return model.getQueryString();
if (key.last().equals(Model.TYPE)) return model.getType();
if (key.last().equals(Model.FILTER)) return model.getFilter();
if (key.last().equals(Model.DEFAULT_INDEX)) return model.getDefaultIndex();
if (key.last().equals(Model.LANGUAGE)) return model.getLanguage();
if (key.last().equals(Model.ENCODING)) return model.getEncoding();
if (key.last().equals(Model.SOURCES)) return model.getSources();
if (key.last().equals(Model.SEARCH_PATH)) return model.getSearchPath();
if (key.last().equals(Model.RESTRICT)) return model.getRestrict();
}
else if (key.first().equals(Ranking.RANKING)) {
Ranking ranking = query.getRanking();
if (key.size() == 2) {
if (key.last().equals(Ranking.LOCATION)) return ranking.getLocation();
if (key.last().equals(Ranking.PROFILE)) return ranking.getProfile();
if (key.last().equals(Ranking.SORTING)) return ranking.getSorting();
if (key.last().equals(Ranking.FRESHNESS)) return ranking.getFreshness();
if (key.last().equals(Ranking.QUERYCACHE)) return ranking.getQueryCache();
if (key.last().equals(Ranking.LIST_FEATURES)) return ranking.getListFeatures();
}
else if (key.size()>=3 && key.get(1).equals(Ranking.MATCH_PHASE)) {
if (key.size() == 3) {
MatchPhase matchPhase = ranking.getMatchPhase();
if (key.last().equals(MatchPhase.ATTRIBUTE)) return matchPhase.getAttribute();
if (key.last().equals(MatchPhase.ASCENDING)) return matchPhase.getAscending();
if (key.last().equals(MatchPhase.MAX_HITS)) return matchPhase.getMaxHits();
if (key.last().equals(MatchPhase.MAX_FILTER_COVERAGE)) return matchPhase.getMaxFilterCoverage();
} else if (key.size() >= 4 && key.get(2).equals(Ranking.DIVERSITY)) {
Diversity diversity = ranking.getMatchPhase().getDiversity();
if (key.size() == 4) {
if (key.last().equals(Diversity.ATTRIBUTE)) return diversity.getAttribute();
if (key.last().equals(Diversity.MINGROUPS)) return diversity.getMinGroups();
} else if ((key.size() == 5) && key.get(3).equals(Diversity.CUTOFF)) {
if (key.last().equals(Diversity.FACTOR)) return diversity.getCutoffFactor();
if (key.last().equals(Diversity.STRATEGY)) return diversity.getCutoffStrategy();
}
}
}
else if (key.size() == 3 && key.get(1).equals(Ranking.SOFTTIMEOUT)) {
SoftTimeout soft = ranking.getSoftTimeout();
if (key.last().equals(SoftTimeout.ENABLE)) return soft.getEnable();
if (key.last().equals(SoftTimeout.FACTOR)) return soft.getFactor();
if (key.last().equals(SoftTimeout.TAILCOST)) return soft.getTailcost();
}
else if (key.size() == 3 && key.get(1).equals(Ranking.MATCHING)) {
Matching matching = ranking.getMatching();
if (key.last().equals(Matching.TERMWISELIMIT)) return matching.getTermwiseLimit();
if (key.last().equals(Matching.NUMTHREADSPERSEARCH)) return matching.getNumThreadsPerSearch();
if (key.last().equals(Matching.NUMSEARCHPARTITIIONS)) return matching.getNumSearchPartitions();
if (key.last().equals(Matching.MINHITSPERTHREAD)) return matching.getMinHitsPerThread();
}
else if (key.size()>2) {
if (key.get(1).equals(Ranking.FEATURES)) return ranking.getFeatures().getObject(key.rest().rest().toString());
if (key.get(1).equals(Ranking.PROPERTIES)) return ranking.getProperties().get(key.rest().rest().toString());
}
}
else if (key.size()==2 && key.first().equals(Select.SELECT)) {
if (key.last().equals(Select.WHERE)) return query.getSelect().getWhereString();
if (key.last().equals(Select.GROUPING)) return query.getSelect().getGroupingString();
}
else if (key.size()==2 && key.first().equals(Presentation.PRESENTATION)) {
if (key.last().equals(Presentation.BOLDING)) return query.getPresentation().getBolding();
if (key.last().equals(Presentation.SUMMARY)) return query.getPresentation().getSummary();
// Coverage reporting is always on; the property is kept for compatibility.
if (key.last().equals(Presentation.REPORT_COVERAGE)) return true;
if (key.last().equals(Presentation.FORMAT)) return query.getPresentation().getFormat();
if (key.last().equals(Presentation.TIMING)) return query.getPresentation().getTiming();
if (key.last().equals(Presentation.SUMMARY_FIELDS)) return query.getPresentation().getSummaryFields();
}
// Legacy aliases for ranking features/properties.
else if (key.first().equals("rankfeature") || key.first().equals("featureoverride")) {
return query.getRanking().getFeatures().getObject(key.rest().toString());
} else if (key.first().equals("rankproperty")) {
return query.getRanking().getProperties().get(key.rest().toString());
} else if (key.size()==1) {
if (key.equals(Query.HITS)) return query.getHits();
if (key.equals(Query.OFFSET)) return query.getOffset();
if (key.equals(Query.TRACE_LEVEL)) return query.getTraceLevel();
if (key.equals(Query.TIMEOUT)) return query.getTimeout();
if (key.equals(Query.NO_CACHE)) return query.getNoCache();
if (key.equals(Query.GROUPING_SESSION_CACHE)) return query.getGroupingSessionCache();
if (key.toString().equals(Model.MODEL)) return query.getModel();
if (key.toString().equals(Ranking.RANKING)) return query.getRanking();
if (key.toString().equals(Presentation.PRESENTATION)) return query.getPresentation();
} else if (key.toString().equals(GroupingRequest.PROP_REQUEST)) {
return query.getSelect().getGrouping();
}
return super.get(key, context, substitution);
}
/**
 * Lists all properties under the given prefix.
 *
 * In addition to what the generic superclass implementation finds in the backing
 * store, this adds every native query property having the prefix whose value
 * (resolved through {@link #get}) is non-null.
 *
 * @param prefix the compound name prefix to list under
 * @param context the evaluation context passed through to lookups
 * @param substitution the properties used for value substitution
 * @return a map from full property name to value
 */
@SuppressWarnings("deprecation")
@Override
public Map<String, Object> listProperties(CompoundName prefix,
                                          Map<String,String> context,
                                          com.yahoo.processing.request.Properties substitution) {
    Map<String, Object> properties = super.listProperties(prefix, context, substitution);
    for (CompoundName queryProperty : Query.nativeProperties) {
        if (queryProperty.hasPrefix(prefix)) {
            Object value = this.get(queryProperty, context, substitution);
            if (value != null)
                properties.put(queryProperty.toString(), value);
        }
    }
    return properties;
}
/**
 * Stores a rank feature on the given query: Tensor values are stored via the
 * Tensor overload, any other value is stored via its string representation.
 */
private void setRankingFeature(Query query, String key, Object value) {
    if ( ! (value instanceof Tensor)) {
        query.getRanking().getFeatures().put(key, asString(value, ""));
        return;
    }
    query.getRanking().getFeatures().put(key, (Tensor) value);
}
/**
 * Converts a String value to the type declared for the given field in the query
 * profile type. The value is returned unchanged when no conversion applies:
 * non-String value, no profile type, or no field declared for the key.
 */
private Object toSpecifiedType(String key, Object value, QueryProfileType type) {
    if (value instanceof String && type != null) {
        FieldDescription field = type.getField(key);
        if (field != null)
            return field.getType().convertFrom(value, profileRegistry);
    }
    return value;
}
/** Throws an IllegalArgumentException describing an invalid property key in the given namespace. */
private void throwIllegalParameter(String key, String namespace) {
    String message = "'" + key + "' is not a valid property in '" + namespace +
                     "'. See the search api for valid keys starting by '" + namespace + "'.";
    throw new IllegalArgumentException(message);
}
/** Returns the query which owns this property object. */
@Override
public final Query getParentQuery() {
    return query;
}
} | class QueryProperties extends Properties {
/**
* TODO: Remove on Vespa 7
* @deprecated use Query.nativeProperties
*/
@Deprecated
public static final CompoundName[] PER_SOURCE_QUERY_PROPERTIES =
Query.nativeProperties.toArray(new CompoundName[] {});
// The query owning these properties; reassigned when setParentQuery is called
private Query query;
// Registry used to convert assigned string values to the types declared in a query profile
// (see toSpecifiedType)
private final CompiledQueryProfileRegistry profileRegistry;
/**
 * Creates a property container backed by the given query.
 *
 * @param query the query these properties belong to
 * @param profileRegistry the registry used for type conversion of assigned values
 */
public QueryProperties(Query query, CompiledQueryProfileRegistry profileRegistry) {
    this.query = query;
    this.profileRegistry = profileRegistry;
}
/** Sets the query owning this property object, and forwards the call to the superclass. */
public void setParentQuery(Query query) {
    this.query = query;
    super.setParentQuery(query);
}
/**
 * Returns the value of a query property.
 *
 * Well-known compound names (model.*, ranking.*, select.*, presentation.*,
 * rankfeature.*, rankproperty.* and single-element native names) are read
 * directly from the typed structures of the owning Query; any other key falls
 * through to the generic store in the superclass.
 */
@SuppressWarnings("deprecation")
@Override
public Object get(CompoundName key, Map<String,String> context,
                  com.yahoo.processing.request.Properties substitution) {
    // model.<field>: read from the query's Model
    if (key.size() == 2 && key.first().equals(Model.MODEL)) {
        Model model = query.getModel();
        if (key.last().equals(Model.QUERY_STRING)) return model.getQueryString();
        if (key.last().equals(Model.TYPE)) return model.getType();
        if (key.last().equals(Model.FILTER)) return model.getFilter();
        if (key.last().equals(Model.DEFAULT_INDEX)) return model.getDefaultIndex();
        if (key.last().equals(Model.LANGUAGE)) return model.getLanguage();
        if (key.last().equals(Model.ENCODING)) return model.getEncoding();
        if (key.last().equals(Model.SOURCES)) return model.getSources();
        if (key.last().equals(Model.SEARCH_PATH)) return model.getSearchPath();
        if (key.last().equals(Model.RESTRICT)) return model.getRestrict();
    }
    // ranking.*: read from the query's Ranking, including nested matchPhase,
    // diversity, softtimeout and matching sub-objects
    else if (key.first().equals(Ranking.RANKING)) {
        Ranking ranking = query.getRanking();
        if (key.size() == 2) {
            if (key.last().equals(Ranking.LOCATION)) return ranking.getLocation();
            if (key.last().equals(Ranking.PROFILE)) return ranking.getProfile();
            if (key.last().equals(Ranking.SORTING)) return ranking.getSorting();
            if (key.last().equals(Ranking.FRESHNESS)) return ranking.getFreshness();
            if (key.last().equals(Ranking.QUERYCACHE)) return ranking.getQueryCache();
            if (key.last().equals(Ranking.LIST_FEATURES)) return ranking.getListFeatures();
        }
        else if (key.size()>=3 && key.get(1).equals(Ranking.MATCH_PHASE)) {
            if (key.size() == 3) {
                MatchPhase matchPhase = ranking.getMatchPhase();
                if (key.last().equals(MatchPhase.ATTRIBUTE)) return matchPhase.getAttribute();
                if (key.last().equals(MatchPhase.ASCENDING)) return matchPhase.getAscending();
                if (key.last().equals(MatchPhase.MAX_HITS)) return matchPhase.getMaxHits();
                if (key.last().equals(MatchPhase.MAX_FILTER_COVERAGE)) return matchPhase.getMaxFilterCoverage();
            } else if (key.size() >= 4 && key.get(2).equals(Ranking.DIVERSITY)) {
                Diversity diversity = ranking.getMatchPhase().getDiversity();
                if (key.size() == 4) {
                    if (key.last().equals(Diversity.ATTRIBUTE)) return diversity.getAttribute();
                    if (key.last().equals(Diversity.MINGROUPS)) return diversity.getMinGroups();
                } else if ((key.size() == 5) && key.get(3).equals(Diversity.CUTOFF)) {
                    if (key.last().equals(Diversity.FACTOR)) return diversity.getCutoffFactor();
                    if (key.last().equals(Diversity.STRATEGY)) return diversity.getCutoffStrategy();
                }
            }
        }
        else if (key.size() == 3 && key.get(1).equals(Ranking.SOFTTIMEOUT)) {
            SoftTimeout soft = ranking.getSoftTimeout();
            if (key.last().equals(SoftTimeout.ENABLE)) return soft.getEnable();
            if (key.last().equals(SoftTimeout.FACTOR)) return soft.getFactor();
            if (key.last().equals(SoftTimeout.TAILCOST)) return soft.getTailcost();
        }
        else if (key.size() == 3 && key.get(1).equals(Ranking.MATCHING)) {
            Matching matching = ranking.getMatching();
            if (key.last().equals(Matching.TERMWISELIMIT)) return matching.getTermwiseLimit();
            if (key.last().equals(Matching.NUMTHREADSPERSEARCH)) return matching.getNumThreadsPerSearch();
            if (key.last().equals(Matching.NUMSEARCHPARTITIIONS)) return matching.getNumSearchPartitions();
            if (key.last().equals(Matching.MINHITSPERTHREAD)) return matching.getMinHitsPerThread();
        }
        // ranking.features.* / ranking.properties.*: look up by the remainder of the key
        else if (key.size()>2) {
            if (key.get(1).equals(Ranking.FEATURES)) return ranking.getFeatures().getObject(key.rest().rest().toString());
            if (key.get(1).equals(Ranking.PROPERTIES)) return ranking.getProperties().get(key.rest().rest().toString());
        }
    }
    // select.*: where/grouping strings of the Select clause
    else if (key.size()==2 && key.first().equals(Select.SELECT)) {
        if (key.last().equals(Select.WHERE)) return query.getSelect().getWhereString();
        if (key.last().equals(Select.GROUPING)) return query.getSelect().getGroupingString();
    }
    // presentation.*
    else if (key.size()==2 && key.first().equals(Presentation.PRESENTATION)) {
        if (key.last().equals(Presentation.BOLDING)) return query.getPresentation().getBolding();
        if (key.last().equals(Presentation.SUMMARY)) return query.getPresentation().getSummary();
        if (key.last().equals(Presentation.REPORT_COVERAGE)) return true; // hardcoded to true
        if (key.last().equals(Presentation.FORMAT)) return query.getPresentation().getFormat();
        if (key.last().equals(Presentation.TIMING)) return query.getPresentation().getTiming();
        if (key.last().equals(Presentation.SUMMARY_FIELDS)) return query.getPresentation().getSummaryFields();
    }
    // legacy aliases for rank features and rank properties
    else if (key.first().equals("rankfeature") || key.first().equals("featureoverride")) {
        return query.getRanking().getFeatures().getObject(key.rest().toString());
    } else if (key.first().equals("rankproperty")) {
        return query.getRanking().getProperties().get(key.rest().toString());
    } else if (key.size()==1) {
        // single-element native properties
        if (key.equals(Query.HITS)) return query.getHits();
        if (key.equals(Query.OFFSET)) return query.getOffset();
        if (key.equals(Query.TRACE_LEVEL)) return query.getTraceLevel();
        if (key.equals(Query.TIMEOUT)) return query.getTimeout();
        if (key.equals(Query.NO_CACHE)) return query.getNoCache();
        if (key.equals(Query.GROUPING_SESSION_CACHE)) return query.getGroupingSessionCache();
        if (key.toString().equals(Model.MODEL)) return query.getModel();
        if (key.toString().equals(Ranking.RANKING)) return query.getRanking();
        if (key.toString().equals(Presentation.PRESENTATION)) return query.getPresentation();
    }
    // not a native property: fall back to the generic store
    return super.get(key, context, substitution);
}
/**
 * Lists all properties under the given prefix.
 *
 * In addition to what the generic superclass implementation finds in the backing
 * store, this adds every native query property having the prefix whose value
 * (resolved through {@link #get}) is non-null.
 *
 * @param prefix the compound name prefix to list under
 * @param context the evaluation context passed through to lookups
 * @param substitution the properties used for value substitution
 * @return a map from full property name to value
 */
@SuppressWarnings("deprecation")
@Override
public Map<String, Object> listProperties(CompoundName prefix,
                                          Map<String,String> context,
                                          com.yahoo.processing.request.Properties substitution) {
    Map<String, Object> properties = super.listProperties(prefix, context, substitution);
    for (CompoundName queryProperty : Query.nativeProperties) {
        if (queryProperty.hasPrefix(prefix)) {
            Object value = this.get(queryProperty, context, substitution);
            if (value != null)
                properties.put(queryProperty.toString(), value);
        }
    }
    return properties;
}
/**
 * Stores a rank feature on the given query: Tensor values are stored via the
 * Tensor overload, any other value is stored via its string representation.
 */
private void setRankingFeature(Query query, String key, Object value) {
    if ( ! (value instanceof Tensor)) {
        query.getRanking().getFeatures().put(key, asString(value, ""));
        return;
    }
    query.getRanking().getFeatures().put(key, (Tensor) value);
}
/**
 * Converts a String value to the type declared for the given field in the query
 * profile type. The value is returned unchanged when no conversion applies:
 * non-String value, no profile type, or no field declared for the key.
 */
private Object toSpecifiedType(String key, Object value, QueryProfileType type) {
    if (value instanceof String && type != null) {
        FieldDescription field = type.getField(key);
        if (field != null)
            return field.getType().convertFrom(value, profileRegistry);
    }
    return value;
}
/** Throws an IllegalArgumentException describing an invalid property key in the given namespace. */
private void throwIllegalParameter(String key, String namespace) {
    String message = "'" + key + "' is not a valid property in '" + namespace +
                     "'. See the search api for valid keys starting by '" + namespace + "'.";
    throw new IllegalArgumentException(message);
}
/** Returns the query which owns this property object. */
@Override
public final Query getParentQuery() {
    return query;
}
} |
You also need to change newRequest to access query.getSelect().getGrouping() in the same way. | public static List<GroupingRequest> getRequests(Query query) {
return query.getSelect().getGrouping();
} | return query.getSelect().getGrouping(); | public static List<GroupingRequest> getRequests(Query query) {
return query.getSelect().getGrouping();
} | class that have been attached to the given {@link Query} | class that have been attached to the given {@link Query} |
Did you mean newInstance? | public static List<GroupingRequest> getRequests(Query query) {
return query.getSelect().getGrouping();
} | return query.getSelect().getGrouping(); | public static List<GroupingRequest> getRequests(Query query) {
return query.getSelect().getGrouping();
} | class that have been attached to the given {@link Query} | class that have been attached to the given {@link Query} |
Good. This reminds me though - I don't think we should have a setGrouping such that the list can be changed - only a getGrouping() which always returns a modifiable list. Changing the list instance makes no sense. | public static GroupingRequest newInstance(Query query) {
List<GroupingRequest> lst = getRequests(query);
if (lst.isEmpty()) {
lst = new LinkedList<>();
query.getSelect().setGrouping(lst);
}
GroupingRequest ret = new GroupingRequest(lst.size());
lst.add(ret);
return ret;
} | query.getSelect().setGrouping(lst); | public static GroupingRequest newInstance(Query query) {
List<GroupingRequest> lst = getRequests(query);
GroupingRequest ret = new GroupingRequest(lst.size());
lst.add(ret);
return ret;
} | class to the given {@link Query} | class to the given {@link Query} |
I found this statement a bit hard to read, would this be equivalent? `` Optional<Version> nodeVersion = node.status().osVersion(); if (this.version.isEmpty() || !nodeVersion.isPresent()) return nextMatches(node); return this.version.equals(nodeVersion.get()); `` | public boolean matches(Node node) {
if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) {
return false;
}
return nextMatches(node);
} | if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) { | public boolean matches(Node node) {
if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) {
return false;
}
return nextMatches(node);
} | class NodeOsVersionFilter extends NodeFilter {
private final Version version;
private NodeOsVersionFilter(Version version, NodeFilter next) {
super(next);
this.version = Objects.requireNonNull(version, "version cannot be null");
}
@Override
public static NodeOsVersionFilter from(String version, NodeFilter filter) {
return new NodeOsVersionFilter(Version.fromString(version), filter);
}
} | class NodeOsVersionFilter extends NodeFilter {
private final Version version;
private NodeOsVersionFilter(Version version, NodeFilter next) {
super(next);
this.version = Objects.requireNonNull(version, "version cannot be null");
}
@Override
public static NodeOsVersionFilter from(String version, NodeFilter filter) {
return new NodeOsVersionFilter(Version.fromString(version), filter);
}
} |
Thanks, that's better. Will change it. | public boolean matches(Node node) {
if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) {
return false;
}
return nextMatches(node);
} | if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) { | public boolean matches(Node node) {
if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) {
return false;
}
return nextMatches(node);
} | class NodeOsVersionFilter extends NodeFilter {
private final Version version;
private NodeOsVersionFilter(Version version, NodeFilter next) {
super(next);
this.version = Objects.requireNonNull(version, "version cannot be null");
}
@Override
public static NodeOsVersionFilter from(String version, NodeFilter filter) {
return new NodeOsVersionFilter(Version.fromString(version), filter);
}
} | class NodeOsVersionFilter extends NodeFilter {
private final Version version;
private NodeOsVersionFilter(Version version, NodeFilter next) {
super(next);
this.version = Objects.requireNonNull(version, "version cannot be null");
}
@Override
public static NodeOsVersionFilter from(String version, NodeFilter filter) {
return new NodeOsVersionFilter(Version.fromString(version), filter);
}
} |
Actually, that's not the same thing due to the behavior of `NodeFilter#nextMatches`. Your suggestion would be fine if `NodeFilter#nextMatches` method didn't return `true` for the last filter in the chain. I think the intention is that filters should match a given node if no filters in the chain actively reject it (a bit strange, but other code depends on this behavior). | public boolean matches(Node node) {
if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) {
return false;
}
return nextMatches(node);
} | if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) { | public boolean matches(Node node) {
if (!version.isEmpty() && !node.status().osVersion().filter(v -> v.equals(version)).isPresent()) {
return false;
}
return nextMatches(node);
} | class NodeOsVersionFilter extends NodeFilter {
private final Version version;
private NodeOsVersionFilter(Version version, NodeFilter next) {
super(next);
this.version = Objects.requireNonNull(version, "version cannot be null");
}
@Override
public static NodeOsVersionFilter from(String version, NodeFilter filter) {
return new NodeOsVersionFilter(Version.fromString(version), filter);
}
} | class NodeOsVersionFilter extends NodeFilter {
private final Version version;
private NodeOsVersionFilter(Version version, NodeFilter next) {
super(next);
this.version = Objects.requireNonNull(version, "version cannot be null");
}
@Override
public static NodeOsVersionFilter from(String version, NodeFilter filter) {
return new NodeOsVersionFilter(Version.fromString(version), filter);
}
} |
Nice | protected void deploy(ApplicationId application) {
if (pendingDeployments.addIfAbsent(application)) {
log.log(LogLevel.INFO, application + " will be deployed, last deploy time " +
getLastDeployTime(application));
deploymentExecutor.execute(() -> deployWithLock(application));
}
} | if (pendingDeployments.addIfAbsent(application)) { | protected void deploy(ApplicationId application) {
if (pendingDeployments.addIfAbsent(application)) {
log.log(LogLevel.INFO, application + " will be deployed, last deploy time " +
getLastDeployTime(application));
deploymentExecutor.execute(() -> deployWithLock(application));
}
} | class ApplicationMaintainer extends Maintainer {
private final Deployer deployer;
private final CopyOnWriteArrayList<ApplicationId> pendingDeployments = new CopyOnWriteArrayList<>();
private final ThreadPoolExecutor deploymentExecutor = new ThreadPoolExecutor(1, 1,
0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<>(),
new DaemonThreadFactory("node repo application maintainer"));
protected ApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository, Duration interval, JobControl jobControl) {
super(nodeRepository, interval, jobControl);
this.deployer = deployer;
}
@Override
protected final void maintain() {
applicationsNeedingMaintenance().forEach(this::deploy);
}
/** Returns the number of deployments that are pending execution */
public int pendingDeployments() {
return pendingDeployments.size();
}
/** Returns whether given application should be deployed at this moment in time */
protected boolean canDeployNow(ApplicationId application) {
return true;
}
/**
 * Returns the deployer this maintainer uses.
 *
 * NOTE(review): the javadoc previously attached here ("Redeploy this application...",
 * describing asynchronous deployment) describes deploy(), which submits deployments
 * to the executor — not this accessor. Moved aside to avoid misleading callers.
 */
protected Deployer deployer() { return deployer; }
protected Set<ApplicationId> applicationsNeedingMaintenance() {
return nodesNeedingMaintenance().stream()
.map(node -> node.allocation().get().owner())
.collect(Collectors.toCollection(LinkedHashSet::new));
}
/**
* Returns the nodes whose applications should be maintained by this now.
* This should be some subset of the allocated nodes.
*/
protected abstract List<Node> nodesNeedingMaintenance();
/** Redeploy this application. A lock will be taken for the duration of the deployment activation */
protected final void deployWithLock(ApplicationId application) {
try (Mutex lock = nodeRepository().lock(application, Duration.ofSeconds(1))) {
if ( ! isActive(application)) return;
if ( ! canDeployNow(application)) return;
Optional<Deployment> deployment = deployer.deployFromLocalActive(application);
if ( ! deployment.isPresent()) return;
log.log(LogLevel.DEBUG, this.getClass().getSimpleName() + " deploying " + application);
deployment.get().activate();
} catch (RuntimeException e) {
log.log(LogLevel.WARNING, "Exception on maintenance redeploy", e);
} finally {
pendingDeployments.remove(application);
}
}
/** Returns the last time application was deployed. Epoch is returned if the application has never been deployed. */
protected final Instant getLastDeployTime(ApplicationId application) {
return deployer.lastDeployTime(application).orElse(Instant.EPOCH);
}
/** Returns true when application has at least one active node */
private boolean isActive(ApplicationId application) {
return ! nodeRepository().getNodes(application, Node.State.active).isEmpty();
}
/**
 * Shuts down this maintainer: deconstructs the superclass, aborts pending
 * deployment tasks and waits up to one minute for the executor to terminate.
 */
@Override
public void deconstruct() {
    super.deconstruct();
    this.deploymentExecutor.shutdownNow();
    try {
        // Result intentionally ignored: shutdownNow has already been issued,
        // so there is nothing further to do on timeout.
        this.deploymentExecutor.awaitTermination(1, TimeUnit.MINUTES);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // restore interrupt status before rethrowing
        throw new RuntimeException(e);
    }
}
} | class ApplicationMaintainer extends Maintainer {
private final Deployer deployer;
private final CopyOnWriteArrayList<ApplicationId> pendingDeployments = new CopyOnWriteArrayList<>();
private final ThreadPoolExecutor deploymentExecutor = new ThreadPoolExecutor(1, 1,
0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<>(),
new DaemonThreadFactory("node repo application maintainer"));
protected ApplicationMaintainer(Deployer deployer, NodeRepository nodeRepository, Duration interval, JobControl jobControl) {
super(nodeRepository, interval, jobControl);
this.deployer = deployer;
}
@Override
protected final void maintain() {
applicationsNeedingMaintenance().forEach(this::deploy);
}
/** Returns the number of deployments that are pending execution */
public int pendingDeployments() {
return pendingDeployments.size();
}
/** Returns whether given application should be deployed at this moment in time */
protected boolean canDeployNow(ApplicationId application) {
return true;
}
/**
 * Returns the deployer this maintainer uses.
 *
 * NOTE(review): the javadoc previously attached here ("Redeploy this application...",
 * describing asynchronous deployment) describes deploy(), which submits deployments
 * to the executor — not this accessor. Moved aside to avoid misleading callers.
 */
protected Deployer deployer() { return deployer; }
protected Set<ApplicationId> applicationsNeedingMaintenance() {
return nodesNeedingMaintenance().stream()
.map(node -> node.allocation().get().owner())
.collect(Collectors.toCollection(LinkedHashSet::new));
}
/**
* Returns the nodes whose applications should be maintained by this now.
* This should be some subset of the allocated nodes.
*/
protected abstract List<Node> nodesNeedingMaintenance();
/** Redeploy this application. A lock will be taken for the duration of the deployment activation */
protected final void deployWithLock(ApplicationId application) {
try (Mutex lock = nodeRepository().lock(application, Duration.ofSeconds(1))) {
if ( ! isActive(application)) return;
if ( ! canDeployNow(application)) return;
Optional<Deployment> deployment = deployer.deployFromLocalActive(application);
if ( ! deployment.isPresent()) return;
log.log(LogLevel.DEBUG, this.getClass().getSimpleName() + " deploying " + application);
deployment.get().activate();
} catch (RuntimeException e) {
log.log(LogLevel.WARNING, "Exception on maintenance redeploy", e);
} finally {
pendingDeployments.remove(application);
}
}
/** Returns the last time application was deployed. Epoch is returned if the application has never been deployed. */
protected final Instant getLastDeployTime(ApplicationId application) {
return deployer.lastDeployTime(application).orElse(Instant.EPOCH);
}
/** Returns true when application has at least one active node */
private boolean isActive(ApplicationId application) {
return ! nodeRepository().getNodes(application, Node.State.active).isEmpty();
}
/**
 * Shuts down this maintainer: deconstructs the superclass, aborts pending
 * deployment tasks and waits up to one minute for the executor to terminate.
 */
@Override
public void deconstruct() {
    super.deconstruct();
    this.deploymentExecutor.shutdownNow();
    try {
        // Result intentionally ignored: shutdownNow has already been issued,
        // so there is nothing further to do on timeout.
        this.deploymentExecutor.awaitTermination(1, TimeUnit.MINUTES);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // restore interrupt status before rethrowing
        throw new RuntimeException(e);
    }
}
} |
Just curious, why this change? | private void assertFile(HttpResponse response, String resourceName) {
try {
Path path = Paths.get("src/test/resources/").resolve(resourceName);
String expected = new String(Files.readAllBytes(path));
compare(response, expected);
} catch (Exception e) {
throw new RuntimeException(e);
}
} | Path path = Paths.get("src/test/resources/").resolve(resourceName); | private void assertFile(HttpResponse response, String resourceName) {
try {
Path path = Paths.get("src/test/resources/").resolve(resourceName);
String expected = new String(Files.readAllBytes(path));
compare(response, expected);
} catch (Exception e) {
throw new RuntimeException(e);
}
} | class JobControllerApiHandlerHelperTest {
private final ApplicationId appId = ApplicationId.from("vespa", "music", "default");
private final Instant start = Instant.parse("2018-06-27T10:12:35Z");
private static final Versions versions = new Versions(Version.fromString("1.2.3"),
ApplicationVersion.from(new SourceRevision("repo",
"branch",
"bada55"),
321),
Optional.empty(),
Optional.empty());
private static Step lastStep = Step.values()[Step.values().length - 1];
@Test
public void jobTypeResponse() {
Map<JobType, Run> jobMap = new HashMap<>();
List<JobType> jobList = new ArrayList<>();
jobMap.put(JobType.systemTest, createRun(JobType.systemTest, 1, 30, lastStep, Optional.of(RunStatus.running)));
jobList.add(JobType.systemTest);
jobMap.put(JobType.productionApNortheast1, createRun(JobType.productionApNortheast1, 1, 60, lastStep, Optional.of(RunStatus.running)));
jobList.add(JobType.productionApNortheast1);
jobMap.put(JobType.productionUsWest1, createRun(JobType.productionUsWest1, 1, 60, Step.startTests, Optional.of(RunStatus.error)));
jobList.add(JobType.productionUsWest1);
URI jobUrl = URI.create("https:
HttpResponse response = JobControllerApiHandlerHelper.jobTypeResponse(jobList, jobMap, jobUrl);
assertFile(response, "job/job-type-response.json");
}
@Test
public void runResponse() {
Map<RunId, Run> runs = new HashMap<>();
Run run;
run = createRun(JobType.systemTest, 3, 30, lastStep, Optional.of(RunStatus.running));
runs.put(run.id(), run);
run = createRun(JobType.systemTest, 2, 56, Step.installReal, Optional.of(RunStatus.error));
runs.put(run.id(), run);
run = createRun(JobType.systemTest, 1, 44, lastStep, Optional.of(RunStatus.running));
runs.put(run.id(), run);
URI jobTypeUrl = URI.create("https:
HttpResponse response = JobControllerApiHandlerHelper.runResponse(runs, jobTypeUrl);
assertFile(response, "job/run-status-response.json");
}
@Test
public void runDetailsResponse() {
ControllerTester tester = new ControllerTester();
MockLogStore logStore = new MockLogStore();
JobController jobController = new JobController(tester.controller(), logStore);
RunId runId = new RunId(appId, JobType.systemTest, 42);
tester.curator().writeHistoricRuns(
runId.application(),
runId.type(),
Collections.singleton(createRun(JobType.systemTest, 42, 44, lastStep, Optional.of(RunStatus.running))));
logStore.append(runId, Step.deployTester.name(), "INFO\t1234567890\tSUCCESS".getBytes());
logStore.append(runId, Step.installTester.name(), "INFO\t1234598760\tSUCCESS".getBytes());
logStore.append(runId, Step.deactivateTester.name(), "INFO\t1234678901\tERROR: Something went wrong".getBytes());
HttpResponse response = JobControllerApiHandlerHelper.runDetailsResponse(jobController, runId);
assertFile(response, "job/run-details-response.json");
}
@Test
public void submitResponse() {
ControllerTester tester = new ControllerTester();
tester.createTenant("tenant", "domain", 1L);
tester.createApplication(TenantName.from("tenant"), "application", "default", 1L);
JobController jobController = new JobController(tester.controller(), new MockLogStore());
HttpResponse response = JobControllerApiHandlerHelper.submitResponse(
jobController, "tenant", "application", new SourceRevision("repository", "branch", "commit"), new byte[0], new byte[0]);
compare(response, "{\"version\":\"1.0.1-commit\"}");
}
private Run createRun(JobType type, long runid, long duration, Step lastStep, Optional<RunStatus> lastStepStatus) {
RunId runId = new RunId(appId, type, runid);
Map<Step, Step.Status> stepStatusMap = new HashMap<>();
for (Step step : Step.values()) {
if (step.ordinal() < lastStep.ordinal()) {
stepStatusMap.put(step, Step.Status.succeeded);
} else if (step.equals(lastStep) && lastStepStatus.isPresent()) {
stepStatusMap.put(step, Step.Status.of(lastStepStatus.get()));
} else {
stepStatusMap.put(step, Step.Status.unfinished);
}
}
Optional<Instant> end = Optional.empty();
if (lastStepStatus.isPresent() && lastStep == JobControllerApiHandlerHelperTest.lastStep) {
end = Optional.of(start.plusSeconds(duration));
}
RunStatus status = end.isPresent() && lastStepStatus.equals(Optional.of(RunStatus.running))
? RunStatus.success
: lastStepStatus.orElse(RunStatus.running);
return new Run(runId, stepStatusMap, versions, start, end, status, -1);
}
/**
 * Renders the given response and asserts that its content equals the expected
 * JSON string, after normalizing both sides through JSONObject.
 */
private void compare(HttpResponse response, String expected) {
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        response.render(baos);
        // Decode explicitly as UTF-8 instead of relying on the platform default charset
        String actual = new String(baos.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);
        JSONObject actualJSON = new JSONObject(actual);
        JSONObject expectedJSON = new JSONObject(expected);
        Assert.assertEquals(expectedJSON.toString(), actualJSON.toString());
    } catch (IOException | JSONException e) {
        // Surface the cause instead of failing silently
        fail("Failed comparing rendered response to expected JSON: " + e);
    }
}
} | class JobControllerApiHandlerHelperTest {
private final ApplicationId appId = ApplicationId.from("vespa", "music", "default");
private final Instant start = Instant.parse("2018-06-27T10:12:35Z");
private static final Versions versions = new Versions(Version.fromString("1.2.3"),
ApplicationVersion.from(new SourceRevision("repo",
"branch",
"bada55"),
321),
Optional.empty(),
Optional.empty());
private static Step lastStep = Step.values()[Step.values().length - 1];
@Test
public void jobTypeResponse() {
Map<JobType, Run> jobMap = new HashMap<>();
List<JobType> jobList = new ArrayList<>();
jobMap.put(JobType.systemTest, createRun(JobType.systemTest, 1, 30, lastStep, Optional.of(RunStatus.running)));
jobList.add(JobType.systemTest);
jobMap.put(JobType.productionApNortheast1, createRun(JobType.productionApNortheast1, 1, 60, lastStep, Optional.of(RunStatus.running)));
jobList.add(JobType.productionApNortheast1);
jobMap.put(JobType.productionUsWest1, createRun(JobType.productionUsWest1, 1, 60, Step.startTests, Optional.of(RunStatus.error)));
jobList.add(JobType.productionUsWest1);
URI jobUrl = URI.create("https:
HttpResponse response = JobControllerApiHandlerHelper.jobTypeResponse(jobList, jobMap, jobUrl);
assertFile(response, "job/job-type-response.json");
}
@Test
public void runResponse() {
Map<RunId, Run> runs = new HashMap<>();
Run run;
run = createRun(JobType.systemTest, 3, 30, lastStep, Optional.of(RunStatus.running));
runs.put(run.id(), run);
run = createRun(JobType.systemTest, 2, 56, Step.installReal, Optional.of(RunStatus.error));
runs.put(run.id(), run);
run = createRun(JobType.systemTest, 1, 44, lastStep, Optional.of(RunStatus.running));
runs.put(run.id(), run);
URI jobTypeUrl = URI.create("https:
HttpResponse response = JobControllerApiHandlerHelper.runResponse(runs, jobTypeUrl);
assertFile(response, "job/run-status-response.json");
}
@Test
public void runDetailsResponse() {
ControllerTester tester = new ControllerTester();
MockLogStore logStore = new MockLogStore();
JobController jobController = new JobController(tester.controller(), logStore);
RunId runId = new RunId(appId, JobType.systemTest, 42);
tester.curator().writeHistoricRuns(
runId.application(),
runId.type(),
Collections.singleton(createRun(JobType.systemTest, 42, 44, lastStep, Optional.of(RunStatus.running))));
logStore.append(runId, Step.deployTester.name(), "INFO\t1234567890\tSUCCESS".getBytes());
logStore.append(runId, Step.installTester.name(), "INFO\t1234598760\tSUCCESS".getBytes());
logStore.append(runId, Step.deactivateTester.name(), "INFO\t1234678901\tERROR: Something went wrong".getBytes());
HttpResponse response = JobControllerApiHandlerHelper.runDetailsResponse(jobController, runId);
assertFile(response, "job/run-details-response.json");
}
@Test
public void submitResponse() {
ControllerTester tester = new ControllerTester();
tester.createTenant("tenant", "domain", 1L);
tester.createApplication(TenantName.from("tenant"), "application", "default", 1L);
JobController jobController = new JobController(tester.controller(), new MockLogStore());
HttpResponse response = JobControllerApiHandlerHelper.submitResponse(
jobController, "tenant", "application", new SourceRevision("repository", "branch", "commit"), new byte[0], new byte[0]);
compare(response, "{\"version\":\"1.0.1-commit\"}");
}
private Run createRun(JobType type, long runid, long duration, Step lastStep, Optional<RunStatus> lastStepStatus) {
RunId runId = new RunId(appId, type, runid);
Map<Step, Step.Status> stepStatusMap = new HashMap<>();
for (Step step : Step.values()) {
if (step.ordinal() < lastStep.ordinal()) {
stepStatusMap.put(step, Step.Status.succeeded);
} else if (step.equals(lastStep) && lastStepStatus.isPresent()) {
stepStatusMap.put(step, Step.Status.of(lastStepStatus.get()));
} else {
stepStatusMap.put(step, Step.Status.unfinished);
}
}
Optional<Instant> end = Optional.empty();
if (lastStepStatus.isPresent() && lastStep == JobControllerApiHandlerHelperTest.lastStep) {
end = Optional.of(start.plusSeconds(duration));
}
RunStatus status = end.isPresent() && lastStepStatus.equals(Optional.of(RunStatus.running))
? RunStatus.success
: lastStepStatus.orElse(RunStatus.running);
return new Run(runId, stepStatusMap, versions, start, end, status, -1);
}
/**
 * Renders the given response and asserts that its content equals the expected
 * JSON string, after normalizing both sides through JSONObject.
 */
private void compare(HttpResponse response, String expected) {
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        response.render(baos);
        // Decode explicitly as UTF-8 instead of relying on the platform default charset
        String actual = new String(baos.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);
        JSONObject actualJSON = new JSONObject(actual);
        JSONObject expectedJSON = new JSONObject(expected);
        Assert.assertEquals(expectedJSON.toString(), actualJSON.toString());
    } catch (IOException | JSONException e) {
        // Surface the cause instead of failing silently
        fail("Failed comparing rendered response to expected JSON: " + e);
    }
}
} |
IntelliJ sometimes gets confused which class path to use, which causes tests that use this method to fail with NPE. I think it happens after I install the module locally with `mvn install`, which I sometimes do to check for breaking API between this and internal repo. | private void assertFile(HttpResponse response, String resourceName) {
try {
Path path = Paths.get("src/test/resources/").resolve(resourceName);
String expected = new String(Files.readAllBytes(path));
compare(response, expected);
} catch (Exception e) {
throw new RuntimeException(e);
}
} | Path path = Paths.get("src/test/resources/").resolve(resourceName); | private void assertFile(HttpResponse response, String resourceName) {
try {
Path path = Paths.get("src/test/resources/").resolve(resourceName);
String expected = new String(Files.readAllBytes(path));
compare(response, expected);
} catch (Exception e) {
throw new RuntimeException(e);
}
} | class JobControllerApiHandlerHelperTest {
private final ApplicationId appId = ApplicationId.from("vespa", "music", "default");
private final Instant start = Instant.parse("2018-06-27T10:12:35Z");
private static final Versions versions = new Versions(Version.fromString("1.2.3"),
ApplicationVersion.from(new SourceRevision("repo",
"branch",
"bada55"),
321),
Optional.empty(),
Optional.empty());
private static Step lastStep = Step.values()[Step.values().length - 1];
@Test
public void jobTypeResponse() {
Map<JobType, Run> jobMap = new HashMap<>();
List<JobType> jobList = new ArrayList<>();
jobMap.put(JobType.systemTest, createRun(JobType.systemTest, 1, 30, lastStep, Optional.of(RunStatus.running)));
jobList.add(JobType.systemTest);
jobMap.put(JobType.productionApNortheast1, createRun(JobType.productionApNortheast1, 1, 60, lastStep, Optional.of(RunStatus.running)));
jobList.add(JobType.productionApNortheast1);
jobMap.put(JobType.productionUsWest1, createRun(JobType.productionUsWest1, 1, 60, Step.startTests, Optional.of(RunStatus.error)));
jobList.add(JobType.productionUsWest1);
URI jobUrl = URI.create("https:
HttpResponse response = JobControllerApiHandlerHelper.jobTypeResponse(jobList, jobMap, jobUrl);
assertFile(response, "job/job-type-response.json");
}
@Test
public void runResponse() {
Map<RunId, Run> runs = new HashMap<>();
Run run;
run = createRun(JobType.systemTest, 3, 30, lastStep, Optional.of(RunStatus.running));
runs.put(run.id(), run);
run = createRun(JobType.systemTest, 2, 56, Step.installReal, Optional.of(RunStatus.error));
runs.put(run.id(), run);
run = createRun(JobType.systemTest, 1, 44, lastStep, Optional.of(RunStatus.running));
runs.put(run.id(), run);
URI jobTypeUrl = URI.create("https:
HttpResponse response = JobControllerApiHandlerHelper.runResponse(runs, jobTypeUrl);
assertFile(response, "job/run-status-response.json");
}
@Test
public void runDetailsResponse() {
ControllerTester tester = new ControllerTester();
MockLogStore logStore = new MockLogStore();
JobController jobController = new JobController(tester.controller(), logStore);
RunId runId = new RunId(appId, JobType.systemTest, 42);
tester.curator().writeHistoricRuns(
runId.application(),
runId.type(),
Collections.singleton(createRun(JobType.systemTest, 42, 44, lastStep, Optional.of(RunStatus.running))));
logStore.append(runId, Step.deployTester.name(), "INFO\t1234567890\tSUCCESS".getBytes());
logStore.append(runId, Step.installTester.name(), "INFO\t1234598760\tSUCCESS".getBytes());
logStore.append(runId, Step.deactivateTester.name(), "INFO\t1234678901\tERROR: Something went wrong".getBytes());
HttpResponse response = JobControllerApiHandlerHelper.runDetailsResponse(jobController, runId);
assertFile(response, "job/run-details-response.json");
}
@Test
public void submitResponse() {
ControllerTester tester = new ControllerTester();
tester.createTenant("tenant", "domain", 1L);
tester.createApplication(TenantName.from("tenant"), "application", "default", 1L);
JobController jobController = new JobController(tester.controller(), new MockLogStore());
HttpResponse response = JobControllerApiHandlerHelper.submitResponse(
jobController, "tenant", "application", new SourceRevision("repository", "branch", "commit"), new byte[0], new byte[0]);
compare(response, "{\"version\":\"1.0.1-commit\"}");
}
private Run createRun(JobType type, long runid, long duration, Step lastStep, Optional<RunStatus> lastStepStatus) {
RunId runId = new RunId(appId, type, runid);
Map<Step, Step.Status> stepStatusMap = new HashMap<>();
for (Step step : Step.values()) {
if (step.ordinal() < lastStep.ordinal()) {
stepStatusMap.put(step, Step.Status.succeeded);
} else if (step.equals(lastStep) && lastStepStatus.isPresent()) {
stepStatusMap.put(step, Step.Status.of(lastStepStatus.get()));
} else {
stepStatusMap.put(step, Step.Status.unfinished);
}
}
Optional<Instant> end = Optional.empty();
if (lastStepStatus.isPresent() && lastStep == JobControllerApiHandlerHelperTest.lastStep) {
end = Optional.of(start.plusSeconds(duration));
}
RunStatus status = end.isPresent() && lastStepStatus.equals(Optional.of(RunStatus.running))
? RunStatus.success
: lastStepStatus.orElse(RunStatus.running);
return new Run(runId, stepStatusMap, versions, start, end, status, -1);
}
private void compare(HttpResponse response, String expected) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
response.render(baos);
String actual = new String(baos.toByteArray());
JSONObject actualJSON = new JSONObject(actual);
JSONObject expectedJSON = new JSONObject(expected);
Assert.assertEquals(expectedJSON.toString(), actualJSON.toString());
} catch (IOException | JSONException e) {
fail();
}
}
} | class JobControllerApiHandlerHelperTest {
private final ApplicationId appId = ApplicationId.from("vespa", "music", "default");
private final Instant start = Instant.parse("2018-06-27T10:12:35Z");
private static final Versions versions = new Versions(Version.fromString("1.2.3"),
ApplicationVersion.from(new SourceRevision("repo",
"branch",
"bada55"),
321),
Optional.empty(),
Optional.empty());
private static Step lastStep = Step.values()[Step.values().length - 1];
@Test
public void jobTypeResponse() {
Map<JobType, Run> jobMap = new HashMap<>();
List<JobType> jobList = new ArrayList<>();
jobMap.put(JobType.systemTest, createRun(JobType.systemTest, 1, 30, lastStep, Optional.of(RunStatus.running)));
jobList.add(JobType.systemTest);
jobMap.put(JobType.productionApNortheast1, createRun(JobType.productionApNortheast1, 1, 60, lastStep, Optional.of(RunStatus.running)));
jobList.add(JobType.productionApNortheast1);
jobMap.put(JobType.productionUsWest1, createRun(JobType.productionUsWest1, 1, 60, Step.startTests, Optional.of(RunStatus.error)));
jobList.add(JobType.productionUsWest1);
URI jobUrl = URI.create("https:
HttpResponse response = JobControllerApiHandlerHelper.jobTypeResponse(jobList, jobMap, jobUrl);
assertFile(response, "job/job-type-response.json");
}
@Test
public void runResponse() {
Map<RunId, Run> runs = new HashMap<>();
Run run;
run = createRun(JobType.systemTest, 3, 30, lastStep, Optional.of(RunStatus.running));
runs.put(run.id(), run);
run = createRun(JobType.systemTest, 2, 56, Step.installReal, Optional.of(RunStatus.error));
runs.put(run.id(), run);
run = createRun(JobType.systemTest, 1, 44, lastStep, Optional.of(RunStatus.running));
runs.put(run.id(), run);
URI jobTypeUrl = URI.create("https:
HttpResponse response = JobControllerApiHandlerHelper.runResponse(runs, jobTypeUrl);
assertFile(response, "job/run-status-response.json");
}
@Test
public void runDetailsResponse() {
ControllerTester tester = new ControllerTester();
MockLogStore logStore = new MockLogStore();
JobController jobController = new JobController(tester.controller(), logStore);
RunId runId = new RunId(appId, JobType.systemTest, 42);
tester.curator().writeHistoricRuns(
runId.application(),
runId.type(),
Collections.singleton(createRun(JobType.systemTest, 42, 44, lastStep, Optional.of(RunStatus.running))));
logStore.append(runId, Step.deployTester.name(), "INFO\t1234567890\tSUCCESS".getBytes());
logStore.append(runId, Step.installTester.name(), "INFO\t1234598760\tSUCCESS".getBytes());
logStore.append(runId, Step.deactivateTester.name(), "INFO\t1234678901\tERROR: Something went wrong".getBytes());
HttpResponse response = JobControllerApiHandlerHelper.runDetailsResponse(jobController, runId);
assertFile(response, "job/run-details-response.json");
}
@Test
public void submitResponse() {
ControllerTester tester = new ControllerTester();
tester.createTenant("tenant", "domain", 1L);
tester.createApplication(TenantName.from("tenant"), "application", "default", 1L);
JobController jobController = new JobController(tester.controller(), new MockLogStore());
HttpResponse response = JobControllerApiHandlerHelper.submitResponse(
jobController, "tenant", "application", new SourceRevision("repository", "branch", "commit"), new byte[0], new byte[0]);
compare(response, "{\"version\":\"1.0.1-commit\"}");
}
private Run createRun(JobType type, long runid, long duration, Step lastStep, Optional<RunStatus> lastStepStatus) {
RunId runId = new RunId(appId, type, runid);
Map<Step, Step.Status> stepStatusMap = new HashMap<>();
for (Step step : Step.values()) {
if (step.ordinal() < lastStep.ordinal()) {
stepStatusMap.put(step, Step.Status.succeeded);
} else if (step.equals(lastStep) && lastStepStatus.isPresent()) {
stepStatusMap.put(step, Step.Status.of(lastStepStatus.get()));
} else {
stepStatusMap.put(step, Step.Status.unfinished);
}
}
Optional<Instant> end = Optional.empty();
if (lastStepStatus.isPresent() && lastStep == JobControllerApiHandlerHelperTest.lastStep) {
end = Optional.of(start.plusSeconds(duration));
}
RunStatus status = end.isPresent() && lastStepStatus.equals(Optional.of(RunStatus.running))
? RunStatus.success
: lastStepStatus.orElse(RunStatus.running);
return new Run(runId, stepStatusMap, versions, start, end, status, -1);
}
private void compare(HttpResponse response, String expected) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
response.render(baos);
String actual = new String(baos.toByteArray());
JSONObject actualJSON = new JSONObject(actual);
JSONObject expectedJSON = new JSONObject(expected);
Assert.assertEquals(expectedJSON.toString(), actualJSON.toString());
} catch (IOException | JSONException e) {
fail();
}
}
} |
PS: Objects.hash(...) | public int hashCode() {
int result = targetPlatform.hashCode();
result = 31 * result + targetApplication.hashCode();
result = 31 * result + sourcePlatform.hashCode();
result = 31 * result + sourceApplication.hashCode();
return result;
} | int result = targetPlatform.hashCode(); | public int hashCode() {
int result = targetPlatform.hashCode();
result = 31 * result + targetApplication.hashCode();
result = 31 * result + sourcePlatform.hashCode();
result = 31 * result + sourceApplication.hashCode();
return result;
} | class Versions {
private final Version targetPlatform;
private final ApplicationVersion targetApplication;
private final Optional<Version> sourcePlatform;
private final Optional<ApplicationVersion> sourceApplication;
public Versions(Version targetPlatform, ApplicationVersion targetApplication, Optional<Version> sourcePlatform,
Optional<ApplicationVersion> sourceApplication) {
if (sourcePlatform.isPresent() ^ sourceApplication.isPresent())
throw new IllegalArgumentException("Sources must both be present or absent.");
this.targetPlatform = requireNonNull(targetPlatform);
this.targetApplication = requireNonNull(targetApplication);
this.sourcePlatform = requireNonNull(sourcePlatform);
this.sourceApplication = requireNonNull(sourceApplication);
}
/** Target platform version for this */
public Version targetPlatform() {
return targetPlatform;
}
/** Target application version for this */
public ApplicationVersion targetApplication() {
return targetApplication;
}
/** Source platform version for this */
public Optional<Version> sourcePlatform() {
return sourcePlatform;
}
/** Source application version for this */
public Optional<ApplicationVersion> sourceApplication() {
return sourceApplication;
}
/** Returns whether source versions are present and match those of the given job run */
public boolean sourcesMatchIfPresent(JobStatus.JobRun jobRun) {
return (!sourcePlatform.filter(version -> !version.equals(targetPlatform)).isPresent() ||
sourcePlatform.equals(jobRun.sourcePlatform())) &&
(!sourceApplication.filter(version -> !version.equals(targetApplication)).isPresent() ||
sourceApplication.equals(jobRun.sourceApplication()));
}
public boolean targetsMatch(JobStatus.JobRun jobRun) {
return targetPlatform.equals(jobRun.platform()) &&
targetApplication.equals(jobRun.application());
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if ( ! (o instanceof Versions)) return false;
Versions versions = (Versions) o;
if ( ! targetPlatform.equals(versions.targetPlatform)) return false;
if ( ! targetApplication.equals(versions.targetApplication)) return false;
if ( ! sourcePlatform.equals(versions.sourcePlatform)) return false;
return sourceApplication.equals(versions.sourceApplication);
}
@Override
@Override
public String toString() {
return String.format("platform %s%s, application %s%s",
sourcePlatform.filter(source -> !source.equals(targetPlatform))
.map(source -> source + " -> ").orElse(""),
targetPlatform,
sourceApplication.filter(source -> !source.equals(targetApplication))
.map(source -> source.id() + " -> ").orElse(""),
targetApplication.id());
}
/** Create versions using change contained in application */
public static Versions from(Application application, Version defaultPlatformVersion) {
return from(application.change(), application, Optional.empty(), defaultPlatformVersion);
}
/** Create versions using given change and application */
public static Versions from(Change change, Application application, Optional<Deployment> deployment,
Version defaultPlatformVersion) {
return new Versions(targetPlatform(application, change, deployment, defaultPlatformVersion),
targetApplication(application, change, deployment),
deployment.map(Deployment::version),
deployment.map(Deployment::applicationVersion));
}
private static Version targetPlatform(Application application, Change change, Optional<Deployment> deployment,
Version defaultVersion) {
return max(deployment.map(Deployment::version), change.platform())
.orElse(application.oldestDeployedPlatform()
.orElse(defaultVersion));
}
private static ApplicationVersion targetApplication(Application application, Change change,
Optional<Deployment> deployment) {
return max(deployment.map(Deployment::applicationVersion), change.application())
.orElse(application.oldestDeployedApplication()
.orElse(application.deploymentJobs().jobStatus().get(JobType.component)
.lastSuccess()
.get()
.application()));
}
private static <T extends Comparable<T>> Optional<T> max(Optional<T> o1, Optional<T> o2) {
return ! o1.isPresent() ? o2 : ! o2.isPresent() ? o1 : o1.get().compareTo(o2.get()) >= 0 ? o1 : o2;
}
} | class Versions {
private final Version targetPlatform;
private final ApplicationVersion targetApplication;
private final Optional<Version> sourcePlatform;
private final Optional<ApplicationVersion> sourceApplication;
public Versions(Version targetPlatform, ApplicationVersion targetApplication, Optional<Version> sourcePlatform,
Optional<ApplicationVersion> sourceApplication) {
if (sourcePlatform.isPresent() ^ sourceApplication.isPresent())
throw new IllegalArgumentException("Sources must both be present or absent.");
this.targetPlatform = requireNonNull(targetPlatform);
this.targetApplication = requireNonNull(targetApplication);
this.sourcePlatform = requireNonNull(sourcePlatform);
this.sourceApplication = requireNonNull(sourceApplication);
}
/** Target platform version for this */
public Version targetPlatform() {
return targetPlatform;
}
/** Target application version for this */
public ApplicationVersion targetApplication() {
return targetApplication;
}
/** Source platform version for this */
public Optional<Version> sourcePlatform() {
return sourcePlatform;
}
/** Source application version for this */
public Optional<ApplicationVersion> sourceApplication() {
return sourceApplication;
}
/** Returns whether source versions are present and match those of the given job run */
public boolean sourcesMatchIfPresent(JobStatus.JobRun jobRun) {
return (!sourcePlatform.filter(version -> !version.equals(targetPlatform)).isPresent() ||
sourcePlatform.equals(jobRun.sourcePlatform())) &&
(!sourceApplication.filter(version -> !version.equals(targetApplication)).isPresent() ||
sourceApplication.equals(jobRun.sourceApplication()));
}
public boolean targetsMatch(JobStatus.JobRun jobRun) {
return targetPlatform.equals(jobRun.platform()) &&
targetApplication.equals(jobRun.application());
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if ( ! (o instanceof Versions)) return false;
Versions versions = (Versions) o;
if ( ! targetPlatform.equals(versions.targetPlatform)) return false;
if ( ! targetApplication.equals(versions.targetApplication)) return false;
if ( ! sourcePlatform.equals(versions.sourcePlatform)) return false;
return sourceApplication.equals(versions.sourceApplication);
}
@Override
@Override
public String toString() {
return String.format("platform %s%s, application %s%s",
sourcePlatform.filter(source -> !source.equals(targetPlatform))
.map(source -> source + " -> ").orElse(""),
targetPlatform,
sourceApplication.filter(source -> !source.equals(targetApplication))
.map(source -> source.id() + " -> ").orElse(""),
targetApplication.id());
}
/** Create versions using change contained in application */
public static Versions from(Application application, Version defaultPlatformVersion) {
return from(application.change(), application, Optional.empty(), defaultPlatformVersion);
}
/** Create versions using given change and application */
public static Versions from(Change change, Application application, Optional<Deployment> deployment,
Version defaultPlatformVersion) {
return new Versions(targetPlatform(application, change, deployment, defaultPlatformVersion),
targetApplication(application, change, deployment),
deployment.map(Deployment::version),
deployment.map(Deployment::applicationVersion));
}
private static Version targetPlatform(Application application, Change change, Optional<Deployment> deployment,
Version defaultVersion) {
return max(deployment.map(Deployment::version), change.platform())
.orElse(application.oldestDeployedPlatform()
.orElse(defaultVersion));
}
private static ApplicationVersion targetApplication(Application application, Change change,
Optional<Deployment> deployment) {
return max(deployment.map(Deployment::applicationVersion), change.application())
.orElse(application.oldestDeployedApplication()
.orElse(application.deploymentJobs().jobStatus().get(JobType.component)
.lastSuccess()
.get()
.application()));
}
private static <T extends Comparable<T>> Optional<T> max(Optional<T> o1, Optional<T> o2) {
return ! o1.isPresent() ? o2 : ! o2.isPresent() ? o1 : o1.get().compareTo(o2.get()) >= 0 ? o1 : o2;
}
} |
Yes, that's cleaner. I'll change the IntelliJ settings ... | public int hashCode() {
int result = targetPlatform.hashCode();
result = 31 * result + targetApplication.hashCode();
result = 31 * result + sourcePlatform.hashCode();
result = 31 * result + sourceApplication.hashCode();
return result;
} | int result = targetPlatform.hashCode(); | public int hashCode() {
int result = targetPlatform.hashCode();
result = 31 * result + targetApplication.hashCode();
result = 31 * result + sourcePlatform.hashCode();
result = 31 * result + sourceApplication.hashCode();
return result;
} | class Versions {
private final Version targetPlatform;
private final ApplicationVersion targetApplication;
private final Optional<Version> sourcePlatform;
private final Optional<ApplicationVersion> sourceApplication;
public Versions(Version targetPlatform, ApplicationVersion targetApplication, Optional<Version> sourcePlatform,
Optional<ApplicationVersion> sourceApplication) {
if (sourcePlatform.isPresent() ^ sourceApplication.isPresent())
throw new IllegalArgumentException("Sources must both be present or absent.");
this.targetPlatform = requireNonNull(targetPlatform);
this.targetApplication = requireNonNull(targetApplication);
this.sourcePlatform = requireNonNull(sourcePlatform);
this.sourceApplication = requireNonNull(sourceApplication);
}
/** Target platform version for this */
public Version targetPlatform() {
return targetPlatform;
}
/** Target application version for this */
public ApplicationVersion targetApplication() {
return targetApplication;
}
/** Source platform version for this */
public Optional<Version> sourcePlatform() {
return sourcePlatform;
}
/** Source application version for this */
public Optional<ApplicationVersion> sourceApplication() {
return sourceApplication;
}
/** Returns whether source versions are present and match those of the given job run */
public boolean sourcesMatchIfPresent(JobStatus.JobRun jobRun) {
return (!sourcePlatform.filter(version -> !version.equals(targetPlatform)).isPresent() ||
sourcePlatform.equals(jobRun.sourcePlatform())) &&
(!sourceApplication.filter(version -> !version.equals(targetApplication)).isPresent() ||
sourceApplication.equals(jobRun.sourceApplication()));
}
public boolean targetsMatch(JobStatus.JobRun jobRun) {
return targetPlatform.equals(jobRun.platform()) &&
targetApplication.equals(jobRun.application());
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if ( ! (o instanceof Versions)) return false;
Versions versions = (Versions) o;
if ( ! targetPlatform.equals(versions.targetPlatform)) return false;
if ( ! targetApplication.equals(versions.targetApplication)) return false;
if ( ! sourcePlatform.equals(versions.sourcePlatform)) return false;
return sourceApplication.equals(versions.sourceApplication);
}
@Override
@Override
public String toString() {
return String.format("platform %s%s, application %s%s",
sourcePlatform.filter(source -> !source.equals(targetPlatform))
.map(source -> source + " -> ").orElse(""),
targetPlatform,
sourceApplication.filter(source -> !source.equals(targetApplication))
.map(source -> source.id() + " -> ").orElse(""),
targetApplication.id());
}
/** Create versions using change contained in application */
public static Versions from(Application application, Version defaultPlatformVersion) {
return from(application.change(), application, Optional.empty(), defaultPlatformVersion);
}
/** Create versions using given change and application */
public static Versions from(Change change, Application application, Optional<Deployment> deployment,
Version defaultPlatformVersion) {
return new Versions(targetPlatform(application, change, deployment, defaultPlatformVersion),
targetApplication(application, change, deployment),
deployment.map(Deployment::version),
deployment.map(Deployment::applicationVersion));
}
private static Version targetPlatform(Application application, Change change, Optional<Deployment> deployment,
Version defaultVersion) {
return max(deployment.map(Deployment::version), change.platform())
.orElse(application.oldestDeployedPlatform()
.orElse(defaultVersion));
}
private static ApplicationVersion targetApplication(Application application, Change change,
Optional<Deployment> deployment) {
return max(deployment.map(Deployment::applicationVersion), change.application())
.orElse(application.oldestDeployedApplication()
.orElse(application.deploymentJobs().jobStatus().get(JobType.component)
.lastSuccess()
.get()
.application()));
}
private static <T extends Comparable<T>> Optional<T> max(Optional<T> o1, Optional<T> o2) {
return ! o1.isPresent() ? o2 : ! o2.isPresent() ? o1 : o1.get().compareTo(o2.get()) >= 0 ? o1 : o2;
}
} | class Versions {
private final Version targetPlatform;
private final ApplicationVersion targetApplication;
private final Optional<Version> sourcePlatform;
private final Optional<ApplicationVersion> sourceApplication;
public Versions(Version targetPlatform, ApplicationVersion targetApplication, Optional<Version> sourcePlatform,
Optional<ApplicationVersion> sourceApplication) {
if (sourcePlatform.isPresent() ^ sourceApplication.isPresent())
throw new IllegalArgumentException("Sources must both be present or absent.");
this.targetPlatform = requireNonNull(targetPlatform);
this.targetApplication = requireNonNull(targetApplication);
this.sourcePlatform = requireNonNull(sourcePlatform);
this.sourceApplication = requireNonNull(sourceApplication);
}
/** Target platform version for this */
public Version targetPlatform() {
return targetPlatform;
}
/** Target application version for this */
public ApplicationVersion targetApplication() {
return targetApplication;
}
/** Source platform version for this */
public Optional<Version> sourcePlatform() {
return sourcePlatform;
}
/** Source application version for this */
public Optional<ApplicationVersion> sourceApplication() {
return sourceApplication;
}
/** Returns whether source versions are present and match those of the given job run */
public boolean sourcesMatchIfPresent(JobStatus.JobRun jobRun) {
return (!sourcePlatform.filter(version -> !version.equals(targetPlatform)).isPresent() ||
sourcePlatform.equals(jobRun.sourcePlatform())) &&
(!sourceApplication.filter(version -> !version.equals(targetApplication)).isPresent() ||
sourceApplication.equals(jobRun.sourceApplication()));
}
public boolean targetsMatch(JobStatus.JobRun jobRun) {
return targetPlatform.equals(jobRun.platform()) &&
targetApplication.equals(jobRun.application());
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if ( ! (o instanceof Versions)) return false;
Versions versions = (Versions) o;
if ( ! targetPlatform.equals(versions.targetPlatform)) return false;
if ( ! targetApplication.equals(versions.targetApplication)) return false;
if ( ! sourcePlatform.equals(versions.sourcePlatform)) return false;
return sourceApplication.equals(versions.sourceApplication);
}
@Override
@Override
public String toString() {
return String.format("platform %s%s, application %s%s",
sourcePlatform.filter(source -> !source.equals(targetPlatform))
.map(source -> source + " -> ").orElse(""),
targetPlatform,
sourceApplication.filter(source -> !source.equals(targetApplication))
.map(source -> source.id() + " -> ").orElse(""),
targetApplication.id());
}
/** Create versions using change contained in application */
public static Versions from(Application application, Version defaultPlatformVersion) {
return from(application.change(), application, Optional.empty(), defaultPlatformVersion);
}
/** Create versions using given change and application */
public static Versions from(Change change, Application application, Optional<Deployment> deployment,
Version defaultPlatformVersion) {
return new Versions(targetPlatform(application, change, deployment, defaultPlatformVersion),
targetApplication(application, change, deployment),
deployment.map(Deployment::version),
deployment.map(Deployment::applicationVersion));
}
private static Version targetPlatform(Application application, Change change, Optional<Deployment> deployment,
Version defaultVersion) {
return max(deployment.map(Deployment::version), change.platform())
.orElse(application.oldestDeployedPlatform()
.orElse(defaultVersion));
}
private static ApplicationVersion targetApplication(Application application, Change change,
Optional<Deployment> deployment) {
return max(deployment.map(Deployment::applicationVersion), change.application())
.orElse(application.oldestDeployedApplication()
.orElse(application.deploymentJobs().jobStatus().get(JobType.component)
.lastSuccess()
.get()
.application()));
}
private static <T extends Comparable<T>> Optional<T> max(Optional<T> o1, Optional<T> o2) {
return ! o1.isPresent() ? o2 : ! o2.isPresent() ? o1 : o1.get().compareTo(o2.get()) >= 0 ? o1 : o2;
}
} |
Nitpick: IllegalStateException | private Optional<RunStatus> endTests(RunId id, DualLogger logger) {
URI testerEndpoint = JobController.testerEndpoint(controller, id)
.orElseThrow(() -> new NoSuchElementException("Endpoint for tester vanished again before tests were complete!"));
controller.jobController().updateTestLog(id);
RunStatus status;
switch (controller.jobController().cloud().getStatus(testerEndpoint)) {
case NOT_STARTED:
throw new IllegalStateException("Tester reports tests not started, even though they should have!");
case RUNNING:
return Optional.empty();
case FAILURE:
logger.log("Tests failed.");
status = testFailure; break;
case ERROR:
logger.log(INFO, "Tester failed running its tests!");
status = error; break;
case SUCCESS:
logger.log("Tests completed successfully.");
status = running; break;
default:
throw new IllegalArgumentException("Unknown status!");
}
return Optional.of(status);
} | throw new IllegalArgumentException("Unknown status!"); | private Optional<RunStatus> endTests(RunId id, DualLogger logger) {
URI testerEndpoint = JobController.testerEndpoint(controller, id)
.orElseThrow(() -> new NoSuchElementException("Endpoint for tester vanished again before tests were complete!"));
controller.jobController().updateTestLog(id);
RunStatus status;
TesterCloud.Status testStatus = controller.jobController().cloud().getStatus(testerEndpoint);
switch (testStatus) {
case NOT_STARTED:
throw new IllegalStateException("Tester reports tests not started, even though they should have!");
case RUNNING:
return Optional.empty();
case FAILURE:
logger.log("Tests failed.");
status = testFailure; break;
case ERROR:
logger.log(INFO, "Tester failed running its tests!");
status = error; break;
case SUCCESS:
logger.log("Tests completed successfully.");
status = running; break;
default:
throw new IllegalStateException("Unknown status '" + testStatus + "'!");
}
return Optional.of(status);
} | class InternalStepRunner implements StepRunner {
private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName());
static final Duration endpointTimeout = Duration.ofMinutes(15);
static final Duration installationTimeout = Duration.ofMinutes(150);
private final Controller controller;
public InternalStepRunner(Controller controller) {
this.controller = controller;
}
@Override
public Optional<RunStatus> run(LockedStep step, RunId id) {
DualLogger logger = new DualLogger(id, step.get());
try {
switch (step.get()) {
case deployInitialReal: return deployInitialReal(id, logger);
case installInitialReal: return installInitialReal(id, logger);
case deployReal: return deployReal(id, logger);
case deployTester: return deployTester(id, logger);
case installReal: return installReal(id, logger);
case installTester: return installTester(id, logger);
case startTests: return startTests(id, logger);
case endTests: return endTests(id, logger);
case deactivateReal: return deactivateReal(id, logger);
case deactivateTester: return deactivateTester(id, logger);
case report: return report(id);
default: throw new AssertionError("Unknown step '" + step + "'!");
}
}
catch (UncheckedIOException e) {
logger.log(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e));
return Optional.empty();
}
catch (RuntimeException e) {
logger.log(WARNING, "Unexpected exception running " + id, e);
if (JobProfile.of(id.type()).alwaysRun().contains(step.get())) {
logger.log("Will keep trying, as this is a cleanup step.");
return Optional.empty();
}
return Optional.of(error);
}
}
private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " +
versions.sourcePlatform().orElse(versions.targetPlatform()) +
" and application version " +
versions.sourceApplication().orElse(versions.targetApplication()).id() + " ...");
return deployReal(id, true, logger);
}
private Optional<RunStatus> deployReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " + versions.targetPlatform() +
" and application version " + versions.targetApplication().id() + " ...");
return deployReal(id, false, logger);
}
private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, DualLogger logger) {
return deploy(id.application(),
id.type(),
() -> controller.applications().deploy(id.application(),
id.type().zone(controller.system()),
Optional.empty(),
new DeployOptions(false,
Optional.empty(),
false,
setTheStage)),
logger);
}
private Optional<RunStatus> deployTester(RunId id, DualLogger logger) {
logger.log("Deploying the tester container ...");
return deploy(JobController.testerOf(id.application()),
id.type(),
() -> controller.applications().deployTester(JobController.testerOf(id.application()),
testerPackage(id),
id.type().zone(controller.system()),
new DeployOptions(true,
Optional.of(controller.systemVersion()),
false,
false)),
logger);
}
/**
 * Runs the given deployment and interprets its prepare response:
 * any disallowed refeed action fails the run, while restart actions are carried out
 * immediately against the affected hosts.
 *
 * @return deploymentFailed on disallowed refeed actions; running on success; empty (retry later)
 *         on transient config server errors; rethrows any other ConfigServerException
 */
private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, DualLogger logger) {
try {
PrepareResponse prepareResponse = deployment.get().prepareResponse();
// A refeed action which is not allowed means the change cannot be deployed as-is: report and give up.
// NOTE(review): the documentation URL inside this message appears truncated in this copy of the file
// (comment-stripping artifact) — verify against the original source before touching this string.
if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) {
logger.log("Deploy failed due to non-compatible changes that require re-feed. " +
"Your options are: \n" +
"1. Revert the incompatible changes.\n" +
"2. If you think it is safe in your case, you can override this validation, see\n" +
" http:
"3. Deploy as a new application under a different name.\n" +
"Illegal actions:\n" +
prepareResponse.configChangeActions.refeedActions.stream()
.filter(action -> ! action.allowed)
.flatMap(action -> action.messages.stream())
.collect(Collectors.joining("\n")) + "\n" +
"Details:\n" +
prepareResponse.log.stream()
.map(entry -> entry.message)
.collect(Collectors.joining("\n")));
return Optional.of(deploymentFailed);
}
// Restart each affected host exactly once (sorted().distinct() de-duplicates host names).
if (prepareResponse.configChangeActions.restartActions.isEmpty())
logger.log("No services requiring restart.");
else
prepareResponse.configChangeActions.restartActions.stream()
.flatMap(action -> action.services.stream())
.map(service -> service.hostName)
.sorted().distinct()
.map(Hostname::new)
.forEach(hostname -> {
controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname));
logger.log("Restarting services on host " + hostname.id() + ".");
});
logger.log("Deployment successful.");
return Optional.of(running);
}
catch (ConfigServerException e) {
// These error codes are considered transient (out of capacity only for test zones): retry the step.
if ( e.getErrorCode() == OUT_OF_CAPACITY && type.isTest()
|| e.getErrorCode() == ACTIVATION_CONFLICT
|| e.getErrorCode() == APPLICATION_LOCK_FAILURE) {
logger.log("Will retry, because of '" + e.getErrorCode() + "' deploying:\n" + e.getMessage());
return Optional.empty();
}
throw e;
}
}
/** Checks convergence on the initial (source) versions of the real application. */
private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) {
return installReal(id, true, logger);
}
/** Checks convergence on the target versions of the real application. */
private Optional<RunStatus> installReal(RunId id, DualLogger logger) {
return installReal(id, false, logger);
}
/**
 * Checks whether the real deployment has converged on the wanted platform and application versions.
 *
 * @return installationFailed when the deployment has expired or the installation timeout is exceeded;
 *         running when both nodes and services have converged; empty to poll again later
 */
private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) {
// The deployment may have been removed (expired) while we were waiting; give up in that case.
if (expired(id.application(), id.type())) {
logger.log(INFO, "Deployment expired before installation was successful.");
return Optional.of(installationFailed);
}
Versions versions = controller.jobController().run(id).get().versions();
// When setting the stage, we wait for the source versions; otherwise for the target versions.
Version platform = setTheStage ? versions.sourcePlatform().orElse(versions.targetPlatform()) : versions.targetPlatform();
ApplicationVersion application = setTheStage ? versions.sourceApplication().orElse(versions.targetApplication()) : versions.targetApplication();
logger.log("Checking installation of " + platform + " and " + application.id() + " ...");
if (nodesConverged(id.application(), id.type(), platform, logger) && servicesConverged(id.application(), id.type())) {
logger.log("Installation succeeded!");
return Optional.of(running);
}
if (timedOut(id.application(), id.type(), installationTimeout)) {
logger.log(INFO, "Installation failed to complete within " + installationTimeout.toMinutes() + " minutes!");
return Optional.of(installationFailed);
}
logger.log("Installation not yet complete.");
return Optional.empty();
}
/**
 * Checks whether the tester container has converged.
 *
 * @return installationFailed when the real deployment has expired; running when the tester's services
 *         have converged; error when the installation timeout is exceeded; empty to poll again later
 */
private Optional<RunStatus> installTester(RunId id, DualLogger logger) {
logger.log("Checking installation of tester container ...");
// Expiry is checked on the real application: no point installing a tester for a gone deployment.
if (expired(id.application(), id.type())) {
logger.log(INFO, "Deployment expired before tester was installed.");
return Optional.of(installationFailed);
}
if (servicesConverged(JobController.testerOf(id.application()), id.type())) {
logger.log("Tester container successfully installed!");
return Optional.of(running);
}
if (timedOut(id.application(), id.type(), installationTimeout)) {
logger.log(WARNING, "Installation of tester failed to complete within " + installationTimeout.toMinutes() + " minutes of real deployment!");
return Optional.of(error);
}
logger.log("Installation of tester not yet complete.");
return Optional.empty();
}
/**
 * Logs the state of every active or reserved node of the deployment, then returns whether
 * all of them have reached the target version and their wanted restart and reboot generations.
 */
private boolean nodesConverged(ApplicationId id, JobType type, Version target, DualLogger logger) {
    List<Node> nodes = controller.configServer().nodeRepository().list(type.zone(controller.system()), id, ImmutableSet.of(active, reserved));
    for (Node node : nodes) {
        String versionColumn = node.wantedVersion() + (node.currentVersion().equals(node.wantedVersion()) ? "" : " <-- " + node.currentVersion());
        String restartColumn = node.restartGeneration() == node.wantedRestartGeneration()
                               ? "" : "restart pending (" + node.wantedRestartGeneration() + " <-- " + node.restartGeneration() + ")";
        String rebootColumn = node.rebootGeneration() == node.wantedRebootGeneration()
                              ? "" : "reboot pending (" + node.wantedRebootGeneration() + " <-- " + node.rebootGeneration() + ")";
        logger.log(String.format("%70s: %-16s%-25s%-32s%s",
                                 node.hostname(), node.serviceState(), versionColumn, restartColumn, rebootColumn));
    }
    for (Node node : nodes) {
        if (   ! node.currentVersion().equals(target)
            ||   node.restartGeneration() != node.wantedRestartGeneration()
            ||   node.rebootGeneration() != node.wantedRebootGeneration())
            return false;
    }
    return true; // Vacuously true when there are no nodes, as with the original allMatch.
}
/**
 * Returns whether the config server reports converged services for the given deployment;
 * false when no convergence information is available.
 */
private boolean servicesConverged(ApplicationId id, JobType type) {
    DeploymentId deployment = new DeploymentId(id, type.zone(controller.system()));
    return controller.configServer().serviceConvergence(deployment)
                     .filter(ServiceConvergence::converged)
                     .isPresent();
}
/**
 * Finds the endpoints of the deployment under test and of the tester container, and starts
 * the test suite when both are available.
 *
 * @return installationFailed when the deployment has expired; error when endpoints fail to appear
 *         within the endpoint timeout; running once tests are started; empty to poll again later
 */
private Optional<RunStatus> startTests(RunId id, DualLogger logger) {
logger.log("Attempting to find endpoints ...");
if (expired(id.application(), id.type())) {
logger.log(INFO, "Deployment expired before tests could start.");
return Optional.of(installationFailed);
}
Map<ZoneId, List<URI>> endpoints = deploymentEndpoints(id.application());
logger.log("Found endpoints:\n" +
endpoints.entrySet().stream()
.map(zoneEndpoints -> "- " + zoneEndpoints.getKey() + ":\n" +
zoneEndpoints.getValue().stream()
.map(uri -> " |-- " + uri)
.collect(Collectors.joining("\n")))
.collect(Collectors.joining("\n")));
// First wait for the endpoints of the deployment under test in this run's zone ...
if ( ! endpoints.containsKey(id.type().zone(controller.system()))) {
if (timedOut(id.application(), id.type(), endpointTimeout)) {
logger.log(WARNING, "Endpoints failed to show up within " + endpointTimeout.toMinutes() + " minutes!");
return Optional.of(error);
}
logger.log("Endpoints for the deployment to test are not yet ready.");
return Optional.empty();
}
// ... then for the tester's own endpoint, and only then start the suite.
Optional<URI> testerEndpoint = JobController.testerEndpoint(controller, id);
if (testerEndpoint.isPresent()) {
logger.log("Starting tests ...");
controller.jobController().cloud().startTests(testerEndpoint.get(),
TesterCloud.Suite.of(id.type()),
testConfig(id.application(), id.type().zone(controller.system()), controller.system(), endpoints));
return Optional.of(running);
}
if (timedOut(id.application(), id.type(), endpointTimeout)) {
logger.log(WARNING, "Endpoint for tester failed to show up within " + endpointTimeout.toMinutes() + " minutes of real deployment!");
return Optional.of(error);
}
logger.log("Endpoints of tester container not yet available.");
return Optional.empty();
}
/** Deactivates the real deployment of this run's application in this run's zone. */
private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) {
    ZoneId zone = id.type().zone(controller.system());
    logger.log("Deactivating deployment of " + id.application() + " in " + zone + " ...");
    controller.applications().deactivate(id.application(), zone);
    return Optional.of(running);
}
/** Deactivates the tester deployment belonging to this run. */
private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) {
    ZoneId zone = id.type().zone(controller.system());
    logger.log("Deactivating tester of " + id.application() + " in " + zone + " ...");
    controller.jobController().deactivateTester(id.application(), id.type());
    return Optional.of(running);
}
/** Notifies the deployment trigger of completion for this run, if it is still active. */
private Optional<RunStatus> report(RunId id) {
controller.jobController().active(id).ifPresent(run -> controller.applications().deploymentTrigger().notifyOfCompletion(report(run)));
return Optional.of(running);
}
/** Returns the real application with the given id; throws if it does not exist. */
private Application application(ApplicationId id) {
return controller.applications().require(id);
}
/** Returns whether the time elapsed since the last real deployment in the given zone is more than the given timeout. */
// NOTE(review): deployments().get(...) yields null — and this line NPEs — if there is no deployment
// in the zone; callers appear to guard with expired() first, but confirm before reusing elsewhere.
private boolean timedOut(ApplicationId id, JobType type, Duration timeout) {
return application(id).deployments().get(type.zone(controller.system())).at().isBefore(controller.clock().instant().minus(timeout));
}
/** Returns whether the real deployment for the given job type has expired, i.e., no longer exists. */
private boolean expired(ApplicationId id, JobType type) {
    ZoneId zone = type.zone(controller.system());
    return ! application(id).deployments().containsKey(zone);
}
/** Returns a generated job report for the given run. */
// NOTE(review): the literal 1 is passed as the third JobReport argument — presumably a fixed
// project id; confirm against the JobReport constructor before changing.
private DeploymentJobs.JobReport report(Run run) {
return new DeploymentJobs.JobReport(run.id().application(),
run.id().type(),
1,
run.id().number(),
Optional.empty(),
run.hasFailed() ? Optional.of(DeploymentJobs.JobError.unknown) : Optional.empty());
}
/** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */
private ApplicationPackage testerPackage(RunId id) {
ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication();
byte[] testPackage = controller.applications().applicationStore().getTesterPackage(JobController.testerOf(id.application()), version.id());
byte[] servicesXml = servicesXml(controller.system());
// The extra 1000 bytes leave headroom for zip entry overhead in the builder's buffer.
try (ZipBuilder zipBuilder = new ZipBuilder(testPackage.length + servicesXml.length + 1000)) {
zipBuilder.add(testPackage);
zipBuilder.add("services.xml", servicesXml);
zipBuilder.close();
return new ApplicationPackage(zipBuilder.toByteArray());
}
}
/**
 * Returns, per zone, the endpoints of all current deployments of the given real application.
 * Zones whose deployments report no endpoints are omitted from the result.
 */
private Map<ZoneId, List<URI>> deploymentEndpoints(ApplicationId id) {
    ImmutableMap.Builder<ZoneId, List<URI>> deployments = ImmutableMap.builder();
    for (ZoneId zone : application(id).deployments().keySet())
        controller.applications().getDeploymentEndpoints(new DeploymentId(id, zone))
                  .filter(endpoints -> ! endpoints.isEmpty())
                  .ifPresent(endpoints -> deployments.put(zone, endpoints));
    return deployments.build();
}
/** Returns the generated services.xml content for the tester application. */
// The access-control domain differs between the main system and CD systems.
// NOTE(review): the two <binding> string literals below appear truncated in this copy of the file
// (comment-stripping cut the lines at "http:") — restore them from the original source before use.
static byte[] servicesXml(SystemName systemName) {
String domain = systemName == SystemName.main ? "vespa.vespa" : "vespa.vespa.cd";
String servicesXml = "<?xml version='1.0' encoding='UTF-8'?>\n" +
"<services xmlns:deploy='vespa' version='1.0'>\n" +
" <container version='1.0' id='default'>\n" +
"\n" +
" <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" +
" <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" +
" <artifactsPath>artifacts</artifactsPath>\n" +
" </config>\n" +
" </component>\n" +
"\n" +
" <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" +
" <binding>http:
" </handler>\n" +
"\n" +
" <http>\n" +
" <server id='default' port='4080'/>\n" +
" <filtering>\n" +
" <access-control domain='" + domain + "'>\n" +
" <exclude>\n" +
" <binding>http:
" </exclude>\n" +
" </access-control>\n" +
" <request-chain id=\"testrunner-api\">\n" +
" <filter id='authz-filter' class='com.yahoo.jdisc.http.filter.security.athenz.AthenzAuthorizationFilter' bundle=\"jdisc-security-filters\">\n" +
" <config name=\"jdisc.http.filter.security.athenz.athenz-authorization-filter\">\n" +
" <credentialsToVerify>TOKEN_ONLY</credentialsToVerify>\n" +
" <roleTokenHeaderName>Yahoo-Role-Auth</roleTokenHeaderName>\n" +
" </config>\n" +
" <component id=\"com.yahoo.jdisc.http.filter.security.athenz.StaticRequestResourceMapper\" bundle=\"jdisc-security-filters\">\n" +
" <config name=\"jdisc.http.filter.security.athenz.static-request-resource-mapper\">\n" +
" <resourceName>" + domain + ":tester-application</resourceName>\n" +
" <action>deploy</action>\n" +
" </config>\n" +
" </component>\n" +
" </filter>\n" +
" </request-chain>\n" +
" </filtering>\n" +
" </http>\n" +
"\n" +
" <nodes count=\"1\" flavor=\"d-2-8-50\" />\n" +
" </container>\n" +
"</services>\n";
return servicesXml.getBytes();
}
/**
 * Returns the config for the tests to run for the given job, as JSON:
 * the application id, the tester's zone, the system name, and all deployment endpoints keyed by zone.
 */
private static byte[] testConfig(ApplicationId id, ZoneId testerZone, SystemName system, Map<ZoneId, List<URI>> deployments) {
Slime slime = new Slime();
Cursor root = slime.setObject();
root.setString("application", id.serializedForm());
root.setString("zone", testerZone.value());
root.setString("system", system.name());
Cursor endpointsObject = root.setObject("endpoints");
deployments.forEach((zone, endpoints) -> {
Cursor endpointArray = endpointsObject.setArray(zone.value());
for (URI endpoint : endpoints)
endpointArray.addString(endpoint.toString());
});
try {
return SlimeUtils.toJsonBytes(slime);
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */
private class DualLogger {
// Run and step identify where entries are stored in the JobController.
private final RunId id;
private final Step step;
// Prefix prepended to every message sent to the class logger.
private final String prefix;
private DualLogger(RunId id, Step step) {
this.id = id;
this.step = step;
this.prefix = id + " at " + step + ": ";
}
/** Logs at DEBUG level. */
private void log(String message) {
log(DEBUG, message);
}
private void log(Level level, String message) {
log(level, message, null);
}
/** Logs to both destinations; a throwable's stack trace is appended to the JobController entry. */
private void log(Level level, String message, Throwable thrown) {
LogRecord record = new LogRecord(level, prefix + message);
record.setThrown(thrown);
logger.log(record);
if (thrown != null) {
ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream();
thrown.printStackTrace(new PrintStream(traceBuffer));
message += "\n" + traceBuffer;
}
controller.jobController().log(id, step, level, message);
}
}
} | class InternalStepRunner implements StepRunner {
private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName());
static final Duration endpointTimeout = Duration.ofMinutes(15);
static final Duration installationTimeout = Duration.ofMinutes(150);
private final Controller controller;
/** Creates an internal step runner backed by the given controller. */
public InternalStepRunner(Controller controller) {
this.controller = controller;
}
@Override
/**
 * Dispatches the given step of the given run to its handler, translating exceptions into a status:
 * IO problems and transient conditions yield empty (retry later); unexpected exceptions yield error,
 * except in always-run cleanup steps, which keep retrying.
 */
public Optional<RunStatus> run(LockedStep step, RunId id) {
DualLogger logger = new DualLogger(id, step.get());
try {
switch (step.get()) {
case deployInitialReal: return deployInitialReal(id, logger);
case installInitialReal: return installInitialReal(id, logger);
case deployReal: return deployReal(id, logger);
case deployTester: return deployTester(id, logger);
case installReal: return installReal(id, logger);
case installTester: return installTester(id, logger);
case startTests: return startTests(id, logger);
case endTests: return endTests(id, logger);
case deactivateReal: return deactivateReal(id, logger);
case deactivateTester: return deactivateTester(id, logger);
case report: return report(id);
default: throw new AssertionError("Unknown step '" + step + "'!");
}
}
catch (UncheckedIOException e) {
// IO trouble is assumed transient: leave the step unresolved so it is retried.
logger.log(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e));
return Optional.empty();
}
catch (RuntimeException e) {
logger.log(WARNING, "Unexpected exception running " + id, e);
// Cleanup steps must eventually run, so they are retried rather than failed.
if (JobProfile.of(id.type()).alwaysRun().contains(step.get())) {
logger.log("Will keep trying, as this is a cleanup step.");
return Optional.empty();
}
return Optional.of(error);
}
}
private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " +
versions.sourcePlatform().orElse(versions.targetPlatform()) +
" and application version " +
versions.sourceApplication().orElse(versions.targetApplication()).id() + " ...");
return deployReal(id, true, logger);
}
private Optional<RunStatus> deployReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " + versions.targetPlatform() +
" and application version " + versions.targetApplication().id() + " ...");
return deployReal(id, false, logger);
}
private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, DualLogger logger) {
return deploy(id.application(),
id.type(),
() -> controller.applications().deploy(id.application(),
id.type().zone(controller.system()),
Optional.empty(),
new DeployOptions(false,
Optional.empty(),
false,
setTheStage)),
logger);
}
private Optional<RunStatus> deployTester(RunId id, DualLogger logger) {
logger.log("Deploying the tester container ...");
return deploy(JobController.testerOf(id.application()),
id.type(),
() -> controller.applications().deployTester(JobController.testerOf(id.application()),
testerPackage(id),
id.type().zone(controller.system()),
new DeployOptions(true,
Optional.of(controller.systemVersion()),
false,
false)),
logger);
}
private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, DualLogger logger) {
try {
PrepareResponse prepareResponse = deployment.get().prepareResponse();
if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) {
logger.log("Deploy failed due to non-compatible changes that require re-feed. " +
"Your options are: \n" +
"1. Revert the incompatible changes.\n" +
"2. If you think it is safe in your case, you can override this validation, see\n" +
" http:
"3. Deploy as a new application under a different name.\n" +
"Illegal actions:\n" +
prepareResponse.configChangeActions.refeedActions.stream()
.filter(action -> ! action.allowed)
.flatMap(action -> action.messages.stream())
.collect(Collectors.joining("\n")) + "\n" +
"Details:\n" +
prepareResponse.log.stream()
.map(entry -> entry.message)
.collect(Collectors.joining("\n")));
return Optional.of(deploymentFailed);
}
if (prepareResponse.configChangeActions.restartActions.isEmpty())
logger.log("No services requiring restart.");
else
prepareResponse.configChangeActions.restartActions.stream()
.flatMap(action -> action.services.stream())
.map(service -> service.hostName)
.sorted().distinct()
.map(Hostname::new)
.forEach(hostname -> {
controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname));
logger.log("Restarting services on host " + hostname.id() + ".");
});
logger.log("Deployment successful.");
return Optional.of(running);
}
catch (ConfigServerException e) {
if ( e.getErrorCode() == OUT_OF_CAPACITY && type.isTest()
|| e.getErrorCode() == ACTIVATION_CONFLICT
|| e.getErrorCode() == APPLICATION_LOCK_FAILURE) {
logger.log("Will retry, because of '" + e.getErrorCode() + "' deploying:\n" + e.getMessage());
return Optional.empty();
}
throw e;
}
}
private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) {
return installReal(id, true, logger);
}
private Optional<RunStatus> installReal(RunId id, DualLogger logger) {
return installReal(id, false, logger);
}
private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) {
if (expired(id.application(), id.type())) {
logger.log(INFO, "Deployment expired before installation was successful.");
return Optional.of(installationFailed);
}
Versions versions = controller.jobController().run(id).get().versions();
Version platform = setTheStage ? versions.sourcePlatform().orElse(versions.targetPlatform()) : versions.targetPlatform();
ApplicationVersion application = setTheStage ? versions.sourceApplication().orElse(versions.targetApplication()) : versions.targetApplication();
logger.log("Checking installation of " + platform + " and " + application.id() + " ...");
if (nodesConverged(id.application(), id.type(), platform, logger) && servicesConverged(id.application(), id.type())) {
logger.log("Installation succeeded!");
return Optional.of(running);
}
if (timedOut(id.application(), id.type(), installationTimeout)) {
logger.log(INFO, "Installation failed to complete within " + installationTimeout.toMinutes() + " minutes!");
return Optional.of(installationFailed);
}
logger.log("Installation not yet complete.");
return Optional.empty();
}
private Optional<RunStatus> installTester(RunId id, DualLogger logger) {
logger.log("Checking installation of tester container ...");
if (expired(id.application(), id.type())) {
logger.log(INFO, "Deployment expired before tester was installed.");
return Optional.of(installationFailed);
}
if (servicesConverged(JobController.testerOf(id.application()), id.type())) {
logger.log("Tester container successfully installed!");
return Optional.of(running);
}
if (timedOut(id.application(), id.type(), installationTimeout)) {
logger.log(WARNING, "Installation of tester failed to complete within " + installationTimeout.toMinutes() + " minutes of real deployment!");
return Optional.of(error);
}
logger.log("Installation of tester not yet complete.");
return Optional.empty();
}
private boolean nodesConverged(ApplicationId id, JobType type, Version target, DualLogger logger) {
List<Node> nodes = controller.configServer().nodeRepository().list(type.zone(controller.system()), id, ImmutableSet.of(active, reserved));
for (Node node : nodes)
logger.log(String.format("%70s: %-16s%-25s%-32s%s",
node.hostname(),
node.serviceState(),
node.wantedVersion() + (node.currentVersion().equals(node.wantedVersion()) ? "" : " <-- " + node.currentVersion()),
node.restartGeneration() == node.wantedRestartGeneration() ? ""
: "restart pending (" + node.wantedRestartGeneration() + " <-- " + node.restartGeneration() + ")",
node.rebootGeneration() == node.wantedRebootGeneration() ? ""
: "reboot pending (" + node.wantedRebootGeneration() + " <-- " + node.rebootGeneration() + ")"));
return nodes.stream().allMatch(node -> node.currentVersion().equals(target)
&& node.restartGeneration() == node.wantedRestartGeneration()
&& node.rebootGeneration() == node.wantedRebootGeneration());
}
private boolean servicesConverged(ApplicationId id, JobType type) {
return controller.configServer().serviceConvergence(new DeploymentId(id, type.zone(controller.system())))
.map(ServiceConvergence::converged)
.orElse(false);
}
private Optional<RunStatus> startTests(RunId id, DualLogger logger) {
logger.log("Attempting to find endpoints ...");
if (expired(id.application(), id.type())) {
logger.log(INFO, "Deployment expired before tests could start.");
return Optional.of(installationFailed);
}
Map<ZoneId, List<URI>> endpoints = deploymentEndpoints(id.application());
logger.log("Found endpoints:\n" +
endpoints.entrySet().stream()
.map(zoneEndpoints -> "- " + zoneEndpoints.getKey() + ":\n" +
zoneEndpoints.getValue().stream()
.map(uri -> " |-- " + uri)
.collect(Collectors.joining("\n")))
.collect(Collectors.joining("\n")));
if ( ! endpoints.containsKey(id.type().zone(controller.system()))) {
if (timedOut(id.application(), id.type(), endpointTimeout)) {
logger.log(WARNING, "Endpoints failed to show up within " + endpointTimeout.toMinutes() + " minutes!");
return Optional.of(error);
}
logger.log("Endpoints for the deployment to test are not yet ready.");
return Optional.empty();
}
Optional<URI> testerEndpoint = JobController.testerEndpoint(controller, id);
if (testerEndpoint.isPresent()) {
logger.log("Starting tests ...");
controller.jobController().cloud().startTests(testerEndpoint.get(),
TesterCloud.Suite.of(id.type()),
testConfig(id.application(), id.type().zone(controller.system()), controller.system(), endpoints));
return Optional.of(running);
}
if (timedOut(id.application(), id.type(), endpointTimeout)) {
logger.log(WARNING, "Endpoint for tester failed to show up within " + endpointTimeout.toMinutes() + " minutes of real deployment!");
return Optional.of(error);
}
logger.log("Endpoints of tester container not yet available.");
return Optional.empty();
}
private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) {
logger.log("Deactivating deployment of " + id.application() + " in " + id.type().zone(controller.system()) + " ...");
controller.applications().deactivate(id.application(), id.type().zone(controller.system()));
return Optional.of(running);
}
private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) {
logger.log("Deactivating tester of " + id.application() + " in " + id.type().zone(controller.system()) + " ...");
controller.jobController().deactivateTester(id.application(), id.type());
return Optional.of(running);
}
private Optional<RunStatus> report(RunId id) {
controller.jobController().active(id).ifPresent(run -> controller.applications().deploymentTrigger().notifyOfCompletion(report(run)));
return Optional.of(running);
}
/** Returns the real application with the given id. */
private Application application(ApplicationId id) {
return controller.applications().require(id);
}
/** Returns whether the time elapsed since the last real deployment in the given zone is more than the given timeout. */
private boolean timedOut(ApplicationId id, JobType type, Duration timeout) {
return application(id).deployments().get(type.zone(controller.system())).at().isBefore(controller.clock().instant().minus(timeout));
}
/** Returns whether the real deployment for the given job type has expired, i.e., no longer exists. */
private boolean expired(ApplicationId id, JobType type) {
return ! application(id).deployments().containsKey(type.zone(controller.system()));
}
/** Returns a generated job report for the given run. */
private DeploymentJobs.JobReport report(Run run) {
return new DeploymentJobs.JobReport(run.id().application(),
run.id().type(),
1,
run.id().number(),
Optional.empty(),
run.hasFailed() ? Optional.of(DeploymentJobs.JobError.unknown) : Optional.empty());
}
/** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */
private ApplicationPackage testerPackage(RunId id) {
ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication();
byte[] testPackage = controller.applications().applicationStore().getTesterPackage(JobController.testerOf(id.application()), version.id());
byte[] servicesXml = servicesXml(controller.system());
try (ZipBuilder zipBuilder = new ZipBuilder(testPackage.length + servicesXml.length + 1000)) {
zipBuilder.add(testPackage);
zipBuilder.add("services.xml", servicesXml);
zipBuilder.close();
return new ApplicationPackage(zipBuilder.toByteArray());
}
}
/** Returns all endpoints for all current deployments of the given real application. */
private Map<ZoneId, List<URI>> deploymentEndpoints(ApplicationId id) {
ImmutableMap.Builder<ZoneId, List<URI>> deployments = ImmutableMap.builder();
application(id).deployments().keySet()
.forEach(zone -> controller.applications().getDeploymentEndpoints(new DeploymentId(id, zone))
.filter(endpoints -> ! endpoints.isEmpty())
.ifPresent(endpoints -> deployments.put(zone, endpoints)));
return deployments.build();
}
/** Returns the generated services.xml content for the tester application. */
static byte[] servicesXml(SystemName systemName) {
String domain = systemName == SystemName.main ? "vespa.vespa" : "vespa.vespa.cd";
String servicesXml = "<?xml version='1.0' encoding='UTF-8'?>\n" +
"<services xmlns:deploy='vespa' version='1.0'>\n" +
" <container version='1.0' id='default'>\n" +
"\n" +
" <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" +
" <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" +
" <artifactsPath>artifacts</artifactsPath>\n" +
" </config>\n" +
" </component>\n" +
"\n" +
" <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" +
" <binding>http:
" </handler>\n" +
"\n" +
" <http>\n" +
" <server id='default' port='4080'/>\n" +
" <filtering>\n" +
" <access-control domain='" + domain + "'>\n" +
" <exclude>\n" +
" <binding>http:
" </exclude>\n" +
" </access-control>\n" +
" <request-chain id=\"testrunner-api\">\n" +
" <filter id='authz-filter' class='com.yahoo.jdisc.http.filter.security.athenz.AthenzAuthorizationFilter' bundle=\"jdisc-security-filters\">\n" +
" <config name=\"jdisc.http.filter.security.athenz.athenz-authorization-filter\">\n" +
" <credentialsToVerify>TOKEN_ONLY</credentialsToVerify>\n" +
" <roleTokenHeaderName>Yahoo-Role-Auth</roleTokenHeaderName>\n" +
" </config>\n" +
" <component id=\"com.yahoo.jdisc.http.filter.security.athenz.StaticRequestResourceMapper\" bundle=\"jdisc-security-filters\">\n" +
" <config name=\"jdisc.http.filter.security.athenz.static-request-resource-mapper\">\n" +
" <resourceName>" + domain + ":tester-application</resourceName>\n" +
" <action>deploy</action>\n" +
" </config>\n" +
" </component>\n" +
" </filter>\n" +
" </request-chain>\n" +
" </filtering>\n" +
" </http>\n" +
"\n" +
" <nodes count=\"1\" flavor=\"d-2-8-50\" />\n" +
" </container>\n" +
"</services>\n";
return servicesXml.getBytes();
}
/** Returns the config for the tests to run for the given job. */
private static byte[] testConfig(ApplicationId id, ZoneId testerZone, SystemName system, Map<ZoneId, List<URI>> deployments) {
Slime slime = new Slime();
Cursor root = slime.setObject();
root.setString("application", id.serializedForm());
root.setString("zone", testerZone.value());
root.setString("system", system.name());
Cursor endpointsObject = root.setObject("endpoints");
deployments.forEach((zone, endpoints) -> {
Cursor endpointArray = endpointsObject.setArray(zone.value());
for (URI endpoint : endpoints)
endpointArray.addString(endpoint.toString());
});
try {
return SlimeUtils.toJsonBytes(slime);
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */
private class DualLogger {
private final RunId id;
private final Step step;
private final String prefix;
private DualLogger(RunId id, Step step) {
this.id = id;
this.step = step;
this.prefix = id + " at " + step + ": ";
}
private void log(String message) {
log(DEBUG, message);
}
private void log(Level level, String message) {
log(level, message, null);
}
private void log(Level level, String message, Throwable thrown) {
LogRecord record = new LogRecord(level, prefix + message);
record.setThrown(thrown);
logger.log(record);
if (thrown != null) {
ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream();
thrown.printStackTrace(new PrintStream(traceBuffer));
message += "\n" + traceBuffer;
}
controller.jobController().log(id, step, level, message);
}
}
} |
This is one of the rare cases where the exception means something else than (give up what you are trying to do), so it may be worth creating a particular exception for this. IllegalArgumentException, in particular is the one we use to signal that the client (ultimately the user/requestor) is asking for something wrong. | static HttpResponse runDetailsResponse(JobController jobController, RunId runId, String after) {
Slime slime = new Slime();
Cursor logsObject = slime.setObject();
try {
jobController.updateTestLog(runId);
logsObject.setBool("active", true);
}
catch (IllegalArgumentException e) {
logsObject.setBool("active", jobController.active(runId).isPresent());
}
RunLog runLog = (after == null ? jobController.details(runId) : jobController.details(runId, Long.parseLong(after)))
.orElseThrow(() -> new NotExistsException(String.format(
"No run details exist for application: %s, job type: %s, number: %d",
runId.application().toShortString(), runId.type().jobName(), runId.number())));
for (Step step : Step.values()) {
runLog.get(step).ifPresent(entries -> toSlime(logsObject.setArray(step.name()), entries));
}
runLog.lastId().ifPresent(id -> logsObject.setLong("lastId", id));
return new SlimeJsonResponse(slime);
} | } | static HttpResponse runDetailsResponse(JobController jobController, RunId runId, String after) {
Slime slime = new Slime();
Cursor logsObject = slime.setObject();
logsObject.setBool("active", jobController.active(runId).isPresent());
jobController.updateTestLog(runId);
RunLog runLog = (after == null ? jobController.details(runId) : jobController.details(runId, Long.parseLong(after)))
.orElseThrow(() -> new NotExistsException(String.format(
"No run details exist for application: %s, job type: %s, number: %d",
runId.application().toShortString(), runId.type().jobName(), runId.number())));
for (Step step : Step.values()) {
runLog.get(step).ifPresent(entries -> toSlime(logsObject.setArray(step.name()), entries));
}
runLog.lastId().ifPresent(id -> logsObject.setLong("lastId", id));
return new SlimeJsonResponse(slime);
} | class JobControllerApiHandlerHelper {
/**
 * @return Response with all job types that have recorded runs for the application _and_ the status for the last run of that type
 */
static HttpResponse jobTypeResponse(List<JobType> sortedJobs, Map<JobType, Run> lastRun, URI baseUriForJobs) {
    Slime slime = new Slime();
    Cursor jobArray = slime.setObject().setArray("jobs");
    for (JobType jobType : sortedJobs)
        jobTypeToSlime(jobArray.addObject(), jobType, Optional.ofNullable(lastRun.get(jobType)), baseUriForJobs);
    return new SlimeJsonResponse(slime);
}
/** Writes one job type entry: its URL, plus a summary of its last run when one exists. */
private static void jobTypeToSlime(Cursor cursor, JobType jobType, Optional<Run> lastRun, URI baseUriForJobs) {
    Cursor jobObject = cursor.setObject(jobType.jobName());
    URI jobTypeUri = baseUriForJobs.resolve(baseUriForJobs.getPath() + "/" + jobType.jobName());
    jobObject.setString("url", jobTypeUri.toString());
    if (lastRun.isPresent())
        runToSlime(jobObject.setObject("last"), lastRun.get(), jobTypeUri);
}
/**
 * Renders all given runs of one job type, keyed by run number.
 *
 * @return response mapping each run number to the details of that run
 */
static HttpResponse runResponse(Map<RunId, Run> runs, URI baseUriForJobType) {
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    for (Map.Entry<RunId, Run> entry : runs.entrySet())
        runToSlime(root.setObject(Long.toString(entry.getKey().number())), entry.getValue(), baseUriForJobType);
    return new SlimeJsonResponse(slime);
}
// Renders one run: status, timestamps, per-step status, a link to its logs, and the versions involved.
private static void runToSlime(Cursor cursor, Run run, URI baseUriForJobType) {
cursor.setString("status", run.status().name());
// "end" is only present once the run has finished.
run.end().ifPresent(instant -> cursor.setString("end", instant.toString()));
// One { stepName: statusName } object per step of the run.
Cursor stepsArray = cursor.setArray("steps");
run.steps().forEach((step, status) -> {
Cursor stepObject = stepsArray.addObject();
stepObject.setString(step.name(), status.name());
});
cursor.setString("start", run.start().toString());
cursor.setLong("id", run.id().number());
// Deep link to the log endpoint for this specific run.
String logsPath = baseUriForJobType.getPath() + "/run/" + run.id().number();
cursor.setString("logs", baseUriForJobType.resolve(logsPath).toString());
// Target versions are always rendered; source ("current") versions only when present.
cursor.setString("wantedPlatform", run.versions().targetPlatform().toString());
applicationVersionToSlime(cursor.setObject("wantedApplication"), run.versions().targetApplication());
run.versions().sourcePlatform().ifPresent(version -> cursor.setString("currentPlatform", version.toString()));
run.versions().sourceApplication().ifPresent(version -> applicationVersionToSlime(cursor.setObject("currentApplication"), version));
}
/**
 * Renders an application version: its id and, when known, its build number and source revision.
 *
 * buildNumber() and source() return Optionals; the previous unconditional get() calls threw
 * NoSuchElementException for versions lacking this information, failing the whole response.
 * These fields are now emitted only when present.
 */
private static void applicationVersionToSlime(Cursor cursor, ApplicationVersion version) {
    cursor.setString("id", version.id());
    version.buildNumber().ifPresent(build -> cursor.setLong("build", build));
    version.source().ifPresent(source -> {
        cursor.setString("repository", source.repository());
        cursor.setString("branch", source.branch());
        cursor.setString("commit", source.commit());
    });
}
/**
 * Serializes the given log entries into the given Slime array, one object per entry.
 */
private static void toSlime(Cursor entryArray, List<LogEntry> entries) {
    // One array element per log entry, in list order.
    for (LogEntry entry : entries)
        toSlime(entryArray.addObject(), entry);
}
/** Writes a single log entry as { at, type, message }. */
private static void toSlime(Cursor entryObject, LogEntry logEntry) {
    entryObject.setLong("at", logEntry.at());
    entryObject.setString("type", logEntry.type().name());
    entryObject.setString("message", logEntry.message());
}
/**
 * Submits the given application and test packages to the job controller, under the
 * 'default' instance of the given tenant and application.
 *
 * @return response containing the id of the newly registered application version
 */
static HttpResponse submitResponse(JobController jobController, String tenant, String application,
                                   SourceRevision sourceRevision, byte[] appPackage, byte[] testPackage) {
    ApplicationId id = ApplicationId.from(tenant, application, "default");
    ApplicationVersion version = jobController.submit(id, sourceRevision, appPackage, testPackage);
    Slime slime = new Slime();
    slime.setObject().setString("version", version.id());
    return new SlimeJsonResponse(slime);
}
} | class JobControllerApiHandlerHelper {
// NOTE(review): this block repeats the class listing above verbatim -- it is the trailing
// context of the same review record. Code is kept byte-identical.
/**
* @return Response with all job types that have recorded runs for the application _and_ the status for the last run of that type
*/
static HttpResponse jobTypeResponse(List<JobType> sortedJobs, Map<JobType, Run> lastRun, URI baseUriForJobs) {
Slime slime = new Slime();
Cursor responseObject = slime.setObject();
Cursor jobArray = responseObject.setArray("jobs");
sortedJobs.forEach(jobType ->
jobTypeToSlime(jobArray.addObject(), jobType, Optional.ofNullable(lastRun.get(jobType)), baseUriForJobs));
return new SlimeJsonResponse(slime);
}
// Writes one job type entry: its URL, plus a summary of its last run when one exists.
private static void jobTypeToSlime(Cursor cursor, JobType jobType, Optional<Run> lastRun, URI baseUriForJobs) {
Cursor jobObject = cursor.setObject(jobType.jobName());
String jobTypePath = baseUriForJobs.getPath() + "/" + jobType.jobName();
URI baseUriForJobType = baseUriForJobs.resolve(jobTypePath);
jobObject.setString("url", baseUriForJobType.toString());
lastRun.ifPresent(run -> {
Cursor lastObject = jobObject.setObject("last");
runToSlime(lastObject, run, baseUriForJobType);
});
}
/**
* @return Response with the runs for a specific jobtype
*/
static HttpResponse runResponse(Map<RunId, Run> runs, URI baseUriForJobType) {
Slime slime = new Slime();
Cursor cursor = slime.setObject();
runs.forEach((runid, run) -> runToSlime(cursor.setObject(Long.toString(runid.number())), run, baseUriForJobType));
return new SlimeJsonResponse(slime);
}
// Renders one run: status, timestamps, per-step status, a link to its logs, and the versions involved.
private static void runToSlime(Cursor cursor, Run run, URI baseUriForJobType) {
cursor.setString("status", run.status().name());
run.end().ifPresent(instant -> cursor.setString("end", instant.toString()));
Cursor stepsArray = cursor.setArray("steps");
run.steps().forEach((step, status) -> {
Cursor stepObject = stepsArray.addObject();
stepObject.setString(step.name(), status.name());
});
cursor.setString("start", run.start().toString());
cursor.setLong("id", run.id().number());
String logsPath = baseUriForJobType.getPath() + "/run/" + run.id().number();
cursor.setString("logs", baseUriForJobType.resolve(logsPath).toString());
cursor.setString("wantedPlatform", run.versions().targetPlatform().toString());
applicationVersionToSlime(cursor.setObject("wantedApplication"), run.versions().targetApplication());
run.versions().sourcePlatform().ifPresent(version -> cursor.setString("currentPlatform", version.toString()));
run.versions().sourceApplication().ifPresent(version -> applicationVersionToSlime(cursor.setObject("currentApplication"), version));
}
private static void applicationVersionToSlime(Cursor cursor, ApplicationVersion version) {
cursor.setString("id", version.id());
// NOTE(review): buildNumber() and source() look Optional; these unconditional get() calls
// will throw when the values are absent -- confirm against ApplicationVersion.
cursor.setLong("build", version.buildNumber().get());
cursor.setString("repository", version.source().get().repository());
cursor.setString("branch", version.source().get().branch());
cursor.setString("commit", version.source().get().commit());
}
/**
* Serializes the given log entries into the given Slime array, one object per entry.
*/
private static void toSlime(Cursor entryArray, List<LogEntry> entries) {
entries.forEach(entry -> toSlime(entryArray.addObject(), entry));
}
// Writes a single log entry as { at, type, message }.
private static void toSlime(Cursor entryObject, LogEntry entry) {
entryObject.setLong("at", entry.at());
entryObject.setString("type", entry.type().name());
entryObject.setString("message", entry.message());
}
/**
* Unpack payload and submit to job controller. Defaults instance to 'default' and renders the
* application version on success.
*
* @return Response with the new application version
*/
static HttpResponse submitResponse(JobController jobController, String tenant, String application,
SourceRevision sourceRevision, byte[] appPackage, byte[] testPackage) {
ApplicationVersion version = jobController.submit(ApplicationId.from(tenant, application, "default"),
sourceRevision, appPackage, testPackage);
Slime slime = new Slime();
Cursor responseObject = slime.setObject();
responseObject.setString("version", version.id());
return new SlimeJsonResponse(slime);
}
} |
Yes, that's better. | private Optional<RunStatus> endTests(RunId id, DualLogger logger) {
// Maps the tester container's reported status for this run onto a run status.
URI testerEndpoint = JobController.testerEndpoint(controller, id)
.orElseThrow(() -> new NoSuchElementException("Endpoint for tester vanished again before tests were complete!"));
// Pull the latest test log entries before deciding the outcome.
controller.jobController().updateTestLog(id);
RunStatus status;
switch (controller.jobController().cloud().getStatus(testerEndpoint)) {
case NOT_STARTED:
throw new IllegalStateException("Tester reports tests not started, even though they should have!");
case RUNNING:
// Not finished yet; empty result means the step will be retried later.
return Optional.empty();
case FAILURE:
logger.log("Tests failed.");
status = testFailure; break;
case ERROR:
logger.log(INFO, "Tester failed running its tests!");
status = error; break;
case SUCCESS:
logger.log("Tests completed successfully.");
status = running; break;
default:
// NOTE(review): an unhandled enum value is an internal error, not a bad argument --
// IllegalStateException, including the offending value, would fit better here.
throw new IllegalArgumentException("Unknown status!");
}
return Optional.of(status);
} | throw new IllegalArgumentException("Unknown status!"); | private Optional<RunStatus> endTests(RunId id, DualLogger logger) {
// Revised version: the tester status is held in a local so the default branch can report
// the offending value, and the unknown-value case now throws IllegalStateException.
URI testerEndpoint = JobController.testerEndpoint(controller, id)
.orElseThrow(() -> new NoSuchElementException("Endpoint for tester vanished again before tests were complete!"));
// Pull the latest test log entries before deciding the outcome.
controller.jobController().updateTestLog(id);
RunStatus status;
TesterCloud.Status testStatus = controller.jobController().cloud().getStatus(testerEndpoint);
switch (testStatus) {
case NOT_STARTED:
throw new IllegalStateException("Tester reports tests not started, even though they should have!");
case RUNNING:
// Not finished yet; empty result means the step will be retried later.
return Optional.empty();
case FAILURE:
logger.log("Tests failed.");
status = testFailure; break;
case ERROR:
logger.log(INFO, "Tester failed running its tests!");
status = error; break;
case SUCCESS:
logger.log("Tests completed successfully.");
status = running; break;
default:
throw new IllegalStateException("Unknown status '" + testStatus + "'!");
}
return Optional.of(status);
} | class InternalStepRunner implements StepRunner {
// Class-wide JUL logger; DualLogger below forwards its records here as well.
private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName());
// How long to wait for endpoints to appear, and for installation to converge, before failing.
static final Duration endpointTimeout = Duration.ofMinutes(15);
static final Duration installationTimeout = Duration.ofMinutes(150);
private final Controller controller;
public InternalStepRunner(Controller controller) {
this.controller = controller;
}
@Override
// Dispatches one step of a run to its handler. An empty result means "not done yet, retry";
// a present RunStatus ends the step. Note: the local 'logger' shadows the class-level JUL logger.
public Optional<RunStatus> run(LockedStep step, RunId id) {
DualLogger logger = new DualLogger(id, step.get());
try {
switch (step.get()) {
case deployInitialReal: return deployInitialReal(id, logger);
case installInitialReal: return installInitialReal(id, logger);
case deployReal: return deployReal(id, logger);
case deployTester: return deployTester(id, logger);
case installReal: return installReal(id, logger);
case installTester: return installTester(id, logger);
case startTests: return startTests(id, logger);
case endTests: return endTests(id, logger);
case deactivateReal: return deactivateReal(id, logger);
case deactivateTester: return deactivateTester(id, logger);
case report: return report(id);
default: throw new AssertionError("Unknown step '" + step + "'!");
}
}
// I/O trouble is treated as transient: log and retry the step later.
catch (UncheckedIOException e) {
logger.log(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e));
return Optional.empty();
}
// Other runtime exceptions fail the step -- unless it is an always-run (cleanup) step,
// which is retried so teardown still happens.
catch (RuntimeException e) {
logger.log(WARNING, "Unexpected exception running " + id, e);
if (JobProfile.of(id.type()).alwaysRun().contains(step.get())) {
logger.log("Will keep trying, as this is a cleanup step.");
return Optional.empty();
}
return Optional.of(error);
}
}
// Deploys the source platform/application versions (falling back to the targets) to set the stage.
private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " +
versions.sourcePlatform().orElse(versions.targetPlatform()) +
" and application version " +
versions.sourceApplication().orElse(versions.targetApplication()).id() + " ...");
return deployReal(id, true, logger);
}
// Deploys the target platform and application versions of the run.
private Optional<RunStatus> deployReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " + versions.targetPlatform() +
" and application version " + versions.targetApplication().id() + " ...");
return deployReal(id, false, logger);
}
// Shared deployment path for the real application; setTheStage is forwarded in DeployOptions.
private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, DualLogger logger) {
return deploy(id.application(),
id.type(),
() -> controller.applications().deploy(id.application(),
id.type().zone(controller.system()),
Optional.empty(),
new DeployOptions(false,
Optional.empty(),
false,
setTheStage)),
logger);
}
// Deploys the generated tester application; the deploy options carry the config server's
// system version -- presumably pinning the tester to that platform (confirm in DeployOptions).
private Optional<RunStatus> deployTester(RunId id, DualLogger logger) {
logger.log("Deploying the tester container ...");
return deploy(JobController.testerOf(id.application()),
id.type(),
() -> controller.applications().deployTester(JobController.testerOf(id.application()),
testerPackage(id),
id.type().zone(controller.system()),
new DeployOptions(true,
Optional.of(controller.systemVersion()),
false,
false)),
logger);
}
// Runs the given deployment and interprets its prepare response: disallowed re-feed actions
// fail the deployment; restart actions trigger service restarts; certain config server
// errors are treated as transient and retried. (Some string literals in this block were
// truncated by the dump this file was extracted from.)
private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, DualLogger logger) {
try {
PrepareResponse prepareResponse = deployment.get().prepareResponse();
// Any disallowed re-feed action makes the deployment fail, with guidance for the user.
if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) {
logger.log("Deploy failed due to non-compatible changes that require re-feed. " +
"Your options are: \n" +
"1. Revert the incompatible changes.\n" +
"2. If you think it is safe in your case, you can override this validation, see\n" +
" http:
"3. Deploy as a new application under a different name.\n" +
"Illegal actions:\n" +
prepareResponse.configChangeActions.refeedActions.stream()
.filter(action -> ! action.allowed)
.flatMap(action -> action.messages.stream())
.collect(Collectors.joining("\n")) + "\n" +
"Details:\n" +
prepareResponse.log.stream()
.map(entry -> entry.message)
.collect(Collectors.joining("\n")));
return Optional.of(deploymentFailed);
}
// Restart each distinct host named in the restart actions, in sorted order.
if (prepareResponse.configChangeActions.restartActions.isEmpty())
logger.log("No services requiring restart.");
else
prepareResponse.configChangeActions.restartActions.stream()
.flatMap(action -> action.services.stream())
.map(service -> service.hostName)
.sorted().distinct()
.map(Hostname::new)
.forEach(hostname -> {
controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname));
logger.log("Restarting services on host " + hostname.id() + ".");
});
logger.log("Deployment successful.");
return Optional.of(running);
}
// Capacity shortage in test zones, activation conflicts, and lock failures are transient:
// log and retry; everything else propagates.
catch (ConfigServerException e) {
if ( e.getErrorCode() == OUT_OF_CAPACITY && type.isTest()
|| e.getErrorCode() == ACTIVATION_CONFLICT
|| e.getErrorCode() == APPLICATION_LOCK_FAILURE) {
logger.log("Will retry, because of '" + e.getErrorCode() + "' deploying:\n" + e.getMessage());
return Optional.empty();
}
throw e;
}
}
// Waits for the initial (source) deployment to be installed.
private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) {
return installReal(id, true, logger);
}
// Waits for the target deployment to be installed.
private Optional<RunStatus> installReal(RunId id, DualLogger logger) {
return installReal(id, false, logger);
}
// Checks whether the expected platform and application versions have converged on all nodes
// and services; fails the step when the deployment expires or the installation times out.
private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) {
if (expired(id.application(), id.type())) {
logger.log(INFO, "Deployment expired before installation was successful.");
return Optional.of(installationFailed);
}
Versions versions = controller.jobController().run(id).get().versions();
// When setting the stage, check against the source versions (falling back to targets).
Version platform = setTheStage ? versions.sourcePlatform().orElse(versions.targetPlatform()) : versions.targetPlatform();
ApplicationVersion application = setTheStage ? versions.sourceApplication().orElse(versions.targetApplication()) : versions.targetApplication();
logger.log("Checking installation of " + platform + " and " + application.id() + " ...");
if (nodesConverged(id.application(), id.type(), platform, logger) && servicesConverged(id.application(), id.type())) {
logger.log("Installation succeeded!");
return Optional.of(running);
}
if (timedOut(id.application(), id.type(), installationTimeout)) {
logger.log(INFO, "Installation failed to complete within " + installationTimeout.toMinutes() + " minutes!");
return Optional.of(installationFailed);
}
// Empty result: not converged yet, retry later.
logger.log("Installation not yet complete.");
return Optional.empty();
}
// Like installReal, but for the tester container; only service convergence is checked.
private Optional<RunStatus> installTester(RunId id, DualLogger logger) {
logger.log("Checking installation of tester container ...");
if (expired(id.application(), id.type())) {
logger.log(INFO, "Deployment expired before tester was installed.");
return Optional.of(installationFailed);
}
if (servicesConverged(JobController.testerOf(id.application()), id.type())) {
logger.log("Tester container successfully installed!");
return Optional.of(running);
}
if (timedOut(id.application(), id.type(), installationTimeout)) {
logger.log(WARNING, "Installation of tester failed to complete within " + installationTimeout.toMinutes() + " minutes of real deployment!");
return Optional.of(error);
}
logger.log("Installation of tester not yet complete.");
return Optional.empty();
}
// Logs a status table for each active/reserved node of the deployment, then reports whether
// every node runs the target version with no pending restart or reboot generations.
private boolean nodesConverged(ApplicationId id, JobType type, Version target, DualLogger logger) {
List<Node> nodes = controller.configServer().nodeRepository().list(type.zone(controller.system()), id, ImmutableSet.of(active, reserved));
for (Node node : nodes)
logger.log(String.format("%70s: %-16s%-25s%-32s%s",
node.hostname(),
node.serviceState(),
node.wantedVersion() + (node.currentVersion().equals(node.wantedVersion()) ? "" : " <-- " + node.currentVersion()),
node.restartGeneration() == node.wantedRestartGeneration() ? ""
: "restart pending (" + node.wantedRestartGeneration() + " <-- " + node.restartGeneration() + ")",
node.rebootGeneration() == node.wantedRebootGeneration() ? ""
: "reboot pending (" + node.wantedRebootGeneration() + " <-- " + node.rebootGeneration() + ")"));
return nodes.stream().allMatch(node -> node.currentVersion().equals(target)
&& node.restartGeneration() == node.wantedRestartGeneration()
&& node.rebootGeneration() == node.wantedRebootGeneration());
}
// Asks the config server whether all services of the deployment have converged;
// an absent answer counts as not converged.
private boolean servicesConverged(ApplicationId id, JobType type) {
return controller.configServer().serviceConvergence(new DeploymentId(id, type.zone(controller.system())))
.map(ServiceConvergence::converged)
.orElse(false);
}
// Starts the test suite once both the deployment's endpoints and the tester's endpoint are up.
// Empty results mean "retry later"; missing endpoints past endpointTimeout fail the step.
private Optional<RunStatus> startTests(RunId id, DualLogger logger) {
logger.log("Attempting to find endpoints ...");
if (expired(id.application(), id.type())) {
logger.log(INFO, "Deployment expired before tests could start.");
return Optional.of(installationFailed);
}
Map<ZoneId, List<URI>> endpoints = deploymentEndpoints(id.application());
logger.log("Found endpoints:\n" +
endpoints.entrySet().stream()
.map(zoneEndpoints -> "- " + zoneEndpoints.getKey() + ":\n" +
zoneEndpoints.getValue().stream()
.map(uri -> " |-- " + uri)
.collect(Collectors.joining("\n")))
.collect(Collectors.joining("\n")));
// The zone under test must expose endpoints before tests can start.
if ( ! endpoints.containsKey(id.type().zone(controller.system()))) {
if (timedOut(id.application(), id.type(), endpointTimeout)) {
logger.log(WARNING, "Endpoints failed to show up within " + endpointTimeout.toMinutes() + " minutes!");
return Optional.of(error);
}
logger.log("Endpoints for the deployment to test are not yet ready.");
return Optional.empty();
}
// The tester itself must also be reachable; kick off the suite with the generated config.
Optional<URI> testerEndpoint = JobController.testerEndpoint(controller, id);
if (testerEndpoint.isPresent()) {
logger.log("Starting tests ...");
controller.jobController().cloud().startTests(testerEndpoint.get(),
TesterCloud.Suite.of(id.type()),
testConfig(id.application(), id.type().zone(controller.system()), controller.system(), endpoints));
return Optional.of(running);
}
if (timedOut(id.application(), id.type(), endpointTimeout)) {
logger.log(WARNING, "Endpoint for tester failed to show up within " + endpointTimeout.toMinutes() + " minutes of real deployment!");
return Optional.of(error);
}
logger.log("Endpoints of tester container not yet available.");
return Optional.empty();
}
/** Removes the real deployment for this run's zone; always reports success. */
private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) {
    ZoneId zone = id.type().zone(controller.system());
    logger.log("Deactivating deployment of " + id.application() + " in " + zone + " ...");
    controller.applications().deactivate(id.application(), zone);
    return Optional.of(running);
}
/** Removes the tester deployment for this run; always reports success. */
private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) {
    ZoneId zone = id.type().zone(controller.system());
    logger.log("Deactivating tester of " + id.application() + " in " + zone + " ...");
    controller.jobController().deactivateTester(id.application(), id.type());
    return Optional.of(running);
}
// Notifies the deployment trigger of completion for a still-active run; always reports success.
private Optional<RunStatus> report(RunId id) {
controller.jobController().active(id).ifPresent(run -> controller.applications().deploymentTrigger().notifyOfCompletion(report(run)));
return Optional.of(running);
}
/** Returns the real application with the given id. */
private Application application(ApplicationId id) {
return controller.applications().require(id);
}
/** Returns whether the time elapsed since the last real deployment in the given zone is more than the given timeout. */
// NOTE(review): deployments().get(zone) can return null once the deployment is gone, which
// would NPE here; callers appear to check expired(...) first -- confirm this invariant holds.
private boolean timedOut(ApplicationId id, JobType type, Duration timeout) {
return application(id).deployments().get(type.zone(controller.system())).at().isBefore(controller.clock().instant().minus(timeout));
}
/** Returns whether the real deployment for the given job type has expired, i.e., no longer exists. */
private boolean expired(ApplicationId id, JobType type) {
return ! application(id).deployments().containsKey(type.zone(controller.system()));
}
/** Returns a generated job report for the given run. */
// projectId is hard-coded to 1; any failure is reported as JobError.unknown.
private DeploymentJobs.JobReport report(Run run) {
return new DeploymentJobs.JobReport(run.id().application(),
run.id().type(),
1,
run.id().number(),
Optional.empty(),
run.hasFailed() ? Optional.of(DeploymentJobs.JobError.unknown) : Optional.empty());
}
/** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */
private ApplicationPackage testerPackage(RunId id) {
ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication();
byte[] testPackage = controller.applications().applicationStore().getTesterPackage(JobController.testerOf(id.application()), version.id());
byte[] servicesXml = servicesXml(controller.system());
// Zip the stored test package together with the generated services.xml;
// the extra 1000 bytes presumably leave headroom for zip metadata -- confirm in ZipBuilder.
try (ZipBuilder zipBuilder = new ZipBuilder(testPackage.length + servicesXml.length + 1000)) {
zipBuilder.add(testPackage);
zipBuilder.add("services.xml", servicesXml);
zipBuilder.close();
return new ApplicationPackage(zipBuilder.toByteArray());
}
}
/** Returns all endpoints for all current deployments of the given real application. */
// Zones whose endpoint list is absent or empty are omitted from the result.
private Map<ZoneId, List<URI>> deploymentEndpoints(ApplicationId id) {
ImmutableMap.Builder<ZoneId, List<URI>> deployments = ImmutableMap.builder();
application(id).deployments().keySet()
.forEach(zone -> controller.applications().getDeploymentEndpoints(new DeploymentId(id, zone))
.filter(endpoints -> ! endpoints.isEmpty())
.ifPresent(endpoints -> deployments.put(zone, endpoints)));
return deployments.build();
}
/**
 * Returns the generated services.xml content for the tester application.
 *
 * Note: several string literals in this listing were truncated by the dump this file was
 * extracted from (lines ending in "http:"); they are preserved as-is here.
 */
static byte[] servicesXml(SystemName systemName) {
// The access control domain differs between the main system and CD.
String domain = systemName == SystemName.main ? "vespa.vespa" : "vespa.vespa.cd";
String servicesXml = "<?xml version='1.0' encoding='UTF-8'?>\n" +
"<services xmlns:deploy='vespa' version='1.0'>\n" +
" <container version='1.0' id='default'>\n" +
"\n" +
" <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" +
" <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" +
" <artifactsPath>artifacts</artifactsPath>\n" +
" </config>\n" +
" </component>\n" +
"\n" +
" <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" +
" <binding>http:
" </handler>\n" +
"\n" +
" <http>\n" +
" <server id='default' port='4080'/>\n" +
" <filtering>\n" +
" <access-control domain='" + domain + "'>\n" +
" <exclude>\n" +
" <binding>http:
" </exclude>\n" +
" </access-control>\n" +
" <request-chain id=\"testrunner-api\">\n" +
" <filter id='authz-filter' class='com.yahoo.jdisc.http.filter.security.athenz.AthenzAuthorizationFilter' bundle=\"jdisc-security-filters\">\n" +
" <config name=\"jdisc.http.filter.security.athenz.athenz-authorization-filter\">\n" +
" <credentialsToVerify>TOKEN_ONLY</credentialsToVerify>\n" +
" <roleTokenHeaderName>Yahoo-Role-Auth</roleTokenHeaderName>\n" +
" </config>\n" +
" <component id=\"com.yahoo.jdisc.http.filter.security.athenz.StaticRequestResourceMapper\" bundle=\"jdisc-security-filters\">\n" +
" <config name=\"jdisc.http.filter.security.athenz.static-request-resource-mapper\">\n" +
" <resourceName>" + domain + ":tester-application</resourceName>\n" +
" <action>deploy</action>\n" +
" </config>\n" +
" </component>\n" +
" </filter>\n" +
" </request-chain>\n" +
" </filtering>\n" +
" </http>\n" +
"\n" +
" <nodes count=\"1\" flavor=\"d-2-8-50\" />\n" +
" </container>\n" +
"</services>\n";
// The XML declaration promises UTF-8, so encode explicitly with UTF-8 instead of the
// platform default charset which the bare getBytes() would use.
return servicesXml.getBytes(java.nio.charset.StandardCharsets.UTF_8);
}
/** Returns the config for the tests to run for the given job. */
// JSON shape: { application, zone, system, endpoints: { <zone>: [uri, ...] } }.
private static byte[] testConfig(ApplicationId id, ZoneId testerZone, SystemName system, Map<ZoneId, List<URI>> deployments) {
Slime slime = new Slime();
Cursor root = slime.setObject();
root.setString("application", id.serializedForm());
root.setString("zone", testerZone.value());
root.setString("system", system.name());
Cursor endpointsObject = root.setObject("endpoints");
deployments.forEach((zone, endpoints) -> {
Cursor endpointArray = endpointsObject.setArray(zone.value());
for (URI endpoint : endpoints)
endpointArray.addString(endpoint.toString());
});
// Serialization to JSON declares IOException; surface it unchecked to the step runner.
try {
return SlimeUtils.toJsonBytes(slime);
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */
// Inner (non-static) class on purpose: it reaches the enclosing runner's 'controller' field.
private class DualLogger {
private final RunId id;
private final Step step;
// Prefix prepended to every record sent to the class-level logger.
private final String prefix;
private DualLogger(RunId id, Step step) {
this.id = id;
this.step = step;
this.prefix = id + " at " + step + ": ";
}
// Convenience overloads: default level is DEBUG, default throwable is none.
private void log(String message) {
log(DEBUG, message);
}
private void log(Level level, String message) {
log(level, message, null);
}
private void log(Level level, String message, Throwable thrown) {
LogRecord record = new LogRecord(level, prefix + message);
record.setThrown(thrown);
logger.log(record);
// The job log gets the stack trace appended to the message text, since it carries
// no Throwable of its own.
if (thrown != null) {
ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream();
thrown.printStackTrace(new PrintStream(traceBuffer));
message += "\n" + traceBuffer;
}
controller.jobController().log(id, step, level, message);
}
}
} | class InternalStepRunner implements StepRunner {
// NOTE(review): from here on this block repeats the InternalStepRunner listing earlier in this
// file verbatim (trailing context of the same review record). Code is kept byte-identical.
private static final Logger logger = Logger.getLogger(InternalStepRunner.class.getName());
static final Duration endpointTimeout = Duration.ofMinutes(15);
static final Duration installationTimeout = Duration.ofMinutes(150);
private final Controller controller;
public InternalStepRunner(Controller controller) {
this.controller = controller;
}
@Override
public Optional<RunStatus> run(LockedStep step, RunId id) {
DualLogger logger = new DualLogger(id, step.get());
try {
switch (step.get()) {
case deployInitialReal: return deployInitialReal(id, logger);
case installInitialReal: return installInitialReal(id, logger);
case deployReal: return deployReal(id, logger);
case deployTester: return deployTester(id, logger);
case installReal: return installReal(id, logger);
case installTester: return installTester(id, logger);
case startTests: return startTests(id, logger);
case endTests: return endTests(id, logger);
case deactivateReal: return deactivateReal(id, logger);
case deactivateTester: return deactivateTester(id, logger);
case report: return report(id);
default: throw new AssertionError("Unknown step '" + step + "'!");
}
}
catch (UncheckedIOException e) {
logger.log(INFO, "IO exception running " + id + ": " + Exceptions.toMessageString(e));
return Optional.empty();
}
catch (RuntimeException e) {
logger.log(WARNING, "Unexpected exception running " + id, e);
if (JobProfile.of(id.type()).alwaysRun().contains(step.get())) {
logger.log("Will keep trying, as this is a cleanup step.");
return Optional.empty();
}
return Optional.of(error);
}
}
// NOTE(review): verbatim repeat of the deploy* methods listed earlier in this file.
private Optional<RunStatus> deployInitialReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " +
versions.sourcePlatform().orElse(versions.targetPlatform()) +
" and application version " +
versions.sourceApplication().orElse(versions.targetApplication()).id() + " ...");
return deployReal(id, true, logger);
}
private Optional<RunStatus> deployReal(RunId id, DualLogger logger) {
Versions versions = controller.jobController().run(id).get().versions();
logger.log("Deploying platform version " + versions.targetPlatform() +
" and application version " + versions.targetApplication().id() + " ...");
return deployReal(id, false, logger);
}
private Optional<RunStatus> deployReal(RunId id, boolean setTheStage, DualLogger logger) {
return deploy(id.application(),
id.type(),
() -> controller.applications().deploy(id.application(),
id.type().zone(controller.system()),
Optional.empty(),
new DeployOptions(false,
Optional.empty(),
false,
setTheStage)),
logger);
}
private Optional<RunStatus> deployTester(RunId id, DualLogger logger) {
logger.log("Deploying the tester container ...");
return deploy(JobController.testerOf(id.application()),
id.type(),
() -> controller.applications().deployTester(JobController.testerOf(id.application()),
testerPackage(id),
id.type().zone(controller.system()),
new DeployOptions(true,
Optional.of(controller.systemVersion()),
false,
false)),
logger);
}
// NOTE(review): verbatim repeat of the deploy method listed earlier in this file,
// including its truncated string literals.
private Optional<RunStatus> deploy(ApplicationId id, JobType type, Supplier<ActivateResult> deployment, DualLogger logger) {
try {
PrepareResponse prepareResponse = deployment.get().prepareResponse();
if ( ! prepareResponse.configChangeActions.refeedActions.stream().allMatch(action -> action.allowed)) {
logger.log("Deploy failed due to non-compatible changes that require re-feed. " +
"Your options are: \n" +
"1. Revert the incompatible changes.\n" +
"2. If you think it is safe in your case, you can override this validation, see\n" +
" http:
"3. Deploy as a new application under a different name.\n" +
"Illegal actions:\n" +
prepareResponse.configChangeActions.refeedActions.stream()
.filter(action -> ! action.allowed)
.flatMap(action -> action.messages.stream())
.collect(Collectors.joining("\n")) + "\n" +
"Details:\n" +
prepareResponse.log.stream()
.map(entry -> entry.message)
.collect(Collectors.joining("\n")));
return Optional.of(deploymentFailed);
}
if (prepareResponse.configChangeActions.restartActions.isEmpty())
logger.log("No services requiring restart.");
else
prepareResponse.configChangeActions.restartActions.stream()
.flatMap(action -> action.services.stream())
.map(service -> service.hostName)
.sorted().distinct()
.map(Hostname::new)
.forEach(hostname -> {
controller.applications().restart(new DeploymentId(id, type.zone(controller.system())), Optional.of(hostname));
logger.log("Restarting services on host " + hostname.id() + ".");
});
logger.log("Deployment successful.");
return Optional.of(running);
}
catch (ConfigServerException e) {
if ( e.getErrorCode() == OUT_OF_CAPACITY && type.isTest()
|| e.getErrorCode() == ACTIVATION_CONFLICT
|| e.getErrorCode() == APPLICATION_LOCK_FAILURE) {
logger.log("Will retry, because of '" + e.getErrorCode() + "' deploying:\n" + e.getMessage());
return Optional.empty();
}
throw e;
}
}
private Optional<RunStatus> installInitialReal(RunId id, DualLogger logger) {
return installReal(id, true, logger);
}
private Optional<RunStatus> installReal(RunId id, DualLogger logger) {
return installReal(id, false, logger);
}
/**
 * Checks whether the real deployment has converged on the versions relevant for this stage.
 *
 * @param id          the run whose deployment is checked
 * @param setTheStage when true, checks the source versions (falling back to the targets);
 *                    when false, checks the target versions
 * @param logger      dual logger recording progress
 * @return {@code installationFailed} if the deployment expired or timed out, {@code running}
 *         when both nodes and services have converged, or empty while still in progress
 */
private Optional<RunStatus> installReal(RunId id, boolean setTheStage, DualLogger logger) {
    if (expired(id.application(), id.type())) {
        logger.log(INFO, "Deployment expired before installation was successful.");
        return Optional.of(installationFailed);
    }
    Versions versions = controller.jobController().run(id).get().versions();
    Version platform;
    ApplicationVersion application;
    if (setTheStage) {
        platform = versions.sourcePlatform().orElse(versions.targetPlatform());
        application = versions.sourceApplication().orElse(versions.targetApplication());
    }
    else {
        platform = versions.targetPlatform();
        application = versions.targetApplication();
    }
    logger.log("Checking installation of " + platform + " and " + application.id() + " ...");
    boolean converged =    nodesConverged(id.application(), id.type(), platform, logger)
                        && servicesConverged(id.application(), id.type());
    if (converged) {
        logger.log("Installation succeeded!");
        return Optional.of(running);
    }
    if (timedOut(id.application(), id.type(), installationTimeout)) {
        logger.log(INFO, "Installation failed to complete within " + installationTimeout.toMinutes() + " minutes!");
        return Optional.of(installationFailed);
    }
    logger.log("Installation not yet complete.");
    return Optional.empty();
}
/**
 * Checks whether the tester container for this run has been installed.
 *
 * @return {@code installationFailed} if the real deployment expired first, {@code running} once the
 *         tester's services have converged, {@code error} on timeout, or empty while still in progress
 */
private Optional<RunStatus> installTester(RunId id, DualLogger logger) {
    logger.log("Checking installation of tester container ...");
    if (expired(id.application(), id.type())) {
        logger.log(INFO, "Deployment expired before tester was installed.");
        return Optional.of(installationFailed);
    }
    // The tester runs as a companion application derived from the real one.
    ApplicationId testerId = JobController.testerOf(id.application());
    if (servicesConverged(testerId, id.type())) {
        logger.log("Tester container successfully installed!");
        return Optional.of(running);
    }
    if (timedOut(id.application(), id.type(), installationTimeout)) {
        logger.log(WARNING, "Installation of tester failed to complete within " + installationTimeout.toMinutes() + " minutes of real deployment!");
        return Optional.of(error);
    }
    logger.log("Installation of tester not yet complete.");
    return Optional.empty();
}
/**
 * Logs the state of each active or reserved node of the deployment, and returns whether all of
 * them run the target platform version with no restart or reboot pending.
 */
private boolean nodesConverged(ApplicationId id, JobType type, Version target, DualLogger logger) {
    List<Node> nodes = controller.configServer().nodeRepository().list(type.zone(controller.system()), id, ImmutableSet.of(active, reserved));
    // One aligned status line per node: hostname, service state, version, restart and reboot generations.
    for (Node node : nodes)
        logger.log(String.format("%70s: %-16s%-25s%-32s%s",
                                 node.hostname(),
                                 node.serviceState(),
                                 node.wantedVersion() + (node.currentVersion().equals(node.wantedVersion()) ? "" : " <-- " + node.currentVersion()),
                                 node.restartGeneration() == node.wantedRestartGeneration() ? ""
                                         : "restart pending (" + node.wantedRestartGeneration() + " <-- " + node.restartGeneration() + ")",
                                 node.rebootGeneration() == node.wantedRebootGeneration() ? ""
                                         : "reboot pending (" + node.wantedRebootGeneration() + " <-- " + node.rebootGeneration() + ")"));
    // Converged when every node is on the target version and has caught up with wanted restart/reboot generations.
    return nodes.stream().allMatch(node -> node.currentVersion().equals(target)
                                        && node.restartGeneration() == node.wantedRestartGeneration()
                                        && node.rebootGeneration() == node.wantedRebootGeneration());
}
/** Returns whether the config server reports the services of the given deployment as converged. */
private boolean servicesConverged(ApplicationId id, JobType type) {
    DeploymentId deployment = new DeploymentId(id, type.zone(controller.system()));
    Optional<ServiceConvergence> convergence = controller.configServer().serviceConvergence(deployment);
    // Unknown convergence (empty) is treated as "not converged".
    return convergence.isPresent() && convergence.get().converged();
}
/**
 * Finds the endpoints of the deployment to test and, once both they and the tester endpoint are
 * available, instructs the tester cloud to start running the test suite for this job type.
 *
 * @return {@code installationFailed} if the deployment expired, {@code running} once tests are
 *         started, {@code error} if endpoints fail to appear within the timeout, or empty to retry
 */
private Optional<RunStatus> startTests(RunId id, DualLogger logger) {
    logger.log("Attempting to find endpoints ...");
    if (expired(id.application(), id.type())) {
        logger.log(INFO, "Deployment expired before tests could start.");
        return Optional.of(installationFailed);
    }
    Map<ZoneId, List<URI>> endpoints = deploymentEndpoints(id.application());
    logger.log("Found endpoints:\n" +
               endpoints.entrySet().stream()
                        .map(zoneEndpoints -> "- " + zoneEndpoints.getKey() + ":\n" +
                                              zoneEndpoints.getValue().stream()
                                                           .map(uri -> " |-- " + uri)
                                                           .collect(Collectors.joining("\n")))
                        .collect(Collectors.joining("\n")));
    // The zone under test must have endpoints before tests can run against it.
    if ( ! endpoints.containsKey(id.type().zone(controller.system()))) {
        if (timedOut(id.application(), id.type(), endpointTimeout)) {
            logger.log(WARNING, "Endpoints failed to show up within " + endpointTimeout.toMinutes() + " minutes!");
            return Optional.of(error);
        }
        logger.log("Endpoints for the deployment to test are not yet ready.");
        return Optional.empty();
    }
    // With deployment endpoints up, wait for the tester container's own endpoint as well.
    Optional<URI> testerEndpoint = JobController.testerEndpoint(controller, id);
    if (testerEndpoint.isPresent()) {
        logger.log("Starting tests ...");
        controller.jobController().cloud().startTests(testerEndpoint.get(),
                                                      TesterCloud.Suite.of(id.type()),
                                                      testConfig(id.application(), id.type().zone(controller.system()), controller.system(), endpoints));
        return Optional.of(running);
    }
    if (timedOut(id.application(), id.type(), endpointTimeout)) {
        logger.log(WARNING, "Endpoint for tester failed to show up within " + endpointTimeout.toMinutes() + " minutes of real deployment!");
        return Optional.of(error);
    }
    logger.log("Endpoints of tester container not yet available.");
    return Optional.empty();
}
/** Deactivates the real deployment of the run's application in the job's zone. */
private Optional<RunStatus> deactivateReal(RunId id, DualLogger logger) {
    logger.log("Deactivating deployment of " + id.application() + " in " + id.type().zone(controller.system()) + " ...");
    controller.applications().deactivate(id.application(), id.type().zone(controller.system()));
    return Optional.of(running);
}
/** Deactivates the tester application belonging to this run's application and job type. */
private Optional<RunStatus> deactivateTester(RunId id, DualLogger logger) {
    logger.log("Deactivating tester of " + id.application() + " in " + id.type().zone(controller.system()) + " ...");
    controller.jobController().deactivateTester(id.application(), id.type());
    return Optional.of(running);
}
/** Notifies the deployment trigger of the results of this run, if it is still active. */
private Optional<RunStatus> report(RunId id) {
    controller.jobController().active(id).ifPresent(run -> controller.applications().deploymentTrigger().notifyOfCompletion(report(run)));
    return Optional.of(running);
}
/** Returns the real application with the given id. */
private Application application(ApplicationId id) {
    // require(...) presumably throws when the application is unknown — confirm in ApplicationController.
    return controller.applications().require(id);
}
/** Returns whether the time elapsed since the last real deployment in the given zone is more than the given timeout. */
private boolean timedOut(ApplicationId id, JobType type, Duration timeout) {
    // NOTE(review): get(...) returns null (and this NPEs) if the deployment no longer exists;
    // callers appear to check expired(...) first — confirm.
    return application(id).deployments().get(type.zone(controller.system())).at().isBefore(controller.clock().instant().minus(timeout));
}
/**
 * Returns whether the real deployment for the given job type has expired, i.e., is no longer
 * present among the application's current deployments.
 */
private boolean expired(ApplicationId id, JobType type) {
    return ! application(id).deployments().containsKey(type.zone(controller.system()));
}
/** Returns a generated job report for the given run, used to notify the deployment trigger. */
private DeploymentJobs.JobReport report(Run run) {
    Optional<DeploymentJobs.JobError> jobError = run.hasFailed()
                                                 ? Optional.of(DeploymentJobs.JobError.unknown)
                                                 : Optional.empty();
    // NOTE(review): the literal 1 presumably stands for a project id — confirm.
    return new DeploymentJobs.JobReport(run.id().application(),
                                        run.id().type(),
                                        1,
                                        run.id().number(),
                                        Optional.empty(),
                                        jobError);
}
/** Returns the application package for the tester application, assembled from a generated config, fat-jar and services.xml. */
private ApplicationPackage testerPackage(RunId id) {
    ApplicationVersion version = controller.jobController().run(id).get().versions().targetApplication();
    byte[] testJar = controller.applications().applicationStore().getTesterPackage(JobController.testerOf(id.application()), version.id());
    byte[] services = servicesXml(controller.system());
    // Size hint: both payloads plus some slack for zip bookkeeping.
    try (ZipBuilder zip = new ZipBuilder(testJar.length + services.length + 1000)) {
        zip.add(testJar);
        zip.add("services.xml", services);
        zip.close();
        return new ApplicationPackage(zip.toByteArray());
    }
}
/** Returns all endpoints for all current deployments of the given real application, omitting zones with none. */
private Map<ZoneId, List<URI>> deploymentEndpoints(ApplicationId id) {
    ImmutableMap.Builder<ZoneId, List<URI>> builder = ImmutableMap.builder();
    for (ZoneId zone : application(id).deployments().keySet())
        controller.applications().getDeploymentEndpoints(new DeploymentId(id, zone))
                  .filter(endpoints -> ! endpoints.isEmpty())
                  .ifPresent(endpoints -> builder.put(zone, endpoints));
    return builder.build();
}
/**
 * Returns the generated services.xml content for the tester application: a single container
 * cluster with the test runner component/handler and Athenz access control for the given system.
 * NOTE(review): the bare "<binding>http: literals below look truncated in this dump (binding URI
 * and closing quote missing) — verify against the upstream source.
 */
static byte[] servicesXml(SystemName systemName) {
    // The main system uses the production Athenz domain; all others use the cd domain.
    String domain = systemName == SystemName.main ? "vespa.vespa" : "vespa.vespa.cd";
    String servicesXml = "<?xml version='1.0' encoding='UTF-8'?>\n" +
                         "<services xmlns:deploy='vespa' version='1.0'>\n" +
                         " <container version='1.0' id='default'>\n" +
                         "\n" +
                         " <component id=\"com.yahoo.vespa.hosted.testrunner.TestRunner\" bundle=\"vespa-testrunner-components\">\n" +
                         " <config name=\"com.yahoo.vespa.hosted.testrunner.test-runner\">\n" +
                         " <artifactsPath>artifacts</artifactsPath>\n" +
                         " </config>\n" +
                         " </component>\n" +
                         "\n" +
                         " <handler id=\"com.yahoo.vespa.hosted.testrunner.TestRunnerHandler\" bundle=\"vespa-testrunner-components\">\n" +
                         " <binding>http:
                         " </handler>\n" +
                         "\n" +
                         " <http>\n" +
                         " <server id='default' port='4080'/>\n" +
                         " <filtering>\n" +
                         " <access-control domain='" + domain + "'>\n" +
                         " <exclude>\n" +
                         " <binding>http:
                         " </exclude>\n" +
                         " </access-control>\n" +
                         " <request-chain id=\"testrunner-api\">\n" +
                         " <filter id='authz-filter' class='com.yahoo.jdisc.http.filter.security.athenz.AthenzAuthorizationFilter' bundle=\"jdisc-security-filters\">\n" +
                         " <config name=\"jdisc.http.filter.security.athenz.athenz-authorization-filter\">\n" +
                         " <credentialsToVerify>TOKEN_ONLY</credentialsToVerify>\n" +
                         " <roleTokenHeaderName>Yahoo-Role-Auth</roleTokenHeaderName>\n" +
                         " </config>\n" +
                         " <component id=\"com.yahoo.jdisc.http.filter.security.athenz.StaticRequestResourceMapper\" bundle=\"jdisc-security-filters\">\n" +
                         " <config name=\"jdisc.http.filter.security.athenz.static-request-resource-mapper\">\n" +
                         " <resourceName>" + domain + ":tester-application</resourceName>\n" +
                         " <action>deploy</action>\n" +
                         " </config>\n" +
                         " </component>\n" +
                         " </filter>\n" +
                         " </request-chain>\n" +
                         " </filtering>\n" +
                         " </http>\n" +
                         "\n" +
                         " <nodes count=\"1\" flavor=\"d-2-8-50\" />\n" +
                         " </container>\n" +
                         "</services>\n";
    return servicesXml.getBytes();
}
/**
 * Returns the JSON config for the tests of the given job: application, zone, system, and the
 * endpoints of each deployed zone.
 */
private static byte[] testConfig(ApplicationId id, ZoneId testerZone, SystemName system, Map<ZoneId, List<URI>> deployments) {
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    root.setString("application", id.serializedForm());
    root.setString("zone", testerZone.value());
    root.setString("system", system.name());
    Cursor endpointsObject = root.setObject("endpoints");
    for (Map.Entry<ZoneId, List<URI>> zoneEndpoints : deployments.entrySet()) {
        Cursor endpointArray = endpointsObject.setArray(zoneEndpoints.getKey().value());
        for (URI endpoint : zoneEndpoints.getValue())
            endpointArray.addString(endpoint.toString());
    }
    try {
        return SlimeUtils.toJsonBytes(slime);
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
/** Logger which logs to a {@link JobController}, as well as to the parent class' {@link Logger}. */
private class DualLogger {

    private final RunId id;
    private final Step step;
    private final String prefix;   // prepended to every locally logged message, identifies run and step

    private DualLogger(RunId id, Step step) {
        this.id = id;
        this.step = step;
        this.prefix = id + " at " + step + ": ";
    }

    /** Logs at the default DEBUG level. */
    private void log(String message) {
        log(DEBUG, message);
    }

    private void log(Level level, String message) {
        log(level, message, null);
    }

    /**
     * Logs first to the local logger, then to the job controller's run log. A throwable is
     * attached to the local record, while its stack trace is appended to the message stored
     * with the job controller.
     */
    private void log(Level level, String message, Throwable thrown) {
        LogRecord record = new LogRecord(level, prefix + message);
        record.setThrown(thrown);
        logger.log(record);
        if (thrown != null) {
            ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream();
            thrown.printStackTrace(new PrintStream(traceBuffer));
            message += "\n" + traceBuffer;
        }
        controller.jobController().log(id, step, level, message);
    }

}
} |
You're absolutely right. | static HttpResponse runDetailsResponse(JobController jobController, RunId runId, String after) {
Slime slime = new Slime();
Cursor logsObject = slime.setObject();
try {
jobController.updateTestLog(runId);
logsObject.setBool("active", true);
}
catch (IllegalArgumentException e) {
logsObject.setBool("active", jobController.active(runId).isPresent());
}
RunLog runLog = (after == null ? jobController.details(runId) : jobController.details(runId, Long.parseLong(after)))
.orElseThrow(() -> new NotExistsException(String.format(
"No run details exist for application: %s, job type: %s, number: %d",
runId.application().toShortString(), runId.type().jobName(), runId.number())));
for (Step step : Step.values()) {
runLog.get(step).ifPresent(entries -> toSlime(logsObject.setArray(step.name()), entries));
}
runLog.lastId().ifPresent(id -> logsObject.setLong("lastId", id));
return new SlimeJsonResponse(slime);
} | } | static HttpResponse runDetailsResponse(JobController jobController, RunId runId, String after) {
Slime slime = new Slime();
Cursor logsObject = slime.setObject();
logsObject.setBool("active", jobController.active(runId).isPresent());
jobController.updateTestLog(runId);
RunLog runLog = (after == null ? jobController.details(runId) : jobController.details(runId, Long.parseLong(after)))
.orElseThrow(() -> new NotExistsException(String.format(
"No run details exist for application: %s, job type: %s, number: %d",
runId.application().toShortString(), runId.type().jobName(), runId.number())));
for (Step step : Step.values()) {
runLog.get(step).ifPresent(entries -> toSlime(logsObject.setArray(step.name()), entries));
}
runLog.lastId().ifPresent(id -> logsObject.setLong("lastId", id));
return new SlimeJsonResponse(slime);
} | class JobControllerApiHandlerHelper {
/**
 * Renders all job types that have recorded runs for the application, each with the status of its last run.
 *
 * @param sortedJobs the job types to render, in display order
 * @param lastRun the last run of each job type, if any
 * @param baseUriForJobs base URI from which per-job URIs are derived
 * @return a JSON response listing the jobs
 */
static HttpResponse jobTypeResponse(List<JobType> sortedJobs, Map<JobType, Run> lastRun, URI baseUriForJobs) {
    Slime slime = new Slime();
    Cursor jobArray = slime.setObject().setArray("jobs");
    for (JobType jobType : sortedJobs)
        jobTypeToSlime(jobArray.addObject(), jobType, Optional.ofNullable(lastRun.get(jobType)), baseUriForJobs);
    return new SlimeJsonResponse(slime);
}
/** Renders one job type: its URI and, when present, the status of its last run. */
private static void jobTypeToSlime(Cursor cursor, JobType jobType, Optional<Run> lastRun, URI baseUriForJobs) {
    Cursor jobObject = cursor.setObject(jobType.jobName());
    // The per-job URI is the jobs base URI with the job name appended.
    String jobTypePath = baseUriForJobs.getPath() + "/" + jobType.jobName();
    URI baseUriForJobType = baseUriForJobs.resolve(jobTypePath);
    jobObject.setString("url", baseUriForJobType.toString());
    lastRun.ifPresent(run -> {
        Cursor lastObject = jobObject.setObject("last");
        runToSlime(lastObject, run, baseUriForJobType);
    });
}
/**
 * Renders the runs of a specific job type, keyed by run number.
 *
 * @return a JSON response with one object per run
 */
static HttpResponse runResponse(Map<RunId, Run> runs, URI baseUriForJobType) {
    Slime slime = new Slime();
    Cursor runsObject = slime.setObject();
    for (Map.Entry<RunId, Run> entry : runs.entrySet())
        runToSlime(runsObject.setObject(Long.toString(entry.getKey().number())), entry.getValue(), baseUriForJobType);
    return new SlimeJsonResponse(slime);
}
/** Renders one run: status, timing, step statuses, log URI, and wanted/current versions. */
private static void runToSlime(Cursor cursor, Run run, URI baseUriForJobType) {
    cursor.setString("status", run.status().name());
    run.end().ifPresent(instant -> cursor.setString("end", instant.toString()));
    Cursor stepsArray = cursor.setArray("steps");
    // One object per step, mapping the step name to its status.
    run.steps().forEach((step, status) -> {
        Cursor stepObject = stepsArray.addObject();
        stepObject.setString(step.name(), status.name());
    });
    cursor.setString("start", run.start().toString());
    cursor.setLong("id", run.id().number());
    // Logs for this run live under <job URI>/run/<run number>.
    String logsPath = baseUriForJobType.getPath() + "/run/" + run.id().number();
    cursor.setString("logs", baseUriForJobType.resolve(logsPath).toString());
    cursor.setString("wantedPlatform", run.versions().targetPlatform().toString());
    applicationVersionToSlime(cursor.setObject("wantedApplication"), run.versions().targetApplication());
    // Source versions are only present for upgrade-style runs.
    run.versions().sourcePlatform().ifPresent(version -> cursor.setString("currentPlatform", version.toString()));
    run.versions().sourceApplication().ifPresent(version -> applicationVersionToSlime(cursor.setObject("currentApplication"), version));
}
/**
 * Renders an application version: id, build number and source revision.
 * NOTE(review): buildNumber() and source() are unwrapped with Optional.get(), which throws if the
 * version lacks a known build or source — presumably callers only pass fully specified versions; confirm.
 */
private static void applicationVersionToSlime(Cursor cursor, ApplicationVersion version) {
    cursor.setString("id", version.id());
    cursor.setLong("build", version.buildNumber().get());
    cursor.setString("repository", version.source().get().repository());
    cursor.setString("branch", version.source().get().branch());
    cursor.setString("commit", version.source().get().commit());
}
/**
 * Renders logs from a single run: adds each log entry as a new object in the given array.
 * (The previous "@return" doc was misleading for this void helper.)
 */
private static void toSlime(Cursor entryArray, List<LogEntry> entries) {
    entries.forEach(entry -> toSlime(entryArray.addObject(), entry));
}
/** Renders a single log entry: timestamp, type and message. */
private static void toSlime(Cursor entryObject, LogEntry entry) {
    entryObject.setLong("at", entry.at());
    entryObject.setString("type", entry.type().name());
    entryObject.setString("message", entry.message());
}
/**
 * Unpack payload and submit to job controller. Defaults instance to 'default' and renders the
 * application version on success.
 *
 * @param tenant the tenant owning the application
 * @param application the application name
 * @param sourceRevision the source revision the packages were built from
 * @param appPackage the application package bytes
 * @param testPackage the test package bytes
 * @return Response with the new application version
 */
static HttpResponse submitResponse(JobController jobController, String tenant, String application,
                                   SourceRevision sourceRevision, byte[] appPackage, byte[] testPackage) {
    ApplicationVersion version = jobController.submit(ApplicationId.from(tenant, application, "default"),
                                                      sourceRevision, appPackage, testPackage);
    Slime slime = new Slime();
    Cursor responseObject = slime.setObject();
    responseObject.setString("version", version.id());
    return new SlimeJsonResponse(slime);
}
} | class JobControllerApiHandlerHelper {
/**
 * Renders all job types that have recorded runs for the application, each with the status of its last run.
 *
 * @param sortedJobs the job types to render, in display order
 * @param lastRun the last run of each job type, if any
 * @param baseUriForJobs base URI from which per-job URIs are derived
 * @return a JSON response listing the jobs
 */
static HttpResponse jobTypeResponse(List<JobType> sortedJobs, Map<JobType, Run> lastRun, URI baseUriForJobs) {
    Slime slime = new Slime();
    Cursor jobArray = slime.setObject().setArray("jobs");
    for (JobType jobType : sortedJobs)
        jobTypeToSlime(jobArray.addObject(), jobType, Optional.ofNullable(lastRun.get(jobType)), baseUriForJobs);
    return new SlimeJsonResponse(slime);
}
/** Renders one job type: its URI and, when present, the status of its last run. */
private static void jobTypeToSlime(Cursor cursor, JobType jobType, Optional<Run> lastRun, URI baseUriForJobs) {
    Cursor jobObject = cursor.setObject(jobType.jobName());
    // The per-job URI is the jobs base URI with the job name appended.
    String jobTypePath = baseUriForJobs.getPath() + "/" + jobType.jobName();
    URI baseUriForJobType = baseUriForJobs.resolve(jobTypePath);
    jobObject.setString("url", baseUriForJobType.toString());
    lastRun.ifPresent(run -> {
        Cursor lastObject = jobObject.setObject("last");
        runToSlime(lastObject, run, baseUriForJobType);
    });
}
/**
 * Renders the runs of a specific job type, keyed by run number.
 *
 * @return a JSON response with one object per run
 */
static HttpResponse runResponse(Map<RunId, Run> runs, URI baseUriForJobType) {
    Slime slime = new Slime();
    Cursor runsObject = slime.setObject();
    for (Map.Entry<RunId, Run> entry : runs.entrySet())
        runToSlime(runsObject.setObject(Long.toString(entry.getKey().number())), entry.getValue(), baseUriForJobType);
    return new SlimeJsonResponse(slime);
}
/** Renders one run: status, timing, step statuses, log URI, and wanted/current versions. */
private static void runToSlime(Cursor cursor, Run run, URI baseUriForJobType) {
    cursor.setString("status", run.status().name());
    run.end().ifPresent(instant -> cursor.setString("end", instant.toString()));
    Cursor stepsArray = cursor.setArray("steps");
    // One object per step, mapping the step name to its status.
    run.steps().forEach((step, status) -> {
        Cursor stepObject = stepsArray.addObject();
        stepObject.setString(step.name(), status.name());
    });
    cursor.setString("start", run.start().toString());
    cursor.setLong("id", run.id().number());
    // Logs for this run live under <job URI>/run/<run number>.
    String logsPath = baseUriForJobType.getPath() + "/run/" + run.id().number();
    cursor.setString("logs", baseUriForJobType.resolve(logsPath).toString());
    cursor.setString("wantedPlatform", run.versions().targetPlatform().toString());
    applicationVersionToSlime(cursor.setObject("wantedApplication"), run.versions().targetApplication());
    // Source versions are only present for upgrade-style runs.
    run.versions().sourcePlatform().ifPresent(version -> cursor.setString("currentPlatform", version.toString()));
    run.versions().sourceApplication().ifPresent(version -> applicationVersionToSlime(cursor.setObject("currentApplication"), version));
}
/**
 * Renders an application version: id, build number and source revision.
 * NOTE(review): buildNumber() and source() are unwrapped with Optional.get(), which throws if the
 * version lacks a known build or source — presumably callers only pass fully specified versions; confirm.
 */
private static void applicationVersionToSlime(Cursor cursor, ApplicationVersion version) {
    cursor.setString("id", version.id());
    cursor.setLong("build", version.buildNumber().get());
    cursor.setString("repository", version.source().get().repository());
    cursor.setString("branch", version.source().get().branch());
    cursor.setString("commit", version.source().get().commit());
}
/**
 * Renders logs from a single run: adds each log entry as a new object in the given array.
 * (The previous "@return" doc was misleading for this void helper.)
 */
private static void toSlime(Cursor entryArray, List<LogEntry> entries) {
    entries.forEach(entry -> toSlime(entryArray.addObject(), entry));
}
/** Renders a single log entry: timestamp, type and message. */
private static void toSlime(Cursor entryObject, LogEntry entry) {
    entryObject.setLong("at", entry.at());
    entryObject.setString("type", entry.type().name());
    entryObject.setString("message", entry.message());
}
/**
 * Unpack payload and submit to job controller. Defaults instance to 'default' and renders the
 * application version on success.
 *
 * @param tenant the tenant owning the application
 * @param application the application name
 * @param sourceRevision the source revision the packages were built from
 * @param appPackage the application package bytes
 * @param testPackage the test package bytes
 * @return Response with the new application version
 */
static HttpResponse submitResponse(JobController jobController, String tenant, String application,
                                   SourceRevision sourceRevision, byte[] appPackage, byte[] testPackage) {
    ApplicationVersion version = jobController.submit(ApplicationId.from(tenant, application, "default"),
                                                      sourceRevision, appPackage, testPackage);
    Slime slime = new Slime();
    Cursor responseObject = slime.setObject();
    responseObject.setString("version", version.id());
    return new SlimeJsonResponse(slime);
}
} |
I think we should use field.getName() to avoid "Field field foo.." in the message. | public void process(boolean validate) {
for (SDField field : search.allConcreteFields()) {
if (!field.isExtraField() && field.getAttributes().containsKey(field.getName())) {
if (field.getAttributes().get(field.getName()).isMutable()) {
throw new IllegalArgumentException("Field " + field + " in '" + search.getDocument().getName() +
"' can not be marked mutable as it inside the document.");
}
}
}
} | throw new IllegalArgumentException("Field " + field + " in '" + search.getDocument().getName() + | public void process(boolean validate) {
for (SDField field : search.allConcreteFields()) {
if (!field.isExtraField() && field.getAttributes().containsKey(field.getName())) {
if (field.getAttributes().get(field.getName()).isMutable()) {
throw new IllegalArgumentException("Field " + field + " in '" + search.getDocument().getName() +
"' can not be marked mutable as it inside the document.");
}
}
}
} | class MutableAttributes extends Processor {
public MutableAttributes(Search search, DeployLogger deployLogger,
RankProfileRegistry rankProfileRegistry, QueryProfiles queryProfiles)
{
super(search, deployLogger, rankProfileRegistry, queryProfiles);
}
@Override
} | class MutableAttributes extends Processor {
public MutableAttributes(Search search, DeployLogger deployLogger,
RankProfileRegistry rankProfileRegistry, QueryProfiles queryProfiles)
{
super(search, deployLogger, rankProfileRegistry, queryProfiles);
}
@Override
} |
Missing 'is'? 'as it inside' -> 'as it is inside'? | public void process(boolean validate) {
for (SDField field : search.allConcreteFields()) {
if (!field.isExtraField() && field.getAttributes().containsKey(field.getName())) {
if (field.getAttributes().get(field.getName()).isMutable()) {
throw new IllegalArgumentException("Field " + field + " in '" + search.getDocument().getName() +
"' can not be marked mutable as it inside the document.");
}
}
}
} | "' can not be marked mutable as it inside the document."); | public void process(boolean validate) {
for (SDField field : search.allConcreteFields()) {
if (!field.isExtraField() && field.getAttributes().containsKey(field.getName())) {
if (field.getAttributes().get(field.getName()).isMutable()) {
throw new IllegalArgumentException("Field " + field + " in '" + search.getDocument().getName() +
"' can not be marked mutable as it inside the document.");
}
}
}
} | class MutableAttributes extends Processor {
public MutableAttributes(Search search, DeployLogger deployLogger,
RankProfileRegistry rankProfileRegistry, QueryProfiles queryProfiles)
{
super(search, deployLogger, rankProfileRegistry, queryProfiles);
}
@Override
} | class MutableAttributes extends Processor {
public MutableAttributes(Search search, DeployLogger deployLogger,
RankProfileRegistry rankProfileRegistry, QueryProfiles queryProfiles)
{
super(search, deployLogger, rankProfileRegistry, queryProfiles);
}
@Override
} |
Will this method always be called with arg `false` after a reconfig? If not, don't you risk having old non-existing clusters in the map forever? | public void setContainerHasClusters(boolean containerHasClusters) {
synchronized (mutex) {
this.containerHasClusters = containerHasClusters;
if ( ! containerHasClusters)
clusterStatus.clear();
}
} | clusterStatus.clear(); | public void setContainerHasClusters(boolean containerHasClusters) {
synchronized (mutex) {
this.containerHasClusters = containerHasClusters;
if ( ! containerHasClusters)
clusterStatus.clear();
}
} | class ClustersStatus extends AbstractComponent {
@Inject
public ClustersStatus() { }
/** Are there any (in-service influencing) clusters in this container? */
private boolean containerHasClusters;
/** If we have no clusters, what should we answer? */
private boolean receiveTrafficByDefault;
private final Object mutex = new Object();
/** The status of clusters, when known. Note that clusters may exist for which there is no knowledge yet. */
private final Map<Object, Boolean> clusterStatus = new HashMap<>();
/** Sets the answer to give when this container has no status-influencing clusters. */
public void setReceiveTrafficByDefault(boolean receiveTrafficByDefault) {
    synchronized (mutex) {
        this.receiveTrafficByDefault = receiveTrafficByDefault;
    }
}
/** Records that the given cluster is up. */
public void setUp(Object clusterIdentifier) {
    synchronized (mutex) {
        clusterStatus.put(clusterIdentifier, Boolean.TRUE);
    }
}
/** Records that the given cluster is down. */
public void setDown(Object clusterIdentifier) {
    synchronized (mutex) {
        clusterStatus.put(clusterIdentifier, Boolean.FALSE);
    }
}
/** Returns whether this container should receive traffic based on the state of this */
public boolean containerShouldReceiveTraffic() {
    synchronized (mutex) {
        if (containerHasClusters) {
            // Up as long as at least one cluster is up. Unbox the Boolean directly
            // instead of the unidiomatic 'status == true' comparison (same semantics).
            return clusterStatus.values().stream().anyMatch(status -> status);
        }
        else {
            return receiveTrafficByDefault;
        }
    }
}
} | class ClustersStatus extends AbstractComponent {
@Inject
public ClustersStatus() { }
/** Are there any (in-service influencing) clusters in this container? */
private boolean containerHasClusters;
/** If we have no clusters, what should we answer? */
private boolean receiveTrafficByDefault;
private final Object mutex = new Object();
/** The status of clusters, when known. Note that clusters may exist for which there is no knowledge yet. */
private final Map<Object, Boolean> clusterStatus = new HashMap<>();
/** Sets the answer to give when this container has no status-influencing clusters. */
public void setReceiveTrafficByDefault(boolean receiveTrafficByDefault) {
    synchronized (mutex) {
        this.receiveTrafficByDefault = receiveTrafficByDefault;
    }
}
/** Records that the given cluster is up. */
public void setUp(Object clusterIdentifier) {
    synchronized (mutex) {
        clusterStatus.put(clusterIdentifier, Boolean.TRUE);
    }
}
/** Records that the given cluster is down. */
public void setDown(Object clusterIdentifier) {
    synchronized (mutex) {
        clusterStatus.put(clusterIdentifier, Boolean.FALSE);
    }
}
/** Returns whether this container should receive traffic based on the state of this */
public boolean containerShouldReceiveTraffic() {
    synchronized (mutex) {
        if (containerHasClusters) {
            // Up as long as at least one cluster is up. Unbox the Boolean directly
            // instead of the unidiomatic 'status == true' comparison (same semantics).
            return clusterStatus.values().stream().anyMatch(status -> status);
        }
        else {
            return receiveTrafficByDefault;
        }
    }
}
} |
Well spotted, we do risk having old non-existing clusters in the map forever, but only as long as there are other clusters (this method will be called on each change to clusters and when there are none this is cleared). And since we only need a single cluster to be up to determine we are up, such phantom clusters are harmless. | public void setContainerHasClusters(boolean containerHasClusters) {
synchronized (mutex) {
this.containerHasClusters = containerHasClusters;
if ( ! containerHasClusters)
clusterStatus.clear();
}
} | clusterStatus.clear(); | public void setContainerHasClusters(boolean containerHasClusters) {
synchronized (mutex) {
this.containerHasClusters = containerHasClusters;
if ( ! containerHasClusters)
clusterStatus.clear();
}
} | class ClustersStatus extends AbstractComponent {
@Inject
public ClustersStatus() { }
/** Are there any (in-service influencing) clusters in this container? */
private boolean containerHasClusters;
/** If we have no clusters, what should we answer? */
private boolean receiveTrafficByDefault;
private final Object mutex = new Object();
/** The status of clusters, when known. Note that clusters may exist for which there is no knowledge yet. */
private final Map<Object, Boolean> clusterStatus = new HashMap<>();
/** Sets the answer to give when this container has no status-influencing clusters. */
public void setReceiveTrafficByDefault(boolean receiveTrafficByDefault) {
    synchronized (mutex) {
        this.receiveTrafficByDefault = receiveTrafficByDefault;
    }
}
/** Records that the given cluster is up. */
public void setUp(Object clusterIdentifier) {
    synchronized (mutex) {
        clusterStatus.put(clusterIdentifier, Boolean.TRUE);
    }
}
/** Records that the given cluster is down. */
public void setDown(Object clusterIdentifier) {
    synchronized (mutex) {
        clusterStatus.put(clusterIdentifier, Boolean.FALSE);
    }
}
/** Returns whether this container should receive traffic based on the state of this */
public boolean containerShouldReceiveTraffic() {
    synchronized (mutex) {
        if (containerHasClusters) {
            // Up as long as at least one cluster is up. Unbox the Boolean directly
            // instead of the unidiomatic 'status == true' comparison (same semantics).
            return clusterStatus.values().stream().anyMatch(status -> status);
        }
        else {
            return receiveTrafficByDefault;
        }
    }
}
} | class ClustersStatus extends AbstractComponent {
@Inject
public ClustersStatus() { }
/** Are there any (in-service influencing) clusters in this container? */
private boolean containerHasClusters;
/** If we have no clusters, what should we answer? */
private boolean receiveTrafficByDefault;
private final Object mutex = new Object();
/** The status of clusters, when known. Note that clusters may exist for which there is no knowledge yet. */
private final Map<Object, Boolean> clusterStatus = new HashMap<>();
/** Sets the answer to give when this container has no status-influencing clusters. */
public void setReceiveTrafficByDefault(boolean receiveTrafficByDefault) {
    synchronized (mutex) {
        this.receiveTrafficByDefault = receiveTrafficByDefault;
    }
}
/** Records that the given cluster is up. */
public void setUp(Object clusterIdentifier) {
    synchronized (mutex) {
        clusterStatus.put(clusterIdentifier, Boolean.TRUE);
    }
}
/** Records that the given cluster is down. */
public void setDown(Object clusterIdentifier) {
    synchronized (mutex) {
        clusterStatus.put(clusterIdentifier, Boolean.FALSE);
    }
}
/** Returns whether this container should receive traffic based on the state of this */
public boolean containerShouldReceiveTraffic() {
    synchronized (mutex) {
        if (containerHasClusters) {
            // Up as long as at least one cluster is up. Unbox the Boolean directly
            // instead of the unidiomatic 'status == true' comparison (same semantics).
            return clusterStatus.values().stream().anyMatch(status -> status);
        }
        else {
            return receiveTrafficByDefault;
        }
    }
}
} |
Need separate method for EC private keys? Note that at least when exported from OpenSSL these tend to be separated into two PEM sections, `EC PARAMETERS` and `EC PRIVATE KEY`, presumably due to describing which curve type is used etc. Not sure what the Java APIs emit. | public static String toPem(PrivateKey privateKey) {
try (StringWriter stringWriter = new StringWriter(); JcaPEMWriter pemWriter = new JcaPEMWriter(stringWriter)) {
pemWriter.writeObject(new PemObject("RSA PRIVATE KEY", getPkcs1Bytes(privateKey)));
pemWriter.flush();
return stringWriter.toString();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} | pemWriter.writeObject(new PemObject("RSA PRIVATE KEY", getPkcs1Bytes(privateKey))); | public static String toPem(PrivateKey privateKey) {
try (StringWriter stringWriter = new StringWriter(); JcaPEMWriter pemWriter = new JcaPEMWriter(stringWriter)) {
pemWriter.writeObject(new PemObject("RSA PRIVATE KEY", getPkcs1Bytes(privateKey)));
pemWriter.flush();
return stringWriter.toString();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} | class KeyUtils {
private KeyUtils() {} // utility class; not instantiable
/**
 * Generates a new key pair for the given algorithm using the BouncyCastle provider.
 *
 * @param algorithm the key algorithm to use
 * @param keySize   the key size in bits, or -1 to use the provider's default for the algorithm
 * @return the generated key pair
 * @throws RuntimeException wrapping any GeneralSecurityException from the provider
 */
public static KeyPair generateKeypair(KeyAlgorithm algorithm, int keySize) {
    try {
        KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
        // -1 is the sentinel for "use the default size"; only initialize explicitly otherwise.
        if (keySize != -1) {
            keyGen.initialize(keySize);
        }
        return keyGen.genKeyPair();
    } catch (GeneralSecurityException e) {
        throw new RuntimeException(e);
    }
}
/** Generates a new key pair with the provider's default key size for the given algorithm. */
public static KeyPair generateKeypair(KeyAlgorithm algorithm) {
    return generateKeypair(algorithm, -1);
}
/**
 * Derives the public key corresponding to the given private key.
 *
 * For RSA the public key is rebuilt from the modulus and public exponent of the CRT-form
 * private key. For EC it is recomputed as Q = d * G on the key's curve.
 *
 * @param privateKey an RSA (CRT form) or EC private key
 * @return the matching public key
 * @throws IllegalArgumentException if the key algorithm is neither RSA nor EC
 */
public static PublicKey extractPublicKey(PrivateKey privateKey) {
String algorithm = privateKey.getAlgorithm();
try {
if (algorithm.equals(RSA.getAlgorithmName())) {
KeyFactory keyFactory = KeyFactory.getInstance(RSA.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
// NOTE(review): assumes a CRT-form RSA key — confirm for keys not generated by this class
RSAPrivateCrtKey rsaPrivateCrtKey = (RSAPrivateCrtKey) privateKey;
RSAPublicKeySpec keySpec = new RSAPublicKeySpec(rsaPrivateCrtKey.getModulus(), rsaPrivateCrtKey.getPublicExponent());
return keyFactory.generatePublic(keySpec);
} else if (algorithm.equals(EC.getAlgorithmName())) {
KeyFactory keyFactory = KeyFactory.getInstance(EC.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
// NOTE(review): assumes the key is a Bouncy Castle EC key — confirm for externally created keys
BCECPrivateKey ecPrivateKey = (BCECPrivateKey) privateKey;
ECParameterSpec ecParameterSpec = ecPrivateKey.getParameters();
// Public point Q = d * G (private scalar times the curve generator)
ECPoint ecPoint = new FixedPointCombMultiplier().multiply(ecParameterSpec.getG(), ecPrivateKey.getD());
ECPublicKeySpec keySpec = new ECPublicKeySpec(ecPoint, ecParameterSpec);
return keyFactory.generatePublic(keySpec);
} else {
throw new IllegalArgumentException("Unexpected key algorithm: " + algorithm);
}
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
}
}
/**
 * Parses a private key from its PEM encoding.
 *
 * Handles both PKCS#8 ("BEGIN PRIVATE KEY") and the legacy OpenSSL key pair format
 * ("BEGIN RSA/EC PRIVATE KEY"). The key algorithm is taken from the algorithm identifier
 * embedded in the encoded key, so this works for any algorithm the provider supports;
 * the previous implementation hardcoded an RSA KeyFactory for PKCS#8 input and failed
 * on PKCS#8-encoded EC keys.
 *
 * @param pem the PEM-encoded private key
 * @return the parsed private key
 * @throws IllegalArgumentException if the PEM content is not a recognized private key type
 */
public static PrivateKey fromPemEncodedPrivateKey(String pem) {
    try (PEMParser parser = new PEMParser(new StringReader(pem))) {
        Object pemObject = parser.readObject();
        // JcaPEMKeyConverter picks the correct KeyFactory from the key's algorithm OID
        JcaPEMKeyConverter pemConverter = new JcaPEMKeyConverter();
        if (pemObject instanceof PrivateKeyInfo) {
            return pemConverter.getPrivateKey((PrivateKeyInfo) pemObject);
        } else if (pemObject instanceof PEMKeyPair) {
            return pemConverter.getPrivateKey(((PEMKeyPair) pemObject).getPrivateKeyInfo());
        }
        throw new IllegalArgumentException("Unexpected type of PEM type: " + pemObject);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
/**
 * Unwraps the inner key structure from the key's PKCS#8 encoding.
 * For an RSA key this yields the PKCS#1 RSAPrivateKey structure expected by the
 * "RSA PRIVATE KEY" PEM header.
 * NOTE(review): for non-RSA keys the inner structure is not PKCS#1 — confirm callers
 * only pass RSA keys before reusing this elsewhere.
 */
private static byte[] getPkcs1Bytes(PrivateKey privateKey) throws IOException{
// getEncoded() returns the PKCS#8 (PrivateKeyInfo) DER encoding
byte[] privBytes = privateKey.getEncoded();
PrivateKeyInfo pkInfo = PrivateKeyInfo.getInstance(privBytes);
// Extract the algorithm-specific inner key and re-encode it on its own
ASN1Encodable encodable = pkInfo.parsePrivateKey();
ASN1Primitive primitive = encodable.toASN1Primitive();
return primitive.getEncoded();
}
} | class KeyUtils {
private KeyUtils() {}
public static KeyPair generateKeypair(KeyAlgorithm algorithm, int keySize) {
try {
KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
if (keySize != -1) {
keyGen.initialize(keySize);
}
return keyGen.genKeyPair();
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
}
}
public static KeyPair generateKeypair(KeyAlgorithm algorithm) {
return generateKeypair(algorithm, -1);
}
public static PublicKey extractPublicKey(PrivateKey privateKey) {
String algorithm = privateKey.getAlgorithm();
try {
if (algorithm.equals(RSA.getAlgorithmName())) {
KeyFactory keyFactory = KeyFactory.getInstance(RSA.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
RSAPrivateCrtKey rsaPrivateCrtKey = (RSAPrivateCrtKey) privateKey;
RSAPublicKeySpec keySpec = new RSAPublicKeySpec(rsaPrivateCrtKey.getModulus(), rsaPrivateCrtKey.getPublicExponent());
return keyFactory.generatePublic(keySpec);
} else if (algorithm.equals(EC.getAlgorithmName())) {
KeyFactory keyFactory = KeyFactory.getInstance(EC.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
BCECPrivateKey ecPrivateKey = (BCECPrivateKey) privateKey;
ECParameterSpec ecParameterSpec = ecPrivateKey.getParameters();
ECPoint ecPoint = new FixedPointCombMultiplier().multiply(ecParameterSpec.getG(), ecPrivateKey.getD());
ECPublicKeySpec keySpec = new ECPublicKeySpec(ecPoint, ecParameterSpec);
return keyFactory.generatePublic(keySpec);
} else {
throw new IllegalArgumentException("Unexpected key algorithm: " + algorithm);
}
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
}
}
/**
 * Parses a private key from its PEM encoding.
 *
 * Handles both PKCS#8 ("BEGIN PRIVATE KEY") and the legacy OpenSSL key pair format
 * ("BEGIN RSA/EC PRIVATE KEY"). The key algorithm is taken from the algorithm identifier
 * embedded in the encoded key, so this works for any algorithm the provider supports;
 * the previous implementation hardcoded an RSA KeyFactory for PKCS#8 input and failed
 * on PKCS#8-encoded EC keys.
 *
 * @param pem the PEM-encoded private key
 * @return the parsed private key
 * @throws IllegalArgumentException if the PEM content is not a recognized private key type
 */
public static PrivateKey fromPemEncodedPrivateKey(String pem) {
    try (PEMParser parser = new PEMParser(new StringReader(pem))) {
        Object pemObject = parser.readObject();
        // JcaPEMKeyConverter picks the correct KeyFactory from the key's algorithm OID
        JcaPEMKeyConverter pemConverter = new JcaPEMKeyConverter();
        if (pemObject instanceof PrivateKeyInfo) {
            return pemConverter.getPrivateKey((PrivateKeyInfo) pemObject);
        } else if (pemObject instanceof PEMKeyPair) {
            return pemConverter.getPrivateKey(((PEMKeyPair) pemObject).getPrivateKeyInfo());
        }
        throw new IllegalArgumentException("Unexpected type of PEM type: " + pemObject);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
private static byte[] getPkcs1Bytes(PrivateKey privateKey) throws IOException{
byte[] privBytes = privateKey.getEncoded();
PrivateKeyInfo pkInfo = PrivateKeyInfo.getInstance(privBytes);
ASN1Encodable encodable = pkInfo.parsePrivateKey();
ASN1Primitive primitive = encodable.toASN1Primitive();
return primitive.getEncoded();
}
} |
Consider creating a helper wrapper a-la `setIsCA(boolean)` (with implicit `isCritical == true`) to make usage more obvious | public X509CertificateBuilder setBasicConstraints(boolean isCritical, boolean isCertAuthorityCertificate) {
this.basicConstraintsExtension = new BasicConstraintsExtension(isCritical, isCertAuthorityCertificate);
return this;
} | } | public X509CertificateBuilder setBasicConstraints(boolean isCritical, boolean isCertAuthorityCertificate) {
this.basicConstraintsExtension = new BasicConstraintsExtension(isCritical, isCertAuthorityCertificate);
return this;
} | class X509CertificateBuilder {
private final long serialNumber;
private final SignatureAlgorithm signingAlgorithm;
private final PrivateKey caPrivateKey;
private final Instant notBefore;
private final Instant notAfter;
private final List<SubjectAlternativeName> subjectAlternativeNames = new ArrayList<>();
private final X500Principal issuer;
private final X500Principal subject;
private final PublicKey certPublicKey;
private BasicConstraintsExtension basicConstraintsExtension;
private X509CertificateBuilder(X500Principal issuer,
X500Principal subject,
Instant notBefore,
Instant notAfter,
PublicKey certPublicKey,
PrivateKey caPrivateKey,
SignatureAlgorithm signingAlgorithm,
long serialNumber) {
this.issuer = issuer;
this.subject = subject;
this.notBefore = notBefore;
this.notAfter = notAfter;
this.certPublicKey = certPublicKey;
this.caPrivateKey = caPrivateKey;
this.signingAlgorithm = signingAlgorithm;
this.serialNumber = serialNumber;
}
/**
 * Creates a certificate builder initialized from a PKCS#10 certificate signing request:
 * the subject and public key are taken from the CSR, while issuer, validity period,
 * signing key, signature algorithm and serial number are supplied by the caller.
 */
public static X509CertificateBuilder fromCsr(Pkcs10Csr csr,
X500Principal caIssuer,
Instant notBefore,
Instant notAfter,
PrivateKey caPrivateKey,
SignatureAlgorithm signingAlgorithm,
long serialNumber) {
try {
PKCS10CertificationRequest bcCsr = csr.getBcCsr();
// Extract the requester's public key from the CSR
PublicKey publicKey = new JcaPKCS10CertificationRequest(bcCsr)
.setProvider(BouncyCastleProviderHolder.getInstance())
.getPublicKey();
return new X509CertificateBuilder(caIssuer,
new X500Principal(bcCsr.getSubject().getEncoded()),
notBefore,
notAfter,
publicKey,
caPrivateKey,
signingAlgorithm,
serialNumber);
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
 * Creates a builder for a self-signed certificate: issuer and subject are the same
 * principal, and the certificate is signed with the key pair's own private key.
 */
public static X509CertificateBuilder fromKeypair(KeyPair keyPair,
X500Principal subject,
Instant notBefore,
Instant notAfter,
SignatureAlgorithm signingAlgorithm,
long serialNumber) {
return new X509CertificateBuilder(subject,
subject,
notBefore,
notAfter,
keyPair.getPublic(),
keyPair.getPrivate(),
signingAlgorithm,
serialNumber);
}
/** Adds a DNS-type subject alternative name for the given host name. */
public X509CertificateBuilder addSubjectAlternativeName(String dnsName) {
    SubjectAlternativeName dnsSan = new SubjectAlternativeName(DNS_NAME, dnsName);
    subjectAlternativeNames.add(dnsSan);
    return this;
}
/** Adds the given subject alternative name to the certificate. */
public X509CertificateBuilder addSubjectAlternativeName(SubjectAlternativeName san) {
this.subjectAlternativeNames.add(san);
return this;
}
/**
 * Builds and signs the certificate.
 *
 * Adds the basic constraints extension if one was configured, and a non-critical
 * subject alternative name extension when any SANs were added.
 *
 * @return the signed X.509 certificate
 */
public X509Certificate build() {
try {
JcaX509v3CertificateBuilder jcaCertBuilder = new JcaX509v3CertificateBuilder(
issuer, BigInteger.valueOf(serialNumber), Date.from(notBefore), Date.from(notAfter), subject, certPublicKey);
if (basicConstraintsExtension != null) {
jcaCertBuilder.addExtension(
Extension.basicConstraints,
basicConstraintsExtension.isCritical,
new BasicConstraints(basicConstraintsExtension.isCertAuthorityCertificate));
}
if (!subjectAlternativeNames.isEmpty()) {
// All SANs go into a single (non-critical) extension
GeneralNames generalNames = new GeneralNames(
subjectAlternativeNames.stream()
.map(SubjectAlternativeName::toGeneralName)
.toArray(GeneralName[]::new));
jcaCertBuilder.addExtension(Extension.subjectAlternativeName, false, generalNames);
}
// Sign with the issuer's private key using the configured signature algorithm
ContentSigner contentSigner = new JcaContentSignerBuilder(signingAlgorithm.getAlgorithmName())
.setProvider(BouncyCastleProviderHolder.getInstance())
.build(caPrivateKey);
return new JcaX509CertificateConverter()
.setProvider(BouncyCastleProviderHolder.getInstance())
.getCertificate(jcaCertBuilder.build(contentSigner));
} catch (OperatorException | GeneralSecurityException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
} | class X509CertificateBuilder {
private final BigInteger serialNumber;
private final SignatureAlgorithm signingAlgorithm;
private final PrivateKey caPrivateKey;
private final Instant notBefore;
private final Instant notAfter;
private final List<SubjectAlternativeName> subjectAlternativeNames = new ArrayList<>();
private final X500Principal issuer;
private final X500Principal subject;
private final PublicKey certPublicKey;
private BasicConstraintsExtension basicConstraintsExtension;
private X509CertificateBuilder(X500Principal issuer,
X500Principal subject,
Instant notBefore,
Instant notAfter,
PublicKey certPublicKey,
PrivateKey caPrivateKey,
SignatureAlgorithm signingAlgorithm,
BigInteger serialNumber) {
this.issuer = issuer;
this.subject = subject;
this.notBefore = notBefore;
this.notAfter = notAfter;
this.certPublicKey = certPublicKey;
this.caPrivateKey = caPrivateKey;
this.signingAlgorithm = signingAlgorithm;
this.serialNumber = serialNumber;
}
public static X509CertificateBuilder fromCsr(Pkcs10Csr csr,
X500Principal caIssuer,
Instant notBefore,
Instant notAfter,
PrivateKey caPrivateKey,
SignatureAlgorithm signingAlgorithm,
BigInteger serialNumber) {
try {
PKCS10CertificationRequest bcCsr = csr.getBcCsr();
PublicKey publicKey = new JcaPKCS10CertificationRequest(bcCsr)
.setProvider(BouncyCastleProviderHolder.getInstance())
.getPublicKey();
return new X509CertificateBuilder(caIssuer,
new X500Principal(bcCsr.getSubject().getEncoded()),
notBefore,
notAfter,
publicKey,
caPrivateKey,
signingAlgorithm,
serialNumber);
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public static X509CertificateBuilder fromKeypair(KeyPair keyPair,
X500Principal subject,
Instant notBefore,
Instant notAfter,
SignatureAlgorithm signingAlgorithm,
BigInteger serialNumber) {
return new X509CertificateBuilder(subject,
subject,
notBefore,
notAfter,
keyPair.getPublic(),
keyPair.getPrivate(),
signingAlgorithm,
serialNumber);
}
/**
* @return generates a cryptographically secure positive serial number up to 128 bits
*/
public static BigInteger generateRandomSerialNumber() {
return new BigInteger(128, new SecureRandom());
}
public X509CertificateBuilder addSubjectAlternativeName(String dnsName) {
this.subjectAlternativeNames.add(new SubjectAlternativeName(DNS_NAME, dnsName));
return this;
}
public X509CertificateBuilder addSubjectAlternativeName(SubjectAlternativeName san) {
this.subjectAlternativeNames.add(san);
return this;
}
public X509CertificateBuilder setIsCertAuthority(boolean isCertAuthority) {
return setBasicConstraints(true, isCertAuthority);
}
public X509Certificate build() {
try {
JcaX509v3CertificateBuilder jcaCertBuilder = new JcaX509v3CertificateBuilder(
issuer, serialNumber, Date.from(notBefore), Date.from(notAfter), subject, certPublicKey);
if (basicConstraintsExtension != null) {
jcaCertBuilder.addExtension(
Extension.basicConstraints,
basicConstraintsExtension.isCritical,
new BasicConstraints(basicConstraintsExtension.isCertAuthorityCertificate));
}
if (!subjectAlternativeNames.isEmpty()) {
GeneralNames generalNames = new GeneralNames(
subjectAlternativeNames.stream()
.map(SubjectAlternativeName::toGeneralName)
.toArray(GeneralName[]::new));
jcaCertBuilder.addExtension(Extension.subjectAlternativeName, false, generalNames);
}
ContentSigner contentSigner = new JcaContentSignerBuilder(signingAlgorithm.getAlgorithmName())
.setProvider(BouncyCastleProviderHolder.getInstance())
.build(caPrivateKey);
return new JcaX509CertificateConverter()
.setProvider(BouncyCastleProviderHolder.getInstance())
.getCertificate(jcaCertBuilder.build(contentSigner));
} catch (OperatorException | GeneralSecurityException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
} |
Should `subject1`/`subject2` have different CN values to ensure that distinct certificates are read from the PEM list? Currently both have `CN=myservice`. | public void can_deserialize_serialized_pem_certificate_list() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject1 = new X500Principal("CN=myservice");
X509Certificate cert1 = TestUtils.createCertificate(keypair, subject1);
X500Principal subject2 = new X500Principal("CN=myservice");
X509Certificate cert2 = TestUtils.createCertificate(keypair, subject2);
List<X509Certificate> certificateList = Arrays.asList(cert1, cert2);
String pem = X509CertificateUtils.toPem(certificateList);
List<X509Certificate> deserializedCertificateList = X509CertificateUtils.certificateListFromPem(pem);
assertEquals(2, certificateList.size());
assertEquals(subject1, deserializedCertificateList.get(0).getSubjectX500Principal());
assertEquals(subject2, deserializedCertificateList.get(1).getSubjectX500Principal());
} | assertEquals(subject2, deserializedCertificateList.get(1).getSubjectX500Principal()); | public void can_deserialize_serialized_pem_certificate_list() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject1 = new X500Principal("CN=myservice1");
X509Certificate cert1 = TestUtils.createCertificate(keypair, subject1);
X500Principal subject2 = new X500Principal("CN=myservice2");
X509Certificate cert2 = TestUtils.createCertificate(keypair, subject2);
List<X509Certificate> certificateList = Arrays.asList(cert1, cert2);
String pem = X509CertificateUtils.toPem(certificateList);
List<X509Certificate> deserializedCertificateList = X509CertificateUtils.certificateListFromPem(pem);
assertEquals(2, certificateList.size());
assertEquals(subject1, deserializedCertificateList.get(0).getSubjectX500Principal());
assertEquals(subject2, deserializedCertificateList.get(1).getSubjectX500Principal());
} | class X509CertificateUtilsTest {
@Test
public void can_deserialize_serialized_pem_certificate() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject = new X500Principal("CN=myservice");
X509Certificate cert = TestUtils.createCertificate(keypair, subject);
assertEquals(subject, cert.getSubjectX500Principal());
String pem = X509CertificateUtils.toPem(cert);
assertThat(pem, containsString("BEGIN CERTIFICATE"));
assertThat(pem, containsString("END CERTIFICATE"));
X509Certificate deserializedCert = X509CertificateUtils.fromPem(pem);
assertEquals(subject, deserializedCert.getSubjectX500Principal());
}
// Fix: the @Test annotation was duplicated; @Test is not @Repeatable, so a repeated
// annotation on the same method does not compile.
@Test
public void can_list_subject_alternative_names() {
    KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
    X500Principal subject = new X500Principal("CN=myservice");
    SubjectAlternativeName san = new SubjectAlternativeName(DNS_NAME, "dns-san");
    X509Certificate cert = X509CertificateBuilder
            .fromKeypair(
                    keypair,
                    subject,
                    Instant.now(),
                    Instant.now().plus(1, ChronoUnit.DAYS),
                    SignatureAlgorithm.SHA512_WITH_ECDSA,
                    1)
            .addSubjectAlternativeName(san)
            .build();
    List<SubjectAlternativeName> sans = X509CertificateUtils.getSubjectAlternativeNames(cert);
    assertThat(sans.size(), is(1));
    assertThat(sans.get(0), equalTo(san));
}
} | class X509CertificateUtilsTest {
@Test
public void can_deserialize_serialized_pem_certificate() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject = new X500Principal("CN=myservice");
X509Certificate cert = TestUtils.createCertificate(keypair, subject);
assertEquals(subject, cert.getSubjectX500Principal());
String pem = X509CertificateUtils.toPem(cert);
assertThat(pem, containsString("BEGIN CERTIFICATE"));
assertThat(pem, containsString("END CERTIFICATE"));
X509Certificate deserializedCert = X509CertificateUtils.fromPem(pem);
assertEquals(subject, deserializedCert.getSubjectX500Principal());
}
// Fix: the @Test annotation was duplicated; @Test is not @Repeatable, so a repeated
// annotation on the same method does not compile.
@Test
public void can_list_subject_alternative_names() {
    KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
    X500Principal subject = new X500Principal("CN=myservice");
    SubjectAlternativeName san = new SubjectAlternativeName(DNS_NAME, "dns-san");
    X509Certificate cert = X509CertificateBuilder
            .fromKeypair(
                    keypair,
                    subject,
                    Instant.now(),
                    Instant.now().plus(1, ChronoUnit.DAYS),
                    SignatureAlgorithm.SHA512_WITH_ECDSA,
                    BigInteger.valueOf(1))
            .addSubjectAlternativeName(san)
            .build();
    List<SubjectAlternativeName> sans = X509CertificateUtils.getSubjectAlternativeNames(cert);
    assertThat(sans.size(), is(1));
    assertThat(sans.get(0), equalTo(san));
}
} |
The same method can be used, although it will have to use `PrivateKey.getAlgorithm()` to determine how to serialize the key. I'll look into it later. | public static String toPem(PrivateKey privateKey) {
try (StringWriter stringWriter = new StringWriter(); JcaPEMWriter pemWriter = new JcaPEMWriter(stringWriter)) {
pemWriter.writeObject(new PemObject("RSA PRIVATE KEY", getPkcs1Bytes(privateKey)));
pemWriter.flush();
return stringWriter.toString();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} | pemWriter.writeObject(new PemObject("RSA PRIVATE KEY", getPkcs1Bytes(privateKey))); | public static String toPem(PrivateKey privateKey) {
try (StringWriter stringWriter = new StringWriter(); JcaPEMWriter pemWriter = new JcaPEMWriter(stringWriter)) {
pemWriter.writeObject(new PemObject("RSA PRIVATE KEY", getPkcs1Bytes(privateKey)));
pemWriter.flush();
return stringWriter.toString();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
} | class KeyUtils {
private KeyUtils() {}
public static KeyPair generateKeypair(KeyAlgorithm algorithm, int keySize) {
try {
KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
if (keySize != -1) {
keyGen.initialize(keySize);
}
return keyGen.genKeyPair();
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
}
}
public static KeyPair generateKeypair(KeyAlgorithm algorithm) {
return generateKeypair(algorithm, -1);
}
public static PublicKey extractPublicKey(PrivateKey privateKey) {
String algorithm = privateKey.getAlgorithm();
try {
if (algorithm.equals(RSA.getAlgorithmName())) {
KeyFactory keyFactory = KeyFactory.getInstance(RSA.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
RSAPrivateCrtKey rsaPrivateCrtKey = (RSAPrivateCrtKey) privateKey;
RSAPublicKeySpec keySpec = new RSAPublicKeySpec(rsaPrivateCrtKey.getModulus(), rsaPrivateCrtKey.getPublicExponent());
return keyFactory.generatePublic(keySpec);
} else if (algorithm.equals(EC.getAlgorithmName())) {
KeyFactory keyFactory = KeyFactory.getInstance(EC.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
BCECPrivateKey ecPrivateKey = (BCECPrivateKey) privateKey;
ECParameterSpec ecParameterSpec = ecPrivateKey.getParameters();
ECPoint ecPoint = new FixedPointCombMultiplier().multiply(ecParameterSpec.getG(), ecPrivateKey.getD());
ECPublicKeySpec keySpec = new ECPublicKeySpec(ecPoint, ecParameterSpec);
return keyFactory.generatePublic(keySpec);
} else {
throw new IllegalArgumentException("Unexpected key algorithm: " + algorithm);
}
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
}
}
/**
 * Parses a private key from its PEM encoding.
 *
 * Handles both PKCS#8 ("BEGIN PRIVATE KEY") and the legacy OpenSSL key pair format
 * ("BEGIN RSA/EC PRIVATE KEY"). The key algorithm is taken from the algorithm identifier
 * embedded in the encoded key, so this works for any algorithm the provider supports;
 * the previous implementation hardcoded an RSA KeyFactory for PKCS#8 input and failed
 * on PKCS#8-encoded EC keys.
 *
 * @param pem the PEM-encoded private key
 * @return the parsed private key
 * @throws IllegalArgumentException if the PEM content is not a recognized private key type
 */
public static PrivateKey fromPemEncodedPrivateKey(String pem) {
    try (PEMParser parser = new PEMParser(new StringReader(pem))) {
        Object pemObject = parser.readObject();
        // JcaPEMKeyConverter picks the correct KeyFactory from the key's algorithm OID
        JcaPEMKeyConverter pemConverter = new JcaPEMKeyConverter();
        if (pemObject instanceof PrivateKeyInfo) {
            return pemConverter.getPrivateKey((PrivateKeyInfo) pemObject);
        } else if (pemObject instanceof PEMKeyPair) {
            return pemConverter.getPrivateKey(((PEMKeyPair) pemObject).getPrivateKeyInfo());
        }
        throw new IllegalArgumentException("Unexpected type of PEM type: " + pemObject);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
private static byte[] getPkcs1Bytes(PrivateKey privateKey) throws IOException{
byte[] privBytes = privateKey.getEncoded();
PrivateKeyInfo pkInfo = PrivateKeyInfo.getInstance(privBytes);
ASN1Encodable encodable = pkInfo.parsePrivateKey();
ASN1Primitive primitive = encodable.toASN1Primitive();
return primitive.getEncoded();
}
} | class KeyUtils {
private KeyUtils() {}
public static KeyPair generateKeypair(KeyAlgorithm algorithm, int keySize) {
try {
KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
if (keySize != -1) {
keyGen.initialize(keySize);
}
return keyGen.genKeyPair();
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
}
}
public static KeyPair generateKeypair(KeyAlgorithm algorithm) {
return generateKeypair(algorithm, -1);
}
public static PublicKey extractPublicKey(PrivateKey privateKey) {
String algorithm = privateKey.getAlgorithm();
try {
if (algorithm.equals(RSA.getAlgorithmName())) {
KeyFactory keyFactory = KeyFactory.getInstance(RSA.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
RSAPrivateCrtKey rsaPrivateCrtKey = (RSAPrivateCrtKey) privateKey;
RSAPublicKeySpec keySpec = new RSAPublicKeySpec(rsaPrivateCrtKey.getModulus(), rsaPrivateCrtKey.getPublicExponent());
return keyFactory.generatePublic(keySpec);
} else if (algorithm.equals(EC.getAlgorithmName())) {
KeyFactory keyFactory = KeyFactory.getInstance(EC.getAlgorithmName(), BouncyCastleProviderHolder.getInstance());
BCECPrivateKey ecPrivateKey = (BCECPrivateKey) privateKey;
ECParameterSpec ecParameterSpec = ecPrivateKey.getParameters();
ECPoint ecPoint = new FixedPointCombMultiplier().multiply(ecParameterSpec.getG(), ecPrivateKey.getD());
ECPublicKeySpec keySpec = new ECPublicKeySpec(ecPoint, ecParameterSpec);
return keyFactory.generatePublic(keySpec);
} else {
throw new IllegalArgumentException("Unexpected key algorithm: " + algorithm);
}
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
}
}
/**
 * Parses a private key from its PEM encoding.
 *
 * Handles both PKCS#8 ("BEGIN PRIVATE KEY") and the legacy OpenSSL key pair format
 * ("BEGIN RSA/EC PRIVATE KEY"). The key algorithm is taken from the algorithm identifier
 * embedded in the encoded key, so this works for any algorithm the provider supports;
 * the previous implementation hardcoded an RSA KeyFactory for PKCS#8 input and failed
 * on PKCS#8-encoded EC keys.
 *
 * @param pem the PEM-encoded private key
 * @return the parsed private key
 * @throws IllegalArgumentException if the PEM content is not a recognized private key type
 */
public static PrivateKey fromPemEncodedPrivateKey(String pem) {
    try (PEMParser parser = new PEMParser(new StringReader(pem))) {
        Object pemObject = parser.readObject();
        // JcaPEMKeyConverter picks the correct KeyFactory from the key's algorithm OID
        JcaPEMKeyConverter pemConverter = new JcaPEMKeyConverter();
        if (pemObject instanceof PrivateKeyInfo) {
            return pemConverter.getPrivateKey((PrivateKeyInfo) pemObject);
        } else if (pemObject instanceof PEMKeyPair) {
            return pemConverter.getPrivateKey(((PEMKeyPair) pemObject).getPrivateKeyInfo());
        }
        throw new IllegalArgumentException("Unexpected type of PEM type: " + pemObject);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
private static byte[] getPkcs1Bytes(PrivateKey privateKey) throws IOException{
byte[] privBytes = privateKey.getEncoded();
PrivateKeyInfo pkInfo = PrivateKeyInfo.getInstance(privBytes);
ASN1Encodable encodable = pkInfo.parsePrivateKey();
ASN1Primitive primitive = encodable.toASN1Primitive();
return primitive.getEncoded();
}
} |
Fixed | public void can_deserialize_serialized_pem_certificate_list() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject1 = new X500Principal("CN=myservice");
X509Certificate cert1 = TestUtils.createCertificate(keypair, subject1);
X500Principal subject2 = new X500Principal("CN=myservice");
X509Certificate cert2 = TestUtils.createCertificate(keypair, subject2);
List<X509Certificate> certificateList = Arrays.asList(cert1, cert2);
String pem = X509CertificateUtils.toPem(certificateList);
List<X509Certificate> deserializedCertificateList = X509CertificateUtils.certificateListFromPem(pem);
assertEquals(2, certificateList.size());
assertEquals(subject1, deserializedCertificateList.get(0).getSubjectX500Principal());
assertEquals(subject2, deserializedCertificateList.get(1).getSubjectX500Principal());
} | assertEquals(subject2, deserializedCertificateList.get(1).getSubjectX500Principal()); | public void can_deserialize_serialized_pem_certificate_list() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject1 = new X500Principal("CN=myservice1");
X509Certificate cert1 = TestUtils.createCertificate(keypair, subject1);
X500Principal subject2 = new X500Principal("CN=myservice2");
X509Certificate cert2 = TestUtils.createCertificate(keypair, subject2);
List<X509Certificate> certificateList = Arrays.asList(cert1, cert2);
String pem = X509CertificateUtils.toPem(certificateList);
List<X509Certificate> deserializedCertificateList = X509CertificateUtils.certificateListFromPem(pem);
assertEquals(2, certificateList.size());
assertEquals(subject1, deserializedCertificateList.get(0).getSubjectX500Principal());
assertEquals(subject2, deserializedCertificateList.get(1).getSubjectX500Principal());
} | class X509CertificateUtilsTest {
@Test
public void can_deserialize_serialized_pem_certificate() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject = new X500Principal("CN=myservice");
X509Certificate cert = TestUtils.createCertificate(keypair, subject);
assertEquals(subject, cert.getSubjectX500Principal());
String pem = X509CertificateUtils.toPem(cert);
assertThat(pem, containsString("BEGIN CERTIFICATE"));
assertThat(pem, containsString("END CERTIFICATE"));
X509Certificate deserializedCert = X509CertificateUtils.fromPem(pem);
assertEquals(subject, deserializedCert.getSubjectX500Principal());
}
@Test
@Test
public void can_list_subject_alternative_names() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject = new X500Principal("CN=myservice");
SubjectAlternativeName san = new SubjectAlternativeName(DNS_NAME, "dns-san");
X509Certificate cert = X509CertificateBuilder
.fromKeypair(
keypair,
subject,
Instant.now(),
Instant.now().plus(1, ChronoUnit.DAYS),
SignatureAlgorithm.SHA512_WITH_ECDSA,
1)
.addSubjectAlternativeName(san)
.build();
List<SubjectAlternativeName> sans = X509CertificateUtils.getSubjectAlternativeNames(cert);
assertThat(sans.size(), is(1));
assertThat(sans.get(0), equalTo(san));
}
} | class X509CertificateUtilsTest {
@Test
public void can_deserialize_serialized_pem_certificate() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject = new X500Principal("CN=myservice");
X509Certificate cert = TestUtils.createCertificate(keypair, subject);
assertEquals(subject, cert.getSubjectX500Principal());
String pem = X509CertificateUtils.toPem(cert);
assertThat(pem, containsString("BEGIN CERTIFICATE"));
assertThat(pem, containsString("END CERTIFICATE"));
X509Certificate deserializedCert = X509CertificateUtils.fromPem(pem);
assertEquals(subject, deserializedCert.getSubjectX500Principal());
}
@Test
@Test
public void can_list_subject_alternative_names() {
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
X500Principal subject = new X500Principal("CN=myservice");
SubjectAlternativeName san = new SubjectAlternativeName(DNS_NAME, "dns-san");
X509Certificate cert = X509CertificateBuilder
.fromKeypair(
keypair,
subject,
Instant.now(),
Instant.now().plus(1, ChronoUnit.DAYS),
SignatureAlgorithm.SHA512_WITH_ECDSA,
BigInteger.valueOf(1))
.addSubjectAlternativeName(san)
.build();
List<SubjectAlternativeName> sans = X509CertificateUtils.getSubjectAlternativeNames(cert);
assertThat(sans.size(), is(1));
assertThat(sans.get(0), equalTo(san));
}
} |
💯 thanks! | private NodeResources freeCapacityWith(List<Move> moves, Node host) {
NodeResources resources = hostCapacity.freeCapacityOf(host);
for (Move move : moves) {
if ( ! move.toHost().equals(host)) continue;
resources = resources.subtract(move.node().resources());
}
for (Move move : moves) {
if ( ! move.fromHost().equals(host)) continue;
resources = resources.add(move.fromHost().resources());
}
return resources;
} | resources = resources.add(move.fromHost().resources()); | private NodeResources freeCapacityWith(List<Move> moves, Node host) {
NodeResources resources = hostCapacity.freeCapacityOf(host);
for (Move move : moves) {
if ( ! move.toHost().equals(host)) continue;
resources = resources.subtract(move.node().resources());
}
for (Move move : moves) {
if ( ! move.fromHost().equals(host)) continue;
resources = resources.add(move.node().resources());
}
return resources;
} | class CapacitySolver {
private final HostCapacity hostCapacity;
private final int maxIterations;
private int iterations = 0;
/**
 * @param hostCapacity provides the current allocation and free capacity of each host
 * @param maxIterations upper bound on search iterations before the solver gives up
 */
CapacitySolver(HostCapacity hostCapacity, int maxIterations) {
this.hostCapacity = hostCapacity;
this.maxIterations = maxIterations;
}
/** The map of subproblem solutions already found. The value is null when there is no solution. */
private Map<SolutionKey, List<Move>> solutions = new HashMap<>();
/**
* Finds the shortest sequence of moves which makes room for the given node on the given host,
* assuming the given moves already made over the given hosts' current allocation.
*
* @param node the node to make room for
* @param host the target host to make room on
* @param hosts the hosts onto which we can move nodes
* @param movesConsidered the moves already being considered to add as part of this scenario
* (after any moves made by this)
* @param movesMade the moves already made in this scenario
* @return the list of movesMade with the moves needed for this appended, in the order they should be performed,
* or null if no sequence could be found
*/
List<Move> makeRoomFor(Node node, Node host, List<Node> hosts, List<Move> movesConsidered, List<Move> movesMade) {
    SolutionKey solutionKey = new SolutionKey(node, host, movesConsidered, movesMade);
    // Must use containsKey, not get() == null: a stored null is a cached "no solution"
    // result, and treating it as a cache miss would recompute every failed subproblem.
    if ( ! solutions.containsKey(solutionKey)) {
        solutions.put(solutionKey, findRoomFor(node, host, hosts, movesConsidered, movesMade));
    }
    return solutions.get(solutionKey);
}
/**
 * Searches for a shortest set of moves of children off {@code host} that frees enough
 * capacity for {@code node}, trying subsets of the host's children up to size 5.
 * Returns the moves to append to movesMade (empty if there is already room),
 * or null if no solution was found within the iteration budget.
 */
private List<Move> findRoomFor(Node node, Node host, List<Node> hosts,
List<Move> movesConsidered, List<Move> movesMade) {
// Give up once the global iteration budget is exhausted
if (iterations++ > maxIterations)
return null;
if ( ! host.resources().satisfies(node.resources())) return null;
NodeResources freeCapacity = freeCapacityWith(movesMade, host);
if (freeCapacity.satisfies(node.resources())) return List.of();
List<Move> shortest = null;
// Try all subsets (size <= 5) of this host's children as candidates to move away
for (var i = subsets(hostCapacity.allNodes().childrenOf(host), 5); i.hasNext(); ) {
List<Node> childrenToMove = i.next();
// Skip subsets that would not free enough capacity even if fully moved
if ( ! addResourcesOf(childrenToMove, freeCapacity).satisfies(node.resources())) continue;
List<Move> moves = move(childrenToMove, host, hosts, movesConsidered, movesMade);
if (moves == null) continue;
if (shortest == null || moves.size() < shortest.size())
shortest = moves;
}
if (shortest == null) return null;
return append(movesMade, shortest);
}
private List<Move> move(List<Node> nodes, Node host, List<Node> hosts, List<Move> movesConsidered, List<Move> movesMade) {
List<Move> moves = new ArrayList<>();
for (Node childToMove : nodes) {
List<Move> childMoves = move(childToMove, host, hosts, movesConsidered, append(movesMade, moves));
if (childMoves == null) return null;
moves.addAll(childMoves);
}
return moves;
}
private List<Move> move(Node node, Node host, List<Node> hosts, List<Move> movesConsidered, List<Move> movesMade) {
if (contains(node, movesConsidered)) return null;
if (contains(node, movesMade)) return null;
List<Move> shortest = null;
for (Node target : hosts) {
if (target.equals(host)) continue;
Move move = new Move(node, host, target);
List<Move> childMoves = makeRoomFor(node, target, hosts, append(movesConsidered, move), movesMade);
if (childMoves == null) continue;
if (shortest == null || shortest.size() > childMoves.size() + 1) {
shortest = new ArrayList<>(childMoves);
shortest.add(move);
}
}
return shortest;
}
private boolean contains(Node node, List<Move> moves) {
return moves.stream().anyMatch(move -> move.node().equals(node));
}
private NodeResources addResourcesOf(List<Node> nodes, NodeResources resources) {
for (Node node : nodes)
resources = resources.add(node.resources());
return resources;
}
private Iterator<List<Node>> subsets(NodeList nodes, int maxSize) {
return new SubsetIterator(nodes.asList(), maxSize);
}
private List<Move> append(List<Move> a, List<Move> b) {
List<Move> list = new ArrayList<>();
list.addAll(a);
list.addAll(b);
return list;
}
private List<Move> append(List<Move> moves, Move move) {
List<Move> list = new ArrayList<>(moves);
list.add(move);
return list;
}
} | class CapacitySolver {
private final HostCapacity hostCapacity;
private final int maxIterations;
private int iterations = 0;
CapacitySolver(HostCapacity hostCapacity, int maxIterations) {
this.hostCapacity = hostCapacity;
this.maxIterations = maxIterations;
}
/** The map of subproblem solutions already found. The value is null when there is no solution. */
private Map<SolutionKey, List<Move>> solutions = new HashMap<>();
/**
* Finds the shortest sequence of moves which makes room for the given node on the given host,
* assuming the given moves already made over the given hosts' current allocation.
*
* @param node the node to make room for
* @param host the target host to make room on
* @param hosts the hosts onto which we can move nodes
* @param movesConsidered the moves already being considered to add as part of this scenario
* (after any moves made by this)
* @param movesMade the moves already made in this scenario
* @return the list of movesMade with the moves needed for this appended, in the order they should be performed,
* or null if no sequence could be found
*/
List<Move> makeRoomFor(Node node, Node host, List<Node> hosts, List<Move> movesConsidered, List<Move> movesMade) {
SolutionKey solutionKey = new SolutionKey(node, host, movesConsidered, movesMade);
List<Move> solution = solutions.get(solutionKey);
if (solution == null) {
solution = findRoomFor(node, host, hosts, movesConsidered, movesMade);
solutions.put(solutionKey, solution);
}
return solution;
}
private List<Move> findRoomFor(Node node, Node host, List<Node> hosts,
List<Move> movesConsidered, List<Move> movesMade) {
if (iterations++ > maxIterations)
return null;
if ( ! host.resources().satisfies(node.resources())) return null;
NodeResources freeCapacity = freeCapacityWith(movesMade, host);
if (freeCapacity.satisfies(node.resources())) return List.of();
List<Move> shortest = null;
for (var i = subsets(hostCapacity.allNodes().childrenOf(host), 5); i.hasNext(); ) {
List<Node> childrenToMove = i.next();
if ( ! addResourcesOf(childrenToMove, freeCapacity).satisfies(node.resources())) continue;
List<Move> moves = move(childrenToMove, host, hosts, movesConsidered, movesMade);
if (moves == null) continue;
if (shortest == null || moves.size() < shortest.size())
shortest = moves;
}
if (shortest == null) return null;
return append(movesMade, shortest);
}
private List<Move> move(List<Node> nodes, Node host, List<Node> hosts, List<Move> movesConsidered, List<Move> movesMade) {
List<Move> moves = new ArrayList<>();
for (Node childToMove : nodes) {
List<Move> childMoves = move(childToMove, host, hosts, movesConsidered, append(movesMade, moves));
if (childMoves == null) return null;
moves.addAll(childMoves);
}
return moves;
}
private List<Move> move(Node node, Node host, List<Node> hosts, List<Move> movesConsidered, List<Move> movesMade) {
if (contains(node, movesConsidered)) return null;
if (contains(node, movesMade)) return null;
List<Move> shortest = null;
for (Node target : hosts) {
if (target.equals(host)) continue;
Move move = new Move(node, host, target);
List<Move> childMoves = makeRoomFor(node, target, hosts, append(movesConsidered, move), movesMade);
if (childMoves == null) continue;
if (shortest == null || shortest.size() > childMoves.size() + 1) {
shortest = new ArrayList<>(childMoves);
shortest.add(move);
}
}
return shortest;
}
private boolean contains(Node node, List<Move> moves) {
return moves.stream().anyMatch(move -> move.node().equals(node));
}
private NodeResources addResourcesOf(List<Node> nodes, NodeResources resources) {
for (Node node : nodes)
resources = resources.add(node.resources());
return resources;
}
private Iterator<List<Node>> subsets(NodeList nodes, int maxSize) {
return new SubsetIterator(nodes.asList(), maxSize);
}
private List<Move> append(List<Move> a, List<Move> b) {
List<Move> list = new ArrayList<>();
list.addAll(a);
list.addAll(b);
return list;
}
private List<Move> append(List<Move> moves, Move move) {
List<Move> list = new ArrayList<>(moves);
list.add(move);
return list;
}
} |
Fixed | public X509CertificateBuilder setBasicConstraints(boolean isCritical, boolean isCertAuthorityCertificate) {
this.basicConstraintsExtension = new BasicConstraintsExtension(isCritical, isCertAuthorityCertificate);
return this;
} | } | public X509CertificateBuilder setBasicConstraints(boolean isCritical, boolean isCertAuthorityCertificate) {
this.basicConstraintsExtension = new BasicConstraintsExtension(isCritical, isCertAuthorityCertificate);
return this;
} | class X509CertificateBuilder {
private final long serialNumber;
private final SignatureAlgorithm signingAlgorithm;
private final PrivateKey caPrivateKey;
private final Instant notBefore;
private final Instant notAfter;
private final List<SubjectAlternativeName> subjectAlternativeNames = new ArrayList<>();
private final X500Principal issuer;
private final X500Principal subject;
private final PublicKey certPublicKey;
private BasicConstraintsExtension basicConstraintsExtension;
private X509CertificateBuilder(X500Principal issuer,
X500Principal subject,
Instant notBefore,
Instant notAfter,
PublicKey certPublicKey,
PrivateKey caPrivateKey,
SignatureAlgorithm signingAlgorithm,
long serialNumber) {
this.issuer = issuer;
this.subject = subject;
this.notBefore = notBefore;
this.notAfter = notAfter;
this.certPublicKey = certPublicKey;
this.caPrivateKey = caPrivateKey;
this.signingAlgorithm = signingAlgorithm;
this.serialNumber = serialNumber;
}
public static X509CertificateBuilder fromCsr(Pkcs10Csr csr,
X500Principal caIssuer,
Instant notBefore,
Instant notAfter,
PrivateKey caPrivateKey,
SignatureAlgorithm signingAlgorithm,
long serialNumber) {
try {
PKCS10CertificationRequest bcCsr = csr.getBcCsr();
PublicKey publicKey = new JcaPKCS10CertificationRequest(bcCsr)
.setProvider(BouncyCastleProviderHolder.getInstance())
.getPublicKey();
return new X509CertificateBuilder(caIssuer,
new X500Principal(bcCsr.getSubject().getEncoded()),
notBefore,
notAfter,
publicKey,
caPrivateKey,
signingAlgorithm,
serialNumber);
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public static X509CertificateBuilder fromKeypair(KeyPair keyPair,
X500Principal subject,
Instant notBefore,
Instant notAfter,
SignatureAlgorithm signingAlgorithm,
long serialNumber) {
return new X509CertificateBuilder(subject,
subject,
notBefore,
notAfter,
keyPair.getPublic(),
keyPair.getPrivate(),
signingAlgorithm,
serialNumber);
}
public X509CertificateBuilder addSubjectAlternativeName(String dnsName) {
this.subjectAlternativeNames.add(new SubjectAlternativeName(DNS_NAME, dnsName));
return this;
}
public X509CertificateBuilder addSubjectAlternativeName(SubjectAlternativeName san) {
this.subjectAlternativeNames.add(san);
return this;
}
public X509Certificate build() {
try {
JcaX509v3CertificateBuilder jcaCertBuilder = new JcaX509v3CertificateBuilder(
issuer, BigInteger.valueOf(serialNumber), Date.from(notBefore), Date.from(notAfter), subject, certPublicKey);
if (basicConstraintsExtension != null) {
jcaCertBuilder.addExtension(
Extension.basicConstraints,
basicConstraintsExtension.isCritical,
new BasicConstraints(basicConstraintsExtension.isCertAuthorityCertificate));
}
if (!subjectAlternativeNames.isEmpty()) {
GeneralNames generalNames = new GeneralNames(
subjectAlternativeNames.stream()
.map(SubjectAlternativeName::toGeneralName)
.toArray(GeneralName[]::new));
jcaCertBuilder.addExtension(Extension.subjectAlternativeName, false, generalNames);
}
ContentSigner contentSigner = new JcaContentSignerBuilder(signingAlgorithm.getAlgorithmName())
.setProvider(BouncyCastleProviderHolder.getInstance())
.build(caPrivateKey);
return new JcaX509CertificateConverter()
.setProvider(BouncyCastleProviderHolder.getInstance())
.getCertificate(jcaCertBuilder.build(contentSigner));
} catch (OperatorException | GeneralSecurityException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
} | class X509CertificateBuilder {
private final BigInteger serialNumber;
private final SignatureAlgorithm signingAlgorithm;
private final PrivateKey caPrivateKey;
private final Instant notBefore;
private final Instant notAfter;
private final List<SubjectAlternativeName> subjectAlternativeNames = new ArrayList<>();
private final X500Principal issuer;
private final X500Principal subject;
private final PublicKey certPublicKey;
private BasicConstraintsExtension basicConstraintsExtension;
private X509CertificateBuilder(X500Principal issuer,
X500Principal subject,
Instant notBefore,
Instant notAfter,
PublicKey certPublicKey,
PrivateKey caPrivateKey,
SignatureAlgorithm signingAlgorithm,
BigInteger serialNumber) {
this.issuer = issuer;
this.subject = subject;
this.notBefore = notBefore;
this.notAfter = notAfter;
this.certPublicKey = certPublicKey;
this.caPrivateKey = caPrivateKey;
this.signingAlgorithm = signingAlgorithm;
this.serialNumber = serialNumber;
}
public static X509CertificateBuilder fromCsr(Pkcs10Csr csr,
X500Principal caIssuer,
Instant notBefore,
Instant notAfter,
PrivateKey caPrivateKey,
SignatureAlgorithm signingAlgorithm,
BigInteger serialNumber) {
try {
PKCS10CertificationRequest bcCsr = csr.getBcCsr();
PublicKey publicKey = new JcaPKCS10CertificationRequest(bcCsr)
.setProvider(BouncyCastleProviderHolder.getInstance())
.getPublicKey();
return new X509CertificateBuilder(caIssuer,
new X500Principal(bcCsr.getSubject().getEncoded()),
notBefore,
notAfter,
publicKey,
caPrivateKey,
signingAlgorithm,
serialNumber);
} catch (GeneralSecurityException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public static X509CertificateBuilder fromKeypair(KeyPair keyPair,
X500Principal subject,
Instant notBefore,
Instant notAfter,
SignatureAlgorithm signingAlgorithm,
BigInteger serialNumber) {
return new X509CertificateBuilder(subject,
subject,
notBefore,
notAfter,
keyPair.getPublic(),
keyPair.getPrivate(),
signingAlgorithm,
serialNumber);
}
/**
* @return generates a cryptographically secure positive serial number up to 128 bits
*/
public static BigInteger generateRandomSerialNumber() {
return new BigInteger(128, new SecureRandom());
}
public X509CertificateBuilder addSubjectAlternativeName(String dnsName) {
this.subjectAlternativeNames.add(new SubjectAlternativeName(DNS_NAME, dnsName));
return this;
}
public X509CertificateBuilder addSubjectAlternativeName(SubjectAlternativeName san) {
this.subjectAlternativeNames.add(san);
return this;
}
public X509CertificateBuilder setIsCertAuthority(boolean isCertAuthority) {
return setBasicConstraints(true, isCertAuthority);
}
public X509Certificate build() {
try {
JcaX509v3CertificateBuilder jcaCertBuilder = new JcaX509v3CertificateBuilder(
issuer, serialNumber, Date.from(notBefore), Date.from(notAfter), subject, certPublicKey);
if (basicConstraintsExtension != null) {
jcaCertBuilder.addExtension(
Extension.basicConstraints,
basicConstraintsExtension.isCritical,
new BasicConstraints(basicConstraintsExtension.isCertAuthorityCertificate));
}
if (!subjectAlternativeNames.isEmpty()) {
GeneralNames generalNames = new GeneralNames(
subjectAlternativeNames.stream()
.map(SubjectAlternativeName::toGeneralName)
.toArray(GeneralName[]::new));
jcaCertBuilder.addExtension(Extension.subjectAlternativeName, false, generalNames);
}
ContentSigner contentSigner = new JcaContentSignerBuilder(signingAlgorithm.getAlgorithmName())
.setProvider(BouncyCastleProviderHolder.getInstance())
.build(caPrivateKey);
return new JcaX509CertificateConverter()
.setProvider(BouncyCastleProviderHolder.getInstance())
.getCertificate(jcaCertBuilder.build(contentSigner));
} catch (OperatorException | GeneralSecurityException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
} |
Why is setExcludeProtocols() used and not setExcludeCipherSuites()? Am I missing something? | private SslConnectionFactory newSslConnectionFactory() {
Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeProtocols(excludedCiphersWithoutTlsRsaExclusion);
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
return new SslConnectionFactory(factory, HttpVersion.HTTP_1_1.asString());
} | factory.setExcludeProtocols(excludedCiphersWithoutTlsRsaExclusion); | private SslConnectionFactory newSslConnectionFactory() {
Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeProtocols(excludedCiphersWithoutTlsRsaExclusion);
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
return new SslConnectionFactory(factory, HttpVersion.HTTP_1_1.asString());
} | class ConnectorFactory {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
@Inject
public ConnectorFactory(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
public ConnectorConfig getConnectorConfig() {
return connectorConfig;
}
public ServerConnector createConnector(final Metric metric, final Server server, final ServerSocketChannel ch) {
ServerConnector connector;
if (connectorConfig.ssl().enabled()) {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newSslConnectionFactory(),
newHttpConnectionFactory());
} else {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newHttpConnectionFactory());
}
connector.setPort(connectorConfig.listenPort());
connector.setName(connectorConfig.name());
connector.setAcceptQueueSize(connectorConfig.acceptQueueSize());
connector.setReuseAddress(connectorConfig.reuseAddress());
connector.setIdleTimeout((long)(connectorConfig.idleTimeout() * 1000.0));
connector.setStopTimeout((long)(connectorConfig.stopTimeout() * 1000.0));
return connector;
}
private HttpConnectionFactory newHttpConnectionFactory() {
HttpConfiguration httpConfig = new HttpConfiguration();
httpConfig.setSendDateHeader(true);
httpConfig.setSendServerVersion(false);
httpConfig.setSendXPoweredBy(false);
httpConfig.setHeaderCacheSize(connectorConfig.headerCacheSize());
httpConfig.setOutputBufferSize(connectorConfig.outputBufferSize());
httpConfig.setRequestHeaderSize(connectorConfig.requestHeaderSize());
httpConfig.setResponseHeaderSize(connectorConfig.responseHeaderSize());
if (connectorConfig.ssl().enabled()) {
httpConfig.addCustomizer(new SecureRequestCustomizer());
}
return new HttpConnectionFactory(httpConfig);
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | class ConnectorFactory {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
@Inject
public ConnectorFactory(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
public ConnectorConfig getConnectorConfig() {
return connectorConfig;
}
public ServerConnector createConnector(final Metric metric, final Server server, final ServerSocketChannel ch) {
ServerConnector connector;
if (connectorConfig.ssl().enabled()) {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newSslConnectionFactory(),
newHttpConnectionFactory());
} else {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newHttpConnectionFactory());
}
connector.setPort(connectorConfig.listenPort());
connector.setName(connectorConfig.name());
connector.setAcceptQueueSize(connectorConfig.acceptQueueSize());
connector.setReuseAddress(connectorConfig.reuseAddress());
connector.setIdleTimeout((long)(connectorConfig.idleTimeout() * 1000.0));
connector.setStopTimeout((long)(connectorConfig.stopTimeout() * 1000.0));
return connector;
}
private HttpConnectionFactory newHttpConnectionFactory() {
HttpConfiguration httpConfig = new HttpConfiguration();
httpConfig.setSendDateHeader(true);
httpConfig.setSendServerVersion(false);
httpConfig.setSendXPoweredBy(false);
httpConfig.setHeaderCacheSize(connectorConfig.headerCacheSize());
httpConfig.setOutputBufferSize(connectorConfig.outputBufferSize());
httpConfig.setRequestHeaderSize(connectorConfig.requestHeaderSize());
httpConfig.setResponseHeaderSize(connectorConfig.responseHeaderSize());
if (connectorConfig.ssl().enabled()) {
httpConfig.addCustomizer(new SecureRequestCustomizer());
}
return new HttpConnectionFactory(httpConfig);
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} |
Hmm, good point. Looks like it should be cipher suites: https://github.com/eclipse/jetty.project/pull/2855/files | private SslConnectionFactory newSslConnectionFactory() {
Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeProtocols(excludedCiphersWithoutTlsRsaExclusion);
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
return new SslConnectionFactory(factory, HttpVersion.HTTP_1_1.asString());
} | factory.setExcludeProtocols(excludedCiphersWithoutTlsRsaExclusion); | private SslConnectionFactory newSslConnectionFactory() {
Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeProtocols(excludedCiphersWithoutTlsRsaExclusion);
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
return new SslConnectionFactory(factory, HttpVersion.HTTP_1_1.asString());
} | class ConnectorFactory {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
@Inject
public ConnectorFactory(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
public ConnectorConfig getConnectorConfig() {
return connectorConfig;
}
public ServerConnector createConnector(final Metric metric, final Server server, final ServerSocketChannel ch) {
ServerConnector connector;
if (connectorConfig.ssl().enabled()) {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newSslConnectionFactory(),
newHttpConnectionFactory());
} else {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newHttpConnectionFactory());
}
connector.setPort(connectorConfig.listenPort());
connector.setName(connectorConfig.name());
connector.setAcceptQueueSize(connectorConfig.acceptQueueSize());
connector.setReuseAddress(connectorConfig.reuseAddress());
connector.setIdleTimeout((long)(connectorConfig.idleTimeout() * 1000.0));
connector.setStopTimeout((long)(connectorConfig.stopTimeout() * 1000.0));
return connector;
}
private HttpConnectionFactory newHttpConnectionFactory() {
HttpConfiguration httpConfig = new HttpConfiguration();
httpConfig.setSendDateHeader(true);
httpConfig.setSendServerVersion(false);
httpConfig.setSendXPoweredBy(false);
httpConfig.setHeaderCacheSize(connectorConfig.headerCacheSize());
httpConfig.setOutputBufferSize(connectorConfig.outputBufferSize());
httpConfig.setRequestHeaderSize(connectorConfig.requestHeaderSize());
httpConfig.setResponseHeaderSize(connectorConfig.responseHeaderSize());
if (connectorConfig.ssl().enabled()) {
httpConfig.addCustomizer(new SecureRequestCustomizer());
}
return new HttpConnectionFactory(httpConfig);
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | class ConnectorFactory {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
@Inject
public ConnectorFactory(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
public ConnectorConfig getConnectorConfig() {
return connectorConfig;
}
public ServerConnector createConnector(final Metric metric, final Server server, final ServerSocketChannel ch) {
ServerConnector connector;
if (connectorConfig.ssl().enabled()) {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newSslConnectionFactory(),
newHttpConnectionFactory());
} else {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newHttpConnectionFactory());
}
connector.setPort(connectorConfig.listenPort());
connector.setName(connectorConfig.name());
connector.setAcceptQueueSize(connectorConfig.acceptQueueSize());
connector.setReuseAddress(connectorConfig.reuseAddress());
connector.setIdleTimeout((long)(connectorConfig.idleTimeout() * 1000.0));
connector.setStopTimeout((long)(connectorConfig.stopTimeout() * 1000.0));
return connector;
}
private HttpConnectionFactory newHttpConnectionFactory() {
HttpConfiguration httpConfig = new HttpConfiguration();
httpConfig.setSendDateHeader(true);
httpConfig.setSendServerVersion(false);
httpConfig.setSendXPoweredBy(false);
httpConfig.setHeaderCacheSize(connectorConfig.headerCacheSize());
httpConfig.setOutputBufferSize(connectorConfig.outputBufferSize());
httpConfig.setRequestHeaderSize(connectorConfig.requestHeaderSize());
httpConfig.setResponseHeaderSize(connectorConfig.responseHeaderSize());
if (connectorConfig.ssl().enabled()) {
httpConfig.addCustomizer(new SecureRequestCustomizer());
}
return new HttpConnectionFactory(httpConfig);
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} |
It's a typo, it should have been `setExcludeCipherSuites()` | private SslConnectionFactory newSslConnectionFactory() {
Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeProtocols(excludedCiphersWithoutTlsRsaExclusion);
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
return new SslConnectionFactory(factory, HttpVersion.HTTP_1_1.asString());
} | factory.setExcludeProtocols(excludedCiphersWithoutTlsRsaExclusion); | private SslConnectionFactory newSslConnectionFactory() {
Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeProtocols(excludedCiphersWithoutTlsRsaExclusion);
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
return new SslConnectionFactory(factory, HttpVersion.HTTP_1_1.asString());
} | class ConnectorFactory {
// Container-injected collaborators; immutable after construction.
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
@Inject
public ConnectorFactory(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
public ConnectorConfig getConnectorConfig() {
return connectorConfig;
}
// Creates a Jetty connector on the given server socket channel. When ssl.enabled is
// set, an SSL connection factory is placed in front of the HTTP factory.
public ServerConnector createConnector(final Metric metric, final Server server, final ServerSocketChannel ch) {
ServerConnector connector;
if (connectorConfig.ssl().enabled()) {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newSslConnectionFactory(),
newHttpConnectionFactory());
} else {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newHttpConnectionFactory());
}
connector.setPort(connectorConfig.listenPort());
connector.setName(connectorConfig.name());
connector.setAcceptQueueSize(connectorConfig.acceptQueueSize());
connector.setReuseAddress(connectorConfig.reuseAddress());
// Config timeouts are in (fractional) seconds; Jetty expects milliseconds.
connector.setIdleTimeout((long)(connectorConfig.idleTimeout() * 1000.0));
connector.setStopTimeout((long)(connectorConfig.stopTimeout() * 1000.0));
return connector;
}
// HTTP factory: suppresses server identification headers and applies the configured
// header/buffer sizes.
private HttpConnectionFactory newHttpConnectionFactory() {
HttpConfiguration httpConfig = new HttpConfiguration();
httpConfig.setSendDateHeader(true);
httpConfig.setSendServerVersion(false);
httpConfig.setSendXPoweredBy(false);
httpConfig.setHeaderCacheSize(connectorConfig.headerCacheSize());
httpConfig.setOutputBufferSize(connectorConfig.outputBufferSize());
httpConfig.setRequestHeaderSize(connectorConfig.requestHeaderSize());
httpConfig.setResponseHeaderSize(connectorConfig.responseHeaderSize());
if (connectorConfig.ssl().enabled()) {
// Marks requests arriving on the TLS connector as secure.
httpConfig.addCustomizer(new SecureRequestCustomizer());
}
return new HttpConnectionFactory(httpConfig);
}
// Maps the names of the given config values through 'setter' onto the context factory,
// but only when the list is non-empty (an empty list keeps Jetty's defaults).
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | class ConnectorFactory {
// Container-injected collaborators; immutable after construction.
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
@Inject
public ConnectorFactory(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
public ConnectorConfig getConnectorConfig() {
return connectorConfig;
}
// Creates a Jetty connector on the given server socket channel. When ssl.enabled is
// set, an SSL connection factory is placed in front of the HTTP factory.
public ServerConnector createConnector(final Metric metric, final Server server, final ServerSocketChannel ch) {
ServerConnector connector;
if (connectorConfig.ssl().enabled()) {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newSslConnectionFactory(),
newHttpConnectionFactory());
} else {
connector = new JDiscServerConnector(connectorConfig, metric, server, ch,
newHttpConnectionFactory());
}
connector.setPort(connectorConfig.listenPort());
connector.setName(connectorConfig.name());
connector.setAcceptQueueSize(connectorConfig.acceptQueueSize());
connector.setReuseAddress(connectorConfig.reuseAddress());
// Config timeouts are in (fractional) seconds; Jetty expects milliseconds.
connector.setIdleTimeout((long)(connectorConfig.idleTimeout() * 1000.0));
connector.setStopTimeout((long)(connectorConfig.stopTimeout() * 1000.0));
return connector;
}
// HTTP factory: suppresses server identification headers and applies the configured
// header/buffer sizes.
private HttpConnectionFactory newHttpConnectionFactory() {
HttpConfiguration httpConfig = new HttpConfiguration();
httpConfig.setSendDateHeader(true);
httpConfig.setSendServerVersion(false);
httpConfig.setSendXPoweredBy(false);
httpConfig.setHeaderCacheSize(connectorConfig.headerCacheSize());
httpConfig.setOutputBufferSize(connectorConfig.outputBufferSize());
httpConfig.setRequestHeaderSize(connectorConfig.requestHeaderSize());
httpConfig.setResponseHeaderSize(connectorConfig.responseHeaderSize());
if (connectorConfig.ssl().enabled()) {
// Marks requests arriving on the TLS connector as secure.
httpConfig.addCustomizer(new SecureRequestCustomizer());
}
return new HttpConnectionFactory(httpConfig);
}
// Maps the names of the given config values through 'setter' onto the context factory,
// but only when the list is non-empty (an empty list keeps Jetty's defaults).
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} |
TODO Vespa 7? | public SslContextFactory getInstance(String containerId, int port) {
// Builds the SslContextFactory for one connector: client-auth mode, cipher-suite
// exclusions, and either PEM-file-based stores (new path) or configurator-based
// stores (fallback path).
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
// Jetty's default cipher-suite exclusion list contains the pattern "^TLS_RSA_.*$";
// remove it so TLS_RSA suites remain usable, then write the pruned list back.
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
// Key material supplied as PEM files in the config: build the stores directly.
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
// No PEM files configured: delegate store setup to the injected configurators
// and apply the detailed protocol/cipher lists from the config.
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | public SslContextFactory getInstance(String containerId, int port) {
// Builds the SslContextFactory for one connector: client-auth mode, cipher-suite
// exclusions, and either PEM-file-based stores (new path) or configurator-based
// stores (fallback path).
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
// Jetty's default cipher-suite exclusion list contains the pattern "^TLS_RSA_.*$";
// remove it so TLS_RSA suites remain usable, then write the pruned list back.
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
// Key material supplied as PEM files in the config: build the stores directly.
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
// No PEM files configured: delegate store setup to the injected configurators
// and apply the detailed protocol/cipher lists from the config.
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
// Configuration and store configurators injected at construction time.
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
// NOTE(review): @Override immediately before a private static method is not valid
// Java; the overriding instance method (getInstance) appears to have been elided
// from this excerpt -- confirm against the full source.
@Override
// Builds a JKS truststore with one entry per CA certificate in the configured PEM file.
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()))
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
// Builds a JKS keystore holding the PEM-encoded private key and its certificate
// chain under the alias "default".
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
// Reads a whole file into a String; IOExceptions become unchecked.
// NOTE(review): uses the platform default charset -- consider specifying UTF-8.
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
// Maps the names of the given config values through 'setter', only when non-empty.
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
// Configuration and store configurators injected at construction time.
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
// NOTE(review): @Override immediately before a private static method is not valid
// Java; the overriding instance method (getInstance) appears to have been elided
// from this excerpt -- confirm against the full source.
@Override
// Builds a JKS truststore with one entry per CA certificate in the configured PEM file.
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()));
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
// Builds a JKS keystore holding the PEM-encoded private key and its certificate
// chain under the alias "default".
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
// Reads a whole file into a String; IOExceptions become unchecked.
// NOTE(review): uses the platform default charset -- consider specifying UTF-8.
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
// Maps the names of the given config values through 'setter', only when non-empty.
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | |
I found this a bit hard to read. Some suggestions: - Create a ModuloInteger or something which encapsulates the wraparound - here you need to "manually" wrap around two different pointers. - If you give that class a plus(int i) method which returns the wrapped-around result of adding i to it, you can get rid of index completely and just use needle.plus(i). - I suggest a custom method returning boolean to compare schedules instead of compareTo, for example sched.preferredOver(other), then it can always be preferred over null as well. | private Optional<Group> allocateNextGroup() {
// Round-robin scan of the scoreboard starting at 'needle': pick the schedule with the
// lowest score among groups that currently have sufficient coverage.
synchronized (this) {
GroupSchedule bestSchedule = null;
int index = needle;
for (int i = 0; i < scoreboard.size(); i++) {
GroupSchedule sched = scoreboard.get(index);
// Manual wrap-around of the scan position.
index++;
if (index >= scoreboard.size()) {
index = 0;
}
if (sched.group.hasSufficientCoverage() && (bestSchedule == null || sched.compareTo(bestSchedule) < 0)) {
bestSchedule = sched;
}
}
// Rotate the starting point so equally-scored groups take turns.
needle++;
if (needle >= scoreboard.size()) {
needle = 0;
}
Group ret = null;
if (bestSchedule != null) {
// Score counts allocations in flight; decremented again by releaseGroup().
bestSchedule.adjustScore(1);
ret = bestSchedule.group;
}
if (log.isLoggable(Level.FINE)) {
log.fine("Offering <" + ret + "> for query connection");
}
// Empty when no group currently has sufficient coverage.
return Optional.ofNullable(ret);
}
} | int index = needle; | private Optional<Group> allocateNextGroup() {
// Single scan of the scoreboard starting at 'needle', keeping the preferred schedule
// (sufficient coverage, lowest score); wrap-around is delegated to nextScoreboardIndex().
synchronized (this) {
GroupSchedule bestSchedule = null;
int index = needle;
for (int i = 0; i < scoreboard.size(); i++) {
GroupSchedule sched = scoreboard.get(index);
if (sched.isPreferredOver(bestSchedule)) {
bestSchedule = sched;
}
index = nextScoreboardIndex(index);
}
// Rotate the starting point so equally-scored groups take turns.
needle = nextScoreboardIndex(needle);
Group ret = null;
if (bestSchedule != null) {
// Count the allocation; undone by releaseGroup().
bestSchedule.adjustScore(1);
ret = bestSchedule.group;
}
if (log.isLoggable(Level.FINE)) {
log.fine("Offering <" + ret + "> for query connection");
}
// Empty when no group is currently preferable (e.g. insufficient coverage).
return Optional.ofNullable(ret);
}
} | class LoadBalancer {
private final static Logger log = Logger.getLogger(LoadBalancer.class.getName());
// Internal dispatch is only used when searchCluster.groupSize() == 1.
private final boolean isInternallyDispatchable;
// One schedule per group; shuffled at construction. Null when no cluster is given.
private final List<GroupSchedule> scoreboard;
// Next scoreboard position to start scanning from (round-robin rotation).
private int needle = 0;

public LoadBalancer(SearchCluster searchCluster) {
    if (searchCluster == null) {
        this.isInternallyDispatchable = false;
        this.scoreboard = null;
        return;
    }
    this.isInternallyDispatchable = (searchCluster.groupSize() == 1);
    this.scoreboard = new ArrayList<>(searchCluster.groups().size());
    for (Group group : searchCluster.groups().values()) {
        scoreboard.add(new GroupSchedule(group));
    }
    Collections.shuffle(scoreboard);
}

/**
 * Returns the group to dispatch the given query to, or empty when internal
 * dispatch cannot be used for this cluster.
 */
public Optional<Group> getGroupForQuery(Query query) {
    if (!isInternallyDispatchable) {
        return Optional.empty();
    }
    return allocateNextGroup();
}

/** Returns a previously allocated group, decrementing its score. */
public void releaseGroup(Group group) {
    synchronized (this) {
        for (GroupSchedule sched : scoreboard) {
            if (sched.group.id() == group.id()) {
                sched.adjustScore(-1);
                break;
            }
        }
    }
}

/** A group and its current allocation score; ordered ascending by that score. */
public static class GroupSchedule implements Comparable<GroupSchedule> {
    private final Group group;
    private int score;

    public GroupSchedule(Group group) {
        this.group = group;
        this.score = 0;
    }

    @Override
    public int compareTo(GroupSchedule that) {
        // Use Integer.compare rather than 'this.score - that.score': difference-based
        // comparison can overflow and report the wrong sign, violating the
        // Comparable contract. The ordering for in-range values is unchanged.
        return Integer.compare(this.score, that.score);
    }

    public void adjustScore(int amount) {
        this.score += amount;
        if (score < 0) {
            log.warning("Double free of query target group detected");
            score = 0;
        }
    }
}
} | class LoadBalancer {
private final static Logger log = Logger.getLogger(LoadBalancer.class.getName());
// Internal dispatch is only used when searchCluster.groupSize() == 1.
private final boolean isInternallyDispatchable;
// One schedule per group; shuffled at construction. Null when no cluster is given.
private final List<GroupSchedule> scoreboard;
// Next scoreboard position to start scanning from (rotated on every allocation).
private int needle = 0;
public LoadBalancer(SearchCluster searchCluster) {
if (searchCluster == null) {
this.isInternallyDispatchable = false;
this.scoreboard = null;
return;
}
this.isInternallyDispatchable = (searchCluster.groupSize() == 1);
this.scoreboard = new ArrayList<>(searchCluster.groups().size());
for (Group group : searchCluster.groups().values()) {
scoreboard.add(new GroupSchedule(group));
}
Collections.shuffle(scoreboard);
}
/**
 * Select and allocate the search cluster group which is to be used for the provided query. Callers <b>must</b> call
 * {@link #releaseGroup(Group)} when done with the allocated group.
 *
 * @param query the query to be dispatched
 * @return The node group to target, or <i>empty</i> if the internal dispatch logic cannot be used
 */
public Optional<Group> takeGroupForQuery(Query query) {
if (!isInternallyDispatchable) {
return Optional.empty();
}
return allocateNextGroup();
}
/**
 * Release an allocation given by {@link #takeGroupForQuery(Query)}.
 *
 * @param group
 * previously allocated group
 */
public void releaseGroup(Group group) {
synchronized (this) {
for (GroupSchedule sched : scoreboard) {
if (sched.group.id() == group.id()) {
sched.adjustScore(-1);
break;
}
}
}
}
// Returns the scoreboard index after 'current', wrapping to 0 at the end.
private int nextScoreboardIndex(int current) {
int next = current + 1;
if (next >= scoreboard.size()) {
next %= scoreboard.size();
}
return next;
}
// A group together with its allocation score; a lower score is preferred.
private static class GroupSchedule {
private final Group group;
private int score;
public GroupSchedule(Group group) {
this.group = group;
this.score = 0;
}
// True when this schedule should be chosen over 'other': the group must have
// sufficient coverage; a null 'other' or a strictly lower score wins.
public boolean isPreferredOver(GroupSchedule other) {
if (! group.hasSufficientCoverage()) {
return false;
}
if (other == null) {
return true;
}
return this.score < other.score;
}
// Adjusts the score, clamping at zero and logging a suspected double release.
public void adjustScore(int amount) {
this.score += amount;
if (score < 0) {
log.warning("Double free of query target group detected");
score = 0;
}
}
}
} |
I'm replacing the wraparounds with a method at this time, and replacing the comparison logic with a boolean method. The result is much cleaner, as expected. | private Optional<Group> allocateNextGroup() {
// Round-robin scan of the scoreboard starting at 'needle': pick the schedule with the
// lowest score among groups that currently have sufficient coverage.
synchronized (this) {
GroupSchedule bestSchedule = null;
int index = needle;
for (int i = 0; i < scoreboard.size(); i++) {
GroupSchedule sched = scoreboard.get(index);
// Manual wrap-around of the scan position.
index++;
if (index >= scoreboard.size()) {
index = 0;
}
if (sched.group.hasSufficientCoverage() && (bestSchedule == null || sched.compareTo(bestSchedule) < 0)) {
bestSchedule = sched;
}
}
// Rotate the starting point so equally-scored groups take turns.
needle++;
if (needle >= scoreboard.size()) {
needle = 0;
}
Group ret = null;
if (bestSchedule != null) {
// Score counts allocations in flight; decremented again by releaseGroup().
bestSchedule.adjustScore(1);
ret = bestSchedule.group;
}
if (log.isLoggable(Level.FINE)) {
log.fine("Offering <" + ret + "> for query connection");
}
// Empty when no group currently has sufficient coverage.
return Optional.ofNullable(ret);
}
} | int index = needle; | private Optional<Group> allocateNextGroup() {
// Single scan of the scoreboard starting at 'needle', keeping the preferred schedule
// (sufficient coverage, lowest score); wrap-around is delegated to nextScoreboardIndex().
synchronized (this) {
GroupSchedule bestSchedule = null;
int index = needle;
for (int i = 0; i < scoreboard.size(); i++) {
GroupSchedule sched = scoreboard.get(index);
if (sched.isPreferredOver(bestSchedule)) {
bestSchedule = sched;
}
index = nextScoreboardIndex(index);
}
// Rotate the starting point so equally-scored groups take turns.
needle = nextScoreboardIndex(needle);
Group ret = null;
if (bestSchedule != null) {
// Count the allocation; undone by releaseGroup().
bestSchedule.adjustScore(1);
ret = bestSchedule.group;
}
if (log.isLoggable(Level.FINE)) {
log.fine("Offering <" + ret + "> for query connection");
}
// Empty when no group is currently preferable (e.g. insufficient coverage).
return Optional.ofNullable(ret);
}
} | class LoadBalancer {
private final static Logger log = Logger.getLogger(LoadBalancer.class.getName());
// Internal dispatch is only used when searchCluster.groupSize() == 1.
private final boolean isInternallyDispatchable;
// One schedule per group; shuffled at construction. Null when no cluster is given.
private final List<GroupSchedule> scoreboard;
// Next scoreboard position to start scanning from (round-robin rotation).
private int needle = 0;

public LoadBalancer(SearchCluster searchCluster) {
    if (searchCluster == null) {
        this.isInternallyDispatchable = false;
        this.scoreboard = null;
        return;
    }
    this.isInternallyDispatchable = (searchCluster.groupSize() == 1);
    this.scoreboard = new ArrayList<>(searchCluster.groups().size());
    for (Group group : searchCluster.groups().values()) {
        scoreboard.add(new GroupSchedule(group));
    }
    Collections.shuffle(scoreboard);
}

/**
 * Returns the group to dispatch the given query to, or empty when internal
 * dispatch cannot be used for this cluster.
 */
public Optional<Group> getGroupForQuery(Query query) {
    if (!isInternallyDispatchable) {
        return Optional.empty();
    }
    return allocateNextGroup();
}

/** Returns a previously allocated group, decrementing its score. */
public void releaseGroup(Group group) {
    synchronized (this) {
        for (GroupSchedule sched : scoreboard) {
            if (sched.group.id() == group.id()) {
                sched.adjustScore(-1);
                break;
            }
        }
    }
}

/** A group and its current allocation score; ordered ascending by that score. */
public static class GroupSchedule implements Comparable<GroupSchedule> {
    private final Group group;
    private int score;

    public GroupSchedule(Group group) {
        this.group = group;
        this.score = 0;
    }

    @Override
    public int compareTo(GroupSchedule that) {
        // Use Integer.compare rather than 'this.score - that.score': difference-based
        // comparison can overflow and report the wrong sign, violating the
        // Comparable contract. The ordering for in-range values is unchanged.
        return Integer.compare(this.score, that.score);
    }

    public void adjustScore(int amount) {
        this.score += amount;
        if (score < 0) {
            log.warning("Double free of query target group detected");
            score = 0;
        }
    }
}
} | class LoadBalancer {
private final static Logger log = Logger.getLogger(LoadBalancer.class.getName());
// Internal dispatch is only used when searchCluster.groupSize() == 1.
private final boolean isInternallyDispatchable;
// One schedule per group; shuffled at construction. Null when no cluster is given.
private final List<GroupSchedule> scoreboard;
// Next scoreboard position to start scanning from (rotated on every allocation).
private int needle = 0;
public LoadBalancer(SearchCluster searchCluster) {
if (searchCluster == null) {
this.isInternallyDispatchable = false;
this.scoreboard = null;
return;
}
this.isInternallyDispatchable = (searchCluster.groupSize() == 1);
this.scoreboard = new ArrayList<>(searchCluster.groups().size());
for (Group group : searchCluster.groups().values()) {
scoreboard.add(new GroupSchedule(group));
}
Collections.shuffle(scoreboard);
}
/**
 * Select and allocate the search cluster group which is to be used for the provided query. Callers <b>must</b> call
 * {@link #releaseGroup(Group)} when done with the allocated group.
 *
 * @param query the query to be dispatched
 * @return The node group to target, or <i>empty</i> if the internal dispatch logic cannot be used
 */
public Optional<Group> takeGroupForQuery(Query query) {
if (!isInternallyDispatchable) {
return Optional.empty();
}
return allocateNextGroup();
}
/**
 * Release an allocation given by {@link #takeGroupForQuery(Query)}.
 *
 * @param group
 * previously allocated group
 */
public void releaseGroup(Group group) {
synchronized (this) {
for (GroupSchedule sched : scoreboard) {
if (sched.group.id() == group.id()) {
sched.adjustScore(-1);
break;
}
}
}
}
// Returns the scoreboard index after 'current', wrapping to 0 at the end.
private int nextScoreboardIndex(int current) {
int next = current + 1;
if (next >= scoreboard.size()) {
next %= scoreboard.size();
}
return next;
}
// A group together with its allocation score; a lower score is preferred.
private static class GroupSchedule {
private final Group group;
private int score;
public GroupSchedule(Group group) {
this.group = group;
this.score = 0;
}
// True when this schedule should be chosen over 'other': the group must have
// sufficient coverage; a null 'other' or a strictly lower score wins.
public boolean isPreferredOver(GroupSchedule other) {
if (! group.hasSufficientCoverage()) {
return false;
}
if (other == null) {
return true;
}
return this.score < other.score;
}
// Adjusts the score, clamping at zero and logging a suspected double release.
public void adjustScore(int amount) {
this.score += amount;
if (score < 0) {
log.warning("Double free of query target group detected");
score = 0;
}
}
}
} |
Can you show some examples of such a case? | public ParseNode visitSimpleFunctionCall(StarRocksParser.SimpleFunctionCallContext context) {
// Desugars a parsed function call into the appropriate AST node. Several built-ins
// get dedicated handling (time_slice/date_slice, date arithmetic, element_at,
// isnull/isnotnull, arithmetic operators, str_to_map, map(), substr, lpad/rpad,
// dict_mapping); everything else becomes a plain FunctionCallExpr, optionally
// wrapped in an OVER clause.
String fullFunctionName = getQualifiedName(context.qualifiedName()).toString();
NodePosition pos = createPos(context);
FunctionName fnName = FunctionName.createFnName(fullFunctionName);
String functionName = fnName.getFunction();
// time_slice/date_slice: 2-4 args; missing interval unit defaults to DAY, missing
// boundary defaults to "floor".
if (functionName.equals(FunctionSet.TIME_SLICE) || functionName.equals(FunctionSet.DATE_SLICE)) {
if (context.expression().size() == 2) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
if (!(e2 instanceof IntervalLiteral)) {
e2 = new IntervalLiteral(e2, new UnitIdentifier("DAY"));
}
IntervalLiteral intervalLiteral = (IntervalLiteral) e2;
FunctionCallExpr functionCallExpr = new FunctionCallExpr(fnName, getArgumentsForTimeSlice(e1,
intervalLiteral.getValue(), intervalLiteral.getUnitIdentifier().getDescription().toLowerCase(),
"floor"), pos);
return functionCallExpr;
} else if (context.expression().size() == 3) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
if (!(e2 instanceof IntervalLiteral)) {
e2 = new IntervalLiteral(e2, new UnitIdentifier("DAY"));
}
IntervalLiteral intervalLiteral = (IntervalLiteral) e2;
ParseNode e3 = visit(context.expression(2));
if (!(e3 instanceof UnitBoundary)) {
throw new ParsingException(PARSER_ERROR_MSG.wrongTypeOfArgs(functionName), e3.getPos());
}
UnitBoundary unitBoundary = (UnitBoundary) e3;
FunctionCallExpr functionCallExpr = new FunctionCallExpr(fnName, getArgumentsForTimeSlice(e1,
intervalLiteral.getValue(), intervalLiteral.getUnitIdentifier().getDescription().toLowerCase(),
unitBoundary.getDescription().toLowerCase()), pos);
return functionCallExpr;
} else if (context.expression().size() == 4) {
// 4-arg form: unit and boundary are given as string literals.
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
Expr e3 = (Expr) visit(context.expression(2));
Expr e4 = (Expr) visit(context.expression(3));
if (!(e3 instanceof StringLiteral)) {
throw new ParsingException(PARSER_ERROR_MSG.wrongTypeOfArgs(functionName), e3.getPos());
}
String ident = ((StringLiteral) e3).getValue();
if (!(e4 instanceof StringLiteral)) {
throw new ParsingException(PARSER_ERROR_MSG.wrongTypeOfArgs(functionName), e4.getPos());
}
String boundary = ((StringLiteral) e4).getValue();
return new FunctionCallExpr(fnName, getArgumentsForTimeSlice(e1, e2, ident, boundary));
} else {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
}
// Date add/sub style functions become timestamp arithmetic; unit defaults to DAY.
if (DATE_FUNCTIONS.contains(functionName)) {
if (context.expression().size() != 2) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
if (!(e2 instanceof IntervalLiteral)) {
e2 = new IntervalLiteral(e2, new UnitIdentifier("DAY"));
}
IntervalLiteral intervalLiteral = (IntervalLiteral) e2;
return new TimestampArithmeticExpr(functionName, e1, intervalLiteral.getValue(),
intervalLiteral.getUnitIdentifier().getDescription(), pos);
}
if (functionName.equals(FunctionSet.ELEMENT_AT)) {
List<Expr> params = visit(context.expression(), Expr.class);
if (params.size() != 2) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
return new CollectionElementExpr(params.get(0), params.get(1), false);
}
// isnull/isnotnull are rewritten to IS [NOT] NULL predicates.
if (functionName.equals(FunctionSet.ISNULL)) {
List<Expr> params = visit(context.expression(), Expr.class);
if (params.size() != 1) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
return new IsNullPredicate(params.get(0), false, pos);
}
if (functionName.equals(FunctionSet.ISNOTNULL)) {
List<Expr> params = visit(context.expression(), Expr.class);
if (params.size() != 1) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
return new IsNullPredicate(params.get(0), true, pos);
}
// Function-style arithmetic (e.g. add(a, b)); the second operand is optional.
if (ArithmeticExpr.isArithmeticExpr(fnName.getFunction())) {
if (context.expression().size() < 1) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = context.expression().size() > 1 ? (Expr) visit(context.expression(1)) : null;
return new ArithmeticExpr(ArithmeticExpr.getArithmeticOperator(fnName.getFunction()), e1, e2, pos);
}
// str_to_map: delimiters default to "," (collection) and ":" (map entry).
if (functionName.equals(FunctionSet.STR_TO_MAP)) {
Expr e0;
Expr e1;
Expr e2;
String collectionDelimiter = ",";
String mapDelimiter = ":";
if (context.expression().size() == 1) {
e0 = (Expr) visit(context.expression(0));
e1 = new StringLiteral(collectionDelimiter, pos);
e2 = new StringLiteral(mapDelimiter, pos);
} else if (context.expression().size() == 2) {
e0 = (Expr) visit(context.expression(0));
e1 = (Expr) visit(context.expression(1));
e2 = new StringLiteral(mapDelimiter, pos);
} else if (context.expression().size() == 3) {
e0 = (Expr) visit(context.expression(0));
e1 = (Expr) visit(context.expression(1));
e2 = (Expr) visit(context.expression(2));
} else {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(FunctionSet.STR_TO_MAP));
}
// NOTE(review): getType() is consulted during parsing, where non-literal
// expressions may not be resolved yet -- confirm this check is reliable here.
if (e0.getType().isStringType()) {
FunctionCallExpr split = new FunctionCallExpr(FunctionSet.SPLIT, ImmutableList.of(e0, e1), pos);
return new FunctionCallExpr(functionName, ImmutableList.of(split, e2), pos);
} else {
// NOTE(review): the map delimiter e2 is silently dropped in this branch --
// only (e0, e1) are passed on; verify this is intended.
return new FunctionCallExpr(functionName, ImmutableList.of(e0, e1), pos);
}
}
if (fnName.getFunction().equalsIgnoreCase(FunctionSet.CONNECTION_ID)) {
return new InformationFunction(FunctionSet.CONNECTION_ID.toUpperCase());
}
// map(): arguments must come in key/value pairs; no arguments yields an empty map.
if (functionName.equals(FunctionSet.MAP)) {
List<Expr> exprs;
if (context.expression() != null) {
int num = context.expression().size();
if (num % 2 == 1) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(num, "map()",
"Arguments must be in key/value pairs"), pos);
}
exprs = visit(context.expression(), Expr.class);
} else {
exprs = Collections.emptyList();
}
return new MapExpr(Type.ANY_MAP, exprs, pos);
}
// substr/substring: position/length arguments are coerced via addArgumentUseTypeInt.
if (functionName.equals(FunctionSet.SUBSTR) || functionName.equals(FunctionSet.SUBSTRING)) {
List<Expr> exprs = Lists.newArrayList();
if (context.expression().size() == 2) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
exprs.add(e1);
addArgumentUseTypeInt(e2, exprs);
} else if (context.expression().size() == 3) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
Expr e3 = (Expr) visit(context.expression(2));
exprs.add(e1);
addArgumentUseTypeInt(e2, exprs);
addArgumentUseTypeInt(e3, exprs);
}
return new FunctionCallExpr(fnName, exprs, pos);
}
// lpad/rpad with two args: the pad string defaults to a single space.
if (functionName.equals(FunctionSet.LPAD) || functionName.equals(FunctionSet.RPAD)) {
if (context.expression().size() == 2) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
FunctionCallExpr functionCallExpr = new FunctionCallExpr(
fnName, Lists.newArrayList(e1, e2, new StringLiteral(" ")), pos);
return functionCallExpr;
}
}
if (functionName.equals(FunctionSet.DICT_MAPPING)) {
List<Expr> params = visit(context.expression(), Expr.class);
return new DictQueryExpr(params);
}
// Default: a plain call; attach the window definition when an OVER clause is present.
FunctionCallExpr functionCallExpr = new FunctionCallExpr(fnName,
new FunctionParams(false, visit(context.expression(), Expr.class)), pos);
if (context.over() != null) {
return buildOverClause(functionCallExpr, context.over(), pos);
}
return SyntaxSugars.parse(functionCallExpr);
} | return new FunctionCallExpr(functionName, ImmutableList.of(e0, e1), pos); | public ParseNode visitSimpleFunctionCall(StarRocksParser.SimpleFunctionCallContext context) {
// Desugars a parsed function call into the appropriate AST node. Several built-ins
// get dedicated handling (time_slice/date_slice, date arithmetic, element_at,
// isnull/isnotnull, arithmetic operators, str_to_map, map(), substr, lpad/rpad,
// dict_mapping); everything else becomes a plain FunctionCallExpr, optionally
// wrapped in an OVER clause.
String fullFunctionName = getQualifiedName(context.qualifiedName()).toString();
NodePosition pos = createPos(context);
FunctionName fnName = FunctionName.createFnName(fullFunctionName);
String functionName = fnName.getFunction();
// time_slice/date_slice: 2-4 args; missing interval unit defaults to DAY, missing
// boundary defaults to "floor".
if (functionName.equals(FunctionSet.TIME_SLICE) || functionName.equals(FunctionSet.DATE_SLICE)) {
if (context.expression().size() == 2) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
if (!(e2 instanceof IntervalLiteral)) {
e2 = new IntervalLiteral(e2, new UnitIdentifier("DAY"));
}
IntervalLiteral intervalLiteral = (IntervalLiteral) e2;
FunctionCallExpr functionCallExpr = new FunctionCallExpr(fnName, getArgumentsForTimeSlice(e1,
intervalLiteral.getValue(), intervalLiteral.getUnitIdentifier().getDescription().toLowerCase(),
"floor"), pos);
return functionCallExpr;
} else if (context.expression().size() == 3) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
if (!(e2 instanceof IntervalLiteral)) {
e2 = new IntervalLiteral(e2, new UnitIdentifier("DAY"));
}
IntervalLiteral intervalLiteral = (IntervalLiteral) e2;
ParseNode e3 = visit(context.expression(2));
if (!(e3 instanceof UnitBoundary)) {
throw new ParsingException(PARSER_ERROR_MSG.wrongTypeOfArgs(functionName), e3.getPos());
}
UnitBoundary unitBoundary = (UnitBoundary) e3;
FunctionCallExpr functionCallExpr = new FunctionCallExpr(fnName, getArgumentsForTimeSlice(e1,
intervalLiteral.getValue(), intervalLiteral.getUnitIdentifier().getDescription().toLowerCase(),
unitBoundary.getDescription().toLowerCase()), pos);
return functionCallExpr;
} else if (context.expression().size() == 4) {
// 4-arg form: unit and boundary are given as string literals.
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
Expr e3 = (Expr) visit(context.expression(2));
Expr e4 = (Expr) visit(context.expression(3));
if (!(e3 instanceof StringLiteral)) {
throw new ParsingException(PARSER_ERROR_MSG.wrongTypeOfArgs(functionName), e3.getPos());
}
String ident = ((StringLiteral) e3).getValue();
if (!(e4 instanceof StringLiteral)) {
throw new ParsingException(PARSER_ERROR_MSG.wrongTypeOfArgs(functionName), e4.getPos());
}
String boundary = ((StringLiteral) e4).getValue();
return new FunctionCallExpr(fnName, getArgumentsForTimeSlice(e1, e2, ident, boundary));
} else {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
}
// Date add/sub style functions become timestamp arithmetic; unit defaults to DAY.
if (DATE_FUNCTIONS.contains(functionName)) {
if (context.expression().size() != 2) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
if (!(e2 instanceof IntervalLiteral)) {
e2 = new IntervalLiteral(e2, new UnitIdentifier("DAY"));
}
IntervalLiteral intervalLiteral = (IntervalLiteral) e2;
return new TimestampArithmeticExpr(functionName, e1, intervalLiteral.getValue(),
intervalLiteral.getUnitIdentifier().getDescription(), pos);
}
if (functionName.equals(FunctionSet.ELEMENT_AT)) {
List<Expr> params = visit(context.expression(), Expr.class);
if (params.size() != 2) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
return new CollectionElementExpr(params.get(0), params.get(1), false);
}
// isnull/isnotnull are rewritten to IS [NOT] NULL predicates.
if (functionName.equals(FunctionSet.ISNULL)) {
List<Expr> params = visit(context.expression(), Expr.class);
if (params.size() != 1) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
return new IsNullPredicate(params.get(0), false, pos);
}
if (functionName.equals(FunctionSet.ISNOTNULL)) {
List<Expr> params = visit(context.expression(), Expr.class);
if (params.size() != 1) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
return new IsNullPredicate(params.get(0), true, pos);
}
// Function-style arithmetic (e.g. add(a, b)); the second operand is optional.
if (ArithmeticExpr.isArithmeticExpr(fnName.getFunction())) {
if (context.expression().size() < 1) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = context.expression().size() > 1 ? (Expr) visit(context.expression(1)) : null;
return new ArithmeticExpr(ArithmeticExpr.getArithmeticOperator(fnName.getFunction()), e1, e2, pos);
}
// str_to_map: delimiters default to "," (collection) and ":" (map entry); all three
// arguments are always forwarded in this version.
if (functionName.equals(FunctionSet.STR_TO_MAP)) {
Expr e0;
Expr e1;
Expr e2;
String collectionDelimiter = ",";
String mapDelimiter = ":";
if (context.expression().size() == 1) {
e0 = (Expr) visit(context.expression(0));
e1 = new StringLiteral(collectionDelimiter, pos);
e2 = new StringLiteral(mapDelimiter, pos);
} else if (context.expression().size() == 2) {
e0 = (Expr) visit(context.expression(0));
e1 = (Expr) visit(context.expression(1));
e2 = new StringLiteral(mapDelimiter, pos);
} else if (context.expression().size() == 3) {
e0 = (Expr) visit(context.expression(0));
e1 = (Expr) visit(context.expression(1));
e2 = (Expr) visit(context.expression(2));
} else {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(FunctionSet.STR_TO_MAP));
}
return new FunctionCallExpr(functionName, ImmutableList.of(e0, e1, e2), pos);
}
if (fnName.getFunction().equalsIgnoreCase(FunctionSet.CONNECTION_ID)) {
return new InformationFunction(FunctionSet.CONNECTION_ID.toUpperCase());
}
// map(): arguments must come in key/value pairs; no arguments yields an empty map.
if (functionName.equals(FunctionSet.MAP)) {
List<Expr> exprs;
if (context.expression() != null) {
int num = context.expression().size();
if (num % 2 == 1) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(num, "map()",
"Arguments must be in key/value pairs"), pos);
}
exprs = visit(context.expression(), Expr.class);
} else {
exprs = Collections.emptyList();
}
return new MapExpr(Type.ANY_MAP, exprs, pos);
}
// substr/substring: position/length arguments are coerced via addArgumentUseTypeInt.
if (functionName.equals(FunctionSet.SUBSTR) || functionName.equals(FunctionSet.SUBSTRING)) {
List<Expr> exprs = Lists.newArrayList();
if (context.expression().size() == 2) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
exprs.add(e1);
addArgumentUseTypeInt(e2, exprs);
} else if (context.expression().size() == 3) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
Expr e3 = (Expr) visit(context.expression(2));
exprs.add(e1);
addArgumentUseTypeInt(e2, exprs);
addArgumentUseTypeInt(e3, exprs);
}
return new FunctionCallExpr(fnName, exprs, pos);
}
// lpad/rpad with two args: the pad string defaults to a single space.
if (functionName.equals(FunctionSet.LPAD) || functionName.equals(FunctionSet.RPAD)) {
if (context.expression().size() == 2) {
Expr e1 = (Expr) visit(context.expression(0));
Expr e2 = (Expr) visit(context.expression(1));
FunctionCallExpr functionCallExpr = new FunctionCallExpr(
fnName, Lists.newArrayList(e1, e2, new StringLiteral(" ")), pos);
return functionCallExpr;
}
}
if (functionName.equals(FunctionSet.DICT_MAPPING)) {
List<Expr> params = visit(context.expression(), Expr.class);
return new DictQueryExpr(params);
}
// Default: a plain call; attach the window definition when an OVER clause is present.
FunctionCallExpr functionCallExpr = new FunctionCallExpr(fnName,
new FunctionParams(false, visit(context.expression(), Expr.class)), pos);
if (context.over() != null) {
return buildOverClause(functionCallExpr, context.over(), pos);
}
return SyntaxSugars.parse(functionCallExpr);
} | class AstBuilder extends StarRocksBaseVisitor<ParseNode> {
// SQL mode bitmask in effect for this parse, including any SET_VAR hint override.
private final long sqlMode;
// Hints attached to parser rule contexts; identity-keyed because contexts have no value equality.
private final IdentityHashMap<ParserRuleContext, List<HintNode>> hintMap;
// Next slot id handed out to a placeholder parameter.
private int placeHolderSlotId = 0;
// Prepared-statement parameters collected during parsing; null until one is seen.
private List<Parameter> parameters;
private static final BigInteger LONG_MAX = new BigInteger("9223372036854775807"); // 2^63 - 1
// Upper bound for the absolute value of a LARGEINT literal (2^127).
private static final BigInteger LARGEINT_MAX_ABS =
        new BigInteger("170141183460469231731687303715884105728");
// Date arithmetic functions recognized for special INTERVAL handling.
// NOTE: the original list contained FunctionSet.DATE_ADD twice; the duplicate was
// removed — membership semantics are unchanged.
private static final List<String> DATE_FUNCTIONS =
        Lists.newArrayList(FunctionSet.DATE_ADD,
                FunctionSet.ADDDATE,
                FunctionSet.DATE_SUB,
                FunctionSet.SUBDATE,
                FunctionSet.DAYS_SUB);
// Functions permitted inside PARTITION BY expressions.
private static final List<String> PARTITION_FUNCTIONS =
        Lists.newArrayList(FunctionSet.SUBSTR, FunctionSet.SUBSTRING,
                FunctionSet.FROM_UNIXTIME, FunctionSet.FROM_UNIXTIME_MS,
                FunctionSet.STR2DATE);
// Convenience constructor: parse with the given SQL mode and no pre-collected hints.
public AstBuilder(long sqlMode) {
    this(sqlMode, new IdentityHashMap<>());
}
/**
 * Creates a builder with an explicit hint map. Any SQL-mode bits carried by a
 * SET_VAR hint are OR-ed into the effective SQL mode; when several SET_VAR hints
 * are present, the last one scanned provides the hint value.
 */
public AstBuilder(long sqlMode, IdentityHashMap<ParserRuleContext, List<HintNode>> hintMap) {
    this.hintMap = hintMap;
    long hintSqlMode = 0L;
    for (List<HintNode> hints : hintMap.values()) {
        for (HintNode hint : hints) {
            if (hint instanceof SetVarHint) {
                hintSqlMode = ((SetVarHint) hint).getSqlModeHintValue();
            }
        }
    }
    this.sqlMode = sqlMode | hintSqlMode;
}
// Returns the prepared-statement parameters collected during parsing,
// or null when no placeholder was encountered.
public List<Parameter> getParameters() {
    return parameters;
}
// Entry point for a single parsed statement. Attaches collected query-scope hints
// to the statement, then clears the hint map so they are not re-applied later.
@Override
public ParseNode visitSingleStatement(StarRocksParser.SingleStatementContext context) {
    if (context.statement() != null) {
        StatementBase stmt = (StatementBase) visit(context.statement());
        if (MapUtils.isNotEmpty(hintMap)) {
            stmt.setAllQueryScopeHints(extractQueryScopeHintNode());
            hintMap.clear();
        }
        return stmt;
    } else {
        // Bare semicolon / empty input parses to an empty statement.
        return visit(context.emptyStatement());
    }
}
// An empty statement (e.g. a lone ';') carries no content.
@Override
public ParseNode visitEmptyStatement(StarRocksParser.EmptyStatementContext context) {
    return new EmptyStmt();
}
/**
 * USE [catalog.]db — one part means a bare database, two parts mean
 * catalog-qualified; anything else is rejected as malformed.
 */
@Override
public ParseNode visitUseDatabaseStatement(StarRocksParser.UseDatabaseStatementContext context) {
    NodePosition pos = createPos(context);
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    List<String> parts = qualifiedName.getParts();
    switch (parts.size()) {
        case 1:
            return new UseDbStmt(null, parts.get(0), pos);
        case 2:
            return new UseDbStmt(parts.get(0), parts.get(1), pos);
        default:
            throw new ParsingException(PARSER_ERROR_MSG.invalidDbFormat(qualifiedName.toString()),
                    qualifiedName.getPos());
    }
}
// USE '<catalog>': here the catalog name is a string literal
// (unlike SET CATALOG, which takes an identifier-or-string).
@Override
public ParseNode visitUseCatalogStatement(StarRocksParser.UseCatalogStatementContext context) {
    StringLiteral literal = (StringLiteral) visit(context.string());
    return new UseCatalogStmt(literal.getValue(), createPos(context));
}
// SET CATALOG <name>: the target may be a bare identifier or a quoted string.
@Override
public ParseNode visitSetCatalogStatement(StarRocksParser.SetCatalogStatementContext context) {
    String catalogName = ((Identifier) visit(context.identifierOrString())).getValue();
    return new SetCatalogStmt(catalogName, createPos(context));
}
/**
 * SHOW DATABASES [FROM catalog] [LIKE pattern | WHERE expr].
 * Exactly one of pattern / where-expression / neither is forwarded.
 */
@Override
public ParseNode visitShowDatabasesStatement(StarRocksParser.ShowDatabasesStatementContext context) {
    NodePosition pos = createPos(context);
    // Optional catalog qualifier.
    String catalog = context.catalog == null ? null : getQualifiedName(context.catalog).toString();
    if (context.pattern != null) {
        String pattern = ((StringLiteral) visit(context.pattern)).getValue();
        return new ShowDbStmt(pattern, null, catalog, pos);
    }
    if (context.expression() != null) {
        return new ShowDbStmt(null, (Expr) visit(context.expression()), catalog, pos);
    }
    return new ShowDbStmt(null, null, catalog, pos);
}
/**
 * ALTER DATABASE <db> SET (DATA|REPLICA) QUOTA <value>.
 * DATA quota values carry a unit suffix and are parsed as an identifier;
 * REPLICA quota is a plain integer literal.
 */
@Override
public ParseNode visitAlterDbQuotaStatement(StarRocksParser.AlterDbQuotaStatementContext context) {
    String dbName = ((Identifier) visit(context.identifier(0))).getValue();
    NodePosition pos = createPos(context);
    if (context.DATA() != null) {
        String quotaValue = ((Identifier) visit(context.identifier(1))).getValue();
        return new AlterDatabaseQuotaStmt(dbName, AlterDatabaseQuotaStmt.QuotaType.DATA, quotaValue, pos);
    }
    return new AlterDatabaseQuotaStmt(dbName, AlterDatabaseQuotaStmt.QuotaType.REPLICA,
            context.INTEGER_VALUE().getText(), pos);
}
/**
 * CREATE DATABASE [IF NOT EXISTS] [catalog.]db [PROPERTIES(...)].
 * An absent catalog is represented by the empty string; properties default to
 * an empty (mutable) map.
 */
@Override
public ParseNode visitCreateDbStatement(StarRocksParser.CreateDbStatementContext context) {
    String catalogName = context.catalog == null ? "" : getIdentifierName(context.catalog);
    String dbName = getIdentifierName(context.database);
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        for (Property property : visit(context.properties().property(), Property.class)) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new CreateDbStmt(context.IF() != null, catalogName, dbName, properties, createPos(context));
}
// DROP DATABASE [IF EXISTS] [catalog.]db [FORCE]. An absent catalog is "".
@Override
public ParseNode visitDropDbStatement(StarRocksParser.DropDbStatementContext context) {
    String catalogName = context.catalog == null ? "" : getIdentifierName(context.catalog);
    String dbName = getIdentifierName(context.database);
    boolean ifExists = context.IF() != null;
    boolean force = context.FORCE() != null;
    return new DropDbStmt(ifExists, catalogName, dbName, force, createPos(context));
}
// SHOW CREATE DATABASE <db>.
@Override
public ParseNode visitShowCreateDbStatement(StarRocksParser.ShowCreateDbStatementContext context) {
    String dbName = ((Identifier) visit(context.identifier())).getValue();
    return new ShowCreateDbStmt(dbName, createPos(context));
}
// ALTER DATABASE <old> RENAME <new>.
@Override
public ParseNode visitAlterDatabaseRenameStatement(StarRocksParser.AlterDatabaseRenameStatementContext context) {
    String dbName = ((Identifier) visit(context.identifier(0))).getValue();
    String newName = ((Identifier) visit(context.identifier(1))).getValue();
    return new AlterDatabaseRenameStatement(dbName, newName, createPos(context));
}
// RECOVER DATABASE <db>.
@Override
public ParseNode visitRecoverDbStmt(StarRocksParser.RecoverDbStmtContext context) {
    String dbName = ((Identifier) visit(context.identifier())).getValue();
    return new RecoverDbStmt(dbName, createPos(context));
}
// SHOW DATA [FROM <table>]: without FROM, reports on the current database;
// with FROM, scopes the report to one table.
@Override
public ParseNode visitShowDataStmt(StarRocksParser.ShowDataStmtContext context) {
    NodePosition pos = createPos(context);
    if (context.FROM() == null) {
        return new ShowDataStmt(null, null, pos);
    }
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    return new ShowDataStmt(targetTableName.getDb(), targetTableName.getTbl(), pos);
}
/**
 * Builds a CreateTableStmt from CREATE [EXTERNAL] TABLE [IF NOT EXISTS] ...,
 * translating every optional clause (engine, charset, keys, partitioning,
 * distribution, properties, rollups, ORDER BY) into its AST counterpart.
 */
@Override
public ParseNode visitCreateTableStatement(StarRocksParser.CreateTableStatementContext context) {
    Map<String, String> properties = null;
    if (context.properties() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    // Extra properties block, kept separate from regular table properties.
    Map<String, String> extProperties = null;
    if (context.extProperties() != null) {
        extProperties = new HashMap<>();
        List<Property> propertyList = visit(context.extProperties().properties().property(), Property.class);
        for (Property property : propertyList) {
            extProperties.put(property.getKey(), property.getValue());
        }
    }
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    List<ColumnDef> columnDefs = null;
    if (context.columnDesc() != null) {
        columnDefs = getColumnDefs(context.columnDesc());
    }
    // Each absent clause maps to null (engine maps to ""); the positional argument
    // order must match the CreateTableStmt constructor exactly.
    return new CreateTableStmt(
            context.IF() != null,
            context.EXTERNAL() != null,
            tableName,
            columnDefs,
            context.indexDesc() == null ? null : getIndexDefs(context.indexDesc()),
            context.engineDesc() == null ? "" :
                    ((Identifier) visit(context.engineDesc().identifier())).getValue(),
            context.charsetDesc() == null ? null :
                    ((Identifier) visit(context.charsetDesc().identifierOrString())).getValue(),
            context.keyDesc() == null ? null : getKeysDesc(context.keyDesc()),
            context.partitionDesc() == null ? null : getPartitionDesc(context.partitionDesc(), columnDefs),
            context.distributionDesc() == null ? null : (DistributionDesc) visit(context.distributionDesc()),
            properties,
            extProperties,
            context.comment() == null ? null : ((StringLiteral) visit(context.comment().string())).getStringValue(),
            context.rollupDesc() == null ?
                    null : context.rollupDesc().rollupItem().stream().map(this::getRollup).collect(toList()),
            context.orderByDesc() == null ? null :
                    visit(context.orderByDesc().identifierList().identifier(), Identifier.class)
                            .stream().map(Identifier::getValue).collect(toList()));
}
/**
 * Translates a PARTITION BY clause into a PartitionDesc. Three shapes:
 * 1) PARTITION BY <functionCall> — automatic (expression) range partitioning,
 *    optionally with batch-created partitions;
 * 2) PARTITION BY <primaryExpression> [RANGE (...)] — expression partitioning;
 * 3) PARTITION BY (col, ...) with RANGE / LIST / neither — column partitioning.
 */
private PartitionDesc getPartitionDesc(StarRocksParser.PartitionDescContext context, List<ColumnDef> columnDefs) {
    List<PartitionDesc> partitionDescList = new ArrayList<>();
    // Case 1: automatic partitioning driven by a partition function.
    if (context.functionCall() != null) {
        String currentGranularity = null;
        for (StarRocksParser.RangePartitionDescContext rangePartitionDescContext : context.rangePartitionDesc()) {
            final PartitionDesc rangePartitionDesc = (PartitionDesc) visit(rangePartitionDescContext);
            // Only the batch-create (multi-range) syntax is accepted here.
            if (!(rangePartitionDesc instanceof MultiRangePartitionDesc)) {
                throw new ParsingException("Automatic partition table creation only supports " +
                        "batch create partition syntax", rangePartitionDesc.getPos());
            }
            MultiRangePartitionDesc multiRangePartitionDesc = (MultiRangePartitionDesc) rangePartitionDesc;
            String descGranularity = multiRangePartitionDesc.getTimeUnit().toLowerCase();
            // All pre-created batches must share a single time granularity.
            if (currentGranularity == null) {
                currentGranularity = descGranularity;
            } else if (!currentGranularity.equals(descGranularity)) {
                throw new ParsingException("The partition granularity of automatic partition table " +
                        "batch creation in advance should be consistent", rangePartitionDesc.getPos());
            }
            partitionDescList.add(rangePartitionDesc);
        }
        FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.functionCall());
        List<String> columnList = AnalyzerUtils.checkAndExtractPartitionCol(functionCallExpr, columnDefs);
        AnalyzerUtils.checkAutoPartitionTableLimit(functionCallExpr, currentGranularity);
        RangePartitionDesc rangePartitionDesc = new RangePartitionDesc(columnList, partitionDescList);
        rangePartitionDesc.setAutoPartitionTable(true);
        return new ExpressionPartitionDesc(rangePartitionDesc, functionCallExpr);
    }
    // Case 2: PARTITION BY <expression> [RANGE (...)].
    StarRocksParser.PrimaryExpressionContext primaryExpressionContext = context.primaryExpression();
    if (primaryExpressionContext != null) {
        Expr primaryExpression = (Expr) visit(primaryExpressionContext);
        if (context.RANGE() != null) {
            for (StarRocksParser.RangePartitionDescContext rangePartitionDescContext : context.rangePartitionDesc()) {
                final PartitionDesc rangePartitionDesc = (PartitionDesc) visit(rangePartitionDescContext);
                partitionDescList.add(rangePartitionDesc);
            }
        }
        List<String> columnList = checkAndExtractPartitionColForRange(primaryExpression, false);
        RangePartitionDesc rangePartitionDesc = new RangePartitionDesc(columnList, partitionDescList);
        if (primaryExpression instanceof FunctionCallExpr) {
            FunctionCallExpr functionCallExpr = (FunctionCallExpr) primaryExpression;
            String functionName = functionCallExpr.getFnName().getFunction();
            // Wrap from_unixtime(_ms) in a cast so the partition expression is DATETIME.
            if (FunctionSet.FROM_UNIXTIME.equals(functionName)
                    || FunctionSet.FROM_UNIXTIME_MS.equals(functionName)) {
                primaryExpression = new CastExpr(TypeDef.create(PrimitiveType.DATETIME), primaryExpression);
            }
        }
        return new ExpressionPartitionDesc(rangePartitionDesc, primaryExpression);
    }
    // Case 3: classic column-list partitioning.
    List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class);
    List<String> columnList = identifierList.stream().map(Identifier::getValue).collect(toList());
    if (context.RANGE() != null) {
        for (StarRocksParser.RangePartitionDescContext rangePartitionDescContext : context.rangePartitionDesc()) {
            final PartitionDesc rangePartitionDesc = (PartitionDesc) visit(rangePartitionDescContext);
            partitionDescList.add(rangePartitionDesc);
        }
        return new RangePartitionDesc(columnList, partitionDescList);
    } else if (context.LIST() != null) {
        for (StarRocksParser.ListPartitionDescContext listPartitionDescContext : context.listPartitionDesc()) {
            final PartitionDesc listPartitionDesc = (PartitionDesc) visit(listPartitionDescContext);
            partitionDescList.add(listPartitionDesc);
        }
        return new ListPartitionDesc(columnList, partitionDescList);
    } else {
        // Neither RANGE nor LIST: implicit automatic list partitioning;
        // pre-created partitions are not allowed in this form.
        if (context.listPartitionDesc().size() > 0) {
            throw new ParsingException("Does not support creating partitions in advance");
        }
        ListPartitionDesc listPartitionDesc = new ListPartitionDesc(columnList, partitionDescList);
        listPartitionDesc.setAutoPartitionTable(true);
        return listPartitionDesc;
    }
}
/**
 * Validates a PARTITION BY range expression and extracts the partition column.
 * Only a whitelisted function (PARTITION_FUNCTIONS) applied directly to a column
 * reference is accepted; an outer CAST is unwrapped, but from_unixtime(_ms) must
 * appear bare (no cast, single argument).
 *
 * @param expr    the (possibly cast-wrapped) partition expression
 * @param hasCast whether a CAST has already been unwrapped on the way down
 * @return the referenced column names (empty when expr is not a function call)
 */
private List<String> checkAndExtractPartitionColForRange(Expr expr, boolean hasCast) {
    if (expr instanceof CastExpr) {
        CastExpr castExpr = (CastExpr) expr;
        return checkAndExtractPartitionColForRange(castExpr.getChild(0), true);
    }
    NodePosition pos = expr.getPos();
    List<String> columnList = new ArrayList<>();
    if (expr instanceof FunctionCallExpr) {
        FunctionCallExpr functionCallExpr = (FunctionCallExpr) expr;
        String functionName = functionCallExpr.getFnName().getFunction().toLowerCase();
        List<Expr> paramsExpr = functionCallExpr.getParams().exprs();
        if (PARTITION_FUNCTIONS.contains(functionName)) {
            // First argument must be a plain column reference.
            Expr firstExpr = paramsExpr.get(0);
            if (firstExpr instanceof SlotRef) {
                columnList.add(((SlotRef) firstExpr).getColumnName());
            } else {
                throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(), "PARTITION BY"),
                        pos);
            }
        } else {
            throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(), "PARTITION BY"), pos);
        }
        if (functionName.equals(FunctionSet.FROM_UNIXTIME) || functionName.equals(FunctionSet.FROM_UNIXTIME_MS)) {
            if (hasCast || paramsExpr.size() > 1) {
                throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(), "PARTITION BY"), pos);
            }
        }
    }
    return columnList;
}
/**
 * Builds an AddRollupClause from one rollup item: name, column list,
 * optional DUPLICATE KEY list, optional FROM base rollup, optional properties.
 */
private AlterClause getRollup(StarRocksParser.RollupItemContext rollupItemContext) {
    String rollupName = ((Identifier) visit(rollupItemContext.identifier())).getValue();
    List<Identifier> columnList =
            visit(rollupItemContext.identifierList().identifier(), Identifier.class);
    List<String> dupKeys = null;
    if (rollupItemContext.dupKeys() != null) {
        final List<Identifier> identifierList =
                visit(rollupItemContext.dupKeys().identifierList().identifier(), Identifier.class);
        dupKeys = identifierList.stream().map(Identifier::getValue).collect(toList());
    }
    String baseRollupName = rollupItemContext.fromRollup() != null ?
            ((Identifier) visit(rollupItemContext.fromRollup().identifier())).getValue() : null;
    Map<String, String> properties = null;
    if (rollupItemContext.properties() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(rollupItemContext.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new AddRollupClause(rollupName, columnList.stream().map(Identifier::getValue).collect(toList()),
            dupKeys, baseRollupName,
            properties, createPos(rollupItemContext));
}
/**
 * Maps the key-type keyword (PRIMARY/DUPLICATE/AGGREGATE/UNIQUE) plus key column
 * list to a KeysDesc. The type stays null when no keyword matched.
 */
private KeysDesc getKeysDesc(StarRocksParser.KeyDescContext context) {
    KeysType keysType = null;
    if (context.PRIMARY() != null) {
        keysType = KeysType.PRIMARY_KEYS;
    } else if (context.DUPLICATE() != null) {
        keysType = KeysType.DUP_KEYS;
    } else if (context.AGGREGATE() != null) {
        keysType = KeysType.AGG_KEYS;
    } else if (context.UNIQUE() != null) {
        keysType = KeysType.UNIQUE_KEYS;
    }
    List<String> keyColumns = visit(context.identifierList().identifier(), Identifier.class)
            .stream().map(Identifier::getValue).collect(toList());
    return new KeysDesc(keysType, keyColumns, createPos(context));
}
/**
 * Builds an IndexDef for every INDEX clause: name, columns, index type,
 * optional comment and properties.
 */
private List<IndexDef> getIndexDefs(List<StarRocksParser.IndexDescContext> indexDesc) {
    List<IndexDef> indexDefList = new ArrayList<>();
    for (StarRocksParser.IndexDescContext context : indexDesc) {
        String indexName = ((Identifier) visit(context.identifier())).getValue();
        List<Identifier> columnList = visit(context.identifierList().identifier(), Identifier.class);
        String comment =
                context.comment() != null ? ((StringLiteral) visit(context.comment())).getStringValue() : null;
        final IndexDef indexDef =
                new IndexDef(indexName, columnList.stream().map(Identifier::getValue).collect(toList()),
                        getIndexType(context.indexType()), comment, getPropertyList(context.propertyList()),
                        createPos(context));
        indexDefList.add(indexDef);
    }
    return indexDefList;
}
/**
 * Builds a ColumnDef for every column description, in declaration order.
 */
private List<ColumnDef> getColumnDefs(List<StarRocksParser.ColumnDescContext> columnDesc) {
    // Method reference instead of the redundant lambda wrapper.
    return columnDesc.stream().map(this::getColumnDef).collect(toList());
}
/**
 * Builds a ColumnDef from a single column description: name, type, charset,
 * key flag, aggregate type, nullability, auto-increment, default value,
 * generated-column expression and comment.
 */
private ColumnDef getColumnDef(StarRocksParser.ColumnDescContext context) {
    Identifier colIdentifier = (Identifier) visit(context.identifier());
    String columnName = colIdentifier.getValue();
    TypeDef typeDef = new TypeDef(getType(context.type()), createPos(context.type()));
    String charsetName = context.charsetName() != null ?
            ((Identifier) visit(context.charsetName().identifier())).getValue() : null;
    boolean isKey = context.KEY() != null;
    AggregateType aggregateType =
            context.aggDesc() != null ? AggregateType.valueOf(context.aggDesc().getText().toUpperCase()) : null;
    // Tri-state nullability: null means "not specified" so downstream can apply its default.
    Boolean isAllowNull = null;
    if (context.NOT() != null && context.NULL() != null) {
        isAllowNull = false;
    } else if (context.NULL() != null) {
        isAllowNull = true;
    }
    Boolean isAutoIncrement = null;
    if (context.AUTO_INCREMENT() != null) {
        isAutoIncrement = true;
    }
    // An AUTO_INCREMENT column must not be explicitly declared nullable;
    // when nullability was left unspecified it is forced to NOT NULL.
    if (isAutoIncrement != null && isAllowNull != null && isAllowNull) {
        throw new ParsingException(PARSER_ERROR_MSG.nullColFoundInPK(columnName), colIdentifier.getPos());
    }
    if (isAutoIncrement != null) {
        isAllowNull = false;
    }
    ColumnDef.DefaultValueDef defaultValueDef = ColumnDef.DefaultValueDef.NOT_SET;
    final StarRocksParser.DefaultDescContext defaultDescContext = context.defaultDesc();
    if (defaultDescContext != null) {
        if (defaultDescContext.string() != null) {
            String value = ((StringLiteral) visit(defaultDescContext.string())).getStringValue();
            defaultValueDef = new ColumnDef.DefaultValueDef(true, new StringLiteral(value));
        } else if (defaultDescContext.NULL() != null) {
            defaultValueDef = ColumnDef.DefaultValueDef.NULL_DEFAULT_VALUE;
        } else if (defaultDescContext.CURRENT_TIMESTAMP() != null) {
            defaultValueDef = ColumnDef.DefaultValueDef.CURRENT_TIMESTAMP_VALUE;
        } else if (defaultDescContext.qualifiedName() != null) {
            // DEFAULT <function>(): stored as a zero-argument function call.
            String functionName = defaultDescContext.qualifiedName().getText().toLowerCase();
            defaultValueDef = new ColumnDef.DefaultValueDef(true,
                    new FunctionCallExpr(functionName, new ArrayList<>()));
        }
    }
    final StarRocksParser.GeneratedColumnDescContext generatedColumnDescContext =
            context.generatedColumnDesc();
    Expr expr = null;
    if (generatedColumnDescContext != null) {
        // Generated columns may not be declared NOT NULL and may not be key columns.
        if (isAllowNull != null && !isAllowNull) {  // idiomatic form of "== false"
            throw new ParsingException(PARSER_ERROR_MSG.foundNotNull("Generated Column"));
        }
        if (isKey) {
            throw new ParsingException(PARSER_ERROR_MSG.isKey("Generated Column"));
        }
        expr = (Expr) visit(generatedColumnDescContext.expression());
    }
    String comment = context.comment() == null ? "" :
            ((StringLiteral) visit(context.comment().string())).getStringValue();
    return new ColumnDef(columnName, typeDef, charsetName, isKey, aggregateType, isAllowNull, defaultValueDef,
            isAutoIncrement, expr, comment, createPos(context));
}
/**
 * CREATE TEMPORARY TABLE <name> AS <query>. Gated behind an experimental config
 * flag; always modeled as a CTAS over an engine-default table with no explicit
 * column definitions.
 */
@Override
public ParseNode visitCreateTemporaryTableStatement(StarRocksParser.CreateTemporaryTableStatementContext context) {
    if (!Config.enable_experimental_temporary_table) {
        throw new SemanticException(
                "Temporary table feature is experimental and disabled by default, could be enabled through " +
                        ": admin set frontend config('enable_experimental_temporary_table' = 'true')");
    }
    CreateTableStmt createTableStmt = new CreateTableStmt(
            false,
            false,
            qualifiedNameToTableName(getQualifiedName(context.qualifiedName())),
            null,
            EngineType.defaultEngine().name(),
            null,
            null,
            null,
            new HashMap<>(),
            null,
            null);
    return new CreateTableAsSelectStmt(
            createTableStmt,
            null,
            (QueryStatement) visit(context.queryStatement()));
}
/**
 * CREATE TABLE [IF NOT EXISTS] <name> [(columns)] ... AS <query>.
 * Builds the embedded CreateTableStmt (column definitions come from the query)
 * and wraps it together with the query in a CreateTableAsSelectStmt.
 */
@Override
public ParseNode visitCreateTableAsSelectStatement(StarRocksParser.CreateTableAsSelectStatementContext context) {
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    PartitionDesc partitionDesc = null;
    if (context.partitionDesc() != null) {
        partitionDesc = (PartitionDesc) visit(context.partitionDesc());
        // A list-style partition clause without the LIST keyword means automatic partitioning.
        if (partitionDesc instanceof ListPartitionDesc && context.partitionDesc().LIST() == null) {
            ((ListPartitionDesc) partitionDesc).setAutoPartitionTable(true);
        }
    }
    CreateTableStmt createTableStmt = new CreateTableStmt(
            context.IF() != null,
            false,
            qualifiedNameToTableName(getQualifiedName(context.qualifiedName())),
            null,
            context.indexDesc() == null ? null : getIndexDefs(context.indexDesc()),
            "",
            null,
            context.keyDesc() == null ? null : getKeysDesc(context.keyDesc()),
            partitionDesc,
            context.distributionDesc() == null ? null : (DistributionDesc) visit(context.distributionDesc()),
            properties,
            null,
            context.comment() == null ? null :
                    ((StringLiteral) visit(context.comment().string())).getStringValue(),
            null,
            context.orderByDesc() == null ? null :
                    visit(context.orderByDesc().identifierList().identifier(), Identifier.class)
                            .stream().map(Identifier::getValue).collect(toList())
    );
    // Optional explicit target column names before AS.
    List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
    return new CreateTableAsSelectStmt(
            createTableStmt,
            columns == null ? null : columns.stream().map(Identifier::getValue).collect(toList()),
            (QueryStatement) visit(context.queryStatement()),
            createPos(context));
}
// CREATE TABLE [IF NOT EXISTS] <target> LIKE <source>, optionally overriding
// partitioning, distribution and properties.
@Override
public ParseNode visitCreateTableLikeStatement(StarRocksParser.CreateTableLikeStatementContext context) {
    PartitionDesc partitionDesc = context.partitionDesc() == null ? null :
            (PartitionDesc) visit(context.partitionDesc());
    DistributionDesc distributionDesc = context.distributionDesc() == null ? null :
            (DistributionDesc) visit(context.distributionDesc());
    Map<String, String> properties = getProperties(context.properties());
    return new CreateTableLikeStmt(context.IF() != null,
            qualifiedNameToTableName(getQualifiedName(context.qualifiedName(0))),
            qualifiedNameToTableName(getQualifiedName(context.qualifiedName(1))),
            partitionDesc, distributionDesc, properties,
            createPos(context));
}
// SHOW CREATE (TABLE | VIEW | MATERIALIZED VIEW) <name>.
// Branch order matters: MATERIALIZED VIEW must be tested before plain VIEW.
@Override
public ParseNode visitShowCreateTableStatement(StarRocksParser.ShowCreateTableStatementContext context) {
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName targetTableName = qualifiedNameToTableName(qualifiedName);
    NodePosition pos = createPos(context);
    if (context.MATERIALIZED() != null && context.VIEW() != null) {
        return new ShowCreateTableStmt(targetTableName, ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW, pos);
    }
    if (context.VIEW() != null) {
        return new ShowCreateTableStmt(targetTableName, ShowCreateTableStmt.CreateTableType.VIEW, pos);
    }
    return new ShowCreateTableStmt(targetTableName, ShowCreateTableStmt.CreateTableType.TABLE, pos);
}
// DROP TABLE [IF EXISTS] <name> [FORCE].
@Override
public ParseNode visitDropTableStatement(StarRocksParser.DropTableStatementContext context) {
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    boolean ifExists = context.IF() != null && context.EXISTS() != null;
    boolean force = context.FORCE() != null;
    return new DropTableStmt(ifExists, targetTableName, false, force, createPos(context));
}
// RECOVER TABLE <name>.
@Override
public ParseNode visitRecoverTableStatement(StarRocksParser.RecoverTableStatementContext context) {
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName tableName = qualifiedNameToTableName(qualifiedName);
    return new RecoverTableStmt(tableName, createPos(context));
}
// TRUNCATE TABLE <name> [PARTITION (...)]. The recorded position is widened to
// include the partition clause when present.
@Override
public ParseNode visitTruncateTableStatement(StarRocksParser.TruncateTableStatementContext context) {
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName targetTableName = qualifiedNameToTableName(qualifiedName);
    Token start = context.start;
    Token stop = context.stop;
    PartitionNames partitionNames = null;
    if (context.partitionNames() != null) {
        stop = context.partitionNames().stop;
        partitionNames = (PartitionNames) visit(context.partitionNames());
    }
    NodePosition pos = createPos(start, stop);
    return new TruncateTableStmt(new TableRef(targetTableName, null, partitionNames, pos));
}
/**
 * SHOW [FULL] TABLES [FROM [catalog.]db] [LIKE pattern | WHERE expr].
 */
@Override
public ParseNode visitShowTableStatement(StarRocksParser.ShowTableStatementContext context) {
    boolean isVerbose = context.FULL() != null;
    String database = null;
    String catalog = null;
    if (context.qualifiedName() != null) {
        // FROM clause is either "catalog.db" or just "db".
        List<String> parts = getQualifiedName(context.qualifiedName()).getParts();
        if (parts.size() == 2) {
            catalog = parts.get(0);
            database = parts.get(1);
        } else if (parts.size() == 1) {
            database = parts.get(0);
        }
    }
    NodePosition pos = createPos(context);
    if (context.pattern != null) {
        String pattern = ((StringLiteral) visit(context.pattern)).getValue();
        return new ShowTableStmt(database, isVerbose, pattern, null, catalog, pos);
    }
    if (context.expression() != null) {
        return new ShowTableStmt(database, isVerbose, null, (Expr) visit(context.expression()), catalog, pos);
    }
    return new ShowTableStmt(database, isVerbose, null, null, catalog, pos);
}
// DESC[RIBE] <table> [ALL].
@Override
public ParseNode visitDescTableStatement(StarRocksParser.DescTableStatementContext context) {
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName targetTableName = qualifiedNameToTableName(qualifiedName);
    return new DescribeStmt(targetTableName, context.ALL() != null, createPos(context));
}
/**
 * SHOW TABLE STATUS [FROM db] [LIKE pattern | WHERE expr].
 */
@Override
public ParseNode visitShowTableStatusStatement(StarRocksParser.ShowTableStatusStatementContext context) {
    QualifiedName dbName = null;
    // Guard on the same labeled child that is read. The original checked
    // context.qualifiedName() but then dereferenced context.db; guarding on
    // context.db directly matches visitShowColumnStatement and is null-safe.
    if (context.db != null) {
        dbName = getQualifiedName(context.db);
    }
    String pattern = null;
    if (context.pattern != null) {
        StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
        pattern = stringLiteral.getValue();
    }
    Expr where = null;
    if (context.expression() != null) {
        where = (Expr) visit(context.expression());
    }
    return new ShowTableStatusStmt(dbName == null ? null : dbName.toString(), pattern, where,
            createPos(context));
}
// SHOW [FULL] COLUMNS FROM <table> [FROM <db>] [LIKE pattern | WHERE expr].
@Override
public ParseNode visitShowColumnStatement(StarRocksParser.ShowColumnStatementContext context) {
    QualifiedName tableName = getQualifiedName(context.table);
    QualifiedName dbName = null;
    if (context.db != null) {
        dbName = getQualifiedName(context.db);
    }
    String pattern = null;
    if (context.pattern != null) {
        StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
        pattern = stringLiteral.getValue();
    }
    Expr where = null;
    if (context.expression() != null) {
        where = (Expr) visit(context.expression());
    }
    return new ShowColumnStmt(qualifiedNameToTableName(tableName),
            dbName == null ? null : dbName.toString(),
            pattern,
            context.FULL() != null,
            where, createPos(context));
}
// REFRESH TABLE <name> [PARTITION ('p1', 'p2', ...)].
@Override
public ParseNode visitRefreshTableStatement(StarRocksParser.RefreshTableStatementContext context) {
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    List<String> partitionNames = null;
    if (context.string() != null) {
        // Visit each partition-name literal, then unwrap to its string value.
        partitionNames = visit(context.string(), StringLiteral.class)
                .stream().map(StringLiteral::getStringValue).collect(toList());
    }
    return new RefreshTableStmt(targetTableName, partitionNames, createPos(context));
}
/**
 * ALTER TABLE <name> ...: either an ADD/DROP ROLLUP variant (translated to
 * rollup clauses) or a generic list of alter clauses.
 */
@Override
public ParseNode visitAlterTableStatement(StarRocksParser.AlterTableStatementContext context) {
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName targetTableName = qualifiedNameToTableName(qualifiedName);
    NodePosition pos = createPos(context);
    if (context.ROLLUP() != null) {
        if (context.ADD() != null) {
            List<AlterClause> clauses = context.rollupItem().stream().map(this::getRollup).collect(toList());
            return new AlterTableStmt(targetTableName, clauses, pos);
        } else {
            // DROP ROLLUP r1, r2, ...: one drop clause per rollup name.
            List<Identifier> rollupList = visit(context.identifier(), Identifier.class);
            List<AlterClause> clauses = new ArrayList<>();
            for (Identifier rollupName : rollupList) {
                clauses.add(new DropRollupClause(rollupName.getValue(), null, rollupName.getPos()));
            }
            return new AlterTableStmt(targetTableName, clauses, pos);
        }
    } else {
        List<AlterClause> alterClauses = visit(context.alterClause(), AlterClause.class);
        return new AlterTableStmt(targetTableName, alterClauses, pos);
    }
}
/**
 * CANCEL ALTER TABLE (COLUMN | ROLLUP | MATERIALIZED VIEW | OPTIMIZE)
 * FROM <table> [(job_id, ...)].
 */
@Override
public ParseNode visitCancelAlterTableStatement(StarRocksParser.CancelAlterTableStatementContext context) {
    ShowAlterStmt.AlterType alterType;
    if (context.ROLLUP() != null) {
        alterType = ShowAlterStmt.AlterType.ROLLUP;
    } else if (context.MATERIALIZED() != null && context.VIEW() != null) {
        alterType = ShowAlterStmt.AlterType.MATERIALIZED_VIEW;
    } else if (context.OPTIMIZE() != null) {
        alterType = ShowAlterStmt.AlterType.OPTIMIZE;
    } else {
        // Default: a schema-change (column) job.
        alterType = ShowAlterStmt.AlterType.COLUMN;
    }
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName dbTableName = qualifiedNameToTableName(qualifiedName);
    List<Long> alterJobIdList = null;
    if (context.INTEGER_VALUE() != null) {
        alterJobIdList = context.INTEGER_VALUE()
                .stream().map(ParseTree::getText).map(Long::parseLong).collect(toList());
    }
    return new CancelAlterTableStmt(alterType, dbTableName, alterJobIdList, createPos(context));
}
/**
 * SHOW ALTER TABLE (COLUMN | ROLLUP | MATERIALIZED VIEW | OPTIMIZE)
 * [FROM db] [WHERE expr] [ORDER BY ...] [LIMIT ...].
 */
@Override
public ParseNode visitShowAlterStatement(StarRocksParser.ShowAlterStatementContext context) {
    QualifiedName dbName = null;
    if (context.db != null) {
        dbName = getQualifiedName(context.db);
    }
    Expr where = null;
    if (context.expression() != null) {
        where = (Expr) visit(context.expression());
    }
    // MATERIALIZED VIEW must be tested before the bare-keyword alternatives.
    ShowAlterStmt.AlterType alterType;
    if (context.ROLLUP() != null) {
        alterType = ShowAlterStmt.AlterType.ROLLUP;
    } else if (context.MATERIALIZED() != null && context.VIEW() != null) {
        alterType = ShowAlterStmt.AlterType.MATERIALIZED_VIEW;
    } else if (context.OPTIMIZE() != null) {
        alterType = ShowAlterStmt.AlterType.OPTIMIZE;
    } else {
        alterType = ShowAlterStmt.AlterType.COLUMN;
    }
    List<OrderByElement> orderByElements = null;
    if (context.ORDER() != null) {
        orderByElements = new ArrayList<>();
        orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limitElement = null;
    if (context.limitElement() != null) {
        limitElement = (LimitElement) visit(context.limitElement());
    }
    return new ShowAlterStmt(alterType, dbName == null ? null : dbName.toString(), where, orderByElements,
            limitElement, createPos(context));
}
@Override
public ParseNode visitCreateViewStatement(StarRocksParser.CreateViewStatementContext context) {
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
List<ColWithComment> colWithComments = null;
if (context.columnNameWithComment().size() > 0) {
colWithComments = visit(context.columnNameWithComment(), ColWithComment.class);
}
if (context.IF() != null && context.REPLACE() != null) {
throw new ParsingException(PARSER_ERROR_MSG.conflictedOptions("if not exists", "or replace"),
createPos(context));
}
return new CreateViewStmt(
context.IF() != null,
context.REPLACE() != null,
targetTableName,
colWithComments,
context.comment() == null ? null : ((StringLiteral) visit(context.comment())).getStringValue(),
(QueryStatement) visit(context.queryStatement()), createPos(context));
}
@Override
public ParseNode visitAlterViewStatement(StarRocksParser.AlterViewStatementContext context) {
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
List<ColWithComment> colWithComments = null;
if (context.columnNameWithComment().size() > 0) {
colWithComments = visit(context.columnNameWithComment(), ColWithComment.class);
}
QueryStatement queryStatement = (QueryStatement) visit(context.queryStatement());
AlterClause alterClause = new AlterViewClause(colWithComments, queryStatement, createPos(context));
return new AlterViewStmt(targetTableName, alterClause, createPos(context));
}
@Override
public ParseNode visitDropViewStatement(StarRocksParser.DropViewStatementContext context) {
boolean ifExists = context.IF() != null && context.EXISTS() != null;
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
return new DropTableStmt(ifExists, targetTableName, true, false, createPos(context));
}
@Override
public ParseNode visitShowPartitionsStatement(StarRocksParser.ShowPartitionsStatementContext context) {
boolean temp = context.TEMPORARY() != null;
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName tableName = qualifiedNameToTableName(qualifiedName);
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
List<OrderByElement> orderByElements = new ArrayList<>();
if (context.ORDER() != null) {
orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
}
LimitElement limitElement = null;
if (context.limitElement() != null) {
limitElement = (LimitElement) visit(context.limitElement());
}
return new ShowPartitionsStmt(tableName, where, orderByElements, limitElement, temp, createPos(context));
}
@Override
public ParseNode visitRecoverPartitionStatement(StarRocksParser.RecoverPartitionStatementContext context) {
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName tableName = qualifiedNameToTableName(qualifiedName);
String partitionName = ((Identifier) visit(context.identifier())).getValue();
return new RecoverPartitionStmt(tableName, partitionName, createPos(context));
}
@Override
public ParseNode visitShowTabletStatement(StarRocksParser.ShowTabletStatementContext context) {
NodePosition pos = createPos(context);
if (context.INTEGER_VALUE() != null) {
return new ShowTabletStmt(null, Long.parseLong(context.INTEGER_VALUE().getText()), pos);
} else {
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName dbTblName = qualifiedNameToTableName(qualifiedName);
PartitionNames partitionNames = null;
if (context.partitionNames() != null) {
partitionNames = (PartitionNames) visit(context.partitionNames());
}
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
List<OrderByElement> orderByElements = null;
if (context.ORDER() != null) {
orderByElements = new ArrayList<>();
orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
}
LimitElement limitElement = null;
if (context.limitElement() != null) {
limitElement = (LimitElement) visit(context.limitElement());
}
return new ShowTabletStmt(dbTblName, -1L, partitionNames, where, orderByElements, limitElement,
createPos(context));
}
}
    @Override
    public ParseNode visitCreateIndexStatement(StarRocksParser.CreateIndexStatementContext context) {
        // CREATE INDEX idx ON tbl (cols) [USING type] [COMMENT ...] — lowered to an
        // ALTER TABLE ... ADD INDEX clause.
        String indexName = ((Identifier) visit(context.identifier())).getValue();
        List<Identifier> columnList = visit(context.identifierList().identifier(), Identifier.class);
        // The index definition's source span runs from the index name to the column
        // list, and is extended to cover the comment when one is present.
        Token idxStart = context.identifier().start;
        Token idxStop = context.identifierList().stop;
        String comment = null;
        if (context.comment() != null) {
            comment = ((StringLiteral) visit(context.comment())).getStringValue();
            idxStop = context.comment().stop;
        }
        NodePosition idxPos = createPos(idxStart, idxStop);
        IndexDef indexDef = new IndexDef(indexName,
                columnList.stream().map(Identifier::getValue).collect(toList()),
                getIndexType(context.indexType()),
                comment, getPropertyList(context.propertyList()), idxPos);
        CreateIndexClause createIndexClause = new CreateIndexClause(indexDef, idxPos);
        QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
        TableName targetTableName = qualifiedNameToTableName(qualifiedName);
        return new AlterTableStmt(targetTableName, Lists.newArrayList(createIndexClause), createPos(context));
    }
@Override
public ParseNode visitDropIndexStatement(StarRocksParser.DropIndexStatementContext context) {
Identifier identifier = (Identifier) visit(context.identifier());
DropIndexClause dropIndexClause = new DropIndexClause(identifier.getValue(),
createPos(context.identifier()));
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
return new AlterTableStmt(targetTableName, Lists.newArrayList(dropIndexClause), createPos(context));
}
@Override
public ParseNode visitShowIndexStatement(StarRocksParser.ShowIndexStatementContext context) {
QualifiedName tableName = getQualifiedName(context.table);
QualifiedName dbName = null;
if (context.db != null) {
dbName = getQualifiedName(context.db);
}
return new ShowIndexStmt(dbName == null ? null : dbName.toString(),
qualifiedNameToTableName(tableName), createPos(context));
}
private Map<String, String> buildProperties(StarRocksParser.PropertiesContext properties) {
Map<String, String> result = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
if (properties != null) {
List<Property> propertyList = visit(properties.property(), Property.class);
for (Property property : ListUtils.emptyIfNull(propertyList)) {
result.put(property.getKey(), property.getValue());
}
}
return result;
}
    @Override
    public ParseNode visitSubmitTaskStatement(StarRocksParser.SubmitTaskStatementContext context) {
        // SUBMIT TASK [name] [PROPERTIES ...] AS <CTAS | INSERT>.
        QualifiedName qualifiedName = null;
        if (context.qualifiedName() != null) {
            qualifiedName = getQualifiedName(context.qualifiedName());
        }
        // Explicit PROPERTIES entries are merged with SET_VAR hint values.
        Map<String, String> properties = buildProperties(context.properties());
        properties.putAll(extractVarHintValues(hintMap.get(context)));
        // Exactly one of CTAS / INSERT is expected below; the startIndex computation
        // assumes one is present -- presumably guaranteed by the grammar (confirm).
        CreateTableAsSelectStmt createTableAsSelectStmt = null;
        InsertStmt insertStmt = null;
        if (context.createTableAsSelectStatement() != null) {
            createTableAsSelectStmt = (CreateTableAsSelectStmt) visit(context.createTableAsSelectStatement());
        } else if (context.insertStatement() != null) {
            insertStmt = (InsertStmt) visit(context.insertStatement());
        }
        // Character offset of the embedded statement so its definition text can be
        // re-extracted from the original SQL later.
        int startIndex = 0;
        if (createTableAsSelectStmt != null) {
            startIndex = context.createTableAsSelectStatement().start.getStartIndex();
        } else {
            startIndex = context.insertStatement().start.getStartIndex();
        }
        NodePosition pos = createPos(context);
        TaskName taskName;
        if (qualifiedName == null) {
            // No task name supplied.
            taskName = new TaskName(null, null, pos);
        } else {
            taskName = qualifiedNameToTaskName(qualifiedName);
        }
        if (createTableAsSelectStmt != null) {
            return new SubmitTaskStmt(taskName, properties, startIndex, createTableAsSelectStmt, pos);
        } else {
            return new SubmitTaskStmt(taskName, properties, startIndex, insertStmt, pos);
        }
    }
@Override
public ParseNode visitDropTaskStatement(StarRocksParser.DropTaskStatementContext context) {
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TaskName taskName = qualifiedNameToTaskName(qualifiedName);
boolean force = context.FORCE() != null;
return new DropTaskStmt(taskName, force, createPos(context));
}
public static final ImmutableList<String> MATERIALIZEDVIEW_REFRESHSCHEME_SUPPORT_UNIT_IDENTIFIERS =
new ImmutableList.Builder<String>()
.add("SECOND").add("MINUTE").add("HOUR").add("DAY")
.build();
private void checkMaterializedViewAsyncRefreshSchemeUnitIdentifier(
AsyncRefreshSchemeDesc asyncRefreshSchemeDesc) {
if (asyncRefreshSchemeDesc.getIntervalLiteral() == null ||
asyncRefreshSchemeDesc.getIntervalLiteral().getUnitIdentifier() == null) {
return;
}
String unit = asyncRefreshSchemeDesc.getIntervalLiteral().getUnitIdentifier().getDescription();
if (StringUtils.isEmpty(unit)) {
return;
}
if (!MATERIALIZEDVIEW_REFRESHSCHEME_SUPPORT_UNIT_IDENTIFIERS.contains(unit)) {
throw new ParsingException(PARSER_ERROR_MSG.forbidClauseInMV("Refresh interval unit", unit),
asyncRefreshSchemeDesc.getIntervalLiteral().getUnitIdentifier().getPos());
}
}
    @Override
    public ParseNode visitCreateMaterializedViewStatement(
            StarRocksParser.CreateMaterializedViewStatementContext context) {
        // CREATE MATERIALIZED VIEW [IF NOT EXISTS] mv [(cols)] [COMMENT ...]
        // [desc clauses] AS query.  Produces a sync CreateMaterializedViewStmt or an
        // async CreateMaterializedViewStatement depending on the refresh scheme.
        boolean ifNotExist = context.IF() != null;
        QualifiedName qualifiedName = getQualifiedName(context.mvName);
        TableName tableName = qualifiedNameToTableName(qualifiedName);
        List<ColWithComment> colWithComments = null;
        if (!context.columnNameWithComment().isEmpty()) {
            colWithComments = visit(context.columnNameWithComment(), ColWithComment.class);
        }
        String comment =
                context.comment() == null ? null : ((StringLiteral) visit(context.comment().string())).getStringValue();
        QueryStatement queryStatement = (QueryStatement) visit(context.queryStatement());
        RefreshSchemeClause refreshSchemeDesc = null;
        Map<String, String> properties = new HashMap<>();
        ExpressionPartitionDesc expressionPartitionDesc = null;
        DistributionDesc distributionDesc = null;
        List<String> sortKeys = null;
        // Collect the optional clauses; duplicate PROPERTIES/REFRESH/PARTITION/
        // DISTRIBUTION clauses are rejected (ORDER BY has no duplicate check).
        for (StarRocksParser.MaterializedViewDescContext desc : ListUtils.emptyIfNull(context.materializedViewDesc())) {
            NodePosition clausePos = createPos(desc);
            if (desc.properties() != null) {
                if (MapUtils.isNotEmpty(properties)) {
                    throw new ParsingException(PARSER_ERROR_MSG.duplicatedClause("PROPERTY"), clausePos);
                }
                List<Property> propertyList = visit(desc.properties().property(), Property.class);
                for (Property property : propertyList) {
                    properties.put(property.getKey(), property.getValue());
                }
            }
            if (desc.refreshSchemeDesc() != null) {
                if (refreshSchemeDesc != null) {
                    throw new ParsingException(PARSER_ERROR_MSG.duplicatedClause("REFRESH"), clausePos);
                }
                refreshSchemeDesc = ((RefreshSchemeClause) visit(desc.refreshSchemeDesc()));
            }
            if (desc.primaryExpression() != null) {
                if (expressionPartitionDesc != null) {
                    throw new ParsingException(PARSER_ERROR_MSG.duplicatedClause("PARTITION"), clausePos);
                }
                // PARTITION BY accepts a bare column reference or a function call
                // whose partition column is validated by AnalyzerUtils.
                Expr expr = (Expr) visit(desc.primaryExpression());
                if (expr instanceof SlotRef) {
                    expressionPartitionDesc = new ExpressionPartitionDesc(expr);
                } else if (expr instanceof FunctionCallExpr) {
                    AnalyzerUtils.checkAndExtractPartitionCol((FunctionCallExpr) expr, null);
                    expressionPartitionDesc = new ExpressionPartitionDesc(expr);
                } else {
                    throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(), "PARTITION BY"),
                            expr.getPos());
                }
            }
            if (desc.distributionDesc() != null) {
                if (distributionDesc != null) {
                    throw new ParsingException(PARSER_ERROR_MSG.duplicatedClause("DISTRIBUTION"), clausePos);
                }
                distributionDesc = (DistributionDesc) visit(desc.distributionDesc());
            }
            if (desc.orderByDesc() != null) {
                sortKeys = visit(desc.orderByDesc().identifierList().identifier(), Identifier.class)
                        .stream().map(Identifier::getValue).collect(toList());
            }
        }
        // Default refresh scheme: SYNC when no distribution is given, MANUAL otherwise.
        if (refreshSchemeDesc == null) {
            if (distributionDesc == null) {
                refreshSchemeDesc = new SyncRefreshSchemeDesc();
            } else {
                refreshSchemeDesc =
                        new ManualRefreshSchemeDesc(MaterializedView.RefreshMoment.IMMEDIATE, NodePosition.ZERO);
            }
        }
        if (refreshSchemeDesc instanceof SyncRefreshSchemeDesc) {
            // SYNC materialized views allow neither PARTITION BY nor DISTRIBUTED BY.
            if (expressionPartitionDesc != null) {
                throw new ParsingException(PARSER_ERROR_MSG.forbidClauseInMV("SYNC refresh type", "PARTITION BY"),
                        expressionPartitionDesc.getPos());
            }
            if (distributionDesc != null) {
                throw new ParsingException(PARSER_ERROR_MSG.forbidClauseInMV("SYNC refresh type", "DISTRIBUTION BY"),
                        distributionDesc.getPos());
            }
            return new CreateMaterializedViewStmt(tableName, queryStatement, properties);
        }
        if (refreshSchemeDesc instanceof AsyncRefreshSchemeDesc) {
            // Async schemes must use a supported interval unit.
            AsyncRefreshSchemeDesc asyncRefreshSchemeDesc = (AsyncRefreshSchemeDesc) refreshSchemeDesc;
            checkMaterializedViewAsyncRefreshSchemeUnitIdentifier(asyncRefreshSchemeDesc);
        }
        // Non-sync MVs are gated behind the enable_experimental_mv FE config.
        if (!Config.enable_experimental_mv) {
            throw new ParsingException(PARSER_ERROR_MSG.feConfigDisable("enable_experimental_mv"), NodePosition.ZERO);
        }
        return new CreateMaterializedViewStatement(tableName, ifNotExist, colWithComments,
                context.indexDesc() == null ? null : getIndexDefs(context.indexDesc()),
                comment,
                refreshSchemeDesc,
                expressionPartitionDesc, distributionDesc, sortKeys, properties, queryStatement, createPos(context));
    }
@Override
public ParseNode visitShowMaterializedViewsStatement(
StarRocksParser.ShowMaterializedViewsStatementContext context) {
String database = null;
NodePosition pos = createPos(context);
if (context.qualifiedName() != null) {
database = getQualifiedName(context.qualifiedName()).toString();
}
if (context.pattern != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
return new ShowMaterializedViewsStmt(database, stringLiteral.getValue(), null, pos);
} else if (context.expression() != null) {
return new ShowMaterializedViewsStmt(database, null, (Expr) visit(context.expression()), pos);
} else {
return new ShowMaterializedViewsStmt(database, null, null, pos);
}
}
@Override
public ParseNode visitDropMaterializedViewStatement(StarRocksParser.DropMaterializedViewStatementContext context) {
QualifiedName mvQualifiedName = getQualifiedName(context.qualifiedName());
TableName mvName = qualifiedNameToTableName(mvQualifiedName);
return new DropMaterializedViewStmt(context.IF() != null, mvName, createPos(context));
}
    @Override
    public ParseNode visitAlterMaterializedViewStatement(
            StarRocksParser.AlterMaterializedViewStatementContext context) {
        // ALTER MATERIALIZED VIEW mv <rename | refresh scheme | properties | status | swap>.
        QualifiedName mvQualifiedName = getQualifiedName(context.qualifiedName());
        TableName mvName = qualifiedNameToTableName(mvQualifiedName);
        // Each branch below overwrites alterTableClause; presumably the grammar only
        // allows one alternative per statement -- TODO confirm.
        AlterTableClause alterTableClause = null;
        if (context.tableRenameClause() != null) {
            alterTableClause = (TableRenameClause) visit(context.tableRenameClause());
        }
        if (context.refreshSchemeDesc() != null) {
            alterTableClause = ((RefreshSchemeClause) visit(context.refreshSchemeDesc()));
            if (alterTableClause instanceof AsyncRefreshSchemeDesc) {
                // Async schemes must use a supported interval unit.
                AsyncRefreshSchemeDesc asyncRefreshSchemeDesc = (AsyncRefreshSchemeDesc) alterTableClause;
                checkMaterializedViewAsyncRefreshSchemeUnitIdentifier(asyncRefreshSchemeDesc);
            }
        }
        if (context.modifyPropertiesClause() != null) {
            alterTableClause = (ModifyTablePropertiesClause) visit(context.modifyPropertiesClause());
        }
        if (context.statusDesc() != null) {
            // ACTIVE/INACTIVE status is carried as the clause's raw text.
            String status = context.statusDesc().getText();
            alterTableClause = new AlterMaterializedViewStatusClause(status, createPos(context));
        }
        if (context.swapTableClause() != null) {
            alterTableClause = (SwapTableClause) visit(context.swapTableClause());
        }
        return new AlterMaterializedViewStmt(mvName, alterTableClause, createPos(context));
    }
@Override
public ParseNode visitRefreshMaterializedViewStatement(
StarRocksParser.RefreshMaterializedViewStatementContext context) {
QualifiedName mvQualifiedName = getQualifiedName(context.qualifiedName());
TableName mvName = qualifiedNameToTableName(mvQualifiedName);
PartitionRangeDesc partitionRangeDesc = null;
if (context.partitionRangeDesc() != null) {
partitionRangeDesc =
(PartitionRangeDesc) visit(context.partitionRangeDesc());
}
return new RefreshMaterializedViewStatement(mvName, partitionRangeDesc, context.FORCE() != null,
context.SYNC() != null,
createPos(context));
}
@Override
public ParseNode visitCancelRefreshMaterializedViewStatement(
StarRocksParser.CancelRefreshMaterializedViewStatementContext context) {
QualifiedName mvQualifiedName = getQualifiedName(context.qualifiedName());
TableName mvName = qualifiedNameToTableName(mvQualifiedName);
return new CancelRefreshMaterializedViewStmt(mvName, createPos(context));
}
@Override
public ParseNode visitCreateExternalCatalogStatement(
StarRocksParser.CreateExternalCatalogStatementContext context) {
Identifier identifier = (Identifier) visit(context.identifierOrString());
String catalogName = identifier.getValue();
String comment = null;
if (context.comment() != null) {
comment = ((StringLiteral) visit(context.comment())).getStringValue();
}
Map<String, String> properties = new HashMap<>();
if (context.properties() != null) {
List<Property> propertyList = visit(context.properties().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return new CreateCatalogStmt(catalogName, comment, properties, createPos(context));
}
@Override
public ParseNode visitDropExternalCatalogStatement(StarRocksParser.DropExternalCatalogStatementContext context) {
Identifier identifier = (Identifier) visit(context.catalogName);
String catalogName = identifier.getValue();
return new DropCatalogStmt(catalogName, createPos(context));
}
@Override
public ParseNode visitShowCreateExternalCatalogStatement(
StarRocksParser.ShowCreateExternalCatalogStatementContext context) {
Identifier identifier = (Identifier) visit(context.catalogName);
String catalogName = identifier.getValue();
return new ShowCreateExternalCatalogStmt(catalogName, createPos(context));
}
@Override
public ParseNode visitShowCatalogsStatement(StarRocksParser.ShowCatalogsStatementContext context) {
return new ShowCatalogsStmt(createPos(context));
}
@Override
public ParseNode visitAlterCatalogStatement(StarRocksParser.AlterCatalogStatementContext context) {
String catalogName = ((Identifier) visit(context.catalogName)).getValue();
AlterClause alterClause = (AlterClause) visit(context.modifyPropertiesClause());
return new AlterCatalogStmt(catalogName, alterClause, createPos(context));
}
@Override
public ParseNode visitShowWarehousesStatement(StarRocksParser.ShowWarehousesStatementContext context) {
String pattern = null;
if (context.pattern != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
pattern = stringLiteral.getValue();
}
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
return new ShowWarehousesStmt(pattern, where, createPos(context));
}
    @Override
    public ParseNode visitInsertStatement(StarRocksParser.InsertStatementContext context) {
        // INSERT [OVERWRITE] INTO <named table | BLACKHOLE() | table function>, with
        // either a VALUES list or a query as the source.
        QueryStatement queryStatement;
        if (context.VALUES() != null) {
            // VALUES rows become a ValuesRelation with synthetic column names
            // column_0..column_n; rows.get(0) assumes at least one row -- presumably
            // guaranteed by the grammar (confirm).
            List<ValueList> rowValues = visit(context.expressionsWithDefault(), ValueList.class);
            List<List<Expr>> rows = rowValues.stream().map(ValueList::getRow).collect(toList());
            List<String> colNames = new ArrayList<>();
            for (int i = 0; i < rows.get(0).size(); ++i) {
                colNames.add("column_" + i);
            }
            queryStatement = new QueryStatement(new ValuesRelation(rows, colNames,
                    createPos(context.VALUES().getSymbol(), context.stop)));
        } else {
            queryStatement = (QueryStatement) visit(context.queryStatement());
        }
        if (context.explainDesc() != null) {
            queryStatement.setIsExplain(true, getExplainType(context.explainDesc()));
        }
        if (context.qualifiedName() != null) {
            // Ordinary INSERT into a named table.
            QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
            TableName targetTableName = qualifiedNameToTableName(qualifiedName);
            PartitionNames partitionNames = null;
            if (context.partitionNames() != null) {
                partitionNames = (PartitionNames) visit(context.partitionNames());
            }
            InsertStmt stmt = new InsertStmt(targetTableName, partitionNames,
                    context.label == null ? null : ((Identifier) visit(context.label)).getValue(),
                    getColumnNames(context.columnAliases()), queryStatement, context.OVERWRITE() != null,
                    createPos(context));
            stmt.setHintNodes(hintMap.get(context));
            return stmt;
        }
        if (context.BLACKHOLE() != null) {
            // INSERT INTO BLACKHOLE(): result is discarded.
            return new InsertStmt(queryStatement, createPos(context));
        }
        // Remaining form: target described by a table-function property list.
        Map<String, String> tableFunctionProperties = getPropertyList(context.propertyList());
        InsertStmt res = new InsertStmt(tableFunctionProperties, queryStatement, createPos(context));
        res.setHintNodes(hintMap.get(context));
        return res;
    }
    @Override
    public ParseNode visitUpdateStatement(StarRocksParser.UpdateStatementContext context) {
        // UPDATE tbl SET col = expr, ... [FROM relations] [WHERE ...], optionally with
        // a WITH clause and an EXPLAIN prefix.
        QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
        TableName targetTableName = qualifiedNameToTableName(qualifiedName);
        List<ColumnAssignment> assignments = visit(context.assignmentList().assignment(), ColumnAssignment.class);
        List<Relation> fromRelations = null;
        if (context.fromClause() instanceof StarRocksParser.DualContext) {
            // FROM DUAL: substitute a single dummy-row relation.
            ValuesRelation valuesRelation = ValuesRelation.newDualRelation(createPos(context.fromClause()));
            fromRelations = Lists.newArrayList(valuesRelation);
        } else {
            StarRocksParser.FromContext fromContext = (StarRocksParser.FromContext) context.fromClause();
            if (fromContext.relations() != null) {
                fromRelations = visit(fromContext.relations().relation(), Relation.class);
            }
        }
        Expr where = context.where != null ? (Expr) visit(context.where) : null;
        List<CTERelation> ctes = null;
        if (context.withClause() != null) {
            ctes = visit(context.withClause().commonTableExpression(), CTERelation.class);
        }
        UpdateStmt ret = new UpdateStmt(targetTableName, assignments, fromRelations, where, ctes, createPos(context));
        if (context.explainDesc() != null) {
            ret.setIsExplain(true, getExplainType(context.explainDesc()));
            // EXPLAIN ANALYZE is rejected for UPDATE.
            if (StatementBase.ExplainLevel.ANALYZE.equals(ret.getExplainLevel())) {
                throw new ParsingException(PARSER_ERROR_MSG.unsupportedOp("analyze"));
            }
        }
        ret.setHintNodes(hintMap.get(context));
        return ret;
    }
@Override
public ParseNode visitDeleteStatement(StarRocksParser.DeleteStatementContext context) {
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
PartitionNames partitionNames = null;
if (context.partitionNames() != null) {
partitionNames = (PartitionNames) visit(context.partitionNames());
}
List<Relation> usingRelations = context.using != null ? visit(context.using.relation(), Relation.class) : null;
Expr where = context.where != null ? (Expr) visit(context.where) : null;
List<CTERelation> ctes = null;
if (context.withClause() != null) {
ctes = visit(context.withClause().commonTableExpression(), CTERelation.class);
}
DeleteStmt ret =
new DeleteStmt(targetTableName, partitionNames, usingRelations, where, ctes, createPos(context));
if (context.explainDesc() != null) {
ret.setIsExplain(true, getExplainType(context.explainDesc()));
if (StatementBase.ExplainLevel.ANALYZE.equals(ret.getExplainLevel())) {
throw new ParsingException(PARSER_ERROR_MSG.unsupportedOp("analyze"));
}
}
ret.setHintNodes(hintMap.get(context));
return ret;
}
@Override
public ParseNode visitCreateRoutineLoadStatement(StarRocksParser.CreateRoutineLoadStatementContext context) {
QualifiedName tableName = null;
if (context.table != null) {
tableName = getQualifiedName(context.table);
}
List<StarRocksParser.LoadPropertiesContext> loadPropertiesContexts = context.loadProperties();
List<ParseNode> loadPropertyList = getLoadPropertyList(loadPropertiesContexts);
String typeName = context.source.getText();
Map<String, String> jobProperties = getJobProperties(context.jobProperties());
Map<String, String> dataSourceProperties = getDataSourceProperties(context.dataSourceProperties());
return new CreateRoutineLoadStmt(createLabelName(context.db, context.name),
tableName == null ? null : tableName.toString(), loadPropertyList, jobProperties, typeName,
dataSourceProperties, createPos(context));
}
    @Override
    public ParseNode visitShowCreateRoutineLoadStatement(
            StarRocksParser.ShowCreateRoutineLoadStatementContext context) {
        // SHOW CREATE ROUTINE LOAD [db.]name
        // NOTE(review): unlike sibling visitors this does not pass createPos(context);
        // confirm whether ShowCreateRoutineLoadStmt tracks a source position.
        return new ShowCreateRoutineLoadStmt(createLabelName(context.db, context.name));
    }
@Override
public ParseNode visitAlterRoutineLoadStatement(StarRocksParser.AlterRoutineLoadStatementContext context) {
NodePosition pos = createPos(context);
List<StarRocksParser.LoadPropertiesContext> loadPropertiesContexts = context.loadProperties();
List<ParseNode> loadPropertyList = getLoadPropertyList(loadPropertiesContexts);
Map<String, String> jobProperties = getJobProperties(context.jobProperties());
if (context.dataSource() != null) {
String typeName = context.dataSource().source.getText();
Map<String, String> dataSourceProperties =
getDataSourceProperties(context.dataSource().dataSourceProperties());
RoutineLoadDataSourceProperties dataSource =
new RoutineLoadDataSourceProperties(typeName, dataSourceProperties,
createPos(context.dataSource()));
return new AlterRoutineLoadStmt(createLabelName(context.db, context.name),
loadPropertyList, jobProperties, dataSource, pos);
}
return new AlterRoutineLoadStmt(createLabelName(context.db, context.name), loadPropertyList, jobProperties,
new RoutineLoadDataSourceProperties(), pos);
}
@Override
public ParseNode visitAlterLoadStatement(StarRocksParser.AlterLoadStatementContext context) {
Map<String, String> jobProperties = getJobProperties(context.jobProperties());
return new AlterLoadStmt(createLabelName(context.db, context.name), jobProperties, createPos(context));
}
@Override
public ParseNode visitStopRoutineLoadStatement(StarRocksParser.StopRoutineLoadStatementContext context) {
return new StopRoutineLoadStmt(createLabelName(context.db, context.name), createPos(context));
}
@Override
public ParseNode visitResumeRoutineLoadStatement(StarRocksParser.ResumeRoutineLoadStatementContext context) {
return new ResumeRoutineLoadStmt(createLabelName(context.db, context.name), createPos(context));
}
@Override
public ParseNode visitPauseRoutineLoadStatement(StarRocksParser.PauseRoutineLoadStatementContext context) {
return new PauseRoutineLoadStmt(createLabelName(context.db, context.name), createPos(context));
}
@Override
public ParseNode visitShowRoutineLoadStatement(StarRocksParser.ShowRoutineLoadStatementContext context) {
boolean isVerbose = context.ALL() != null;
String database = null;
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
List<OrderByElement> orderByElements = null;
if (context.ORDER() != null) {
orderByElements = new ArrayList<>();
orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
}
LimitElement limitElement = null;
if (context.limitElement() != null) {
limitElement = (LimitElement) visit(context.limitElement());
}
return new ShowRoutineLoadStmt(createLabelName(context.db, context.name), isVerbose, where, orderByElements,
limitElement, createPos(context));
}
@Override
public ParseNode visitShowRoutineLoadTaskStatement(StarRocksParser.ShowRoutineLoadTaskStatementContext context) {
QualifiedName dbName = null;
if (context.db != null) {
dbName = getQualifiedName(context.db);
}
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
return new ShowRoutineLoadTaskStmt(dbName == null ? null : dbName.toString(), where, createPos(context));
}
@Override
public ParseNode visitShowStreamLoadStatement(StarRocksParser.ShowStreamLoadStatementContext context) {
boolean isVerbose = context.ALL() != null;
String database = null;
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
List<OrderByElement> orderByElements = null;
if (context.ORDER() != null) {
orderByElements = new ArrayList<>();
orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
}
LimitElement limitElement = null;
if (context.limitElement() != null) {
limitElement = (LimitElement) visit(context.limitElement());
}
return new ShowStreamLoadStmt(createLabelName(context.db, context.name), isVerbose, where, orderByElements,
limitElement, createPos(context));
}
@Override
public ParseNode visitAdminSetConfigStatement(StarRocksParser.AdminSetConfigStatementContext context) {
Property config = (Property) visitProperty(context.property());
return new AdminSetConfigStmt(AdminSetConfigStmt.ConfigType.FRONTEND, config, createPos(context));
}
@Override
public ParseNode visitAdminSetReplicaStatusStatement(
StarRocksParser.AdminSetReplicaStatusStatementContext context) {
List<Property> propertyList = visit(context.properties().property(), Property.class);
return new AdminSetReplicaStatusStmt(new PropertySet(propertyList, createPos(context.properties())),
createPos(context));
}
@Override
public ParseNode visitAdminShowConfigStatement(StarRocksParser.AdminShowConfigStatementContext context) {
NodePosition pos = createPos(context);
if (context.pattern != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
return new AdminShowConfigStmt(AdminSetConfigStmt.ConfigType.FRONTEND, stringLiteral.getValue(), pos);
}
return new AdminShowConfigStmt(AdminSetConfigStmt.ConfigType.FRONTEND, null, pos);
}
@Override
public ParseNode visitAdminShowReplicaDistributionStatement(
StarRocksParser.AdminShowReplicaDistributionStatementContext context) {
Token start = context.qualifiedName().start;
Token stop = context.qualifiedName().stop;
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
PartitionNames partitionNames = null;
if (context.partitionNames() != null) {
stop = context.partitionNames().stop;
partitionNames = (PartitionNames) visit(context.partitionNames());
}
return new AdminShowReplicaDistributionStmt(new TableRef(targetTableName, null,
partitionNames, createPos(start, stop)),
createPos(context));
}
@Override
public ParseNode visitAdminShowReplicaStatusStatement(
StarRocksParser.AdminShowReplicaStatusStatementContext context) {
Token start = context.qualifiedName().start;
Token stop = context.qualifiedName().stop;
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
Expr where = context.where != null ? (Expr) visit(context.where) : null;
PartitionNames partitionNames = null;
if (context.partitionNames() != null) {
stop = context.partitionNames().stop;
partitionNames = (PartitionNames) visit(context.partitionNames());
}
return new AdminShowReplicaStatusStmt(
new TableRef(targetTableName, null, partitionNames, createPos(start, stop)),
where,
createPos(context));
}
@Override
public ParseNode visitAdminRepairTableStatement(StarRocksParser.AdminRepairTableStatementContext context) {
Token start = context.qualifiedName().start;
Token stop = context.qualifiedName().stop;
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
PartitionNames partitionNames = null;
if (context.partitionNames() != null) {
stop = context.partitionNames().stop;
partitionNames = (PartitionNames) visit(context.partitionNames());
}
return new AdminRepairTableStmt(new TableRef(targetTableName, null, partitionNames, createPos(start, stop)),
createPos(context));
}
@Override
public ParseNode visitAdminCancelRepairTableStatement(
StarRocksParser.AdminCancelRepairTableStatementContext context) {
Token start = context.qualifiedName().start;
Token stop = context.qualifiedName().stop;
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
PartitionNames partitionNames = null;
if (context.partitionNames() != null) {
stop = context.partitionNames().stop;
partitionNames = (PartitionNames) visit(context.partitionNames());
}
return new AdminCancelRepairTableStmt(
new TableRef(targetTableName, null, partitionNames, createPos(start, stop)),
createPos(context));
}
    @Override
    public ParseNode visitAdminCheckTabletsStatement(StarRocksParser.AdminCheckTabletsStatementContext context) {
        // ADMIN CHECK TABLET (id [, id ...]) PROPERTIES (...) — tablet list is optional.
        List<Long> tabletIds = Lists.newArrayList();
        if (context.tabletList() != null) {
            tabletIds = context.tabletList().INTEGER_VALUE().stream().map(ParseTree::getText)
                    .map(Long::parseLong).collect(toList());
        }
        // NOTE(review): context.property() is passed to visitProperty unconditionally;
        // confirm the grammar guarantees the PROPERTIES clause is always present here.
        return new AdminCheckTabletsStmt(tabletIds, (Property) visitProperty(context.property()), createPos(context));
    }
@Override
public ParseNode visitKillStatement(StarRocksParser.KillStatementContext context) {
NodePosition pos = createPos(context);
long id = Long.parseLong(context.INTEGER_VALUE().getText());
if (context.QUERY() != null) {
return new KillStmt(false, id, pos);
} else {
return new KillStmt(true, id, pos);
}
}
@Override
public ParseNode visitSyncStatement(StarRocksParser.SyncStatementContext context) {
return new SyncStmt(createPos(context));
}
@Override
public ParseNode visitAlterSystemStatement(StarRocksParser.AlterSystemStatementContext context) {
return new AlterSystemStmt((AlterClause) visit(context.alterClause()), createPos(context));
}
@Override
public ParseNode visitCancelAlterSystemStatement(StarRocksParser.CancelAlterSystemStatementContext context) {
return new CancelAlterSystemStmt(visit(context.string(), StringLiteral.class)
.stream().map(StringLiteral::getValue).collect(toList()), createPos(context));
}
@Override
public ParseNode visitShowComputeNodesStatement(StarRocksParser.ShowComputeNodesStatementContext context) {
return new ShowComputeNodesStmt(createPos(context));
}
    @Override
    public ParseNode visitAnalyzeStatement(StarRocksParser.AnalyzeStatementContext context) {
        // ANALYZE [FULL|SAMPLE] TABLE tbl [(col, ...)] [PROPERTIES (...)].
        QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
        TableName tableName = qualifiedNameToTableName(qualifiedName);
        // Column list is optional; null (not an empty list) means "all columns".
        List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
        List<String> columnNames = null;
        if (columns != null) {
            columnNames = columns.stream().map(Identifier::getValue).collect(toList());
        }
        Map<String, String> properties = new HashMap<>();
        if (context.properties() != null) {
            List<Property> propertyList = visit(context.properties().property(), Property.class);
            for (Property property : propertyList) {
                properties.put(property.getKey(), property.getValue());
            }
        }
        // SAMPLE toggles sampled collection, ASYNC runs the job in the background;
        // this form always uses the basic (non-histogram) analyze descriptor.
        return new AnalyzeStmt(tableName, columnNames, properties,
                context.SAMPLE() != null,
                context.ASYNC() != null,
                new AnalyzeBasicDesc(), createPos(context));
    }
@Override
public ParseNode visitDropStatsStatement(StarRocksParser.DropStatsStatementContext context) {
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName tableName = qualifiedNameToTableName(qualifiedName);
return new DropStatsStmt(tableName, createPos(context));
}
    @Override
    public ParseNode visitCreateAnalyzeStatement(StarRocksParser.CreateAnalyzeStatementContext context) {
        // CREATE ANALYZE [FULL|SAMPLE] [ALL | DATABASE db | TABLE tbl [(col, ...)]] [PROPERTIES (...)].
        NodePosition pos = createPos(context);
        Map<String, String> properties = new HashMap<>();
        if (context.properties() != null) {
            List<Property> propertyList = visit(context.properties().property(), Property.class);
            for (Property property : propertyList) {
                properties.put(property.getKey(), property.getValue());
            }
        }
        // Three job scopes: one database, one table (optionally restricted to columns),
        // or all databases. FULL absent means a sampled job for db/all scopes.
        if (context.DATABASE() != null) {
            return new CreateAnalyzeJobStmt(((Identifier) visit(context.db)).getValue(), context.FULL() == null,
                    properties, pos);
        } else if (context.TABLE() != null) {
            QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
            TableName tableName = qualifiedNameToTableName(qualifiedName);
            // Column list is optional; null means "all columns".
            List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
            List<String> columnNames = null;
            if (columns != null) {
                columnNames = columns.stream().map(Identifier::getValue).collect(toList());
            }
            return new CreateAnalyzeJobStmt(tableName, columnNames, context.SAMPLE() != null, properties, pos);
        } else {
            return new CreateAnalyzeJobStmt(context.FULL() == null, properties, pos);
        }
    }
@Override
public ParseNode visitDropAnalyzeJobStatement(StarRocksParser.DropAnalyzeJobStatementContext context) {
return new DropAnalyzeJobStmt(Long.parseLong(context.INTEGER_VALUE().getText()), createPos(context));
}
@Override
public ParseNode visitShowAnalyzeStatement(StarRocksParser.ShowAnalyzeStatementContext context) {
Predicate predicate = null;
NodePosition pos = createPos(context);
if (context.expression() != null) {
predicate = (Predicate) visit(context.expression());
}
if (context.STATUS() != null) {
return new ShowAnalyzeStatusStmt(predicate, pos);
} else if (context.JOB() != null) {
return new ShowAnalyzeJobStmt(predicate, pos);
} else {
return new ShowAnalyzeJobStmt(predicate, pos);
}
}
@Override
public ParseNode visitShowStatsMetaStatement(StarRocksParser.ShowStatsMetaStatementContext context) {
Predicate predicate = null;
if (context.expression() != null) {
predicate = (Predicate) visit(context.expression());
}
return new ShowBasicStatsMetaStmt(predicate, createPos(context));
}
@Override
public ParseNode visitShowHistogramMetaStatement(StarRocksParser.ShowHistogramMetaStatementContext context) {
Predicate predicate = null;
if (context.expression() != null) {
predicate = (Predicate) visit(context.expression());
}
return new ShowHistogramStatsMetaStmt(predicate, createPos(context));
}
    @Override
    public ParseNode visitAnalyzeHistogramStatement(StarRocksParser.AnalyzeHistogramStatementContext context) {
        // ANALYZE TABLE tbl UPDATE HISTOGRAM ON col [, ...] [WITH n BUCKETS] [PROPERTIES (...)].
        QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
        TableName tableName = qualifiedNameToTableName(qualifiedName);
        // Column list is optional; null means "all columns".
        List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
        List<String> columnNames = null;
        if (columns != null) {
            columnNames = columns.stream().map(Identifier::getValue).collect(toList());
        }
        Map<String, String> properties = new HashMap<>();
        if (context.properties() != null) {
            List<Property> propertyList = visit(context.properties().property(), Property.class);
            for (Property property : propertyList) {
                properties.put(property.getKey(), property.getValue());
            }
        }
        // Bucket count falls back to the server-wide config default when omitted.
        long bucket;
        if (context.bucket != null) {
            bucket = Long.parseLong(context.bucket.getText());
        } else {
            bucket = Config.histogram_buckets_size;
        }
        // Histogram analyze is always sampled (`true` below).
        return new AnalyzeStmt(tableName, columnNames, properties, true,
                context.ASYNC() != null, new AnalyzeHistogramDesc(bucket), createPos(context));
    }
@Override
public ParseNode visitDropHistogramStatement(StarRocksParser.DropHistogramStatementContext context) {
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName tableName = qualifiedNameToTableName(qualifiedName);
List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
List<String> columnNames = null;
if (columns != null) {
columnNames = columns.stream().map(Identifier::getValue).collect(toList());
}
return new DropHistogramStmt(tableName, columnNames, createPos(context));
}
@Override
public ParseNode visitKillAnalyzeStatement(StarRocksParser.KillAnalyzeStatementContext context) {
return new KillAnalyzeStmt(Long.parseLong(context.INTEGER_VALUE().getText()), createPos(context));
}
    @Override
    public ParseNode visitAnalyzeProfileStatement(StarRocksParser.AnalyzeProfileStatementContext context) {
        // ANALYZE PROFILE FROM 'query_id' [, plan_node_id ...].
        StringLiteral stringLiteral = (StringLiteral) visit(context.string());
        List<Integer> planNodeIds = Lists.newArrayList();
        // NOTE(review): ANTLR's list-returning INTEGER_VALUE() accessor is expected
        // to return an (possibly empty) list rather than null, so this guard likely
        // always holds — harmless either way; verify against the generated parser.
        if (context.INTEGER_VALUE() != null) {
            planNodeIds = context.INTEGER_VALUE().stream()
                    .map(ParseTree::getText)
                    .map(Integer::parseInt)
                    .collect(toList());
        }
        return new AnalyzeProfileStmt(stringLiteral.getStringValue(), planNodeIds, createPos(context));
    }
public ParseNode visitCreateResourceGroupStatement(StarRocksParser.CreateResourceGroupStatementContext context) {
Identifier identifier = (Identifier) visit(context.identifier());
String name = identifier.getValue();
List<List<Predicate>> predicatesList = new ArrayList<>();
for (StarRocksParser.ClassifierContext classifierContext : context.classifier()) {
List<Predicate> p = visit(classifierContext.expressionList().expression(), Predicate.class);
predicatesList.add(p);
}
Map<String, String> properties = new HashMap<>();
List<Property> propertyList = visit(context.property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
return new CreateResourceGroupStmt(name,
context.EXISTS() != null,
context.REPLACE() != null,
predicatesList,
properties, createPos(context));
}
@Override
public ParseNode visitDropResourceGroupStatement(StarRocksParser.DropResourceGroupStatementContext context) {
Identifier identifier = (Identifier) visit(context.identifier());
return new DropResourceGroupStmt(identifier.getValue(), createPos(context));
}
    @Override
    public ParseNode visitAlterResourceGroupStatement(StarRocksParser.AlterResourceGroupStatementContext context) {
        // ALTER RESOURCE GROUP name ADD (classifier)... | DROP (id, ...) | DROP ALL | WITH (props).
        Identifier identifier = (Identifier) visit(context.identifier());
        String name = identifier.getValue();
        NodePosition pos = createPos(context);
        if (context.ADD() != null) {
            // ADD: attach new classifiers, each its own predicate list.
            List<List<Predicate>> predicatesList = new ArrayList<>();
            for (StarRocksParser.ClassifierContext classifierContext : context.classifier()) {
                List<Predicate> p = visit(classifierContext.expressionList().expression(), Predicate.class);
                predicatesList.add(p);
            }
            return new AlterResourceGroupStmt(name, new AlterResourceGroupStmt.AddClassifiers(predicatesList), pos);
        } else if (context.DROP() != null) {
            // DROP ALL removes every classifier; DROP (ids) removes selected ones.
            if (context.ALL() != null) {
                return new AlterResourceGroupStmt(name, new AlterResourceGroupStmt.DropAllClassifiers(), pos);
            } else {
                return new AlterResourceGroupStmt(name,
                        new AlterResourceGroupStmt.DropClassifiers(context.INTEGER_VALUE()
                                .stream().map(ParseTree::getText).map(Long::parseLong).collect(toList())), pos);
            }
        } else {
            // Otherwise: property update (WITH (...)).
            Map<String, String> properties = new HashMap<>();
            List<Property> propertyList = visit(context.property(), Property.class);
            for (Property property : propertyList) {
                properties.put(property.getKey(), property.getValue());
            }
            return new AlterResourceGroupStmt(name, new AlterResourceGroupStmt.AlterProperties(properties), pos);
        }
    }
@Override
public ParseNode visitShowResourceGroupStatement(StarRocksParser.ShowResourceGroupStatementContext context) {
NodePosition pos = createPos(context);
if (context.GROUPS() != null) {
return new ShowResourceGroupStmt(null, context.ALL() != null, pos);
} else {
Identifier identifier = (Identifier) visit(context.identifier());
return new ShowResourceGroupStmt(identifier.getValue(), false, pos);
}
}
public ParseNode visitCreateResourceStatement(StarRocksParser.CreateResourceStatementContext context) {
Identifier identifier = (Identifier) visit(context.identifierOrString());
Map<String, String> properties = new HashMap<>();
if (context.properties() != null) {
List<Property> propertyList = visit(context.properties().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return new CreateResourceStmt(context.EXTERNAL() != null, identifier.getValue(), properties,
createPos(context));
}
public ParseNode visitDropResourceStatement(StarRocksParser.DropResourceStatementContext context) {
Identifier identifier = (Identifier) visit(context.identifierOrString());
return new DropResourceStmt(identifier.getValue(), createPos(context));
}
public ParseNode visitAlterResourceStatement(StarRocksParser.AlterResourceStatementContext context) {
Identifier identifier = (Identifier) visit(context.identifierOrString());
Map<String, String> properties = new HashMap<>();
if (context.properties() != null) {
List<Property> propertyList = visit(context.properties().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return new AlterResourceStmt(identifier.getValue(), properties, createPos(context));
}
public ParseNode visitShowResourceStatement(StarRocksParser.ShowResourceStatementContext context) {
return new ShowResourcesStmt(createPos(context));
}
    @Override
    public ParseNode visitLoadStatement(StarRocksParser.LoadStatementContext context) {
        // LOAD LABEL db.label (data-desc, ...) [WITH RESOURCE ... | WITH BROKER ...]
        // [BY system] [PROPERTIES (...)].
        NodePosition pos = createPos(context);
        LabelName label = getLabelName(context.labelName());
        List<DataDescription> dataDescriptions = null;
        if (context.data != null) {
            dataDescriptions = context.data.dataDesc().stream().map(this::getDataDescription)
                    .collect(toList());
        }
        Map<String, String> properties = null;
        if (context.props != null) {
            properties = Maps.newHashMap();
            List<Property> propertyList = visit(context.props.property(), Property.class);
            for (Property property : propertyList) {
                properties.put(property.getKey(), property.getValue());
            }
        }
        // Resource-based load short-circuits here: broker/system clauses do not apply,
        // and no hint nodes are attached on this path.
        if (context.resource != null) {
            ResourceDesc resourceDesc = getResourceDesc(context.resource);
            return new LoadStmt(label, dataDescriptions, resourceDesc, properties, pos);
        }
        BrokerDesc brokerDesc = getBrokerDesc(context.broker);
        String cluster = null;
        if (context.system != null) {
            cluster = ((Identifier) visit(context.system)).getValue();
        }
        LoadStmt stmt = new LoadStmt(label, dataDescriptions, brokerDesc, cluster, properties, pos);
        // Attach any hints the parser collected for this statement context.
        stmt.setHintNodes(hintMap.get(context));
        return stmt;
    }
private LabelName getLabelName(StarRocksParser.LabelNameContext context) {
String label = ((Identifier) visit(context.label)).getValue();
String db = "";
if (context.db != null) {
db = ((Identifier) visit(context.db)).getValue();
}
return new LabelName(db, label, createPos(context));
}
    // Builds one LOAD data description: either table-to-table (FROM src table) or
    // file-based with separator/format/column options.
    private DataDescription getDataDescription(StarRocksParser.DataDescContext context) {
        NodePosition pos = createPos(context);
        String dstTableName = ((Identifier) visit(context.dstTableName)).getValue();
        PartitionNames partitionNames = (PartitionNames) visitIfPresent(context.partitions);
        Expr whereExpr = (Expr) visitIfPresent(context.where);
        List<Expr> colMappingList = null;
        if (context.colMappingList != null) {
            colMappingList = visit(context.colMappingList.expressionList().expression(), Expr.class);
        }
        // Table-source form: none of the file/format options below apply.
        if (context.srcTableName != null) {
            String srcTableName = ((Identifier) visit(context.srcTableName)).getValue();
            return new DataDescription(dstTableName, partitionNames, srcTableName,
                    context.NEGATIVE() != null, colMappingList, whereExpr, pos);
        }
        List<String> files = context.srcFiles.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue())
                .collect(toList());
        ColumnSeparator colSep = getColumnSeparator(context.colSep);
        RowDelimiter rowDelimiter = getRowDelimiter(context.rowSep);
        // FORMAT AS may be written as a bare identifier (csv) or a quoted string ('csv').
        String format = null;
        if (context.format != null) {
            if (context.format.identifier() != null) {
                format = ((Identifier) visit(context.format.identifier())).getValue();
            } else if (context.format.string() != null) {
                format = ((StringLiteral) visit(context.format.string())).getStringValue();
            }
        }
        List<String> colList = null;
        if (context.colList != null) {
            List<Identifier> identifiers = visit(context.colList.identifier(), Identifier.class);
            colList = identifiers.stream().map(Identifier::getValue).collect(toList());
        }
        // COLUMNS FROM PATH: column values derived from the source file path.
        List<String> colFromPath = null;
        if (context.colFromPath != null) {
            List<Identifier> identifiers = visit(context.colFromPath.identifier(), Identifier.class);
            colFromPath = identifiers.stream().map(Identifier::getValue).collect(toList());
        }
        StarRocksParser.FormatPropsContext formatPropsContext;
        CsvFormat csvFormat;
        if (context.formatPropsField != null) {
            formatPropsContext = context.formatProps();
            String escape = null;
            if (formatPropsContext.escapeCharacter != null) {
                StringLiteral stringLiteral = (StringLiteral) visit(formatPropsContext.escapeCharacter);
                escape = stringLiteral.getValue();
            }
            String enclose = null;
            if (formatPropsContext.encloseCharacter != null) {
                StringLiteral stringLiteral = (StringLiteral) visit(formatPropsContext.encloseCharacter);
                enclose = stringLiteral.getValue();
            }
            // Negative skip-header values are clamped to 0.
            long skipheader = 0;
            if (formatPropsContext.INTEGER_VALUE() != null) {
                skipheader = Long.parseLong(formatPropsContext.INTEGER_VALUE().getText());
                if (skipheader < 0) {
                    skipheader = 0;
                }
            }
            boolean trimspace = false;
            if (formatPropsContext.booleanValue() != null) {
                trimspace = Boolean.parseBoolean(formatPropsContext.booleanValue().getText());
            }
            // Only the first character of enclose/escape is used; 0 means "unset".
            csvFormat = new CsvFormat(enclose == null ? 0 : (byte) enclose.charAt(0),
                    escape == null ? 0 : (byte) escape.charAt(0),
                    skipheader, trimspace);
        } else {
            // No format-properties clause: all CSV options take their defaults.
            csvFormat = new CsvFormat((byte) 0, (byte) 0, 0, false);
        }
        return new DataDescription(dstTableName, partitionNames, files, colList, colSep, rowDelimiter,
                format, colFromPath, context.NEGATIVE() != null, colMappingList, whereExpr,
                csvFormat, createPos(context));
    }
private ColumnSeparator getColumnSeparator(StarRocksParser.StringContext context) {
if (context != null) {
String sep = ((StringLiteral) visit(context)).getValue();
return new ColumnSeparator(sep);
}
return null;
}
private RowDelimiter getRowDelimiter(StarRocksParser.StringContext context) {
if (context != null) {
String sep = ((StringLiteral) visit(context)).getValue();
return new RowDelimiter(sep);
}
return null;
}
private BrokerDesc getBrokerDesc(StarRocksParser.BrokerDescContext context) {
if (context != null) {
NodePosition pos = createPos(context);
Map<String, String> properties = null;
if (context.props != null) {
properties = Maps.newHashMap();
List<Property> propertyList = visit(context.props.property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
if (context.identifierOrString() != null) {
String brokerName = ((Identifier) visit(context.identifierOrString())).getValue();
return new BrokerDesc(brokerName, properties, pos);
} else {
return new BrokerDesc(properties, pos);
}
}
return null;
}
private ResourceDesc getResourceDesc(StarRocksParser.ResourceDescContext context) {
if (context != null) {
String brokerName = ((Identifier) visit(context.identifierOrString())).getValue();
Map<String, String> properties = null;
if (context.props != null) {
properties = Maps.newHashMap();
List<Property> propertyList = visit(context.props.property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return new ResourceDesc(brokerName, properties, createPos(context));
}
return null;
}
    @Override
    public ParseNode visitShowLoadStatement(StarRocksParser.ShowLoadStatementContext context) {
        // SHOW [ALL] LOAD [FROM db] [WHERE expr] [ORDER BY ...] [LIMIT ...].
        String db = null;
        if (context.identifier() != null) {
            db = ((Identifier) visit(context.identifier())).getValue();
        }
        Expr labelExpr = null;
        if (context.expression() != null) {
            labelExpr = (Expr) visit(context.expression());
        }
        List<OrderByElement> orderByElements = null;
        if (context.ORDER() != null) {
            orderByElements = new ArrayList<>();
            orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
        }
        LimitElement limitElement = null;
        if (context.limitElement() != null) {
            limitElement = (LimitElement) visit(context.limitElement());
        }
        // ALL also lists finished/cancelled jobs; it is set via a setter because the
        // constructor does not take the flag.
        boolean all = context.ALL() != null;
        ShowLoadStmt res = new ShowLoadStmt(db, labelExpr, orderByElements, limitElement, createPos(context));
        res.setAll(all);
        return res;
    }
@Override
public ParseNode visitShowLoadWarningsStatement(StarRocksParser.ShowLoadWarningsStatementContext context) {
if (context.ON() != null) {
String url = ((StringLiteral) visit(context.string())).getValue();
return new ShowLoadWarningsStmt(null, url, null, null);
}
String db = null;
if (context.identifier() != null) {
db = ((Identifier) visit(context.identifier())).getValue();
}
Expr labelExpr = null;
if (context.expression() != null) {
labelExpr = (Expr) visit(context.expression());
}
LimitElement limitElement = null;
if (context.limitElement() != null) {
limitElement = (LimitElement) visit(context.limitElement());
}
return new ShowLoadWarningsStmt(db, null, labelExpr, limitElement, createPos(context));
}
@Override
public ParseNode visitCancelLoadStatement(StarRocksParser.CancelLoadStatementContext context) {
String db = null;
if (context.identifier() != null) {
db = ((Identifier) visit(context.identifier())).getValue();
}
Expr labelExpr = null;
if (context.expression() != null) {
labelExpr = (Expr) visit(context.expression());
}
return new CancelLoadStmt(db, labelExpr, createPos(context));
}
@Override
public ParseNode visitCancelCompactionStatement(StarRocksParser.CancelCompactionStatementContext context) {
Expr txnIdExpr = null;
if (context.expression() != null) {
txnIdExpr = (Expr) visit(context.expression());
}
return new CancelCompactionStmt(txnIdExpr, createPos(context));
}
@Override
public ParseNode visitShowAuthorStatement(StarRocksParser.ShowAuthorStatementContext context) {
return new ShowAuthorStmt(createPos(context));
}
@Override
public ParseNode visitShowBackendsStatement(StarRocksParser.ShowBackendsStatementContext context) {
return new ShowBackendsStmt(createPos(context));
}
@Override
public ParseNode visitShowBrokerStatement(StarRocksParser.ShowBrokerStatementContext context) {
return new ShowBrokerStmt(createPos(context));
}
@Override
public ParseNode visitShowCharsetStatement(StarRocksParser.ShowCharsetStatementContext context) {
String pattern = null;
if (context.pattern != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
pattern = stringLiteral.getValue();
}
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
return new ShowCharsetStmt(pattern, where, createPos(context));
}
@Override
public ParseNode visitShowCollationStatement(StarRocksParser.ShowCollationStatementContext context) {
String pattern = null;
if (context.pattern != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
pattern = stringLiteral.getValue();
}
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
return new ShowCollationStmt(pattern, where, createPos(context));
}
@Override
public ParseNode visitShowDeleteStatement(StarRocksParser.ShowDeleteStatementContext context) {
QualifiedName dbName = null;
if (context.qualifiedName() != null) {
dbName = getQualifiedName(context.db);
}
return new ShowDeleteStmt(dbName == null ? null : dbName.toString(), createPos(context));
}
@Override
public ParseNode visitShowDynamicPartitionStatement(StarRocksParser.ShowDynamicPartitionStatementContext context) {
QualifiedName dbName = null;
if (context.db != null) {
dbName = getQualifiedName(context.db);
}
return new ShowDynamicPartitionStmt(dbName == null ? null : dbName.toString(), createPos(context));
}
@Override
public ParseNode visitShowEventsStatement(StarRocksParser.ShowEventsStatementContext context) {
return new ShowEventsStmt(createPos(context));
}
@Override
public ParseNode visitShowEnginesStatement(StarRocksParser.ShowEnginesStatementContext context) {
return new ShowEnginesStmt(createPos(context));
}
@Override
public ParseNode visitShowFrontendsStatement(StarRocksParser.ShowFrontendsStatementContext context) {
return new ShowFrontendsStmt(createPos(context));
}
@Override
public ParseNode visitShowPluginsStatement(StarRocksParser.ShowPluginsStatementContext context) {
return new ShowPluginsStmt(createPos(context));
}
@Override
public ParseNode visitShowRepositoriesStatement(StarRocksParser.ShowRepositoriesStatementContext context) {
return new ShowRepositoriesStmt(createPos(context));
}
@Override
public ParseNode visitShowOpenTableStatement(StarRocksParser.ShowOpenTableStatementContext context) {
return new ShowOpenTableStmt(createPos(context));
}
@Override
public ParseNode visitShowProcedureStatement(StarRocksParser.ShowProcedureStatementContext context) {
NodePosition pos = createPos(context);
if (context.pattern != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
return new ShowProcedureStmt(stringLiteral.getValue(), null, pos);
} else if (context.expression() != null) {
return new ShowProcedureStmt(null, (Expr) visit(context.expression()), pos);
} else {
return new ShowProcedureStmt(null, null, pos);
}
}
@Override
public ParseNode visitShowProcStatement(StarRocksParser.ShowProcStatementContext context) {
StringLiteral stringLiteral = (StringLiteral) visit(context.path);
return new ShowProcStmt(stringLiteral.getValue(), createPos(context));
}
@Override
public ParseNode visitShowProcesslistStatement(StarRocksParser.ShowProcesslistStatementContext context) {
boolean isShowFull = context.FULL() != null;
return new ShowProcesslistStmt(isShowFull, createPos(context));
}
@Override
public ParseNode visitShowProfilelistStatement(StarRocksParser.ShowProfilelistStatementContext context) {
int limit = context.LIMIT() != null ? Integer.parseInt(context.limit.getText()) : -1;
return new ShowProfilelistStmt(limit, createPos(context));
}
@Override
public ParseNode visitShowRunningQueriesStatement(StarRocksParser.ShowRunningQueriesStatementContext context) {
int limit = context.LIMIT() != null ? Integer.parseInt(context.limit.getText()) : -1;
return new ShowRunningQueriesStmt(limit, createPos(context));
}
@Override
public ParseNode visitShowResourceGroupUsageStatement(
StarRocksParser.ShowResourceGroupUsageStatementContext context) {
if (context.GROUPS() != null) {
return new ShowResourceGroupUsageStmt(null, createPos(context));
}
Identifier groupName = (Identifier) visit(context.identifier());
return new ShowResourceGroupUsageStmt(groupName.getValue(), createPos(context));
}
@Override
public ParseNode visitShowTransactionStatement(StarRocksParser.ShowTransactionStatementContext context) {
String database = null;
if (context.qualifiedName() != null) {
database = getQualifiedName(context.qualifiedName()).toString();
}
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
return new ShowTransactionStmt(database, where, createPos(context));
}
@Override
public ParseNode visitShowStatusStatement(StarRocksParser.ShowStatusStatementContext context) {
String pattern = null;
if (context.pattern != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
pattern = stringLiteral.getValue();
}
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
return new ShowStatusStmt(getVariableType(context.varType()), pattern, where, createPos(context));
}
@Override
public ParseNode visitShowTriggersStatement(StarRocksParser.ShowTriggersStatementContext context) {
return new ShowTriggersStmt(createPos(context));
}
    @Override
    public ParseNode visitShowUserPropertyStatement(StarRocksParser.ShowUserPropertyStatementContext context) {
        // SHOW PROPERTY [FOR 'user'] [LIKE 'pattern'].
        String user;
        String pattern;
        if (context.FOR() == null) {
            // No FOR clause: the only possible string literal is the LIKE pattern.
            user = null;
            pattern = context.LIKE() == null ? null : ((StringLiteral) visit(context.string(0))).getValue();
        } else {
            // With FOR: string(0) is the user name; string(1), if present, the LIKE pattern.
            user = ((StringLiteral) visit(context.string(0))).getValue();
            pattern = context.LIKE() == null ? null : ((StringLiteral) visit(context.string(1))).getValue();
        }
        return new ShowUserPropertyStmt(user, pattern, createPos(context));
    }
@Override
public ParseNode visitShowVariablesStatement(StarRocksParser.ShowVariablesStatementContext context) {
String pattern = null;
if (context.pattern != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
pattern = stringLiteral.getValue();
}
Expr where = null;
if (context.expression() != null) {
where = (Expr) visit(context.expression());
}
return new ShowVariablesStmt(getVariableType(context.varType()), pattern, where, createPos(context));
}
@Override
public ParseNode visitShowWarningStatement(StarRocksParser.ShowWarningStatementContext context) {
NodePosition pos = createPos(context);
if (context.limitElement() != null) {
return new ShowWarningStmt((LimitElement) visit(context.limitElement()), pos);
}
return new ShowWarningStmt(null, pos);
}
@Override
public ParseNode visitHelpStatement(StarRocksParser.HelpStatementContext context) {
String mask = ((Identifier) visit(context.identifierOrString())).getValue();
return new HelpStmt(mask, createPos(context));
}
    @Override
    public ParseNode visitBackupStatement(StarRocksParser.BackupStatementContext context) {
        // BACKUP SNAPSHOT db.label TO repo ON (tbl [PARTITION ...], ...) [PROPERTIES (...)].
        QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
        LabelName labelName = qualifiedNameToLabelName(qualifiedName);
        // One TableRef per ON-clause entry; partitions are optional per table.
        List<TableRef> tblRefs = new ArrayList<>();
        for (StarRocksParser.TableDescContext tableDescContext : context.tableDesc()) {
            StarRocksParser.QualifiedNameContext qualifiedNameContext = tableDescContext.qualifiedName();
            qualifiedName = getQualifiedName(qualifiedNameContext);
            TableName tableName = qualifiedNameToTableName(qualifiedName);
            PartitionNames partitionNames = null;
            if (tableDescContext.partitionNames() != null) {
                partitionNames = (PartitionNames) visit(tableDescContext.partitionNames());
            }
            TableRef tableRef = new TableRef(tableName, null, partitionNames, createPos(tableDescContext));
            tblRefs.add(tableRef);
        }
        Map<String, String> properties = null;
        if (context.propertyList() != null) {
            properties = new HashMap<>();
            List<Property> propertyList = visit(context.propertyList().property(), Property.class);
            for (Property property : propertyList) {
                properties.put(property.getKey(), property.getValue());
            }
        }
        String repoName = ((Identifier) visit(context.identifier())).getValue();
        return new BackupStmt(labelName, repoName, tblRefs, properties, createPos(context));
    }
@Override
public ParseNode visitCancelBackupStatement(StarRocksParser.CancelBackupStatementContext context) {
return new CancelBackupStmt(((Identifier) visit(context.identifier())).getValue(),
false, createPos(context));
}
@Override
public ParseNode visitShowBackupStatement(StarRocksParser.ShowBackupStatementContext context) {
NodePosition pos = createPos(context);
if (context.identifier() == null) {
return new ShowBackupStmt(null, pos);
}
return new ShowBackupStmt(((Identifier) visit(context.identifier())).getValue(), pos);
}
    @Override
    public ParseNode visitRestoreStatement(StarRocksParser.RestoreStatementContext context) {
        // RESTORE SNAPSHOT db.label FROM repo ON (tbl [PARTITION ...] [AS alias], ...) [PROPERTIES (...)].
        QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
        LabelName labelName = qualifiedNameToLabelName(qualifiedName);
        // One TableRef per ON-clause entry; partitions and the AS alias are optional.
        List<TableRef> tblRefs = new ArrayList<>();
        for (StarRocksParser.RestoreTableDescContext tableDescContext : context.restoreTableDesc()) {
            StarRocksParser.QualifiedNameContext qualifiedNameContext = tableDescContext.qualifiedName();
            qualifiedName = getQualifiedName(qualifiedNameContext);
            TableName tableName = qualifiedNameToTableName(qualifiedName);
            PartitionNames partitionNames = null;
            if (tableDescContext.partitionNames() != null) {
                partitionNames = (PartitionNames) visit(tableDescContext.partitionNames());
            }
            // AS alias renames the table on restore.
            String alias = null;
            if (tableDescContext.identifier() != null) {
                alias = ((Identifier) visit(tableDescContext.identifier())).getValue();
            }
            TableRef tableRef = new TableRef(tableName, alias, partitionNames, createPos(tableDescContext));
            tblRefs.add(tableRef);
        }
        Map<String, String> properties = null;
        if (context.propertyList() != null) {
            properties = new HashMap<>();
            List<Property> propertyList = visit(context.propertyList().property(), Property.class);
            for (Property property : propertyList) {
                properties.put(property.getKey(), property.getValue());
            }
        }
        String repoName = ((Identifier) visit(context.identifier())).getValue();
        return new RestoreStmt(labelName, repoName, tblRefs, properties, createPos(context));
    }
@Override
public ParseNode visitCancelRestoreStatement(StarRocksParser.CancelRestoreStatementContext context) {
return new CancelBackupStmt(((Identifier) visit(context.identifier())).getValue(), true,
createPos(context));
}
@Override
public ParseNode visitShowRestoreStatement(StarRocksParser.ShowRestoreStatementContext context) {
NodePosition pos = createPos(context);
if (context.identifier() == null) {
return new ShowRestoreStmt(null, null, pos);
}
if (context.expression() != null) {
return new ShowRestoreStmt(((Identifier) visit(context.identifier())).getValue(),
(Expr) visit(context.expression()), pos);
} else {
return new ShowRestoreStmt(((Identifier) visit(context.identifier())).getValue(), null, pos);
}
}
@Override
public ParseNode visitShowSnapshotStatement(StarRocksParser.ShowSnapshotStatementContext context) {
StarRocksParser.ExpressionContext expression = context.expression();
Expr where = null;
if (expression != null) {
where = (Expr) visit(context.expression());
}
String repoName = ((Identifier) visit(context.identifier())).getValue();
return new ShowSnapshotStmt(repoName, where, createPos(context));
}
@Override
public ParseNode visitCreateRepositoryStatement(StarRocksParser.CreateRepositoryStatementContext context) {
    // CREATE [READ ONLY] REPOSITORY <repo> [WITH BROKER <broker>] ON LOCATION <loc> [PROPERTIES(...)].
    boolean isReadOnly = context.READ() != null && context.ONLY() != null;
    Map<String, String> properties = new HashMap<>();
    if (context.propertyList() != null) {
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    String location = ((StringLiteral) visit(context.location)).getValue();
    String repoName = ((Identifier) visit(context.repoName)).getValue();
    String brokerName = null;
    if (context.brokerName != null) {
        brokerName = ((Identifier) visit(context.brokerName)).getValue();
    }
    return new CreateRepositoryStmt(isReadOnly, repoName, brokerName,
            location, properties, createPos(context));
}
@Override
public ParseNode visitDropRepositoryStatement(StarRocksParser.DropRepositoryStatementContext context) {
    // DROP REPOSITORY <repo>.
    return new DropRepositoryStmt(((Identifier) visit(context.identifier())).getValue(), createPos(context));
}
@Override
public ParseNode visitAddSqlBlackListStatement(StarRocksParser.AddSqlBlackListStatementContext context) {
    // ADD SQLBLACKLIST "<sql pattern>"; rejects an empty pattern at parse time.
    String sql = ((StringLiteral) visit(context.string())).getStringValue();
    if (sql == null || sql.isEmpty()) {
        throw new ParsingException(PARSER_ERROR_MSG.emptySql(), createPos(context.string()));
    }
    return new AddSqlBlackListStmt(sql);
}
@Override
public ParseNode visitDelSqlBlackListStatement(StarRocksParser.DelSqlBlackListStatementContext context) {
    // DELETE SQLBLACKLIST <id> [, <id> ...]; ids are parsed as longs.
    List<Long> indexes = context.INTEGER_VALUE().stream().map(ParseTree::getText)
            .map(Long::parseLong).collect(toList());
    return new DelSqlBlackListStmt(indexes, createPos(context));
}
@Override
public ParseNode visitShowSqlBlackListStatement(StarRocksParser.ShowSqlBlackListStatementContext context) {
    // SHOW SQLBLACKLIST.
    return new ShowSqlBlackListStmt(createPos(context));
}
@Override
public ParseNode visitShowWhiteListStatement(StarRocksParser.ShowWhiteListStatementContext context) {
    // SHOW WHITELIST. NOTE(review): unlike the sibling visitors, no position is
    // passed here — confirm whether ShowWhiteListStmt supports one.
    return new ShowWhiteListStmt();
}
@Override
public ParseNode visitAddBackendBlackListStatement(StarRocksParser.AddBackendBlackListStatementContext ctx) {
    // ADD BACKEND BLACKLIST <backendId> [, ...].
    List<Long> ids =
            ctx.INTEGER_VALUE().stream().map(ParseTree::getText).map(Long::parseLong).collect(toList());
    return new AddBackendBlackListStmt(ids, createPos(ctx));
}
@Override
public ParseNode visitDelBackendBlackListStatement(StarRocksParser.DelBackendBlackListStatementContext ctx) {
    // DELETE BACKEND BLACKLIST <backendId> [, ...].
    List<Long> ids =
            ctx.INTEGER_VALUE().stream().map(ParseTree::getText).map(Long::parseLong).collect(toList());
    return new DelBackendBlackListStmt(createPos(ctx), ids);
}
@Override
public ParseNode visitShowBackendBlackListStatement(StarRocksParser.ShowBackendBlackListStatementContext ctx) {
    // SHOW BACKEND BLACKLIST.
    return new ShowBackendBlackListStmt(createPos(ctx));
}
@Override
public ParseNode visitCreateDataCacheRuleStatement(StarRocksParser.CreateDataCacheRuleStatementContext ctx) {
    // CREATE DATACACHE RULE <target> [WHERE <expr>] PRIORITY = [-]<int> [PROPERTIES(...)].
    List<StarRocksParser.IdentifierOrStringOrStarContext> partList =
            ctx.dataCacheTarget().identifierOrStringOrStar();
    List<String> parts = partList.stream().map(c -> ((Identifier) visit(c)).getValue()).collect(toList());
    QualifiedName qualifiedName = QualifiedName.of(parts);
    int priority = Integer.parseInt(ctx.INTEGER_VALUE().getText());
    // A leading MINUS token is not part of INTEGER_VALUE; apply the sign manually.
    if (ctx.MINUS_SYMBOL() != null) {
        priority *= -1;
    }
    Expr predicates = null;
    if (ctx.expression() != null) {
        predicates = (Expr) visit(ctx.expression());
    }
    Map<String, String> properties = null;
    if (ctx.properties() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(ctx.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new CreateDataCacheRuleStmt(qualifiedName, predicates, priority, properties, createPos(ctx));
}
@Override
public ParseNode visitShowDataCacheRulesStatement(StarRocksParser.ShowDataCacheRulesStatementContext ctx) {
    // SHOW DATACACHE RULES.
    return new ShowDataCacheRulesStmt(createPos(ctx));
}
@Override
public ParseNode visitDropDataCacheRuleStatement(StarRocksParser.DropDataCacheRuleStatementContext ctx) {
    // DROP DATACACHE RULE <ruleId>.
    long id = Long.parseLong(ctx.INTEGER_VALUE().getText());
    return new DropDataCacheRuleStmt(id, createPos(ctx));
}
@Override
public ParseNode visitClearDataCacheRulesStatement(StarRocksParser.ClearDataCacheRulesStatementContext ctx) {
    // CLEAR DATACACHE RULES.
    return new ClearDataCacheRulesStmt(createPos(ctx));
}
@Override
public ParseNode visitExportStatement(StarRocksParser.ExportStatementContext context) {
    // EXPORT TABLE <tbl> [PARTITION (...)] [(cols)] TO "<path>" [PROPERTIES] WITH BROKER [SYNC].
    StarRocksParser.QualifiedNameContext qualifiedNameContext = context.tableDesc().qualifiedName();
    // Track start/stop tokens so the TableRef position covers the optional partition list too.
    Token start = qualifiedNameContext.start;
    Token stop = qualifiedNameContext.stop;
    QualifiedName qualifiedName = getQualifiedName(qualifiedNameContext);
    TableName tableName = qualifiedNameToTableName(qualifiedName);
    PartitionNames partitionNames = null;
    if (context.tableDesc().partitionNames() != null) {
        stop = context.tableDesc().partitionNames().stop;
        partitionNames = (PartitionNames) visit(context.tableDesc().partitionNames());
    }
    TableRef tableRef = new TableRef(tableName, null, partitionNames, createPos(start, stop));
    StringLiteral stringLiteral = (StringLiteral) visit(context.string());
    Map<String, String> properties = null;
    if (context.properties() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    BrokerDesc brokerDesc = getBrokerDesc(context.brokerDesc());
    boolean sync = context.SYNC() != null;
    return new ExportStmt(tableRef, getColumnNames(context.columnAliases()),
            stringLiteral.getValue(), properties, brokerDesc, createPos(context), sync);
}
@Override
public ParseNode visitCancelExportStatement(StarRocksParser.CancelExportStatementContext context) {
    // CANCEL EXPORT [FROM <db>] [WHERE <expr>].
    String catalog = null;
    if (context.catalog != null) {
        QualifiedName dbName = getQualifiedName(context.catalog);
        catalog = dbName.toString();
    }
    Expr where = null;
    if (context.expression() != null) {
        where = (Expr) visit(context.expression());
    }
    return new CancelExportStmt(catalog, where, createPos(context));
}
@Override
public ParseNode visitShowExportStatement(StarRocksParser.ShowExportStatementContext context) {
    // SHOW EXPORT [FROM <db>] [WHERE <expr>] [ORDER BY ...] [LIMIT ...].
    String catalog = null;
    if (context.catalog != null) {
        QualifiedName dbName = getQualifiedName(context.catalog);
        catalog = dbName.toString();
    }
    LimitElement le = null;
    if (context.limitElement() != null) {
        le = (LimitElement) visit(context.limitElement());
    }
    List<OrderByElement> orderByElements = null;
    if (context.ORDER() != null) {
        orderByElements = new ArrayList<>();
        orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    Expr whereExpr = null;
    if (context.expression() != null) {
        whereExpr = (Expr) visit(context.expression());
    }
    return new ShowExportStmt(catalog, whereExpr, orderByElements, le, createPos(context));
}
@Override
public ParseNode visitInstallPluginStatement(StarRocksParser.InstallPluginStatementContext context) {
    // INSTALL PLUGIN FROM <source> [PROPERTIES(...)].
    String pluginPath = ((Identifier) visit(context.identifierOrString())).getValue();
    Map<String, String> properties = getProperties(context.properties());
    return new InstallPluginStmt(pluginPath, properties, createPos(context));
}
@Override
public ParseNode visitUninstallPluginStatement(StarRocksParser.UninstallPluginStatementContext context) {
    // UNINSTALL PLUGIN <name>.
    String pluginPath = ((Identifier) visit(context.identifierOrString())).getValue();
    return new UninstallPluginStmt(pluginPath, createPos(context));
}
@Override
public ParseNode visitCreateFileStatement(StarRocksParser.CreateFileStatementContext context) {
    // CREATE FILE "<name>" [IN <db>] PROPERTIES(...).
    String fileName = ((StringLiteral) visit(context.string())).getStringValue();
    String catalog = null;
    if (context.catalog != null) {
        QualifiedName dbName = getQualifiedName(context.catalog);
        catalog = dbName.toString();
    }
    Map<String, String> properties = getProperties(context.properties());
    return new CreateFileStmt(fileName, catalog, properties, createPos(context));
}
@Override
public ParseNode visitDropFileStatement(StarRocksParser.DropFileStatementContext context) {
    // DROP FILE "<name>" [FROM <db>] PROPERTIES(...).
    String fileName = ((StringLiteral) visit(context.string())).getStringValue();
    String catalog = null;
    if (context.catalog != null) {
        QualifiedName dbName = getQualifiedName(context.catalog);
        catalog = dbName.toString();
    }
    Map<String, String> properties = getProperties(context.properties());
    return new DropFileStmt(fileName, catalog, properties, createPos(context));
}
@Override
public ParseNode visitShowSmallFilesStatement(StarRocksParser.ShowSmallFilesStatementContext context) {
    // SHOW FILE [FROM <db>].
    String catalog = null;
    if (context.catalog != null) {
        QualifiedName dbName = getQualifiedName(context.catalog);
        catalog = dbName.toString();
    }
    return new ShowSmallFilesStmt(catalog, createPos(context));
}
@Override
public ParseNode visitSetStatement(StarRocksParser.SetStatementContext context) {
    // SET <item> [, <item> ...]; each item becomes a SetListItem.
    List<SetListItem> propertyList = visit(context.setVar(), SetListItem.class);
    return new SetStmt(propertyList, createPos(context));
}
@Override
public ParseNode visitSetNames(StarRocksParser.SetNamesContext context) {
    // SET NAMES <charset> [COLLATE <collation>] or SET CHAR SET / CHARSET <charset>.
    NodePosition pos = createPos(context);
    if (context.CHAR() != null || context.CHARSET() != null) {
        // SET CHARSET form: at most one charset argument, no collation.
        if (context.identifierOrString().isEmpty()) {
            return new SetNamesVar(null, null, pos);
        } else {
            return new SetNamesVar(
                    ((Identifier) visit(context.identifierOrString().get(0))).getValue(),
                    null,
                    pos);
        }
    } else {
        // SET NAMES form: charset and collation are each optional.
        String charset = null;
        if (context.charset != null) {
            charset = ((Identifier) visit(context.charset)).getValue();
        }
        String collate = null;
        if (context.collate != null) {
            collate = ((Identifier) visit(context.collate)).getValue();
        }
        return new SetNamesVar(charset, collate, pos);
    }
}
@Override
public ParseNode visitSetPassword(StarRocksParser.SetPasswordContext context) {
    // SET PASSWORD [FOR <user>] = PASSWORD('<plain>') | '<scrambled>'.
    NodePosition pos = createPos(context);
    String passwordText;
    StringLiteral stringLiteral = (StringLiteral) visit(context.string());
    // Two PASSWORD tokens means the PASSWORD('<plain>') form: scramble the plain text.
    if (context.PASSWORD().size() > 1) {
        passwordText = new String(MysqlPassword.makeScrambledPassword(stringLiteral.getStringValue()));
    } else {
        passwordText = stringLiteral.getStringValue();
    }
    if (context.user() != null) {
        return new SetPassVar((UserIdentity) visit(context.user()), passwordText, pos);
    } else {
        // No FOR clause: applies to the current user.
        return new SetPassVar(null, passwordText, pos);
    }
}
@Override
public ParseNode visitSetUserVar(StarRocksParser.SetUserVarContext context) {
    // SET @var = <expr>.
    VariableExpr variableDesc = (VariableExpr) visit(context.userVariable());
    Expr expr = (Expr) visit(context.expression());
    return new UserVariable(variableDesc.getName(), expr, createPos(context));
}
@Override
public ParseNode visitSetSystemVar(StarRocksParser.SetSystemVarContext context) {
    // SET [GLOBAL|SESSION] <var> = <expr>, or SET @@[scope.]var = <expr>.
    NodePosition pos = createPos(context);
    if (context.systemVariable() != null) {
        // @@-prefixed form carries its own scope.
        VariableExpr variableDesc = (VariableExpr) visit(context.systemVariable());
        Expr expr = (Expr) visit(context.setExprOrDefault());
        return new SystemVariable(variableDesc.getSetType(), variableDesc.getName(), expr, pos);
    } else {
        Expr expr = (Expr) visit(context.setExprOrDefault());
        String variable = ((Identifier) visit(context.identifier())).getValue();
        if (context.varType() != null) {
            return new SystemVariable(getVariableType(context.varType()), variable, expr, pos);
        } else {
            // No explicit scope keyword: defaults to SESSION.
            return new SystemVariable(SetType.SESSION, variable, expr, pos);
        }
    }
}
@Override
public ParseNode visitSetTransaction(StarRocksParser.SetTransactionContext context) {
    // SET TRANSACTION ...; accepted but carries no options in the AST.
    return new SetTransaction(createPos(context));
}
@Override
public ParseNode visitSetUserPropertyStatement(StarRocksParser.SetUserPropertyStatementContext context) {
    // SET PROPERTY [FOR '<user>'] 'key' = 'value' [, ...].
    String user = context.FOR() == null ? null : ((StringLiteral) visit(context.string())).getValue();
    List<SetUserPropertyVar> list = new ArrayList<>();
    if (context.userPropertyList() != null) {
        List<Property> propertyList = visit(context.userPropertyList().property(), Property.class);
        for (Property property : propertyList) {
            SetUserPropertyVar setVar = new SetUserPropertyVar(property.getKey(), property.getValue());
            list.add(setVar);
        }
    }
    return new SetUserPropertyStmt(user, list, createPos(context));
}
@Override
public ParseNode visitSetExprOrDefault(StarRocksParser.SetExprOrDefaultContext context) {
    // RHS of a SET assignment. DEFAULT is represented as a null expression;
    // the bare keywords ON and ALL become string literals.
    if (context.DEFAULT() != null) {
        return null;
    } else if (context.ON() != null) {
        return new StringLiteral("ON");
    } else if (context.ALL() != null) {
        return new StringLiteral("ALL");
    } else {
        return visit(context.expression());
    }
}
@Override
public ParseNode visitExecuteScriptStatement(StarRocksParser.ExecuteScriptStatementContext context) {
    // ADMIN EXECUTE ON <beId> '<script>'; beId stays -1 when no integer is given.
    long beId = -1;
    if (context.INTEGER_VALUE() != null) {
        beId = Long.parseLong(context.INTEGER_VALUE().getText());
    }
    StringLiteral stringLiteral = (StringLiteral) visit(context.string());
    String script = stringLiteral.getStringValue();
    return new ExecuteScriptStmt(beId, script, createPos(context));
}
@Override
public ParseNode visitCreateStorageVolumeStatement(StarRocksParser.CreateStorageVolumeStatementContext context) {
    // CREATE STORAGE VOLUME [IF NOT EXISTS] <name> TYPE = <type> LOCATIONS = (...) [COMMENT] [PROPERTIES].
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    String svName = identifier.getValue();
    String storageType = ((Identifier) visit(context.typeDesc().identifier())).getValue();
    List<StarRocksParser.StringContext> locationList = context.locationsDesc().stringList().string();
    List<String> locations = new ArrayList<>();
    for (StarRocksParser.StringContext location : locationList) {
        locations.add(((StringLiteral) visit(location)).getValue());
    }
    return new CreateStorageVolumeStmt(context.IF() != null,
            svName, storageType, getProperties(context.properties()), locations,
            context.comment() == null ? null : ((StringLiteral) visit(context.comment().string())).getStringValue(),
            createPos(context));
}
@Override
public ParseNode visitShowStorageVolumesStatement(StarRocksParser.ShowStorageVolumesStatementContext context) {
    // SHOW STORAGE VOLUMES [LIKE '<pattern>'].
    String pattern = null;
    if (context.pattern != null) {
        StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
        pattern = stringLiteral.getValue();
    }
    return new ShowStorageVolumesStmt(pattern, createPos(context));
}
@Override
public ParseNode visitAlterStorageVolumeStatement(StarRocksParser.AlterStorageVolumeStatementContext context) {
    // ALTER STORAGE VOLUME <name> ...; flattens the comment/properties clauses
    // into a single stmt. If a clause kind repeats, the last one wins.
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    String svName = identifier.getValue();
    NodePosition pos = createPos(context);
    List<AlterStorageVolumeClause> alterClauses = visit(context.alterStorageVolumeClause(),
            AlterStorageVolumeClause.class);
    Map<String, String> properties = new HashMap<>();
    String comment = null;
    for (AlterStorageVolumeClause clause : alterClauses) {
        if (clause.getOpType().equals(AlterStorageVolumeClause.AlterOpType.ALTER_COMMENT)) {
            comment = ((AlterStorageVolumeCommentClause) clause).getNewComment();
        } else if (clause.getOpType().equals(AlterStorageVolumeClause.AlterOpType.MODIFY_PROPERTIES)) {
            properties = ((ModifyStorageVolumePropertiesClause) clause).getProperties();
        }
    }
    return new AlterStorageVolumeStmt(svName, properties, comment, pos);
}
@Override
public ParseNode visitDropStorageVolumeStatement(StarRocksParser.DropStorageVolumeStatementContext context) {
    // DROP STORAGE VOLUME [IF EXISTS] <name>.
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    String svName = identifier.getValue();
    return new DropStorageVolumeStmt(context.IF() != null, svName, createPos(context));
}
@Override
public ParseNode visitDescStorageVolumeStatement(StarRocksParser.DescStorageVolumeStatementContext context) {
    // DESC STORAGE VOLUME <name>.
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    String svName = identifier.getValue();
    return new DescStorageVolumeStmt(svName, createPos(context));
}
@Override
public ParseNode visitSetDefaultStorageVolumeStatement(
        StarRocksParser.SetDefaultStorageVolumeStatementContext context) {
    // SET <name> AS DEFAULT STORAGE VOLUME.
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    String svName = identifier.getValue();
    return new SetDefaultStorageVolumeStmt(svName, createPos(context));
}
@Override
public ParseNode visitModifyStorageVolumeCommentClause(
        StarRocksParser.ModifyStorageVolumeCommentClauseContext context) {
    // COMMENT = '<text>' clause of ALTER STORAGE VOLUME.
    String comment = ((StringLiteral) visit(context.string())).getStringValue();
    return new AlterStorageVolumeCommentClause(comment, createPos(context));
}
@Override
public ParseNode visitModifyStorageVolumePropertiesClause(
        StarRocksParser.ModifyStorageVolumePropertiesClauseContext context) {
    // SET ('k' = 'v', ...) clause of ALTER STORAGE VOLUME.
    Map<String, String> properties = new HashMap<>();
    List<Property> propertyList = visit(context.propertyList().property(), Property.class);
    for (Property property : propertyList) {
        properties.put(property.getKey(), property.getValue());
    }
    return new ModifyStorageVolumePropertiesClause(properties, createPos(context));
}
@Override
public ParseNode visitUpdateFailPointStatusStatement(
        StarRocksParser.UpdateFailPointStatusStatementContext ctx) {
    // ADMIN ENABLE/DISABLE FAILPOINT '<name>' [WITH n TIMES | WITH p PROBABILITY] [ON BACKEND 'id,id'].
    String failpointName = ((StringLiteral) visit(ctx.string(0))).getStringValue();
    List<String> backendList = null;
    if (ctx.BACKEND() != null) {
        // Backend ids arrive as one comma-separated string literal.
        String tmp = ((StringLiteral) visit(ctx.string(1))).getStringValue();
        backendList = Lists.newArrayList(tmp.split(","));
    }
    if (ctx.ENABLE() != null) {
        if (ctx.TIMES() != null) {
            int nTimes = Integer.parseInt(ctx.INTEGER_VALUE().getText());
            if (nTimes <= 0) {
                // NOTE(review): these ParsingExceptions carry no NodePosition,
                // unlike most other visitors here — confirm intended.
                throw new ParsingException(String.format(
                        "Invalid TIMES value %d, it should be a positive integer", nTimes));
            }
            return new UpdateFailPointStatusStatement(failpointName, nTimes, backendList, createPos(ctx));
        } else if (ctx.PROBABILITY() != null) {
            double probability = Double.parseDouble(ctx.DECIMAL_VALUE().getText());
            if (probability < 0 || probability > 1) {
                throw new ParsingException(String.format(
                        "Invalid PROBABILITY value %f, it should be in range [0, 1]", probability));
            }
            return new UpdateFailPointStatusStatement(failpointName, probability, backendList, createPos(ctx));
        }
        // Plain ENABLE with no TIMES/PROBABILITY modifier.
        return new UpdateFailPointStatusStatement(failpointName, true, backendList, createPos(ctx));
    }
    // DISABLE path.
    return new UpdateFailPointStatusStatement(failpointName, false, backendList, createPos(ctx));
}
@Override
public ParseNode visitShowFailPointStatement(StarRocksParser.ShowFailPointStatementContext ctx) {
    // SHOW FAILPOINTS [LIKE '<pattern>'] [ON BACKEND 'id,id'].
    String pattern = null;
    List<String> backendList = null;
    // string() children are positional: LIKE pattern (if present) comes before the backend list.
    int idx = 0;
    if (ctx.LIKE() != null) {
        pattern = ((StringLiteral) visit(ctx.string(idx++))).getStringValue();
    }
    if (ctx.BACKEND() != null) {
        String tmp = ((StringLiteral) visit(ctx.string(idx++))).getStringValue();
        backendList = Lists.newArrayList(tmp.split(","));
    }
    return new ShowFailPointStatement(pattern, backendList, createPos(ctx));
}
@Override
public ParseNode visitCreateDictionaryStatement(StarRocksParser.CreateDictionaryStatementContext context) {
    // CREATE DICTIONARY <dict> USING <table> (col KEY|VALUE, ...) [PROPERTIES(...)].
    String dictionaryName = getQualifiedName(context.dictionaryName().qualifiedName()).toString();
    String queryableObject = getQualifiedName(context.qualifiedName()).toString();
    List<StarRocksParser.DictionaryColumnDescContext> dictionaryColumnDescs = context.dictionaryColumnDesc();
    List<String> dictionaryKeys = new ArrayList<>();
    List<String> dictionaryValues = new ArrayList<>();
    for (StarRocksParser.DictionaryColumnDescContext desc : dictionaryColumnDescs) {
        String columnName = getQualifiedName(desc.qualifiedName()).toString();
        if (desc.KEY() != null) {
            dictionaryKeys.add(columnName);
        }
        if (desc.VALUE() != null) {
            dictionaryValues.add(columnName);
        }
    }
    Map<String, String> properties = null;
    if (context.properties() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new CreateDictionaryStmt(dictionaryName, queryableObject, dictionaryKeys, dictionaryValues,
            properties, createPos(context));
}
@Override
public ParseNode visitDropDictionaryStatement(StarRocksParser.DropDictionaryStatementContext context) {
    // DROP DICTIONARY <dict> [CACHE]; CACHE drops only the cached data.
    String dictionaryName = getQualifiedName(context.qualifiedName()).toString();
    boolean cacheOnly = false;
    if (context.CACHE() != null) {
        cacheOnly = true;
    }
    return new DropDictionaryStmt(dictionaryName, cacheOnly, createPos(context));
}
@Override
public ParseNode visitRefreshDictionaryStatement(StarRocksParser.RefreshDictionaryStatementContext context) {
    // REFRESH DICTIONARY <dict>.
    String dictionaryName = getQualifiedName(context.qualifiedName()).toString();
    return new RefreshDictionaryStmt(dictionaryName, createPos(context));
}
@Override
public ParseNode visitShowDictionaryStatement(StarRocksParser.ShowDictionaryStatementContext context) {
    // SHOW DICTIONARY [<dict>]; null name means show all.
    String dictionaryName = null;
    if (context.qualifiedName() != null) {
        dictionaryName = getQualifiedName(context.qualifiedName()).toString();
    }
    return new ShowDictionaryStmt(dictionaryName, createPos(context));
}
@Override
public ParseNode visitCancelRefreshDictionaryStatement(
        StarRocksParser.CancelRefreshDictionaryStatementContext context) {
    // CANCEL REFRESH DICTIONARY <dict>.
    String dictionaryName = getQualifiedName(context.qualifiedName()).toString();
    return new CancelRefreshDictionaryStmt(dictionaryName, createPos(context));
}
@Override
public ParseNode visitUnsupportedStatement(StarRocksParser.UnsupportedStatementContext context) {
    // Placeholder node for grammar rules that are parsed but not supported.
    return new UnsupportedStmt(createPos(context));
}
@Override
public ParseNode visitAddFrontendClause(StarRocksParser.AddFrontendClauseContext context) {
    // ADD FOLLOWER|OBSERVER '<host:port>' clause of ALTER SYSTEM.
    String cluster = ((StringLiteral) visit(context.string())).getStringValue();
    NodePosition pos = createPos(context);
    if (context.FOLLOWER() != null) {
        return new AddFollowerClause(cluster, pos);
    } else {
        return new AddObserverClause(cluster, pos);
    }
}
@Override
public ParseNode visitDropFrontendClause(StarRocksParser.DropFrontendClauseContext context) {
    // DROP FOLLOWER|OBSERVER '<host:port>' clause of ALTER SYSTEM.
    String cluster = ((StringLiteral) visit(context.string())).getStringValue();
    NodePosition pos = createPos(context);
    if (context.FOLLOWER() != null) {
        return new DropFollowerClause(cluster, pos);
    } else {
        return new DropObserverClause(cluster, pos);
    }
}
@Override
public ParseNode visitModifyFrontendHostClause(StarRocksParser.ModifyFrontendHostClauseContext context) {
    // MODIFY FRONTEND HOST '<old>' TO '<new>'; exactly two string children expected.
    List<String> clusters =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    return new ModifyFrontendAddressClause(clusters.get(0), clusters.get(1), createPos(context));
}
@Override
public ParseNode visitAddBackendClause(StarRocksParser.AddBackendClauseContext context) {
    // ADD BACKEND '<host:port>' [, ...] clause of ALTER SYSTEM.
    List<String> backends =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    return new AddBackendClause(backends, createPos(context));
}
@Override
public ParseNode visitDropBackendClause(StarRocksParser.DropBackendClauseContext context) {
    // DROP BACKEND '<host:port>' [, ...] [FORCE] clause of ALTER SYSTEM.
    List<String> clusters =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    return new DropBackendClause(clusters, context.FORCE() != null, createPos(context));
}
@Override
public ParseNode visitDecommissionBackendClause(StarRocksParser.DecommissionBackendClauseContext context) {
    // DECOMMISSION BACKEND '<host:port>' [, ...] clause of ALTER SYSTEM.
    List<String> clusters =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    return new DecommissionBackendClause(clusters, createPos(context));
}
@Override
public ParseNode visitModifyBackendClause(StarRocksParser.ModifyBackendClauseContext context) {
    // MODIFY BACKEND: either HOST '<old>' TO '<new>', or '<host:port>' SET ('k'='v', ...).
    List<String> strings =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    if (context.HOST() != null) {
        return new ModifyBackendClause(strings.get(0), strings.get(1), createPos(context));
    } else {
        String backendHostPort = strings.get(0);
        Map<String, String> properties = new HashMap<>();
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
        return new ModifyBackendClause(backendHostPort, properties, createPos(context));
    }
}
@Override
public ParseNode visitAddComputeNodeClause(StarRocksParser.AddComputeNodeClauseContext context) {
    // ADD COMPUTE NODE '<host:port>' [, ...] clause of ALTER SYSTEM.
    // NOTE(review): no createPos(context) is passed here, unlike the sibling
    // DropComputeNodeClause — confirm whether a position-taking ctor exists.
    List<String> hostPorts =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    return new AddComputeNodeClause(hostPorts);
}
@Override
public ParseNode visitDropComputeNodeClause(StarRocksParser.DropComputeNodeClauseContext context) {
    // DROP COMPUTE NODE '<host:port>' [, ...] clause of ALTER SYSTEM.
    List<String> hostPorts =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    return new DropComputeNodeClause(hostPorts, createPos(context));
}
@Override
public ParseNode visitModifyBrokerClause(StarRocksParser.ModifyBrokerClauseContext context) {
    // ADD/DROP BROKER <name> '<host:port>' [, ...] or DROP ALL BROKER <name>.
    String brokerName = ((Identifier) visit(context.identifierOrString())).getValue();
    NodePosition pos = createPos(context);
    if (context.ALL() != null) {
        return ModifyBrokerClause.createDropAllBrokerClause(brokerName, pos);
    }
    List<String> hostPorts =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    if (context.ADD() != null) {
        return ModifyBrokerClause.createAddBrokerClause(brokerName, hostPorts, pos);
    }
    return ModifyBrokerClause.createDropBrokerClause(brokerName, hostPorts, pos);
}
@Override
public ParseNode visitAlterLoadErrorUrlClause(StarRocksParser.AlterLoadErrorUrlClauseContext context) {
    // SET LOAD ERRORS HUB PROPERTIES(...) clause.
    return new AlterLoadErrorUrlClause(getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitCreateImageClause(StarRocksParser.CreateImageClauseContext context) {
    // ADMIN ... CREATE IMAGE clause.
    return new CreateImageClause(createPos(context));
}
@Override
public ParseNode visitCleanTabletSchedQClause(
        StarRocksParser.CleanTabletSchedQClauseContext context) {
    // ADMIN ... CLEAN TABLET SCHEDULER QUEUE clause.
    return new CleanTabletSchedQClause(createPos(context));
}
@Override
public ParseNode visitCreateIndexClause(StarRocksParser.CreateIndexClauseContext context) {
    // ADD INDEX <name> (col, ...) [USING <type>] [COMMENT] [PROPERTIES] clause.
    // The IndexDef position spans from the index name to the comment (if any).
    Token start = context.identifier().start;
    String indexName = ((Identifier) visit(context.identifier())).getValue();
    List<Identifier> columnList = visit(context.identifierList().identifier(), Identifier.class);
    Token stop = context.identifierList().stop;
    String comment = null;
    if (context.comment() != null) {
        stop = context.comment().stop;
        comment = ((StringLiteral) visit(context.comment())).getStringValue();
    }
    IndexDef indexDef = new IndexDef(indexName,
            columnList.stream().map(Identifier::getValue).collect(toList()),
            getIndexType(context.indexType()),
            comment, getPropertyList(context.propertyList()),
            createPos(start, stop));
    return new CreateIndexClause(indexDef, createPos(context));
}
@Override
public ParseNode visitDropIndexClause(StarRocksParser.DropIndexClauseContext context) {
    // DROP INDEX <name> clause.
    Identifier identifier = (Identifier) visit(context.identifier());
    return new DropIndexClause(identifier.getValue(), createPos(context));
}
@Override
public ParseNode visitTableRenameClause(StarRocksParser.TableRenameClauseContext context) {
    // RENAME <newTableName> clause.
    Identifier identifier = (Identifier) visit(context.identifier());
    return new TableRenameClause(identifier.getValue(), createPos(context));
}
@Override
public ParseNode visitModifyCommentClause(StarRocksParser.ModifyCommentClauseContext context) {
    // COMMENT = '<text>' clause of ALTER TABLE.
    String comment = ((StringLiteral) visit(context.string())).getStringValue();
    return new AlterTableCommentClause(comment, createPos(context));
}
@Override
public ParseNode visitSwapTableClause(StarRocksParser.SwapTableClauseContext context) {
    // SWAP WITH <table> clause.
    Identifier identifier = (Identifier) visit(context.identifier());
    return new SwapTableClause(identifier.getValue(), createPos(context));
}
@Override
public ParseNode visitModifyPropertiesClause(StarRocksParser.ModifyPropertiesClauseContext context) {
    // SET ('k' = 'v', ...) clause of ALTER TABLE.
    Map<String, String> properties = new HashMap<>();
    List<Property> propertyList = visit(context.propertyList().property(), Property.class);
    for (Property property : propertyList) {
        properties.put(property.getKey(), property.getValue());
    }
    return new ModifyTablePropertiesClause(properties, createPos(context));
}
@Override
public ParseNode visitOptimizeClause(StarRocksParser.OptimizeClauseContext context) {
    // OPTIMIZE clause: every sub-part (keys, partitioning, distribution,
    // ordering, partition names) is independently optional.
    return new OptimizeClause(
            context.keyDesc() == null ? null : getKeysDesc(context.keyDesc()),
            context.partitionDesc() == null ? null : getPartitionDesc(context.partitionDesc(), null),
            context.distributionDesc() == null ? null : (DistributionDesc) visit(context.distributionDesc()),
            context.orderByDesc() == null ? null :
                    visit(context.orderByDesc().identifierList().identifier(), Identifier.class)
                            .stream().map(Identifier::getValue).collect(toList()),
            context.partitionNames() == null ? null : (PartitionNames) visit(context.partitionNames()),
            createPos(context));
}
@Override
public ParseNode visitAddColumnClause(StarRocksParser.AddColumnClauseContext context) {
    // ALTER TABLE ... ADD COLUMN <def> [FIRST | AFTER <col>] [IN <rollup>] [PROPERTIES].
    // Auto-increment columns cannot be added, and generated columns reject
    // rollup targets, positioning, and properties.
    ColumnDef columnDef = getColumnDef(context.columnDesc());
    if (columnDef.isAutoIncrement()) {
        throw new ParsingException(PARSER_ERROR_MSG.autoIncrementForbid(columnDef.getName(), "ADD"),
                columnDef.getPos());
    }
    ColumnPosition columnPosition = null;
    if (context.FIRST() != null) {
        columnPosition = ColumnPosition.FIRST;
    } else if (context.AFTER() != null) {
        StarRocksParser.IdentifierContext identifier = context.identifier(0);
        String afterColumnName = getIdentifierName(identifier);
        columnPosition = new ColumnPosition(afterColumnName, createPos(identifier));
    }
    String rollupName = null;
    if (context.rollupName != null) {
        rollupName = getIdentifierName(context.rollupName);
    }
    // Fix: removed a stray empty statement (';') and a dead `new HashMap<>()`
    // initialization that was immediately overwritten by getProperties().
    Map<String, String> properties = getProperties(context.properties());
    if (columnDef.isGeneratedColumn()) {
        if (rollupName != null) {
            throw new ParsingException(
                    PARSER_ERROR_MSG.generatedColumnLimit("rollupName", "ADD GENERATED COLUMN"),
                    columnDef.getPos());
        }
        if (columnPosition != null) {
            throw new ParsingException(
                    PARSER_ERROR_MSG.generatedColumnLimit("AFTER", "ADD GENERATED COLUMN"),
                    columnDef.getPos());
        }
        if (!properties.isEmpty()) {
            throw new ParsingException(
                    PARSER_ERROR_MSG.generatedColumnLimit("properties", "ADD GENERATED COLUMN"),
                    columnDef.getPos());
        }
    }
    return new AddColumnClause(columnDef, columnPosition, rollupName, properties, createPos(context));
}
@Override
public ParseNode visitAddColumnsClause(StarRocksParser.AddColumnsClauseContext context) {
    // ALTER TABLE ... ADD COLUMN (<def>, <def>, ...) [IN <rollup>] [PROPERTIES].
    // Same per-column restrictions as the single-column form: no auto-increment,
    // and generated columns reject rollup targets and properties.
    List<ColumnDef> columnDefs = getColumnDefs(context.columnDesc());
    // Fix: removed a dead `new HashMap<>()` initialization, and reuse this
    // validated map in the constructor instead of calling getProperties() twice.
    Map<String, String> properties = getProperties(context.properties());
    String rollupName = null;
    if (context.rollupName != null) {
        rollupName = getIdentifierName(context.rollupName);
    }
    for (ColumnDef columnDef : columnDefs) {
        if (columnDef.isAutoIncrement()) {
            throw new ParsingException(PARSER_ERROR_MSG.autoIncrementForbid(columnDef.getName(), "ADD"),
                    columnDef.getPos());
        }
        if (columnDef.isGeneratedColumn()) {
            if (rollupName != null) {
                throw new ParsingException(
                        PARSER_ERROR_MSG.generatedColumnLimit("rollupName", "ADD GENERATED COLUMN"),
                        columnDef.getPos());
            }
            if (!properties.isEmpty()) {
                throw new ParsingException(
                        PARSER_ERROR_MSG.generatedColumnLimit("properties", "ADD GENERATED COLUMN"),
                        columnDef.getPos());
            }
        }
    }
    return new AddColumnsClause(columnDefs, rollupName, properties, createPos(context));
}
@Override
public ParseNode visitDropColumnClause(StarRocksParser.DropColumnClauseContext context) {
String columnName = getIdentifierName(context.identifier(0));
String rollupName = null;
if (context.rollupName != null) {
rollupName = getIdentifierName(context.rollupName);
}
return new DropColumnClause(columnName, rollupName, getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitModifyColumnClause(StarRocksParser.ModifyColumnClauseContext context) {
ColumnDef columnDef = getColumnDef(context.columnDesc());
if (columnDef.isAutoIncrement()) {
throw new ParsingException(PARSER_ERROR_MSG.autoIncrementForbid(columnDef.getName(), "MODIFY"),
columnDef.getPos());
}
ColumnPosition columnPosition = null;
if (context.FIRST() != null) {
columnPosition = ColumnPosition.FIRST;
} else if (context.AFTER() != null) {
StarRocksParser.IdentifierContext identifier = context.identifier(0);
String afterColumnName = getIdentifierName(identifier);
columnPosition = new ColumnPosition(afterColumnName, createPos(identifier));
}
String rollupName = null;
if (context.rollupName != null) {
rollupName = getIdentifierName(context.rollupName);
}
if (columnDef.isGeneratedColumn()) {
if (rollupName != null) {
throw new ParsingException(PARSER_ERROR_MSG.generatedColumnLimit("rollupName",
"MODIFY GENERATED COLUMN"), columnDef.getPos());
}
if (columnPosition != null) {
throw new ParsingException(PARSER_ERROR_MSG.generatedColumnLimit("columnPosition",
"MODIFY GENERATED COLUMN"), columnDef.getPos());
}
}
return new ModifyColumnClause(columnDef, columnPosition, rollupName, getProperties(context.properties()),
createPos(context));
}
@Override
public ParseNode visitColumnRenameClause(StarRocksParser.ColumnRenameClauseContext context) {
    // RENAME COLUMN old TO new.
    return new ColumnRenameClause(
            getIdentifierName(context.oldColumn),
            getIdentifierName(context.newColumn),
            createPos(context));
}
@Override
public ParseNode visitReorderColumnsClause(StarRocksParser.ReorderColumnsClauseContext context) {
    // ORDER BY (c1, c2, ...): collect the new column order, optionally scoped to a rollup.
    List<String> columnOrder = new ArrayList<>();
    for (StarRocksParser.IdentifierContext id : context.identifierList().identifier()) {
        columnOrder.add(getIdentifierName(id));
    }
    String rollupName = (context.rollupName == null) ? null : getIdentifierName(context.rollupName);
    return new ReorderColumnsClause(columnOrder, rollupName, getProperties(context.properties()),
            createPos(context));
}
@Override
public ParseNode visitRollupRenameClause(StarRocksParser.RollupRenameClauseContext context) {
    // RENAME ROLLUP old TO new.
    String oldName = ((Identifier) visit(context.rollupName)).getValue();
    String newName = ((Identifier) visit(context.newRollupName)).getValue();
    return new RollupRenameClause(oldName, newName, createPos(context));
}
@Override
public ParseNode visitCompactionClause(StarRocksParser.CompactionClauseContext ctx) {
    NodePosition pos = createPos(ctx);
    // CUMULATIVE keyword selects cumulative compaction; absent means base compaction.
    boolean baseCompaction = ctx.CUMULATIVE() == null;
    if (ctx.identifier() != null) {
        // Single partition name.
        final String partitionName = ((Identifier) visit(ctx.identifier())).getValue();
        return new CompactionClause(Collections.singletonList(partitionName), baseCompaction, pos);
    } else if (ctx.identifierList() != null) {
        // Parenthesized list of partition names.
        final List<Identifier> identifierList = visit(ctx.identifierList().identifier(), Identifier.class);
        return new CompactionClause(identifierList.stream().map(Identifier::getValue).collect(toList()),
                baseCompaction, pos);
    } else {
        // No partitions given: compaction over the whole table.
        return new CompactionClause(baseCompaction, pos);
    }
}
@Override
public ParseNode visitAddPartitionClause(StarRocksParser.AddPartitionClauseContext context) {
    // ADD [TEMPORARY] PARTITION ...: exactly one of the four partition-desc
    // alternatives below is present in the parse tree (grammar alternatives).
    boolean temporary = context.TEMPORARY() != null;
    PartitionDesc partitionDesc = null;
    if (context.singleRangePartition() != null) {
        partitionDesc = (PartitionDesc) visitSingleRangePartition(context.singleRangePartition());
    } else if (context.multiRangePartition() != null) {
        partitionDesc = (PartitionDesc) visitMultiRangePartition(context.multiRangePartition());
    } else if (context.singleItemListPartitionDesc() != null) {
        partitionDesc = (PartitionDesc) visitSingleItemListPartitionDesc(context.singleItemListPartitionDesc());
    } else if (context.multiItemListPartitionDesc() != null) {
        partitionDesc = (PartitionDesc) visitMultiItemListPartitionDesc(context.multiItemListPartitionDesc());
    }
    // Optional per-partition distribution override.
    DistributionDesc distributionDesc = null;
    if (context.distributionDesc() != null) {
        distributionDesc = (DistributionDesc) visitDistributionDesc(context.distributionDesc());
    }
    // Optional key=value properties.
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new AddPartitionClause(partitionDesc, distributionDesc, properties, temporary, createPos(context));
}
@Override
public ParseNode visitDropPartitionClause(StarRocksParser.DropPartitionClauseContext context) {
    // DROP [TEMPORARY] PARTITION [IF EXISTS] <name> [FORCE].
    String partitionName = ((Identifier) visit(context.identifier())).getValue();
    boolean isTemporary = context.TEMPORARY() != null;
    boolean isForce = context.FORCE() != null;
    boolean ifExists = context.EXISTS() != null;
    return new DropPartitionClause(ifExists, partitionName, isTemporary, isForce, createPos(context));
}
@Override
public ParseNode visitTruncatePartitionClause(StarRocksParser.TruncatePartitionClauseContext context) {
    // TRUNCATE PARTITION: the partition-name list is optional in the grammar.
    PartitionNames partitionNames =
            (context.partitionNames() == null) ? null : (PartitionNames) visit(context.partitionNames());
    return new TruncatePartitionClause(partitionNames, createPos(context));
}
@Override
public ParseNode visitModifyPartitionClause(StarRocksParser.ModifyPartitionClauseContext context) {
    // MODIFY PARTITION {name | (name, ...) | (*)} SET (properties).
    // properties stays null when no property list is present.
    Map<String, String> properties = null;
    NodePosition pos = createPos(context);
    if (context.propertyList() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    if (context.identifier() != null) {
        // Single partition name.
        final String partitionName = ((Identifier) visit(context.identifier())).getValue();
        return new ModifyPartitionClause(Collections.singletonList(partitionName), properties, pos);
    } else if (context.identifierList() != null) {
        // Explicit list of partition names.
        final List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class);
        return new ModifyPartitionClause(identifierList.stream().map(Identifier::getValue).collect(toList()),
                properties, pos);
    } else {
        // (*) form: apply to all partitions.
        return ModifyPartitionClause.createStarClause(properties, pos);
    }
}
@Override
public ParseNode visitReplacePartitionClause(StarRocksParser.ReplacePartitionClauseContext context) {
    // REPLACE PARTITION (...) WITH TEMPORARY PARTITION (...).
    PartitionNames current = (PartitionNames) visit(context.parName);
    PartitionNames replacement = (PartitionNames) visit(context.tempParName);
    return new ReplacePartitionClause(current, replacement,
            getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitPartitionRenameClause(StarRocksParser.PartitionRenameClauseContext context) {
    // RENAME PARTITION old TO new.
    String oldName = ((Identifier) visit(context.parName)).getValue();
    String newName = ((Identifier) visit(context.newParName)).getValue();
    return new PartitionRenameClause(oldName, newName, createPos(context));
}
/**
 * Resolves a possibly-qualified pipe name ("db.pipe" or "pipe") into a PipeName.
 * Throws ParsingException for any other number of name parts.
 *
 * Simplified from the original: the second null-check chain was dead logic —
 * after the first chain, pipeName was always non-null (both surviving branches
 * assigned it, the remaining branch threw), so the trailing "invalid" branch
 * was unreachable.
 */
private PipeName resolvePipeName(StarRocksParser.QualifiedNameContext context) {
    QualifiedName qualifiedName = getQualifiedName(context);
    List<String> parts = qualifiedName.getParts();
    if (parts.size() == 2) {
        // db.pipe
        return new PipeName(createPos(context), parts.get(0), parts.get(1));
    } else if (parts.size() == 1) {
        // bare pipe name, database taken from session elsewhere
        return new PipeName(createPos(context), parts.get(0));
    }
    throw new ParsingException(PARSER_ERROR_MSG.invalidPipeName(qualifiedName.toString()));
}
@Override
public ParseNode visitCreatePipeStatement(StarRocksParser.CreatePipeStatementContext context) {
    // CREATE [OR REPLACE] PIPE [IF NOT EXISTS] <name> [properties] AS <insert>.
    PipeName pipeName = resolvePipeName(context.qualifiedName());
    boolean ifNotExists = context.ifNotExists() != null && context.ifNotExists().IF() != null;
    boolean replace = context.orReplace() != null && context.orReplace().OR() != null;
    // The two options are mutually exclusive.
    if (ifNotExists && replace) {
        throw new ParsingException(PARSER_ERROR_MSG.conflictedOptions("OR REPLACE", "IF NOT EXISTS"));
    }
    // The pipe body must be an INSERT statement.
    ParseNode insertNode = visit(context.insertStatement());
    if (!(insertNode instanceof InsertStmt)) {
        throw new ParsingException(PARSER_ERROR_MSG.unsupportedStatement(insertNode.toSql()),
                context.insertStatement());
    }
    // Properties are looked up case-insensitively.
    Map<String, String> properties = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    if (context.properties() != null) {
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    InsertStmt insertStmt = (InsertStmt) insertNode;
    // Character offset of the INSERT text so the original SQL can be recovered.
    int insertSqlIndex = context.insertStatement().start.getStartIndex();
    return new CreatePipeStmt(ifNotExists, replace, pipeName, insertSqlIndex, insertStmt, properties,
            createPos(context));
}
@Override
public ParseNode visitDropPipeStatement(StarRocksParser.DropPipeStatementContext context) {
    // DROP PIPE [IF EXISTS] <name>.
    boolean ifExists = context.IF() != null;
    return new DropPipeStmt(ifExists, resolvePipeName(context.qualifiedName()), createPos(context));
}
@Override
public ParseNode visitShowPipeStatement(StarRocksParser.ShowPipeStatementContext context) {
    // SHOW PIPES [FROM db] [LIKE pattern | WHERE expr] [ORDER BY ...] [LIMIT ...].
    String dbName = null;
    if (context.qualifiedName() != null) {
        dbName = getQualifiedName(context.qualifiedName()).toString();
    }
    List<OrderByElement> orderBy = null;
    if (context.ORDER() != null) {
        orderBy = new ArrayList<>();
        orderBy.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limit = null;
    if (context.limitElement() != null) {
        limit = (LimitElement) visit(context.limitElement());
    }
    // LIKE and WHERE are mutually exclusive filters; pass the unused one as null.
    if (context.LIKE() != null) {
        StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
        return new ShowPipeStmt(dbName, stringLiteral.getValue(), null, orderBy, limit, createPos(context));
    } else if (context.WHERE() != null) {
        return new ShowPipeStmt(dbName, null, (Expr) visit(context.expression()), orderBy, limit,
                createPos(context));
    } else {
        return new ShowPipeStmt(dbName, null, null, orderBy, limit, createPos(context));
    }
}
@Override
public ParseNode visitDescPipeStatement(StarRocksParser.DescPipeStatementContext context) {
    // DESC PIPE <name>.
    return new DescPipeStmt(createPos(context), resolvePipeName(context.qualifiedName()));
}
@Override
public ParseNode visitAlterPipeClause(StarRocksParser.AlterPipeClauseContext context) {
    // Dispatches the ALTER PIPE sub-clause: SUSPEND | RESUME | RETRY {ALL | FILE f} | SET (props).
    if (context.SUSPEND() != null) {
        // true => pause
        return new AlterPipePauseResume(createPos(context), true);
    } else if (context.RESUME() != null) {
        return new AlterPipePauseResume(createPos(context), false);
    } else if (context.RETRY() != null) {
        if (context.ALL() != null) {
            // Retry every failed file.
            return new AlterPipeClauseRetry(createPos(context), true);
        } else {
            // Retry one specific file.
            String fileName = ((StringLiteral) visitString(context.fileName)).getStringValue();
            return new AlterPipeClauseRetry(createPos(context), false, fileName);
        }
    } else if (context.SET() != null) {
        Map<String, String> properties = getPropertyList(context.propertyList());
        if (MapUtils.isEmpty(properties)) {
            throw new ParsingException("empty property");
        }
        return new AlterPipeSetProperty(createPos(context), properties);
    } else {
        throw new ParsingException(PARSER_ERROR_MSG.unsupportedOpWithInfo(context.toString()));
    }
}
@Override
public ParseNode visitAlterPipeStatement(StarRocksParser.AlterPipeStatementContext context) {
    // ALTER PIPE <name> <clause>.
    AlterPipeClause clause = (AlterPipeClause) visit(context.alterPipeClause());
    return new AlterPipeStmt(createPos(context), resolvePipeName(context.qualifiedName()), clause);
}
@Override
public ParseNode visitQueryStatement(StarRocksParser.QueryStatementContext context) {
    // Top-level query: wraps the relation and attaches optional
    // INTO OUTFILE, EXPLAIN and TRACE decorations.
    QueryRelation queryRelation = (QueryRelation) visit(context.queryRelation());
    QueryStatement queryStatement = new QueryStatement(queryRelation);
    if (context.outfile() != null) {
        queryStatement.setOutFileClause((OutFileClause) visit(context.outfile()));
    }
    if (context.explainDesc() != null) {
        queryStatement.setIsExplain(true, getExplainType(context.explainDesc()));
    }
    if (context.optimizerTrace() != null) {
        // Trace module defaults to "base" unless one is named explicitly.
        String module = "base";
        if (context.optimizerTrace().identifier() != null) {
            module = ((Identifier) visit(context.optimizerTrace().identifier())).getValue();
        }
        queryStatement.setIsTrace(getTraceMode(context.optimizerTrace()), module);
    }
    return queryStatement;
}
// Maps the TRACE keyword present in the clause to the optimizer tracing mode.
private Tracers.Mode getTraceMode(StarRocksParser.OptimizerTraceContext context) {
    if (context.LOGS() != null) {
        return Tracers.Mode.LOGS;
    }
    if (context.VALUES() != null) {
        return Tracers.Mode.VARS;
    }
    if (context.TIMES() != null) {
        return Tracers.Mode.TIMER;
    }
    return context.ALL() != null ? Tracers.Mode.TIMING : Tracers.Mode.NONE;
}
@Override
public ParseNode visitQueryRelation(StarRocksParser.QueryRelationContext context) {
    // Visit the query body, then attach any WITH-clause CTEs to it.
    QueryRelation body = (QueryRelation) visit(context.queryNoWith());
    if (context.withClause() != null) {
        for (CTERelation cte : visit(context.withClause().commonTableExpression(), CTERelation.class)) {
            body.addCTERelation(cte);
        }
    }
    return body;
}
@Override
public ParseNode visitCommonTableExpression(StarRocksParser.CommonTableExpressionContext context) {
    // name [(colAliases)] AS (query) — the CTE id is derived from the relation.
    QueryRelation cteBody = (QueryRelation) visit(context.queryRelation());
    String cteName = ((Identifier) visit(context.name)).getValue();
    return new CTERelation(
            RelationId.of(cteBody).hashCode(),
            cteName,
            getColumnNames(context.columnAliases()),
            new QueryStatement(cteBody),
            cteBody.getPos());
}
@Override
public ParseNode visitQueryNoWith(StarRocksParser.QueryNoWithContext context) {
    // Visit the primary query, then decorate it with ORDER BY / LIMIT.
    QueryRelation query = (QueryRelation) visit(context.queryPrimary());
    List<OrderByElement> orderByElements = new ArrayList<>();
    if (context.ORDER() != null) {
        orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    query.setOrderBy(orderByElements);
    // A missing LIMIT is represented as null on the relation.
    query.setLimit(context.limitElement() == null ? null : (LimitElement) visit(context.limitElement()));
    return query;
}
@Override
public ParseNode visitSetOperation(StarRocksParser.SetOperationContext context) {
    // UNION / INTERSECT / EXCEPT. Adjacent operations of the same kind and
    // qualifier are flattened into one n-ary relation instead of a left-deep tree.
    NodePosition pos = createPos(context);
    QueryRelation left = (QueryRelation) visit(context.left);
    QueryRelation right = (QueryRelation) visit(context.right);
    // DISTINCT is the default when no quantifier is written.
    boolean distinct = true;
    if (context.setQuantifier() != null) {
        if (context.setQuantifier().DISTINCT() != null) {
            distinct = true;
        } else if (context.setQuantifier().ALL() != null) {
            distinct = false;
        }
    }
    SetQualifier setQualifier = distinct ? SetQualifier.DISTINCT : SetQualifier.ALL;
    switch (context.operator.getType()) {
        case StarRocksLexer.UNION:
            // Merge into an existing UnionRelation only when qualifiers match.
            if (left instanceof UnionRelation && ((UnionRelation) left).getQualifier().equals(setQualifier)) {
                ((UnionRelation) left).addRelation(right);
                return left;
            } else {
                return new UnionRelation(Lists.newArrayList(left, right), setQualifier, pos);
            }
        case StarRocksLexer.INTERSECT:
            if (left instanceof IntersectRelation &&
                    ((IntersectRelation) left).getQualifier().equals(setQualifier)) {
                ((IntersectRelation) left).addRelation(right);
                return left;
            } else {
                return new IntersectRelation(Lists.newArrayList(left, right), setQualifier, pos);
            }
        default:
            // EXCEPT (the remaining set operator in the grammar).
            if (left instanceof ExceptRelation && ((ExceptRelation) left).getQualifier().equals(setQualifier)) {
                ((ExceptRelation) left).addRelation(right);
                return left;
            } else {
                return new ExceptRelation(Lists.newArrayList(left, right), setQualifier, pos);
            }
    }
}
// Collects the key/value pairs of every SET_VAR hint in the given hint list.
// Returns an empty map when there are no hints.
private Map<String, String> extractVarHintValues(List<HintNode> hints) {
    Map<String, String> varHints = new HashMap<>();
    if (CollectionUtils.isEmpty(hints)) {
        return varHints;
    }
    hints.stream()
            .filter(hint -> hint instanceof SetVarHint)
            .forEach(hint -> varHints.putAll(hint.getValue()));
    return varHints;
}
@Override
public ParseNode visitQuerySpecification(StarRocksParser.QuerySpecificationContext context) {
    // SELECT ... [FROM ...] [WHERE] [GROUP BY] [HAVING] [QUALIFY].
    Relation from = null;
    List<SelectListItem> selectItems = visit(context.selectItem(), SelectListItem.class);
    if (context.fromClause() instanceof StarRocksParser.DualContext) {
        // FROM DUAL (or no FROM): SELECT * is meaningless without a table.
        for (SelectListItem item : selectItems) {
            if (item.isStar()) {
                throw new ParsingException(PARSER_ERROR_MSG.noTableUsed(), item.getPos());
            }
        }
    } else {
        StarRocksParser.FromContext fromContext = (StarRocksParser.FromContext) context.fromClause();
        if (fromContext.relations() != null) {
            // Comma-separated relations become a left-deep chain of cross joins
            // (join type null, resolved later).
            List<Relation> relations = visit(fromContext.relations().relation(), Relation.class);
            Iterator<Relation> iterator = relations.iterator();
            Relation relation = iterator.next();
            while (iterator.hasNext()) {
                Relation next = iterator.next();
                relation = new JoinRelation(null, relation, next, null, false);
            }
            from = relation;
        }
    }
    /*
    from == null means a statement without from or from dual, add a single row of null values here,
    so that the semantics are the same, and the processing of subsequent query logic can be simplified,
    such as select sum(1) or select sum(1) from dual, will be converted to select sum(1) from (values(null)) t.
    This can share the same logic as select sum(1) from table
    */
    if (from == null) {
        from = ValuesRelation.newDualRelation();
    }
    boolean isDistinct = context.setQuantifier() != null && context.setQuantifier().DISTINCT() != null;
    SelectList selectList = new SelectList(selectItems, isDistinct);
    selectList.setHintNodes(hintMap.get(context));
    SelectRelation resultSelectRelation = new SelectRelation(
            selectList,
            from,
            (Expr) visitIfPresent(context.where),
            (GroupByClause) visitIfPresent(context.groupingElement()),
            (Expr) visitIfPresent(context.having),
            createPos(context));
    // QUALIFY rewrite: wrap the select in a subquery aliased __QUALIFY__TABLE,
    // expose the window function as __QUALIFY__VALUE, and filter the outer
    // select on <window value> <op> <literal>.
    if (context.qualifyFunction != null) {
        resultSelectRelation.setOrderBy(new ArrayList<>());
        SubqueryRelation subqueryRelation = new SubqueryRelation(new QueryStatement(resultSelectRelation));
        TableName qualifyTableName = new TableName(null, "__QUALIFY__TABLE");
        subqueryRelation.setAlias(qualifyTableName);
        // The window function is always the last item of the select list here.
        SelectListItem windowFunction = selectItems.get(selectItems.size() - 1);
        windowFunction.setAlias("__QUALIFY__VALUE");
        long selectValue = Long.parseLong(context.limit.getText());
        // All remaining items are re-projected from the subquery by name.
        List<SelectListItem> selectItemsVirtual = Lists.newArrayList(selectItems);
        selectItemsVirtual.remove(selectItemsVirtual.size() - 1);
        List<SelectListItem> selectItemsOuter = new ArrayList<>();
        for (SelectListItem item : selectItemsVirtual) {
            if (item.getExpr() instanceof SlotRef) {
                SlotRef exprRef = (SlotRef) item.getExpr();
                String columnName = item.getAlias() == null ? exprRef.getColumnName() : item.getAlias();
                SlotRef resultSlotRef = new SlotRef(qualifyTableName, columnName);
                selectItemsOuter.add(new SelectListItem(resultSlotRef, null));
            } else {
                // Only plain column references can be forwarded through the rewrite.
                throw new ParsingException("Can't support result other than column.");
            }
        }
        SelectList selectListOuter = new SelectList(selectItemsOuter, isDistinct);
        IntLiteral rightValue = new IntLiteral(selectValue);
        SlotRef leftSlotRef = new SlotRef(qualifyTableName, "__QUALIFY__VALUE");
        BinaryType op = getComparisonOperator(((TerminalNode) context.comparisonOperator()
                .getChild(0)).getSymbol());
        return new SelectRelation(selectListOuter, subqueryRelation,
                new BinaryPredicate(op, leftSlotRef, rightValue), null, null, createPos(context));
    } else {
        return resultSelectRelation;
    }
}
@Override
public ParseNode visitSelectSingle(StarRocksParser.SelectSingleContext context) {
    // expression [AS alias] — alias may be an identifier or a quoted string.
    String alias;
    if (context.identifier() != null) {
        alias = ((Identifier) visit(context.identifier())).getValue();
    } else if (context.string() != null) {
        alias = ((StringLiteral) visit(context.string())).getStringValue();
    } else {
        alias = null;
    }
    return new SelectListItem((Expr) visit(context.expression()), alias, createPos(context));
}
@Override
public ParseNode visitSelectAll(StarRocksParser.SelectAllContext context) {
    // Either qualified "tbl.*" or bare "*".
    NodePosition pos = createPos(context);
    if (context.qualifiedName() == null) {
        return new SelectListItem(null, pos);
    }
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    return new SelectListItem(qualifiedNameToTableName(qualifiedName), pos);
}
@Override
public ParseNode visitSingleGroupingSet(StarRocksParser.SingleGroupingSetContext context) {
    // Plain GROUP BY expr, expr, ...
    List<Expr> groupingExprs = visit(context.expressionList().expression(), Expr.class);
    return new GroupByClause(new ArrayList<>(groupingExprs),
            GroupByClause.GroupingType.GROUP_BY, createPos(context));
}
@Override
public ParseNode visitRollup(StarRocksParser.RollupContext context) {
    // GROUP BY ROLLUP(expr, ...)
    return new GroupByClause(
            new ArrayList<>(visit(context.expressionList().expression(), Expr.class)),
            GroupByClause.GroupingType.ROLLUP, createPos(context));
}
@Override
public ParseNode visitCube(StarRocksParser.CubeContext context) {
    // GROUP BY CUBE(expr, ...)
    return new GroupByClause(
            new ArrayList<>(visit(context.expressionList().expression(), Expr.class)),
            GroupByClause.GroupingType.CUBE, createPos(context));
}
@Override
public ParseNode visitMultipleGroupingSets(StarRocksParser.MultipleGroupingSetsContext context) {
    // GROUP BY GROUPING SETS ((...), (...), ...)
    List<ArrayList<Expr>> groupingSets = new ArrayList<>();
    for (StarRocksParser.GroupingSetContext groupingSetContext : context.groupingSet()) {
        groupingSets.add(new ArrayList<>(visit(groupingSetContext.expression(), Expr.class)));
    }
    return new GroupByClause(groupingSets, GroupByClause.GroupingType.GROUPING_SETS, createPos(context));
}
@Override
public ParseNode visitGroupingOperation(StarRocksParser.GroupingOperationContext context) {
    // GROUPING(expr, ...) call inside a grouping-sets query.
    List<Expr> args = visit(context.expression(), Expr.class);
    return new GroupingFunctionCallExpr("grouping", args, createPos(context));
}
@Override
public ParseNode visitWindowFrame(StarRocksParser.WindowFrameContext context) {
    // ROWS/RANGE frame with a start boundary and an optional end boundary.
    NodePosition pos = createPos(context);
    AnalyticWindow.Type frameType = getFrameType(context.frameType);
    AnalyticWindow.Boundary startBoundary = (AnalyticWindow.Boundary) visit(context.start);
    if (context.end == null) {
        return new AnalyticWindow(frameType, startBoundary, pos);
    }
    AnalyticWindow.Boundary endBoundary = (AnalyticWindow.Boundary) visit(context.end);
    return new AnalyticWindow(frameType, startBoundary, endBoundary, pos);
}
// RANGE token => RANGE frame; anything else is a ROWS frame.
private static AnalyticWindow.Type getFrameType(Token type) {
    return type.getType() == StarRocksLexer.RANGE
            ? AnalyticWindow.Type.RANGE
            : AnalyticWindow.Type.ROWS;
}
@Override
public ParseNode visitUnboundedFrame(StarRocksParser.UnboundedFrameContext context) {
    // UNBOUNDED PRECEDING / FOLLOWING — no boundary expression.
    AnalyticWindow.BoundaryType boundaryType = getUnboundedFrameBoundType(context.boundType);
    return new AnalyticWindow.Boundary(boundaryType, null);
}
@Override
public ParseNode visitBoundedFrame(StarRocksParser.BoundedFrameContext context) {
    // <expr> PRECEDING / FOLLOWING.
    AnalyticWindow.BoundaryType boundaryType = getBoundedFrameBoundType(context.boundType);
    return new AnalyticWindow.Boundary(boundaryType, (Expr) visit(context.expression()));
}
@Override
public ParseNode visitCurrentRowBound(StarRocksParser.CurrentRowBoundContext context) {
    // CURRENT ROW — fixed boundary, no expression.
    AnalyticWindow.BoundaryType boundaryType = AnalyticWindow.BoundaryType.CURRENT_ROW;
    return new AnalyticWindow.Boundary(boundaryType, null);
}
// PRECEDING token selects PRECEDING; any other token is FOLLOWING.
private static AnalyticWindow.BoundaryType getBoundedFrameBoundType(Token token) {
    return token.getType() == StarRocksLexer.PRECEDING
            ? AnalyticWindow.BoundaryType.PRECEDING
            : AnalyticWindow.BoundaryType.FOLLOWING;
}
// PRECEDING token selects UNBOUNDED_PRECEDING; any other token is UNBOUNDED_FOLLOWING.
private static AnalyticWindow.BoundaryType getUnboundedFrameBoundType(Token token) {
    return token.getType() == StarRocksLexer.PRECEDING
            ? AnalyticWindow.BoundaryType.UNBOUNDED_PRECEDING
            : AnalyticWindow.BoundaryType.UNBOUNDED_FOLLOWING;
}
@Override
public ParseNode visitSortItem(StarRocksParser.SortItemContext context) {
    // One ORDER BY element: expression, ASC/DESC, NULLS FIRST/LAST.
    boolean isAsc = getOrderingType(context.ordering);
    return new OrderByElement(
            (Expr) visit(context.expression()),
            isAsc,
            getNullOrderingType(isAsc, context.nullOrdering),
            createPos(context));
}
// True means NULLS FIRST. An explicit NULLS FIRST/LAST token wins; otherwise
// the default is derived from the sort direction and the sql_mode setting.
private boolean getNullOrderingType(boolean isAsc, Token token) {
    if (token != null) {
        return token.getType() == StarRocksLexer.FIRST;
    }
    boolean nullsLastMode = SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_SORT_NULLS_LAST);
    return !nullsLastMode == isAsc;
}
// True means ascending; ASC is also the default when no direction token is present.
private static boolean getOrderingType(Token token) {
    return token == null || token.getType() == StarRocksLexer.ASC;
}
@Override
public ParseNode visitLimitElement(StarRocksParser.LimitElementContext context) {
    // Prepared-statement placeholders are not accepted for LIMIT/OFFSET.
    boolean limitIsParam = context.limit.getText().equals("?");
    boolean offsetIsParam = context.offset != null && context.offset.getText().equals("?");
    if (limitIsParam || offsetIsParam) {
        throw new ParsingException("using parameter(?) as limit or offset not supported");
    }
    long limit = Long.parseLong(context.limit.getText());
    // OFFSET defaults to 0 when omitted.
    long offset = (context.offset == null) ? 0 : Long.parseLong(context.offset.getText());
    return new LimitElement(offset, limit, createPos(context));
}
@Override
public ParseNode visitRelation(StarRocksParser.RelationContext context) {
    // Chain trailing join clauses onto the primary relation, left to right,
    // producing a left-deep join tree.
    Relation current = (Relation) visit(context.relationPrimary());
    for (JoinRelation join : visit(context.joinRelation(), JoinRelation.class)) {
        join.setLeft(current);
        current = join;
    }
    return current;
}
@Override
public ParseNode visitParenthesizedRelation(StarRocksParser.ParenthesizedRelationContext context) {
    // A single parenthesized relation is transparent; a comma-list becomes a
    // left-deep chain of cross joins (join type null, resolved later).
    List<StarRocksParser.RelationContext> relationContexts = context.relations().relation();
    if (relationContexts.size() == 1) {
        return visit(relationContexts.get(0));
    }
    List<Relation> relations = visit(relationContexts, Relation.class);
    Iterator<Relation> it = relations.iterator();
    Relation joined = it.next();
    while (it.hasNext()) {
        joined = new JoinRelation(null, joined, it.next(), null, false);
    }
    return joined;
}
@Override
public ParseNode visitTableAtom(StarRocksParser.TableAtomContext context) {
    // Base table reference with optional PARTITION list, TABLET list, REPLICA
    // list, bracket hints, alias and temporal clause. The `stop` token is
    // advanced past each optional suffix so the node position covers it.
    Token start = context.start;
    Token stop = context.stop;
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName tableName = qualifiedNameToTableName(qualifiedName);
    PartitionNames partitionNames = null;
    if (context.partitionNames() != null) {
        stop = context.partitionNames().stop;
        partitionNames = (PartitionNames) visit(context.partitionNames());
    }
    List<Long> tabletIds = Lists.newArrayList();
    if (context.tabletList() != null) {
        stop = context.tabletList().stop;
        tabletIds = context.tabletList().INTEGER_VALUE().stream().map(ParseTree::getText)
                .map(Long::parseLong).collect(toList());
    }
    List<Long> replicaLists = Lists.newArrayList();
    if (context.replicaList() != null) {
        stop = context.replicaList().stop;
        replicaLists = context.replicaList().INTEGER_VALUE().stream().map(ParseTree::getText).map(Long::parseLong)
                .collect(toList());
    }
    TableRelation tableRelation =
            new TableRelation(tableName, partitionNames, tabletIds, replicaLists, createPos(start, stop));
    // [hint1, hint2, ...] bracket hints attach to the table relation.
    if (context.bracketHint() != null) {
        for (Identifier identifier : visit(context.bracketHint().identifier(), Identifier.class)) {
            tableRelation.addTableHint(identifier.getValue());
        }
    }
    if (context.alias != null) {
        Identifier identifier = (Identifier) visit(context.alias);
        tableRelation.setAlias(new TableName(null, identifier.getValue()));
    }
    // Temporal clause (e.g. time-travel syntax) is kept as raw text.
    if (context.temporalClause() != null) {
        StringBuilder sb = new StringBuilder();
        for (ParseTree child : context.temporalClause().children) {
            sb.append(child.getText());
            sb.append(" ");
        }
        tableRelation.setTemporalClause(sb.toString());
    }
    return tableRelation;
}
@Override
public ParseNode visitJoinRelation(StarRocksParser.JoinRelationContext context) {
    // Builds one join step. The left side is intentionally null here; it is
    // filled in later by visitRelation, which chains join steps left-to-right.
    Relation left = null;
    Relation right = (Relation) visit(context.rightRelation);
    JoinOperator joinType = JoinOperator.INNER_JOIN;
    if (context.crossOrInnerJoinType() != null) {
        if (context.crossOrInnerJoinType().CROSS() != null) {
            joinType = JoinOperator.CROSS_JOIN;
        } else {
            joinType = JoinOperator.INNER_JOIN;
        }
    } else if (context.outerAndSemiJoinType().LEFT() != null) {
        // LEFT [OUTER | SEMI | ANTI]; bare LEFT defaults to LEFT OUTER.
        if (context.outerAndSemiJoinType().OUTER() != null) {
            joinType = JoinOperator.LEFT_OUTER_JOIN;
        } else if (context.outerAndSemiJoinType().SEMI() != null) {
            joinType = JoinOperator.LEFT_SEMI_JOIN;
        } else if (context.outerAndSemiJoinType().ANTI() != null) {
            joinType = JoinOperator.LEFT_ANTI_JOIN;
        } else {
            joinType = JoinOperator.LEFT_OUTER_JOIN;
        }
    } else if (context.outerAndSemiJoinType().RIGHT() != null) {
        // RIGHT [OUTER | SEMI | ANTI]; bare RIGHT defaults to RIGHT OUTER.
        if (context.outerAndSemiJoinType().OUTER() != null) {
            joinType = JoinOperator.RIGHT_OUTER_JOIN;
        } else if (context.outerAndSemiJoinType().SEMI() != null) {
            joinType = JoinOperator.RIGHT_SEMI_JOIN;
        } else if (context.outerAndSemiJoinType().ANTI() != null) {
            joinType = JoinOperator.RIGHT_ANTI_JOIN;
        } else {
            joinType = JoinOperator.RIGHT_OUTER_JOIN;
        }
    } else if (context.outerAndSemiJoinType().FULL() != null) {
        joinType = JoinOperator.FULL_OUTER_JOIN;
    }
    // ON <expr> or USING (col, ...) — at most one is present.
    Expr predicate = null;
    List<String> usingColNames = null;
    if (context.joinCriteria() != null) {
        if (context.joinCriteria().ON() != null) {
            predicate = (Expr) visit(context.joinCriteria().expression());
        } else {
            List<Identifier> criteria = visit(context.joinCriteria().identifier(), Identifier.class);
            usingColNames = criteria.stream().map(Identifier::getValue).collect(Collectors.toList());
        }
    }
    JoinRelation joinRelation = new JoinRelation(joinType, left, right, predicate,
            context.LATERAL() != null, createPos(context));
    joinRelation.setUsingColNames(usingColNames);
    // Bracket hint may carry a join strategy, a skew column and skew values.
    if (context.bracketHint() != null) {
        joinRelation.setJoinHint(((Identifier) visit(context.bracketHint().identifier(0))).getValue());
        if (context.bracketHint().primaryExpression() != null) {
            joinRelation.setSkewColumn((Expr) visit(context.bracketHint().primaryExpression()));
        }
        if (context.bracketHint().literalExpressionList() != null) {
            joinRelation.setSkewValues(visit(context.bracketHint().literalExpressionList().literalExpression(),
                    Expr.class));
        }
    }
    return joinRelation;
}
@Override
public ParseNode visitInlineTable(StarRocksParser.InlineTableContext context) {
    // VALUES (...), (...) [AS alias (col, ...)].
    List<ValueList> rowValues = visit(context.rowConstructor(), ValueList.class);
    List<List<Expr>> rows = rowValues.stream().map(ValueList::getRow).collect(toList());
    List<String> colNames = getColumnNames(context.columnAliases());
    if (colNames == null) {
        // No aliases given: synthesize column_0, column_1, ... from the width
        // of the first row (the grammar guarantees at least one row).
        colNames = new ArrayList<>();
        for (int i = 0; i < rows.get(0).size(); ++i) {
            colNames.add("column_" + i);
        }
    }
    ValuesRelation valuesRelation = new ValuesRelation(rows, colNames, createPos(context));
    if (context.alias != null) {
        Identifier identifier = (Identifier) visit(context.alias);
        valuesRelation.setAlias(new TableName(null, identifier.getValue()));
    }
    return valuesRelation;
}
@Override
public ParseNode visitNamedArguments(StarRocksParser.NamedArgumentsContext context) {
    // name => expression argument of a table function call.
    String name = ((Identifier) visit(context.identifier())).getValue();
    // NOTE(review): this only rejects the exact single-space string " ", not
    // arbitrary whitespace-only names — confirm whether isBlank() was intended.
    if (name == null || name.isEmpty() || name.equals(" ")) {
        throw new ParsingException(PARSER_ERROR_MSG.unsupportedExpr(" The left of => shouldn't be empty"));
    }
    Expr node = (Expr) visit(context.expression());
    if (node == null) {
        throw new ParsingException(PARSER_ERROR_MSG.unsupportedExpr(" The right of => shouldn't be null"));
    }
    return new NamedArgument(name, node);
}
@Override
public ParseNode visitTableFunction(StarRocksParser.TableFunctionContext context) {
    // fn(args) used in the FROM clause [AS alias (col aliases)].
    QualifiedName functionName = getQualifiedName(context.qualifiedName());
    List<Expr> arguments = visit(context.expressionList().expression(), Expr.class);
    FunctionCallExpr call =
            new FunctionCallExpr(FunctionName.createFnName(functionName.toString()), arguments);
    TableFunctionRelation relation = new TableFunctionRelation(call);
    if (context.alias != null) {
        String aliasName = ((Identifier) visit(context.alias)).getValue();
        relation.setAlias(new TableName(null, aliasName));
    }
    relation.setColumnOutputNames(getColumnNames(context.columnAliases()));
    return relation;
}
@Override
public ParseNode visitNormalizedTableFunction(StarRocksParser.NormalizedTableFunctionContext context) {
    // TABLE(fn(...)) form. Arguments are either all positional or all named.
    QualifiedName functionName = getQualifiedName(context.qualifiedName());
    List<Expr> parameters = null;
    if (context.argumentList().expressionList() != null) {
        parameters = visit(context.argumentList().expressionList().expression(), Expr.class);
    } else {
        parameters = visit(context.argumentList().namedArgumentList().namedArgument(), Expr.class);
    }
    // Mixing named and positional arguments is rejected.
    int namedArgNum = parameters.stream().filter(f -> f instanceof NamedArgument).collect(toList()).size();
    if (namedArgNum > 0 && namedArgNum < parameters.size()) {
        throw new SemanticException("All arguments must be passed by name or all must be passed positionally");
    }
    FunctionCallExpr functionCallExpr =
            new FunctionCallExpr(FunctionName.createFnName(functionName.toString()), parameters,
                    createPos(context));
    TableFunctionRelation relation = new TableFunctionRelation(functionCallExpr);
    if (context.alias != null) {
        Identifier identifier = (Identifier) visit(context.alias);
        relation.setAlias(new TableName(null, identifier.getValue()));
    }
    relation.setColumnOutputNames(getColumnNames(context.columnAliases()));
    return new NormalizedTableFunctionRelation(relation);
}
@Override
public ParseNode visitFileTableFunction(StarRocksParser.FileTableFunctionContext context) {
    // FILES(props) table function; properties drive file format and location.
    Map<String, String> properties = getPropertyList(context.propertyList());
    // NOTE(review): uses NodePosition.ZERO rather than createPos(context),
    // unlike sibling visitors — confirm whether position tracking was intended.
    return new FileTableFunctionRelation(properties, NodePosition.ZERO);
}
@Override
public ParseNode visitRowConstructor(StarRocksParser.RowConstructorContext context) {
    // One (expr, expr, ...) row of a VALUES list.
    List<Expr> exprs = visit(context.expressionList().expression(), Expr.class);
    return new ValueList(new ArrayList<>(exprs), createPos(context));
}
@Override
public ParseNode visitPartitionNames(StarRocksParser.PartitionNamesContext context) {
    // PARTITION (p1, p2, ...) or key-partition form; the latter is delegated.
    if (context.keyPartitions() != null) {
        return visit(context.keyPartitions());
    }
    List<Identifier> identifierList = visit(context.identifierOrString(), Identifier.class);
    return new PartitionNames(context.TEMPORARY() != null,
            identifierList.stream().map(Identifier::getValue).collect(toList()),
            createPos(context));
}
@Override
public ParseNode visitKeyPartitionList(StarRocksParser.KeyPartitionListContext context) {
    // Collect the (column, value) pairs of a key-partition spec into parallel lists.
    List<String> colNames = Lists.newArrayList();
    List<Expr> colValues = Lists.newArrayList();
    for (StarRocksParser.KeyPartitionContext pair : context.keyPartition()) {
        colNames.add(((Identifier) visit(pair.partitionColName)).getValue());
        colValues.add((Expr) visit(pair.partitionColValue));
    }
    // NOTE(review): position is NodePosition.ZERO while sibling visitors use createPos(context) — confirm intentional.
    return new PartitionNames(false, new ArrayList<>(), colNames, colValues, NodePosition.ZERO);
}
@Override
public ParseNode visitSubquery(StarRocksParser.SubqueryContext context) {
    // A subquery node is represented directly by its inner query relation.
    return visit(context.queryRelation());
}
@Override
public ParseNode visitQueryWithParentheses(StarRocksParser.QueryWithParenthesesContext context) {
    // A parenthesized query becomes an anonymous subquery relation.
    QueryRelation inner = (QueryRelation) visit(context.subquery());
    return new SubqueryRelation(new QueryStatement(inner));
}
@Override
public ParseNode visitSubqueryWithAlias(StarRocksParser.SubqueryWithAliasContext context) {
    // Wrap the inner query; the alias may be absent, in which case a null name is set.
    SubqueryRelation relation =
            new SubqueryRelation(new QueryStatement((QueryRelation) visit(context.subquery())));
    String aliasName = context.alias == null ? null : ((Identifier) visit(context.alias)).getValue();
    relation.setAlias(new TableName(null, aliasName));
    relation.setColumnOutputNames(getColumnNames(context.columnAliases()));
    return relation;
}
@Override
public ParseNode visitSubqueryExpression(StarRocksParser.SubqueryExpressionContext context) {
    // A subquery used in expression position.
    return new Subquery(new QueryStatement((QueryRelation) visit(context.subquery())));
}
@Override
public ParseNode visitInSubquery(StarRocksParser.InSubqueryContext context) {
    // <value> [NOT] IN (<subquery>)
    Subquery subquery = new Subquery(new QueryStatement((QueryRelation) visit(context.queryRelation())));
    return new InPredicate((Expr) visit(context.value), subquery, context.NOT() != null, createPos(context));
}
@Override
public ParseNode visitTupleInSubquery(StarRocksParser.TupleInSubqueryContext context) {
    // (a, b, ...) [NOT] IN (<subquery>)
    Subquery subquery = new Subquery(new QueryStatement((QueryRelation) visit(context.queryRelation())));
    List<Expr> tuple = visit(context.expression(), Expr.class);
    return new MultiInPredicate(tuple, subquery, context.NOT() != null, createPos(context));
}
@Override
public ParseNode visitExists(StarRocksParser.ExistsContext context) {
    // EXISTS (<subquery>) — the notExists flag is hard-wired false by this rule.
    QueryRelation inner = (QueryRelation) visit(context.queryRelation());
    return new ExistsPredicate(new Subquery(new QueryStatement(inner)), false, createPos(context));
}
@Override
public ParseNode visitScalarSubquery(StarRocksParser.ScalarSubqueryContext context) {
    // <expr> <op> (<subquery>) — comparison against a scalar subquery.
    Token opToken = ((TerminalNode) context.comparisonOperator().getChild(0)).getSymbol();
    BinaryType op = getComparisonOperator(opToken);
    Subquery subquery = new Subquery(new QueryStatement((QueryRelation) visit(context.queryRelation())));
    return new BinaryPredicate(op, (Expr) visit(context.booleanExpression()), subquery, createPos(context));
}
@Override
public ParseNode visitShowFunctionsStatement(StarRocksParser.ShowFunctionsStatementContext context) {
    // SHOW [FULL] [BUILTIN|GLOBAL] FUNCTIONS [FROM db] [LIKE pattern | WHERE expr]
    String dbName = context.db == null ? null : getQualifiedName(context.db).toString();
    String pattern = context.pattern == null ? null : ((StringLiteral) visit(context.pattern)).getValue();
    Expr where = context.expression() == null ? null : (Expr) visit(context.expression());
    return new ShowFunctionsStmt(dbName, context.BUILTIN() != null, context.GLOBAL() != null,
            context.FULL() != null, pattern, where, createPos(context));
}
@Override
public ParseNode visitShowPrivilegesStatement(StarRocksParser.ShowPrivilegesStatementContext ctx) {
    // SHOW PRIVILEGES takes no arguments; note no source position is attached here.
    return new ShowPrivilegesStmt();
}
@Override
public ParseNode visitDropFunctionStatement(StarRocksParser.DropFunctionStatementContext context) {
    // DROP [GLOBAL] FUNCTION name(arg types)
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    String functionName = qualifiedName.toString();
    FunctionName fnName = FunctionName.createFnName(functionName);
    // A GLOBAL function must not carry a database qualifier.
    if (context.GLOBAL() != null) {
        if (!Strings.isNullOrEmpty(fnName.getDb())) {
            throw new ParsingException(PARSER_ERROR_MSG.invalidUDFName(functionName), qualifiedName.getPos());
        }
        fnName.setAsGlobalFunction();
    }
    return new DropFunctionStmt(fnName, getFunctionArgsDef(context.typeList()), createPos(context));
}
@Override
public ParseNode visitCreateFunctionStatement(StarRocksParser.CreateFunctionStatementContext context) {
    // CREATE [GLOBAL] [<type>] FUNCTION name(args) RETURNS t [INTERMEDIATE t] [PROPERTIES(...)]
    String functionType = context.functionType == null ? "SCALAR" : context.functionType.getText();
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    String functionName = qualifiedName.toString();
    TypeDef returnTypeDef = new TypeDef(getType(context.returnType), createPos(context.returnType));
    TypeDef intermediateType = context.intermediateType == null
            ? null
            : new TypeDef(getType(context.intermediateType), createPos(context.intermediateType));
    // Properties stay null (not empty) when the clause is absent.
    Map<String, String> properties = null;
    if (context.properties() != null) {
        properties = new HashMap<>();
        for (Property property : visit(context.properties().property(), Property.class)) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    FunctionName fnName = FunctionName.createFnName(functionName);
    // A GLOBAL function must not carry a database qualifier.
    if (context.GLOBAL() != null) {
        if (!Strings.isNullOrEmpty(fnName.getDb())) {
            throw new ParsingException(PARSER_ERROR_MSG.invalidUDFName(functionName), qualifiedName.getPos());
        }
        fnName.setAsGlobalFunction();
    }
    return new CreateFunctionStmt(functionType, fnName,
            getFunctionArgsDef(context.typeList()), returnTypeDef, intermediateType, properties);
}
@Override
public ParseNode visitCreateUserStatement(StarRocksParser.CreateUserStatementContext context) {
    // CREATE USER [IF NOT EXISTS] <user> [auth option] [DEFAULT ROLE r1, ...]
    UserDesc userDesc;
    // Token range spanning the user plus its auth option, used for the UserDesc position.
    Token start = context.user().start;
    Token stop;
    UserIdentity user = (UserIdentity) visit(context.user());
    UserAuthOption authOption = context.authOption() == null ? null : (UserAuthOption) visit(context.authOption());
    if (authOption == null) {
        // No auth clause: empty password, not plain text; position covers only the user.
        userDesc = new UserDesc(user, "", false, user.getPos());
    } else if (authOption.getAuthPlugin() == null) {
        // Password-based authentication (IDENTIFIED BY ...).
        stop = context.authOption().stop;
        userDesc =
                new UserDesc(user, authOption.getPassword(), authOption.isPasswordPlain(), createPos(start, stop));
    } else {
        // Plugin-based authentication (IDENTIFIED WITH ...).
        stop = context.authOption().stop;
        userDesc = new UserDesc(user, authOption.getAuthPlugin(), authOption.getAuthString(),
                authOption.isPasswordPlain(), createPos(start, stop));
    }
    boolean ifNotExists = context.IF() != null;
    // Optional DEFAULT ROLE list; empty when absent.
    List<String> roles = new ArrayList<>();
    if (context.roleList() != null) {
        roles.addAll(context.roleList().identifierOrString().stream().map(this::visit).map(
                s -> ((Identifier) s).getValue()).collect(toList()));
    }
    return new CreateUserStmt(ifNotExists, userDesc, roles, createPos(context));
}
@Override
public ParseNode visitDropUserStatement(StarRocksParser.DropUserStatementContext context) {
    // DROP USER [IF EXISTS] <user>
    UserIdentity target = (UserIdentity) visit(context.user());
    boolean ifExists = context.EXISTS() != null;
    return new DropUserStmt(target, ifExists, createPos(context));
}
@Override
public ParseNode visitAlterUserStatement(StarRocksParser.AlterUserStatementContext context) {
    // ALTER USER <user> DEFAULT ROLE ...  — or —  ALTER USER <user> <auth option>
    UserDesc userDesc;
    UserIdentity user = (UserIdentity) visit(context.user());
    Token start = context.user().start;
    Token stop;
    if (context.ROLE() != null) {
        // DEFAULT ROLE form: translated into a SetDefaultRoleStmt, not an AlterUserStmt.
        List<String> roles = new ArrayList<>();
        if (context.roleList() != null) {
            roles.addAll(context.roleList().identifierOrString().stream().map(this::visit).map(
                    s -> ((Identifier) s).getValue()).collect(toList()));
        }
        SetRoleType setRoleType;
        if (context.ALL() != null) {
            setRoleType = SetRoleType.ALL;
        } else if (context.NONE() != null) {
            setRoleType = SetRoleType.NONE;
        } else {
            setRoleType = SetRoleType.ROLE;
        }
        return new SetDefaultRoleStmt(user, setRoleType, roles, createPos(context));
    }
    // Auth-option form. NOTE(review): dereferences authOption() unconditionally —
    // presumably the grammar guarantees it is present when ROLE is absent; confirm.
    stop = context.authOption().stop;
    UserAuthOption authOption = (UserAuthOption) visit(context.authOption());
    if (authOption.getAuthPlugin() == null) {
        // Password-based authentication (IDENTIFIED BY ...).
        userDesc =
                new UserDesc(user, authOption.getPassword(), authOption.isPasswordPlain(), createPos(start, stop));
    } else {
        // Plugin-based authentication (IDENTIFIED WITH ...).
        userDesc = new UserDesc(user, authOption.getAuthPlugin(), authOption.getAuthString(),
                authOption.isPasswordPlain(), createPos(start, stop));
    }
    return new AlterUserStmt(userDesc, context.EXISTS() != null, createPos(context));
}
@Override
public ParseNode visitShowUserStatement(StarRocksParser.ShowUserStatementContext context) {
    // SHOW USERS lists all users; SHOW USER shows only the current one.
    boolean showAll = context.USERS() != null;
    return new ShowUserStmt(showAll, createPos(context));
}
@Override
public ParseNode visitShowAllAuthentication(StarRocksParser.ShowAllAuthenticationContext context) {
    // SHOW ALL AUTHENTICATION — no specific user, isAll flag set.
    return new ShowAuthenticationStmt(null, true, createPos(context));
}
@Override
public ParseNode visitShowAuthenticationForUser(StarRocksParser.ShowAuthenticationForUserContext context) {
    // SHOW AUTHENTICATION [FOR user] — a null user means no explicit user was given.
    UserIdentity user = context.user() == null ? null : (UserIdentity) visit(context.user());
    return new ShowAuthenticationStmt(user, false, createPos(context));
}
@Override
public ParseNode visitExecuteAsStatement(StarRocksParser.ExecuteAsStatementContext context) {
    // EXECUTE AS <user> [WITH NO REVERT] — the WITH clause disables reverting.
    UserIdentity target = (UserIdentity) visit(context.user());
    boolean allowRevert = context.WITH() == null;
    return new ExecuteAsStmt(target, allowRevert, createPos(context));
}
@Override
public ParseNode visitCreateRoleStatement(StarRocksParser.CreateRoleStatementContext context) {
    // CREATE ROLE [IF NOT EXISTS] r1, r2, ... [COMMENT '...']
    List<String> roles = context.roleList().identifierOrString().stream()
            .map(c -> ((Identifier) visit(c)).getValue())
            .collect(Collectors.toList());
    String comment = context.comment() == null ? "" : ((StringLiteral) visit(context.comment())).getStringValue();
    return new CreateRoleStmt(roles, context.NOT() != null, comment, createPos(context));
}
@Override
public ParseNode visitAlterRoleStatement(StarRocksParser.AlterRoleStatementContext context) {
    // ALTER ROLE [IF EXISTS] r1, r2, ... SET COMMENT = '...'
    List<String> roles = context.roleList().identifierOrString().stream()
            .map(c -> ((Identifier) visit(c)).getValue())
            .collect(Collectors.toList());
    String comment = ((StringLiteral) visit(context.string())).getStringValue();
    return new AlterRoleStmt(roles, context.IF() != null, comment);
}
@Override
public ParseNode visitDropRoleStatement(StarRocksParser.DropRoleStatementContext context) {
    // DROP ROLE [IF EXISTS] r1, r2, ...
    // Collect directly instead of the redundant new-ArrayList-then-addAll dance.
    List<String> roles = context.roleList().identifierOrString().stream()
            .map(c -> ((Identifier) visit(c)).getValue())
            .collect(toList());
    return new DropRoleStmt(roles, context.EXISTS() != null, createPos(context));
}
@Override
public ParseNode visitShowRolesStatement(StarRocksParser.ShowRolesStatementContext context) {
    // SHOW ROLES takes no arguments; note no source position is attached here.
    return new ShowRolesStmt();
}
@Override
public ParseNode visitGrantRoleToUser(StarRocksParser.GrantRoleToUserContext context) {
    // GRANT r1, r2, ... TO <user>
    List<String> roleNames = context.identifierOrStringList().identifierOrString().stream()
            .map(c -> ((Identifier) visit(c)).getValue())
            .collect(toList());
    return new GrantRoleStmt(roleNames, (UserIdentity) visit(context.user()), createPos(context));
}
@Override
public ParseNode visitGrantRoleToRole(StarRocksParser.GrantRoleToRoleContext context) {
    // GRANT r1, r2, ... TO ROLE <role>
    List<String> roleNames = context.identifierOrStringList().identifierOrString().stream()
            .map(c -> ((Identifier) visit(c)).getValue())
            .collect(toList());
    String targetRole = ((Identifier) visit(context.identifierOrString())).getValue();
    return new GrantRoleStmt(roleNames, targetRole, createPos(context));
}
@Override
public ParseNode visitRevokeRoleFromUser(StarRocksParser.RevokeRoleFromUserContext context) {
    // REVOKE r1, r2, ... FROM <user>
    List<String> roleNames = context.identifierOrStringList().identifierOrString().stream()
            .map(c -> ((Identifier) visit(c)).getValue())
            .collect(toList());
    return new RevokeRoleStmt(roleNames, (UserIdentity) visit(context.user()), createPos(context));
}
@Override
public ParseNode visitRevokeRoleFromRole(StarRocksParser.RevokeRoleFromRoleContext context) {
    // REVOKE r1, r2, ... FROM ROLE <role>
    List<String> roleNames = context.identifierOrStringList().identifierOrString().stream()
            .map(c -> ((Identifier) visit(c)).getValue())
            .collect(toList());
    String targetRole = ((Identifier) visit(context.identifierOrString())).getValue();
    return new RevokeRoleStmt(roleNames, targetRole, createPos(context));
}
@Override
public ParseNode visitSetRoleStatement(StarRocksParser.SetRoleStatementContext context) {
    // SET ROLE ALL | DEFAULT | NONE | r1, r2, ...
    List<String> roles = new ArrayList<>();
    if (context.roleList() != null) {
        for (StarRocksParser.IdentifierOrStringContext c : context.roleList().identifierOrString()) {
            roles.add(((Identifier) visit(c)).getValue());
        }
    }
    SetRoleType setRoleType;
    if (context.ALL() != null) {
        setRoleType = SetRoleType.ALL;
    } else if (context.DEFAULT() != null) {
        setRoleType = SetRoleType.DEFAULT;
    } else if (context.NONE() != null) {
        setRoleType = SetRoleType.NONE;
    } else {
        setRoleType = SetRoleType.ROLE;
    }
    return new SetRoleStmt(setRoleType, roles, createPos(context));
}
@Override
public ParseNode visitSetDefaultRoleStatement(StarRocksParser.SetDefaultRoleStatementContext context) {
    // SET DEFAULT ROLE ALL | NONE | r1, r2, ... TO <user>
    List<String> roles = new ArrayList<>();
    if (context.roleList() != null) {
        for (StarRocksParser.IdentifierOrStringContext c : context.roleList().identifierOrString()) {
            roles.add(((Identifier) visit(c)).getValue());
        }
    }
    SetRoleType setRoleType;
    if (context.ALL() != null) {
        setRoleType = SetRoleType.ALL;
    } else if (context.NONE() != null) {
        setRoleType = SetRoleType.NONE;
    } else {
        setRoleType = SetRoleType.ROLE;
    }
    return new SetDefaultRoleStmt((UserIdentity) visit(context.user()), setRoleType, roles, createPos(context));
}
@Override
public ParseNode visitShowGrantsStatement(StarRocksParser.ShowGrantsStatementContext context) {
    // SHOW GRANTS [FOR <user> | FOR ROLE <role>]
    NodePosition pos = createPos(context);
    if (context.ROLE() != null) {
        String roleName = ((Identifier) visit(context.identifierOrString())).getValue();
        return new ShowGrantsStmt(null, roleName, pos);
    }
    UserIdentity userId = context.user() == null ? null : (UserIdentity) visit(context.user());
    return new ShowGrantsStmt(userId, null, pos);
}
@Override
public ParseNode visitAuthWithoutPlugin(StarRocksParser.AuthWithoutPluginContext context) {
    // IDENTIFIED BY [PASSWORD] '...' — the literal is treated as plain text unless PASSWORD is present.
    String password = ((StringLiteral) visit(context.string())).getStringValue();
    boolean plain = context.PASSWORD() == null;
    return new UserAuthOption(password, null, null, plain, createPos(context));
}
@Override
public ParseNode visitAuthWithPlugin(StarRocksParser.AuthWithPluginContext context) {
    // IDENTIFIED WITH <plugin> [BY|AS '...'] — isPasswordPlain is true unless AS appears.
    String plugin = ((Identifier) visit(context.identifierOrString())).getValue().toUpperCase();
    String authString = context.string() == null
            ? null
            : ((StringLiteral) visit(context.string())).getStringValue();
    return new UserAuthOption(null, plugin, authString, context.AS() == null, createPos(context));
}
@Override
public ParseNode visitGrantRevokeClause(StarRocksParser.GrantRevokeClauseContext context) {
    // TO/FROM either a user identity or a role name — exactly one is set.
    NodePosition pos = createPos(context);
    if (context.user() == null) {
        String roleName = ((Identifier) visit(context.identifierOrString())).getValue();
        return new GrantRevokeClause(null, roleName, pos);
    }
    return new GrantRevokeClause((UserIdentity) visit(context.user()), null, pos);
}
@Override
public ParseNode visitGrantOnUser(StarRocksParser.GrantOnUserContext context) {
    // GRANT IMPERSONATE ON USER u1, u2 TO ... — privilege is always IMPERSONATE.
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    List<UserIdentity> users = context.user().stream()
            .map(u -> (UserIdentity) visit(u))
            .collect(toList());
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    objects.setUserPrivilegeObjectList(users);
    return new GrantPrivilegeStmt(Collections.singletonList("IMPERSONATE"), "USER", clause, objects,
            context.WITH() != null, createPos(context));
}
@Override
public ParseNode visitRevokeOnUser(StarRocksParser.RevokeOnUserContext context) {
    // REVOKE IMPERSONATE ON USER u1, u2 FROM ... — privilege is always IMPERSONATE.
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    List<UserIdentity> users = context.user().stream()
            .map(u -> (UserIdentity) visit(u))
            .collect(toList());
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    objects.setUserPrivilegeObjectList(users);
    return new RevokePrivilegeStmt(Collections.singletonList("IMPERSONATE"), "USER", clause, objects,
            createPos(context));
}
@Override
public ParseNode visitGrantOnTableBrief(StarRocksParser.GrantOnTableBriefContext context) {
    // GRANT <privs> ON t1, t2 TO ... — the object type is implicitly TABLE.
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    GrantRevokePrivilegeObjects objects = parsePrivilegeObjectNameList(context.privObjectNameList());
    return new GrantPrivilegeStmt(privs, "TABLE", clause, objects, context.WITH() != null, createPos(context));
}
@Override
public ParseNode visitRevokeOnTableBrief(StarRocksParser.RevokeOnTableBriefContext context) {
    // REVOKE <privs> ON t1, t2 FROM ... — the object type is implicitly TABLE.
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    GrantRevokePrivilegeObjects objects = parsePrivilegeObjectNameList(context.privObjectNameList());
    return new RevokePrivilegeStmt(privs, "TABLE", clause, objects, createPos(context));
}
@Override
public ParseNode visitGrantOnSystem(StarRocksParser.GrantOnSystemContext context) {
    // GRANT <privs> ON SYSTEM TO ... — system-level privileges have no object list.
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    return new GrantPrivilegeStmt(privs, "SYSTEM", clause, null, context.WITH() != null, createPos(context));
}
@Override
public ParseNode visitRevokeOnSystem(StarRocksParser.RevokeOnSystemContext context) {
    // REVOKE <privs> ON SYSTEM FROM ... — system-level privileges have no object list.
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    return new RevokePrivilegeStmt(privs, "SYSTEM", clause, null, createPos(context));
}
@Override
public ParseNode visitGrantOnPrimaryObj(StarRocksParser.GrantOnPrimaryObjContext context) {
    // GRANT <privs> ON <objectType> <objects> TO ... — type resolved later from its upper-cased name.
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    String objectType = ((Identifier) visit(context.privObjectType())).getValue().toUpperCase();
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    GrantRevokePrivilegeObjects objects = parsePrivilegeObjectNameList(context.privObjectNameList());
    return new GrantPrivilegeStmt(privs, objectType, clause, objects, context.WITH() != null, createPos(context));
}
@Override
public ParseNode visitRevokeOnPrimaryObj(StarRocksParser.RevokeOnPrimaryObjContext context) {
    // REVOKE <privs> ON <objectType> <objects> FROM ... — type resolved later from its upper-cased name.
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    String objectType = ((Identifier) visit(context.privObjectType())).getValue().toUpperCase();
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    GrantRevokePrivilegeObjects objects = parsePrivilegeObjectNameList(context.privObjectNameList());
    return new RevokePrivilegeStmt(privs, objectType, clause, objects, createPos(context));
}
@Override
public ParseNode visitGrantOnFunc(StarRocksParser.GrantOnFuncContext context) {
    // GRANT <privs> ON [GLOBAL] FUNCTION f(args), ... TO ...
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    boolean isGlobal = context.GLOBAL() != null;
    GrantRevokePrivilegeObjects objects =
            buildGrantRevokePrivWithFunction(context.privFunctionObjectNameList(), isGlobal);
    return new GrantPrivilegeStmt(privs, extendPrivilegeType(isGlobal, "FUNCTION"),
            (GrantRevokeClause) visit(context.grantRevokeClause()), objects, context.WITH() != null,
            createPos(context));
}
@Override
public ParseNode visitRevokeOnFunc(StarRocksParser.RevokeOnFuncContext context) {
    // REVOKE <privs> ON [GLOBAL] FUNCTION f(args), ... FROM ...
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    boolean isGlobal = context.GLOBAL() != null;
    GrantRevokePrivilegeObjects objects =
            buildGrantRevokePrivWithFunction(context.privFunctionObjectNameList(), isGlobal);
    return new RevokePrivilegeStmt(privs, extendPrivilegeType(isGlobal, "FUNCTION"),
            (GrantRevokeClause) visit(context.grantRevokeClause()), objects,
            createPos(context));
}
// Builds the privilege-object list for GRANT/REVOKE ... ON FUNCTION f(arg types).
// The i-th qualifiedName is paired with the i-th typeList (the function's argument
// signature) — the two context lists are index-aligned by the grammar.
private GrantRevokePrivilegeObjects buildGrantRevokePrivWithFunction(
        StarRocksParser.PrivFunctionObjectNameListContext context, boolean isGlobal) {
    List<Pair<FunctionName, FunctionArgsDef>> functions = new ArrayList<>();
    int functionSize = context.qualifiedName().size();
    List<StarRocksParser.TypeListContext> typeListContexts = context.typeList();
    for (int i = 0; i < functionSize; ++i) {
        StarRocksParser.QualifiedNameContext qualifiedNameContext = context.qualifiedName(i);
        QualifiedName qualifiedName = getQualifiedName(qualifiedNameContext);
        FunctionName functionName;
        // One part: bare function name; two parts: db.function; anything longer is invalid.
        if (qualifiedName.getParts().size() == 1) {
            functionName = new FunctionName(qualifiedName.getParts().get(0));
        } else if (qualifiedName.getParts().size() == 2) {
            functionName = new FunctionName(qualifiedName.getParts().get(0), qualifiedName.getParts().get(1));
        } else {
            throw new SemanticException("Error function format " + qualifiedName);
        }
        if (isGlobal) {
            functionName.setAsGlobalFunction();
        }
        FunctionArgsDef argsDef = getFunctionArgsDef(typeListContexts.get(i));
        functions.add(Pair.create(functionName, argsDef));
    }
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    objects.setFunctions(functions);
    return objects;
}
/**
 * Prefixes "GLOBAL " onto FUNCTION/FUNCTIONS object types when the statement targets
 * global functions; every other type passes through unchanged.
 * The nested ifs of the original are collapsed into a single condition.
 */
public String extendPrivilegeType(boolean isGlobal, String type) {
    if (isGlobal && (type.equals("FUNCTIONS") || type.equals("FUNCTION"))) {
        return "GLOBAL " + type;
    }
    return type;
}
@Override
public ParseNode visitGrantOnAll(StarRocksParser.GrantOnAllContext context) {
    // GRANT <privs> ON ALL <objects> [IN DATABASE db | IN ALL DATABASES] TO ...
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    String objectType = ((Identifier) visit(context.privObjectTypePlural())).getValue().toUpperCase();
    ArrayList<String> tokens;
    if (context.isAll != null) {
        tokens = Lists.newArrayList("*", "*");   // ... IN ALL DATABASES
    } else if (context.IN() != null) {
        // ... IN DATABASE <db>
        tokens = Lists.newArrayList(((Identifier) visit(context.identifierOrString())).getValue(), "*");
    } else {
        tokens = Lists.newArrayList("*");        // top-level ALL
    }
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    objects.setPrivilegeObjectNameTokensList(Collections.singletonList(tokens));
    GrantPrivilegeStmt stmt = new GrantPrivilegeStmt(privs, objectType,
            (GrantRevokeClause) visit(context.grantRevokeClause()),
            objects, context.WITH() != null, createPos(context));
    stmt.setGrantOnAll();
    return stmt;
}
@Override
public ParseNode visitRevokeOnAll(StarRocksParser.RevokeOnAllContext context) {
    // REVOKE <privs> ON ALL <objects> [IN DATABASE db | IN ALL DATABASES] FROM ...
    List<String> privs = context.privilegeTypeList().privilegeType().stream()
            .map(c -> ((Identifier) visit(c)).getValue().toUpperCase())
            .collect(toList());
    String objectType = ((Identifier) visit(context.privObjectTypePlural())).getValue().toUpperCase();
    ArrayList<String> tokens;
    if (context.isAll != null) {
        tokens = Lists.newArrayList("*", "*");   // ... IN ALL DATABASES
    } else if (context.IN() != null) {
        // ... IN DATABASE <db>
        tokens = Lists.newArrayList(((Identifier) visit(context.identifierOrString())).getValue(), "*");
    } else {
        tokens = Lists.newArrayList("*");        // top-level ALL
    }
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    objects.setPrivilegeObjectNameTokensList(Collections.singletonList(tokens));
    RevokePrivilegeStmt stmt = new RevokePrivilegeStmt(privs, objectType,
            (GrantRevokeClause) visit(context.grantRevokeClause()), objects, createPos(context));
    stmt.setGrantOnAll();
    return stmt;
}
@Override
public ParseNode visitPrivilegeType(StarRocksParser.PrivilegeTypeContext context) {
    // A privilege type may span several keywords (e.g. "CREATE TABLE"); join them with single spaces.
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < context.getChildCount(); ++i) {
        if (i > 0) {
            sb.append(' ');
        }
        sb.append(context.getChild(i).getText());
    }
    return new Identifier(sb.toString(), createPos(context));
}
@Override
public ParseNode visitPrivObjectType(StarRocksParser.PrivObjectTypeContext context) {
    // Multi-keyword object types (e.g. "MATERIALIZED VIEW") are joined with single spaces.
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < context.getChildCount(); ++i) {
        if (i > 0) {
            sb.append(' ');
        }
        sb.append(context.getChild(i).getText());
    }
    return new Identifier(sb.toString(), createPos(context));
}
@Override
public ParseNode visitPrivObjectTypePlural(StarRocksParser.PrivObjectTypePluralContext context) {
    // Plural object types (e.g. "MATERIALIZED VIEWS") are joined with single spaces.
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < context.getChildCount(); ++i) {
        if (i > 0) {
            sb.append(' ');
        }
        sb.append(context.getChild(i).getText());
    }
    return new Identifier(sb.toString(), createPos(context));
}
// Parses a "db.tbl, db2.*, ..." style object-name list into per-object token lists.
// Returns null when the clause is absent.
private GrantRevokePrivilegeObjects parsePrivilegeObjectNameList(
        StarRocksParser.PrivObjectNameListContext context) {
    if (context == null) {
        return null;
    }
    GrantRevokePrivilegeObjects result = new GrantRevokePrivilegeObjects(createPos(context));
    List<List<String>> tokensPerObject = new ArrayList<>();
    for (StarRocksParser.PrivObjectNameContext objectName : context.privObjectName()) {
        List<String> tokens = new ArrayList<>();
        for (StarRocksParser.IdentifierOrStringOrStarContext part : objectName.identifierOrStringOrStar()) {
            tokens.add(((Identifier) visit(part)).getValue());
        }
        tokensPerObject.add(tokens);
    }
    result.setPrivilegeObjectNameTokensList(tokensPerObject);
    return result;
}
@Override
public ParseNode visitCreateSecurityIntegrationStatement(
        StarRocksParser.CreateSecurityIntegrationStatementContext context) {
    // CREATE SECURITY INTEGRATION <name> PROPERTIES (...)
    String name = ((Identifier) visit(context.identifier())).getValue();
    Map<String, String> propertyMap = new HashMap<>();
    if (context.properties() != null) {
        for (Property property : visit(context.properties().property(), Property.class)) {
            propertyMap.put(property.getKey(), property.getValue());
        }
    }
    return new CreateSecurityIntegrationStatement(name, propertyMap, createPos(context));
}
@Override
public ParseNode visitExpressionOrDefault(StarRocksParser.ExpressionOrDefaultContext context) {
    // The DEFAULT keyword stands in for the column's default value.
    return context.DEFAULT() != null
            ? new DefaultValueExpr(createPos(context))
            : visit(context.expression());
}
@Override
public ParseNode visitExpressionsWithDefault(StarRocksParser.ExpressionsWithDefaultContext context) {
    // One VALUES row whose entries may be expressions or the DEFAULT keyword.
    ArrayList<Expr> row = Lists.newArrayList();
    for (StarRocksParser.ExpressionOrDefaultContext item : context.expressionOrDefault()) {
        row.add((Expr) visit(item));
    }
    return new ValueList(row, createPos(context));
}
@Override
public ParseNode visitExpressionSingleton(StarRocksParser.ExpressionSingletonContext context) {
    // A singleton expression rule simply unwraps to the inner expression.
    return visit(context.expression());
}
@Override
public ParseNode visitLogicalNot(StarRocksParser.LogicalNotContext context) {
    // NOT <expr> — a unary CompoundPredicate with a null right child.
    Expr operand = (Expr) visit(context.expression());
    return new CompoundPredicate(CompoundPredicate.Operator.NOT, operand, null, createPos(context));
}
@Override
public ParseNode visitLogicalBinary(StarRocksParser.LogicalBinaryContext context) {
    // <left> AND/OR <right>
    Expr lhs = (Expr) visit(context.left);
    Expr rhs = (Expr) visit(context.right);
    CompoundPredicate.Operator op = getLogicalBinaryOperator(context.operator);
    return new CompoundPredicate(op, lhs, rhs, createPos(context));
}
// Maps an AND/&& token to AND; any other token reaching this rule is OR (||).
private static CompoundPredicate.Operator getLogicalBinaryOperator(Token token) {
    int type = token.getType();
    if (type == StarRocksLexer.AND || type == StarRocksLexer.LOGICAL_AND) {
        return CompoundPredicate.Operator.AND;
    }
    return CompoundPredicate.Operator.OR;
}
@Override
public ParseNode visitPredicate(StarRocksParser.PredicateContext context) {
    // Dispatch to whichever alternative of the predicate rule actually matched.
    if (context.predicateOperations() != null) {
        return visit(context.predicateOperations());
    }
    if (context.tupleInSubquery() != null) {
        return visit(context.tupleInSubquery());
    }
    return visit(context.valueExpression());
}
@Override
public ParseNode visitIsNull(StarRocksParser.IsNullContext context) {
    // <expr> IS [NOT] NULL
    Expr child = (Expr) visit(context.booleanExpression());
    boolean isNotNull = context.NOT() != null;
    return new IsNullPredicate(child, isNotNull, createPos(context));
}
@Override
public ParseNode visitComparison(StarRocksParser.ComparisonContext context) {
    // <left> <comparison op> <right>
    Token opToken = ((TerminalNode) context.comparisonOperator().getChild(0)).getSymbol();
    return new BinaryPredicate(getComparisonOperator(opToken),
            (Expr) visit(context.left), (Expr) visit(context.right), createPos(context));
}
// Maps a comparison-operator token to the corresponding BinaryType.
// Any token other than the six standard comparisons falls through to
// EQ_FOR_NULL, i.e. the null-safe equality operator (<=>).
private static BinaryType getComparisonOperator(Token symbol) {
    switch (symbol.getType()) {
        case StarRocksParser.EQ:
            return BinaryType.EQ;
        case StarRocksParser.NEQ:
            return BinaryType.NE;
        case StarRocksParser.LT:
            return BinaryType.LT;
        case StarRocksParser.LTE:
            return BinaryType.LE;
        case StarRocksParser.GT:
            return BinaryType.GT;
        case StarRocksParser.GTE:
            return BinaryType.GE;
        default:
            return BinaryType.EQ_FOR_NULL;
    }
}
@Override
public ParseNode visitInList(StarRocksParser.InListContext context) {
    // <value> [NOT] IN (e1, e2, ...)
    Expr value = (Expr) visit(context.value);
    List<Expr> inList = visit(context.expressionList().expression(), Expr.class);
    return new InPredicate(value, inList, context.NOT() != null, createPos(context));
}
@Override
public ParseNode visitBetween(StarRocksParser.BetweenContext context) {
    // <value> [NOT] BETWEEN <lower> AND <upper>
    Expr value = (Expr) visit(context.value);
    Expr lower = (Expr) visit(context.lower);
    Expr upper = (Expr) visit(context.upper);
    return new BetweenPredicate(value, lower, upper, context.NOT() != null, createPos(context));
}
@Override
public ParseNode visitLike(StarRocksParser.LikeContext context) {
    // <value> [NOT] LIKE/REGEXP/RLIKE <pattern>
    NodePosition pos = createPos(context);
    // REGEXP and RLIKE share a single operator; everything else is plain LIKE.
    LikePredicate.Operator op = (context.REGEXP() != null || context.RLIKE() != null)
            ? LikePredicate.Operator.REGEXP
            : LikePredicate.Operator.LIKE;
    LikePredicate predicate =
            new LikePredicate(op, (Expr) visit(context.value), (Expr) visit(context.pattern), pos);
    // The NOT variant is expressed as NOT(<predicate>).
    return context.NOT() == null
            ? predicate
            : new CompoundPredicate(CompoundPredicate.Operator.NOT, predicate, null, pos);
}
@Override
public ParseNode visitSimpleCase(StarRocksParser.SimpleCaseContext context) {
    // CASE <expr> WHEN ... THEN ... [ELSE ...] END
    Expr caseExpr = (Expr) visit(context.caseExpr);
    List<CaseWhenClause> whenClauses = visit(context.whenClause(), CaseWhenClause.class);
    Expr elseExpr = (Expr) visitIfPresent(context.elseExpression);
    return new CaseExpr(caseExpr, whenClauses, elseExpr, createPos(context));
}
@Override
public ParseNode visitSearchedCase(StarRocksParser.SearchedCaseContext context) {
    // CASE WHEN <cond> THEN ... [ELSE ...] END — no case operand (first argument is null).
    List<CaseWhenClause> whenClauses = visit(context.whenClause(), CaseWhenClause.class);
    Expr elseExpr = (Expr) visitIfPresent(context.elseExpression);
    return new CaseExpr(null, whenClauses, elseExpr, createPos(context));
}
@Override
public ParseNode visitWhenClause(StarRocksParser.WhenClauseContext context) {
    // WHEN <condition> THEN <result>
    Expr condition = (Expr) visit(context.condition);
    Expr result = (Expr) visit(context.result);
    return new CaseWhenClause(condition, result, createPos(context));
}
@Override
public ParseNode visitArithmeticUnary(StarRocksParser.ArithmeticUnaryContext context) {
    // Applies a unary operator (-, +, ~, logical NOT) to the child expression.
    Expr child = (Expr) visit(context.primaryExpression());
    NodePosition pos = createPos(context);
    switch (context.operator.getType()) {
        case StarRocksLexer.MINUS_SYMBOL:
            // For a numeric literal, fold the sign directly into the literal —
            // note this mutates the literal node in place via swapSign().
            // Otherwise rewrite -x as (-1) * x.
            if (child.isLiteral() && child.getType().isNumericType()) {
                try {
                    ((LiteralExpr) child).swapSign();
                } catch (NotImplementedException e) {
                    throw new ParsingException(PARSER_ERROR_MSG.unsupportedExpr(child.toSql()), child.getPos());
                }
                return child;
            } else {
                return new ArithmeticExpr(ArithmeticExpr.Operator.MULTIPLY, new IntLiteral(-1), child, pos);
            }
        case StarRocksLexer.PLUS_SYMBOL:
            // Unary plus is a no-op.
            return child;
        case StarRocksLexer.BITNOT:
            return new ArithmeticExpr(ArithmeticExpr.Operator.BITNOT, child, null, pos);
        default:
            // Any remaining operator token is treated as logical NOT.
            return new CompoundPredicate(CompoundPredicate.Operator.NOT, child, null, pos);
    }
}
@Override
public ParseNode visitArithmeticBinary(StarRocksParser.ArithmeticBinaryContext context) {
Expr left = (Expr) visit(context.left);
Expr right = (Expr) visit(context.right);
NodePosition pos = createPos(context);
if (left instanceof IntervalLiteral) {
return new TimestampArithmeticExpr(getArithmeticBinaryOperator(context.operator), right,
((IntervalLiteral) left).getValue(),
((IntervalLiteral) left).getUnitIdentifier().getDescription(),
true, pos);
}
if (right instanceof IntervalLiteral) {
return new TimestampArithmeticExpr(getArithmeticBinaryOperator(context.operator), left,
((IntervalLiteral) right).getValue(),
((IntervalLiteral) right).getUnitIdentifier().getDescription(),
false, pos);
}
return new ArithmeticExpr(getArithmeticBinaryOperator(context.operator), left, right, pos);
}
private static ArithmeticExpr.Operator getArithmeticBinaryOperator(Token operator) {
switch (operator.getType()) {
case StarRocksLexer.PLUS_SYMBOL:
return ArithmeticExpr.Operator.ADD;
case StarRocksLexer.MINUS_SYMBOL:
return ArithmeticExpr.Operator.SUBTRACT;
case StarRocksLexer.ASTERISK_SYMBOL:
return ArithmeticExpr.Operator.MULTIPLY;
case StarRocksLexer.SLASH_SYMBOL:
return ArithmeticExpr.Operator.DIVIDE;
case StarRocksLexer.PERCENT_SYMBOL:
case StarRocksLexer.MOD:
return ArithmeticExpr.Operator.MOD;
case StarRocksLexer.INT_DIV:
return ArithmeticExpr.Operator.INT_DIVIDE;
case StarRocksLexer.BITAND:
return ArithmeticExpr.Operator.BITAND;
case StarRocksLexer.BITOR:
return ArithmeticExpr.Operator.BITOR;
case StarRocksLexer.BITXOR:
return ArithmeticExpr.Operator.BITXOR;
case StarRocksLexer.BIT_SHIFT_LEFT:
return ArithmeticExpr.Operator.BIT_SHIFT_LEFT;
case StarRocksLexer.BIT_SHIFT_RIGHT:
return ArithmeticExpr.Operator.BIT_SHIFT_RIGHT;
case StarRocksLexer.BIT_SHIFT_RIGHT_LOGICAL:
return ArithmeticExpr.Operator.BIT_SHIFT_RIGHT_LOGICAL;
default:
throw new ParsingException(PARSER_ERROR_MSG.wrongTypeOfArgs(operator.getText()),
new NodePosition(operator));
}
}
@Override
public ParseNode visitOdbcFunctionCallExpression(StarRocksParser.OdbcFunctionCallExpressionContext context) {
FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.functionCall());
OdbcScalarFunctionCall odbcScalarFunctionCall = new OdbcScalarFunctionCall(functionCallExpr);
return odbcScalarFunctionCall.mappingFunction();
}
private static List<Expr> getArgumentsForTimeSlice(Expr time, Expr value, String ident, String boundary) {
List<Expr> exprs = Lists.newLinkedList();
exprs.add(time);
addArgumentUseTypeInt(value, exprs);
exprs.add(new StringLiteral(ident));
exprs.add(new StringLiteral(boundary));
return exprs;
}
private static void addArgumentUseTypeInt(Expr value, List<Expr> exprs) {
try {
if (value instanceof IntLiteral) {
exprs.add(new IntLiteral(((IntLiteral) value).getValue(), Type.INT));
} else {
exprs.add(value);
}
} catch (Exception e) {
throw new IllegalArgumentException(String.format("Cast argument %s to int type failed.", value.toSql()));
}
}
@Override
@Override
public ParseNode visitAggregationFunctionCall(StarRocksParser.AggregationFunctionCallContext context) {
NodePosition pos = createPos(context);
String functionName;
boolean isGroupConcat = false;
boolean isLegacyGroupConcat = false;
boolean isDistinct = false;
if (context.aggregationFunction().COUNT() != null) {
functionName = FunctionSet.COUNT;
} else if (context.aggregationFunction().AVG() != null) {
functionName = FunctionSet.AVG;
} else if (context.aggregationFunction().SUM() != null) {
functionName = FunctionSet.SUM;
} else if (context.aggregationFunction().MIN() != null) {
functionName = FunctionSet.MIN;
} else if (context.aggregationFunction().ARRAY_AGG() != null) {
functionName = FunctionSet.ARRAY_AGG;
} else if (context.aggregationFunction().ARRAY_AGG_DISTINCT() != null) {
functionName = FunctionSet.ARRAY_AGG;
isDistinct = true;
} else if (context.aggregationFunction().GROUP_CONCAT() != null) {
functionName = FunctionSet.GROUP_CONCAT;
isGroupConcat = true;
isLegacyGroupConcat = SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_GROUP_CONCAT_LEGACY);
} else {
functionName = FunctionSet.MAX;
}
List<OrderByElement> orderByElements = new ArrayList<>();
if (context.aggregationFunction().ORDER() != null) {
orderByElements = visit(context.aggregationFunction().sortItem(), OrderByElement.class);
}
List<String> hints = Lists.newArrayList();
if (context.aggregationFunction().bracketHint() != null) {
hints = context.aggregationFunction().bracketHint().identifier().stream().map(
RuleContext::getText).collect(Collectors.toList());
}
if (context.aggregationFunction().setQuantifier() != null) {
isDistinct = context.aggregationFunction().setQuantifier().DISTINCT() != null;
}
if (isDistinct && CollectionUtils.isEmpty(context.aggregationFunction().expression())) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
}
List<Expr> exprs = visit(context.aggregationFunction().expression(), Expr.class);
if (isGroupConcat && !exprs.isEmpty() && context.aggregationFunction().SEPARATOR() == null) {
if (isLegacyGroupConcat) {
if (exprs.size() == 1) {
Expr sepExpr;
String sep = ", ";
sepExpr = new StringLiteral(sep, pos);
exprs.add(sepExpr);
}
} else {
Expr sepExpr;
String sep = ",";
sepExpr = new StringLiteral(sep, pos);
exprs.add(sepExpr);
}
}
if (!orderByElements.isEmpty()) {
int exprSize = exprs.size();
if (isGroupConcat) {
exprSize--;
}
for (OrderByElement orderByElement : orderByElements) {
Expr by = orderByElement.getExpr();
if (by instanceof IntLiteral) {
long ordinal = ((IntLiteral) by).getLongValue();
if (ordinal < 1 || ordinal > exprSize) {
throw new ParsingException(format("ORDER BY position %s is not in %s output list", ordinal,
functionName), pos);
}
by = exprs.get((int) ordinal - 1);
orderByElement.setExpr(by);
}
}
orderByElements = orderByElements.stream().filter(x -> !x.getExpr().isConstant()).collect(toList());
}
if (CollectionUtils.isNotEmpty(orderByElements)) {
orderByElements.stream().forEach(e -> exprs.add(e.getExpr()));
}
FunctionCallExpr functionCallExpr = new FunctionCallExpr(functionName,
context.aggregationFunction().ASTERISK_SYMBOL() == null ?
new FunctionParams(isDistinct, exprs, orderByElements) :
FunctionParams.createStarParam(), pos);
functionCallExpr = SyntaxSugars.parse(functionCallExpr);
functionCallExpr.setHints(hints);
if (context.over() != null) {
return buildOverClause(functionCallExpr, context.over(), pos);
}
return functionCallExpr;
}
@Override
public ParseNode visitWindowFunctionCall(StarRocksParser.WindowFunctionCallContext context) {
FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.windowFunction());
return buildOverClause(functionCallExpr, context.over(), createPos(context));
}
@Override
public ParseNode visitWindowFunction(StarRocksParser.WindowFunctionContext context) {
FunctionCallExpr functionCallExpr = new FunctionCallExpr(context.name.getText().toLowerCase(),
new FunctionParams(false, visit(context.expression(), Expr.class)), createPos(context));
functionCallExpr = SyntaxSugars.parse(functionCallExpr);
boolean ignoreNull = CollectionUtils.isNotEmpty(context.ignoreNulls())
&& context.ignoreNulls().stream().anyMatch(Objects::nonNull);
functionCallExpr.setIgnoreNulls(ignoreNull);
return functionCallExpr;
}
private AnalyticExpr buildOverClause(FunctionCallExpr functionCallExpr, StarRocksParser.OverContext context,
NodePosition pos) {
functionCallExpr.setIsAnalyticFnCall(true);
List<OrderByElement> orderByElements = new ArrayList<>();
if (context.ORDER() != null) {
orderByElements = visit(context.sortItem(), OrderByElement.class);
}
List<Expr> partitionExprs = visit(context.partition, Expr.class);
return new AnalyticExpr(functionCallExpr, partitionExprs, orderByElements,
(AnalyticWindow) visitIfPresent(context.windowFrame()),
context.bracketHint() == null ? null : context.bracketHint().identifier().stream()
.map(RuleContext::getText).collect(toList()), pos);
}
@Override
public ParseNode visitExtract(StarRocksParser.ExtractContext context) {
String fieldString = context.identifier().getText();
return new FunctionCallExpr(fieldString,
new FunctionParams(Lists.newArrayList((Expr) visit(context.valueExpression()))), createPos(context));
}
@Override
public ParseNode visitCast(StarRocksParser.CastContext context) {
return new CastExpr(new TypeDef(getType(context.type())), (Expr) visit(context.expression()),
createPos(context));
}
@Override
public ParseNode visitConvert(StarRocksParser.ConvertContext context) {
return new CastExpr(new TypeDef(getType(context.type())), (Expr) visit(context.expression()),
createPos(context));
}
@Override
public ParseNode visitInformationFunctionExpression(StarRocksParser.InformationFunctionExpressionContext context) {
return new InformationFunction(context.name.getText().toUpperCase(), createPos(context));
}
@Override
public ParseNode visitSpecialDateTimeExpression(StarRocksParser.SpecialDateTimeExpressionContext context) {
return new FunctionCallExpr(context.name.getText().toUpperCase(), Lists.newArrayList());
}
@Override
public ParseNode visitSpecialFunctionExpression(StarRocksParser.SpecialFunctionExpressionContext context) {
NodePosition pos = createPos(context);
if (context.CHAR() != null) {
return new FunctionCallExpr("char", visit(context.expression(), Expr.class), pos);
} else if (context.DAY() != null) {
return new FunctionCallExpr("day", visit(context.expression(), Expr.class), pos);
} else if (context.HOUR() != null) {
return new FunctionCallExpr("hour", visit(context.expression(), Expr.class), pos);
} else if (context.IF() != null) {
return new FunctionCallExpr("if", visit(context.expression(), Expr.class), pos);
} else if (context.LEFT() != null) {
return new FunctionCallExpr("left", visit(context.expression(), Expr.class), pos);
} else if (context.LIKE() != null) {
return new FunctionCallExpr("like", visit(context.expression(), Expr.class), pos);
} else if (context.MINUTE() != null) {
return new FunctionCallExpr("minute", visit(context.expression(), Expr.class), pos);
} else if (context.MOD() != null) {
return new FunctionCallExpr("mod", visit(context.expression(), Expr.class), pos);
} else if (context.MONTH() != null) {
return new FunctionCallExpr("month", visit(context.expression(), Expr.class), pos);
} else if (context.QUARTER() != null) {
return new FunctionCallExpr("quarter", visit(context.expression(), Expr.class), pos);
} else if (context.REGEXP() != null) {
return new FunctionCallExpr("regexp", visit(context.expression(), Expr.class), pos);
} else if (context.REPLACE() != null) {
return new FunctionCallExpr("replace", visit(context.expression(), Expr.class), pos);
} else if (context.RIGHT() != null) {
return new FunctionCallExpr("right", visit(context.expression(), Expr.class), pos);
} else if (context.RLIKE() != null) {
return new FunctionCallExpr("regexp", visit(context.expression(), Expr.class), pos);
} else if (context.SECOND() != null) {
return new FunctionCallExpr("second", visit(context.expression(), Expr.class), pos);
} else if (context.YEAR() != null) {
return new FunctionCallExpr("year", visit(context.expression(), Expr.class), pos);
} else if (context.PASSWORD() != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.string());
return new StringLiteral(new String(MysqlPassword.makeScrambledPassword(stringLiteral.getValue())), pos);
} else if (context.FLOOR() != null) {
return new FunctionCallExpr("floor", visit(context.expression(), Expr.class), pos);
} else if (context.CEIL() != null) {
return new FunctionCallExpr("ceil", visit(context.expression(), Expr.class), pos);
}
String functionName = context.TIMESTAMPADD() != null ? "TIMESTAMPADD" : "TIMESTAMPDIFF";
UnitIdentifier e1 = (UnitIdentifier) visit(context.unitIdentifier());
Expr e2 = (Expr) visit(context.expression(0));
Expr e3 = (Expr) visit(context.expression(1));
return new TimestampArithmeticExpr(functionName, e3, e2, e1.getDescription(), pos);
}
@Override
public ParseNode visitConcat(StarRocksParser.ConcatContext context) {
Expr left = (Expr) visit(context.left);
Expr right = (Expr) visit(context.right);
return new FunctionCallExpr("concat", new FunctionParams(Lists.newArrayList(left, right)),
createPos(context));
}
@Override
public ParseNode visitNullLiteral(StarRocksParser.NullLiteralContext context) {
return new NullLiteral(createPos(context));
}
@Override
public ParseNode visitBooleanLiteral(StarRocksParser.BooleanLiteralContext context) {
NodePosition pos = createPos(context);
String value = context.getText();
return new BoolLiteral("TRUE".equalsIgnoreCase(value), pos);
}
@Override
public ParseNode visitNumericLiteral(StarRocksParser.NumericLiteralContext context) {
return visit(context.number());
}
@Override
public ParseNode visitIntegerValue(StarRocksParser.IntegerValueContext context) {
NodePosition pos = createPos(context);
try {
BigInteger intLiteral = new BigInteger(context.getText());
if (intLiteral.compareTo(LONG_MAX) <= 0) {
return new IntLiteral(intLiteral.longValue(), pos);
} else if (intLiteral.compareTo(LARGEINT_MAX_ABS) <= 0) {
return new LargeIntLiteral(intLiteral.toString(), pos);
} else {
throw new ParsingException(PARSER_ERROR_MSG.numOverflow(context.getText()), pos);
}
} catch (NumberFormatException | AnalysisException e) {
throw new ParsingException(PARSER_ERROR_MSG.invalidNumFormat(context.getText()), pos);
}
}
@Override
public ParseNode visitDoubleValue(StarRocksParser.DoubleValueContext context) {
NodePosition pos = createPos(context);
try {
if (SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_DOUBLE_LITERAL)) {
return new FloatLiteral(context.getText(), pos);
} else {
BigDecimal decimal = new BigDecimal(context.getText());
int precision = DecimalLiteral.getRealPrecision(decimal);
int scale = DecimalLiteral.getRealScale(decimal);
int integerPartWidth = precision - scale;
if (integerPartWidth > 38) {
return new FloatLiteral(context.getText(), pos);
}
return new DecimalLiteral(decimal, pos);
}
} catch (AnalysisException | NumberFormatException e) {
throw new ParsingException(PARSER_ERROR_MSG.invalidNumFormat(context.getText()), pos);
}
}
@Override
public ParseNode visitDecimalValue(StarRocksParser.DecimalValueContext context) {
NodePosition pos = createPos(context);
try {
if (SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_DOUBLE_LITERAL)) {
return new FloatLiteral(context.getText(), pos);
} else {
return new DecimalLiteral(context.getText(), pos);
}
} catch (AnalysisException e) {
throw new ParsingException(PARSER_ERROR_MSG.invalidNumFormat(context.getText()), pos);
}
}
@Override
public ParseNode visitDateLiteral(StarRocksParser.DateLiteralContext context) {
NodePosition pos = createPos(context);
String value = ((StringLiteral) visit(context.string())).getValue();
try {
if (context.DATE() != null) {
return new DateLiteral(value, Type.DATE);
} else {
return new DateLiteral(value, Type.DATETIME);
}
} catch (AnalysisException e) {
throw new ParsingException(PARSER_ERROR_MSG.invalidDateFormat(value), pos);
}
}
@Override
public ParseNode visitString(StarRocksParser.StringContext context) {
String quotedString;
NodePosition pos = createPos(context);
if (context.SINGLE_QUOTED_TEXT() != null) {
quotedString = context.SINGLE_QUOTED_TEXT().getText();
quotedString = quotedString.substring(1, quotedString.length() - 1).replace("''", "'");
} else {
quotedString = context.DOUBLE_QUOTED_TEXT().getText();
quotedString = quotedString.substring(1, quotedString.length() - 1).replace("\"\"", "\"");
}
return new StringLiteral(escapeBackSlash(quotedString), pos);
}
@Override
public ParseNode visitBinary(StarRocksParser.BinaryContext context) {
String quotedText;
if (context.BINARY_SINGLE_QUOTED_TEXT() != null) {
quotedText = context.BINARY_SINGLE_QUOTED_TEXT().getText();
} else {
quotedText = context.BINARY_DOUBLE_QUOTED_TEXT().getText();
}
return new VarBinaryLiteral(quotedText.substring(2, quotedText.length() - 1), createPos(context));
}
private static String escapeBackSlash(String str) {
StringWriter writer = new StringWriter();
int strLen = str.length();
for (int i = 0; i < strLen; ++i) {
char c = str.charAt(i);
if (c == '\\' && (i + 1) < strLen) {
switch (str.charAt(i + 1)) {
case 'n':
writer.append('\n');
break;
case 't':
writer.append('\t');
break;
case 'r':
writer.append('\r');
break;
case 'b':
writer.append('\b');
break;
case '0':
writer.append('\0');
break;
case 'Z':
writer.append('\032');
break;
case '_':
case '%':
writer.append('\\');
/* Fall through */
default:
writer.append(str.charAt(i + 1));
break;
}
i++;
} else {
writer.append(c);
}
}
return writer.toString();
}
@Override
public ParseNode visitArrayConstructor(StarRocksParser.ArrayConstructorContext context) {
NodePosition pos = createPos(context);
Type type = null;
if (context.arrayType() != null) {
type = new ArrayType(getType(context.arrayType().type()));
}
List<Expr> exprs;
if (context.expressionList() != null) {
exprs = visit(context.expressionList().expression(), Expr.class);
} else {
exprs = Collections.emptyList();
}
return new ArrayExpr(type, exprs, pos);
}
@Override
public ParseNode visitMapExpression(StarRocksParser.MapExpressionContext context) {
ArrayList<Expr> row = Lists.newArrayList();
Expr key = (Expr) visit(context.key);
Expr value = (Expr) visit(context.value);
row.add(key);
row.add(value);
return new ValueList(row, createPos(context));
}
@Override
public ParseNode visitMapConstructor(StarRocksParser.MapConstructorContext context) {
NodePosition pos = createPos(context);
Type type = Type.ANY_MAP;
if (context.mapType() != null) {
type = getMapType(context.mapType());
}
List<Expr> exprs;
if (context.mapExpressionList() != null) {
List<ValueList> rowValues = visit(context.mapExpressionList().mapExpression(), ValueList.class);
List<List<Expr>> rows = rowValues.stream().map(ValueList::getRow).collect(toList());
exprs = rows.stream().flatMap(Collection::stream).collect(Collectors.toList());
int num = exprs.size();
if (num % 2 == 1) {
throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(num, "map()",
"Arguments must be in key/value pairs"), pos);
}
} else {
exprs = Collections.emptyList();
}
return new MapExpr(type, exprs, pos);
}
@Override
public ParseNode visitCollectionSubscript(StarRocksParser.CollectionSubscriptContext context) {
Expr value = (Expr) visit(context.value);
Expr index = (Expr) visit(context.index);
return new CollectionElementExpr(value, index, false);
}
@Override
public ParseNode visitArraySlice(StarRocksParser.ArraySliceContext context) {
throw new ParsingException(PARSER_ERROR_MSG.unsupportedExpr("array slice"), createPos(context));
/*
Expr expr = (Expr) visit(context.primaryExpression());
IntLiteral lowerBound;
if (context.start != null) {
lowerBound = new IntLiteral(Long.parseLong(context.start.getText()));
} else {
lowerBound = new IntLiteral(0);
}
IntLiteral upperBound;
if (context.end != null) {
upperBound = new IntLiteral(Long.parseLong(context.end.getText()));
} else {
upperBound = new IntLiteral(-1);
}
return new ArraySliceExpr(expr, lowerBound, upperBound);
*/
}
@Override
public ParseNode visitInterval(StarRocksParser.IntervalContext context) {
return new IntervalLiteral((Expr) visit(context.value), (UnitIdentifier) visit(context.from),
createPos(context));
}
@Override
public ParseNode visitUnitIdentifier(StarRocksParser.UnitIdentifierContext context) {
return new UnitIdentifier(context.getText(), createPos(context));
}
@Override
public ParseNode visitUnitBoundary(StarRocksParser.UnitBoundaryContext context) {
return new UnitBoundary(context.getText(), createPos(context));
}
@Override
public ParseNode visitDereference(StarRocksParser.DereferenceContext ctx) {
Expr base = (Expr) visit(ctx.base);
NodePosition pos = createPos(ctx);
String fieldName;
if (ctx.DOT_IDENTIFIER() != null) {
fieldName = ctx.DOT_IDENTIFIER().getText().substring(1);
} else {
fieldName = ((Identifier) visit(ctx.fieldName)).getValue();
}
if (base instanceof SlotRef) {
SlotRef tmp = (SlotRef) base;
List<String> parts = new ArrayList<>(tmp.getQualifiedName().getParts());
parts.add(fieldName);
return new SlotRef(QualifiedName.of(parts, pos));
} else if (base instanceof SubfieldExpr) {
SubfieldExpr subfieldExpr = (SubfieldExpr) base;
ImmutableList.Builder<String> builder = new ImmutableList.Builder<>();
for (String tmpFieldName : subfieldExpr.getFieldNames()) {
builder.add(tmpFieldName);
}
builder.add(fieldName);
return new SubfieldExpr(subfieldExpr.getChild(0), builder.build(), pos);
} else {
return new SubfieldExpr(base, ImmutableList.of(fieldName), pos);
}
}
@Override
public ParseNode visitColumnReference(StarRocksParser.ColumnReferenceContext context) {
Identifier identifier = (Identifier) visit(context.identifier());
List<String> parts = new ArrayList<>();
parts.add(identifier.getValue());
QualifiedName qualifiedName = QualifiedName.of(parts, createPos(context));
return new SlotRef(qualifiedName);
}
@Override
public ParseNode visitArrowExpression(StarRocksParser.ArrowExpressionContext context) {
Expr expr = (Expr) visit(context.primaryExpression());
StringLiteral stringLiteral = (StringLiteral) visit(context.string());
return new ArrowExpr(expr, stringLiteral, createPos(context));
}
@Override
public ParseNode visitLambdaFunctionExpr(StarRocksParser.LambdaFunctionExprContext context) {
List<String> names = Lists.newLinkedList();
if (context.identifierList() != null) {
final List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class);
names = identifierList.stream().map(Identifier::getValue).collect(toList());
} else {
names.add(((Identifier) visit(context.identifier())).getValue());
}
List<Expr> arguments = Lists.newLinkedList();
Expr expr = null;
if (context.expression() != null) {
expr = (Expr) visit(context.expression());
} else if (context.expressionList() != null) {
List<Expr> exprs = visit(context.expressionList().expression(), Expr.class);
if (exprs.size() != 2) {
throw new IllegalArgumentException("The right part of map lambda functions can accept at most 2 " +
"expressions, but there are " + exprs.size());
}
expr = new MapExpr(Type.ANY_MAP, exprs);
}
arguments.add(expr);
for (int i = 0; i < names.size(); ++i) {
arguments.add(new LambdaArgument(names.get(i)));
}
return new LambdaFunctionExpr(arguments);
}
@Override
public ParseNode visitUserVariable(StarRocksParser.UserVariableContext context) {
String variable = ((Identifier) visit(context.identifierOrString())).getValue();
return new VariableExpr(variable, SetType.USER, createPos(context));
}
@Override
public ParseNode visitSystemVariable(StarRocksParser.SystemVariableContext context) {
SetType setType = getVariableType(context.varType());
return new VariableExpr(((Identifier) visit(context.identifier())).getValue(), setType, createPos(context));
}
    @Override
    public ParseNode visitCollate(StarRocksParser.CollateContext context) {
        // COLLATE clauses are accepted but dropped: only the underlying expression is kept.
        return visit(context.primaryExpression());
    }
    @Override
    public ParseNode visitParenthesizedExpression(StarRocksParser.ParenthesizedExpressionContext context) {
        // Parentheses are purely syntactic; unwrap to the inner expression.
        return visit(context.expression());
    }
    @Override
    public ParseNode visitUnquotedIdentifier(StarRocksParser.UnquotedIdentifierContext context) {
        // Plain identifier: the raw token text is used as-is.
        return new Identifier(context.getText(), createPos(context));
    }
@Override
public ParseNode visitBackQuotedIdentifier(StarRocksParser.BackQuotedIdentifierContext context) {
return new Identifier(context.getText().replace("`", ""), createPos(context));
}
    @Override
    public ParseNode visitDigitIdentifier(StarRocksParser.DigitIdentifierContext context) {
        // Identifier that starts with digits: the raw token text is used as-is.
        return new Identifier(context.getText(), createPos(context));
    }
@Override
public ParseNode visitDictionaryGetExpr(StarRocksParser.DictionaryGetExprContext context) {
List<Expr> params = visit(context.expressionList().expression(), Expr.class);
return new DictionaryGetExpr(params);
}
private static StatementBase.ExplainLevel getExplainType(StarRocksParser.ExplainDescContext context) {
StatementBase.ExplainLevel explainLevel = StatementBase.ExplainLevel.NORMAL;
if (context.LOGICAL() != null) {
explainLevel = StatementBase.ExplainLevel.LOGICAL;
} else if (context.ANALYZE() != null) {
explainLevel = StatementBase.ExplainLevel.ANALYZE;
} else if (context.VERBOSE() != null) {
explainLevel = StatementBase.ExplainLevel.VERBOSE;
} else if (context.COSTS() != null) {
explainLevel = StatementBase.ExplainLevel.COST;
} else if (context.SCHEDULER() != null) {
explainLevel = StatementBase.ExplainLevel.SCHEDULER;
}
return explainLevel;
}
public static SetType getVariableType(StarRocksParser.VarTypeContext context) {
if (context == null) {
return null;
}
if (context.GLOBAL() != null) {
return SetType.GLOBAL;
} else if (context.VERBOSE() != null) {
return SetType.VERBOSE;
} else {
return SetType.SESSION;
}
}
@Override
public ParseNode visitAssignment(StarRocksParser.AssignmentContext context) {
String column = ((Identifier) visit(context.identifier())).getValue();
Expr expr = (Expr) visit(context.expressionOrDefault());
return new ColumnAssignment(column, expr, createPos(context));
}
    @Override
    public ParseNode visitPartitionDesc(StarRocksParser.PartitionDescContext context) {
        // Builds the PARTITION BY clause AST. Three shapes are handled:
        //   1) expression partitioning: PARTITION BY <functionCall> [range descs]
        //   2) column list without LIST/RANGE keyword -> list partitioning
        //   3) PARTITION BY RANGE/LIST (col, ...) with range descs
        List<PartitionDesc> partitionDescList = new ArrayList<>();
        StarRocksParser.IdentifierListContext identifierListContext = context.identifierList();
        if (context.functionCall() != null) {
            // Expression partitioning: collect the nested range partitions, then wrap
            // the generated RangePartitionDesc together with the partition expression.
            for (StarRocksParser.RangePartitionDescContext rangePartitionDescContext : context.rangePartitionDesc()) {
                final PartitionDesc rangePartitionDesc = (PartitionDesc) visit(rangePartitionDescContext);
                partitionDescList.add(rangePartitionDesc);
            }
            FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.functionCall());
            // Partition columns are extracted (and validated) from the expression itself.
            List<String> columnList = AnalyzerUtils.checkAndExtractPartitionCol(functionCallExpr, null);
            RangePartitionDesc rangePartitionDesc = new RangePartitionDesc(columnList, partitionDescList);
            return new ExpressionPartitionDesc(rangePartitionDesc, functionCallExpr);
        }
        List<Identifier> identifierList = visit(identifierListContext.identifier(), Identifier.class);
        if (context.LIST() == null && context.RANGE() == null) {
            // Bare column list: list partitioning with no pre-declared partitions.
            List<String> columnList = identifierList.stream().map(Identifier::getValue).collect(toList());
            return new ListPartitionDesc(columnList, new ArrayList<>());
        } else {
            List<PartitionDesc> partitionDesc = visit(context.rangePartitionDesc(), PartitionDesc.class);
            return new RangePartitionDesc(
                    identifierList.stream().map(Identifier::getValue).collect(toList()),
                    partitionDesc,
                    createPos(context));
        }
    }
@Override
public ParseNode visitSingleRangePartition(StarRocksParser.SingleRangePartitionContext context) {
PartitionKeyDesc partitionKeyDesc = (PartitionKeyDesc) visit(context.partitionKeyDesc());
boolean ifNotExists = context.IF() != null;
Map<String, String> properties = null;
if (context.propertyList() != null) {
properties = new HashMap<>();
List<Property> propertyList = visit(context.propertyList().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return new SingleRangePartitionDesc(ifNotExists, ((Identifier) visit(context.identifier())).getValue(),
partitionKeyDesc, properties, createPos(context));
}
@Override
public ParseNode visitMultiRangePartition(StarRocksParser.MultiRangePartitionContext context) {
NodePosition pos = createPos(context);
if (context.interval() != null) {
IntervalLiteral intervalLiteral = (IntervalLiteral) visit(context.interval());
Expr expr = intervalLiteral.getValue();
long intervalVal;
if (expr instanceof IntLiteral) {
intervalVal = ((IntLiteral) expr).getLongValue();
} else {
throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(),
"RANGE DESC"), expr.getPos());
}
return new MultiRangePartitionDesc(
((StringLiteral) visit(context.string(0))).getStringValue(),
((StringLiteral) visit(context.string(1))).getStringValue(),
intervalVal,
intervalLiteral.getUnitIdentifier().getDescription(),
pos);
} else {
return new MultiRangePartitionDesc(
((StringLiteral) visit(context.string(0))).getStringValue(),
((StringLiteral) visit(context.string(1))).getStringValue(),
Long.parseLong(context.INTEGER_VALUE().getText()),
null,
pos);
}
}
@Override
public ParseNode visitPartitionRangeDesc(StarRocksParser.PartitionRangeDescContext context) {
return new PartitionRangeDesc(
((StringLiteral) visit(context.string(0))).getStringValue(),
((StringLiteral) visit(context.string(1))).getStringValue(),
createPos(context));
}
@Override
public ParseNode visitSingleItemListPartitionDesc(StarRocksParser.SingleItemListPartitionDescContext context) {
List<String> values =
context.stringList().string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue())
.collect(toList());
boolean ifNotExists = context.IF() != null;
Map<String, String> properties = null;
if (context.propertyList() != null) {
properties = new HashMap<>();
List<Property> propertyList = visit(context.propertyList().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return new SingleItemListPartitionDesc(ifNotExists, ((Identifier) visit(context.identifier())).getValue(),
values, properties, createPos(context));
}
@Override
public ParseNode visitMultiItemListPartitionDesc(StarRocksParser.MultiItemListPartitionDescContext context) {
boolean ifNotExists = context.IF() != null;
List<List<String>> multiValues = new ArrayList<>();
for (StarRocksParser.StringListContext stringListContext : context.stringList()) {
List<String> values =
stringListContext.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue())
.collect(toList());
multiValues.add(values);
}
Map<String, String> properties = null;
if (context.propertyList() != null) {
properties = new HashMap<>();
List<Property> propertyList = visit(context.propertyList().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return new MultiItemListPartitionDesc(ifNotExists, ((Identifier) visit(context.identifier())).getValue(),
multiValues, properties, createPos(context));
}
@Override
public ParseNode visitPartitionKeyDesc(StarRocksParser.PartitionKeyDescContext context) {
PartitionKeyDesc partitionKeyDesc;
NodePosition pos = createPos(context);
if (context.LESS() != null) {
if (context.MAXVALUE() != null) {
return PartitionKeyDesc.createMaxKeyDesc();
}
List<PartitionValue> partitionValueList =
visit(context.partitionValueList().get(0).partitionValue(), PartitionValue.class);
partitionKeyDesc = new PartitionKeyDesc(partitionValueList, pos);
} else {
List<PartitionValue> lowerPartitionValueList =
visit(context.partitionValueList().get(0).partitionValue(), PartitionValue.class);
List<PartitionValue> upperPartitionValueList =
visit(context.partitionValueList().get(1).partitionValue(), PartitionValue.class);
partitionKeyDesc = new PartitionKeyDesc(lowerPartitionValueList, upperPartitionValueList, pos);
}
return partitionKeyDesc;
}
@Override
public ParseNode visitPartitionValue(StarRocksParser.PartitionValueContext context) {
NodePosition pos = createPos(context);
if (context.MAXVALUE() != null) {
return PartitionValue.MAX_VALUE;
} else {
return new PartitionValue(((StringLiteral) visit(context.string())).getStringValue(), pos);
}
}
@Override
public ParseNode visitDistributionDesc(StarRocksParser.DistributionDescContext context) {
int buckets = 0;
NodePosition pos = createPos(context);
if (context.INTEGER_VALUE() != null) {
buckets = Integer.parseInt(context.INTEGER_VALUE().getText());
}
if (context.HASH() != null) {
List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class);
return new HashDistributionDesc(buckets,
identifierList.stream().map(Identifier::getValue).collect(toList()),
pos);
} else {
return new RandomDistributionDesc(buckets, pos);
}
}
    @Override
    public ParseNode visitRefreshSchemeDesc(StarRocksParser.RefreshSchemeDescContext context) {
        // Builds a materialized-view refresh scheme: ASYNC [START ...] [EVERY interval],
        // MANUAL, or INCREMENTAL; returns null when none of those keywords matched.
        LocalDateTime startTime = LocalDateTime.now();   // default when no START clause is given
        IntervalLiteral intervalLiteral = null;
        NodePosition pos = createPos(context);
        // Default moment comes from server config; an explicit DEFERRED/IMMEDIATE overrides it.
        MaterializedView.RefreshMoment refreshMoment =
                Config.default_mv_refresh_immediate ?
                        MaterializedView.RefreshMoment.IMMEDIATE : MaterializedView.RefreshMoment.DEFERRED;
        if (context.DEFERRED() != null) {
            refreshMoment = MaterializedView.RefreshMoment.DEFERRED;
        } else if (context.IMMEDIATE() != null) {
            refreshMoment = MaterializedView.RefreshMoment.IMMEDIATE;
        }
        if (context.ASYNC() != null) {
            boolean defineStartTime = false;
            if (context.START() != null) {
                NodePosition timePos = createPos(context.string());
                StringLiteral stringLiteral = (StringLiteral) visit(context.string());
                DateTimeFormatter dateTimeFormatter = null;
                try {
                    // Probe the literal's format first, then parse with hour/second/minute defaults.
                    dateTimeFormatter = DateUtils.probeFormat(stringLiteral.getStringValue());
                    LocalDateTime tempStartTime = DateUtils.
                            parseStringWithDefaultHSM(stringLiteral.getStringValue(), dateTimeFormatter);
                    startTime = tempStartTime;
                    defineStartTime = true;
                } catch (AnalysisException e) {
                    // Unparseable start time is a parse error at the literal's position.
                    throw new ParsingException(PARSER_ERROR_MSG.invalidDateFormat(stringLiteral.getStringValue()),
                            timePos);
                }
            }
            if (context.interval() != null) {
                intervalLiteral = (IntervalLiteral) visit(context.interval());
                // Only plain integer intervals are accepted here (e.g. EVERY(INTERVAL 5 MINUTE)).
                if (!(intervalLiteral.getValue() instanceof IntLiteral)) {
                    String exprSql = intervalLiteral.getValue().toSql();
                    throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(exprSql, "INTERVAL"),
                            createPos(context.interval()));
                }
            }
            return new AsyncRefreshSchemeDesc(defineStartTime, startTime, intervalLiteral, refreshMoment, pos);
        } else if (context.MANUAL() != null) {
            return new ManualRefreshSchemeDesc(refreshMoment, pos);
        } else if (context.INCREMENTAL() != null) {
            return new IncrementalRefreshSchemeDesc(refreshMoment, pos);
        }
        return null;
    }
@Override
public ParseNode visitProperty(StarRocksParser.PropertyContext context) {
return new Property(
((StringLiteral) visit(context.key)).getStringValue(),
((StringLiteral) visit(context.value)).getStringValue(),
createPos(context));
}
@Override
public ParseNode visitOutfile(StarRocksParser.OutfileContext context) {
Map<String, String> properties = new HashMap<>();
if (context.properties() != null) {
List<Property> propertyList = visit(context.properties().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
String format = null;
if (context.fileFormat() != null) {
if (context.fileFormat().identifier() != null) {
format = ((Identifier) visit(context.fileFormat().identifier())).getValue();
} else if (context.fileFormat().string() != null) {
format = ((StringLiteral) visit(context.fileFormat().string())).getStringValue();
}
}
return new OutFileClause(
((StringLiteral) visit(context.file)).getStringValue(),
format,
properties, createPos(context));
}
@Override
public ParseNode visitColumnNameWithComment(StarRocksParser.ColumnNameWithCommentContext context) {
String comment = null;
if (context.comment() != null) {
comment = ((StringLiteral) visit(context.comment())).getStringValue();
}
return new ColWithComment(((Identifier) visit(context.identifier())).getValue(), comment,
createPos(context));
}
@Override
public ParseNode visitIdentifierOrStringOrStar(StarRocksParser.IdentifierOrStringOrStarContext context) {
String s = null;
if (context.identifier() != null) {
return visit(context.identifier());
} else if (context.string() != null) {
s = ((StringLiteral) visit(context.string())).getStringValue();
} else if (context.ASTERISK_SYMBOL() != null) {
s = "*";
}
return new Identifier(s, createPos(context));
}
@Override
public ParseNode visitIdentifierOrString(StarRocksParser.IdentifierOrStringContext context) {
String s = null;
if (context.identifier() != null) {
return visit(context.identifier());
} else if (context.string() != null) {
s = ((StringLiteral) visit(context.string())).getStringValue();
}
return new Identifier(s, createPos(context));
}
@Override
public ParseNode visitUserWithHostAndBlanket(StarRocksParser.UserWithHostAndBlanketContext context) {
Identifier user = (Identifier) visit(context.identifierOrString(0));
Identifier host = (Identifier) visit(context.identifierOrString(1));
return new UserIdentity(user.getValue(), host.getValue(), true, createPos(context), false);
}
@Override
public ParseNode visitUserWithHost(StarRocksParser.UserWithHostContext context) {
Identifier user = (Identifier) visit(context.identifierOrString(0));
Identifier host = (Identifier) visit(context.identifierOrString(1));
return new UserIdentity(user.getValue(), host.getValue(), false, createPos(context), false);
}
@Override
public ParseNode visitUserWithoutHost(StarRocksParser.UserWithoutHostContext context) {
Identifier user = (Identifier) visit(context.identifierOrString());
return new UserIdentity(user.getValue(), "%", false, createPos(context), false);
}
@Override
public ParseNode visitPrepareStatement(StarRocksParser.PrepareStatementContext context) {
String stmtName = context.identifier().getText();
StatementBase statement = null;
if (context.prepareSql().statement() != null) {
statement = (StatementBase) visitStatement(context.prepareSql().statement());
return new PrepareStmt(stmtName, statement, parameters);
} else if (context.prepareSql().SINGLE_QUOTED_TEXT() != null) {
String sql = context.prepareSql().SINGLE_QUOTED_TEXT().getText();
statement = SqlParser.parseSingleStatement(sql.substring(1, sql.length() - 1), sqlMode);
if (null != statement && statement instanceof PrepareStmt) {
PrepareStmt prepareStmt = (PrepareStmt) statement;
return new PrepareStmt(stmtName, prepareStmt.getInnerStmt(), prepareStmt.getParameters());
} else {
return new PrepareStmt(stmtName, statement, ImmutableList.of());
}
}
throw new ParsingException("error prepare sql");
}
@Override
public ParseNode visitDeallocateStatement(StarRocksParser.DeallocateStatementContext ctx) {
return new DeallocateStmt(ctx.identifier().getText());
}
@Override
public ParseNode visitExecuteStatement(StarRocksParser.ExecuteStatementContext context) {
String stmtName = context.identifier().getText();
List<StarRocksParser.IdentifierOrStringContext> queryStatementContext = context.identifierOrString();
List<Expr> variableExprs = new ArrayList<>();
if (context.identifierOrString() != null) {
queryStatementContext.forEach(varNameContext -> {
Identifier identifier = (Identifier) visit(varNameContext);
variableExprs.add(new VariableExpr(identifier.getValue(), SetType.USER));
});
}
return new ExecuteStmt(stmtName, variableExprs);
}
    @Override
    public ParseNode visitParameter(StarRocksParser.ParameterContext ctx) {
        // A '?' placeholder in a prepared statement. Lazily creates the builder-level
        // parameter list and registers each placeholder with the next sequential slot id,
        // so parameters are numbered in the order they are visited.
        if (parameters == null) {
            parameters = new ArrayList<>();
        }
        Parameter parameter = new Parameter(placeHolderSlotId++);
        parameters.add(parameter);
        return parameter;
    }
    @Override
    public ParseNode visitDecommissionDiskClause(StarRocksParser.DecommissionDiskClauseContext context) {
        // Disk decommission is not implemented in this build; reject at parse time.
        throw new SemanticException("not support");
    }
    @Override
    public ParseNode visitCancelDecommissionDiskClause(StarRocksParser.CancelDecommissionDiskClauseContext context) {
        // Cancelling a disk decommission is not implemented in this build; reject at parse time.
        throw new SemanticException("not support");
    }
    @Override
    public ParseNode visitDisableDiskClause(StarRocksParser.DisableDiskClauseContext context) {
        // Disabling a disk is not implemented in this build; reject at parse time.
        throw new SemanticException("not support");
    }
    @Override
    public ParseNode visitCancelDisableDiskClause(StarRocksParser.CancelDisableDiskClauseContext context) {
        // Cancelling a disk disable is not implemented in this build; reject at parse time.
        throw new SemanticException("not support");
    }
private <T> List<T> visit(List<? extends ParserRuleContext> contexts, Class<T> clazz) {
return contexts.stream()
.map(this::visit)
.map(clazz::cast)
.collect(toList());
}
private <T> List<T> visitIfPresent(List<? extends ParserRuleContext> contexts, Class<T> clazz) {
if (contexts != null && contexts.size() != 0) {
return contexts.stream()
.map(this::visit)
.map(clazz::cast)
.collect(toList());
} else {
return null;
}
}
private ParseNode visitIfPresent(ParserRuleContext context) {
if (context != null) {
return visit(context);
} else {
return null;
}
}
private FunctionArgsDef getFunctionArgsDef(StarRocksParser.TypeListContext typeList) {
List<TypeDef> typeDefList = new ArrayList<>();
for (StarRocksParser.TypeContext typeContext : typeList.type()) {
typeDefList.add(new TypeDef(getType(typeContext)));
}
boolean isVariadic = typeList.DOTDOTDOT() != null;
return new FunctionArgsDef(typeDefList, isVariadic);
}
    // Convenience: visit an identifier context and unwrap its string value.
    private String getIdentifierName(StarRocksParser.IdentifierContext context) {
        return ((Identifier) visit(context)).getValue();
    }
    // Assembles a dotted qualified name (e.g. catalog.db.table) from the parse tree.
    // Two child kinds contribute parts: DOT_IDENTIFIER tokens (a '.'-prefixed identifier,
    // so substring(1) strips the leading dot) and ordinary identifier sub-contexts.
    private QualifiedName getQualifiedName(StarRocksParser.QualifiedNameContext context) {
        List<String> parts = new ArrayList<>();
        NodePosition pos = createPos(context);
        for (ParseTree c : context.children) {
            if (c instanceof TerminalNode) {
                TerminalNode t = (TerminalNode) c;
                if (t.getSymbol().getType() == StarRocksParser.DOT_IDENTIFIER) {
                    parts.add(t.getText().substring(1));
                }
            } else if (c instanceof StarRocksParser.IdentifierContext) {
                StarRocksParser.IdentifierContext identifierContext = (StarRocksParser.IdentifierContext) c;
                Identifier identifier = (Identifier) visit(identifierContext);
                parts.add(identifier.getValue());
            }
        }
        return QualifiedName.of(parts, pos);
    }
private TaskName qualifiedNameToTaskName(QualifiedName qualifiedName) {
List<String> parts = qualifiedName.getParts();
if (parts.size() == 2) {
return new TaskName(parts.get(0), parts.get(1), qualifiedName.getPos());
} else if (parts.size() == 1) {
return new TaskName(null, parts.get(0), qualifiedName.getPos());
} else {
throw new ParsingException(PARSER_ERROR_MSG.invalidTaskFormat(qualifiedName.toString()),
qualifiedName.getPos());
}
}
private TableName qualifiedNameToTableName(QualifiedName qualifiedName) {
List<String> parts = qualifiedName.getParts();
if (parts.size() == 3) {
return new TableName(parts.get(0), parts.get(1), parts.get(2), qualifiedName.getPos());
} else if (parts.size() == 2) {
return new TableName(null, qualifiedName.getParts().get(0), qualifiedName.getParts().get(1),
qualifiedName.getPos());
} else if (parts.size() == 1) {
return new TableName(null, null, qualifiedName.getParts().get(0), qualifiedName.getPos());
} else {
throw new ParsingException(PARSER_ERROR_MSG.invalidTableFormat(qualifiedName.toString()));
}
}
public Type getType(StarRocksParser.TypeContext context) {
if (context.baseType() != null) {
return getBaseType(context.baseType());
} else if (context.decimalType() != null) {
return getDecimalType(context.decimalType());
} else if (context.arrayType() != null) {
return getArrayType(context.arrayType());
} else if (context.structType() != null) {
return getStructType(context.structType());
} else {
return getMapType(context.mapType());
}
}
private Type getBaseType(StarRocksParser.BaseTypeContext context) {
int length = -1;
if (context.typeParameter() != null) {
length = Integer.parseInt(context.typeParameter().INTEGER_VALUE().toString());
}
if (context.STRING() != null || context.TEXT() != null) {
ScalarType type = ScalarType.createVarcharType(ScalarType.DEFAULT_STRING_LENGTH);
return type;
} else if (context.VARCHAR() != null) {
ScalarType type = ScalarType.createVarcharType(length);
return type;
} else if (context.CHAR() != null) {
ScalarType type = ScalarType.createCharType(length);
return type;
} else if (context.SIGNED() != null) {
return Type.INT;
} else if (context.HLL() != null) {
ScalarType type = ScalarType.createHllType();
return type;
} else if (context.BINARY() != null || context.VARBINARY() != null) {
ScalarType type = ScalarType.createVarbinary(length);
return type;
} else {
return ScalarType.createType(context.getChild(0).getText());
}
}
    // Resolves DECIMAL-family types. Precision/scale are optional and fall back in steps:
    // (p, s) -> (p) -> type default. DECIMAL32/64/128 require decimal-v3 to be enabled.
    public ScalarType getDecimalType(StarRocksParser.DecimalTypeContext context) {
        Integer precision = null;
        Integer scale = null;
        if (context.precision != null) {
            precision = Integer.parseInt(context.precision.getText());
            // Scale is only meaningful when a precision was given.
            if (context.scale != null) {
                scale = Integer.parseInt(context.scale.getText());
            }
        }
        if (context.DECIMAL() != null || context.NUMBER() != null || context.NUMERIC() != null) {
            if (precision != null) {
                if (scale != null) {
                    return ScalarType.createUnifiedDecimalType(precision, scale);
                }
                return ScalarType.createUnifiedDecimalType(precision);
            }
            // Bare DECIMAL defaults to (10, 0).
            return ScalarType.createUnifiedDecimalType(10, 0);
        } else if (context.DECIMAL32() != null || context.DECIMAL64() != null || context.DECIMAL128() != null) {
            try {
                ScalarType.checkEnableDecimalV3();
            } catch (AnalysisException e) {
                throw new SemanticException(e.getMessage());
            }
            // The keyword text (DECIMAL32/64/128) doubles as the PrimitiveType enum name.
            final PrimitiveType primitiveType = PrimitiveType.valueOf(context.children.get(0).getText().toUpperCase());
            if (precision != null) {
                if (scale != null) {
                    return ScalarType.createDecimalV3Type(primitiveType, precision, scale);
                }
                return ScalarType.createDecimalV3Type(primitiveType, precision);
            }
            return ScalarType.createDecimalV3Type(primitiveType);
        } else if (context.DECIMALV2() != null) {
            if (precision != null) {
                if (scale != null) {
                    return ScalarType.createDecimalV2Type(precision, scale);
                }
                return ScalarType.createDecimalV2Type(precision);
            }
            return ScalarType.createDecimalV2Type();
        } else {
            throw new IllegalArgumentException("Unsupported type " + context.getText());
        }
    }
    // ARRAY<T>: the element type is resolved recursively via getType.
    public ArrayType getArrayType(StarRocksParser.ArrayTypeContext context) {
        return new ArrayType(getType(context.type()));
    }
public StructType getStructType(StarRocksParser.StructTypeContext context) {
ArrayList<StructField> fields = new ArrayList<>();
List<StarRocksParser.SubfieldDescContext> subfields =
context.subfieldDescs().subfieldDesc();
for (StarRocksParser.SubfieldDescContext type : subfields) {
Identifier fieldIdentifier = (Identifier) visit(type.identifier());
String fieldName = fieldIdentifier.getValue();
fields.add(new StructField(fieldName, getType(type.type()), null));
}
return new StructType(fields);
}
public MapType getMapType(StarRocksParser.MapTypeContext context) {
Type keyType = getType(context.type(0));
if (!keyType.isValidMapKeyType()) {
throw new ParsingException(PARSER_ERROR_MSG.unsupportedType(keyType.toString(),
"for map's key, which should be base types"),
createPos(context.type(0)));
}
Type valueType = getType(context.type(1));
return new MapType(keyType, valueType);
}
private LabelName qualifiedNameToLabelName(QualifiedName qualifiedName) {
List<String> parts = qualifiedName.getParts();
if (parts.size() == 2) {
return new LabelName(parts.get(0), parts.get(1), qualifiedName.getPos());
} else if (parts.size() == 1) {
return new LabelName(null, parts.get(0), qualifiedName.getPos());
} else {
throw new ParsingException(PARSER_ERROR_MSG.invalidTableFormat(qualifiedName.toString()),
qualifiedName.getPos());
}
}
private Map<String, String> getProperties(StarRocksParser.PropertiesContext context) {
Map<String, String> properties = new HashMap<>();
if (context != null && context.property() != null) {
List<Property> propertyList = visit(context.property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return properties;
}
private Map<String, String> getPropertyList(StarRocksParser.PropertyListContext context) {
Map<String, String> properties = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
if (context != null && context.property() != null) {
List<Property> propertyList = visit(context.property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return properties;
}
    // Flattens a load statement's optional sub-clauses (column separator, row delimiter,
    // COLUMNS mapping, WHERE filter, PARTITION list) into a single ordered property list.
    // Each clause is independent; a context may carry any subset of them.
    private List<ParseNode> getLoadPropertyList(List<StarRocksParser.LoadPropertiesContext> loadPropertiesContexts) {
        List<ParseNode> loadPropertyList = new ArrayList<>();
        Preconditions.checkNotNull(loadPropertiesContexts, "load properties is null");
        for (StarRocksParser.LoadPropertiesContext loadPropertiesContext : loadPropertiesContexts) {
            if (loadPropertiesContext.colSeparatorProperty() != null) {
                StringLiteral literal = (StringLiteral) visit(loadPropertiesContext.colSeparatorProperty().string());
                loadPropertyList.add(new ColumnSeparator(literal.getValue(), literal.getPos()));
            }
            if (loadPropertiesContext.rowDelimiterProperty() != null) {
                StringLiteral literal = (StringLiteral) visit(loadPropertiesContext.rowDelimiterProperty().string());
                loadPropertyList.add(new RowDelimiter(literal.getValue(), literal.getPos()));
            }
            if (loadPropertiesContext.importColumns() != null) {
                ImportColumnsStmt importColumnsStmt = (ImportColumnsStmt) visit(loadPropertiesContext.importColumns());
                loadPropertyList.add(importColumnsStmt);
            }
            if (loadPropertiesContext.expression() != null) {
                Expr where = (Expr) visit(loadPropertiesContext.expression());
                loadPropertyList.add(new ImportWhereStmt(where, where.getPos()));
            }
            if (loadPropertiesContext.partitionNames() != null) {
                loadPropertyList.add(visit(loadPropertiesContext.partitionNames()));
            }
        }
        return loadPropertyList;
    }
@Override
public ParseNode visitImportColumns(StarRocksParser.ImportColumnsContext importColumnsContext) {
List<ImportColumnDesc> columns = new ArrayList<>();
for (StarRocksParser.QualifiedNameContext qualifiedNameContext :
importColumnsContext.columnProperties().qualifiedName()) {
String column = ((Identifier) (visit(qualifiedNameContext))).getValue();
ImportColumnDesc columnDesc = new ImportColumnDesc(column, null, createPos(qualifiedNameContext));
columns.add(columnDesc);
}
for (StarRocksParser.AssignmentContext assignmentContext :
importColumnsContext.columnProperties().assignment()) {
ColumnAssignment columnAssignment = (ColumnAssignment) (visit(assignmentContext));
Expr expr = columnAssignment.getExpr();
ImportColumnDesc columnDesc = new ImportColumnDesc(columnAssignment.getColumn(), expr,
createPos(assignmentContext));
columns.add(columnDesc);
}
return new ImportColumnsStmt(columns, createPos(importColumnsContext));
}
private Map<String, String> getJobProperties(StarRocksParser.JobPropertiesContext jobPropertiesContext) {
Map<String, String> jobProperties = new HashMap<>();
if (jobPropertiesContext != null) {
List<Property> propertyList = visit(jobPropertiesContext.properties().property(), Property.class);
for (Property property : propertyList) {
jobProperties.put(property.getKey(), property.getValue());
}
}
return jobProperties;
}
private Map<String, String> getDataSourceProperties(
StarRocksParser.DataSourcePropertiesContext dataSourcePropertiesContext) {
Map<String, String> dataSourceProperties = new HashMap<>();
if (dataSourcePropertiesContext != null) {
List<Property> propertyList = visit(dataSourcePropertiesContext.propertyList().property(), Property.class);
for (Property property : propertyList) {
dataSourceProperties.put(property.getKey(), property.getValue());
}
}
return dataSourceProperties;
}
public List<String> getColumnNames(StarRocksParser.ColumnAliasesContext context) {
if (context == null) {
return null;
}
List<Identifier> targetColumnNamesIdentifiers = visitIfPresent(context.identifier(), Identifier.class);
if (targetColumnNamesIdentifiers != null) {
return targetColumnNamesIdentifiers.stream()
.map(Identifier::getValue).map(String::toLowerCase).collect(toList());
} else {
return null;
}
}
    // Source position spanning a whole rule context (its start..stop tokens).
    private NodePosition createPos(ParserRuleContext context) {
        return createPos(context.start, context.stop);
    }
    // Builds a source position, degrading gracefully when tokens are missing:
    // no start token -> the zero position; no stop token -> a single start point.
    private NodePosition createPos(Token start, Token stop) {
        if (start == null) {
            return NodePosition.ZERO;
        }
        if (stop == null) {
            return new NodePosition(start.getLine(), start.getCharPositionInLine());
        }
        return new NodePosition(start, stop);
    }
    // Builds a LabelName from an optional database qualifier and an optional label identifier.
    // Note the position handling: when a db qualifier is present its start token overwrites the
    // start taken from the name context, so the span covers "db.name" rather than just "name".
    private LabelName createLabelName(StarRocksParser.QualifiedNameContext dbCtx,
                                      StarRocksParser.IdentifierContext nameCtx) {
        Token start = null;
        Token stop = null;
        String name = null;
        if (nameCtx != null) {
            name = getIdentifierName(nameCtx);
            start = nameCtx.start;
            stop = nameCtx.stop;
        }
        String dbName = null;
        if (dbCtx != null) {
            dbName = getQualifiedName(dbCtx).toString();
            start = dbCtx.start;
        }
        return new LabelName(dbName, name, createPos(start, stop));
    }
private List<HintNode> extractQueryScopeHintNode() {
List<HintNode> res = Lists.newArrayList();
for (Map.Entry<ParserRuleContext, List<HintNode>> entry : hintMap.entrySet()) {
for (HintNode hintNode : entry.getValue()) {
if (hintNode.getScope() == HintNode.Scope.QUERY) {
res.add(hintNode);
}
}
}
Collections.sort(res);
return res;
}
} | class AstBuilder extends StarRocksBaseVisitor<ParseNode> {
    // Effective SQL mode for this parse: the constructor argument OR'ed with any SET_VAR hint value.
    private final long sqlMode;
    // Hints gathered per parser rule context; identity-keyed so distinct contexts never collide.
    private final IdentityHashMap<ParserRuleContext, List<HintNode>> hintMap;
    // Next slot id handed out to '?' placeholders (see visitParameter).
    private int placeHolderSlotId = 0;
    // Lazily created list of '?' parameters encountered while visiting; null when none seen.
    private List<Parameter> parameters;
    // 2^63 - 1: upper bound of BIGINT literals.
    private static final BigInteger LONG_MAX = new BigInteger("9223372036854775807");
    // 2^127: absolute-value bound for LARGEINT literals.
    private static final BigInteger LARGEINT_MAX_ABS =
            new BigInteger("170141183460469231731687303715884105728");
    // Date arithmetic functions given special handling elsewhere in the builder.
    // NOTE(review): FunctionSet.DATE_ADD appears twice — possibly one entry was meant to be
    // another function (e.g. DAYS_ADD); confirm against the intended function set.
    private static final List<String> DATE_FUNCTIONS =
            Lists.newArrayList(FunctionSet.DATE_ADD,
                    FunctionSet.ADDDATE,
                    FunctionSet.DATE_ADD, FunctionSet.DATE_SUB,
                    FunctionSet.SUBDATE,
                    FunctionSet.DAYS_SUB);
    // Functions accepted inside expression-based PARTITION BY clauses.
    private static final List<String> PARTITION_FUNCTIONS =
            Lists.newArrayList(FunctionSet.SUBSTR, FunctionSet.SUBSTRING,
                    FunctionSet.FROM_UNIXTIME, FunctionSet.FROM_UNIXTIME_MS,
                    FunctionSet.STR2DATE);
    // Convenience constructor: no pre-collected hints.
    public AstBuilder(long sqlMode) {
        this(sqlMode, new IdentityHashMap<>());
    }
    // Full constructor. Scans the supplied hints for SET_VAR sql_mode values and ORs them into
    // the effective sqlMode. If several SetVarHints are present, each iteration overwrites
    // hintSqlMode, so the last one visited wins before the final OR.
    public AstBuilder(long sqlMode, IdentityHashMap<ParserRuleContext, List<HintNode>> hintMap) {
        this.hintMap = hintMap;
        long hintSqlMode = 0L;
        for (Map.Entry<ParserRuleContext, List<HintNode>> entry : hintMap.entrySet()) {
            for (HintNode hint : entry.getValue()) {
                if (hint instanceof SetVarHint) {
                    SetVarHint setVarHint = (SetVarHint) hint;
                    hintSqlMode = setVarHint.getSqlModeHintValue();
                }
            }
        }
        this.sqlMode = sqlMode | hintSqlMode;
    }
    // '?' placeholders collected during the visit, in slot order; null when none were seen.
    public List<Parameter> getParameters() {
        return parameters;
    }
    @Override
    public ParseNode visitSingleStatement(StarRocksParser.SingleStatementContext context) {
        // Entry point for one parsed statement. Attaches any pending QUERY-scope hints to the
        // statement, then clears the hint map so hints cannot leak into a later statement.
        if (context.statement() != null) {
            StatementBase stmt = (StatementBase) visit(context.statement());
            if (MapUtils.isNotEmpty(hintMap)) {
                stmt.setAllQueryScopeHints(extractQueryScopeHintNode());
                hintMap.clear();
            }
            return stmt;
        } else {
            return visit(context.emptyStatement());
        }
    }
    @Override
    public ParseNode visitEmptyStatement(StarRocksParser.EmptyStatementContext context) {
        // A bare ';' parses to an explicit empty-statement node.
        return new EmptyStmt();
    }
@Override
public ParseNode visitUseDatabaseStatement(StarRocksParser.UseDatabaseStatementContext context) {
NodePosition pos = createPos(context);
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
List<String> parts = qualifiedName.getParts();
if (parts.size() == 1) {
return new UseDbStmt(null, parts.get(0), pos);
} else if (parts.size() == 2) {
return new UseDbStmt(parts.get(0), parts.get(1), pos);
} else {
throw new ParsingException(PARSER_ERROR_MSG.invalidDbFormat(qualifiedName.toString()),
qualifiedName.getPos());
}
}
@Override
public ParseNode visitUseCatalogStatement(StarRocksParser.UseCatalogStatementContext context) {
StringLiteral literal = (StringLiteral) visit(context.string());
return new UseCatalogStmt(literal.getValue(), createPos(context));
}
@Override
public ParseNode visitSetCatalogStatement(StarRocksParser.SetCatalogStatementContext context) {
Identifier identifier = (Identifier) visit(context.identifierOrString());
String catalogName = identifier.getValue();
return new SetCatalogStmt(catalogName, createPos(context));
}
@Override
public ParseNode visitShowDatabasesStatement(StarRocksParser.ShowDatabasesStatementContext context) {
String catalog = null;
NodePosition pos = createPos(context);
if (context.catalog != null) {
QualifiedName dbName = getQualifiedName(context.catalog);
catalog = dbName.toString();
}
if (context.pattern != null) {
StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
return new ShowDbStmt(stringLiteral.getValue(), null, catalog, pos);
} else if (context.expression() != null) {
return new ShowDbStmt(null, (Expr) visit(context.expression()), catalog, pos);
} else {
return new ShowDbStmt(null, null, catalog, pos);
}
}
@Override
public ParseNode visitAlterDbQuotaStatement(StarRocksParser.AlterDbQuotaStatementContext context) {
String dbName = ((Identifier) visit(context.identifier(0))).getValue();
NodePosition pos = createPos(context);
if (context.DATA() != null) {
String quotaValue = ((Identifier) visit(context.identifier(1))).getValue();
return new AlterDatabaseQuotaStmt(dbName,
AlterDatabaseQuotaStmt.QuotaType.DATA,
quotaValue, pos);
} else {
String quotaValue = context.INTEGER_VALUE().getText();
return new AlterDatabaseQuotaStmt(dbName,
AlterDatabaseQuotaStmt.QuotaType.REPLICA,
quotaValue, pos);
}
}
@Override
public ParseNode visitCreateDbStatement(StarRocksParser.CreateDbStatementContext context) {
String catalogName = "";
if (context.catalog != null) {
catalogName = getIdentifierName(context.catalog);
}
String dbName = getIdentifierName(context.database);
Map<String, String> properties = new HashMap<>();
if (context.properties() != null) {
List<Property> propertyList = visit(context.properties().property(), Property.class);
for (Property property : propertyList) {
properties.put(property.getKey(), property.getValue());
}
}
return new CreateDbStmt(context.IF() != null, catalogName, dbName, properties, createPos(context));
}
@Override
public ParseNode visitDropDbStatement(StarRocksParser.DropDbStatementContext context) {
String catalogName = "";
if (context.catalog != null) {
catalogName = getIdentifierName(context.catalog);
}
String dbName = getIdentifierName(context.database);
return new DropDbStmt(context.IF() != null, catalogName, dbName, context.FORCE() != null,
createPos(context));
}
@Override
public ParseNode visitShowCreateDbStatement(StarRocksParser.ShowCreateDbStatementContext context) {
String dbName = ((Identifier) visit(context.identifier())).getValue();
return new ShowCreateDbStmt(dbName, createPos(context));
}
@Override
public ParseNode visitAlterDatabaseRenameStatement(StarRocksParser.AlterDatabaseRenameStatementContext context) {
String dbName = ((Identifier) visit(context.identifier(0))).getValue();
String newName = ((Identifier) visit(context.identifier(1))).getValue();
return new AlterDatabaseRenameStatement(dbName, newName, createPos(context));
}
@Override
public ParseNode visitRecoverDbStmt(StarRocksParser.RecoverDbStmtContext context) {
String dbName = ((Identifier) visit(context.identifier())).getValue();
return new RecoverDbStmt(dbName, createPos(context));
}
@Override
public ParseNode visitShowDataStmt(StarRocksParser.ShowDataStmtContext context) {
NodePosition pos = createPos(context);
if (context.FROM() != null) {
QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
TableName targetTableName = qualifiedNameToTableName(qualifiedName);
return new ShowDataStmt(targetTableName.getDb(), targetTableName.getTbl(), pos);
} else {
return new ShowDataStmt(null, null, pos);
}
}
    @Override
    public ParseNode visitCreateTableStatement(StarRocksParser.CreateTableStatementContext context) {
        // Assembles a full CREATE [EXTERNAL] TABLE statement. Most clauses are optional;
        // absent clauses map to null (or "" for the engine name), matching CreateTableStmt's
        // positional constructor contract.
        Map<String, String> properties = null;
        if (context.properties() != null) {
            properties = new HashMap<>();
            List<Property> propertyList = visit(context.properties().property(), Property.class);
            for (Property property : propertyList) {
                properties.put(property.getKey(), property.getValue());
            }
        }
        // BROKER/engine-specific extended properties, kept separate from table properties.
        Map<String, String> extProperties = null;
        if (context.extProperties() != null) {
            extProperties = new HashMap<>();
            List<Property> propertyList = visit(context.extProperties().properties().property(), Property.class);
            for (Property property : propertyList) {
                extProperties.put(property.getKey(), property.getValue());
            }
        }
        TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
        // Column defs are resolved first because the partition clause may need them
        // (expression partitioning validates against the column list).
        List<ColumnDef> columnDefs = null;
        if (context.columnDesc() != null) {
            columnDefs = getColumnDefs(context.columnDesc());
        }
        return new CreateTableStmt(
                context.IF() != null,
                context.EXTERNAL() != null,
                tableName,
                columnDefs,
                context.indexDesc() == null ? null : getIndexDefs(context.indexDesc()),
                context.engineDesc() == null ? "" :
                        ((Identifier) visit(context.engineDesc().identifier())).getValue(),
                context.charsetDesc() == null ? null :
                        ((Identifier) visit(context.charsetDesc().identifierOrString())).getValue(),
                context.keyDesc() == null ? null : getKeysDesc(context.keyDesc()),
                context.partitionDesc() == null ? null : getPartitionDesc(context.partitionDesc(), columnDefs),
                context.distributionDesc() == null ? null : (DistributionDesc) visit(context.distributionDesc()),
                properties,
                extProperties,
                context.comment() == null ? null : ((StringLiteral) visit(context.comment().string())).getStringValue(),
                context.rollupDesc() == null ?
                        null : context.rollupDesc().rollupItem().stream().map(this::getRollup).collect(toList()),
                context.orderByDesc() == null ? null :
                        visit(context.orderByDesc().identifierList().identifier(), Identifier.class)
                                .stream().map(Identifier::getValue).collect(toList()));
    }
    // Resolves the PARTITION BY clause into a PartitionDesc. Four grammar shapes are handled:
    // 1) functionCall: automatic (expression) partitioning, optionally with pre-created batch
    //    range partitions — all batch descs must share one time granularity;
    // 2) primaryExpression: expression partitioning (RANGE descs optional);
    // 3) identifierList + RANGE/LIST: classic column partitioning;
    // 4) identifierList alone: automatic list partitioning (no pre-created partitions allowed).
    private PartitionDesc getPartitionDesc(StarRocksParser.PartitionDescContext context, List<ColumnDef> columnDefs) {
        List<PartitionDesc> partitionDescList = new ArrayList<>();
        if (context.functionCall() != null) {
            // Automatic partitioning: only batch (multi-range) partition pre-creation is legal.
            String currentGranularity = null;
            for (StarRocksParser.RangePartitionDescContext rangePartitionDescContext : context.rangePartitionDesc()) {
                final PartitionDesc rangePartitionDesc = (PartitionDesc) visit(rangePartitionDescContext);
                if (!(rangePartitionDesc instanceof MultiRangePartitionDesc)) {
                    throw new ParsingException("Automatic partition table creation only supports " +
                            "batch create partition syntax", rangePartitionDesc.getPos());
                }
                MultiRangePartitionDesc multiRangePartitionDesc = (MultiRangePartitionDesc) rangePartitionDesc;
                String descGranularity = multiRangePartitionDesc.getTimeUnit().toLowerCase();
                // All pre-created batches must agree on one granularity (e.g. all "day").
                if (currentGranularity == null) {
                    currentGranularity = descGranularity;
                } else if (!currentGranularity.equals(descGranularity)) {
                    throw new ParsingException("The partition granularity of automatic partition table " +
                            "batch creation in advance should be consistent", rangePartitionDesc.getPos());
                }
                partitionDescList.add(rangePartitionDesc);
            }
            FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.functionCall());
            List<String> columnList = AnalyzerUtils.checkAndExtractPartitionCol(functionCallExpr, columnDefs);
            AnalyzerUtils.checkAutoPartitionTableLimit(functionCallExpr, currentGranularity);
            RangePartitionDesc rangePartitionDesc = new RangePartitionDesc(columnList, partitionDescList);
            rangePartitionDesc.setAutoPartitionTable(true);
            return new ExpressionPartitionDesc(rangePartitionDesc, functionCallExpr);
        }
        StarRocksParser.PrimaryExpressionContext primaryExpressionContext = context.primaryExpression();
        if (primaryExpressionContext != null) {
            // Expression partitioning: PARTITION BY <expr> [RANGE (...)].
            Expr primaryExpression = (Expr) visit(primaryExpressionContext);
            if (context.RANGE() != null) {
                for (StarRocksParser.RangePartitionDescContext rangePartitionDescContext : context.rangePartitionDesc()) {
                    final PartitionDesc rangePartitionDesc = (PartitionDesc) visit(rangePartitionDescContext);
                    partitionDescList.add(rangePartitionDesc);
                }
            }
            List<String> columnList = checkAndExtractPartitionColForRange(primaryExpression, false);
            RangePartitionDesc rangePartitionDesc = new RangePartitionDesc(columnList, partitionDescList);
            if (primaryExpression instanceof FunctionCallExpr) {
                FunctionCallExpr functionCallExpr = (FunctionCallExpr) primaryExpression;
                String functionName = functionCallExpr.getFnName().getFunction();
                // from_unixtime(_ms) yields a string; wrap in a cast so the key is DATETIME.
                if (FunctionSet.FROM_UNIXTIME.equals(functionName)
                        || FunctionSet.FROM_UNIXTIME_MS.equals(functionName)) {
                    primaryExpression = new CastExpr(TypeDef.create(PrimitiveType.DATETIME), primaryExpression);
                }
            }
            return new ExpressionPartitionDesc(rangePartitionDesc, primaryExpression);
        }
        // Classic column partitioning: PARTITION BY RANGE|LIST (col, ...) (...).
        List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class);
        List<String> columnList = identifierList.stream().map(Identifier::getValue).collect(toList());
        if (context.RANGE() != null) {
            for (StarRocksParser.RangePartitionDescContext rangePartitionDescContext : context.rangePartitionDesc()) {
                final PartitionDesc rangePartitionDesc = (PartitionDesc) visit(rangePartitionDescContext);
                partitionDescList.add(rangePartitionDesc);
            }
            return new RangePartitionDesc(columnList, partitionDescList);
        } else if (context.LIST() != null) {
            for (StarRocksParser.ListPartitionDescContext listPartitionDescContext : context.listPartitionDesc()) {
                final PartitionDesc listPartitionDesc = (PartitionDesc) visit(listPartitionDescContext);
                partitionDescList.add(listPartitionDesc);
            }
            return new ListPartitionDesc(columnList, partitionDescList);
        } else {
            // Bare column list => automatic list partitioning; pre-created partitions are illegal.
            if (context.listPartitionDesc().size() > 0) {
                throw new ParsingException("Does not support creating partitions in advance");
            }
            ListPartitionDesc listPartitionDesc = new ListPartitionDesc(columnList, partitionDescList);
            listPartitionDesc.setAutoPartitionTable(true);
            return listPartitionDesc;
        }
    }
/**
 * Validates a RANGE partition expression and extracts the partition column it
 * references. Accepts a function call whose first argument is a plain column
 * reference, optionally wrapped in a CAST. {@code hasCast} records that a CAST
 * was unwrapped by an enclosing recursion level.
 *
 * @param expr    the PARTITION BY expression to validate
 * @param hasCast true when a CAST has already been stripped from {@code expr}
 * @return a single-element list with the referenced column name, or an empty
 *         list when {@code expr} is not a function call
 * @throws ParsingException when the expression shape is unsupported
 */
private List<String> checkAndExtractPartitionColForRange(Expr expr, boolean hasCast) {
    // Strip one CAST layer and retry, remembering that a cast was present.
    if (expr instanceof CastExpr) {
        CastExpr castExpr = (CastExpr) expr;
        return checkAndExtractPartitionColForRange(castExpr.getChild(0), true);
    }
    NodePosition pos = expr.getPos();
    List<String> columnList = new ArrayList<>();
    if (expr instanceof FunctionCallExpr) {
        FunctionCallExpr functionCallExpr = (FunctionCallExpr) expr;
        String functionName = functionCallExpr.getFnName().getFunction().toLowerCase();
        List<Expr> paramsExpr = functionCallExpr.getParams().exprs();
        if (PARTITION_FUNCTIONS.contains(functionName)) {
            // Only a bare column reference is allowed as the first argument.
            Expr firstExpr = paramsExpr.get(0);
            if (firstExpr instanceof SlotRef) {
                columnList.add(((SlotRef) firstExpr).getColumnName());
            } else {
                throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(), "PARTITION BY"),
                        pos);
            }
        } else {
            throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(), "PARTITION BY"), pos);
        }
        // from_unixtime / from_unixtime_ms must not be wrapped in a CAST and
        // must be called with exactly one argument.
        if (functionName.equals(FunctionSet.FROM_UNIXTIME) || functionName.equals(FunctionSet.FROM_UNIXTIME_MS)) {
            if (hasCast || paramsExpr.size() > 1) {
                throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(), "PARTITION BY"), pos);
            }
        }
    }
    return columnList;
}
/**
 * Builds an ADD ROLLUP clause from one rollup item: name, column list,
 * optional DUPLICATE KEY columns, optional FROM base rollup and properties.
 */
private AlterClause getRollup(StarRocksParser.RollupItemContext rollupItemContext) {
    String rollupName = ((Identifier) visit(rollupItemContext.identifier())).getValue();
    List<Identifier> rollupColumns =
            visit(rollupItemContext.identifierList().identifier(), Identifier.class);
    // Optional DUPLICATE KEY (...) column names.
    List<String> dupKeys = null;
    if (rollupItemContext.dupKeys() != null) {
        dupKeys = visit(rollupItemContext.dupKeys().identifierList().identifier(), Identifier.class)
                .stream().map(Identifier::getValue).collect(toList());
    }
    // Optional FROM <rollup> naming the base rollup to build from.
    String baseRollupName = null;
    if (rollupItemContext.fromRollup() != null) {
        baseRollupName = ((Identifier) visit(rollupItemContext.fromRollup().identifier())).getValue();
    }
    // Optional PROPERTIES ("k" = "v", ...) map.
    Map<String, String> properties = null;
    if (rollupItemContext.properties() != null) {
        properties = new HashMap<>();
        for (Property property : visit(rollupItemContext.properties().property(), Property.class)) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new AddRollupClause(rollupName,
            rollupColumns.stream().map(Identifier::getValue).collect(toList()),
            dupKeys, baseRollupName, properties, createPos(rollupItemContext));
}
/**
 * Maps the key-type keyword (PRIMARY/DUPLICATE/AGGREGATE/UNIQUE) and its
 * column list to a KeysDesc. The type stays null when no keyword matched.
 */
private KeysDesc getKeysDesc(StarRocksParser.KeyDescContext context) {
    KeysType keysType = null;
    if (context.PRIMARY() != null) {
        keysType = KeysType.PRIMARY_KEYS;
    } else if (context.DUPLICATE() != null) {
        keysType = KeysType.DUP_KEYS;
    } else if (context.AGGREGATE() != null) {
        keysType = KeysType.AGG_KEYS;
    } else if (context.UNIQUE() != null) {
        keysType = KeysType.UNIQUE_KEYS;
    }
    List<String> keyColumns = visit(context.identifierList().identifier(), Identifier.class)
            .stream().map(Identifier::getValue).collect(toList());
    return new KeysDesc(keysType, keyColumns, createPos(context));
}
/**
 * Converts each index description (name, columns, type, optional comment,
 * optional properties) into an IndexDef.
 */
private List<IndexDef> getIndexDefs(List<StarRocksParser.IndexDescContext> indexDesc) {
    List<IndexDef> result = new ArrayList<>();
    for (StarRocksParser.IndexDescContext ctx : indexDesc) {
        String indexName = ((Identifier) visit(ctx.identifier())).getValue();
        List<String> indexColumns = visit(ctx.identifierList().identifier(), Identifier.class)
                .stream().map(Identifier::getValue).collect(toList());
        String comment = null;
        if (ctx.comment() != null) {
            comment = ((StringLiteral) visit(ctx.comment())).getStringValue();
        }
        result.add(new IndexDef(indexName, indexColumns, getIndexType(ctx.indexType()),
                comment, getPropertyList(ctx.propertyList()), createPos(ctx)));
    }
    return result;
}
/** Converts every column description into a ColumnDef, preserving order. */
private List<ColumnDef> getColumnDefs(List<StarRocksParser.ColumnDescContext> columnDesc) {
    // Method reference instead of the redundant lambda wrapper.
    return columnDesc.stream().map(this::getColumnDef).collect(toList());
}
/**
 * Builds a ColumnDef from one CREATE TABLE column description. Handles the
 * optional charset, KEY marker, aggregate type, NULL/NOT NULL, AUTO_INCREMENT,
 * DEFAULT clause, generated-column expression and trailing comment.
 *
 * @throws ParsingException when an AUTO_INCREMENT column is declared NULL, or
 *         when a generated column is declared NOT NULL or as a key column
 */
private ColumnDef getColumnDef(StarRocksParser.ColumnDescContext context) {
    Identifier colIdentifier = (Identifier) visit(context.identifier());
    String columnName = colIdentifier.getValue();
    TypeDef typeDef = new TypeDef(getType(context.type()), createPos(context.type()));
    String charsetName = context.charsetName() != null ?
            ((Identifier) visit(context.charsetName().identifier())).getValue() : null;
    boolean isKey = context.KEY() != null;
    AggregateType aggregateType =
            context.aggDesc() != null ? AggregateType.valueOf(context.aggDesc().getText().toUpperCase()) : null;
    // Tri-state nullability: null means "not specified", letting downstream
    // code choose the default.
    Boolean isAllowNull = null;
    if (context.NOT() != null && context.NULL() != null) {
        isAllowNull = false;
    } else if (context.NULL() != null) {
        isAllowNull = true;
    }
    Boolean isAutoIncrement = null;
    if (context.AUTO_INCREMENT() != null) {
        isAutoIncrement = true;
    }
    // AUTO_INCREMENT columns may not be declared NULL and are implicitly NOT NULL.
    if (isAutoIncrement != null && isAllowNull != null && isAllowNull) {
        throw new ParsingException(PARSER_ERROR_MSG.nullColFoundInPK(columnName), colIdentifier.getPos());
    }
    if (isAutoIncrement != null) {
        isAllowNull = false;
    }
    ColumnDef.DefaultValueDef defaultValueDef = ColumnDef.DefaultValueDef.NOT_SET;
    final StarRocksParser.DefaultDescContext defaultDescContext = context.defaultDesc();
    if (defaultDescContext != null) {
        if (defaultDescContext.string() != null) {
            // DEFAULT '<literal>'
            String value = ((StringLiteral) visit(defaultDescContext.string())).getStringValue();
            defaultValueDef = new ColumnDef.DefaultValueDef(true, new StringLiteral(value));
        } else if (defaultDescContext.NULL() != null) {
            defaultValueDef = ColumnDef.DefaultValueDef.NULL_DEFAULT_VALUE;
        } else if (defaultDescContext.CURRENT_TIMESTAMP() != null) {
            defaultValueDef = ColumnDef.DefaultValueDef.CURRENT_TIMESTAMP_VALUE;
        } else if (defaultDescContext.qualifiedName() != null) {
            // DEFAULT <function>() — stored as a zero-argument function call.
            String functionName = defaultDescContext.qualifiedName().getText().toLowerCase();
            defaultValueDef = new ColumnDef.DefaultValueDef(true,
                    new FunctionCallExpr(functionName, new ArrayList<>()));
        }
    }
    final StarRocksParser.GeneratedColumnDescContext generatedColumnDescContext =
            context.generatedColumnDesc();
    Expr expr = null;
    if (generatedColumnDescContext != null) {
        // Generated columns must be nullable and cannot be key columns.
        // (Was "isAllowNull == false"; "!isAllowNull" is the idiomatic form.)
        if (isAllowNull != null && !isAllowNull) {
            throw new ParsingException(PARSER_ERROR_MSG.foundNotNull("Generated Column"));
        }
        if (isKey) {
            throw new ParsingException(PARSER_ERROR_MSG.isKey("Generated Column"));
        }
        expr = (Expr) visit(generatedColumnDescContext.expression());
    }
    String comment = context.comment() == null ? "" :
            ((StringLiteral) visit(context.comment().string())).getStringValue();
    return new ColumnDef(columnName, typeDef, charsetName, isKey, aggregateType, isAllowNull, defaultValueDef,
            isAutoIncrement, expr, comment, createPos(context));
}
/**
 * CREATE TEMPORARY TABLE ... AS <query>. Gated behind the experimental
 * temporary-table FE config flag; modeled as a CTAS with default engine.
 */
@Override
public ParseNode visitCreateTemporaryTableStatement(StarRocksParser.CreateTemporaryTableStatementContext context) {
    // Feature gate: temporary tables must be enabled explicitly in FE config.
    if (!Config.enable_experimental_temporary_table) {
        throw new SemanticException(
                "Temporary table feature is experimental and disabled by default, could be enabled through " +
                        ": admin set frontend config('enable_experimental_temporary_table' = 'true')");
    }
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    // Column defs, keys, partitioning etc. are inferred from the query, so the
    // corresponding positional arguments are null.
    CreateTableStmt createTableStmt = new CreateTableStmt(
            false,
            false,
            tableName,
            null,
            EngineType.defaultEngine().name(),
            null,
            null,
            null,
            new HashMap<>(),
            null,
            null);
    return new CreateTableAsSelectStmt(createTableStmt, null,
            (QueryStatement) visit(context.queryStatement()));
}
/**
 * CREATE TABLE ... AS SELECT. Builds the embedded CreateTableStmt (most column
 * metadata is inferred from the query, so many positional arguments are null)
 * and wraps it with the optional column-name list and the query.
 */
@Override
public ParseNode visitCreateTableAsSelectStatement(StarRocksParser.CreateTableAsSelectStatementContext context) {
    // Collect PROPERTIES ("k" = "v", ...) into a plain map.
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    PartitionDesc partitionDesc = null;
    if (context.partitionDesc() != null) {
        partitionDesc = (PartitionDesc) visit(context.partitionDesc());
        // A list partition desc without an explicit LIST keyword marks an
        // automatic (expression) partition table.
        if (partitionDesc instanceof ListPartitionDesc && context.partitionDesc().LIST() == null) {
            ((ListPartitionDesc) partitionDesc).setAutoPartitionTable(true);
        }
    }
    CreateTableStmt createTableStmt = new CreateTableStmt(
            context.IF() != null,
            false,
            qualifiedNameToTableName(getQualifiedName(context.qualifiedName())),
            null,
            context.indexDesc() == null ? null : getIndexDefs(context.indexDesc()),
            "",
            null,
            context.keyDesc() == null ? null : getKeysDesc(context.keyDesc()),
            partitionDesc,
            context.distributionDesc() == null ? null : (DistributionDesc) visit(context.distributionDesc()),
            properties,
            null,
            context.comment() == null ? null :
                    ((StringLiteral) visit(context.comment().string())).getStringValue(),
            null,
            context.orderByDesc() == null ? null :
                    visit(context.orderByDesc().identifierList().identifier(), Identifier.class)
                            .stream().map(Identifier::getValue).collect(toList())
    );
    // Optional explicit column-name list for the new table.
    List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
    return new CreateTableAsSelectStmt(
            createTableStmt,
            columns == null ? null : columns.stream().map(Identifier::getValue).collect(toList()),
            (QueryStatement) visit(context.queryStatement()),
            createPos(context));
}
/**
 * CREATE TABLE <target> LIKE <source>: qualifiedName(0) is the new table,
 * qualifiedName(1) is the table whose definition is copied.
 */
@Override
public ParseNode visitCreateTableLikeStatement(StarRocksParser.CreateTableLikeStatementContext context) {
    TableName targetName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName(0)));
    TableName sourceName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName(1)));
    PartitionDesc partitionDesc = null;
    if (context.partitionDesc() != null) {
        partitionDesc = (PartitionDesc) visit(context.partitionDesc());
    }
    DistributionDesc distributionDesc = null;
    if (context.distributionDesc() != null) {
        distributionDesc = (DistributionDesc) visit(context.distributionDesc());
    }
    return new CreateTableLikeStmt(context.IF() != null, targetName, sourceName,
            partitionDesc, distributionDesc, getProperties(context.properties()), createPos(context));
}
/**
 * SHOW CREATE TABLE / VIEW / MATERIALIZED VIEW — the present keywords select
 * which flavor of statement to build.
 */
@Override
public ParseNode visitShowCreateTableStatement(StarRocksParser.ShowCreateTableStatementContext context) {
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    ShowCreateTableStmt.CreateTableType type;
    if (context.MATERIALIZED() != null && context.VIEW() != null) {
        type = ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW;
    } else if (context.VIEW() != null) {
        type = ShowCreateTableStmt.CreateTableType.VIEW;
    } else {
        type = ShowCreateTableStmt.CreateTableType.TABLE;
    }
    return new ShowCreateTableStmt(targetTableName, type, createPos(context));
}
/** DROP TABLE [IF EXISTS] <name> [FORCE]; the view flag is false here. */
@Override
public ParseNode visitDropTableStatement(StarRocksParser.DropTableStatementContext context) {
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    boolean ifExists = context.IF() != null && context.EXISTS() != null;
    return new DropTableStmt(ifExists, targetTableName, false, context.FORCE() != null, createPos(context));
}
/** RECOVER TABLE <name>. */
@Override
public ParseNode visitRecoverTableStatement(StarRocksParser.RecoverTableStatementContext context) {
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    return new RecoverTableStmt(tableName, createPos(context));
}
/**
 * TRUNCATE TABLE <name> [PARTITION (...)]. The node position is extended to
 * cover the partition list when one is present.
 */
@Override
public ParseNode visitTruncateTableStatement(StarRocksParser.TruncateTableStatementContext context) {
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    Token stop = context.stop;
    PartitionNames partitionNames = null;
    if (context.partitionNames() != null) {
        stop = context.partitionNames().stop;
        partitionNames = (PartitionNames) visit(context.partitionNames());
    }
    NodePosition pos = createPos(context.start, stop);
    return new TruncateTableStmt(new TableRef(targetTableName, null, partitionNames, pos));
}
/**
 * SHOW [FULL] TABLES [FROM [catalog.]db] [LIKE pattern | WHERE expr].
 * A two-part qualified name is interpreted as catalog.database.
 */
@Override
public ParseNode visitShowTableStatement(StarRocksParser.ShowTableStatementContext context) {
    boolean isVerbose = context.FULL() != null;
    String catalog = null;
    String database = null;
    if (context.qualifiedName() != null) {
        List<String> parts = getQualifiedName(context.qualifiedName()).getParts();
        if (parts.size() == 2) {
            catalog = parts.get(0);
            database = parts.get(1);
        } else if (parts.size() == 1) {
            database = parts.get(0);
        }
    }
    NodePosition pos = createPos(context);
    if (context.pattern != null) {
        String pattern = ((StringLiteral) visit(context.pattern)).getValue();
        return new ShowTableStmt(database, isVerbose, pattern, null, catalog, pos);
    }
    if (context.expression() != null) {
        return new ShowTableStmt(database, isVerbose, null, (Expr) visit(context.expression()), catalog, pos);
    }
    return new ShowTableStmt(database, isVerbose, null, null, catalog, pos);
}
/** DESC[RIBE] <table> [ALL]; the ALL flag is passed through to the stmt. */
@Override
public ParseNode visitDescTableStatement(StarRocksParser.DescTableStatementContext context) {
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    return new DescribeStmt(targetTableName, context.ALL() != null, createPos(context));
}
/**
 * SHOW TABLE STATUS [FROM db] [LIKE pattern] [WHERE expr].
 */
@Override
public ParseNode visitShowTableStatusStatement(StarRocksParser.ShowTableStatusStatementContext context) {
    // Guard on the labeled token we actually read (context.db), keeping this
    // consistent with visitShowColumnStatement instead of checking the
    // unlabeled qualifiedName() accessor.
    QualifiedName dbName = null;
    if (context.db != null) {
        dbName = getQualifiedName(context.db);
    }
    String pattern = null;
    if (context.pattern != null) {
        StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
        pattern = stringLiteral.getValue();
    }
    Expr where = null;
    if (context.expression() != null) {
        where = (Expr) visit(context.expression());
    }
    return new ShowTableStatusStmt(dbName == null ? null : dbName.toString(), pattern, where,
            createPos(context));
}
/**
 * SHOW [FULL] COLUMNS FROM <table> [FROM db] [LIKE pattern | WHERE expr].
 */
@Override
public ParseNode visitShowColumnStatement(StarRocksParser.ShowColumnStatementContext context) {
    TableName table = qualifiedNameToTableName(getQualifiedName(context.table));
    // Optional FROM <db> clause.
    String dbName = context.db == null ? null : getQualifiedName(context.db).toString();
    // Optional LIKE '<pattern>'.
    String pattern = context.pattern == null ? null
            : ((StringLiteral) visit(context.pattern)).getValue();
    // Optional WHERE filter.
    Expr where = context.expression() == null ? null : (Expr) visit(context.expression());
    return new ShowColumnStmt(table, dbName, pattern, context.FULL() != null, where, createPos(context));
}
/**
 * REFRESH EXTERNAL TABLE <name> [PARTITION ('p1', ...)]; partition names are
 * given as string literals.
 */
@Override
public ParseNode visitRefreshTableStatement(StarRocksParser.RefreshTableStatementContext context) {
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    List<String> partitionNames = null;
    if (context.string() != null) {
        partitionNames = visit(context.string(), StringLiteral.class)
                .stream().map(StringLiteral::getStringValue).collect(toList());
    }
    return new RefreshTableStmt(targetTableName, partitionNames, createPos(context));
}
/**
 * ALTER TABLE. Three forms: ADD ROLLUP items, DROP ROLLUP names, or a generic
 * list of alter clauses; all produce an AlterTableStmt.
 */
@Override
public ParseNode visitAlterTableStatement(StarRocksParser.AlterTableStatementContext context) {
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    NodePosition pos = createPos(context);
    if (context.ROLLUP() == null) {
        // Generic ALTER TABLE with one or more alter clauses.
        return new AlterTableStmt(targetTableName, visit(context.alterClause(), AlterClause.class), pos);
    }
    if (context.ADD() != null) {
        // ALTER TABLE ... ADD ROLLUP r1 (...), r2 (...)
        List<AlterClause> clauses = context.rollupItem().stream().map(this::getRollup).collect(toList());
        return new AlterTableStmt(targetTableName, clauses, pos);
    }
    // ALTER TABLE ... DROP ROLLUP r1, r2
    List<AlterClause> clauses = new ArrayList<>();
    for (Identifier rollupName : visit(context.identifier(), Identifier.class)) {
        clauses.add(new DropRollupClause(rollupName.getValue(), null, rollupName.getPos()));
    }
    return new AlterTableStmt(targetTableName, clauses, pos);
}
/**
 * CANCEL ALTER TABLE ROLLUP/MATERIALIZED VIEW/OPTIMIZE/COLUMN, optionally
 * restricted to an explicit list of alter-job ids.
 */
@Override
public ParseNode visitCancelAlterTableStatement(StarRocksParser.CancelAlterTableStatementContext context) {
    // Which kind of alter job is being cancelled.
    ShowAlterStmt.AlterType alterType;
    if (context.ROLLUP() != null) {
        alterType = ShowAlterStmt.AlterType.ROLLUP;
    } else if (context.MATERIALIZED() != null && context.VIEW() != null) {
        alterType = ShowAlterStmt.AlterType.MATERIALIZED_VIEW;
    } else if (context.OPTIMIZE() != null) {
        alterType = ShowAlterStmt.AlterType.OPTIMIZE;
    } else {
        alterType = ShowAlterStmt.AlterType.COLUMN;
    }
    TableName dbTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    // Optional explicit job ids.
    List<Long> alterJobIdList = null;
    if (context.INTEGER_VALUE() != null) {
        alterJobIdList = new ArrayList<>();
        for (ParseTree node : context.INTEGER_VALUE()) {
            alterJobIdList.add(Long.parseLong(node.getText()));
        }
    }
    return new CancelAlterTableStmt(alterType, dbTableName, alterJobIdList, createPos(context));
}
/**
 * SHOW ALTER TABLE ROLLUP/MATERIALIZED VIEW/OPTIMIZE/COLUMN
 * [FROM db] [WHERE expr] [ORDER BY ...] [LIMIT ...].
 */
@Override
public ParseNode visitShowAlterStatement(StarRocksParser.ShowAlterStatementContext context) {
    // Optional FROM <db>.
    String dbName = context.db == null ? null : getQualifiedName(context.db).toString();
    // Optional WHERE filter.
    Expr where = context.expression() == null ? null : (Expr) visit(context.expression());
    // Which alter-job category to list.
    ShowAlterStmt.AlterType alterType;
    if (context.ROLLUP() != null) {
        alterType = ShowAlterStmt.AlterType.ROLLUP;
    } else if (context.MATERIALIZED() != null && context.VIEW() != null) {
        alterType = ShowAlterStmt.AlterType.MATERIALIZED_VIEW;
    } else if (context.OPTIMIZE() != null) {
        alterType = ShowAlterStmt.AlterType.OPTIMIZE;
    } else {
        alterType = ShowAlterStmt.AlterType.COLUMN;
    }
    List<OrderByElement> orderByElements = null;
    if (context.ORDER() != null) {
        orderByElements = new ArrayList<>(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limitElement = context.limitElement() == null ? null
            : (LimitElement) visit(context.limitElement());
    return new ShowAlterStmt(alterType, dbName, where, orderByElements, limitElement, createPos(context));
}
/**
 * CREATE [OR REPLACE] VIEW [IF NOT EXISTS] <name> [(cols...)] AS <query>.
 * IF NOT EXISTS and OR REPLACE are mutually exclusive.
 */
@Override
public ParseNode visitCreateViewStatement(StarRocksParser.CreateViewStatementContext context) {
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName targetTableName = qualifiedNameToTableName(qualifiedName);
    List<ColWithComment> colWithComments = null;
    // isEmpty() instead of size() > 0 for the emptiness check.
    if (!context.columnNameWithComment().isEmpty()) {
        colWithComments = visit(context.columnNameWithComment(), ColWithComment.class);
    }
    if (context.IF() != null && context.REPLACE() != null) {
        throw new ParsingException(PARSER_ERROR_MSG.conflictedOptions("if not exists", "or replace"),
                createPos(context));
    }
    return new CreateViewStmt(
            context.IF() != null,
            context.REPLACE() != null,
            targetTableName,
            colWithComments,
            context.comment() == null ? null : ((StringLiteral) visit(context.comment())).getStringValue(),
            (QueryStatement) visit(context.queryStatement()), createPos(context));
}
/**
 * ALTER VIEW <name> [(col [COMMENT ...], ...)] AS <query>.
 */
@Override
public ParseNode visitAlterViewStatement(StarRocksParser.AlterViewStatementContext context) {
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName targetTableName = qualifiedNameToTableName(qualifiedName);
    List<ColWithComment> colWithComments = null;
    // isEmpty() instead of size() > 0 for the emptiness check.
    if (!context.columnNameWithComment().isEmpty()) {
        colWithComments = visit(context.columnNameWithComment(), ColWithComment.class);
    }
    QueryStatement queryStatement = (QueryStatement) visit(context.queryStatement());
    // Compute the statement position once and share it between clause and stmt.
    NodePosition pos = createPos(context);
    AlterClause alterClause = new AlterViewClause(colWithComments, queryStatement, pos);
    return new AlterViewStmt(targetTableName, alterClause, pos);
}
/** DROP VIEW [IF EXISTS] <name> — reuses DropTableStmt with the view flag set. */
@Override
public ParseNode visitDropViewStatement(StarRocksParser.DropViewStatementContext context) {
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    boolean ifExists = context.IF() != null && context.EXISTS() != null;
    return new DropTableStmt(ifExists, targetTableName, true, false, createPos(context));
}
/**
 * SHOW [TEMPORARY] PARTITIONS FROM <table> [WHERE ...] [ORDER BY ...] [LIMIT ...].
 */
@Override
public ParseNode visitShowPartitionsStatement(StarRocksParser.ShowPartitionsStatementContext context) {
    boolean temp = context.TEMPORARY() != null;
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    Expr where = context.expression() == null ? null : (Expr) visit(context.expression());
    // The stmt always receives a list; it is simply empty without ORDER BY.
    List<OrderByElement> orderByElements = new ArrayList<>();
    if (context.ORDER() != null) {
        orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limitElement = context.limitElement() == null ? null
            : (LimitElement) visit(context.limitElement());
    return new ShowPartitionsStmt(tableName, where, orderByElements, limitElement, temp, createPos(context));
}
/** RECOVER PARTITION <partition> FROM <table>. */
@Override
public ParseNode visitRecoverPartitionStatement(StarRocksParser.RecoverPartitionStatementContext context) {
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    String partitionName = ((Identifier) visit(context.identifier())).getValue();
    return new RecoverPartitionStmt(tableName, partitionName, createPos(context));
}
/**
 * SHOW TABLET — either by tablet id (SHOW TABLET <id>) or by table
 * (SHOW TABLET FROM <table> ...); a tablet id of -1 marks the table form.
 */
@Override
public ParseNode visitShowTabletStatement(StarRocksParser.ShowTabletStatementContext context) {
    NodePosition pos = createPos(context);
    if (context.INTEGER_VALUE() != null) {
        return new ShowTabletStmt(null, Long.parseLong(context.INTEGER_VALUE().getText()), pos);
    }
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName dbTblName = qualifiedNameToTableName(qualifiedName);
    PartitionNames partitionNames = null;
    if (context.partitionNames() != null) {
        partitionNames = (PartitionNames) visit(context.partitionNames());
    }
    Expr where = null;
    if (context.expression() != null) {
        where = (Expr) visit(context.expression());
    }
    List<OrderByElement> orderByElements = null;
    if (context.ORDER() != null) {
        orderByElements = new ArrayList<>();
        orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limitElement = null;
    if (context.limitElement() != null) {
        limitElement = (LimitElement) visit(context.limitElement());
    }
    // Reuse the position computed above instead of recomputing createPos(context).
    return new ShowTabletStmt(dbTblName, -1L, partitionNames, where, orderByElements, limitElement, pos);
}
/**
 * CREATE INDEX <name> ON <table> (cols...) [USING type] [COMMENT ...] — modeled
 * as an ALTER TABLE carrying a single CreateIndexClause.
 */
@Override
public ParseNode visitCreateIndexStatement(StarRocksParser.CreateIndexStatementContext context) {
    String indexName = ((Identifier) visit(context.identifier())).getValue();
    List<Identifier> columnList = visit(context.identifierList().identifier(), Identifier.class);
    // The index-def position spans from the index name to the column list, or
    // is extended to the comment when one is present.
    Token idxStart = context.identifier().start;
    Token idxStop = context.identifierList().stop;
    String comment = null;
    if (context.comment() != null) {
        comment = ((StringLiteral) visit(context.comment())).getStringValue();
        idxStop = context.comment().stop;
    }
    NodePosition idxPos = createPos(idxStart, idxStop);
    IndexDef indexDef = new IndexDef(indexName,
            columnList.stream().map(Identifier::getValue).collect(toList()),
            getIndexType(context.indexType()),
            comment, getPropertyList(context.propertyList()), idxPos);
    CreateIndexClause createIndexClause = new CreateIndexClause(indexDef, idxPos);
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName targetTableName = qualifiedNameToTableName(qualifiedName);
    return new AlterTableStmt(targetTableName, Lists.newArrayList(createIndexClause), createPos(context));
}
/** DROP INDEX <name> ON <table> — modeled as ALTER TABLE with a DropIndexClause. */
@Override
public ParseNode visitDropIndexStatement(StarRocksParser.DropIndexStatementContext context) {
    String indexName = ((Identifier) visit(context.identifier())).getValue();
    DropIndexClause dropIndexClause = new DropIndexClause(indexName, createPos(context.identifier()));
    TableName targetTableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    return new AlterTableStmt(targetTableName, Lists.newArrayList(dropIndexClause), createPos(context));
}
/** SHOW INDEX FROM <table> [FROM db]. */
@Override
public ParseNode visitShowIndexStatement(StarRocksParser.ShowIndexStatementContext context) {
    // Optional FROM <db> clause.
    String dbName = context.db == null ? null : getQualifiedName(context.db).toString();
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.table));
    return new ShowIndexStmt(dbName, tableName, createPos(context));
}
/**
 * Collects a PROPERTIES clause into a map with case-insensitive keys; returns
 * an empty map when the clause is absent.
 */
private Map<String, String> buildProperties(StarRocksParser.PropertiesContext properties) {
    Map<String, String> result = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    if (properties == null) {
        return result;
    }
    List<Property> propertyList = visit(properties.property(), Property.class);
    for (Property property : ListUtils.emptyIfNull(propertyList)) {
        result.put(property.getKey(), property.getValue());
    }
    return result;
}
/**
 * SUBMIT TASK [name] [PROPERTIES ...] AS (CTAS | INSERT). The wrapped
 * statement's start offset in the original SQL is recorded so the task
 * definition can be re-extracted later.
 */
@Override
public ParseNode visitSubmitTaskStatement(StarRocksParser.SubmitTaskStatementContext context) {
    // Optional task name.
    QualifiedName qualifiedName = null;
    if (context.qualifiedName() != null) {
        qualifiedName = getQualifiedName(context.qualifiedName());
    }
    // Properties come from the PROPERTIES clause plus any SET_VAR hints.
    Map<String, String> properties = buildProperties(context.properties());
    properties.putAll(extractVarHintValues(hintMap.get(context)));
    CreateTableAsSelectStmt createTableAsSelectStmt = null;
    InsertStmt insertStmt = null;
    if (context.createTableAsSelectStatement() != null) {
        createTableAsSelectStmt = (CreateTableAsSelectStmt) visit(context.createTableAsSelectStatement());
    } else if (context.insertStatement() != null) {
        insertStmt = (InsertStmt) visit(context.insertStatement());
    }
    // Single conditional assignment — the original's "int startIndex = 0"
    // initializer was dead, as both branches always reassigned it.
    int startIndex = createTableAsSelectStmt != null
            ? context.createTableAsSelectStatement().start.getStartIndex()
            : context.insertStatement().start.getStartIndex();
    NodePosition pos = createPos(context);
    TaskName taskName = qualifiedName == null
            ? new TaskName(null, null, pos)
            : qualifiedNameToTaskName(qualifiedName);
    if (createTableAsSelectStmt != null) {
        return new SubmitTaskStmt(taskName, properties, startIndex, createTableAsSelectStmt, pos);
    }
    return new SubmitTaskStmt(taskName, properties, startIndex, insertStmt, pos);
}
/** DROP TASK [FORCE] <name>. */
@Override
public ParseNode visitDropTaskStatement(StarRocksParser.DropTaskStatementContext context) {
    TaskName taskName = qualifiedNameToTaskName(getQualifiedName(context.qualifiedName()));
    return new DropTaskStmt(taskName, context.FORCE() != null, createPos(context));
}
// Interval units accepted in an ASYNC materialized-view refresh scheme.
// ImmutableList.of(...) replaces the more verbose Builder for a fixed list.
public static final ImmutableList<String> MATERIALIZEDVIEW_REFRESHSCHEME_SUPPORT_UNIT_IDENTIFIERS =
        ImmutableList.of("SECOND", "MINUTE", "HOUR", "DAY");
/**
 * Rejects refresh-interval units outside the supported set
 * (see MATERIALIZEDVIEW_REFRESHSCHEME_SUPPORT_UNIT_IDENTIFIERS). A missing
 * interval, unit, or empty unit description is silently accepted.
 *
 * @throws ParsingException when the unit is present but unsupported
 */
private void checkMaterializedViewAsyncRefreshSchemeUnitIdentifier(
        AsyncRefreshSchemeDesc asyncRefreshSchemeDesc) {
    // Nothing to validate without an interval and a unit.
    if (asyncRefreshSchemeDesc.getIntervalLiteral() == null ||
            asyncRefreshSchemeDesc.getIntervalLiteral().getUnitIdentifier() == null) {
        return;
    }
    String unit = asyncRefreshSchemeDesc.getIntervalLiteral().getUnitIdentifier().getDescription();
    if (StringUtils.isEmpty(unit)) {
        return;
    }
    if (!MATERIALIZEDVIEW_REFRESHSCHEME_SUPPORT_UNIT_IDENTIFIERS.contains(unit)) {
        throw new ParsingException(PARSER_ERROR_MSG.forbidClauseInMV("Refresh interval unit", unit),
                asyncRefreshSchemeDesc.getIntervalLiteral().getUnitIdentifier().getPos());
    }
}
/**
 * CREATE MATERIALIZED VIEW. Parses the optional clause list (properties,
 * refresh scheme, partition expression, distribution, order by), applies the
 * refresh-scheme defaulting rules, and dispatches to either the legacy
 * sync-MV statement or the async/manual MV statement.
 */
@Override
public ParseNode visitCreateMaterializedViewStatement(
        StarRocksParser.CreateMaterializedViewStatementContext context) {
    boolean ifNotExist = context.IF() != null;
    QualifiedName qualifiedName = getQualifiedName(context.mvName);
    TableName tableName = qualifiedNameToTableName(qualifiedName);
    // Optional explicit column list with per-column comments.
    List<ColWithComment> colWithComments = null;
    if (!context.columnNameWithComment().isEmpty()) {
        colWithComments = visit(context.columnNameWithComment(), ColWithComment.class);
    }
    String comment =
            context.comment() == null ? null : ((StringLiteral) visit(context.comment().string())).getStringValue();
    QueryStatement queryStatement = (QueryStatement) visit(context.queryStatement());
    RefreshSchemeClause refreshSchemeDesc = null;
    Map<String, String> properties = new HashMap<>();
    ExpressionPartitionDesc expressionPartitionDesc = null;
    DistributionDesc distributionDesc = null;
    List<String> sortKeys = null;
    // Each clause kind may appear at most once; duplicates raise an error.
    for (StarRocksParser.MaterializedViewDescContext desc : ListUtils.emptyIfNull(context.materializedViewDesc())) {
        NodePosition clausePos = createPos(desc);
        if (desc.properties() != null) {
            if (MapUtils.isNotEmpty(properties)) {
                throw new ParsingException(PARSER_ERROR_MSG.duplicatedClause("PROPERTY"), clausePos);
            }
            List<Property> propertyList = visit(desc.properties().property(), Property.class);
            for (Property property : propertyList) {
                properties.put(property.getKey(), property.getValue());
            }
        }
        if (desc.refreshSchemeDesc() != null) {
            if (refreshSchemeDesc != null) {
                throw new ParsingException(PARSER_ERROR_MSG.duplicatedClause("REFRESH"), clausePos);
            }
            refreshSchemeDesc = ((RefreshSchemeClause) visit(desc.refreshSchemeDesc()));
        }
        if (desc.primaryExpression() != null) {
            if (expressionPartitionDesc != null) {
                throw new ParsingException(PARSER_ERROR_MSG.duplicatedClause("PARTITION"), clausePos);
            }
            // PARTITION BY accepts a bare column ref or a validated function call.
            Expr expr = (Expr) visit(desc.primaryExpression());
            if (expr instanceof SlotRef) {
                expressionPartitionDesc = new ExpressionPartitionDesc(expr);
            } else if (expr instanceof FunctionCallExpr) {
                AnalyzerUtils.checkAndExtractPartitionCol((FunctionCallExpr) expr, null);
                expressionPartitionDesc = new ExpressionPartitionDesc(expr);
            } else {
                throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(), "PARTITION BY"),
                        expr.getPos());
            }
        }
        if (desc.distributionDesc() != null) {
            if (distributionDesc != null) {
                throw new ParsingException(PARSER_ERROR_MSG.duplicatedClause("DISTRIBUTION"), clausePos);
            }
            distributionDesc = (DistributionDesc) visit(desc.distributionDesc());
        }
        if (desc.orderByDesc() != null) {
            sortKeys = visit(desc.orderByDesc().identifierList().identifier(), Identifier.class)
                    .stream().map(Identifier::getValue).collect(toList());
        }
    }
    // Default refresh scheme: SYNC when no distribution was given, MANUAL otherwise.
    if (refreshSchemeDesc == null) {
        if (distributionDesc == null) {
            refreshSchemeDesc = new SyncRefreshSchemeDesc();
        } else {
            refreshSchemeDesc =
                    new ManualRefreshSchemeDesc(MaterializedView.RefreshMoment.IMMEDIATE, NodePosition.ZERO);
        }
    }
    // SYNC (legacy) MVs accept neither PARTITION BY nor DISTRIBUTED BY.
    if (refreshSchemeDesc instanceof SyncRefreshSchemeDesc) {
        if (expressionPartitionDesc != null) {
            throw new ParsingException(PARSER_ERROR_MSG.forbidClauseInMV("SYNC refresh type", "PARTITION BY"),
                    expressionPartitionDesc.getPos());
        }
        if (distributionDesc != null) {
            throw new ParsingException(PARSER_ERROR_MSG.forbidClauseInMV("SYNC refresh type", "DISTRIBUTION BY"),
                    distributionDesc.getPos());
        }
        return new CreateMaterializedViewStmt(tableName, queryStatement, properties);
    }
    // ASYNC refresh intervals allow only a restricted set of time units.
    if (refreshSchemeDesc instanceof AsyncRefreshSchemeDesc) {
        AsyncRefreshSchemeDesc asyncRefreshSchemeDesc = (AsyncRefreshSchemeDesc) refreshSchemeDesc;
        checkMaterializedViewAsyncRefreshSchemeUnitIdentifier(asyncRefreshSchemeDesc);
    }
    // Non-sync MVs are gated behind the experimental-MV FE config flag.
    if (!Config.enable_experimental_mv) {
        throw new ParsingException(PARSER_ERROR_MSG.feConfigDisable("enable_experimental_mv"), NodePosition.ZERO);
    }
    return new CreateMaterializedViewStatement(tableName, ifNotExist, colWithComments,
            context.indexDesc() == null ? null : getIndexDefs(context.indexDesc()),
            comment,
            refreshSchemeDesc,
            expressionPartitionDesc, distributionDesc, sortKeys, properties, queryStatement, createPos(context));
}
/**
 * SHOW MATERIALIZED VIEWS [FROM db] [LIKE pattern | WHERE expr].
 */
@Override
public ParseNode visitShowMaterializedViewsStatement(
        StarRocksParser.ShowMaterializedViewsStatementContext context) {
    NodePosition pos = createPos(context);
    // Optional FROM <db>.
    String database = context.qualifiedName() == null ? null
            : getQualifiedName(context.qualifiedName()).toString();
    if (context.pattern != null) {
        String pattern = ((StringLiteral) visit(context.pattern)).getValue();
        return new ShowMaterializedViewsStmt(database, pattern, null, pos);
    }
    if (context.expression() != null) {
        return new ShowMaterializedViewsStmt(database, null, (Expr) visit(context.expression()), pos);
    }
    return new ShowMaterializedViewsStmt(database, null, null, pos);
}
@Override
public ParseNode visitDropMaterializedViewStatement(StarRocksParser.DropMaterializedViewStatementContext context) {
    // DROP MATERIALIZED VIEW [IF EXISTS] <name>.
    TableName mvName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    boolean ifExists = context.IF() != null;
    return new DropMaterializedViewStmt(ifExists, mvName, createPos(context));
}
@Override
public ParseNode visitAlterMaterializedViewStatement(
        StarRocksParser.AlterMaterializedViewStatementContext context) {
    // Builds ALTER MATERIALIZED VIEW <name> <clause>. Exactly one clause kind is
    // expected; the checks below run in sequence and the last matching one wins,
    // so the order of these if-blocks is significant.
    QualifiedName mvQualifiedName = getQualifiedName(context.qualifiedName());
    TableName mvName = qualifiedNameToTableName(mvQualifiedName);
    AlterTableClause alterTableClause = null;
    // RENAME <new_name>
    if (context.tableRenameClause() != null) {
        alterTableClause = (TableRenameClause) visit(context.tableRenameClause());
    }
    // REFRESH <scheme>; ASYNC schemes get an extra interval-unit validation.
    if (context.refreshSchemeDesc() != null) {
        alterTableClause = ((RefreshSchemeClause) visit(context.refreshSchemeDesc()));
        if (alterTableClause instanceof AsyncRefreshSchemeDesc) {
            AsyncRefreshSchemeDesc asyncRefreshSchemeDesc = (AsyncRefreshSchemeDesc) alterTableClause;
            checkMaterializedViewAsyncRefreshSchemeUnitIdentifier(asyncRefreshSchemeDesc);
        }
    }
    // SET ("key" = "value", ...)
    if (context.modifyPropertiesClause() != null) {
        alterTableClause = (ModifyTablePropertiesClause) visit(context.modifyPropertiesClause());
    }
    // ACTIVE / INACTIVE — the raw token text is passed through for later validation.
    if (context.statusDesc() != null) {
        String status = context.statusDesc().getText();
        alterTableClause = new AlterMaterializedViewStatusClause(status, createPos(context));
    }
    // SWAP WITH <other>
    if (context.swapTableClause() != null) {
        alterTableClause = (SwapTableClause) visit(context.swapTableClause());
    }
    return new AlterMaterializedViewStmt(mvName, alterTableClause, createPos(context));
}
@Override
public ParseNode visitRefreshMaterializedViewStatement(
        StarRocksParser.RefreshMaterializedViewStatementContext context) {
    // REFRESH MATERIALIZED VIEW <name> [PARTITION (range)] [FORCE] [WITH SYNC MODE].
    TableName mvName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    PartitionRangeDesc rangeDesc = context.partitionRangeDesc() == null
            ? null
            : (PartitionRangeDesc) visit(context.partitionRangeDesc());
    boolean force = context.FORCE() != null;
    boolean sync = context.SYNC() != null;
    return new RefreshMaterializedViewStatement(mvName, rangeDesc, force, sync, createPos(context));
}
@Override
public ParseNode visitCancelRefreshMaterializedViewStatement(
        StarRocksParser.CancelRefreshMaterializedViewStatementContext context) {
    // CANCEL REFRESH MATERIALIZED VIEW <name>.
    TableName mvName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    return new CancelRefreshMaterializedViewStmt(mvName, createPos(context));
}
@Override
public ParseNode visitCreateExternalCatalogStatement(
        StarRocksParser.CreateExternalCatalogStatementContext context) {
    // CREATE EXTERNAL CATALOG <name> [COMMENT ...] PROPERTIES (...).
    String catalogName = ((Identifier) visit(context.identifierOrString())).getValue();
    String comment = context.comment() == null
            ? null
            : ((StringLiteral) visit(context.comment())).getStringValue();
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        visit(context.properties().property(), Property.class)
                .forEach(p -> properties.put(p.getKey(), p.getValue()));
    }
    return new CreateCatalogStmt(catalogName, comment, properties, createPos(context));
}
@Override
public ParseNode visitDropExternalCatalogStatement(StarRocksParser.DropExternalCatalogStatementContext context) {
    // DROP CATALOG <name>.
    String catalogName = ((Identifier) visit(context.catalogName)).getValue();
    return new DropCatalogStmt(catalogName, createPos(context));
}
@Override
public ParseNode visitShowCreateExternalCatalogStatement(
        StarRocksParser.ShowCreateExternalCatalogStatementContext context) {
    // SHOW CREATE CATALOG <name>.
    String catalogName = ((Identifier) visit(context.catalogName)).getValue();
    return new ShowCreateExternalCatalogStmt(catalogName, createPos(context));
}
@Override
public ParseNode visitShowCatalogsStatement(StarRocksParser.ShowCatalogsStatementContext context) {
    // SHOW CATALOGS takes no arguments; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowCatalogsStmt(pos);
}
@Override
public ParseNode visitAlterCatalogStatement(StarRocksParser.AlterCatalogStatementContext context) {
    // ALTER CATALOG <name> SET PROPERTIES (...).
    String catalogName = ((Identifier) visit(context.catalogName)).getValue();
    AlterClause clause = (AlterClause) visit(context.modifyPropertiesClause());
    return new AlterCatalogStmt(catalogName, clause, createPos(context));
}
@Override
public ParseNode visitShowWarehousesStatement(StarRocksParser.ShowWarehousesStatementContext context) {
    // SHOW WAREHOUSES [LIKE 'pattern' | WHERE expr].
    String pattern = context.pattern == null
            ? null
            : ((StringLiteral) visit(context.pattern)).getValue();
    Expr where = context.expression() == null ? null : (Expr) visit(context.expression());
    return new ShowWarehousesStmt(pattern, where, createPos(context));
}
@Override
public ParseNode visitInsertStatement(StarRocksParser.InsertStatementContext context) {
    // Builds an INSERT. The source is either an inline VALUES list or a query;
    // the target is a named table, BLACKHOLE(), or a table function with properties.
    QueryStatement queryStatement;
    if (context.VALUES() != null) {
        // Wrap VALUES rows in a ValuesRelation with synthetic column names
        // ("column_0", "column_1", ...) derived from the first row's arity.
        List<ValueList> rowValues = visit(context.expressionsWithDefault(), ValueList.class);
        List<List<Expr>> rows = rowValues.stream().map(ValueList::getRow).collect(toList());
        List<String> colNames = new ArrayList<>();
        for (int i = 0; i < rows.get(0).size(); ++i) {
            colNames.add("column_" + i);
        }
        queryStatement = new QueryStatement(new ValuesRelation(rows, colNames,
                createPos(context.VALUES().getSymbol(), context.stop)));
    } else {
        queryStatement = (QueryStatement) visit(context.queryStatement());
    }
    if (context.explainDesc() != null) {
        queryStatement.setIsExplain(true, getExplainType(context.explainDesc()));
    }
    // Target case 1: INSERT INTO/OVERWRITE <table> [PARTITION ...] [label] [(cols)].
    if (context.qualifiedName() != null) {
        QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
        TableName targetTableName = qualifiedNameToTableName(qualifiedName);
        PartitionNames partitionNames = null;
        if (context.partitionNames() != null) {
            partitionNames = (PartitionNames) visit(context.partitionNames());
        }
        InsertStmt stmt = new InsertStmt(targetTableName, partitionNames,
                context.label == null ? null : ((Identifier) visit(context.label)).getValue(),
                getColumnNames(context.columnAliases()), queryStatement, context.OVERWRITE() != null,
                createPos(context));
        stmt.setHintNodes(hintMap.get(context));
        return stmt;
    }
    // Target case 2: INSERT INTO BLACKHOLE() — discard the query result.
    if (context.BLACKHOLE() != null) {
        return new InsertStmt(queryStatement, createPos(context));
    }
    // Target case 3: table-function sink configured via a property list.
    Map<String, String> tableFunctionProperties = getPropertyList(context.propertyList());
    InsertStmt res = new InsertStmt(tableFunctionProperties, queryStatement, createPos(context));
    res.setHintNodes(hintMap.get(context));
    return res;
}
@Override
public ParseNode visitUpdateStatement(StarRocksParser.UpdateStatementContext context) {
    // Builds UPDATE <table> SET ... [FROM ...] [WHERE ...], optionally under a WITH clause.
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName targetTableName = qualifiedNameToTableName(qualifiedName);
    List<ColumnAssignment> assignments = visit(context.assignmentList().assignment(), ColumnAssignment.class);
    List<Relation> fromRelations = null;
    // The fromClause rule has two alternatives: an implicit DUAL (no FROM) and an
    // explicit FROM relation list; they are distinguished by the context subtype.
    if (context.fromClause() instanceof StarRocksParser.DualContext) {
        ValuesRelation valuesRelation = ValuesRelation.newDualRelation(createPos(context.fromClause()));
        fromRelations = Lists.newArrayList(valuesRelation);
    } else {
        StarRocksParser.FromContext fromContext = (StarRocksParser.FromContext) context.fromClause();
        if (fromContext.relations() != null) {
            fromRelations = visit(fromContext.relations().relation(), Relation.class);
        }
    }
    Expr where = context.where != null ? (Expr) visit(context.where) : null;
    List<CTERelation> ctes = null;
    if (context.withClause() != null) {
        ctes = visit(context.withClause().commonTableExpression(), CTERelation.class);
    }
    UpdateStmt ret = new UpdateStmt(targetTableName, assignments, fromRelations, where, ctes, createPos(context));
    if (context.explainDesc() != null) {
        ret.setIsExplain(true, getExplainType(context.explainDesc()));
        // EXPLAIN ANALYZE is rejected for UPDATE.
        if (StatementBase.ExplainLevel.ANALYZE.equals(ret.getExplainLevel())) {
            throw new ParsingException(PARSER_ERROR_MSG.unsupportedOp("analyze"));
        }
    }
    ret.setHintNodes(hintMap.get(context));
    return ret;
}
@Override
public ParseNode visitDeleteStatement(StarRocksParser.DeleteStatementContext context) {
    // DELETE FROM <table> [PARTITION ...] [USING ...] [WHERE ...], optionally under a WITH clause.
    TableName target = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    PartitionNames partitions =
            context.partitionNames() == null ? null : (PartitionNames) visit(context.partitionNames());
    List<Relation> usingRelations =
            context.using == null ? null : visit(context.using.relation(), Relation.class);
    Expr whereClause = context.where == null ? null : (Expr) visit(context.where);
    List<CTERelation> ctes = context.withClause() == null
            ? null
            : visit(context.withClause().commonTableExpression(), CTERelation.class);
    DeleteStmt stmt = new DeleteStmt(target, partitions, usingRelations, whereClause, ctes, createPos(context));
    if (context.explainDesc() != null) {
        stmt.setIsExplain(true, getExplainType(context.explainDesc()));
        // EXPLAIN ANALYZE is rejected for DELETE.
        if (StatementBase.ExplainLevel.ANALYZE.equals(stmt.getExplainLevel())) {
            throw new ParsingException(PARSER_ERROR_MSG.unsupportedOp("analyze"));
        }
    }
    stmt.setHintNodes(hintMap.get(context));
    return stmt;
}
@Override
public ParseNode visitCreateRoutineLoadStatement(StarRocksParser.CreateRoutineLoadStatementContext context) {
    // CREATE ROUTINE LOAD [db.]name [ON table] <load properties> PROPERTIES(...) FROM <source> (...).
    QualifiedName table = context.table == null ? null : getQualifiedName(context.table);
    List<ParseNode> loadProperties = getLoadPropertyList(context.loadProperties());
    Map<String, String> jobProperties = getJobProperties(context.jobProperties());
    Map<String, String> sourceProperties = getDataSourceProperties(context.dataSourceProperties());
    return new CreateRoutineLoadStmt(createLabelName(context.db, context.name),
            table == null ? null : table.toString(), loadProperties, jobProperties,
            context.source.getText(), sourceProperties, createPos(context));
}
@Override
public ParseNode visitShowCreateRoutineLoadStatement(
        StarRocksParser.ShowCreateRoutineLoadStatementContext context) {
    // SHOW CREATE ROUTINE LOAD [db.]name.
    LabelName label = createLabelName(context.db, context.name);
    return new ShowCreateRoutineLoadStmt(label);
}
@Override
public ParseNode visitAlterRoutineLoadStatement(StarRocksParser.AlterRoutineLoadStatementContext context) {
    // ALTER ROUTINE LOAD [db.]name <load properties> PROPERTIES(...) [FROM <source> (...)].
    NodePosition pos = createPos(context);
    List<ParseNode> loadProperties = getLoadPropertyList(context.loadProperties());
    Map<String, String> jobProperties = getJobProperties(context.jobProperties());
    if (context.dataSource() == null) {
        // No FROM clause: pass an empty data-source descriptor.
        return new AlterRoutineLoadStmt(createLabelName(context.db, context.name),
                loadProperties, jobProperties, new RoutineLoadDataSourceProperties(), pos);
    }
    Map<String, String> sourceProperties =
            getDataSourceProperties(context.dataSource().dataSourceProperties());
    RoutineLoadDataSourceProperties dataSource = new RoutineLoadDataSourceProperties(
            context.dataSource().source.getText(), sourceProperties, createPos(context.dataSource()));
    return new AlterRoutineLoadStmt(createLabelName(context.db, context.name),
            loadProperties, jobProperties, dataSource, pos);
}
@Override
public ParseNode visitAlterLoadStatement(StarRocksParser.AlterLoadStatementContext context) {
    // ALTER LOAD FOR [db.]label PROPERTIES(...).
    LabelName label = createLabelName(context.db, context.name);
    return new AlterLoadStmt(label, getJobProperties(context.jobProperties()), createPos(context));
}
@Override
public ParseNode visitStopRoutineLoadStatement(StarRocksParser.StopRoutineLoadStatementContext context) {
    // STOP ROUTINE LOAD FOR [db.]name.
    LabelName label = createLabelName(context.db, context.name);
    return new StopRoutineLoadStmt(label, createPos(context));
}
@Override
public ParseNode visitResumeRoutineLoadStatement(StarRocksParser.ResumeRoutineLoadStatementContext context) {
    // RESUME ROUTINE LOAD FOR [db.]name.
    LabelName label = createLabelName(context.db, context.name);
    return new ResumeRoutineLoadStmt(label, createPos(context));
}
@Override
public ParseNode visitPauseRoutineLoadStatement(StarRocksParser.PauseRoutineLoadStatementContext context) {
    // PAUSE ROUTINE LOAD FOR [db.]name.
    LabelName label = createLabelName(context.db, context.name);
    return new PauseRoutineLoadStmt(label, createPos(context));
}
@Override
public ParseNode visitShowRoutineLoadStatement(StarRocksParser.ShowRoutineLoadStatementContext context) {
    /*
     * SHOW [ALL] ROUTINE LOAD [FOR [db.]name] [WHERE ...] [ORDER BY ...] [LIMIT ...].
     * Fix: removed the local `String database = null;` which was declared but never read.
     */
    boolean isVerbose = context.ALL() != null;
    Expr where = null;
    if (context.expression() != null) {
        where = (Expr) visit(context.expression());
    }
    List<OrderByElement> orderByElements = null;
    if (context.ORDER() != null) {
        orderByElements = new ArrayList<>();
        orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limitElement = null;
    if (context.limitElement() != null) {
        limitElement = (LimitElement) visit(context.limitElement());
    }
    return new ShowRoutineLoadStmt(createLabelName(context.db, context.name), isVerbose, where, orderByElements,
            limitElement, createPos(context));
}
@Override
public ParseNode visitShowRoutineLoadTaskStatement(StarRocksParser.ShowRoutineLoadTaskStatementContext context) {
    // SHOW ROUTINE LOAD TASK [FROM db] [WHERE expr].
    QualifiedName dbName = context.db == null ? null : getQualifiedName(context.db);
    Expr where = context.expression() == null ? null : (Expr) visit(context.expression());
    return new ShowRoutineLoadTaskStmt(dbName == null ? null : dbName.toString(), where, createPos(context));
}
@Override
public ParseNode visitShowStreamLoadStatement(StarRocksParser.ShowStreamLoadStatementContext context) {
    /*
     * SHOW [ALL] STREAM LOAD [FOR [db.]name] [WHERE ...] [ORDER BY ...] [LIMIT ...].
     * Fix: removed the local `String database = null;` which was declared but never read.
     */
    boolean isVerbose = context.ALL() != null;
    Expr where = null;
    if (context.expression() != null) {
        where = (Expr) visit(context.expression());
    }
    List<OrderByElement> orderByElements = null;
    if (context.ORDER() != null) {
        orderByElements = new ArrayList<>();
        orderByElements.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limitElement = null;
    if (context.limitElement() != null) {
        limitElement = (LimitElement) visit(context.limitElement());
    }
    return new ShowStreamLoadStmt(createLabelName(context.db, context.name), isVerbose, where, orderByElements,
            limitElement, createPos(context));
}
@Override
public ParseNode visitAdminSetConfigStatement(StarRocksParser.AdminSetConfigStatementContext context) {
    // ADMIN SET FRONTEND CONFIG ("key" = "value").
    Property config = (Property) visitProperty(context.property());
    NodePosition pos = createPos(context);
    return new AdminSetConfigStmt(AdminSetConfigStmt.ConfigType.FRONTEND, config, pos);
}
@Override
public ParseNode visitAdminSetReplicaStatusStatement(
        StarRocksParser.AdminSetReplicaStatusStatementContext context) {
    // ADMIN SET REPLICA STATUS PROPERTIES(...).
    List<Property> props = visit(context.properties().property(), Property.class);
    PropertySet propertySet = new PropertySet(props, createPos(context.properties()));
    return new AdminSetReplicaStatusStmt(propertySet, createPos(context));
}
@Override
public ParseNode visitAdminShowConfigStatement(StarRocksParser.AdminShowConfigStatementContext context) {
    // ADMIN SHOW FRONTEND CONFIG [LIKE 'pattern'].
    String pattern = context.pattern == null
            ? null
            : ((StringLiteral) visit(context.pattern)).getValue();
    return new AdminShowConfigStmt(AdminSetConfigStmt.ConfigType.FRONTEND, pattern, createPos(context));
}
@Override
public ParseNode visitAdminShowReplicaDistributionStatement(
        StarRocksParser.AdminShowReplicaDistributionStatementContext context) {
    // ADMIN SHOW REPLICA DISTRIBUTION FROM <table> [PARTITION ...];
    // the table-ref position spans the name plus any partition list.
    Token start = context.qualifiedName().start;
    Token stop = context.qualifiedName().stop;
    TableName target = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    PartitionNames partitions = null;
    if (context.partitionNames() != null) {
        stop = context.partitionNames().stop;
        partitions = (PartitionNames) visit(context.partitionNames());
    }
    TableRef tableRef = new TableRef(target, null, partitions, createPos(start, stop));
    return new AdminShowReplicaDistributionStmt(tableRef, createPos(context));
}
@Override
public ParseNode visitAdminShowReplicaStatusStatement(
        StarRocksParser.AdminShowReplicaStatusStatementContext context) {
    // ADMIN SHOW REPLICA STATUS FROM <table> [PARTITION ...] [WHERE ...];
    // the table-ref position spans the name plus any partition list.
    Token start = context.qualifiedName().start;
    Token stop = context.qualifiedName().stop;
    TableName target = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    Expr where = context.where == null ? null : (Expr) visit(context.where);
    PartitionNames partitions = null;
    if (context.partitionNames() != null) {
        stop = context.partitionNames().stop;
        partitions = (PartitionNames) visit(context.partitionNames());
    }
    TableRef tableRef = new TableRef(target, null, partitions, createPos(start, stop));
    return new AdminShowReplicaStatusStmt(tableRef, where, createPos(context));
}
@Override
public ParseNode visitAdminRepairTableStatement(StarRocksParser.AdminRepairTableStatementContext context) {
    // ADMIN REPAIR TABLE <table> [PARTITION ...];
    // the table-ref position spans the name plus any partition list.
    Token start = context.qualifiedName().start;
    Token stop = context.qualifiedName().stop;
    TableName target = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    PartitionNames partitions = null;
    if (context.partitionNames() != null) {
        stop = context.partitionNames().stop;
        partitions = (PartitionNames) visit(context.partitionNames());
    }
    TableRef tableRef = new TableRef(target, null, partitions, createPos(start, stop));
    return new AdminRepairTableStmt(tableRef, createPos(context));
}
@Override
public ParseNode visitAdminCancelRepairTableStatement(
        StarRocksParser.AdminCancelRepairTableStatementContext context) {
    // ADMIN CANCEL REPAIR TABLE <table> [PARTITION ...];
    // the table-ref position spans the name plus any partition list.
    Token start = context.qualifiedName().start;
    Token stop = context.qualifiedName().stop;
    TableName target = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    PartitionNames partitions = null;
    if (context.partitionNames() != null) {
        stop = context.partitionNames().stop;
        partitions = (PartitionNames) visit(context.partitionNames());
    }
    TableRef tableRef = new TableRef(target, null, partitions, createPos(start, stop));
    return new AdminCancelRepairTableStmt(tableRef, createPos(context));
}
@Override
public ParseNode visitAdminCheckTabletsStatement(StarRocksParser.AdminCheckTabletsStatementContext context) {
    // ADMIN CHECK TABLET (id [, ...]) PROPERTIES(...).
    List<Long> tabletIds = Lists.newArrayList();
    if (context.tabletList() != null) {
        for (ParseTree node : context.tabletList().INTEGER_VALUE()) {
            tabletIds.add(Long.parseLong(node.getText()));
        }
    }
    return new AdminCheckTabletsStmt(tabletIds, (Property) visitProperty(context.property()), createPos(context));
}
@Override
public ParseNode visitKillStatement(StarRocksParser.KillStatementContext context) {
    // KILL [QUERY] <connection_id>; without QUERY the whole connection is killed.
    long id = Long.parseLong(context.INTEGER_VALUE().getText());
    boolean killConnection = context.QUERY() == null;
    return new KillStmt(killConnection, id, createPos(context));
}
@Override
public ParseNode visitSyncStatement(StarRocksParser.SyncStatementContext context) {
    // SYNC takes no arguments; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new SyncStmt(pos);
}
@Override
public ParseNode visitAlterSystemStatement(StarRocksParser.AlterSystemStatementContext context) {
    // ALTER SYSTEM <clause>.
    AlterClause clause = (AlterClause) visit(context.alterClause());
    return new AlterSystemStmt(clause, createPos(context));
}
@Override
public ParseNode visitCancelAlterSystemStatement(StarRocksParser.CancelAlterSystemStatementContext context) {
    // CANCEL DECOMMISSION BACKEND "host:port" [, ...].
    List<String> hostPorts = new ArrayList<>();
    for (StringLiteral literal : visit(context.string(), StringLiteral.class)) {
        hostPorts.add(literal.getValue());
    }
    return new CancelAlterSystemStmt(hostPorts, createPos(context));
}
@Override
public ParseNode visitShowComputeNodesStatement(StarRocksParser.ShowComputeNodesStatementContext context) {
    // SHOW COMPUTE NODES takes no arguments; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowComputeNodesStmt(pos);
}
@Override
public ParseNode visitAnalyzeStatement(StarRocksParser.AnalyzeStatementContext context) {
    // ANALYZE [FULL|SAMPLE] TABLE <name> [(cols)] [ASYNC] [PROPERTIES(...)] — basic stats collection.
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
    List<String> columnNames =
            columns == null ? null : columns.stream().map(Identifier::getValue).collect(toList());
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        visit(context.properties().property(), Property.class)
                .forEach(p -> properties.put(p.getKey(), p.getValue()));
    }
    return new AnalyzeStmt(tableName, columnNames, properties,
            context.SAMPLE() != null,
            context.ASYNC() != null,
            new AnalyzeBasicDesc(), createPos(context));
}
@Override
public ParseNode visitDropStatsStatement(StarRocksParser.DropStatsStatementContext context) {
    // DROP STATS <table>.
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    return new DropStatsStmt(tableName, createPos(context));
}
@Override
public ParseNode visitCreateAnalyzeStatement(StarRocksParser.CreateAnalyzeStatementContext context) {
    // CREATE ANALYZE [FULL|SAMPLE] {ALL | DATABASE db | TABLE tbl [(cols)]} [PROPERTIES(...)].
    NodePosition pos = createPos(context);
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        visit(context.properties().property(), Property.class)
                .forEach(p -> properties.put(p.getKey(), p.getValue()));
    }
    if (context.DATABASE() != null) {
        String dbName = ((Identifier) visit(context.db)).getValue();
        return new CreateAnalyzeJobStmt(dbName, context.FULL() == null, properties, pos);
    }
    if (context.TABLE() != null) {
        TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
        List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
        List<String> columnNames =
                columns == null ? null : columns.stream().map(Identifier::getValue).collect(toList());
        return new CreateAnalyzeJobStmt(tableName, columnNames, context.SAMPLE() != null, properties, pos);
    }
    // Neither DATABASE nor TABLE: job covers all databases.
    return new CreateAnalyzeJobStmt(context.FULL() == null, properties, pos);
}
@Override
public ParseNode visitDropAnalyzeJobStatement(StarRocksParser.DropAnalyzeJobStatementContext context) {
    // DROP ANALYZE <job_id>.
    long jobId = Long.parseLong(context.INTEGER_VALUE().getText());
    return new DropAnalyzeJobStmt(jobId, createPos(context));
}
@Override
public ParseNode visitShowAnalyzeStatement(StarRocksParser.ShowAnalyzeStatementContext context) {
    /*
     * SHOW ANALYZE {STATUS | JOB} [WHERE expr].
     * Fix: the original `else if (context.JOB() != null)` and trailing `else`
     * branches both returned an identical ShowAnalyzeJobStmt, so the redundant
     * branch is merged away (behavior unchanged).
     */
    NodePosition pos = createPos(context);
    Predicate predicate = null;
    if (context.expression() != null) {
        predicate = (Predicate) visit(context.expression());
    }
    if (context.STATUS() != null) {
        return new ShowAnalyzeStatusStmt(predicate, pos);
    }
    return new ShowAnalyzeJobStmt(predicate, pos);
}
@Override
public ParseNode visitShowStatsMetaStatement(StarRocksParser.ShowStatsMetaStatementContext context) {
    // SHOW STATS META [WHERE expr].
    Predicate filter = context.expression() == null ? null : (Predicate) visit(context.expression());
    return new ShowBasicStatsMetaStmt(filter, createPos(context));
}
@Override
public ParseNode visitShowHistogramMetaStatement(StarRocksParser.ShowHistogramMetaStatementContext context) {
    // SHOW HISTOGRAM META [WHERE expr].
    Predicate filter = context.expression() == null ? null : (Predicate) visit(context.expression());
    return new ShowHistogramStatsMetaStmt(filter, createPos(context));
}
@Override
public ParseNode visitAnalyzeHistogramStatement(StarRocksParser.AnalyzeHistogramStatementContext context) {
    // ANALYZE TABLE <name> UPDATE HISTOGRAM ON cols [WITH n BUCKETS] [PROPERTIES(...)].
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
    List<String> columnNames =
            columns == null ? null : columns.stream().map(Identifier::getValue).collect(toList());
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        visit(context.properties().property(), Property.class)
                .forEach(p -> properties.put(p.getKey(), p.getValue()));
    }
    // Fall back to the configured default bucket count when WITH ... BUCKETS is absent.
    long buckets = context.bucket != null
            ? Long.parseLong(context.bucket.getText())
            : Config.histogram_buckets_size;
    return new AnalyzeStmt(tableName, columnNames, properties, true,
            context.ASYNC() != null, new AnalyzeHistogramDesc(buckets), createPos(context));
}
@Override
public ParseNode visitDropHistogramStatement(StarRocksParser.DropHistogramStatementContext context) {
    // ANALYZE TABLE <name> DROP HISTOGRAM ON cols.
    TableName tableName = qualifiedNameToTableName(getQualifiedName(context.qualifiedName()));
    List<Identifier> columns = visitIfPresent(context.identifier(), Identifier.class);
    List<String> columnNames =
            columns == null ? null : columns.stream().map(Identifier::getValue).collect(toList());
    return new DropHistogramStmt(tableName, columnNames, createPos(context));
}
@Override
public ParseNode visitKillAnalyzeStatement(StarRocksParser.KillAnalyzeStatementContext context) {
    // KILL ANALYZE <analyze_id>.
    long analyzeId = Long.parseLong(context.INTEGER_VALUE().getText());
    return new KillAnalyzeStmt(analyzeId, createPos(context));
}
@Override
public ParseNode visitAnalyzeProfileStatement(StarRocksParser.AnalyzeProfileStatementContext context) {
    // ANALYZE PROFILE FROM '<query_id>' [, plan_node_id ...].
    String queryId = ((StringLiteral) visit(context.string())).getStringValue();
    List<Integer> planNodeIds = Lists.newArrayList();
    if (context.INTEGER_VALUE() != null) {
        for (ParseTree node : context.INTEGER_VALUE()) {
            planNodeIds.add(Integer.parseInt(node.getText()));
        }
    }
    return new AnalyzeProfileStmt(queryId, planNodeIds, createPos(context));
}
@Override
public ParseNode visitCreateResourceGroupStatement(StarRocksParser.CreateResourceGroupStatementContext context) {
    // CREATE [OR REPLACE] RESOURCE GROUP [IF NOT EXISTS] <name> TO (classifiers) WITH (properties).
    // Fix: added the missing @Override annotation on this visitor override.
    Identifier identifier = (Identifier) visit(context.identifier());
    String name = identifier.getValue();
    // Each classifier is a parenthesized predicate list; collect one list per classifier.
    List<List<Predicate>> predicatesList = new ArrayList<>();
    for (StarRocksParser.ClassifierContext classifierContext : context.classifier()) {
        List<Predicate> p = visit(classifierContext.expressionList().expression(), Predicate.class);
        predicatesList.add(p);
    }
    Map<String, String> properties = new HashMap<>();
    List<Property> propertyList = visit(context.property(), Property.class);
    for (Property property : propertyList) {
        properties.put(property.getKey(), property.getValue());
    }
    return new CreateResourceGroupStmt(name,
            context.EXISTS() != null,
            context.REPLACE() != null,
            predicatesList,
            properties, createPos(context));
}
@Override
public ParseNode visitDropResourceGroupStatement(StarRocksParser.DropResourceGroupStatementContext context) {
    // DROP RESOURCE GROUP <name>.
    String name = ((Identifier) visit(context.identifier())).getValue();
    return new DropResourceGroupStmt(name, createPos(context));
}
@Override
public ParseNode visitAlterResourceGroupStatement(StarRocksParser.AlterResourceGroupStatementContext context) {
    // Builds ALTER RESOURCE GROUP <name> with one of three mutually exclusive actions:
    // ADD classifiers, DROP (ALL | classifier ids), or a bare property alteration.
    Identifier identifier = (Identifier) visit(context.identifier());
    String name = identifier.getValue();
    NodePosition pos = createPos(context);
    if (context.ADD() != null) {
        // ADD (classifier), (classifier)... — one predicate list per classifier.
        List<List<Predicate>> predicatesList = new ArrayList<>();
        for (StarRocksParser.ClassifierContext classifierContext : context.classifier()) {
            List<Predicate> p = visit(classifierContext.expressionList().expression(), Predicate.class);
            predicatesList.add(p);
        }
        return new AlterResourceGroupStmt(name, new AlterResourceGroupStmt.AddClassifiers(predicatesList), pos);
    } else if (context.DROP() != null) {
        if (context.ALL() != null) {
            // DROP ALL — remove every classifier.
            return new AlterResourceGroupStmt(name, new AlterResourceGroupStmt.DropAllClassifiers(), pos);
        } else {
            // DROP (id, id, ...) — remove classifiers by numeric id.
            return new AlterResourceGroupStmt(name,
                    new AlterResourceGroupStmt.DropClassifiers(context.INTEGER_VALUE()
                            .stream().map(ParseTree::getText).map(Long::parseLong).collect(toList())), pos);
        }
    } else {
        // WITH (...) — alter the group's properties.
        Map<String, String> properties = new HashMap<>();
        List<Property> propertyList = visit(context.property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
        return new AlterResourceGroupStmt(name, new AlterResourceGroupStmt.AlterProperties(properties), pos);
    }
}
@Override
public ParseNode visitShowResourceGroupStatement(StarRocksParser.ShowResourceGroupStatementContext context) {
    // SHOW RESOURCE GROUPS [ALL] (list form) vs SHOW RESOURCE GROUP <name> (single form).
    NodePosition pos = createPos(context);
    if (context.GROUPS() == null) {
        String name = ((Identifier) visit(context.identifier())).getValue();
        return new ShowResourceGroupStmt(name, false, pos);
    }
    return new ShowResourceGroupStmt(null, context.ALL() != null, pos);
}
@Override
public ParseNode visitCreateResourceStatement(StarRocksParser.CreateResourceStatementContext context) {
    // CREATE [EXTERNAL] RESOURCE <name> PROPERTIES (...).
    // Fix: added the missing @Override annotation on this visitor override.
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new CreateResourceStmt(context.EXTERNAL() != null, identifier.getValue(), properties,
            createPos(context));
}
@Override
public ParseNode visitDropResourceStatement(StarRocksParser.DropResourceStatementContext context) {
    // DROP RESOURCE <name>.
    // Fix: added the missing @Override annotation on this visitor override.
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    return new DropResourceStmt(identifier.getValue(), createPos(context));
}
@Override
public ParseNode visitAlterResourceStatement(StarRocksParser.AlterResourceStatementContext context) {
    // ALTER RESOURCE <name> SET PROPERTIES (...).
    // Fix: added the missing @Override annotation on this visitor override.
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new AlterResourceStmt(identifier.getValue(), properties, createPos(context));
}
@Override
public ParseNode visitShowResourceStatement(StarRocksParser.ShowResourceStatementContext context) {
    // SHOW RESOURCES takes no arguments; only the source position is recorded.
    // Fix: added the missing @Override annotation on this visitor override.
    return new ShowResourcesStmt(createPos(context));
}
@Override
public ParseNode visitLoadStatement(StarRocksParser.LoadStatementContext context) {
    // Builds LOAD LABEL <label> (data descs) [WITH RESOURCE ... | WITH BROKER ...]
    // [BY system] [PROPERTIES(...)]. A resource-based load returns early; otherwise
    // a broker-based LoadStmt is produced.
    NodePosition pos = createPos(context);
    LabelName label = getLabelName(context.labelName());
    // Note: dataDescriptions/properties stay null (not empty) when the clause is
    // absent — downstream code distinguishes "absent" from "empty".
    List<DataDescription> dataDescriptions = null;
    if (context.data != null) {
        dataDescriptions = context.data.dataDesc().stream().map(this::getDataDescription)
                .collect(toList());
    }
    Map<String, String> properties = null;
    if (context.props != null) {
        properties = Maps.newHashMap();
        List<Property> propertyList = visit(context.props.property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    // WITH RESOURCE: resource-based load, no broker/cluster and no hints attached.
    if (context.resource != null) {
        ResourceDesc resourceDesc = getResourceDesc(context.resource);
        return new LoadStmt(label, dataDescriptions, resourceDesc, properties, pos);
    }
    // WITH BROKER (possibly absent broker context — getBrokerDesc handles null).
    BrokerDesc brokerDesc = getBrokerDesc(context.broker);
    String cluster = null;
    if (context.system != null) {
        cluster = ((Identifier) visit(context.system)).getValue();
    }
    LoadStmt stmt = new LoadStmt(label, dataDescriptions, brokerDesc, cluster, properties, pos);
    stmt.setHintNodes(hintMap.get(context));
    return stmt;
}
private LabelName getLabelName(StarRocksParser.LabelNameContext context) {
    // Resolves a possibly database-qualified label; the database part defaults to "".
    String label = ((Identifier) visit(context.label)).getValue();
    String db = context.db == null ? "" : ((Identifier) visit(context.db)).getValue();
    return new LabelName(db, label, createPos(context));
}
// Builds one LOAD data description. Two shapes exist: table-to-table (srcTableName
// present) and file-based (srcFiles plus separator/format options).
private DataDescription getDataDescription(StarRocksParser.DataDescContext context) {
    NodePosition pos = createPos(context);
    String dstTableName = ((Identifier) visit(context.dstTableName)).getValue();
    PartitionNames partitionNames = (PartitionNames) visitIfPresent(context.partitions);
    Expr whereExpr = (Expr) visitIfPresent(context.where);
    List<Expr> colMappingList = null;
    if (context.colMappingList != null) {
        colMappingList = visit(context.colMappingList.expressionList().expression(), Expr.class);
    }
    // Table source: return early; the remaining file-oriented clauses do not apply.
    if (context.srcTableName != null) {
        String srcTableName = ((Identifier) visit(context.srcTableName)).getValue();
        return new DataDescription(dstTableName, partitionNames, srcTableName,
                context.NEGATIVE() != null, colMappingList, whereExpr, pos);
    }
    List<String> files = context.srcFiles.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue())
            .collect(toList());
    ColumnSeparator colSep = getColumnSeparator(context.colSep);
    RowDelimiter rowDelimiter = getRowDelimiter(context.rowSep);
    // FORMAT AS may be given as an identifier or a quoted string.
    String format = null;
    if (context.format != null) {
        if (context.format.identifier() != null) {
            format = ((Identifier) visit(context.format.identifier())).getValue();
        } else if (context.format.string() != null) {
            format = ((StringLiteral) visit(context.format.string())).getStringValue();
        }
    }
    List<String> colList = null;
    if (context.colList != null) {
        List<Identifier> identifiers = visit(context.colList.identifier(), Identifier.class);
        colList = identifiers.stream().map(Identifier::getValue).collect(toList());
    }
    // colFromPath: presumably columns extracted from the file path — confirm against grammar.
    List<String> colFromPath = null;
    if (context.colFromPath != null) {
        List<Identifier> identifiers = visit(context.colFromPath.identifier(), Identifier.class);
        colFromPath = identifiers.stream().map(Identifier::getValue).collect(toList());
    }
    StarRocksParser.FormatPropsContext formatPropsContext;
    CsvFormat csvFormat;
    if (context.formatPropsField != null) {
        formatPropsContext = context.formatProps();
        String escape = null;
        if (formatPropsContext.escapeCharacter != null) {
            StringLiteral stringLiteral = (StringLiteral) visit(formatPropsContext.escapeCharacter);
            escape = stringLiteral.getValue();
        }
        String enclose = null;
        if (formatPropsContext.encloseCharacter != null) {
            StringLiteral stringLiteral = (StringLiteral) visit(formatPropsContext.encloseCharacter);
            enclose = stringLiteral.getValue();
        }
        // Negative skip-header counts are silently clamped to 0 (skip nothing).
        long skipheader = 0;
        if (formatPropsContext.INTEGER_VALUE() != null) {
            skipheader = Long.parseLong(formatPropsContext.INTEGER_VALUE().getText());
            if (skipheader < 0) {
                skipheader = 0;
            }
        }
        boolean trimspace = false;
        if (formatPropsContext.booleanValue() != null) {
            trimspace = Boolean.parseBoolean(formatPropsContext.booleanValue().getText());
        }
        // Only the first character of enclose/escape is used; 0 denotes "unset".
        csvFormat = new CsvFormat(enclose == null ? 0 : (byte) enclose.charAt(0),
                escape == null ? 0 : (byte) escape.charAt(0),
                skipheader, trimspace);
    } else {
        csvFormat = new CsvFormat((byte) 0, (byte) 0, 0, false);
    }
    return new DataDescription(dstTableName, partitionNames, files, colList, colSep, rowDelimiter,
            format, colFromPath, context.NEGATIVE() != null, colMappingList, whereExpr,
            csvFormat, createPos(context));
}
// Absent COLUMNS TERMINATED BY clause yields null (caller uses its default).
private ColumnSeparator getColumnSeparator(StarRocksParser.StringContext context) {
    if (context == null) {
        return null;
    }
    String separator = ((StringLiteral) visit(context)).getValue();
    return new ColumnSeparator(separator);
}
// Absent row-delimiter clause yields null (caller uses its default).
private RowDelimiter getRowDelimiter(StarRocksParser.StringContext context) {
    if (context == null) {
        return null;
    }
    String delimiter = ((StringLiteral) visit(context)).getValue();
    return new RowDelimiter(delimiter);
}
// Builds a BrokerDesc from an optional WITH BROKER clause; null when absent.
private BrokerDesc getBrokerDesc(StarRocksParser.BrokerDescContext context) {
    if (context == null) {
        return null;
    }
    NodePosition pos = createPos(context);
    Map<String, String> brokerProps = null;
    if (context.props != null) {
        brokerProps = Maps.newHashMap();
        for (Property p : visit(context.props.property(), Property.class)) {
            brokerProps.put(p.getKey(), p.getValue());
        }
    }
    // A name selects a specific broker; without one the no-name overload is used.
    if (context.identifierOrString() == null) {
        return new BrokerDesc(brokerProps, pos);
    }
    String brokerName = ((Identifier) visit(context.identifierOrString())).getValue();
    return new BrokerDesc(brokerName, brokerProps, pos);
}
// Builds a ResourceDesc from an optional WITH RESOURCE clause; null when absent.
private ResourceDesc getResourceDesc(StarRocksParser.ResourceDescContext context) {
    if (context == null) {
        return null;
    }
    String resourceName = ((Identifier) visit(context.identifierOrString())).getValue();
    Map<String, String> resourceProps = null;
    if (context.props != null) {
        resourceProps = Maps.newHashMap();
        for (Property p : visit(context.props.property(), Property.class)) {
            resourceProps.put(p.getKey(), p.getValue());
        }
    }
    return new ResourceDesc(resourceName, resourceProps, createPos(context));
}
@Override
public ParseNode visitShowLoadStatement(StarRocksParser.ShowLoadStatementContext context) {
    // SHOW [ALL] LOAD [FROM db] [WHERE expr] [ORDER BY ...] [LIMIT ...]
    String db = context.identifier() == null
            ? null : ((Identifier) visit(context.identifier())).getValue();
    Expr labelExpr = context.expression() == null
            ? null : (Expr) visit(context.expression());
    List<OrderByElement> orderByElements = null;
    if (context.ORDER() != null) {
        orderByElements = new ArrayList<>(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limitElement = context.limitElement() == null
            ? null : (LimitElement) visit(context.limitElement());
    ShowLoadStmt stmt = new ShowLoadStmt(db, labelExpr, orderByElements, limitElement, createPos(context));
    stmt.setAll(context.ALL() != null);
    return stmt;
}
@Override
public ParseNode visitShowLoadWarningsStatement(StarRocksParser.ShowLoadWarningsStatementContext context) {
    // SHOW LOAD WARNINGS ON 'url' fetches warnings remotely; db/label/limit unused.
    // NOTE(review): this branch uses the 4-arg constructor without a position — confirm intended.
    if (context.ON() != null) {
        String url = ((StringLiteral) visit(context.string())).getValue();
        return new ShowLoadWarningsStmt(null, url, null, null);
    }
    String db = null;
    if (context.identifier() != null) {
        db = ((Identifier) visit(context.identifier())).getValue();
    }
    Expr labelExpr = null;
    if (context.expression() != null) {
        labelExpr = (Expr) visit(context.expression());
    }
    LimitElement limitElement = null;
    if (context.limitElement() != null) {
        limitElement = (LimitElement) visit(context.limitElement());
    }
    return new ShowLoadWarningsStmt(db, null, labelExpr, limitElement, createPos(context));
}
@Override
public ParseNode visitCancelLoadStatement(StarRocksParser.CancelLoadStatementContext context) {
    // CANCEL LOAD [FROM db] [WHERE expr]; both parts are optional.
    String db = context.identifier() == null
            ? null : ((Identifier) visit(context.identifier())).getValue();
    Expr labelExpr = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new CancelLoadStmt(db, labelExpr, createPos(context));
}
@Override
public ParseNode visitCancelCompactionStatement(StarRocksParser.CancelCompactionStatementContext context) {
    // Optional filter expression; null when no expression is given.
    Expr txnIdExpr = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new CancelCompactionStmt(txnIdExpr, createPos(context));
}
@Override
public ParseNode visitShowAuthorStatement(StarRocksParser.ShowAuthorStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowAuthorStmt(pos);
}
@Override
public ParseNode visitShowBackendsStatement(StarRocksParser.ShowBackendsStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowBackendsStmt(pos);
}
@Override
public ParseNode visitShowBrokerStatement(StarRocksParser.ShowBrokerStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowBrokerStmt(pos);
}
@Override
public ParseNode visitShowCharsetStatement(StarRocksParser.ShowCharsetStatementContext context) {
    // Optional LIKE 'pattern' and optional WHERE expression.
    String pattern = context.pattern == null
            ? null : ((StringLiteral) visit(context.pattern)).getValue();
    Expr where = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new ShowCharsetStmt(pattern, where, createPos(context));
}
@Override
public ParseNode visitShowCollationStatement(StarRocksParser.ShowCollationStatementContext context) {
    // Optional LIKE 'pattern' and optional WHERE expression.
    String pattern = context.pattern == null
            ? null : ((StringLiteral) visit(context.pattern)).getValue();
    Expr where = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new ShowCollationStmt(pattern, where, createPos(context));
}
@Override
public ParseNode visitShowDeleteStatement(StarRocksParser.ShowDeleteStatementContext context) {
    // SHOW DELETE [FROM db]. Guard on context.db directly (the field we read),
    // matching visitShowDynamicPartitionStatement; the old code tested
    // context.qualifiedName() but then dereferenced context.db.
    QualifiedName dbName = null;
    if (context.db != null) {
        dbName = getQualifiedName(context.db);
    }
    return new ShowDeleteStmt(dbName == null ? null : dbName.toString(), createPos(context));
}
@Override
public ParseNode visitShowDynamicPartitionStatement(StarRocksParser.ShowDynamicPartitionStatementContext context) {
    // SHOW DYNAMIC PARTITION TABLES [FROM db]; db is optional.
    String db = context.db == null ? null : getQualifiedName(context.db).toString();
    return new ShowDynamicPartitionStmt(db, createPos(context));
}
@Override
public ParseNode visitShowEventsStatement(StarRocksParser.ShowEventsStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowEventsStmt(pos);
}
@Override
public ParseNode visitShowEnginesStatement(StarRocksParser.ShowEnginesStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowEnginesStmt(pos);
}
@Override
public ParseNode visitShowFrontendsStatement(StarRocksParser.ShowFrontendsStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowFrontendsStmt(pos);
}
@Override
public ParseNode visitShowPluginsStatement(StarRocksParser.ShowPluginsStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowPluginsStmt(pos);
}
@Override
public ParseNode visitShowRepositoriesStatement(StarRocksParser.ShowRepositoriesStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowRepositoriesStmt(pos);
}
@Override
public ParseNode visitShowOpenTableStatement(StarRocksParser.ShowOpenTableStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowOpenTableStmt(pos);
}
@Override
public ParseNode visitShowProcedureStatement(StarRocksParser.ShowProcedureStatementContext context) {
    // At most one of LIKE 'pattern' / WHERE expr is present; neither is also valid.
    NodePosition pos = createPos(context);
    if (context.pattern != null) {
        String pattern = ((StringLiteral) visit(context.pattern)).getValue();
        return new ShowProcedureStmt(pattern, null, pos);
    }
    if (context.expression() != null) {
        return new ShowProcedureStmt(null, (Expr) visit(context.expression()), pos);
    }
    return new ShowProcedureStmt(null, null, pos);
}
@Override
public ParseNode visitShowProcStatement(StarRocksParser.ShowProcStatementContext context) {
    // SHOW PROC 'path'; the quoted path is the only operand.
    String path = ((StringLiteral) visit(context.path)).getValue();
    return new ShowProcStmt(path, createPos(context));
}
@Override
public ParseNode visitShowProcesslistStatement(StarRocksParser.ShowProcesslistStatementContext context) {
    // FULL keyword toggles the verbose variant.
    return new ShowProcesslistStmt(context.FULL() != null, createPos(context));
}
@Override
public ParseNode visitShowProfilelistStatement(StarRocksParser.ShowProfilelistStatementContext context) {
    // -1 encodes "no LIMIT clause".
    int limit = -1;
    if (context.LIMIT() != null) {
        limit = Integer.parseInt(context.limit.getText());
    }
    return new ShowProfilelistStmt(limit, createPos(context));
}
@Override
public ParseNode visitShowRunningQueriesStatement(StarRocksParser.ShowRunningQueriesStatementContext context) {
    // -1 encodes "no LIMIT clause".
    int limit = -1;
    if (context.LIMIT() != null) {
        limit = Integer.parseInt(context.limit.getText());
    }
    return new ShowRunningQueriesStmt(limit, createPos(context));
}
@Override
public ParseNode visitShowResourceGroupUsageStatement(
        StarRocksParser.ShowResourceGroupUsageStatementContext context) {
    // GROUPS form lists all groups (null name); otherwise one named group.
    NodePosition pos = createPos(context);
    if (context.GROUPS() != null) {
        return new ShowResourceGroupUsageStmt(null, pos);
    }
    String groupName = ((Identifier) visit(context.identifier())).getValue();
    return new ShowResourceGroupUsageStmt(groupName, pos);
}
@Override
public ParseNode visitShowTransactionStatement(StarRocksParser.ShowTransactionStatementContext context) {
    // SHOW TRANSACTION [FROM db] [WHERE expr]; both parts optional.
    String database = context.qualifiedName() == null
            ? null : getQualifiedName(context.qualifiedName()).toString();
    Expr where = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new ShowTransactionStmt(database, where, createPos(context));
}
@Override
public ParseNode visitShowStatusStatement(StarRocksParser.ShowStatusStatementContext context) {
    // SHOW [GLOBAL|SESSION|...] STATUS [LIKE 'pattern'] [WHERE expr].
    String pattern = context.pattern == null
            ? null : ((StringLiteral) visit(context.pattern)).getValue();
    Expr where = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new ShowStatusStmt(getVariableType(context.varType()), pattern, where, createPos(context));
}
@Override
public ParseNode visitShowTriggersStatement(StarRocksParser.ShowTriggersStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowTriggersStmt(pos);
}
@Override
public ParseNode visitShowUserPropertyStatement(StarRocksParser.ShowUserPropertyStatementContext context) {
    // Without FOR, string(0) (when LIKE is present) is the pattern; with FOR,
    // string(0) is the user and string(1) is the pattern. The indices shift, so
    // the two branches must stay in sync with the grammar.
    String user;
    String pattern;
    if (context.FOR() == null) {
        user = null;
        pattern = context.LIKE() == null ? null : ((StringLiteral) visit(context.string(0))).getValue();
    } else {
        user = ((StringLiteral) visit(context.string(0))).getValue();
        pattern = context.LIKE() == null ? null : ((StringLiteral) visit(context.string(1))).getValue();
    }
    return new ShowUserPropertyStmt(user, pattern, createPos(context));
}
@Override
public ParseNode visitShowVariablesStatement(StarRocksParser.ShowVariablesStatementContext context) {
    // SHOW [GLOBAL|SESSION|...] VARIABLES [LIKE 'pattern'] [WHERE expr].
    String pattern = context.pattern == null
            ? null : ((StringLiteral) visit(context.pattern)).getValue();
    Expr where = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new ShowVariablesStmt(getVariableType(context.varType()), pattern, where, createPos(context));
}
@Override
public ParseNode visitShowWarningStatement(StarRocksParser.ShowWarningStatementContext context) {
    // Optional LIMIT clause; null when absent.
    NodePosition pos = createPos(context);
    LimitElement limit = context.limitElement() == null
            ? null : (LimitElement) visit(context.limitElement());
    return new ShowWarningStmt(limit, pos);
}
@Override
public ParseNode visitHelpStatement(StarRocksParser.HelpStatementContext context) {
    // HELP 'mask': the topic mask is the only operand.
    String mask = ((Identifier) visit(context.identifierOrString())).getValue();
    return new HelpStmt(mask, createPos(context));
}
@Override
public ParseNode visitBackupStatement(StarRocksParser.BackupStatementContext context) {
    // BACKUP SNAPSHOT db.label ON (tbl [PARTITION ...], ...) TO repo [PROPERTIES ...]
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    LabelName labelName = qualifiedNameToLabelName(qualifiedName);
    // One TableRef per table descriptor; the loop reuses qualifiedName as scratch.
    List<TableRef> tblRefs = new ArrayList<>();
    for (StarRocksParser.TableDescContext tableDescContext : context.tableDesc()) {
        StarRocksParser.QualifiedNameContext qualifiedNameContext = tableDescContext.qualifiedName();
        qualifiedName = getQualifiedName(qualifiedNameContext);
        TableName tableName = qualifiedNameToTableName(qualifiedName);
        PartitionNames partitionNames = null;
        if (tableDescContext.partitionNames() != null) {
            partitionNames = (PartitionNames) visit(tableDescContext.partitionNames());
        }
        TableRef tableRef = new TableRef(tableName, null, partitionNames, createPos(tableDescContext));
        tblRefs.add(tableRef);
    }
    Map<String, String> properties = null;
    if (context.propertyList() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    // The trailing identifier names the target repository.
    String repoName = ((Identifier) visit(context.identifier())).getValue();
    return new BackupStmt(labelName, repoName, tblRefs, properties, createPos(context));
}
@Override
public ParseNode visitCancelBackupStatement(StarRocksParser.CancelBackupStatementContext context) {
    // CANCEL BACKUP requires an identifier; reject its absence at parse time.
    if (context.identifier() == null) {
        throw new ParsingException(PARSER_ERROR_MSG.nullIdentifierCancelBackupRestore());
    }
    String name = ((Identifier) visit(context.identifier())).getValue();
    // false: this cancels a BACKUP job (CancelBackupStmt is shared with RESTORE).
    return new CancelBackupStmt(name, false, createPos(context));
}
@Override
public ParseNode visitShowBackupStatement(StarRocksParser.ShowBackupStatementContext context) {
    // Identifier is optional; null means "no filter".
    NodePosition pos = createPos(context);
    String name = context.identifier() == null
            ? null : ((Identifier) visit(context.identifier())).getValue();
    return new ShowBackupStmt(name, pos);
}
@Override
public ParseNode visitRestoreStatement(StarRocksParser.RestoreStatementContext context) {
    // RESTORE SNAPSHOT db.label ON (tbl [PARTITION ...] [AS alias], ...) FROM repo
    // [PROPERTIES ...] — mirrors visitBackupStatement plus the per-table alias.
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    LabelName labelName = qualifiedNameToLabelName(qualifiedName);
    List<TableRef> tblRefs = new ArrayList<>();
    for (StarRocksParser.RestoreTableDescContext tableDescContext : context.restoreTableDesc()) {
        StarRocksParser.QualifiedNameContext qualifiedNameContext = tableDescContext.qualifiedName();
        qualifiedName = getQualifiedName(qualifiedNameContext);
        TableName tableName = qualifiedNameToTableName(qualifiedName);
        PartitionNames partitionNames = null;
        if (tableDescContext.partitionNames() != null) {
            partitionNames = (PartitionNames) visit(tableDescContext.partitionNames());
        }
        // Optional AS alias: restore the table under a different name.
        String alias = null;
        if (tableDescContext.identifier() != null) {
            alias = ((Identifier) visit(tableDescContext.identifier())).getValue();
        }
        TableRef tableRef = new TableRef(tableName, alias, partitionNames, createPos(tableDescContext));
        tblRefs.add(tableRef);
    }
    Map<String, String> properties = null;
    if (context.propertyList() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    // The trailing identifier names the source repository.
    String repoName = ((Identifier) visit(context.identifier())).getValue();
    return new RestoreStmt(labelName, repoName, tblRefs, properties, createPos(context));
}
@Override
public ParseNode visitCancelRestoreStatement(StarRocksParser.CancelRestoreStatementContext context) {
    // CANCEL RESTORE requires an identifier; reject its absence at parse time.
    if (context.identifier() == null) {
        throw new ParsingException(PARSER_ERROR_MSG.nullIdentifierCancelBackupRestore());
    }
    String name = ((Identifier) visit(context.identifier())).getValue();
    // true: this cancels a RESTORE job (CancelBackupStmt is shared with BACKUP).
    return new CancelBackupStmt(name, true, createPos(context));
}
@Override
public ParseNode visitShowRestoreStatement(StarRocksParser.ShowRestoreStatementContext context) {
    // SHOW RESTORE [FROM name] [WHERE expr]; the WHERE is only read when the
    // identifier is present, matching the grammar.
    NodePosition pos = createPos(context);
    if (context.identifier() == null) {
        return new ShowRestoreStmt(null, null, pos);
    }
    String name = ((Identifier) visit(context.identifier())).getValue();
    Expr where = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new ShowRestoreStmt(name, where, pos);
}
@Override
public ParseNode visitShowSnapshotStatement(StarRocksParser.ShowSnapshotStatementContext context) {
    // SHOW SNAPSHOT ON repo [WHERE expr].
    Expr where = context.expression() == null
            ? null : (Expr) visit(context.expression());
    String repoName = ((Identifier) visit(context.identifier())).getValue();
    return new ShowSnapshotStmt(repoName, where, createPos(context));
}
@Override
public ParseNode visitCreateRepositoryStatement(StarRocksParser.CreateRepositoryStatementContext context) {
    // CREATE [READ ONLY] REPOSITORY repo [WITH BROKER broker] ON LOCATION 'loc'
    // PROPERTIES (...). Properties default to an empty map, never null.
    boolean isReadOnly = context.READ() != null && context.ONLY() != null;
    Map<String, String> properties = new HashMap<>();
    if (context.propertyList() != null) {
        for (Property property : visit(context.propertyList().property(), Property.class)) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    String location = ((StringLiteral) visit(context.location)).getValue();
    String repoName = ((Identifier) visit(context.repoName)).getValue();
    String brokerName = context.brokerName == null
            ? null : ((Identifier) visit(context.brokerName)).getValue();
    return new CreateRepositoryStmt(isReadOnly, repoName, brokerName,
            location, properties, createPos(context));
}
@Override
public ParseNode visitDropRepositoryStatement(StarRocksParser.DropRepositoryStatementContext context) {
    // DROP REPOSITORY repo; only the name is needed.
    String repoName = ((Identifier) visit(context.identifier())).getValue();
    return new DropRepositoryStmt(repoName, createPos(context));
}
@Override
public ParseNode visitAddSqlBlackListStatement(StarRocksParser.AddSqlBlackListStatementContext context) {
    // An empty blacklist pattern is rejected at parse time.
    String sql = ((StringLiteral) visit(context.string())).getStringValue();
    if (sql == null || sql.isEmpty()) {
        throw new ParsingException(PARSER_ERROR_MSG.emptySql(), createPos(context.string()));
    }
    return new AddSqlBlackListStmt(sql);
}
@Override
public ParseNode visitDelSqlBlackListStatement(StarRocksParser.DelSqlBlackListStatementContext context) {
    // One or more numeric blacklist ids to remove.
    List<Long> indexes = context.INTEGER_VALUE().stream()
            .map(token -> Long.parseLong(token.getText()))
            .collect(toList());
    return new DelSqlBlackListStmt(indexes, createPos(context));
}
@Override
public ParseNode visitShowSqlBlackListStatement(StarRocksParser.ShowSqlBlackListStatementContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new ShowSqlBlackListStmt(pos);
}
@Override
public ParseNode visitShowWhiteListStatement(StarRocksParser.ShowWhiteListStatementContext context) {
    // No operands; note this statement records no position (no-arg constructor).
    return new ShowWhiteListStmt();
}
@Override
public ParseNode visitAddBackendBlackListStatement(StarRocksParser.AddBackendBlackListStatementContext ctx) {
    // One or more numeric backend ids to blacklist.
    List<Long> backendIds = ctx.INTEGER_VALUE().stream()
            .map(token -> Long.parseLong(token.getText()))
            .collect(toList());
    return new AddBackendBlackListStmt(backendIds, createPos(ctx));
}
@Override
public ParseNode visitDelBackendBlackListStatement(StarRocksParser.DelBackendBlackListStatementContext ctx) {
    // One or more numeric backend ids to remove from the blacklist.
    List<Long> backendIds = ctx.INTEGER_VALUE().stream()
            .map(token -> Long.parseLong(token.getText()))
            .collect(toList());
    // Note the (pos, ids) argument order on this constructor.
    return new DelBackendBlackListStmt(createPos(ctx), backendIds);
}
@Override
public ParseNode visitShowBackendBlackListStatement(StarRocksParser.ShowBackendBlackListStatementContext ctx) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(ctx);
    return new ShowBackendBlackListStmt(pos);
}
@Override
public ParseNode visitCreateDataCacheRuleStatement(StarRocksParser.CreateDataCacheRuleStatementContext ctx) {
    // Target is a dotted path whose parts may be identifiers, strings, or '*'.
    List<StarRocksParser.IdentifierOrStringOrStarContext> partList =
            ctx.dataCacheTarget().identifierOrStringOrStar();
    List<String> parts = partList.stream().map(c -> ((Identifier) visit(c)).getValue()).collect(toList());
    QualifiedName qualifiedName = QualifiedName.of(parts);
    // The grammar tokenizes a leading '-' separately, so negate here.
    int priority = Integer.parseInt(ctx.INTEGER_VALUE().getText());
    if (ctx.MINUS_SYMBOL() != null) {
        priority *= -1;
    }
    Expr predicates = null;
    if (ctx.expression() != null) {
        predicates = (Expr) visit(ctx.expression());
    }
    Map<String, String> properties = null;
    if (ctx.properties() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(ctx.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new CreateDataCacheRuleStmt(qualifiedName, predicates, priority, properties, createPos(ctx));
}
@Override
public ParseNode visitShowDataCacheRulesStatement(StarRocksParser.ShowDataCacheRulesStatementContext ctx) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(ctx);
    return new ShowDataCacheRulesStmt(pos);
}
@Override
public ParseNode visitDropDataCacheRuleStatement(StarRocksParser.DropDataCacheRuleStatementContext ctx) {
    // The rule is addressed by its numeric id.
    long ruleId = Long.parseLong(ctx.INTEGER_VALUE().getText());
    return new DropDataCacheRuleStmt(ruleId, createPos(ctx));
}
@Override
public ParseNode visitClearDataCacheRulesStatement(StarRocksParser.ClearDataCacheRulesStatementContext ctx) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(ctx);
    return new ClearDataCacheRulesStmt(pos);
}
@Override
public ParseNode visitExportStatement(StarRocksParser.ExportStatementContext context) {
    // EXPORT TABLE tbl [PARTITION ...] [(cols)] TO 'path' [PROPERTIES ...] [WITH BROKER ...]
    StarRocksParser.QualifiedNameContext qualifiedNameContext = context.tableDesc().qualifiedName();
    // Track start/stop tokens by hand so the TableRef position covers the
    // optional partition clause when present.
    Token start = qualifiedNameContext.start;
    Token stop = qualifiedNameContext.stop;
    QualifiedName qualifiedName = getQualifiedName(qualifiedNameContext);
    TableName tableName = qualifiedNameToTableName(qualifiedName);
    PartitionNames partitionNames = null;
    if (context.tableDesc().partitionNames() != null) {
        stop = context.tableDesc().partitionNames().stop;
        partitionNames = (PartitionNames) visit(context.tableDesc().partitionNames());
    }
    TableRef tableRef = new TableRef(tableName, null, partitionNames, createPos(start, stop));
    // The string literal is the export destination path.
    StringLiteral stringLiteral = (StringLiteral) visit(context.string());
    Map<String, String> properties = null;
    if (context.properties() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    BrokerDesc brokerDesc = getBrokerDesc(context.brokerDesc());
    // SYNC keyword makes the export synchronous.
    boolean sync = context.SYNC() != null;
    return new ExportStmt(tableRef, getColumnNames(context.columnAliases()),
            stringLiteral.getValue(), properties, brokerDesc, createPos(context), sync);
}
@Override
public ParseNode visitCancelExportStatement(StarRocksParser.CancelExportStatementContext context) {
    // CANCEL EXPORT [FROM catalog] [WHERE expr]; both parts optional.
    String catalog = context.catalog == null
            ? null : getQualifiedName(context.catalog).toString();
    Expr where = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new CancelExportStmt(catalog, where, createPos(context));
}
@Override
public ParseNode visitShowExportStatement(StarRocksParser.ShowExportStatementContext context) {
    // SHOW EXPORT [FROM catalog] [WHERE expr] [ORDER BY ...] [LIMIT ...]
    String catalog = context.catalog == null
            ? null : getQualifiedName(context.catalog).toString();
    LimitElement limit = context.limitElement() == null
            ? null : (LimitElement) visit(context.limitElement());
    List<OrderByElement> orderByElements = null;
    if (context.ORDER() != null) {
        orderByElements = new ArrayList<>(visit(context.sortItem(), OrderByElement.class));
    }
    Expr whereExpr = context.expression() == null
            ? null : (Expr) visit(context.expression());
    return new ShowExportStmt(catalog, whereExpr, orderByElements, limit, createPos(context));
}
@Override
public ParseNode visitInstallPluginStatement(StarRocksParser.InstallPluginStatementContext context) {
    // INSTALL PLUGIN FROM 'path' [PROPERTIES ...]
    String pluginPath = ((Identifier) visit(context.identifierOrString())).getValue();
    return new InstallPluginStmt(pluginPath, getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitUninstallPluginStatement(StarRocksParser.UninstallPluginStatementContext context) {
    // UNINSTALL PLUGIN name/path.
    String pluginPath = ((Identifier) visit(context.identifierOrString())).getValue();
    return new UninstallPluginStmt(pluginPath, createPos(context));
}
@Override
public ParseNode visitCreateFileStatement(StarRocksParser.CreateFileStatementContext context) {
    // CREATE FILE 'name' [IN catalog.db] PROPERTIES (...).
    String fileName = ((StringLiteral) visit(context.string())).getStringValue();
    String catalog = context.catalog == null
            ? null : getQualifiedName(context.catalog).toString();
    return new CreateFileStmt(fileName, catalog, getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitDropFileStatement(StarRocksParser.DropFileStatementContext context) {
    // DROP FILE 'name' [IN catalog.db] PROPERTIES (...).
    String fileName = ((StringLiteral) visit(context.string())).getStringValue();
    String catalog = context.catalog == null
            ? null : getQualifiedName(context.catalog).toString();
    return new DropFileStmt(fileName, catalog, getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitShowSmallFilesStatement(StarRocksParser.ShowSmallFilesStatementContext context) {
    // SHOW FILE [FROM catalog.db]; the qualifier is optional.
    String catalog = context.catalog == null
            ? null : getQualifiedName(context.catalog).toString();
    return new ShowSmallFilesStmt(catalog, createPos(context));
}
@Override
public ParseNode visitSetStatement(StarRocksParser.SetStatementContext context) {
    // SET a = x, b = y, ... — each assignment becomes one SetListItem.
    List<SetListItem> items = visit(context.setVar(), SetListItem.class);
    return new SetStmt(items, createPos(context));
}
@Override
public ParseNode visitSetNames(StarRocksParser.SetNamesContext context) {
    NodePosition pos = createPos(context);
    // SET CHAR SET / SET CHARSET form: at most one charset, never a collation.
    if (context.CHAR() != null || context.CHARSET() != null) {
        if (context.identifierOrString().isEmpty()) {
            return new SetNamesVar(null, null, pos);
        } else {
            return new SetNamesVar(
                    ((Identifier) visit(context.identifierOrString().get(0))).getValue(),
                    null,
                    pos);
        }
    } else {
        // SET NAMES form: optional charset plus optional COLLATE.
        String charset = null;
        if (context.charset != null) {
            charset = ((Identifier) visit(context.charset)).getValue();
        }
        String collate = null;
        if (context.collate != null) {
            collate = ((Identifier) visit(context.collate)).getValue();
        }
        return new SetNamesVar(charset, collate, pos);
    }
}
@Override
public ParseNode visitSetPassword(StarRocksParser.SetPasswordContext context) {
    // SET PASSWORD [FOR user] = 'scrambled' | PASSWORD('plain').
    NodePosition pos = createPos(context);
    StringLiteral literal = (StringLiteral) visit(context.string());
    // Two PASSWORD tokens means the PASSWORD('plain') form: scramble the plaintext;
    // otherwise the given string is used as-is.
    String passwordText;
    if (context.PASSWORD().size() > 1) {
        passwordText = new String(MysqlPassword.makeScrambledPassword(literal.getStringValue()));
    } else {
        passwordText = literal.getStringValue();
    }
    UserIdentity user = context.user() == null
            ? null : (UserIdentity) visit(context.user());
    return new SetPassVar(user, passwordText, pos);
}
@Override
public ParseNode visitSetUserVar(StarRocksParser.SetUserVarContext context) {
    // SET @name = expr.
    VariableExpr variable = (VariableExpr) visit(context.userVariable());
    Expr value = (Expr) visit(context.expression());
    return new UserVariable(variable.getName(), value, createPos(context));
}
@Override
public ParseNode visitSetSystemVar(StarRocksParser.SetSystemVarContext context) {
    NodePosition pos = createPos(context);
    // @@[scope.]name form: the scope is carried by the parsed VariableExpr.
    if (context.systemVariable() != null) {
        VariableExpr variable = (VariableExpr) visit(context.systemVariable());
        Expr value = (Expr) visit(context.setExprOrDefault());
        return new SystemVariable(variable.getSetType(), variable.getName(), value, pos);
    }
    // Bare identifier form: an explicit GLOBAL/SESSION prefix wins, else SESSION.
    Expr value = (Expr) visit(context.setExprOrDefault());
    String name = ((Identifier) visit(context.identifier())).getValue();
    SetType type = context.varType() == null
            ? SetType.SESSION : getVariableType(context.varType());
    return new SystemVariable(type, name, value, pos);
}
@Override
public ParseNode visitSetTransaction(StarRocksParser.SetTransactionContext context) {
    // No operands; only the source position is recorded.
    NodePosition pos = createPos(context);
    return new SetTransaction(pos);
}
@Override
public ParseNode visitSetUserPropertyStatement(StarRocksParser.SetUserPropertyStatementContext context) {
    // SET PROPERTY [FOR 'user'] k=v, ...; user stays null when FOR is absent.
    String user = context.FOR() == null
            ? null : ((StringLiteral) visit(context.string())).getValue();
    List<SetUserPropertyVar> vars = new ArrayList<>();
    if (context.userPropertyList() != null) {
        for (Property property : visit(context.userPropertyList().property(), Property.class)) {
            vars.add(new SetUserPropertyVar(property.getKey(), property.getValue()));
        }
    }
    return new SetUserPropertyStmt(user, vars, createPos(context));
}
@Override
public ParseNode visitSetExprOrDefault(StarRocksParser.SetExprOrDefaultContext context) {
    // DEFAULT is represented by a null expression; bare ON/ALL keywords are
    // converted into string literals.
    if (context.DEFAULT() != null) {
        return null;
    }
    if (context.ON() != null) {
        return new StringLiteral("ON");
    }
    if (context.ALL() != null) {
        return new StringLiteral("ALL");
    }
    return visit(context.expression());
}
@Override
public ParseNode visitExecuteScriptStatement(StarRocksParser.ExecuteScriptStatementContext context) {
    // -1 encodes "no backend id supplied".
    long beId = context.INTEGER_VALUE() == null
            ? -1 : Long.parseLong(context.INTEGER_VALUE().getText());
    String script = ((StringLiteral) visit(context.string())).getStringValue();
    return new ExecuteScriptStmt(beId, script, createPos(context));
}
@Override
public ParseNode visitCreateStorageVolumeStatement(StarRocksParser.CreateStorageVolumeStatementContext context) {
    // CREATE STORAGE VOLUME [IF NOT EXISTS] name TYPE = t LOCATIONS = (...)
    // [COMMENT ...] [PROPERTIES ...]
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    String svName = identifier.getValue();
    String storageType = ((Identifier) visit(context.typeDesc().identifier())).getValue();
    List<StarRocksParser.StringContext> locationList = context.locationsDesc().stringList().string();
    List<String> locations = new ArrayList<>();
    for (StarRocksParser.StringContext location : locationList) {
        locations.add(((StringLiteral) visit(location)).getValue());
    }
    // context.IF() != null corresponds to the IF NOT EXISTS clause.
    return new CreateStorageVolumeStmt(context.IF() != null,
            svName, storageType, getProperties(context.properties()), locations,
            context.comment() == null ? null : ((StringLiteral) visit(context.comment().string())).getStringValue(),
            createPos(context));
}
@Override
public ParseNode visitShowStorageVolumesStatement(StarRocksParser.ShowStorageVolumesStatementContext context) {
    // SHOW STORAGE VOLUMES [LIKE '<pattern>']; a null pattern lists everything.
    String pattern = context.pattern == null
            ? null
            : ((StringLiteral) visit(context.pattern)).getValue();
    return new ShowStorageVolumesStmt(pattern, createPos(context));
}
@Override
public ParseNode visitAlterStorageVolumeStatement(StarRocksParser.AlterStorageVolumeStatementContext context) {
    // Flattens the parsed ALTER STORAGE VOLUME clauses into a single statement:
    // the last COMMENT clause wins, and the last SET-properties clause wins.
    Identifier identifier = (Identifier) visit(context.identifierOrString());
    String svName = identifier.getValue();
    NodePosition pos = createPos(context);
    List<AlterStorageVolumeClause> alterClauses = visit(context.alterStorageVolumeClause(),
            AlterStorageVolumeClause.class);
    Map<String, String> properties = new HashMap<>();
    String comment = null;
    for (AlterStorageVolumeClause clause : alterClauses) {
        if (clause.getOpType().equals(AlterStorageVolumeClause.AlterOpType.ALTER_COMMENT)) {
            comment = ((AlterStorageVolumeCommentClause) clause).getNewComment();
        } else if (clause.getOpType().equals(AlterStorageVolumeClause.AlterOpType.MODIFY_PROPERTIES)) {
            // Note: this replaces (not merges) any previously seen property map.
            properties = ((ModifyStorageVolumePropertiesClause) clause).getProperties();
        }
    }
    return new AlterStorageVolumeStmt(svName, properties, comment, pos);
}
@Override
public ParseNode visitDropStorageVolumeStatement(StarRocksParser.DropStorageVolumeStatementContext context) {
    // DROP STORAGE VOLUME [IF EXISTS] <name>.
    return new DropStorageVolumeStmt(context.IF() != null,
            ((Identifier) visit(context.identifierOrString())).getValue(),
            createPos(context));
}
@Override
public ParseNode visitDescStorageVolumeStatement(StarRocksParser.DescStorageVolumeStatementContext context) {
    // DESC STORAGE VOLUME <name>.
    return new DescStorageVolumeStmt(
            ((Identifier) visit(context.identifierOrString())).getValue(),
            createPos(context));
}
@Override
public ParseNode visitSetDefaultStorageVolumeStatement(
        StarRocksParser.SetDefaultStorageVolumeStatementContext context) {
    // SET <name> AS DEFAULT STORAGE VOLUME.
    return new SetDefaultStorageVolumeStmt(
            ((Identifier) visit(context.identifierOrString())).getValue(),
            createPos(context));
}
@Override
public ParseNode visitModifyStorageVolumeCommentClause(
        StarRocksParser.ModifyStorageVolumeCommentClauseContext context) {
    // COMMENT = '<text>' clause of ALTER STORAGE VOLUME.
    StringLiteral literal = (StringLiteral) visit(context.string());
    return new AlterStorageVolumeCommentClause(literal.getStringValue(), createPos(context));
}
@Override
public ParseNode visitModifyStorageVolumePropertiesClause(
        StarRocksParser.ModifyStorageVolumePropertiesClauseContext context) {
    // SET ('k' = 'v', ...) clause; later duplicate keys overwrite earlier ones.
    Map<String, String> properties = new HashMap<>();
    visit(context.propertyList().property(), Property.class)
            .forEach(prop -> properties.put(prop.getKey(), prop.getValue()));
    return new ModifyStorageVolumePropertiesClause(properties, createPos(context));
}
@Override
public ParseNode visitUpdateFailPointStatusStatement(
        StarRocksParser.UpdateFailPointStatusStatementContext ctx) {
    // ADMIN ENABLE/DISABLE FAILPOINT '<name>' [WITH n TIMES | WITH p PROBABILITY]
    // [ON BACKEND 'host:port,host:port,...'].
    String failpointName = ((StringLiteral) visit(ctx.string(0))).getStringValue();
    List<String> backendList = null;
    if (ctx.BACKEND() != null) {
        // The backend list is a single comma-separated string literal.
        String tmp = ((StringLiteral) visit(ctx.string(1))).getStringValue();
        backendList = Lists.newArrayList(tmp.split(","));
    }
    if (ctx.ENABLE() != null) {
        // TIMES and PROBABILITY are mutually exclusive refinements of ENABLE.
        if (ctx.TIMES() != null) {
            int nTimes = Integer.parseInt(ctx.INTEGER_VALUE().getText());
            if (nTimes <= 0) {
                throw new ParsingException(String.format(
                        "Invalid TIMES value %d, it should be a positive integer", nTimes));
            }
            return new UpdateFailPointStatusStatement(failpointName, nTimes, backendList, createPos(ctx));
        } else if (ctx.PROBABILITY() != null) {
            double probability = Double.parseDouble(ctx.DECIMAL_VALUE().getText());
            if (probability < 0 || probability > 1) {
                throw new ParsingException(String.format(
                        "Invalid PROBABILITY value %f, it should be in range [0, 1]", probability));
            }
            return new UpdateFailPointStatusStatement(failpointName, probability, backendList, createPos(ctx));
        }
        // Plain ENABLE: unconditionally on.
        return new UpdateFailPointStatusStatement(failpointName, true, backendList, createPos(ctx));
    }
    // DISABLE.
    return new UpdateFailPointStatusStatement(failpointName, false, backendList, createPos(ctx));
}
@Override
public ParseNode visitShowFailPointStatement(StarRocksParser.ShowFailPointStatementContext ctx) {
    // SHOW FAILPOINTS [LIKE '<pattern>'] [ON BACKEND '<list>'].
    String pattern = null;
    List<String> backendList = null;
    // idx walks ctx.string() in grammar order: the LIKE pattern (if present)
    // comes before the BACKEND list, so the increments must stay in this order.
    int idx = 0;
    if (ctx.LIKE() != null) {
        pattern = ((StringLiteral) visit(ctx.string(idx++))).getStringValue();
    }
    if (ctx.BACKEND() != null) {
        // Comma-separated "host:port" entries inside one string literal.
        String tmp = ((StringLiteral) visit(ctx.string(idx++))).getStringValue();
        backendList = Lists.newArrayList(tmp.split(","));
    }
    return new ShowFailPointStatement(pattern, backendList, createPos(ctx));
}
@Override
public ParseNode visitCreateDictionaryStatement(StarRocksParser.CreateDictionaryStatementContext context) {
    // CREATE DICTIONARY <name> USING <table> (col KEY, col VALUE, ...) [PROPERTIES(...)].
    String dictionaryName = getQualifiedName(context.dictionaryName().qualifiedName()).toString();
    String queryableObject = getQualifiedName(context.qualifiedName()).toString();
    List<StarRocksParser.DictionaryColumnDescContext> dictionaryColumnDescs = context.dictionaryColumnDesc();
    List<String> dictionaryKeys = new ArrayList<>();
    List<String> dictionaryValues = new ArrayList<>();
    for (StarRocksParser.DictionaryColumnDescContext desc : dictionaryColumnDescs) {
        String columnName = getQualifiedName(desc.qualifiedName()).toString();
        // Each column is tagged KEY or VALUE in the grammar; partition accordingly.
        if (desc.KEY() != null) {
            dictionaryKeys.add(columnName);
        }
        if (desc.VALUE() != null) {
            dictionaryValues.add(columnName);
        }
    }
    // Properties stay null (not empty) when the clause is absent.
    Map<String, String> properties = null;
    if (context.properties() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new CreateDictionaryStmt(dictionaryName, queryableObject, dictionaryKeys, dictionaryValues,
            properties, createPos(context));
}
@Override
public ParseNode visitDropDictionaryStatement(StarRocksParser.DropDictionaryStatementContext context) {
    // DROP DICTIONARY <name> [CACHE]; CACHE only evicts the cached data.
    String dictionaryName = getQualifiedName(context.qualifiedName()).toString();
    boolean cacheOnly = context.CACHE() != null;
    return new DropDictionaryStmt(dictionaryName, cacheOnly, createPos(context));
}
@Override
public ParseNode visitRefreshDictionaryStatement(StarRocksParser.RefreshDictionaryStatementContext context) {
    // REFRESH DICTIONARY <name>.
    return new RefreshDictionaryStmt(
            getQualifiedName(context.qualifiedName()).toString(),
            createPos(context));
}
@Override
public ParseNode visitShowDictionaryStatement(StarRocksParser.ShowDictionaryStatementContext context) {
    // SHOW DICTIONARY [<name>]; a null name lists all dictionaries.
    String dictionaryName = context.qualifiedName() == null
            ? null
            : getQualifiedName(context.qualifiedName()).toString();
    return new ShowDictionaryStmt(dictionaryName, createPos(context));
}
@Override
public ParseNode visitCancelRefreshDictionaryStatement(
        StarRocksParser.CancelRefreshDictionaryStatementContext context) {
    // CANCEL REFRESH DICTIONARY <name>.
    return new CancelRefreshDictionaryStmt(
            getQualifiedName(context.qualifiedName()).toString(),
            createPos(context));
}
@Override
public ParseNode visitUnsupportedStatement(StarRocksParser.UnsupportedStatementContext context) {
    // Grammar-recognized but unimplemented statements become a placeholder node.
    return new UnsupportedStmt(createPos(context));
}
@Override
public ParseNode visitAddFrontendClause(StarRocksParser.AddFrontendClauseContext context) {
    // ADD FOLLOWER/OBSERVER '<host:edit_log_port>'.
    String cluster = ((StringLiteral) visit(context.string())).getStringValue();
    NodePosition pos = createPos(context);
    return context.FOLLOWER() != null
            ? new AddFollowerClause(cluster, pos)
            : new AddObserverClause(cluster, pos);
}
@Override
public ParseNode visitDropFrontendClause(StarRocksParser.DropFrontendClauseContext context) {
    // DROP FOLLOWER/OBSERVER '<host:edit_log_port>'.
    String cluster = ((StringLiteral) visit(context.string())).getStringValue();
    NodePosition pos = createPos(context);
    return context.FOLLOWER() != null
            ? new DropFollowerClause(cluster, pos)
            : new DropObserverClause(cluster, pos);
}
@Override
public ParseNode visitModifyFrontendHostClause(StarRocksParser.ModifyFrontendHostClauseContext context) {
    // MODIFY FRONTEND HOST '<old>' TO '<new>' — exactly two string operands.
    String oldHost = ((StringLiteral) visit(context.string(0))).getStringValue();
    String newHost = ((StringLiteral) visit(context.string(1))).getStringValue();
    return new ModifyFrontendAddressClause(oldHost, newHost, createPos(context));
}
@Override
public ParseNode visitAddBackendClause(StarRocksParser.AddBackendClauseContext context) {
    // ADD BACKEND '<host:port>', ...
    List<String> backends = new ArrayList<>();
    for (StarRocksParser.StringContext s : context.string()) {
        backends.add(((StringLiteral) visit(s)).getStringValue());
    }
    return new AddBackendClause(backends, createPos(context));
}
@Override
public ParseNode visitDropBackendClause(StarRocksParser.DropBackendClauseContext context) {
    // DROP BACKEND '<host:port>', ... [FORCE].
    List<String> backends = new ArrayList<>();
    for (StarRocksParser.StringContext s : context.string()) {
        backends.add(((StringLiteral) visit(s)).getStringValue());
    }
    return new DropBackendClause(backends, context.FORCE() != null, createPos(context));
}
@Override
public ParseNode visitDecommissionBackendClause(StarRocksParser.DecommissionBackendClauseContext context) {
    // DECOMMISSION BACKEND '<host:port>', ...
    List<String> backends = new ArrayList<>();
    for (StarRocksParser.StringContext s : context.string()) {
        backends.add(((StringLiteral) visit(s)).getStringValue());
    }
    return new DecommissionBackendClause(backends, createPos(context));
}
@Override
public ParseNode visitModifyBackendClause(StarRocksParser.ModifyBackendClauseContext context) {
    // Two forms: MODIFY BACKEND HOST '<old>' TO '<new>' (two strings), or
    // MODIFY BACKEND '<host:port>' SET (props) (one string plus a property list).
    List<String> strings =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    if (context.HOST() != null) {
        // Host-rename form: strings are (old host, new host).
        return new ModifyBackendClause(strings.get(0), strings.get(1), createPos(context));
    } else {
        // Property form: the single string identifies the backend.
        String backendHostPort = strings.get(0);
        Map<String, String> properties = new HashMap<>();
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
        return new ModifyBackendClause(backendHostPort, properties, createPos(context));
    }
}
@Override
public ParseNode visitAddComputeNodeClause(StarRocksParser.AddComputeNodeClauseContext context) {
    // ADD COMPUTE NODE '<host:port>', ...
    // NOTE(review): unlike the sibling clauses, no createPos(context) is passed here —
    // presumably AddComputeNodeClause has no position-taking constructor; verify.
    List<String> hostPorts =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    return new AddComputeNodeClause(hostPorts);
}
@Override
public ParseNode visitDropComputeNodeClause(StarRocksParser.DropComputeNodeClauseContext context) {
    // DROP COMPUTE NODE '<host:port>', ...
    List<String> hostPorts = new ArrayList<>();
    for (StarRocksParser.StringContext s : context.string()) {
        hostPorts.add(((StringLiteral) visit(s)).getStringValue());
    }
    return new DropComputeNodeClause(hostPorts, createPos(context));
}
@Override
public ParseNode visitModifyBrokerClause(StarRocksParser.ModifyBrokerClauseContext context) {
    // Three broker operations share one grammar rule:
    // DROP ALL BROKER <name> / ADD BROKER <name> '<hp>',... / DROP BROKER <name> '<hp>',...
    String brokerName = ((Identifier) visit(context.identifierOrString())).getValue();
    NodePosition pos = createPos(context);
    if (context.ALL() != null) {
        // ALL implies no host:port list; must be checked before visiting strings.
        return ModifyBrokerClause.createDropAllBrokerClause(brokerName, pos);
    }
    List<String> hostPorts =
            context.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue()).collect(toList());
    if (context.ADD() != null) {
        return ModifyBrokerClause.createAddBrokerClause(brokerName, hostPorts, pos);
    }
    return ModifyBrokerClause.createDropBrokerClause(brokerName, hostPorts, pos);
}
@Override
public ParseNode visitAlterLoadErrorUrlClause(StarRocksParser.AlterLoadErrorUrlClauseContext context) {
    // SET LOAD ERRORS HUB PROPERTIES(...) — everything is carried in the property map.
    return new AlterLoadErrorUrlClause(getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitCreateImageClause(StarRocksParser.CreateImageClauseContext context) {
    // ADMIN ... CREATE IMAGE takes no arguments; only the position is kept.
    return new CreateImageClause(createPos(context));
}
@Override
public ParseNode visitCleanTabletSchedQClause(
        StarRocksParser.CleanTabletSchedQClauseContext context) {
    // Clears the tablet scheduler queue; no arguments beyond the position.
    return new CleanTabletSchedQClause(createPos(context));
}
@Override
public ParseNode visitCreateIndexClause(StarRocksParser.CreateIndexClauseContext context) {
    // ADD INDEX <name> (cols) [USING <type>] [COMMENT '...'] [PROPERTIES(...)].
    // The IndexDef position spans from the index name token to the last parsed
    // element (column list, or the comment when present).
    Token start = context.identifier().start;
    String indexName = ((Identifier) visit(context.identifier())).getValue();
    List<Identifier> columnList = visit(context.identifierList().identifier(), Identifier.class);
    Token stop = context.identifierList().stop;
    String comment = null;
    if (context.comment() != null) {
        // Extend the span to cover the comment.
        stop = context.comment().stop;
        comment = ((StringLiteral) visit(context.comment())).getStringValue();
    }
    IndexDef indexDef = new IndexDef(indexName,
            columnList.stream().map(Identifier::getValue).collect(toList()),
            getIndexType(context.indexType()),
            comment, getPropertyList(context.propertyList()),
            createPos(start, stop));
    return new CreateIndexClause(indexDef, createPos(context));
}
@Override
public ParseNode visitDropIndexClause(StarRocksParser.DropIndexClauseContext context) {
    // DROP INDEX <name>.
    String indexName = ((Identifier) visit(context.identifier())).getValue();
    return new DropIndexClause(indexName, createPos(context));
}
@Override
public ParseNode visitTableRenameClause(StarRocksParser.TableRenameClauseContext context) {
    // RENAME <new_table_name>.
    String newName = ((Identifier) visit(context.identifier())).getValue();
    return new TableRenameClause(newName, createPos(context));
}
@Override
public ParseNode visitModifyCommentClause(StarRocksParser.ModifyCommentClauseContext context) {
    // COMMENT = '<text>' on a table.
    StringLiteral literal = (StringLiteral) visit(context.string());
    return new AlterTableCommentClause(literal.getStringValue(), createPos(context));
}
@Override
public ParseNode visitSwapTableClause(StarRocksParser.SwapTableClauseContext context) {
    // SWAP WITH <table>.
    String otherTable = ((Identifier) visit(context.identifier())).getValue();
    return new SwapTableClause(otherTable, createPos(context));
}
@Override
public ParseNode visitModifyPropertiesClause(StarRocksParser.ModifyPropertiesClauseContext context) {
    // SET ('k' = 'v', ...) on a table; later duplicate keys overwrite earlier ones.
    Map<String, String> properties = new HashMap<>();
    visit(context.propertyList().property(), Property.class)
            .forEach(prop -> properties.put(prop.getKey(), prop.getValue()));
    return new ModifyTablePropertiesClause(properties, createPos(context));
}
@Override
public ParseNode visitOptimizeClause(StarRocksParser.OptimizeClauseContext context) {
    // OPTIMIZE clause: every sub-clause (keys, partitioning, distribution,
    // ordering, partition names) is optional and passed as null when absent.
    return new OptimizeClause(
            context.keyDesc() == null ? null : getKeysDesc(context.keyDesc()),
            context.partitionDesc() == null ? null : getPartitionDesc(context.partitionDesc(), null),
            context.distributionDesc() == null ? null : (DistributionDesc) visit(context.distributionDesc()),
            context.orderByDesc() == null ? null :
                    visit(context.orderByDesc().identifierList().identifier(), Identifier.class)
                            .stream().map(Identifier::getValue).collect(toList()),
            context.partitionNames() == null ? null : (PartitionNames) visit(context.partitionNames()),
            createPos(context));
}
@Override
public ParseNode visitAddColumnClause(StarRocksParser.AddColumnClauseContext context) {
    // Builds ADD COLUMN <def> [FIRST | AFTER <col>] [TO <rollup>] [PROPERTIES(...)].
    // Fixes: removed a stray ';' and a dead "new HashMap<>()" that was immediately
    // overwritten by getProperties(...); replaced size() != 0 with isEmpty().
    ColumnDef columnDef = getColumnDef(context.columnDesc());
    if (columnDef.isAutoIncrement()) {
        // AUTO_INCREMENT columns may not be added after table creation.
        throw new ParsingException(PARSER_ERROR_MSG.autoIncrementForbid(columnDef.getName(), "ADD"),
                columnDef.getPos());
    }
    ColumnPosition columnPosition = null;
    if (context.FIRST() != null) {
        columnPosition = ColumnPosition.FIRST;
    } else if (context.AFTER() != null) {
        // AFTER <identifier>: place the new column right after the named one.
        StarRocksParser.IdentifierContext identifier = context.identifier(0);
        String afterColumnName = getIdentifierName(identifier);
        columnPosition = new ColumnPosition(afterColumnName, createPos(identifier));
    }
    String rollupName = null;
    if (context.rollupName != null) {
        rollupName = getIdentifierName(context.rollupName);
    }
    Map<String, String> properties = getProperties(context.properties());
    if (columnDef.isGeneratedColumn()) {
        // Generated columns support none of: rollup target, explicit position, properties.
        if (rollupName != null) {
            throw new ParsingException(
                    PARSER_ERROR_MSG.generatedColumnLimit("rollupName", "ADD GENERATED COLUMN"),
                    columnDef.getPos());
        }
        if (columnPosition != null) {
            throw new ParsingException(
                    PARSER_ERROR_MSG.generatedColumnLimit("AFTER", "ADD GENERATED COLUMN"),
                    columnDef.getPos());
        }
        if (!properties.isEmpty()) {
            throw new ParsingException(
                    PARSER_ERROR_MSG.generatedColumnLimit("properties", "ADD GENERATED COLUMN"),
                    columnDef.getPos());
        }
    }
    return new AddColumnClause(columnDef, columnPosition, rollupName, properties, createPos(context));
}
@Override
public ParseNode visitAddColumnsClause(StarRocksParser.AddColumnsClauseContext context) {
    // Builds ADD COLUMN (<def>, <def>, ...) [TO <rollup>] [PROPERTIES(...)].
    // Fixes: the return statement re-parsed the properties via a second
    // getProperties(...) call instead of reusing the local map, and the local
    // had a dead "new HashMap<>()" initializer; also size() != 0 -> !isEmpty().
    List<ColumnDef> columnDefs = getColumnDefs(context.columnDesc());
    Map<String, String> properties = getProperties(context.properties());
    String rollupName = null;
    if (context.rollupName != null) {
        rollupName = getIdentifierName(context.rollupName);
    }
    for (ColumnDef columnDef : columnDefs) {
        if (columnDef.isAutoIncrement()) {
            // AUTO_INCREMENT columns may not be added after table creation.
            throw new ParsingException(PARSER_ERROR_MSG.autoIncrementForbid(columnDef.getName(), "ADD"),
                    columnDef.getPos());
        }
        if (columnDef.isGeneratedColumn()) {
            // Generated columns support neither a rollup target nor properties here.
            if (rollupName != null) {
                throw new ParsingException(
                        PARSER_ERROR_MSG.generatedColumnLimit("rollupName", "ADD GENERATED COLUMN"),
                        columnDef.getPos());
            }
            if (!properties.isEmpty()) {
                throw new ParsingException(
                        PARSER_ERROR_MSG.generatedColumnLimit("properties", "ADD GENERATED COLUMN"),
                        columnDef.getPos());
            }
        }
    }
    return new AddColumnsClause(columnDefs, rollupName, properties, createPos(context));
}
@Override
public ParseNode visitDropColumnClause(StarRocksParser.DropColumnClauseContext context) {
    // DROP COLUMN <name> [FROM <rollup>] [PROPERTIES(...)].
    String columnName = getIdentifierName(context.identifier(0));
    String rollupName = context.rollupName == null ? null : getIdentifierName(context.rollupName);
    return new DropColumnClause(columnName, rollupName, getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitModifyColumnClause(StarRocksParser.ModifyColumnClauseContext context) {
    // MODIFY COLUMN <def> [FIRST | AFTER <col>] [FROM <rollup>] [PROPERTIES(...)].
    ColumnDef columnDef = getColumnDef(context.columnDesc());
    if (columnDef.isAutoIncrement()) {
        // A column cannot be changed into an AUTO_INCREMENT column.
        throw new ParsingException(PARSER_ERROR_MSG.autoIncrementForbid(columnDef.getName(), "MODIFY"),
                columnDef.getPos());
    }
    ColumnPosition columnPosition = null;
    if (context.FIRST() != null) {
        columnPosition = ColumnPosition.FIRST;
    } else if (context.AFTER() != null) {
        // AFTER <identifier>: move the column right after the named one.
        StarRocksParser.IdentifierContext identifier = context.identifier(0);
        String afterColumnName = getIdentifierName(identifier);
        columnPosition = new ColumnPosition(afterColumnName, createPos(identifier));
    }
    String rollupName = null;
    if (context.rollupName != null) {
        rollupName = getIdentifierName(context.rollupName);
    }
    if (columnDef.isGeneratedColumn()) {
        // Generated columns support neither a rollup target nor repositioning.
        if (rollupName != null) {
            throw new ParsingException(PARSER_ERROR_MSG.generatedColumnLimit("rollupName",
                    "MODIFY GENERATED COLUMN"), columnDef.getPos());
        }
        if (columnPosition != null) {
            throw new ParsingException(PARSER_ERROR_MSG.generatedColumnLimit("columnPosition",
                    "MODIFY GENERATED COLUMN"), columnDef.getPos());
        }
    }
    return new ModifyColumnClause(columnDef, columnPosition, rollupName, getProperties(context.properties()),
            createPos(context));
}
@Override
public ParseNode visitColumnRenameClause(StarRocksParser.ColumnRenameClauseContext context) {
    // RENAME COLUMN <old> <new>.
    return new ColumnRenameClause(
            getIdentifierName(context.oldColumn),
            getIdentifierName(context.newColumn),
            createPos(context));
}
@Override
public ParseNode visitReorderColumnsClause(StarRocksParser.ReorderColumnsClauseContext context) {
    // ORDER BY (col, col, ...) [FROM <rollup>] [PROPERTIES(...)].
    List<String> columns = new ArrayList<>();
    for (StarRocksParser.IdentifierContext id : context.identifierList().identifier()) {
        columns.add(getIdentifierName(id));
    }
    String rollupName = context.rollupName == null ? null : getIdentifierName(context.rollupName);
    return new ReorderColumnsClause(columns, rollupName, getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitRollupRenameClause(StarRocksParser.RollupRenameClauseContext context) {
    // RENAME ROLLUP <old> <new>.
    return new RollupRenameClause(
            ((Identifier) visit(context.rollupName)).getValue(),
            ((Identifier) visit(context.newRollupName)).getValue(),
            createPos(context));
}
@Override
public ParseNode visitCompactionClause(StarRocksParser.CompactionClauseContext ctx) {
    // [BASE | CUMULATIVE] COMPACT [<partition> | (<p1>, <p2>, ...)].
    NodePosition pos = createPos(ctx);
    // Absence of CUMULATIVE means base compaction.
    boolean baseCompaction = ctx.CUMULATIVE() == null;
    if (ctx.identifier() != null) {
        // Single partition name.
        final String partitionName = ((Identifier) visit(ctx.identifier())).getValue();
        return new CompactionClause(Collections.singletonList(partitionName), baseCompaction, pos);
    } else if (ctx.identifierList() != null) {
        // Parenthesized partition list.
        final List<Identifier> identifierList = visit(ctx.identifierList().identifier(), Identifier.class);
        return new CompactionClause(identifierList.stream().map(Identifier::getValue).collect(toList()),
                baseCompaction, pos);
    } else {
        // No partitions given: compact the whole table.
        return new CompactionClause(baseCompaction, pos);
    }
}
@Override
public ParseNode visitAddPartitionClause(StarRocksParser.AddPartitionClauseContext context) {
    // ADD [TEMPORARY] PARTITION ... — exactly one of the four partition-desc
    // alternatives is present; distribution and properties are optional.
    boolean temporary = context.TEMPORARY() != null;
    PartitionDesc partitionDesc = null;
    if (context.singleRangePartition() != null) {
        partitionDesc = (PartitionDesc) visitSingleRangePartition(context.singleRangePartition());
    } else if (context.multiRangePartition() != null) {
        partitionDesc = (PartitionDesc) visitMultiRangePartition(context.multiRangePartition());
    } else if (context.singleItemListPartitionDesc() != null) {
        partitionDesc = (PartitionDesc) visitSingleItemListPartitionDesc(context.singleItemListPartitionDesc());
    } else if (context.multiItemListPartitionDesc() != null) {
        partitionDesc = (PartitionDesc) visitMultiItemListPartitionDesc(context.multiItemListPartitionDesc());
    }
    DistributionDesc distributionDesc = null;
    if (context.distributionDesc() != null) {
        distributionDesc = (DistributionDesc) visitDistributionDesc(context.distributionDesc());
    }
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new AddPartitionClause(partitionDesc, distributionDesc, properties, temporary, createPos(context));
}
@Override
public ParseNode visitDropPartitionClause(StarRocksParser.DropPartitionClauseContext context) {
    // DROP [TEMPORARY] PARTITION [IF EXISTS] <name> [FORCE].
    String partitionName = ((Identifier) visit(context.identifier())).getValue();
    return new DropPartitionClause(
            context.EXISTS() != null,
            partitionName,
            context.TEMPORARY() != null,
            context.FORCE() != null,
            createPos(context));
}
@Override
public ParseNode visitTruncatePartitionClause(StarRocksParser.TruncatePartitionClauseContext context) {
    // TRUNCATE PARTITION [(<names>)]; a null name set truncates all partitions.
    PartitionNames partitionNames = context.partitionNames() == null
            ? null
            : (PartitionNames) visit(context.partitionNames());
    return new TruncatePartitionClause(partitionNames, createPos(context));
}
@Override
public ParseNode visitModifyPartitionClause(StarRocksParser.ModifyPartitionClauseContext context) {
    // MODIFY PARTITION <p> | (<p1>, ...) | (*) SET (properties).
    // Properties stay null (not empty) when the clause carries none.
    Map<String, String> properties = null;
    NodePosition pos = createPos(context);
    if (context.propertyList() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    if (context.identifier() != null) {
        // Single partition name.
        final String partitionName = ((Identifier) visit(context.identifier())).getValue();
        return new ModifyPartitionClause(Collections.singletonList(partitionName), properties, pos);
    } else if (context.identifierList() != null) {
        // Explicit partition list.
        final List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class);
        return new ModifyPartitionClause(identifierList.stream().map(Identifier::getValue).collect(toList()),
                properties, pos);
    } else {
        // (*) form: apply to every partition.
        return ModifyPartitionClause.createStarClause(properties, pos);
    }
}
@Override
public ParseNode visitReplacePartitionClause(StarRocksParser.ReplacePartitionClauseContext context) {
    // REPLACE PARTITION (<names>) WITH TEMPORARY PARTITION (<temp names>) [PROPERTIES(...)].
    PartitionNames current = (PartitionNames) visit(context.parName);
    PartitionNames replacement = (PartitionNames) visit(context.tempParName);
    return new ReplacePartitionClause(current, replacement,
            getProperties(context.properties()), createPos(context));
}
@Override
public ParseNode visitPartitionRenameClause(StarRocksParser.PartitionRenameClauseContext context) {
    // RENAME PARTITION <old> <new>.
    return new PartitionRenameClause(
            ((Identifier) visit(context.parName)).getValue(),
            ((Identifier) visit(context.newParName)).getValue(),
            createPos(context));
}
/**
 * Resolves a qualified name into a {@code PipeName}: either "db.pipe" (two parts)
 * or "pipe" (one part). Any other arity is a parse error.
 *
 * <p>Fix: the original tracked db/pipe through nullable locals and re-tested them
 * in a second if-chain whose final {@code else throw} was unreachable (the first
 * chain already guaranteed a non-null pipe name). Returning directly from each
 * arity case removes the dead branch without changing behavior.
 */
private PipeName resolvePipeName(StarRocksParser.QualifiedNameContext context) {
    QualifiedName qualifiedName = getQualifiedName(context);
    List<String> parts = qualifiedName.getParts();
    if (parts.size() == 2) {
        return new PipeName(createPos(context), parts.get(0), parts.get(1));
    } else if (parts.size() == 1) {
        return new PipeName(createPos(context), parts.get(0));
    }
    throw new ParsingException(PARSER_ERROR_MSG.invalidPipeName(qualifiedName.toString()));
}
@Override
public ParseNode visitCreatePipeStatement(StarRocksParser.CreatePipeStatementContext context) {
    // CREATE [OR REPLACE] PIPE [IF NOT EXISTS] <name> [PROPERTIES(...)] AS <insert>.
    PipeName pipeName = resolvePipeName(context.qualifiedName());
    boolean ifNotExists = context.ifNotExists() != null && context.ifNotExists().IF() != null;
    boolean replace = context.orReplace() != null && context.orReplace().OR() != null;
    // OR REPLACE and IF NOT EXISTS are mutually exclusive.
    if (ifNotExists && replace) {
        throw new ParsingException(PARSER_ERROR_MSG.conflictedOptions("OR REPLACE", "IF NOT EXISTS"));
    }
    ParseNode insertNode = visit(context.insertStatement());
    // Defensive: the AS body must parse to an INSERT statement.
    if (!(insertNode instanceof InsertStmt)) {
        throw new ParsingException(PARSER_ERROR_MSG.unsupportedStatement(insertNode.toSql()),
                context.insertStatement());
    }
    // Case-insensitive property keys.
    Map<String, String> properties = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    if (context.properties() != null) {
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    InsertStmt insertStmt = (InsertStmt) insertNode;
    // Character offset of the INSERT within the original SQL, so the raw
    // insert text can be recovered later.
    int insertSqlIndex = context.insertStatement().start.getStartIndex();
    return new CreatePipeStmt(ifNotExists, replace, pipeName, insertSqlIndex, insertStmt, properties,
            createPos(context));
}
@Override
public ParseNode visitDropPipeStatement(StarRocksParser.DropPipeStatementContext context) {
    // DROP PIPE [IF EXISTS] <name>.
    return new DropPipeStmt(
            context.IF() != null,
            resolvePipeName(context.qualifiedName()),
            createPos(context));
}
@Override
public ParseNode visitShowPipeStatement(StarRocksParser.ShowPipeStatementContext context) {
    // SHOW PIPES [FROM <db>] [LIKE '<pattern>' | WHERE <expr>] [ORDER BY ...] [LIMIT ...].
    String dbName = null;
    if (context.qualifiedName() != null) {
        dbName = getQualifiedName(context.qualifiedName()).toString();
    }
    List<OrderByElement> orderBy = null;
    if (context.ORDER() != null) {
        orderBy = new ArrayList<>();
        orderBy.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limit = null;
    if (context.limitElement() != null) {
        limit = (LimitElement) visit(context.limitElement());
    }
    // LIKE and WHERE are alternatives; exactly one of pattern/predicate is set.
    if (context.LIKE() != null) {
        StringLiteral stringLiteral = (StringLiteral) visit(context.pattern);
        return new ShowPipeStmt(dbName, stringLiteral.getValue(), null, orderBy, limit, createPos(context));
    } else if (context.WHERE() != null) {
        return new ShowPipeStmt(dbName, null, (Expr) visit(context.expression()), orderBy, limit,
                createPos(context));
    } else {
        return new ShowPipeStmt(dbName, null, null, orderBy, limit, createPos(context));
    }
}
@Override
public ParseNode visitDescPipeStatement(StarRocksParser.DescPipeStatementContext context) {
    // DESC PIPE <name>.
    return new DescPipeStmt(createPos(context), resolvePipeName(context.qualifiedName()));
}
@Override
public ParseNode visitAlterPipeClause(StarRocksParser.AlterPipeClauseContext context) {
    // One of: SUSPEND | RESUME | RETRY ALL | RETRY FILE '<name>' | SET (props).
    if (context.SUSPEND() != null) {
        return new AlterPipePauseResume(createPos(context), true);
    } else if (context.RESUME() != null) {
        return new AlterPipePauseResume(createPos(context), false);
    } else if (context.RETRY() != null) {
        if (context.ALL() != null) {
            return new AlterPipeClauseRetry(createPos(context), true);
        } else {
            // RETRY FILE: retry a single named file.
            String fileName = ((StringLiteral) visitString(context.fileName)).getStringValue();
            return new AlterPipeClauseRetry(createPos(context), false, fileName);
        }
    } else if (context.SET() != null) {
        Map<String, String> properties = getPropertyList(context.propertyList());
        // SET with no properties is meaningless; reject it early.
        if (MapUtils.isEmpty(properties)) {
            throw new ParsingException("empty property");
        }
        return new AlterPipeSetProperty(createPos(context), properties);
    } else {
        throw new ParsingException(PARSER_ERROR_MSG.unsupportedOpWithInfo(context.toString()));
    }
}
@Override
public ParseNode visitAlterPipeStatement(StarRocksParser.AlterPipeStatementContext context) {
    // ALTER PIPE <name> <clause>.
    PipeName target = resolvePipeName(context.qualifiedName());
    AlterPipeClause clause = (AlterPipeClause) visit(context.alterPipeClause());
    return new AlterPipeStmt(createPos(context), target, clause);
}
@Override
public ParseNode visitQueryStatement(StarRocksParser.QueryStatementContext context) {
    // Wraps a query relation into a QueryStatement and attaches the optional
    // INTO OUTFILE, EXPLAIN, and TRACE decorations.
    QueryRelation queryRelation = (QueryRelation) visit(context.queryRelation());
    QueryStatement queryStatement = new QueryStatement(queryRelation);
    if (context.outfile() != null) {
        queryStatement.setOutFileClause((OutFileClause) visit(context.outfile()));
    }
    if (context.explainDesc() != null) {
        queryStatement.setIsExplain(true, getExplainType(context.explainDesc()));
    }
    if (context.optimizerTrace() != null) {
        // TRACE [LOGS|VALUES|TIMES|ALL] [<module>]; module defaults to "base".
        String module = "base";
        if (context.optimizerTrace().identifier() != null) {
            module = ((Identifier) visit(context.optimizerTrace().identifier())).getValue();
        }
        queryStatement.setIsTrace(getTraceMode(context.optimizerTrace()), module);
    }
    return queryStatement;
}
// Maps the TRACE keyword variant to the optimizer tracer mode; no keyword means NONE.
private Tracers.Mode getTraceMode(StarRocksParser.OptimizerTraceContext context) {
    if (context.LOGS() != null) {
        return Tracers.Mode.LOGS;
    }
    if (context.VALUES() != null) {
        return Tracers.Mode.VARS;
    }
    if (context.TIMES() != null) {
        return Tracers.Mode.TIMER;
    }
    if (context.ALL() != null) {
        return Tracers.Mode.TIMING;
    }
    return Tracers.Mode.NONE;
}
@Override
public ParseNode visitQueryRelation(StarRocksParser.QueryRelationContext context) {
    // Attaches any WITH-clause CTEs onto the query body.
    QueryRelation query = (QueryRelation) visit(context.queryNoWith());
    if (context.withClause() != null) {
        visit(context.withClause().commonTableExpression(), CTERelation.class)
                .forEach(query::addCTERelation);
    }
    return query;
}
@Override
public ParseNode visitCommonTableExpression(StarRocksParser.CommonTableExpressionContext context) {
    // <name> [(col aliases)] AS (<query>) — one CTE definition.
    QueryRelation cteQuery = (QueryRelation) visit(context.queryRelation());
    String cteName = ((Identifier) visit(context.name)).getValue();
    return new CTERelation(
            RelationId.of(cteQuery).hashCode(),
            cteName,
            getColumnNames(context.columnAliases()),
            new QueryStatement(cteQuery),
            cteQuery.getPos());
}
@Override
public ParseNode visitQueryNoWith(StarRocksParser.QueryNoWithContext context) {
    // Attaches the optional ORDER BY and LIMIT onto the query body.
    QueryRelation body = (QueryRelation) visit(context.queryPrimary());
    List<OrderByElement> ordering = new ArrayList<>();
    if (context.ORDER() != null) {
        ordering.addAll(visit(context.sortItem(), OrderByElement.class));
    }
    LimitElement limit = context.limitElement() == null
            ? null
            : (LimitElement) visit(context.limitElement());
    body.setOrderBy(ordering);
    body.setLimit(limit);
    return body;
}
@Override
public ParseNode visitSetOperation(StarRocksParser.SetOperationContext context) {
    NodePosition pos = createPos(context);
    QueryRelation left = (QueryRelation) visit(context.left);
    QueryRelation right = (QueryRelation) visit(context.right);

    // Set operations default to DISTINCT unless ALL is given explicitly.
    boolean distinct = true;
    if (context.setQuantifier() != null) {
        if (context.setQuantifier().DISTINCT() != null) {
            distinct = true;
        } else if (context.setQuantifier().ALL() != null) {
            distinct = false;
        }
    }
    SetQualifier qualifier = distinct ? SetQualifier.DISTINCT : SetQualifier.ALL;

    // When the left child is already the same kind of set operation with the
    // same qualifier, fold the right child into it instead of nesting nodes.
    switch (context.operator.getType()) {
        case StarRocksLexer.UNION:
            if (left instanceof UnionRelation && ((UnionRelation) left).getQualifier().equals(qualifier)) {
                ((UnionRelation) left).addRelation(right);
                return left;
            }
            return new UnionRelation(Lists.newArrayList(left, right), qualifier, pos);
        case StarRocksLexer.INTERSECT:
            if (left instanceof IntersectRelation
                    && ((IntersectRelation) left).getQualifier().equals(qualifier)) {
                ((IntersectRelation) left).addRelation(right);
                return left;
            }
            return new IntersectRelation(Lists.newArrayList(left, right), qualifier, pos);
        default:
            // Remaining operator: EXCEPT.
            if (left instanceof ExceptRelation && ((ExceptRelation) left).getQualifier().equals(qualifier)) {
                ((ExceptRelation) left).addRelation(right);
                return left;
            }
            return new ExceptRelation(Lists.newArrayList(left, right), qualifier, pos);
    }
}
private Map<String, String> extractVarHintValues(List<HintNode> hints) {
    // Gather key/value pairs from every SET_VAR hint; other hint kinds are ignored.
    Map<String, String> varValues = new HashMap<>();
    if (CollectionUtils.isEmpty(hints)) {
        return varValues;
    }
    hints.stream()
            .filter(SetVarHint.class::isInstance)
            .forEach(hint -> varValues.putAll(hint.getValue()));
    return varValues;
}
@Override
public ParseNode visitQuerySpecification(StarRocksParser.QuerySpecificationContext context) {
    // Builds a SelectRelation from a SELECT ... [FROM] [WHERE] [GROUP BY]
    // [HAVING] specification. A QUALIFY clause triggers a rewrite into an
    // outer select over a subquery (see below).
    Relation from = null;
    List<SelectListItem> selectItems = visit(context.selectItem(), SelectListItem.class);
    if (context.fromClause() instanceof StarRocksParser.DualContext) {
        // FROM DUAL: a star select item has no table to expand against.
        for (SelectListItem item : selectItems) {
            if (item.isStar()) {
                throw new ParsingException(PARSER_ERROR_MSG.noTableUsed(), item.getPos());
            }
        }
    } else {
        StarRocksParser.FromContext fromContext = (StarRocksParser.FromContext) context.fromClause();
        if (fromContext.relations() != null) {
            // Comma-separated FROM relations are folded into a left-deep chain
            // of joins with no explicit join type or predicate.
            List<Relation> relations = visit(fromContext.relations().relation(), Relation.class);
            Iterator<Relation> iterator = relations.iterator();
            Relation relation = iterator.next();
            while (iterator.hasNext()) {
                Relation next = iterator.next();
                relation = new JoinRelation(null, relation, next, null, false);
            }
            from = relation;
        }
    }
    /*
    from == null means a statement without from or from dual, add a single row of null values here,
    so that the semantics are the same, and the processing of subsequent query logic can be simplified,
    such as select sum(1) or select sum(1) from dual, will be converted to select sum(1) from (values(null)) t.
    This can share the same logic as select sum(1) from table
    */
    if (from == null) {
        from = ValuesRelation.newDualRelation();
    }
    boolean isDistinct = context.setQuantifier() != null && context.setQuantifier().DISTINCT() != null;
    SelectList selectList = new SelectList(selectItems, isDistinct);
    selectList.setHintNodes(hintMap.get(context));
    SelectRelation resultSelectRelation = new SelectRelation(
            selectList,
            from,
            (Expr) visitIfPresent(context.where),
            (GroupByClause) visitIfPresent(context.groupingElement()),
            (Expr) visitIfPresent(context.having),
            createPos(context));
    if (context.qualifyFunction != null) {
        // QUALIFY rewrite: wrap this select in a subquery aliased
        // __QUALIFY__TABLE, alias the trailing window-function select item as
        // __QUALIFY__VALUE, and compare it against the literal bound in an
        // outer select.
        resultSelectRelation.setOrderBy(new ArrayList<>());
        SubqueryRelation subqueryRelation = new SubqueryRelation(new QueryStatement(resultSelectRelation));
        TableName qualifyTableName = new TableName(null, "__QUALIFY__TABLE");
        subqueryRelation.setAlias(qualifyTableName);
        // The qualify window function is parsed as the last select item.
        SelectListItem windowFunction = selectItems.get(selectItems.size() - 1);
        windowFunction.setAlias("__QUALIFY__VALUE");
        long selectValue = Long.parseLong(context.limit.getText());
        // Re-project every original select item (minus the window function)
        // from the subquery; only plain column references are supported.
        List<SelectListItem> selectItemsVirtual = Lists.newArrayList(selectItems);
        selectItemsVirtual.remove(selectItemsVirtual.size() - 1);
        List<SelectListItem> selectItemsOuter = new ArrayList<>();
        for (SelectListItem item : selectItemsVirtual) {
            if (item.getExpr() instanceof SlotRef) {
                SlotRef exprRef = (SlotRef) item.getExpr();
                String columnName = item.getAlias() == null ? exprRef.getColumnName() : item.getAlias();
                SlotRef resultSlotRef = new SlotRef(qualifyTableName, columnName);
                selectItemsOuter.add(new SelectListItem(resultSlotRef, null));
            } else {
                throw new ParsingException("Can't support result other than column.");
            }
        }
        SelectList selectListOuter = new SelectList(selectItemsOuter, isDistinct);
        IntLiteral rightValue = new IntLiteral(selectValue);
        SlotRef leftSlotRef = new SlotRef(qualifyTableName, "__QUALIFY__VALUE");
        BinaryType op = getComparisonOperator(((TerminalNode) context.comparisonOperator()
                .getChild(0)).getSymbol());
        return new SelectRelation(selectListOuter, subqueryRelation,
                new BinaryPredicate(op, leftSlotRef, rightValue), null, null, createPos(context));
    } else {
        return resultSelectRelation;
    }
}
@Override
public ParseNode visitSelectSingle(StarRocksParser.SelectSingleContext context) {
    // The alias may be given as a bare identifier or as a quoted string.
    String alias;
    if (context.identifier() != null) {
        alias = ((Identifier) visit(context.identifier())).getValue();
    } else if (context.string() != null) {
        alias = ((StringLiteral) visit(context.string())).getStringValue();
    } else {
        alias = null;
    }
    return new SelectListItem((Expr) visit(context.expression()), alias, createPos(context));
}
@Override
public ParseNode visitSelectAll(StarRocksParser.SelectAllContext context) {
    NodePosition pos = createPos(context);
    // Unqualified "*" carries no table name; "t.*" resolves the qualifier.
    if (context.qualifiedName() == null) {
        return new SelectListItem(null, pos);
    }
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    return new SelectListItem(qualifiedNameToTableName(qualifiedName), pos);
}
@Override
public ParseNode visitSingleGroupingSet(StarRocksParser.SingleGroupingSetContext context) {
    // Plain GROUP BY expression list (no ROLLUP/CUBE/GROUPING SETS).
    ArrayList<Expr> groupingExprs =
            new ArrayList<>(visit(context.expressionList().expression(), Expr.class));
    return new GroupByClause(groupingExprs, GroupByClause.GroupingType.GROUP_BY, createPos(context));
}
@Override
public ParseNode visitRollup(StarRocksParser.RollupContext context) {
    // GROUP BY ROLLUP (expr-list)
    ArrayList<Expr> exprs = new ArrayList<>(visit(context.expressionList().expression(), Expr.class));
    return new GroupByClause(exprs, GroupByClause.GroupingType.ROLLUP, createPos(context));
}
@Override
public ParseNode visitCube(StarRocksParser.CubeContext context) {
    // GROUP BY CUBE (expr-list)
    ArrayList<Expr> exprs = new ArrayList<>(visit(context.expressionList().expression(), Expr.class));
    return new GroupByClause(exprs, GroupByClause.GroupingType.CUBE, createPos(context));
}
@Override
public ParseNode visitMultipleGroupingSets(StarRocksParser.MultipleGroupingSetsContext context) {
    // GROUPING SETS ((...), (...)) — each set yields its own expression list.
    List<ArrayList<Expr>> sets = new ArrayList<>();
    for (StarRocksParser.GroupingSetContext setContext : context.groupingSet()) {
        sets.add(new ArrayList<>(visit(setContext.expression(), Expr.class)));
    }
    return new GroupByClause(sets, GroupByClause.GroupingType.GROUPING_SETS, createPos(context));
}
@Override
public ParseNode visitGroupingOperation(StarRocksParser.GroupingOperationContext context) {
    // GROUPING(expr, ...) is represented as a dedicated function-call node.
    List<Expr> args = visit(context.expression(), Expr.class);
    return new GroupingFunctionCallExpr("grouping", args, createPos(context));
}
@Override
public ParseNode visitWindowFrame(StarRocksParser.WindowFrameContext context) {
    NodePosition pos = createPos(context);
    AnalyticWindow.Type frameType = getFrameType(context.frameType);
    AnalyticWindow.Boundary startBound = (AnalyticWindow.Boundary) visit(context.start);
    // "BETWEEN start AND end" supplies an explicit end bound; otherwise the
    // frame only has a start bound.
    if (context.end == null) {
        return new AnalyticWindow(frameType, startBound, pos);
    }
    AnalyticWindow.Boundary endBound = (AnalyticWindow.Boundary) visit(context.end);
    return new AnalyticWindow(frameType, startBound, endBound, pos);
}
private static AnalyticWindow.Type getFrameType(Token type) {
    // A frame is either RANGE or ROWS; any non-RANGE token means ROWS.
    return type.getType() == StarRocksLexer.RANGE
            ? AnalyticWindow.Type.RANGE
            : AnalyticWindow.Type.ROWS;
}
@Override
public ParseNode visitUnboundedFrame(StarRocksParser.UnboundedFrameContext context) {
    // UNBOUNDED PRECEDING/FOLLOWING carries no bound expression.
    AnalyticWindow.BoundaryType boundType = getUnboundedFrameBoundType(context.boundType);
    return new AnalyticWindow.Boundary(boundType, null);
}
@Override
public ParseNode visitBoundedFrame(StarRocksParser.BoundedFrameContext context) {
    // "<expr> PRECEDING/FOLLOWING" carries an explicit bound expression.
    Expr bound = (Expr) visit(context.expression());
    return new AnalyticWindow.Boundary(getBoundedFrameBoundType(context.boundType), bound);
}
@Override
public ParseNode visitCurrentRowBound(StarRocksParser.CurrentRowBoundContext context) {
    // CURRENT ROW bound: no bound expression is attached.
    return new AnalyticWindow.Boundary(AnalyticWindow.BoundaryType.CURRENT_ROW, null);
}
private static AnalyticWindow.BoundaryType getBoundedFrameBoundType(Token token) {
    // PRECEDING maps directly; any other token means FOLLOWING.
    return token.getType() == StarRocksLexer.PRECEDING
            ? AnalyticWindow.BoundaryType.PRECEDING
            : AnalyticWindow.BoundaryType.FOLLOWING;
}
private static AnalyticWindow.BoundaryType getUnboundedFrameBoundType(Token token) {
    // PRECEDING maps directly; any other token means UNBOUNDED FOLLOWING.
    return token.getType() == StarRocksLexer.PRECEDING
            ? AnalyticWindow.BoundaryType.UNBOUNDED_PRECEDING
            : AnalyticWindow.BoundaryType.UNBOUNDED_FOLLOWING;
}
@Override
public ParseNode visitSortItem(StarRocksParser.SortItemContext context) {
    boolean isAsc = getOrderingType(context.ordering);
    // The default NULLS FIRST/LAST placement is derived from the sort
    // direction (and sql mode) when no explicit null ordering is given.
    boolean nullsFirst = getNullOrderingType(isAsc, context.nullOrdering);
    return new OrderByElement((Expr) visit(context.expression()), isAsc, nullsFirst, createPos(context));
}
private boolean getNullOrderingType(boolean isAsc, Token token) {
    // Returns true when NULLs should sort first. Without an explicit
    // NULLS FIRST/LAST token, the default depends on the sort direction and
    // on whether the MODE_SORT_NULLS_LAST sql mode bit is set.
    if (token == null) {
        return (!SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_SORT_NULLS_LAST)) == isAsc;
    }
    return token.getType() == StarRocksLexer.FIRST;
}
private static boolean getOrderingType(Token token) {
    // A missing ASC/DESC keyword means ascending order.
    return token == null || token.getType() == StarRocksLexer.ASC;
}
@Override
public ParseNode visitLimitElement(StarRocksParser.LimitElementContext context) {
    // Prepared-statement placeholders are not allowed in LIMIT/OFFSET.
    boolean limitIsParam = context.limit.getText().equals("?");
    boolean offsetIsParam = context.offset != null && context.offset.getText().equals("?");
    if (limitIsParam || offsetIsParam) {
        throw new ParsingException("using parameter(?) as limit or offset not supported");
    }
    // OFFSET defaults to 0 when not written.
    long offset = context.offset == null ? 0 : Long.parseLong(context.offset.getText());
    long limit = Long.parseLong(context.limit.getText());
    return new LimitElement(offset, limit, createPos(context));
}
@Override
public ParseNode visitRelation(StarRocksParser.RelationContext context) {
    // Chain every join clause onto the primary relation, left-associatively:
    // each join node receives the accumulated relation as its left child.
    Relation current = (Relation) visit(context.relationPrimary());
    for (JoinRelation join : visit(context.joinRelation(), JoinRelation.class)) {
        join.setLeft(current);
        current = join;
    }
    return current;
}
@Override
public ParseNode visitParenthesizedRelation(StarRocksParser.ParenthesizedRelationContext context) {
    List<StarRocksParser.RelationContext> relationContexts = context.relations().relation();
    // A single parenthesized relation is transparent; just unwrap it.
    if (relationContexts.size() == 1) {
        return visit(relationContexts.get(0));
    }
    // Multiple comma-separated relations fold into a left-deep join chain
    // with no explicit join type or predicate.
    List<Relation> relations = visit(relationContexts, Relation.class);
    Relation result = relations.get(0);
    for (int i = 1; i < relations.size(); i++) {
        result = new JoinRelation(null, result, relations.get(i), null, false);
    }
    return result;
}
@Override
public ParseNode visitTableAtom(StarRocksParser.TableAtomContext context) {
    // A base-table reference with optional PARTITION, TABLET, REPLICA,
    // bracket-hint, alias, and temporal clauses. `stop` is re-assigned per
    // clause so the node position covers the last clause that was parsed.
    Token start = context.start;
    Token stop = context.stop;
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    TableName tableName = qualifiedNameToTableName(qualifiedName);
    PartitionNames partitionNames = null;
    if (context.partitionNames() != null) {
        stop = context.partitionNames().stop;
        partitionNames = (PartitionNames) visit(context.partitionNames());
    }
    // TABLET (id, id, ...) — raw tablet ids parsed as longs.
    List<Long> tabletIds = Lists.newArrayList();
    if (context.tabletList() != null) {
        stop = context.tabletList().stop;
        tabletIds = context.tabletList().INTEGER_VALUE().stream().map(ParseTree::getText)
                .map(Long::parseLong).collect(toList());
    }
    // REPLICA (id, id, ...) — raw replica ids parsed as longs.
    List<Long> replicaLists = Lists.newArrayList();
    if (context.replicaList() != null) {
        stop = context.replicaList().stop;
        replicaLists = context.replicaList().INTEGER_VALUE().stream().map(ParseTree::getText).map(Long::parseLong)
                .collect(toList());
    }
    TableRelation tableRelation =
            new TableRelation(tableName, partitionNames, tabletIds, replicaLists, createPos(start, stop));
    if (context.bracketHint() != null) {
        // Bracketed table hints; each identifier is attached verbatim.
        for (Identifier identifier : visit(context.bracketHint().identifier(), Identifier.class)) {
            tableRelation.addTableHint(identifier.getValue());
        }
    }
    if (context.alias != null) {
        Identifier identifier = (Identifier) visit(context.alias);
        tableRelation.setAlias(new TableName(null, identifier.getValue()));
    }
    if (context.temporalClause() != null) {
        // The temporal clause is preserved as raw, space-joined token text
        // (note: the result carries a trailing space).
        StringBuilder sb = new StringBuilder();
        for (ParseTree child : context.temporalClause().children) {
            sb.append(child.getText());
            sb.append(" ");
        }
        tableRelation.setTemporalClause(sb.toString());
    }
    return tableRelation;
}
@Override
public ParseNode visitJoinRelation(StarRocksParser.JoinRelationContext context) {
    // `left` is intentionally null here: visitRelation wires the left side in
    // afterwards when it chains join clauses onto the primary relation.
    Relation left = null;
    Relation right = (Relation) visit(context.rightRelation);
    // Resolve the join operator from the CROSS/INNER or
    // LEFT/RIGHT/FULL [OUTER|SEMI|ANTI] keyword combination.
    JoinOperator joinType = JoinOperator.INNER_JOIN;
    if (context.crossOrInnerJoinType() != null) {
        if (context.crossOrInnerJoinType().CROSS() != null) {
            joinType = JoinOperator.CROSS_JOIN;
        } else {
            joinType = JoinOperator.INNER_JOIN;
        }
    } else if (context.outerAndSemiJoinType().LEFT() != null) {
        if (context.outerAndSemiJoinType().OUTER() != null) {
            joinType = JoinOperator.LEFT_OUTER_JOIN;
        } else if (context.outerAndSemiJoinType().SEMI() != null) {
            joinType = JoinOperator.LEFT_SEMI_JOIN;
        } else if (context.outerAndSemiJoinType().ANTI() != null) {
            joinType = JoinOperator.LEFT_ANTI_JOIN;
        } else {
            // Bare LEFT JOIN is treated as LEFT OUTER JOIN.
            joinType = JoinOperator.LEFT_OUTER_JOIN;
        }
    } else if (context.outerAndSemiJoinType().RIGHT() != null) {
        if (context.outerAndSemiJoinType().OUTER() != null) {
            joinType = JoinOperator.RIGHT_OUTER_JOIN;
        } else if (context.outerAndSemiJoinType().SEMI() != null) {
            joinType = JoinOperator.RIGHT_SEMI_JOIN;
        } else if (context.outerAndSemiJoinType().ANTI() != null) {
            joinType = JoinOperator.RIGHT_ANTI_JOIN;
        } else {
            // Bare RIGHT JOIN is treated as RIGHT OUTER JOIN.
            joinType = JoinOperator.RIGHT_OUTER_JOIN;
        }
    } else if (context.outerAndSemiJoinType().FULL() != null) {
        joinType = JoinOperator.FULL_OUTER_JOIN;
    }
    // Join condition: either ON <expr> or USING (col, ...), never both.
    Expr predicate = null;
    List<String> usingColNames = null;
    if (context.joinCriteria() != null) {
        if (context.joinCriteria().ON() != null) {
            predicate = (Expr) visit(context.joinCriteria().expression());
        } else {
            List<Identifier> criteria = visit(context.joinCriteria().identifier(), Identifier.class);
            usingColNames = criteria.stream().map(Identifier::getValue).collect(Collectors.toList());
        }
    }
    JoinRelation joinRelation = new JoinRelation(joinType, left, right, predicate,
            context.LATERAL() != null, createPos(context));
    joinRelation.setUsingColNames(usingColNames);
    if (context.bracketHint() != null) {
        // Bracket hint: the first identifier is the join hint; an optional
        // skew column and literal value list may follow.
        joinRelation.setJoinHint(((Identifier) visit(context.bracketHint().identifier(0))).getValue());
        if (context.bracketHint().primaryExpression() != null) {
            joinRelation.setSkewColumn((Expr) visit(context.bracketHint().primaryExpression()));
        }
        if (context.bracketHint().literalExpressionList() != null) {
            joinRelation.setSkewValues(visit(context.bracketHint().literalExpressionList().literalExpression(),
                    Expr.class));
        }
    }
    return joinRelation;
}
@Override
public ParseNode visitInlineTable(StarRocksParser.InlineTableContext context) {
    // VALUES (...), (...) — each row constructor contributes one row of exprs.
    List<List<Expr>> rows = visit(context.rowConstructor(), ValueList.class).stream()
            .map(ValueList::getRow)
            .collect(toList());
    // Without explicit aliases, columns are named column_0, column_1, ...
    List<String> colNames = getColumnNames(context.columnAliases());
    if (colNames == null) {
        colNames = new ArrayList<>();
        for (int i = 0; i < rows.get(0).size(); ++i) {
            colNames.add("column_" + i);
        }
    }
    ValuesRelation values = new ValuesRelation(rows, colNames, createPos(context));
    if (context.alias != null) {
        values.setAlias(new TableName(null, ((Identifier) visit(context.alias)).getValue()));
    }
    return values;
}
@Override
public ParseNode visitNamedArguments(StarRocksParser.NamedArgumentsContext context) {
    // name => expr : both sides of the arrow must be present and non-blank.
    String name = ((Identifier) visit(context.identifier())).getValue();
    // trim().isEmpty() rejects any whitespace-only name; the previous check
    // (isEmpty() || equals(" ")) let names like "  " slip through.
    if (name == null || name.trim().isEmpty()) {
        throw new ParsingException(PARSER_ERROR_MSG.unsupportedExpr(" The left of => shouldn't be empty"));
    }
    Expr node = (Expr) visit(context.expression());
    if (node == null) {
        throw new ParsingException(PARSER_ERROR_MSG.unsupportedExpr(" The right of => shouldn't be null"));
    }
    return new NamedArgument(name, node);
}
@Override
public ParseNode visitTableFunction(StarRocksParser.TableFunctionContext context) {
    // A table function in FROM position, e.g. unnest(...).
    QualifiedName functionName = getQualifiedName(context.qualifiedName());
    List<Expr> arguments = visit(context.expressionList().expression(), Expr.class);
    FunctionCallExpr call =
            new FunctionCallExpr(FunctionName.createFnName(functionName.toString()), arguments);
    TableFunctionRelation relation = new TableFunctionRelation(call);
    if (context.alias != null) {
        relation.setAlias(new TableName(null, ((Identifier) visit(context.alias)).getValue()));
    }
    relation.setColumnOutputNames(getColumnNames(context.columnAliases()));
    return relation;
}
@Override
public ParseNode visitNormalizedTableFunction(StarRocksParser.NormalizedTableFunctionContext context) {
    QualifiedName functionName = getQualifiedName(context.qualifiedName());
    // Arguments are either all positional or all named (name => expr).
    List<Expr> parameters;
    if (context.argumentList().expressionList() != null) {
        parameters = visit(context.argumentList().expressionList().expression(), Expr.class);
    } else {
        parameters = visit(context.argumentList().namedArgumentList().namedArgument(), Expr.class);
    }
    // count() avoids materializing an intermediate list just to take its size.
    long namedArgNum = parameters.stream().filter(NamedArgument.class::isInstance).count();
    // Mixing named and positional arguments is rejected.
    if (namedArgNum > 0 && namedArgNum < parameters.size()) {
        throw new SemanticException("All arguments must be passed by name or all must be passed positionally");
    }
    FunctionCallExpr functionCallExpr =
            new FunctionCallExpr(FunctionName.createFnName(functionName.toString()), parameters,
                    createPos(context));
    TableFunctionRelation relation = new TableFunctionRelation(functionCallExpr);
    if (context.alias != null) {
        Identifier identifier = (Identifier) visit(context.alias);
        relation.setAlias(new TableName(null, identifier.getValue()));
    }
    relation.setColumnOutputNames(getColumnNames(context.columnAliases()));
    return new NormalizedTableFunctionRelation(relation);
}
@Override
public ParseNode visitFileTableFunction(StarRocksParser.FileTableFunctionContext context) {
    // FILES(...) table function: behavior is driven entirely by its property list.
    Map<String, String> properties = getPropertyList(context.propertyList());
    return new FileTableFunctionRelation(properties, NodePosition.ZERO);
}
@Override
public ParseNode visitRowConstructor(StarRocksParser.RowConstructorContext context) {
    // One parenthesized row of a VALUES list.
    List<Expr> exprs = visit(context.expressionList().expression(), Expr.class);
    return new ValueList(new ArrayList<>(exprs), createPos(context));
}
@Override
public ParseNode visitPartitionNames(StarRocksParser.PartitionNamesContext context) {
    // Key-value partition specs are handled by the key-partition visitor.
    if (context.keyPartitions() != null) {
        return visit(context.keyPartitions());
    }
    List<String> names = visit(context.identifierOrString(), Identifier.class).stream()
            .map(Identifier::getValue)
            .collect(toList());
    return new PartitionNames(context.TEMPORARY() != null, names, createPos(context));
}
@Override
public ParseNode visitKeyPartitionList(StarRocksParser.KeyPartitionListContext context) {
    // PARTITION (col1=v1, col2=v2, ...): names and values kept as parallel lists.
    List<String> colNames = Lists.newArrayList();
    List<Expr> colValues = Lists.newArrayList();
    for (StarRocksParser.KeyPartitionContext pair : context.keyPartition()) {
        colNames.add(((Identifier) visit(pair.partitionColName)).getValue());
        colValues.add((Expr) visit(pair.partitionColValue));
    }
    return new PartitionNames(false, new ArrayList<>(), colNames, colValues, NodePosition.ZERO);
}
@Override
public ParseNode visitSubquery(StarRocksParser.SubqueryContext context) {
    // A subquery context is transparent here; just build the inner relation.
    return visit(context.queryRelation());
}
@Override
public ParseNode visitQueryWithParentheses(StarRocksParser.QueryWithParenthesesContext context) {
    // A parenthesized query becomes an anonymous subquery relation.
    QueryStatement inner = new QueryStatement((QueryRelation) visit(context.subquery()));
    return new SubqueryRelation(inner);
}
@Override
public ParseNode visitSubqueryWithAlias(StarRocksParser.SubqueryWithAliasContext context) {
    QueryRelation inner = (QueryRelation) visit(context.subquery());
    SubqueryRelation subquery = new SubqueryRelation(new QueryStatement(inner));
    // An alias-less subquery still gets an (empty) TableName placeholder.
    TableName aliasName = context.alias == null
            ? new TableName(null, null)
            : new TableName(null, ((Identifier) visit(context.alias)).getValue());
    subquery.setAlias(aliasName);
    subquery.setColumnOutputNames(getColumnNames(context.columnAliases()));
    return subquery;
}
@Override
public ParseNode visitSubqueryExpression(StarRocksParser.SubqueryExpressionContext context) {
    // A subquery used in expression position.
    QueryRelation inner = (QueryRelation) visit(context.subquery());
    return new Subquery(new QueryStatement(inner));
}
@Override
public ParseNode visitInSubquery(StarRocksParser.InSubqueryContext context) {
    // expr [NOT] IN (subquery)
    boolean negated = context.NOT() != null;
    Subquery subquery = new Subquery(new QueryStatement((QueryRelation) visit(context.queryRelation())));
    return new InPredicate((Expr) visit(context.value), subquery, negated, createPos(context));
}
@Override
public ParseNode visitTupleInSubquery(StarRocksParser.TupleInSubqueryContext context) {
    // (e1, e2, ...) [NOT] IN (subquery)
    boolean negated = context.NOT() != null;
    Subquery subquery = new Subquery(new QueryStatement((QueryRelation) visit(context.queryRelation())));
    List<Expr> tuple = visit(context.expression(), Expr.class);
    return new MultiInPredicate(tuple, subquery, negated, createPos(context));
}
@Override
public ParseNode visitExists(StarRocksParser.ExistsContext context) {
    // EXISTS (subquery), built with the not-exists flag set to false.
    Subquery subquery = new Subquery(new QueryStatement((QueryRelation) visit(context.queryRelation())));
    return new ExistsPredicate(subquery, false, createPos(context));
}
@Override
public ParseNode visitScalarSubquery(StarRocksParser.ScalarSubqueryContext context) {
    // expr <op> (subquery): a binary predicate whose right side is a scalar subquery.
    Token opToken = ((TerminalNode) context.comparisonOperator().getChild(0)).getSymbol();
    BinaryType op = getComparisonOperator(opToken);
    Subquery subquery = new Subquery(new QueryStatement((QueryRelation) visit(context.queryRelation())));
    return new BinaryPredicate(op, (Expr) visit(context.booleanExpression()), subquery, createPos(context));
}
@Override
public ParseNode visitShowFunctionsStatement(StarRocksParser.ShowFunctionsStatementContext context) {
    // SHOW [FULL] [BUILTIN|GLOBAL] FUNCTIONS [FROM db] [LIKE pattern | WHERE expr]
    boolean isBuiltIn = context.BUILTIN() != null;
    boolean isGlobal = context.GLOBAL() != null;
    boolean isVerbose = context.FULL() != null;
    String dbName = context.db == null ? null : getQualifiedName(context.db).toString();
    String pattern = context.pattern == null ? null : ((StringLiteral) visit(context.pattern)).getValue();
    Expr where = context.expression() == null ? null : (Expr) visit(context.expression());
    return new ShowFunctionsStmt(dbName, isBuiltIn, isGlobal, isVerbose, pattern, where, createPos(context));
}
@Override
public ParseNode visitShowPrivilegesStatement(StarRocksParser.ShowPrivilegesStatementContext ctx) {
    // SHOW PRIVILEGES takes no arguments; no position info is tracked for it.
    return new ShowPrivilegesStmt();
}
@Override
public ParseNode visitDropFunctionStatement(StarRocksParser.DropFunctionStatementContext context) {
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    String functionName = qualifiedName.toString();
    FunctionName fnName = FunctionName.createFnName(functionName);
    // A GLOBAL function must not carry a database qualifier.
    if (context.GLOBAL() != null) {
        if (!Strings.isNullOrEmpty(fnName.getDb())) {
            throw new ParsingException(PARSER_ERROR_MSG.invalidUDFName(functionName), qualifiedName.getPos());
        }
        fnName.setAsGlobalFunction();
    }
    return new DropFunctionStmt(fnName, getFunctionArgsDef(context.typeList()), createPos(context));
}
@Override
public ParseNode visitCreateFunctionStatement(StarRocksParser.CreateFunctionStatementContext context) {
    // CREATE [GLOBAL] FUNCTION: the function type token defaults to SCALAR
    // when absent; return/intermediate types and PROPERTIES are optional.
    String functionType = "SCALAR";
    boolean isGlobal = context.GLOBAL() != null;
    if (context.functionType != null) {
        functionType = context.functionType.getText();
    }
    QualifiedName qualifiedName = getQualifiedName(context.qualifiedName());
    String functionName = qualifiedName.toString();
    TypeDef returnTypeDef = new TypeDef(getType(context.returnType), createPos(context.returnType));
    // Optional intermediate type; null when not declared.
    TypeDef intermediateType = null;
    if (context.intermediateType != null) {
        intermediateType = new TypeDef(getType(context.intermediateType), createPos(context.intermediateType));
    }
    // Optional PROPERTIES ( 'k' = 'v', ... ); stays null when the clause is absent.
    Map<String, String> properties = null;
    if (context.properties() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    FunctionName fnName = FunctionName.createFnName(functionName);
    if (isGlobal) {
        // Global functions must not be qualified with a database name.
        if (!Strings.isNullOrEmpty(fnName.getDb())) {
            throw new ParsingException(PARSER_ERROR_MSG.invalidUDFName(functionName), qualifiedName.getPos());
        }
        fnName.setAsGlobalFunction();
    }
    return new CreateFunctionStmt(functionType, fnName,
            getFunctionArgsDef(context.typeList()), returnTypeDef, intermediateType, properties);
}
@Override
public ParseNode visitCreateUserStatement(StarRocksParser.CreateUserStatementContext context) {
    // CREATE USER [IF NOT EXISTS] user [auth option] [DEFAULT ROLE role-list].
    // `start`/`stop` delimit the user+auth span used for the UserDesc position.
    UserDesc userDesc;
    Token start = context.user().start;
    Token stop;
    UserIdentity user = (UserIdentity) visit(context.user());
    UserAuthOption authOption = context.authOption() == null ? null : (UserAuthOption) visit(context.authOption());
    if (authOption == null) {
        // No auth option: empty, non-plaintext-flagged password.
        userDesc = new UserDesc(user, "", false, user.getPos());
    } else if (authOption.getAuthPlugin() == null) {
        // Password-based authentication (plain or hashed per isPasswordPlain).
        stop = context.authOption().stop;
        userDesc =
                new UserDesc(user, authOption.getPassword(), authOption.isPasswordPlain(), createPos(start, stop));
    } else {
        // Plugin-based authentication with an auth string.
        stop = context.authOption().stop;
        userDesc = new UserDesc(user, authOption.getAuthPlugin(), authOption.getAuthString(),
                authOption.isPasswordPlain(), createPos(start, stop));
    }
    boolean ifNotExists = context.IF() != null;
    // Optional DEFAULT ROLE list.
    List<String> roles = new ArrayList<>();
    if (context.roleList() != null) {
        roles.addAll(context.roleList().identifierOrString().stream().map(this::visit).map(
                s -> ((Identifier) s).getValue()).collect(toList()));
    }
    return new CreateUserStmt(ifNotExists, userDesc, roles, createPos(context));
}
@Override
public ParseNode visitDropUserStatement(StarRocksParser.DropUserStatementContext context) {
    // DROP USER [IF EXISTS] user
    UserIdentity user = (UserIdentity) visit(context.user());
    boolean ifExists = context.EXISTS() != null;
    return new DropUserStmt(user, ifExists, createPos(context));
}
@Override
public ParseNode visitAlterUserStatement(StarRocksParser.AlterUserStatementContext context) {
    // ALTER USER has two forms: "DEFAULT ROLE ..." (returns a
    // SetDefaultRoleStmt) and an auth-option change (returns an AlterUserStmt).
    UserDesc userDesc;
    UserIdentity user = (UserIdentity) visit(context.user());
    Token start = context.user().start;
    Token stop;
    if (context.ROLE() != null) {
        List<String> roles = new ArrayList<>();
        if (context.roleList() != null) {
            roles.addAll(context.roleList().identifierOrString().stream().map(this::visit).map(
                    s -> ((Identifier) s).getValue()).collect(toList()));
        }
        // DEFAULT ROLE ALL / NONE / explicit role list.
        SetRoleType setRoleType;
        if (context.ALL() != null) {
            setRoleType = SetRoleType.ALL;
        } else if (context.NONE() != null) {
            setRoleType = SetRoleType.NONE;
        } else {
            setRoleType = SetRoleType.ROLE;
        }
        return new SetDefaultRoleStmt(user, setRoleType, roles, createPos(context));
    }
    stop = context.authOption().stop;
    UserAuthOption authOption = (UserAuthOption) visit(context.authOption());
    if (authOption.getAuthPlugin() == null) {
        // Password-based authentication (plain or hashed per isPasswordPlain).
        userDesc =
                new UserDesc(user, authOption.getPassword(), authOption.isPasswordPlain(), createPos(start, stop));
    } else {
        // Plugin-based authentication with an auth string.
        userDesc = new UserDesc(user, authOption.getAuthPlugin(), authOption.getAuthString(),
                authOption.isPasswordPlain(), createPos(start, stop));
    }
    return new AlterUserStmt(userDesc, context.EXISTS() != null, createPos(context));
}
@Override
public ParseNode visitShowUserStatement(StarRocksParser.ShowUserStatementContext context) {
    // SHOW USERS lists all users (isAll = true); SHOW USER shows the current one.
    boolean isAll = context.USERS() != null;
    return new ShowUserStmt(isAll, createPos(context));
}
@Override
public ParseNode visitShowAllAuthentication(StarRocksParser.ShowAllAuthenticationContext context) {
    // SHOW ALL AUTHENTICATION: no target user, isAll = true.
    return new ShowAuthenticationStmt(null, true, createPos(context));
}
@Override
public ParseNode visitShowAuthenticationForUser(StarRocksParser.ShowAuthenticationForUserContext context) {
    NodePosition pos = createPos(context);
    // With no explicit user token the statement targets a null user.
    UserIdentity user = context.user() == null ? null : (UserIdentity) visit(context.user());
    return new ShowAuthenticationStmt(user, false, pos);
}
@Override
public ParseNode visitExecuteAsStatement(StarRocksParser.ExecuteAsStatementContext context) {
    UserIdentity targetUser = (UserIdentity) visit(context.user());
    // The WITH token (WITH NO REVERT) forbids switching back to the original user.
    return new ExecuteAsStmt(targetUser, context.WITH() == null, createPos(context));
}
@Override
public ParseNode visitCreateRoleStatement(StarRocksParser.CreateRoleStatementContext context) {
    // CREATE ROLE [IF NOT EXISTS] role [, role...] [COMMENT 'text']
    List<String> roles = context.roleList().identifierOrString().stream()
            .map(this::visit)
            .map(node -> ((Identifier) node).getValue())
            .collect(Collectors.toList());
    // Optional COMMENT clause; empty string when absent.
    String comment = context.comment() == null
            ? ""
            : ((StringLiteral) visit(context.comment())).getStringValue();
    return new CreateRoleStmt(roles, context.NOT() != null, comment, createPos(context));
}
@Override
public ParseNode visitAlterRoleStatement(StarRocksParser.AlterRoleStatementContext context) {
    // ALTER ROLE [IF EXISTS] role [, role...] with a new comment string.
    List<String> roles = context.roleList().identifierOrString().stream()
            .map(this::visit)
            .map(node -> ((Identifier) node).getValue())
            .collect(Collectors.toList());
    String comment = ((StringLiteral) visit(context.string())).getStringValue();
    return new AlterRoleStmt(roles, context.IF() != null, comment);
}
@Override
public ParseNode visitDropRoleStatement(StarRocksParser.DropRoleStatementContext context) {
    // DROP ROLE [IF EXISTS] role [, role...]
    List<String> roles = context.roleList().identifierOrString().stream()
            .map(ctx -> ((Identifier) visit(ctx)).getValue())
            .collect(toList());
    return new DropRoleStmt(roles, context.EXISTS() != null, createPos(context));
}
@Override
public ParseNode visitShowRolesStatement(StarRocksParser.ShowRolesStatementContext context) {
    // SHOW ROLES takes no arguments; no position info is tracked for it.
    return new ShowRolesStmt();
}
@Override
public ParseNode visitGrantRoleToUser(StarRocksParser.GrantRoleToUserContext context) {
    // GRANT role [, role...] TO user
    List<String> roleNames = context.identifierOrStringList().identifierOrString().stream()
            .map(ctx -> ((Identifier) visit(ctx)).getValue())
            .collect(toList());
    return new GrantRoleStmt(roleNames, (UserIdentity) visit(context.user()), createPos(context));
}
@Override
public ParseNode visitGrantRoleToRole(StarRocksParser.GrantRoleToRoleContext context) {
    // GRANT role [, role...] TO ROLE target-role
    List<String> roleNames = context.identifierOrStringList().identifierOrString().stream()
            .map(ctx -> ((Identifier) visit(ctx)).getValue())
            .collect(toList());
    String targetRole = ((Identifier) visit(context.identifierOrString())).getValue();
    return new GrantRoleStmt(roleNames, targetRole, createPos(context));
}
@Override
public ParseNode visitRevokeRoleFromUser(StarRocksParser.RevokeRoleFromUserContext context) {
    // REVOKE role [, role...] FROM user
    List<String> roleNames = context.identifierOrStringList().identifierOrString().stream()
            .map(ctx -> ((Identifier) visit(ctx)).getValue())
            .collect(toList());
    return new RevokeRoleStmt(roleNames, (UserIdentity) visit(context.user()), createPos(context));
}
@Override
public ParseNode visitRevokeRoleFromRole(StarRocksParser.RevokeRoleFromRoleContext context) {
    // REVOKE <role>[, ...] FROM ROLE <role> — the target is another role, not a user.
    List<String> roleNameList = context.identifierOrStringList().identifierOrString().stream()
            .map(roleCtx -> ((Identifier) visit(roleCtx)).getValue())
            .collect(toList());
    String targetRole = ((Identifier) visit(context.identifierOrString())).getValue();
    return new RevokeRoleStmt(roleNameList, targetRole, createPos(context));
}
@Override
public ParseNode visitSetRoleStatement(StarRocksParser.SetRoleStatementContext context) {
    // SET ROLE ALL | DEFAULT | NONE | <role list> for the current session.
    List<String> roles = new ArrayList<>();
    if (context.roleList() != null) {
        for (StarRocksParser.IdentifierOrStringContext roleCtx : context.roleList().identifierOrString()) {
            roles.add(((Identifier) visit(roleCtx)).getValue());
        }
    }
    // Keyword variants take precedence; an explicit role list means type ROLE.
    final SetRoleType setRoleType;
    if (context.ALL() != null) {
        setRoleType = SetRoleType.ALL;
    } else if (context.DEFAULT() != null) {
        setRoleType = SetRoleType.DEFAULT;
    } else if (context.NONE() != null) {
        setRoleType = SetRoleType.NONE;
    } else {
        setRoleType = SetRoleType.ROLE;
    }
    return new SetRoleStmt(setRoleType, roles, createPos(context));
}
@Override
public ParseNode visitSetDefaultRoleStatement(StarRocksParser.SetDefaultRoleStatementContext context) {
    // SET DEFAULT ROLE ALL | NONE | <role list> TO <user>.
    // Note: unlike SET ROLE, there is no DEFAULT keyword variant here.
    List<String> roles = new ArrayList<>();
    if (context.roleList() != null) {
        for (StarRocksParser.IdentifierOrStringContext roleCtx : context.roleList().identifierOrString()) {
            roles.add(((Identifier) visit(roleCtx)).getValue());
        }
    }
    final SetRoleType setRoleType;
    if (context.ALL() != null) {
        setRoleType = SetRoleType.ALL;
    } else if (context.NONE() != null) {
        setRoleType = SetRoleType.NONE;
    } else {
        setRoleType = SetRoleType.ROLE;
    }
    return new SetDefaultRoleStmt((UserIdentity) visit(context.user()), setRoleType, roles, createPos(context));
}
@Override
public ParseNode visitShowGrantsStatement(StarRocksParser.ShowGrantsStatementContext context) {
    // SHOW GRANTS [FOR ROLE <role> | FOR <user>]. Exactly one of the two
    // constructor slots (user, role) is populated; the other stays null.
    NodePosition pos = createPos(context);
    if (context.ROLE() == null) {
        // User form; with no FOR clause at all, userId stays null (current user).
        UserIdentity userId = context.user() == null ? null : (UserIdentity) visit(context.user());
        return new ShowGrantsStmt(userId, null, pos);
    }
    Identifier role = (Identifier) visit(context.identifierOrString());
    return new ShowGrantsStmt(null, role.getValue(), pos);
}
@Override
public ParseNode visitAuthWithoutPlugin(StarRocksParser.AuthWithoutPluginContext context) {
    // IDENTIFIED [BY PASSWORD] '<password>'. The PASSWORD keyword signals an
    // already-hashed value, so its absence means the password is plaintext.
    boolean isPasswordPlain = context.PASSWORD() == null;
    String password = ((StringLiteral) visit(context.string())).getStringValue();
    return new UserAuthOption(password, null, null, isPasswordPlain, createPos(context));
}
@Override
public ParseNode visitAuthWithPlugin(StarRocksParser.AuthWithPluginContext context) {
    // IDENTIFIED WITH <plugin> [BY|AS '<auth string>'].
    String pluginName = ((Identifier) visit(context.identifierOrString())).getValue().toUpperCase();
    String authString = null;
    if (context.string() != null) {
        authString = ((StringLiteral) visit(context.string())).getStringValue();
    }
    // "AS" marks a pre-hashed auth string; "BY" (no AS) means plaintext.
    boolean isPasswordPlain = context.AS() == null;
    return new UserAuthOption(null, pluginName, authString, isPasswordPlain, createPos(context));
}
@Override
public ParseNode visitGrantRevokeClause(StarRocksParser.GrantRevokeClauseContext context) {
    // The grantee/revokee is either a concrete user identity or a role name —
    // never both; the unused slot is null.
    NodePosition pos = createPos(context);
    if (context.user() == null) {
        String roleName = ((Identifier) visit(context.identifierOrString())).getValue();
        return new GrantRevokeClause(null, roleName, pos);
    }
    return new GrantRevokeClause((UserIdentity) visit(context.user()), null, pos);
}
@Override
public ParseNode visitGrantOnUser(StarRocksParser.GrantOnUserContext context) {
    // GRANT IMPERSONATE ON USER <user>[, ...] TO <grantee> [WITH GRANT OPTION].
    // IMPERSONATE is the only privilege that can be granted on a USER object.
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    List<UserIdentity> users = new ArrayList<>();
    for (StarRocksParser.UserContext userCtx : context.user()) {
        users.add((UserIdentity) visit(userCtx));
    }
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    objects.setUserPrivilegeObjectList(users);
    return new GrantPrivilegeStmt(Collections.singletonList("IMPERSONATE"), "USER", clause, objects,
            context.WITH() != null, createPos(context));
}
@Override
public ParseNode visitRevokeOnUser(StarRocksParser.RevokeOnUserContext context) {
    // REVOKE IMPERSONATE ON USER <user>[, ...] FROM <grantee>.
    GrantRevokeClause clause = (GrantRevokeClause) visit(context.grantRevokeClause());
    List<UserIdentity> users = new ArrayList<>();
    for (StarRocksParser.UserContext userCtx : context.user()) {
        users.add((UserIdentity) visit(userCtx));
    }
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    objects.setUserPrivilegeObjectList(users);
    return new RevokePrivilegeStmt(Collections.singletonList("IMPERSONATE"), "USER", clause, objects,
            createPos(context));
}
@Override
public ParseNode visitGrantOnTableBrief(StarRocksParser.GrantOnTableBriefContext context) {
    // Brief form "GRANT <privs> ON <name>[, ...]" without an explicit object-type
    // keyword: the object type defaults to TABLE.
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    return new GrantPrivilegeStmt(privilegeList, "TABLE",
            (GrantRevokeClause) visit(context.grantRevokeClause()),
            parsePrivilegeObjectNameList(context.privObjectNameList()),
            context.WITH() != null,
            createPos(context));
}

@Override
public ParseNode visitRevokeOnTableBrief(StarRocksParser.RevokeOnTableBriefContext context) {
    // REVOKE counterpart of the brief TABLE form; revoke statements carry no
    // WITH GRANT OPTION flag.
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    return new RevokePrivilegeStmt(privilegeList, "TABLE",
            (GrantRevokeClause) visit(context.grantRevokeClause()),
            parsePrivilegeObjectNameList(context.privObjectNameList()),
            createPos(context));
}

@Override
public ParseNode visitGrantOnSystem(StarRocksParser.GrantOnSystemContext context) {
    // GRANT <privs> ON SYSTEM — system-level privileges have no object list (null).
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    return new GrantPrivilegeStmt(privilegeList, "SYSTEM",
            (GrantRevokeClause) visit(context.grantRevokeClause()), null, context.WITH() != null,
            createPos(context));
}

@Override
public ParseNode visitRevokeOnSystem(StarRocksParser.RevokeOnSystemContext context) {
    // REVOKE <privs> ON SYSTEM — no object list, mirroring the grant form.
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    return new RevokePrivilegeStmt(privilegeList, "SYSTEM",
            (GrantRevokeClause) visit(context.grantRevokeClause()), null, createPos(context));
}

@Override
public ParseNode visitGrantOnPrimaryObj(StarRocksParser.GrantOnPrimaryObjContext context) {
    // GRANT <privs> ON <object type> <name>[, ...]. The object type keyword is kept
    // as uppercase text; resolution to a concrete type happens later in analysis.
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    String objectTypeUnResolved = ((Identifier) visit(context.privObjectType())).getValue().toUpperCase();
    return new GrantPrivilegeStmt(privilegeList, objectTypeUnResolved,
            (GrantRevokeClause) visit(context.grantRevokeClause()),
            parsePrivilegeObjectNameList(context.privObjectNameList()),
            context.WITH() != null,
            createPos(context));
}

@Override
public ParseNode visitRevokeOnPrimaryObj(StarRocksParser.RevokeOnPrimaryObjContext context) {
    // REVOKE counterpart of the explicit-object-type grant above.
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    String objectTypeUnResolved = ((Identifier) visit(context.privObjectType())).getValue().toUpperCase();
    return new RevokePrivilegeStmt(privilegeList, objectTypeUnResolved,
            (GrantRevokeClause) visit(context.grantRevokeClause()),
            parsePrivilegeObjectNameList(context.privObjectNameList()),
            createPos(context));
}
@Override
public ParseNode visitGrantOnFunc(StarRocksParser.GrantOnFuncContext context) {
    // GRANT <privs> ON [GLOBAL] FUNCTION <name>(<args>)[, ...]. GLOBAL routes the
    // names through the global-function namespace and prefixes the object type.
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    GrantRevokePrivilegeObjects objects = buildGrantRevokePrivWithFunction(context.privFunctionObjectNameList(),
            context.GLOBAL() != null);
    return new GrantPrivilegeStmt(privilegeList, extendPrivilegeType(context.GLOBAL() != null, "FUNCTION"),
            (GrantRevokeClause) visit(context.grantRevokeClause()), objects, context.WITH() != null,
            createPos(context));
}

@Override
public ParseNode visitRevokeOnFunc(StarRocksParser.RevokeOnFuncContext context) {
    // REVOKE counterpart of the function grant above.
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    GrantRevokePrivilegeObjects objects = buildGrantRevokePrivWithFunction(context.privFunctionObjectNameList(),
            context.GLOBAL() != null);
    return new RevokePrivilegeStmt(privilegeList, extendPrivilegeType(context.GLOBAL() != null, "FUNCTION"),
            (GrantRevokeClause) visit(context.grantRevokeClause()), objects,
            createPos(context));
}
/**
 * Converts the parsed function-object list of a GRANT/REVOKE ... ON FUNCTION
 * statement into {@code (FunctionName, FunctionArgsDef)} pairs.
 * The i-th qualifiedName is paired with the i-th typeList; the grammar is
 * expected to produce the two lists with equal length.
 */
private GrantRevokePrivilegeObjects buildGrantRevokePrivWithFunction(
        StarRocksParser.PrivFunctionObjectNameListContext context, boolean isGlobal) {
    List<Pair<FunctionName, FunctionArgsDef>> functions = new ArrayList<>();
    int functionSize = context.qualifiedName().size();
    List<StarRocksParser.TypeListContext> typeListContexts = context.typeList();
    for (int i = 0; i < functionSize; ++i) {
        StarRocksParser.QualifiedNameContext qualifiedNameContext = context.qualifiedName(i);
        QualifiedName qualifiedName = getQualifiedName(qualifiedNameContext);
        FunctionName functionName;
        // One part = bare function name; two parts = db-qualified name.
        if (qualifiedName.getParts().size() == 1) {
            functionName = new FunctionName(qualifiedName.getParts().get(0));
        } else if (qualifiedName.getParts().size() == 2) {
            functionName = new FunctionName(qualifiedName.getParts().get(0), qualifiedName.getParts().get(1));
        } else {
            throw new SemanticException("Error function format " + qualifiedName);
        }
        if (isGlobal) {
            functionName.setAsGlobalFunction();
        }
        FunctionArgsDef argsDef = getFunctionArgsDef(typeListContexts.get(i));
        functions.add(Pair.create(functionName, argsDef));
    }
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    objects.setFunctions(functions);
    return objects;
}
/**
 * Prefixes a FUNCTION/FUNCTIONS object type with "GLOBAL " when the statement
 * used the GLOBAL keyword; every other type string is returned unchanged.
 */
public String extendPrivilegeType(boolean isGlobal, String type) {
    boolean isFunctionType = type.equals("FUNCTIONS") || type.equals("FUNCTION");
    return (isGlobal && isFunctionType) ? "GLOBAL " + type : type;
}
@Override
public ParseNode visitGrantOnAll(StarRocksParser.GrantOnAllContext context) {
    // GRANT <privs> ON ALL <object type plural> [IN {ALL DATABASES | DATABASE <db>}].
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    String objectTypeUnResolved = ((Identifier) visit(context.privObjectTypePlural())).getValue().toUpperCase();
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    // Encode the scope as a wildcard name-token list:
    //   ALL DATABASES -> ["*", "*"], IN DATABASE db -> [db, "*"], otherwise -> ["*"].
    ArrayList<String> tokenList;
    if (context.isAll != null) {
        tokenList = Lists.newArrayList("*", "*");
    } else if (context.IN() != null) {
        String dbName = ((Identifier) visit(context.identifierOrString())).getValue();
        tokenList = Lists.newArrayList(dbName, "*");
    } else {
        tokenList = Lists.newArrayList("*");
    }
    objects.setPrivilegeObjectNameTokensList(Collections.singletonList(tokenList));
    GrantPrivilegeStmt grantPrivilegeStmt = new GrantPrivilegeStmt(privilegeList, objectTypeUnResolved,
            (GrantRevokeClause) visit(context.grantRevokeClause()),
            objects, context.WITH() != null, createPos(context));
    // Mark the statement so analysis knows it targets all objects of the type.
    grantPrivilegeStmt.setGrantOnAll();
    return grantPrivilegeStmt;
}

@Override
public ParseNode visitRevokeOnAll(StarRocksParser.RevokeOnAllContext context) {
    // REVOKE counterpart of visitGrantOnAll; same wildcard-token encoding.
    List<String> privilegeList = context.privilegeTypeList().privilegeType().stream().map(
            c -> ((Identifier) visit(c)).getValue().toUpperCase()).collect(toList());
    String objectTypeUnResolved = ((Identifier) visit(context.privObjectTypePlural())).getValue().toUpperCase();
    GrantRevokePrivilegeObjects objects = new GrantRevokePrivilegeObjects();
    ArrayList<String> tokenList;
    if (context.isAll != null) {
        tokenList = Lists.newArrayList("*", "*");
    } else if (context.IN() != null) {
        String dbName = ((Identifier) visit(context.identifierOrString())).getValue();
        tokenList = Lists.newArrayList(dbName, "*");
    } else {
        tokenList = Lists.newArrayList("*");
    }
    objects.setPrivilegeObjectNameTokensList(Collections.singletonList(tokenList));
    RevokePrivilegeStmt revokePrivilegeStmt = new RevokePrivilegeStmt(privilegeList, objectTypeUnResolved,
            (GrantRevokeClause) visit(context.grantRevokeClause()), objects, createPos(context));
    revokePrivilegeStmt.setGrantOnAll();
    return revokePrivilegeStmt;
}
@Override
public ParseNode visitPrivilegeType(StarRocksParser.PrivilegeTypeContext context) {
    // A privilege may span several keywords (e.g. "CREATE TABLE"); rebuild it by
    // joining the child token texts with single spaces.
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < context.getChildCount(); ++i) {
        if (i > 0) {
            joined.append(' ');
        }
        joined.append(context.getChild(i).getText());
    }
    return new Identifier(joined.toString(), createPos(context));
}
@Override
public ParseNode visitPrivObjectType(StarRocksParser.PrivObjectTypeContext context) {
    // Object types can be multi-keyword (e.g. "MATERIALIZED VIEW"); join the child
    // token texts with single spaces.
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < context.getChildCount(); ++i) {
        if (i > 0) {
            joined.append(' ');
        }
        joined.append(context.getChild(i).getText());
    }
    return new Identifier(joined.toString(), createPos(context));
}
@Override
public ParseNode visitPrivObjectTypePlural(StarRocksParser.PrivObjectTypePluralContext context) {
    // Plural object types (used by GRANT ... ON ALL <types>) may also be
    // multi-keyword; join the child token texts with single spaces.
    StringBuilder joined = new StringBuilder();
    for (int i = 0; i < context.getChildCount(); ++i) {
        if (i > 0) {
            joined.append(' ');
        }
        joined.append(context.getChild(i).getText());
    }
    return new Identifier(joined.toString(), createPos(context));
}
/**
 * Converts an optional list of privilege object names into token lists,
 * one inner list per object (each token is an identifier, string, or "*").
 * Returns null when the statement has no object list (e.g. ON SYSTEM).
 */
private GrantRevokePrivilegeObjects parsePrivilegeObjectNameList(
        StarRocksParser.PrivObjectNameListContext context) {
    if (context == null) {
        return null;
    }
    GrantRevokePrivilegeObjects grantRevokePrivilegeObjects = new GrantRevokePrivilegeObjects(createPos(context));
    List<List<String>> objectNameList = new ArrayList<>();
    for (StarRocksParser.PrivObjectNameContext privObjectNameContext : context.privObjectName()) {
        objectNameList.add(privObjectNameContext.identifierOrStringOrStar().stream()
                .map(c -> ((Identifier) visit(c)).getValue()).collect(toList()));
    }
    grantRevokePrivilegeObjects.setPrivilegeObjectNameTokensList(objectNameList);
    return grantRevokePrivilegeObjects;
}
@Override
public ParseNode visitCreateSecurityIntegrationStatement(
        StarRocksParser.CreateSecurityIntegrationStatementContext context) {
    // CREATE SECURITY INTEGRATION <name> PROPERTIES (k = v, ...); the property
    // clause is optional, yielding an empty map.
    String name = ((Identifier) visit(context.identifier())).getValue();
    Map<String, String> propertyMap = new HashMap<>();
    if (context.properties() != null) {
        for (Property property : visit(context.properties().property(), Property.class)) {
            propertyMap.put(property.getKey(), property.getValue());
        }
    }
    return new CreateSecurityIntegrationStatement(name, propertyMap, createPos(context));
}
@Override
public ParseNode visitExpressionOrDefault(StarRocksParser.ExpressionOrDefaultContext context) {
    // The DEFAULT keyword stands in for the column default instead of an expression.
    return context.DEFAULT() != null
            ? new DefaultValueExpr(createPos(context))
            : visit(context.expression());
}
@Override
public ParseNode visitExpressionsWithDefault(StarRocksParser.ExpressionsWithDefaultContext context) {
    // One parenthesized row of expressions (or DEFAULT placeholders), e.g. a
    // VALUES tuple; children are visited left to right.
    ArrayList<Expr> row = Lists.newArrayList();
    for (StarRocksParser.ExpressionOrDefaultContext exprCtx : context.expressionOrDefault()) {
        row.add((Expr) visit(exprCtx));
    }
    return new ValueList(row, createPos(context));
}
@Override
public ParseNode visitExpressionSingleton(StarRocksParser.ExpressionSingletonContext context) {
    // Entry point when parsing a standalone expression; simply unwraps it.
    return visit(context.expression());
}

@Override
public ParseNode visitLogicalNot(StarRocksParser.LogicalNotContext context) {
    // NOT <expr> — represented as a unary CompoundPredicate with a null right child.
    return new CompoundPredicate(CompoundPredicate.Operator.NOT, (Expr) visit(context.expression()),
            null, createPos(context));
}
@Override
public ParseNode visitLogicalBinary(StarRocksParser.LogicalBinaryContext context) {
    // AND/OR over two boolean sub-expressions; the left operand is visited first.
    Expr leftChild = (Expr) visit(context.left);
    Expr rightChild = (Expr) visit(context.right);
    CompoundPredicate.Operator op = getLogicalBinaryOperator(context.operator);
    return new CompoundPredicate(op, leftChild, rightChild, createPos(context));
}
/** Maps AND/&& tokens to AND; every other operator this rule accepts is an OR variant. */
private static CompoundPredicate.Operator getLogicalBinaryOperator(Token token) {
    int tokenType = token.getType();
    if (tokenType == StarRocksLexer.AND || tokenType == StarRocksLexer.LOGICAL_AND) {
        return CompoundPredicate.Operator.AND;
    }
    return CompoundPredicate.Operator.OR;
}
@Override
public ParseNode visitPredicate(StarRocksParser.PredicateContext context) {
    // Dispatches to whichever alternative the predicate rule matched, falling
    // back to the plain value expression when no predicate operation is present.
    if (context.predicateOperations() != null) {
        return visit(context.predicateOperations());
    } else if (context.tupleInSubquery() != null) {
        return visit(context.tupleInSubquery());
    } else {
        return visit(context.valueExpression());
    }
}
@Override
public ParseNode visitIsNull(StarRocksParser.IsNullContext context) {
    // <expr> IS [NOT] NULL — the NOT keyword flips the predicate's polarity.
    Expr child = (Expr) visit(context.booleanExpression());
    boolean isNotNull = context.NOT() != null;
    return new IsNullPredicate(child, isNotNull, createPos(context));
}
@Override
public ParseNode visitComparison(StarRocksParser.ComparisonContext context) {
    // Binary comparison <left> <op> <right>; the operator token is the single
    // child of the comparisonOperator rule.
    BinaryType op = getComparisonOperator(((TerminalNode) context.comparisonOperator().getChild(0))
            .getSymbol());
    return new BinaryPredicate(op, (Expr) visit(context.left), (Expr) visit(context.right), createPos(context));
}

/**
 * Maps a comparison operator token to its BinaryType. Any token not listed
 * falls through to EQ_FOR_NULL, the null-safe equality operator (&lt;=&gt;).
 */
private static BinaryType getComparisonOperator(Token symbol) {
    switch (symbol.getType()) {
        case StarRocksParser.EQ:
            return BinaryType.EQ;
        case StarRocksParser.NEQ:
            return BinaryType.NE;
        case StarRocksParser.LT:
            return BinaryType.LT;
        case StarRocksParser.LTE:
            return BinaryType.LE;
        case StarRocksParser.GT:
            return BinaryType.GT;
        case StarRocksParser.GTE:
            return BinaryType.GE;
        default:
            return BinaryType.EQ_FOR_NULL;
    }
}
@Override
public ParseNode visitInList(StarRocksParser.InListContext context) {
    // <value> [NOT] IN (<expr>, ...).
    Expr compareExpr = (Expr) visit(context.value);
    List<Expr> inList = visit(context.expressionList().expression(), Expr.class);
    boolean isNotIn = context.NOT() != null;
    return new InPredicate(compareExpr, inList, isNotIn, createPos(context));
}
@Override
public ParseNode visitBetween(StarRocksParser.BetweenContext context) {
    // <value> [NOT] BETWEEN <lower> AND <upper>; operands are visited in
    // value, lower, upper order.
    Expr value = (Expr) visit(context.value);
    Expr lower = (Expr) visit(context.lower);
    Expr upper = (Expr) visit(context.upper);
    return new BetweenPredicate(value, lower, upper, context.NOT() != null, createPos(context));
}
@Override
public ParseNode visitLike(StarRocksParser.LikeContext context) {
    // <value> [NOT] {LIKE | REGEXP | RLIKE} <pattern>.
    NodePosition pos = createPos(context);
    // REGEXP and RLIKE are synonyms; anything else is plain LIKE.
    LikePredicate.Operator op = (context.REGEXP() != null || context.RLIKE() != null)
            ? LikePredicate.Operator.REGEXP
            : LikePredicate.Operator.LIKE;
    LikePredicate likePredicate = new LikePredicate(op,
            (Expr) visit(context.value),
            (Expr) visit(context.pattern),
            pos);
    // The NOT variant is represented as NOT wrapped around the positive predicate.
    return context.NOT() == null
            ? likePredicate
            : new CompoundPredicate(CompoundPredicate.Operator.NOT, likePredicate, null, pos);
}
@Override
public ParseNode visitSimpleCase(StarRocksParser.SimpleCaseContext context) {
    // CASE <expr> WHEN ... THEN ... [ELSE ...] END — the compared expression is
    // non-null and the ELSE branch may be absent.
    return new CaseExpr(
            (Expr) visit(context.caseExpr),
            visit(context.whenClause(), CaseWhenClause.class),
            (Expr) visitIfPresent(context.elseExpression),
            createPos(context));
}

@Override
public ParseNode visitSearchedCase(StarRocksParser.SearchedCaseContext context) {
    // CASE WHEN <cond> THEN ... END — no compared expression (first arg is null).
    return new CaseExpr(
            null,
            visit(context.whenClause(), CaseWhenClause.class),
            (Expr) visitIfPresent(context.elseExpression),
            createPos(context));
}

@Override
public ParseNode visitWhenClause(StarRocksParser.WhenClauseContext context) {
    // One WHEN <condition> THEN <result> arm of a CASE expression.
    return new CaseWhenClause((Expr) visit(context.condition), (Expr) visit(context.result), createPos(context));
}
@Override
public ParseNode visitArithmeticUnary(StarRocksParser.ArithmeticUnaryContext context) {
    Expr child = (Expr) visit(context.primaryExpression());
    NodePosition pos = createPos(context);
    switch (context.operator.getType()) {
        case StarRocksLexer.MINUS_SYMBOL:
            // Fold the sign directly into numeric literals so "-1" parses as a
            // literal rather than an expression; non-literals become (-1) * child.
            if (child.isLiteral() && child.getType().isNumericType()) {
                try {
                    ((LiteralExpr) child).swapSign();
                } catch (NotImplementedException e) {
                    throw new ParsingException(PARSER_ERROR_MSG.unsupportedExpr(child.toSql()), child.getPos());
                }
                return child;
            } else {
                return new ArithmeticExpr(ArithmeticExpr.Operator.MULTIPLY, new IntLiteral(-1), child, pos);
            }
        case StarRocksLexer.PLUS_SYMBOL:
            // Unary plus is a no-op.
            return child;
        case StarRocksLexer.BITNOT:
            return new ArithmeticExpr(ArithmeticExpr.Operator.BITNOT, child, null, pos);
        default:
            // Remaining alternative is logical NOT.
            return new CompoundPredicate(CompoundPredicate.Operator.NOT, child, null, pos);
    }
}

@Override
public ParseNode visitArithmeticBinary(StarRocksParser.ArithmeticBinaryContext context) {
    Expr left = (Expr) visit(context.left);
    Expr right = (Expr) visit(context.right);
    NodePosition pos = createPos(context);
    // An INTERVAL operand turns the expression into timestamp arithmetic; the
    // boolean flag records whether the interval appeared on the left.
    if (left instanceof IntervalLiteral) {
        return new TimestampArithmeticExpr(getArithmeticBinaryOperator(context.operator), right,
                ((IntervalLiteral) left).getValue(),
                ((IntervalLiteral) left).getUnitIdentifier().getDescription(),
                true, pos);
    }
    if (right instanceof IntervalLiteral) {
        return new TimestampArithmeticExpr(getArithmeticBinaryOperator(context.operator), left,
                ((IntervalLiteral) right).getValue(),
                ((IntervalLiteral) right).getUnitIdentifier().getDescription(),
                false, pos);
    }
    return new ArithmeticExpr(getArithmeticBinaryOperator(context.operator), left, right, pos);
}

/**
 * Maps an arithmetic/bitwise operator token to its ArithmeticExpr.Operator;
 * an unrecognized token is reported as a parsing error at the token's position.
 */
private static ArithmeticExpr.Operator getArithmeticBinaryOperator(Token operator) {
    switch (operator.getType()) {
        case StarRocksLexer.PLUS_SYMBOL:
            return ArithmeticExpr.Operator.ADD;
        case StarRocksLexer.MINUS_SYMBOL:
            return ArithmeticExpr.Operator.SUBTRACT;
        case StarRocksLexer.ASTERISK_SYMBOL:
            return ArithmeticExpr.Operator.MULTIPLY;
        case StarRocksLexer.SLASH_SYMBOL:
            return ArithmeticExpr.Operator.DIVIDE;
        case StarRocksLexer.PERCENT_SYMBOL:
        case StarRocksLexer.MOD:
            return ArithmeticExpr.Operator.MOD;
        case StarRocksLexer.INT_DIV:
            return ArithmeticExpr.Operator.INT_DIVIDE;
        case StarRocksLexer.BITAND:
            return ArithmeticExpr.Operator.BITAND;
        case StarRocksLexer.BITOR:
            return ArithmeticExpr.Operator.BITOR;
        case StarRocksLexer.BITXOR:
            return ArithmeticExpr.Operator.BITXOR;
        case StarRocksLexer.BIT_SHIFT_LEFT:
            return ArithmeticExpr.Operator.BIT_SHIFT_LEFT;
        case StarRocksLexer.BIT_SHIFT_RIGHT:
            return ArithmeticExpr.Operator.BIT_SHIFT_RIGHT;
        case StarRocksLexer.BIT_SHIFT_RIGHT_LOGICAL:
            return ArithmeticExpr.Operator.BIT_SHIFT_RIGHT_LOGICAL;
        default:
            throw new ParsingException(PARSER_ERROR_MSG.wrongTypeOfArgs(operator.getText()),
                    new NodePosition(operator));
    }
}
@Override
public ParseNode visitOdbcFunctionCallExpression(StarRocksParser.OdbcFunctionCallExpressionContext context) {
    // ODBC escape syntax {fn f(...)}: translate the scalar function call into
    // its native equivalent.
    FunctionCallExpr call = (FunctionCallExpr) visit(context.functionCall());
    return new OdbcScalarFunctionCall(call).mappingFunction();
}
/**
 * Builds the canonical 4-argument form for time_slice-style functions:
 * (time expression, interval count, unit name, boundary keyword).
 */
private static List<Expr> getArgumentsForTimeSlice(Expr time, Expr value, String ident, String boundary) {
    List<Expr> exprs = Lists.newLinkedList();
    exprs.add(time);
    // The interval count is normalized to INT before being appended.
    addArgumentUseTypeInt(value, exprs);
    exprs.add(new StringLiteral(ident));
    exprs.add(new StringLiteral(boundary));
    return exprs;
}

/**
 * Appends {@code value} to {@code exprs}, re-wrapping integer literals with an
 * explicit INT type; non-integer expressions are appended unchanged.
 */
private static void addArgumentUseTypeInt(Expr value, List<Expr> exprs) {
    try {
        if (value instanceof IntLiteral) {
            exprs.add(new IntLiteral(((IntLiteral) value).getValue(), Type.INT));
        } else {
            exprs.add(value);
        }
    } catch (Exception e) {
        // The IntLiteral re-wrap can reject out-of-range values; surface that as
        // an argument error rather than letting the checked exception escape.
        throw new IllegalArgumentException(String.format("Cast argument %s to int type failed.", value.toSql()));
    }
}
/**
 * Translates an aggregate function call (COUNT/AVG/SUM/MIN/MAX/ARRAY_AGG/
 * GROUP_CONCAT) into a FunctionCallExpr, handling DISTINCT, ORDER BY inside the
 * call, GROUP_CONCAT separators, bracket hints, and an optional OVER clause.
 *
 * Fix: the original carried a duplicated {@code @Override} annotation
 * immediately before this method, which does not compile; the stray copy is
 * removed.
 */
@Override
public ParseNode visitAggregationFunctionCall(StarRocksParser.AggregationFunctionCallContext context) {
    NodePosition pos = createPos(context);
    String functionName;
    boolean isGroupConcat = false;
    boolean isLegacyGroupConcat = false;
    boolean isDistinct = false;
    if (context.aggregationFunction().COUNT() != null) {
        functionName = FunctionSet.COUNT;
    } else if (context.aggregationFunction().AVG() != null) {
        functionName = FunctionSet.AVG;
    } else if (context.aggregationFunction().SUM() != null) {
        functionName = FunctionSet.SUM;
    } else if (context.aggregationFunction().MIN() != null) {
        functionName = FunctionSet.MIN;
    } else if (context.aggregationFunction().ARRAY_AGG() != null) {
        functionName = FunctionSet.ARRAY_AGG;
    } else if (context.aggregationFunction().ARRAY_AGG_DISTINCT() != null) {
        // array_agg_distinct(x) is sugar for array_agg(DISTINCT x).
        functionName = FunctionSet.ARRAY_AGG;
        isDistinct = true;
    } else if (context.aggregationFunction().GROUP_CONCAT() != null) {
        functionName = FunctionSet.GROUP_CONCAT;
        isGroupConcat = true;
        // Legacy mode changes the implicit separator handling below.
        isLegacyGroupConcat = SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_GROUP_CONCAT_LEGACY);
    } else {
        functionName = FunctionSet.MAX;
    }
    List<OrderByElement> orderByElements = new ArrayList<>();
    if (context.aggregationFunction().ORDER() != null) {
        orderByElements = visit(context.aggregationFunction().sortItem(), OrderByElement.class);
    }
    List<String> hints = Lists.newArrayList();
    if (context.aggregationFunction().bracketHint() != null) {
        hints = context.aggregationFunction().bracketHint().identifier().stream().map(
                RuleContext::getText).collect(Collectors.toList());
    }
    if (context.aggregationFunction().setQuantifier() != null) {
        isDistinct = context.aggregationFunction().setQuantifier().DISTINCT() != null;
    }
    // DISTINCT with no argument (e.g. count(DISTINCT)) is a syntax error.
    if (isDistinct && CollectionUtils.isEmpty(context.aggregationFunction().expression())) {
        throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(functionName), pos);
    }
    List<Expr> exprs = visit(context.aggregationFunction().expression(), Expr.class);
    // group_concat without an explicit SEPARATOR gets a default one appended:
    // ", " in legacy mode (only for the single-argument form), "," otherwise.
    if (isGroupConcat && !exprs.isEmpty() && context.aggregationFunction().SEPARATOR() == null) {
        if (isLegacyGroupConcat) {
            if (exprs.size() == 1) {
                Expr sepExpr;
                String sep = ", ";
                sepExpr = new StringLiteral(sep, pos);
                exprs.add(sepExpr);
            }
        } else {
            Expr sepExpr;
            String sep = ",";
            sepExpr = new StringLiteral(sep, pos);
            exprs.add(sepExpr);
        }
    }
    if (!orderByElements.isEmpty()) {
        int exprSize = exprs.size();
        if (isGroupConcat) {
            // The separator appended above is not an orderable output column.
            exprSize--;
        }
        for (OrderByElement orderByElement : orderByElements) {
            Expr by = orderByElement.getExpr();
            if (by instanceof IntLiteral) {
                // ORDER BY <ordinal> refers to the i-th argument (1-based).
                long ordinal = ((IntLiteral) by).getLongValue();
                if (ordinal < 1 || ordinal > exprSize) {
                    throw new ParsingException(format("ORDER BY position %s is not in %s output list", ordinal,
                            functionName), pos);
                }
                by = exprs.get((int) ordinal - 1);
                orderByElement.setExpr(by);
            }
        }
        // Ordering by a constant has no effect; drop such elements.
        orderByElements = orderByElements.stream().filter(x -> !x.getExpr().isConstant()).collect(toList());
    }
    if (CollectionUtils.isNotEmpty(orderByElements)) {
        // Sort keys ride along as trailing arguments of the call.
        orderByElements.stream().forEach(e -> exprs.add(e.getExpr()));
    }
    FunctionCallExpr functionCallExpr = new FunctionCallExpr(functionName,
            context.aggregationFunction().ASTERISK_SYMBOL() == null ?
                    new FunctionParams(isDistinct, exprs, orderByElements) :
                    FunctionParams.createStarParam(), pos);
    functionCallExpr = SyntaxSugars.parse(functionCallExpr);
    functionCallExpr.setHints(hints);
    if (context.over() != null) {
        return buildOverClause(functionCallExpr, context.over(), pos);
    }
    return functionCallExpr;
}
@Override
public ParseNode visitWindowFunctionCall(StarRocksParser.WindowFunctionCallContext context) {
    // <window function>(...) OVER (...) — the OVER clause is mandatory here.
    FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.windowFunction());
    return buildOverClause(functionCallExpr, context.over(), createPos(context));
}

@Override
public ParseNode visitWindowFunction(StarRocksParser.WindowFunctionContext context) {
    // Builds the bare call (e.g. lead/lag/rank) with a lower-cased name;
    // IGNORE NULLS, if present anywhere in the argument list, is recorded as a flag.
    FunctionCallExpr functionCallExpr = new FunctionCallExpr(context.name.getText().toLowerCase(),
            new FunctionParams(false, visit(context.expression(), Expr.class)), createPos(context));
    functionCallExpr = SyntaxSugars.parse(functionCallExpr);
    boolean ignoreNull = CollectionUtils.isNotEmpty(context.ignoreNulls())
            && context.ignoreNulls().stream().anyMatch(Objects::nonNull);
    functionCallExpr.setIgnoreNulls(ignoreNull);
    return functionCallExpr;
}

/**
 * Wraps a function call in an AnalyticExpr built from the OVER clause:
 * PARTITION BY list, optional ORDER BY, optional window frame, and optional
 * bracket hints.
 */
private AnalyticExpr buildOverClause(FunctionCallExpr functionCallExpr, StarRocksParser.OverContext context,
                                     NodePosition pos) {
    functionCallExpr.setIsAnalyticFnCall(true);
    List<OrderByElement> orderByElements = new ArrayList<>();
    if (context.ORDER() != null) {
        orderByElements = visit(context.sortItem(), OrderByElement.class);
    }
    List<Expr> partitionExprs = visit(context.partition, Expr.class);
    return new AnalyticExpr(functionCallExpr, partitionExprs, orderByElements,
            (AnalyticWindow) visitIfPresent(context.windowFrame()),
            context.bracketHint() == null ? null : context.bracketHint().identifier().stream()
                    .map(RuleContext::getText).collect(toList()), pos);
}
@Override
public ParseNode visitExtract(StarRocksParser.ExtractContext context) {
    // EXTRACT(<unit> FROM <expr>) is rewritten as a call to the unit-named
    // function, e.g. EXTRACT(YEAR FROM d) -> year(d).
    String fieldString = context.identifier().getText();
    Expr sourceExpr = (Expr) visit(context.valueExpression());
    FunctionParams params = new FunctionParams(Lists.newArrayList(sourceExpr));
    return new FunctionCallExpr(fieldString, params, createPos(context));
}
@Override
public ParseNode visitCast(StarRocksParser.CastContext context) {
    // CAST(<expr> AS <type>).
    return new CastExpr(new TypeDef(getType(context.type())), (Expr) visit(context.expression()),
            createPos(context));
}

@Override
public ParseNode visitConvert(StarRocksParser.ConvertContext context) {
    // CONVERT(<expr>, <type>) — same AST node as CAST.
    return new CastExpr(new TypeDef(getType(context.type())), (Expr) visit(context.expression()),
            createPos(context));
}

@Override
public ParseNode visitInformationFunctionExpression(StarRocksParser.InformationFunctionExpressionContext context) {
    // Session-information functions such as CURRENT_USER / DATABASE, stored
    // by their upper-cased name.
    return new InformationFunction(context.name.getText().toUpperCase(), createPos(context));
}

@Override
public ParseNode visitSpecialDateTimeExpression(StarRocksParser.SpecialDateTimeExpressionContext context) {
    // Zero-argument date/time keywords (e.g. CURRENT_TIMESTAMP) become calls
    // with an empty argument list.
    // NOTE(review): unlike the neighboring visitors, no position is passed to
    // this FunctionCallExpr — confirm whether that is intentional.
    return new FunctionCallExpr(context.name.getText().toUpperCase(), Lists.newArrayList());
}
@Override
public ParseNode visitSpecialFunctionExpression(StarRocksParser.SpecialFunctionExpressionContext context) {
    // Keyword-named functions (CHAR, IF, LEFT, ...) that the grammar must treat
    // specially because their names are reserved words. Each branch simply
    // forwards the argument list to an ordinary function call.
    NodePosition pos = createPos(context);
    if (context.CHAR() != null) {
        return new FunctionCallExpr("char", visit(context.expression(), Expr.class), pos);
    } else if (context.DAY() != null) {
        return new FunctionCallExpr("day", visit(context.expression(), Expr.class), pos);
    } else if (context.HOUR() != null) {
        return new FunctionCallExpr("hour", visit(context.expression(), Expr.class), pos);
    } else if (context.IF() != null) {
        return new FunctionCallExpr("if", visit(context.expression(), Expr.class), pos);
    } else if (context.LEFT() != null) {
        return new FunctionCallExpr("left", visit(context.expression(), Expr.class), pos);
    } else if (context.LIKE() != null) {
        return new FunctionCallExpr("like", visit(context.expression(), Expr.class), pos);
    } else if (context.MINUTE() != null) {
        return new FunctionCallExpr("minute", visit(context.expression(), Expr.class), pos);
    } else if (context.MOD() != null) {
        return new FunctionCallExpr("mod", visit(context.expression(), Expr.class), pos);
    } else if (context.MONTH() != null) {
        return new FunctionCallExpr("month", visit(context.expression(), Expr.class), pos);
    } else if (context.QUARTER() != null) {
        return new FunctionCallExpr("quarter", visit(context.expression(), Expr.class), pos);
    } else if (context.REGEXP() != null) {
        return new FunctionCallExpr("regexp", visit(context.expression(), Expr.class), pos);
    } else if (context.REPLACE() != null) {
        return new FunctionCallExpr("replace", visit(context.expression(), Expr.class), pos);
    } else if (context.RIGHT() != null) {
        return new FunctionCallExpr("right", visit(context.expression(), Expr.class), pos);
    } else if (context.RLIKE() != null) {
        // RLIKE is a synonym of REGEXP.
        return new FunctionCallExpr("regexp", visit(context.expression(), Expr.class), pos);
    } else if (context.SECOND() != null) {
        return new FunctionCallExpr("second", visit(context.expression(), Expr.class), pos);
    } else if (context.YEAR() != null) {
        return new FunctionCallExpr("year", visit(context.expression(), Expr.class), pos);
    } else if (context.PASSWORD() != null) {
        // PASSWORD('x') is evaluated at parse time into the scrambled-hash literal.
        StringLiteral stringLiteral = (StringLiteral) visit(context.string());
        return new StringLiteral(new String(MysqlPassword.makeScrambledPassword(stringLiteral.getValue())), pos);
    } else if (context.FLOOR() != null) {
        return new FunctionCallExpr("floor", visit(context.expression(), Expr.class), pos);
    } else if (context.CEIL() != null) {
        return new FunctionCallExpr("ceil", visit(context.expression(), Expr.class), pos);
    }
    // Remaining alternatives: TIMESTAMPADD / TIMESTAMPDIFF(unit, expr1, expr2).
    // Note the operand order handed to TimestampArithmeticExpr: (e3, e2).
    String functionName = context.TIMESTAMPADD() != null ? "TIMESTAMPADD" : "TIMESTAMPDIFF";
    UnitIdentifier e1 = (UnitIdentifier) visit(context.unitIdentifier());
    Expr e2 = (Expr) visit(context.expression(0));
    Expr e3 = (Expr) visit(context.expression(1));
    return new TimestampArithmeticExpr(functionName, e3, e2, e1.getDescription(), pos);
}
@Override
public ParseNode visitConcat(StarRocksParser.ConcatContext context) {
    // Desugar the string-concatenation operator into a concat() function call.
    NodePosition pos = createPos(context);
    List<Expr> operands = Lists.newArrayList((Expr) visit(context.left), (Expr) visit(context.right));
    return new FunctionCallExpr("concat", new FunctionParams(operands), pos);
}
// NULL literal.
@Override
public ParseNode visitNullLiteral(StarRocksParser.NullLiteralContext context) {
    return new NullLiteral(createPos(context));
}

// Boolean literal: TRUE (case-insensitive) maps to true, any other spelling to false.
@Override
public ParseNode visitBooleanLiteral(StarRocksParser.BooleanLiteralContext context) {
    NodePosition pos = createPos(context);
    String value = context.getText();
    return new BoolLiteral("TRUE".equalsIgnoreCase(value), pos);
}

// Numeric literal: dispatch to the integer/double/decimal alternative of the `number` rule.
@Override
public ParseNode visitNumericLiteral(StarRocksParser.NumericLiteralContext context) {
    return visit(context.number());
}
/**
 * Integer literal. Values within 64-bit signed range become {@code IntLiteral};
 * values up to the 128-bit LARGEINT bound become {@code LargeIntLiteral};
 * anything larger is a parse-time overflow error.
 */
@Override
public ParseNode visitIntegerValue(StarRocksParser.IntegerValueContext context) {
    NodePosition pos = createPos(context);
    try {
        BigInteger intLiteral = new BigInteger(context.getText());
        if (intLiteral.compareTo(LONG_MAX) <= 0) {
            return new IntLiteral(intLiteral.longValue(), pos);
        } else if (intLiteral.compareTo(LARGEINT_MAX_ABS) <= 0) {
            return new LargeIntLiteral(intLiteral.toString(), pos);
        } else {
            throw new ParsingException(PARSER_ERROR_MSG.numOverflow(context.getText()), pos);
        }
    } catch (NumberFormatException | AnalysisException e) {
        // Malformed digits or literal-construction failure both surface as a format error.
        throw new ParsingException(PARSER_ERROR_MSG.invalidNumFormat(context.getText()), pos);
    }
}
/**
 * Floating-point literal. Under MODE_DOUBLE_LITERAL the text is kept as a float;
 * otherwise it becomes a decimal, unless its integer part exceeds 38 digits, in
 * which case it falls back to a float to avoid exceeding decimal precision.
 */
@Override
public ParseNode visitDoubleValue(StarRocksParser.DoubleValueContext context) {
    NodePosition pos = createPos(context);
    try {
        if (SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_DOUBLE_LITERAL)) {
            return new FloatLiteral(context.getText(), pos);
        } else {
            BigDecimal decimal = new BigDecimal(context.getText());
            int precision = DecimalLiteral.getRealPrecision(decimal);
            int scale = DecimalLiteral.getRealScale(decimal);
            int integerPartWidth = precision - scale;
            if (integerPartWidth > 38) {
                // Too wide for a decimal type; represent as float instead.
                return new FloatLiteral(context.getText(), pos);
            }
            return new DecimalLiteral(decimal, pos);
        }
    } catch (AnalysisException | NumberFormatException e) {
        throw new ParsingException(PARSER_ERROR_MSG.invalidNumFormat(context.getText()), pos);
    }
}

/**
 * Decimal literal. Same sql-mode switch as {@link #visitDoubleValue}: float under
 * MODE_DOUBLE_LITERAL, decimal otherwise.
 */
@Override
public ParseNode visitDecimalValue(StarRocksParser.DecimalValueContext context) {
    NodePosition pos = createPos(context);
    try {
        if (SqlModeHelper.check(sqlMode, SqlModeHelper.MODE_DOUBLE_LITERAL)) {
            return new FloatLiteral(context.getText(), pos);
        } else {
            return new DecimalLiteral(context.getText(), pos);
        }
    } catch (AnalysisException e) {
        throw new ParsingException(PARSER_ERROR_MSG.invalidNumFormat(context.getText()), pos);
    }
}
/** DATE '...' / DATETIME '...' literal; a malformed date string is a parse error. */
@Override
public ParseNode visitDateLiteral(StarRocksParser.DateLiteralContext context) {
    NodePosition pos = createPos(context);
    String value = ((StringLiteral) visit(context.string())).getValue();
    Type targetType = context.DATE() != null ? Type.DATE : Type.DATETIME;
    try {
        return new DateLiteral(value, targetType);
    } catch (AnalysisException e) {
        throw new ParsingException(PARSER_ERROR_MSG.invalidDateFormat(value), pos);
    }
}
/**
 * Quoted string literal. Strips the surrounding quotes, collapses the SQL
 * doubled-quote escape ('' or "") into a single quote character, then
 * resolves backslash escapes via {@link #escapeBackSlash}.
 */
@Override
public ParseNode visitString(StarRocksParser.StringContext context) {
    String quotedString;
    NodePosition pos = createPos(context);
    if (context.SINGLE_QUOTED_TEXT() != null) {
        quotedString = context.SINGLE_QUOTED_TEXT().getText();
        quotedString = quotedString.substring(1, quotedString.length() - 1).replace("''", "'");
    } else {
        quotedString = context.DOUBLE_QUOTED_TEXT().getText();
        quotedString = quotedString.substring(1, quotedString.length() - 1).replace("\"\"", "\"");
    }
    return new StringLiteral(escapeBackSlash(quotedString), pos);
}
/** Binary literal; drops the two-character prefix and the closing quote of the token text. */
@Override
public ParseNode visitBinary(StarRocksParser.BinaryContext context) {
    TerminalNode token = context.BINARY_SINGLE_QUOTED_TEXT() != null
            ? context.BINARY_SINGLE_QUOTED_TEXT()
            : context.BINARY_DOUBLE_QUOTED_TEXT();
    String quotedText = token.getText();
    return new VarBinaryLiteral(quotedText.substring(2, quotedText.length() - 1), createPos(context));
}
/**
 * Resolves MySQL-style backslash escapes in an already-unquoted string.
 * Recognized escapes: \n \t \r \b \0 \Z. For \_ and \% the backslash is
 * KEPT (the case deliberately falls through to default after appending '\\'),
 * so LIKE wildcards stay escaped for later pattern matching. Any other
 * \x collapses to the literal x. A trailing lone backslash is kept as-is.
 */
private static String escapeBackSlash(String str) {
    StringWriter writer = new StringWriter();
    int strLen = str.length();
    for (int i = 0; i < strLen; ++i) {
        char c = str.charAt(i);
        if (c == '\\' && (i + 1) < strLen) {
            switch (str.charAt(i + 1)) {
                case 'n':
                    writer.append('\n');
                    break;
                case 't':
                    writer.append('\t');
                    break;
                case 'r':
                    writer.append('\r');
                    break;
                case 'b':
                    writer.append('\b');
                    break;
                case '0':
                    writer.append('\0');
                    break;
                case 'Z':
                    // \Z is MySQL's escape for ASCII 26 (SUB / Ctrl-Z).
                    writer.append('\032');
                    break;
                case '_':
                case '%':
                    // Preserve the backslash for LIKE wildcards, then fall
                    // through so the wildcard character itself is appended too.
                    writer.append('\\');
                    /* Fall through */
                default:
                    writer.append(str.charAt(i + 1));
                    break;
            }
            i++; // consumed the escaped character as well
        } else {
            writer.append(c);
        }
    }
    return writer.toString();
}
/** ARRAY[...] constructor; the element type annotation and the element list are both optional. */
@Override
public ParseNode visitArrayConstructor(StarRocksParser.ArrayConstructorContext context) {
    NodePosition pos = createPos(context);
    Type type = context.arrayType() == null
            ? null
            : new ArrayType(getType(context.arrayType().type()));
    List<Expr> exprs = context.expressionList() == null
            ? Collections.emptyList()
            : visit(context.expressionList().expression(), Expr.class);
    return new ArrayExpr(type, exprs, pos);
}
/** A single key:value entry of a map constructor, packed as a two-element row. */
@Override
public ParseNode visitMapExpression(StarRocksParser.MapExpressionContext context) {
    Expr key = (Expr) visit(context.key);
    Expr value = (Expr) visit(context.value);
    ArrayList<Expr> pair = Lists.newArrayList(key, value);
    return new ValueList(pair, createPos(context));
}
/**
 * MAP{...} constructor. Key/value pairs arrive as two-element rows from
 * {@link #visitMapExpression} and are flattened into one list
 * [k1, v1, k2, v2, ...]; an odd element count means a dangling key.
 */
@Override
public ParseNode visitMapConstructor(StarRocksParser.MapConstructorContext context) {
    NodePosition pos = createPos(context);
    Type type = Type.ANY_MAP;
    if (context.mapType() != null) {
        // Explicit MAP<K,V> annotation overrides the ANY_MAP placeholder.
        type = getMapType(context.mapType());
    }
    List<Expr> exprs;
    if (context.mapExpressionList() != null) {
        List<ValueList> rowValues = visit(context.mapExpressionList().mapExpression(), ValueList.class);
        List<List<Expr>> rows = rowValues.stream().map(ValueList::getRow).collect(toList());
        exprs = rows.stream().flatMap(Collection::stream).collect(Collectors.toList());
        int num = exprs.size();
        if (num % 2 == 1) {
            throw new ParsingException(PARSER_ERROR_MSG.wrongNumOfArgs(num, "map()",
                    "Arguments must be in key/value pairs"), pos);
        }
    } else {
        exprs = Collections.emptyList();
    }
    return new MapExpr(type, exprs, pos);
}
/**
 * value[index] element access on an array or map.
 * NOTE(review): the literal {@code false} third argument's meaning is not
 * visible here — confirm against the CollectionElementExpr constructor.
 */
@Override
public ParseNode visitCollectionSubscript(StarRocksParser.CollectionSubscriptContext context) {
    Expr value = (Expr) visit(context.value);
    Expr index = (Expr) visit(context.index);
    return new CollectionElementExpr(value, index, false);
}
/**
 * Array slicing (e.g. {@code arr[1:3]}) is not supported; reject it at parse
 * time. (A commented-out prototype implementation that previously lived here
 * has been removed as dead code.)
 */
@Override
public ParseNode visitArraySlice(StarRocksParser.ArraySliceContext context) {
    throw new ParsingException(PARSER_ERROR_MSG.unsupportedExpr("array slice"), createPos(context));
}
// INTERVAL <value> <unit> expression (e.g. INTERVAL 3 DAY).
@Override
public ParseNode visitInterval(StarRocksParser.IntervalContext context) {
    return new IntervalLiteral((Expr) visit(context.value), (UnitIdentifier) visit(context.from),
            createPos(context));
}

// Time-unit keyword (DAY, HOUR, ...), kept as raw text.
@Override
public ParseNode visitUnitIdentifier(StarRocksParser.UnitIdentifierContext context) {
    return new UnitIdentifier(context.getText(), createPos(context));
}

// Boundary keyword (e.g. FLOOR/CEIL of a time unit), kept as raw text.
@Override
public ParseNode visitUnitBoundary(StarRocksParser.UnitBoundaryContext context) {
    return new UnitBoundary(context.getText(), createPos(context));
}
/**
 * base.field dereference. Three cases:
 * - base is a SlotRef: extend its qualified name with the new field part;
 * - base is already a SubfieldExpr: append the field to its path;
 * - otherwise: wrap base in a new single-field SubfieldExpr.
 */
@Override
public ParseNode visitDereference(StarRocksParser.DereferenceContext ctx) {
    Expr base = (Expr) visit(ctx.base);
    NodePosition pos = createPos(ctx);
    String fieldName;
    if (ctx.DOT_IDENTIFIER() != null) {
        // Token text includes the leading dot; drop it.
        fieldName = ctx.DOT_IDENTIFIER().getText().substring(1);
    } else {
        fieldName = ((Identifier) visit(ctx.fieldName)).getValue();
    }
    if (base instanceof SlotRef) {
        SlotRef tmp = (SlotRef) base;
        List<String> parts = new ArrayList<>(tmp.getQualifiedName().getParts());
        parts.add(fieldName);
        return new SlotRef(QualifiedName.of(parts, pos));
    } else if (base instanceof SubfieldExpr) {
        SubfieldExpr subfieldExpr = (SubfieldExpr) base;
        ImmutableList.Builder<String> builder = new ImmutableList.Builder<>();
        for (String tmpFieldName : subfieldExpr.getFieldNames()) {
            builder.add(tmpFieldName);
        }
        builder.add(fieldName);
        return new SubfieldExpr(subfieldExpr.getChild(0), builder.build(), pos);
    } else {
        return new SubfieldExpr(base, ImmutableList.of(fieldName), pos);
    }
}
/** A bare column identifier becomes a single-part qualified slot reference. */
@Override
public ParseNode visitColumnReference(StarRocksParser.ColumnReferenceContext context) {
    Identifier identifier = (Identifier) visit(context.identifier());
    QualifiedName qualifiedName =
            QualifiedName.of(Lists.newArrayList(identifier.getValue()), createPos(context));
    return new SlotRef(qualifiedName);
}
// JSON path access: expr -> 'path'.
@Override
public ParseNode visitArrowExpression(StarRocksParser.ArrowExpressionContext context) {
    Expr expr = (Expr) visit(context.primaryExpression());
    StringLiteral stringLiteral = (StringLiteral) visit(context.string());
    return new ArrowExpr(expr, stringLiteral, createPos(context));
}
/**
 * Lambda expression: (a, b) -> body or x -> body. Builds the argument list
 * LambdaFunctionExpr expects: the body expression first, then one
 * LambdaArgument per parameter name.
 */
@Override
public ParseNode visitLambdaFunctionExpr(StarRocksParser.LambdaFunctionExprContext context) {
    List<String> names = Lists.newLinkedList();
    if (context.identifierList() == null) {
        names.add(((Identifier) visit(context.identifier())).getValue());
    } else {
        for (Identifier identifier : visit(context.identifierList().identifier(), Identifier.class)) {
            names.add(identifier.getValue());
        }
    }

    // The body is a single expression, or for map lambdas a key/value pair.
    Expr body = null;
    if (context.expression() != null) {
        body = (Expr) visit(context.expression());
    } else if (context.expressionList() != null) {
        List<Expr> exprs = visit(context.expressionList().expression(), Expr.class);
        if (exprs.size() != 2) {
            throw new IllegalArgumentException("The right part of map lambda functions can accept at most 2 " +
                    "expressions, but there are " + exprs.size());
        }
        body = new MapExpr(Type.ANY_MAP, exprs);
    }

    List<Expr> arguments = Lists.newLinkedList();
    arguments.add(body);
    for (String name : names) {
        arguments.add(new LambdaArgument(name));
    }
    return new LambdaFunctionExpr(arguments);
}
// @name user-defined session variable reference.
@Override
public ParseNode visitUserVariable(StarRocksParser.UserVariableContext context) {
    String variable = ((Identifier) visit(context.identifierOrString())).getValue();
    return new VariableExpr(variable, SetType.USER, createPos(context));
}

// @@[global.|session.]name system variable reference; scope comes from varType.
@Override
public ParseNode visitSystemVariable(StarRocksParser.SystemVariableContext context) {
    SetType setType = getVariableType(context.varType());
    return new VariableExpr(((Identifier) visit(context.identifier())).getValue(), setType, createPos(context));
}

// COLLATE clauses are accepted for MySQL compatibility but ignored.
@Override
public ParseNode visitCollate(StarRocksParser.CollateContext context) {
    return visit(context.primaryExpression());
}
// Parentheses only group; they add no AST node.
@Override
public ParseNode visitParenthesizedExpression(StarRocksParser.ParenthesizedExpressionContext context) {
    return visit(context.expression());
}

// Plain identifier, taken verbatim.
@Override
public ParseNode visitUnquotedIdentifier(StarRocksParser.UnquotedIdentifierContext context) {
    return new Identifier(context.getText(), createPos(context));
}

// Backquoted identifier: strip every backtick from the token text.
@Override
public ParseNode visitBackQuotedIdentifier(StarRocksParser.BackQuotedIdentifierContext context) {
    return new Identifier(context.getText().replace("`", ""), createPos(context));
}

// Identifier that starts with digits; still taken verbatim.
@Override
public ParseNode visitDigitIdentifier(StarRocksParser.DigitIdentifierContext context) {
    return new Identifier(context.getText(), createPos(context));
}
// dictionary_get(...) call; arguments pass through unchanged.
@Override
public ParseNode visitDictionaryGetExpr(StarRocksParser.DictionaryGetExprContext context) {
    List<Expr> params = visit(context.expressionList().expression(), Expr.class);
    return new DictionaryGetExpr(params);
}
/** Maps the EXPLAIN modifier keyword to its explain level; no modifier means NORMAL. */
private static StatementBase.ExplainLevel getExplainType(StarRocksParser.ExplainDescContext context) {
    if (context.LOGICAL() != null) {
        return StatementBase.ExplainLevel.LOGICAL;
    }
    if (context.ANALYZE() != null) {
        return StatementBase.ExplainLevel.ANALYZE;
    }
    if (context.VERBOSE() != null) {
        return StatementBase.ExplainLevel.VERBOSE;
    }
    if (context.COSTS() != null) {
        return StatementBase.ExplainLevel.COST;
    }
    if (context.SCHEDULER() != null) {
        return StatementBase.ExplainLevel.SCHEDULER;
    }
    return StatementBase.ExplainLevel.NORMAL;
}
/**
 * Resolves the variable scope keyword. Returns null when no varType clause is
 * present; otherwise GLOBAL, VERBOSE, or SESSION (the default scope).
 */
public static SetType getVariableType(StarRocksParser.VarTypeContext context) {
    if (context == null) {
        return null;
    }
    if (context.GLOBAL() != null) {
        return SetType.GLOBAL;
    } else if (context.VERBOSE() != null) {
        return SetType.VERBOSE;
    } else {
        return SetType.SESSION;
    }
}
/** col = expr (or DEFAULT) in an assignment list. */
@Override
public ParseNode visitAssignment(StarRocksParser.AssignmentContext context) {
    Identifier column = (Identifier) visit(context.identifier());
    Expr value = (Expr) visit(context.expressionOrDefault());
    return new ColumnAssignment(column.getValue(), value, createPos(context));
}
/**
 * PARTITION BY clause. Three shapes:
 * - a function call (expression partitioning): wrap the ranges in an
 *   ExpressionPartitionDesc keyed by the function's partition columns;
 * - plain column list with neither LIST nor RANGE: list partitioning with no
 *   predefined partitions;
 * - otherwise: classic range partitioning over the listed columns.
 */
@Override
public ParseNode visitPartitionDesc(StarRocksParser.PartitionDescContext context) {
    List<PartitionDesc> partitionDescList = new ArrayList<>();
    StarRocksParser.IdentifierListContext identifierListContext = context.identifierList();
    if (context.functionCall() != null) {
        for (StarRocksParser.RangePartitionDescContext rangePartitionDescContext : context.rangePartitionDesc()) {
            final PartitionDesc rangePartitionDesc = (PartitionDesc) visit(rangePartitionDescContext);
            partitionDescList.add(rangePartitionDesc);
        }
        FunctionCallExpr functionCallExpr = (FunctionCallExpr) visit(context.functionCall());
        // Validates the partition function and pulls out the column names it references.
        List<String> columnList = AnalyzerUtils.checkAndExtractPartitionCol(functionCallExpr, null);
        RangePartitionDesc rangePartitionDesc = new RangePartitionDesc(columnList, partitionDescList);
        return new ExpressionPartitionDesc(rangePartitionDesc, functionCallExpr);
    }
    List<Identifier> identifierList = visit(identifierListContext.identifier(), Identifier.class);
    if (context.LIST() == null && context.RANGE() == null) {
        List<String> columnList = identifierList.stream().map(Identifier::getValue).collect(toList());
        return new ListPartitionDesc(columnList, new ArrayList<>());
    } else {
        List<PartitionDesc> partitionDesc = visit(context.rangePartitionDesc(), PartitionDesc.class);
        return new RangePartitionDesc(
                identifierList.stream().map(Identifier::getValue).collect(toList()),
                partitionDesc,
                createPos(context));
    }
}
/**
 * PARTITION [IF NOT EXISTS] name VALUES ... with an optional property list.
 * Properties stay null (not an empty map) when no propertyList is present.
 */
@Override
public ParseNode visitSingleRangePartition(StarRocksParser.SingleRangePartitionContext context) {
    PartitionKeyDesc partitionKeyDesc = (PartitionKeyDesc) visit(context.partitionKeyDesc());
    boolean ifNotExists = context.IF() != null;
    Map<String, String> properties = null;
    if (context.propertyList() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new SingleRangePartitionDesc(ifNotExists, ((Identifier) visit(context.identifier())).getValue(),
            partitionKeyDesc, properties, createPos(context));
}
/**
 * START(...) END(...) EVERY(...) batch partition definition. The step is
 * either an INTERVAL (whose value must be an integer literal) with a time
 * unit, or a bare integer with no unit.
 */
@Override
public ParseNode visitMultiRangePartition(StarRocksParser.MultiRangePartitionContext context) {
    NodePosition pos = createPos(context);
    if (context.interval() != null) {
        IntervalLiteral intervalLiteral = (IntervalLiteral) visit(context.interval());
        Expr expr = intervalLiteral.getValue();
        long intervalVal;
        if (expr instanceof IntLiteral) {
            intervalVal = ((IntLiteral) expr).getLongValue();
        } else {
            // Non-integer EVERY steps (e.g. expressions) are rejected here.
            throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(expr.toSql(),
                    "RANGE DESC"), expr.getPos());
        }
        return new MultiRangePartitionDesc(
                ((StringLiteral) visit(context.string(0))).getStringValue(),
                ((StringLiteral) visit(context.string(1))).getStringValue(),
                intervalVal,
                intervalLiteral.getUnitIdentifier().getDescription(),
                pos);
    } else {
        return new MultiRangePartitionDesc(
                ((StringLiteral) visit(context.string(0))).getStringValue(),
                ((StringLiteral) visit(context.string(1))).getStringValue(),
                Long.parseLong(context.INTEGER_VALUE().getText()),
                null,
                pos);
    }
}
/** A start/end partition range given as two quoted strings. */
@Override
public ParseNode visitPartitionRangeDesc(StarRocksParser.PartitionRangeDescContext context) {
    String start = ((StringLiteral) visit(context.string(0))).getStringValue();
    String end = ((StringLiteral) visit(context.string(1))).getStringValue();
    return new PartitionRangeDesc(start, end, createPos(context));
}
/**
 * List partition over single values: PARTITION p VALUES IN ("a", "b").
 * Properties stay null when no propertyList clause is present.
 */
@Override
public ParseNode visitSingleItemListPartitionDesc(StarRocksParser.SingleItemListPartitionDescContext context) {
    List<String> values =
            context.stringList().string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue())
                    .collect(toList());
    boolean ifNotExists = context.IF() != null;
    Map<String, String> properties = null;
    if (context.propertyList() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new SingleItemListPartitionDesc(ifNotExists, ((Identifier) visit(context.identifier())).getValue(),
            values, properties, createPos(context));
}

/**
 * List partition over value tuples: PARTITION p VALUES IN (("a","b"), ("c","d")).
 * Each stringList becomes one tuple; properties stay null when absent.
 */
@Override
public ParseNode visitMultiItemListPartitionDesc(StarRocksParser.MultiItemListPartitionDescContext context) {
    boolean ifNotExists = context.IF() != null;
    List<List<String>> multiValues = new ArrayList<>();
    for (StarRocksParser.StringListContext stringListContext : context.stringList()) {
        List<String> values =
                stringListContext.string().stream().map(c -> ((StringLiteral) visit(c)).getStringValue())
                        .collect(toList());
        multiValues.add(values);
    }
    Map<String, String> properties = null;
    if (context.propertyList() != null) {
        properties = new HashMap<>();
        List<Property> propertyList = visit(context.propertyList().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    return new MultiItemListPartitionDesc(ifNotExists, ((Identifier) visit(context.identifier())).getValue(),
            multiValues, properties, createPos(context));
}
/**
 * Partition key bound. Either VALUES LESS THAN (upper bound only, with
 * MAXVALUE as an open upper bound) or a [lower, upper) two-list form.
 */
@Override
public ParseNode visitPartitionKeyDesc(StarRocksParser.PartitionKeyDescContext context) {
    PartitionKeyDesc partitionKeyDesc;
    NodePosition pos = createPos(context);
    if (context.LESS() != null) {
        if (context.MAXVALUE() != null) {
            return PartitionKeyDesc.createMaxKeyDesc();
        }
        List<PartitionValue> partitionValueList =
                visit(context.partitionValueList().get(0).partitionValue(), PartitionValue.class);
        partitionKeyDesc = new PartitionKeyDesc(partitionValueList, pos);
    } else {
        List<PartitionValue> lowerPartitionValueList =
                visit(context.partitionValueList().get(0).partitionValue(), PartitionValue.class);
        List<PartitionValue> upperPartitionValueList =
                visit(context.partitionValueList().get(1).partitionValue(), PartitionValue.class);
        partitionKeyDesc = new PartitionKeyDesc(lowerPartitionValueList, upperPartitionValueList, pos);
    }
    return partitionKeyDesc;
}
/** One partition bound value: the shared MAXVALUE sentinel, or a quoted literal. */
@Override
public ParseNode visitPartitionValue(StarRocksParser.PartitionValueContext context) {
    if (context.MAXVALUE() != null) {
        return PartitionValue.MAX_VALUE;
    }
    String value = ((StringLiteral) visit(context.string())).getStringValue();
    return new PartitionValue(value, createPos(context));
}
/**
 * DISTRIBUTED BY clause: HASH(cols) or RANDOM, with an optional explicit
 * bucket count (0 means unspecified / automatic).
 */
@Override
public ParseNode visitDistributionDesc(StarRocksParser.DistributionDescContext context) {
    int buckets = 0;
    NodePosition pos = createPos(context);
    if (context.INTEGER_VALUE() != null) {
        buckets = Integer.parseInt(context.INTEGER_VALUE().getText());
    }
    if (context.HASH() != null) {
        List<Identifier> identifierList = visit(context.identifierList().identifier(), Identifier.class);
        return new HashDistributionDesc(buckets,
                identifierList.stream().map(Identifier::getValue).collect(toList()),
                pos);
    } else {
        return new RandomDistributionDesc(buckets, pos);
    }
}
/**
 * Materialized-view REFRESH clause: ASYNC [START 'ts'] [EVERY interval],
 * MANUAL, or INCREMENTAL. The refresh moment defaults from
 * Config.default_mv_refresh_immediate and can be overridden by
 * DEFERRED/IMMEDIATE. Returns null if no recognized scheme keyword matched.
 */
@Override
public ParseNode visitRefreshSchemeDesc(StarRocksParser.RefreshSchemeDescContext context) {
    LocalDateTime startTime = LocalDateTime.now();
    IntervalLiteral intervalLiteral = null;
    NodePosition pos = createPos(context);
    MaterializedView.RefreshMoment refreshMoment =
            Config.default_mv_refresh_immediate ?
                    MaterializedView.RefreshMoment.IMMEDIATE : MaterializedView.RefreshMoment.DEFERRED;
    if (context.DEFERRED() != null) {
        refreshMoment = MaterializedView.RefreshMoment.DEFERRED;
    } else if (context.IMMEDIATE() != null) {
        refreshMoment = MaterializedView.RefreshMoment.IMMEDIATE;
    }
    if (context.ASYNC() != null) {
        // START 'timestamp' is optional; without it the start time stays "now"
        // and defineStartTime remains false.
        boolean defineStartTime = false;
        if (context.START() != null) {
            NodePosition timePos = createPos(context.string());
            StringLiteral stringLiteral = (StringLiteral) visit(context.string());
            DateTimeFormatter dateTimeFormatter = null;
            try {
                // Probe the literal's format before parsing it.
                dateTimeFormatter = DateUtils.probeFormat(stringLiteral.getStringValue());
                LocalDateTime tempStartTime = DateUtils.
                        parseStringWithDefaultHSM(stringLiteral.getStringValue(), dateTimeFormatter);
                startTime = tempStartTime;
                defineStartTime = true;
            } catch (AnalysisException e) {
                throw new ParsingException(PARSER_ERROR_MSG.invalidDateFormat(stringLiteral.getStringValue()),
                        timePos);
            }
        }
        if (context.interval() != null) {
            intervalLiteral = (IntervalLiteral) visit(context.interval());
            // Only integer-literal EVERY steps are supported.
            if (!(intervalLiteral.getValue() instanceof IntLiteral)) {
                String exprSql = intervalLiteral.getValue().toSql();
                throw new ParsingException(PARSER_ERROR_MSG.unsupportedExprWithInfo(exprSql, "INTERVAL"),
                        createPos(context.interval()));
            }
        }
        return new AsyncRefreshSchemeDesc(defineStartTime, startTime, intervalLiteral, refreshMoment, pos);
    } else if (context.MANUAL() != null) {
        return new ManualRefreshSchemeDesc(refreshMoment, pos);
    } else if (context.INCREMENTAL() != null) {
        return new IncrementalRefreshSchemeDesc(refreshMoment, pos);
    }
    return null;
}
/** "key" = "value" property pair. */
@Override
public ParseNode visitProperty(StarRocksParser.PropertyContext context) {
    String key = ((StringLiteral) visit(context.key)).getStringValue();
    String value = ((StringLiteral) visit(context.value)).getStringValue();
    return new Property(key, value, createPos(context));
}
/**
 * INTO OUTFILE 'uri' [FORMAT AS fmt] [PROPERTIES (...)]. The format may be
 * given as an identifier or a quoted string; null when absent.
 */
@Override
public ParseNode visitOutfile(StarRocksParser.OutfileContext context) {
    Map<String, String> properties = new HashMap<>();
    if (context.properties() != null) {
        List<Property> propertyList = visit(context.properties().property(), Property.class);
        for (Property property : propertyList) {
            properties.put(property.getKey(), property.getValue());
        }
    }
    String format = null;
    if (context.fileFormat() != null) {
        if (context.fileFormat().identifier() != null) {
            format = ((Identifier) visit(context.fileFormat().identifier())).getValue();
        } else if (context.fileFormat().string() != null) {
            format = ((StringLiteral) visit(context.fileFormat().string())).getStringValue();
        }
    }
    return new OutFileClause(
            ((StringLiteral) visit(context.file)).getStringValue(),
            format,
            properties, createPos(context));
}
/** Column name plus optional COMMENT clause (null comment when absent). */
@Override
public ParseNode visitColumnNameWithComment(StarRocksParser.ColumnNameWithCommentContext context) {
    String comment = context.comment() == null
            ? null
            : ((StringLiteral) visit(context.comment())).getStringValue();
    String name = ((Identifier) visit(context.identifier())).getValue();
    return new ColWithComment(name, comment, createPos(context));
}
/**
 * identifier | 'string' | * — identifiers pass through as-is; strings and the
 * asterisk are re-wrapped as Identifier nodes.
 */
@Override
public ParseNode visitIdentifierOrStringOrStar(StarRocksParser.IdentifierOrStringOrStarContext context) {
    String s = null;
    if (context.identifier() != null) {
        return visit(context.identifier());
    } else if (context.string() != null) {
        s = ((StringLiteral) visit(context.string())).getStringValue();
    } else if (context.ASTERISK_SYMBOL() != null) {
        s = "*";
    }
    return new Identifier(s, createPos(context));
}

// identifier | 'string' — same normalization without the asterisk alternative.
@Override
public ParseNode visitIdentifierOrString(StarRocksParser.IdentifierOrStringContext context) {
    String s = null;
    if (context.identifier() != null) {
        return visit(context.identifier());
    } else if (context.string() != null) {
        s = ((StringLiteral) visit(context.string())).getStringValue();
    }
    return new Identifier(s, createPos(context));
}
// user@['host'] form — NOTE(review): the boolean flags passed to UserIdentity
// (true/false, false) are positional; confirm their meaning against the
// UserIdentity constructor before relying on them.
@Override
public ParseNode visitUserWithHostAndBlanket(StarRocksParser.UserWithHostAndBlanketContext context) {
    Identifier user = (Identifier) visit(context.identifierOrString(0));
    Identifier host = (Identifier) visit(context.identifierOrString(1));
    return new UserIdentity(user.getValue(), host.getValue(), true, createPos(context), false);
}

// user@host form.
@Override
public ParseNode visitUserWithHost(StarRocksParser.UserWithHostContext context) {
    Identifier user = (Identifier) visit(context.identifierOrString(0));
    Identifier host = (Identifier) visit(context.identifierOrString(1));
    return new UserIdentity(user.getValue(), host.getValue(), false, createPos(context), false);
}

// Bare user name: host defaults to the "%" wildcard.
@Override
public ParseNode visitUserWithoutHost(StarRocksParser.UserWithoutHostContext context) {
    Identifier user = (Identifier) visit(context.identifierOrString());
    return new UserIdentity(user.getValue(), "%", false, createPos(context), false);
}
/**
 * PREPARE name FROM <statement> or PREPARE name FROM '<sql text>'.
 * The inline form reuses the parameters collected while visiting the
 * statement; the quoted form re-parses the inner text, unwrapping a nested
 * PrepareStmt if the parsed result is already one.
 */
@Override
public ParseNode visitPrepareStatement(StarRocksParser.PrepareStatementContext context) {
    String stmtName = context.identifier().getText();
    if (context.prepareSql().statement() != null) {
        StatementBase statement = (StatementBase) visitStatement(context.prepareSql().statement());
        return new PrepareStmt(stmtName, statement, parameters);
    } else if (context.prepareSql().SINGLE_QUOTED_TEXT() != null) {
        // Strip the surrounding quotes and parse the embedded SQL text.
        String sql = context.prepareSql().SINGLE_QUOTED_TEXT().getText();
        StatementBase statement = SqlParser.parseSingleStatement(sql.substring(1, sql.length() - 1), sqlMode);
        // instanceof is already false for null, so no separate null check is needed.
        if (statement instanceof PrepareStmt) {
            PrepareStmt prepareStmt = (PrepareStmt) statement;
            return new PrepareStmt(stmtName, prepareStmt.getInnerStmt(), prepareStmt.getParameters());
        }
        return new PrepareStmt(stmtName, statement, ImmutableList.of());
    }
    throw new ParsingException("error prepare sql");
}
/** DEALLOCATE PREPARE name. */
@Override
public ParseNode visitDeallocateStatement(StarRocksParser.DeallocateStatementContext ctx) {
    String stmtName = ctx.identifier().getText();
    return new DeallocateStmt(stmtName);
}
/** EXECUTE name [USING @var, ...]; each USING entry is a USER session variable. */
@Override
public ParseNode visitExecuteStatement(StarRocksParser.ExecuteStatementContext context) {
    String stmtName = context.identifier().getText();
    List<Expr> variableExprs = new ArrayList<>();
    if (context.identifierOrString() != null) {
        for (StarRocksParser.IdentifierOrStringContext varNameContext : context.identifierOrString()) {
            Identifier identifier = (Identifier) visit(varNameContext);
            variableExprs.add(new VariableExpr(identifier.getValue(), SetType.USER));
        }
    }
    return new ExecuteStmt(stmtName, variableExprs);
}
/**
 * A '?' placeholder inside a prepared statement. Lazily initializes the
 * statement-wide parameter list, assigns the next slot id, and records the
 * parameter so PREPARE can pick the list up later.
 */
@Override
public ParseNode visitParameter(StarRocksParser.ParameterContext ctx) {
    if (parameters == null) {
        parameters = new ArrayList<>();
    }
    Parameter parameter = new Parameter(placeHolderSlotId++);
    parameters.add(parameter);
    return parameter;
}
// The four disk ALTER clauses below are recognized by the grammar but not
// implemented; each rejects the statement at parse time.
@Override
public ParseNode visitDecommissionDiskClause(StarRocksParser.DecommissionDiskClauseContext context) {
    throw new SemanticException("not support");
}

@Override
public ParseNode visitCancelDecommissionDiskClause(StarRocksParser.CancelDecommissionDiskClauseContext context) {
    throw new SemanticException("not support");
}

@Override
public ParseNode visitDisableDiskClause(StarRocksParser.DisableDiskClauseContext context) {
    throw new SemanticException("not support");
}

@Override
public ParseNode visitCancelDisableDiskClause(StarRocksParser.CancelDisableDiskClauseContext context) {
    throw new SemanticException("not support");
}
// Visit every context and cast each result to the requested node type.
private <T> List<T> visit(List<? extends ParserRuleContext> contexts, Class<T> clazz) {
    return contexts.stream()
            .map(this::visit)
            .map(clazz::cast)
            .collect(toList());
}

// Like visit(list, clazz) but returns null for a null or empty context list.
private <T> List<T> visitIfPresent(List<? extends ParserRuleContext> contexts, Class<T> clazz) {
    if (contexts != null && contexts.size() != 0) {
        return contexts.stream()
                .map(this::visit)
                .map(clazz::cast)
                .collect(toList());
    } else {
        return null;
    }
}

// Visit a single optional context; null in, null out.
private ParseNode visitIfPresent(ParserRuleContext context) {
    if (context != null) {
        return visit(context);
    } else {
        return null;
    }
}
/** Builds a function signature from a type list; "..." marks a variadic tail. */
private FunctionArgsDef getFunctionArgsDef(StarRocksParser.TypeListContext typeList) {
    List<TypeDef> typeDefList = typeList.type().stream()
            .map(typeContext -> new TypeDef(getType(typeContext)))
            .collect(toList());
    boolean isVariadic = typeList.DOTDOTDOT() != null;
    return new FunctionArgsDef(typeDefList, isVariadic);
}
// Convenience: visit an identifier context and return its string value.
private String getIdentifierName(StarRocksParser.IdentifierContext context) {
    return ((Identifier) visit(context)).getValue();
}
/**
 * Flattens a dotted name (a.b.c) into its parts. Handles both plain
 * identifier children and DOT_IDENTIFIER tokens (".name" lexed as a single
 * token, whose leading dot is stripped).
 */
private QualifiedName getQualifiedName(StarRocksParser.QualifiedNameContext context) {
    List<String> parts = new ArrayList<>();
    NodePosition pos = createPos(context);
    for (ParseTree c : context.children) {
        if (c instanceof TerminalNode) {
            TerminalNode t = (TerminalNode) c;
            if (t.getSymbol().getType() == StarRocksParser.DOT_IDENTIFIER) {
                parts.add(t.getText().substring(1));
            }
        } else if (c instanceof StarRocksParser.IdentifierContext) {
            StarRocksParser.IdentifierContext identifierContext = (StarRocksParser.IdentifierContext) c;
            Identifier identifier = (Identifier) visit(identifierContext);
            parts.add(identifier.getValue());
        }
    }
    return QualifiedName.of(parts, pos);
}
/** Interprets a qualified name as [db.]task; more than two parts is malformed. */
private TaskName qualifiedNameToTaskName(QualifiedName qualifiedName) {
    List<String> parts = qualifiedName.getParts();
    NodePosition pos = qualifiedName.getPos();
    switch (parts.size()) {
        case 2:
            return new TaskName(parts.get(0), parts.get(1), pos);
        case 1:
            return new TaskName(null, parts.get(0), pos);
        default:
            throw new ParsingException(PARSER_ERROR_MSG.invalidTaskFormat(qualifiedName.toString()), pos);
    }
}
/**
 * Interprets a qualified name as [catalog.][db.]table; more than three parts
 * is malformed. (Unified on the local {@code parts} list instead of mixing it
 * with repeated {@code qualifiedName.getParts()} calls.)
 */
private TableName qualifiedNameToTableName(QualifiedName qualifiedName) {
    List<String> parts = qualifiedName.getParts();
    if (parts.size() == 3) {
        return new TableName(parts.get(0), parts.get(1), parts.get(2), qualifiedName.getPos());
    } else if (parts.size() == 2) {
        return new TableName(null, parts.get(0), parts.get(1), qualifiedName.getPos());
    } else if (parts.size() == 1) {
        return new TableName(null, null, parts.get(0), qualifiedName.getPos());
    } else {
        throw new ParsingException(PARSER_ERROR_MSG.invalidTableFormat(qualifiedName.toString()));
    }
}
/** Resolves a `type` grammar rule; exactly one of its alternatives is present. */
public Type getType(StarRocksParser.TypeContext context) {
    if (context.baseType() != null) {
        return getBaseType(context.baseType());
    }
    if (context.decimalType() != null) {
        return getDecimalType(context.decimalType());
    }
    if (context.arrayType() != null) {
        return getArrayType(context.arrayType());
    }
    if (context.structType() != null) {
        return getStructType(context.structType());
    }
    return getMapType(context.mapType());
}
/**
 * Resolves a scalar base type. An optional "(N)" parameter supplies the
 * length (-1 when unspecified). STRING/TEXT always become a default-length
 * varchar; unmatched keywords are resolved by name from the first token.
 */
private Type getBaseType(StarRocksParser.BaseTypeContext context) {
    int length = -1;
    if (context.typeParameter() != null) {
        length = Integer.parseInt(context.typeParameter().INTEGER_VALUE().toString());
    }
    if (context.STRING() != null || context.TEXT() != null) {
        return ScalarType.createVarcharType(ScalarType.DEFAULT_STRING_LENGTH);
    }
    if (context.VARCHAR() != null) {
        return ScalarType.createVarcharType(length);
    }
    if (context.CHAR() != null) {
        return ScalarType.createCharType(length);
    }
    if (context.SIGNED() != null) {
        return Type.INT;
    }
    if (context.HLL() != null) {
        return ScalarType.createHllType();
    }
    if (context.BINARY() != null || context.VARBINARY() != null) {
        return ScalarType.createVarbinary(length);
    }
    return ScalarType.createType(context.getChild(0).getText());
}
/**
 * Builds a decimal {@link ScalarType} from a decimal-type context.
 *
 * <p>Handles three families: unified decimals ({@code DECIMAL}/{@code NUMBER}/
 * {@code NUMERIC}, defaulting to precision 10, scale 0 when unparameterized),
 * DECIMAL V3 ({@code DECIMAL32}/{@code DECIMAL64}/{@code DECIMAL128}, which
 * requires decimal v3 to be enabled), and {@code DECIMALV2}.
 *
 * @param context the parsed decimal type, optionally with precision and scale
 * @return the resolved decimal type
 * @throws SemanticException if a V3 decimal is used while decimal v3 is disabled
 * @throws IllegalArgumentException if the context matches none of the families
 */
public ScalarType getDecimalType(StarRocksParser.DecimalTypeContext context) {
    // Scale is only meaningful when a precision was given: DECIMAL(p[, s]).
    Integer precision = null;
    Integer scale = null;
    if (context.precision != null) {
        precision = Integer.parseInt(context.precision.getText());
        if (context.scale != null) {
            scale = Integer.parseInt(context.scale.getText());
        }
    }
    if (context.DECIMAL() != null || context.NUMBER() != null || context.NUMERIC() != null) {
        if (precision == null) {
            // Unparameterized DECIMAL defaults to DECIMAL(10, 0).
            return ScalarType.createUnifiedDecimalType(10, 0);
        }
        return scale == null
                ? ScalarType.createUnifiedDecimalType(precision)
                : ScalarType.createUnifiedDecimalType(precision, scale);
    }
    if (context.DECIMAL32() != null || context.DECIMAL64() != null || context.DECIMAL128() != null) {
        try {
            ScalarType.checkEnableDecimalV3();
        } catch (AnalysisException e) {
            throw new SemanticException(e.getMessage());
        }
        // Locale.ROOT: the keyword must uppercase locale-independently, otherwise
        // e.g. the Turkish locale turns "decimal32" into "DECİMAL32" and valueOf fails.
        final PrimitiveType primitiveType = PrimitiveType.valueOf(
                context.children.get(0).getText().toUpperCase(java.util.Locale.ROOT));
        if (precision == null) {
            return ScalarType.createDecimalV3Type(primitiveType);
        }
        return scale == null
                ? ScalarType.createDecimalV3Type(primitiveType, precision)
                : ScalarType.createDecimalV3Type(primitiveType, precision, scale);
    }
    if (context.DECIMALV2() != null) {
        if (precision == null) {
            return ScalarType.createDecimalV2Type();
        }
        return scale == null
                ? ScalarType.createDecimalV2Type(precision)
                : ScalarType.createDecimalV2Type(precision, scale);
    }
    throw new IllegalArgumentException("Unsupported type " + context.getText());
}
/** Builds an {@link ArrayType} whose element type is the context's nested type. */
public ArrayType getArrayType(StarRocksParser.ArrayTypeContext context) {
    Type elementType = getType(context.type());
    return new ArrayType(elementType);
}
/**
 * Builds a {@link StructType} from the parsed subfield descriptors,
 * preserving the declared field order.
 */
public StructType getStructType(StarRocksParser.StructTypeContext context) {
    ArrayList<StructField> fields = new ArrayList<>();
    for (StarRocksParser.SubfieldDescContext subfield : context.subfieldDescs().subfieldDesc()) {
        String fieldName = ((Identifier) visit(subfield.identifier())).getValue();
        // Third StructField argument is always null here (not supplied by the type syntax).
        fields.add(new StructField(fieldName, getType(subfield.type()), null));
    }
    return new StructType(fields);
}
/**
 * Builds a {@link MapType}; the first nested type is the key, the second the
 * value. Key types that are not valid map keys are rejected with a parse error
 * positioned at the key type.
 */
public MapType getMapType(StarRocksParser.MapTypeContext context) {
    Type keyType = getType(context.type(0));
    if (keyType.isValidMapKeyType()) {
        Type valueType = getType(context.type(1));
        return new MapType(keyType, valueType);
    }
    throw new ParsingException(PARSER_ERROR_MSG.unsupportedType(keyType.toString(),
            "for map's key, which should be base types"),
            createPos(context.type(0)));
}
/**
 * Converts a {@link QualifiedName} into a {@link LabelName}: either
 * {@code db.label} (two parts) or a bare {@code label} (one part).
 * Any other part count is rejected with a parse error at the name's position.
 */
private LabelName qualifiedNameToLabelName(QualifiedName qualifiedName) {
    List<String> parts = qualifiedName.getParts();
    switch (parts.size()) {
        case 2:
            return new LabelName(parts.get(0), parts.get(1), qualifiedName.getPos());
        case 1:
            return new LabelName(null, parts.get(0), qualifiedName.getPos());
        default:
            throw new ParsingException(PARSER_ERROR_MSG.invalidTableFormat(qualifiedName.toString()),
                    qualifiedName.getPos());
    }
}
/**
 * Collects key/value pairs from a PROPERTIES clause into a map.
 * Returns an empty map when the clause (or its property list) is absent;
 * a later duplicate key overwrites an earlier one.
 */
private Map<String, String> getProperties(StarRocksParser.PropertiesContext context) {
    Map<String, String> properties = new HashMap<>();
    if (context == null || context.property() == null) {
        return properties;
    }
    for (Property property : visit(context.property(), Property.class)) {
        properties.put(property.getKey(), property.getValue());
    }
    return properties;
}
/**
 * Collects key/value pairs from a property list into a case-insensitive
 * sorted map (keys differing only in case collapse into one entry).
 * Returns an empty map when the clause (or its property list) is absent.
 */
private Map<String, String> getPropertyList(StarRocksParser.PropertyListContext context) {
    Map<String, String> properties = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    if (context == null || context.property() == null) {
        return properties;
    }
    for (Property property : visit(context.property(), Property.class)) {
        properties.put(property.getKey(), property.getValue());
    }
    return properties;
}
/**
 * Flattens a list of load-property contexts into parse nodes. Within each
 * context the relative order is: column separator, row delimiter, import
 * columns, WHERE expression, partition names.
 *
 * @param loadPropertiesContexts the parsed load properties; must not be null
 * @return the collected parse nodes in encounter order
 */
private List<ParseNode> getLoadPropertyList(List<StarRocksParser.LoadPropertiesContext> loadPropertiesContexts) {
    Preconditions.checkNotNull(loadPropertiesContexts, "load properties is null");
    List<ParseNode> result = new ArrayList<>();
    for (StarRocksParser.LoadPropertiesContext ctx : loadPropertiesContexts) {
        if (ctx.colSeparatorProperty() != null) {
            StringLiteral sep = (StringLiteral) visit(ctx.colSeparatorProperty().string());
            result.add(new ColumnSeparator(sep.getValue(), sep.getPos()));
        }
        if (ctx.rowDelimiterProperty() != null) {
            StringLiteral delim = (StringLiteral) visit(ctx.rowDelimiterProperty().string());
            result.add(new RowDelimiter(delim.getValue(), delim.getPos()));
        }
        if (ctx.importColumns() != null) {
            result.add((ImportColumnsStmt) visit(ctx.importColumns()));
        }
        if (ctx.expression() != null) {
            Expr where = (Expr) visit(ctx.expression());
            result.add(new ImportWhereStmt(where, where.getPos()));
        }
        if (ctx.partitionNames() != null) {
            result.add(visit(ctx.partitionNames()));
        }
    }
    return result;
}
/**
 * Builds an {@link ImportColumnsStmt}: plain column references first, then
 * column assignments ({@code column = expr}), each in declared order.
 */
@Override
public ParseNode visitImportColumns(StarRocksParser.ImportColumnsContext importColumnsContext) {
    List<ImportColumnDesc> columns = new ArrayList<>();
    // Plain column references, e.g. (c1, c2, ...).
    for (StarRocksParser.QualifiedNameContext nameCtx :
            importColumnsContext.columnProperties().qualifiedName()) {
        Identifier identifier = (Identifier) visit(nameCtx);
        columns.add(new ImportColumnDesc(identifier.getValue(), null, createPos(nameCtx)));
    }
    // Derived columns, e.g. c3 = expr.
    for (StarRocksParser.AssignmentContext assignCtx :
            importColumnsContext.columnProperties().assignment()) {
        ColumnAssignment assignment = (ColumnAssignment) visit(assignCtx);
        columns.add(new ImportColumnDesc(assignment.getColumn(), assignment.getExpr(),
                createPos(assignCtx)));
    }
    return new ImportColumnsStmt(columns, createPos(importColumnsContext));
}
/**
 * Extracts job properties into a map; returns an empty map when the
 * clause is absent. A later duplicate key overwrites an earlier one.
 */
private Map<String, String> getJobProperties(StarRocksParser.JobPropertiesContext jobPropertiesContext) {
    Map<String, String> jobProperties = new HashMap<>();
    if (jobPropertiesContext == null) {
        return jobProperties;
    }
    for (Property property : visit(jobPropertiesContext.properties().property(), Property.class)) {
        jobProperties.put(property.getKey(), property.getValue());
    }
    return jobProperties;
}
/**
 * Extracts data-source properties into a map; returns an empty map when
 * the clause is absent. A later duplicate key overwrites an earlier one.
 */
private Map<String, String> getDataSourceProperties(
        StarRocksParser.DataSourcePropertiesContext dataSourcePropertiesContext) {
    Map<String, String> dataSourceProperties = new HashMap<>();
    if (dataSourcePropertiesContext == null) {
        return dataSourceProperties;
    }
    for (Property property :
            visit(dataSourcePropertiesContext.propertyList().property(), Property.class)) {
        dataSourceProperties.put(property.getKey(), property.getValue());
    }
    return dataSourceProperties;
}
/**
 * Returns the lower-cased column alias names from the context.
 * Null is preserved (rather than an empty list) both when the whole context
 * is absent and when it carries no identifier list, keeping the original
 * "no aliases" contract for callers.
 */
public List<String> getColumnNames(StarRocksParser.ColumnAliasesContext context) {
    if (context == null) {
        return null;
    }
    List<Identifier> identifiers = visitIfPresent(context.identifier(), Identifier.class);
    if (identifiers == null) {
        return null;
    }
    List<String> names = new ArrayList<>();
    for (Identifier identifier : identifiers) {
        names.add(identifier.getValue().toLowerCase());
    }
    return names;
}
/**
 * Creates a source position spanning the whole rule context, from its first
 * to its last token.
 */
private NodePosition createPos(ParserRuleContext context) {
    return createPos(context.start, context.stop);
}
/**
 * Creates a {@link NodePosition} from start/stop tokens.
 * A null start yields the zero position; a null stop yields a point position
 * at the start token; otherwise the full token range is used.
 */
private NodePosition createPos(Token start, Token stop) {
    if (start != null && stop != null) {
        return new NodePosition(start, stop);
    }
    if (start != null) {
        return new NodePosition(start.getLine(), start.getCharPositionInLine());
    }
    return NodePosition.ZERO;
}
/**
 * Builds a {@link LabelName} from optional database and label identifier
 * contexts; either may be null. The reported position begins at the database
 * part when present (it precedes the label in the source text) and ends at
 * the label identifier when present.
 */
private LabelName createLabelName(StarRocksParser.QualifiedNameContext dbCtx,
                                  StarRocksParser.IdentifierContext nameCtx) {
    String name = nameCtx == null ? null : getIdentifierName(nameCtx);
    String dbName = dbCtx == null ? null : getQualifiedName(dbCtx).toString();
    // Start token: database part wins when present, otherwise the label's start.
    Token start = dbCtx != null ? dbCtx.start : (nameCtx != null ? nameCtx.start : null);
    Token stop = nameCtx != null ? nameCtx.stop : null;
    return new LabelName(dbName, name, createPos(start, stop));
}
/**
 * Gathers every hint with QUERY scope from the hint map and returns them
 * sorted into their natural order.
 */
private List<HintNode> extractQueryScopeHintNode() {
    List<HintNode> queryHints = Lists.newArrayList();
    for (List<HintNode> hints : hintMap.values()) {
        for (HintNode hint : hints) {
            if (hint.getScope() == HintNode.Scope.QUERY) {
                queryHints.add(hint);
            }
        }
    }
    Collections.sort(queryHints);
    return queryHints;
}
} |
TODO Vespa 7? | public SslContextFactory getInstance(String containerId, int port) {
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | public SslContextFactory getInstance(String containerId, int port) {
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
@Override
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()));
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
@Override
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()));
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | |
TODO Vespa 7 yes | public SslContextFactory getInstance(String containerId, int port) {
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | public SslContextFactory getInstance(String containerId, int port) {
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
@Override
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()));
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
@Override
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()));
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | |
TODO later this sprint. This was never a public api :) | public SslContextFactory getInstance(String containerId, int port) {
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | public SslContextFactory getInstance(String containerId, int port) {
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
@Override
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()));
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
@Override
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()));
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | |
Add "Vespa 7" to the comment | public SslContextFactory getInstance(String containerId, int port) {
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | public SslContextFactory getInstance(String containerId, int port) {
ConnectorConfig.Ssl sslConfig = connectorConfig.ssl();
SslContextFactory factory = new JDiscSslContextFactory();
switch (sslConfig.clientAuth()) {
case NEED_AUTH:
factory.setNeedClientAuth(true);
break;
case WANT_AUTH:
factory.setWantClientAuth(true);
break;
}
String[] excludedCiphersWithoutTlsRsaExclusion = Arrays.stream(factory.getExcludeCipherSuites())
.filter(cipher -> !cipher.equals("^TLS_RSA_.*$"))
.toArray(String[]::new);
factory.setExcludeCipherSuites(excludedCiphersWithoutTlsRsaExclusion);
if (!sslConfig.privateKeyFile().isEmpty()) {
factory.setKeyStore(createKeystore(sslConfig));
if (!sslConfig.caCertificateFile().isEmpty()) {
factory.setTrustStore(createTruststore(sslConfig));
}
factory.setProtocol("TLS");
} else {
sslKeyStoreConfigurator.configure(new DefaultSslKeyStoreContext(factory));
sslTrustStoreConfigurator.configure(new DefaultSslTrustStoreContext(factory));
if (!sslConfig.prng().isEmpty()) {
factory.setSecureRandomAlgorithm(sslConfig.prng());
}
setStringArrayParameter(
factory, sslConfig.excludeProtocol(), ConnectorConfig.Ssl.ExcludeProtocol::name, SslContextFactory::setExcludeProtocols);
setStringArrayParameter(
factory, sslConfig.includeProtocol(), ConnectorConfig.Ssl.IncludeProtocol::name, SslContextFactory::setIncludeProtocols);
setStringArrayParameter(
factory, sslConfig.excludeCipherSuite(), ConnectorConfig.Ssl.ExcludeCipherSuite::name, SslContextFactory::setExcludeCipherSuites);
setStringArrayParameter(
factory, sslConfig.includeCipherSuite(), ConnectorConfig.Ssl.IncludeCipherSuite::name, SslContextFactory::setIncludeCipherSuites);
factory.setKeyManagerFactoryAlgorithm(sslConfig.sslKeyManagerFactoryAlgorithm());
factory.setProtocol(sslConfig.protocol());
}
return factory;
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
@Override
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()));
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | class DefaultSslContextFactoryProvider implements SslContextFactoryProvider {
private final ConnectorConfig connectorConfig;
private final SslKeyStoreConfigurator sslKeyStoreConfigurator;
private final SslTrustStoreConfigurator sslTrustStoreConfigurator;
public DefaultSslContextFactoryProvider(ConnectorConfig connectorConfig,
SslKeyStoreConfigurator sslKeyStoreConfigurator,
SslTrustStoreConfigurator sslTrustStoreConfigurator) {
this.connectorConfig = connectorConfig;
this.sslKeyStoreConfigurator = sslKeyStoreConfigurator;
this.sslTrustStoreConfigurator = sslTrustStoreConfigurator;
}
@Override
private static KeyStore createTruststore(ConnectorConfig.Ssl sslConfig) {
List<X509Certificate> caCertificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.caCertificateFile()));
KeyStoreBuilder truststoreBuilder = KeyStoreBuilder.withType(KeyStoreType.JKS);
for (int i = 0; i < caCertificates.size(); i++) {
truststoreBuilder.withCertificateEntry("entry-" + i, caCertificates.get(i));
}
return truststoreBuilder.build();
}
private static KeyStore createKeystore(ConnectorConfig.Ssl sslConfig) {
PrivateKey privateKey = KeyUtils.fromPemEncodedPrivateKey(readToString(sslConfig.privateKeyFile()));
List<X509Certificate> certificates = X509CertificateUtils.certificateListFromPem(readToString(sslConfig.certificateFile()));
return KeyStoreBuilder.withType(KeyStoreType.JKS).withKeyEntry("default", privateKey, certificates).build();
}
private static String readToString(String filename) {
try {
return new String(Files.readAllBytes(Paths.get(filename)));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static <T extends InnerNode> void setStringArrayParameter(SslContextFactory sslContextFactory,
List<T> configValues,
Function<T, String> nameProperty,
BiConsumer<SslContextFactory, String[]> setter) {
if (!configValues.isEmpty()) {
String[] nameArray = configValues.stream().map(nameProperty).toArray(String[]::new);
setter.accept(sslContextFactory, nameArray);
}
}
} | |
I think you need to actually check if there is a container on the same host and find the port used by it (use Application.getHosts() and look at HostInfo and ServiceInfo to see if one exists) from that. You can probably throw IllegalArgumentException if no container exists (it will end up as a 404) | private String getLogServerHostname(ApplicationId applicationId) {
Application application = getApplication(applicationId);
VespaModel model = (VespaModel) application.getModel();
String logServerHostname = model.getAdmin().getLogserver().getHostName();
return logServerHostname;
} | String logServerHostname = model.getAdmin().getLogserver().getHostName(); | private String getLogServerHostname(ApplicationId applicationId) {
Application application = getApplication(applicationId);
VespaModel model = (VespaModel) application.getModel();
String logServerHostname = model.getAdmin().getLogserver().getHostName();
Collection<HostInfo> hostInfos = application.getModel().getHosts();
HostInfo logServerHostInfo = hostInfos.stream()
.filter(host -> host.getHostname().equals(logServerHostname))
.findFirst().orElseThrow(() -> new IllegalArgumentException("Could not find HostInfo"));
ServiceInfo serviceInfo = logServerHostInfo.getServices().stream()
.filter(service -> service.getServiceType().equals("container"))
.findFirst().orElseThrow(() -> new IllegalArgumentException("No container running on logserver host"));
int port = serviceInfo.getPorts().stream()
.filter(portInfo -> portInfo.getTags().stream()
.filter(tag -> tag.equalsIgnoreCase("http")).count() > 0)
.findFirst().orElseThrow(() -> new IllegalArgumentException("Could not find HTTP port"))
.getPort();
return logServerHostname + ":" + port + "/logs";
} | class ApplicationRepository implements com.yahoo.config.provision.Deployer {
private static final Logger log = Logger.getLogger(ApplicationRepository.class.getName());
private final TenantRepository tenantRepository;
private final Optional<Provisioner> hostProvisioner;
private final ConfigConvergenceChecker convergeChecker;
private final HttpProxy httpProxy;
private final Clock clock;
private final DeployLogger logger = new SilentDeployLogger();
private final ConfigserverConfig configserverConfig;
private final FileDistributionStatus fileDistributionStatus;
@Inject
public ApplicationRepository(TenantRepository tenantRepository,
HostProvisionerProvider hostProvisionerProvider,
ConfigConvergenceChecker configConvergenceChecker,
HttpProxy httpProxy,
ConfigserverConfig configserverConfig) {
this(tenantRepository, hostProvisionerProvider.getHostProvisioner(),
configConvergenceChecker, httpProxy, configserverConfig, Clock.systemUTC(), new FileDistributionStatus());
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock) {
this(tenantRepository, hostProvisioner, clock, new ConfigserverConfig(new ConfigserverConfig.Builder()));
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock,
ConfigserverConfig configserverConfig) {
this(tenantRepository, Optional.of(hostProvisioner), new ConfigConvergenceChecker(), new HttpProxy(new SimpleHttpFetcher()),
configserverConfig, clock, new FileDistributionStatus());
}
private ApplicationRepository(TenantRepository tenantRepository,
Optional<Provisioner> hostProvisioner,
ConfigConvergenceChecker configConvergenceChecker,
HttpProxy httpProxy,
ConfigserverConfig configserverConfig,
Clock clock,
FileDistributionStatus fileDistributionStatus) {
this.tenantRepository = tenantRepository;
this.hostProvisioner = hostProvisioner;
this.convergeChecker = configConvergenceChecker;
this.httpProxy = httpProxy;
this.clock = clock;
this.configserverConfig = configserverConfig;
this.fileDistributionStatus = fileDistributionStatus;
}
public PrepareResult prepare(Tenant tenant, long sessionId, PrepareParams prepareParams, Instant now) {
validateThatLocalSessionIsNotActive(tenant, sessionId);
LocalSession session = getLocalSession(tenant, sessionId);
ApplicationId applicationId = prepareParams.getApplicationId();
Optional<ApplicationSet> currentActiveApplicationSet = getCurrentActiveApplicationSet(tenant, applicationId);
Slime deployLog = createDeployLog();
DeployLogger logger = new DeployHandlerLogger(deployLog.get().setArray("log"), prepareParams.isVerbose(), applicationId);
ConfigChangeActions actions = session.prepare(logger, prepareParams, currentActiveApplicationSet, tenant.getPath(), now);
logConfigChangeActions(actions, logger);
log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Session " + sessionId + " prepared successfully. ");
return new PrepareResult(sessionId, actions, deployLog);
}
public PrepareResult prepareAndActivate(Tenant tenant, long sessionId, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
PrepareResult result = prepare(tenant, sessionId, prepareParams, now);
activate(tenant, sessionId, prepareParams.getTimeoutBudget(), ignoreLockFailure, ignoreSessionStaleFailure);
return result;
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams) {
return deploy(in, prepareParams, false, false, clock.instant());
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
File tempDir = Files.createTempDir();
PrepareResult prepareResult;
try {
prepareResult = deploy(decompressApplication(in, tempDir), prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
} finally {
cleanupTempDirectory(tempDir);
}
return prepareResult;
}
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams) {
return deploy(applicationPackage, prepareParams, false, false, Instant.now());
}
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
ApplicationId applicationId = prepareParams.getApplicationId();
long sessionId = createSession(applicationId, prepareParams.getTimeoutBudget(), applicationPackage);
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
return prepareAndActivate(tenant, sessionId, prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application) {
return deployFromLocalActive(application, false);
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @param bootstrap the deployment is done when bootstrapping
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application,
boolean bootstrap) {
return deployFromLocalActive(application,
Duration.ofSeconds(configserverConfig.zookeeper().barrierTimeout()).plus(Duration.ofSeconds(5)),
bootstrap);
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @param timeout the timeout to use for each individual deployment operation
* @param bootstrap the deployment is done when bootstrapping
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application,
Duration timeout,
boolean bootstrap) {
Tenant tenant = tenantRepository.getTenant(application.tenant());
if (tenant == null) return Optional.empty();
LocalSession activeSession = getActiveSession(tenant, application);
if (activeSession == null) return Optional.empty();
TimeoutBudget timeoutBudget = new TimeoutBudget(clock, timeout);
LocalSession newSession = tenant.getSessionFactory().createSessionFromExisting(activeSession, logger, true, timeoutBudget);
tenant.getLocalSessionRepo().addSession(newSession);
Version version = decideVersion(application, zone().environment(), newSession.getVespaVersion(), bootstrap);
return Optional.of(Deployment.unprepared(newSession, this, hostProvisioner, tenant, timeout, clock,
false /* don't validate as this is already deployed */, version,
bootstrap));
}
@Override
public Optional<Instant> lastDeployTime(ApplicationId application) {
Tenant tenant = tenantRepository.getTenant(application.tenant());
if (tenant == null) return Optional.empty();
LocalSession activeSession = getActiveSession(tenant, application);
if (activeSession == null) return Optional.empty();
return Optional.of(Instant.ofEpochSecond(activeSession.getCreateTime()));
}
public ApplicationId activate(Tenant tenant,
long sessionId,
TimeoutBudget timeoutBudget,
boolean ignoreLockFailure,
boolean ignoreSessionStaleFailure) {
LocalSession localSession = getLocalSession(tenant, sessionId);
Deployment deployment = deployFromPreparedSession(localSession, tenant, timeoutBudget.timeLeft());
deployment.setIgnoreLockFailure(ignoreLockFailure);
deployment.setIgnoreSessionStaleFailure(ignoreSessionStaleFailure);
deployment.activate();
return localSession.getApplicationId();
}
private Deployment deployFromPreparedSession(LocalSession session, Tenant tenant, Duration timeout) {
return Deployment.prepared(session, this, hostProvisioner, tenant, timeout, clock, false);
}
/**
* Deletes an application
*
* @return true if the application was found and deleted, false if it was not present
* @throws RuntimeException if the delete transaction fails. This method is exception safe.
*/
public boolean delete(ApplicationId applicationId) {
return configserverConfig.deleteApplicationLegacy() ? deleteApplicationLegacy(applicationId) : deleteApplication(applicationId);
}
/**
* Deletes an application
*
* @return true if the application was found and deleted, false if it was not present
* @throws RuntimeException if the delete transaction fails. This method is exception safe.
*/
boolean deleteApplication(ApplicationId applicationId) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
if (tenant == null) return false;
TenantApplications tenantApplications = tenant.getApplicationRepo();
if (!tenantApplications.listApplications().contains(applicationId)) return false;
long sessionId = tenantApplications.getSessionIdForApplication(applicationId);
RemoteSession remoteSession = getRemoteSession(tenant, sessionId);
remoteSession.createDeleteTransaction().commit();
log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Waiting for session " + sessionId + " to be deleted");
Duration waitTime = Duration.ofSeconds(60);
if (localSessionHasBeenDeleted(applicationId, sessionId, waitTime)) {
log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Session " + sessionId + " deleted");
} else {
log.log(LogLevel.ERROR, TenantRepository.logPre(applicationId) + "Session " + sessionId + " was not deleted (waited " + waitTime + ")");
return false;
}
NestedTransaction transaction = new NestedTransaction();
transaction.add(new Rotations(tenant.getCurator(), tenant.getPath()).delete(applicationId));
transaction.add(tenantApplications.deleteApplication(applicationId));
hostProvisioner.ifPresent(provisioner -> provisioner.remove(transaction, applicationId));
transaction.onCommitted(() -> log.log(LogLevel.INFO, "Deleted " + applicationId));
transaction.commit();
return true;
}
/**
* Deletes an application the legacy way (if there is more than one config server, the call needs to be done
* on the config server the application was deployed)
*
* @return true if the application was found and deleted, false if it was not present
* @throws RuntimeException if the delete transaction fails. This method is exception safe.
*/
boolean deleteApplicationLegacy(ApplicationId applicationId) {
Optional<Tenant> owner = Optional.ofNullable(tenantRepository.getTenant(applicationId.tenant()));
if (!owner.isPresent()) return false;
TenantApplications tenantApplications = owner.get().getApplicationRepo();
if (!tenantApplications.listApplications().contains(applicationId)) return false;
long sessionId = tenantApplications.getSessionIdForApplication(applicationId);
LocalSessionRepo localSessionRepo = owner.get().getLocalSessionRepo();
LocalSession session = localSessionRepo.getSession(sessionId);
if (session == null) return false;
NestedTransaction transaction = new NestedTransaction();
localSessionRepo.removeSession(session.getSessionId(), transaction);
session.delete(transaction);
transaction.add(new Rotations(owner.get().getCurator(), owner.get().getPath()).delete(applicationId));
transaction.add(tenantApplications.deleteApplication(applicationId));
hostProvisioner.ifPresent(provisioner -> provisioner.remove(transaction, applicationId));
transaction.onCommitted(() -> log.log(LogLevel.INFO, "Deleted " + applicationId));
transaction.commit();
return true;
}
public HttpResponse clusterControllerStatusPage(ApplicationId applicationId, String hostName, String pathSuffix) {
String relativePath = "clustercontroller-status/" + pathSuffix;
return httpProxy.get(getApplication(applicationId), hostName, "container-clustercontroller", relativePath);
}
public Long getApplicationGeneration(ApplicationId applicationId) {
return getApplication(applicationId).getApplicationGeneration();
}
public void restart(ApplicationId applicationId, HostFilter hostFilter) {
hostProvisioner.ifPresent(provisioner -> provisioner.restart(applicationId, hostFilter));
}
public HttpResponse filedistributionStatus(ApplicationId applicationId, Duration timeout) {
return fileDistributionStatus.status(getApplication(applicationId), timeout);
}
public Set<String> deleteUnusedFiledistributionReferences(File fileReferencesPath) {
if (!fileReferencesPath.isDirectory()) throw new RuntimeException(fileReferencesPath + " is not a directory");
Set<String> fileReferencesInUse = new HashSet<>();
Set<ApplicationId> applicationIds = listApplications();
applicationIds.forEach(applicationId -> fileReferencesInUse.addAll(getApplication(applicationId).getModel().fileReferences()
.stream()
.map(FileReference::value)
.collect(Collectors.toSet())));
log.log(LogLevel.DEBUG, "File references in use : " + fileReferencesInUse);
Set<String> fileReferencesOnDisk = new HashSet<>();
File[] filesOnDisk = fileReferencesPath.listFiles();
if (filesOnDisk != null)
fileReferencesOnDisk.addAll(Arrays.stream(filesOnDisk).map(File::getName).collect(Collectors.toSet()));
log.log(LogLevel.DEBUG, "File references on disk (in " + fileReferencesPath + "): " + fileReferencesOnDisk);
Instant instant = Instant.now().minus(Duration.ofDays(14));
Set<String> fileReferencesToDelete = fileReferencesOnDisk
.stream()
.filter(fileReference -> ! fileReferencesInUse.contains(fileReference))
.filter(fileReference -> isFileLastModifiedBefore(new File(fileReferencesPath, fileReference), instant))
.collect(Collectors.toSet());
if (fileReferencesToDelete.size() > 0) {
log.log(LogLevel.INFO, "Will delete file references not in use: " + fileReferencesToDelete);
fileReferencesToDelete.forEach(fileReference -> {
File file = new File(fileReferencesPath, fileReference);
if ( ! IOUtils.recursiveDeleteDir(file))
log.log(LogLevel.WARNING, "Could not delete " + file.getAbsolutePath());
});
}
return fileReferencesToDelete;
}
public ApplicationFile getApplicationFileFromSession(TenantName tenantName, long sessionId, String path, LocalSession.Mode mode) {
Tenant tenant = tenantRepository.getTenant(tenantName);
return getLocalSession(tenant, sessionId).getApplicationFile(Path.fromString(path), mode);
}
private Application getApplication(ApplicationId applicationId) {
try {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
long sessionId = getSessionIdForApplication(tenant, applicationId);
RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId, 0);
return session.ensureApplicationLoaded().getForVersionOrLatest(Optional.empty(), clock.instant());
} catch (Exception e) {
log.log(LogLevel.WARNING, "Failed getting application for '" + applicationId + "'", e);
throw e;
}
}
Set<ApplicationId> listApplications() {
return tenantRepository.getAllTenants().stream()
.flatMap(tenant -> tenant.getApplicationRepo().listApplications().stream())
.collect(Collectors.toSet());
}
private boolean isFileLastModifiedBefore(File fileReference, Instant instant) {
BasicFileAttributes fileAttributes;
try {
fileAttributes = readAttributes(fileReference.toPath(), BasicFileAttributes.class);
return fileAttributes.lastModifiedTime().toInstant().isBefore(instant);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private boolean localSessionHasBeenDeleted(ApplicationId applicationId, long sessionId, Duration waitTime) {
RemoteSessionRepo remoteSessionRepo = tenantRepository.getTenant(applicationId.tenant()).getRemoteSessionRepo();
Instant end = Instant.now().plus(waitTime);
do {
if (remoteSessionRepo.getSession(sessionId) == null) return true;
try { Thread.sleep(10); } catch (InterruptedException e) { /* ignored */}
} while (Instant.now().isBefore(end));
return false;
}
public HttpResponse checkServiceForConfigConvergence(ApplicationId applicationId, String hostAndPort, URI uri, Duration timeout) {
return convergeChecker.checkService(getApplication(applicationId), hostAndPort, uri, timeout);
}
public HttpResponse servicesToCheckForConfigConvergence(ApplicationId applicationId, URI uri, Duration timeout) {
return convergeChecker.servicesToCheck(getApplication(applicationId), uri, timeout);
}
public HttpResponse getLogs(ApplicationId applicationId) {
String logServerHostName = getLogServerHostname(applicationId);
return LogRetriever.getLogs(logServerHostName);
}
/**
* Gets the active Session for the given application id.
*
* @return the active session, or null if there is no active session for the given application id.
*/
public LocalSession getActiveSession(ApplicationId applicationId) {
return getActiveSession(tenantRepository.getTenant(applicationId.tenant()), applicationId);
}
public long getSessionIdForApplication(Tenant tenant, ApplicationId applicationId) {
return tenant.getApplicationRepo().getSessionIdForApplication(applicationId);
}
public void validateThatRemoteSessionIsNotActive(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if (Session.Status.ACTIVATE.equals(session.getStatus())) {
throw new IllegalStateException("Session is active: " + sessionId);
}
}
public void validateThatRemoteSessionIsPrepared(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if ( ! Session.Status.PREPARE.equals(session.getStatus()))
throw new IllegalStateException("Session not prepared: " + sessionId);
}
public long createSessionFromExisting(ApplicationId applicationId,
DeployLogger logger,
boolean internalRedeploy,
TimeoutBudget timeoutBudget) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession fromSession = getExistingSession(tenant, applicationId);
LocalSession session = sessionFactory.createSessionFromExisting(fromSession, logger, internalRedeploy, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, InputStream in, String contentType) {
File tempDir = Files.createTempDir();
long sessionId;
try {
sessionId = createSession(applicationId, timeoutBudget, decompressApplication(in, contentType, tempDir));
} finally {
cleanupTempDirectory(tempDir);
}
return sessionId;
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, File applicationDirectory) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession session = sessionFactory.createSession(applicationDirectory, applicationId, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
public void deleteExpiredLocalSessions() {
listApplications().forEach(app -> tenantRepository.getTenant(app.tenant()).getLocalSessionRepo().purgeOldSessions());
}
public int deleteExpiredRemoteSessions(Duration expiryTime) {
return listApplications()
.stream()
.map(app -> tenantRepository.getTenant(app.tenant()).getRemoteSessionRepo().deleteExpiredSessions(expiryTime))
.mapToInt(i -> i)
.sum();
}
public Set<TenantName> deleteUnusedTenants(Duration ttlForUnusedTenant, Instant now) {
return tenantRepository.getAllTenantNames().stream()
.filter(tenantName -> activeApplications(tenantName).isEmpty())
.filter(tenantName -> !tenantName.equals(TenantName.defaultName()))
.filter(tenantName -> !tenantName.equals(TenantRepository.HOSTED_VESPA_TENANT))
.filter(tenantName -> tenantRepository.getTenant(tenantName).getCreatedTime().isBefore(now.minus(ttlForUnusedTenant)))
.peek(tenantRepository::deleteTenant)
.collect(Collectors.toSet());
}
public void deleteTenant(TenantName tenantName) {
List<ApplicationId> activeApplications = activeApplications(tenantName);
if (activeApplications.isEmpty())
tenantRepository.deleteTenant(tenantName);
else
throw new IllegalArgumentException("Cannot delete tenant '" + tenantName + "', it has active applications: " + activeApplications);
}
private List<ApplicationId> activeApplications(TenantName tenantName) {
return tenantRepository.getTenant(tenantName).getApplicationRepo().listApplications();
}
public Tenant verifyTenantAndApplication(ApplicationId applicationId) {
TenantName tenantName = applicationId.tenant();
if (!tenantRepository.checkThatTenantExists(tenantName)) {
throw new IllegalArgumentException("Tenant " + tenantName + " was not found.");
}
Tenant tenant = tenantRepository.getTenant(tenantName);
List<ApplicationId> applicationIds = listApplicationIds(tenant);
if (!applicationIds.contains(applicationId)) {
throw new IllegalArgumentException("No such application id: " + applicationId);
}
return tenant;
}
public ApplicationMetaData getMetadataFromSession(Tenant tenant, long sessionId) {
return getLocalSession(tenant, sessionId).getMetaData();
}
public ConfigserverConfig configserverConfig() {
return configserverConfig;
}
private void validateThatLocalSessionIsNotActive(Tenant tenant, long sessionId) {
LocalSession session = getLocalSession(tenant, sessionId);
if (Session.Status.ACTIVATE.equals(session.getStatus())) {
throw new IllegalStateException("Session is active: " + sessionId);
}
}
private LocalSession getLocalSession(Tenant tenant, long sessionId) {
LocalSession session = tenant.getLocalSessionRepo().getSession(sessionId);
if (session == null) throw new NotFoundException("Session " + sessionId + " was not found");
return session;
}
private RemoteSession getRemoteSession(Tenant tenant, long sessionId) {
RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId);
if (session == null) throw new NotFoundException("Session " + sessionId + " was not found");
return session;
}
private Optional<ApplicationSet> getCurrentActiveApplicationSet(Tenant tenant, ApplicationId appId) {
Optional<ApplicationSet> currentActiveApplicationSet = Optional.empty();
TenantApplications applicationRepo = tenant.getApplicationRepo();
try {
long currentActiveSessionId = applicationRepo.getSessionIdForApplication(appId);
RemoteSession currentActiveSession = getRemoteSession(tenant, currentActiveSessionId);
if (currentActiveSession != null) {
currentActiveApplicationSet = Optional.ofNullable(currentActiveSession.ensureApplicationLoaded());
}
} catch (IllegalArgumentException e) {
}
return currentActiveApplicationSet;
}
private File decompressApplication(InputStream in, String contentType, File tempDir) {
try (CompressedApplicationInputStream application =
CompressedApplicationInputStream.createFromCompressedStream(in, contentType)) {
return decompressApplication(application, tempDir);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to decompress data in body", e);
}
}
private File decompressApplication(CompressedApplicationInputStream in, File tempDir) {
try {
return in.decompress(tempDir);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to decompress stream", e);
}
}
private List<ApplicationId> listApplicationIds(Tenant tenant) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
return applicationRepo.listApplications();
}
private void cleanupTempDirectory(File tempDir) {
logger.log(LogLevel.DEBUG, "Deleting tmp dir '" + tempDir + "'");
if (!IOUtils.recursiveDeleteDir(tempDir)) {
logger.log(LogLevel.WARNING, "Not able to delete tmp dir '" + tempDir + "'");
}
}
private LocalSession getExistingSession(Tenant tenant, ApplicationId applicationId) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
return getLocalSession(tenant, applicationRepo.getSessionIdForApplication(applicationId));
}
private LocalSession getActiveSession(Tenant tenant, ApplicationId applicationId) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
if (applicationRepo.listApplications().contains(applicationId)) {
return tenant.getLocalSessionRepo().getSession(applicationRepo.getSessionIdForApplication(applicationId));
}
return null;
}
private static void logConfigChangeActions(ConfigChangeActions actions, DeployLogger logger) {
RestartActions restartActions = actions.getRestartActions();
if ( ! restartActions.isEmpty()) {
logger.log(Level.WARNING, "Change(s) between active and new application that require restart:\n" +
restartActions.format());
}
RefeedActions refeedActions = actions.getRefeedActions();
if ( ! refeedActions.isEmpty()) {
boolean allAllowed = refeedActions.getEntries().stream().allMatch(RefeedActions.Entry::allowed);
logger.log(allAllowed ? Level.INFO : Level.WARNING,
"Change(s) between active and new application that may require re-feed:\n" +
refeedActions.format());
}
}
/** Returns version to use when deploying application in given environment */
static Version decideVersion(ApplicationId application, Environment environment, Version targetVersion, boolean bootstrap) {
if (environment.isManuallyDeployed() &&
!"hosted-vespa".equals(application.tenant().value()) &&
!bootstrap) {
return Vtag.currentVersion;
}
return targetVersion;
}
public Slime createDeployLog() {
Slime deployLog = new Slime();
deployLog.setObject();
return deployLog;
}
public Zone zone() {
return new Zone(SystemName.from(configserverConfig.system()),
Environment.from(configserverConfig.environment()),
RegionName.from(configserverConfig.region()));
}
} | class ApplicationRepository implements com.yahoo.config.provision.Deployer {
private static final Logger log = Logger.getLogger(ApplicationRepository.class.getName());
private final TenantRepository tenantRepository;
private final Optional<Provisioner> hostProvisioner;
private final ConfigConvergenceChecker convergeChecker;
private final HttpProxy httpProxy;
private final Clock clock;
private final DeployLogger logger = new SilentDeployLogger();
private final ConfigserverConfig configserverConfig;
private final FileDistributionStatus fileDistributionStatus;
@Inject
public ApplicationRepository(TenantRepository tenantRepository,
HostProvisionerProvider hostProvisionerProvider,
ConfigConvergenceChecker configConvergenceChecker,
HttpProxy httpProxy,
ConfigserverConfig configserverConfig) {
this(tenantRepository, hostProvisionerProvider.getHostProvisioner(),
configConvergenceChecker, httpProxy, configserverConfig, Clock.systemUTC(), new FileDistributionStatus());
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock) {
this(tenantRepository, hostProvisioner, clock, new ConfigserverConfig(new ConfigserverConfig.Builder()));
}
public ApplicationRepository(TenantRepository tenantRepository,
Provisioner hostProvisioner,
Clock clock,
ConfigserverConfig configserverConfig) {
this(tenantRepository, Optional.of(hostProvisioner), new ConfigConvergenceChecker(), new HttpProxy(new SimpleHttpFetcher()),
configserverConfig, clock, new FileDistributionStatus());
}
private ApplicationRepository(TenantRepository tenantRepository,
Optional<Provisioner> hostProvisioner,
ConfigConvergenceChecker configConvergenceChecker,
HttpProxy httpProxy,
ConfigserverConfig configserverConfig,
Clock clock,
FileDistributionStatus fileDistributionStatus) {
this.tenantRepository = tenantRepository;
this.hostProvisioner = hostProvisioner;
this.convergeChecker = configConvergenceChecker;
this.httpProxy = httpProxy;
this.clock = clock;
this.configserverConfig = configserverConfig;
this.fileDistributionStatus = fileDistributionStatus;
}
public PrepareResult prepare(Tenant tenant, long sessionId, PrepareParams prepareParams, Instant now) {
validateThatLocalSessionIsNotActive(tenant, sessionId);
LocalSession session = getLocalSession(tenant, sessionId);
ApplicationId applicationId = prepareParams.getApplicationId();
Optional<ApplicationSet> currentActiveApplicationSet = getCurrentActiveApplicationSet(tenant, applicationId);
Slime deployLog = createDeployLog();
DeployLogger logger = new DeployHandlerLogger(deployLog.get().setArray("log"), prepareParams.isVerbose(), applicationId);
ConfigChangeActions actions = session.prepare(logger, prepareParams, currentActiveApplicationSet, tenant.getPath(), now);
logConfigChangeActions(actions, logger);
log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Session " + sessionId + " prepared successfully. ");
return new PrepareResult(sessionId, actions, deployLog);
}
public PrepareResult prepareAndActivate(Tenant tenant, long sessionId, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
PrepareResult result = prepare(tenant, sessionId, prepareParams, now);
activate(tenant, sessionId, prepareParams.getTimeoutBudget(), ignoreLockFailure, ignoreSessionStaleFailure);
return result;
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams) {
return deploy(in, prepareParams, false, false, clock.instant());
}
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
File tempDir = Files.createTempDir();
PrepareResult prepareResult;
try {
prepareResult = deploy(decompressApplication(in, tempDir), prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
} finally {
cleanupTempDirectory(tempDir);
}
return prepareResult;
}
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams) {
return deploy(applicationPackage, prepareParams, false, false, Instant.now());
}
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams,
boolean ignoreLockFailure, boolean ignoreSessionStaleFailure, Instant now) {
ApplicationId applicationId = prepareParams.getApplicationId();
long sessionId = createSession(applicationId, prepareParams.getTimeoutBudget(), applicationPackage);
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
return prepareAndActivate(tenant, sessionId, prepareParams, ignoreLockFailure, ignoreSessionStaleFailure, now);
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application) {
return deployFromLocalActive(application, false);
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @param bootstrap the deployment is done when bootstrapping
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application,
boolean bootstrap) {
return deployFromLocalActive(application,
Duration.ofSeconds(configserverConfig.zookeeper().barrierTimeout()).plus(Duration.ofSeconds(5)),
bootstrap);
}
/**
* Creates a new deployment from the active application, if available.
* This is used for system internal redeployments, not on application package changes.
*
* @param application the active application to be redeployed
* @param timeout the timeout to use for each individual deployment operation
* @param bootstrap the deployment is done when bootstrapping
* @return a new deployment from the local active, or empty if a local active application
* was not present for this id (meaning it either is not active or active on another
* node in the config server cluster)
*/
@Override
public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application,
Duration timeout,
boolean bootstrap) {
Tenant tenant = tenantRepository.getTenant(application.tenant());
if (tenant == null) return Optional.empty();
LocalSession activeSession = getActiveSession(tenant, application);
if (activeSession == null) return Optional.empty();
TimeoutBudget timeoutBudget = new TimeoutBudget(clock, timeout);
LocalSession newSession = tenant.getSessionFactory().createSessionFromExisting(activeSession, logger, true, timeoutBudget);
tenant.getLocalSessionRepo().addSession(newSession);
Version version = decideVersion(application, zone().environment(), newSession.getVespaVersion(), bootstrap);
return Optional.of(Deployment.unprepared(newSession, this, hostProvisioner, tenant, timeout, clock,
false /* don't validate as this is already deployed */, version,
bootstrap));
}
@Override
public Optional<Instant> lastDeployTime(ApplicationId application) {
Tenant tenant = tenantRepository.getTenant(application.tenant());
if (tenant == null) return Optional.empty();
LocalSession activeSession = getActiveSession(tenant, application);
if (activeSession == null) return Optional.empty();
return Optional.of(Instant.ofEpochSecond(activeSession.getCreateTime()));
}
public ApplicationId activate(Tenant tenant,
long sessionId,
TimeoutBudget timeoutBudget,
boolean ignoreLockFailure,
boolean ignoreSessionStaleFailure) {
LocalSession localSession = getLocalSession(tenant, sessionId);
Deployment deployment = deployFromPreparedSession(localSession, tenant, timeoutBudget.timeLeft());
deployment.setIgnoreLockFailure(ignoreLockFailure);
deployment.setIgnoreSessionStaleFailure(ignoreSessionStaleFailure);
deployment.activate();
return localSession.getApplicationId();
}
private Deployment deployFromPreparedSession(LocalSession session, Tenant tenant, Duration timeout) {
return Deployment.prepared(session, this, hostProvisioner, tenant, timeout, clock, false);
}
/**
* Deletes an application
*
* @return true if the application was found and deleted, false if it was not present
* @throws RuntimeException if the delete transaction fails. This method is exception safe.
*/
public boolean delete(ApplicationId applicationId) {
return configserverConfig.deleteApplicationLegacy() ? deleteApplicationLegacy(applicationId) : deleteApplication(applicationId);
}
/**
* Deletes an application
*
* @return true if the application was found and deleted, false if it was not present
* @throws RuntimeException if the delete transaction fails. This method is exception safe.
*/
boolean deleteApplication(ApplicationId applicationId) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
if (tenant == null) return false;
TenantApplications tenantApplications = tenant.getApplicationRepo();
if (!tenantApplications.listApplications().contains(applicationId)) return false;
long sessionId = tenantApplications.getSessionIdForApplication(applicationId);
RemoteSession remoteSession = getRemoteSession(tenant, sessionId);
remoteSession.createDeleteTransaction().commit();
log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Waiting for session " + sessionId + " to be deleted");
Duration waitTime = Duration.ofSeconds(60);
if (localSessionHasBeenDeleted(applicationId, sessionId, waitTime)) {
log.log(LogLevel.INFO, TenantRepository.logPre(applicationId) + "Session " + sessionId + " deleted");
} else {
log.log(LogLevel.ERROR, TenantRepository.logPre(applicationId) + "Session " + sessionId + " was not deleted (waited " + waitTime + ")");
return false;
}
NestedTransaction transaction = new NestedTransaction();
transaction.add(new Rotations(tenant.getCurator(), tenant.getPath()).delete(applicationId));
transaction.add(tenantApplications.deleteApplication(applicationId));
hostProvisioner.ifPresent(provisioner -> provisioner.remove(transaction, applicationId));
transaction.onCommitted(() -> log.log(LogLevel.INFO, "Deleted " + applicationId));
transaction.commit();
return true;
}
/**
* Deletes an application the legacy way (if there is more than one config server, the call needs to be done
* on the config server the application was deployed)
*
* @return true if the application was found and deleted, false if it was not present
* @throws RuntimeException if the delete transaction fails. This method is exception safe.
*/
boolean deleteApplicationLegacy(ApplicationId applicationId) {
Optional<Tenant> owner = Optional.ofNullable(tenantRepository.getTenant(applicationId.tenant()));
if (!owner.isPresent()) return false;
TenantApplications tenantApplications = owner.get().getApplicationRepo();
if (!tenantApplications.listApplications().contains(applicationId)) return false;
long sessionId = tenantApplications.getSessionIdForApplication(applicationId);
LocalSessionRepo localSessionRepo = owner.get().getLocalSessionRepo();
LocalSession session = localSessionRepo.getSession(sessionId);
if (session == null) return false;
NestedTransaction transaction = new NestedTransaction();
localSessionRepo.removeSession(session.getSessionId(), transaction);
session.delete(transaction);
transaction.add(new Rotations(owner.get().getCurator(), owner.get().getPath()).delete(applicationId));
transaction.add(tenantApplications.deleteApplication(applicationId));
hostProvisioner.ifPresent(provisioner -> provisioner.remove(transaction, applicationId));
transaction.onCommitted(() -> log.log(LogLevel.INFO, "Deleted " + applicationId));
transaction.commit();
return true;
}
public HttpResponse clusterControllerStatusPage(ApplicationId applicationId, String hostName, String pathSuffix) {
String relativePath = "clustercontroller-status/" + pathSuffix;
return httpProxy.get(getApplication(applicationId), hostName, "container-clustercontroller", relativePath);
}
public Long getApplicationGeneration(ApplicationId applicationId) {
return getApplication(applicationId).getApplicationGeneration();
}
public void restart(ApplicationId applicationId, HostFilter hostFilter) {
hostProvisioner.ifPresent(provisioner -> provisioner.restart(applicationId, hostFilter));
}
public HttpResponse filedistributionStatus(ApplicationId applicationId, Duration timeout) {
return fileDistributionStatus.status(getApplication(applicationId), timeout);
}
public Set<String> deleteUnusedFiledistributionReferences(File fileReferencesPath) {
if (!fileReferencesPath.isDirectory()) throw new RuntimeException(fileReferencesPath + " is not a directory");
Set<String> fileReferencesInUse = new HashSet<>();
Set<ApplicationId> applicationIds = listApplications();
applicationIds.forEach(applicationId -> fileReferencesInUse.addAll(getApplication(applicationId).getModel().fileReferences()
.stream()
.map(FileReference::value)
.collect(Collectors.toSet())));
log.log(LogLevel.DEBUG, "File references in use : " + fileReferencesInUse);
Set<String> fileReferencesOnDisk = new HashSet<>();
File[] filesOnDisk = fileReferencesPath.listFiles();
if (filesOnDisk != null)
fileReferencesOnDisk.addAll(Arrays.stream(filesOnDisk).map(File::getName).collect(Collectors.toSet()));
log.log(LogLevel.DEBUG, "File references on disk (in " + fileReferencesPath + "): " + fileReferencesOnDisk);
Instant instant = Instant.now().minus(Duration.ofDays(14));
Set<String> fileReferencesToDelete = fileReferencesOnDisk
.stream()
.filter(fileReference -> ! fileReferencesInUse.contains(fileReference))
.filter(fileReference -> isFileLastModifiedBefore(new File(fileReferencesPath, fileReference), instant))
.collect(Collectors.toSet());
if (fileReferencesToDelete.size() > 0) {
log.log(LogLevel.INFO, "Will delete file references not in use: " + fileReferencesToDelete);
fileReferencesToDelete.forEach(fileReference -> {
File file = new File(fileReferencesPath, fileReference);
if ( ! IOUtils.recursiveDeleteDir(file))
log.log(LogLevel.WARNING, "Could not delete " + file.getAbsolutePath());
});
}
return fileReferencesToDelete;
}
public ApplicationFile getApplicationFileFromSession(TenantName tenantName, long sessionId, String path, LocalSession.Mode mode) {
Tenant tenant = tenantRepository.getTenant(tenantName);
return getLocalSession(tenant, sessionId).getApplicationFile(Path.fromString(path), mode);
}
private Application getApplication(ApplicationId applicationId) {
try {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
long sessionId = getSessionIdForApplication(tenant, applicationId);
RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId, 0);
return session.ensureApplicationLoaded().getForVersionOrLatest(Optional.empty(), clock.instant());
} catch (Exception e) {
log.log(LogLevel.WARNING, "Failed getting application for '" + applicationId + "'", e);
throw e;
}
}
Set<ApplicationId> listApplications() {
return tenantRepository.getAllTenants().stream()
.flatMap(tenant -> tenant.getApplicationRepo().listApplications().stream())
.collect(Collectors.toSet());
}
private boolean isFileLastModifiedBefore(File fileReference, Instant instant) {
BasicFileAttributes fileAttributes;
try {
fileAttributes = readAttributes(fileReference.toPath(), BasicFileAttributes.class);
return fileAttributes.lastModifiedTime().toInstant().isBefore(instant);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private boolean localSessionHasBeenDeleted(ApplicationId applicationId, long sessionId, Duration waitTime) {
RemoteSessionRepo remoteSessionRepo = tenantRepository.getTenant(applicationId.tenant()).getRemoteSessionRepo();
Instant end = Instant.now().plus(waitTime);
do {
if (remoteSessionRepo.getSession(sessionId) == null) return true;
try { Thread.sleep(10); } catch (InterruptedException e) { /* ignored */}
} while (Instant.now().isBefore(end));
return false;
}
public HttpResponse checkServiceForConfigConvergence(ApplicationId applicationId, String hostAndPort, URI uri, Duration timeout) {
return convergeChecker.checkService(getApplication(applicationId), hostAndPort, uri, timeout);
}
public HttpResponse servicesToCheckForConfigConvergence(ApplicationId applicationId, URI uri, Duration timeout) {
return convergeChecker.servicesToCheck(getApplication(applicationId), uri, timeout);
}
public HttpResponse getLogs(ApplicationId applicationId) {
String logServerHostName = getLogServerHostname(applicationId);
LogRetriever logRetriever = new LogRetriever();
return logRetriever.getLogs(logServerHostName);
}
/**
* Gets the active Session for the given application id.
*
* @return the active session, or null if there is no active session for the given application id.
*/
public LocalSession getActiveSession(ApplicationId applicationId) {
return getActiveSession(tenantRepository.getTenant(applicationId.tenant()), applicationId);
}
public long getSessionIdForApplication(Tenant tenant, ApplicationId applicationId) {
return tenant.getApplicationRepo().getSessionIdForApplication(applicationId);
}
public void validateThatRemoteSessionIsNotActive(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if (Session.Status.ACTIVATE.equals(session.getStatus())) {
throw new IllegalStateException("Session is active: " + sessionId);
}
}
public void validateThatRemoteSessionIsPrepared(Tenant tenant, long sessionId) {
RemoteSession session = getRemoteSession(tenant, sessionId);
if ( ! Session.Status.PREPARE.equals(session.getStatus()))
throw new IllegalStateException("Session not prepared: " + sessionId);
}
public long createSessionFromExisting(ApplicationId applicationId,
DeployLogger logger,
boolean internalRedeploy,
TimeoutBudget timeoutBudget) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession fromSession = getExistingSession(tenant, applicationId);
LocalSession session = sessionFactory.createSessionFromExisting(fromSession, logger, internalRedeploy, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, InputStream in, String contentType) {
File tempDir = Files.createTempDir();
long sessionId;
try {
sessionId = createSession(applicationId, timeoutBudget, decompressApplication(in, contentType, tempDir));
} finally {
cleanupTempDirectory(tempDir);
}
return sessionId;
}
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, File applicationDirectory) {
Tenant tenant = tenantRepository.getTenant(applicationId.tenant());
LocalSessionRepo localSessionRepo = tenant.getLocalSessionRepo();
SessionFactory sessionFactory = tenant.getSessionFactory();
LocalSession session = sessionFactory.createSession(applicationDirectory, applicationId, timeoutBudget);
localSessionRepo.addSession(session);
return session.getSessionId();
}
public void deleteExpiredLocalSessions() {
listApplications().forEach(app -> tenantRepository.getTenant(app.tenant()).getLocalSessionRepo().purgeOldSessions());
}
public int deleteExpiredRemoteSessions(Duration expiryTime) {
return listApplications()
.stream()
.map(app -> tenantRepository.getTenant(app.tenant()).getRemoteSessionRepo().deleteExpiredSessions(expiryTime))
.mapToInt(i -> i)
.sum();
}
public Set<TenantName> deleteUnusedTenants(Duration ttlForUnusedTenant, Instant now) {
return tenantRepository.getAllTenantNames().stream()
.filter(tenantName -> activeApplications(tenantName).isEmpty())
.filter(tenantName -> !tenantName.equals(TenantName.defaultName()))
.filter(tenantName -> !tenantName.equals(TenantRepository.HOSTED_VESPA_TENANT))
.filter(tenantName -> tenantRepository.getTenant(tenantName).getCreatedTime().isBefore(now.minus(ttlForUnusedTenant)))
.peek(tenantRepository::deleteTenant)
.collect(Collectors.toSet());
}
public void deleteTenant(TenantName tenantName) {
List<ApplicationId> activeApplications = activeApplications(tenantName);
if (activeApplications.isEmpty())
tenantRepository.deleteTenant(tenantName);
else
throw new IllegalArgumentException("Cannot delete tenant '" + tenantName + "', it has active applications: " + activeApplications);
}
private List<ApplicationId> activeApplications(TenantName tenantName) {
return tenantRepository.getTenant(tenantName).getApplicationRepo().listApplications();
}
public Tenant verifyTenantAndApplication(ApplicationId applicationId) {
TenantName tenantName = applicationId.tenant();
if (!tenantRepository.checkThatTenantExists(tenantName)) {
throw new IllegalArgumentException("Tenant " + tenantName + " was not found.");
}
Tenant tenant = tenantRepository.getTenant(tenantName);
List<ApplicationId> applicationIds = listApplicationIds(tenant);
if (!applicationIds.contains(applicationId)) {
throw new IllegalArgumentException("No such application id: " + applicationId);
}
return tenant;
}
public ApplicationMetaData getMetadataFromSession(Tenant tenant, long sessionId) {
return getLocalSession(tenant, sessionId).getMetaData();
}
public ConfigserverConfig configserverConfig() {
return configserverConfig;
}
private void validateThatLocalSessionIsNotActive(Tenant tenant, long sessionId) {
LocalSession session = getLocalSession(tenant, sessionId);
if (Session.Status.ACTIVATE.equals(session.getStatus())) {
throw new IllegalStateException("Session is active: " + sessionId);
}
}
private LocalSession getLocalSession(Tenant tenant, long sessionId) {
LocalSession session = tenant.getLocalSessionRepo().getSession(sessionId);
if (session == null) throw new NotFoundException("Session " + sessionId + " was not found");
return session;
}
private RemoteSession getRemoteSession(Tenant tenant, long sessionId) {
RemoteSession session = tenant.getRemoteSessionRepo().getSession(sessionId);
if (session == null) throw new NotFoundException("Session " + sessionId + " was not found");
return session;
}
private Optional<ApplicationSet> getCurrentActiveApplicationSet(Tenant tenant, ApplicationId appId) {
Optional<ApplicationSet> currentActiveApplicationSet = Optional.empty();
TenantApplications applicationRepo = tenant.getApplicationRepo();
try {
long currentActiveSessionId = applicationRepo.getSessionIdForApplication(appId);
RemoteSession currentActiveSession = getRemoteSession(tenant, currentActiveSessionId);
if (currentActiveSession != null) {
currentActiveApplicationSet = Optional.ofNullable(currentActiveSession.ensureApplicationLoaded());
}
} catch (IllegalArgumentException e) {
}
return currentActiveApplicationSet;
}
private File decompressApplication(InputStream in, String contentType, File tempDir) {
try (CompressedApplicationInputStream application =
CompressedApplicationInputStream.createFromCompressedStream(in, contentType)) {
return decompressApplication(application, tempDir);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to decompress data in body", e);
}
}
private File decompressApplication(CompressedApplicationInputStream in, File tempDir) {
try {
return in.decompress(tempDir);
} catch (IOException e) {
throw new IllegalArgumentException("Unable to decompress stream", e);
}
}
private List<ApplicationId> listApplicationIds(Tenant tenant) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
return applicationRepo.listApplications();
}
private void cleanupTempDirectory(File tempDir) {
logger.log(LogLevel.DEBUG, "Deleting tmp dir '" + tempDir + "'");
if (!IOUtils.recursiveDeleteDir(tempDir)) {
logger.log(LogLevel.WARNING, "Not able to delete tmp dir '" + tempDir + "'");
}
}
private LocalSession getExistingSession(Tenant tenant, ApplicationId applicationId) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
return getLocalSession(tenant, applicationRepo.getSessionIdForApplication(applicationId));
}
private LocalSession getActiveSession(Tenant tenant, ApplicationId applicationId) {
TenantApplications applicationRepo = tenant.getApplicationRepo();
if (applicationRepo.listApplications().contains(applicationId)) {
return tenant.getLocalSessionRepo().getSession(applicationRepo.getSessionIdForApplication(applicationId));
}
return null;
}
private static void logConfigChangeActions(ConfigChangeActions actions, DeployLogger logger) {
RestartActions restartActions = actions.getRestartActions();
if ( ! restartActions.isEmpty()) {
logger.log(Level.WARNING, "Change(s) between active and new application that require restart:\n" +
restartActions.format());
}
RefeedActions refeedActions = actions.getRefeedActions();
if ( ! refeedActions.isEmpty()) {
boolean allAllowed = refeedActions.getEntries().stream().allMatch(RefeedActions.Entry::allowed);
logger.log(allAllowed ? Level.INFO : Level.WARNING,
"Change(s) between active and new application that may require re-feed:\n" +
refeedActions.format());
}
}
/** Returns version to use when deploying application in given environment */
static Version decideVersion(ApplicationId application, Environment environment, Version targetVersion, boolean bootstrap) {
if (environment.isManuallyDeployed() &&
!"hosted-vespa".equals(application.tenant().value()) &&
!bootstrap) {
return Vtag.currentVersion;
}
return targetVersion;
}
public Slime createDeployLog() {
Slime deployLog = new Slime();
deployLog.setObject();
return deployLog;
}
public Zone zone() {
return new Zone(SystemName.from(configserverConfig.system()),
Environment.from(configserverConfig.environment()),
RegionName.from(configserverConfig.region()));
}
} |
Missing dot? | private void addLogHandler(ContainerCluster cluster) {
Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handlerLogHandler");
logHandler.addServerBindings("http:
cluster.addComponent(logHandler);
} | Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handlerLogHandler"); | private void addLogHandler(ContainerCluster cluster) {
Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handler.LogHandler");
logHandler.addServerBindings("http:
cluster.addComponent(logHandler);
} | class DomAdminV4Builder extends DomAdminBuilderBase {
private ApplicationId ZONE_APPLICATION_ID = ApplicationId.from("hosted-vespa", "routing", "default");
private final Collection<ContainerModel> containerModels;
private final ConfigModelContext context;
public DomAdminV4Builder(ConfigModelContext context, boolean multitenant, List<ConfigServerSpec> configServerSpecs,
Collection<ContainerModel> containerModels) {
super(context.getApplicationType(), context.getDeployState().getFileRegistry(), multitenant,
configServerSpecs);
this.containerModels = containerModels;
this.context = context;
}
@Override
protected void doBuildAdmin(Admin admin, Element w3cAdminElement) {
ModelElement adminElement = new ModelElement(w3cAdminElement);
admin.addConfigservers(getConfigServersFromSpec(admin));
Optional<NodesSpecification> requestedSlobroks =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("slobroks"), context);
Optional<NodesSpecification> requestedLogservers =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("logservers"), context);
assignSlobroks(requestedSlobroks.orElse(NodesSpecification.nonDedicated(3, context)), admin);
assignLogserver(requestedLogservers.orElse(NodesSpecification.nonDedicated(1, context)), admin);
addLogForwarders(adminElement.getChild("logforwarding"), admin);
}
private void assignSlobroks(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.isDedicated()) {
createSlobroks(admin, allocateHosts(admin.getHostSystem(), "slobroks", nodesSpecification));
}
else {
createSlobroks(admin, pickContainerHostsForSlobrok(nodesSpecification.count(), 2));
}
}
private void assignLogserver(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.count() > 1) throw new IllegalArgumentException("You can only request a single log server");
if (nodesSpecification.isDedicated()) {
Collection<HostResource> hosts = allocateHosts(admin.getHostSystem(), "logserver", nodesSpecification);
if (hosts.isEmpty()) return;
Logserver logserver = createLogserver(admin, hosts);
if (context.getDeployState().isHosted() && context.getDeployState().zone().system() == SystemName.cd)
createAdditionalContainerOnLogserverHost(admin, logserver.getHostResource());
} else if (containerModels.iterator().hasNext()) {
List<HostResource> hosts = sortedContainerHostsFrom(containerModels.iterator().next(), nodesSpecification.count(), false);
if (hosts.isEmpty()) return;
createLogserver(admin, hosts);
} else {
context.getDeployLogger().log(LogLevel.INFO, "No container host available to use for running logserver");
}
}
private void createAdditionalContainerOnLogserverHost(Admin admin, HostResource hostResource) {
ContainerCluster logServerCluster = new ContainerCluster(admin, "logserver-cluster", "logserver-cluster", RankProfileList.empty);
ContainerModel logserverClusterModel = new ContainerModel(context.withParent(admin).withId(logServerCluster.getSubId()));
logserverClusterModel.setCluster(logServerCluster);
addLogHandler(logServerCluster);
Container container = new Container(logServerCluster, "logserver-container", 0);
container.setHostResource(hostResource);
container.initService();
logServerCluster.addContainer(container);
admin.addAndInitializeService(hostResource, container);
}
private Collection<HostResource> allocateHosts(HostSystem hostSystem, String clusterId, NodesSpecification nodesSpecification) {
return nodesSpecification.provision(hostSystem,
ClusterSpec.Type.admin,
ClusterSpec.Id.from(clusterId),
context.getDeployLogger()).keySet();
}
/**
* Returns a list of container hosts to use for an auxiliary cluster.
* The list returns the same nodes on each invocation given the same available nodes.
*
* @param count the desired number of nodes. More nodes may be returned to ensure a smooth transition
* on topology changes, and less nodes may be returned if fewer are available
* @param minHostsPerContainerCluster the desired number of hosts per cluster
*/
private List<HostResource> pickContainerHostsForSlobrok(int count, int minHostsPerContainerCluster) {
Collection<ContainerModel> containerModelsWithSlobrok = containerModels.stream()
.filter(this::shouldHaveSlobrok)
.collect(Collectors.toList());
int hostsPerCluster = (int) Math.max(
minHostsPerContainerCluster,
Math.ceil((double) count / containerModelsWithSlobrok.size()));
List<HostResource> picked = new ArrayList<>();
for (ContainerModel containerModel : containerModelsWithSlobrok)
picked.addAll(pickContainerHostsFrom(containerModel, hostsPerCluster));
return picked;
}
private boolean shouldHaveSlobrok(ContainerModel containerModel) {
ApplicationId applicationId = context.getDeployState().getProperties().applicationId();
if (!applicationId.equals(ZONE_APPLICATION_ID)) {
return true;
}
String clustername = containerModel.getCluster().getName();
return !Objects.equals(clustername, "node-admin");
}
private List<HostResource> pickContainerHostsFrom(ContainerModel model, int count) {
boolean retired = true;
List<HostResource> picked = sortedContainerHostsFrom(model, count, !retired);
picked.addAll(sortedContainerHostsFrom(model, count, retired));
return picked;
}
/** Returns the count first containers in the current model having isRetired set to the given value */
private List<HostResource> sortedContainerHostsFrom(ContainerModel model, int count, boolean retired) {
List<HostResource> hosts = model.getCluster().getContainers().stream()
.filter(container -> retired == container.isRetired())
.map(Container::getHostResource)
.collect(Collectors.toList());
return HostResource.pickHosts(hosts, count, 1);
}
private Logserver createLogserver(Admin admin, Collection<HostResource> hosts) {
Logserver logserver = new Logserver(admin);
logserver.setHostResource(hosts.iterator().next());
admin.setLogserver(logserver);
logserver.initService();
return logserver;
}
private void createSlobroks(Admin admin, Collection<HostResource> hosts) {
if (hosts.isEmpty()) return;
List<Slobrok> slobroks = new ArrayList<>();
int index = 0;
for (HostResource host : hosts) {
Slobrok slobrok = new Slobrok(admin, index++);
slobrok.setHostResource(host);
slobroks.add(slobrok);
slobrok.initService();
}
admin.addSlobroks(slobroks);
}
} | class DomAdminV4Builder extends DomAdminBuilderBase {
private ApplicationId ZONE_APPLICATION_ID = ApplicationId.from("hosted-vespa", "routing", "default");
private final Collection<ContainerModel> containerModels;
private final ConfigModelContext context;
public DomAdminV4Builder(ConfigModelContext context, boolean multitenant, List<ConfigServerSpec> configServerSpecs,
Collection<ContainerModel> containerModels) {
super(context.getApplicationType(), context.getDeployState().getFileRegistry(), multitenant,
configServerSpecs);
this.containerModels = containerModels;
this.context = context;
}
@Override
protected void doBuildAdmin(Admin admin, Element w3cAdminElement) {
ModelElement adminElement = new ModelElement(w3cAdminElement);
admin.addConfigservers(getConfigServersFromSpec(admin));
Optional<NodesSpecification> requestedSlobroks =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("slobroks"), context);
Optional<NodesSpecification> requestedLogservers =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("logservers"), context);
assignSlobroks(requestedSlobroks.orElse(NodesSpecification.nonDedicated(3, context)), admin);
assignLogserver(requestedLogservers.orElse(NodesSpecification.nonDedicated(1, context)), admin);
addLogForwarders(adminElement.getChild("logforwarding"), admin);
}
private void assignSlobroks(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.isDedicated()) {
createSlobroks(admin, allocateHosts(admin.getHostSystem(), "slobroks", nodesSpecification));
}
else {
createSlobroks(admin, pickContainerHostsForSlobrok(nodesSpecification.count(), 2));
}
}
private void assignLogserver(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.count() > 1) throw new IllegalArgumentException("You can only request a single log server");
if (nodesSpecification.isDedicated()) {
Collection<HostResource> hosts = allocateHosts(admin.getHostSystem(), "logserver", nodesSpecification);
if (hosts.isEmpty()) return;
Logserver logserver = createLogserver(admin, hosts);
if (context.getDeployState().isHosted() && context.getDeployState().zone().system() == SystemName.cd)
createAdditionalContainerOnLogserverHost(admin, logserver.getHostResource());
} else if (containerModels.iterator().hasNext()) {
List<HostResource> hosts = sortedContainerHostsFrom(containerModels.iterator().next(), nodesSpecification.count(), false);
if (hosts.isEmpty()) return;
createLogserver(admin, hosts);
} else {
context.getDeployLogger().log(LogLevel.INFO, "No container host available to use for running logserver");
}
}
private void createAdditionalContainerOnLogserverHost(Admin admin, HostResource hostResource) {
ContainerCluster logServerCluster = new ContainerCluster(admin, "logserver-cluster", "logserver-cluster", RankProfileList.empty);
ContainerModel logserverClusterModel = new ContainerModel(context.withParent(admin).withId(logServerCluster.getSubId()));
logserverClusterModel.setCluster(logServerCluster);
addLogHandler(logServerCluster);
Container container = new Container(logServerCluster, "logserver-container", 0);
container.setHostResource(hostResource);
container.initService();
logServerCluster.addContainer(container);
admin.addAndInitializeService(hostResource, container);
}
private Collection<HostResource> allocateHosts(HostSystem hostSystem, String clusterId, NodesSpecification nodesSpecification) {
return nodesSpecification.provision(hostSystem,
ClusterSpec.Type.admin,
ClusterSpec.Id.from(clusterId),
context.getDeployLogger()).keySet();
}
/**
* Returns a list of container hosts to use for an auxiliary cluster.
* The list returns the same nodes on each invocation given the same available nodes.
*
* @param count the desired number of nodes. More nodes may be returned to ensure a smooth transition
* on topology changes, and less nodes may be returned if fewer are available
* @param minHostsPerContainerCluster the desired number of hosts per cluster
*/
private List<HostResource> pickContainerHostsForSlobrok(int count, int minHostsPerContainerCluster) {
Collection<ContainerModel> containerModelsWithSlobrok = containerModels.stream()
.filter(this::shouldHaveSlobrok)
.collect(Collectors.toList());
int hostsPerCluster = (int) Math.max(
minHostsPerContainerCluster,
Math.ceil((double) count / containerModelsWithSlobrok.size()));
List<HostResource> picked = new ArrayList<>();
for (ContainerModel containerModel : containerModelsWithSlobrok)
picked.addAll(pickContainerHostsFrom(containerModel, hostsPerCluster));
return picked;
}
private boolean shouldHaveSlobrok(ContainerModel containerModel) {
ApplicationId applicationId = context.getDeployState().getProperties().applicationId();
if (!applicationId.equals(ZONE_APPLICATION_ID)) {
return true;
}
String clustername = containerModel.getCluster().getName();
return !Objects.equals(clustername, "node-admin");
}
private List<HostResource> pickContainerHostsFrom(ContainerModel model, int count) {
boolean retired = true;
List<HostResource> picked = sortedContainerHostsFrom(model, count, !retired);
picked.addAll(sortedContainerHostsFrom(model, count, retired));
return picked;
}
/** Returns the count first containers in the current model having isRetired set to the given value */
private List<HostResource> sortedContainerHostsFrom(ContainerModel model, int count, boolean retired) {
List<HostResource> hosts = model.getCluster().getContainers().stream()
.filter(container -> retired == container.isRetired())
.map(Container::getHostResource)
.collect(Collectors.toList());
return HostResource.pickHosts(hosts, count, 1);
}
private Logserver createLogserver(Admin admin, Collection<HostResource> hosts) {
Logserver logserver = new Logserver(admin);
logserver.setHostResource(hosts.iterator().next());
admin.setLogserver(logserver);
logserver.initService();
return logserver;
}
private void createSlobroks(Admin admin, Collection<HostResource> hosts) {
if (hosts.isEmpty()) return;
List<Slobrok> slobroks = new ArrayList<>();
int index = 0;
for (HostResource host : hosts) {
Slobrok slobrok = new Slobrok(admin, index++);
slobrok.setHostResource(host);
slobroks.add(slobrok);
slobrok.initService();
}
admin.addSlobroks(slobroks);
}
} |
Ouch, yes | private void addLogHandler(ContainerCluster cluster) {
Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handlerLogHandler");
logHandler.addServerBindings("http:
cluster.addComponent(logHandler);
} | Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handlerLogHandler"); | private void addLogHandler(ContainerCluster cluster) {
Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handler.LogHandler");
logHandler.addServerBindings("http:
cluster.addComponent(logHandler);
} | class DomAdminV4Builder extends DomAdminBuilderBase {
private ApplicationId ZONE_APPLICATION_ID = ApplicationId.from("hosted-vespa", "routing", "default");
private final Collection<ContainerModel> containerModels;
private final ConfigModelContext context;
public DomAdminV4Builder(ConfigModelContext context, boolean multitenant, List<ConfigServerSpec> configServerSpecs,
Collection<ContainerModel> containerModels) {
super(context.getApplicationType(), context.getDeployState().getFileRegistry(), multitenant,
configServerSpecs);
this.containerModels = containerModels;
this.context = context;
}
@Override
protected void doBuildAdmin(Admin admin, Element w3cAdminElement) {
ModelElement adminElement = new ModelElement(w3cAdminElement);
admin.addConfigservers(getConfigServersFromSpec(admin));
Optional<NodesSpecification> requestedSlobroks =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("slobroks"), context);
Optional<NodesSpecification> requestedLogservers =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("logservers"), context);
assignSlobroks(requestedSlobroks.orElse(NodesSpecification.nonDedicated(3, context)), admin);
assignLogserver(requestedLogservers.orElse(NodesSpecification.nonDedicated(1, context)), admin);
addLogForwarders(adminElement.getChild("logforwarding"), admin);
}
private void assignSlobroks(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.isDedicated()) {
createSlobroks(admin, allocateHosts(admin.getHostSystem(), "slobroks", nodesSpecification));
}
else {
createSlobroks(admin, pickContainerHostsForSlobrok(nodesSpecification.count(), 2));
}
}
private void assignLogserver(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.count() > 1) throw new IllegalArgumentException("You can only request a single log server");
if (nodesSpecification.isDedicated()) {
Collection<HostResource> hosts = allocateHosts(admin.getHostSystem(), "logserver", nodesSpecification);
if (hosts.isEmpty()) return;
Logserver logserver = createLogserver(admin, hosts);
if (context.getDeployState().isHosted() && context.getDeployState().zone().system() == SystemName.cd)
createAdditionalContainerOnLogserverHost(admin, logserver.getHostResource());
} else if (containerModels.iterator().hasNext()) {
List<HostResource> hosts = sortedContainerHostsFrom(containerModels.iterator().next(), nodesSpecification.count(), false);
if (hosts.isEmpty()) return;
createLogserver(admin, hosts);
} else {
context.getDeployLogger().log(LogLevel.INFO, "No container host available to use for running logserver");
}
}
private void createAdditionalContainerOnLogserverHost(Admin admin, HostResource hostResource) {
ContainerCluster logServerCluster = new ContainerCluster(admin, "logserver-cluster", "logserver-cluster", RankProfileList.empty);
ContainerModel logserverClusterModel = new ContainerModel(context.withParent(admin).withId(logServerCluster.getSubId()));
logserverClusterModel.setCluster(logServerCluster);
addLogHandler(logServerCluster);
Container container = new Container(logServerCluster, "logserver-container", 0);
container.setHostResource(hostResource);
container.initService();
logServerCluster.addContainer(container);
admin.addAndInitializeService(hostResource, container);
}
private Collection<HostResource> allocateHosts(HostSystem hostSystem, String clusterId, NodesSpecification nodesSpecification) {
return nodesSpecification.provision(hostSystem,
ClusterSpec.Type.admin,
ClusterSpec.Id.from(clusterId),
context.getDeployLogger()).keySet();
}
/**
* Returns a list of container hosts to use for an auxiliary cluster.
* The list returns the same nodes on each invocation given the same available nodes.
*
* @param count the desired number of nodes. More nodes may be returned to ensure a smooth transition
* on topology changes, and less nodes may be returned if fewer are available
* @param minHostsPerContainerCluster the desired number of hosts per cluster
*/
private List<HostResource> pickContainerHostsForSlobrok(int count, int minHostsPerContainerCluster) {
Collection<ContainerModel> containerModelsWithSlobrok = containerModels.stream()
.filter(this::shouldHaveSlobrok)
.collect(Collectors.toList());
int hostsPerCluster = (int) Math.max(
minHostsPerContainerCluster,
Math.ceil((double) count / containerModelsWithSlobrok.size()));
List<HostResource> picked = new ArrayList<>();
for (ContainerModel containerModel : containerModelsWithSlobrok)
picked.addAll(pickContainerHostsFrom(containerModel, hostsPerCluster));
return picked;
}
private boolean shouldHaveSlobrok(ContainerModel containerModel) {
ApplicationId applicationId = context.getDeployState().getProperties().applicationId();
if (!applicationId.equals(ZONE_APPLICATION_ID)) {
return true;
}
String clustername = containerModel.getCluster().getName();
return !Objects.equals(clustername, "node-admin");
}
private List<HostResource> pickContainerHostsFrom(ContainerModel model, int count) {
boolean retired = true;
List<HostResource> picked = sortedContainerHostsFrom(model, count, !retired);
picked.addAll(sortedContainerHostsFrom(model, count, retired));
return picked;
}
/** Returns the count first containers in the current model having isRetired set to the given value */
private List<HostResource> sortedContainerHostsFrom(ContainerModel model, int count, boolean retired) {
List<HostResource> hosts = model.getCluster().getContainers().stream()
.filter(container -> retired == container.isRetired())
.map(Container::getHostResource)
.collect(Collectors.toList());
return HostResource.pickHosts(hosts, count, 1);
}
private Logserver createLogserver(Admin admin, Collection<HostResource> hosts) {
Logserver logserver = new Logserver(admin);
logserver.setHostResource(hosts.iterator().next());
admin.setLogserver(logserver);
logserver.initService();
return logserver;
}
private void createSlobroks(Admin admin, Collection<HostResource> hosts) {
if (hosts.isEmpty()) return;
List<Slobrok> slobroks = new ArrayList<>();
int index = 0;
for (HostResource host : hosts) {
Slobrok slobrok = new Slobrok(admin, index++);
slobrok.setHostResource(host);
slobroks.add(slobrok);
slobrok.initService();
}
admin.addSlobroks(slobroks);
}
} | class DomAdminV4Builder extends DomAdminBuilderBase {
private ApplicationId ZONE_APPLICATION_ID = ApplicationId.from("hosted-vespa", "routing", "default");
private final Collection<ContainerModel> containerModels;
private final ConfigModelContext context;
public DomAdminV4Builder(ConfigModelContext context, boolean multitenant, List<ConfigServerSpec> configServerSpecs,
Collection<ContainerModel> containerModels) {
super(context.getApplicationType(), context.getDeployState().getFileRegistry(), multitenant,
configServerSpecs);
this.containerModels = containerModels;
this.context = context;
}
@Override
protected void doBuildAdmin(Admin admin, Element w3cAdminElement) {
ModelElement adminElement = new ModelElement(w3cAdminElement);
admin.addConfigservers(getConfigServersFromSpec(admin));
Optional<NodesSpecification> requestedSlobroks =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("slobroks"), context);
Optional<NodesSpecification> requestedLogservers =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("logservers"), context);
assignSlobroks(requestedSlobroks.orElse(NodesSpecification.nonDedicated(3, context)), admin);
assignLogserver(requestedLogservers.orElse(NodesSpecification.nonDedicated(1, context)), admin);
addLogForwarders(adminElement.getChild("logforwarding"), admin);
}
private void assignSlobroks(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.isDedicated()) {
createSlobroks(admin, allocateHosts(admin.getHostSystem(), "slobroks", nodesSpecification));
}
else {
createSlobroks(admin, pickContainerHostsForSlobrok(nodesSpecification.count(), 2));
}
}
private void assignLogserver(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.count() > 1) throw new IllegalArgumentException("You can only request a single log server");
if (nodesSpecification.isDedicated()) {
Collection<HostResource> hosts = allocateHosts(admin.getHostSystem(), "logserver", nodesSpecification);
if (hosts.isEmpty()) return;
Logserver logserver = createLogserver(admin, hosts);
if (context.getDeployState().isHosted() && context.getDeployState().zone().system() == SystemName.cd)
createAdditionalContainerOnLogserverHost(admin, logserver.getHostResource());
} else if (containerModels.iterator().hasNext()) {
List<HostResource> hosts = sortedContainerHostsFrom(containerModels.iterator().next(), nodesSpecification.count(), false);
if (hosts.isEmpty()) return;
createLogserver(admin, hosts);
} else {
context.getDeployLogger().log(LogLevel.INFO, "No container host available to use for running logserver");
}
}
private void createAdditionalContainerOnLogserverHost(Admin admin, HostResource hostResource) {
ContainerCluster logServerCluster = new ContainerCluster(admin, "logserver-cluster", "logserver-cluster", RankProfileList.empty);
ContainerModel logserverClusterModel = new ContainerModel(context.withParent(admin).withId(logServerCluster.getSubId()));
logserverClusterModel.setCluster(logServerCluster);
addLogHandler(logServerCluster);
Container container = new Container(logServerCluster, "logserver-container", 0);
container.setHostResource(hostResource);
container.initService();
logServerCluster.addContainer(container);
admin.addAndInitializeService(hostResource, container);
}
private Collection<HostResource> allocateHosts(HostSystem hostSystem, String clusterId, NodesSpecification nodesSpecification) {
return nodesSpecification.provision(hostSystem,
ClusterSpec.Type.admin,
ClusterSpec.Id.from(clusterId),
context.getDeployLogger()).keySet();
}
/**
* Returns a list of container hosts to use for an auxiliary cluster.
* The list returns the same nodes on each invocation given the same available nodes.
*
* @param count the desired number of nodes. More nodes may be returned to ensure a smooth transition
* on topology changes, and less nodes may be returned if fewer are available
* @param minHostsPerContainerCluster the desired number of hosts per cluster
*/
private List<HostResource> pickContainerHostsForSlobrok(int count, int minHostsPerContainerCluster) {
Collection<ContainerModel> containerModelsWithSlobrok = containerModels.stream()
.filter(this::shouldHaveSlobrok)
.collect(Collectors.toList());
int hostsPerCluster = (int) Math.max(
minHostsPerContainerCluster,
Math.ceil((double) count / containerModelsWithSlobrok.size()));
List<HostResource> picked = new ArrayList<>();
for (ContainerModel containerModel : containerModelsWithSlobrok)
picked.addAll(pickContainerHostsFrom(containerModel, hostsPerCluster));
return picked;
}
private boolean shouldHaveSlobrok(ContainerModel containerModel) {
ApplicationId applicationId = context.getDeployState().getProperties().applicationId();
if (!applicationId.equals(ZONE_APPLICATION_ID)) {
return true;
}
String clustername = containerModel.getCluster().getName();
return !Objects.equals(clustername, "node-admin");
}
private List<HostResource> pickContainerHostsFrom(ContainerModel model, int count) {
boolean retired = true;
List<HostResource> picked = sortedContainerHostsFrom(model, count, !retired);
picked.addAll(sortedContainerHostsFrom(model, count, retired));
return picked;
}
/** Returns the count first containers in the current model having isRetired set to the given value */
private List<HostResource> sortedContainerHostsFrom(ContainerModel model, int count, boolean retired) {
List<HostResource> hosts = model.getCluster().getContainers().stream()
.filter(container -> retired == container.isRetired())
.map(Container::getHostResource)
.collect(Collectors.toList());
return HostResource.pickHosts(hosts, count, 1);
}
private Logserver createLogserver(Admin admin, Collection<HostResource> hosts) {
Logserver logserver = new Logserver(admin);
logserver.setHostResource(hosts.iterator().next());
admin.setLogserver(logserver);
logserver.initService();
return logserver;
}
private void createSlobroks(Admin admin, Collection<HostResource> hosts) {
if (hosts.isEmpty()) return;
List<Slobrok> slobroks = new ArrayList<>();
int index = 0;
for (HostResource host : hosts) {
Slobrok slobrok = new Slobrok(admin, index++);
slobrok.setHostResource(host);
slobroks.add(slobrok);
slobrok.initService();
}
admin.addSlobroks(slobroks);
}
} |
Fixed | private void addLogHandler(ContainerCluster cluster) {
Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handlerLogHandler");
logHandler.addServerBindings("http:
cluster.addComponent(logHandler);
} | Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handlerLogHandler"); | private void addLogHandler(ContainerCluster cluster) {
Handler<?> logHandler = Handler.fromClassName("com.yahoo.container.handler.LogHandler");
logHandler.addServerBindings("http:
cluster.addComponent(logHandler);
} | class DomAdminV4Builder extends DomAdminBuilderBase {
private ApplicationId ZONE_APPLICATION_ID = ApplicationId.from("hosted-vespa", "routing", "default");
private final Collection<ContainerModel> containerModels;
private final ConfigModelContext context;
public DomAdminV4Builder(ConfigModelContext context, boolean multitenant, List<ConfigServerSpec> configServerSpecs,
Collection<ContainerModel> containerModels) {
super(context.getApplicationType(), context.getDeployState().getFileRegistry(), multitenant,
configServerSpecs);
this.containerModels = containerModels;
this.context = context;
}
@Override
protected void doBuildAdmin(Admin admin, Element w3cAdminElement) {
ModelElement adminElement = new ModelElement(w3cAdminElement);
admin.addConfigservers(getConfigServersFromSpec(admin));
Optional<NodesSpecification> requestedSlobroks =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("slobroks"), context);
Optional<NodesSpecification> requestedLogservers =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("logservers"), context);
assignSlobroks(requestedSlobroks.orElse(NodesSpecification.nonDedicated(3, context)), admin);
assignLogserver(requestedLogservers.orElse(NodesSpecification.nonDedicated(1, context)), admin);
addLogForwarders(adminElement.getChild("logforwarding"), admin);
}
private void assignSlobroks(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.isDedicated()) {
createSlobroks(admin, allocateHosts(admin.getHostSystem(), "slobroks", nodesSpecification));
}
else {
createSlobroks(admin, pickContainerHostsForSlobrok(nodesSpecification.count(), 2));
}
}
private void assignLogserver(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.count() > 1) throw new IllegalArgumentException("You can only request a single log server");
if (nodesSpecification.isDedicated()) {
Collection<HostResource> hosts = allocateHosts(admin.getHostSystem(), "logserver", nodesSpecification);
if (hosts.isEmpty()) return;
Logserver logserver = createLogserver(admin, hosts);
if (context.getDeployState().isHosted() && context.getDeployState().zone().system() == SystemName.cd)
createAdditionalContainerOnLogserverHost(admin, logserver.getHostResource());
} else if (containerModels.iterator().hasNext()) {
List<HostResource> hosts = sortedContainerHostsFrom(containerModels.iterator().next(), nodesSpecification.count(), false);
if (hosts.isEmpty()) return;
createLogserver(admin, hosts);
} else {
context.getDeployLogger().log(LogLevel.INFO, "No container host available to use for running logserver");
}
}
private void createAdditionalContainerOnLogserverHost(Admin admin, HostResource hostResource) {
ContainerCluster logServerCluster = new ContainerCluster(admin, "logserver-cluster", "logserver-cluster", RankProfileList.empty);
ContainerModel logserverClusterModel = new ContainerModel(context.withParent(admin).withId(logServerCluster.getSubId()));
logserverClusterModel.setCluster(logServerCluster);
addLogHandler(logServerCluster);
Container container = new Container(logServerCluster, "logserver-container", 0);
container.setHostResource(hostResource);
container.initService();
logServerCluster.addContainer(container);
admin.addAndInitializeService(hostResource, container);
}
private Collection<HostResource> allocateHosts(HostSystem hostSystem, String clusterId, NodesSpecification nodesSpecification) {
return nodesSpecification.provision(hostSystem,
ClusterSpec.Type.admin,
ClusterSpec.Id.from(clusterId),
context.getDeployLogger()).keySet();
}
/**
* Returns a list of container hosts to use for an auxiliary cluster.
* The list returns the same nodes on each invocation given the same available nodes.
*
* @param count the desired number of nodes. More nodes may be returned to ensure a smooth transition
* on topology changes, and less nodes may be returned if fewer are available
* @param minHostsPerContainerCluster the desired number of hosts per cluster
*/
private List<HostResource> pickContainerHostsForSlobrok(int count, int minHostsPerContainerCluster) {
Collection<ContainerModel> containerModelsWithSlobrok = containerModels.stream()
.filter(this::shouldHaveSlobrok)
.collect(Collectors.toList());
int hostsPerCluster = (int) Math.max(
minHostsPerContainerCluster,
Math.ceil((double) count / containerModelsWithSlobrok.size()));
List<HostResource> picked = new ArrayList<>();
for (ContainerModel containerModel : containerModelsWithSlobrok)
picked.addAll(pickContainerHostsFrom(containerModel, hostsPerCluster));
return picked;
}
private boolean shouldHaveSlobrok(ContainerModel containerModel) {
ApplicationId applicationId = context.getDeployState().getProperties().applicationId();
if (!applicationId.equals(ZONE_APPLICATION_ID)) {
return true;
}
String clustername = containerModel.getCluster().getName();
return !Objects.equals(clustername, "node-admin");
}
private List<HostResource> pickContainerHostsFrom(ContainerModel model, int count) {
boolean retired = true;
List<HostResource> picked = sortedContainerHostsFrom(model, count, !retired);
picked.addAll(sortedContainerHostsFrom(model, count, retired));
return picked;
}
/** Returns the count first containers in the current model having isRetired set to the given value */
private List<HostResource> sortedContainerHostsFrom(ContainerModel model, int count, boolean retired) {
List<HostResource> hosts = model.getCluster().getContainers().stream()
.filter(container -> retired == container.isRetired())
.map(Container::getHostResource)
.collect(Collectors.toList());
return HostResource.pickHosts(hosts, count, 1);
}
private Logserver createLogserver(Admin admin, Collection<HostResource> hosts) {
Logserver logserver = new Logserver(admin);
logserver.setHostResource(hosts.iterator().next());
admin.setLogserver(logserver);
logserver.initService();
return logserver;
}
private void createSlobroks(Admin admin, Collection<HostResource> hosts) {
if (hosts.isEmpty()) return;
List<Slobrok> slobroks = new ArrayList<>();
int index = 0;
for (HostResource host : hosts) {
Slobrok slobrok = new Slobrok(admin, index++);
slobrok.setHostResource(host);
slobroks.add(slobrok);
slobrok.initService();
}
admin.addSlobroks(slobroks);
}
} | class DomAdminV4Builder extends DomAdminBuilderBase {
private ApplicationId ZONE_APPLICATION_ID = ApplicationId.from("hosted-vespa", "routing", "default");
private final Collection<ContainerModel> containerModels;
private final ConfigModelContext context;
public DomAdminV4Builder(ConfigModelContext context, boolean multitenant, List<ConfigServerSpec> configServerSpecs,
Collection<ContainerModel> containerModels) {
super(context.getApplicationType(), context.getDeployState().getFileRegistry(), multitenant,
configServerSpecs);
this.containerModels = containerModels;
this.context = context;
}
@Override
protected void doBuildAdmin(Admin admin, Element w3cAdminElement) {
ModelElement adminElement = new ModelElement(w3cAdminElement);
admin.addConfigservers(getConfigServersFromSpec(admin));
Optional<NodesSpecification> requestedSlobroks =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("slobroks"), context);
Optional<NodesSpecification> requestedLogservers =
NodesSpecification.optionalDedicatedFromParent(adminElement.getChild("logservers"), context);
assignSlobroks(requestedSlobroks.orElse(NodesSpecification.nonDedicated(3, context)), admin);
assignLogserver(requestedLogservers.orElse(NodesSpecification.nonDedicated(1, context)), admin);
addLogForwarders(adminElement.getChild("logforwarding"), admin);
}
private void assignSlobroks(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.isDedicated()) {
createSlobroks(admin, allocateHosts(admin.getHostSystem(), "slobroks", nodesSpecification));
}
else {
createSlobroks(admin, pickContainerHostsForSlobrok(nodesSpecification.count(), 2));
}
}
private void assignLogserver(NodesSpecification nodesSpecification, Admin admin) {
if (nodesSpecification.count() > 1) throw new IllegalArgumentException("You can only request a single log server");
if (nodesSpecification.isDedicated()) {
Collection<HostResource> hosts = allocateHosts(admin.getHostSystem(), "logserver", nodesSpecification);
if (hosts.isEmpty()) return;
Logserver logserver = createLogserver(admin, hosts);
if (context.getDeployState().isHosted() && context.getDeployState().zone().system() == SystemName.cd)
createAdditionalContainerOnLogserverHost(admin, logserver.getHostResource());
} else if (containerModels.iterator().hasNext()) {
List<HostResource> hosts = sortedContainerHostsFrom(containerModels.iterator().next(), nodesSpecification.count(), false);
if (hosts.isEmpty()) return;
createLogserver(admin, hosts);
} else {
context.getDeployLogger().log(LogLevel.INFO, "No container host available to use for running logserver");
}
}
private void createAdditionalContainerOnLogserverHost(Admin admin, HostResource hostResource) {
ContainerCluster logServerCluster = new ContainerCluster(admin, "logserver-cluster", "logserver-cluster", RankProfileList.empty);
ContainerModel logserverClusterModel = new ContainerModel(context.withParent(admin).withId(logServerCluster.getSubId()));
logserverClusterModel.setCluster(logServerCluster);
addLogHandler(logServerCluster);
Container container = new Container(logServerCluster, "logserver-container", 0);
container.setHostResource(hostResource);
container.initService();
logServerCluster.addContainer(container);
admin.addAndInitializeService(hostResource, container);
}
private Collection<HostResource> allocateHosts(HostSystem hostSystem, String clusterId, NodesSpecification nodesSpecification) {
return nodesSpecification.provision(hostSystem,
ClusterSpec.Type.admin,
ClusterSpec.Id.from(clusterId),
context.getDeployLogger()).keySet();
}
/**
* Returns a list of container hosts to use for an auxiliary cluster.
* The list returns the same nodes on each invocation given the same available nodes.
*
* @param count the desired number of nodes. More nodes may be returned to ensure a smooth transition
* on topology changes, and less nodes may be returned if fewer are available
* @param minHostsPerContainerCluster the desired number of hosts per cluster
*/
private List<HostResource> pickContainerHostsForSlobrok(int count, int minHostsPerContainerCluster) {
Collection<ContainerModel> containerModelsWithSlobrok = containerModels.stream()
.filter(this::shouldHaveSlobrok)
.collect(Collectors.toList());
int hostsPerCluster = (int) Math.max(
minHostsPerContainerCluster,
Math.ceil((double) count / containerModelsWithSlobrok.size()));
List<HostResource> picked = new ArrayList<>();
for (ContainerModel containerModel : containerModelsWithSlobrok)
picked.addAll(pickContainerHostsFrom(containerModel, hostsPerCluster));
return picked;
}
private boolean shouldHaveSlobrok(ContainerModel containerModel) {
ApplicationId applicationId = context.getDeployState().getProperties().applicationId();
if (!applicationId.equals(ZONE_APPLICATION_ID)) {
return true;
}
String clustername = containerModel.getCluster().getName();
return !Objects.equals(clustername, "node-admin");
}
private List<HostResource> pickContainerHostsFrom(ContainerModel model, int count) {
boolean retired = true;
List<HostResource> picked = sortedContainerHostsFrom(model, count, !retired);
picked.addAll(sortedContainerHostsFrom(model, count, retired));
return picked;
}
/** Returns the count first containers in the current model having isRetired set to the given value */
private List<HostResource> sortedContainerHostsFrom(ContainerModel model, int count, boolean retired) {
List<HostResource> hosts = model.getCluster().getContainers().stream()
.filter(container -> retired == container.isRetired())
.map(Container::getHostResource)
.collect(Collectors.toList());
return HostResource.pickHosts(hosts, count, 1);
}
private Logserver createLogserver(Admin admin, Collection<HostResource> hosts) {
Logserver logserver = new Logserver(admin);
logserver.setHostResource(hosts.iterator().next());
admin.setLogserver(logserver);
logserver.initService();
return logserver;
}
private void createSlobroks(Admin admin, Collection<HostResource> hosts) {
if (hosts.isEmpty()) return;
List<Slobrok> slobroks = new ArrayList<>();
int index = 0;
for (HostResource host : hosts) {
Slobrok slobrok = new Slobrok(admin, index++);
slobrok.setHostResource(host);
slobroks.add(slobrok);
slobrok.initService();
}
admin.addSlobroks(slobroks);
}
} |
This should be removed too. | public void createContainer(ContainerName containerName, NodeSpec node, ContainerData containerData) {
PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
logger.info("Creating container " + containerName);
Inet6Address ipV6Address = environment.getIpAddresses().getIPv6Address(node.getHostname()).orElseThrow(
() -> new RuntimeException("Unable to find a valid IPv6 address for " + node.getHostname() +
". Missing an AAAA DNS entry?"));
String configServers = String.join(",", environment.getConfigServerHostNames());
Docker.CreateContainerCommand command = docker.createContainerCommand(
node.getWantedDockerImage().get(),
ContainerResources.from(node.getMinCpuCores(), node.getMinMainMemoryAvailableGb()),
containerName,
node.getHostname())
.withManagedBy(MANAGER_NAME)
.withEnvironment("VESPA_CONFIGSERVERS", configServers)
.withEnvironment("CONTAINER_ENVIRONMENT_SETTINGS",
environment.getContainerEnvironmentResolver().createSettings(environment, node))
.withUlimit("nofile", 262_144, 262_144)
.withUlimit("nproc", 32_768, 409_600)
.withUlimit("core", -1, -1)
.withAddCapability("SYS_PTRACE")
.withAddCapability("SYS_ADMIN");
if (environment.getNodeType() == NodeType.confighost ||
environment.getNodeType() == NodeType.proxyhost) {
command.withVolume("/var/lib/sia", "/var/lib/sia");
}
if (environment.getNodeType() == NodeType.proxyhost) {
command.withVolume("/opt/yahoo/share/ssl/certs/", "/opt/yahoo/share/ssl/certs/");
}
if (environment.getNodeType() == NodeType.host) {
Path zpePathInNode = environment.pathInNodeUnderVespaHome("var/zpe");
if (environment.isRunningOnHost()) {
command.withSharedVolume("/var/zpe", zpePathInNode.toString());
} else {
command.withVolume(environment.pathInHostFromPathInNode(containerName, zpePathInNode).toString(), zpePathInNode.toString());
}
}
DockerNetworking networking = environment.getDockerNetworking();
command.withNetworkMode(networking.getDockerNetworkMode());
if (networking == DockerNetworking.MACVLAN) {
command.withIpAddress(ipV6Address);
command.withNetworkMode(DockerImpl.DOCKER_CUSTOM_MACVLAN_NETWORK_NAME);
command.withSharedVolume("/etc/hosts", "/etc/hosts");
} else if (networking == DockerNetworking.NPT) {
InetAddress ipV6Prefix = InetAddresses.forString(IPV6_NPT_PREFIX);
InetAddress ipV6Local = IPAddresses.prefixTranslate(ipV6Address, ipV6Prefix, 8);
command.withIpAddress(ipV6Local);
Optional<InetAddress> ipV4Local = environment.getIpAddresses().getIPv4Address(node.getHostname())
.map(ipV4Address -> {
InetAddress ipV4Prefix = InetAddresses.forString(IPV4_NPT_PREFIX);
return IPAddresses.prefixTranslate(ipV4Address, ipV4Prefix, 2);
});
ipV4Local.ifPresent(command::withIpAddress);
addEtcHosts(containerData, node.getHostname(), ipV4Local, ipV6Local);
}
for (Path pathInNode : directoriesToMount.keySet()) {
String pathInHost = environment.pathInHostFromPathInNode(containerName, pathInNode).toString();
command.withVolume(pathInHost, pathInNode.toString());
}
long minMainMemoryAvailableMb = (long) (node.getMinMainMemoryAvailableGb() * 1024);
if (minMainMemoryAvailableMb > 0) {
command.withEnvironment("VESPA_TOTAL_MEMORY_MB", Long.toString(minMainMemoryAvailableMb));
}
logger.info("Creating new container with args: " + command);
command.create();
docker.createContainer(command);
} | command.withNetworkMode(DockerImpl.DOCKER_CUSTOM_MACVLAN_NETWORK_NAME); | public void createContainer(ContainerName containerName, NodeSpec node, ContainerData containerData) {
PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
logger.info("Creating container " + containerName);
Inet6Address ipV6Address = environment.getIpAddresses().getIPv6Address(node.getHostname()).orElseThrow(
() -> new RuntimeException("Unable to find a valid IPv6 address for " + node.getHostname() +
". Missing an AAAA DNS entry?"));
String configServers = String.join(",", environment.getConfigServerHostNames());
Docker.CreateContainerCommand command = docker.createContainerCommand(
node.getWantedDockerImage().get(),
ContainerResources.from(node.getMinCpuCores(), node.getMinMainMemoryAvailableGb()),
containerName,
node.getHostname())
.withManagedBy(MANAGER_NAME)
.withEnvironment("VESPA_CONFIGSERVERS", configServers)
.withEnvironment("CONTAINER_ENVIRONMENT_SETTINGS",
environment.getContainerEnvironmentResolver().createSettings(environment, node))
.withUlimit("nofile", 262_144, 262_144)
.withUlimit("nproc", 32_768, 409_600)
.withUlimit("core", -1, -1)
.withAddCapability("SYS_PTRACE")
.withAddCapability("SYS_ADMIN");
if (environment.getNodeType() == NodeType.confighost ||
environment.getNodeType() == NodeType.proxyhost) {
command.withVolume("/var/lib/sia", "/var/lib/sia");
}
if (environment.getNodeType() == NodeType.proxyhost) {
command.withVolume("/opt/yahoo/share/ssl/certs/", "/opt/yahoo/share/ssl/certs/");
}
if (environment.getNodeType() == NodeType.host) {
Path zpePathInNode = environment.pathInNodeUnderVespaHome("var/zpe");
if (environment.isRunningOnHost()) {
command.withSharedVolume("/var/zpe", zpePathInNode.toString());
} else {
command.withVolume(environment.pathInHostFromPathInNode(containerName, zpePathInNode).toString(), zpePathInNode.toString());
}
}
DockerNetworking networking = environment.getDockerNetworking();
command.withNetworkMode(networking.getDockerNetworkMode());
if (networking == DockerNetworking.MACVLAN) {
command.withIpAddress(ipV6Address);
command.withSharedVolume("/etc/hosts", "/etc/hosts");
} else if (networking == DockerNetworking.NPT) {
InetAddress ipV6Prefix = InetAddresses.forString(IPV6_NPT_PREFIX);
InetAddress ipV6Local = IPAddresses.prefixTranslate(ipV6Address, ipV6Prefix, 8);
command.withIpAddress(ipV6Local);
Optional<InetAddress> ipV4Local = environment.getIpAddresses().getIPv4Address(node.getHostname())
.map(ipV4Address -> {
InetAddress ipV4Prefix = InetAddresses.forString(IPV4_NPT_PREFIX);
return IPAddresses.prefixTranslate(ipV4Address, ipV4Prefix, 2);
});
ipV4Local.ifPresent(command::withIpAddress);
addEtcHosts(containerData, node.getHostname(), ipV4Local, ipV6Local);
}
for (Path pathInNode : directoriesToMount.keySet()) {
String pathInHost = environment.pathInHostFromPathInNode(containerName, pathInNode).toString();
command.withVolume(pathInHost, pathInNode.toString());
}
long minMainMemoryAvailableMb = (long) (node.getMinMainMemoryAvailableGb() * 1024);
if (minMainMemoryAvailableMb > 0) {
command.withEnvironment("VESPA_TOTAL_MEMORY_MB", Long.toString(minMainMemoryAvailableMb));
}
logger.info("Creating new container with args: " + command);
command.create();
docker.createContainer(command);
} | class DockerOperationsImpl implements DockerOperations {
private static final String MANAGER_NAME = "node-admin";
private static final String IPV6_NPT_PREFIX = "fd00::";
private static final String IPV4_NPT_PREFIX = "172.17.0.0";
private final Docker docker;
private final Environment environment;
private final ProcessExecuter processExecuter;
private final String nodeProgram;
private final Map<Path, Boolean> directoriesToMount;
public DockerOperationsImpl(Docker docker, Environment environment, ProcessExecuter processExecuter) {
this.docker = docker;
this.environment = environment;
this.processExecuter = processExecuter;
this.nodeProgram = environment.pathInNodeUnderVespaHome("bin/vespa-nodectl").toString();
this.directoriesToMount = getDirectoriesToMount(environment);
}
@Override
void addEtcHosts(ContainerData containerData,
String hostname,
Optional<InetAddress> ipV4Local,
InetAddress ipV6Local) {
StringBuilder etcHosts = new StringBuilder(
"
"127.0.0.1\tlocalhost\n" +
"::1\tlocalhost ip6-localhost ip6-loopback\n" +
"fe00::0\tip6-localnet\n" +
"ff00::0\tip6-mcastprefix\n" +
"ff02::1\tip6-allnodes\n" +
"ff02::2\tip6-allrouters\n" +
ipV6Local.getHostAddress() + '\t' + hostname + '\n');
ipV4Local.ifPresent(ipv4 -> etcHosts.append(ipv4.getHostAddress() + '\t' + hostname + '\n'));
containerData.addFile(Paths.get("/etc/hosts"), etcHosts.toString());
}
@Override
public void startContainer(ContainerName containerName) {
PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
logger.info("Starting container " + containerName);
if (environment.getDockerNetworking() == DockerNetworking.MACVLAN) {
docker.connectContainerToNetwork(containerName, "bridge");
}
docker.startContainer(containerName);
if (environment.getDockerNetworking() == DockerNetworking.MACVLAN) {
setupContainerNetworkConnectivity(containerName);
}
directoriesToMount.entrySet().stream()
.filter(Map.Entry::getValue)
.map(Map.Entry::getKey)
.forEach(path ->
docker.executeInContainerAsRoot(containerName, "chmod", "-R", "a+w", path.toString()));
}
@Override
public void removeContainer(Container existingContainer) {
final ContainerName containerName = existingContainer.name;
PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
if (existingContainer.state.isRunning()) {
logger.info("Stopping container " + containerName.asString());
docker.stopContainer(containerName);
}
logger.info("Deleting container " + containerName.asString());
docker.deleteContainer(containerName);
}
@Override
public Optional<Container> getContainer(ContainerName containerName) {
return docker.getContainer(containerName);
}
/**
* Try to suspend node. Suspending a node means the node should be taken offline,
* such that maintenance can be done of the node (upgrading, rebooting, etc),
* and such that we will start serving again as soon as possible afterwards.
* <p>
* Any failures are logged and ignored.
*/
@Override
public void trySuspendNode(ContainerName containerName) {
try {
executeCommandInContainer(containerName, nodeProgram, "suspend");
} catch (RuntimeException e) {
PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
logger.warning("Failed trying to suspend container " + containerName.asString(), e);
}
}
/**
* For macvlan:
* <p>
* Due to a bug in docker (https:
* IPv6 gateway in containers connected to more than one docker network
*/
private void setupContainerNetworkConnectivity(ContainerName containerName) {
InetAddress hostDefaultGateway = uncheck(() -> DockerNetworkCreator.getDefaultGatewayLinux(true));
executeCommandInNetworkNamespace(containerName,
"route", "-A", "inet6", "add", "default", "gw", hostDefaultGateway.getHostAddress(), "dev", "eth1");
}
@Override
public boolean pullImageAsyncIfNeeded(DockerImage dockerImage) {
return docker.pullImageAsyncIfNeeded(dockerImage);
}
ProcessResult executeCommandInContainer(ContainerName containerName, String... command) {
ProcessResult result = docker.executeInContainerAsRoot(containerName, command);
if (!result.isSuccess()) {
throw new RuntimeException("Container " + containerName.asString() +
": command " + Arrays.toString(command) + " failed: " + result);
}
return result;
}
@Override
public ProcessResult executeCommandInContainerAsRoot(ContainerName containerName, Long timeoutSeconds, String... command) {
return docker.executeInContainerAsRoot(containerName, timeoutSeconds, command);
}
@Override
public ProcessResult executeCommandInContainerAsRoot(ContainerName containerName, String... command) {
return docker.executeInContainerAsRoot(containerName, command);
}
@Override
public ProcessResult executeCommandInNetworkNamespace(ContainerName containerName, String... command) {
final PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
final Integer containerPid = docker.getContainer(containerName)
.filter(container -> container.state.isRunning())
.map(container -> container.pid)
.orElseThrow(() -> new RuntimeException("PID not found for container with name: " +
containerName.asString()));
Path procPath = environment.getPathResolver().getPathToRootOfHost().resolve("proc");
final String[] wrappedCommand = Stream.concat(
Stream.of("sudo", "nsenter", String.format("--net=%s/%d/ns/net", procPath, containerPid), "--"),
Stream.of(command))
.toArray(String[]::new);
try {
Pair<Integer, String> result = processExecuter.exec(wrappedCommand);
if (result.getFirst() != 0) {
String msg = String.format(
"Failed to execute %s in network namespace for %s (PID = %d), exit code: %d, output: %s",
Arrays.toString(wrappedCommand), containerName.asString(), containerPid, result.getFirst(), result.getSecond());
logger.error(msg);
throw new RuntimeException(msg);
}
return new ProcessResult(0, result.getSecond(), "");
} catch (IOException e) {
logger.warning(String.format("IOException while executing %s in network namespace for %s (PID = %d)",
Arrays.toString(wrappedCommand), containerName.asString(), containerPid), e);
throw new RuntimeException(e);
}
}
@Override
public void resumeNode(ContainerName containerName) {
executeCommandInContainer(containerName, nodeProgram, "resume");
}
@Override
public void restartVespaOnNode(ContainerName containerName) {
executeCommandInContainer(containerName, nodeProgram, "restart-vespa");
}
@Override
public void stopServicesOnNode(ContainerName containerName) {
executeCommandInContainer(containerName, nodeProgram, "stop");
}
@Override
public Optional<Docker.ContainerStats> getContainerStats(ContainerName containerName) {
return docker.getContainerStats(containerName);
}
@Override
public List<Container> getAllManagedContainers() {
return docker.getAllContainersManagedBy(MANAGER_NAME);
}
@Override
public void deleteUnusedDockerImages() {
docker.deleteUnusedDockerImages();
}
/**
* Returns map of directories to mount and whether they should be writable by everyone
*/
private static Map<Path, Boolean> getDirectoriesToMount(Environment environment) {
final Map<Path, Boolean> directoriesToMount = new HashMap<>();
directoriesToMount.put(Paths.get("/etc/yamas-agent"), true);
directoriesToMount.put(Paths.get("/etc/filebeat"), true);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/daemontools_y"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/jdisc_core"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/langdetect/"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/vespa"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yca"), true);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yck"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yell"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ykeykey"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ykeykeyd"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yms_agent"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ysar"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ystatus"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/zpu"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/cache"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/crash"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/db/jdisc"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/db/vespa"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/jdisc_container"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/jdisc_core"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/maven"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/run"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/scoreboards"), true);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/service"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/share"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/spool"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/vespa"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/yca"), true);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/ycore++"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/zookeeper"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("tmp"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/container-data"), false);
if (environment.getNodeType() == NodeType.proxyhost)
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/vespa-hosted/routing"), true);
if (environment.getNodeType() == NodeType.host)
directoriesToMount.put(Paths.get("/var/lib/sia"), true);
return Collections.unmodifiableMap(directoriesToMount);
}
} | class DockerOperationsImpl implements DockerOperations {
private static final String MANAGER_NAME = "node-admin";
private static final String IPV6_NPT_PREFIX = "fd00::";
private static final String IPV4_NPT_PREFIX = "172.17.0.0";
private final Docker docker;
private final Environment environment;
private final ProcessExecuter processExecuter;
private final String nodeProgram;
private final Map<Path, Boolean> directoriesToMount;
public DockerOperationsImpl(Docker docker, Environment environment, ProcessExecuter processExecuter) {
this.docker = docker;
this.environment = environment;
this.processExecuter = processExecuter;
this.nodeProgram = environment.pathInNodeUnderVespaHome("bin/vespa-nodectl").toString();
this.directoriesToMount = getDirectoriesToMount(environment);
}
@Override
void addEtcHosts(ContainerData containerData,
String hostname,
Optional<InetAddress> ipV4Local,
InetAddress ipV6Local) {
StringBuilder etcHosts = new StringBuilder(
"
"127.0.0.1\tlocalhost\n" +
"::1\tlocalhost ip6-localhost ip6-loopback\n" +
"fe00::0\tip6-localnet\n" +
"ff00::0\tip6-mcastprefix\n" +
"ff02::1\tip6-allnodes\n" +
"ff02::2\tip6-allrouters\n" +
ipV6Local.getHostAddress() + '\t' + hostname + '\n');
ipV4Local.ifPresent(ipv4 -> etcHosts.append(ipv4.getHostAddress() + '\t' + hostname + '\n'));
containerData.addFile(Paths.get("/etc/hosts"), etcHosts.toString());
}
@Override
public void startContainer(ContainerName containerName) {
PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
logger.info("Starting container " + containerName);
if (environment.getDockerNetworking() == DockerNetworking.MACVLAN) {
docker.connectContainerToNetwork(containerName, "bridge");
}
docker.startContainer(containerName);
if (environment.getDockerNetworking() == DockerNetworking.MACVLAN) {
setupContainerNetworkConnectivity(containerName);
}
directoriesToMount.entrySet().stream()
.filter(Map.Entry::getValue)
.map(Map.Entry::getKey)
.forEach(path ->
docker.executeInContainerAsRoot(containerName, "chmod", "-R", "a+w", path.toString()));
}
@Override
public void removeContainer(Container existingContainer) {
final ContainerName containerName = existingContainer.name;
PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
if (existingContainer.state.isRunning()) {
logger.info("Stopping container " + containerName.asString());
docker.stopContainer(containerName);
}
logger.info("Deleting container " + containerName.asString());
docker.deleteContainer(containerName);
}
@Override
public Optional<Container> getContainer(ContainerName containerName) {
return docker.getContainer(containerName);
}
/**
* Try to suspend node. Suspending a node means the node should be taken offline,
* such that maintenance can be done of the node (upgrading, rebooting, etc),
* and such that we will start serving again as soon as possible afterwards.
* <p>
* Any failures are logged and ignored.
*/
@Override
public void trySuspendNode(ContainerName containerName) {
try {
executeCommandInContainer(containerName, nodeProgram, "suspend");
} catch (RuntimeException e) {
PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
logger.warning("Failed trying to suspend container " + containerName.asString(), e);
}
}
/**
* For macvlan:
* <p>
* Due to a bug in docker (https:
* IPv6 gateway in containers connected to more than one docker network
*/
private void setupContainerNetworkConnectivity(ContainerName containerName) {
InetAddress hostDefaultGateway = uncheck(() -> DockerNetworkCreator.getDefaultGatewayLinux(true));
executeCommandInNetworkNamespace(containerName,
"route", "-A", "inet6", "add", "default", "gw", hostDefaultGateway.getHostAddress(), "dev", "eth1");
}
@Override
public boolean pullImageAsyncIfNeeded(DockerImage dockerImage) {
return docker.pullImageAsyncIfNeeded(dockerImage);
}
ProcessResult executeCommandInContainer(ContainerName containerName, String... command) {
ProcessResult result = docker.executeInContainerAsRoot(containerName, command);
if (!result.isSuccess()) {
throw new RuntimeException("Container " + containerName.asString() +
": command " + Arrays.toString(command) + " failed: " + result);
}
return result;
}
@Override
public ProcessResult executeCommandInContainerAsRoot(ContainerName containerName, Long timeoutSeconds, String... command) {
return docker.executeInContainerAsRoot(containerName, timeoutSeconds, command);
}
@Override
public ProcessResult executeCommandInContainerAsRoot(ContainerName containerName, String... command) {
return docker.executeInContainerAsRoot(containerName, command);
}
@Override
public ProcessResult executeCommandInNetworkNamespace(ContainerName containerName, String... command) {
final PrefixLogger logger = PrefixLogger.getNodeAgentLogger(DockerOperationsImpl.class, containerName);
final Integer containerPid = docker.getContainer(containerName)
.filter(container -> container.state.isRunning())
.map(container -> container.pid)
.orElseThrow(() -> new RuntimeException("PID not found for container with name: " +
containerName.asString()));
Path procPath = environment.getPathResolver().getPathToRootOfHost().resolve("proc");
final String[] wrappedCommand = Stream.concat(
Stream.of("sudo", "nsenter", String.format("--net=%s/%d/ns/net", procPath, containerPid), "--"),
Stream.of(command))
.toArray(String[]::new);
try {
Pair<Integer, String> result = processExecuter.exec(wrappedCommand);
if (result.getFirst() != 0) {
String msg = String.format(
"Failed to execute %s in network namespace for %s (PID = %d), exit code: %d, output: %s",
Arrays.toString(wrappedCommand), containerName.asString(), containerPid, result.getFirst(), result.getSecond());
logger.error(msg);
throw new RuntimeException(msg);
}
return new ProcessResult(0, result.getSecond(), "");
} catch (IOException e) {
logger.warning(String.format("IOException while executing %s in network namespace for %s (PID = %d)",
Arrays.toString(wrappedCommand), containerName.asString(), containerPid), e);
throw new RuntimeException(e);
}
}
@Override
public void resumeNode(ContainerName containerName) {
executeCommandInContainer(containerName, nodeProgram, "resume");
}
@Override
public void restartVespaOnNode(ContainerName containerName) {
executeCommandInContainer(containerName, nodeProgram, "restart-vespa");
}
@Override
public void stopServicesOnNode(ContainerName containerName) {
executeCommandInContainer(containerName, nodeProgram, "stop");
}
@Override
public Optional<Docker.ContainerStats> getContainerStats(ContainerName containerName) {
return docker.getContainerStats(containerName);
}
@Override
public List<Container> getAllManagedContainers() {
return docker.getAllContainersManagedBy(MANAGER_NAME);
}
@Override
public void deleteUnusedDockerImages() {
docker.deleteUnusedDockerImages();
}
/**
* Returns map of directories to mount and whether they should be writable by everyone
*/
private static Map<Path, Boolean> getDirectoriesToMount(Environment environment) {
final Map<Path, Boolean> directoriesToMount = new HashMap<>();
directoriesToMount.put(Paths.get("/etc/yamas-agent"), true);
directoriesToMount.put(Paths.get("/etc/filebeat"), true);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/daemontools_y"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/jdisc_core"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/langdetect/"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/vespa"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yca"), true);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yck"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yell"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ykeykey"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ykeykeyd"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/yms_agent"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ysar"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/ystatus"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("logs/zpu"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/cache"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/crash"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/db/jdisc"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/db/vespa"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/jdisc_container"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/jdisc_core"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/maven"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/run"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/scoreboards"), true);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/service"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/share"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/spool"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/vespa"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/yca"), true);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/ycore++"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/zookeeper"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("tmp"), false);
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/container-data"), false);
if (environment.getNodeType() == NodeType.proxyhost)
directoriesToMount.put(environment.pathInNodeUnderVespaHome("var/vespa-hosted/routing"), true);
if (environment.getNodeType() == NodeType.host)
directoriesToMount.put(Paths.get("/var/lib/sia"), true);
return Collections.unmodifiableMap(directoriesToMount);
}
} |
Not entirely sure if I understand the intended semantics of this. Is it guaranteed that `upd` has the same field ID as the update previously kept at the location? Otherwise the hash map backing `fieldUpdates` may be arbitrarily rehashed and reordered, completely disassociating existing indices. Optionally add a check that field ID matches existing update to avoid Fun Times(tm). | public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
if (index < 0 || index >= fieldUpdates.size()) {
throw new IndexOutOfBoundsException("Index " + index + " is outside of [" + 0 + ", " + fieldUpdates.size() + ">");
}
for (FieldUpdate fieldUpdate : fieldUpdates.values()) {
if (index-- == 0) {
addFieldUpdateNoCheck(fieldUpdate);
return fieldUpdate;
}
}
return null;
} | return fieldUpdate; | public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
FieldUpdate old = fieldUpdates.get(index);
if (old.getField().equals(upd.getField())) {
fieldUpdates.set(index, upd);
id2FieldUpdateMap.put(upd.getField().getId(), upd);
} else {
throw new IllegalArgumentException("You can not replace a FieldUpdate for field '" + old.getField() +
"' with an update for field '" + upd.getField() + "'");
}
return old;
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private Map<Integer, FieldUpdate> fieldUpdates;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new HashMap<Integer, FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new HashMap<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, Map<Integer, FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
private void verifyType(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc.getId() + " with type " + doc.getDataType() + " must have same type as update, which is type " + documentType);
}
}
/**
* Applies this document update.
*
* @param doc the document to apply the update to
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate applyTo(Document doc) {
verifyType(doc);
for (FieldUpdate fieldUpdate : fieldUpdates.values()) {
fieldUpdate.applyTo(doc);
}
for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
fieldPathUpdate.applyTo(doc);
}
return this;
}
/**
* Prune away any field update that will not modify any field in the document.
* @param doc document to check against
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate prune(Document doc) {
verifyType(doc);
for (Iterator<Map.Entry<Integer, FieldUpdate>> iter = fieldUpdates.entrySet().iterator(); iter.hasNext();) {
Map.Entry<Integer, FieldUpdate> entry = iter.next();
FieldUpdate update = entry.getValue();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && currentValue.equals(last.getValue())) {
iter.remove();
}
} else if (last instanceof ClearValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if (currentValue == null) {
iter.remove();
} else {
FieldValue copy = currentValue.clone();
copy.clear();
if (currentValue.equals(copy)) {
iter.remove();
}
}
}
}
}
return this;
}
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
@Deprecated
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(new ArrayList<>(fieldUpdates.values()));
}
/**
* Get an unmodifiable collection of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public Collection<FieldUpdate> getFieldUpdatesCollection() {
return Collections.unmodifiableCollection(fieldUpdates.values());
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
@Deprecated
public FieldUpdate getFieldUpdate(int index) {
if (index < 0 || index >= fieldUpdates.size()) {
throw new IndexOutOfBoundsException("Index " + index + " is outside of [" + 0 + ", " + fieldUpdates.size() + ">");
}
for (FieldUpdate fieldUpdate : fieldUpdates.values()) {
if (index-- == 0) {
return fieldUpdate;
}
}
return null;
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
@Deprecated
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdateById(field.getId());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
for (FieldUpdate fieldUpdate : fieldUpdates.values()) {
if (fieldUpdate.getField().getName().equals(fieldName))
return fieldUpdate;
}
return null;
}
private FieldUpdate getFieldUpdateById(Integer fieldId) {
return fieldUpdates.get(fieldId);
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(Collection<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
for (FieldUpdate fieldUpdate : fieldUpdates) {
this.fieldUpdates.put(fieldUpdate.getField().getId(), fieldUpdate);
}
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
String fieldName = update.getField().getName();
if (!documentType.hasField(fieldName)) {
throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" + fieldName + "'.");
}
FieldUpdate prevUpdate = getFieldUpdate(fieldName);
if (prevUpdate != update) {
if (prevUpdate != null) {
prevUpdate.addAll(update);
} else {
fieldUpdates.put(update.getField().getId(), update);
}
}
return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.put(fieldUpdate.getField().getId(), fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates.values()) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
@Deprecated
public FieldUpdate removeFieldUpdate(int index) {
FieldUpdate prev = getFieldUpdate(index);
return removeFieldUpdate(prev.getField());
}
public FieldUpdate removeFieldUpdate(Field field) {
return fieldUpdates.remove(field.getId());
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof DocumentUpdate)) return false;
DocumentUpdate that = (DocumentUpdate) o;
if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
return false;
if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
if (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;
return true;
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder string = new StringBuilder();
string.append("update of document '");
string.append(docId);
string.append("': ");
string.append("create-if-non-existent=");
string.append(createIfNonExistent.orElse(false));
string.append(": ");
string.append("[");
for (Iterator<FieldUpdate> i = fieldUpdates.values().iterator(); i.hasNext();) {
FieldUpdate fieldUpdate = i.next();
string.append(fieldUpdate);
if (i.hasNext()) {
string.append(", ");
}
}
string.append("]");
if (fieldPathUpdates.size() > 0) {
string.append(" [ ");
for (FieldPathUpdate up : fieldPathUpdates) {
string.append(up.toString() + " ");
}
string.append(" ]");
}
return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private List<FieldUpdate> fieldUpdates;
private final Map<Integer, FieldUpdate> id2FieldUpdateMap;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new HashMap<>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
id2FieldUpdateMap = new HashMap<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, Map<Integer, FieldUpdate> id2fieldUpdateMap) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = new ArrayList<>(id2fieldUpdateMap.values());
id2FieldUpdateMap = id2fieldUpdateMap;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
private void verifyType(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc.getId() + " with type " + doc.getDataType() + " must have same type as update, which is type " + documentType);
}
}
/**
* Applies this document update.
*
* @param doc the document to apply the update to
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate applyTo(Document doc) {
verifyType(doc);
for (FieldUpdate fieldUpdate : id2FieldUpdateMap.values()) {
fieldUpdate.applyTo(doc);
}
for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
fieldPathUpdate.applyTo(doc);
}
return this;
}
/**
* Prune away any field update that will not modify any field in the document.
* @param doc document to check against
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate prune(Document doc) {
verifyType(doc);
for (Iterator<Map.Entry<Integer, FieldUpdate>> iter = id2FieldUpdateMap.entrySet().iterator(); iter.hasNext();) {
Map.Entry<Integer, FieldUpdate> entry = iter.next();
FieldUpdate update = entry.getValue();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && currentValue.equals(last.getValue())) {
iter.remove();
}
} else if (last instanceof ClearValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if (currentValue == null) {
iter.remove();
} else {
FieldValue copy = currentValue.clone();
copy.clear();
if (currentValue.equals(copy)) {
iter.remove();
}
}
}
}
}
return this;
}
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
* @deprecated Use fieldUpdates() instead.
*/
@Deprecated
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable collection of all field updates that this document update specifies.
*
* @return a collection of all FieldUpdates in this DocumentUpdate
*/
public Collection<FieldUpdate> fieldUpdates() {
return Collections.unmodifiableCollection(id2FieldUpdateMap.values());
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
* @deprecated Use fieldPathUpdates() instead.
*/
@Deprecated
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/**
* Get an unmodifiable collection of all field path updates that this document update specifies.
*
* @return a collection of all FieldPathUpdates in this DocumentUpdate
*/
public Collection<FieldPathUpdate> fieldPathUpdates() {
return Collections.unmodifiableCollection(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
* @deprecated use getFieldUpdate(Field field) instead.
*/
@Deprecated
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
* @deprecated Use removeFieldUpdate/addFieldUpdate instead
*/
@Deprecated
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdateById(field.getId());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
id2FieldUpdateMap.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
for (FieldUpdate fieldUpdate : id2FieldUpdateMap.values()) {
if (fieldUpdate.getField().getName().equals(fieldName))
return fieldUpdate;
}
return null;
}
private FieldUpdate getFieldUpdateById(Integer fieldId) {
return id2FieldUpdateMap.get(fieldId);
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
* @deprecated Iterate and use addFieldUpdate instead
*/
@Deprecated
public void setFieldUpdates(Collection<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
fieldUpdates.clear();
for (FieldUpdate fieldUpdate : fieldUpdates) {
addFieldUpdate(fieldUpdate);
}
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return id2FieldUpdateMap.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
Integer fieldId = update.getField().getId();
if (documentType.getField(fieldId) == null) {
throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" + update.getField().getName() + "'.");
}
FieldUpdate prevUpdate = getFieldUpdateById(fieldId);
if (prevUpdate != update) {
if (prevUpdate != null) {
prevUpdate.addAll(update);
} else {
fieldUpdates.add(update);
id2FieldUpdateMap.put(fieldId, update);
}
}
return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates()) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
* @deprecated use removeFieldUpdate(Field field) instead.
*/
@Deprecated
public FieldUpdate removeFieldUpdate(int index) {
FieldUpdate prev = getFieldUpdate(index);
fieldUpdates.remove(index);
return removeFieldUpdate(prev.getField());
}
public FieldUpdate removeFieldUpdate(Field field) {
return id2FieldUpdateMap.remove(field.getId());
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof DocumentUpdate)) return false;
DocumentUpdate that = (DocumentUpdate) o;
if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
return false;
if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
if (this.getCreateIfNonExistent() != ((DocumentUpdate) o).getCreateIfNonExistent()) return false;
return true;
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (id2FieldUpdateMap != null ? id2FieldUpdateMap.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
StringBuilder string = new StringBuilder();
string.append("update of document '");
string.append(docId);
string.append("': ");
string.append("create-if-non-existent=");
string.append(createIfNonExistent.orElse(false));
string.append(": ");
string.append("[");
for (Iterator<FieldUpdate> i = id2FieldUpdateMap.values().iterator(); i.hasNext();) {
FieldUpdate fieldUpdate = i.next();
string.append(fieldUpdate);
if (i.hasNext()) {
string.append(", ");
}
}
string.append("]");
if (fieldPathUpdates.size() > 0) {
string.append(" [ ");
for (FieldPathUpdate up : fieldPathUpdates) {
string.append(up.toString() + " ");
}
string.append(" ]");
}
return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return id2FieldUpdateMap.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} |
Logically it seems better to put this before setFrozen? | void tick() {
State wantedStateCopy;
synchronized (monitor) {
while (! workToDoNow) {
Duration timeSinceLastConverge = Duration.between(lastTick, clock.instant());
long remainder = nodeAdminConvergeStateInterval.minus(timeSinceLastConverge).toMillis();
if (remainder > 0) {
try {
monitor.wait(remainder);
} catch (InterruptedException e) {
log.info("Interrupted, but ignoring this: NodeAdminStateUpdater");
}
} else break;
}
lastTick = clock.instant();
workToDoNow = false;
wantedStateCopy = this.wantedState;
}
try {
convergeState(wantedStateCopy);
setLastConvergenceException(null);
} catch (OrchestratorException | ConvergenceException | HttpException e) {
setLastConvergenceException(e);
log.info("Unable to converge to " + wantedStateCopy + ": " + e.getMessage());
} catch (RuntimeException e) {
setLastConvergenceException(e);
log.log(LogLevel.ERROR, "Error while trying to converge to " + wantedStateCopy, e);
}
if (wantedStateCopy != RESUMED && currentState == TRANSITIONING) {
Duration subsystemFreezeDuration = nodeAdmin.subsystemFreezeDuration();
if (subsystemFreezeDuration.compareTo(FREEZE_CONVERGENCE_TIMEOUT) > 0) {
log.info("Timed out trying to freeze, will force unfreezed ticks");
nodeAdmin.setFrozen(false);
fetchContainersToRunFromNodeRepository();
}
} else if (currentState == RESUMED) {
fetchContainersToRunFromNodeRepository();
}
} | fetchContainersToRunFromNodeRepository(); | void tick() {
State wantedStateCopy;
synchronized (monitor) {
while (! workToDoNow) {
Duration timeSinceLastConverge = Duration.between(lastTick, clock.instant());
long remainder = nodeAdminConvergeStateInterval.minus(timeSinceLastConverge).toMillis();
if (remainder > 0) {
try {
monitor.wait(remainder);
} catch (InterruptedException e) {
log.info("Interrupted, but ignoring this: NodeAdminStateUpdater");
}
} else break;
}
lastTick = clock.instant();
workToDoNow = false;
wantedStateCopy = this.wantedState;
}
try {
convergeState(wantedStateCopy);
setLastConvergenceException(null);
} catch (OrchestratorException | ConvergenceException | HttpException e) {
setLastConvergenceException(e);
log.info("Unable to converge to " + wantedStateCopy + ": " + e.getMessage());
} catch (RuntimeException e) {
setLastConvergenceException(e);
log.log(LogLevel.ERROR, "Error while trying to converge to " + wantedStateCopy, e);
}
if (wantedStateCopy != RESUMED && currentState == TRANSITIONING) {
Duration subsystemFreezeDuration = nodeAdmin.subsystemFreezeDuration();
if (subsystemFreezeDuration.compareTo(FREEZE_CONVERGENCE_TIMEOUT) > 0) {
log.info("Timed out trying to freeze, will force unfreezed ticks");
fetchContainersToRunFromNodeRepository();
nodeAdmin.setFrozen(false);
}
} else if (currentState == RESUMED) {
fetchContainersToRunFromNodeRepository();
}
} | class NodeAdminStateUpdaterImpl implements NodeAdminStateUpdater {
static final Duration FREEZE_CONVERGENCE_TIMEOUT = Duration.ofMinutes(5);
static final String TRANSITION_EXCEPTION_MESSAGE = "NodeAdminStateUpdater has not run since current wanted state was set";
private final AtomicBoolean terminated = new AtomicBoolean(false);
private State currentState = SUSPENDED_NODE_ADMIN;
private State wantedState = RESUMED;
private boolean workToDoNow = true;
private final Object monitor = new Object();
private RuntimeException lastConvergenceException;
private final Logger log = Logger.getLogger(NodeAdminStateUpdater.class.getName());
private final ScheduledExecutorService specVerifierScheduler =
Executors.newScheduledThreadPool(1, ThreadFactoryFactory.getDaemonThreadFactory("specverifier"));
private final Thread loopThread;
private final NodeRepository nodeRepository;
private final Orchestrator orchestrator;
private final NodeAdmin nodeAdmin;
private final Clock clock;
private final String dockerHostHostName;
private final Duration nodeAdminConvergeStateInterval;
private final Optional<ClassLocking> classLocking;
private Optional<ClassLock> classLock = Optional.empty();
private Instant lastTick;
public NodeAdminStateUpdaterImpl(
NodeRepository nodeRepository,
Orchestrator orchestrator,
StorageMaintainer storageMaintainer,
NodeAdmin nodeAdmin,
String dockerHostHostName,
Clock clock,
Duration nodeAdminConvergeStateInterval,
Optional<ClassLocking> classLocking) {
log.info(objectToString() + ": Creating object");
this.nodeRepository = nodeRepository;
this.orchestrator = orchestrator;
this.nodeAdmin = nodeAdmin;
this.dockerHostHostName = dockerHostHostName;
this.clock = clock;
this.nodeAdminConvergeStateInterval = nodeAdminConvergeStateInterval;
this.classLocking = classLocking;
this.lastTick = clock.instant();
this.loopThread = new Thread(() -> {
if (classLocking.isPresent()) {
log.info(objectToString() + ": Acquiring lock");
try {
classLock = Optional.of(classLocking.get().lockWhile(NodeAdminStateUpdater.class, () -> !terminated.get()));
} catch (LockInterruptException e) {
classLock = Optional.empty();
return;
}
}
log.info(objectToString() + ": Starting threads and schedulers");
nodeAdmin.start();
specVerifierScheduler.scheduleWithFixedDelay(() ->
updateHardwareDivergence(storageMaintainer), 5, 60, TimeUnit.MINUTES);
while (! terminated.get()) {
tick();
}
});
this.loopThread.setName("tick-NodeAdminStateUpdater");
}
private String objectToString() {
return this.getClass().getSimpleName() + "@" + Integer.toString(System.identityHashCode(this));
}
@Override
public Map<String, Object> getDebugPage() {
    // Snapshot of this updater's state for rendering on a debug/status page.
    Map<String, Object> debug = new LinkedHashMap<>();
    synchronized (monitor) { // read wantedState/currentState under the same lock that guards writes
        debug.put("dockerHostHostName", dockerHostHostName);
        debug.put("wantedState", wantedState);
        debug.put("currentState", currentState);
        debug.put("NodeAdmin", nodeAdmin.debugInfo());
    }
    return debug;
}
// Reports this host's hardware divergence to the node repository.
// Runs periodically on the spec-verifier scheduler; only active while fully RESUMED.
private void updateHardwareDivergence(StorageMaintainer maintainer) {
    if (currentState != RESUMED) return;
    try {
        NodeSpec node = nodeRepository.getNode(dockerHostHostName);
        String hardwareDivergence = maintainer.getHardwareDivergence(node);
        // Only write back when the value changed; "null" appears to be the sentinel for
        // "no divergence recorded" — TODO confirm against NodeRepository's serialization.
        if (!node.getHardwareDivergence().orElse("null").equals(hardwareDivergence)) {
            NodeAttributes nodeAttributes = new NodeAttributes().withHardwareDivergence(hardwareDivergence);
            nodeRepository.updateNodeAttributes(dockerHostHostName, nodeAttributes);
        }
    } catch (RuntimeException e) {
        // Best effort: log and keep the schedule alive rather than let the task die.
        log.log(Level.WARNING, "Failed to report hardware divergence", e);
    }
}
@Override
public void setResumeStateAndCheckIfResumed(State wantedState) {
    // Records the new goal state and throws unless the updater has already converged to it.
    // Callers are expected to retry until this stops throwing.
    synchronized (monitor) {
        if (this.wantedState != wantedState) {
            log.info("Wanted state change: " + this.wantedState + " -> " + wantedState);
            this.wantedState = wantedState;
            setLastConvergenceException(null); // a new goal invalidates the previous failure
            signalWorkToBeDone(); // wake the tick loop so it converges promptly
        }
        // Not converged yet: surface the last convergence failure if there is one, otherwise
        // report that the updater simply has not run since the goal was set.
        if (currentState != wantedState) {
            throw Optional.ofNullable(lastConvergenceException)
                .orElseGet(() -> new RuntimeException(TRANSITION_EXCEPTION_MESSAGE));
        }
    }
}
/** Wakes the tick loop so it reacts immediately instead of waiting out its interval. */
void signalWorkToBeDone() {
    synchronized (monitor) {
        if (workToDoNow) {
            return; // already signalled; the flag is still pending
        }
        workToDoNow = true;
        monitor.notifyAll();
    }
}
// Records the most recent convergence failure, or clears it when passed null.
// Guarded by the shared monitor so readers in setResumeStateAndCheckIfResumed see it consistently.
private void setLastConvergenceException(RuntimeException exception) {
    synchronized (monitor) {
        lastConvergenceException = exception;
    }
}
/**
 * This method attempts to converge node-admin w/agents to a {@link State}
 * with respect to: freeze, Orchestrator, and services running.
 *
 * @param wantedState the state to converge to
 * @throws ConvergenceException if NodeAdmin has not yet reached the wanted frozen state
 * @throws IllegalStateException if the wanted state is unknown
 */
private void convergeState(State wantedState) {
    if (currentState == wantedState) return;
    // Capture where we came from before overwriting currentState: the completion log line
    // below previously always printed TRANSITIONING as the from-state.
    final State previousState;
    synchronized (monitor) {
        previousState = currentState;
        currentState = TRANSITIONING;
    }
    boolean wantFrozen = wantedState != RESUMED;
    if (!nodeAdmin.setFrozen(wantFrozen)) {
        throw new ConvergenceException("NodeAdmin is not yet " + (wantFrozen ? "frozen" : "unfrozen"));
    }
    // Only talk to the Orchestrator about this host if the node repository considers it active.
    boolean hostIsActiveInNR = nodeRepository.getNode(dockerHostHostName).getState() == Node.State.active;
    switch (wantedState) {
        case RESUMED:
            if (hostIsActiveInNR) orchestrator.resume(dockerHostHostName);
            break;
        case SUSPENDED_NODE_ADMIN:
            if (hostIsActiveInNR) orchestrator.suspend(dockerHostHostName);
            break;
        case SUSPENDED:
            // Ask permission to suspend all active containers (plus the host itself, if active)
            // in one batch, then stop services in those containers.
            List<String> nodesInActiveState = getNodesInActiveState();
            List<String> nodesToSuspend = new ArrayList<>(nodesInActiveState);
            if (hostIsActiveInNR) nodesToSuspend.add(dockerHostHostName);
            if (!nodesToSuspend.isEmpty()) {
                orchestrator.suspend(dockerHostHostName, nodesToSuspend);
                log.info("Orchestrator allows suspension of " + nodesToSuspend);
            }
            nodeAdmin.stopNodeAgentServices(nodesInActiveState);
            break;
        default:
            throw new IllegalStateException("Unknown wanted state " + wantedState);
    }
    log.info("State changed from " + previousState + " to " + wantedState);
    synchronized (monitor) {
        currentState = wantedState;
    }
}
// Refreshes NodeAdmin's view of which containers should be running on this host,
// using the node repository as the source of truth. Failures are logged and the
// previous view is kept; presumably retried by the next invocation — confirm at call site.
private void fetchContainersToRunFromNodeRepository() {
    try {
        final List<NodeSpec> containersToRun = nodeRepository.getNodes(dockerHostHostName);
        nodeAdmin.refreshContainersToRun(containersToRun);
    } catch (Exception e) {
        log.log(LogLevel.WARNING, "Failed to update which containers should be running", e);
    }
}
/** Returns the hostnames of this host's nodes whose node-repository state is {@code active}. */
private List<String> getNodesInActiveState() {
    List<String> activeHostnames = new ArrayList<>();
    for (NodeSpec node : nodeRepository.getNodes(dockerHostHostName)) {
        if (node.getState() == Node.State.active) {
            activeHostnames.add(node.getHostname());
        }
    }
    return activeHostnames;
}
/** Starts the tick-loop thread created in the constructor. */
public void start() {
    loopThread.start();
}
/**
 * Shuts this updater down: marks it terminated, unblocks the tick loop (including a pending
 * class-lock acquisition), waits for the loop thread and the spec-verifier scheduler to finish,
 * then stops NodeAdmin and releases the class lock. May only be called once.
 */
public void stop() {
    log.info(objectToString() + ": Stop called");
    if (!terminated.compareAndSet(false, true)) {
        throw new RuntimeException("Can not re-stop a node agent.");
    }
    // Interrupt a pending lock acquisition so the loop thread can exit even if it never
    // got past the initial lockWhile() in the constructor's thread body.
    classLocking.ifPresent(ClassLocking::interrupt);
    signalWorkToBeDone();
    specVerifierScheduler.shutdown();
    // Wait until both the loop thread and the scheduler are fully done. InterruptedException is
    // deliberately swallowed rather than re-asserted: re-interrupting would make join() throw
    // again immediately on the next iteration and busy-spin.
    do {
        try {
            loopThread.join();
            specVerifierScheduler.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        } catch (InterruptedException e1) {
            log.info("Interrupted while waiting for NodeAdminStateUpdater thread and specVerfierScheduler to shutdown");
        }
    } while (loopThread.isAlive() || !specVerifierScheduler.isTerminated());
    nodeAdmin.stop();
    classLock.ifPresent(lock -> {
        log.info(objectToString() + ": Releasing lock");
        lock.close();
    });
    log.info(objectToString() + ": Stop complete");
}
} | class NodeAdminStateUpdaterImpl implements NodeAdminStateUpdater {
static final Duration FREEZE_CONVERGENCE_TIMEOUT = Duration.ofMinutes(5);
static final String TRANSITION_EXCEPTION_MESSAGE = "NodeAdminStateUpdater has not run since current wanted state was set";
private final AtomicBoolean terminated = new AtomicBoolean(false);
private State currentState = SUSPENDED_NODE_ADMIN;
private State wantedState = RESUMED;
private boolean workToDoNow = true;
private final Object monitor = new Object();
private RuntimeException lastConvergenceException;
private final Logger log = Logger.getLogger(NodeAdminStateUpdater.class.getName());
private final ScheduledExecutorService specVerifierScheduler =
Executors.newScheduledThreadPool(1, ThreadFactoryFactory.getDaemonThreadFactory("specverifier"));
private final Thread loopThread;
private final NodeRepository nodeRepository;
private final Orchestrator orchestrator;
private final NodeAdmin nodeAdmin;
private final Clock clock;
private final String dockerHostHostName;
private final Duration nodeAdminConvergeStateInterval;
private final Optional<ClassLocking> classLocking;
private Optional<ClassLock> classLock = Optional.empty();
private Instant lastTick;
public NodeAdminStateUpdaterImpl(
NodeRepository nodeRepository,
Orchestrator orchestrator,
StorageMaintainer storageMaintainer,
NodeAdmin nodeAdmin,
String dockerHostHostName,
Clock clock,
Duration nodeAdminConvergeStateInterval,
Optional<ClassLocking> classLocking) {
log.info(objectToString() + ": Creating object");
this.nodeRepository = nodeRepository;
this.orchestrator = orchestrator;
this.nodeAdmin = nodeAdmin;
this.dockerHostHostName = dockerHostHostName;
this.clock = clock;
this.nodeAdminConvergeStateInterval = nodeAdminConvergeStateInterval;
this.classLocking = classLocking;
this.lastTick = clock.instant();
this.loopThread = new Thread(() -> {
if (classLocking.isPresent()) {
log.info(objectToString() + ": Acquiring lock");
try {
classLock = Optional.of(classLocking.get().lockWhile(NodeAdminStateUpdater.class, () -> !terminated.get()));
} catch (LockInterruptException e) {
classLock = Optional.empty();
return;
}
}
log.info(objectToString() + ": Starting threads and schedulers");
nodeAdmin.start();
specVerifierScheduler.scheduleWithFixedDelay(() ->
updateHardwareDivergence(storageMaintainer), 5, 60, TimeUnit.MINUTES);
while (! terminated.get()) {
tick();
}
});
this.loopThread.setName("tick-NodeAdminStateUpdater");
}
private String objectToString() {
return this.getClass().getSimpleName() + "@" + Integer.toString(System.identityHashCode(this));
}
@Override
public Map<String, Object> getDebugPage() {
Map<String, Object> debug = new LinkedHashMap<>();
synchronized (monitor) {
debug.put("dockerHostHostName", dockerHostHostName);
debug.put("wantedState", wantedState);
debug.put("currentState", currentState);
debug.put("NodeAdmin", nodeAdmin.debugInfo());
}
return debug;
}
private void updateHardwareDivergence(StorageMaintainer maintainer) {
if (currentState != RESUMED) return;
try {
NodeSpec node = nodeRepository.getNode(dockerHostHostName);
String hardwareDivergence = maintainer.getHardwareDivergence(node);
if (!node.getHardwareDivergence().orElse("null").equals(hardwareDivergence)) {
NodeAttributes nodeAttributes = new NodeAttributes().withHardwareDivergence(hardwareDivergence);
nodeRepository.updateNodeAttributes(dockerHostHostName, nodeAttributes);
}
} catch (RuntimeException e) {
log.log(Level.WARNING, "Failed to report hardware divergence", e);
}
}
@Override
public void setResumeStateAndCheckIfResumed(State wantedState) {
synchronized (monitor) {
if (this.wantedState != wantedState) {
log.info("Wanted state change: " + this.wantedState + " -> " + wantedState);
this.wantedState = wantedState;
setLastConvergenceException(null);
signalWorkToBeDone();
}
if (currentState != wantedState) {
throw Optional.ofNullable(lastConvergenceException)
.orElseGet(() -> new RuntimeException(TRANSITION_EXCEPTION_MESSAGE));
}
}
}
void signalWorkToBeDone() {
synchronized (monitor) {
if (! workToDoNow) {
workToDoNow = true;
monitor.notifyAll();
}
}
}
private void setLastConvergenceException(RuntimeException exception) {
synchronized (monitor) {
lastConvergenceException = exception;
}
}
/**
* This method attempts to converge node-admin w/agents to a {@link State}
* with respect to: freeze, Orchestrator, and services running.
*/
private void convergeState(State wantedState) {
if (currentState == wantedState) return;
synchronized (monitor) {
currentState = TRANSITIONING;
}
boolean wantFrozen = wantedState != RESUMED;
if (!nodeAdmin.setFrozen(wantFrozen)) {
throw new ConvergenceException("NodeAdmin is not yet " + (wantFrozen ? "frozen" : "unfrozen"));
}
boolean hostIsActiveInNR = nodeRepository.getNode(dockerHostHostName).getState() == Node.State.active;
switch (wantedState) {
case RESUMED:
if (hostIsActiveInNR) orchestrator.resume(dockerHostHostName);
break;
case SUSPENDED_NODE_ADMIN:
if (hostIsActiveInNR) orchestrator.suspend(dockerHostHostName);
break;
case SUSPENDED:
List<String> nodesInActiveState = getNodesInActiveState();
List<String> nodesToSuspend = new ArrayList<>(nodesInActiveState);
if (hostIsActiveInNR) nodesToSuspend.add(dockerHostHostName);
if (!nodesToSuspend.isEmpty()) {
orchestrator.suspend(dockerHostHostName, nodesToSuspend);
log.info("Orchestrator allows suspension of " + nodesToSuspend);
}
nodeAdmin.stopNodeAgentServices(nodesInActiveState);
break;
default:
throw new IllegalStateException("Unknown wanted state " + wantedState);
}
log.info("State changed from " + currentState + " to " + wantedState);
synchronized (monitor) {
currentState = wantedState;
}
}
private void fetchContainersToRunFromNodeRepository() {
try {
final List<NodeSpec> containersToRun = nodeRepository.getNodes(dockerHostHostName);
nodeAdmin.refreshContainersToRun(containersToRun);
} catch (Exception e) {
log.log(LogLevel.WARNING, "Failed to update which containers should be running", e);
}
}
private List<String> getNodesInActiveState() {
return nodeRepository.getNodes(dockerHostHostName)
.stream()
.filter(node -> node.getState() == Node.State.active)
.map(NodeSpec::getHostname)
.collect(Collectors.toList());
}
public void start() {
loopThread.start();
}
public void stop() {
log.info(objectToString() + ": Stop called");
if (!terminated.compareAndSet(false, true)) {
throw new RuntimeException("Can not re-stop a node agent.");
}
classLocking.ifPresent(ClassLocking::interrupt);
signalWorkToBeDone();
specVerifierScheduler.shutdown();
do {
try {
loopThread.join();
specVerifierScheduler.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
} catch (InterruptedException e1) {
log.info("Interrupted while waiting for NodeAdminStateUpdater thread and specVerfierScheduler to shutdown");
}
} while (loopThread.isAlive() || !specVerifierScheduler.isTerminated());
nodeAdmin.stop();
classLock.ifPresent(lock -> {
log.info(objectToString() + ": Releasing lock");
lock.close();
});
log.info(objectToString() + ": Stop complete");
}
} |
I think this one behaves better now. | public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
if (index < 0 || index >= fieldUpdates.size()) {
throw new IndexOutOfBoundsException("Index " + index + " is outside of [" + 0 + ", " + fieldUpdates.size() + ">");
}
for (FieldUpdate fieldUpdate : fieldUpdates.values()) {
if (index-- == 0) {
addFieldUpdateNoCheck(fieldUpdate);
return fieldUpdate;
}
}
return null;
} | return fieldUpdate; | public FieldUpdate setFieldUpdate(int index, FieldUpdate upd) {
FieldUpdate old = fieldUpdates.get(index);
if (old.getField().equals(upd.getField())) {
fieldUpdates.set(index, upd);
id2FieldUpdateMap.put(upd.getField().getId(), upd);
} else {
throw new IllegalArgumentException("You can not replace a FieldUpdate for field '" + old.getField() +
"' with an update for field '" + upd.getField() + "'");
}
return old;
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private Map<Integer, FieldUpdate> fieldUpdates;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new HashMap<Integer, FieldUpdate>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new HashMap<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, Map<Integer, FieldUpdate> fieldUpdates) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = fieldUpdates;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
private void verifyType(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc.getId() + " with type " + doc.getDataType() + " must have same type as update, which is type " + documentType);
}
}
/**
 * Applies this document update.
 *
 * @param doc the document to apply the update to
 * @return a reference to itself
 * @throws IllegalArgumentException if the document does not have the same document type as this update
 */
public DocumentUpdate applyTo(Document doc) {
    verifyType(doc);
    // Apply all field updates first, then the field path updates, each in stored order.
    for (FieldUpdate fieldUpdate : fieldUpdates.values()) {
        fieldUpdate.applyTo(doc);
    }
    for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
        fieldPathUpdate.applyTo(doc);
    }
    return this;
}
/**
 * Prune away any field update that will not modify any field in the document.
 * Inspects only the last value update of each field update: an assign of the value the
 * document already holds, or a clear of an already-absent/already-cleared field, makes
 * the whole field update a no-op and it is removed. Empty field updates are kept as-is.
 *
 * @param doc document to check against
 * @return a reference to itself
 * @throws IllegalArgumentException if the document does not have the same document type as this update
 */
public DocumentUpdate prune(Document doc) {
    verifyType(doc);
    for (Iterator<Map.Entry<Integer, FieldUpdate>> iter = fieldUpdates.entrySet().iterator(); iter.hasNext();) {
        Map.Entry<Integer, FieldUpdate> entry = iter.next();
        FieldUpdate update = entry.getValue();
        if (!update.isEmpty()) {
            ValueUpdate last = update.getValueUpdate(update.size() - 1);
            if (last instanceof AssignValueUpdate) {
                // Assigning the value the document already has changes nothing.
                FieldValue currentValue = doc.getFieldValue(update.getField());
                if ((currentValue != null) && currentValue.equals(last.getValue())) {
                    iter.remove();
                }
            } else if (last instanceof ClearValueUpdate) {
                // Clearing is a no-op if the field is absent, or already equals its cleared form.
                FieldValue currentValue = doc.getFieldValue(update.getField());
                if (currentValue == null) {
                    iter.remove();
                } else {
                    FieldValue copy = currentValue.clone();
                    copy.clear();
                    if (currentValue.equals(copy)) {
                        iter.remove();
                    }
                }
            }
        }
    }
    return this;
}
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
@Deprecated
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(new ArrayList<>(fieldUpdates.values()));
}
/**
* Get an unmodifiable collection of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
*/
public Collection<FieldUpdate> getFieldUpdatesCollection() {
return Collections.unmodifiableCollection(fieldUpdates.values());
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
*/
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
*/
@Deprecated
public FieldUpdate getFieldUpdate(int index) {
if (index < 0 || index >= fieldUpdates.size()) {
throw new IndexOutOfBoundsException("Index " + index + " is outside of [" + 0 + ", " + fieldUpdates.size() + ">");
}
for (FieldUpdate fieldUpdate : fieldUpdates.values()) {
if (index-- == 0) {
return fieldUpdate;
}
}
return null;
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
@Deprecated
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdateById(field.getId());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
}
/**
 * Returns the update for the field with the given name.
 *
 * @param fieldName the name of the field whose update to return
 * @return the update for the field, or null if that field has no update in this
 */
public FieldUpdate getFieldUpdate(String fieldName) {
    for (FieldUpdate candidate : fieldUpdates.values()) {
        String candidateName = candidate.getField().getName();
        if (candidateName.equals(fieldName)) {
            return candidate;
        }
    }
    return null;
}
private FieldUpdate getFieldUpdateById(Integer fieldId) {
return fieldUpdates.get(fieldId);
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
*/
public void setFieldUpdates(Collection<FieldUpdate> fieldUpdates) {
if (fieldUpdates == null) {
throw new NullPointerException("The field updates of a document update can not be null");
}
for (FieldUpdate fieldUpdate : fieldUpdates) {
this.fieldUpdates.put(fieldUpdate.getField().getId(), fieldUpdate);
}
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
return fieldUpdates.size();
}
/**
 * Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
 * for the named field, the content of the given FieldUpdate is added to the existing one.
 *
 * @param update The FieldUpdate to add to this DocumentUpdate.
 * @return This, to allow chaining.
 * @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
 *                                  field.
 */
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
    String fieldName = update.getField().getName();
    if (!documentType.hasField(fieldName)) {
        throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" + fieldName + "'.");
    }
    // The map is keyed by field id, so look the previous update up by id directly
    // instead of scanning every stored update by name (was O(n) per add).
    FieldUpdate prevUpdate = getFieldUpdateById(update.getField().getId());
    if (prevUpdate != update) {
        if (prevUpdate != null) {
            prevUpdate.addAll(update); // merge into the existing update for this field
        } else {
            fieldUpdates.put(update.getField().getId(), update);
        }
    }
    return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
public void addFieldUpdateNoCheck(FieldUpdate fieldUpdate) {
fieldUpdates.put(fieldUpdate.getField().getId(), fieldUpdate);
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return;
}
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
for (FieldUpdate fieldUpd : update.fieldUpdates.values()) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
*/
@Deprecated
public FieldUpdate removeFieldUpdate(int index) {
FieldUpdate prev = getFieldUpdate(index);
return removeFieldUpdate(prev.getField());
}
public FieldUpdate removeFieldUpdate(Field field) {
return fieldUpdates.remove(field.getId());
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
data.write(this);
}
/**
 * Two DocumentUpdates are equal when their document id, document type, field updates,
 * field path updates, and create-if-non-existent flag all match.
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof DocumentUpdate)) return false;
    DocumentUpdate that = (DocumentUpdate) o;
    if (docId != null ? !docId.equals(that.docId) : that.docId != null) return false;
    if (documentType != null ? !documentType.equals(that.documentType) : that.documentType != null) return false;
    if (fieldPathUpdates != null ? !fieldPathUpdates.equals(that.fieldPathUpdates) : that.fieldPathUpdates != null)
        return false;
    if (fieldUpdates != null ? !fieldUpdates.equals(that.fieldUpdates) : that.fieldUpdates != null) return false;
    // Use the already-cast 'that' instead of re-casting 'o' (consistency fix).
    if (this.getCreateIfNonExistent() != that.getCreateIfNonExistent()) return false;
    return true;
}
@Override
public int hashCode() {
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (fieldUpdates != null ? fieldUpdates.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
/**
 * Human-readable rendering of this update: document id, create-if-non-existent flag,
 * the field updates, and (when present) the field path updates. Output format is unchanged.
 */
@Override
public String toString() {
    StringBuilder string = new StringBuilder();
    string.append("update of document '");
    string.append(docId);
    string.append("': ");
    string.append("create-if-non-existent=");
    string.append(createIfNonExistent.orElse(false));
    string.append(": ");
    string.append("[");
    for (Iterator<FieldUpdate> i = fieldUpdates.values().iterator(); i.hasNext();) {
        FieldUpdate fieldUpdate = i.next();
        string.append(fieldUpdate);
        if (i.hasNext()) {
            string.append(", ");
        }
    }
    string.append("]");
    if (!fieldPathUpdates.isEmpty()) {
        string.append(" [ ");
        for (FieldPathUpdate up : fieldPathUpdates) {
            // Append directly instead of concatenating (up.toString() + " ") into a temporary String.
            string.append(up).append(" ");
        }
        string.append(" ]");
    }
    return string.toString();
}
public Iterator<FieldPathUpdate> iterator() {
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
return fieldUpdates.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
return createIfNonExistent;
}
} | class DocumentUpdate extends DocumentOperation implements Iterable<FieldPathUpdate> {
public static final int CLASSID = 0x1000 + 6;
private DocumentId docId;
private List<FieldUpdate> fieldUpdates;
private final Map<Integer, FieldUpdate> id2FieldUpdateMap;
private List<FieldPathUpdate> fieldPathUpdates;
private DocumentType documentType;
private Optional<Boolean> createIfNonExistent = Optional.empty();
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, DocumentId docId) {
this(docType, docId, new HashMap<>());
}
/**
* Creates a new document update using a reader
*/
public DocumentUpdate(DocumentUpdateReader reader) {
docId = null;
documentType = null;
fieldUpdates = new ArrayList<>();
id2FieldUpdateMap = new HashMap<>();
fieldPathUpdates = new ArrayList<>();
reader.read(this);
}
/**
* Creates a DocumentUpdate.
*
* @param docId the ID of the update
* @param docType the document type that this update is valid for
*/
public DocumentUpdate(DocumentType docType, String docId) {
this(docType, new DocumentId(docId));
}
private DocumentUpdate(DocumentType docType, DocumentId docId, Map<Integer, FieldUpdate> id2fieldUpdateMap) {
this.docId = docId;
this.documentType = docType;
this.fieldUpdates = new ArrayList<>(id2fieldUpdateMap.values());
id2FieldUpdateMap = id2fieldUpdateMap;
this.fieldPathUpdates = new ArrayList<>();
}
public DocumentId getId() {
return docId;
}
/**
* Sets the document id of the document to update.
* Use only while deserializing - changing the document id after creation has undefined behaviour.
*/
public void setId(DocumentId id) {
docId = id;
}
private void verifyType(Document doc) {
if (!documentType.equals(doc.getDataType())) {
throw new IllegalArgumentException(
"Document " + doc.getId() + " with type " + doc.getDataType() + " must have same type as update, which is type " + documentType);
}
}
/**
* Applies this document update.
*
* @param doc the document to apply the update to
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate applyTo(Document doc) {
verifyType(doc);
for (FieldUpdate fieldUpdate : id2FieldUpdateMap.values()) {
fieldUpdate.applyTo(doc);
}
for (FieldPathUpdate fieldPathUpdate : fieldPathUpdates) {
fieldPathUpdate.applyTo(doc);
}
return this;
}
/**
* Prune away any field update that will not modify any field in the document.
* @param doc document to check against
* @return a reference to itself
* @throws IllegalArgumentException if the document does not have the same document type as this update
*/
public DocumentUpdate prune(Document doc) {
verifyType(doc);
for (Iterator<Map.Entry<Integer, FieldUpdate>> iter = id2FieldUpdateMap.entrySet().iterator(); iter.hasNext();) {
Map.Entry<Integer, FieldUpdate> entry = iter.next();
FieldUpdate update = entry.getValue();
if (!update.isEmpty()) {
ValueUpdate last = update.getValueUpdate(update.size() - 1);
if (last instanceof AssignValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if ((currentValue != null) && currentValue.equals(last.getValue())) {
iter.remove();
}
} else if (last instanceof ClearValueUpdate) {
FieldValue currentValue = doc.getFieldValue(update.getField());
if (currentValue == null) {
iter.remove();
} else {
FieldValue copy = currentValue.clone();
copy.clear();
if (currentValue.equals(copy)) {
iter.remove();
}
}
}
}
}
return this;
}
/**
* Get an unmodifiable list of all field updates that this document update specifies.
*
* @return a list of all FieldUpdates in this DocumentUpdate
* @deprecated Use fieldUpdates() instead.
*/
@Deprecated
public List<FieldUpdate> getFieldUpdates() {
return Collections.unmodifiableList(fieldUpdates);
}
/**
* Get an unmodifiable collection of all field updates that this document update specifies.
*
* @return a collection of all FieldUpdates in this DocumentUpdate
*/
public Collection<FieldUpdate> fieldUpdates() {
return Collections.unmodifiableCollection(id2FieldUpdateMap.values());
}
/**
* Get an unmodifiable list of all field path updates this document update specifies.
*
* @return Returns a list of all field path updates in this document update.
* @deprecated Use fieldPathUpdates() instead.
*/
@Deprecated
public List<FieldPathUpdate> getFieldPathUpdates() {
return Collections.unmodifiableList(fieldPathUpdates);
}
/**
* Get an unmodifiable collection of all field path updates that this document update specifies.
*
* @return a collection of all FieldPathUpdates in this DocumentUpdate
*/
public Collection<FieldPathUpdate> fieldPathUpdates() {
return Collections.unmodifiableCollection(fieldPathUpdates);
}
/** Returns the type of the document this updates
*
* @return The documentype of the document
*/
public DocumentType getDocumentType() {
return documentType;
}
/**
* Sets the document type. Use only while deserializing - changing the document type after creation
* has undefined behaviour.
*/
public void setDocumentType(DocumentType type) {
documentType = type;
}
/**
* Get the field update at the specified index in the list of field updates.
*
* @param index the index of the FieldUpdate to return
* @return the FieldUpdate at the specified index
* @throws IndexOutOfBoundsException if index is out of range
* @deprecated use getFieldUpdate(Field field) instead.
*/
@Deprecated
public FieldUpdate getFieldUpdate(int index) {
return fieldUpdates.get(index);
}
/**
* Replaces the field update at the specified index in the list of field updates.
*
* @param index index of the FieldUpdate to replace
* @param upd the FieldUpdate to be stored at the specified position
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
* @deprecated Use removeFieldUpdate/addFieldUpdate instead
*/
@Deprecated
/**
* Returns the update for a field
*
* @param field the field to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(Field field) {
return getFieldUpdateById(field.getId());
}
/** Removes all field updates from the list for field updates. */
public void clearFieldUpdates() {
fieldUpdates.clear();
id2FieldUpdateMap.clear();
}
/**
* Returns the update for a field name
*
* @param fieldName the field name to return the update of
* @return the update for the field, or null if that field has no update in this
*/
public FieldUpdate getFieldUpdate(String fieldName) {
    // Updates are indexed by field id, not by name, so a name lookup is a linear scan.
    FieldUpdate match = null;
    for (FieldUpdate candidate : id2FieldUpdateMap.values()) {
        if (candidate.getField().getName().equals(fieldName)) {
            match = candidate;
            break;
        }
    }
    return match; // null when no update exists for the named field
}
private FieldUpdate getFieldUpdateById(Integer fieldId) {
// Returns null when no update is registered for the given field id.
return id2FieldUpdateMap.get(fieldId);
}
/**
* Assigns the field updates of this document update.
* This document update receives ownership of the list - it can not be subsequently used
* by the caller.
*
* @param fieldUpdates the new list of updates of this
* @throws NullPointerException if the argument passed is null
* @deprecated Iterate and use addFieldUpdate instead
*/
@Deprecated
public void setFieldUpdates(Collection<FieldUpdate> fieldUpdates) {
    if (fieldUpdates == null) {
        throw new NullPointerException("The field updates of a document update can not be null");
    }
    // BUG FIX: the parameter shadows the 'fieldUpdates' field, so the original
    // 'fieldUpdates.clear()' emptied the *caller's* collection and the loop then
    // iterated the now-empty collection -- existing updates survived and nothing
    // was added. Clear this update's own state instead, then add each given update
    // (addFieldUpdate merges per field and maintains the id index).
    clearFieldUpdates();
    for (FieldUpdate fieldUpdate : fieldUpdates) {
        addFieldUpdate(fieldUpdate);
    }
}
/**
* Get the number of field updates in this document update.
*
* @return the size of the List of FieldUpdates
*/
public int size() {
// One entry per updated field; field path updates are not counted.
return id2FieldUpdateMap.size();
}
/**
* Adds the given {@link FieldUpdate} to this DocumentUpdate. If this DocumentUpdate already contains a FieldUpdate
* for the named field, the content of the given FieldUpdate is added to the existing one.
*
* @param update The FieldUpdate to add to this DocumentUpdate.
* @return This, to allow chaining.
* @throws IllegalArgumentException If the {@link DocumentType} of this DocumentUpdate does not have a corresponding
* field.
*/
public DocumentUpdate addFieldUpdate(FieldUpdate update) {
Integer fieldId = update.getField().getId();
// Reject updates for fields the target document type does not declare.
if (documentType.getField(fieldId) == null) {
throw new IllegalArgumentException("Document type '" + documentType.getName() + "' does not have field '" + update.getField().getName() + "'.");
}
FieldUpdate prevUpdate = getFieldUpdateById(fieldId);
// Adding the exact same instance again is a no-op.
if (prevUpdate != update) {
if (prevUpdate != null) {
// A different update for an already-updated field is merged into the existing one.
prevUpdate.addAll(update);
} else {
// First update for this field: track it in both the ordered list and the id index.
fieldUpdates.add(update);
id2FieldUpdateMap.put(fieldId, update);
}
}
return this;
}
/**
* Adds a field path update to perform on the document.
*
* @return a reference to itself.
*/
public DocumentUpdate addFieldPathUpdate(FieldPathUpdate fieldPathUpdate) {
// Path updates are kept in insertion order and, unlike field updates, are never merged.
fieldPathUpdates.add(fieldPathUpdate);
return this;
}
/**
* Adds all the field- and field path updates of the given document update to this. If the given update refers to a
* different document or document type than this, this method throws an exception.
*
* @param update The update whose content to add to this.
* @throws IllegalArgumentException If the {@link DocumentId} or {@link DocumentType} of the given DocumentUpdate
* does not match the content of this.
*/
public void addAll(DocumentUpdate update) {
if (update == null) {
return; // nothing to merge
}
// Both updates must target the same document id ...
if (!docId.equals(update.docId)) {
throw new IllegalArgumentException("Expected " + docId + ", got " + update.docId + ".");
}
// ... and the same document type.
if (!documentType.equals(update.documentType)) {
throw new IllegalArgumentException("Expected " + documentType + ", got " + update.documentType + ".");
}
// Field updates merge per field (see addFieldUpdate); path updates are appended.
for (FieldUpdate fieldUpd : update.fieldUpdates()) {
addFieldUpdate(fieldUpd);
}
for (FieldPathUpdate pathUpd : update.fieldPathUpdates) {
addFieldPathUpdate(pathUpd);
}
}
/**
* Removes the field update at the specified position in the list of field updates.
*
* @param index the index of the FieldUpdate to remove
* @return the FieldUpdate previously at the specified position
* @throws IndexOutOfBoundsException if index is out of range
* @deprecated use removeFieldUpdate(Field field) instead.
*/
@Deprecated
public FieldUpdate removeFieldUpdate(int index) {
// Fetch the update at the list index first so it can also be evicted from the id index.
FieldUpdate prev = getFieldUpdate(index);
fieldUpdates.remove(index);
// Returns the map entry for the field -- the same instance as 'prev' while list and map are in sync.
return removeFieldUpdate(prev.getField());
}
public FieldUpdate removeFieldUpdate(Field field) {
// NOTE(review): removes from the id index only; a matching entry in the
// 'fieldUpdates' list is left behind, unlike removeFieldUpdate(int) which
// removes from both -- confirm callers do not rely on the list staying in sync.
return id2FieldUpdateMap.remove(field.getId());
}
/**
* Returns the document type of this document update.
*
* @return the document type of this document update
*/
public DocumentType getType() {
// Identical to getDocumentType().
return documentType;
}
public final void serialize(GrowableByteBuffer buf) {
// Serializes using the "42" writer produced by the serializer factory.
serialize(DocumentSerializerFactory.create42(buf));
}
public void serialize(DocumentUpdateWriter data) {
// Double dispatch: the writer decides how to encode this update.
data.write(this);
}
@Override
public boolean equals(Object o) {
    // Equal when both updates target the same document and type and carry the same
    // field updates, field path updates and create-if-non-existent flag.
    if (this == o) return true;
    if ( ! (o instanceof DocumentUpdate)) return false;
    DocumentUpdate other = (DocumentUpdate) o;
    return (docId == null ? other.docId == null : docId.equals(other.docId))
        && (documentType == null ? other.documentType == null : documentType.equals(other.documentType))
        && (fieldPathUpdates == null ? other.fieldPathUpdates == null : fieldPathUpdates.equals(other.fieldPathUpdates))
        && (fieldUpdates == null ? other.fieldUpdates == null : fieldUpdates.equals(other.fieldUpdates))
        && getCreateIfNonExistent() == other.getCreateIfNonExistent();
}
@Override
public int hashCode() {
// NOTE(review): hashes the id2FieldUpdateMap while equals() compares the
// fieldUpdates list, and the create-if-non-existent flag is hashed nowhere.
// Equal objects still hash equally as long as list and map are kept in
// sync by addFieldUpdate/clearFieldUpdates -- TODO confirm that invariant.
int result = docId != null ? docId.hashCode() : 0;
result = 31 * result + (id2FieldUpdateMap != null ? id2FieldUpdateMap.hashCode() : 0);
result = 31 * result + (fieldPathUpdates != null ? fieldPathUpdates.hashCode() : 0);
result = 31 * result + (documentType != null ? documentType.hashCode() : 0);
return result;
}
@Override
public String toString() {
    // Renders "update of document '<id>': create-if-non-existent=<flag>: [u1, u2]"
    // followed by " [ p1 p2  ]" when field path updates are present.
    StringBuilder out = new StringBuilder();
    out.append("update of document '").append(docId).append("': ");
    out.append("create-if-non-existent=").append(createIfNonExistent.orElse(false)).append(": ");
    out.append("[");
    String separator = "";
    for (FieldUpdate fieldUpdate : id2FieldUpdateMap.values()) {
        out.append(separator).append(fieldUpdate);
        separator = ", ";
    }
    out.append("]");
    if ( ! fieldPathUpdates.isEmpty()) {
        out.append(" [ ");
        for (FieldPathUpdate pathUpdate : fieldPathUpdates) {
            out.append(pathUpdate.toString()).append(" ");
        }
        out.append(" ]");
    }
    return out.toString();
}
public Iterator<FieldPathUpdate> iterator() {
// Iterates the field *path* updates only, not the field updates.
return fieldPathUpdates.iterator();
}
/**
* Returns whether or not this field update contains any field- or field path updates.
*
* @return True if this update is empty.
*/
public boolean isEmpty() {
// Empty only when there are neither field updates nor field path updates.
return id2FieldUpdateMap.isEmpty() && fieldPathUpdates.isEmpty();
}
/**
* Sets whether this update should create the document it updates if that document does not exist.
* In this case an empty document is created before the update is applied.
*
* @since 5.17
* @param value Whether the document it updates should be created.
*/
public void setCreateIfNonExistent(boolean value) {
// Stored as Optional so "never set" stays distinguishable from "explicitly false".
createIfNonExistent = Optional.of(value);
}
/**
* Gets whether this update should create the document it updates if that document does not exist.
*
* @since 5.17
* @return Whether the document it updates should be created.
*/
public boolean getCreateIfNonExistent() {
// Defaults to false when the flag was never set.
return createIfNonExistent.orElse(false);
}
public Optional<Boolean> getOptionalCreateIfNonExistent() {
// Exposes whether the flag was explicitly set (empty = unset).
return createIfNonExistent;
}
} |
Ok, but I think we need another way to do this which only exposes the external bindings. | private HttpResponse listTypeDetails(HttpRequest request, FunctionEvaluator evaluator) {
Slime slime = new Slime();
Cursor root = slime.setObject();
Cursor bindings = root.setArray("bindings");
for (String bindingName : evaluator.context().names()) {
if (bindingName.startsWith("constant(")) {
continue;
}
if (bindingName.startsWith("rankingExpression(")) {
continue;
}
Cursor binding = bindings.addObject();
binding.setString("name", bindingName);
binding.setString("type", "");
}
return new Response(200, com.yahoo.slime.JsonFormat.toJsonBytes(slime));
} | if (bindingName.startsWith("constant(")) { | private HttpResponse listTypeDetails(HttpRequest request, FunctionEvaluator evaluator) {
Slime slime = new Slime();
Cursor root = slime.setObject();
Cursor bindings = root.setArray("bindings");
for (String bindingName : evaluator.context().names()) {
if (bindingName.startsWith("constant(")) {
continue;
}
if (bindingName.startsWith("rankingExpression(")) {
continue;
}
Cursor binding = bindings.addObject();
binding.setString("name", bindingName);
binding.setString("type", "");
}
return new Response(200, com.yahoo.slime.JsonFormat.toJsonBytes(slime));
} | class ModelsEvaluationHandler extends ThreadedHttpRequestHandler {
public static final String API_ROOT = "model-evaluation";
public static final String VERSION_V1 = "v1";
public static final String EVALUATE = "eval";
private final ModelsEvaluator modelsEvaluator;
public ModelsEvaluationHandler(ModelsEvaluator modelsEvaluator, Executor executor) {
super(executor);
// Holds every model this handler can list and evaluate.
this.modelsEvaluator = modelsEvaluator;
}
@Override
public HttpResponse handle(HttpRequest request) {
// Routes /model-evaluation/v1[/<model>[/<function>][/eval]] requests.
Path path = new Path(request);
Optional<String> apiName = path.segment(0);
Optional<String> version = path.segment(1);
Optional<String> modelName = path.segment(2);
if ( ! apiName.isPresent() || ! apiName.get().equalsIgnoreCase(API_ROOT)) {
return new ErrorResponse(400, "unknown API");
}
if ( ! version.isPresent() || ! version.get().equalsIgnoreCase(VERSION_V1)) {
return new ErrorResponse(400, "unknown API version");
}
// /model-evaluation/v1 -> list all known models
if ( ! modelName.isPresent()) {
return listAllModels(request);
}
// NOTE(review): 404 would arguably fit "model not found" better than 400.
if ( ! modelsEvaluator.models().containsKey(modelName.get())) {
return new ErrorResponse(400, "no model with name '" + modelName.get() + "' found");
}
Model model = modelsEvaluator.models().get(modelName.get());
// /<model>: multi-function models list their functions, single-function models
// go straight to the type details.
if (path.segments() == 3) {
if (model.functions().size() > 1) {
return listModelDetails(request, modelName.get());
}
return listTypeDetails(request, modelName.get());
}
// /<model>/<function> or /<model>/eval
if (path.segments() == 4) {
if ( ! path.segment(3).get().equalsIgnoreCase(EVALUATE)) {
return listTypeDetails(request, modelName.get(), path.segment(3).get());
}
// A function may itself be named "eval"; in that case show its details.
if (model.functions().stream().anyMatch(f -> f.getName().equalsIgnoreCase(EVALUATE))) {
return listTypeDetails(request, modelName.get(), path.segment(3).get());
}
if (model.functions().size() <= 1) {
return evaluateModel(request, modelName.get());
}
return new ErrorResponse(400, "attempt to evaluate model without specifying function");
}
// /<model>/<function>/eval
if (path.segments() == 5) {
if (path.segment(4).get().equalsIgnoreCase(EVALUATE)) {
return evaluateModel(request, modelName.get(), path.segment(3).get());
}
}
return new ErrorResponse(400, "unrecognized request");
}
private HttpResponse listAllModels(HttpRequest request) {
    // Maps each known model name to the URL where its details can be fetched.
    Slime models = new Slime();
    Cursor root = models.setObject();
    String urlPrefix = baseUrl(request);
    modelsEvaluator.models().keySet().forEach(modelName -> root.setString(modelName, urlPrefix + modelName));
    return new Response(200, com.yahoo.slime.JsonFormat.toJsonBytes(models));
}
private HttpResponse listModelDetails(HttpRequest request, String modelName) {
// Maps each function of the model to the URL serving its type details.
Model model = modelsEvaluator.models().get(modelName);
Slime slime = new Slime();
Cursor root = slime.setObject();
for (ExpressionFunction func : model.functions()) {
root.setString(func.getName(), baseUrl(request) + modelName + "/" + func.getName());
}
return new Response(200, com.yahoo.slime.JsonFormat.toJsonBytes(slime));
}
private HttpResponse listTypeDetails(HttpRequest request, String modelName) {
// Looks up an evaluator for the model (no explicit function) and lists its bindings.
return listTypeDetails(request, modelsEvaluator.evaluatorOf(modelName));
}
private HttpResponse listTypeDetails(HttpRequest request, String modelName, String signatureAndOutput) {
// Looks up an evaluator for the named function/signature and lists its bindings.
return listTypeDetails(request, modelsEvaluator.evaluatorOf(modelName, signatureAndOutput));
}
private HttpResponse evaluateModel(HttpRequest request, String modelName) {
// Evaluates the model via its default evaluator (single-function case in handle()).
return evaluateModel(request, modelsEvaluator.evaluatorOf(modelName));
}
private HttpResponse evaluateModel(HttpRequest request, String modelName, String signatureAndOutput) {
// Evaluates the named function/signature of the model.
return evaluateModel(request, modelsEvaluator.evaluatorOf(modelName, signatureAndOutput));
}
private HttpResponse evaluateModel(HttpRequest request, FunctionEvaluator evaluator) {
// Binds every context name for which the request supplies a same-named property
// (parsed as a tensor literal), then evaluates and returns the result as JSON.
for (String bindingName : evaluator.context().names()) {
property(request, bindingName).ifPresent(s -> evaluator.bind(bindingName, Tensor.from(s)));
}
Tensor result = evaluator.evaluate();
return new Response(200, JsonFormat.encode(result));
}
private Optional<String> property(HttpRequest request, String name) {
// Named request property, empty when absent.
return Optional.ofNullable(request.getProperty(name));
}
private String baseUrl(HttpRequest request) {
URI uri = request.getUri();
StringBuilder sb = new StringBuilder();
sb.append(uri.getScheme()).append(":
if (uri.getPort() >= 0) {
sb.append(":").append(uri.getPort());
}
sb.append("/").append(API_ROOT).append("/").append(VERSION_V1).append("/");
return sb.toString();
}
private static class Path {
// Path segments of the request URI, lower-cased, without leading/trailing slashes.
private final String[] segments;
public Path(HttpRequest httpRequest) {
segments = splitPath(httpRequest);
}
/** Returns the segment at the given index, or empty when out of range. */
Optional<String> segment(int index) {
return (index < 0 || index >= segments.length) ? Optional.empty() : Optional.of(segments[index]);
}
/** Returns the number of path segments. */
int segments() {
return segments.length;
}
private static String[] splitPath(HttpRequest request) {
// NOTE(review): lower-cases the entire path, making model/function lookup
// effectively case-insensitive and mixed-case names unmatchable -- confirm intended.
String path = request.getUri().getPath().toLowerCase();
if (path.startsWith("/")) {
path = path.substring("/".length());
}
if (path.endsWith("/")) {
path = path.substring(0, path.length() - 1);
}
return path.split("/");
}
}
private static class Response extends HttpResponse {
// Pre-rendered JSON response body.
private final byte[] data;
Response(int code, byte[] data) {
super(code);
this.data = data;
}
Response(int code, String data) {
// Encodes the string using the handler's DEFAULT_CHARACTER_ENCODING.
this(code, data.getBytes(Charset.forName(DEFAULT_CHARACTER_ENCODING)));
}
@Override
public String getContentType() {
return "application/json";
}
@Override
public void render(OutputStream outputStream) throws IOException {
outputStream.write(data);
}
}
private static class ErrorResponse extends Response {
ErrorResponse(int code, String data) {
// NOTE(review): 'data' is spliced into the JSON literal without escaping;
// a message containing '"' or '\' would produce invalid JSON -- confirm
// messages are always plain text.
super(code, "{\"error\":\"" + data + "\"}");
}
}
} | class ModelsEvaluationHandler extends ThreadedHttpRequestHandler {
public static final String API_ROOT = "model-evaluation";
public static final String VERSION_V1 = "v1";
public static final String EVALUATE = "eval";
private final ModelsEvaluator modelsEvaluator;
public ModelsEvaluationHandler(ModelsEvaluator modelsEvaluator, Executor executor) {
super(executor);
this.modelsEvaluator = modelsEvaluator;
}
@Override
public HttpResponse handle(HttpRequest request) {
Path path = new Path(request);
Optional<String> apiName = path.segment(0);
Optional<String> version = path.segment(1);
Optional<String> modelName = path.segment(2);
if ( ! apiName.isPresent() || ! apiName.get().equalsIgnoreCase(API_ROOT)) {
return new ErrorResponse(404, "unknown API");
}
if ( ! version.isPresent() || ! version.get().equalsIgnoreCase(VERSION_V1)) {
return new ErrorResponse(404, "unknown API version");
}
if ( ! modelName.isPresent()) {
return listAllModels(request);
}
if ( ! modelsEvaluator.models().containsKey(modelName.get())) {
return new ErrorResponse(404, "no model with name '" + modelName.get() + "' found");
}
Model model = modelsEvaluator.models().get(modelName.get());
if (path.segments() == 3) {
if (model.functions().size() > 1) {
return listModelDetails(request, modelName.get());
}
return listTypeDetails(request, modelName.get());
}
if (path.segments() == 4) {
if ( ! path.segment(3).get().equalsIgnoreCase(EVALUATE)) {
return listTypeDetails(request, modelName.get(), path.segment(3).get());
}
if (model.functions().stream().anyMatch(f -> f.getName().equalsIgnoreCase(EVALUATE))) {
return listTypeDetails(request, modelName.get(), path.segment(3).get());
}
if (model.functions().size() <= 1) {
return evaluateModel(request, modelName.get());
}
return new ErrorResponse(404, "attempt to evaluate model without specifying function");
}
if (path.segments() == 5) {
if (path.segment(4).get().equalsIgnoreCase(EVALUATE)) {
return evaluateModel(request, modelName.get(), path.segment(3).get());
}
}
return new ErrorResponse(404, "unrecognized request");
}
private HttpResponse listAllModels(HttpRequest request) {
Slime slime = new Slime();
Cursor root = slime.setObject();
for (String modelName: modelsEvaluator.models().keySet()) {
root.setString(modelName, baseUrl(request) + modelName);
}
return new Response(200, com.yahoo.slime.JsonFormat.toJsonBytes(slime));
}
private HttpResponse listModelDetails(HttpRequest request, String modelName) {
Model model = modelsEvaluator.models().get(modelName);
Slime slime = new Slime();
Cursor root = slime.setObject();
for (ExpressionFunction func : model.functions()) {
root.setString(func.getName(), baseUrl(request) + modelName + "/" + func.getName());
}
return new Response(200, com.yahoo.slime.JsonFormat.toJsonBytes(slime));
}
private HttpResponse listTypeDetails(HttpRequest request, String modelName) {
return listTypeDetails(request, modelsEvaluator.evaluatorOf(modelName));
}
private HttpResponse listTypeDetails(HttpRequest request, String modelName, String signatureAndOutput) {
return listTypeDetails(request, modelsEvaluator.evaluatorOf(modelName, signatureAndOutput));
}
private HttpResponse evaluateModel(HttpRequest request, String modelName) {
return evaluateModel(request, modelsEvaluator.evaluatorOf(modelName));
}
private HttpResponse evaluateModel(HttpRequest request, String modelName, String signatureAndOutput) {
return evaluateModel(request, modelsEvaluator.evaluatorOf(modelName, signatureAndOutput));
}
private HttpResponse evaluateModel(HttpRequest request, FunctionEvaluator evaluator) {
for (String bindingName : evaluator.context().names()) {
property(request, bindingName).ifPresent(s -> evaluator.bind(bindingName, Tensor.from(s)));
}
Tensor result = evaluator.evaluate();
return new Response(200, JsonFormat.encode(result));
}
private Optional<String> property(HttpRequest request, String name) {
return Optional.ofNullable(request.getProperty(name));
}
private String baseUrl(HttpRequest request) {
URI uri = request.getUri();
StringBuilder sb = new StringBuilder();
sb.append(uri.getScheme()).append(":
if (uri.getPort() >= 0) {
sb.append(":").append(uri.getPort());
}
sb.append("/").append(API_ROOT).append("/").append(VERSION_V1).append("/");
return sb.toString();
}
private static class Path {
private final String[] segments;
public Path(HttpRequest httpRequest) {
segments = splitPath(httpRequest);
}
Optional<String> segment(int index) {
return (index < 0 || index >= segments.length) ? Optional.empty() : Optional.of(segments[index]);
}
int segments() {
return segments.length;
}
private static String[] splitPath(HttpRequest request) {
String path = request.getUri().getPath().toLowerCase();
if (path.startsWith("/")) {
path = path.substring("/".length());
}
if (path.endsWith("/")) {
path = path.substring(0, path.length() - 1);
}
return path.split("/");
}
}
private static class Response extends HttpResponse {
private final byte[] data;
Response(int code, byte[] data) {
super(code);
this.data = data;
}
Response(int code, String data) {
this(code, data.getBytes(Charset.forName(DEFAULT_CHARACTER_ENCODING)));
}
@Override
public String getContentType() {
return "application/json";
}
@Override
public void render(OutputStream outputStream) throws IOException {
outputStream.write(data);
}
}
private static class ErrorResponse extends Response {
ErrorResponse(int code, String data) {
// NOTE(review): 'data' is spliced into the JSON literal without escaping;
// a message containing '"' or '\' would produce invalid JSON -- confirm
// messages are always plain text.
super(code, "{\"error\":\"" + data + "\"}");
}
}
} |
`charAt(i+1)` may throw `IndexOutOfBoundsException` if high surrogate is last char in string. Also check if i+1 would go out of bounds if `charCount` is 2? | public static OptionalInt validateTextString(String string) {
for (int i = 0; i < string.length(); ) {
int codePoint = string.codePointAt(i);
if ( ! Text.isTextCharacter(codePoint))
return OptionalInt.of(codePoint);
int charCount = Character.charCount(codePoint);
if (Character.isHighSurrogate(string.charAt(i))) {
if ( charCount == 1) {
return OptionalInt.of(string.codePointAt(i));
} else if ( !Character.isLowSurrogate(string.charAt(i+1))) {
return OptionalInt.of(string.codePointAt(i+1));
}
}
i += charCount;
}
return OptionalInt.empty();
} | return OptionalInt.of(string.codePointAt(i+1)); | public static OptionalInt validateTextString(String string) {
for (int i = 0; i < string.length(); ) {
int codePoint = string.codePointAt(i);
if ( ! Text.isTextCharacter(codePoint))
return OptionalInt.of(codePoint);
int charCount = Character.charCount(codePoint);
if (Character.isHighSurrogate(string.charAt(i))) {
if ( charCount == 1) {
return OptionalInt.of(string.codePointAt(i));
} else if ( !Character.isLowSurrogate(string.charAt(i+1))) {
return OptionalInt.of(string.codePointAt(i+1));
}
}
i += charCount;
}
return OptionalInt.empty();
} | class Text {
private static final boolean[] allowedAsciiChars = new boolean[0x80];
static {
    // Array elements default to false; switch on only the accepted characters:
    // TAB, LF and CR among the controls, plus all of 0x20..0x7F (printable ASCII and DEL).
    allowedAsciiChars[0x9] = true;  // TAB
    allowedAsciiChars[0xA] = true;  // LF
    allowedAsciiChars[0xD] = true;  // CR
    for (int c = 0x20; c <= 0x7F; c++) {
        allowedAsciiChars[c] = true;
    }
}
/** No instantiation */
private Text() {}
/**
* Returns whether the given codepoint is a valid text character, potentially suitable for
* purposes such as indexing and display, see http:
*/
public static boolean isTextCharacter(int codepoint) {
    // ASCII range: table lookup (TAB/LF/CR and printable characters only).
    if (codepoint < 0x80) return allowedAsciiChars[codepoint];
    // U+FDD0..U+FDDF are rejected.
    if (codepoint >= 0xFDD0 && codepoint <= 0xFDDF) return false;
    // Everything else below U+1FFFE is accepted, as is anything above U+10FFFF
    // (both exactly as in the original cascade of range checks).
    if (codepoint < 0x1FFFE || codepoint > 0x10FFFF) return true;
    // In planes 1..16 the last two code points of each plane (xxFFFE / xxFFFF)
    // are rejected; all other supplementary code points are accepted.
    return (codepoint & 0xFFFF) < 0xFFFE;
}
/**
* Lazily creates the replacement buffer used by stripInvalidCharacters, seeding it
* with the valid prefix of s before a space is appended for the invalid character.
*/
private static StringBuilder lazy(StringBuilder sb, String s, int i) {
// Lazily materializes the replacement buffer: on the first invalid character the
// valid prefix s[0, i) is copied, then (on every call) one space is appended as
// the replacement for the invalid character.
if (sb == null) {
sb = new StringBuilder(s.substring(0, i));
}
sb.append(' ');
return sb;
}
/**
* Returns a string where any invalid characters in the input string is replaced by spaces
*/
public static String stripInvalidCharacters(String string) {
// 'stripped' stays null -- and the input is returned unchanged -- until the
// first invalid character is met; see lazy().
StringBuilder stripped = null;
for (int i = 0; i < string.length();) {
int codePoint = string.codePointAt(i);
int charCount = Character.charCount(codePoint);
if ( ! Text.isTextCharacter(codePoint)) {
stripped = lazy(stripped, string, i);
} else {
if (Character.isHighSurrogate(string.charAt(i))) {
// A high surrogate that did not form a pair (charCount == 1) or is not
// followed by a low surrogate is malformed and replaced by a space.
if (charCount == 1) {
stripped = lazy(stripped, string, i);
} else if (!Character.isLowSurrogate(string.charAt(i+1))) {
stripped = lazy(stripped, string, i);
} else {
if (stripped != null) {
stripped.appendCodePoint(codePoint);
}
}
} else {
if (stripped != null) {
stripped.appendCodePoint(codePoint);
}
}
}
i += charCount;
}
return stripped != null ? stripped.toString() : string;
}
} | class Text {
private static final boolean[] allowedAsciiChars = new boolean[0x80];
static {
    // Array elements default to false; switch on only the accepted characters:
    // TAB, LF and CR among the controls, plus all of 0x20..0x7F (printable ASCII and DEL).
    allowedAsciiChars[0x9] = true;  // TAB
    allowedAsciiChars[0xA] = true;  // LF
    allowedAsciiChars[0xD] = true;  // CR
    for (int c = 0x20; c <= 0x7F; c++) {
        allowedAsciiChars[c] = true;
    }
}
/** No instantiation */
private Text() {}
/**
* Returns whether the given codepoint is a valid text character, potentially suitable for
* purposes such as indexing and display, see http:
*/
public static boolean isTextCharacter(int codepoint) {
    // ASCII range: table lookup (TAB/LF/CR and printable characters only).
    if (codepoint < 0x80) return allowedAsciiChars[codepoint];
    // U+FDD0..U+FDDF are rejected.
    if (codepoint >= 0xFDD0 && codepoint <= 0xFDDF) return false;
    // Everything else below U+1FFFE is accepted, as is anything above U+10FFFF
    // (both exactly as in the original cascade of range checks).
    if (codepoint < 0x1FFFE || codepoint > 0x10FFFF) return true;
    // In planes 1..16 the last two code points of each plane (xxFFFE / xxFFFF)
    // are rejected; all other supplementary code points are accepted.
    return (codepoint & 0xFFFF) < 0xFFFE;
}
/**
* Validates that the given string value only contains text characters and
* returns the first illegal code point if one is found.
*/
private static StringBuilder lazy(StringBuilder sb, String s, int i) {
if (sb == null) {
sb = new StringBuilder(s.substring(0, i));
}
sb.append(' ');
return sb;
}
/**
* Returns a string where any invalid characters in the input string is replaced by spaces
*/
public static String stripInvalidCharacters(String string) {
StringBuilder stripped = null;
for (int i = 0; i < string.length();) {
int codePoint = string.codePointAt(i);
int charCount = Character.charCount(codePoint);
if ( ! Text.isTextCharacter(codePoint)) {
stripped = lazy(stripped, string, i);
} else {
if (Character.isHighSurrogate(string.charAt(i))) {
if (charCount == 1) {
stripped = lazy(stripped, string, i);
} else if (!Character.isLowSurrogate(string.charAt(i+1))) {
stripped = lazy(stripped, string, i);
} else {
if (stripped != null) {
stripped.appendCodePoint(codePoint);
}
}
} else {
if (stripped != null) {
stripped.appendCodePoint(codePoint);
}
}
}
i += charCount;
}
return stripped != null ? stripped.toString() : string;
}
} |
charAt(i+1) will not be reached as it will return at the charCount == 1 branch. If charCount is 2 both i and i+1 will be exist. | public static OptionalInt validateTextString(String string) {
for (int i = 0; i < string.length(); ) {
int codePoint = string.codePointAt(i);
if ( ! Text.isTextCharacter(codePoint))
return OptionalInt.of(codePoint);
int charCount = Character.charCount(codePoint);
if (Character.isHighSurrogate(string.charAt(i))) {
if ( charCount == 1) {
return OptionalInt.of(string.codePointAt(i));
} else if ( !Character.isLowSurrogate(string.charAt(i+1))) {
return OptionalInt.of(string.codePointAt(i+1));
}
}
i += charCount;
}
return OptionalInt.empty();
} | return OptionalInt.of(string.codePointAt(i+1)); | public static OptionalInt validateTextString(String string) {
for (int i = 0; i < string.length(); ) {
int codePoint = string.codePointAt(i);
if ( ! Text.isTextCharacter(codePoint))
return OptionalInt.of(codePoint);
int charCount = Character.charCount(codePoint);
if (Character.isHighSurrogate(string.charAt(i))) {
if ( charCount == 1) {
return OptionalInt.of(string.codePointAt(i));
} else if ( !Character.isLowSurrogate(string.charAt(i+1))) {
return OptionalInt.of(string.codePointAt(i+1));
}
}
i += charCount;
}
return OptionalInt.empty();
} | class Text {
private static final boolean[] allowedAsciiChars = new boolean[0x80];
static {
    // Array elements default to false; switch on only the accepted characters:
    // TAB, LF and CR among the controls, plus all of 0x20..0x7F (printable ASCII and DEL).
    allowedAsciiChars[0x9] = true;  // TAB
    allowedAsciiChars[0xA] = true;  // LF
    allowedAsciiChars[0xD] = true;  // CR
    for (int c = 0x20; c <= 0x7F; c++) {
        allowedAsciiChars[c] = true;
    }
}
/** No instantiation */
private Text() {}
/**
* Returns whether the given codepoint is a valid text character, potentially suitable for
* purposes such as indexing and display, see http:
*/
public static boolean isTextCharacter(int codepoint) {
    // ASCII range: table lookup (TAB/LF/CR and printable characters only).
    if (codepoint < 0x80) return allowedAsciiChars[codepoint];
    // U+FDD0..U+FDDF are rejected.
    if (codepoint >= 0xFDD0 && codepoint <= 0xFDDF) return false;
    // Everything else below U+1FFFE is accepted, as is anything above U+10FFFF
    // (both exactly as in the original cascade of range checks).
    if (codepoint < 0x1FFFE || codepoint > 0x10FFFF) return true;
    // In planes 1..16 the last two code points of each plane (xxFFFE / xxFFFF)
    // are rejected; all other supplementary code points are accepted.
    return (codepoint & 0xFFFF) < 0xFFFE;
}
/**
* Validates that the given string value only contains text characters and
* returns the first illegal code point if one is found.
*/
private static StringBuilder lazy(StringBuilder sb, String s, int i) {
if (sb == null) {
sb = new StringBuilder(s.substring(0, i));
}
sb.append(' ');
return sb;
}
/**
* Returns a string where any invalid characters in the input string is replaced by spaces
*/
public static String stripInvalidCharacters(String string) {
StringBuilder stripped = null;
for (int i = 0; i < string.length();) {
int codePoint = string.codePointAt(i);
int charCount = Character.charCount(codePoint);
if ( ! Text.isTextCharacter(codePoint)) {
stripped = lazy(stripped, string, i);
} else {
if (Character.isHighSurrogate(string.charAt(i))) {
if (charCount == 1) {
stripped = lazy(stripped, string, i);
} else if (!Character.isLowSurrogate(string.charAt(i+1))) {
stripped = lazy(stripped, string, i);
} else {
if (stripped != null) {
stripped.appendCodePoint(codePoint);
}
}
} else {
if (stripped != null) {
stripped.appendCodePoint(codePoint);
}
}
}
i += charCount;
}
return stripped != null ? stripped.toString() : string;
}
} | class Text {
private static final boolean[] allowedAsciiChars = new boolean[0x80];
static {
allowedAsciiChars[0x0] = false;
allowedAsciiChars[0x1] = false;
allowedAsciiChars[0x2] = false;
allowedAsciiChars[0x3] = false;
allowedAsciiChars[0x4] = false;
allowedAsciiChars[0x5] = false;
allowedAsciiChars[0x6] = false;
allowedAsciiChars[0x7] = false;
allowedAsciiChars[0x8] = false;
allowedAsciiChars[0x9] = true;
allowedAsciiChars[0xA] = true;
allowedAsciiChars[0xB] = false;
allowedAsciiChars[0xC] = false;
allowedAsciiChars[0xD] = true;
for (int i = 0xE; i < 0x20; i++) {
allowedAsciiChars[i] = false;
}
for (int i = 0x20; i < 0x7F; i++) {
allowedAsciiChars[i] = true;
}
allowedAsciiChars[0x7F] = true;
}
/** No instantiation */
private Text() {}
/**
* Returns whether the given codepoint is a valid text character, potentially suitable for
* purposes such as indexing and display, see http:
*/
public static boolean isTextCharacter(int codepoint) {
if (codepoint < 0x80) return allowedAsciiChars[codepoint];
if (codepoint < 0xFDD0) return true;
if (codepoint <= 0xFDDF) return false;
if (codepoint < 0x1FFFE) return true;
if (codepoint <= 0x1FFFF) return false;
if (codepoint < 0x2FFFE) return true;
if (codepoint <= 0x2FFFF) return false;
if (codepoint < 0x3FFFE) return true;
if (codepoint <= 0x3FFFF) return false;
if (codepoint < 0x4FFFE) return true;
if (codepoint <= 0x4FFFF) return false;
if (codepoint < 0x5FFFE) return true;
if (codepoint <= 0x5FFFF) return false;
if (codepoint < 0x6FFFE) return true;
if (codepoint <= 0x6FFFF) return false;
if (codepoint < 0x7FFFE) return true;
if (codepoint <= 0x7FFFF) return false;
if (codepoint < 0x8FFFE) return true;
if (codepoint <= 0x8FFFF) return false;
if (codepoint < 0x9FFFE) return true;
if (codepoint <= 0x9FFFF) return false;
if (codepoint < 0xAFFFE) return true;
if (codepoint <= 0xAFFFF) return false;
if (codepoint < 0xBFFFE) return true;
if (codepoint <= 0xBFFFF) return false;
if (codepoint < 0xCFFFE) return true;
if (codepoint <= 0xCFFFF) return false;
if (codepoint < 0xDFFFE) return true;
if (codepoint <= 0xDFFFF) return false;
if (codepoint < 0xEFFFE) return true;
if (codepoint <= 0xEFFFF) return false;
if (codepoint < 0xFFFFE) return true;
if (codepoint <= 0xFFFFF) return false;
if (codepoint < 0x10FFFE) return true;
if (codepoint <= 0x10FFFF) return false;
return true;
}
/**
* Validates that the given string value only contains text characters and
* returns the first illegal code point if one is found.
*/
private static StringBuilder lazy(StringBuilder sb, String s, int i) {
if (sb == null) {
sb = new StringBuilder(s.substring(0, i));
}
sb.append(' ');
return sb;
}
/**
* Returns a string where any invalid characters in the input string is replaced by spaces
*/
public static String stripInvalidCharacters(String string) {
StringBuilder stripped = null;
for (int i = 0; i < string.length();) {
int codePoint = string.codePointAt(i);
int charCount = Character.charCount(codePoint);
if ( ! Text.isTextCharacter(codePoint)) {
stripped = lazy(stripped, string, i);
} else {
if (Character.isHighSurrogate(string.charAt(i))) {
if (charCount == 1) {
stripped = lazy(stripped, string, i);
} else if (!Character.isLowSurrogate(string.charAt(i+1))) {
stripped = lazy(stripped, string, i);
} else {
if (stripped != null) {
stripped.appendCodePoint(codePoint);
}
}
} else {
if (stripped != null) {
stripped.appendCodePoint(codePoint);
}
}
}
i += charCount;
}
return stripped != null ? stripped.toString() : string;
}
} |
Consider appending `". Retrying in " + maintenanceInterval()`. Same below. | protected void maintain() {
try {
confirmApplicationOwnerships();
ensureConfirmationResponses();
}
catch (UncheckedIOException e) {
log.log(Level.INFO, () -> "IO exception handling issues, will retry: '" + Exceptions.toMessageString(e));
}
} | log.log(Level.INFO, () -> "IO exception handling issues, will retry: '" + Exceptions.toMessageString(e)); | protected void maintain() {
try {
confirmApplicationOwnerships();
ensureConfirmationResponses();
}
catch (UncheckedIOException e) {
log.log(Level.INFO, () -> "IO exception handling issues, will retry in " + maintenanceInterval() + ": '" + Exceptions.toMessageString(e));
}
} | class ApplicationOwnershipConfirmer extends Maintainer {
private final OwnershipIssues ownershipIssues;
public ApplicationOwnershipConfirmer(Controller controller, Duration interval, JobControl jobControl, OwnershipIssues ownershipIssues) {
super(controller, interval, jobControl);
this.ownershipIssues = ownershipIssues;
}
@Override
/** File an ownership issue with the owners of all applications we know about. */
private void confirmApplicationOwnerships() {
ApplicationList.from(controller().applications().asList())
.notPullRequest()
.hasProductionDeployment()
.asList()
.forEach(application -> {
try {
Tenant tenant = ownerOf(application.id());
Optional<IssueId> ourIssueId = application.ownershipIssueId();
ourIssueId = tenant instanceof AthenzTenant
? ownershipIssues.confirmOwnership(ourIssueId, application.id(), propertyIdFor((AthenzTenant) tenant))
: ownershipIssues.confirmOwnership(ourIssueId, application.id(), userFor(tenant));
ourIssueId.ifPresent(issueId -> store(issueId, application.id()));
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Exception caught when attempting to file an issue for " + application.id(), e);
}
});
}
/** Escalate ownership issues which have not been closed before a defined amount of time has passed. */
private void ensureConfirmationResponses() {
for (Application application : controller().applications().asList())
application.ownershipIssueId().ifPresent(issueId -> {
try {
Optional<PropertyId> propertyId = Optional.of(application.id())
.map(this::ownerOf)
.filter(t -> t instanceof AthenzTenant)
.map(AthenzTenant.class::cast)
.flatMap(AthenzTenant::propertyId);
ownershipIssues.ensureResponse(issueId, propertyId);
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Exception caught when attempting to escalate issue with id " + issueId, e);
}
});
}
private Tenant ownerOf(ApplicationId applicationId) {
return controller().tenants().tenant(applicationId.tenant())
.orElseThrow(() -> new IllegalStateException("No tenant found for application " + applicationId));
}
protected User userFor(Tenant tenant) {
return User.from(tenant.name().value().replaceFirst(Tenant.userPrefix, ""));
}
protected PropertyId propertyIdFor(AthenzTenant tenant) {
return tenant.propertyId()
.orElseThrow(() -> new NoSuchElementException("No PropertyId is listed for non-user tenant " +
tenant));
}
protected void store(IssueId issueId, ApplicationId applicationId) {
controller().applications().lockIfPresent(applicationId, application ->
controller().applications().store(application.withOwnershipIssueId(issueId)));
}
} | class ApplicationOwnershipConfirmer extends Maintainer {
private final OwnershipIssues ownershipIssues;
public ApplicationOwnershipConfirmer(Controller controller, Duration interval, JobControl jobControl, OwnershipIssues ownershipIssues) {
super(controller, interval, jobControl);
this.ownershipIssues = ownershipIssues;
}
@Override
/** File an ownership issue with the owners of all applications we know about. */
private void confirmApplicationOwnerships() {
ApplicationList.from(controller().applications().asList())
.notPullRequest()
.hasProductionDeployment()
.asList()
.forEach(application -> {
try {
Tenant tenant = ownerOf(application.id());
Optional<IssueId> ourIssueId = application.ownershipIssueId();
ourIssueId = tenant instanceof AthenzTenant
? ownershipIssues.confirmOwnership(ourIssueId, application.id(), propertyIdFor((AthenzTenant) tenant))
: ownershipIssues.confirmOwnership(ourIssueId, application.id(), userFor(tenant));
ourIssueId.ifPresent(issueId -> store(issueId, application.id()));
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Exception caught when attempting to file an issue for " + application.id(), e);
}
});
}
/** Escalate ownership issues which have not been closed before a defined amount of time has passed. */
private void ensureConfirmationResponses() {
for (Application application : controller().applications().asList())
application.ownershipIssueId().ifPresent(issueId -> {
try {
Optional<PropertyId> propertyId = Optional.of(application.id())
.map(this::ownerOf)
.filter(t -> t instanceof AthenzTenant)
.map(AthenzTenant.class::cast)
.flatMap(AthenzTenant::propertyId);
ownershipIssues.ensureResponse(issueId, propertyId);
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Exception caught when attempting to escalate issue with id " + issueId, e);
}
});
}
private Tenant ownerOf(ApplicationId applicationId) {
return controller().tenants().tenant(applicationId.tenant())
.orElseThrow(() -> new IllegalStateException("No tenant found for application " + applicationId));
}
protected User userFor(Tenant tenant) {
return User.from(tenant.name().value().replaceFirst(Tenant.userPrefix, ""));
}
protected PropertyId propertyIdFor(AthenzTenant tenant) {
return tenant.propertyId()
.orElseThrow(() -> new NoSuchElementException("No PropertyId is listed for non-user tenant " +
tenant));
}
protected void store(IssueId issueId, ApplicationId applicationId) {
controller().applications().lockIfPresent(applicationId, application ->
controller().applications().store(application.withOwnershipIssueId(issueId)));
}
} |
Done. | protected void maintain() {
try {
confirmApplicationOwnerships();
ensureConfirmationResponses();
}
catch (UncheckedIOException e) {
log.log(Level.INFO, () -> "IO exception handling issues, will retry: '" + Exceptions.toMessageString(e));
}
} | log.log(Level.INFO, () -> "IO exception handling issues, will retry: '" + Exceptions.toMessageString(e)); | protected void maintain() {
try {
confirmApplicationOwnerships();
ensureConfirmationResponses();
}
catch (UncheckedIOException e) {
log.log(Level.INFO, () -> "IO exception handling issues, will retry in " + maintenanceInterval() + ": '" + Exceptions.toMessageString(e));
}
} | class ApplicationOwnershipConfirmer extends Maintainer {
private final OwnershipIssues ownershipIssues;
public ApplicationOwnershipConfirmer(Controller controller, Duration interval, JobControl jobControl, OwnershipIssues ownershipIssues) {
super(controller, interval, jobControl);
this.ownershipIssues = ownershipIssues;
}
@Override
/** File an ownership issue with the owners of all applications we know about. */
private void confirmApplicationOwnerships() {
ApplicationList.from(controller().applications().asList())
.notPullRequest()
.hasProductionDeployment()
.asList()
.forEach(application -> {
try {
Tenant tenant = ownerOf(application.id());
Optional<IssueId> ourIssueId = application.ownershipIssueId();
ourIssueId = tenant instanceof AthenzTenant
? ownershipIssues.confirmOwnership(ourIssueId, application.id(), propertyIdFor((AthenzTenant) tenant))
: ownershipIssues.confirmOwnership(ourIssueId, application.id(), userFor(tenant));
ourIssueId.ifPresent(issueId -> store(issueId, application.id()));
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Exception caught when attempting to file an issue for " + application.id(), e);
}
});
}
/** Escalate ownership issues which have not been closed before a defined amount of time has passed. */
private void ensureConfirmationResponses() {
for (Application application : controller().applications().asList())
application.ownershipIssueId().ifPresent(issueId -> {
try {
Optional<PropertyId> propertyId = Optional.of(application.id())
.map(this::ownerOf)
.filter(t -> t instanceof AthenzTenant)
.map(AthenzTenant.class::cast)
.flatMap(AthenzTenant::propertyId);
ownershipIssues.ensureResponse(issueId, propertyId);
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Exception caught when attempting to escalate issue with id " + issueId, e);
}
});
}
private Tenant ownerOf(ApplicationId applicationId) {
return controller().tenants().tenant(applicationId.tenant())
.orElseThrow(() -> new IllegalStateException("No tenant found for application " + applicationId));
}
protected User userFor(Tenant tenant) {
return User.from(tenant.name().value().replaceFirst(Tenant.userPrefix, ""));
}
protected PropertyId propertyIdFor(AthenzTenant tenant) {
return tenant.propertyId()
.orElseThrow(() -> new NoSuchElementException("No PropertyId is listed for non-user tenant " +
tenant));
}
protected void store(IssueId issueId, ApplicationId applicationId) {
controller().applications().lockIfPresent(applicationId, application ->
controller().applications().store(application.withOwnershipIssueId(issueId)));
}
} | class ApplicationOwnershipConfirmer extends Maintainer {
private final OwnershipIssues ownershipIssues;
public ApplicationOwnershipConfirmer(Controller controller, Duration interval, JobControl jobControl, OwnershipIssues ownershipIssues) {
super(controller, interval, jobControl);
this.ownershipIssues = ownershipIssues;
}
@Override
/** File an ownership issue with the owners of all applications we know about. */
private void confirmApplicationOwnerships() {
ApplicationList.from(controller().applications().asList())
.notPullRequest()
.hasProductionDeployment()
.asList()
.forEach(application -> {
try {
Tenant tenant = ownerOf(application.id());
Optional<IssueId> ourIssueId = application.ownershipIssueId();
ourIssueId = tenant instanceof AthenzTenant
? ownershipIssues.confirmOwnership(ourIssueId, application.id(), propertyIdFor((AthenzTenant) tenant))
: ownershipIssues.confirmOwnership(ourIssueId, application.id(), userFor(tenant));
ourIssueId.ifPresent(issueId -> store(issueId, application.id()));
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Exception caught when attempting to file an issue for " + application.id(), e);
}
});
}
/** Escalate ownership issues which have not been closed before a defined amount of time has passed. */
private void ensureConfirmationResponses() {
for (Application application : controller().applications().asList())
application.ownershipIssueId().ifPresent(issueId -> {
try {
Optional<PropertyId> propertyId = Optional.of(application.id())
.map(this::ownerOf)
.filter(t -> t instanceof AthenzTenant)
.map(AthenzTenant.class::cast)
.flatMap(AthenzTenant::propertyId);
ownershipIssues.ensureResponse(issueId, propertyId);
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Exception caught when attempting to escalate issue with id " + issueId, e);
}
});
}
private Tenant ownerOf(ApplicationId applicationId) {
return controller().tenants().tenant(applicationId.tenant())
.orElseThrow(() -> new IllegalStateException("No tenant found for application " + applicationId));
}
protected User userFor(Tenant tenant) {
return User.from(tenant.name().value().replaceFirst(Tenant.userPrefix, ""));
}
protected PropertyId propertyIdFor(AthenzTenant tenant) {
return tenant.propertyId()
.orElseThrow(() -> new NoSuchElementException("No PropertyId is listed for non-user tenant " +
tenant));
}
protected void store(IssueId issueId, ApplicationId applicationId) {
controller().applications().lockIfPresent(applicationId, application ->
controller().applications().store(application.withOwnershipIssueId(issueId)));
}
} |
Reversed order. | void tick() {
State wantedStateCopy;
synchronized (monitor) {
while (! workToDoNow) {
Duration timeSinceLastConverge = Duration.between(lastTick, clock.instant());
long remainder = nodeAdminConvergeStateInterval.minus(timeSinceLastConverge).toMillis();
if (remainder > 0) {
try {
monitor.wait(remainder);
} catch (InterruptedException e) {
log.info("Interrupted, but ignoring this: NodeAdminStateUpdater");
}
} else break;
}
lastTick = clock.instant();
workToDoNow = false;
wantedStateCopy = this.wantedState;
}
try {
convergeState(wantedStateCopy);
setLastConvergenceException(null);
} catch (OrchestratorException | ConvergenceException | HttpException e) {
setLastConvergenceException(e);
log.info("Unable to converge to " + wantedStateCopy + ": " + e.getMessage());
} catch (RuntimeException e) {
setLastConvergenceException(e);
log.log(LogLevel.ERROR, "Error while trying to converge to " + wantedStateCopy, e);
}
if (wantedStateCopy != RESUMED && currentState == TRANSITIONING) {
Duration subsystemFreezeDuration = nodeAdmin.subsystemFreezeDuration();
if (subsystemFreezeDuration.compareTo(FREEZE_CONVERGENCE_TIMEOUT) > 0) {
log.info("Timed out trying to freeze, will force unfreezed ticks");
nodeAdmin.setFrozen(false);
fetchContainersToRunFromNodeRepository();
}
} else if (currentState == RESUMED) {
fetchContainersToRunFromNodeRepository();
}
} | fetchContainersToRunFromNodeRepository(); | void tick() {
State wantedStateCopy;
synchronized (monitor) {
while (! workToDoNow) {
Duration timeSinceLastConverge = Duration.between(lastTick, clock.instant());
long remainder = nodeAdminConvergeStateInterval.minus(timeSinceLastConverge).toMillis();
if (remainder > 0) {
try {
monitor.wait(remainder);
} catch (InterruptedException e) {
log.info("Interrupted, but ignoring this: NodeAdminStateUpdater");
}
} else break;
}
lastTick = clock.instant();
workToDoNow = false;
wantedStateCopy = this.wantedState;
}
try {
convergeState(wantedStateCopy);
setLastConvergenceException(null);
} catch (OrchestratorException | ConvergenceException | HttpException e) {
setLastConvergenceException(e);
log.info("Unable to converge to " + wantedStateCopy + ": " + e.getMessage());
} catch (RuntimeException e) {
setLastConvergenceException(e);
log.log(LogLevel.ERROR, "Error while trying to converge to " + wantedStateCopy, e);
}
if (wantedStateCopy != RESUMED && currentState == TRANSITIONING) {
Duration subsystemFreezeDuration = nodeAdmin.subsystemFreezeDuration();
if (subsystemFreezeDuration.compareTo(FREEZE_CONVERGENCE_TIMEOUT) > 0) {
log.info("Timed out trying to freeze, will force unfreezed ticks");
fetchContainersToRunFromNodeRepository();
nodeAdmin.setFrozen(false);
}
} else if (currentState == RESUMED) {
fetchContainersToRunFromNodeRepository();
}
} | class NodeAdminStateUpdaterImpl implements NodeAdminStateUpdater {
static final Duration FREEZE_CONVERGENCE_TIMEOUT = Duration.ofMinutes(5);
static final String TRANSITION_EXCEPTION_MESSAGE = "NodeAdminStateUpdater has not run since current wanted state was set";
private final AtomicBoolean terminated = new AtomicBoolean(false);
private State currentState = SUSPENDED_NODE_ADMIN;
private State wantedState = RESUMED;
private boolean workToDoNow = true;
private final Object monitor = new Object();
private RuntimeException lastConvergenceException;
private final Logger log = Logger.getLogger(NodeAdminStateUpdater.class.getName());
private final ScheduledExecutorService specVerifierScheduler =
Executors.newScheduledThreadPool(1, ThreadFactoryFactory.getDaemonThreadFactory("specverifier"));
private final Thread loopThread;
private final NodeRepository nodeRepository;
private final Orchestrator orchestrator;
private final NodeAdmin nodeAdmin;
private final Clock clock;
private final String dockerHostHostName;
private final Duration nodeAdminConvergeStateInterval;
private final Optional<ClassLocking> classLocking;
private Optional<ClassLock> classLock = Optional.empty();
private Instant lastTick;
public NodeAdminStateUpdaterImpl(
NodeRepository nodeRepository,
Orchestrator orchestrator,
StorageMaintainer storageMaintainer,
NodeAdmin nodeAdmin,
String dockerHostHostName,
Clock clock,
Duration nodeAdminConvergeStateInterval,
Optional<ClassLocking> classLocking) {
log.info(objectToString() + ": Creating object");
this.nodeRepository = nodeRepository;
this.orchestrator = orchestrator;
this.nodeAdmin = nodeAdmin;
this.dockerHostHostName = dockerHostHostName;
this.clock = clock;
this.nodeAdminConvergeStateInterval = nodeAdminConvergeStateInterval;
this.classLocking = classLocking;
this.lastTick = clock.instant();
this.loopThread = new Thread(() -> {
if (classLocking.isPresent()) {
log.info(objectToString() + ": Acquiring lock");
try {
classLock = Optional.of(classLocking.get().lockWhile(NodeAdminStateUpdater.class, () -> !terminated.get()));
} catch (LockInterruptException e) {
classLock = Optional.empty();
return;
}
}
log.info(objectToString() + ": Starting threads and schedulers");
nodeAdmin.start();
specVerifierScheduler.scheduleWithFixedDelay(() ->
updateHardwareDivergence(storageMaintainer), 5, 60, TimeUnit.MINUTES);
while (! terminated.get()) {
tick();
}
});
this.loopThread.setName("tick-NodeAdminStateUpdater");
}
private String objectToString() {
return this.getClass().getSimpleName() + "@" + Integer.toString(System.identityHashCode(this));
}
@Override
public Map<String, Object> getDebugPage() {
Map<String, Object> debug = new LinkedHashMap<>();
synchronized (monitor) {
debug.put("dockerHostHostName", dockerHostHostName);
debug.put("wantedState", wantedState);
debug.put("currentState", currentState);
debug.put("NodeAdmin", nodeAdmin.debugInfo());
}
return debug;
}
private void updateHardwareDivergence(StorageMaintainer maintainer) {
if (currentState != RESUMED) return;
try {
NodeSpec node = nodeRepository.getNode(dockerHostHostName);
String hardwareDivergence = maintainer.getHardwareDivergence(node);
if (!node.getHardwareDivergence().orElse("null").equals(hardwareDivergence)) {
NodeAttributes nodeAttributes = new NodeAttributes().withHardwareDivergence(hardwareDivergence);
nodeRepository.updateNodeAttributes(dockerHostHostName, nodeAttributes);
}
} catch (RuntimeException e) {
log.log(Level.WARNING, "Failed to report hardware divergence", e);
}
}
@Override
public void setResumeStateAndCheckIfResumed(State wantedState) {
synchronized (monitor) {
if (this.wantedState != wantedState) {
log.info("Wanted state change: " + this.wantedState + " -> " + wantedState);
this.wantedState = wantedState;
setLastConvergenceException(null);
signalWorkToBeDone();
}
if (currentState != wantedState) {
throw Optional.ofNullable(lastConvergenceException)
.orElseGet(() -> new RuntimeException(TRANSITION_EXCEPTION_MESSAGE));
}
}
}
void signalWorkToBeDone() {
synchronized (monitor) {
if (! workToDoNow) {
workToDoNow = true;
monitor.notifyAll();
}
}
}
private void setLastConvergenceException(RuntimeException exception) {
synchronized (monitor) {
lastConvergenceException = exception;
}
}
/**
* This method attempts to converge node-admin w/agents to a {@link State}
* with respect to: freeze, Orchestrator, and services running.
*/
private void convergeState(State wantedState) {
if (currentState == wantedState) return;
synchronized (monitor) {
currentState = TRANSITIONING;
}
boolean wantFrozen = wantedState != RESUMED;
if (!nodeAdmin.setFrozen(wantFrozen)) {
throw new ConvergenceException("NodeAdmin is not yet " + (wantFrozen ? "frozen" : "unfrozen"));
}
boolean hostIsActiveInNR = nodeRepository.getNode(dockerHostHostName).getState() == Node.State.active;
switch (wantedState) {
case RESUMED:
if (hostIsActiveInNR) orchestrator.resume(dockerHostHostName);
break;
case SUSPENDED_NODE_ADMIN:
if (hostIsActiveInNR) orchestrator.suspend(dockerHostHostName);
break;
case SUSPENDED:
List<String> nodesInActiveState = getNodesInActiveState();
List<String> nodesToSuspend = new ArrayList<>(nodesInActiveState);
if (hostIsActiveInNR) nodesToSuspend.add(dockerHostHostName);
if (!nodesToSuspend.isEmpty()) {
orchestrator.suspend(dockerHostHostName, nodesToSuspend);
log.info("Orchestrator allows suspension of " + nodesToSuspend);
}
nodeAdmin.stopNodeAgentServices(nodesInActiveState);
break;
default:
throw new IllegalStateException("Unknown wanted state " + wantedState);
}
log.info("State changed from " + currentState + " to " + wantedState);
synchronized (monitor) {
currentState = wantedState;
}
}
private void fetchContainersToRunFromNodeRepository() {
try {
final List<NodeSpec> containersToRun = nodeRepository.getNodes(dockerHostHostName);
nodeAdmin.refreshContainersToRun(containersToRun);
} catch (Exception e) {
log.log(LogLevel.WARNING, "Failed to update which containers should be running", e);
}
}
private List<String> getNodesInActiveState() {
return nodeRepository.getNodes(dockerHostHostName)
.stream()
.filter(node -> node.getState() == Node.State.active)
.map(NodeSpec::getHostname)
.collect(Collectors.toList());
}
public void start() {
loopThread.start();
}
public void stop() {
log.info(objectToString() + ": Stop called");
if (!terminated.compareAndSet(false, true)) {
throw new RuntimeException("Can not re-stop a node agent.");
}
classLocking.ifPresent(ClassLocking::interrupt);
signalWorkToBeDone();
specVerifierScheduler.shutdown();
do {
try {
loopThread.join();
specVerifierScheduler.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
} catch (InterruptedException e1) {
log.info("Interrupted while waiting for NodeAdminStateUpdater thread and specVerfierScheduler to shutdown");
}
} while (loopThread.isAlive() || !specVerifierScheduler.isTerminated());
nodeAdmin.stop();
classLock.ifPresent(lock -> {
log.info(objectToString() + ": Releasing lock");
lock.close();
});
log.info(objectToString() + ": Stop complete");
}
} | class NodeAdminStateUpdaterImpl implements NodeAdminStateUpdater {
static final Duration FREEZE_CONVERGENCE_TIMEOUT = Duration.ofMinutes(5);
static final String TRANSITION_EXCEPTION_MESSAGE = "NodeAdminStateUpdater has not run since current wanted state was set";
private final AtomicBoolean terminated = new AtomicBoolean(false);
private State currentState = SUSPENDED_NODE_ADMIN;
private State wantedState = RESUMED;
private boolean workToDoNow = true;
private final Object monitor = new Object();
private RuntimeException lastConvergenceException;
private final Logger log = Logger.getLogger(NodeAdminStateUpdater.class.getName());
private final ScheduledExecutorService specVerifierScheduler =
Executors.newScheduledThreadPool(1, ThreadFactoryFactory.getDaemonThreadFactory("specverifier"));
private final Thread loopThread;
private final NodeRepository nodeRepository;
private final Orchestrator orchestrator;
private final NodeAdmin nodeAdmin;
private final Clock clock;
private final String dockerHostHostName;
private final Duration nodeAdminConvergeStateInterval;
private final Optional<ClassLocking> classLocking;
private Optional<ClassLock> classLock = Optional.empty();
private Instant lastTick;
public NodeAdminStateUpdaterImpl(
NodeRepository nodeRepository,
Orchestrator orchestrator,
StorageMaintainer storageMaintainer,
NodeAdmin nodeAdmin,
String dockerHostHostName,
Clock clock,
Duration nodeAdminConvergeStateInterval,
Optional<ClassLocking> classLocking) {
log.info(objectToString() + ": Creating object");
this.nodeRepository = nodeRepository;
this.orchestrator = orchestrator;
this.nodeAdmin = nodeAdmin;
this.dockerHostHostName = dockerHostHostName;
this.clock = clock;
this.nodeAdminConvergeStateInterval = nodeAdminConvergeStateInterval;
this.classLocking = classLocking;
this.lastTick = clock.instant();
this.loopThread = new Thread(() -> {
if (classLocking.isPresent()) {
log.info(objectToString() + ": Acquiring lock");
try {
classLock = Optional.of(classLocking.get().lockWhile(NodeAdminStateUpdater.class, () -> !terminated.get()));
} catch (LockInterruptException e) {
classLock = Optional.empty();
return;
}
}
log.info(objectToString() + ": Starting threads and schedulers");
nodeAdmin.start();
specVerifierScheduler.scheduleWithFixedDelay(() ->
updateHardwareDivergence(storageMaintainer), 5, 60, TimeUnit.MINUTES);
while (! terminated.get()) {
tick();
}
});
this.loopThread.setName("tick-NodeAdminStateUpdater");
}
private String objectToString() {
return this.getClass().getSimpleName() + "@" + Integer.toString(System.identityHashCode(this));
}
@Override
public Map<String, Object> getDebugPage() {
Map<String, Object> debug = new LinkedHashMap<>();
synchronized (monitor) {
debug.put("dockerHostHostName", dockerHostHostName);
debug.put("wantedState", wantedState);
debug.put("currentState", currentState);
debug.put("NodeAdmin", nodeAdmin.debugInfo());
}
return debug;
}
private void updateHardwareDivergence(StorageMaintainer maintainer) {
if (currentState != RESUMED) return;
try {
NodeSpec node = nodeRepository.getNode(dockerHostHostName);
String hardwareDivergence = maintainer.getHardwareDivergence(node);
if (!node.getHardwareDivergence().orElse("null").equals(hardwareDivergence)) {
NodeAttributes nodeAttributes = new NodeAttributes().withHardwareDivergence(hardwareDivergence);
nodeRepository.updateNodeAttributes(dockerHostHostName, nodeAttributes);
}
} catch (RuntimeException e) {
log.log(Level.WARNING, "Failed to report hardware divergence", e);
}
}
@Override
public void setResumeStateAndCheckIfResumed(State wantedState) {
synchronized (monitor) {
if (this.wantedState != wantedState) {
log.info("Wanted state change: " + this.wantedState + " -> " + wantedState);
this.wantedState = wantedState;
setLastConvergenceException(null);
signalWorkToBeDone();
}
if (currentState != wantedState) {
throw Optional.ofNullable(lastConvergenceException)
.orElseGet(() -> new RuntimeException(TRANSITION_EXCEPTION_MESSAGE));
}
}
}
void signalWorkToBeDone() {
synchronized (monitor) {
if (! workToDoNow) {
workToDoNow = true;
monitor.notifyAll();
}
}
}
private void setLastConvergenceException(RuntimeException exception) {
synchronized (monitor) {
lastConvergenceException = exception;
}
}
/**
* This method attempts to converge node-admin w/agents to a {@link State}
* with respect to: freeze, Orchestrator, and services running.
*/
private void convergeState(State wantedState) {
if (currentState == wantedState) return;
synchronized (monitor) {
currentState = TRANSITIONING;
}
boolean wantFrozen = wantedState != RESUMED;
if (!nodeAdmin.setFrozen(wantFrozen)) {
throw new ConvergenceException("NodeAdmin is not yet " + (wantFrozen ? "frozen" : "unfrozen"));
}
boolean hostIsActiveInNR = nodeRepository.getNode(dockerHostHostName).getState() == Node.State.active;
switch (wantedState) {
case RESUMED:
if (hostIsActiveInNR) orchestrator.resume(dockerHostHostName);
break;
case SUSPENDED_NODE_ADMIN:
if (hostIsActiveInNR) orchestrator.suspend(dockerHostHostName);
break;
case SUSPENDED:
List<String> nodesInActiveState = getNodesInActiveState();
List<String> nodesToSuspend = new ArrayList<>(nodesInActiveState);
if (hostIsActiveInNR) nodesToSuspend.add(dockerHostHostName);
if (!nodesToSuspend.isEmpty()) {
orchestrator.suspend(dockerHostHostName, nodesToSuspend);
log.info("Orchestrator allows suspension of " + nodesToSuspend);
}
nodeAdmin.stopNodeAgentServices(nodesInActiveState);
break;
default:
throw new IllegalStateException("Unknown wanted state " + wantedState);
}
log.info("State changed from " + currentState + " to " + wantedState);
synchronized (monitor) {
currentState = wantedState;
}
}
private void fetchContainersToRunFromNodeRepository() {
try {
final List<NodeSpec> containersToRun = nodeRepository.getNodes(dockerHostHostName);
nodeAdmin.refreshContainersToRun(containersToRun);
} catch (Exception e) {
log.log(LogLevel.WARNING, "Failed to update which containers should be running", e);
}
}
private List<String> getNodesInActiveState() {
return nodeRepository.getNodes(dockerHostHostName)
.stream()
.filter(node -> node.getState() == Node.State.active)
.map(NodeSpec::getHostname)
.collect(Collectors.toList());
}
public void start() {
loopThread.start();
}
public void stop() {
log.info(objectToString() + ": Stop called");
if (!terminated.compareAndSet(false, true)) {
throw new RuntimeException("Can not re-stop a node agent.");
}
classLocking.ifPresent(ClassLocking::interrupt);
signalWorkToBeDone();
specVerifierScheduler.shutdown();
do {
try {
loopThread.join();
specVerifierScheduler.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
} catch (InterruptedException e1) {
log.info("Interrupted while waiting for NodeAdminStateUpdater thread and specVerfierScheduler to shutdown");
}
} while (loopThread.isAlive() || !specVerifierScheduler.isTerminated());
nodeAdmin.stop();
classLock.ifPresent(lock -> {
log.info(objectToString() + ": Releasing lock");
lock.close();
});
log.info(objectToString() + ": Stop complete");
}
} |
Consider replacing with `org.junit.Assert.assertEquals()` | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | if(actual != expect) { | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} |
that would produce a much worse error message when the test fails | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | if(actual != expect) { | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} |
There is an overload of `assertEquals()` taking a custom error message. | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | if(actual != expect) { | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} |
I tried; it made the error less readable | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | if(actual != expect) { | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} |
will adjust message to make it work | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | if(actual != expect) { | static private void checkTls(boolean expect, int ... values) {
byte[] data = new byte[values.length];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) values[i];
}
boolean actual = MaybeTlsCryptoSocket.looksLikeTlsToMe(data);
if(actual != expect) {
throw new AssertionError(message(data, actual));
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} | class TlsDetectionTest {
static private String message(byte[] data, boolean actual) {
String msg = "[";
String delimiter = "";
for (byte b: data) {
msg += delimiter + (b & 0xff);
delimiter = ", ";
}
if (actual) {
msg += "] wrongfully detected as tls";
} else {
msg += "] wrongfully rejected as not tls";
}
return msg;
}
@org.junit.Test public void testValidHandshake() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(true, 22, 3, 3, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataOfWrongSize() {
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10);
checkTls(false, 22, 3, 1, 10, 255, 1, 0, 10, 251, 0);
}
@org.junit.Test public void testDataNotTaggedAsHandshake() {
checkTls(false, 23, 3, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMajorVersion() {
checkTls(false, 22, 0, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 1, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 2, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 4, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 5, 1, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataWithBadMinorVersion() {
checkTls(false, 22, 3, 0, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 2, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 4, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 5, 10, 255, 1, 0, 10, 251);
}
@org.junit.Test public void testDataNotTaggedAsClientHello() {
checkTls(false, 22, 3, 1, 10, 255, 0, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 2, 0, 10, 251);
}
@org.junit.Test public void testFrameSizeLimits() {
checkTls(false, 22, 3, 1, 255, 255, 1, 0, 255, 251);
checkTls(false, 22, 3, 1, 72, 1, 1, 0, 71, 253);
checkTls(true, 22, 3, 1, 72, 0, 1, 0, 71, 252);
checkTls(true, 22, 3, 1, 0, 4, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 0, 0, 0);
checkTls(false, 22, 3, 1, 0, 3, 1, 255, 255, 255);
}
@org.junit.Test public void testFrameAndClientHelloSizeRelationship() {
checkTls(true, 22, 3, 1, 10, 255, 1, 0, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 1, 10, 251);
checkTls(false, 22, 3, 1, 10, 255, 1, 2, 10, 251);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 10, 2);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 9, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(false, 22, 3, 1, 10, 5, 1, 0, 11, 1);
checkTls(true, 22, 3, 1, 10, 5, 1, 0, 10, 1);
checkTls(true, 22, 3, 1, 10, 4, 1, 0, 10, 0);
checkTls(true, 22, 3, 1, 10, 3, 1, 0, 9, 255);
checkTls(true, 22, 3, 1, 10, 2, 1, 0, 9, 254);
checkTls(true, 22, 3, 1, 10, 1, 1, 0, 9, 253);
checkTls(true, 22, 3, 1, 10, 0, 1, 0, 9, 252);
}
} |
I think this is wrong. The API should never return direct/VIP config server URLs, as those are not usable by clients anyway due to certificate mismatch. It should instead return the appropriate URL under `/zone/v2/` so that the final URL becomes `controller-host:port/zone/v2/<env>/<region>/serviceview/v1/tenant/...`. | private HttpResponse services(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
ApplicationView applicationView = controller.getApplicationView(tenantName, applicationName, instanceName, environment, region);
ServiceApiResponse response = new ServiceApiResponse(ZoneId.from(environment, region),
new ApplicationId.Builder().tenant(tenantName).applicationName(applicationName).instanceName(instanceName).build(),
controller.zoneRegistry().getConfigServerApiUris(ZoneId.from(environment, region)),
request.getUri());
response.setResponse(applicationView);
return response;
} | controller.zoneRegistry().getConfigServerApiUris(ZoneId.from(environment, region)), | private HttpResponse services(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
ApplicationView applicationView = controller.getApplicationView(tenantName, applicationName, instanceName, environment, region);
ServiceApiResponse response = new ServiceApiResponse(ZoneId.from(environment, region),
new ApplicationId.Builder().tenant(tenantName).applicationName(applicationName).instanceName(instanceName).build(),
controller.zoneRegistry().getConfigServerApiUris(ZoneId.from(environment, region)),
request.getUri());
response.setResponse(applicationView);
return response;
} | class ApplicationApiHandler extends LoggingRequestHandler {
private final Controller controller;
private final AthenzClientFactory athenzClientFactory;
@Inject
public ApplicationApiHandler(LoggingRequestHandler.Context parentCtx,
Controller controller,
AthenzClientFactory athenzClientFactory) {
super(parentCtx);
this.controller = controller;
this.athenzClientFactory = athenzClientFactory;
}
@Override
public Duration getTimeout() {
return Duration.ofMinutes(20);
}
@Override
public HttpResponse handle(HttpRequest request) {
try {
switch (request.getMethod()) {
case GET: return handleGET(request);
case PUT: return handlePUT(request);
case POST: return handlePOST(request);
case DELETE: return handleDELETE(request);
case OPTIONS: return handleOPTIONS();
default: return ErrorResponse.methodNotAllowed("Method '" + request.getMethod() + "' is not supported");
}
}
catch (ForbiddenException e) {
return ErrorResponse.forbidden(Exceptions.toMessageString(e));
}
catch (NotAuthorizedException e) {
return ErrorResponse.unauthorized(Exceptions.toMessageString(e));
}
catch (NotExistsException e) {
return ErrorResponse.notFoundError(Exceptions.toMessageString(e));
}
catch (IllegalArgumentException e) {
return ErrorResponse.badRequest(Exceptions.toMessageString(e));
}
catch (ConfigServerException e) {
return ErrorResponse.from(e);
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Unexpected error handling '" + request.getUri() + "'", e);
return ErrorResponse.internalServerError(Exceptions.toMessageString(e));
}
}
private HttpResponse handleGET(HttpRequest request) {
Path path = new Path(request.getUri().getPath());
if (path.matches("/application/v4/")) return root(request);
if (path.matches("/application/v4/user")) return authenticatedUser(request);
if (path.matches("/application/v4/tenant")) return tenants(request);
if (path.matches("/application/v4/tenant-pipeline")) return tenantPipelines();
if (path.matches("/application/v4/athensDomain")) return athenzDomains(request);
if (path.matches("/application/v4/property")) return properties();
if (path.matches("/application/v4/cookiefreshness")) return cookieFreshness(request);
if (path.matches("/application/v4/tenant/{tenant}")) return tenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/application")) return applications(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return application(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/logs")) return logs(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request.getUri().getQuery());
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job")) return JobControllerApiHandlerHelper.jobTypeResponse(controller, appIdFromPath(path), request.getUri());
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return JobControllerApiHandlerHelper.runResponse(controller.jobController().runs(appIdFromPath(path), jobTypeFromPath(path)), request.getUri());
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/run/{number}")) return JobControllerApiHandlerHelper.runDetailsResponse(controller.jobController(), runIdFromPath(path), request.getProperty("after"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deployment(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service")) return services(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service/{service}/{*}")) return service(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.get("service"), path.getRest(), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation")) return rotationStatus(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return getGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
return ErrorResponse.notFoundError("Nothing at " + path);
}
private HttpResponse handlePUT(HttpRequest request) {
Path path = new Path(request.getUri().getPath());
if (path.matches("/application/v4/user")) return createUser(request);
if (path.matches("/application/v4/tenant/{tenant}")) return updateTenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override"))
return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false, request);
return ErrorResponse.notFoundError("Nothing at " + path);
}
private HttpResponse handlePOST(HttpRequest request) {
Path path = new Path(request.getUri().getPath());
if (path.matches("/application/v4/tenant/{tenant}")) return createTenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return createApplication(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/promote")) return promoteApplication(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return deploy(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/jobreport")) return notifyJobCompletion(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/submit")) return submit(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/deploy")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/restart")) return restart(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/promote")) return promoteApplicationDeployment(path.get("tenant"), path.get("application"), path.get("environment"), path.get("region"), path.get("instance"), request);
return ErrorResponse.notFoundError("Nothing at " + path);
}
private HttpResponse handleDELETE(HttpRequest request) {
Path path = new Path(request.getUri().getPath());
if (path.matches("/application/v4/tenant/{tenant}")) return deleteTenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return deleteApplication(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return cancelDeploy(path.get("tenant"), path.get("application"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deactivate(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override"))
return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true, request);
return ErrorResponse.notFoundError("Nothing at " + path);
}
private HttpResponse handleOPTIONS() {
EmptyJsonResponse response = new EmptyJsonResponse();
response.headers().put("Allow", "GET,PUT,POST,DELETE,OPTIONS");
return response;
}
private HttpResponse recursiveRoot(HttpRequest request) {
Slime slime = new Slime();
Cursor tenantArray = slime.setArray();
for (Tenant tenant : controller.tenants().asList())
toSlime(tenantArray.addObject(), tenant, request, true);
return new SlimeJsonResponse(slime);
}
private HttpResponse root(HttpRequest request) {
return recurseOverTenants(request)
? recursiveRoot(request)
: new ResourceResponse(request, "user", "tenant", "tenant-pipeline", "athensDomain", "property", "cookiefreshness");
}
private HttpResponse authenticatedUser(HttpRequest request) {
String userIdString = request.getProperty("userOverride");
if (userIdString == null)
userIdString = getUserId(request)
.map(UserId::id)
.orElseThrow(() -> new ForbiddenException("You must be authenticated or specify userOverride"));
UserId userId = new UserId(userIdString);
List<Tenant> tenants = controller.tenants().asList(userId);
Slime slime = new Slime();
Cursor response = slime.setObject();
response.setString("user", userId.id());
Cursor tenantsArray = response.setArray("tenants");
for (Tenant tenant : tenants)
tenantInTenantsListToSlime(tenant, request.getUri(), tenantsArray.addObject());
response.setBool("tenantExists", tenants.stream().anyMatch(tenant -> tenant instanceof UserTenant &&
((UserTenant) tenant).is(userId.id())));
return new SlimeJsonResponse(slime);
}
private HttpResponse tenants(HttpRequest request) {
Slime slime = new Slime();
Cursor response = slime.setArray();
for (Tenant tenant : controller.tenants().asList())
tenantInTenantsListToSlime(tenant, request.getUri(), response.addObject());
return new SlimeJsonResponse(slime);
}
/** Lists the screwdriver project id for each application */
private HttpResponse tenantPipelines() {
Slime slime = new Slime();
Cursor response = slime.setObject();
Cursor pipelinesArray = response.setArray("tenantPipelines");
for (Application application : controller.applications().asList()) {
if ( ! application.deploymentJobs().projectId().isPresent()) continue;
Cursor pipelineObject = pipelinesArray.addObject();
pipelineObject.setString("screwdriverId", String.valueOf(application.deploymentJobs().projectId().getAsLong()));
pipelineObject.setString("tenant", application.id().tenant().value());
pipelineObject.setString("application", application.id().application().value());
pipelineObject.setString("instance", application.id().instance().value());
}
response.setArray("brokenTenantPipelines");
return new SlimeJsonResponse(slime);
}
private HttpResponse athenzDomains(HttpRequest request) {
Slime slime = new Slime();
Cursor response = slime.setObject();
Cursor array = response.setArray("data");
for (AthenzDomain athenzDomain : controller.getDomainList(request.getProperty("prefix"))) {
array.addString(athenzDomain.getName());
}
return new SlimeJsonResponse(slime);
}
private HttpResponse properties() {
Slime slime = new Slime();
Cursor response = slime.setObject();
Cursor array = response.setArray("properties");
for (Map.Entry<PropertyId, Property> entry : controller.fetchPropertyList().entrySet()) {
Cursor propertyObject = array.addObject();
propertyObject.setString("propertyid", entry.getKey().id());
propertyObject.setString("property", entry.getValue().id());
}
return new SlimeJsonResponse(slime);
}
private HttpResponse cookieFreshness(HttpRequest request) {
Slime slime = new Slime();
String passThruHeader = request.getHeader(SetBouncerPassthruHeaderFilter.BOUNCER_PASSTHRU_HEADER_FIELD);
slime.setObject().setBool("shouldRefreshCookie",
! SetBouncerPassthruHeaderFilter.BOUNCER_PASSTHRU_COOKIE_OK.equals(passThruHeader));
return new SlimeJsonResponse(slime);
}
private HttpResponse tenant(String tenantName, HttpRequest request) {
return controller.tenants().tenant(TenantName.from(tenantName))
.map(tenant -> tenant(tenant, request, true))
.orElseGet(() -> ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist"));
}
private HttpResponse tenant(Tenant tenant, HttpRequest request, boolean listApplications) {
Slime slime = new Slime();
toSlime(slime.setObject(), tenant, request, listApplications);
return new SlimeJsonResponse(slime);
}
private HttpResponse applications(String tenantName, HttpRequest request) {
TenantName tenant = TenantName.from(tenantName);
Slime slime = new Slime();
Cursor array = slime.setArray();
for (Application application : controller.applications().asList(tenant))
toSlime(application, array.addObject(), request);
return new SlimeJsonResponse(slime);
}
private HttpResponse application(String tenantName, String applicationName, HttpRequest request) {
ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, "default");
Application application =
controller.applications().get(applicationId)
.orElseThrow(() -> new NotExistsException(applicationId + " not found"));
Slime slime = new Slime();
toSlime(slime.setObject(), application, request);
return new SlimeJsonResponse(slime);
}
private HttpResponse logs(String tenantName, String applicationName, String instanceName, String environment, String region, String query) {
ApplicationId application = ApplicationId.from(tenantName, applicationName, instanceName);
ZoneId zone = ZoneId.from(environment, region);
DeploymentId deployment = new DeploymentId(application, zone);
HashMap<String, String> queryParameters = getParameters(query);
Optional<Logs> response = controller.configServer().getLogs(deployment, queryParameters);
Slime slime = new Slime();
Cursor object = slime.setObject();
if (response.isPresent()) {
response.get().logs().entrySet().stream().forEach(entry -> object.setString(entry.getKey(), entry.getValue()));
}
return new SlimeJsonResponse(slime);
}
private HashMap<String, String> getParameters(String query) {
HashMap<String, String> keyValPair = new HashMap<>();
Arrays.stream(query.split("&")).forEach(pair -> {
String[] splitPair = pair.split("=");
keyValPair.put(splitPair[0], splitPair[1]);
});
return keyValPair;
}
private void toSlime(Cursor object, Application application, HttpRequest request) {
object.setString("application", application.id().application().value());
object.setString("instance", application.id().instance().value());
object.setString("deployments", withPath("/application/v4" +
"/tenant/" + application.id().tenant().value() +
"/application/" + application.id().application().value() +
"/instance/" + application.id().instance().value() + "/job/",
request.getUri()).toString());
if (application.change().isPresent()) {
toSlime(object.setObject("deploying"), application.change());
}
if (application.outstandingChange().isPresent()) {
toSlime(object.setObject("outstandingChange"), application.outstandingChange());
}
List<JobStatus> jobStatus = controller.applications().deploymentTrigger()
.steps(application.deploymentSpec())
.sortedJobs(application.deploymentJobs().jobStatus().values());
Cursor deploymentsArray = object.setArray("deploymentJobs");
for (JobStatus job : jobStatus) {
Cursor jobObject = deploymentsArray.addObject();
jobObject.setString("type", job.type().jobName());
jobObject.setBool("success", job.isSuccess());
job.lastTriggered().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastTriggered")));
job.lastCompleted().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastCompleted")));
job.firstFailing().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("firstFailing")));
job.lastSuccess().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastSuccess")));
}
Cursor changeBlockers = object.setArray("changeBlockers");
application.deploymentSpec().changeBlocker().forEach(changeBlocker -> {
Cursor changeBlockerObject = changeBlockers.addObject();
changeBlockerObject.setBool("versions", changeBlocker.blocksVersions());
changeBlockerObject.setBool("revisions", changeBlocker.blocksRevisions());
changeBlockerObject.setString("timeZone", changeBlocker.window().zone().getId());
Cursor days = changeBlockerObject.setArray("days");
changeBlocker.window().days().stream().map(DayOfWeek::getValue).forEach(days::addLong);
Cursor hours = changeBlockerObject.setArray("hours");
changeBlocker.window().hours().forEach(hours::addLong);
});
object.setString("compileVersion", application.oldestDeployedPlatform().orElse(controller.systemVersion()).toFullString());
Cursor globalRotationsArray = object.setArray("globalRotations");
application.rotation().ifPresent(rotation -> {
globalRotationsArray.addString(rotation.url().toString());
globalRotationsArray.addString(rotation.secureUrl().toString());
object.setString("rotationId", rotation.id().asString());
});
List<Deployment> deployments = controller.applications().deploymentTrigger()
.steps(application.deploymentSpec())
.sortedDeployments(application.deployments().values());
Cursor instancesArray = object.setArray("instances");
for (Deployment deployment : deployments) {
Cursor deploymentObject = instancesArray.addObject();
deploymentObject.setString("environment", deployment.zone().environment().value());
deploymentObject.setString("region", deployment.zone().region().value());
deploymentObject.setString("instance", application.id().instance().value());
controller.applications().rotationRepository().getRotation(application).ifPresent(rotation -> {
Map<String, RotationStatus> rotationHealthStatus = controller.rotationStatus(rotation);
setRotationStatus(deployment, rotationHealthStatus, deploymentObject);
});
if (recurseOverDeployments(request))
toSlime(deploymentObject, new DeploymentId(application.id(), deployment.zone()), deployment, request);
else
deploymentObject.setString("url", withPath(request.getUri().getPath() +
"/environment/" + deployment.zone().environment().value() +
"/region/" + deployment.zone().region().value() +
"/instance/" + application.id().instance().value(),
request.getUri()).toString());
}
Cursor metricsObject = object.setObject("metrics");
metricsObject.setDouble("queryServiceQuality", application.metrics().queryServiceQuality());
metricsObject.setDouble("writeServiceQuality", application.metrics().writeServiceQuality());
Cursor activity = object.setObject("activity");
application.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried", instant.toEpochMilli()));
application.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten", instant.toEpochMilli()));
application.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
application.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
application.ownershipIssueId().ifPresent(issueId -> object.setString("ownershipIssueId", issueId.value()));
application.deploymentJobs().issueId().ifPresent(issueId -> object.setString("deploymentIssueId", issueId.value()));
}
private HttpResponse deployment(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
Application application = controller.applications().get(id)
.orElseThrow(() -> new NotExistsException(id + " not found"));
DeploymentId deploymentId = new DeploymentId(application.id(),
ZoneId.from(environment, region));
Deployment deployment = application.deployments().get(deploymentId.zoneId());
if (deployment == null)
throw new NotExistsException(application + " is not deployed in " + deploymentId.zoneId());
Slime slime = new Slime();
toSlime(slime.setObject(), deploymentId, deployment, request);
return new SlimeJsonResponse(slime);
}
private void toSlime(Cursor object, Change change) {
change.platform().ifPresent(version -> object.setString("version", version.toString()));
change.application()
.filter(version -> !version.isUnknown())
.ifPresent(version -> toSlime(version, object.setObject("revision")));
}
private void toSlime(Cursor response, DeploymentId deploymentId, Deployment deployment, HttpRequest request) {
Cursor serviceUrlArray = response.setArray("serviceUrls");
controller.applications().getDeploymentEndpoints(deploymentId)
.ifPresent(endpoints -> endpoints.forEach(endpoint -> serviceUrlArray.addString(endpoint.toString())));
response.setString("nodes", withPath("/zone/v2/" + deploymentId.zoneId().environment() + "/" + deploymentId.zoneId().region() + "/nodes/v2/node/?&recursive=true&application=" + deploymentId.applicationId().tenant() + "." + deploymentId.applicationId().application() + "." + deploymentId.applicationId().instance(), request.getUri()).toString());
controller.zoneRegistry().getLogServerUri(deploymentId)
.ifPresent(elkUrl -> response.setString("elkUrl", elkUrl.toString()));
response.setString("yamasUrl", monitoringSystemUri(deploymentId).toString());
response.setString("version", deployment.version().toFullString());
response.setString("revision", deployment.applicationVersion().id());
response.setLong("deployTimeEpochMs", deployment.at().toEpochMilli());
controller.zoneRegistry().getDeploymentTimeToLive(deploymentId.zoneId())
.ifPresent(deploymentTimeToLive -> response.setLong("expiryTimeEpochMs", deployment.at().plus(deploymentTimeToLive).toEpochMilli()));
controller.applications().require(deploymentId.applicationId()).deploymentJobs().projectId()
.ifPresent(i -> response.setString("screwdriverId", String.valueOf(i)));
sourceRevisionToSlime(deployment.applicationVersion().source(), response);
Cursor activity = response.setObject("activity");
deployment.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried",
instant.toEpochMilli()));
deployment.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten",
instant.toEpochMilli()));
deployment.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
deployment.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
DeploymentCost appCost = deployment.calculateCost();
Cursor costObject = response.setObject("cost");
toSlime(appCost, costObject);
DeploymentMetrics metrics = deployment.metrics();
Cursor metricsObject = response.setObject("metrics");
metricsObject.setDouble("queriesPerSecond", metrics.queriesPerSecond());
metricsObject.setDouble("writesPerSecond", metrics.writesPerSecond());
metricsObject.setDouble("documentCount", metrics.documentCount());
metricsObject.setDouble("queryLatencyMillis", metrics.queryLatencyMillis());
metricsObject.setDouble("writeLatencyMillis", metrics.writeLatencyMillis());
}
private void toSlime(ApplicationVersion applicationVersion, Cursor object) {
if (!applicationVersion.isUnknown()) {
object.setString("hash", applicationVersion.id());
sourceRevisionToSlime(applicationVersion.source(), object.setObject("source"));
}
}
private void sourceRevisionToSlime(Optional<SourceRevision> revision, Cursor object) {
if ( ! revision.isPresent()) return;
object.setString("gitRepository", revision.get().repository());
object.setString("gitBranch", revision.get().branch());
object.setString("gitCommit", revision.get().commit());
}
private URI monitoringSystemUri(DeploymentId deploymentId) {
return controller.zoneRegistry().getMonitoringSystemUri(deploymentId);
}
private HttpResponse setGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region, boolean inService, HttpRequest request) {
Optional<Tenant> existingTenant = controller.tenants().tenant(tenantName);
if (!existingTenant.isPresent())
return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
Inspector requestData = toSlime(request.getData()).get();
String reason = mandatory("reason", requestData).asString();
String agent = getUserPrincipal(request).getIdentity().getFullName();
long timestamp = controller.clock().instant().getEpochSecond();
EndpointStatus.Status status = inService ? EndpointStatus.Status.in : EndpointStatus.Status.out;
EndpointStatus endPointStatus = new EndpointStatus(status, reason, agent, timestamp);
DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
ZoneId.from(environment, region));
try {
List<String> rotations = controller.applications().setGlobalRotationStatus(deploymentId, endPointStatus);
return new MessageResponse(String.format("Rotations %s successfully set to %s service", rotations.toString(), inService ? "in" : "out of"));
} catch (IOException e) {
return ErrorResponse.internalServerError("Unable to alter rotation status: " + e.getMessage());
}
}
private HttpResponse getGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region) {
DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
ZoneId.from(environment, region));
Slime slime = new Slime();
Cursor c1 = slime.setObject().setArray("globalrotationoverride");
try {
Map<String, EndpointStatus> rotations = controller.applications().getGlobalRotationStatus(deploymentId);
for (String rotation : rotations.keySet()) {
EndpointStatus currentStatus = rotations.get(rotation);
c1.addString(rotation);
Cursor c2 = c1.addObject();
c2.setString("status", currentStatus.getStatus().name());
c2.setString("reason", currentStatus.getReason() == null ? "" : currentStatus.getReason());
c2.setString("agent", currentStatus.getAgent() == null ? "" : currentStatus.getAgent());
c2.setLong("timestamp", currentStatus.getEpoch());
}
} catch (IOException e) {
return ErrorResponse.internalServerError("Unable to get rotation status: " + e.getMessage());
}
return new SlimeJsonResponse(slime);
}
private HttpResponse rotationStatus(String tenantName, String applicationName, String instanceName, String environment, String region) {
ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
Application application = controller.applications().require(applicationId);
if (!application.rotation().isPresent()) {
throw new NotExistsException("global rotation does not exist for '" + environment + "." + region + "'");
}
Slime slime = new Slime();
Cursor response = slime.setObject();
Map<String, RotationStatus> rotationStatus = controller.applications().rotationRepository()
.getRotation(application)
.map(controller::rotationStatus)
.orElseGet(Collections::emptyMap);
for (String rotationEndpoint : rotationStatus.keySet()) {
if (rotationEndpoint.contains(toDns(environment)) && rotationEndpoint.contains(toDns(region))) {
Cursor bcpStatusObject = response.setObject("bcpStatus");
bcpStatusObject.setString("rotationStatus", rotationStatus.getOrDefault(rotationEndpoint, RotationStatus.UNKNOWN).name());
}
}
return new SlimeJsonResponse(slime);
}
private HttpResponse service(String tenantName, String applicationName, String instanceName, String environment, String region, String serviceName, String restPath, HttpRequest request) {
Map<?,?> result = controller.getServiceApiResponse(tenantName, applicationName, instanceName, environment, region, serviceName, restPath);
ServiceApiResponse response = new ServiceApiResponse(ZoneId.from(environment, region),
new ApplicationId.Builder().tenant(tenantName).applicationName(applicationName).instanceName(instanceName).build(),
controller.zoneRegistry().getConfigServerApiUris(ZoneId.from(environment, region)),
request.getUri());
response.setResponse(result, serviceName, restPath);
return response;
}
private HttpResponse createUser(HttpRequest request) {
Optional<UserId> user = getUserId(request);
if ( ! user.isPresent() ) throw new ForbiddenException("Not authenticated or not an user.");
String username = UserTenant.normalizeUser(user.get().id());
try {
controller.tenants().create(UserTenant.create(username));
return new MessageResponse("Created user '" + username + "'");
} catch (AlreadyExistsException e) {
return new MessageResponse("User '" + username + "' already exists");
}
}
private HttpResponse updateTenant(String tenantName, HttpRequest request) {
Optional<AthenzTenant> tenant = controller.tenants().athenzTenant(TenantName.from(tenantName));
if ( ! tenant.isPresent()) return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
Inspector requestData = toSlime(request.getData()).get();
NToken token = requireNToken(request, "Could not update " + tenantName);
controller.tenants().lockOrThrow(tenant.get().name(), lockedTenant -> {
lockedTenant = lockedTenant.with(new Property(mandatory("property", requestData).asString()));
lockedTenant = controller.tenants().withDomain(
lockedTenant,
new AthenzDomain(mandatory("athensDomain", requestData).asString()),
token
);
Optional<PropertyId> propertyId = optional("propertyId", requestData).map(PropertyId::new);
if (propertyId.isPresent()) {
lockedTenant = lockedTenant.with(propertyId.get());
}
controller.tenants().store(lockedTenant);
});
return tenant(controller.tenants().requireAthenzTenant(tenant.get().name()), request, true);
}
private HttpResponse createTenant(String tenantName, HttpRequest request) {
Inspector requestData = toSlime(request.getData()).get();
AthenzTenant tenant = AthenzTenant.create(TenantName.from(tenantName),
new AthenzDomain(mandatory("athensDomain", requestData).asString()),
new Property(mandatory("property", requestData).asString()),
optional("propertyId", requestData).map(PropertyId::new));
throwIfNotAthenzDomainAdmin(tenant.domain(), request);
controller.tenants().create(tenant, requireNToken(request, "Could not create " + tenantName));
return tenant(tenant, request, true);
}
private HttpResponse createApplication(String tenantName, String applicationName, HttpRequest request) {
Application application;
try {
application = controller.applications().createApplication(ApplicationId.from(tenantName, applicationName, "default"), getUserPrincipal(request).getNToken());
}
catch (ZmsException e) {
if (e.getCode() == com.yahoo.jdisc.Response.Status.FORBIDDEN)
throw new ForbiddenException("Not authorized to create application", e);
else
throw e;
}
Slime slime = new Slime();
toSlime(application, slime.setObject(), request);
return new SlimeJsonResponse(slime);
}
/** Trigger deployment of the last built application package, on a given version */
private HttpResponse deploy(String tenantName, String applicationName, HttpRequest request) {
Version version = decideDeployVersion(request);
if ( ! systemHasVersion(version))
throw new IllegalArgumentException("Cannot trigger deployment of version '" + version + "': " +
"Version is not active in this system. " +
"Active versions: " + controller.versionStatus().versions()
.stream()
.map(VespaVersion::versionNumber)
.map(Version::toString)
.collect(Collectors.joining(", ")));
ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
controller.applications().lockOrThrow(id, application -> {
controller.applications().deploymentTrigger().triggerChange(application.get().id(),
application.get().change().with(version));
});
return new MessageResponse("Triggered deployment of application '" + id + "' on version " + version);
}
/** Cancel any ongoing change for given application */
private HttpResponse cancelDeploy(String tenantName, String applicationName) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    Application application = controller.applications().require(id);
    Change change = application.change();
    if ( ! change.isPresent())
        return new MessageResponse("No deployment in progress for " + application + " at this time");
    // NOTE(review): 'change' is read before the lock is taken, so the response message may describe
    // a change that was already replaced when cancellation ran — confirm this race is acceptable.
    controller.applications().lockOrThrow(id, lockedApplication ->
            controller.applications().deploymentTrigger().cancelChange(id, false));
    return new MessageResponse("Cancelled " + change + " for " + application);
}
/**
 * Schedule restart of deployment, or specific host in a deployment.
 * An optional 'hostname' request property restricts the restart to that single host.
 */
private HttpResponse restart(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 ZoneId.from(environment, region));
    Optional<Hostname> hostname = Optional.ofNullable(request.getProperty("hostname")).map(Hostname::new);
    controller.applications().restart(deploymentId, hostname);
    return new StringResponse("Requested restart of " + path(TenantResource.API_PATH, tenantName,
                                                             ApplicationResource.API_PATH, applicationName,
                                                             EnvironmentResource.API_PATH, environment,
                                                             "region", region,
                                                             "instance", instanceName));
}
/**
 * Deploys an application package to the given zone.
 * The request is multipart: 'deployOptions' (json) is mandatory; 'applicationZip' is optional
 * (absent when deploying an already-stored package, e.g. with deployCurrentVersion).
 */
private HttpResponse deploy(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = ZoneId.from(environment, region);
    Map<String, byte[]> dataParts = new MultipartParser().parse(request);
    if ( ! dataParts.containsKey("deployOptions"))
        return ErrorResponse.badRequest("Missing required form part 'deployOptions'");
    Inspector deployOptions = SlimeUtils.jsonToSlime(dataParts.get("deployOptions")).get();
    Optional<ApplicationPackage> applicationPackage = Optional.ofNullable(dataParts.get("applicationZip"))
                                                              .map(ApplicationPackage::new);
    // Reject packages whose deployment.xml Athenz domain does not match the tenant's domain
    verifyApplicationIdentityConfiguration(tenantName, applicationPackage);
    DeployOptions deployOptionsJsonClass = new DeployOptions(deployOptions.field("deployDirectly").asBool(),
                                                             optional("vespaVersion", deployOptions).map(Version::new),
                                                             deployOptions.field("ignoreValidationErrors").asBool(),
                                                             deployOptions.field("deployCurrentVersion").asBool());
    ActivateResult result = controller.applications().deploy(applicationId,
                                                             zone,
                                                             applicationPackage,
                                                             deployOptionsJsonClass);
    return new SlimeJsonResponse(toSlime(result));
}
/**
 * Fails with 403 if the package's deployment.xml declares an Athenz domain that differs from
 * the owning tenant's domain. Packages without a declared domain are accepted unchecked.
 */
private void verifyApplicationIdentityConfiguration(String tenantName, Optional<ApplicationPackage> applicationPackage) {
    applicationPackage.map(ApplicationPackage::deploymentSpec)
                      .flatMap(DeploymentSpec::athenzDomain)
                      .ifPresent(identityDomain -> {
                          AthenzTenant tenant = controller.tenants().athenzTenant(TenantName.from(tenantName))
                                                          .orElseThrow(() -> new IllegalArgumentException("Tenant does not exist"));
                          String tenantDomainName = tenant.domain().getName();
                          if ( ! Objects.equals(tenantDomainName, identityDomain.value()))
                              throw new ForbiddenException(String.format(
                                      "Athenz domain in deployment.xml: [%s] must match tenant domain: [%s]",
                                      identityDomain.value(),
                                      tenantDomainName));
                      });
}
/**
 * Deletes the given tenant. Athenz tenants require an NToken on the request; user tenants do not.
 * Returns 404 if the tenant does not exist.
 */
private HttpResponse deleteTenant(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().tenant(tenantName);
    if ( ! tenant.isPresent()) return ErrorResponse.notFoundError("Could not delete tenant '" + tenantName + "': Tenant not found");
    if (tenant.get() instanceof AthenzTenant) {
        controller.tenants().deleteTenant((AthenzTenant) tenant.get(),
                                          requireNToken(request, "Could not delete " + tenantName));
    } else if (tenant.get() instanceof UserTenant) {
        controller.tenants().deleteTenant((UserTenant) tenant.get());
    } else {
        throw new IllegalArgumentException("Unknown tenant type:" + tenant.get().getClass().getSimpleName() +
                                           ", for " + tenant.get());
    }
    // Respond with the state of the tenant as it was before deletion
    return tenant(tenant.get(), request, false);
}
/** Deletes the default instance of the given application. */
private HttpResponse deleteApplication(String tenantName, String applicationName, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, "default");
    controller.applications().deleteApplication(applicationId, getUserPrincipal(request).getNToken());
    return new EmptyJsonResponse();
}
/** Deactivates (removes) the given deployment of an application instance. */
private HttpResponse deactivate(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().require(applicationId);
    controller.applications().deactivate(application.id(), ZoneId.from(environment, region));
    return new StringResponse("Deactivated " + path(TenantResource.API_PATH, tenantName,
                                                    ApplicationResource.API_PATH, applicationName,
                                                    EnvironmentResource.API_PATH, environment,
                                                    "region", region,
                                                    "instance", instanceName));
}
/**
 * Promote application Chef environments. To be used by component jobs only
 */
private HttpResponse promoteApplication(String tenantName, String applicationName, HttpRequest request) {
    try{
        ApplicationChefEnvironment chefEnvironment = new ApplicationChefEnvironment(controller.system());
        String sourceEnvironment = chefEnvironment.systemChefEnvironment();
        String targetEnvironment = chefEnvironment.applicationSourceEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName));
        controller.chefClient().copyChefEnvironment(sourceEnvironment, targetEnvironment);
        return new MessageResponse(String.format("Successfully copied environment %s to %s", sourceEnvironment, targetEnvironment));
    } catch (Exception e) {
        // Deliberately broad: Chef promotion is best-effort, so any failure is logged and
        // reported as a 500 rather than propagated to the generic handler
        log.log(LogLevel.ERROR, String.format("Error during Chef copy environment. (%s.%s)", tenantName, applicationName), e);
        return ErrorResponse.internalServerError("Unable to promote Chef environments for application");
    }
}
/**
 * Promote application Chef environments for jobs that deploy applications
 */
private HttpResponse promoteApplicationDeployment(String tenantName, String applicationName, String environmentName, String regionName, String instanceName, HttpRequest request) {
    try {
        ApplicationChefEnvironment chefEnvironment = new ApplicationChefEnvironment(controller.system());
        // Copies from the application's source environment to the zone-specific target environment
        String sourceEnvironment = chefEnvironment.applicationSourceEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName));
        String targetEnvironment = chefEnvironment.applicationTargetEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName), Environment.from(environmentName), RegionName.from(regionName));
        controller.chefClient().copyChefEnvironment(sourceEnvironment, targetEnvironment);
        return new MessageResponse(String.format("Successfully copied environment %s to %s", sourceEnvironment, targetEnvironment));
    } catch (Exception e) {
        // Broad catch is intentional: promotion failure should yield a 500, not an unhandled error
        log.log(LogLevel.ERROR, String.format("Error during Chef copy environment. (%s.%s %s.%s)", tenantName, applicationName, environmentName, regionName), e);
        return ErrorResponse.internalServerError("Unable to promote Chef environments for application");
    }
}
/** Receives a job-completion report from a build/deploy job and feeds it to the deployment trigger. */
private HttpResponse notifyJobCompletion(String tenant, String applicationName, HttpRequest request) {
    try {
        DeploymentJobs.JobReport report = toJobReport(tenant, applicationName, toSlime(request.getData()).get());
        controller.applications().deploymentTrigger().notifyOfCompletion(report);
        return new MessageResponse("ok");
    }
    catch (IllegalStateException e) {
        return ErrorResponse.badRequest(Exceptions.toMessageString(e));
    }
}
/** Builds a JobReport from the json payload of a job-completion notification. */
private static DeploymentJobs.JobReport toJobReport(String tenantName, String applicationName, Inspector report) {
    Optional<DeploymentJobs.JobError> jobError = report.field("jobError").valid()
            ? Optional.of(DeploymentJobs.JobError.valueOf(report.field("jobError").asString()))
            : Optional.empty();
    return new DeploymentJobs.JobReport(ApplicationId.from(tenantName, applicationName, report.field("instance").asString()),
                                        JobType.fromJobName(report.field("jobName").asString()),
                                        report.field("projectId").asLong(),
                                        report.field("buildNumber").asLong(),
                                        toSourceRevision(report.field("sourceRevision")),
                                        jobError);
}
/** Returns the source revision in the given json object, or empty unless all three fields are present. */
private static Optional<SourceRevision> toSourceRevision(Inspector object) {
    Inspector repository = object.field("repository");
    Inspector branch = object.field("branch");
    Inspector commit = object.field("commit");
    if (repository.valid() && branch.valid() && commit.valid())
        return Optional.of(new SourceRevision(repository.asString(), branch.asString(), commit.asString()));
    return Optional.empty();
}
/** Returns the named tenant, throwing NotExistsException if it does not exist. */
private Tenant getTenantOrThrow(String tenantName) {
    Optional<Tenant> tenant = controller.tenants().tenant(tenantName);
    if (tenant.isPresent())
        return tenant.get();
    throw new NotExistsException(new TenantId(tenantName));
}
/**
 * Serializes a tenant, optionally with its (default-instance) applications, into the given cursor.
 * NOTE: the two separate 'instanceof AthenzTenant' blocks are intentional — they preserve the
 * field order of the produced json (domain/property fields before "applications", contact fields after).
 */
private void toSlime(Cursor object, Tenant tenant, HttpRequest request, boolean listApplications) {
    object.setString("tenant", tenant.name().value());
    object.setString("type", tentantType(tenant));
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        object.setString("athensDomain", athenzTenant.domain().getName());
        object.setString("property", athenzTenant.property().id());
        athenzTenant.propertyId().ifPresent(id -> object.setString("propertyId", id.toString()));
    }
    Cursor applicationArray = object.setArray("applications");
    if (listApplications) {
        // Only default instances are listed; recursion depth is controlled by the 'recursive' property
        for (Application application : controller.applications().asList(tenant.name())) {
            if (application.id().instance().isDefault()) {
                if (recurseOverApplications(request))
                    toSlime(applicationArray.addObject(), application, request);
                else
                    toSlime(application, applicationArray.addObject(), request);
            }
        }
    }
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        athenzTenant.contact().ifPresent(c -> {
            object.setString("propertyUrl", c.propertyUrl().toString());
            object.setString("contactsUrl", c.url().toString());
            object.setString("issueCreationUrl", c.issueTrackerUrl().toString());
            Cursor contactsArray = object.setArray("contacts");
            c.persons().forEach(persons -> {
                Cursor personArray = contactsArray.addArray();
                persons.forEach(personArray::addString);
            });
        });
    }
}
/** Serializes the compact tenant representation used in tenant list responses. */
private void tenantInTenantsListToSlime(Tenant tenant, URI requestURI, Cursor object) {
    object.setString("tenant", tenant.name().value());
    Cursor meta = object.setObject("metaData");
    meta.setString("type", tentantType(tenant));
    if (tenant instanceof AthenzTenant) {
        AthenzTenant asAthenz = (AthenzTenant) tenant;
        meta.setString("athensDomain", asAthenz.domain().getName());
        meta.setString("property", asAthenz.property().id());
    }
    object.setString("url", withPath("/application/v4/tenant/" + tenant.name().value(), requestURI).toString());
}
/** Returns a copy of the given URI with the host and port from the given URI and the path set to the given path */
private URI withPath(String newPath, URI uri) {
    try {
        return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), newPath, null, null);
    }
    catch (URISyntaxException e) {
        // All components come from an already-valid URI, so this cannot trigger in practice
        throw new RuntimeException("Will not happen", e);
    }
}
/** Adds a "bcpStatus" object to prod deployments only; other environments are left untouched. */
private void setRotationStatus(Deployment deployment, Map<String, RotationStatus> healthStatus, Cursor object) {
    if (deployment.zone().environment().equals(Environment.prod)) {
        Cursor bcpStatus = object.setObject("bcpStatus");
        bcpStatus.setString("rotationStatus", findRotationStatus(deployment, healthStatus).name());
    }
}
/**
 * Returns the status of the first rotation endpoint whose name contains both the deployment's
 * environment and region (in dns form), or UNKNOWN if none matches.
 * Improvements over the previous version: the dns names are computed once instead of per
 * iteration, the map is traversed once via entrySet instead of keySet + a second lookup
 * (the getOrDefault on a key taken from keySet could never return the default), and a null
 * status value now maps to UNKNOWN instead of leaking null to callers that invoke .name().
 */
private RotationStatus findRotationStatus(Deployment deployment, Map<String, RotationStatus> healthStatus) {
    String environment = toDns(deployment.zone().environment().value());
    String region = toDns(deployment.zone().region().value());
    for (Map.Entry<String, RotationStatus> entry : healthStatus.entrySet()) {
        if (entry.getKey().contains(environment) && entry.getKey().contains(region))
            return entry.getValue() != null ? entry.getValue() : RotationStatus.UNKNOWN;
    }
    return RotationStatus.UNKNOWN;
}
/** Converts an identifier to its dns form by replacing underscores with dashes. */
private String toDns(String id) {
    return id.replace("_", "-");
}
/**
 * Parses the given string as a long, returning the given default when the string is null.
 *
 * @throws IllegalArgumentException if the string is non-null but not a valid integer
 */
private long asLong(String valueOrNull, long defaultWhenNull) {
    if (valueOrNull == null) return defaultWhenNull;
    try {
        return Long.parseLong(valueOrNull);
    }
    catch (NumberFormatException e) {
        // Chain the cause instead of dropping it, so the original parse failure is preserved
        throw new IllegalArgumentException("Expected an integer but got '" + valueOrNull + "'", e);
    }
}
/** Serializes one job run (id, platform version, optional application revision, reason, timestamp). */
private void toSlime(JobStatus.JobRun jobRun, Cursor object) {
    object.setLong("id", jobRun.id());
    object.setString("version", jobRun.platform().toFullString());
    // Unknown application versions (e.g. system-triggered runs) are omitted from the output
    if (!jobRun.application().isUnknown())
        toSlime(jobRun.application(), object.setObject("revision"));
    object.setString("reason", jobRun.reason());
    object.setLong("at", jobRun.at().toEpochMilli());
}
/**
 * Reads the given stream (at most 1 MB) and parses it as json into a Slime tree.
 *
 * @throws RuntimeException wrapping the IOException if reading fails
 */
private Slime toSlime(InputStream jsonStream) {
    try {
        byte[] jsonBytes = IOUtils.readBytes(jsonStream, 1000 * 1000);
        return SlimeUtils.jsonToSlime(jsonBytes);
    } catch (IOException e) {
        // Previously threw a bare RuntimeException with no message or cause, which made
        // read failures impossible to diagnose; keep the cause chained.
        throw new RuntimeException(e);
    }
}
/** Throws ForbiddenException unless the request's user is an admin of the given Athenz domain. */
private void throwIfNotAthenzDomainAdmin(AthenzDomain tenantDomain, HttpRequest request) {
    AthenzIdentity identity = getUserPrincipal(request).getIdentity();
    if (athenzClientFactory.createZmsClientWithServicePrincipal().isDomainAdmin(identity, tenantDomain))
        return;
    throw new ForbiddenException(
            String.format("The user '%s' is not admin in Athenz domain '%s'", identity.getFullName(), tenantDomain.getName()));
}
/** Returns the user id of the authenticated principal, or empty if the identity is not a user. */
private static Optional<UserId> getUserId(HttpRequest request) {
    AthenzIdentity identity = getUserPrincipal(request).getIdentity();
    return Optional.ofNullable(identity)
                   .filter(AthenzUser.class::isInstance)
                   .map(AthenzUser.class::cast)
                   .map(AthenzUser::getName)
                   .map(UserId::new);
}
/**
 * Returns the request's authenticated Athenz principal.
 * A missing or non-Athenz principal is a server-side wiring error, hence 500 rather than 401.
 */
private static AthenzPrincipal getUserPrincipal(HttpRequest request) {
    Principal principal = request.getJDiscRequest().getUserPrincipal();
    if (principal == null)
        throw new InternalServerErrorException("Expected a user principal");
    if (principal instanceof AthenzPrincipal)
        return (AthenzPrincipal) principal;
    throw new InternalServerErrorException(
            String.format("Expected principal of type %s, got %s",
                          AthenzPrincipal.class.getSimpleName(), principal.getClass().getName()));
}
/** Returns the named field of the given json object, throwing if it is missing. */
private Inspector mandatory(String key, Inspector object) {
    Inspector field = object.field(key);
    if ( ! field.valid())
        throw new IllegalArgumentException("'" + key + "' is missing");
    return field;
}
/** Returns the named field of the given json object as a string, or empty if missing/invalid. */
private Optional<String> optional(String key, Inspector object) {
    return SlimeUtils.optionalString(object.field(key));
}
/** Joins the string forms of the given elements with '/'. Uses Guava's Joiner, which throws NPE on null elements. */
private static String path(Object... elements) {
    return Joiner.on("/").join(elements);
}
/** Serializes the compact application representation (name, instance, url) used in list responses. */
private void toSlime(Application application, Cursor object, HttpRequest request) {
    object.setString("application", application.id().application().value());
    object.setString("instance", application.id().instance().value());
    object.setString("url", withPath("/application/v4/tenant/" + application.id().tenant().value() +
                                     "/application/" + application.id().application().value(), request.getUri()).toString());
}
/**
 * Serializes a deployment's ActivateResult: revision id, package size, prepare log messages,
 * and the config change actions (required restarts and refeeds) reported by the config server.
 */
private Slime toSlime(ActivateResult result) {
    Slime slime = new Slime();
    Cursor object = slime.setObject();
    object.setString("revisionId", result.revisionId().id());
    object.setLong("applicationZipSize", result.applicationZipSizeBytes());
    Cursor logArray = object.setArray("prepareMessages");
    // The log list may be null (no prepare output); configChangeActions is accessed unguarded —
    // NOTE(review): presumably it is always non-null in PrepareResponse; confirm.
    if (result.prepareResponse().log != null) {
        for (Log logMessage : result.prepareResponse().log) {
            Cursor logObject = logArray.addObject();
            logObject.setLong("time", logMessage.time);
            logObject.setString("level", logMessage.level);
            logObject.setString("message", logMessage.message);
        }
    }
    Cursor changeObject = object.setObject("configChangeActions");
    Cursor restartActionsArray = changeObject.setArray("restart");
    for (RestartAction restartAction : result.prepareResponse().configChangeActions.restartActions) {
        Cursor restartActionObject = restartActionsArray.addObject();
        restartActionObject.setString("clusterName", restartAction.clusterName);
        restartActionObject.setString("clusterType", restartAction.clusterType);
        restartActionObject.setString("serviceType", restartAction.serviceType);
        serviceInfosToSlime(restartAction.services, restartActionObject.setArray("services"));
        stringsToSlime(restartAction.messages, restartActionObject.setArray("messages"));
    }
    Cursor refeedActionsArray = changeObject.setArray("refeed");
    for (RefeedAction refeedAction : result.prepareResponse().configChangeActions.refeedActions) {
        Cursor refeedActionObject = refeedActionsArray.addObject();
        refeedActionObject.setString("name", refeedAction.name);
        refeedActionObject.setBool("allowed", refeedAction.allowed);
        refeedActionObject.setString("documentType", refeedAction.documentType);
        refeedActionObject.setString("clusterName", refeedAction.clusterName);
        serviceInfosToSlime(refeedAction.services, refeedActionObject.setArray("services"));
        stringsToSlime(refeedAction.messages, refeedActionObject.setArray("messages"));
    }
    return slime;
}
/** Appends one json object per service info (name, type, config id, host) to the given array. */
private void serviceInfosToSlime(List<ServiceInfo> serviceInfoList, Cursor array) {
    for (ServiceInfo service : serviceInfoList) {
        Cursor entry = array.addObject();
        entry.setString("serviceName", service.serviceName);
        entry.setString("serviceType", service.serviceType);
        entry.setString("configId", service.configId);
        entry.setString("hostName", service.hostName);
    }
}
/** Appends each string to the given json array. */
private void stringsToSlime(List<String> strings, Cursor array) {
    strings.forEach(array::addString);
}
/**
 * Reads the entire stream as one UTF-8 string, or returns null if the stream is empty.
 * The charset is now pinned to UTF-8: the previous no-charset Scanner constructor decoded with
 * the JVM's platform default, making request-body parsing platform dependent.
 */
private String readToString(InputStream stream) {
    Scanner scanner = new Scanner(stream, java.nio.charset.StandardCharsets.UTF_8.name()).useDelimiter("\\A");
    if ( ! scanner.hasNext()) return null;
    return scanner.next();
}
/** Returns whether the given version is among the versions currently active in this system. */
private boolean systemHasVersion(Version version) {
    for (VespaVersion v : controller.versionStatus().versions())
        if (v.versionNumber().equals(version))
            return true;
    return false;
}
/** Returns the version given in the request body, or the current system version if the body is empty. */
private Version decideDeployVersion(HttpRequest request) {
    String requestVersion = readToString(request.getData());
    return requestVersion == null ? controller.systemVersion() : new Version(requestVersion);
}
/** Serializes a deployment's cost summary and its per-cluster breakdown into the given cursor. */
public static void toSlime(DeploymentCost deploymentCost, Cursor object) {
    object.setLong("tco", (long) deploymentCost.getTco());
    object.setLong("waste", (long) deploymentCost.getWaste());
    object.setDouble("utilization", deploymentCost.getUtilization());
    Cursor clustersObject = object.setObject("cluster");
    deploymentCost.getCluster().forEach((clusterName, clusterCost) ->
            toSlime(clusterCost, clustersObject.setObject(clusterName)));
}
/**
 * Serializes one cluster's cost data: size, utilization, tco/waste, flavor info and per-host list.
 * Fix: 'tco' and 'waste' were narrowed with an (int) cast before being stored as longs, which
 * would overflow for values above Integer.MAX_VALUE and was inconsistent with the (long) casts
 * used by the DeploymentCost serializer above; both now cast to long.
 */
private static void toSlime(ClusterCost clusterCost, Cursor object) {
    object.setLong("count", clusterCost.getClusterInfo().getHostnames().size());
    object.setString("resource", getResourceName(clusterCost.getResultUtilization()));
    object.setDouble("utilization", clusterCost.getResultUtilization().getMaxUtilization());
    object.setLong("tco", (long) clusterCost.getTco());
    object.setLong("waste", (long) clusterCost.getWaste());
    object.setString("flavor", clusterCost.getClusterInfo().getFlavor());
    object.setDouble("flavorCost", clusterCost.getClusterInfo().getFlavorCost());
    object.setDouble("flavorCpu", clusterCost.getClusterInfo().getFlavorCPU());
    object.setDouble("flavorMem", clusterCost.getClusterInfo().getFlavorMem());
    object.setDouble("flavorDisk", clusterCost.getClusterInfo().getFlavorDisk());
    object.setString("type", clusterCost.getClusterInfo().getClusterType().name());
    Cursor utilObject = object.setObject("util");
    utilObject.setDouble("cpu", clusterCost.getResultUtilization().getCpu());
    utilObject.setDouble("mem", clusterCost.getResultUtilization().getMemory());
    utilObject.setDouble("disk", clusterCost.getResultUtilization().getDisk());
    utilObject.setDouble("diskBusy", clusterCost.getResultUtilization().getDiskBusy());
    Cursor usageObject = object.setObject("usage");
    usageObject.setDouble("cpu", clusterCost.getSystemUtilization().getCpu());
    usageObject.setDouble("mem", clusterCost.getSystemUtilization().getMemory());
    usageObject.setDouble("disk", clusterCost.getSystemUtilization().getDisk());
    usageObject.setDouble("diskBusy", clusterCost.getSystemUtilization().getDiskBusy());
    Cursor hostnamesArray = object.setArray("hostnames");
    for (String hostname : clusterCost.getClusterInfo().getHostnames())
        hostnamesArray.addString(hostname);
}
/**
 * Returns the name of the resource whose utilization is the maximum.
 * Checked in order mem, disk, diskbusy; defaults to "cpu" (ties resolve to the first match).
 */
private static String getResourceName(ClusterUtilization utilization) {
    double max = utilization.getMaxUtilization();
    if (utilization.getMemory() == max) return "mem";
    if (utilization.getDisk() == max) return "disk";
    if (utilization.getDiskBusy() == max) return "diskbusy";
    return "cpu";
}
/** Returns whether the 'recursive' property requests tenant-level (or deeper) recursion. */
private static boolean recurseOverTenants(HttpRequest request) {
    return "tenant".equals(request.getProperty("recursive")) || recurseOverApplications(request);
}
/** Returns whether the 'recursive' property requests application-level (or deeper) recursion. */
private static boolean recurseOverApplications(HttpRequest request) {
    return "application".equals(request.getProperty("recursive")) || recurseOverDeployments(request);
}
/** Returns whether the 'recursive' property requests deployment-level recursion ("all", "true" or "deployment"). */
private static boolean recurseOverDeployments(HttpRequest request) {
    String recursive = request.getProperty("recursive");
    return "all".equals(recursive) || "true".equals(recursive) || "deployment".equals(recursive);
}
/**
 * Returns the API string for the given tenant's type.
 * NOTE(review): the method name is misspelled ("tentantType" — should be "tenantType"); renaming
 * would require updating all in-file callers, so it is only flagged here.
 */
private static String tentantType(Tenant tenant) {
    if (tenant instanceof AthenzTenant) {
        return "ATHENS";
    } else if (tenant instanceof UserTenant) {
        return "USER";
    }
    throw new IllegalArgumentException("Unknown tenant type: " + tenant.getClass().getSimpleName());
}
/** Returns the request principal's NToken, or throws IllegalArgumentException with the given message prefix. */
private static NToken requireNToken(HttpRequest request, String message) {
    return getUserPrincipal(request).getNToken().orElseThrow(() -> new IllegalArgumentException(
            message + ": No NToken provided"));
}
/** Builds an ApplicationId from the {tenant}/{application}/{instance} path segments. */
private static ApplicationId appIdFromPath(Path path) {
    return ApplicationId.from(path.get("tenant"), path.get("application"), path.get("instance"));
}
/** Resolves the {jobtype} path segment to a JobType. */
private static JobType jobTypeFromPath(Path path) {
    return JobType.fromJobName(path.get("jobtype"));
}
/** Builds a RunId from the application, job type and {number} path segments. */
private static RunId runIdFromPath(Path path) {
    return new RunId(appIdFromPath(path), jobTypeFromPath(path), Long.parseLong(path.get("number")));
}
/**
 * Accepts a multipart application submission: submit options (json with the source revision),
 * the application package zip, and the test package zip. The package must declare an Athenz
 * service and its domain must match the tenant's domain.
 */
private HttpResponse submit(String tenant, String application, HttpRequest request) {
    Map<String, byte[]> dataParts = new MultipartParser().parse(request);
    Inspector submitOptions = SlimeUtils.jsonToSlime(dataParts.get(EnvironmentResource.SUBMIT_OPTIONS)).get();
    SourceRevision sourceRevision = toSourceRevision(submitOptions).orElseThrow(() ->
            new IllegalArgumentException("Must specify 'repository', 'branch', and 'commit'"));
    ApplicationPackage applicationPackage = new ApplicationPackage(dataParts.get(EnvironmentResource.APPLICATION_ZIP));
    if ( ! applicationPackage.deploymentSpec().athenzDomain().isPresent())
        throw new IllegalArgumentException("Application must define an Athenz service in deployment.xml!");
    verifyApplicationIdentityConfiguration(tenant, Optional.of(applicationPackage));
    return JobControllerApiHandlerHelper.submitResponse(controller.jobController(), tenant, application,
                                                        sourceRevision,
                                                        applicationPackage.zippedContent(),
                                                        dataParts.get(EnvironmentResource.APPLICATION_TEST_ZIP));
}
} | class ApplicationApiHandler extends LoggingRequestHandler {
// Core controller facade used by every endpoint in this handler
private final Controller controller;
// Factory for Athenz ZMS clients, used for authorization checks
private final AthenzClientFactory athenzClientFactory;
@Inject
public ApplicationApiHandler(LoggingRequestHandler.Context parentCtx,
                             Controller controller,
                             AthenzClientFactory athenzClientFactory) {
    super(parentCtx);
    this.controller = controller;
    this.athenzClientFactory = athenzClientFactory;
}
@Override
public Duration getTimeout() {
    // Generous timeout — presumably sized for slow operations such as deploy/submit; confirm before lowering
    return Duration.ofMinutes(20);
}
/**
 * Entry point: dispatches on HTTP method and maps the known exception types to the
 * corresponding HTTP error responses; unexpected runtime errors become logged 500s.
 */
@Override
public HttpResponse handle(HttpRequest request) {
    try {
        switch (request.getMethod()) {
            case GET: return handleGET(request);
            case PUT: return handlePUT(request);
            case POST: return handlePOST(request);
            case DELETE: return handleDELETE(request);
            case OPTIONS: return handleOPTIONS();
            default: return ErrorResponse.methodNotAllowed("Method '" + request.getMethod() + "' is not supported");
        }
    }
    catch (ForbiddenException e) {
        return ErrorResponse.forbidden(Exceptions.toMessageString(e));
    }
    catch (NotAuthorizedException e) {
        return ErrorResponse.unauthorized(Exceptions.toMessageString(e));
    }
    catch (NotExistsException e) {
        return ErrorResponse.notFoundError(Exceptions.toMessageString(e));
    }
    catch (IllegalArgumentException e) {
        return ErrorResponse.badRequest(Exceptions.toMessageString(e));
    }
    catch (ConfigServerException e) {
        return ErrorResponse.from(e);
    }
    catch (RuntimeException e) {
        // Last-resort handler: log with the request URI for diagnosis, respond 500
        log.log(Level.WARNING, "Unexpected error handling '" + request.getUri() + "'", e);
        return ErrorResponse.internalServerError(Exceptions.toMessageString(e));
    }
}
/** Routes GET requests. Patterns are matched top-down; the first match wins. */
private HttpResponse handleGET(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/")) return root(request);
    if (path.matches("/application/v4/user")) return authenticatedUser(request);
    if (path.matches("/application/v4/tenant")) return tenants(request);
    if (path.matches("/application/v4/tenant-pipeline")) return tenantPipelines();
    if (path.matches("/application/v4/athensDomain")) return athenzDomains(request);
    if (path.matches("/application/v4/property")) return properties();
    if (path.matches("/application/v4/cookiefreshness")) return cookieFreshness(request);
    if (path.matches("/application/v4/tenant/{tenant}")) return tenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application")) return applications(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return application(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/logs")) return logs(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request.getUri().getQuery());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job")) return JobControllerApiHandlerHelper.jobTypeResponse(controller, appIdFromPath(path), request.getUri());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return JobControllerApiHandlerHelper.runResponse(controller.jobController().runs(appIdFromPath(path), jobTypeFromPath(path)), request.getUri());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/run/{number}")) return JobControllerApiHandlerHelper.runDetailsResponse(controller.jobController(), runIdFromPath(path), request.getProperty("after"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deployment(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service")) return services(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service/{service}/{*}")) return service(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.get("service"), path.getRest(), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation")) return rotationStatus(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return getGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes PUT requests (user creation, tenant update, rotation override set). */
private HttpResponse handlePUT(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/user")) return createUser(request);
    if (path.matches("/application/v4/tenant/{tenant}")) return updateTenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override"))
        return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false, request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes POST requests (create, deploy, promote, submit, job report, restart). */
private HttpResponse handlePOST(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/tenant/{tenant}")) return createTenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return createApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/promote")) return promoteApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return deploy(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/jobreport")) return notifyJobCompletion(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/submit")) return submit(path.get("tenant"), path.get("application"), request);
    // The '/deploy' suffix and the bare instance path are equivalent deploy endpoints
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/deploy")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/restart")) return restart(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/promote")) return promoteApplicationDeployment(path.get("tenant"), path.get("application"), path.get("environment"), path.get("region"), path.get("instance"), request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes DELETE requests (tenant/application deletion, deploy cancellation, deactivation, rotation override clear). */
private HttpResponse handleDELETE(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/tenant/{tenant}")) return deleteTenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return deleteApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return cancelDeploy(path.get("tenant"), path.get("application"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deactivate(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override"))
        return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true, request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Responds to OPTIONS with the set of supported methods in the Allow header. */
private HttpResponse handleOPTIONS() {
    EmptyJsonResponse response = new EmptyJsonResponse();
    response.headers().put("Allow", "GET,PUT,POST,DELETE,OPTIONS");
    return response;
}
/** Renders every tenant, including its applications, into a single JSON array. */
private HttpResponse recursiveRoot(HttpRequest request) {
    Slime slime = new Slime();
    Cursor tenantArray = slime.setArray();
    controller.tenants().asList()
              .forEach(tenant -> toSlime(tenantArray.addObject(), tenant, request, true));
    return new SlimeJsonResponse(slime);
}
/** Top-level listing: recursive tenant dump when requested, otherwise the available sub-resources. */
private HttpResponse root(HttpRequest request) {
    if (recurseOverTenants(request))
        return recursiveRoot(request);
    return new ResourceResponse(request, "user", "tenant", "tenant-pipeline", "athensDomain", "property", "cookiefreshness");
}
/**
 * Returns information about the calling user: the user id, the tenants the user
 * belongs to, and whether a personal user tenant exists for the user.
 * The user may be overridden with the "userOverride" request property.
 */
private HttpResponse authenticatedUser(HttpRequest request) {
    String overriddenUser = request.getProperty("userOverride");
    UserId userId = new UserId(overriddenUser != null
                               ? overriddenUser
                               : getUserId(request)
                                         .map(UserId::id)
                                         .orElseThrow(() -> new ForbiddenException("You must be authenticated or specify userOverride")));
    List<Tenant> tenants = controller.tenants().asList(userId);
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    root.setString("user", userId.id());
    Cursor tenantsArray = root.setArray("tenants");
    for (Tenant tenant : tenants)
        tenantInTenantsListToSlime(tenant, request.getUri(), tenantsArray.addObject());
    // True when one of the listed tenants is this user's personal tenant
    boolean userTenantExists = tenants.stream()
                                      .anyMatch(tenant -> tenant instanceof UserTenant
                                                          && ((UserTenant) tenant).is(userId.id()));
    root.setBool("tenantExists", userTenantExists);
    return new SlimeJsonResponse(slime);
}
/** Lists every tenant, in compact form, as a JSON array. */
private HttpResponse tenants(HttpRequest request) {
    Slime slime = new Slime();
    Cursor tenantArray = slime.setArray();
    controller.tenants().asList()
              .forEach(tenant -> tenantInTenantsListToSlime(tenant, request.getUri(), tenantArray.addObject()));
    return new SlimeJsonResponse(slime);
}
/** Lists the screwdriver project id for each application which has one. */
private HttpResponse tenantPipelines() {
    Slime slime = new Slime();
    Cursor response = slime.setObject();
    Cursor pipelinesArray = response.setArray("tenantPipelines");
    for (Application application : controller.applications().asList()) {
        // Applications without a CI project id are skipped
        application.deploymentJobs().projectId().ifPresent(projectId -> {
            Cursor pipelineObject = pipelinesArray.addObject();
            pipelineObject.setString("screwdriverId", String.valueOf(projectId));
            pipelineObject.setString("tenant", application.id().tenant().value());
            pipelineObject.setString("application", application.id().application().value());
            pipelineObject.setString("instance", application.id().instance().value());
        });
    }
    // Always present but empty — kept for response-format compatibility
    response.setArray("brokenTenantPipelines");
    return new SlimeJsonResponse(slime);
}
/** Lists the names of all Athenz domains matching the optional "prefix" request property. */
private HttpResponse athenzDomains(HttpRequest request) {
    Slime slime = new Slime();
    Cursor domainArray = slime.setObject().setArray("data");
    controller.getDomainList(request.getProperty("prefix"))
              .forEach(domain -> domainArray.addString(domain.getName()));
    return new SlimeJsonResponse(slime);
}
/** Lists all known properties with their property ids. */
private HttpResponse properties() {
    Slime slime = new Slime();
    Cursor propertyArray = slime.setObject().setArray("properties");
    controller.fetchPropertyList().forEach((propertyId, property) -> {
        Cursor entry = propertyArray.addObject();
        entry.setString("propertyid", propertyId.id());
        entry.setString("property", property.id());
    });
    return new SlimeJsonResponse(slime);
}
/** Tells the client whether its Bouncer cookie should be refreshed, based on the pass-through header. */
private HttpResponse cookieFreshness(HttpRequest request) {
    String passThruHeader = request.getHeader(SetBouncerPassthruHeaderFilter.BOUNCER_PASSTHRU_HEADER_FIELD);
    // Refresh unless the filter marked the cookie as OK
    boolean cookieIsStale = ! SetBouncerPassthruHeaderFilter.BOUNCER_PASSTHRU_COOKIE_OK.equals(passThruHeader);
    Slime slime = new Slime();
    slime.setObject().setBool("shouldRefreshCookie", cookieIsStale);
    return new SlimeJsonResponse(slime);
}
/** Returns the named tenant with its applications, or 404 if it does not exist. */
private HttpResponse tenant(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().tenant(TenantName.from(tenantName));
    if ( ! tenant.isPresent())
        return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
    return tenant(tenant.get(), request, true);
}
/** Serializes the given tenant, optionally including its applications, as a JSON response. */
private HttpResponse tenant(Tenant tenant, HttpRequest request, boolean listApplications) {
    Slime slime = new Slime();
    Cursor tenantObject = slime.setObject();
    toSlime(tenantObject, tenant, request, listApplications);
    return new SlimeJsonResponse(slime);
}
/** Lists all applications of the given tenant as a JSON array. */
private HttpResponse applications(String tenantName, HttpRequest request) {
    Slime slime = new Slime();
    Cursor applicationArray = slime.setArray();
    controller.applications().asList(TenantName.from(tenantName))
              .forEach(application -> toSlime(application, applicationArray.addObject(), request));
    return new SlimeJsonResponse(slime);
}
/** Returns full details of the default instance of the given application, or 404 if not found. */
private HttpResponse application(String tenantName, String applicationName, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, "default");
    Optional<Application> application = controller.applications().get(applicationId);
    if ( ! application.isPresent())
        throw new NotExistsException(applicationId + " not found");
    Slime slime = new Slime();
    toSlime(slime.setObject(), application.get(), request);
    return new SlimeJsonResponse(slime);
}
/**
 * Fetches logs for a deployment from the config server and returns them as a flat
 * JSON object of log entries. An absent result yields an empty JSON object.
 *
 * @param query raw query string of the request, forwarded as log filter parameters
 */
private HttpResponse logs(String tenantName, String applicationName, String instanceName, String environment, String region, String query) {
    ApplicationId application = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = ZoneId.from(environment, region);
    DeploymentId deployment = new DeploymentId(application, zone);
    HashMap<String, String> queryParameters = getParameters(query);
    Optional<Logs> response = controller.configServer().getLogs(deployment, queryParameters);
    Slime slime = new Slime();
    Cursor object = slime.setObject();
    // Copy each log entry directly; replaces the former isPresent()/get() + entrySet().stream().forEach chain
    response.ifPresent(logs -> logs.logs().forEach(object::setString));
    return new SlimeJsonResponse(slime);
}
/**
 * Parses a raw query string of the form "key1=val1&key2=val2" into a map.
 *
 * Fixes over the previous version: a pair without '=' no longer throws
 * ArrayIndexOutOfBoundsException (it is skipped), and a value containing '='
 * (e.g. "from=a=b") is kept intact instead of being truncated at the second '='.
 * Made static (uses no instance state); access widened from private for testability.
 *
 * @param query the raw query string; pairs without a value are ignored
 * @return a mutable map of decoded-as-is key/value pairs
 */
static HashMap<String, String> getParameters(String query) {
    HashMap<String, String> keyValPair = new HashMap<>();
    for (String pair : query.split("&")) {
        // Limit 2: split only on the first '=' so values may themselves contain '='
        String[] parts = pair.split("=", 2);
        if (parts.length == 2)
            keyValPair.put(parts[0], parts[1]);
        // parts.length == 1 means a key with no value — skip rather than crash
    }
    return keyValPair;
}
/**
 * Serializes an application to the given Slime cursor: identity, job status,
 * change blockers, rotations, deployments, metrics, activity and issue references.
 *
 * @param object      the cursor to write the application JSON into
 * @param application the application to serialize
 * @param request     the incoming request, used to build absolute URLs
 */
private void toSlime(Cursor object, Application application, HttpRequest request) {
    object.setString("application", application.id().application().value());
    object.setString("instance", application.id().instance().value());
    // Link to this application's job listing
    object.setString("deployments", withPath("/application/v4" +
                                             "/tenant/" + application.id().tenant().value() +
                                             "/application/" + application.id().application().value() +
                                             "/instance/" + application.id().instance().value() + "/job/",
                                             request.getUri()).toString());
    // Change currently rolling out, if any
    if (application.change().isPresent()) {
        toSlime(object.setObject("deploying"), application.change());
    }
    // Change queued behind the current one, if any
    if (application.outstandingChange().isPresent()) {
        toSlime(object.setObject("outstandingChange"), application.outstandingChange());
    }
    // Deployment jobs, ordered according to the application's deployment spec
    List<JobStatus> jobStatus = controller.applications().deploymentTrigger()
            .steps(application.deploymentSpec())
            .sortedJobs(application.deploymentJobs().jobStatus().values());
    Cursor deploymentsArray = object.setArray("deploymentJobs");
    for (JobStatus job : jobStatus) {
        Cursor jobObject = deploymentsArray.addObject();
        jobObject.setString("type", job.type().jobName());
        jobObject.setBool("success", job.isSuccess());
        job.lastTriggered().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastTriggered")));
        job.lastCompleted().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastCompleted")));
        job.firstFailing().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("firstFailing")));
        job.lastSuccess().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastSuccess")));
    }
    // Time windows in which version/revision changes are blocked, from the deployment spec
    Cursor changeBlockers = object.setArray("changeBlockers");
    application.deploymentSpec().changeBlocker().forEach(changeBlocker -> {
        Cursor changeBlockerObject = changeBlockers.addObject();
        changeBlockerObject.setBool("versions", changeBlocker.blocksVersions());
        changeBlockerObject.setBool("revisions", changeBlocker.blocksRevisions());
        changeBlockerObject.setString("timeZone", changeBlocker.window().zone().getId());
        Cursor days = changeBlockerObject.setArray("days");
        changeBlocker.window().days().stream().map(DayOfWeek::getValue).forEach(days::addLong);
        Cursor hours = changeBlockerObject.setArray("hours");
        changeBlocker.window().hours().forEach(hours::addLong);
    });
    // Oldest deployed platform version, falling back to the current system version
    object.setString("compileVersion", application.oldestDeployedPlatform().orElse(controller.systemVersion()).toFullString());
    // Global rotation endpoints, when the application has a rotation assigned
    Cursor globalRotationsArray = object.setArray("globalRotations");
    application.rotation().ifPresent(rotation -> {
        globalRotationsArray.addString(rotation.url().toString());
        globalRotationsArray.addString(rotation.secureUrl().toString());
        object.setString("rotationId", rotation.id().asString());
    });
    // Deployments, ordered according to the application's deployment spec
    List<Deployment> deployments = controller.applications().deploymentTrigger()
            .steps(application.deploymentSpec())
            .sortedDeployments(application.deployments().values());
    Cursor instancesArray = object.setArray("instances");
    for (Deployment deployment : deployments) {
        Cursor deploymentObject = instancesArray.addObject();
        deploymentObject.setString("environment", deployment.zone().environment().value());
        deploymentObject.setString("region", deployment.zone().region().value());
        deploymentObject.setString("instance", application.id().instance().value());
        controller.applications().rotationRepository().getRotation(application).ifPresent(rotation -> {
            Map<String, RotationStatus> rotationHealthStatus = controller.rotationStatus(rotation);
            setRotationStatus(deployment, rotationHealthStatus, deploymentObject);
        });
        // Inline the full deployment, or just link to it, depending on the recursion request parameter
        if (recurseOverDeployments(request))
            toSlime(deploymentObject, new DeploymentId(application.id(), deployment.zone()), deployment, request);
        else
            deploymentObject.setString("url", withPath(request.getUri().getPath() +
                                                       "/environment/" + deployment.zone().environment().value() +
                                                       "/region/" + deployment.zone().region().value() +
                                                       "/instance/" + application.id().instance().value(),
                                                       request.getUri()).toString());
    }
    // Application-level quality metrics
    Cursor metricsObject = object.setObject("metrics");
    metricsObject.setDouble("queryServiceQuality", application.metrics().queryServiceQuality());
    metricsObject.setDouble("writeServiceQuality", application.metrics().writeServiceQuality());
    // Recent read/write activity; fields are present only when data exists
    Cursor activity = object.setObject("activity");
    application.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried", instant.toEpochMilli()));
    application.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten", instant.toEpochMilli()));
    application.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    application.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
    // References to tracked issues, when any exist
    application.ownershipIssueId().ifPresent(issueId -> object.setString("ownershipIssueId", issueId.value()));
    application.deploymentJobs().issueId().ifPresent(issueId -> object.setString("deploymentIssueId", issueId.value()));
}
/** Returns details for a single deployment of an application instance, or 404 if either is missing. */
private HttpResponse deployment(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().get(id)
                                        .orElseThrow(() -> new NotExistsException(id + " not found"));
    DeploymentId deploymentId = new DeploymentId(application.id(), ZoneId.from(environment, region));
    Deployment deployment = Optional.ofNullable(application.deployments().get(deploymentId.zoneId()))
            .orElseThrow(() -> new NotExistsException(application + " is not deployed in " + deploymentId.zoneId()));
    Slime responseSlime = new Slime();
    toSlime(responseSlime.setObject(), deploymentId, deployment, request);
    return new SlimeJsonResponse(responseSlime);
}
/** Serializes a change: its platform version, and its application revision when known. */
private void toSlime(Cursor object, Change change) {
    if (change.platform().isPresent())
        object.setString("version", change.platform().get().toString());
    // Unknown application versions are omitted from the output
    change.application()
          .filter(version -> ! version.isUnknown())
          .ifPresent(version -> toSlime(version, object.setObject("revision")));
}
/**
 * Serializes a single deployment: service endpoints, related URLs, versions,
 * timestamps, activity, cost and metrics.
 *
 * @param response     the cursor to write the deployment JSON into
 * @param deploymentId identifies the application instance and zone
 * @param deployment   the deployment data to serialize
 * @param request      the incoming request, used to build absolute URLs
 */
private void toSlime(Cursor response, DeploymentId deploymentId, Deployment deployment, HttpRequest request) {
    Cursor serviceUrlArray = response.setArray("serviceUrls");
    controller.applications().getDeploymentEndpoints(deploymentId)
              .ifPresent(endpoints -> endpoints.forEach(endpoint -> serviceUrlArray.addString(endpoint.toString())));
    // Link to the node repository listing for this deployment's nodes
    response.setString("nodes", withPath("/zone/v2/" + deploymentId.zoneId().environment() + "/" + deploymentId.zoneId().region() + "/nodes/v2/node/?&recursive=true&application=" + deploymentId.applicationId().tenant() + "." + deploymentId.applicationId().application() + "." + deploymentId.applicationId().instance(), request.getUri()).toString());
    controller.zoneRegistry().getLogServerUri(deploymentId)
              .ifPresent(elkUrl -> response.setString("elkUrl", elkUrl.toString()));
    response.setString("yamasUrl", monitoringSystemUri(deploymentId).toString());
    response.setString("version", deployment.version().toFullString());
    response.setString("revision", deployment.applicationVersion().id());
    response.setLong("deployTimeEpochMs", deployment.at().toEpochMilli());
    // Expiry is only emitted for zones with a configured deployment time-to-live
    controller.zoneRegistry().getDeploymentTimeToLive(deploymentId.zoneId())
              .ifPresent(deploymentTimeToLive -> response.setLong("expiryTimeEpochMs", deployment.at().plus(deploymentTimeToLive).toEpochMilli()));
    controller.applications().require(deploymentId.applicationId()).deploymentJobs().projectId()
              .ifPresent(i -> response.setString("screwdriverId", String.valueOf(i)));
    sourceRevisionToSlime(deployment.applicationVersion().source(), response);
    // Recent read/write activity; fields are present only when data exists
    Cursor activity = response.setObject("activity");
    deployment.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    deployment.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
    // Cost breakdown for this deployment
    DeploymentCost appCost = deployment.calculateCost();
    Cursor costObject = response.setObject("cost");
    toSlime(appCost, costObject);
    // Serving metrics for this deployment
    DeploymentMetrics metrics = deployment.metrics();
    Cursor metricsObject = response.setObject("metrics");
    metricsObject.setDouble("queriesPerSecond", metrics.queriesPerSecond());
    metricsObject.setDouble("writesPerSecond", metrics.writesPerSecond());
    metricsObject.setDouble("documentCount", metrics.documentCount());
    metricsObject.setDouble("queryLatencyMillis", metrics.queryLatencyMillis());
    metricsObject.setDouble("writeLatencyMillis", metrics.writeLatencyMillis());
}
/** Serializes an application version's hash and source revision; unknown versions produce no output. */
private void toSlime(ApplicationVersion applicationVersion, Cursor object) {
    if (applicationVersion.isUnknown()) return;
    object.setString("hash", applicationVersion.id());
    sourceRevisionToSlime(applicationVersion.source(), object.setObject("source"));
}
/** Writes the git repository, branch and commit of a source revision; writes nothing when absent. */
private void sourceRevisionToSlime(Optional<SourceRevision> revision, Cursor object) {
    revision.ifPresent(source -> {
        object.setString("gitRepository", source.repository());
        object.setString("gitBranch", source.branch());
        object.setString("gitCommit", source.commit());
    });
}
/** Returns the monitoring system URI for the given deployment (serialized as "yamasUrl"). */
private URI monitoringSystemUri(DeploymentId deploymentId) {
    return controller.zoneRegistry().getMonitoringSystemUri(deploymentId);
}
/**
 * Sets the global rotation override status for a deployment: takes the rotation
 * in or out of service, recording who did it and why.
 *
 * @param inService true to put the rotation back in service, false to take it out
 * @return a message response on success, 404 if the tenant is unknown,
 *         or 500 if the status could not be written
 */
private HttpResponse setGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region, boolean inService, HttpRequest request) {
    Optional<Tenant> existingTenant = controller.tenants().tenant(tenantName);
    if (!existingTenant.isPresent())
        return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
    Inspector requestData = toSlime(request.getData()).get();
    // "reason" is required in the request body; agent and timestamp are recorded for auditing
    String reason = mandatory("reason", requestData).asString();
    String agent = getUserPrincipal(request).getIdentity().getFullName();
    long timestamp = controller.clock().instant().getEpochSecond();
    EndpointStatus.Status status = inService ? EndpointStatus.Status.in : EndpointStatus.Status.out;
    EndpointStatus endPointStatus = new EndpointStatus(status, reason, agent, timestamp);
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 ZoneId.from(environment, region));
    try {
        List<String> rotations = controller.applications().setGlobalRotationStatus(deploymentId, endPointStatus);
        return new MessageResponse(String.format("Rotations %s successfully set to %s service", rotations.toString(), inService ? "in" : "out of"));
    } catch (IOException e) {
        return ErrorResponse.internalServerError("Unable to alter rotation status: " + e.getMessage());
    }
}
/**
 * Returns the global rotation override status for every rotation of the given
 * deployment. The response array alternates rotation name and status object
 * (format kept as-is for API compatibility).
 *
 * @return the status array, or 500 when the status cannot be fetched
 */
private HttpResponse getGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 ZoneId.from(environment, region));
    Slime slime = new Slime();
    Cursor array = slime.setObject().setArray("globalrotationoverride");
    try {
        // Iterate entries directly instead of keySet() + get(), avoiding one map lookup per rotation
        for (Map.Entry<String, EndpointStatus> rotation : controller.applications().getGlobalRotationStatus(deploymentId).entrySet()) {
            EndpointStatus currentStatus = rotation.getValue();
            array.addString(rotation.getKey());
            Cursor statusObject = array.addObject();
            statusObject.setString("status", currentStatus.getStatus().name());
            // Reason and agent may be unset; emit empty strings rather than nulls
            statusObject.setString("reason", currentStatus.getReason() == null ? "" : currentStatus.getReason());
            statusObject.setString("agent", currentStatus.getAgent() == null ? "" : currentStatus.getAgent());
            statusObject.setLong("timestamp", currentStatus.getEpoch());
        }
    } catch (IOException e) {
        return ErrorResponse.internalServerError("Unable to get rotation status: " + e.getMessage());
    }
    return new SlimeJsonResponse(slime);
}
/**
 * Returns the BCP rotation status for the rotation endpoint matching the given
 * environment and region, or 404 when the application has no global rotation.
 */
private HttpResponse rotationStatus(String tenantName, String applicationName, String instanceName, String environment, String region) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().require(applicationId);
    if (!application.rotation().isPresent()) {
        throw new NotExistsException("global rotation does not exist for '" + environment + "." + region + "'");
    }
    Slime slime = new Slime();
    Cursor response = slime.setObject();
    Map<String, RotationStatus> rotationStatus = controller.applications().rotationRepository()
            .getRotation(application)
            .map(controller::rotationStatus)
            .orElseGet(Collections::emptyMap);
    // Match endpoints against the DNS-normalized environment and region names.
    // NOTE(review): if several endpoints match, each iteration re-sets the single
    // "bcpStatus" object, so only the last match is reported — presumably at most
    // one endpoint matches per zone; confirm with the rotation naming scheme.
    for (String rotationEndpoint : rotationStatus.keySet()) {
        if (rotationEndpoint.contains(toDns(environment)) && rotationEndpoint.contains(toDns(region))) {
            Cursor bcpStatusObject = response.setObject("bcpStatus");
            bcpStatusObject.setString("rotationStatus", rotationStatus.getOrDefault(rotationEndpoint, RotationStatus.UNKNOWN).name());
        }
    }
    return new SlimeJsonResponse(slime);
}
/** Proxies a service API request for a deployment and wraps the result in a service API response. */
private HttpResponse service(String tenantName, String applicationName, String instanceName, String environment, String region, String serviceName, String restPath, HttpRequest request) {
    Map<?, ?> result = controller.getServiceApiResponse(tenantName, applicationName, instanceName, environment, region, serviceName, restPath);
    ZoneId zone = ZoneId.from(environment, region);
    ApplicationId applicationId = new ApplicationId.Builder().tenant(tenantName)
                                                             .applicationName(applicationName)
                                                             .instanceName(instanceName)
                                                             .build();
    ServiceApiResponse response = new ServiceApiResponse(zone,
                                                         applicationId,
                                                         controller.zoneRegistry().getConfigServerApiUris(zone),
                                                         request.getUri());
    response.setResponse(result, serviceName, restPath);
    return response;
}
/** Creates a user tenant for the authenticated caller; re-creating an existing user is benign. */
private HttpResponse createUser(HttpRequest request) {
    UserId user = getUserId(request)
            .orElseThrow(() -> new ForbiddenException("Not authenticated or not an user."));
    String username = UserTenant.normalizeUser(user.id());
    try {
        controller.tenants().create(UserTenant.create(username));
        return new MessageResponse("Created user '" + username + "'");
    } catch (AlreadyExistsException e) {
        // Idempotent: an already existing user tenant is reported, not treated as an error
        return new MessageResponse("User '" + username + "' already exists");
    }
}
/**
 * Updates an Athenz tenant's property, domain and (optionally) property id from
 * the request body, under the tenant lock, and returns the updated tenant.
 *
 * @return the updated tenant, or 404 when no Athenz tenant with the name exists
 */
private HttpResponse updateTenant(String tenantName, HttpRequest request) {
    Optional<AthenzTenant> tenant = controller.tenants().athenzTenant(TenantName.from(tenantName));
    if ( ! tenant.isPresent()) return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
    Inspector requestData = toSlime(request.getData()).get();
    NToken token = requireNToken(request, "Could not update " + tenantName);
    // All mutations happen on the locked tenant and are stored atomically at the end
    controller.tenants().lockOrThrow(tenant.get().name(), lockedTenant -> {
        lockedTenant = lockedTenant.with(new Property(mandatory("property", requestData).asString()));
        lockedTenant = controller.tenants().withDomain(
                lockedTenant,
                new AthenzDomain(mandatory("athensDomain", requestData).asString()),
                token
        );
        Optional<PropertyId> propertyId = optional("propertyId", requestData).map(PropertyId::new);
        if (propertyId.isPresent()) {
            lockedTenant = lockedTenant.with(propertyId.get());
        }
        controller.tenants().store(lockedTenant);
    });
    // Re-read so the response reflects the stored state
    return tenant(controller.tenants().requireAthenzTenant(tenant.get().name()), request, true);
}
/** Creates a new Athenz tenant from the request body; the caller must be an admin of the Athenz domain. */
private HttpResponse createTenant(String tenantName, HttpRequest request) {
    Inspector requestData = toSlime(request.getData()).get();
    AthenzDomain domain = new AthenzDomain(mandatory("athensDomain", requestData).asString());
    Property property = new Property(mandatory("property", requestData).asString());
    Optional<PropertyId> propertyId = optional("propertyId", requestData).map(PropertyId::new);
    AthenzTenant tenant = AthenzTenant.create(TenantName.from(tenantName), domain, property, propertyId);
    // Only admins of the tenant's Athenz domain may create the tenant
    throwIfNotAthenzDomainAdmin(domain, request);
    controller.tenants().create(tenant, requireNToken(request, "Could not create " + tenantName));
    return tenant(tenant, request, true);
}
/** Creates the default instance of a new application, translating Athenz denials into 403. */
private HttpResponse createApplication(String tenantName, String applicationName, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    Application application;
    try {
        application = controller.applications().createApplication(id, getUserPrincipal(request).getNToken());
    } catch (ZmsException e) {
        // Athenz said no: surface as HTTP 403, keep the cause for debugging
        if (e.getCode() == com.yahoo.jdisc.Response.Status.FORBIDDEN)
            throw new ForbiddenException("Not authorized to create application", e);
        throw e;
    }
    Slime slime = new Slime();
    toSlime(application, slime.setObject(), request);
    return new SlimeJsonResponse(slime);
}
/**
 * Trigger deployment of the last built application package, on a given version.
 * Rejects versions which are not active in this system, listing the active ones.
 */
private HttpResponse deploy(String tenantName, String applicationName, HttpRequest request) {
    Version version = decideDeployVersion(request);
    if ( ! systemHasVersion(version))
        throw new IllegalArgumentException("Cannot trigger deployment of version '" + version + "': " +
                                           "Version is not active in this system. " +
                                           "Active versions: " + controller.versionStatus().versions()
                                                                           .stream()
                                                                           .map(VespaVersion::versionNumber)
                                                                           .map(Version::toString)
                                                                           .collect(Collectors.joining(", ")));
    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    // Record the version as (part of) the application's current change, under the application lock
    controller.applications().lockOrThrow(id, application -> {
        controller.applications().deploymentTrigger().triggerChange(application.get().id(),
                                                                    application.get().change().with(version));
    });
    return new MessageResponse("Triggered deployment of application '" + id + "' on version " + version);
}
/** Cancel any ongoing change for given application */
private HttpResponse cancelDeploy(String tenantName, String applicationName) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    Application application = controller.applications().require(id);
    Change change = application.change();
    if (change.isPresent()) {
        // Cancellation must happen under the application lock
        controller.applications().lockOrThrow(id, lockedApplication ->
                controller.applications().deploymentTrigger().cancelChange(id, false));
        return new MessageResponse("Cancelled " + change + " for " + application);
    }
    return new MessageResponse("No deployment in progress for " + application + " at this time");
}
/** Schedule restart of deployment, or specific host in a deployment */
private HttpResponse restart(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    DeploymentId deploymentId = new DeploymentId(applicationId, ZoneId.from(environment, region));
    // When the "hostname" property is given, only that host is restarted
    Optional<Hostname> hostname = Optional.ofNullable(request.getProperty("hostname")).map(Hostname::new);
    controller.applications().restart(deploymentId, hostname);
    String restartedPath = path(TenantResource.API_PATH, tenantName,
                                ApplicationResource.API_PATH, applicationName,
                                EnvironmentResource.API_PATH, environment,
                                "region", region,
                                "instance", instanceName);
    return new StringResponse("Requested restart of " + restartedPath);
}
/**
 * Deploys an application package to a zone. The request is multipart: a required
 * "deployOptions" JSON part and an optional "applicationZip" package part.
 */
private HttpResponse deploy(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = ZoneId.from(environment, region);
    Map<String, byte[]> dataParts = new MultipartParser().parse(request);
    if ( ! dataParts.containsKey("deployOptions"))
        return ErrorResponse.badRequest("Missing required form part 'deployOptions'");
    Inspector deployOptions = SlimeUtils.jsonToSlime(dataParts.get("deployOptions")).get();
    // The application package is optional: deploys may reuse the current package
    Optional<ApplicationPackage> applicationPackage = Optional.ofNullable(dataParts.get("applicationZip"))
                                                              .map(ApplicationPackage::new);
    // Reject packages whose Athenz domain does not match the tenant's domain
    verifyApplicationIdentityConfiguration(tenantName, applicationPackage);
    DeployOptions deployOptionsJsonClass = new DeployOptions(deployOptions.field("deployDirectly").asBool(),
                                                             optional("vespaVersion", deployOptions).map(Version::new),
                                                             deployOptions.field("ignoreValidationErrors").asBool(),
                                                             deployOptions.field("deployCurrentVersion").asBool());
    ActivateResult result = controller.applications().deploy(applicationId,
                                                             zone,
                                                             applicationPackage,
                                                             deployOptionsJsonClass);
    return new SlimeJsonResponse(toSlime(result));
}
/**
 * Verifies that the Athenz domain declared in the package's deployment.xml, if any,
 * matches the tenant's Athenz domain; throws ForbiddenException on mismatch.
 */
private void verifyApplicationIdentityConfiguration(String tenantName, Optional<ApplicationPackage> applicationPackage) {
    applicationPackage.map(ApplicationPackage::deploymentSpec)
                      .flatMap(DeploymentSpec::athenzDomain)
                      .ifPresent(identityDomain -> {
                          AthenzTenant tenant = controller.tenants().athenzTenant(TenantName.from(tenantName))
                                  .orElseThrow(() -> new IllegalArgumentException("Tenant does not exist"));
                          String tenantDomainName = tenant.domain().getName();
                          if (Objects.equals(tenantDomainName, identityDomain.value())) return;
                          throw new ForbiddenException(
                                  String.format(
                                          "Athenz domain in deployment.xml: [%s] must match tenant domain: [%s]",
                                          identityDomain.value(),
                                          tenantDomainName
                                  ));
                      });
}
/** Deletes the named tenant; Athenz tenants additionally require an NToken. Returns the deleted tenant. */
private HttpResponse deleteTenant(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().tenant(tenantName);
    if ( ! tenant.isPresent())
        return ErrorResponse.notFoundError("Could not delete tenant '" + tenantName + "': Tenant not found");
    Tenant existing = tenant.get();
    if (existing instanceof AthenzTenant)
        controller.tenants().deleteTenant((AthenzTenant) existing,
                                          requireNToken(request, "Could not delete " + tenantName));
    else if (existing instanceof UserTenant)
        controller.tenants().deleteTenant((UserTenant) existing);
    else
        throw new IllegalArgumentException("Unknown tenant type:" + existing.getClass().getSimpleName() +
                                           ", for " + existing);
    // Respond with the (now deleted) tenant, without listing applications
    return tenant(existing, request, false);
}
/** Deletes the default instance of the given application. */
private HttpResponse deleteApplication(String tenantName, String applicationName, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, "default");
    controller.applications().deleteApplication(applicationId, getUserPrincipal(request).getNToken());
    return new EmptyJsonResponse();
}
/** Deactivates the given deployment of an application instance. */
private HttpResponse deactivate(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().require(applicationId);
    controller.applications().deactivate(application.id(), ZoneId.from(environment, region));
    String deactivatedPath = path(TenantResource.API_PATH, tenantName,
                                  ApplicationResource.API_PATH, applicationName,
                                  EnvironmentResource.API_PATH, environment,
                                  "region", region,
                                  "instance", instanceName);
    return new StringResponse("Deactivated " + deactivatedPath);
}
/**
 * Promote application Chef environments. To be used by component jobs only
 */
private HttpResponse promoteApplication(String tenantName, String applicationName, HttpRequest request) {
    try {
        ApplicationChefEnvironment chefEnvironment = new ApplicationChefEnvironment(controller.system());
        // Copy from the system-wide Chef environment to the application's source environment
        String source = chefEnvironment.systemChefEnvironment();
        String target = chefEnvironment.applicationSourceEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName));
        controller.chefClient().copyChefEnvironment(source, target);
        return new MessageResponse(String.format("Successfully copied environment %s to %s", source, target));
    } catch (Exception e) {
        // Broad catch by design: any failure maps to a logged 500 for this best-effort operation
        log.log(LogLevel.ERROR, String.format("Error during Chef copy environment. (%s.%s)", tenantName, applicationName), e);
        return ErrorResponse.internalServerError("Unable to promote Chef environments for application");
    }
}
/**
 * Promote application Chef environments for jobs that deploy applications
 */
private HttpResponse promoteApplicationDeployment(String tenantName, String applicationName, String environmentName, String regionName, String instanceName, HttpRequest request) {
    try {
        ApplicationChefEnvironment chefEnvironment = new ApplicationChefEnvironment(controller.system());
        // Copy from the application's source environment to its zone-specific target environment
        String source = chefEnvironment.applicationSourceEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName));
        String target = chefEnvironment.applicationTargetEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName), Environment.from(environmentName), RegionName.from(regionName));
        controller.chefClient().copyChefEnvironment(source, target);
        return new MessageResponse(String.format("Successfully copied environment %s to %s", source, target));
    } catch (Exception e) {
        // Broad catch by design: any failure maps to a logged 500 for this best-effort operation
        log.log(LogLevel.ERROR, String.format("Error during Chef copy environment. (%s.%s %s.%s)", tenantName, applicationName, environmentName, regionName), e);
        return ErrorResponse.internalServerError("Unable to promote Chef environments for application");
    }
}
/** Receives a job-completion report from CI and forwards it to the deployment trigger. */
private HttpResponse notifyJobCompletion(String tenant, String applicationName, HttpRequest request) {
    try {
        DeploymentJobs.JobReport report = toJobReport(tenant, applicationName, toSlime(request.getData()).get());
        controller.applications().deploymentTrigger().notifyOfCompletion(report);
        return new MessageResponse("ok");
    } catch (IllegalStateException e) {
        // An inconsistent report is the client's fault: respond 400 with the reason
        return ErrorResponse.badRequest(Exceptions.toMessageString(e));
    }
}
/**
 * Builds a job report from a CI completion payload.
 *
 * @param report the request body: "instance", "jobName", "projectId", "buildNumber",
 *               optional "sourceRevision" object and optional "jobError"
 */
private static DeploymentJobs.JobReport toJobReport(String tenantName, String applicationName, Inspector report) {
    // "jobError" is only present when the job failed
    Optional<DeploymentJobs.JobError> jobError = Optional.empty();
    if (report.field("jobError").valid()) {
        jobError = Optional.of(DeploymentJobs.JobError.valueOf(report.field("jobError").asString()));
    }
    return new DeploymentJobs.JobReport(
            ApplicationId.from(tenantName, applicationName, report.field("instance").asString()),
            JobType.fromJobName(report.field("jobName").asString()),
            report.field("projectId").asLong(),
            report.field("buildNumber").asLong(),
            toSourceRevision(report.field("sourceRevision")),
            jobError
    );
}
/** Reads a source revision from JSON; empty unless repository, branch and commit are all present. */
private static Optional<SourceRevision> toSourceRevision(Inspector object) {
    boolean complete = object.field("repository").valid()
                       && object.field("branch").valid()
                       && object.field("commit").valid();
    if ( ! complete)
        return Optional.empty();
    return Optional.of(new SourceRevision(object.field("repository").asString(),
                                          object.field("branch").asString(),
                                          object.field("commit").asString()));
}
/** Returns the named tenant, or throws NotExistsException when it does not exist. */
private Tenant getTenantOrThrow(String tenantName) {
    Optional<Tenant> tenant = controller.tenants().tenant(tenantName);
    if (tenant.isPresent())
        return tenant.get();
    throw new NotExistsException(new TenantId(tenantName));
}
/**
 * Serializes the given tenant to the response object: name, type, Athenz metadata when
 * applicable, the tenant's default-instance applications, and contact info.
 * The field emission order below is part of the API response and is kept as-is.
 */
private void toSlime(Cursor object, Tenant tenant, HttpRequest request, boolean listApplications) {
    object.setString("tenant", tenant.name().value());
    object.setString("type", tentantType(tenant));
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        object.setString("athensDomain", athenzTenant.domain().getName());
        object.setString("property", athenzTenant.property().id());
        athenzTenant.propertyId().ifPresent(id -> object.setString("propertyId", id.toString()));
    }
    Cursor applicationArray = object.setArray("applications");
    if (listApplications) {
        // Only the default instance of each of the tenant's applications is listed.
        for (Application application : controller.applications().asList(tenant.name())) {
            if (application.id().instance().isDefault()) {
                if (recurseOverApplications(request))
                    toSlime(applicationArray.addObject(), application, request);  // full application object
                else
                    toSlime(application, applicationArray.addObject(), request);  // brief reference only
            }
        }
    }
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        athenzTenant.contact().ifPresent(c -> {
            object.setString("propertyUrl", c.propertyUrl().toString());
            object.setString("contactsUrl", c.url().toString());
            object.setString("issueCreationUrl", c.issueTrackerUrl().toString());
            Cursor contactsArray = object.setArray("contacts");
            c.persons().forEach(persons -> {
                // Each element is itself a list of names, rendered as a nested array.
                Cursor personArray = contactsArray.addArray();
                persons.forEach(personArray::addString);
            });
        });
    }
}
/** Renders one entry of a tenant list: name, metadata and a link to the tenant resource. */
private void tenantInTenantsListToSlime(Tenant tenant, URI requestURI, Cursor object) {
    String name = tenant.name().value();
    object.setString("tenant", name);
    Cursor meta = object.setObject("metaData");
    meta.setString("type", tentantType(tenant));
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenz = (AthenzTenant) tenant;
        meta.setString("athensDomain", athenz.domain().getName());
        meta.setString("property", athenz.property().id());
    }
    object.setString("url", withPath("/application/v4/tenant/" + name, requestURI).toString());
}
/** Returns a copy of the given URI with the host and port from the given URI and the path set to the given path */
private URI withPath(String newPath, URI uri) {
    try {
        return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), newPath, null, null);
    }
    catch (URISyntaxException e) {
        // All components come from an already-parsed URI, so re-assembly cannot fail.
        throw new RuntimeException("Will not happen", e);
    }
}
/** Adds a bcpStatus object for production deployments; does nothing for other environments. */
private void setRotationStatus(Deployment deployment, Map<String, RotationStatus> healthStatus, Cursor object) {
    if ( ! deployment.zone().environment().equals(Environment.prod)) return;
    object.setObject("bcpStatus")
          .setString("rotationStatus", findRotationStatus(deployment, healthStatus).name());
}
/**
 * Returns the rotation status of the endpoint matching this deployment's environment and region,
 * or UNKNOWN if no endpoint matches (or the matching entry has no status).
 */
private RotationStatus findRotationStatus(Deployment deployment, Map<String, RotationStatus> healthStatus) {
    // Hoist loop invariants: the DNS forms of the zone do not change per endpoint.
    String environment = toDns(deployment.zone().environment().value());
    String region = toDns(deployment.zone().region().value());
    // Iterate entries instead of keySet + getOrDefault: the key is known present,
    // so the original second lookup was redundant.
    for (Map.Entry<String, RotationStatus> entry : healthStatus.entrySet()) {
        String endpoint = entry.getKey();
        if (endpoint.contains(environment) && endpoint.contains(region))
            return entry.getValue() != null ? entry.getValue() : RotationStatus.UNKNOWN;
    }
    return RotationStatus.UNKNOWN;
}
/** Maps an id to its DNS-safe form by replacing underscores with dashes. */
private String toDns(String id) {
    return id.replaceAll("_", "-");
}
/**
 * Parses the given string as a long, returning the default when the string is null.
 * Throws IllegalArgumentException when the string is present but not a valid integer.
 */
private long asLong(String valueOrNull, long defaultWhenNull) {
    if (valueOrNull == null)
        return defaultWhenNull;
    try {
        return Long.parseLong(valueOrNull);
    } catch (NumberFormatException ignored) {
        throw new IllegalArgumentException("Expected an integer but got '" + valueOrNull + "'");
    }
}
/** Serializes one job run; the revision object is omitted when the application version is unknown. */
private void toSlime(JobStatus.JobRun jobRun, Cursor object) {
    object.setLong("id", jobRun.id());
    object.setString("version", jobRun.platform().toFullString());
    if (!jobRun.application().isUnknown())
        toSlime(jobRun.application(), object.setObject("revision"));
    object.setString("reason", jobRun.reason());
    object.setLong("at", jobRun.at().toEpochMilli());
}
/**
 * Reads the given stream (at most 1 MB) and parses it as a Slime JSON structure.
 * Wraps any IOException in an unchecked exception, preserving the cause.
 */
private Slime toSlime(InputStream jsonStream) {
    try {
        byte[] jsonBytes = IOUtils.readBytes(jsonStream, 1000 * 1000);
        return SlimeUtils.jsonToSlime(jsonBytes);
    } catch (IOException e) {
        // Bug fix: the original threw a bare RuntimeException, discarding the cause entirely.
        throw new RuntimeException(e);
    }
}
/** Verifies that the calling user is an admin of the given Athenz domain; throws ForbiddenException otherwise. */
private void throwIfNotAthenzDomainAdmin(AthenzDomain tenantDomain, HttpRequest request) {
    AthenzIdentity identity = getUserPrincipal(request).getIdentity();
    if (athenzClientFactory.createZmsClientWithServicePrincipal().isDomainAdmin(identity, tenantDomain))
        return;
    throw new ForbiddenException(
            String.format("The user '%s' is not admin in Athenz domain '%s'", identity.getFullName(), tenantDomain.getName()));
}
/** Returns the user id of the authenticated principal, or empty if the principal is not an Athenz user. */
private static Optional<UserId> getUserId(HttpRequest request) {
    AthenzIdentity identity = getUserPrincipal(request).getIdentity();
    AthenzUser user = identity instanceof AthenzUser ? (AthenzUser) identity : null;
    return Optional.ofNullable(user)
                   .map(AthenzUser::getName)
                   .map(UserId::new);
}
/**
 * Returns the request's user principal as an AthenzPrincipal.
 * Throws InternalServerErrorException when no principal is set or it has an unexpected type.
 */
private static AthenzPrincipal getUserPrincipal(HttpRequest request) {
    Principal principal = request.getJDiscRequest().getUserPrincipal();
    if (principal == null)
        throw new InternalServerErrorException("Expected a user principal");
    if (principal instanceof AthenzPrincipal)
        return (AthenzPrincipal) principal;
    throw new InternalServerErrorException(
            String.format("Expected principal of type %s, got %s",
                          AthenzPrincipal.class.getSimpleName(), principal.getClass().getName()));
}
/** Returns the field with the given key, or throws IllegalArgumentException if it is absent. */
private Inspector mandatory(String key, Inspector object) {
    Inspector field = object.field(key);
    if (field.valid())
        return field;
    throw new IllegalArgumentException("'" + key + "' is missing");
}
/** Returns the string value of the given field, or empty if the field is missing. */
private Optional<String> optional(String key, Inspector object) {
    return SlimeUtils.optionalString(object.field(key));
}
/** Joins the string forms of the given elements into a '/'-separated path. */
private static String path(Object... elements) {
    return Joiner.on("/").join(elements);
}
/** Serializes a brief application reference: application name, instance name and a link to the resource. */
private void toSlime(Application application, Cursor object, HttpRequest request) {
    object.setString("application", application.id().application().value());
    object.setString("instance", application.id().instance().value());
    object.setString("url", withPath("/application/v4/tenant/" + application.id().tenant().value() +
                                     "/application/" + application.id().application().value(), request.getUri()).toString());
}
/**
 * Serializes a deployment activation result: revision id, package size, prepare log messages,
 * and the config change actions (restarts and refeeds) reported by the config server.
 */
private Slime toSlime(ActivateResult result) {
    Slime slime = new Slime();
    Cursor object = slime.setObject();
    object.setString("revisionId", result.revisionId().id());
    object.setLong("applicationZipSize", result.applicationZipSizeBytes());
    Cursor logArray = object.setArray("prepareMessages");
    if (result.prepareResponse().log != null) {  // log may be absent in the prepare response
        for (Log logMessage : result.prepareResponse().log) {
            Cursor logObject = logArray.addObject();
            logObject.setLong("time", logMessage.time);
            logObject.setString("level", logMessage.level);
            logObject.setString("message", logMessage.message);
        }
    }
    Cursor changeObject = object.setObject("configChangeActions");
    // Services which must be restarted for the new config to take effect.
    Cursor restartActionsArray = changeObject.setArray("restart");
    for (RestartAction restartAction : result.prepareResponse().configChangeActions.restartActions) {
        Cursor restartActionObject = restartActionsArray.addObject();
        restartActionObject.setString("clusterName", restartAction.clusterName);
        restartActionObject.setString("clusterType", restartAction.clusterType);
        restartActionObject.setString("serviceType", restartAction.serviceType);
        serviceInfosToSlime(restartAction.services, restartActionObject.setArray("services"));
        stringsToSlime(restartAction.messages, restartActionObject.setArray("messages"));
    }
    // Document types which require re-feeding due to the config change.
    Cursor refeedActionsArray = changeObject.setArray("refeed");
    for (RefeedAction refeedAction : result.prepareResponse().configChangeActions.refeedActions) {
        Cursor refeedActionObject = refeedActionsArray.addObject();
        refeedActionObject.setString("name", refeedAction.name);
        refeedActionObject.setBool("allowed", refeedAction.allowed);
        refeedActionObject.setString("documentType", refeedAction.documentType);
        refeedActionObject.setString("clusterName", refeedAction.clusterName);
        serviceInfosToSlime(refeedAction.services, refeedActionObject.setArray("services"));
        stringsToSlime(refeedAction.messages, refeedActionObject.setArray("messages"));
    }
    return slime;
}
/** Appends one object per service info (name, type, config id, host) to the given array. */
private void serviceInfosToSlime(List<ServiceInfo> serviceInfoList, Cursor array) {
    for (ServiceInfo service : serviceInfoList) {
        Cursor entry = array.addObject();
        entry.setString("serviceName", service.serviceName);
        entry.setString("serviceType", service.serviceType);
        entry.setString("configId", service.configId);
        entry.setString("hostName", service.hostName);
    }
}
/** Appends each string as an element of the given array. */
private void stringsToSlime(List<String> strings, Cursor array) {
    strings.forEach(array::addString);
}
/**
 * Reads the entire stream into a string, or returns null if the stream is empty.
 * NOTE(review): the Scanner uses the platform default charset — confirm UTF-8 is not required here.
 */
private String readToString(InputStream stream) {
    // Bug fix: the Scanner (and thereby the stream) was never closed; use try-with-resources.
    try (Scanner scanner = new Scanner(stream).useDelimiter("\\A")) {
        return scanner.hasNext() ? scanner.next() : null;
    }
}
/** Returns whether the given version is among the versions currently known in this system. */
private boolean systemHasVersion(Version version) {
    return controller.versionStatus().versions().stream()
                     .map(v -> v.versionNumber())
                     .anyMatch(number -> number.equals(version));
}
/** Returns the version given in the request body, or the current system version if the body is empty. */
private Version decideDeployVersion(HttpRequest request) {
    String requested = readToString(request.getData());
    return requested == null ? controller.systemVersion() : new Version(requested);
}
/** Serializes the total cost figures of a deployment plus a per-cluster breakdown. */
public static void toSlime(DeploymentCost deploymentCost, Cursor object) {
    object.setLong("tco", (long)deploymentCost.getTco());
    object.setLong("waste", (long)deploymentCost.getWaste());
    object.setDouble("utilization", deploymentCost.getUtilization());
    Cursor clustersObject = object.setObject("cluster");
    for (Map.Entry<String, ClusterCost> clusterEntry : deploymentCost.getCluster().entrySet())
        toSlime(clusterEntry.getValue(), clustersObject.setObject(clusterEntry.getKey()));
}
/**
 * Serializes the cost, utilization and metadata of a single cluster.
 * Emission order is part of the API response and is kept as-is.
 */
private static void toSlime(ClusterCost clusterCost, Cursor object) {
    object.setLong("count", clusterCost.getClusterInfo().getHostnames().size());
    object.setString("resource", getResourceName(clusterCost.getResultUtilization()));
    object.setDouble("utilization", clusterCost.getResultUtilization().getMaxUtilization());
    // Bug fix: was cast to int, which can truncate large values before widening to long;
    // cast to long directly, consistent with the deployment-level serializer above.
    object.setLong("tco", (long)clusterCost.getTco());
    object.setLong("waste", (long)clusterCost.getWaste());
    object.setString("flavor", clusterCost.getClusterInfo().getFlavor());
    object.setDouble("flavorCost", clusterCost.getClusterInfo().getFlavorCost());
    object.setDouble("flavorCpu", clusterCost.getClusterInfo().getFlavorCPU());
    object.setDouble("flavorMem", clusterCost.getClusterInfo().getFlavorMem());
    object.setDouble("flavorDisk", clusterCost.getClusterInfo().getFlavorDisk());
    object.setString("type", clusterCost.getClusterInfo().getClusterType().name());
    // Peak utilization of the cluster, per resource dimension.
    Cursor utilObject = object.setObject("util");
    utilObject.setDouble("cpu", clusterCost.getResultUtilization().getCpu());
    utilObject.setDouble("mem", clusterCost.getResultUtilization().getMemory());
    utilObject.setDouble("disk", clusterCost.getResultUtilization().getDisk());
    utilObject.setDouble("diskBusy", clusterCost.getResultUtilization().getDiskBusy());
    // Current system usage, per resource dimension.
    Cursor usageObject = object.setObject("usage");
    usageObject.setDouble("cpu", clusterCost.getSystemUtilization().getCpu());
    usageObject.setDouble("mem", clusterCost.getSystemUtilization().getMemory());
    usageObject.setDouble("disk", clusterCost.getSystemUtilization().getDisk());
    usageObject.setDouble("diskBusy", clusterCost.getSystemUtilization().getDiskBusy());
    Cursor hostnamesArray = object.setArray("hostnames");
    for (String hostname : clusterCost.getClusterInfo().getHostnames())
        hostnamesArray.addString(hostname);
}
/**
 * Names the resource dimension at the cluster's max utilization.
 * Dimensions are checked in the order mem, disk, diskbusy; cpu is the fallback,
 * so on a tie the earlier-checked dimension wins over cpu.
 */
private static String getResourceName(ClusterUtilization utilization) {
    double max = utilization.getMaxUtilization();
    if (utilization.getMemory() == max) return "mem";
    if (utilization.getDisk() == max) return "disk";
    if (utilization.getDiskBusy() == max) return "diskbusy";
    return "cpu";
}
/** Returns whether the request asks for recursion at tenant level (implied by application-level recursion). */
private static boolean recurseOverTenants(HttpRequest request) {
    return recurseOverApplications(request) || "tenant".equals(request.getProperty("recursive"));
}
/** Returns whether the request asks for recursion at application level (implied by deployment-level recursion). */
private static boolean recurseOverApplications(HttpRequest request) {
    return recurseOverDeployments(request) || "application".equals(request.getProperty("recursive"));
}
/** Returns whether the request asks for recursion at deployment level; "all", "true" and "deployment" all qualify. */
private static boolean recurseOverDeployments(HttpRequest request) {
    return ImmutableSet.of("all", "true", "deployment").contains(request.getProperty("recursive"));
}
/**
 * Returns the API type string for the given tenant.
 * (The method name keeps its historical misspelling: it is referenced by several serializers in this class.)
 */
private static String tentantType(Tenant tenant) {
    if (tenant instanceof AthenzTenant) return "ATHENS";
    if (tenant instanceof UserTenant) return "USER";
    throw new IllegalArgumentException("Unknown tenant type: " + tenant.getClass().getSimpleName());
}
/** Returns the NToken of the authenticated principal, or throws IllegalArgumentException prefixed by the given message. */
private static NToken requireNToken(HttpRequest request, String message) {
    return getUserPrincipal(request).getNToken().orElseThrow(() -> new IllegalArgumentException(
            message + ": No NToken provided"));
}
/** Builds an ApplicationId from the tenant, application and instance path segments. */
private static ApplicationId appIdFromPath(Path path) {
    return ApplicationId.from(path.get("tenant"), path.get("application"), path.get("instance"));
}
/** Resolves the job type named by the 'jobtype' path segment. */
private static JobType jobTypeFromPath(Path path) {
    return JobType.fromJobName(path.get("jobtype"));
}
/** Builds a RunId from the application id, job type and run number encoded in the path. */
private static RunId runIdFromPath(Path path) {
    return new RunId(appIdFromPath(path), jobTypeFromPath(path), Long.parseLong(path.get("number")));
}
/**
 * Handles an application submission: parses the multipart payload (submit options, application
 * package, test package), validates the package, and hands it to the job controller.
 * Throws IllegalArgumentException when the source revision or Athenz service declaration is missing.
 */
private HttpResponse submit(String tenant, String application, HttpRequest request) {
    Map<String, byte[]> dataParts = new MultipartParser().parse(request);
    Inspector submitOptions = SlimeUtils.jsonToSlime(dataParts.get(EnvironmentResource.SUBMIT_OPTIONS)).get();
    SourceRevision sourceRevision = toSourceRevision(submitOptions).orElseThrow(() ->
            new IllegalArgumentException("Must specify 'repository', 'branch', and 'commit'"));
    ApplicationPackage applicationPackage = new ApplicationPackage(dataParts.get(EnvironmentResource.APPLICATION_ZIP));
    // Submitted packages must declare an Athenz service; reject early rather than fail at deployment.
    if ( ! applicationPackage.deploymentSpec().athenzDomain().isPresent())
        throw new IllegalArgumentException("Application must define an Athenz service in deployment.xml!");
    verifyApplicationIdentityConfiguration(tenant, Optional.of(applicationPackage));
    return JobControllerApiHandlerHelper.submitResponse(controller.jobController(), tenant, application,
                                                        sourceRevision,
                                                        applicationPackage.zippedContent(),
                                                        dataParts.get(EnvironmentResource.APPLICATION_TEST_ZIP));
}
} |
Never mind, I see now that `ServiceApiResponse` rewrites it to another path in `/application/v4`. However, it should be possible to rewrite the URLs (and simplify the code) without requiring knowledge of the config server URLs. | private HttpResponse services(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
ApplicationView applicationView = controller.getApplicationView(tenantName, applicationName, instanceName, environment, region);
ServiceApiResponse response = new ServiceApiResponse(ZoneId.from(environment, region),
new ApplicationId.Builder().tenant(tenantName).applicationName(applicationName).instanceName(instanceName).build(),
controller.zoneRegistry().getConfigServerApiUris(ZoneId.from(environment, region)),
request.getUri());
response.setResponse(applicationView);
return response;
} | controller.zoneRegistry().getConfigServerApiUris(ZoneId.from(environment, region)), | private HttpResponse services(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
ApplicationView applicationView = controller.getApplicationView(tenantName, applicationName, instanceName, environment, region);
ServiceApiResponse response = new ServiceApiResponse(ZoneId.from(environment, region),
new ApplicationId.Builder().tenant(tenantName).applicationName(applicationName).instanceName(instanceName).build(),
controller.zoneRegistry().getConfigServerApiUris(ZoneId.from(environment, region)),
request.getUri());
response.setResponse(applicationView);
return response;
} | class ApplicationApiHandler extends LoggingRequestHandler {
private final Controller controller;
private final AthenzClientFactory athenzClientFactory;
/** Creates the handler; dependencies are injected by the container. */
@Inject
public ApplicationApiHandler(LoggingRequestHandler.Context parentCtx,
                             Controller controller,
                             AthenzClientFactory athenzClientFactory) {
    super(parentCtx);
    this.controller = controller;
    this.athenzClientFactory = athenzClientFactory;
}
@Override
public Duration getTimeout() {
    return Duration.ofMinutes(20); // deploys can take a long time; override the default request timeout
}
/**
 * Dispatches on the HTTP method and translates thrown exceptions to error responses.
 * The catch clauses go from most to least specific; unexpected RuntimeExceptions are
 * logged with the request URI before being reported as internal server errors.
 */
@Override
public HttpResponse handle(HttpRequest request) {
    try {
        switch (request.getMethod()) {
            case GET: return handleGET(request);
            case PUT: return handlePUT(request);
            case POST: return handlePOST(request);
            case DELETE: return handleDELETE(request);
            case OPTIONS: return handleOPTIONS();
            default: return ErrorResponse.methodNotAllowed("Method '" + request.getMethod() + "' is not supported");
        }
    }
    catch (ForbiddenException e) {
        return ErrorResponse.forbidden(Exceptions.toMessageString(e));
    }
    catch (NotAuthorizedException e) {
        return ErrorResponse.unauthorized(Exceptions.toMessageString(e));
    }
    catch (NotExistsException e) {
        return ErrorResponse.notFoundError(Exceptions.toMessageString(e));
    }
    catch (IllegalArgumentException e) {
        return ErrorResponse.badRequest(Exceptions.toMessageString(e));
    }
    catch (ConfigServerException e) {
        return ErrorResponse.from(e);
    }
    catch (RuntimeException e) {
        log.log(Level.WARNING, "Unexpected error handling '" + request.getUri() + "'", e);
        return ErrorResponse.internalServerError(Exceptions.toMessageString(e));
    }
}
/** Routes GET requests. The first matching path pattern wins, so pattern order is significant. */
private HttpResponse handleGET(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/")) return root(request);
    if (path.matches("/application/v4/user")) return authenticatedUser(request);
    if (path.matches("/application/v4/tenant")) return tenants(request);
    if (path.matches("/application/v4/tenant-pipeline")) return tenantPipelines();
    if (path.matches("/application/v4/athensDomain")) return athenzDomains(request);
    if (path.matches("/application/v4/property")) return properties();
    if (path.matches("/application/v4/cookiefreshness")) return cookieFreshness(request);
    if (path.matches("/application/v4/tenant/{tenant}")) return tenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application")) return applications(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return application(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/logs")) return logs(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request.getUri().getQuery());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job")) return JobControllerApiHandlerHelper.jobTypeResponse(controller, appIdFromPath(path), request.getUri());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return JobControllerApiHandlerHelper.runResponse(controller.jobController().runs(appIdFromPath(path), jobTypeFromPath(path)), request.getUri());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/run/{number}")) return JobControllerApiHandlerHelper.runDetailsResponse(controller.jobController(), runIdFromPath(path), request.getProperty("after"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deployment(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service")) return services(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service/{service}/{*}")) return service(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.get("service"), path.getRest(), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation")) return rotationStatus(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return getGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes PUT requests; first matching path pattern wins. */
private HttpResponse handlePUT(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/user")) return createUser(request);
    if (path.matches("/application/v4/tenant/{tenant}")) return updateTenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override"))
        // PUT sets the override (deleteOverride = false); the DELETE handler clears it.
        return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false, request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes POST requests; first matching path pattern wins. */
private HttpResponse handlePOST(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/tenant/{tenant}")) return createTenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return createApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/promote")) return promoteApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return deploy(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/jobreport")) return notifyJobCompletion(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/submit")) return submit(path.get("tenant"), path.get("application"), request);
    // The bare instance path and the explicit /deploy suffix are equivalent deployment entry points.
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/deploy")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/restart")) return restart(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/promote")) return promoteApplicationDeployment(path.get("tenant"), path.get("application"), path.get("environment"), path.get("region"), path.get("instance"), request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes DELETE requests; first matching path pattern wins. */
private HttpResponse handleDELETE(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/tenant/{tenant}")) return deleteTenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return deleteApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return cancelDeploy(path.get("tenant"), path.get("application"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deactivate(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override"))
        // DELETE clears the rotation override (deleteOverride = true).
        return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true, request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Answers OPTIONS with an empty body and the supported methods in the Allow header. */
private HttpResponse handleOPTIONS() {
    EmptyJsonResponse allowed = new EmptyJsonResponse();
    allowed.headers().put("Allow", "GET,PUT,POST,DELETE,OPTIONS");
    return allowed;
}
/** Lists all tenants, including their applications, as a JSON array. */
private HttpResponse recursiveRoot(HttpRequest request) {
    Slime slime = new Slime();
    Cursor tenants = slime.setArray();
    controller.tenants().asList()
              .forEach(tenant -> toSlime(tenants.addObject(), tenant, request, true));
    return new SlimeJsonResponse(slime);
}
/** Entry point of /application/v4/: the recursive tenant listing, or a plain resource index. */
private HttpResponse root(HttpRequest request) {
    if (recurseOverTenants(request))
        return recursiveRoot(request);
    return new ResourceResponse(request, "user", "tenant", "tenant-pipeline", "athensDomain", "property", "cookiefreshness");
}
/**
 * Returns information about the authenticated user (or the user named by the
 * 'userOverride' request property): the tenants the user belongs to, and whether
 * the user's own user tenant exists. Throws ForbiddenException when no user can be determined.
 */
private HttpResponse authenticatedUser(HttpRequest request) {
    String userIdString = request.getProperty("userOverride");
    if (userIdString == null)
        userIdString = getUserId(request)
                .map(UserId::id)
                .orElseThrow(() -> new ForbiddenException("You must be authenticated or specify userOverride"));
    UserId userId = new UserId(userIdString);
    List<Tenant> tenants = controller.tenants().asList(userId);
    Slime slime = new Slime();
    Cursor response = slime.setObject();
    response.setString("user", userId.id());
    Cursor tenantsArray = response.setArray("tenants");
    for (Tenant tenant : tenants)
        tenantInTenantsListToSlime(tenant, request.getUri(), tenantsArray.addObject());
    // True when the user's personal (user) tenant is among the listed tenants.
    response.setBool("tenantExists", tenants.stream().anyMatch(tenant -> tenant instanceof UserTenant &&
                                                                         ((UserTenant) tenant).is(userId.id())));
    return new SlimeJsonResponse(slime);
}
/** Lists all tenants as a JSON array of brief tenant entries. */
private HttpResponse tenants(HttpRequest request) {
    Slime slime = new Slime();
    Cursor array = slime.setArray();
    for (Tenant tenant : controller.tenants().asList())
        tenantInTenantsListToSlime(tenant, request.getUri(), array.addObject());
    return new SlimeJsonResponse(slime);
}
/** Lists the screwdriver project id for each application */
private HttpResponse tenantPipelines() {
    Slime slime = new Slime();
    Cursor response = slime.setObject();
    Cursor pipelinesArray = response.setArray("tenantPipelines");
    for (Application application : controller.applications().asList()) {
        if ( ! application.deploymentJobs().projectId().isPresent()) continue;  // not built by a pipeline
        Cursor pipelineObject = pipelinesArray.addObject();
        pipelineObject.setString("screwdriverId", String.valueOf(application.deploymentJobs().projectId().getAsLong()));
        pipelineObject.setString("tenant", application.id().tenant().value());
        pipelineObject.setString("application", application.id().application().value());
        pipelineObject.setString("instance", application.id().instance().value());
    }
    // Always emitted, always empty — presumably kept for backwards compatibility with consumers; verify before removing.
    response.setArray("brokenTenantPipelines");
    return new SlimeJsonResponse(slime);
}
/** Lists the names of all Athenz domains matching the optional 'prefix' request property. */
private HttpResponse athenzDomains(HttpRequest request) {
    Slime slime = new Slime();
    Cursor domains = slime.setObject().setArray("data");
    controller.getDomainList(request.getProperty("prefix"))
              .forEach(domain -> domains.addString(domain.getName()));
    return new SlimeJsonResponse(slime);
}
/** Lists all known property id/name pairs. */
private HttpResponse properties() {
    Slime slime = new Slime();
    Cursor array = slime.setObject().setArray("properties");
    for (Map.Entry<PropertyId, Property> entry : controller.fetchPropertyList().entrySet()) {
        Cursor property = array.addObject();
        property.setString("propertyid", entry.getKey().id());
        property.setString("property", entry.getValue().id());
    }
    return new SlimeJsonResponse(slime);
}
/** Tells the client whether its bouncer cookie should be refreshed, based on the passthru header. */
private HttpResponse cookieFreshness(HttpRequest request) {
    String passThruHeader = request.getHeader(SetBouncerPassthruHeaderFilter.BOUNCER_PASSTHRU_HEADER_FIELD);
    boolean cookieOk = SetBouncerPassthruHeaderFilter.BOUNCER_PASSTHRU_COOKIE_OK.equals(passThruHeader);
    Slime slime = new Slime();
    slime.setObject().setBool("shouldRefreshCookie", ! cookieOk);
    return new SlimeJsonResponse(slime);
}
/** Returns the named tenant with its applications, or 404 if it does not exist. */
private HttpResponse tenant(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().tenant(TenantName.from(tenantName));
    if ( ! tenant.isPresent())
        return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
    return tenant(tenant.get(), request, true);
}
/** Serializes the given tenant, optionally including its applications, as a JSON response. */
private HttpResponse tenant(Tenant tenant, HttpRequest request, boolean listApplications) {
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    toSlime(root, tenant, request, listApplications);
    return new SlimeJsonResponse(slime);
}
/** Lists brief references to all applications of the given tenant. */
private HttpResponse applications(String tenantName, HttpRequest request) {
    Slime slime = new Slime();
    Cursor array = slime.setArray();
    for (Application application : controller.applications().asList(TenantName.from(tenantName)))
        toSlime(application, array.addObject(), request);
    return new SlimeJsonResponse(slime);
}
/** Returns the default instance of the given application, or throws NotExistsException. */
private HttpResponse application(String tenantName, String applicationName, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    Application application = controller.applications().get(id)
                                        .orElseThrow(() -> new NotExistsException(id + " not found"));
    Slime slime = new Slime();
    toSlime(slime.setObject(), application, request);
    return new SlimeJsonResponse(slime);
}
/**
 * Returns the logs of the given deployment, filtered by the request's query parameters.
 * An empty JSON object is returned when the config server has no logs for the deployment.
 */
private HttpResponse logs(String tenantName, String applicationName, String instanceName, String environment, String region, String query) {
    ApplicationId application = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = ZoneId.from(environment, region);
    DeploymentId deployment = new DeploymentId(application, zone);
    HashMap<String, String> queryParameters = getParameters(query);
    Optional<Logs> response = controller.configServer().getLogs(deployment, queryParameters);
    Slime slime = new Slime();
    Cursor object = slime.setObject();
    // Idiom fix: use ifPresent + Map.forEach instead of isPresent/get and a stream over entrySet.
    response.ifPresent(logs -> logs.logs().forEach(object::setString));
    return new SlimeJsonResponse(slime);
}
/**
 * Splits an URI query string into a key/value map.
 * Robustness fixes over the original:
 * - a null or empty query (URI#getQuery returns null when absent) yields an empty map instead of an NPE;
 * - a pair without '=' is skipped instead of throwing ArrayIndexOutOfBoundsException;
 * - split with limit 2 keeps any '=' characters inside the value.
 * NOTE(review): values are not URL-decoded — confirm callers expect raw values.
 */
private HashMap<String, String> getParameters(String query) {
    HashMap<String, String> keyValPair = new HashMap<>();
    if (query == null || query.isEmpty()) return keyValPair;
    for (String pair : query.split("&")) {
        String[] splitPair = pair.split("=", 2);
        if (splitPair.length == 2)
            keyValPair.put(splitPair[0], splitPair[1]);
    }
    return keyValPair;
}
/**
 * Serializes the full status of an application into the given Slime cursor:
 * id fields, in-progress and outstanding changes, per-job deployment status,
 * change blockers from the deployment spec, rotations, per-zone deployments
 * (inlined when the request asks for recursion, else as links), metrics,
 * activity and issue ids.
 */
private void toSlime(Cursor object, Application application, HttpRequest request) {
    object.setString("application", application.id().application().value());
    object.setString("instance", application.id().instance().value());
    // Link to the job list of this application instance
    object.setString("deployments", withPath("/application/v4" +
                                             "/tenant/" + application.id().tenant().value() +
                                             "/application/" + application.id().application().value() +
                                             "/instance/" + application.id().instance().value() + "/job/",
                                             request.getUri()).toString());
    // Change currently being rolled out, if any
    if (application.change().isPresent()) {
        toSlime(object.setObject("deploying"), application.change());
    }
    // Change registered but not yet rolling out, if any
    if (application.outstandingChange().isPresent()) {
        toSlime(object.setObject("outstandingChange"), application.outstandingChange());
    }
    // Job status, ordered by the deployment spec's step order
    List<JobStatus> jobStatus = controller.applications().deploymentTrigger()
            .steps(application.deploymentSpec())
            .sortedJobs(application.deploymentJobs().jobStatus().values());
    Cursor deploymentsArray = object.setArray("deploymentJobs");
    for (JobStatus job : jobStatus) {
        Cursor jobObject = deploymentsArray.addObject();
        jobObject.setString("type", job.type().jobName());
        jobObject.setBool("success", job.isSuccess());
        job.lastTriggered().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastTriggered")));
        job.lastCompleted().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastCompleted")));
        job.firstFailing().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("firstFailing")));
        job.lastSuccess().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastSuccess")));
    }
    // Time windows in which version/revision changes are blocked from rolling out
    Cursor changeBlockers = object.setArray("changeBlockers");
    application.deploymentSpec().changeBlocker().forEach(changeBlocker -> {
        Cursor changeBlockerObject = changeBlockers.addObject();
        changeBlockerObject.setBool("versions", changeBlocker.blocksVersions());
        changeBlockerObject.setBool("revisions", changeBlocker.blocksRevisions());
        changeBlockerObject.setString("timeZone", changeBlocker.window().zone().getId());
        Cursor days = changeBlockerObject.setArray("days");
        changeBlocker.window().days().stream().map(DayOfWeek::getValue).forEach(days::addLong);
        Cursor hours = changeBlockerObject.setArray("hours");
        changeBlocker.window().hours().forEach(hours::addLong);
    });
    // Oldest deployed platform version, falling back to the system version when undeployed
    object.setString("compileVersion", application.oldestDeployedPlatform().orElse(controller.systemVersion()).toFullString());
    Cursor globalRotationsArray = object.setArray("globalRotations");
    application.rotation().ifPresent(rotation -> {
        globalRotationsArray.addString(rotation.url().toString());
        globalRotationsArray.addString(rotation.secureUrl().toString());
        object.setString("rotationId", rotation.id().asString());
    });
    // Deployments, ordered by the deployment spec's step order
    List<Deployment> deployments = controller.applications().deploymentTrigger()
            .steps(application.deploymentSpec())
            .sortedDeployments(application.deployments().values());
    Cursor instancesArray = object.setArray("instances");
    for (Deployment deployment : deployments) {
        Cursor deploymentObject = instancesArray.addObject();
        deploymentObject.setString("environment", deployment.zone().environment().value());
        deploymentObject.setString("region", deployment.zone().region().value());
        deploymentObject.setString("instance", application.id().instance().value());
        controller.applications().rotationRepository().getRotation(application).ifPresent(rotation -> {
            Map<String, RotationStatus> rotationHealthStatus = controller.rotationStatus(rotation);
            setRotationStatus(deployment, rotationHealthStatus, deploymentObject);
        });
        // Inline full deployment data when recursion is requested, otherwise emit a link to it
        if (recurseOverDeployments(request))
            toSlime(deploymentObject, new DeploymentId(application.id(), deployment.zone()), deployment, request);
        else
            deploymentObject.setString("url", withPath(request.getUri().getPath() +
                                                       "/environment/" + deployment.zone().environment().value() +
                                                       "/region/" + deployment.zone().region().value() +
                                                       "/instance/" + application.id().instance().value(),
                                                       request.getUri()).toString());
    }
    Cursor metricsObject = object.setObject("metrics");
    metricsObject.setDouble("queryServiceQuality", application.metrics().queryServiceQuality());
    metricsObject.setDouble("writeServiceQuality", application.metrics().writeServiceQuality());
    Cursor activity = object.setObject("activity");
    application.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried", instant.toEpochMilli()));
    application.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten", instant.toEpochMilli()));
    application.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    application.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
    application.ownershipIssueId().ifPresent(issueId -> object.setString("ownershipIssueId", issueId.value()));
    application.deploymentJobs().issueId().ifPresent(issueId -> object.setString("deploymentIssueId", issueId.value()));
}
/** Renders a single deployment of an application, or 404 if application or deployment is missing. */
private HttpResponse deployment(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().get(id)
                                        .orElseThrow(() -> new NotExistsException(id + " not found"));
    DeploymentId deploymentId = new DeploymentId(application.id(), ZoneId.from(environment, region));
    Deployment deployment = application.deployments().get(deploymentId.zoneId());
    if (deployment == null)
        throw new NotExistsException(application + " is not deployed in " + deploymentId.zoneId());
    Slime response = new Slime();
    toSlime(response.setObject(), deploymentId, deployment, request);
    return new SlimeJsonResponse(response);
}
/** Serializes a change: its platform version, and its application version when known. */
private void toSlime(Cursor object, Change change) {
    if (change.platform().isPresent())
        object.setString("version", change.platform().get().toString());
    change.application().ifPresent(version -> {
        if ( ! version.isUnknown())
            toSlime(version, object.setObject("revision"));
    });
}
/**
 * Serializes one deployment into the given Slime cursor: service/node/log/monitoring
 * links, platform and application versions, timestamps, source revision, activity,
 * cost and metrics.
 */
private void toSlime(Cursor response, DeploymentId deploymentId, Deployment deployment, HttpRequest request) {
    Cursor serviceUrlArray = response.setArray("serviceUrls");
    controller.applications().getDeploymentEndpoints(deploymentId)
            .ifPresent(endpoints -> endpoints.forEach(endpoint -> serviceUrlArray.addString(endpoint.toString())));
    // Link into the node repository for the nodes of this deployment
    response.setString("nodes", withPath("/zone/v2/" + deploymentId.zoneId().environment() + "/" + deploymentId.zoneId().region() + "/nodes/v2/node/?&recursive=true&application=" + deploymentId.applicationId().tenant() + "." + deploymentId.applicationId().application() + "." + deploymentId.applicationId().instance(), request.getUri()).toString());
    controller.zoneRegistry().getLogServerUri(deploymentId)
            .ifPresent(elkUrl -> response.setString("elkUrl", elkUrl.toString()));
    response.setString("yamasUrl", monitoringSystemUri(deploymentId).toString());
    response.setString("version", deployment.version().toFullString());
    response.setString("revision", deployment.applicationVersion().id());
    response.setLong("deployTimeEpochMs", deployment.at().toEpochMilli());
    // Expiry is only reported for zones that define a deployment time-to-live
    controller.zoneRegistry().getDeploymentTimeToLive(deploymentId.zoneId())
            .ifPresent(deploymentTimeToLive -> response.setLong("expiryTimeEpochMs", deployment.at().plus(deploymentTimeToLive).toEpochMilli()));
    controller.applications().require(deploymentId.applicationId()).deploymentJobs().projectId()
            .ifPresent(i -> response.setString("screwdriverId", String.valueOf(i)));
    sourceRevisionToSlime(deployment.applicationVersion().source(), response);
    Cursor activity = response.setObject("activity");
    deployment.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    deployment.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
    DeploymentCost appCost = deployment.calculateCost();
    Cursor costObject = response.setObject("cost");
    toSlime(appCost, costObject);
    DeploymentMetrics metrics = deployment.metrics();
    Cursor metricsObject = response.setObject("metrics");
    metricsObject.setDouble("queriesPerSecond", metrics.queriesPerSecond());
    metricsObject.setDouble("writesPerSecond", metrics.writesPerSecond());
    metricsObject.setDouble("documentCount", metrics.documentCount());
    metricsObject.setDouble("queryLatencyMillis", metrics.queryLatencyMillis());
    metricsObject.setDouble("writeLatencyMillis", metrics.writeLatencyMillis());
}
/** Serializes an application version's hash and source revision; unknown versions are skipped. */
private void toSlime(ApplicationVersion applicationVersion, Cursor object) {
    if (applicationVersion.isUnknown()) return;
    object.setString("hash", applicationVersion.id());
    sourceRevisionToSlime(applicationVersion.source(), object.setObject("source"));
}
/** Serializes a source revision's repository, branch and commit; does nothing when absent. */
private void sourceRevisionToSlime(Optional<SourceRevision> revision, Cursor object) {
    revision.ifPresent(source -> {
        object.setString("gitRepository", source.repository());
        object.setString("gitBranch", source.branch());
        object.setString("gitCommit", source.commit());
    });
}
/** Returns the monitoring system URI for the given deployment (exposed as "yamasUrl" in responses). */
private URI monitoringSystemUri(DeploymentId deploymentId) {
    return controller.zoneRegistry().getMonitoringSystemUri(deploymentId);
}
/**
 * Sets the given deployment's global rotations in or out of service,
 * recording the mandatory reason, the acting user and the current time.
 */
private HttpResponse setGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region, boolean inService, HttpRequest request) {
    if ( ! controller.tenants().tenant(tenantName).isPresent())
        return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
    Inspector requestData = toSlime(request.getData()).get();
    EndpointStatus endPointStatus = new EndpointStatus(inService ? EndpointStatus.Status.in : EndpointStatus.Status.out,
                                                       mandatory("reason", requestData).asString(),
                                                       getUserPrincipal(request).getIdentity().getFullName(),
                                                       controller.clock().instant().getEpochSecond());
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 ZoneId.from(environment, region));
    try {
        List<String> rotations = controller.applications().setGlobalRotationStatus(deploymentId, endPointStatus);
        return new MessageResponse(String.format("Rotations %s successfully set to %s service", rotations.toString(), inService ? "in" : "out of"));
    } catch (IOException e) {
        return ErrorResponse.internalServerError("Unable to alter rotation status: " + e.getMessage());
    }
}
/**
 * Returns the current global rotation override status for the given deployment as
 * an array of alternating rotation names and status objects.
 */
private HttpResponse getGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 ZoneId.from(environment, region));
    Slime slime = new Slime();
    Cursor c1 = slime.setObject().setArray("globalrotationoverride");
    try {
        Map<String, EndpointStatus> rotations = controller.applications().getGlobalRotationStatus(deploymentId);
        // Iterate entries directly instead of keySet() + get(), avoiding a second lookup per rotation
        for (Map.Entry<String, EndpointStatus> rotation : rotations.entrySet()) {
            EndpointStatus currentStatus = rotation.getValue();
            c1.addString(rotation.getKey());
            Cursor c2 = c1.addObject();
            c2.setString("status", currentStatus.getStatus().name());
            c2.setString("reason", currentStatus.getReason() == null ? "" : currentStatus.getReason());
            c2.setString("agent", currentStatus.getAgent() == null ? "" : currentStatus.getAgent());
            c2.setLong("timestamp", currentStatus.getEpoch());
        }
    } catch (IOException e) {
        return ErrorResponse.internalServerError("Unable to get rotation status: " + e.getMessage());
    }
    return new SlimeJsonResponse(slime);
}
/**
 * Returns the BCP rotation status of the given zone of an application with a global
 * rotation. 404s when the application has no rotation. Matching endpoints are found
 * by substring match on the DNS form of the environment and region.
 */
private HttpResponse rotationStatus(String tenantName, String applicationName, String instanceName, String environment, String region) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().require(applicationId);
    if ( ! application.rotation().isPresent()) {
        throw new NotExistsException("global rotation does not exist for '" + environment + "." + region + "'");
    }
    Slime slime = new Slime();
    Cursor response = slime.setObject();
    Map<String, RotationStatus> rotationStatus = controller.applications().rotationRepository()
            .getRotation(application)
            .map(controller::rotationStatus)
            .orElseGet(Collections::emptyMap);
    // Hoist the loop-invariant DNS forms, and iterate entries instead of keySet() + getOrDefault()
    String dnsEnvironment = toDns(environment);
    String dnsRegion = toDns(region);
    for (Map.Entry<String, RotationStatus> entry : rotationStatus.entrySet()) {
        if (entry.getKey().contains(dnsEnvironment) && entry.getKey().contains(dnsRegion)) {
            Cursor bcpStatusObject = response.setObject("bcpStatus");
            bcpStatusObject.setString("rotationStatus", entry.getValue().name());
        }
    }
    return new SlimeJsonResponse(slime);
}
/** Proxies a service API request for the given deployment to the config server. */
private HttpResponse service(String tenantName, String applicationName, String instanceName, String environment, String region, String serviceName, String restPath, HttpRequest request) {
    Map<?,?> result = controller.getServiceApiResponse(tenantName, applicationName, instanceName, environment, region, serviceName, restPath);
    ZoneId zone = ZoneId.from(environment, region);
    ApplicationId id = new ApplicationId.Builder().tenant(tenantName).applicationName(applicationName).instanceName(instanceName).build();
    ServiceApiResponse response = new ServiceApiResponse(zone,
                                                         id,
                                                         controller.zoneRegistry().getConfigServerApiUris(zone),
                                                         request.getUri());
    response.setResponse(result, serviceName, restPath);
    return response;
}
/** Creates a user tenant for the authenticated user; idempotent if the user already exists. */
private HttpResponse createUser(HttpRequest request) {
    UserId user = getUserId(request)
            .orElseThrow(() -> new ForbiddenException("Not authenticated or not an user."));
    String username = UserTenant.normalizeUser(user.id());
    try {
        controller.tenants().create(UserTenant.create(username));
        return new MessageResponse("Created user '" + username + "'");
    } catch (AlreadyExistsException e) {
        return new MessageResponse("User '" + username + "' already exists");
    }
}
/**
 * Updates an Athenz tenant's property, domain and (optionally) property id from the
 * request data; 'property' and 'athensDomain' are mandatory fields. Requires a valid
 * NToken. Returns the updated tenant including its applications.
 */
private HttpResponse updateTenant(String tenantName, HttpRequest request) {
    Optional<AthenzTenant> tenant = controller.tenants().athenzTenant(TenantName.from(tenantName));
    if ( ! tenant.isPresent()) return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
    Inspector requestData = toSlime(request.getData()).get();
    NToken token = requireNToken(request, "Could not update " + tenantName);
    // All mutations happen under the tenant lock; each step rebinds lockedTenant to the updated value
    controller.tenants().lockOrThrow(tenant.get().name(), lockedTenant -> {
        lockedTenant = lockedTenant.with(new Property(mandatory("property", requestData).asString()));
        // Domain update goes through the tenant controller, which needs the token
        lockedTenant = controller.tenants().withDomain(
                lockedTenant,
                new AthenzDomain(mandatory("athensDomain", requestData).asString()),
                token
        );
        Optional<PropertyId> propertyId = optional("propertyId", requestData).map(PropertyId::new);
        if (propertyId.isPresent()) {
            lockedTenant = lockedTenant.with(propertyId.get());
        }
        controller.tenants().store(lockedTenant);
    });
    // Re-read so the response reflects the stored state
    return tenant(controller.tenants().requireAthenzTenant(tenant.get().name()), request, true);
}
/**
 * Creates an Athenz tenant from the request data ('athensDomain' and 'property'
 * are mandatory). The caller must be an admin of the Athenz domain.
 */
private HttpResponse createTenant(String tenantName, HttpRequest request) {
    Inspector requestObject = toSlime(request.getData()).get();
    AthenzDomain domain = new AthenzDomain(mandatory("athensDomain", requestObject).asString());
    Property property = new Property(mandatory("property", requestObject).asString());
    Optional<PropertyId> propertyId = optional("propertyId", requestObject).map(PropertyId::new);
    AthenzTenant tenant = AthenzTenant.create(TenantName.from(tenantName), domain, property, propertyId);
    throwIfNotAthenzDomainAdmin(domain, request);
    controller.tenants().create(tenant, requireNToken(request, "Could not create " + tenantName));
    return tenant(tenant, request, true);
}
/** Creates the default instance of a new application, translating Athenz denial to 403. */
private HttpResponse createApplication(String tenantName, String applicationName, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    Application application;
    try {
        application = controller.applications().createApplication(id, getUserPrincipal(request).getNToken());
    }
    catch (ZmsException e) {
        if (e.getCode() != com.yahoo.jdisc.Response.Status.FORBIDDEN) throw e;
        throw new ForbiddenException("Not authorized to create application", e);
    }
    Slime slime = new Slime();
    toSlime(application, slime.setObject(), request);
    return new SlimeJsonResponse(slime);
}
/** Trigger deployment of the last built application package, on a given version */
private HttpResponse deploy(String tenantName, String applicationName, HttpRequest request) {
    Version version = decideDeployVersion(request);
    if ( ! systemHasVersion(version)) {
        String activeVersions = controller.versionStatus().versions().stream()
                                          .map(VespaVersion::versionNumber)
                                          .map(Version::toString)
                                          .collect(Collectors.joining(", "));
        throw new IllegalArgumentException("Cannot trigger deployment of version '" + version + "': " +
                                           "Version is not active in this system. " +
                                           "Active versions: " + activeVersions);
    }
    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    controller.applications().lockOrThrow(id, application ->
            controller.applications().deploymentTrigger().triggerChange(application.get().id(),
                                                                        application.get().change().with(version)));
    return new MessageResponse("Triggered deployment of application '" + id + "' on version " + version);
}
/** Cancel any ongoing change for given application */
private HttpResponse cancelDeploy(String tenantName, String applicationName) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    Application application = controller.applications().require(id);
    Change change = application.change();
    if (change.isPresent()) {
        controller.applications().lockOrThrow(id, ignored ->
                controller.applications().deploymentTrigger().cancelChange(id, false));
        return new MessageResponse("Cancelled " + change + " for " + application);
    }
    return new MessageResponse("No deployment in progress for " + application + " at this time");
}
/** Schedule restart of deployment, or specific host in a deployment */
private HttpResponse restart(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    DeploymentId deploymentId = new DeploymentId(applicationId, ZoneId.from(environment, region));
    // An optional "hostname" property restricts the restart to a single host
    Optional<Hostname> hostname = Optional.ofNullable(request.getProperty("hostname")).map(Hostname::new);
    controller.applications().restart(deploymentId, hostname);
    return new StringResponse("Requested restart of " + path(TenantResource.API_PATH, tenantName,
                                                             ApplicationResource.API_PATH, applicationName,
                                                             EnvironmentResource.API_PATH, environment,
                                                             "region", region,
                                                             "instance", instanceName));
}
/**
 * Deploys an application package to the given zone. The multipart request must
 * contain a 'deployOptions' part and may contain an 'applicationZip' part.
 */
private HttpResponse deploy(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = ZoneId.from(environment, region);
    Map<String, byte[]> dataParts = new MultipartParser().parse(request);
    if ( ! dataParts.containsKey("deployOptions"))
        return ErrorResponse.badRequest("Missing required form part 'deployOptions'");
    Inspector deployOptions = SlimeUtils.jsonToSlime(dataParts.get("deployOptions")).get();
    Optional<ApplicationPackage> applicationPackage = Optional.ofNullable(dataParts.get("applicationZip"))
                                                              .map(ApplicationPackage::new);
    // The declared Athenz domain, if any, must match the tenant's domain
    verifyApplicationIdentityConfiguration(tenantName, applicationPackage);
    DeployOptions options = new DeployOptions(deployOptions.field("deployDirectly").asBool(),
                                              optional("vespaVersion", deployOptions).map(Version::new),
                                              deployOptions.field("ignoreValidationErrors").asBool(),
                                              deployOptions.field("deployCurrentVersion").asBool());
    ActivateResult result = controller.applications().deploy(applicationId, zone, applicationPackage, options);
    return new SlimeJsonResponse(toSlime(result));
}
/**
 * Verifies that the Athenz domain declared in the package's deployment spec, if any,
 * matches the tenant's Athenz domain; throws ForbiddenException otherwise.
 */
private void verifyApplicationIdentityConfiguration(String tenantName, Optional<ApplicationPackage> applicationPackage) {
    applicationPackage.map(ApplicationPackage::deploymentSpec)
                      .flatMap(DeploymentSpec::athenzDomain)
                      .ifPresent(identityDomain -> {
                          AthenzTenant tenant = controller.tenants().athenzTenant(TenantName.from(tenantName))
                                                          .orElseThrow(() -> new IllegalArgumentException("Tenant does not exist"));
                          String tenantDomainName = tenant.domain().getName();
                          if ( ! Objects.equals(tenantDomainName, identityDomain.value()))
                              throw new ForbiddenException(
                                      String.format(
                                              "Athenz domain in deployment.xml: [%s] must match tenant domain: [%s]",
                                              identityDomain.value(),
                                              tenantDomainName
                                      ));
                      });
}
/** Deletes a tenant; Athenz tenants additionally require a valid NToken. */
private HttpResponse deleteTenant(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().tenant(tenantName);
    if ( ! tenant.isPresent())
        return ErrorResponse.notFoundError("Could not delete tenant '" + tenantName + "': Tenant not found");
    Tenant existing = tenant.get();
    if (existing instanceof AthenzTenant)
        controller.tenants().deleteTenant((AthenzTenant) existing,
                                          requireNToken(request, "Could not delete " + tenantName));
    else if (existing instanceof UserTenant)
        controller.tenants().deleteTenant((UserTenant) existing);
    else
        throw new IllegalArgumentException("Unknown tenant type:" + existing.getClass().getSimpleName() +
                                           ", for " + existing);
    return tenant(existing, request, false);
}
/** Deletes the default instance of the given application. */
private HttpResponse deleteApplication(String tenantName, String applicationName, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, "default");
    controller.applications().deleteApplication(applicationId, getUserPrincipal(request).getNToken());
    return new EmptyJsonResponse();
}
/** Deactivates the given deployment of an application. */
private HttpResponse deactivate(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().require(id);
    controller.applications().deactivate(application.id(), ZoneId.from(environment, region));
    return new StringResponse("Deactivated " + path(TenantResource.API_PATH, tenantName,
                                                    ApplicationResource.API_PATH, applicationName,
                                                    EnvironmentResource.API_PATH, environment,
                                                    "region", region,
                                                    "instance", instanceName));
}
/**
 * Promote application Chef environments. To be used by component jobs only
 */
private HttpResponse promoteApplication(String tenantName, String applicationName, HttpRequest request) {
    try {
        ApplicationChefEnvironment chefEnvironments = new ApplicationChefEnvironment(controller.system());
        String sourceEnvironment = chefEnvironments.systemChefEnvironment();
        String targetEnvironment = chefEnvironments.applicationSourceEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName));
        controller.chefClient().copyChefEnvironment(sourceEnvironment, targetEnvironment);
        return new MessageResponse(String.format("Successfully copied environment %s to %s", sourceEnvironment, targetEnvironment));
    } catch (Exception e) {
        // Any failure is logged with full context and reported as a 500
        log.log(LogLevel.ERROR, String.format("Error during Chef copy environment. (%s.%s)", tenantName, applicationName), e);
        return ErrorResponse.internalServerError("Unable to promote Chef environments for application");
    }
}
/**
 * Promote application Chef environments for jobs that deploy applications
 */
private HttpResponse promoteApplicationDeployment(String tenantName, String applicationName, String environmentName, String regionName, String instanceName, HttpRequest request) {
    try {
        ApplicationChefEnvironment chefEnvironments = new ApplicationChefEnvironment(controller.system());
        String sourceEnvironment = chefEnvironments.applicationSourceEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName));
        String targetEnvironment = chefEnvironments.applicationTargetEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName), Environment.from(environmentName), RegionName.from(regionName));
        controller.chefClient().copyChefEnvironment(sourceEnvironment, targetEnvironment);
        return new MessageResponse(String.format("Successfully copied environment %s to %s", sourceEnvironment, targetEnvironment));
    } catch (Exception e) {
        // Any failure is logged with full context and reported as a 500
        log.log(LogLevel.ERROR, String.format("Error during Chef copy environment. (%s.%s %s.%s)", tenantName, applicationName, environmentName, regionName), e);
        return ErrorResponse.internalServerError("Unable to promote Chef environments for application");
    }
}
/** Receives a job completion report from a build system and feeds it to the deployment trigger. */
private HttpResponse notifyJobCompletion(String tenant, String applicationName, HttpRequest request) {
    try {
        DeploymentJobs.JobReport report = toJobReport(tenant, applicationName, toSlime(request.getData()).get());
        controller.applications().deploymentTrigger().notifyOfCompletion(report);
        return new MessageResponse("ok");
    } catch (IllegalStateException e) {
        return ErrorResponse.badRequest(Exceptions.toMessageString(e));
    }
}
/** Builds a job report from the fields of a reported completion ("jobError" is optional). */
private static DeploymentJobs.JobReport toJobReport(String tenantName, String applicationName, Inspector report) {
    Inspector jobErrorField = report.field("jobError");
    Optional<DeploymentJobs.JobError> jobError = jobErrorField.valid()
            ? Optional.of(DeploymentJobs.JobError.valueOf(jobErrorField.asString()))
            : Optional.empty();
    return new DeploymentJobs.JobReport(
            ApplicationId.from(tenantName, applicationName, report.field("instance").asString()),
            JobType.fromJobName(report.field("jobName").asString()),
            report.field("projectId").asLong(),
            report.field("buildNumber").asLong(),
            toSourceRevision(report.field("sourceRevision")),
            jobError
    );
}
/** Reads a source revision from the given object; empty unless repository, branch and commit are all present. */
private static Optional<SourceRevision> toSourceRevision(Inspector object) {
    Inspector repository = object.field("repository");
    Inspector branch = object.field("branch");
    Inspector commit = object.field("commit");
    if (repository.valid() && branch.valid() && commit.valid())
        return Optional.of(new SourceRevision(repository.asString(), branch.asString(), commit.asString()));
    return Optional.empty();
}
/** Returns the tenant with the given name, or throws NotExistsException if it does not exist. */
private Tenant getTenantOrThrow(String tenantName) {
    return controller.tenants().tenant(tenantName)
            .orElseThrow(() -> new NotExistsException(new TenantId(tenantName)));
}
/**
 * Serializes a tenant to the given Slime cursor, optionally with its applications
 * (default instances only — inlined when the request asks for recursion, otherwise
 * as summaries). Athenz tenants additionally get domain/property fields and, when
 * known, contact information.
 */
private void toSlime(Cursor object, Tenant tenant, HttpRequest request, boolean listApplications) {
    object.setString("tenant", tenant.name().value());
    object.setString("type", tentantType(tenant));
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        object.setString("athensDomain", athenzTenant.domain().getName());
        object.setString("property", athenzTenant.property().id());
        athenzTenant.propertyId().ifPresent(id -> object.setString("propertyId", id.toString()));
    }
    Cursor applicationArray = object.setArray("applications");
    if (listApplications) {
        // Only default instances are listed
        for (Application application : controller.applications().asList(tenant.name())) {
            if (application.id().instance().isDefault()) {
                if (recurseOverApplications(request))
                    toSlime(applicationArray.addObject(), application, request);
                else
                    toSlime(application, applicationArray.addObject(), request);
            }
        }
    }
    // Second AthenzTenant block kept separate so contact fields are emitted after "applications"
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        athenzTenant.contact().ifPresent(c -> {
            object.setString("propertyUrl", c.propertyUrl().toString());
            object.setString("contactsUrl", c.url().toString());
            object.setString("issueCreationUrl", c.issueTrackerUrl().toString());
            Cursor contactsArray = object.setArray("contacts");
            c.persons().forEach(persons -> {
                Cursor personArray = contactsArray.addArray();
                persons.forEach(personArray::addString);
            });
        });
    }
}
/** Serializes a tenant-list entry: name, type/domain/property metadata and a link to the tenant. */
private void tenantInTenantsListToSlime(Tenant tenant, URI requestURI, Cursor object) {
    String name = tenant.name().value();
    object.setString("tenant", name);
    Cursor metaData = object.setObject("metaData");
    metaData.setString("type", tentantType(tenant));
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        metaData.setString("athensDomain", athenzTenant.domain().getName());
        metaData.setString("property", athenzTenant.property().id());
    }
    object.setString("url", withPath("/application/v4/tenant/" + name, requestURI).toString());
}
/**
 * Returns a copy of the given URI with the host and port from the given URI and the path set to the given path.
 * Query and fragment are dropped. URISyntaxException is not expected for inputs derived from a valid URI,
 * hence the "Will not happen" wrapping.
 */
private URI withPath(String newPath, URI uri) {
    try {
        return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), newPath, null, null);
    }
    catch (URISyntaxException e) {
        throw new RuntimeException("Will not happen", e);
    }
}
/** Adds a "bcpStatus" object for the deployment's rotation status; prod deployments only. */
private void setRotationStatus(Deployment deployment, Map<String, RotationStatus> healthStatus, Cursor object) {
    if (deployment.zone().environment().equals(Environment.prod)) {
        Cursor bcpStatusObject = object.setObject("bcpStatus");
        bcpStatusObject.setString("rotationStatus", findRotationStatus(deployment, healthStatus).name());
    }
}
/**
 * Returns the health status of the first rotation endpoint whose name contains the
 * DNS form of this deployment's environment and region, or UNKNOWN if none match.
 */
private RotationStatus findRotationStatus(Deployment deployment, Map<String, RotationStatus> healthStatus) {
    // Hoist the loop-invariant DNS forms out of the loop
    String environment = toDns(deployment.zone().environment().value());
    String region = toDns(deployment.zone().region().value());
    // Iterate entries instead of keySet() + getOrDefault(): the value is known to be mapped
    for (Map.Entry<String, RotationStatus> entry : healthStatus.entrySet()) {
        if (entry.getKey().contains(environment) && entry.getKey().contains(region)) {
            return entry.getValue();
        }
    }
    return RotationStatus.UNKNOWN;
}
/** Translates an environment/region id to its DNS form: underscores become dashes. */
private String toDns(String id) {
    return id.replace('_', '-');
}
/**
 * Parses the given string as a long, returning the given default when the string is null.
 *
 * @throws IllegalArgumentException if the string is non-null and not a valid long;
 *         the NumberFormatException is chained as the cause (the original dropped it)
 */
private long asLong(String valueOrNull, long defaultWhenNull) {
    if (valueOrNull == null) return defaultWhenNull;
    try {
        return Long.parseLong(valueOrNull);
    }
    catch (NumberFormatException e) {
        throw new IllegalArgumentException("Expected an integer but got '" + valueOrNull + "'", e);
    }
}
/** Serializes a job run: id, platform version, application revision (when known), reason and time. */
private void toSlime(JobStatus.JobRun jobRun, Cursor object) {
    object.setLong("id", jobRun.id());
    object.setString("version", jobRun.platform().toFullString());
    ApplicationVersion applicationVersion = jobRun.application();
    if ( ! applicationVersion.isUnknown())
        toSlime(applicationVersion, object.setObject("revision"));
    object.setString("reason", jobRun.reason());
    object.setLong("at", jobRun.at().toEpochMilli());
}
/**
 * Reads the given JSON stream (up to 1 MB) into a Slime structure.
 *
 * On I/O failure this now chains the IOException as the cause — the original threw
 * a bare {@code new RuntimeException()}, discarding both message and stack cause.
 */
private Slime toSlime(InputStream jsonStream) {
    try {
        byte[] jsonBytes = IOUtils.readBytes(jsonStream, 1000 * 1000);
        return SlimeUtils.jsonToSlime(jsonBytes);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/** Throws ForbiddenException unless the requesting user is an admin of the given Athenz domain. */
private void throwIfNotAthenzDomainAdmin(AthenzDomain tenantDomain, HttpRequest request) {
    AthenzIdentity identity = getUserPrincipal(request).getIdentity();
    if (athenzClientFactory.createZmsClientWithServicePrincipal().isDomainAdmin(identity, tenantDomain))
        return;
    throw new ForbiddenException(
            String.format("The user '%s' is not admin in Athenz domain '%s'", identity.getFullName(), tenantDomain.getName()));
}
/** Returns the requesting user's id, or empty if the principal is not an Athenz user. */
private static Optional<UserId> getUserId(HttpRequest request) {
    AthenzIdentity identity = getUserPrincipal(request).getIdentity();
    if ( ! (identity instanceof AthenzUser))
        return Optional.empty();
    return Optional.of(new UserId(((AthenzUser) identity).getName()));
}
/** Returns the request's user principal, which must be present and an AthenzPrincipal. */
private static AthenzPrincipal getUserPrincipal(HttpRequest request) {
    Principal principal = request.getJDiscRequest().getUserPrincipal();
    if (principal == null) throw new InternalServerErrorException("Expected a user principal");
    if (principal instanceof AthenzPrincipal)
        return (AthenzPrincipal) principal;
    throw new InternalServerErrorException(
            String.format("Expected principal of type %s, got %s",
                          AthenzPrincipal.class.getSimpleName(), principal.getClass().getName()));
}
/** Returns the given field, throwing IllegalArgumentException if it is missing. */
private Inspector mandatory(String key, Inspector object) {
    Inspector value = object.field(key);
    if ( ! value.valid())
        throw new IllegalArgumentException("'" + key + "' is missing");
    return value;
}
/** Returns the string value of the given field, if present (delegates to SlimeUtils.optionalString). */
private Optional<String> optional(String key, Inspector object) {
    return SlimeUtils.optionalString(object.field(key));
}
/** Joins the given path elements with '/' (no leading or trailing slash is added). */
private static String path(Object... elements) {
    return Joiner.on("/").join(elements);
}
/** Serializes an application summary: application and instance names plus a link to it. */
private void toSlime(Application application, Cursor object, HttpRequest request) {
    ApplicationId id = application.id();
    object.setString("application", id.application().value());
    object.setString("instance", id.instance().value());
    String applicationPath = "/application/v4/tenant/" + id.tenant().value() +
                             "/application/" + id.application().value();
    object.setString("url", withPath(applicationPath, request.getUri()).toString());
}
/**
 * Serializes a prepare-and-activate result: revision id, package size, the prepare log,
 * and the config change actions (restarts and refeeds) the new config requires.
 */
private Slime toSlime(ActivateResult result) {
    Slime slime = new Slime();
    Cursor object = slime.setObject();
    object.setString("revisionId", result.revisionId().id());
    object.setLong("applicationZipSize", result.applicationZipSizeBytes());
    // The prepare log list may be null — guard before iterating.
    Cursor logArray = object.setArray("prepareMessages");
    if (result.prepareResponse().log != null) {
        for (Log logMessage : result.prepareResponse().log) {
            Cursor logObject = logArray.addObject();
            logObject.setLong("time", logMessage.time);
            logObject.setString("level", logMessage.level);
            logObject.setString("message", logMessage.message);
        }
    }
    Cursor changeObject = object.setObject("configChangeActions");
    // Services which must be restarted for the new config to take effect.
    Cursor restartActionsArray = changeObject.setArray("restart");
    for (RestartAction restartAction : result.prepareResponse().configChangeActions.restartActions) {
        Cursor restartActionObject = restartActionsArray.addObject();
        restartActionObject.setString("clusterName", restartAction.clusterName);
        restartActionObject.setString("clusterType", restartAction.clusterType);
        restartActionObject.setString("serviceType", restartAction.serviceType);
        serviceInfosToSlime(restartAction.services, restartActionObject.setArray("services"));
        stringsToSlime(restartAction.messages, restartActionObject.setArray("messages"));
    }
    // Document types which must be re-fed because of the config change.
    Cursor refeedActionsArray = changeObject.setArray("refeed");
    for (RefeedAction refeedAction : result.prepareResponse().configChangeActions.refeedActions) {
        Cursor refeedActionObject = refeedActionsArray.addObject();
        refeedActionObject.setString("name", refeedAction.name);
        refeedActionObject.setBool("allowed", refeedAction.allowed);
        refeedActionObject.setString("documentType", refeedAction.documentType);
        refeedActionObject.setString("clusterName", refeedAction.clusterName);
        serviceInfosToSlime(refeedAction.services, refeedActionObject.setArray("services"));
        stringsToSlime(refeedAction.messages, refeedActionObject.setArray("messages"));
    }
    return slime;
}
/** Appends one object per service info to the given array. */
private void serviceInfosToSlime(List<ServiceInfo> serviceInfoList, Cursor array) {
    serviceInfoList.forEach(service -> {
        Cursor entry = array.addObject();
        entry.setString("serviceName", service.serviceName);
        entry.setString("serviceType", service.serviceType);
        entry.setString("configId", service.configId);
        entry.setString("hostName", service.hostName);
    });
}

/** Appends each string to the given array. */
private void stringsToSlime(List<String> strings, Cursor array) {
    strings.forEach(array::addString);
}
/**
 * Reads the entire stream as a single string, or returns null if the stream is empty.
 * The "\\A" delimiter makes the scanner consume everything in one token.
 */
private String readToString(InputStream stream) {
    // try-with-resources closes the Scanner (and with it the underlying stream);
    // the original leaked the Scanner.
    try (Scanner scanner = new Scanner(stream).useDelimiter("\\A")) {
        return scanner.hasNext() ? scanner.next() : null;
    }
}
/** Returns whether the given version is among the versions currently present in this system. */
private boolean systemHasVersion(Version version) {
    return controller.versionStatus().versions().stream()
                     .anyMatch(candidate -> candidate.versionNumber().equals(version));
}

/** Returns the version given in the request body, or the current system version if the body is empty. */
private Version decideDeployVersion(HttpRequest request) {
    String requestedVersion = readToString(request.getData());
    return requestedVersion == null ? controller.systemVersion() : new Version(requestedVersion);
}
/** Serializes deployment cost totals and per-cluster cost breakdowns to the given cursor. */
public static void toSlime(DeploymentCost deploymentCost, Cursor object) {
    object.setLong("tco", (long)deploymentCost.getTco());
    object.setLong("waste", (long)deploymentCost.getWaste());
    object.setDouble("utilization", deploymentCost.getUtilization());
    // One entry per cluster, keyed by cluster name.
    Cursor clustersObject = object.setObject("cluster");
    for (Map.Entry<String, ClusterCost> clusterEntry : deploymentCost.getCluster().entrySet())
        toSlime(clusterEntry.getValue(), clustersObject.setObject(clusterEntry.getKey()));
}
/** Serializes one cluster's cost, utilization, flavor and host list to the given cursor. */
private static void toSlime(ClusterCost clusterCost, Cursor object) {
    object.setLong("count", clusterCost.getClusterInfo().getHostnames().size());
    object.setString("resource", getResourceName(clusterCost.getResultUtilization()));
    object.setDouble("utilization", clusterCost.getResultUtilization().getMaxUtilization());
    // Cast to long, not int: an int cast can truncate/overflow for large costs, and the
    // DeploymentCost serializer already writes these same fields with a long cast.
    object.setLong("tco", (long)clusterCost.getTco());
    object.setLong("waste", (long)clusterCost.getWaste());
    object.setString("flavor", clusterCost.getClusterInfo().getFlavor());
    object.setDouble("flavorCost", clusterCost.getClusterInfo().getFlavorCost());
    object.setDouble("flavorCpu", clusterCost.getClusterInfo().getFlavorCPU());
    object.setDouble("flavorMem", clusterCost.getClusterInfo().getFlavorMem());
    object.setDouble("flavorDisk", clusterCost.getClusterInfo().getFlavorDisk());
    object.setString("type", clusterCost.getClusterInfo().getClusterType().name());
    // Normalized utilization relative to the flavor's capacity.
    Cursor utilObject = object.setObject("util");
    utilObject.setDouble("cpu", clusterCost.getResultUtilization().getCpu());
    utilObject.setDouble("mem", clusterCost.getResultUtilization().getMemory());
    utilObject.setDouble("disk", clusterCost.getResultUtilization().getDisk());
    utilObject.setDouble("diskBusy", clusterCost.getResultUtilization().getDiskBusy());
    // Raw system utilization as measured.
    Cursor usageObject = object.setObject("usage");
    usageObject.setDouble("cpu", clusterCost.getSystemUtilization().getCpu());
    usageObject.setDouble("mem", clusterCost.getSystemUtilization().getMemory());
    usageObject.setDouble("disk", clusterCost.getSystemUtilization().getDisk());
    usageObject.setDouble("diskBusy", clusterCost.getSystemUtilization().getDiskBusy());
    Cursor hostnamesArray = object.setArray("hostnames");
    for (String hostname : clusterCost.getClusterInfo().getHostnames())
        hostnamesArray.addString(hostname);
}
/** Returns the name of the most utilized resource; "cpu" when cpu is the maximum (or nothing else matches). */
private static String getResourceName(ClusterUtilization utilization) {
    double max = utilization.getMaxUtilization();
    if (utilization.getMemory() == max) return "mem";
    if (utilization.getDisk() == max) return "disk";
    if (utilization.getDiskBusy() == max) return "diskbusy";
    return "cpu";
}
/** Returns whether the request asks for recursion over tenants (implied by recursion over applications). */
private static boolean recurseOverTenants(HttpRequest request) {
    return recurseOverApplications(request) || "tenant".equals(request.getProperty("recursive"));
}

/** Returns whether the request asks for recursion over applications (implied by recursion over deployments). */
private static boolean recurseOverApplications(HttpRequest request) {
    return recurseOverDeployments(request) || "application".equals(request.getProperty("recursive"));
}

/** Returns whether the request asks for recursion over deployments. */
private static boolean recurseOverDeployments(HttpRequest request) {
    // Guava's ImmutableSet.contains(null) returns false, so an absent 'recursive' property is safe here.
    // Do not replace with Java 9 Set.of(...): its contains(null) throws NullPointerException.
    return ImmutableSet.of("all", "true", "deployment").contains(request.getProperty("recursive"));
}
/**
 * Returns the API name of the given tenant's type.
 * (Method name keeps the historical "tentant" spelling, since callers elsewhere use it.)
 */
private static String tentantType(Tenant tenant) {
    if (tenant instanceof AthenzTenant) return "ATHENS";
    if (tenant instanceof UserTenant) return "USER";
    throw new IllegalArgumentException("Unknown tenant type: " + tenant.getClass().getSimpleName());
}
/** Returns the NToken of the request principal, or throws IllegalArgumentException prefixed with the given message. */
private static NToken requireNToken(HttpRequest request, String message) {
    return getUserPrincipal(request).getNToken().orElseThrow(() -> new IllegalArgumentException(
            message + ": No NToken provided"));
}

/** Builds an ApplicationId from the 'tenant', 'application' and 'instance' path segments. */
private static ApplicationId appIdFromPath(Path path) {
    return ApplicationId.from(path.get("tenant"), path.get("application"), path.get("instance"));
}

/** Resolves the job type named by the 'jobtype' path segment. */
private static JobType jobTypeFromPath(Path path) {
    return JobType.fromJobName(path.get("jobtype"));
}

/** Builds a RunId from the application id, job type and 'number' path segments. */
private static RunId runIdFromPath(Path path) {
    long number = Long.parseLong(path.get("number"));
    return new RunId(appIdFromPath(path), jobTypeFromPath(path), number);
}
/**
 * Handles submission of a new application revision: parses the multipart request,
 * requires a complete source revision and an Athenz service in deployment.xml,
 * then hands the application and test packages to the job controller.
 */
private HttpResponse submit(String tenant, String application, HttpRequest request) {
    Map<String, byte[]> dataParts = new MultipartParser().parse(request);
    Inspector submitOptions = SlimeUtils.jsonToSlime(dataParts.get(EnvironmentResource.SUBMIT_OPTIONS)).get();
    SourceRevision sourceRevision = toSourceRevision(submitOptions).orElseThrow(() ->
            new IllegalArgumentException("Must specify 'repository', 'branch', and 'commit'"));
    ApplicationPackage applicationPackage = new ApplicationPackage(dataParts.get(EnvironmentResource.APPLICATION_ZIP));
    // Submitted packages must declare an Athenz identity in deployment.xml.
    if ( ! applicationPackage.deploymentSpec().athenzDomain().isPresent())
        throw new IllegalArgumentException("Application must define an Athenz service in deployment.xml!");
    verifyApplicationIdentityConfiguration(tenant, Optional.of(applicationPackage));
    return JobControllerApiHandlerHelper.submitResponse(controller.jobController(), tenant, application,
                                                        sourceRevision,
                                                        applicationPackage.zippedContent(),
                                                        dataParts.get(EnvironmentResource.APPLICATION_TEST_ZIP));
}
} | class ApplicationApiHandler extends LoggingRequestHandler {
private final Controller controller;
private final AthenzClientFactory athenzClientFactory;

@Inject
public ApplicationApiHandler(LoggingRequestHandler.Context parentCtx,
                             Controller controller,
                             AthenzClientFactory athenzClientFactory) {
    super(parentCtx);
    this.controller = controller;
    this.athenzClientFactory = athenzClientFactory;
}

// Deployment-related requests can be slow; allow this handler generous time before timing out.
@Override
public Duration getTimeout() {
    return Duration.ofMinutes(20);
}
/**
 * Dispatches on the HTTP method and translates known exception types to the matching
 * error response. Catch order matters: most specific first; RuntimeException is the
 * catch-all, and is the only case that is logged (the rest are expected client errors).
 */
@Override
public HttpResponse handle(HttpRequest request) {
    try {
        switch (request.getMethod()) {
            case GET: return handleGET(request);
            case PUT: return handlePUT(request);
            case POST: return handlePOST(request);
            case DELETE: return handleDELETE(request);
            case OPTIONS: return handleOPTIONS();
            default: return ErrorResponse.methodNotAllowed("Method '" + request.getMethod() + "' is not supported");
        }
    }
    catch (ForbiddenException e) {
        return ErrorResponse.forbidden(Exceptions.toMessageString(e));
    }
    catch (NotAuthorizedException e) {
        return ErrorResponse.unauthorized(Exceptions.toMessageString(e));
    }
    catch (NotExistsException e) {
        return ErrorResponse.notFoundError(Exceptions.toMessageString(e));
    }
    catch (IllegalArgumentException e) {
        return ErrorResponse.badRequest(Exceptions.toMessageString(e));
    }
    catch (ConfigServerException e) {
        return ErrorResponse.from(e);
    }
    catch (RuntimeException e) {
        // Unexpected: log with stack trace before mapping to a 500.
        log.log(Level.WARNING, "Unexpected error handling '" + request.getUri() + "'", e);
        return ErrorResponse.internalServerError(Exceptions.toMessageString(e));
    }
}
/**
 * Routes GET requests. Patterns are tried in order, so more specific paths must be listed
 * before the shorter prefixes they extend.
 */
private HttpResponse handleGET(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/")) return root(request);
    if (path.matches("/application/v4/user")) return authenticatedUser(request);
    if (path.matches("/application/v4/tenant")) return tenants(request);
    if (path.matches("/application/v4/tenant-pipeline")) return tenantPipelines();
    if (path.matches("/application/v4/athensDomain")) return athenzDomains(request);
    if (path.matches("/application/v4/property")) return properties();
    if (path.matches("/application/v4/cookiefreshness")) return cookieFreshness(request);
    if (path.matches("/application/v4/tenant/{tenant}")) return tenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application")) return applications(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return application(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/logs")) return logs(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request.getUri().getQuery());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job")) return JobControllerApiHandlerHelper.jobTypeResponse(controller, appIdFromPath(path), request.getUri());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return JobControllerApiHandlerHelper.runResponse(controller.jobController().runs(appIdFromPath(path), jobTypeFromPath(path)), request.getUri());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/run/{number}")) return JobControllerApiHandlerHelper.runDetailsResponse(controller.jobController(), runIdFromPath(path), request.getProperty("after"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deployment(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service")) return services(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service/{service}/{*}")) return service(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.get("service"), path.getRest(), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation")) return rotationStatus(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return getGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes PUT requests. */
private HttpResponse handlePUT(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/user")) return createUser(request);
    if (path.matches("/application/v4/tenant/{tenant}")) return updateTenant(path.get("tenant"), request);
    // PUT on the override sets the rotation out of service (inService = false).
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override"))
        return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false, request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}

/** Routes POST requests. */
private HttpResponse handlePOST(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/tenant/{tenant}")) return createTenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return createApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/promote")) return promoteApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return deploy(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/jobreport")) return notifyJobCompletion(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/submit")) return submit(path.get("tenant"), path.get("application"), request);
    // The plain instance path and its '/deploy' suffix route to the same deploy handler.
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/deploy")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/restart")) return restart(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/promote")) return promoteApplicationDeployment(path.get("tenant"), path.get("application"), path.get("environment"), path.get("region"), path.get("instance"), request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}

/** Routes DELETE requests. */
private HttpResponse handleDELETE(HttpRequest request) {
    Path path = new Path(request.getUri().getPath());
    if (path.matches("/application/v4/tenant/{tenant}")) return deleteTenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return deleteApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return cancelDeploy(path.get("tenant"), path.get("application"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deactivate(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    // DELETE on the override puts the rotation back in service (inService = true).
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override"))
        return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true, request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Answers OPTIONS with an empty body and an Allow header listing the supported methods. */
private HttpResponse handleOPTIONS() {
    EmptyJsonResponse optionsResponse = new EmptyJsonResponse();
    optionsResponse.headers().put("Allow", "GET,PUT,POST,DELETE,OPTIONS");
    return optionsResponse;
}
/** Lists all tenants, each fully expanded (applications included). */
private HttpResponse recursiveRoot(HttpRequest request) {
    Slime slime = new Slime();
    Cursor tenantArray = slime.setArray();
    controller.tenants().asList()
              .forEach(tenant -> toSlime(tenantArray.addObject(), tenant, request, true));
    return new SlimeJsonResponse(slime);
}

/** The API root: either a recursive tenant listing, or links to the top-level resources. */
private HttpResponse root(HttpRequest request) {
    if (recurseOverTenants(request))
        return recursiveRoot(request);
    return new ResourceResponse(request, "user", "tenant", "tenant-pipeline", "athensDomain", "property", "cookiefreshness");
}
/**
 * Returns info about the authenticated user (or the user named by the 'userOverride'
 * property, if present): the tenants the user can access, and whether the user's
 * personal tenant exists.
 */
private HttpResponse authenticatedUser(HttpRequest request) {
    String userIdString = request.getProperty("userOverride");
    if (userIdString == null)
        userIdString = getUserId(request)
                .map(UserId::id)
                .orElseThrow(() -> new ForbiddenException("You must be authenticated or specify userOverride"));
    UserId userId = new UserId(userIdString);
    List<Tenant> tenants = controller.tenants().asList(userId);
    Slime slime = new Slime();
    Cursor response = slime.setObject();
    response.setString("user", userId.id());
    Cursor tenantsArray = response.setArray("tenants");
    for (Tenant tenant : tenants)
        tenantInTenantsListToSlime(tenant, request.getUri(), tenantsArray.addObject());
    // True when the user's own (user-) tenant is among the accessible tenants.
    response.setBool("tenantExists", tenants.stream().anyMatch(tenant -> tenant instanceof UserTenant &&
                                                                         ((UserTenant) tenant).is(userId.id())));
    return new SlimeJsonResponse(slime);
}
/** Lists all tenants in short form. */
private HttpResponse tenants(HttpRequest request) {
    Slime slime = new Slime();
    Cursor tenantArray = slime.setArray();
    controller.tenants().asList()
              .forEach(tenant -> tenantInTenantsListToSlime(tenant, request.getUri(), tenantArray.addObject()));
    return new SlimeJsonResponse(slime);
}
/** Lists the screwdriver project id for each application */
private HttpResponse tenantPipelines() {
    Slime slime = new Slime();
    Cursor response = slime.setObject();
    Cursor pipelinesArray = response.setArray("tenantPipelines");
    for (Application application : controller.applications().asList()) {
        // Applications without a screwdriver project are skipped.
        if ( ! application.deploymentJobs().projectId().isPresent()) continue;
        Cursor pipelineObject = pipelinesArray.addObject();
        pipelineObject.setString("screwdriverId", String.valueOf(application.deploymentJobs().projectId().getAsLong()));
        pipelineObject.setString("tenant", application.id().tenant().value());
        pipelineObject.setString("application", application.id().application().value());
        pipelineObject.setString("instance", application.id().instance().value());
    }
    // Kept for API compatibility; always empty.
    response.setArray("brokenTenantPipelines");
    return new SlimeJsonResponse(slime);
}
/** Lists the Athenz domains matching the request's 'prefix' property. */
private HttpResponse athenzDomains(HttpRequest request) {
    Slime slime = new Slime();
    Cursor data = slime.setObject().setArray("data");
    for (AthenzDomain athenzDomain : controller.getDomainList(request.getProperty("prefix")))
        data.addString(athenzDomain.getName());
    return new SlimeJsonResponse(slime);
}

/** Lists all known properties with their ids. */
private HttpResponse properties() {
    Slime slime = new Slime();
    Cursor array = slime.setObject().setArray("properties");
    controller.fetchPropertyList().forEach((propertyId, property) -> {
        Cursor entry = array.addObject();
        entry.setString("propertyid", propertyId.id());
        entry.setString("property", property.id());
    });
    return new SlimeJsonResponse(slime);
}
/** Tells whether the client should refresh its cookie, based on the Bouncer pass-through header. */
private HttpResponse cookieFreshness(HttpRequest request) {
    String passThruHeader = request.getHeader(SetBouncerPassthruHeaderFilter.BOUNCER_PASSTHRU_HEADER_FIELD);
    boolean shouldRefresh = ! SetBouncerPassthruHeaderFilter.BOUNCER_PASSTHRU_COOKIE_OK.equals(passThruHeader);
    Slime slime = new Slime();
    slime.setObject().setBool("shouldRefreshCookie", shouldRefresh);
    return new SlimeJsonResponse(slime);
}
/** Returns the named tenant with its applications listed, or 404 if it does not exist. */
private HttpResponse tenant(String tenantName, HttpRequest request) {
    return controller.tenants().tenant(TenantName.from(tenantName))
            .map(tenant -> tenant(tenant, request, true))
            .orElseGet(() -> ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist"));
}

/** Serializes the given tenant, optionally including its applications. */
private HttpResponse tenant(Tenant tenant, HttpRequest request, boolean listApplications) {
    Slime slime = new Slime();
    toSlime(slime.setObject(), tenant, request, listApplications);
    return new SlimeJsonResponse(slime);
}
/** Lists all applications of the named tenant in short form. */
private HttpResponse applications(String tenantName, HttpRequest request) {
    Slime slime = new Slime();
    Cursor array = slime.setArray();
    controller.applications().asList(TenantName.from(tenantName))
              .forEach(application -> toSlime(application, array.addObject(), request));
    return new SlimeJsonResponse(slime);
}

/** Returns the default instance of the named application, or throws NotExistsException. */
private HttpResponse application(String tenantName, String applicationName, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, "default");
    Application application = controller.applications().get(applicationId)
                                        .orElseThrow(() -> new NotExistsException(applicationId + " not found"));
    Slime slime = new Slime();
    toSlime(slime.setObject(), application, request);
    return new SlimeJsonResponse(slime);
}
/**
 * Returns the config server's logs for the given deployment as a JSON object of
 * log name → log content, or an empty object if no logs are available.
 */
private HttpResponse logs(String tenantName, String applicationName, String instanceName, String environment, String region, String query) {
    ApplicationId application = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = ZoneId.from(environment, region);
    DeploymentId deployment = new DeploymentId(application, zone);
    HashMap<String, String> queryParameters = getParameters(query);
    Optional<Logs> response = controller.configServer().getLogs(deployment, queryParameters);
    Slime slime = new Slime();
    Cursor object = slime.setObject();
    // ifPresent + Map.forEach replaces the isPresent()/get()/entrySet().stream().forEach()
    // chain of the original; behavior is identical.
    response.ifPresent(logResult -> logResult.logs().forEach(object::setString));
    return new SlimeJsonResponse(slime);
}
/**
 * Parses a URI query string into a key → value map.
 *
 * Robustness fixes over the original: a null or empty query (URI.getQuery() returns
 * null when absent) yields an empty map instead of an NPE; a parameter without '='
 * is skipped instead of throwing ArrayIndexOutOfBoundsException; and splitting with
 * limit 2 keeps '=' characters inside values instead of truncating them.
 */
private HashMap<String, String> getParameters(String query) {
    HashMap<String, String> keyValPair = new HashMap<>();
    if (query == null || query.isEmpty()) return keyValPair;
    for (String pair : query.split("&")) {
        String[] parts = pair.split("=", 2);
        if (parts.length == 2)
            keyValPair.put(parts[0], parts[1]);
    }
    return keyValPair;
}
/**
 * Serializes the full view of an application: deployment job status, change blockers,
 * compile version, global rotations, deployments (recursively if requested), metrics,
 * activity, and issue ids.
 */
private void toSlime(Cursor object, Application application, HttpRequest request) {
    object.setString("application", application.id().application().value());
    object.setString("instance", application.id().instance().value());
    object.setString("deployments", withPath("/application/v4" +
                                             "/tenant/" + application.id().tenant().value() +
                                             "/application/" + application.id().application().value() +
                                             "/instance/" + application.id().instance().value() + "/job/",
                                             request.getUri()).toString());
    // In-progress and outstanding changes, when present.
    if (application.change().isPresent()) {
        toSlime(object.setObject("deploying"), application.change());
    }
    if (application.outstandingChange().isPresent()) {
        toSlime(object.setObject("outstandingChange"), application.outstandingChange());
    }
    // Job status, ordered by the deployment spec's step order.
    List<JobStatus> jobStatus = controller.applications().deploymentTrigger()
            .steps(application.deploymentSpec())
            .sortedJobs(application.deploymentJobs().jobStatus().values());
    Cursor deploymentsArray = object.setArray("deploymentJobs");
    for (JobStatus job : jobStatus) {
        Cursor jobObject = deploymentsArray.addObject();
        jobObject.setString("type", job.type().jobName());
        jobObject.setBool("success", job.isSuccess());
        job.lastTriggered().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastTriggered")));
        job.lastCompleted().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastCompleted")));
        job.firstFailing().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("firstFailing")));
        job.lastSuccess().ifPresent(jobRun -> toSlime(jobRun, jobObject.setObject("lastSuccess")));
    }
    // Time windows in which version and/or revision changes are blocked.
    Cursor changeBlockers = object.setArray("changeBlockers");
    application.deploymentSpec().changeBlocker().forEach(changeBlocker -> {
        Cursor changeBlockerObject = changeBlockers.addObject();
        changeBlockerObject.setBool("versions", changeBlocker.blocksVersions());
        changeBlockerObject.setBool("revisions", changeBlocker.blocksRevisions());
        changeBlockerObject.setString("timeZone", changeBlocker.window().zone().getId());
        Cursor days = changeBlockerObject.setArray("days");
        changeBlocker.window().days().stream().map(DayOfWeek::getValue).forEach(days::addLong);
        Cursor hours = changeBlockerObject.setArray("hours");
        changeBlocker.window().hours().forEach(hours::addLong);
    });
    // Oldest deployed platform, falling back to the current system version.
    object.setString("compileVersion", application.oldestDeployedPlatform().orElse(controller.systemVersion()).toFullString());
    Cursor globalRotationsArray = object.setArray("globalRotations");
    application.rotation().ifPresent(rotation -> {
        globalRotationsArray.addString(rotation.url().toString());
        globalRotationsArray.addString(rotation.secureUrl().toString());
        object.setString("rotationId", rotation.id().asString());
    });
    // Deployments, ordered by the deployment spec's step order.
    List<Deployment> deployments = controller.applications().deploymentTrigger()
            .steps(application.deploymentSpec())
            .sortedDeployments(application.deployments().values());
    Cursor instancesArray = object.setArray("instances");
    for (Deployment deployment : deployments) {
        Cursor deploymentObject = instancesArray.addObject();
        deploymentObject.setString("environment", deployment.zone().environment().value());
        deploymentObject.setString("region", deployment.zone().region().value());
        deploymentObject.setString("instance", application.id().instance().value());
        controller.applications().rotationRepository().getRotation(application).ifPresent(rotation -> {
            Map<String, RotationStatus> rotationHealthStatus = controller.rotationStatus(rotation);
            setRotationStatus(deployment, rotationHealthStatus, deploymentObject);
        });
        // Inline full deployment info when recursion is requested; otherwise just a link.
        if (recurseOverDeployments(request))
            toSlime(deploymentObject, new DeploymentId(application.id(), deployment.zone()), deployment, request);
        else
            deploymentObject.setString("url", withPath(request.getUri().getPath() +
                                                       "/environment/" + deployment.zone().environment().value() +
                                                       "/region/" + deployment.zone().region().value() +
                                                       "/instance/" + application.id().instance().value(),
                                                       request.getUri()).toString());
    }
    Cursor metricsObject = object.setObject("metrics");
    metricsObject.setDouble("queryServiceQuality", application.metrics().queryServiceQuality());
    metricsObject.setDouble("writeServiceQuality", application.metrics().writeServiceQuality());
    Cursor activity = object.setObject("activity");
    application.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried", instant.toEpochMilli()));
    application.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten", instant.toEpochMilli()));
    application.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    application.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
    application.ownershipIssueId().ifPresent(issueId -> object.setString("ownershipIssueId", issueId.value()));
    application.deploymentJobs().issueId().ifPresent(issueId -> object.setString("deploymentIssueId", issueId.value()));
}
/** Returns the full view of a single deployment, or throws NotExistsException if absent. */
private HttpResponse deployment(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().get(id)
            .orElseThrow(() -> new NotExistsException(id + " not found"));
    DeploymentId deploymentId = new DeploymentId(application.id(),
                                                 ZoneId.from(environment, region));
    Deployment deployment = application.deployments().get(deploymentId.zoneId());
    if (deployment == null)
        throw new NotExistsException(application + " is not deployed in " + deploymentId.zoneId());
    Slime slime = new Slime();
    toSlime(slime.setObject(), deploymentId, deployment, request);
    return new SlimeJsonResponse(slime);
}
/** Writes the platform version and (known) application revision of the given change. */
private void toSlime(Cursor object, Change change) {
    change.platform().ifPresent(platformVersion -> object.setString("version", platformVersion.toString()));
    change.application().ifPresent(applicationVersion -> {
        if ( ! applicationVersion.isUnknown())
            toSlime(applicationVersion, object.setObject("revision"));
    });
}
/**
 * Serializes the full view of a deployment: service urls, node and log links,
 * versions, expiry, source revision, activity, cost and metrics.
 */
private void toSlime(Cursor response, DeploymentId deploymentId, Deployment deployment, HttpRequest request) {
    Cursor serviceUrlArray = response.setArray("serviceUrls");
    controller.applications().getDeploymentEndpoints(deploymentId)
            .ifPresent(endpoints -> endpoints.forEach(endpoint -> serviceUrlArray.addString(endpoint.toString())));
    // Link to the node repository's listing of this deployment's nodes.
    response.setString("nodes", withPath("/zone/v2/" + deploymentId.zoneId().environment() + "/" + deploymentId.zoneId().region() + "/nodes/v2/node/?&recursive=true&application=" + deploymentId.applicationId().tenant() + "." + deploymentId.applicationId().application() + "." + deploymentId.applicationId().instance(), request.getUri()).toString());
    controller.zoneRegistry().getLogServerUri(deploymentId)
            .ifPresent(elkUrl -> response.setString("elkUrl", elkUrl.toString()));
    response.setString("yamasUrl", monitoringSystemUri(deploymentId).toString());
    response.setString("version", deployment.version().toFullString());
    response.setString("revision", deployment.applicationVersion().id());
    response.setLong("deployTimeEpochMs", deployment.at().toEpochMilli());
    // Expiry is only set for zones with a deployment time-to-live.
    controller.zoneRegistry().getDeploymentTimeToLive(deploymentId.zoneId())
            .ifPresent(deploymentTimeToLive -> response.setLong("expiryTimeEpochMs", deployment.at().plus(deploymentTimeToLive).toEpochMilli()));
    controller.applications().require(deploymentId.applicationId()).deploymentJobs().projectId()
            .ifPresent(i -> response.setString("screwdriverId", String.valueOf(i)));
    sourceRevisionToSlime(deployment.applicationVersion().source(), response);
    Cursor activity = response.setObject("activity");
    deployment.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    deployment.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
    DeploymentCost appCost = deployment.calculateCost();
    Cursor costObject = response.setObject("cost");
    toSlime(appCost, costObject);
    DeploymentMetrics metrics = deployment.metrics();
    Cursor metricsObject = response.setObject("metrics");
    metricsObject.setDouble("queriesPerSecond", metrics.queriesPerSecond());
    metricsObject.setDouble("writesPerSecond", metrics.writesPerSecond());
    metricsObject.setDouble("documentCount", metrics.documentCount());
    metricsObject.setDouble("queryLatencyMillis", metrics.queryLatencyMillis());
    metricsObject.setDouble("writeLatencyMillis", metrics.writeLatencyMillis());
}
/** Writes the id and source revision of the given application version, unless the version is unknown. */
private void toSlime(ApplicationVersion applicationVersion, Cursor object) {
    if (applicationVersion.isUnknown()) return;
    object.setString("hash", applicationVersion.id());
    sourceRevisionToSlime(applicationVersion.source(), object.setObject("source"));
}
/** Writes repository, branch and commit of the given source revision, if present. */
private void sourceRevisionToSlime(Optional<SourceRevision> revision, Cursor object) {
    revision.ifPresent(source -> {
        object.setString("gitRepository", source.repository());
        object.setString("gitBranch", source.branch());
        object.setString("gitCommit", source.commit());
    });
}
/** Returns the monitoring system URI for the given deployment, as resolved by the zone registry. */
private URI monitoringSystemUri(DeploymentId deploymentId) {
    return controller.zoneRegistry().getMonitoringSystemUri(deploymentId);
}
/**
 * Sets the global rotation override (in or out of service) for all rotations of the given deployment.
 * The request body must contain a "reason"; the acting agent and a timestamp are recorded with the status.
 */
private HttpResponse setGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region, boolean inService, HttpRequest request) {
    Optional<Tenant> existingTenant = controller.tenants().tenant(tenantName);
    if (!existingTenant.isPresent())
        return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");

    Inspector requestData = toSlime(request.getData()).get();
    String reason = mandatory("reason", requestData).asString();
    String agent = getUserPrincipal(request).getIdentity().getFullName();
    long timestamp = controller.clock().instant().getEpochSecond();
    EndpointStatus.Status status = inService ? EndpointStatus.Status.in : EndpointStatus.Status.out;
    EndpointStatus endPointStatus = new EndpointStatus(status, reason, agent, timestamp);
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 ZoneId.from(environment, region));
    try {
        List<String> rotations = controller.applications().setGlobalRotationStatus(deploymentId, endPointStatus);
        return new MessageResponse(String.format("Rotations %s successfully set to %s service", rotations.toString(), inService ? "in" : "out of"));
    } catch (IOException e) {
        return ErrorResponse.internalServerError("Unable to alter rotation status: " + e.getMessage());
    }
}
/**
 * Returns the stored global rotation override status for all rotations of the given deployment.
 * The response's "globalrotationoverride" array alternates rotation name and a status object.
 */
private HttpResponse getGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 ZoneId.from(environment, region));
    Slime slime = new Slime();
    Cursor c1 = slime.setObject().setArray("globalrotationoverride");
    try {
        Map<String, EndpointStatus> rotations = controller.applications().getGlobalRotationStatus(deploymentId);
        // Iterate entries directly rather than keySet() + get(), avoiding a second lookup per rotation.
        for (Map.Entry<String, EndpointStatus> rotation : rotations.entrySet()) {
            EndpointStatus currentStatus = rotation.getValue();
            c1.addString(rotation.getKey());
            Cursor c2 = c1.addObject();
            c2.setString("status", currentStatus.getStatus().name());
            c2.setString("reason", currentStatus.getReason() == null ? "" : currentStatus.getReason());
            c2.setString("agent", currentStatus.getAgent() == null ? "" : currentStatus.getAgent());
            c2.setLong("timestamp", currentStatus.getEpoch());
        }
    } catch (IOException e) {
        return ErrorResponse.internalServerError("Unable to get rotation status: " + e.getMessage());
    }
    return new SlimeJsonResponse(slime);
}
/**
 * Returns the rotation status ("bcpStatus") for the rotation endpoint matching the given
 * environment and region; fails if the application has no global rotation.
 */
private HttpResponse rotationStatus(String tenantName, String applicationName, String instanceName, String environment, String region) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().require(applicationId);
    if (!application.rotation().isPresent()) {
        throw new NotExistsException("global rotation does not exist for '" + environment + "." + region + "'");
    }
    Slime slime = new Slime();
    Cursor response = slime.setObject();
    Map<String, RotationStatus> rotationStatus = controller.applications().rotationRepository()
                                                           .getRotation(application)
                                                           .map(controller::rotationStatus)
                                                           .orElseGet(Collections::emptyMap);
    // Iterate entries instead of keySet(): the value is already at hand, so the
    // getOrDefault() lookup the old code did per matching endpoint is unnecessary.
    for (Map.Entry<String, RotationStatus> entry : rotationStatus.entrySet()) {
        String rotationEndpoint = entry.getKey();
        if (rotationEndpoint.contains(toDns(environment)) && rotationEndpoint.contains(toDns(region))) {
            Cursor bcpStatusObject = response.setObject("bcpStatus");
            bcpStatusObject.setString("rotationStatus", entry.getValue().name());
        }
    }
    return new SlimeJsonResponse(slime);
}
/** Proxies a service API request for the given deployment and wraps the result with this zone's config server URIs. */
private HttpResponse service(String tenantName, String applicationName, String instanceName, String environment, String region, String serviceName, String restPath, HttpRequest request) {
    Map<?,?> result = controller.getServiceApiResponse(tenantName, applicationName, instanceName, environment, region, serviceName, restPath);
    ServiceApiResponse response = new ServiceApiResponse(ZoneId.from(environment, region),
                                                        new ApplicationId.Builder().tenant(tenantName).applicationName(applicationName).instanceName(instanceName).build(),
                                                        controller.zoneRegistry().getConfigServerApiUris(ZoneId.from(environment, region)),
                                                        request.getUri());
    response.setResponse(result, serviceName, restPath);
    return response;
}
/** Creates a user tenant named after the authenticated user; also reports success when it already exists. */
private HttpResponse createUser(HttpRequest request) {
    UserId user = getUserId(request).orElseThrow(() -> new ForbiddenException("Not authenticated or not an user."));
    String username = UserTenant.normalizeUser(user.id());
    try {
        controller.tenants().create(UserTenant.create(username));
        return new MessageResponse("Created user '" + username + "'");
    } catch (AlreadyExistsException e) {
        return new MessageResponse("User '" + username + "' already exists");
    }
}
/**
 * Updates the property, Athenz domain and, optionally, property id of an existing Athenz tenant.
 * Requires an NToken; the update is performed under the tenant lock and then stored.
 */
private HttpResponse updateTenant(String tenantName, HttpRequest request) {
    Optional<AthenzTenant> tenant = controller.tenants().athenzTenant(TenantName.from(tenantName));
    if ( ! tenant.isPresent()) return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");

    Inspector requestData = toSlime(request.getData()).get();
    NToken token = requireNToken(request, "Could not update " + tenantName);

    controller.tenants().lockOrThrow(tenant.get().name(), lockedTenant -> {
        lockedTenant = lockedTenant.with(new Property(mandatory("property", requestData).asString()));
        // Domain changes go through the tenant controller so the Athenz side is updated as well.
        lockedTenant = controller.tenants().withDomain(
                lockedTenant,
                new AthenzDomain(mandatory("athensDomain", requestData).asString()),
                token
        );
        Optional<PropertyId> propertyId = optional("propertyId", requestData).map(PropertyId::new);
        if (propertyId.isPresent()) {
            lockedTenant = lockedTenant.with(propertyId.get());
        }
        controller.tenants().store(lockedTenant);
    });
    return tenant(controller.tenants().requireAthenzTenant(tenant.get().name()), request, true);
}
/**
 * Creates an Athenz tenant from the request body ("athensDomain", "property", optional "propertyId").
 * The caller must be an admin of the given Athenz domain and supply an NToken.
 */
private HttpResponse createTenant(String tenantName, HttpRequest request) {
    Inspector requestData = toSlime(request.getData()).get();

    AthenzTenant tenant = AthenzTenant.create(TenantName.from(tenantName),
                                              new AthenzDomain(mandatory("athensDomain", requestData).asString()),
                                              new Property(mandatory("property", requestData).asString()),
                                              optional("propertyId", requestData).map(PropertyId::new));
    throwIfNotAthenzDomainAdmin(tenant.domain(), request);
    controller.tenants().create(tenant, requireNToken(request, "Could not create " + tenantName));
    return tenant(tenant, request, true);
}
/** Creates the 'default' instance of the given application, forwarding the user's NToken. */
private HttpResponse createApplication(String tenantName, String applicationName, HttpRequest request) {
    Application application;
    try {
        application = controller.applications().createApplication(ApplicationId.from(tenantName, applicationName, "default"), getUserPrincipal(request).getNToken());
    }
    catch (ZmsException e) {
        // Translate an Athenz authorization failure to a 403 for the caller; rethrow anything else.
        if (e.getCode() == com.yahoo.jdisc.Response.Status.FORBIDDEN)
            throw new ForbiddenException("Not authorized to create application", e);
        else
            throw e;
    }

    Slime slime = new Slime();
    toSlime(application, slime.setObject(), request);
    return new SlimeJsonResponse(slime);
}
/** Trigger deployment of the last built application package, on a given version */
// The version is read from the request body; decideDeployVersion falls back to the system version.
private HttpResponse deploy(String tenantName, String applicationName, HttpRequest request) {
    Version version = decideDeployVersion(request);
    if ( ! systemHasVersion(version))
        throw new IllegalArgumentException("Cannot trigger deployment of version '" + version + "': " +
                                           "Version is not active in this system. " +
                                           "Active versions: " + controller.versionStatus().versions()
                                                                           .stream()
                                                                           .map(VespaVersion::versionNumber)
                                                                           .map(Version::toString)
                                                                           .collect(Collectors.joining(", ")));

    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    // Triggering is done under the application lock to serialize with other change updates.
    controller.applications().lockOrThrow(id, application -> {
        controller.applications().deploymentTrigger().triggerChange(application.get().id(),
                                                                    application.get().change().with(version));
    });
    return new MessageResponse("Triggered deployment of application '" + id + "' on version " + version);
}
/** Cancels any ongoing change for the default instance of the given application. */
private HttpResponse cancelDeploy(String tenantName, String applicationName) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, "default");
    Application application = controller.applications().require(id);
    Change change = application.change();
    if (change.isPresent()) {
        controller.applications().lockOrThrow(id, lockedApplication ->
                controller.applications().deploymentTrigger().cancelChange(id, false));
        return new MessageResponse("Cancelled " + change + " for " + application);
    }
    return new MessageResponse("No deployment in progress for " + application + " at this time");
}
/** Schedules restart of a deployment, optionally limited to the host given by the 'hostname' property. */
private HttpResponse restart(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId application = ApplicationId.from(tenantName, applicationName, instanceName);
    DeploymentId deploymentId = new DeploymentId(application, ZoneId.from(environment, region));
    Optional<Hostname> hostname = Optional.ofNullable(request.getProperty("hostname")).map(Hostname::new);
    controller.applications().restart(deploymentId, hostname);
    return new StringResponse("Requested restart of " + path(TenantResource.API_PATH, tenantName,
                                                             ApplicationResource.API_PATH, applicationName,
                                                             EnvironmentResource.API_PATH, environment,
                                                             "region", region,
                                                             "instance", instanceName));
}
/**
 * Deploys an application package (multipart form data) to the given zone.
 * A 'deployOptions' part is required; 'applicationZip' is optional
 * (absent e.g. when the options ask for the current version to be redeployed).
 */
private HttpResponse deploy(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = ZoneId.from(environment, region);

    Map<String, byte[]> dataParts = new MultipartParser().parse(request);
    if ( ! dataParts.containsKey("deployOptions"))
        return ErrorResponse.badRequest("Missing required form part 'deployOptions'");

    Inspector deployOptions = SlimeUtils.jsonToSlime(dataParts.get("deployOptions")).get();

    Optional<ApplicationPackage> applicationPackage = Optional.ofNullable(dataParts.get("applicationZip"))
                                                              .map(ApplicationPackage::new);
    // Reject packages whose deployment.xml declares a different Athenz domain than the tenant's.
    verifyApplicationIdentityConfiguration(tenantName, applicationPackage);

    DeployOptions deployOptionsJsonClass = new DeployOptions(deployOptions.field("deployDirectly").asBool(),
                                                             optional("vespaVersion", deployOptions).map(Version::new),
                                                             deployOptions.field("ignoreValidationErrors").asBool(),
                                                             deployOptions.field("deployCurrentVersion").asBool());
    ActivateResult result = controller.applications().deploy(applicationId,
                                                             zone,
                                                             applicationPackage,
                                                             deployOptionsJsonClass);

    return new SlimeJsonResponse(toSlime(result));
}
/**
 * Verifies that the Athenz domain declared in the package's deployment.xml, if any,
 * matches the tenant's Athenz domain.
 *
 * @throws ForbiddenException if the domains differ
 * @throws IllegalArgumentException if the tenant does not exist
 */
private void verifyApplicationIdentityConfiguration(String tenantName, Optional<ApplicationPackage> applicationPackage) {
    applicationPackage.map(ApplicationPackage::deploymentSpec).flatMap(DeploymentSpec::athenzDomain)
                      .ifPresent(identityDomain -> {
                          AthenzTenant tenant = controller.tenants().athenzTenant(TenantName.from(tenantName))
                                                          .orElseThrow(() -> new IllegalArgumentException("Tenant does not exist"));
                          AthenzDomain tenantDomain = tenant.domain();
                          if (! Objects.equals(tenantDomain.getName(), identityDomain.value())) {
                              throw new ForbiddenException(
                                      String.format(
                                              "Athenz domain in deployment.xml: [%s] must match tenant domain: [%s]",
                                              identityDomain.value(),
                                              tenantDomain.getName()
                                      ));
                          }
                      });
}
/** Deletes the given tenant; Athenz tenants additionally require an NToken for the deletion. */
private HttpResponse deleteTenant(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().tenant(tenantName);
    if ( ! tenant.isPresent())
        return ErrorResponse.notFoundError("Could not delete tenant '" + tenantName + "': Tenant not found");

    Tenant existing = tenant.get();
    if (existing instanceof AthenzTenant)
        controller.tenants().deleteTenant((AthenzTenant) existing,
                                          requireNToken(request, "Could not delete " + tenantName));
    else if (existing instanceof UserTenant)
        controller.tenants().deleteTenant((UserTenant) existing);
    else
        throw new IllegalArgumentException("Unknown tenant type:" + existing.getClass().getSimpleName() +
                                           ", for " + existing);

    return tenant(existing, request, false);
}
/** Deletes the default instance of the given application, passing along the user's NToken. */
private HttpResponse deleteApplication(String tenantName, String applicationName, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, "default");
    controller.applications().deleteApplication(applicationId, getUserPrincipal(request).getNToken());
    return new EmptyJsonResponse(); // Response is empty on success
}
/** Deactivates the given deployment of an application instance. */
private HttpResponse deactivate(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    Application application = controller.applications().require(applicationId);
    controller.applications().deactivate(application.id(), ZoneId.from(environment, region));
    return new StringResponse("Deactivated " + path(TenantResource.API_PATH, tenantName,
                                                    ApplicationResource.API_PATH, applicationName,
                                                    EnvironmentResource.API_PATH, environment,
                                                    "region", region,
                                                    "instance", instanceName));
}
/**
 * Promote application Chef environments. To be used by component jobs only
 */
private HttpResponse promoteApplication(String tenantName, String applicationName, HttpRequest request) {
    try{
        // Copies the system-level Chef environment to the application's source environment.
        ApplicationChefEnvironment chefEnvironment = new ApplicationChefEnvironment(controller.system());
        String sourceEnvironment = chefEnvironment.systemChefEnvironment();
        String targetEnvironment = chefEnvironment.applicationSourceEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName));
        controller.chefClient().copyChefEnvironment(sourceEnvironment, targetEnvironment);
        return new MessageResponse(String.format("Successfully copied environment %s to %s", sourceEnvironment, targetEnvironment));
    } catch (Exception e) {
        // Failures are logged with full context but reported generically to the caller.
        log.log(LogLevel.ERROR, String.format("Error during Chef copy environment. (%s.%s)", tenantName, applicationName), e);
        return ErrorResponse.internalServerError("Unable to promote Chef environments for application");
    }
}
/**
 * Promote application Chef environments for jobs that deploy applications
 */
private HttpResponse promoteApplicationDeployment(String tenantName, String applicationName, String environmentName, String regionName, String instanceName, HttpRequest request) {
    try {
        // Copies the application's source Chef environment to its zone-specific target environment.
        ApplicationChefEnvironment chefEnvironment = new ApplicationChefEnvironment(controller.system());
        String sourceEnvironment = chefEnvironment.applicationSourceEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName));
        String targetEnvironment = chefEnvironment.applicationTargetEnvironment(TenantName.from(tenantName), ApplicationName.from(applicationName), Environment.from(environmentName), RegionName.from(regionName));
        controller.chefClient().copyChefEnvironment(sourceEnvironment, targetEnvironment);
        return new MessageResponse(String.format("Successfully copied environment %s to %s", sourceEnvironment, targetEnvironment));
    } catch (Exception e) {
        // Failures are logged with full context but reported generically to the caller.
        log.log(LogLevel.ERROR, String.format("Error during Chef copy environment. (%s.%s %s.%s)", tenantName, applicationName, environmentName, regionName), e);
        return ErrorResponse.internalServerError("Unable to promote Chef environments for application");
    }
}
/** Records completion of a deployment job, as reported in the request body. */
private HttpResponse notifyJobCompletion(String tenant, String applicationName, HttpRequest request) {
    try {
        DeploymentJobs.JobReport report = toJobReport(tenant, applicationName, toSlime(request.getData()).get());
        controller.applications().deploymentTrigger().notifyOfCompletion(report);
        return new MessageResponse("ok");
    } catch (IllegalStateException e) {
        return ErrorResponse.badRequest(Exceptions.toMessageString(e));
    }
}
/** Builds a job report from the fields of the given JSON report. */
private static DeploymentJobs.JobReport toJobReport(String tenantName, String applicationName, Inspector report) {
    Optional<DeploymentJobs.JobError> jobError = report.field("jobError").valid()
            ? Optional.of(DeploymentJobs.JobError.valueOf(report.field("jobError").asString()))
            : Optional.empty();
    return new DeploymentJobs.JobReport(ApplicationId.from(tenantName, applicationName, report.field("instance").asString()),
                                        JobType.fromJobName(report.field("jobName").asString()),
                                        report.field("projectId").asLong(),
                                        report.field("buildNumber").asLong(),
                                        toSourceRevision(report.field("sourceRevision")),
                                        jobError);
}
/** Returns the source revision given by "repository", "branch" and "commit", or empty if any field is missing. */
private static Optional<SourceRevision> toSourceRevision(Inspector object) {
    if (object.field("repository").valid()
        && object.field("branch").valid()
        && object.field("commit").valid())
        return Optional.of(new SourceRevision(object.field("repository").asString(),
                                              object.field("branch").asString(),
                                              object.field("commit").asString()));
    return Optional.empty();
}
/** Returns the tenant with the given name, or throws NotExistsException if there is none. */
private Tenant getTenantOrThrow(String tenantName) {
    return controller.tenants().tenant(tenantName)
                     .orElseThrow(() -> new NotExistsException(new TenantId(tenantName)));
}
/**
 * Renders a tenant, optionally including its applications (default instances only),
 * and, for Athenz tenants, domain/property data and contact information if present.
 */
private void toSlime(Cursor object, Tenant tenant, HttpRequest request, boolean listApplications) {
    object.setString("tenant", tenant.name().value());
    object.setString("type", tentantType(tenant));
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        object.setString("athensDomain", athenzTenant.domain().getName());
        object.setString("property", athenzTenant.property().id());
        athenzTenant.propertyId().ifPresent(id -> object.setString("propertyId", id.toString()));
    }
    Cursor applicationArray = object.setArray("applications");
    if (listApplications) { // NB: Turned off when this is requested from a list of tenants
        for (Application application : controller.applications().asList(tenant.name())) {
            if (application.id().instance().isDefault()) { // TODO: Skip non-default applications until supported properly
                // Recursive applications are rendered in full; otherwise just name and url.
                if (recurseOverApplications(request))
                    toSlime(applicationArray.addObject(), application, request);
                else
                    toSlime(application, applicationArray.addObject(), request);
            }
        }
    }
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        athenzTenant.contact().ifPresent(c -> {
            object.setString("propertyUrl", c.propertyUrl().toString());
            object.setString("contactsUrl", c.url().toString());
            object.setString("issueCreationUrl", c.issueTrackerUrl().toString());
            Cursor contactsArray = object.setArray("contacts");
            c.persons().forEach(persons -> {
                Cursor personArray = contactsArray.addArray();
                persons.forEach(personArray::addString);
            });
        });
    }
}
/** Renders the compact form of a tenant used in tenant lists: name, metadata and a url to the full resource. */
private void tenantInTenantsListToSlime(Tenant tenant, URI requestURI, Cursor object) {
    object.setString("tenant", tenant.name().value());
    Cursor metaData = object.setObject("metaData");
    metaData.setString("type", tentantType(tenant));
    if (tenant instanceof AthenzTenant) {
        AthenzTenant athenzTenant = (AthenzTenant) tenant;
        metaData.setString("athensDomain", athenzTenant.domain().getName());
        metaData.setString("property", athenzTenant.property().id());
    }
    object.setString("url", withPath("/application/v4/tenant/" + tenant.name().value(), requestURI).toString());
}
/** Returns a copy of the given URI with the host and port from the given URI and the path set to the given path */
private URI withPath(String newPath, URI uri) {
    try {
        // Query and fragment are intentionally dropped (passed as null).
        return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), newPath, null, null);
    }
    catch (URISyntaxException e) {
        throw new RuntimeException("Will not happen", e);
    }
}
/** Adds a "bcpStatus" object with the deployment's rotation status; only prod deployments get one. */
private void setRotationStatus(Deployment deployment, Map<String, RotationStatus> healthStatus, Cursor object) {
    if (deployment.zone().environment().equals(Environment.prod)) {
        object.setObject("bcpStatus")
              .setString("rotationStatus", findRotationStatus(deployment, healthStatus).name());
    }
}
/**
 * Returns the status of the first rotation endpoint whose name contains the deployment's
 * environment and region (in DNS form), or UNKNOWN if no endpoint matches.
 */
private RotationStatus findRotationStatus(Deployment deployment, Map<String, RotationStatus> healthStatus) {
    // Hoist the loop-invariant DNS conversions out of the loop.
    String environment = toDns(deployment.zone().environment().value());
    String region = toDns(deployment.zone().region().value());
    // Iterate entries directly: the old keySet() loop looked each key up again via getOrDefault.
    for (Map.Entry<String, RotationStatus> endpoint : healthStatus.entrySet()) {
        if (endpoint.getKey().contains(environment) && endpoint.getKey().contains(region)) {
            return endpoint.getValue();
        }
    }
    return RotationStatus.UNKNOWN;
}
/** Translates an id to its DNS form by replacing underscores with dashes. */
private String toDns(String id) {
    return id.replace("_", "-");
}
/**
 * Parses the given string as a long.
 *
 * @param valueOrNull the value to parse, or null
 * @param defaultWhenNull the value to return when valueOrNull is null
 * @throws IllegalArgumentException if the value is non-null and not a valid long
 */
private long asLong(String valueOrNull, long defaultWhenNull) {
    if (valueOrNull == null) return defaultWhenNull;
    try {
        return Long.parseLong(valueOrNull);
    }
    catch (NumberFormatException e) {
        // Keep the parse failure as cause — the original dropped it, losing debugging context.
        throw new IllegalArgumentException("Expected an integer but got '" + valueOrNull + "'", e);
    }
}
/** Renders a job run: id, platform version, application revision (when known), reason and timestamp. */
private void toSlime(JobStatus.JobRun jobRun, Cursor object) {
    object.setLong("id", jobRun.id());
    object.setString("version", jobRun.platform().toFullString());
    if (!jobRun.application().isUnknown())
        toSlime(jobRun.application(), object.setObject("revision"));
    object.setString("reason", jobRun.reason());
    object.setLong("at", jobRun.at().toEpochMilli());
}
/**
 * Reads the given stream (at most 1 MB) and parses it as JSON into a Slime.
 *
 * @throws RuntimeException wrapping the IOException if reading the stream fails
 */
private Slime toSlime(InputStream jsonStream) {
    try {
        byte[] jsonBytes = IOUtils.readBytes(jsonStream, 1000 * 1000);
        return SlimeUtils.jsonToSlime(jsonBytes);
    } catch (IOException e) {
        // Propagate the cause: the original threw a bare RuntimeException, silently dropping it.
        throw new RuntimeException(e);
    }
}
/** Throws ForbiddenException unless the request's authenticated identity is an admin of the given Athenz domain. */
private void throwIfNotAthenzDomainAdmin(AthenzDomain tenantDomain, HttpRequest request) {
    AthenzIdentity identity = getUserPrincipal(request).getIdentity();
    // Membership is checked against ZMS using the service principal, not the user's own credentials.
    boolean isDomainAdmin = athenzClientFactory.createZmsClientWithServicePrincipal()
            .isDomainAdmin(identity, tenantDomain);
    if ( ! isDomainAdmin) {
        throw new ForbiddenException(
                String.format("The user '%s' is not admin in Athenz domain '%s'", identity.getFullName(), tenantDomain.getName()));
    }
}
/** Returns the user id of the request's principal, or empty if the principal is not an Athenz user. */
private static Optional<UserId> getUserId(HttpRequest request) {
    return Optional.of(getUserPrincipal(request))
            .map(AthenzPrincipal::getIdentity)
            .filter(AthenzUser.class::isInstance)
            .map(AthenzUser.class::cast)
            .map(AthenzUser::getName)
            .map(UserId::new);
}
/** Returns the request's authenticated Athenz principal, failing with a 500 if it is missing or of the wrong type. */
private static AthenzPrincipal getUserPrincipal(HttpRequest request) {
    Principal principal = request.getJDiscRequest().getUserPrincipal();
    if (principal == null)
        throw new InternalServerErrorException("Expected a user principal");
    if (principal instanceof AthenzPrincipal)
        return (AthenzPrincipal) principal;
    throw new InternalServerErrorException(String.format("Expected principal of type %s, got %s",
                                                         AthenzPrincipal.class.getSimpleName(),
                                                         principal.getClass().getName()));
}
/** Returns the named field of the given object, or throws if it is not present/valid. */
private Inspector mandatory(String key, Inspector object) {
    Inspector field = object.field(key);
    if ( ! field.valid())
        throw new IllegalArgumentException("'" + key + "' is missing");
    return field;
}
/** Returns the named field of the given object as a string, or empty if it is not present. */
private Optional<String> optional(String key, Inspector object) {
    return SlimeUtils.optionalString(object.field(key));
}
/** Joins the given path elements with '/'. */
private static String path(Object... elements) {
    return Joiner.on("/").join(elements);
}
/** Renders the compact application form used in lists: application, instance, and a url to the full resource. */
private void toSlime(Application application, Cursor object, HttpRequest request) {
    object.setString("application", application.id().application().value());
    object.setString("instance", application.id().instance().value());
    object.setString("url", withPath("/application/v4/tenant/" + application.id().tenant().value() +
                                     "/application/" + application.id().application().value(), request.getUri()).toString());
}
/**
 * Renders the result of a deploy activation: revision id, package size, prepare log messages,
 * and the config change actions (required restarts and refeeds) reported by the config server.
 */
private Slime toSlime(ActivateResult result) {
    Slime slime = new Slime();
    Cursor object = slime.setObject();
    object.setString("revisionId", result.revisionId().id());
    object.setLong("applicationZipSize", result.applicationZipSizeBytes());
    Cursor logArray = object.setArray("prepareMessages");
    if (result.prepareResponse().log != null) {
        for (Log logMessage : result.prepareResponse().log) {
            Cursor logObject = logArray.addObject();
            logObject.setLong("time", logMessage.time);
            logObject.setString("level", logMessage.level);
            logObject.setString("message", logMessage.message);
        }
    }
    Cursor changeObject = object.setObject("configChangeActions");

    // Services that must be restarted for the new config to take effect.
    Cursor restartActionsArray = changeObject.setArray("restart");
    for (RestartAction restartAction : result.prepareResponse().configChangeActions.restartActions) {
        Cursor restartActionObject = restartActionsArray.addObject();
        restartActionObject.setString("clusterName", restartAction.clusterName);
        restartActionObject.setString("clusterType", restartAction.clusterType);
        restartActionObject.setString("serviceType", restartAction.serviceType);
        serviceInfosToSlime(restartAction.services, restartActionObject.setArray("services"));
        stringsToSlime(restartAction.messages, restartActionObject.setArray("messages"));
    }

    // Document types that must be refed after the change.
    Cursor refeedActionsArray = changeObject.setArray("refeed");
    for (RefeedAction refeedAction : result.prepareResponse().configChangeActions.refeedActions) {
        Cursor refeedActionObject = refeedActionsArray.addObject();
        refeedActionObject.setString("name", refeedAction.name);
        refeedActionObject.setBool("allowed", refeedAction.allowed);
        refeedActionObject.setString("documentType", refeedAction.documentType);
        refeedActionObject.setString("clusterName", refeedAction.clusterName);
        serviceInfosToSlime(refeedAction.services, refeedActionObject.setArray("services"));
        stringsToSlime(refeedAction.messages, refeedActionObject.setArray("messages"));
    }
    return slime;
}
/** Renders each service info as an object with serviceName, serviceType, configId and hostName. */
private void serviceInfosToSlime(List<ServiceInfo> serviceInfoList, Cursor array) {
    for (ServiceInfo service : serviceInfoList) {
        Cursor object = array.addObject();
        object.setString("serviceName", service.serviceName);
        object.setString("serviceType", service.serviceType);
        object.setString("configId", service.configId);
        object.setString("hostName", service.hostName);
    }
}
/** Adds each of the given strings to the given array. */
private void stringsToSlime(List<String> strings, Cursor array) {
    strings.forEach(array::addString);
}
/**
 * Reads the entire stream into a single string, or returns null when the stream is empty.
 * The scanner (and with it the stream) is closed when done — the original leaked it.
 */
private String readToString(InputStream stream) {
    // The \A delimiter makes the scanner return the whole remaining input as one token.
    try (Scanner scanner = new Scanner(stream).useDelimiter("\\A")) {
        return scanner.hasNext() ? scanner.next() : null;
    }
}
/** Returns whether the given version is among the versions currently known to this system. */
private boolean systemHasVersion(Version version) {
    return controller.versionStatus().versions().stream().anyMatch(v -> v.versionNumber().equals(version));
}
/** Returns the version given in the request body, or the current system version when the body is empty. */
private Version decideDeployVersion(HttpRequest request) {
    String requestVersion = readToString(request.getData());
    return requestVersion == null ? controller.systemVersion() : new Version(requestVersion);
}
/** Renders a deployment cost: total tco, waste and utilization, plus one object per cluster. */
public static void toSlime(DeploymentCost deploymentCost, Cursor object) {
    object.setLong("tco", (long)deploymentCost.getTco());
    object.setLong("waste", (long)deploymentCost.getWaste());
    object.setDouble("utilization", deploymentCost.getUtilization());
    Cursor clustersObject = object.setObject("cluster");
    for (Map.Entry<String, ClusterCost> clusterEntry : deploymentCost.getCluster().entrySet())
        toSlime(clusterEntry.getValue(), clustersObject.setObject(clusterEntry.getKey()));
}
/**
 * Renders the cost of a single cluster: size, flavor, tco/waste, the dominating resource,
 * and both the result ("util") and system ("usage") utilization numbers.
 */
private static void toSlime(ClusterCost clusterCost, Cursor object) {
    object.setLong("count", clusterCost.getClusterInfo().getHostnames().size());
    object.setString("resource", getResourceName(clusterCost.getResultUtilization()));
    object.setDouble("utilization", clusterCost.getResultUtilization().getMaxUtilization());
    // Cast to long, not int: matches the DeploymentCost overload above and avoids
    // truncating values beyond the int range.
    object.setLong("tco", (long)clusterCost.getTco());
    object.setLong("waste", (long)clusterCost.getWaste());
    object.setString("flavor", clusterCost.getClusterInfo().getFlavor());
    object.setDouble("flavorCost", clusterCost.getClusterInfo().getFlavorCost());
    object.setDouble("flavorCpu", clusterCost.getClusterInfo().getFlavorCPU());
    object.setDouble("flavorMem", clusterCost.getClusterInfo().getFlavorMem());
    object.setDouble("flavorDisk", clusterCost.getClusterInfo().getFlavorDisk());
    object.setString("type", clusterCost.getClusterInfo().getClusterType().name());

    Cursor utilObject = object.setObject("util");
    utilObject.setDouble("cpu", clusterCost.getResultUtilization().getCpu());
    utilObject.setDouble("mem", clusterCost.getResultUtilization().getMemory());
    utilObject.setDouble("disk", clusterCost.getResultUtilization().getDisk());
    utilObject.setDouble("diskBusy", clusterCost.getResultUtilization().getDiskBusy());

    Cursor usageObject = object.setObject("usage");
    usageObject.setDouble("cpu", clusterCost.getSystemUtilization().getCpu());
    usageObject.setDouble("mem", clusterCost.getSystemUtilization().getMemory());
    usageObject.setDouble("disk", clusterCost.getSystemUtilization().getDisk());
    usageObject.setDouble("diskBusy", clusterCost.getSystemUtilization().getDiskBusy());

    Cursor hostnamesArray = object.setArray("hostnames");
    for (String hostname : clusterCost.getClusterInfo().getHostnames())
        hostnamesArray.addString(hostname);
}
/** Returns the name of the resource with the highest utilization: "mem", "disk", "diskbusy" or "cpu" (default). */
private static String getResourceName(ClusterUtilization utilization) {
    double max = utilization.getMaxUtilization();
    if (utilization.getMemory() == max) return "mem";
    if (utilization.getDisk() == max) return "disk";
    if (utilization.getDiskBusy() == max) return "diskbusy";
    return "cpu";
}
/** Returns whether the 'recursive' property asks for tenant-level (or deeper) recursion. */
private static boolean recurseOverTenants(HttpRequest request) {
    return recurseOverApplications(request) || "tenant".equals(request.getProperty("recursive"));
}
/** Returns whether the 'recursive' property asks for application-level (or deeper) recursion. */
private static boolean recurseOverApplications(HttpRequest request) {
    return recurseOverDeployments(request) || "application".equals(request.getProperty("recursive"));
}
/** Returns whether the 'recursive' property asks for deployment-level recursion ("all", "true" or "deployment"). */
private static boolean recurseOverDeployments(HttpRequest request) {
    return ImmutableSet.of("all", "true", "deployment").contains(request.getProperty("recursive"));
}
/**
 * Returns the API type string for the given tenant: "ATHENS" or "USER".
 * (Note: the method name misspells 'tenant'; it is kept as-is for compatibility with its callers in this file.)
 */
private static String tentantType(Tenant tenant) {
    if (tenant instanceof AthenzTenant) {
        return "ATHENS";
    } else if (tenant instanceof UserTenant) {
        return "USER";
    }
    throw new IllegalArgumentException("Unknown tenant type: " + tenant.getClass().getSimpleName());
}
/** Returns the request principal's NToken, or throws IllegalArgumentException with the given message prefix. */
private static NToken requireNToken(HttpRequest request, String message) {
    return getUserPrincipal(request).getNToken().orElseThrow(() -> new IllegalArgumentException(
            message + ": No NToken provided"));
}
/** Builds an ApplicationId from the 'tenant', 'application' and 'instance' path segments. */
private static ApplicationId appIdFromPath(Path path) {
    return ApplicationId.from(path.get("tenant"), path.get("application"), path.get("instance"));
}
/** Resolves the JobType named by the 'jobtype' path segment. */
private static JobType jobTypeFromPath(Path path) {
    return JobType.fromJobName(path.get("jobtype"));
}
/** Builds a RunId from the application, job type and run 'number' encoded in the path. */
private static RunId runIdFromPath(Path path) {
    return new RunId(appIdFromPath(path), jobTypeFromPath(path), Long.parseLong(path.get("number")));
}
/**
 * Registers a new application build (application package + test package) from multipart form data.
 * The 'submitOptions' part must contain the full source revision, and the package must declare
 * an Athenz service in deployment.xml matching the tenant's domain.
 */
private HttpResponse submit(String tenant, String application, HttpRequest request) {
    Map<String, byte[]> dataParts = new MultipartParser().parse(request);
    Inspector submitOptions = SlimeUtils.jsonToSlime(dataParts.get(EnvironmentResource.SUBMIT_OPTIONS)).get();
    SourceRevision sourceRevision = toSourceRevision(submitOptions).orElseThrow(() ->
            new IllegalArgumentException("Must specify 'repository', 'branch', and 'commit'"));
    ApplicationPackage applicationPackage = new ApplicationPackage(dataParts.get(EnvironmentResource.APPLICATION_ZIP));
    if ( ! applicationPackage.deploymentSpec().athenzDomain().isPresent())
        throw new IllegalArgumentException("Application must define an Athenz service in deployment.xml!");
    verifyApplicationIdentityConfiguration(tenant, Optional.of(applicationPackage));

    return JobControllerApiHandlerHelper.submitResponse(controller.jobController(), tenant, application,
                                                        sourceRevision,
                                                        applicationPackage.zippedContent(),
                                                        dataParts.get(EnvironmentResource.APPLICATION_TEST_ZIP));
}
} |
nitpick: zipped implies .zip ending, maybe call it gzipped | private void triggerCompression(String oldFileName) {
try {
String zippedFileName = oldFileName + ".gz";
Runtime r = Runtime.getRuntime();
StringBuilder cmd = new StringBuilder("gzip");
cmd.append(" < "). append(oldFileName).append(" > ").append(zippedFileName);
Process p = r.exec(cmd.toString());
NativeIO nativeIO = new NativeIO();
File oldFile = new File(oldFileName);
nativeIO.dropFileFromCache(oldFile);
oldFile.delete();
nativeIO.dropFileFromCache(new File(zippedFileName));
} catch (IOException e) {
}
} | String zippedFileName = oldFileName + ".gz"; | private void triggerCompression(String oldFileName) {
try {
String gzippedFileName = oldFileName + ".gz";
Runtime r = Runtime.getRuntime();
StringBuilder cmd = new StringBuilder("gzip");
cmd.append(" < "). append(oldFileName).append(" > ").append(gzippedFileName);
Process p = r.exec(cmd.toString());
NativeIO nativeIO = new NativeIO();
File oldFile = new File(oldFileName);
nativeIO.dropFileFromCache(oldFile);
oldFile.delete();
nativeIO.dropFileFromCache(new File(gzippedFileName));
} catch (IOException e) {
}
} | class LogThread extends Thread {
LogFileHandler logFileHandler;
long lastFlush = 0;
public LogThread(LogFileHandler logFile) {
super("Logger");
setDaemon(true);
logFileHandler = logFile;
}
@Override
public void run() {
try {
storeLogRecords();
} catch (InterruptedException e) {
} catch (Exception e) {
com.yahoo.protect.Process.logAndDie("Failed storing log records", e);
}
logFileHandler.flush();
}
private void storeLogRecords() throws InterruptedException {
while (!isInterrupted()) {
LogRecord r = logFileHandler.logQueue.poll(100, TimeUnit.MILLISECONDS);
if (r != null) {
if (r == logFileHandler.rotateCmd) {
logFileHandler.internalRotateNow();
lastFlush = System.nanoTime();
} else {
logFileHandler.internalPublish(r);
}
flushIfOld(3, TimeUnit.SECONDS);
} else {
flushIfOld(100, TimeUnit.MILLISECONDS);
}
}
}
private void flushIfOld(long age, TimeUnit unit) {
long now = System.nanoTime();
if (TimeUnit.NANOSECONDS.toMillis(now - lastFlush) > unit.toMillis(age)) {
logFileHandler.flush();
lastFlush = now;
}
}
} | class LogThread extends Thread {
LogFileHandler logFileHandler;
long lastFlush = 0;
public LogThread(LogFileHandler logFile) {
super("Logger");
setDaemon(true);
logFileHandler = logFile;
}
@Override
public void run() {
try {
storeLogRecords();
} catch (InterruptedException e) {
} catch (Exception e) {
com.yahoo.protect.Process.logAndDie("Failed storing log records", e);
}
logFileHandler.flush();
}
private void storeLogRecords() throws InterruptedException {
while (!isInterrupted()) {
LogRecord r = logFileHandler.logQueue.poll(100, TimeUnit.MILLISECONDS);
if (r != null) {
if (r == logFileHandler.rotateCmd) {
logFileHandler.internalRotateNow();
lastFlush = System.nanoTime();
} else {
logFileHandler.internalPublish(r);
}
flushIfOld(3, TimeUnit.SECONDS);
} else {
flushIfOld(100, TimeUnit.MILLISECONDS);
}
}
}
private void flushIfOld(long age, TimeUnit unit) {
long now = System.nanoTime();
if (TimeUnit.NANOSECONDS.toMillis(now - lastFlush) > unit.toMillis(age)) {
logFileHandler.flush();
lastFlush = now;
}
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.