index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/EnvironmentCredentialProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.EnvironmentVariableCredentialsProvider;
import java.util.Properties;
/**
 * {@link CredentialProvider} backed by the standard AWS environment variables
 * (AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY).
 */
public class EnvironmentCredentialProvider extends CredentialProvider {
    /**
     * @param properties AWS configuration properties
     * @param providerKey property key under which provider configuration is nested
     */
    public EnvironmentCredentialProvider(final Properties properties, final String providerKey) {
        super(properties, providerKey);
    }

    /** Convenience constructor without a provider key. */
    public EnvironmentCredentialProvider(final Properties properties) {
        this(properties, null);
    }

    /**
     * Returns a provider that resolves credentials from environment variables;
     * the configured properties are not consulted by this method.
     */
    @Override
    public AWSCredentialsProvider getAwsCredentialsProvider() {
        final AWSCredentialsProvider environmentBacked = new EnvironmentVariableCredentialsProvider();
        return environmentBacked;
    }
}
| 7,300 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/BasicCredentialProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import java.util.Properties;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.accessKeyId;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.secretKey;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.validateBasicProviderConfiguration;
/**
 * {@link CredentialProvider} vending static (access key / secret key) credentials
 * read from the configuration properties.
 */
public class BasicCredentialProvider extends CredentialProvider {
    /**
     * @param properties AWS configuration properties; validated to contain the basic credentials
     * @param providerKey property key under which provider configuration is nested
     */
    public BasicCredentialProvider(final Properties properties, final String providerKey) {
        super(validateBasicProviderConfiguration(properties, providerKey), providerKey);
    }

    /** Convenience constructor without a provider key. */
    public BasicCredentialProvider(Properties properties) {
        this(properties, null);
    }

    /**
     * Returns a provider whose credentials are looked up from the properties
     * on every {@code getCredentials()} call.
     */
    @Override
    public AWSCredentialsProvider getAwsCredentialsProvider() {
        return new AWSCredentialsProvider() {
            @Override
            public AWSCredentials getCredentials() {
                final String configuredAccessKey = properties.getProperty(accessKeyId(providerKey));
                final String configuredSecretKey = properties.getProperty(secretKey(providerKey));
                return new BasicAWSCredentials(configuredAccessKey, configuredSecretKey);
            }

            @Override
            public void refresh() {
                // Static credentials from configuration: nothing to refresh.
            }
        };
    }
}
| 7,301 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/AssumeRoleCredentialsProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.factory.CredentialProviderFactory;
import com.amazonaws.services.securitytoken.AWSSecurityTokenService;
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClientBuilder;
import javax.annotation.Nonnull;
import java.util.Properties;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.getCredentialProviderType;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.validateAssumeRoleCredentialsProvider;
/**
 * {@link CredentialProvider} that assumes an IAM role through STS and exposes
 * the resulting temporary session credentials.
 */
public class AssumeRoleCredentialsProvider extends CredentialProvider {
/**
 * Creates the provider after validating the assume-role configuration.
 *
 * @param properties AWS configuration properties
 * @param providerKey property key under which the assume-role configuration is nested
 */
public AssumeRoleCredentialsProvider(final Properties properties, final String providerKey) {
super(validateAssumeRoleCredentialsProvider(properties, providerKey), providerKey);
}
/** Convenience constructor using the default top-level provider key. */
public AssumeRoleCredentialsProvider(final Properties properties) {
this(properties, AWS_CREDENTIALS_PROVIDER);
}
@Override
public AWSCredentialsProvider getAwsCredentialsProvider() {
// Resolve the "base" provider used to authenticate against STS itself.
final String baseCredentialsProviderKey = AWSConfigConstants.roleCredentialsProvider(providerKey);
final AWSConfigConstants.CredentialProviderType baseCredentialsProviderType = getCredentialProviderType(properties, baseCredentialsProviderKey);
final CredentialProvider baseCredentialsProvider =
CredentialProviderFactory.newCredentialProvider(baseCredentialsProviderType, properties, baseCredentialsProviderKey);
// STS client signed with the base credentials.
// NOTE(review): assumes AWS_REGION is present in the properties — confirm callers always set it.
final AWSSecurityTokenService baseCredentials = AWSSecurityTokenServiceClientBuilder.standard()
.withCredentials(baseCredentialsProvider.getAwsCredentialsProvider())
.withRegion(properties.getProperty(AWSConfigConstants.AWS_REGION))
.build();
// Role ARN, session name and external id are all read relative to this provider's key.
return createAwsCredentialsProvider(
properties.getProperty(AWSConfigConstants.roleArn(providerKey)),
properties.getProperty(AWSConfigConstants.roleSessionName(providerKey)),
properties.getProperty(AWSConfigConstants.externalId(providerKey)),
baseCredentials);
}
// Package-private so tests can substitute the STS client; builds the assume-role provider.
AWSCredentialsProvider createAwsCredentialsProvider(@Nonnull String roleArn,
@Nonnull String roleSessionName,
@Nonnull String externalId,
@Nonnull AWSSecurityTokenService securityTokenService) {
return new STSAssumeRoleSessionCredentialsProvider.Builder(roleArn, roleSessionName)
.withExternalId(externalId)
.withStsClient(securityTokenService)
.build();
}
}
| 7,302 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/factory/CredentialProviderFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.factory;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.AssumeRoleCredentialsProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.BasicCredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.CredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.DefaultCredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.EnvironmentCredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.ProfileCredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.SystemCredentialProvider;
import org.apache.commons.lang3.Validate;
import java.util.Properties;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
/**
 * Factory creating {@link CredentialProvider} instances for the supported
 * {@link CredentialProviderType}s.
 */
public final class CredentialProviderFactory {
    /** Non-instantiable utility class. */
    private CredentialProviderFactory() {
    }

    /**
     * Creates a credential provider of the requested type.
     *
     * A {@code null} type, the {@code AUTO} type, and any unrecognized type all
     * fall back to the AWS default provider chain.
     *
     * @param credentialProviderType requested provider type (may be {@code null})
     * @param awsConfigProps AWS configuration properties (must not be {@code null})
     * @param awsConfigCredentialProviderKey property key under which provider configuration is nested
     * @return a credential provider matching the requested type
     */
    public static CredentialProvider newCredentialProvider(final CredentialProviderType credentialProviderType,
                                                           final Properties awsConfigProps,
                                                           final String awsConfigCredentialProviderKey) {
        Validate.notNull(awsConfigProps, "AWS configuration properties cannot be null");
        if (credentialProviderType == null) {
            return new DefaultCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
        }
        switch (credentialProviderType) {
            case BASIC: {
                // For BASIC, the top-level provider key may legitimately be absent;
                // in that case construct the provider without a provider-key prefix.
                final boolean topLevelKeyAbsent = AWS_CREDENTIALS_PROVIDER.equals(awsConfigCredentialProviderKey)
                        && !awsConfigProps.containsKey(AWS_CREDENTIALS_PROVIDER);
                return topLevelKeyAbsent
                        ? new BasicCredentialProvider(awsConfigProps, null)
                        : new BasicCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
            }
            case PROFILE:
                return new ProfileCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
            case ENV_VARIABLES:
                return new EnvironmentCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
            case SYS_PROPERTIES:
                return new SystemCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
            case ASSUME_ROLE:
                return new AssumeRoleCredentialsProvider(awsConfigProps, awsConfigCredentialProviderKey);
            case AUTO:
            default:
                return new DefaultCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
        }
    }

    /**
     * Convenience overload using the default top-level provider key.
     *
     * @param credentialProviderType requested provider type (may be {@code null})
     * @param awsConfigProps AWS configuration properties (must not be {@code null})
     * @return a credential provider matching the requested type
     */
    public static CredentialProvider newCredentialProvider(final CredentialProviderType credentialProviderType,
                                                           final Properties awsConfigProps) {
        return newCredentialProvider(credentialProviderType, awsConfigProps, AWS_CREDENTIALS_PROVIDER);
    }
}
| 7,303 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/RecordCouldNotBeSentException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Unchecked exception indicating that a record could not be delivered to the sink.
 */
public class RecordCouldNotBeSentException extends RuntimeException {
    // Exceptions are Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /**
     * @param msg description of the delivery failure
     * @param ex underlying cause
     */
    public RecordCouldNotBeSentException(final String msg, final Throwable ex) {
        super(msg, ex);
    }

    /**
     * @param msg description of the delivery failure
     */
    public RecordCouldNotBeSentException(final String msg) {
        super(msg);
    }
}
| 7,304 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/TimeoutExpiredException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Thrown when an operation did not complete within its configured timeout.
 */
public class TimeoutExpiredException extends FlinkKinesisFirehoseException {
    // Exceptions are Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /**
     * @param msg description of what timed out
     * @param ex underlying cause
     */
    public TimeoutExpiredException(final String msg, final Throwable ex) {
        super(msg, ex);
    }

    /**
     * @param msg description of what timed out
     */
    public TimeoutExpiredException(final String msg) {
        super(msg);
    }
}
| 7,305 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/FlinkKinesisFirehoseException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Base checked exception for failures raised by the Flink Kinesis Firehose connector.
 */
public class FlinkKinesisFirehoseException extends Exception {
    // Exceptions are Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /**
     * @param msg description of the failure
     * @param ex underlying cause
     */
    public FlinkKinesisFirehoseException(final String msg, final Throwable ex) {
        super(msg, ex);
    }

    /**
     * @param msg description of the failure
     */
    public FlinkKinesisFirehoseException(final String msg) {
        super(msg);
    }
}
| 7,306 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/SerializationException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Unchecked exception signaling that a record could not be serialized.
 */
public class SerializationException extends RuntimeException {
    // Exceptions are Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /**
     * @param msg description of the serialization failure
     * @param t underlying cause
     */
    public SerializationException(final String msg, final Throwable t) {
        super(msg, t);
    }

    /**
     * Message-only constructor. Made public for consistency with the connector's
     * other exception types; as {@code private} it was unreachable by callers.
     *
     * @param msg description of the serialization failure
     */
    public SerializationException(final String msg) {
        super(msg);
    }
}
| 7,307 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/RecordCouldNotBeBuffered.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Thrown when a record could not be added to the connector's internal buffer.
 */
public class RecordCouldNotBeBuffered extends FlinkKinesisFirehoseException {
    // Exceptions are Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /**
     * @param msg description of the buffering failure
     * @param ex underlying cause
     */
    public RecordCouldNotBeBuffered(final String msg, final Throwable ex) {
        super(msg, ex);
    }

    /**
     * @param msg description of the buffering failure
     */
    public RecordCouldNotBeBuffered(final String msg) {
        super(msg);
    }
}
| 7,308 |
0 | Create_ds/amazon-neptune-sparql-java-sigv4/src/main/java/com/amazonaws/neptune/client | Create_ds/amazon-neptune-sparql-java-sigv4/src/main/java/com/amazonaws/neptune/client/jena/NeptuneJenaSigV4Example.java | /*
* Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.neptune.client.jena;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.neptune.auth.NeptuneApacheHttpSigV4Signer;
import com.amazonaws.neptune.auth.NeptuneSigV4SignerException;
import com.amazonaws.util.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.protocol.HttpContext;
import org.apache.jena.rdfconnection.RDFConnection;
import org.apache.jena.rdfconnection.RDFConnectionRemote;
import org.apache.jena.rdfconnection.RDFConnectionRemoteBuilder;
/**
 * Example showing how to query an IAM-auth-enabled Neptune SPARQL endpoint
 * through Apache Jena, signing every outgoing HTTP request with SigV4 via an
 * Apache HttpClient interceptor.
 */
public class NeptuneJenaSigV4Example {
    /**
     * Entry point. Expects the Neptune endpoint URL as the first argument.
     *
     * @param args program arguments; args[0] is the endpoint URL
     * @throws NeptuneSigV4SignerException if the signer cannot be initialized
     */
    public static void main(String... args) throws NeptuneSigV4SignerException {
        if (args.length == 0 || StringUtils.isNullOrEmpty(args[0])) {
            // Fixed: the original usage message repeated "<your_neptune_endpoint>"
            // where the port placeholder belongs.
            System.err.println("Please specify your endpoint as program argument "
                    + "(e.g.: http://<your_neptune_host>:<your_neptune_port>)");
            System.exit(1);
        }
        final String endpoint = args[0];
        final String regionName = "us-east-1";
        final AWSCredentialsProvider awsCredentialsProvider = new DefaultAWSCredentialsProviderChain();
        final NeptuneApacheHttpSigV4Signer v4Signer = new NeptuneApacheHttpSigV4Signer(regionName, awsCredentialsProvider);
        // Sign each request last, so we sign the final form produced by the interceptor chain.
        final HttpRequestInterceptor signingInterceptor = (req, ctx) -> {
            if (!(req instanceof HttpUriRequest)) {
                throw new HttpException("Not an HttpUriRequest"); // this should never happen
            }
            try {
                v4Signer.signRequest((HttpUriRequest) req);
            } catch (NeptuneSigV4SignerException e) {
                throw new HttpException("Problem signing the request: ", e);
            }
        };
        final HttpClient v4SigningClient =
                HttpClientBuilder.create().addInterceptorLast(signingInterceptor).build();
        RDFConnectionRemoteBuilder builder = RDFConnectionRemote.create()
                .httpClient(v4SigningClient)
                .destination(endpoint)
                // Query only.
                .queryEndpoint("sparql")
                .updateEndpoint("sparql");
        String query = "SELECT * { ?s ?p ?o } LIMIT 100";
        // Whether the connection can be reused depends on the details of the implementation.
        // See example 5.
        try (RDFConnection conn = builder.build()) {
            System.out.println("> Printing query result: ");
            conn.querySelect(query, System.out::println);
        }
    }
}
| 7,309 |
0 | Create_ds/amazon-neptune-sparql-java-sigv4/src/main/java/com/amazonaws/neptune/client | Create_ds/amazon-neptune-sparql-java-sigv4/src/main/java/com/amazonaws/neptune/client/rdf4j/NeptuneSparqlRepository.java | /*
* Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.neptune.client.rdf4j;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.neptune.auth.NeptuneApacheHttpSigV4Signer;
import com.amazonaws.neptune.auth.NeptuneSigV4Signer;
import com.amazonaws.neptune.auth.NeptuneSigV4SignerException;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.protocol.HttpContext;
import org.eclipse.rdf4j.repository.sparql.SPARQLRepository;
import java.io.IOException;
/**
 * SPARQL repository for connecting to Neptune instances.
 *
 * Supports plain (unauthenticated) connections as well as IAM-authenticated
 * ones using AWS Signature V4
 * (https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
 * Choose the constructor matching the desired mode.
 *
 * @author schmdtm
 */
public class NeptuneSparqlRepository extends SPARQLRepository {
    /** URL of the Neptune endpoint (*without* the trailing "/sparql" servlet). */
    private final String endpointUrl;
    /** The name of the region in which Neptune is running. */
    private final String regionName;
    /** Whether or not authentication is enabled. */
    private final boolean authenticationEnabled;
    /** The credentials provider, offering credentials for signing the request. */
    private final AWSCredentialsProvider awsCredentialsProvider;
    /** The signature V4 signer used to sign the request. */
    private NeptuneSigV4Signer<HttpUriRequest> v4Signer;

    /**
     * Set up a NeptuneSparqlRepository with V4 signing disabled.
     *
     * @param endpointUrl the prefix of the Neptune endpoint (without "/sparql" suffix)
     */
    public NeptuneSparqlRepository(final String endpointUrl) {
        super(getSparqlEndpoint(endpointUrl));
        // The remaining fields are only relevant when authentication is enabled.
        this.authenticationEnabled = false;
        this.endpointUrl = null;
        this.awsCredentialsProvider = null;
        this.regionName = null;
    }

    /**
     * Set up a NeptuneSparqlRepository with V4 signing enabled.
     *
     * @param endpointUrl the prefix of the Neptune endpoint (without "/sparql" suffix)
     * @param awsCredentialsProvider the credentials provider used for authentication
     * @param regionName name of the region in which Neptune is running
     *
     * @throws NeptuneSigV4SignerException in case something goes wrong with signer initialization
     */
    public NeptuneSparqlRepository(
            final String endpointUrl, final AWSCredentialsProvider awsCredentialsProvider,
            final String regionName)
            throws NeptuneSigV4SignerException {
        super(getSparqlEndpoint(endpointUrl));
        this.authenticationEnabled = true;
        this.endpointUrl = endpointUrl;
        this.awsCredentialsProvider = awsCredentialsProvider;
        this.regionName = regionName;
        initAuthenticatingHttpClient();
    }

    /**
     * Wrap the HTTP client to do Signature V4 signing using Apache HTTP's interceptor mechanism.
     * No-op when authentication is disabled.
     *
     * @throws NeptuneSigV4SignerException in case something goes wrong with signer initialization
     */
    protected void initAuthenticatingHttpClient() throws NeptuneSigV4SignerException {
        if (!authenticationEnabled) {
            return; // auth not initialized, no signing performed
        }
        v4Signer = new NeptuneApacheHttpSigV4Signer(regionName, awsCredentialsProvider);
        // Register the signer as the LAST interceptor so it signs the final
        // request form produced by the rest of the interceptor chain.
        final HttpRequestInterceptor signV4Interceptor = (request, context) -> {
            if (!(request instanceof HttpUriRequest)) {
                throw new HttpException("Not an HttpUriRequest"); // this should never happen
            }
            try {
                v4Signer.signRequest((HttpUriRequest) request);
            } catch (final NeptuneSigV4SignerException e) {
                throw new HttpException("Problem signing the request: ", e);
            }
        };
        final HttpClient v4SigningClient =
                HttpClientBuilder.create().addInterceptorLast(signV4Interceptor).build();
        setHttpClient(v4SigningClient);
    }

    /**
     * Append the "/sparql" servlet to the endpoint URL. This is fixed, by convention in Neptune.
     *
     * @param endpointUrl generic endpoint/server URL
     * @return the SPARQL endpoint URL for the given server
     */
    private static String getSparqlEndpoint(final String endpointUrl) {
        return endpointUrl + "/sparql";
    }
}
| 7,310 |
0 | Create_ds/amazon-neptune-sparql-java-sigv4/src/main/java/com/amazonaws/neptune/client | Create_ds/amazon-neptune-sparql-java-sigv4/src/main/java/com/amazonaws/neptune/client/rdf4j/NeptuneRdf4JSigV4Example.java | /*
* Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.neptune.client.rdf4j;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.neptune.auth.NeptuneSigV4SignerException;
import com.amazonaws.util.StringUtils;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.query.Update;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
/**
* Small example demonstrating how to use NeptuneSparqlRepository with SignatureV4 in combination
* with the RDF4J library (see http://rdf4j.org/). The example uses the {@link NeptuneSparqlRepository}
* class contained in this package, which extends RDF4J's SparqlRepository class by IAM authentication.
* <p>
* Before running this code, make sure you've got everything setup properly, in particular:
* <ol>
* <li> Make sure that your AWS credentials are available in the provider chain, see
* <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html">
* DefaultAWSCredentialsProviderChain</a> for more information.</li>
* <li> Start the main method by passing in your endpoint, e.g. "http://<my_neptune_host>:<my_neptune_port>".
* The server will send a <code>"SELECT * WHERE { ?s ?p ?o } LIMIT 10"</code> query against your endpoint.</li>
* </ol>
*
* @author schmdtm
*/
public final class NeptuneRdf4JSigV4Example {
/**
 * Region in which the Neptune instance runs.
 */
private static final String TEST_REGION = "us-east-1";
/**
 * Sample select query, limited to ten results.
 */
private static final String SAMPLE_QUERY = "SELECT * WHERE { ?s ?p ?o } LIMIT 10";
/**
 * Sample SPARQL UPDATE query inserting a single triple.
 */
private static final String SAMPLE_UPDATE = "INSERT DATA { <http://Alice> <http://knows> <http://Bob> }";
/**
 * Expected error body when sending an unsigned request to an auth-enabled Neptune server.
 * NOTE(review): not referenced by the methods visible here — presumably consumed by tests; confirm before removing.
 */
static final String ACCESS_DENIED_MSG = "{\"status\":\"403 Forbidden\",\"message\":\"Access Denied!\"}";
/**
 * Entry point. args[0] is the Neptune endpoint; an optional non-empty args[1]
 * overrides the default sample query.
 *
 * @param args arguments
 * @throws Exception in case there are problems
 */
public static void main(final String[] args) throws Exception {
    if (args.length == 0 || StringUtils.isNullOrEmpty(args[0])) {
        System.err.println("Please specify your endpoint as program argument "
                + "(e.g.: http://<my_neptune_host>:<my_neptune_port>)");
        System.exit(1);
    }
    final String endpoint = args[0];
    // Fall back to the built-in sample query unless a non-empty override was given.
    String query = SAMPLE_QUERY;
    if (args.length > 1 && !StringUtils.isNullOrEmpty(args[1])) {
        query = args[1];
    }
    executeSignedQueryRequest(endpoint, query);
}
/**
 * Executes the given SPARQL query against the endpoint over a SigV4-signed
 * connection and prints the result to stdout.
 *
 * Credentials are resolved via the default AWS provider chain; the region is
 * fixed to {@link #TEST_REGION}.
 *
 * @param endpointUrl prefix of the Neptune endpoint (without "/sparql" suffix)
 * @param query the SPARQL query string to evaluate
 * @throws NeptuneSigV4SignerException in case there's a problem signing the request
 */
protected static void executeSignedQueryRequest(final String endpointUrl, final String query)
throws NeptuneSigV4SignerException {
final AWSCredentialsProvider awsCredentialsProvider = new DefaultAWSCredentialsProviderChain();
final NeptuneSparqlRepository neptuneSparqlRepo =
new NeptuneSparqlRepository(endpointUrl, awsCredentialsProvider, TEST_REGION);
try {
// the repository must be initialized before connections can be opened
neptuneSparqlRepo.initialize();
evaluateAndPrintQueryResult(query, neptuneSparqlRepo);
} finally {
// always release repository resources, even if evaluation fails
neptuneSparqlRepo.shutDown();
}
}
/**
 * Executes the sample SPARQL UPDATE against the endpoint over a SigV4-signed
 * connection.
 *
 * @param endpointUrl prefix of the Neptune endpoint (without "/sparql" suffix)
 * @throws NeptuneSigV4SignerException in case there's a problem signing the request
 */
protected static void executeSignedInsertRequest(final String endpointUrl)
        throws NeptuneSigV4SignerException {
    final AWSCredentialsProvider credentialsChain = new DefaultAWSCredentialsProviderChain();
    final NeptuneSparqlRepository repository =
            new NeptuneSparqlRepository(endpointUrl, credentialsChain, TEST_REGION);
    try {
        repository.initialize();
        try (RepositoryConnection conn = repository.getConnection()) {
            conn.prepareUpdate(SAMPLE_UPDATE).execute();
            System.out.println("Update query executed!");
        }
    } finally {
        // release repository resources regardless of the update outcome
        repository.shutDown();
    }
}
/**
* Example for unsigned request.
*
* @param endpointUrl of the endpoint to which to send the request
*/
protected static void executeUnsignedQueryRequest(final String endpointUrl) {
// use the simple constructor version which skips auth initialization
final NeptuneSparqlRepository neptuneSparqlRepo = new NeptuneSparqlRepository(endpointUrl);
try {
neptuneSparqlRepo.initialize();
evaluateAndPrintQueryResult(SAMPLE_QUERY, neptuneSparqlRepo);
} finally {
neptuneSparqlRepo.shutDown();
}
}
/**
* Evaluate the query and print the query result.
*
* @param queryString the query string to evaluate
* @param repo the repository over which to evaluate the query
*/
protected static void evaluateAndPrintQueryResult(final String queryString, final Repository repo) {
try (RepositoryConnection conn = repo.getConnection()) {
final TupleQuery query = conn.prepareTupleQuery(queryString);
System.out.println("> Printing query result: ");
final TupleQueryResult res = query.evaluate();
while (res.hasNext()) {
System.err.println("{");
final BindingSet bs = res.next();
boolean first = true;
for (final String varName : bs.getBindingNames()) {
if (first) {
System.out.print(" { ");
} else {
System.out.print(", ");
}
System.out.print("?" + varName + " -> " + bs.getBinding(varName));
first = false;
}
System.out.println("}");
System.out.println("}");
}
}
}
/**
* Constructor.
*/
private NeptuneRdf4JSigV4Example() {
}
}
| 7,311 |
0 | Create_ds/amazon-neptune-sparql-java-sigv4/src/main/java/com/amazonaws/neptune/client | Create_ds/amazon-neptune-sparql-java-sigv4/src/main/java/com/amazonaws/neptune/client/rdf4j/package-info.java | /*
* Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/**
* RDF4J client examples for Amazon Neptune.
*/
package com.amazonaws.neptune.client.rdf4j;
| 7,312 |
0 | Create_ds/airpal/src/test/java/com/airbnb/airpal/core | Create_ds/airpal/src/test/java/com/airbnb/airpal/core/execution/QueryExecutionAuthorizerTest.java | package com.airbnb.airpal.core.execution;
import com.airbnb.airpal.presto.Table;
import com.google.common.collect.ImmutableSet;
import org.junit.Test;
import java.util.Set;
import static com.airbnb.airpal.core.execution.QueryExecutionAuthorizer.tablesUsedByQuery;
import static org.junit.Assert.assertEquals;
public class QueryExecutionAuthorizerTest
{
static String defaultConnector = "hive";
static String defaultSchema = "default";
static String TEST_CREATE_TABLE = "CREATE TABLE the_gibson.users_pii AS SELECT * FROM users;";
@Test
public void testTableReferencesCreateTable()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_CREATE_TABLE, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "users"),
new Table(defaultConnector, "the_gibson", "users_pii")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_CREATE_VIEW = "CREATE VIEW the_gibson.users_pii AS SELECT pii_col FROM users;";
@Test
public void testTableReferencesCreateView()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_CREATE_VIEW, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "users"),
new Table(defaultConnector, "the_gibson", "users_pii")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_REPLACE_VIEW = "CREATE OR REPLACE VIEW the_gibson.users AS SELECT pii_col FROM users;";
@Test
public void testTableReferencesReplaceView()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_REPLACE_VIEW, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "users"),
new Table(defaultConnector, "the_gibson", "users")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_ALTER_TABLE = "ALTER TABLE default.users RENAME TO the_gibson.users_pii; SELECT pii_col FROM the_gibson.users_pii;";
@Test
public void testTableReferencesRenameTable()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_ALTER_TABLE, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, "default", "users"),
new Table(defaultConnector, "the_gibson", "users_pii")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_ALTER_TABLE2 = "USE SCHEMA the_gibson; ALTER TABLE default.users RENAME TO users_pii; SELECT pii_col FROM users_pii;";
@Test
public void testTableReferencesRenameTable2()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_ALTER_TABLE2, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, "default", "users"),
new Table(defaultConnector, "the_gibson", "users_pii")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_ALTER_TABLE3 = "USE CATALOG cassandra; USE SCHEMA the_gibson; ALTER TABLE hive.default.users RENAME TO users_pii; SELECT pii_col FROM users_pii;";
@Test
public void testTableReferencesRenameTable3()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_ALTER_TABLE3, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, "default", "users"),
new Table("cassandra", "the_gibson", "users_pii")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_ACCESS_PII_UNION_VIEW = "SELECT str_col FROM the_gibson.users UNION ALL SELECT pii_str_col FROM users;";
@Test
public void testTableReferencesSelectUnion()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_ACCESS_PII_UNION_VIEW, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "users"),
new Table(defaultConnector, "the_gibson", "users")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_ACCESS_WITH = "WITH a AS (SELECT * FROM users) SELECT * FROM the_gibson.users UNION ALL SELECT * FROM a;";
@Test
public void testTableReferencesSelectWith()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_ACCESS_WITH, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "users"),
new Table(defaultConnector, "the_gibson", "users")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_BASIC_SELECT_COUNT = "SELECT COUNT(*) FROM users;";
@Test
public void testTableReferencesSelectCount()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_BASIC_SELECT_COUNT, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "users")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_SELECT_ALL = "SELECT * FROM users;";
@Test
public void testTableReferencesSelectStar()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_SELECT_ALL, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "users")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_SELECT_SUBQUERY = "SELECT * FROM (SELECT pii_str_col FROM users) UNION ALL SELECT * FROM the_gibson.users;";
@Test
public void testTableReferencesSelectStarSubquery()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_SELECT_SUBQUERY, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "users"),
new Table(defaultConnector, "the_gibson", "users")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_DROP_VIEW = "DROP VIEW my_view";
@Test
public void testTableReferencesDropView()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_DROP_VIEW, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "my_view")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_DROP_TABLE = "DROP TABLE my_table";
@Test
public void testTableReferencesDropTable()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_DROP_TABLE, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "my_table")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_SELECT_ALIAS = "SELECT pii FROM users u JOIN users_pii p ON u.id = p.id;";
@Test
public void testTableReferencesJoinAlias()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_SELECT_ALIAS, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(defaultConnector, defaultSchema, "users"),
new Table(defaultConnector, defaultSchema, "users_pii")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_SELECT_CONNECTOR = "SELECT pii FROM cassandra.pii.users u JOIN users_pii p ON u.id = p.id;";
@Test
public void testTableReferencesSelectConnector()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_SELECT_CONNECTOR, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table("cassandra", "pii", "users"),
new Table(defaultConnector, defaultSchema, "users_pii")
);
assertEquals(tablesExpected, tablesUsed);
}
static String TEST_CROSS_JOIN_SUBSELECT = "SELECT * " +
"FROM cassandra.pii.users u " +
"CROSS JOIN (SELECT * FROM hive.pii.users) u2 " +
"WHERE NOT u2.id IS NULL;";
@Test
public void testTableReferencesCrossJoinSubselect()
throws Exception
{
Set<Table> tablesUsed = tablesUsedByQuery(TEST_CROSS_JOIN_SUBSELECT, defaultConnector, defaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table("cassandra", "pii", "users"),
new Table("hive", "pii", "users")
);
assertEquals(tablesExpected, tablesUsed);
}
@Test
public void testAlternateDefaultConnectorSchemaReferences()
throws Exception
{
String alternateDefaultConnector = "cassandra";
String alternateDefaultSchema = "default2";
Set<Table> tablesUsed = tablesUsedByQuery(TEST_SELECT_ALIAS, alternateDefaultConnector, alternateDefaultSchema);
Set<Table> tablesExpected = ImmutableSet.of(
new Table(alternateDefaultConnector, alternateDefaultSchema, "users"),
new Table(alternateDefaultConnector, alternateDefaultSchema, "users_pii")
);
assertEquals(tablesExpected, tablesUsed);
}
} | 7,313 |
0 | Create_ds/airpal/src/main/java/com/airbnb | Create_ds/airpal/src/main/java/com/airbnb/airpal/AirpalConfiguration.java | package com.airbnb.airpal;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.airlift.units.DataSize;
import io.dropwizard.Configuration;
import io.dropwizard.db.DataSourceFactory;
import io.dropwizard.flyway.FlywayFactory;
import io.dropwizard.util.Duration;
import lombok.Getter;
import lombok.Setter;
import org.secnod.dropwizard.shiro.ShiroConfiguration;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.net.URI;
/**
 * Dropwizard configuration for Airpal: Presto connection settings, S3 result
 * storage, output limits, persistence (DataSource + Flyway), and Shiro auth.
 * Lombok generates the getters/setters.
 */
public class AirpalConfiguration extends Configuration
{
    // URI of the Presto coordinator queries are submitted to.
    @Getter
    @Setter
    @JsonProperty
    private URI prestoCoordinator = null;
    // User name sent to Presto with each query.
    @Getter
    @Setter
    @JsonProperty
    @NotNull
    private String prestoUser = "airpalChangeMe";
    // Value reported to Presto as the query "source".
    @Getter
    @Setter
    @JsonProperty
    @NotNull
    private String prestoSource = "airpal";
    // Default catalog applied to unqualified table names.
    @Getter
    @Setter
    @JsonProperty
    @NotNull
    private String prestoCatalog = "hive";
    // Default schema applied to unqualified table names.
    @Getter
    @Setter
    @JsonProperty
    @NotNull
    private String prestoSchema = "default";
    // Enables Presto client debug mode when true.
    @Getter
    @Setter
    @JsonProperty
    @NotNull
    private boolean prestoDebug = false;
    // Time window considered for usage statistics (per the name; confirm against the usage store).
    @Getter
    @Setter
    @JsonProperty
    @NotNull
    private Duration usageWindow = Duration.hours(6);
    // AWS credentials and bucket for S3 result storage (relevant when useS3 is true).
    @Getter
    @Setter
    @JsonProperty
    private String s3SecretKey;
    @Getter
    @Setter
    @JsonProperty
    private String s3AccessKey;
    @Getter
    @Setter
    @JsonProperty
    private String s3Bucket;
    // Class name of an S3 EncryptionMaterialsProvider (presumably for client-side encryption).
    @Getter
    @Setter
    @JsonProperty
    private String s3EncryptionMaterialsProvider;
    // Destination schema for tables created through Airpal.
    @Getter
    @Setter
    @JsonProperty
    private String createTableDestinationSchema = "airpal";
    // I/O buffer size. NOTE(review): not annotated @JsonProperty like the other
    // fields -- confirm whether it is intended to be settable from the YAML config.
    @Getter
    @Setter
    private DataSize bufferSize = DataSize.valueOf("512kB");
    // Hard cap on the size of persisted query output.
    @Getter
    @Setter
    @JsonProperty
    @NotNull
    private DataSize maxOutputSize = DataSize.valueOf("1GB");
    // JDBC DataSource for Airpal's own metadata store.
    @Getter
    @Setter
    @Valid
    @JsonProperty
    @NotNull
    private DataSourceFactory dataSourceFactory = new DataSourceFactory();
    // Flyway settings for migrating the metadata store schema.
    @Getter
    @Setter
    @Valid
    @JsonProperty
    @NotNull
    private FlywayFactory flywayFactory = new FlywayFactory();
    // Shiro security configuration.
    @Getter
    @Setter
    @Valid
    @JsonProperty
    @NotNull
    private ShiroConfiguration shiro;
    // When true, query results are stored in S3 instead of locally.
    @Getter
    @Setter
    @Valid
    @JsonProperty
    @NotNull
    private boolean useS3 = false;
    // When true, persisted output is compressed.
    @Getter
    @Setter
    @Valid
    @JsonProperty
    @NotNull
    private boolean compressedOutput = false;
}
| 7,314 |
0 | Create_ds/airpal/src/main/java/com/airbnb | Create_ds/airpal/src/main/java/com/airbnb/airpal/AirpalApplication.java | package com.airbnb.airpal;
import com.airbnb.airpal.AirpalApplicationBase;
import com.airbnb.airpal.modules.AirpalModule;
import com.airbnb.airpal.modules.DropwizardModule;
import com.google.common.collect.ImmutableList;
import com.google.inject.AbstractModule;
import io.dropwizard.ConfiguredBundle;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
import io.dropwizard.views.ViewBundle;
import org.secnod.dropwizard.shiro.ShiroBundle;
import org.secnod.dropwizard.shiro.ShiroConfiguration;
import java.util.Arrays;
/**
 * Main Airpal application: supplies the Guice modules and appends the Shiro
 * auth bundle to the bundles provided by {@link AirpalApplicationBase}.
 */
public class AirpalApplication extends AirpalApplicationBase<AirpalConfiguration>
{
    @Override
    public Iterable<AbstractModule> getModules(AirpalConfiguration config, Environment environment)
    {
        return Arrays.asList(new DropwizardModule(config, environment),
                new AirpalModule(config, environment));
    }

    @Override
    public Iterable<ConfiguredBundle<AirpalConfiguration>> getConfiguredBundles()
    {
        ImmutableList.Builder<ConfiguredBundle<AirpalConfiguration>> builder = ImmutableList.builder();
        // Keep the base class's bundles (addAll replaces the previous manual copy loop)
        // and append the Shiro bundle, narrowed to this configuration's Shiro section.
        builder.addAll(super.getConfiguredBundles());
        builder.add(new ShiroBundle<AirpalConfiguration>() {
            @Override
            protected ShiroConfiguration narrow(AirpalConfiguration configuration)
            {
                return configuration.getShiro();
            }
        });
        return builder.build();
    }

    /**
     * Entry point: delegates to the Dropwizard application runner.
     *
     * @param args command-line arguments (e.g. "server config.yml")
     * @throws Exception if the application fails to start
     */
    public static void main(final String[] args) throws Exception {
        final AirpalApplication application = new AirpalApplication();
        application.run(args);
    }
}
| 7,315 |
0 | Create_ds/airpal/src/main/java/com/airbnb | Create_ds/airpal/src/main/java/com/airbnb/airpal/AirpalApplicationBase.java | package com.airbnb.airpal;
import com.airbnb.airpal.core.AirpalUserFactory;
import com.airbnb.airpal.core.health.PrestoHealthCheck;
import com.airbnb.airpal.resources.ExecuteResource;
import com.airbnb.airpal.resources.FilesResource;
import com.airbnb.airpal.resources.HealthResource;
import com.airbnb.airpal.resources.PingResource;
import com.airbnb.airpal.resources.QueriesResource;
import com.airbnb.airpal.resources.QueryResource;
import com.airbnb.airpal.resources.ResultsPreviewResource;
import com.airbnb.airpal.resources.S3FilesResource;
import com.airbnb.airpal.resources.SessionResource;
import com.airbnb.airpal.resources.TablesResource;
import com.airbnb.airpal.resources.UserResource;
import com.airbnb.airpal.resources.UsersResource;
import com.airbnb.airpal.resources.sse.SSEEventSourceServlet;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Stage;
import io.dropwizard.Application;
import io.dropwizard.Bundle;
import io.dropwizard.ConfiguredBundle;
import io.dropwizard.assets.AssetsBundle;
import io.dropwizard.db.DataSourceFactory;
import io.dropwizard.flyway.FlywayBundle;
import io.dropwizard.flyway.FlywayFactory;
import io.dropwizard.jetty.BiDiGzipHandler;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
import io.dropwizard.views.ViewBundle;
import org.eclipse.jetty.server.Handler;
import javax.servlet.ServletRegistration;
import java.util.Arrays;
import static org.glassfish.jersey.message.MessageProperties.IO_BUFFER_SIZE;
/**
 * Base Dropwizard application for Airpal. Registers bundles during initialize()
 * and, during run(), builds the Guice injector, registers Jersey resources,
 * health checks, and the Server-Sent-Events servlet.
 *
 * @param <T> the concrete configuration type
 */
public abstract class AirpalApplicationBase<T extends AirpalConfiguration>
        extends Application<T>
{
    private static final String SERVER_SENT_EVENTS = "text/event-stream";
    // Guice injector built in run(); visible to subclasses.
    protected Injector injector;
    @Override
    public void initialize(Bootstrap<T> bootstrap)
    {
        for (ConfiguredBundle<T> configuredBundle : getConfiguredBundles()) {
            bootstrap.addBundle(configuredBundle);
        }
        for (Bundle bundle : getBundles()) {
            bootstrap.addBundle(bundle);
        }
    }
    /** Guice modules used to build the injector; supplied by subclasses. */
    public abstract Iterable<AbstractModule> getModules(T config, Environment environment);
    /** Configured bundles to register; subclasses may extend this list. */
    public Iterable<ConfiguredBundle<T>> getConfiguredBundles()
    {
        return Arrays.asList(new ViewBundle());
    }
    /** Unconfigured bundles: static assets plus Flyway migrations. */
    public Iterable<Bundle> getBundles()
    {
        return Arrays.asList(
                new AssetsBundle("/assets", "/app", "index.html"),
                new FlywayBundle<T>()
                {
                    @Override
                    public DataSourceFactory getDataSourceFactory(T configuration)
                    {
                        return configuration.getDataSourceFactory();
                    }
                    @Override
                    public FlywayFactory getFlywayFactory(T configuration)
                    {
                        return configuration.getFlywayFactory();
                    }
                });
    }
    @Override
    public void run(T config, Environment environment)
            throws Exception
    {
        this.injector = Guice.createInjector(Stage.PRODUCTION, getModules(config, environment));
        // Propagate the configured buffer size to Jersey's message I/O buffer.
        System.setProperty(IO_BUFFER_SIZE, String.valueOf(config.getBufferSize().toBytes()));
        environment.healthChecks().register("presto", injector.getInstance(PrestoHealthCheck.class));
        // Register all Jersey resources through the injector.
        environment.jersey().register(injector.getInstance(ExecuteResource.class));
        environment.jersey().register(injector.getInstance(QueryResource.class));
        environment.jersey().register(injector.getInstance(QueriesResource.class));
        environment.jersey().register(injector.getInstance(UserResource.class));
        environment.jersey().register(injector.getInstance(UsersResource.class));
        environment.jersey().register(injector.getInstance(TablesResource.class));
        environment.jersey().register(injector.getInstance(HealthResource.class));
        environment.jersey().register(injector.getInstance(PingResource.class));
        environment.jersey().register(injector.getInstance(SessionResource.class));
        environment.jersey().register(injector.getInstance(FilesResource.class));
        environment.jersey().register(injector.getInstance(ResultsPreviewResource.class));
        environment.jersey().register(injector.getInstance(S3FilesResource.class));
        environment.jersey().register(injector.getInstance(AirpalUserFactory.class))
;
        // Setup SSE (Server Sent Events)
        ServletRegistration.Dynamic sseServlet = environment.servlets()
                .addServlet("updates", injector.getInstance(SSEEventSourceServlet.class));
        sseServlet.setAsyncSupported(true);
        sseServlet.addMapping("/api/updates/subscribe");
        // Disable GZIP content encoding for SSE endpoints
        // (gzip buffering would delay event delivery to clients).
        environment.lifecycle().addServerLifecycleListener(server -> {
            for (Handler handler : server.getChildHandlersByClass(BiDiGzipHandler.class)) {
                ((BiDiGzipHandler) handler).addExcludedMimeTypes(SERVER_SENT_EVENTS);
            }
        });
    }
}
| 7,316 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/QueryInfoClient.java | package com.airbnb.airpal.presto;
import com.facebook.presto.execution.Input;
import com.facebook.presto.execution.QueryStats;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.MoreObjects;
import com.google.common.net.MediaType;
import io.airlift.http.client.FullJsonResponseHandler;
import io.airlift.http.client.HttpClient;
import io.airlift.http.client.HttpStatus;
import io.airlift.http.client.Request;
import io.airlift.json.JsonCodec;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import java.net.URI;
import java.util.Set;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.net.HttpHeaders.ACCEPT;
import static com.google.common.net.HttpHeaders.USER_AGENT;
import static io.airlift.http.client.FullJsonResponseHandler.createFullJsonResponseHandler;
import static io.airlift.http.client.Request.Builder.prepareGet;
/**
 * HTTP client for fetching basic query information (stats and inputs) from a
 * Presto query-info URI.
 */
@Slf4j
public class QueryInfoClient
{
    private static final String USER_AGENT_VALUE = QueryInfoClient.class.getSimpleName() +
            "/" +
            MoreObjects.firstNonNull(QueryInfoClient.class.getPackage().getImplementationVersion(), "unknown");
    private final HttpClient httpClient;
    private final FullJsonResponseHandler<BasicQueryInfo> queryInfoHandler;

    public QueryInfoClient(HttpClient httpClient, JsonCodec<BasicQueryInfo> queryInfoCodec)
    {
        this.httpClient = httpClient;
        this.queryInfoHandler = createFullJsonResponseHandler(queryInfoCodec);
    }

    /**
     * Fetch query info from the given URI.
     *
     * @param infoUri the query-info URI reported by Presto; must not be null
     * @return the parsed {@link BasicQueryInfo}, or {@code null} if the request
     *         failed, returned a non-200 status, or had no parseable body
     */
    public BasicQueryInfo from(URI infoUri)
    {
        infoUri = checkNotNull(infoUri, "infoUri is null");
        Request request = prepareGet()
                .setHeader(USER_AGENT, USER_AGENT_VALUE)
                .setHeader(ACCEPT, MediaType.JSON_UTF_8.toString())
                .setUri(infoUri)
                .build();
        // (Removed unused locals "cause", "start", and "attempts" left over from
        // an earlier retry implementation.)
        try {
            FullJsonResponseHandler.JsonResponse<BasicQueryInfo> response =
                    httpClient.execute(request, queryInfoHandler);
            if (response.getStatusCode() == HttpStatus.OK.code() && response.hasValue()) {
                return response.getValue();
            }
        }
        catch (RuntimeException e) {
            log.error("Caught error in QueryInfoClient load", e);
        }
        // Non-200, unparseable body, or transport error: signal absence with null.
        return null;
    }

    /** Subset of Presto's QueryInfo: execution stats plus the query's input tables. */
    @Data
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class BasicQueryInfo
    {
        private final QueryStats queryStats;
        private final Set<Input> inputs;

        @JsonCreator
        public BasicQueryInfo(
                @JsonProperty("queryStats") QueryStats queryStats,
                @JsonProperty("inputs") Set<Input> inputs)
        {
            this.queryStats = queryStats;
            this.inputs = inputs;
        }
    }
}
| 7,317 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/ClientSessionFactory.java | package com.airbnb.airpal.presto;
import com.facebook.presto.client.ClientSession;
import com.google.common.collect.ImmutableMap;
import io.airlift.units.Duration;
import javax.inject.Provider;
import java.net.URI;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.MoreObjects.firstNonNull;
import static io.airlift.units.Duration.succinctDuration;
import static java.util.concurrent.TimeUnit.MINUTES;
/**
 * Factory for Presto {@code ClientSession} instances that share one server
 * provider, source, catalog, UTC time zone, locale, and session timeout.
 */
public class ClientSessionFactory
{
    private final boolean debug;
    private final String defaultSchema;
    private final String catalog;
    private final String source;
    private final String user;
    private final Provider<URI> server;
    private final String timeZoneId;
    private final Locale locale;
    private final Duration clientSessionTimeout;

    public ClientSessionFactory(Provider<URI> server, String user, String source, String catalog, String defaultSchema, boolean debug, Duration clientSessionTimeout)
    {
        this.server = server;
        this.user = user;
        this.source = source;
        this.catalog = catalog;
        this.defaultSchema = defaultSchema;
        this.debug = debug;
        // Sessions are always created in UTC, regardless of the JVM default zone.
        this.timeZoneId = TimeZone.getTimeZone("UTC").getID();
        this.locale = Locale.getDefault();
        // Fall back to a 1-minute timeout when none is configured.
        this.clientSessionTimeout = firstNonNull(clientSessionTimeout, succinctDuration(1, MINUTES));
    }

    // Single place that assembles a ClientSession; the public overloads all
    // delegate here (previously the construction was duplicated three times).
    private ClientSession createSession(String user, String schema)
    {
        return new ClientSession(server.get(),
                user,
                source,
                catalog,
                schema,
                timeZoneId,
                locale,
                ImmutableMap.<String, String>of(),
                null,
                debug,
                clientSessionTimeout
        );
    }

    /** Create a session for the given user and schema. */
    public ClientSession create(String user, String schema)
    {
        return createSession(user, schema);
    }

    /** Create a session for the configured default user and the given schema. */
    public ClientSession create(String schema)
    {
        return createSession(user, schema);
    }

    /** Create a session for the configured default user and default schema. */
    public ClientSession create()
    {
        return createSession(user, defaultSchema);
    }
}
| 7,318 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/Util.java | package com.airbnb.airpal.presto;
import com.google.common.base.Joiner;
public class Util {
private static Joiner FQN_JOINER = Joiner.on('.').skipNulls();
public static String fqn(String databaseName, String tableName) {
return FQN_JOINER.join(databaseName, tableName);
}
public static String fqn(String connectorId, String databaseName, String tableName) {
return FQN_JOINER.join(connectorId, databaseName, tableName);
}
} | 7,319 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/Table.java | package com.airbnb.airpal.presto;
import com.facebook.presto.execution.Column;
import com.facebook.presto.execution.Input;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.hubspot.rosetta.StoredAsJson;
import lombok.Data;
import lombok.ToString;
import javax.annotation.concurrent.Immutable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static com.google.common.base.Preconditions.checkNotNull;
// Immutable identifier for a Presto table (connector.schema.table) plus its
// column names. Lombok @Data supplies getters; equals/hashCode are overridden
// manually below.
@ToString
@Data
@Immutable
@JsonIgnoreProperties(ignoreUnknown = true)
public class Table
{
    private static Splitter TABLE_PART_SPLITTER = Splitter.on(".").omitEmptyStrings().trimResults();
    private static Joiner TABLE_PART_JOINER = Joiner.on(".").skipNulls();
    private final String connectorId;
    private final String schema;
    private final String table;
    // Persisted as a JSON blob (Rosetta @StoredAsJson).
    @StoredAsJson
    private final ImmutableList<String> columns;
    /**
     * JSON deserialization constructor; all identity parts are required,
     * and the column list is defensively copied.
     */
    @JsonCreator
    protected Table(@JsonProperty("connectorId") String connectorId,
                    @JsonProperty("schema") String schema,
                    @JsonProperty("table") String table,
                    @JsonProperty("columns") List<String> columns)
    {
        this.connectorId = checkNotNull(connectorId, "connectorId is null");
        this.schema = checkNotNull(schema, "schema is null");
        this.table = checkNotNull(table, "table is null");
        this.columns = ImmutableList.copyOf(checkNotNull(columns, "columns is null"));
    }
    /** Convenience constructor with an empty column list. */
    public Table(String connectorId,
                 String schema,
                 String table)
    {
        this(connectorId, schema, table, Collections.<String>emptyList());
    }
    /**
     * Parse "connector.schema.table" or "schema.table" (the latter defaults
     * the connector to "hive").
     *
     * @throws IllegalArgumentException if the string has fewer than 2 or more than 3 parts
     */
    public static Table valueOf(String s)
    {
        List<String> parts = TABLE_PART_SPLITTER.splitToList(s);
        if (parts.size() == 3) {
            return new Table(parts.get(0), parts.get(1), parts.get(2));
        }
        else if (parts.size() == 2) {
            return new Table("hive", parts.get(0), parts.get(1));
        }
        else {
            throw new IllegalArgumentException("Table identifier parts not found.");
        }
    }
    /** Build a Table from a Presto execution Input, copying its column names. */
    public static Table fromInput(Input input)
    {
        List<String> columns = new ArrayList<>(input.getColumns().size());
        for (Column c : input.getColumns()) {
            columns.add(c.getName());
        }
        return new Table(input.getConnectorId(), input.getSchema(), input.getTable(), columns);
    }
    /** Fully-qualified name, e.g. "hive.default.users". */
    @JsonProperty("fqn")
    public String getFqn()
    {
        return TABLE_PART_JOINER.join(getConnectorId(), getSchema(), getTable());
    }
    // Hash/equality are based on connectorId/schema/table only; columns are ignored.
    @Override
    public int hashCode()
    {
        int result = getConnectorId().hashCode();
        result = 31 * result + getSchema().hashCode();
        result = 31 * result + getTable().hashCode();
        return result;
    }
    // NOTE(review): equals also accepts Presto Input instances, so a Table can
    // equal an Input but (presumably) not vice versa -- this violates equals
    // symmetry. Looks intentional for set/lookup interop; confirm before relying
    // on mixed-type collections.
    @Override
    public boolean equals(Object obj)
    {
        if (obj == this) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (obj instanceof Input) {
            Input other = (Input)obj;
            return getConnectorId().equals(other.getConnectorId()) &&
                    getSchema().equals(other.getSchema()) &&
                    getTable().equals(other.getTable());
        } else if (obj instanceof Table) {
            Table other = (Table)obj;
            return getConnectorId().equals(other.getConnectorId()) &&
                    getSchema().equals(other.getSchema()) &&
                    getTable().equals(other.getTable());
        }
        return false;
    }
}
| 7,320 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/QueryRunner.java | package com.airbnb.airpal.presto;
import com.facebook.presto.client.ClientSession;
import com.facebook.presto.client.QueryResults;
import com.facebook.presto.client.StatementClient;
import io.airlift.http.client.HttpClient;
import io.airlift.json.JsonCodec;
import java.io.Closeable;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.airlift.json.JsonCodec.jsonCodec;
/**
 * Thin wrapper that starts Presto statement executions against one
 * {@code ClientSession} over a shared {@code HttpClient}.
 */
public class QueryRunner
        implements Closeable
{
    private final JsonCodec<QueryResults> queryResultsCodec;
    private final ClientSession session;
    private final HttpClient httpClient;
    protected QueryRunner(ClientSession session, JsonCodec<QueryResults> queryResultsCodec, HttpClient httpClient)
    {
        this.session = checkNotNull(session, "session is null");
        this.queryResultsCodec = checkNotNull(queryResultsCodec, "queryResultsCodec is null");
        this.httpClient = httpClient;
    }
    /** Start executing the given query; returns the streaming statement client. */
    public StatementClient startInternalQuery(String query)
    {
        return new StatementClient(httpClient, queryResultsCodec, session, query);
    }
    @Override
    public void close()
    {
        // NOTE(review): this closes the HttpClient, which QueryRunnerFactory
        // shares across every runner it creates -- closing one runner appears
        // to shut the client down for all of them. Confirm intended ownership.
        httpClient.close();
    }
    /** Creates QueryRunners that share one session factory and one HttpClient. */
    public static class QueryRunnerFactory
    {
        private final ClientSessionFactory sessionFactory;
        private final HttpClient httpClient;
        public QueryRunnerFactory(ClientSessionFactory sessionFactory, HttpClient httpClient)
        {
            this.httpClient = httpClient;
            this.sessionFactory = sessionFactory;
        }
        /** Runner bound to a session for the given user and schema. */
        public QueryRunner create(String user, String schema)
        {
            return new QueryRunner(sessionFactory.create(user, schema), jsonCodec(QueryResults.class), httpClient);
        }
        /** Runner bound to a session with the factory's default user/schema. */
        public QueryRunner create()
        {
            return new QueryRunner(sessionFactory.create(), jsonCodec(QueryResults.class), httpClient);
        }
    }
}
| 7,321 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/PartitionedTable.java | package com.airbnb.airpal.presto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Function;
import com.google.common.base.Splitter;
import lombok.Getter;
import lombok.ToString;
import javax.annotation.Nullable;
import java.util.List;
/**
 * A {@link Table} narrowed to a single partition. A {@code null} partitionName
 * means "the whole table".
 */
@ToString
public class PartitionedTable extends Table
{
    private static final Splitter TABLE_PART_SPLITTER = Splitter.on(".").omitEmptyStrings().trimResults();

    // Partition identifier, e.g. "d=2013-11-20"; may be null.
    @Getter
    private final String partitionName;

    protected PartitionedTable(@JsonProperty("connectorId") String connectorId,
                               @JsonProperty("schema") String schema,
                               @JsonProperty("table") String table,
                               @JsonProperty("partition") String partitionName,
                               @JsonProperty("columns") List<String> columns) {
        super(connectorId, schema, table, columns);
        this.partitionName = partitionName;
    }

    public PartitionedTable(String connectorId,
                            String schema,
                            String table,
                            String partitionName) {
        super(connectorId, schema, table);
        this.partitionName = partitionName;
    }

    public PartitionedTable(String connectorId,
                            String schema,
                            String table) {
        this(connectorId,
             schema,
             table,
             null);
    }

    /** Copy of this table pointing at a different partition. */
    public PartitionedTable withPartitionName(String partitionName)
    {
        return new PartitionedTable(getConnectorId(),
                getSchema(),
                getTable(),
                partitionName,
                getColumns());
    }

    /** Wrap a plain Table as a PartitionedTable with no partition selected. */
    public static PartitionedTable fromTable(Table table)
    {
        return new PartitionedTable(table.getConnectorId(),
                table.getSchema(),
                table.getTable(),
                null,
                table.getColumns());
    }

    /** Strip the partition information, returning a plain Table. */
    public Table asTable()
    {
        return new Table(getConnectorId(),
                getSchema(),
                getTable(),
                getColumns());
    }

    @Override
    public int hashCode()
    {
        // BUG FIX: the last term previously hashed getTable() a second time
        // (guarded by a null check that can never fire, since Table rejects a
        // null table name). It now hashes partitionName, matching equals().
        return getConnectorId().hashCode() +
                getSchema().hashCode() +
                getTable().hashCode() +
                ((partitionName == null) ? 0 : partitionName.hashCode());
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj instanceof PartitionedTable) {
            PartitionedTable otherTable = (PartitionedTable) obj;
            return getConnectorId().equals(otherTable.getConnectorId()) &&
                    getSchema().equals(otherTable.getSchema()) &&
                    getTable().equals(otherTable.getTable()) &&
                    partitionsSame(partitionName, otherTable.getPartitionName());
        } else {
            return false;
        }
    }

    /** Null-safe partition-name equality: both-null is same, one-null is different. */
    public static boolean partitionsSame(String partition1, String partition2)
    {
        if (partition1 == null) {
            return partition2 == null;
        }
        return partition1.equals(partition2);
    }

    /**
     * Parse a dotted identifier of 2-4 parts:
     * "connector.schema.table.partition", "connector.schema.table", or
     * "schema.table" (connector defaults to "hive").
     *
     * @throws IllegalArgumentException for any other number of parts
     */
    public static PartitionedTable valueOf(String s)
    {
        List<String> parts = TABLE_PART_SPLITTER.splitToList(s);
        if (parts.size() == 4) {
            // String of the form hive.default.request_search.d=2013-11-20
            return new PartitionedTable(parts.get(0), parts.get(1), parts.get(2), parts.get(3));
        } else if (parts.size() == 3) {
            // String of the form hive.default.request_search
            return new PartitionedTable(parts.get(0), parts.get(1), parts.get(2));
        } else if (parts.size() == 2) {
            // String of the form default.request_search
            return new PartitionedTable("hive", parts.get(0), parts.get(1));
        } else {
            throw new IllegalArgumentException("Table identifier parts not found.");
        }
    }

    /** Guava Function that upcasts a PartitionedTable to Table. */
    public static class PartitionedTableToTable implements Function<PartitionedTable, Table>
    {
        @Nullable
        @Override
        public Table apply(PartitionedTable input)
        {
            return input;
        }
    }
}
| 7,322 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/ForQueryRunner.java | package com.airbnb.airpal.presto;
import javax.inject.Qualifier;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Binding qualifier marking injection points associated with the Presto
 * {@code QueryRunner} (e.g. its dedicated executor or HTTP client).
 */
@Retention(RUNTIME)
@Target({FIELD, PARAMETER, METHOD})
@Qualifier
public @interface ForQueryRunner
{
} | 7,323 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/hive/HivePartition.java | package com.airbnb.airpal.presto.hive;
import com.facebook.presto.client.ClientTypeSignature;
import com.facebook.presto.client.ClientTypeSignatureParameter;
import com.facebook.presto.client.Column;
import com.facebook.presto.spi.type.TypeSignature;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import lombok.Getter;
import org.joda.time.DateTime;
import javax.annotation.concurrent.Immutable;
import java.util.List;
import static java.lang.String.format;
/**
 * A Presto {@link Column} representing a Hive partition column together with
 * the set of partition values observed for it.
 */
@Immutable
public class HivePartition extends Column
{
    /** Raw values observed for this partition column. */
    private final List<Object> values;

    @JsonCreator
    public HivePartition(@JsonProperty("name") String name,
                         @JsonProperty("type") String type,
                         @JsonProperty("values") List<Object> values) {
        super(name, type, new ClientTypeSignature(TypeSignature.parseTypeSignature(type)));
        this.values = values;
    }

    @JsonProperty
    public List<Object> getValues() {
        return values;
    }

    /** Wraps an existing column, attaching the given partition values. */
    public static HivePartition fromColumn(Column column, List<Object> values) {
        return new HivePartition(column.getName(), column.getType(), values);
    }

    /** Renders every value of the partition as a "name=value" id string. */
    public static List<String> getPartitionIds(HivePartition partition)
    {
        ImmutableList.Builder<String> ids = ImmutableList.builder();
        String name = partition.getName();
        for (Object value : partition.getValues()) {
            ids.add(getPartitionId(name, value));
        }
        return ids.build();
    }

    /** Formats a single partition value as "name=value". */
    public static String getPartitionId(String partitionName, Object partitionValue)
    {
        return format("%s=%s", partitionName, partitionValue.toString());
    }

    /** One partition value plus metadata, as serialized for API clients. */
    public static class HivePartitionItem
    {
        @JsonProperty
        @Getter
        private final String name;

        @JsonProperty
        @Getter
        private final String type;

        @JsonProperty
        @Getter
        private final Object value;

        @JsonProperty
        @Getter
        private final DateTime lastUpdated;

        @JsonCreator
        public HivePartitionItem(@JsonProperty("name") String name,
                                 @JsonProperty("type") String type,
                                 @JsonProperty("value") Object value,
                                 @JsonProperty("lastUpdated") DateTime lastUpdated)
        {
            this.name = name;
            this.type = type;
            this.value = value;
            this.lastUpdated = lastUpdated;
        }
    }
}
| 7,324 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/hive/HiveColumn.java | package com.airbnb.airpal.presto.hive;
import com.facebook.presto.client.ClientTypeSignature;
import com.facebook.presto.client.ClientTypeSignatureParameter;
import com.facebook.presto.client.Column;
import com.facebook.presto.spi.type.TypeSignature;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import javax.annotation.concurrent.Immutable;
/**
 * A Presto {@link Column} enriched with two Hive-specific attributes:
 * whether the column is a partition key and whether it is nullable.
 */
@Immutable
public class HiveColumn extends Column {
    private final boolean isPartition;
    private final boolean isNullable;

    @JsonCreator
    public HiveColumn(@JsonProperty("name") String name,
                      @JsonProperty("type") String type,
                      @JsonProperty("isPartition") boolean isPartition,
                      @JsonProperty("isNullable") boolean isNullable) {
        super(name, type, new ClientTypeSignature(TypeSignature.parseTypeSignature(type)));
        this.isPartition = isPartition;
        this.isNullable = isNullable;
    }

    /** Builds a HiveColumn from a plain Presto column plus the two Hive flags. */
    public static HiveColumn fromColumn(Column column, boolean isNullable, boolean isPartition) {
        return new HiveColumn(column.getName(), column.getType(), isPartition, isNullable);
    }

    @JsonProperty
    public boolean isPartition() {
        return isPartition;
    }

    @JsonProperty
    public boolean isNullable() {
        return isNullable;
    }
}
| 7,325 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/metadata/PartitionedTableWithValue.java | package com.airbnb.airpal.presto.metadata;
import com.airbnb.airpal.presto.Table;
import com.airbnb.airpal.presto.hive.HivePartition;
import com.google.common.base.Optional;
import lombok.Value;
/**
 * Cache key used by PreviewTableCache: a table, an optional partition column,
 * and the partition value to filter on. Lombok {@code @Value} generates the
 * constructor, getters, equals/hashCode and toString.
 */
@Value
public class PartitionedTableWithValue
{
    private final Table table;
    private final Optional<HivePartition> partition;
    private final String value;
}
| 7,326 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/metadata/ColumnCache.java | package com.airbnb.airpal.presto.metadata;
import com.airbnb.airpal.core.BackgroundCacheLoader;
import com.airbnb.airpal.core.execution.QueryClient;
import com.airbnb.airpal.presto.QueryRunner;
import com.airbnb.airpal.presto.Util;
import com.airbnb.airpal.presto.hive.HiveColumn;
import com.airbnb.airpal.presto.hive.HivePartition;
import com.facebook.presto.client.ClientTypeSignature;
import com.facebook.presto.client.Column;
import com.facebook.presto.client.QueryResults;
import com.facebook.presto.client.StatementClient;
import com.facebook.presto.spi.type.TypeSignature;
import com.google.common.base.Function;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import io.airlift.units.Duration;
import lombok.extern.slf4j.Slf4j;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import static com.airbnb.airpal.presto.QueryRunner.QueryRunnerFactory;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
/**
 * Caches per-table column and partition metadata, loaded by running
 * "SHOW COLUMNS" / "SHOW PARTITIONS" statements against Presto. Entries expire
 * after the configured lifetimes and are reloaded on the supplied executor.
 */
@Slf4j
public class ColumnCache
{
    // "SHOW COLUMNS" results keyed by fully-qualified table name.
    private final LoadingCache<String, List<HiveColumn>> schemaTableCache;
    // "SHOW PARTITIONS" results keyed by fully-qualified table name.
    private final LoadingCache<String, List<HivePartition>> partitionCache;
    private final QueryRunnerFactory queryRunnerFactory;

    /**
     * @param queryRunnerFactory creates the Presto query runners used to load entries
     * @param columnCacheLifetime expire-after-write lifetime for column entries
     * @param partitionCacheLifetime expire-after-write lifetime for partition entries
     * @param executor runs asynchronous cache reloads
     */
    public ColumnCache(final QueryRunnerFactory queryRunnerFactory,
                       final Duration columnCacheLifetime,
                       final Duration partitionCacheLifetime,
                       final ExecutorService executor)
    {
        this.queryRunnerFactory = checkNotNull(queryRunnerFactory, "queryRunnerFactory session was null!");
        checkNotNull(executor, "executor was null!");
        ListeningExecutorService listeningExecutor = MoreExecutors.listeningDecorator(executor);
        // Loader that refreshes column entries off-thread (see BackgroundCacheLoader.reload).
        BackgroundCacheLoader<String, List<HiveColumn>> columnLoader = new BackgroundCacheLoader<String,
                List<HiveColumn>>(listeningExecutor)
        {
            @Override
            public List<HiveColumn> load(String fqTableName)
            {
                return queryColumns(format("SHOW COLUMNS FROM %s", fqTableName));
            }
        };
        // Loader that refreshes partition entries off-thread.
        BackgroundCacheLoader<String, List<HivePartition>> partitionLoader = new BackgroundCacheLoader<String,
                List<HivePartition>>(listeningExecutor)
        {
            @Override
            public List<HivePartition> load(String fqTableName) throws Exception
            {
                return queryPartitions(format("SHOW PARTITIONS FROM %s", fqTableName));
            }
        };
        this.schemaTableCache = CacheBuilder.newBuilder()
                .expireAfterWrite(Math.round(columnCacheLifetime.getValue()),
                        columnCacheLifetime.getUnit())
                .build(columnLoader);
        this.partitionCache = CacheBuilder.newBuilder()
                .expireAfterWrite(Math.round(partitionCacheLifetime.getValue()),
                        partitionCacheLifetime.getUnit())
                .build(partitionLoader);
    }

    /**
     * Runs the given "SHOW PARTITIONS" query (60s timeout) and regroups the
     * row-oriented results into one HivePartition per column, where each
     * partition carries the column's values across all rows.
     */
    private List<HivePartition> queryPartitions(String query)
    {
        final ImmutableList.Builder<HivePartition> cache = ImmutableList.builder();
        final Map<Column, List<Object>> objects = Maps.newHashMap();
        QueryRunner queryRunner = queryRunnerFactory.create();
        QueryClient queryClient = new QueryClient(queryRunner, io.dropwizard.util.Duration.seconds(60), query);
        try {
            queryClient.executeWith(new Function<StatementClient, Void>() {
                @Nullable
                @Override
                public Void apply(StatementClient client)
                {
                    QueryResults results = client.current();
                    if (results.getData() != null && results.getColumns() != null) {
                        final List<Column> columns = results.getColumns();
                        for (Column column : columns) {
                            objects.put(column, Lists.newArrayList());
                        }
                        // Transpose: collect each row's i-th cell under the i-th column.
                        for (List<Object> row : results.getData()) {
                            for (int i = 0; i < row.size(); i++) {
                                Column column = columns.get(i);
                                objects.get(column).add(row.get(i));
                            }
                        }
                    }
                    return null;
                }
            });
        }
        catch (QueryClient.QueryTimeOutException e) {
            log.error("Caught timeout loading columns", e);
        }
        for (Map.Entry<Column, List<Object>> entry : objects.entrySet()) {
            cache.add(HivePartition.fromColumn(entry.getKey(), entry.getValue()));
        }
        return cache.build();
    }

    /**
     * Runs the given "SHOW COLUMNS" query (60s timeout) and converts each
     * result row into a HiveColumn.
     */
    private List<HiveColumn> queryColumns(String query)
    {
        final ImmutableList.Builder<HiveColumn> cache = ImmutableList.builder();
        QueryRunner queryRunner = queryRunnerFactory.create();
        QueryClient queryClient = new QueryClient(queryRunner, io.dropwizard.util.Duration.seconds(60), query);
        try {
            queryClient.executeWith(new Function<StatementClient, Void>() {
                @Nullable
                @Override
                public Void apply(StatementClient client)
                {
                    QueryResults results = client.current();
                    if (results.getData() != null) {
                        for (List<Object> row : results.getData()) {
                            // Assumes SHOW COLUMNS rows are (name, type, nullable?, partition?)
                            // with booleans at positions 2 and 3 — TODO confirm against the
                            // Presto version in use.
                            Column column = new Column((String) row.get(0), (String) row.get(1), new ClientTypeSignature(TypeSignature.parseTypeSignature((String)row.get(1))));
                            boolean isNullable = (Boolean) row.get(2);
                            boolean isPartition = (Boolean) row.get(3);
                            cache.add(HiveColumn.fromColumn(column, isNullable, isPartition));
                        }
                    }
                    return null;
                }
            });
        }
        catch (QueryClient.QueryTimeOutException e) {
            log.error("Caught timeout loading columns", e);
        }
        return cache.build();
    }

    /** Returns (possibly cached) columns for database.table; loads on miss. */
    public List<HiveColumn> getColumns(String databaseName, String tableName) throws ExecutionException
    {
        return schemaTableCache.get(Util.fqn(databaseName, tableName));
    }

    /** Returns (possibly cached) partitions for database.table; loads on miss. */
    public List<HivePartition> getPartitions(String databaseName, String tableName) throws ExecutionException
    {
        return partitionCache.get(Util.fqn(databaseName, tableName));
    }
}
| 7,327 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/metadata/SchemaCache.java | package com.airbnb.airpal.presto.metadata;
import com.airbnb.airpal.core.BackgroundCacheLoader;
import com.airbnb.airpal.core.execution.QueryClient;
import com.airbnb.airpal.presto.QueryRunner;
import com.facebook.presto.client.QueryResults;
import com.facebook.presto.client.StatementClient;
import com.google.common.base.Function;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import lombok.extern.slf4j.Slf4j;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
@Slf4j
public class SchemaCache
implements Closeable
{
private static final long RELOAD_TIME_MINUTES = 2;
private static final Set<String> EXCLUDED_SCHEMAS = Sets.newHashSet("sys", "information_schema");
private final ExecutorService executor;
private final QueryRunner.QueryRunnerFactory queryRunnerFactory;
private final LoadingCache<String, Map<String, List<String>>> schemaTableCache;
public SchemaCache(final QueryRunner.QueryRunnerFactory queryRunnerFactory,
final ExecutorService executor)
{
this.queryRunnerFactory = checkNotNull(queryRunnerFactory, "queryRunnerFactory session was null!");
this.executor = checkNotNull(executor, "executor was null!");
ListeningExecutorService listeningExecutor = MoreExecutors.listeningDecorator(executor);
BackgroundCacheLoader<String, Map<String, List<String>>> loader =
new BackgroundCacheLoader<String, Map<String, List<String>>>(listeningExecutor)
{
@Override
public Map<String, List<String>> load(String catalogName)
{
return queryMetadata(format(
"SELECT table_catalog, table_schema, table_name " +
"FROM information_schema.tables " +
"WHERE table_catalog = '%s'",
catalogName));
}
};
schemaTableCache = CacheBuilder.newBuilder()
.refreshAfterWrite(RELOAD_TIME_MINUTES, TimeUnit.MINUTES)
.build(loader);
}
private Map<String, List<String>> queryMetadata(String query)
{
final Map<String, List<String>> cache = Maps.newHashMap();
QueryRunner queryRunner = queryRunnerFactory.create();
QueryClient queryClient = new QueryClient(queryRunner, io.dropwizard.util.Duration.seconds(60), query);
try {
queryClient.executeWith(new Function<StatementClient, Void>() {
@Nullable
@Override
public Void apply(StatementClient client)
{
QueryResults results = client.current();
if (results.getData() != null) {
for (List<Object> row : results.getData()) {
String schema = (String) row.get(1);
String table = (String) row.get(2);
if (EXCLUDED_SCHEMAS.contains(schema)) {
continue;
}
List<String> tables = cache.get(schema);
if (tables == null) {
tables = Lists.newArrayList();
cache.put(schema, tables);
}
tables.add(table);
}
}
return null;
}
});
}
catch (QueryClient.QueryTimeOutException e) {
log.error("Caught timeout loading columns", e);
}
return ImmutableMap.copyOf(cache);
}
public void populateCache(final String catalog)
{
checkNotNull(catalog, "schemaName is null");
executor.execute(new Runnable()
{
@Override
public void run()
{
schemaTableCache.refresh(catalog);
}
});
}
public Set<String> getCatalogs()
{
return schemaTableCache.asMap().keySet();
}
public Map<String, List<String>> getSchemaMap(final String catalog)
{
try {
return schemaTableCache.get(catalog);
}
catch (ExecutionException e) {
e.printStackTrace();
return Maps.newHashMap();
}
}
@Override
public void close()
{
executor.shutdownNow();
}
public static ThreadFactory daemonThreadsNamed(String nameFormat)
{
return new ThreadFactoryBuilder().setNameFormat(nameFormat).setDaemon(true).build();
}
}
| 7,328 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto | Create_ds/airpal/src/main/java/com/airbnb/airpal/presto/metadata/PreviewTableCache.java | package com.airbnb.airpal.presto.metadata;
import com.airbnb.airpal.core.BackgroundCacheLoader;
import com.airbnb.airpal.core.execution.QueryClient;
import com.airbnb.airpal.presto.QueryRunner;
import com.airbnb.airpal.presto.Table;
import com.airbnb.airpal.presto.Util;
import com.airbnb.airpal.presto.hive.HivePartition;
import com.facebook.presto.client.QueryResults;
import com.facebook.presto.client.StatementClient;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import io.airlift.units.Duration;
import lombok.extern.slf4j.Slf4j;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
/**
 * Caches small row previews of tables (optionally filtered to one partition
 * value), loaded by running "SELECT * ... LIMIT n" against Presto.
 */
@Slf4j
public class PreviewTableCache
{
    // Preview rows keyed by (table, partition column, partition value).
    private final LoadingCache<PartitionedTableWithValue, List<List<Object>>> previewTableCache;
    private final QueryRunner.QueryRunnerFactory queryRunnerFactory;

    /**
     * @param queryRunnerFactory creates the Presto query runners used to load entries
     * @param previewCacheLifetime expire-after-write lifetime for preview entries
     * @param executor runs asynchronous cache reloads
     * @param previewLimit maximum number of rows fetched per preview
     */
    public PreviewTableCache(final QueryRunner.QueryRunnerFactory queryRunnerFactory,
                             final Duration previewCacheLifetime,
                             final ExecutorService executor,
                             final int previewLimit)
    {
        this.queryRunnerFactory = checkNotNull(queryRunnerFactory, "queryRunnerFactory session was null!");
        ListeningExecutorService listeningExecutor = MoreExecutors.listeningDecorator(executor);
        BackgroundCacheLoader<PartitionedTableWithValue, List<List<Object>>> tableLoader =
                new BackgroundCacheLoader<PartitionedTableWithValue, List<List<Object>>>(listeningExecutor)
                {
                    @Override
                    public List<List<Object>> load(PartitionedTableWithValue key)
                            throws Exception
                    {
                        return queryRows(buildQueryWithLimit(key, previewLimit));
                    }
                };
        this.previewTableCache = CacheBuilder.newBuilder()
                .expireAfterWrite(Math.round(previewCacheLifetime.getValue()),
                        previewCacheLifetime.getUnit())
                // NOTE(review): previewLimit (a row count) is reused as the cache's
                // maximum entry count — confirm this conflation is intended.
                .maximumSize(previewLimit)
                .build(tableLoader);
    }

    /**
     * Builds "SELECT * FROM catalog.schema.table [WHERE part = value] LIMIT n".
     * varchar partition values are single-quoted; other types are rendered raw.
     */
    private static String buildQueryWithLimit(PartitionedTableWithValue tableWithValue, int limit)
    {
        Table table = tableWithValue.getTable();
        HivePartition partition = tableWithValue.getPartition().orNull();
        String partitionClause = "";
        if (partition != null) {
            String value = tableWithValue.getValue();
            String partitionValue = (Objects.equals(partition.getType(), "varchar")) ?
                    "'" + value + "'" :
                    String.valueOf(value);
            partitionClause = format("WHERE %s = %s", partition.getName(), partitionValue);
        }
        return format("SELECT * FROM %s %s LIMIT %d",
                Util.fqn(table.getConnectorId(), table.getSchema(), table.getTable()),
                partitionClause,
                limit);
    }

    /** Runs the given query (60s timeout) and collects all returned rows. */
    private List<List<Object>> queryRows(String query)
    {
        final ImmutableList.Builder<List<Object>> cache = ImmutableList.builder();
        QueryRunner queryRunner = queryRunnerFactory.create();
        QueryClient queryClient = new QueryClient(queryRunner, io.dropwizard.util.Duration.seconds(60), query);
        try {
            queryClient.executeWith(new Function<StatementClient, Void>() {
                @Nullable
                @Override
                public Void apply(StatementClient client)
                {
                    QueryResults results = client.current();
                    if (results.getData() != null) {
                        cache.addAll(results.getData());
                    }
                    return null;
                }
            });
        }
        catch (QueryClient.QueryTimeOutException e) {
            log.error("Caught timeout loading columns", e);
        }
        return cache.build();
    }

    /** Returns (possibly cached) preview rows for the table/partition/value triple. */
    public List<List<Object>> getPreview(final String connectorId,
                                         final String schema,
                                         final String table,
                                         final Optional<HivePartition> partition,
                                         final String partitionValue)
            throws ExecutionException
    {
        return previewTableCache.get(
                new PartitionedTableWithValue(
                        new Table(connectorId,
                                schema,
                                table),
                        partition,
                        partitionValue
                ));
    }
}
| 7,329 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/TableUpdatedCache.java | package com.airbnb.airpal.core;
import com.airbnb.airpal.presto.PartitionedTable;
import com.airbnb.airpal.presto.Table;
import org.joda.time.DateTime;
import java.util.List;
import java.util.Map;
/** Lookup of last-updated timestamps for tables and partitions. */
public interface TableUpdatedCache
{
    /** Returns the last-updated timestamp recorded for {@code table}. */
    public DateTime get(Table table);

    /**
     * Returns last-updated timestamps for the given tables. Presumably mirrors
     * Guava's {@code Cache.getAllPresent} (cached entries only) — confirm with
     * implementations.
     */
    public Map<PartitionedTable, DateTime> getAllPresent(List<? extends Table> tables);

    /**
     * Returns last-updated timestamps for all of the given tables. Presumably
     * mirrors Guava's {@code LoadingCache.getAll} (loads missing entries) —
     * confirm with implementations.
     */
    public Map<PartitionedTable, DateTime> getAll(List<Table> tables);
}
| 7,330 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/AirpalUser.java | package com.airbnb.airpal.core;
import org.joda.time.Duration;
/** An authenticated airpal user and the settings applied to their queries. */
public interface AirpalUser
{
    /** Returns the user name used to attribute and authorize queries. */
    public String getUserName();

    /** Returns the schema selected by default for this user's queries. */
    public String getDefaultSchema();

    /** Returns the maximum duration this user's queries may run. */
    public Duration getQueryTimeout();

    /** Returns the user's access level string (semantics defined by the deployment). */
    public String getAccessLevel();

    /** Returns true when the user holds the given permission string. */
    public boolean isPermitted(String permission);
}
| 7,331 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/AuthorizationUtil.java | package com.airbnb.airpal.core;
import com.airbnb.airpal.presto.Table;
import com.google.common.base.Predicate;
import javax.annotation.Nullable;
import static java.lang.String.format;
/**
 * Static helpers translating (connector, schema, table) triples into
 * permission strings and checking them against an {@link AirpalUser}.
 */
public class AuthorizationUtil
{
    /** Checks whether {@code subject} may read the given table. */
    public static boolean isAuthorizedRead(AirpalUser subject, Table table)
    {
        return isAuthorizedRead(subject, table.getConnectorId(), table.getSchema(), table.getTable());
    }

    /** Checks the "read:connector.schema:table" permission for {@code subject}. */
    public static boolean isAuthorizedRead(AirpalUser subject, String connectorId, String schema, String table)
    {
        String permission = format("read:%s.%s:%s", connectorId, schema, table);
        return subject.isPermitted(permission);
    }

    /** Checks the "write:connector.schema:table" permission for {@code subject}. */
    public static boolean isAuthorizedWrite(AirpalUser subject, String connectorId, String schema, String table)
    {
        String permission = format("write:%s.%s:%s", connectorId, schema, table);
        return subject.isPermitted(permission);
    }

    /** Guava predicate that keeps only tables the subject may read; null is rejected. */
    public static class AuthorizedTablesPredicate
            implements Predicate<Table>
    {
        private final AirpalUser subject;

        public AuthorizedTablesPredicate(AirpalUser subject)
        {
            this.subject = subject;
        }

        @Override
        public boolean apply(@Nullable Table input)
        {
            return input != null && isAuthorizedRead(subject, input);
        }
    }
}
| 7,332 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/ToAirpalUser.java | package com.airbnb.airpal.core;
import org.apache.shiro.subject.Subject;
/** Implemented by principals that can adapt themselves into an {@link AirpalUser}. */
public interface ToAirpalUser
{
    /** Adapts this principal, given its Shiro {@code Subject}, into an AirpalUser. */
    public AirpalUser toAirpalUser(Subject subject);
}
| 7,333 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/AirpalUserFactory.java | package com.airbnb.airpal.core;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.joda.time.Duration;
import org.secnod.shiro.jersey.TypeFactory;
import static java.lang.String.format;
/**
 * Jersey type factory that resolves the current Shiro subject into an
 * {@link AirpalUser}, filling in deployment-wide defaults when the principal
 * is a bare user-name string.
 */
public class AirpalUserFactory extends TypeFactory<AirpalUser>
{
    private final String defaultAccessLevel;
    private final String defaultSchema;
    private final Duration defaultQueryTimeout;

    public AirpalUserFactory(String defaultSchema, Duration queryTimeout, String accessLevel)
    {
        super(AirpalUser.class);
        this.defaultSchema = defaultSchema;
        this.defaultQueryTimeout = queryTimeout;
        this.defaultAccessLevel = accessLevel;
    }

    @Override
    public AirpalUser provide()
    {
        Subject subject = SecurityUtils.getSubject();
        Object principal = subject.getPrincipal();

        // Check order matters: a principal could implement more than one shape.
        if (principal instanceof ToAirpalUser) {
            return ((ToAirpalUser) principal).toAirpalUser(subject);
        }
        if (principal instanceof String) {
            return new AirpalUserImpl((String) principal, defaultSchema, defaultQueryTimeout, defaultAccessLevel, subject);
        }
        if (principal instanceof AirpalUser) {
            return (AirpalUser) principal;
        }
        throw new IllegalArgumentException(format("Could not marshall %s to AirpalUser", principal));
    }
}
| 7,334 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/AirpalUserImpl.java | package com.airbnb.airpal.core;
import lombok.Value;
import org.apache.shiro.subject.Subject;
import org.joda.time.Duration;
/**
 * Immutable {@link AirpalUser} backed by a Shiro {@code Subject}. Lombok
 * {@code @Value} generates the constructor, getters, equals/hashCode and
 * toString for the fields below.
 */
@Value
public class AirpalUserImpl implements AirpalUser
{
    private final String userName;
    private final String defaultSchema;
    private final Duration queryTimeout;
    private final String accessLevel;
    private final Subject subject;

    /** Delegates permission checks to the underlying Shiro subject. */
    @Override
    public boolean isPermitted(String permission)
    {
        return subject.isPermitted(permission);
    }
}
| 7,335 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/BackgroundCacheLoader.java | package com.airbnb.airpal.core;
import com.google.common.cache.CacheLoader;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import java.util.concurrent.Callable;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * A Guava {@link CacheLoader} whose {@code reload} runs {@code load(key)} on a
 * supplied executor instead of the caller's thread, so cache refreshes never
 * block readers.
 */
abstract public class BackgroundCacheLoader<K, V> extends CacheLoader<K, V>
{
    private final ListeningExecutorService executor;

    protected BackgroundCacheLoader(ListeningExecutorService executor)
    {
        this.executor = checkNotNull(executor, "executor is null");
    }

    @Override
    public final ListenableFuture<V> reload(final K key, V oldValue)
    {
        // The stale value stays visible until this task completes.
        Callable<V> reloadTask = new Callable<V>()
        {
            @Override
            public V call()
                    throws Exception
            {
                return load(key);
            }
        };
        return executor.submit(reloadTask);
    }
}
| 7,336 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/health/PrestoHealthCheck.java | package com.airbnb.airpal.core.health;
import com.codahale.metrics.health.HealthCheck;
import com.facebook.presto.client.StatementClient;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static com.airbnb.airpal.presto.QueryRunner.QueryRunnerFactory;
/**
 * Dropwizard health check that verifies Presto answers "SELECT 1". Results are
 * memoized for two minutes so concurrent checks share one in-flight query.
 */
public class PrestoHealthCheck extends HealthCheck
{
    private static final String HEALTH_CHECK_QUERY = "SELECT 1";

    private final Supplier<Future<Result>> resultSupplier;

    @Inject
    public PrestoHealthCheck(
            final QueryRunnerFactory queryRunnerFactory,
            @Named("presto") final ExecutorService executorService)
    {
        // To prevent a lagging Presto health check from freezing Airpal, by blocking a large
        // number of health check threads, we have the supplier return a Future<Result>. This
        // way, the Future is immediately memoized and all calls will be successful if the
        // future resolved successfully.
        Supplier<Future<Result>> baseSupplier = new Supplier<Future<Result>>()
        {
            @Override
            public Future<Result> get()
            {
                return executorService.submit(new Callable<Result>() {
                    @Override
                    public Result call()
                            throws Exception
                    {
                        // Sentinel used when the query yields no first row.
                        // Fixed: Integer.valueOf instead of the deprecated Integer constructor.
                        final List<Object> invalidValue = ImmutableList.of((Object) Integer.valueOf(-1));
                        List<Object> result;
                        try (StatementClient client = queryRunnerFactory.create().startInternalQuery(HEALTH_CHECK_QUERY)) {
                            while (client.isValid() && !Thread.currentThread().isInterrupted()) {
                                Iterable<List<Object>> results = client.current().getData();
                                if (results != null) {
                                    result = Iterables.getFirst(results, invalidValue);
                                    // NOTE(review): these asserts only run with -ea enabled;
                                    // consider explicit checks returning Result.unhealthy(...).
                                    assert(result != null);
                                    assert(result.size() == 1);
                                    assert((int)result.get(0) == 1);
                                }
                                client.advance();
                            }
                            return Result.healthy();
                        } catch (Exception e) {
                            throw Throwables.propagate(e);
                        }
                    }
                });
            }
        };
        this.resultSupplier = Suppliers.memoizeWithExpiration(baseSupplier, 120, TimeUnit.SECONDS);
    }

    @Override
    protected Result check() throws Exception
    {
        // Wait at most 5 seconds for the future to resolve, so that we don't block too many
        // threads awaiting the result of this check.
        return resultSupplier.get().get(5, TimeUnit.SECONDS);
    }
}
| 7,337 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/execution/Execution.java | package com.airbnb.airpal.core.execution;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.JobState;
import com.airbnb.airpal.api.event.JobUpdateEvent;
import com.airbnb.airpal.api.output.InvalidQueryException;
import com.airbnb.airpal.api.output.builders.FileTooLargeException;
import com.airbnb.airpal.api.output.builders.JobOutputBuilder;
import com.airbnb.airpal.api.output.builders.OutputBuilderFactory;
import com.airbnb.airpal.api.output.persistors.Persistor;
import com.airbnb.airpal.api.output.persistors.PersistorFactory;
import com.airbnb.airpal.core.execution.QueryClient.QueryTimeOutException;
import com.airbnb.airpal.presto.QueryInfoClient;
import com.airbnb.airpal.presto.QueryRunner;
import com.airbnb.airpal.presto.Table;
import com.airbnb.airpal.presto.metadata.ColumnCache;
import com.facebook.presto.client.Column;
import com.facebook.presto.client.ErrorLocation;
import com.facebook.presto.client.FailureInfo;
import com.facebook.presto.client.QueryError;
import com.facebook.presto.client.QueryResults;
import com.facebook.presto.client.StatementClient;
import com.facebook.presto.execution.QueryStats;
import com.facebook.presto.sql.parser.ParsingException;
import com.google.common.base.Function;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.eventbus.EventBus;
import com.google.common.util.concurrent.RateLimiter;
import io.airlift.units.DataSize;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import javax.annotation.Nullable;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import static com.airbnb.airpal.core.execution.ExecutionClient.ExecutionFailureException;
import static com.airbnb.airpal.presto.QueryInfoClient.BasicQueryInfo;
import static java.lang.String.format;
@Slf4j
@RequiredArgsConstructor
public class Execution implements Callable<Job>
{
@Getter
private final Job job;
@Getter
private final EventBus eventBus;
@Getter
private final QueryRunner queryRunner;
@Getter
private final QueryInfoClient queryInfoClient;
@Getter
private final QueryExecutionAuthorizer authorizer;
@Getter
private final Duration timeout;
private final ColumnCache columnCache;
private final OutputBuilderFactory outputBuilderFactory;
private final PersistorFactory persistorFactory;
private final RateLimiter updateLimiter = RateLimiter.create(2.0);
private final int maxRowsPreviewOutput = 1_000;
private boolean isCancelled = false;
public void cancel()
{
isCancelled = true;
}
@Override
public Job call() throws Exception
{
return doExecute();
}
/**
 * Runs the job's query end to end: authorization, execution against Presto,
 * streaming results into the output builder, rate-limited progress updates,
 * and final persistence of the output.
 *
 * @return the job, on success
 * @throws ExecutionFailureException on parse errors, authorization failures,
 *         cancellation, timeout, oversized output, or a FAILED final state
 */
private Job doExecute()
        throws ExecutionFailureException
{
    // Only the first ';'-separated statement of the submitted text is executed.
    final String userQuery = QUERY_SPLITTER.splitToList(getJob().getQuery()).get(0);
    final JobOutputBuilder outputBuilder;
    job.setQueryStats(createNoOpQueryStats());
    try {
        outputBuilder = outputBuilderFactory.forJob(job);
    }
    catch (IOException e) {
        throw new ExecutionFailureException(job, "Could not create output builder for job", e);
    }
    catch (InvalidQueryException e) {
        throw new ExecutionFailureException(job, e.getMessage(), e);
    }
    final Persistor persistor = persistorFactory.getPersistor(job, job.getOutput());
    // The output may rewrite the query (e.g. wrap it in CREATE TABLE AS).
    final String query = job.getOutput().processQuery(userQuery);
    if (!persistor.canPersist(authorizer)) {
        throw new ExecutionFailureException(job, "Not authorized to create tables", null);
    }
    final List<List<Object>> outputPreview = new ArrayList<>(maxRowsPreviewOutput);
    final Set<Table> tables = new HashSet<>();
    try {
        tables.addAll(authorizer.tablesUsedByQuery(query));
    } catch (ParsingException e) {
        job.setError(new QueryError(e.getMessage(), null, -1, null, null, new ErrorLocation(e.getLineNumber(), e.getColumnNumber()), null));
        throw new ExecutionFailureException(job, "Invalid query, could not parse", e);
    }
    if (!authorizer.isAuthorizedRead(tables)) {
        job.setQueryStats(createNoOpQueryStats());
        throw new ExecutionFailureException(job, "Cannot access tables", null);
    }
    QueryClient queryClient = new QueryClient(queryRunner, timeout, query);
    try {
        // Invoked once per page of results while the statement is running.
        queryClient.executeWith(new Function<StatementClient, Void>()
        {
            @Nullable
            @Override
            public Void apply(@Nullable StatementClient client)
            {
                if (client == null) {
                    return null;
                }
                QueryResults results = client.current();
                List<Column> resultColumns = null;
                JobState jobState = null;
                QueryError queryError = null;
                QueryStats queryStats = null;
                // Cooperative cancellation: checked once per page.
                if (isCancelled) {
                    throw new ExecutionFailureException(job,
                            "Query was cancelled",
                            null);
                }
                if (results.getError() != null) {
                    queryError = results.getError();
                    jobState = JobState.FAILED;
                }
                // Fetch server-side stats only for non-failed queries with an info URI.
                if ((results.getInfoUri() != null) && (jobState != JobState.FAILED)) {
                    BasicQueryInfo queryInfo = queryInfoClient.from(results.getInfoUri());
                    if (queryInfo != null) {
                        queryStats = queryInfo.getQueryStats();
                    }
                }
                if (results.getStats() != null) {
                    jobState = JobState.fromStatementState(results.getStats().getState());
                }
                try {
                    if (results.getColumns() != null) {
                        resultColumns = results.getColumns();
                        outputBuilder.addColumns(resultColumns);
                    }
                    if (results.getData() != null) {
                        List<List<Object>> resultsData = ImmutableList.copyOf(results.getData());
                        for (List<Object> row : resultsData) {
                            outputBuilder.addRow(row);
                        }
                    }
                } catch (FileTooLargeException e) {
                    throw new ExecutionFailureException(job,
                            "Output file exceeded maximum configured filesize",
                            e);
                }
                // Rate-limited so the event bus is not flooded on fast queries.
                rlUpdateJobInfo(tables, resultColumns, queryStats, jobState, queryError, outputPreview);
                return null;
            }
        });
    } catch (QueryTimeOutException e) {
        throw new ExecutionFailureException(job,
                format("Query exceeded maximum execution time of %s minutes", Duration.millis(e.getElapsedMs()).getStandardMinutes()),
                e);
    }
    // Always post one final (non-rate-limited) update with authoritative stats.
    QueryResults finalResults = queryClient.finalResults();
    if (finalResults != null && finalResults.getInfoUri() != null) {
        BasicQueryInfo queryInfo = queryInfoClient.from(finalResults.getInfoUri());
        if (queryInfo != null) {
            updateJobInfo(
                    null,
                    null,
                    queryInfo.getQueryStats(),
                    JobState.fromStatementState(finalResults.getStats().getState()),
                    finalResults.getError(),
                    outputPreview,
                    true);
        }
    }
    if (job.getState() != JobState.FAILED) {
        URI location = persistor.persist(outputBuilder, job);
        if (location != null) {
            job.getOutput().setLocation(location);
        }
    } else {
        throw new ExecutionFailureException(job, null, null);
    }
    return getJob();
}

// Splits multi-statement input on ';'; only the first statement is executed.
private static final Splitter QUERY_SPLITTER = Splitter.on(";").omitEmptyStrings().trimResults();
/**
 * Rate-limited variant of {@link #updateJobInfo}. The job's in-memory state is
 * always refreshed, but the event-bus broadcast only happens when a permit is
 * available from {@code updateLimiter} (~2 posts/second), so fast queries do
 * not flood listeners with updates.
 */
protected void rlUpdateJobInfo(
        Set<Table> usedTables,
        List<Column> columns,
        QueryStats queryStats,
        JobState state,
        QueryError error,
        List<List<Object>> outputPreview)
{
    // Broadcast only when the limiter grants a permit; state is updated either way.
    final boolean shouldBroadcast = updateLimiter.tryAcquire(1);
    updateJobInfo(usedTables, columns, queryStats, state, error, outputPreview, shouldBroadcast);
}
/**
 * Folds a batch of nullable progress fields into the mutable {@code job} and
 * optionally broadcasts the new state on the event bus.
 *
 * @param usedTables    tables referenced by the query; appended when non-empty
 * @param columns       result columns; replaces the job's columns when non-empty
 * @param queryStats    latest server-side stats; replaces when non-null
 * @param state         new state; applied only if the job is not already terminal
 * @param error         server error; converted to a trimmed QueryError when non-null
 * @param outputPreview preview rows attached to the broadcast event
 * @param postUpdate    whether to post a {@code JobUpdateEvent}
 */
protected void updateJobInfo(
        Set<Table> usedTables,
        List<Column> columns,
        QueryStats queryStats,
        JobState state,
        QueryError error,
        List<List<Object>> outputPreview,
        boolean postUpdate)
{
    if ((usedTables != null) && (usedTables.size() > 0)) {
        job.getTablesUsed().addAll(usedTables);
    }
    if ((columns != null) && (columns.size() > 0)) {
        job.setColumns(columns);
    }
    if (queryStats != null) {
        job.setQueryStats(queryStats);
    }
    // Terminal states (FINISHED / FAILED) are sticky and never overwritten.
    if ((state != null) && (job.getState() != JobState.FINISHED) && (job.getState() != JobState.FAILED)) {
        job.setState(state);
    }
    if (error != null) {
        // Rebuild the failure info without the stack/suppressed chains so the
        // stored error stays small and serializable.
        FailureInfo failureInfo = new FailureInfo(
                error.getFailureInfo().getType(),
                error.getFailureInfo().getMessage(),
                null,
                Collections.<FailureInfo>emptyList(),
                Collections.<String>emptyList(),
                error.getFailureInfo().getErrorLocation());
        QueryError queryError = new QueryError(
                error.getMessage(),
                error.getSqlState(),
                error.getErrorCode(),
                error.getErrorName(),
                error.getErrorType(),
                error.getErrorLocation(),
                failureInfo);
        job.setError(queryError);
    }
    if (postUpdate) {
        eventBus.post(new JobUpdateEvent(job, outputPreview));
    }
}
/**
 * Builds a placeholder {@code QueryStats} with all durations, sizes and counts
 * zeroed and all timestamps set to "now". Used before real server-side stats
 * are available so consumers never see a null stats object.
 *
 * NOTE(review): the arguments are positional against Presto's QueryStats
 * constructor — keep the order in sync with the presto-client version on the
 * classpath when upgrading.
 */
public static QueryStats createNoOpQueryStats()
{
    DateTime now = DateTime.now();
    io.airlift.units.Duration zeroDuration = new io.airlift.units.Duration(0, TimeUnit.SECONDS);
    DataSize zeroData = new DataSize(0, DataSize.Unit.BYTE);
    return new QueryStats(
            now,
            null,
            now,
            now,
            zeroDuration,
            zeroDuration,
            zeroDuration,
            zeroDuration,
            zeroDuration,
            zeroDuration,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0.0,
            zeroData,
            zeroData,
            zeroDuration,
            zeroDuration,
            zeroDuration,
            zeroDuration,
            false,
            ImmutableSet.of(),
            zeroData,
            0,
            zeroData,
            0,
            zeroData,
            0
    );
}
}
| 7,338 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/execution/ExecutionClient.java | package com.airbnb.airpal.core.execution;
import com.airbnb.airpal.api.ExecutionRequest;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.JobState;
import com.airbnb.airpal.api.event.JobFinishedEvent;
import com.airbnb.airpal.api.output.HiveTablePersistentOutput;
import com.airbnb.airpal.api.output.PersistentJobOutputFactory;
import com.airbnb.airpal.api.output.builders.OutputBuilderFactory;
import com.airbnb.airpal.api.output.persistors.PersistorFactory;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.core.store.history.JobHistoryStore;
import com.airbnb.airpal.core.store.jobs.ActiveJobsStore;
import com.airbnb.airpal.core.store.usage.UsageStore;
import com.airbnb.airpal.presto.QueryInfoClient;
import com.airbnb.airpal.presto.Table;
import com.airbnb.airpal.presto.metadata.ColumnCache;
import com.airbnb.airpal.presto.metadata.SchemaCache;
import com.facebook.presto.client.Column;
import com.facebook.presto.client.QueryError;
import com.google.common.eventbus.EventBus;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.inject.Inject;
import lombok.Getter;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import javax.annotation.Nullable;
import javax.validation.constraints.NotNull;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import static com.airbnb.airpal.presto.QueryRunner.QueryRunnerFactory;
/**
 * Entry point for running user queries asynchronously. Creates an {@link Execution}
 * per query, tracks in-flight executions by UUID (for cancellation), and on
 * completion records history/usage and notifies listeners via the event bus.
 */
public class ExecutionClient
{
    // Unbounded daemon-thread pool; one thread per in-flight execution.
    private final ListeningExecutorService executor = MoreExecutors.listeningDecorator(
            Executors.newCachedThreadPool(SchemaCache.daemonThreadsNamed("execution-client-%d")));
    @Getter
    private final EventBus eventBus;
    @Getter
    private final JobHistoryStore historyStore;
    private final PersistentJobOutputFactory persistentJobOutputFactory;
    @Getter
    private final UsageStore usageStore;
    @Getter
    private final SchemaCache schemaCache;
    private final ColumnCache columnCache;
    private final QueryInfoClient queryInfoClient;
    private final QueryRunnerFactory queryRunnerFactory;
    private final ActiveJobsStore activeJobsStore;
    private final OutputBuilderFactory outputBuilderFactory;
    private final PersistorFactory persistorFactory;
    // In-flight executions keyed by job UUID; entries are removed in jobFinished.
    private final Map<UUID, Execution> executionMap = new ConcurrentHashMap<>();

    @Inject
    public ExecutionClient(QueryRunnerFactory queryRunnerFactory,
                           EventBus eventBus,
                           JobHistoryStore historyStore,
                           PersistentJobOutputFactory persistentJobOutputFactory,
                           UsageStore usageStore,
                           SchemaCache schemaCache,
                           ColumnCache columnCache,
                           QueryInfoClient queryInfoClient,
                           ActiveJobsStore activeJobsStore,
                           OutputBuilderFactory outputBuilderFactory,
                           PersistorFactory persistorFactory)
    {
        this.queryRunnerFactory = queryRunnerFactory;
        this.eventBus = eventBus;
        this.historyStore = historyStore;
        this.persistentJobOutputFactory = persistentJobOutputFactory;
        this.usageStore = usageStore;
        this.schemaCache = schemaCache;
        this.columnCache = columnCache;
        this.queryInfoClient = queryInfoClient;
        this.activeJobsStore = activeJobsStore;
        this.outputBuilderFactory = outputBuilderFactory;
        this.persistorFactory = persistorFactory;
    }

    /**
     * Convenience overload that unpacks an {@link ExecutionRequest}.
     *
     * @return the UUID identifying the submitted job
     */
    public UUID runQuery(final ExecutionRequest request,
                         final AirpalUser user,
                         final String schema,
                         final Duration timeout)
    {
        return runQuery(request.getQuery(), request.getTmpTable(), user, schema, timeout);
    }

    /**
     * Submits a query for asynchronous execution on behalf of {@code user}.
     * The job is registered as active and its lifecycle (success or failure)
     * is handled by the attached future callback.
     *
     * @return the UUID identifying the submitted job, usable with {@link #cancelQuery}
     */
    public UUID runQuery(final String query,
                         final String tmpTable,
                         final AirpalUser user,
                         final String schema,
                         final Duration timeout)
    {
        final UUID uuid = UUID.randomUUID();
        final Job job = new Job(user.getUserName(),
                query,
                uuid,
                persistentJobOutputFactory.create(tmpTable, uuid),
                null,
                JobState.QUEUED,
                Collections.<Column>emptyList(),
                null,
                null
        );
        final Execution execution = new Execution(job,
                eventBus,
                queryRunnerFactory.create(user.getUserName(), schema),
                queryInfoClient,
                new QueryExecutionAuthorizer(user, "hive", user.getDefaultSchema()),
                timeout,
                columnCache,
                outputBuilderFactory,
                persistorFactory);

        executionMap.put(uuid, execution);
        activeJobsStore.jobStarted(job);

        ListenableFuture<Job> result = executor.submit(execution);
        Futures.addCallback(result, new FutureCallback<Job>()
        {
            @Override
            public void onSuccess(@Nullable Job result)
            {
                if (result != null) {
                    result.setState(JobState.FINISHED);
                }
                jobFinished(result);
            }

            @Override
            public void onFailure(@NotNull Throwable t)
            {
                // NOTE(review): throwables other than ExecutionFailureException are
                // silently dropped here, leaving the job active and its entry in
                // executionMap — confirm whether a catch-all failure path is needed.
                if (t instanceof ExecutionFailureException) {
                    ExecutionFailureException e = (ExecutionFailureException) t;
                    Job j = e.getJob();
                    j.setState(JobState.FAILED);
                    if (j.getError() == null) {
                        j.setError(new QueryError(e.getMessage(), null, -1, null, null, null, null));
                    }
                    jobFinished(j);
                }
            }
        });
        return uuid;
    }

    /**
     * Finalizes a completed job: stamps the finish time, moves it from the
     * active store to history, records table usage, updates the cached schema
     * map for newly created hive tables, and broadcasts a finished event.
     */
    protected void jobFinished(Job job)
    {
        job.setQueryFinished(new DateTime());
        activeJobsStore.jobFinished(job);
        historyStore.addRun(job);

        for (Table t : job.getTablesUsed()) {
            usageStore.markUsage(t);
        }

        if (job.getOutput() instanceof HiveTablePersistentOutput && job.getOutput().getLocation() != null) {
            // Location is expected to look like "schema.table".
            String[] parts = job.getOutput().getLocation().toString().split("\\.");
            if (parts.length == 2) {
                Map<String, List<String>> cache = schemaCache.getSchemaMap("hive");
                List<String> tables = cache.get(parts[0]);
                // Guard against a schema missing from the cache: the previous
                // code dereferenced the lookup unconditionally and could NPE,
                // skipping the finished-event broadcast below.
                if (tables != null) {
                    tables.add(parts[1]);
                }
            }
        }

        eventBus.post(new JobFinishedEvent(job));
        executionMap.remove(job.getUuid());
    }

    /**
     * Cancels an in-flight query.
     *
     * @return true if the job exists and belongs to {@code user}; false otherwise
     */
    public boolean cancelQuery(
            AirpalUser user,
            UUID uuid)
    {
        Execution execution = executionMap.get(uuid);
        if ((execution != null) && (execution.getJob().getUser().equals(user.getUserName()))) {
            execution.cancel();
            return true;
        } else {
            return false;
        }
    }

    /**
     * Carries the failed {@link Job} alongside the failure cause so the
     * completion callback can finalize it.
     */
    public static class ExecutionFailureException extends RuntimeException
    {
        @Getter
        private final Job job;

        public ExecutionFailureException(Job job, String message, Throwable cause)
        {
            super(message, cause);
            this.job = job;
        }
    }
}
| 7,339 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/execution/QueryClient.java | package com.airbnb.airpal.core.execution;
import com.airbnb.airpal.presto.QueryRunner;
import com.facebook.presto.client.QueryResults;
import com.facebook.presto.client.StatementClient;
import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
import io.dropwizard.util.Duration;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
// NOTE: @AllArgsConstructor generates QueryClient(QueryRunner, Duration, String) —
// `finalResults` is excluded because it is a final field with an initializer.
// Keep field order stable: it defines the generated constructor's signature.
@AllArgsConstructor
public class QueryClient
{
    private final QueryRunner queryRunner;
    // Maximum wall-clock time before executeWith aborts with QueryTimeOutException.
    private final Duration timeout;
    private final String query;
    // Holds the statement's final results once the pagination loop completes.
    private final AtomicReference<QueryResults> finalResults = new AtomicReference<>();

    /** Creates a client with a default 30-minute timeout. */
    public QueryClient(QueryRunner queryRunner, String query)
    {
        this(queryRunner, Duration.seconds(60 * 30), query);
    }

    /** Bridges a joda-time timeout to the dropwizard Duration used internally. */
    public QueryClient(QueryRunner queryRunner, org.joda.time.Duration timeout, String query)
    {
        this(queryRunner, Duration.milliseconds(timeout.getMillis()), query);
    }

    /**
     * Runs the query and invokes {@code function} once per page of results
     * while the statement remains valid, enforcing the configured timeout.
     *
     * @return the last value returned by {@code function} (may be null)
     * @throws QueryTimeOutException when the timeout elapses mid-execution
     */
    public <T> T executeWith(Function<StatementClient, T> function)
            throws QueryTimeOutException
    {
        final Stopwatch stopwatch = Stopwatch.createStarted();
        T t = null;

        try (StatementClient client = queryRunner.startInternalQuery(query)) {
            while (client.isValid() && !Thread.currentThread().isInterrupted()) {
                if (stopwatch.elapsed(TimeUnit.MILLISECONDS) > timeout.toMilliseconds()) {
                    throw new QueryTimeOutException(stopwatch.elapsed(TimeUnit.MILLISECONDS));
                }

                t = function.apply(client);
                client.advance();
            }

            finalResults.set(client.finalResults());
        } catch (RuntimeException | QueryTimeOutException e) {
            stopwatch.stop();
            throw e;
        }

        return t;
    }

    /** @return the final results captured by the last executeWith run, or null. */
    public QueryResults finalResults()
    {
        return finalResults.get();
    }

    /** Thrown when a query exceeds its timeout; carries the elapsed time in ms. */
    @AllArgsConstructor
    public static class QueryTimeOutException extends Throwable
    {
        @Getter
        private final long elapsedMs;
    }
}
| 7,340 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/execution/InputReferenceExtractor.java | package com.airbnb.airpal.core.execution;
import com.airbnb.airpal.presto.Table;
import com.facebook.presto.sql.tree.CreateTable;
import com.facebook.presto.sql.tree.CreateView;
import com.facebook.presto.sql.tree.DefaultTraversalVisitor;
import com.facebook.presto.sql.tree.DropTable;
import com.facebook.presto.sql.tree.DropView;
import com.facebook.presto.sql.tree.Join;
import com.facebook.presto.sql.tree.JoinOn;
import com.facebook.presto.sql.tree.Node;
import com.facebook.presto.sql.tree.QualifiedName;
import com.facebook.presto.sql.tree.RenameTable;
import com.facebook.presto.sql.tree.Use;
import com.facebook.presto.sql.tree.WithQuery;
import com.google.common.collect.Sets;
import lombok.EqualsAndHashCode;
import lombok.Value;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * AST visitor that collects every table referenced by a statement (reads and
 * writes alike), resolving partially-qualified names against the current
 * catalog/schema context. WITH-query aliases are tracked separately and
 * subtracted so they are not mistaken for real tables.
 */
@Value
@EqualsAndHashCode(callSuper = false)
public class InputReferenceExtractor
        extends DefaultTraversalVisitor<InputReferenceExtractor.CatalogSchemaContext, InputReferenceExtractor.CatalogSchemaContext>
{
    private final Set<Table> references = new HashSet<>();
    private final Set<Table> aliases = new HashSet<>();

    /** @return referenced tables, excluding WITH-clause aliases. */
    public Set<Table> getReferences()
    {
        return Sets.difference(references, aliases);
    }

    /**
     * Resolves a 1-, 2- or 3-part qualified name into a fully-qualified Table,
     * filling missing parts from the context's catalog/schema.
     */
    private Table qualifiedNameToTable(QualifiedName name, CatalogSchemaContext context)
    {
        List<String> nameParts = name.getParts();
        String connectorId = context.getCatalog();
        String schema = context.getSchema();
        String table = null;

        if (nameParts.size() == 3) {
            connectorId = nameParts.get(0);
            schema = nameParts.get(1);
            table = nameParts.get(2);
        } else if (nameParts.size() == 2) {
            schema = nameParts.get(0);
            table = nameParts.get(1);
        } else if (nameParts.size() == 1) {
            table = nameParts.get(0);
        }

        return new Table(connectorId, schema, table);
    }

    @Override
    protected CatalogSchemaContext visitCreateView(CreateView node, CatalogSchemaContext context)
    {
        references.add(qualifiedNameToTable(node.getName(), context));
        // A view definition embeds a query; traverse it for further references.
        visitQuery(node.getQuery(), context);
        return context;
    }

    @Override
    protected CatalogSchemaContext visitCreateTable(CreateTable node, CatalogSchemaContext context)
    {
        references.add(qualifiedNameToTable(node.getName(), context));
        // BUG FIX: this method previously called visitCreateTable(node, context)
        // on itself, recursing unconditionally until StackOverflowError. A plain
        // CREATE TABLE has no nested query to traverse, so just record the name.
        return context;
    }

    @Override
    protected CatalogSchemaContext visitDropTable(DropTable node, CatalogSchemaContext context)
    {
        references.add(qualifiedNameToTable(node.getTableName(), context));
        return context;
    }

    @Override
    protected CatalogSchemaContext visitDropView(DropView node, CatalogSchemaContext context)
    {
        references.add(qualifiedNameToTable(node.getName(), context));
        return context;
    }

    @Override
    protected CatalogSchemaContext visitTable(com.facebook.presto.sql.tree.Table node, CatalogSchemaContext context)
    {
        references.add(qualifiedNameToTable(node.getName(), context));
        return context;
    }

    @Override
    protected CatalogSchemaContext visitRenameTable(RenameTable node, CatalogSchemaContext context)
    {
        references.add(qualifiedNameToTable(node.getSource(), context));
        return context;
    }

    @Override
    protected CatalogSchemaContext visitWithQuery(WithQuery node, CatalogSchemaContext context)
    {
        // Record the WITH alias so getReferences() can exclude it later.
        aliases.add(new Table(context.getCatalog(), context.getSchema(), node.getName()));
        return super.visitWithQuery(node, context);
    }

    @Override
    protected CatalogSchemaContext visitUse(Use node, CatalogSchemaContext context)
    {
        // USE switches the context for subsequent statements.
        return new CatalogSchemaContext(node.getCatalog().orElse(context.getCatalog()), node.getSchema());
    }

    @Override
    protected CatalogSchemaContext visitNode(Node node, CatalogSchemaContext context)
    {
        return context;
    }

    @Override
    protected CatalogSchemaContext visitJoin(Join node, CatalogSchemaContext context)
    {
        process(node.getLeft(), context);
        process(node.getRight(), context);
        // ON expressions can contain subqueries that reference tables.
        if (node.getCriteria().isPresent()) {
            if (node.getCriteria().get() instanceof JoinOn) {
                process(((JoinOn) node.getCriteria().get()).getExpression(), context);
            }
        }
        return context;
    }

    /** Immutable catalog/schema pair used to resolve unqualified names. */
    @Value
    public static class CatalogSchemaContext
    {
        private final String catalog;
        private final String schema;
    }
}
| 7,341 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/execution/QueryExecutionAuthorizer.java | package com.airbnb.airpal.core.execution;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.core.AuthorizationUtil;
import com.airbnb.airpal.presto.Table;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.tree.Statement;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.util.List;
import java.util.Set;
import static com.airbnb.airpal.core.AuthorizationUtil.AuthorizedTablesPredicate;
import static com.airbnb.airpal.core.execution.InputReferenceExtractor.CatalogSchemaContext;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Authorization helper for a single user's query execution: extracts the
 * tables a query touches and checks read/write permission on them.
 */
public class QueryExecutionAuthorizer
{
    private static final SqlParser SQL_PARSER = new SqlParser();
    // Splits multi-statement input; each statement is parsed separately.
    // FIX: was non-final (a mutable static) in the original.
    private static final Splitter STATEMENT_SPLITTER = Splitter.on(";").omitEmptyStrings();

    private final AirpalUser user;
    private final String defaultConnector;
    private final String defaultSchema;

    public QueryExecutionAuthorizer(AirpalUser user, String defaultConnector, String defaultSchema)
    {
        this.user = checkNotNull(user);
        this.defaultConnector = checkNotNull(defaultConnector);
        this.defaultSchema = checkNotNull(defaultSchema);
    }

    /** @return true if the user may write (create/drop) the given table. */
    public boolean isAuthorizedWrite(String connectorId, String schema, String table)
    {
        return AuthorizationUtil.isAuthorizedWrite(user, connectorId, schema, table);
    }

    /** @return true only if the user may read every table in the set. */
    public boolean isAuthorizedRead(Set<Table> tables)
    {
        return Iterables.all(tables, new AuthorizedTablesPredicate(user));
    }

    /**
     * Parses {@code query} (possibly multiple ';'-separated statements) and
     * returns all tables it references. The catalog/schema context carries
     * across statements, so an earlier USE affects later statements.
     *
     * @throws com.facebook.presto.sql.parser.ParsingException on invalid SQL
     */
    public static Set<Table> tablesUsedByQuery(String query, String defaultConnector, String defaultSchema)
    {
        List<String> statements = STATEMENT_SPLITTER.splitToList(query);
        ImmutableSet.Builder<Table> tables = ImmutableSet.builder();
        CatalogSchemaContext context = new CatalogSchemaContext(defaultConnector, defaultSchema);

        for (String strStatement : statements) {
            InputReferenceExtractor extractor = new InputReferenceExtractor();
            Statement statement = SQL_PARSER.createStatement(strStatement);
            context = statement.accept(extractor, context);
            tables.addAll(extractor.getReferences());
        }

        return tables.build();
    }

    /** Instance variant using this authorizer's default connector and schema. */
    public Set<Table> tablesUsedByQuery(String query)
    {
        return tablesUsedByQuery(query, defaultConnector, defaultSchema);
    }
}
| 7,342 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/queries/QueryStoreDAO.java | package com.airbnb.airpal.core.store.queries;
import com.airbnb.airpal.api.queries.SavedQuery;
import com.airbnb.airpal.api.queries.UserSavedQuery;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.presto.PartitionedTable;
import com.hubspot.rosetta.jdbi.RosettaBinder;
import org.skife.jdbi.v2.sqlobject.Bind;
import org.skife.jdbi.v2.sqlobject.BindBean;
import org.skife.jdbi.v2.sqlobject.SqlQuery;
import org.skife.jdbi.v2.sqlobject.SqlUpdate;
import java.util.List;
import java.util.UUID;
/**
 * JDBI-backed {@link QueryStore}: persists users' saved queries in the
 * {@code saved_queries} table. SQL is bound via JDBI SqlObject annotations.
 */
public abstract class QueryStoreDAO implements QueryStore
{
    /** All saved queries belonging to the given user. */
    @SqlQuery("SELECT * FROM saved_queries WHERE user = :userName")
    @Override
    public abstract List<SavedQuery> getSavedQueries(@BindBean AirpalUser airpalUser);

    // NOTE(review): unimplemented stub — returns null rather than filtering by
    // tables or delegating to the per-user query. Callers must null-check;
    // consider returning an empty list or implementing the filter.
    @Override
    public List<SavedQuery> getSavedQueries(AirpalUser airpalUser, List<PartitionedTable> tables)
    {
        return null;
    }

    /** Raw insert; returns the number of affected rows. */
    @SqlUpdate(
            "INSERT INTO saved_queries (query, user, description, uuid, name) " +
            "VALUES (:queryWithPlaceholders, :user, :description, :uuid, :name)")
    public abstract int _saveQuery(@RosettaBinder UserSavedQuery query);

    @Override
    public boolean saveQuery(UserSavedQuery query)
    {
        return _saveQuery(query) > 0;
    }

    // NOTE(review): deletes by UUID only — the airpalUser argument is not used
    // in the WHERE clause, so any user can delete any saved query by UUID.
    // Confirm whether ownership should be enforced here.
    @SqlUpdate("DELETE FROM saved_queries WHERE uuid = :queryUuid")
    public abstract int _deleteSavedQuery(AirpalUser airpalUser, @Bind("queryUuid") UUID queryUUID);

    @Override
    public boolean deleteSavedQuery(AirpalUser airpalUser, UUID queryUUID)
    {
        return _deleteSavedQuery(airpalUser, queryUUID) > 0;
    }

    @Override
    @SqlQuery("SELECT * FROM saved_queries WHERE uuid = :queryUuid")
    public abstract SavedQuery getSavedQuery(@Bind("queryUuid") UUID queryUUID);
}
| 7,343 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/queries/QueryStore.java | package com.airbnb.airpal.core.store.queries;
import com.airbnb.airpal.api.queries.SavedQuery;
import com.airbnb.airpal.api.queries.UserSavedQuery;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.presto.PartitionedTable;
import java.util.List;
import java.util.UUID;
/**
 * Storage abstraction for users' saved queries.
 */
public interface QueryStore
{
    /** All queries saved by the given user. */
    List<SavedQuery> getSavedQueries(AirpalUser airpalUser);

    /** Saved queries for the user, restricted to the given tables. */
    List<SavedQuery> getSavedQueries(AirpalUser airpalUser, List<PartitionedTable> tables);

    /** Persists a saved query; returns true on success. */
    boolean saveQuery(UserSavedQuery query);

    /** Deletes a saved query by UUID; returns true on success. */
    boolean deleteSavedQuery(AirpalUser airpalUser, UUID queryUUID);

    /** Looks up a single saved query by UUID. */
    SavedQuery getSavedQuery(UUID queryUUID);
}
| 7,344 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/usage/UsageStore.java | package com.airbnb.airpal.core.store.usage;
import com.airbnb.airpal.presto.Table;
import io.dropwizard.util.Duration;
import java.util.Map;
/**
 * Tracks how often tables are used within a sliding time window.
 */
public interface UsageStore
{
    /** Usage count for a single table within the store's window. */
    long getUsages(Table table);

    /** Usage counts for each of the given tables. */
    Map<Table, Long> getUsages(Iterable<Table> tables);

    /** Records one use of the given table. */
    void markUsage(Table table);

    /** The sliding window over which usage is counted. */
    Duration window();
}
| 7,345 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/usage/LocalUsageStore.java | package com.airbnb.airpal.core.store.usage;
import com.airbnb.airpal.presto.Table;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.SlidingTimeWindowReservoir;
import com.google.common.collect.Maps;
import io.dropwizard.util.Duration;
import lombok.RequiredArgsConstructor;
import java.util.Map;
/**
 * In-memory {@link UsageStore} backed by per-table sliding-time-window
 * histograms. Usage counts decay as samples fall out of the window.
 *
 * NOTE(review): the backing HashMap is not synchronized — confirm whether
 * callers are single-threaded before relying on this under concurrency.
 */
@RequiredArgsConstructor
public class LocalUsageStore implements UsageStore
{
    // One sliding-window histogram per table; created lazily in markUsage.
    private final Map<Table, Histogram> usageMap = Maps.newHashMap();
    // Window length for usage decay (also the generated constructor's argument).
    private final Duration usageTrackTime;

    @Override
    public long getUsages(Table table)
    {
        final Histogram window = usageMap.get(table);
        if (window != null) {
            return window.getSnapshot().size();
        } else {
            // Uppercase suffix: the original used '0l', easily misread as '01'.
            return 0L;
        }
    }

    @Override
    public Map<Table, Long> getUsages(Iterable<Table> tables)
    {
        // FIX: previously returned null, violating the interface contract that
        // sibling stores (e.g. CachingUsageStore) honor by returning a map.
        final Map<Table, Long> usages = Maps.newHashMap();
        for (Table table : tables) {
            usages.put(table, getUsages(table));
        }
        return usages;
    }

    @Override
    public void markUsage(Table table)
    {
        Histogram window = usageMap.get(table);
        if (window == null) {
            final SlidingTimeWindowReservoir reservoir = new SlidingTimeWindowReservoir(
                    usageTrackTime.getQuantity(),
                    usageTrackTime.getUnit());
            window = new Histogram(reservoir);
            usageMap.put(table, window);
        }
        window.update(1);
    }

    @Override
    public Duration window() {
        return usageTrackTime;
    }
}
| 7,346 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/usage/SQLUsageStore.java | package com.airbnb.airpal.core.store.usage;
import com.airbnb.airpal.presto.Table;
import com.airbnb.airpal.sql.DbType;
import com.airbnb.airpal.sql.Util;
import com.airbnb.airpal.sql.beans.JobUsageCountRow;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.hubspot.rosetta.jdbi.RosettaResultSetMapperFactory;
import io.dropwizard.util.Duration;
import lombok.extern.slf4j.Slf4j;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
 * SQL-backed {@link UsageStore} that counts table references in recently
 * finished jobs by joining {@code jobs}, {@code job_tables} and {@code tables}.
 * Read-only: {@link #markUsage} is a no-op because usage is derived from job
 * history rather than recorded directly.
 */
@Slf4j
public class SQLUsageStore implements UsageStore
{
    private final Duration duration;
    private final DBI dbi;
    // Drives dialect-specific SQL fragments (date arithmetic differs per DB).
    private final DbType dbType;

    public SQLUsageStore(Duration duration, DBI dbi, DbType dbType)
    {
        this.duration = duration;
        this.dbi = dbi;
        this.dbType = dbType;
    }

    @Override
    public long getUsages(Table table)
    {
        Map<Table, Long> usages = getUsages(ImmutableList.of(table));
        if (usages.containsKey(table)) {
            return usages.get(table);
        } else {
            return 0;
        }
    }

    @Override
    public Map<Table, Long> getUsages(Iterable<Table> tables)
    {
        try (Handle handle = dbi.open()) {
            // Count finished jobs per table over the last day; the table filter
            // is built dynamically from the requested tables.
            // NOTE(review): the hard-coded :day_interval = 1 appears to ignore
            // the configured `duration` field — confirm intended behavior.
            Query<Map<String, Object>> query = handle.createQuery(
                    "SELECT connector_id AS connectorId, schema_ AS \"schema\", table_ AS \"table\", COUNT(*) AS count " +
                            "FROM jobs j " +
                            "LEFT OUTER JOIN job_tables jt ON j.id = jt.job_id " +
                            "LEFT OUTER JOIN tables t ON jt.table_id = t.id " +
                            "WHERE " + Util.getQueryFinishedCondition(dbType) + " " +
                            "AND (" + Util.getTableCondition(Lists.newArrayList(tables)) + ") " +
                            "GROUP BY connector_id, schema_, table_ " +
                            "ORDER BY count DESC")
                    .bind("day_interval", 1);

            // Fold the joined rows into a Table -> count map.
            return query.
                    map(RosettaResultSetMapperFactory.mapperFor(JobUsageCountRow.class)).
                    fold(new HashMap<Table, Long>(), new JobUsageCountRow.CountFolder());
        } catch (Exception e) {
            // Best-effort: a DB failure yields an empty result, not an error.
            log.error("getTables caught exception", e);
            return Collections.emptyMap();
        }
    }

    @Override
    public void markUsage(Table table)
    {}

    @Override
    public Duration window()
    {
        return duration;
    }
}
| 7,347 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/usage/CachingUsageStore.java | package com.airbnb.airpal.core.store.usage;
import com.airbnb.airpal.presto.Table;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.Iterables;
import io.dropwizard.util.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;
/**
 * Decorates another {@link UsageStore} with a time-expiring read cache.
 * Reads go through a Guava {@link LoadingCache}; writes pass straight
 * through to the delegate.
 */
public class CachingUsageStore implements UsageStore
{
    private final UsageStore delegate;
    private final LoadingCache<Table, Long> cache;

    public CachingUsageStore(final UsageStore delegate, final Duration expireAfter)
    {
        this.delegate = delegate;
        // Entries are reloaded from the delegate after `expireAfter` elapses.
        this.cache = CacheBuilder
                .newBuilder()
                .expireAfterWrite(expireAfter.getQuantity(), expireAfter.getUnit())
                .build(new CacheLoader<Table, Long>()
                {
                    @Override
                    public Long load(Table key)
                            throws Exception
                    {
                        return delegate.getUsages(key);
                    }
                });
    }

    @Override
    public long getUsages(Table table)
    {
        try {
            return cache.get(table);
        }
        catch (ExecutionException e) {
            // Best-effort: treat a failed load as zero usage.
            return 0;
        }
    }

    @Override
    public Map<Table, Long> getUsages(Iterable<Table> tables)
    {
        final Map<Table, Long> counts = new HashMap<>(Iterables.size(tables));
        for (Table entry : tables) {
            counts.put(entry, getUsages(entry));
        }
        return counts;
    }

    @Override
    public void markUsage(Table table)
    {
        // Writes bypass the cache entirely.
        delegate.markUsage(table);
    }

    @Override
    public Duration window()
    {
        return delegate.window();
    }
}
| 7,348 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/history/LocalJobHistoryStore.java | package com.airbnb.airpal.core.store.history;
import com.airbnb.airpal.api.EvictingDeque;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.presto.Table;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import java.util.List;
import java.util.Map;
import java.util.concurrent.LinkedBlockingDeque;
/**
 * In-memory {@link JobHistoryStore}: keeps a bounded global deque of recent
 * jobs plus a bounded per-table cache of job deques. Eviction prefers to drop
 * finished jobs, re-queuing unfinished ones.
 */
public class LocalJobHistoryStore implements JobHistoryStore {

    /**
     * EvictingDeque whose eviction skips unfinished jobs by re-adding them to
     * the tail and evicting again.
     *
     * NOTE(review): evictItem recurses after re-adding an unfinished job; if
     * every queued job is still running this appears unbounded — confirm an
     * upper bound exists in practice.
     */
    public static class FinishedJobEvictingDeque extends EvictingDeque<Job> {

        public FinishedJobEvictingDeque(int capacity) {
            super(capacity);
        }

        @Override
        protected boolean evictItem(LinkedBlockingDeque<Job> deque) {
            final Job job = deque.poll();
            if (job != null) {
                if (job.getState().isDone()) {
                    // Finished job: safe to drop.
                    return true;
                } else {
                    // Still running: put it back at the tail and try the next one.
                    boolean addResult = add(job);
                    boolean secondEviction = evictItem(deque);
                    return addResult && secondEviction;
                }
            } else {
                return false;
            }
        }
    }

    // Per-table job history, bounded by maximumTableHistories tables.
    private final Cache<Table, EvictingDeque<Job>> tableHistoryCache;
    // Global recent-job history, bounded by maximumHistoryGeneral entries.
    private final EvictingDeque<Job> historyCache;
    private final int maximumHistoryPerTable;

    public LocalJobHistoryStore(final long maximumTableHistories,
                                final int maximumHistoryPerTable,
                                final long maximumHistoryGeneral)
    {
        this.tableHistoryCache = CacheBuilder.newBuilder()
                .maximumSize(maximumTableHistories)
                .build();
        this.historyCache = new FinishedJobEvictingDeque((int)maximumHistoryGeneral);
        this.maximumHistoryPerTable = maximumHistoryPerTable;
    }

    /** Up to {@code maxResults} jobs from the global history, in deque order. */
    @Override
    public List<Job> getRecentlyRun(long maxResults) {
        final ImmutableList.Builder<Job> builder = ImmutableList.builder();
        long added = 0;
        for (Job job : historyCache) {
            if (added + 1 > maxResults)
                break;
            builder.add(job);
            added += 1;
        }

        return builder.build();
    }

    @Override
    public List<Job> getRecentlyRun(long maxResults, Table table1, Table... otherTables)
    {
        return getRecentlyRun(maxResults, Lists.asList(table1, otherTables));
    }

    // NOTE(review): per-user lookups are unimplemented and return null rather
    // than an empty list — callers must null-check.
    @Override
    public List<Job> getRecentlyRunForUser(String user, long maxResults)
    {
        return null;
    }

    @Override
    public List<Job> getRecentlyRunForUser(String user, long maxResults, Iterable<Table> tables)
    {
        return null;
    }

    /**
     * Jobs drawn from the per-table caches of the given tables. A table's
     * whole deque is taken or skipped atomically: if adding it would exceed
     * maxResults, iteration stops.
     */
    @Override
    public List<Job> getRecentlyRun(long maxResults, Iterable<Table> tables)
    {
        final ImmutableList.Builder<Job> builder = ImmutableList.builder();
        long added = 0;
        for (Map.Entry<Table, EvictingDeque<Job>> entry : tableHistoryCache.getAllPresent(tables).entrySet()) {
            EvictingDeque<Job> deque = entry.getValue();
            if (deque != null) {
                final int dequeSize = deque.size();
                if (added + dequeSize > maxResults) {
                    break;
                } else {
                    builder.addAll(deque);
                    added += dequeSize;
                }
            }
        }

        return builder.build();
    }

    /** Records a finished job in the global history and in each used table's deque. */
    @Override
    public void addRun(Job job) {
        historyCache.add(job);
        for (Table usedTable : job.getTablesUsed()) {
            EvictingDeque<Job> tableCache = tableHistoryCache.getIfPresent(usedTable);

            if (tableCache == null) {
                tableCache = new FinishedJobEvictingDeque(maximumHistoryPerTable);
                tableHistoryCache.put(usedTable, tableCache);
            }

            tableCache.add(job);
        }
    }
}
| 7,349 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/history/JobHistoryStoreDAO.java | package com.airbnb.airpal.core.store.history;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.presto.Table;
import com.airbnb.airpal.sql.DbType;
import com.airbnb.airpal.sql.Util;
import com.airbnb.airpal.sql.beans.JobTableOutputJoinRow;
import com.airbnb.airpal.sql.beans.JobTableRow;
import com.airbnb.airpal.sql.beans.TableRow;
import com.airbnb.airpal.sql.dao.JobDAO;
import com.airbnb.airpal.sql.dao.JobOutputDAO;
import com.airbnb.airpal.sql.dao.JobTableDAO;
import com.airbnb.airpal.sql.dao.TableDAO;
import com.google.common.base.Strings;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.hubspot.rosetta.jdbi.RosettaResultSetMapperFactory;
import lombok.extern.slf4j.Slf4j;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.lang.String.format;
@Slf4j
public class JobHistoryStoreDAO
implements JobHistoryStore
{
private final DBI dbi;
// Drives dialect-specific SQL fragments (date arithmetic differs per DB).
private final DbType dbType;

/** Injected with the shared JDBI instance and the configured database dialect. */
@Inject
public JobHistoryStoreDAO(DBI dbi, DbType dbType)
{
    this.dbi = dbi;
    this.dbType = dbType;
}
private List<Job> getJobs(long limit, int dayInterval, String outerWhereClauseArg, String innerWhereClauseArg)
{
String outerWhereClause = Strings.isNullOrEmpty(outerWhereClauseArg) ? "true" : outerWhereClauseArg;
String innerWhereClause = Strings.isNullOrEmpty(innerWhereClauseArg) ? "true" : innerWhereClauseArg;
try (Handle handle = dbi.open()) {
Query<Map<String, Object>> query = handle.createQuery(
"SELECT " +
"j.id AS id, " +
"j.query AS query, " +
"j.user AS user, " +
"j.uuid AS uuid, " +
"j.queryStats as queryStats, " +
"j.state AS state, " +
"j.columns AS columns, " +
"j.query_finished AS queryFinished, " +
"j.query_started AS queryStarted, " +
"j.error AS error, " +
"t.connector_id AS connectorId, " +
"t.schema_ AS \"schema\", " +
"t.table_ AS \"table\", " +
"t.columns, " +
"jo.type, " +
"jo.description, " +
"jo.location " +
"FROM (SELECT * FROM jobs " +
"WHERE " + Util.getQueryFinishedCondition(dbType) + " " +
"AND " + innerWhereClause + " " +
"ORDER BY query_finished DESC LIMIT :limit) j " +
"LEFT OUTER JOIN job_tables jt ON j.id = jt.job_id " +
"LEFT OUTER JOIN tables t ON jt.table_id = t.id " +
"LEFT OUTER JOIN job_outputs jo ON j.id = jo.job_id " +
"WHERE " + outerWhereClause + " " +
"ORDER BY query_finished DESC")
.bind("limit", limit)
.bind("day_interval", dayInterval);
Map<Long, Job> idToJobMap = query.
map(RosettaResultSetMapperFactory.mapperFor(JobTableOutputJoinRow.class)).
fold(new HashMap<Long, Job>(), new JobTableOutputJoinRow.JobFolder());
return new ArrayList<>(idToJobMap.values());
}
}
private List<Job> getJobs(long limit, int dayInterval)
{
return getJobs(limit, dayInterval, null, null);
}
@Override
public List<Job> getRecentlyRun(long maxResults)
{
try {
return getJobs(maxResults, 1);
} catch (Exception e) {
log.error("Caught exception during getRecentlyRun", e);
return Collections.emptyList();
}
}
@Override
public List<Job> getRecentlyRun(long maxResults, Table table1, Table... otherTables)
{
return getRecentlyRun(maxResults, Lists.asList(table1, otherTables));
}
@Override
public List<Job> getRecentlyRun(long maxResults, Iterable<Table> tables)
{
try {
String tablesClause = Util.getTableCondition(tables);
return getJobs(maxResults, 1, tablesClause, null);
} catch (Exception e) {
log.error("Caught exception during getRecentlyRun", e);
return Collections.emptyList();
}
}
@Override
public List<Job> getRecentlyRunForUser(String user, long maxResults)
{
try {
String usersClause = format("user = '%s'", user);
return getJobs(maxResults, 1, null, usersClause);
} catch (Exception e) {
log.error("Caught exception during getRecentlyRun", e);
return Collections.emptyList();
}
}
@Override
public List<Job> getRecentlyRunForUser(String user, long maxResults, Iterable<Table> tables)
{
try {
String usersClause = format("user = '%s'", user);
String tablesClause = Util.getTableCondition(tables);
return getJobs(maxResults, 1, tablesClause, usersClause);
} catch (Exception e) {
log.error("Caught exception during getRecentlyRun", e);
return Collections.emptyList();
}
}
@Override
public void addRun(Job job)
{
JobDAO jobDAO = dbi.onDemand(JobDAO.class);
TableDAO tableDAO = dbi.onDemand(TableDAO.class);
JobTableDAO jobTableDAO = dbi.onDemand(JobTableDAO.class);
JobOutputDAO jobOutputDAO = dbi.onDemand(JobOutputDAO.class);
// Create the job
long jobId = jobDAO.createJob(job);
// Find all presto tables already represented
Set<TableRow> tablesInDb = Collections.emptySet();
if (job.getTablesUsed().size() > 0) {
tablesInDb = new HashSet<>(tableDAO.getTables(new ArrayList<>(job.getTablesUsed())));
}
// Figure out which tables are not represented
Sets.SetView<Table> tablesToAdd = Sets.difference(
job.getTablesUsed(),
Sets.newHashSet(Iterables.transform(tablesInDb, TableRow.MAP_TO_TABLE)));
// Add tables not already represented
tableDAO.createTables(tablesToAdd);
Set<TableRow> tablesWithIds = Collections.emptySet();
if (job.getTablesUsed().size() > 0) {
tablesWithIds = new HashSet<>(tableDAO.getTables(new ArrayList<>(job.getTablesUsed())));
}
List<JobTableRow> jobTableRows = new ArrayList<>(job.getTablesUsed().size());
for (TableRow tableRow : tablesWithIds) {
jobTableRows.add(new JobTableRow(-1, jobId, tableRow.getId()));
}
// Add associations between Job and Table
jobTableDAO.createJobTables(jobTableRows);
if (job.getOutput().getLocation() != null) {
jobOutputDAO.createJobOutput(job.getOutput(), jobId);
}
}
}
| 7,350 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/history/JobHistoryStore.java | package com.airbnb.airpal.core.store.history;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.presto.Table;
import java.util.List;
/**
 * Read/write store for the history of completed jobs.
 */
public interface JobHistoryStore
{
    /** Returns the most recently finished jobs, up to {@code maxResults}. */
    public List<Job> getRecentlyRun(long maxResults);

    /** Returns recently finished jobs that used any of the given tables. */
    public List<Job> getRecentlyRun(long maxResults, Table table1, Table... otherTables);

    /** Returns recently finished jobs that used any of the given tables. */
    public List<Job> getRecentlyRun(long maxResults, Iterable<Table> tables);

    /** Returns recently finished jobs run by the given user. */
    public List<Job> getRecentlyRunForUser(String user, long maxResults);

    /** Returns recently finished jobs run by the user that used the given tables. */
    public List<Job> getRecentlyRunForUser(String user, long maxResults, Iterable<Table> tables);

    /** Records a finished job run. */
    public void addRun(Job job);
}
| 7,351 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/files/ExpiringFileStore.java | package com.airbnb.airpal.core.store.files;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.google.common.cache.Weigher;
import io.airlift.units.DataSize;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import org.joda.time.DateTime;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.concurrent.ExecutionException;
@Slf4j
public class ExpiringFileStore
{
    /**
     * Size-bounded store of result files on local disk. Once the combined
     * size of all stored files exceeds the configured maximum, least-recently
     * used entries are evicted and their backing files deleted.
     */
    private final LoadingCache<String, FileWithMetadata> fileWithMetadataCache;
    // Files loaded by key (rather than added explicitly) are resolved
    // relative to the JVM's temp directory.
    private final File basePath = new File(System.getProperty("java.io.tmpdir"));

    public ExpiringFileStore(DataSize maxStorageSize)
    {
        long maxWeightInBytes = Math.round(Math.floor(maxStorageSize.getValue(DataSize.Unit.BYTE)));
        this.fileWithMetadataCache = CacheBuilder.newBuilder().maximumWeight(maxWeightInBytes).weigher(new Weigher<String, FileWithMetadata>() {
            @Override
            public int weigh(String key, FileWithMetadata fileWithMetadata)
            {
                // Clamp instead of casting blindly: a file larger than
                // Integer.MAX_VALUE bytes would otherwise overflow to a
                // negative weight, which the cache rejects.
                long bytes = Math.round(fileWithMetadata.getSize().getValue(DataSize.Unit.BYTE));
                return (bytes > Integer.MAX_VALUE) ? Integer.MAX_VALUE : (int) bytes;
            }
        }).removalListener(new RemovalListener<String, FileWithMetadata>() {
            @Override
            public void onRemoval(RemovalNotification<String, FileWithMetadata> notification)
            {
                // Evicted entries also delete their on-disk file.
                File f = notification.getValue().getFile();
                if (f != null && f.exists()) {
                    f.delete();
                }
            }
        }).build(new CacheLoader<String, FileWithMetadata>() {
            @Override
            public FileWithMetadata load(String key)
                    throws Exception
            {
                File file = new File(basePath, key);
                if (file.exists()) {
                    return new FileWithMetadata(file, new DataSize(file.length(), DataSize.Unit.BYTE), DateTime.now());
                }
                throw new FileNotFoundException();
            }
        });
    }

    /**
     * Registers an existing file under {@code key}; the file becomes subject
     * to size-based eviction (and deletion) from that point on.
     */
    public void addFile(String key, File file)
            throws IOException
    {
        long fileSize = file.length();
        fileWithMetadataCache.put(key, new FileWithMetadata(file, new DataSize(fileSize, DataSize.Unit.BYTE), DateTime.now()));
    }

    /**
     * Returns the file stored under {@code key}, or {@code null} if it is
     * absent, already evicted, or cannot be loaded from disk.
     */
    public File get(String key)
    {
        try {
            return fileWithMetadataCache.get(key).getFile();
        }
        catch (ExecutionException e) {
            // Missing entries are reported as null by contract, not as errors.
            return null;
        }
    }

    @Value
    private static class FileWithMetadata
    {
        private final File file;
        private final DataSize size;
        private final DateTime createdAt;
    }
}
| 7,352 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/jobs/ActiveJobsStore.java | package com.airbnb.airpal.core.store.jobs;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.core.AirpalUser;
import java.util.Set;
/**
 * A store for currently running (not yet finished) jobs, keyed by user.
 */
public interface ActiveJobsStore
{
    /**
     * Get all running jobs for the specified user.
     * @param user The user to retrieve jobs for.
     * @return All currently running jobs for this user; empty if none.
     */
    public Set<Job> getJobsForUser(AirpalUser user);

    /**
     * Mark a job as having started.
     * @param job The job that has started.
     */
    public void jobStarted(Job job);

    /**
     * Mark a job as having finished, removing it from the active set.
     * @param job The job that has finished.
     */
    public void jobFinished(Job job);
}
| 7,353 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store | Create_ds/airpal/src/main/java/com/airbnb/airpal/core/store/jobs/InMemoryActiveJobsStore.java | package com.airbnb.airpal.core.store.jobs;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.core.AirpalUser;
import com.google.common.collect.ImmutableSet;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
public class InMemoryActiveJobsStore implements ActiveJobsStore
{
    // user name -> set of that user's in-flight jobs
    private final ConcurrentMap<String, Set<Job>> activeJobs = new ConcurrentHashMap<>();

    /**
     * Returns a snapshot of the user's running jobs (empty if none).
     */
    @Override
    public Set<Job> getJobsForUser(AirpalUser user)
    {
        // Single lookup instead of containsKey+get, so a concurrent change
        // between the two calls cannot surprise us.
        Set<Job> jobsForUser = activeJobs.get(user.getUserName());
        if (jobsForUser == null) {
            return Collections.emptySet();
        }
        return ImmutableSet.copyOf(jobsForUser);
    }

    /**
     * Records a newly started job under its owning user.
     */
    @Override
    public void jobStarted(Job job)
    {
        Set<Job> jobsForUser = activeJobs.get(job.getUser());
        if (jobsForUser == null) {
            Set<Job> newSet = Collections.newSetFromMap(new ConcurrentHashMap<Job, Boolean>());
            // Use putIfAbsent's return value directly: if another thread won
            // the race, adopt its set instead of re-reading the map.
            Set<Job> existing = activeJobs.putIfAbsent(job.getUser(), newSet);
            jobsForUser = (existing == null) ? newSet : existing;
        }
        jobsForUser.add(job);
    }

    /**
     * Removes a finished job from its user's active set (no-op if absent).
     */
    @Override
    public void jobFinished(Job job)
    {
        Set<Job> jobsForUser = activeJobs.get(job.getUser());
        if (jobsForUser == null) {
            return;
        }
        jobsForUser.remove(job);
    }
}
| 7,354 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/ResultsPreviewResource.java | package com.airbnb.airpal.resources;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.opencsv.CSVReader;
import com.airbnb.airpal.core.store.files.ExpiringFileStore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.inject.Inject;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.AmazonS3;
import javax.inject.Named;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.net.URI;
import java.util.zip.GZIPInputStream;
@Slf4j
@Path("/api/preview")
public class ResultsPreviewResource
{
    private final ExpiringFileStore fileStore;
    private final AmazonS3 s3Client;
    private final String outputBucket;

    @Inject
    public ResultsPreviewResource(
            ExpiringFileStore fileStore,
            AmazonS3 s3Client,
            @Named("s3Bucket") String outputBucket)
    {
        this.fileStore = fileStore;
        this.s3Client = s3Client;
        this.outputBucket = outputBucket;
    }

    /**
     * Returns a JSON preview (column names plus up to {@code numLines} rows)
     * of a result file, served from S3 or from the local expiring store
     * depending on the URI's path prefix.
     */
    @GET
    @Path("/")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getFile(@QueryParam("fileURI") URI fileURI,
            @DefaultValue("100") @QueryParam("lines") int numLines)
    {
        if (fileURI.getPath().startsWith("/api/s3")) {
            return getS3Preview(fileURI, numLines);
        } else {
            return getFilePreview(fileURI, numLines);
        }
    }

    /** Maps a result file's base name to its key inside the output bucket. */
    private String getOutputKey(String fileBaseName)
    {
        return "airpal/" + fileBaseName;
    }

    /** Extracts the final path segment of the URI. */
    private String getFilename(URI fileURI)
    {
        return fileURI.getPath().substring(fileURI.getPath().lastIndexOf('/') + 1);
    }

    private Response getS3Preview(URI fileURI, int numLines) {
        val filename = getFilename(fileURI);
        val outputKey = getOutputKey(filename);
        // download ~100 kb (depending on your definition) of the file
        val request = new GetObjectRequest(
                outputBucket,
                outputKey
        ).withRange(0, 100 * 1024);
        val object = s3Client.getObject(request);

        ObjectMetadata objectMetadata = object.getObjectMetadata();
        boolean gzip = "gzip".equalsIgnoreCase(objectMetadata.getContentEncoding());

        try (InputStream input = object.getObjectContent()) {
            InputStreamReader reader;
            if (gzip) {
                reader = new InputStreamReader(new GZIPInputStream(input));
            }
            else {
                reader = new InputStreamReader(input);
            }
            // Closing the try-with-resources stream also releases the reader.
            return getPreviewFromCSV(new CSVReader(reader), numLines);
        }
        catch (IOException e) {
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
        }
    }

    /**
     * Builds the preview response from a CSV stream: the first record becomes
     * the column list, subsequent records (up to {@code numLines}) the rows.
     */
    private Response getPreviewFromCSV(CSVReader reader, final int numLines) {
        List<Map<String, String>> columns = new ArrayList<>();
        List<List<String>> rows = new ArrayList<>();
        try {
            String[] header = reader.readNext();
            if (header == null) {
                // Empty file: previously this dereferenced null and threw a
                // NullPointerException; return an empty preview instead.
                return Response.ok(new PreviewResponse(columns, rows)).build();
            }
            for (String columnName : header) {
                // Plain HashMap instead of double-brace initialization, which
                // creates a needless anonymous subclass per column.
                Map<String, String> column = new HashMap<>();
                column.put("name", columnName);
                columns.add(column);
            }
            int counter = 0;
            for (String[] line : reader) {
                counter++;
                rows.add(Arrays.asList(line));
                if (counter >= numLines) {
                    break;
                }
            }
        } catch (IOException e) {
            log.error(e.getMessage());
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
        }
        return Response.ok(new PreviewResponse(columns, rows)).build();
    }

    private Response getFilePreview(URI fileURI, int numLines) {
        String fileName = getFilename(fileURI);
        final File file = fileStore.get(fileName);
        try {
            if (file == null) {
                throw new FileNotFoundException(fileName + " could not be found");
            }
            try (final CSVReader reader = new CSVReader(new FileReader(file))) {
                return getPreviewFromCSV(reader, numLines);
            } catch (IOException e) {
                return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
            }
        } catch (FileNotFoundException e) {
            log.warn(e.getMessage());
            return Response.status(Response.Status.NOT_FOUND).build();
        }
    }

    @Data
    private static class PreviewResponse
    {
        @JsonProperty
        private final List<Map<String, String>> columns;
        @JsonProperty
        private final List<List<String>> data;
    }
}
| 7,355 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/RedirectRootResource.java | package com.airbnb.airpal.resources;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.Response;
import java.net.URI;
@Path("/")
public class RedirectRootResource {
    /** Permanently redirects the bare root URL to the single-page app. */
    @GET
    public Response redirectToApp()
    {
        Response.ResponseBuilder redirect = Response.temporaryRedirect(URI.create("/app"));
        return redirect
                .status(Response.Status.MOVED_PERMANENTLY)
                .build();
    }
}
| 7,356 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/UserResource.java | package com.airbnb.airpal.resources;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.core.AuthorizationUtil;
import lombok.Value;
import org.secnod.shiro.jaxrs.Auth;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@Path("/api/user")
@Produces(MediaType.APPLICATION_JSON)
public class UserResource
{
    /**
     * Returns the authenticated user's name and execution permissions,
     * or 403 when no user is authenticated.
     */
    @GET
    public Response getUserInfo(@Auth AirpalUser user)
    {
        if (user == null) {
            return Response.status(Response.Status.FORBIDDEN).build();
        }
        boolean canCreateTable = AuthorizationUtil.isAuthorizedWrite(user, "hive", "airpal", "any");
        ExecutionPermissions permissions = new ExecutionPermissions(
                canCreateTable,
                true,
                user.getAccessLevel());
        return Response.ok(new UserInfo(user.getUserName(), permissions)).build();
    }

    @Value
    private static class UserInfo
    {
        private final String name;
        private final ExecutionPermissions executionPermissions;
    }

    @Value
    public static class ExecutionPermissions
    {
        private final boolean canCreateTable;
        private final boolean canCreateCsv;
        private final String accessLevel;
    }
}
| 7,357 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/QueriesResource.java | package com.airbnb.airpal.resources;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.JobState;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.core.AuthorizationUtil;
import com.airbnb.airpal.core.execution.ExecutionClient;
import com.airbnb.airpal.core.store.history.JobHistoryStore;
import com.airbnb.airpal.presto.PartitionedTable;
import com.airbnb.airpal.presto.Table;
import com.facebook.presto.client.Column;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import com.google.inject.Inject;
import org.secnod.shiro.jaxrs.Auth;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import static com.airbnb.airpal.resources.QueryResource.JOB_ORDERING;
@Path("/api/queries")
@Produces({MediaType.APPLICATION_JSON})
public class QueriesResource
{
    private final JobHistoryStore jobHistoryStore;
    private final ExecutionClient executionClient;

    @Inject
    public QueriesResource(
            JobHistoryStore jobHistoryStore,
            ExecutionClient executionClient)
    {
        this.jobHistoryStore = jobHistoryStore;
        this.executionClient = executionClient;
    }

    /**
     * Lists recently run queries visible to the user, newest first.
     * Jobs whose tables the user cannot read are returned stripped of
     * columns and tables; failed table-less jobs pass through unchanged.
     */
    @GET
    public Response getQueries(
            @Auth AirpalUser user,
            @QueryParam("results") int numResults,
            @QueryParam("table") List<PartitionedTable> tables)
    {
        Iterable<Job> recentlyRun;
        // An absent "results" query parameter arrives as 0, so
        // Optional.of(numResults).or(200) could never supply the default;
        // apply it explicitly instead.
        int results = (numResults <= 0) ? 200 : numResults;

        if (tables.size() < 1) {
            recentlyRun = jobHistoryStore.getRecentlyRun(results);
        } else {
            recentlyRun = jobHistoryStore.getRecentlyRun(
                    results,
                    Iterables.transform(tables, new PartitionedTable.PartitionedTableToTable()));
        }

        ImmutableList.Builder<Job> filtered = ImmutableList.builder();
        for (Job job : recentlyRun) {
            if (job.getTablesUsed().isEmpty() && (job.getState() == JobState.FAILED)) {
                filtered.add(job);
                continue;
            }
            for (Table table : job.getTablesUsed()) {
                if (AuthorizationUtil.isAuthorizedRead(user, table)) {
                    filtered.add(new Job(
                            job.getUser(),
                            job.getQuery(),
                            job.getUuid(),
                            job.getOutput(),
                            job.getQueryStats(),
                            job.getState(),
                            Collections.<Column>emptyList(),
                            Collections.<Table>emptySet(),
                            job.getQueryStartedDateTime(),
                            job.getError(),
                            job.getQueryFinishedDateTime()));
                    // Add each job at most once, even when several of its
                    // tables are readable by this user (previously the job
                    // was duplicated once per authorized table).
                    break;
                }
            }
        }
        List<Job> sortedResult = Ordering
                .natural()
                .nullsLast()
                .onResultOf(JOB_ORDERING)
                .reverse()
                .immutableSortedCopy(filtered.build());
        return Response.ok(sortedResult).build();
    }

    /**
     * Cancels a running query owned by the user; 404 if it is unknown.
     */
    @DELETE
    @Path("/{uuid}")
    public Response cancelQuery(
            @Auth AirpalUser user,
            @PathParam("uuid") UUID uuid)
    {
        boolean success = executionClient.cancelQuery(user, uuid);
        if (success) {
            return Response.ok().build();
        } else {
            return Response.status(Response.Status.NOT_FOUND).build();
        }
    }
}
| 7,358 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/PingResource.java | package com.airbnb.airpal.resources;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.Response;
@Path("/ping")
public class PingResource {
@GET
public Response ping() {
return Response.status(Response.Status.OK).entity("PONG").build();
}
} | 7,359 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/SessionResource.java | package com.airbnb.airpal.resources;
import com.airbnb.airpal.service.LoginView;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.web.util.WebUtils;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.NewCookie;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.net.URI;
@Path("/")
public class SessionResource
{
    /** Permanently redirects the root URL to the single-page app. */
    @GET
    public Response redirectToApp()
    {
        Response.ResponseBuilder redirect = Response.temporaryRedirect(URI.create("/app"));
        return redirect
                .status(Response.Status.MOVED_PERMANENTLY)
                .build();
    }

    /** Renders the login page. */
    @GET
    @Path("/login")
    @Produces({MediaType.TEXT_HTML, MediaType.APPLICATION_JSON})
    public LoginView getLogin()
    {
        return new LoginView();
    }

    /**
     * Authenticates the submitted credentials (if not already logged in) and
     * redirects to the originally requested page, defaulting to the app.
     */
    @POST
    @Path("/login")
    public void doLogin(
            @Context HttpServletRequest request,
            @Context HttpServletResponse response,
            @FormParam("username") String username,
            @FormParam("password") String password)
            throws IOException
    {
        Subject currentUser = SecurityUtils.getSubject();
        boolean needsLogin = !currentUser.isAuthenticated();
        if (needsLogin) {
            AuthenticationToken credentials = new UsernamePasswordToken(username, password);
            currentUser.login(credentials);
        }
        WebUtils.redirectToSavedRequest(request, response, "/app");
    }

    /** Redirects to the app while clearing the "rememberMe" cookie. */
    @GET
    @Path("/postlogin")
    @Produces({MediaType.TEXT_HTML, MediaType.APPLICATION_JSON})
    public Response getLoginNoRemember()
    {
        NewCookie clearedRememberMe = new NewCookie("rememberMe", null);
        return Response.temporaryRedirect(URI.create("/app"))
                .cookie(clearedRememberMe)
                .build();
    }
}
| 7,360 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/S3FilesResource.java | package com.airbnb.airpal.resources;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.google.common.io.ByteStreams;
import com.google.inject.Inject;
import lombok.val;
import javax.inject.Named;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@Path("/api/s3")
public class S3FilesResource
{
    private final AmazonS3 s3Client;
    private final String outputBucket;

    @Inject
    public S3FilesResource(
            AmazonS3 s3Client,
            @Named("s3Bucket") String outputBucket)
    {
        this.s3Client = s3Client;
        this.outputBucket = outputBucket;
    }

    /** Maps a result file's base name to its key inside the output bucket. */
    private String getOutputKey(String fileBaseName)
    {
        return "airpal/" + fileBaseName;
    }

    /**
     * Streams a result file out of S3, preserving its content type and
     * encoding; 404 when the object does not exist.
     */
    @GET
    // The URI template must match @PathParam("filename"); the previous
    // literal path segment could never bind the parameter.
    @Path("/{filename}")
    @Produces(MediaType.APPLICATION_OCTET_STREAM)
    public Response getFile(@PathParam("filename") String filename)
    {
        val outputKey = getOutputKey(filename);
        val getRequest = new GetObjectRequest(outputBucket, outputKey);
        final val object = s3Client.getObject(getRequest);

        if (object == null) {
            return Response.status(Response.Status.NOT_FOUND).build();
        } else {
            ObjectMetadata objectMetadata = object.getObjectMetadata();
            Response.ResponseBuilder builder = Response.ok().type(objectMetadata.getContentType());
            if (objectMetadata.getContentEncoding() != null) {
                builder = builder.encoding(objectMetadata.getContentEncoding()); // gzip
            }
            return builder.entity(new StreamingOutput() {
                @Override
                public void write(OutputStream output)
                        throws IOException, WebApplicationException
                {
                    try (InputStream objectData = object.getObjectContent()) {
                        ByteStreams.copy(objectData, output);
                    } finally {
                        output.close();
                    }
                }
            }).build();
        }
    }
}
| 7,361 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/ExecuteResource.java | package com.airbnb.airpal.resources;
import com.airbnb.airpal.api.ExecutionRequest;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.core.AuthorizationUtil;
import com.airbnb.airpal.core.execution.ExecutionClient;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.inject.Inject;
import lombok.Data;
import org.secnod.shiro.jaxrs.Auth;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.util.UUID;
@Path("/api/execute")
public class ExecuteResource {
    // Injected once and never reassigned.
    private final ExecutionClient executionClient;

    @Inject
    public ExecuteResource(ExecutionClient executionClient)
    {
        this.executionClient = executionClient;
    }

    /**
     * Submits a query for execution on behalf of the authenticated user.
     *
     * @return 200 with the new execution's UUID, or 404 with an error body
     *         when no Airpal user is present
     */
    @PUT
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public Response executeQuery(@Auth AirpalUser user, ExecutionRequest request) throws IOException
    {
        // Guard clause keeps the success path unindented.
        if (user == null) {
            return Response.status(Response.Status.NOT_FOUND)
                    .entity(new ExecutionError("No Airpal user found"))
                    .build();
        }
        final UUID queryUuid = executionClient.runQuery(
                request,
                user,
                user.getDefaultSchema(),
                user.getQueryTimeout());
        return Response.ok(new ExecutionSuccess(queryUuid)).build();
    }

    /**
     * Reports the user's execution-related permissions; 403 when
     * unauthenticated.
     */
    @GET
    @Path("permissions")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getPermissions(@Auth AirpalUser user)
    {
        if (user == null) {
            return Response.status(Response.Status.FORBIDDEN).build();
        }
        return Response.ok(new ExecutionPermissions(
                AuthorizationUtil.isAuthorizedWrite(user, "hive", "airpal", "any"),
                true,
                user.getUserName(),
                user.getAccessLevel()
        )).build();
    }

    @Data
    public static class ExecutionSuccess
    {
        @JsonProperty
        public final UUID uuid;
    }

    @Data
    public static class ExecutionError
    {
        @JsonProperty
        public final String message;
    }

    @Data
    public static class ExecutionPermissions
    {
        private final boolean canCreateTable;
        private final boolean canCreateCsv;
        private final String userName;
        private final String accessLevel;
    }
}
| 7,362 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/FilesResource.java | package com.airbnb.airpal.resources;
import com.airbnb.airpal.core.store.files.ExpiringFileStore;
import com.google.common.io.ByteStreams;
import com.google.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
@Path("/api/files")
public class FilesResource
{
    private final ExpiringFileStore fileStore;

    @Inject
    public FilesResource(ExpiringFileStore fileStore)
    {
        this.fileStore = fileStore;
    }

    /**
     * Streams a locally stored result file back to the client; 404 when the
     * file is unknown or has expired out of the store.
     */
    @GET
    @Path("/{fileName}")
    @Produces(MediaType.APPLICATION_OCTET_STREAM)
    public Response getFile(@PathParam("fileName") String fileName)
    {
        final File file = fileStore.get(fileName);
        if (file == null) {
            return Response.status(Response.Status.NOT_FOUND).build();
        }
        StreamingOutput streamer = new StreamingOutput() {
            @Override
            public void write(OutputStream output)
                    throws IOException, WebApplicationException
            {
                // TODO: Make this use chunked encoding?
                try (FileInputStream inputStream = new FileInputStream(file)) {
                    ByteStreams.copy(inputStream, output);
                } finally {
                    output.close();
                }
            }
        };
        return Response.ok(streamer).build();
    }
}
| 7,363 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/UsersResource.java | package com.airbnb.airpal.resources;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.JobState;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.core.AuthorizationUtil;
import com.airbnb.airpal.core.store.jobs.ActiveJobsStore;
import com.airbnb.airpal.core.store.history.JobHistoryStore;
import com.airbnb.airpal.presto.PartitionedTable;
import com.airbnb.airpal.presto.PartitionedTable.PartitionedTableToTable;
import com.airbnb.airpal.presto.Table;
import com.airbnb.airpal.resources.UserResource.ExecutionPermissions;
import com.facebook.presto.client.Column;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import com.google.inject.Inject;
import org.secnod.shiro.jaxrs.Auth;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.Collections;
import java.util.List;
import static com.airbnb.airpal.resources.QueryResource.JOB_ORDERING;
@Path("/api/users/{id}")
@Produces(MediaType.APPLICATION_JSON)
public class UsersResource
{
    private final JobHistoryStore jobHistoryStore;
    private final ActiveJobsStore activeJobsStore;

    @Inject
    public UsersResource(JobHistoryStore jobHistoryStore, ActiveJobsStore activeJobsStore)
    {
        this.jobHistoryStore = jobHistoryStore;
        this.activeJobsStore = activeJobsStore;
    }

    /**
     * Reports the authenticated user's execution permissions; 403 when
     * unauthenticated. NOTE(review): permissions are computed from the
     * authenticated user, not the {id} path segment — confirm intended.
     */
    @GET
    @Path("permissions")
    public Response getUserPermissions(
            @Auth AirpalUser user,
            @PathParam("id") String userId)
    {
        if (user == null) {
            return Response.status(Response.Status.FORBIDDEN).build();
        } else {
            return Response.ok(
                    new ExecutionPermissions(
                            AuthorizationUtil.isAuthorizedWrite(user, "hive", "airpal", "any"),
                            true,
                            user.getAccessLevel())
            ).build();
        }
    }

    /**
     * Lists a user's recently run queries, newest first, stripped of columns
     * and tables the viewer may not read; failed table-less jobs pass
     * through unchanged.
     */
    @GET
    @Path("queries")
    public Response getUserQueries(
            @Auth AirpalUser user,
            @PathParam("id") String userId,
            @QueryParam("results") int numResults,
            @QueryParam("table") List<PartitionedTable> tables)
    {
        Iterable<Job> recentlyRun;
        // An absent "results" parameter arrives as 0; the previous
        // Optional.of(numResults).or(0) was a no-op, so apply the default
        // directly.
        int results = (numResults <= 0) ? 100 : numResults;

        if (tables.size() < 1) {
            recentlyRun = jobHistoryStore.getRecentlyRunForUser(userId, results);
        } else {
            recentlyRun = jobHistoryStore.getRecentlyRunForUser(
                    userId,
                    results,
                    Iterables.transform(tables, new PartitionedTableToTable()));
        }

        ImmutableList.Builder<Job> filtered = ImmutableList.builder();
        for (Job job : recentlyRun) {
            if (job.getTablesUsed().isEmpty() && (job.getState() == JobState.FAILED)) {
                filtered.add(job);
                continue;
            }
            for (Table table : job.getTablesUsed()) {
                if (AuthorizationUtil.isAuthorizedRead(user, table)) {
                    filtered.add(new Job(
                            job.getUser(),
                            job.getQuery(),
                            job.getUuid(),
                            job.getOutput(),
                            job.getQueryStats(),
                            job.getState(),
                            Collections.<Column>emptyList(),
                            Collections.<Table>emptySet(),
                            job.getQueryStartedDateTime(),
                            job.getError(),
                            job.getQueryFinishedDateTime()));
                    // Add each job at most once, even when several of its
                    // tables are readable (previously duplicated per table).
                    break;
                }
            }
        }
        List<Job> sortedResult = Ordering
                .natural()
                .nullsLast()
                .onResultOf(JOB_ORDERING)
                .reverse()
                .immutableSortedCopy(filtered.build());
        return Response.ok(sortedResult).build();
    }

    /** Lists the authenticated user's currently running queries, newest first. */
    @GET
    @Path("active-queries")
    public Response getUserActiveQueries(@Auth AirpalUser user)
    {
        List<Job> sortedResult = Ordering
                .natural()
                .nullsLast()
                .onResultOf(JOB_ORDERING)
                .reverse()
                .immutableSortedCopy(activeJobsStore.getJobsForUser(user));
        return Response.ok(sortedResult).build();
    }
}
| 7,364 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/QueryResource.java | package com.airbnb.airpal.resources;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.JobState;
import com.airbnb.airpal.api.queries.CreateSavedQueryBuilder;
import com.airbnb.airpal.api.queries.SavedQuery;
import com.airbnb.airpal.api.queries.UserSavedQuery;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.core.AuthorizationUtil;
import com.airbnb.airpal.core.store.history.JobHistoryStore;
import com.airbnb.airpal.core.store.queries.QueryStore;
import com.airbnb.airpal.presto.PartitionedTable;
import com.airbnb.airpal.presto.Table;
import com.facebook.presto.client.Column;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Ordering;
import com.google.inject.Inject;
import org.joda.time.DateTime;
import org.secnod.shiro.jaxrs.Auth;
import javax.annotation.Nullable;
import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
@Path("/api/query")
public class QueryResource
{
    // History of finished jobs (unused by the methods visible here except
    // getHistory further below).
    private final JobHistoryStore jobHistoryStore;
    // Backing store for users' saved queries.
    private final QueryStore queryStore;

    @Inject
    public QueryResource(JobHistoryStore jobHistoryStore,
            QueryStore queryStore)
    {
        this.jobHistoryStore = jobHistoryStore;
        this.queryStore = queryStore;
    }
@GET
@Path("saved")
@Produces(MediaType.APPLICATION_JSON)
public Response getSaved(
@Auth AirpalUser user,
@QueryParam("table") List<PartitionedTable> tables)
{
if (user != null) {
return Response.ok(queryStore.getSavedQueries(user)).build();
}
return Response.ok(Collections.<SavedQuery>emptyList()).build();
}
@POST
@Path("saved")
@Produces(MediaType.APPLICATION_JSON)
public Response saveQuery(
@Auth AirpalUser user,
@FormParam("description") String description,
@FormParam("name") String name,
@FormParam("query") String query)
{
CreateSavedQueryBuilder createFeaturedQueryRequest = CreateSavedQueryBuilder.featured()
.description(description)
.name(name)
.query(query);
if (user != null) {
SavedQuery savedQuery = createFeaturedQueryRequest.user(user.getUserName())
.build();
if (queryStore.saveQuery((UserSavedQuery) savedQuery)) {
return Response.ok(savedQuery.getUuid()).build();
}
else {
return Response.status(Response.Status.NOT_FOUND).build();
}
}
return Response.status(Response.Status.UNAUTHORIZED).build();
}
@DELETE
@Path("saved/{uuid}")
@Produces(MediaType.APPLICATION_JSON)
public Response deleteQuery(
@Auth AirpalUser user,
@PathParam("uuid") UUID uuid)
{
if (user != null) {
if (queryStore.deleteSavedQuery(user, uuid)) {
return Response.status(Response.Status.NO_CONTENT).build();
}
else {
return Response.status(Response.Status.NOT_FOUND).build();
}
}
return Response.status(Response.Status.UNAUTHORIZED).build();
}
public static Function<Job, DateTime> JOB_ORDERING = new Function<Job, DateTime>()
{
@Nullable
@Override
public DateTime apply(@Nullable Job input)
{
if (input == null) {
return null;
}
return input.getQueryFinished();
}
};
@GET
@Path("history")
@Produces(MediaType.APPLICATION_JSON)
public Response getHistory(
@Auth AirpalUser user,
@QueryParam("table") List<Table> tables)
{
Iterable<Job> recentlyRun;
if (tables.size() < 1) {
recentlyRun = jobHistoryStore.getRecentlyRun(200);
}
else {
Table[] tablesArray = tables.toArray(new Table[tables.size()]);
Table[] restTables = Arrays.copyOfRange(tablesArray, 1, tablesArray.length);
recentlyRun = jobHistoryStore.getRecentlyRun(200, tablesArray[0], restTables);
}
ImmutableList.Builder<Job> filtered = ImmutableList.builder();
for (Job job : recentlyRun) {
if (job.getTablesUsed().isEmpty() && (job.getState() == JobState.FAILED)) {
filtered.add(job);
continue;
}
for (Table table : job.getTablesUsed()) {
if (AuthorizationUtil.isAuthorizedRead(user, table)) {
filtered.add(new Job(
job.getUser(),
job.getQuery(),
job.getUuid(),
job.getOutput(),
job.getQueryStats(),
job.getState(),
Collections.<Column>emptyList(),
Collections.<Table>emptySet(),
job.getQueryStartedDateTime(),
job.getError(),
job.getQueryFinishedDateTime()));
}
}
}
List<Job> sortedResult = Ordering
.natural()
.nullsLast()
.onResultOf(JOB_ORDERING)
.reverse()
.immutableSortedCopy(filtered.build());
return Response.ok(sortedResult).build();
}
}
| 7,365 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/HealthResource.java | package com.airbnb.airpal.resources;
import com.codahale.metrics.health.HealthCheck;
import com.codahale.metrics.health.HealthCheckRegistry;
import io.dropwizard.jersey.caching.CacheControl;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.Map;
import java.util.SortedMap;
@Path("/health")
@Produces(MediaType.APPLICATION_JSON)
public class HealthResource {
private final HealthCheckRegistry registry;
@Inject
public HealthResource(HealthCheckRegistry registry) {
this.registry = registry;
}
@GET
@CacheControl(mustRevalidate = true, noCache = true, noStore = true)
public Response health() {
final SortedMap<String, HealthCheck.Result> results = registry.runHealthChecks();
if (results.isEmpty()) {
return Response.status(new NotImplementedStatus()).entity(results).build();
} else {
if (isAllHealthy(results)) {
return Response.status(Response.Status.OK).entity(results).build();
} else {
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(results).build();
}
}
}
private static boolean isAllHealthy(Map<String, HealthCheck.Result> results) {
for (HealthCheck.Result result : results.values()) {
if (!result.isHealthy()) {
return false;
}
}
return true;
}
private static final class NotImplementedStatus implements Response.StatusType
{
@Override
public int getStatusCode() {
return 501;
}
@Override
public String getReasonPhrase() {
return "Not Implemented";
}
@Override
public Response.Status.Family getFamily() {
return Response.Status.Family.SERVER_ERROR;
}
}
} | 7,366 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/TablesResource.java | package com.airbnb.airpal.resources;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.core.store.usage.UsageStore;
import com.airbnb.airpal.presto.PartitionedTable;
import com.airbnb.airpal.presto.Table;
import com.airbnb.airpal.presto.hive.HivePartition;
import com.airbnb.airpal.presto.metadata.ColumnCache;
import com.airbnb.airpal.presto.metadata.PreviewTableCache;
import com.airbnb.airpal.presto.metadata.SchemaCache;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import io.dropwizard.util.Duration;
import lombok.Data;
import lombok.NonNull;
import org.joda.time.DateTime;
import org.secnod.shiro.jaxrs.Auth;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import static com.airbnb.airpal.core.AuthorizationUtil.isAuthorizedRead;
import static com.airbnb.airpal.presto.hive.HivePartition.HivePartitionItem;
import static java.lang.String.format;
@Path("/api/table")
public class TablesResource
{
    private final SchemaCache schemaCache;
    private final ColumnCache columnCache;
    private final PreviewTableCache previewTableCache;
    private final UsageStore usageStore;
    private final String defaultCatalog;

    @Inject
    public TablesResource(
            final SchemaCache schemaCache,
            final ColumnCache columnCache,
            final PreviewTableCache previewTableCache,
            final UsageStore usageStore,
            @Named("default-catalog") final String defaultCatalog)
    {
        this.schemaCache = schemaCache;
        this.columnCache = columnCache;
        this.previewTableCache = previewTableCache;
        this.usageStore = usageStore;
        this.defaultCatalog = defaultCatalog;
    }

    /**
     * Lists every table in the catalog (default catalog when none given)
     * that the user may read, decorated with usage counts.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response getTableUpdates(
            @Auth AirpalUser user,
            @QueryParam("catalog") Optional<String> catalogOptional)
    {
        final String catalog = catalogOptional.or(defaultCatalog);
        final Map<String, List<String>> schemaMap = schemaCache.getSchemaMap(catalog);
        final ImmutableList.Builder<Table> builder = ImmutableList.builder();
        for (Map.Entry<String, List<String>> entry : schemaMap.entrySet()) {
            String schema = entry.getKey();
            for (String table : entry.getValue()) {
                if (isAuthorizedRead(user, catalog, schema, table)) {
                    builder.add(new Table(catalog, schema, table));
                }
            }
        }
        final List<Table> tables = builder.build();
        final Map<Table, Long> allUsages = usageStore.getUsages(tables);
        // Last-updated tracking is not implemented; an empty map yields null
        // lastUpdated values downstream.
        final Map<PartitionedTable, DateTime> updateMap = Collections.emptyMap();

        return Response.ok(createTablesWithMetaData(tables, allUsages, updateMap)).build();
    }

    // TODO: Make getTableColumns, getTablePartitions and getTablePreview take a 3rd path parameter for catalog

    /** Returns column metadata for a table, or 403 if the user may not read it. */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("{schema}/{tableName}/columns")
    public Response getTableColumns(
            @Auth AirpalUser user,
            @PathParam("schema") String schema,
            @PathParam("tableName") String tableName)
            throws ExecutionException
    {
        if (isAuthorizedRead(user, defaultCatalog, schema, tableName)) {
            return Response.ok(columnCache.getColumns(schema, tableName)).build();
        }
        else {
            return Response.status(Response.Status.FORBIDDEN).build();
        }
    }

    /** Returns partition values for a table, or 403 if the user may not read it. */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("{schema}/{tableName}/partitions")
    public Response getTablePartitions(
            @Auth AirpalUser user,
            @PathParam("schema") String schema,
            @PathParam("tableName") String tableName)
            throws ExecutionException
    {
        if (isAuthorizedRead(user, defaultCatalog, schema, tableName)) {
            // NOTE(review): catalog is hard-coded to "hive" here while the
            // authorization check uses defaultCatalog — see TODO above.
            return Response.ok(getPartitionsWithMetaData(new PartitionedTable("hive", schema, tableName))).build();
        }
        else {
            return Response.status(Response.Status.FORBIDDEN).build();
        }
    }

    /**
     * Returns a preview of a table (optionally restricted to one partition
     * value), or 403 if the user may not read it.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Path("{schema}/{tableName}/preview")
    public Response getTablePreview(
            @Auth AirpalUser user,
            @PathParam("schema") String schema,
            @PathParam("tableName") String tableName,
            @QueryParam("connectorId") String connectorId,
            @QueryParam("partitionName") final String partitionName,
            @QueryParam("partitionValue") String partitionValue)
            throws ExecutionException
    {
        List<HivePartition> partitions = columnCache.getPartitions(schema, tableName);
        Optional<HivePartition> partition = FluentIterable.from(partitions).firstMatch(
                new Predicate<HivePartition>()
                {
                    @Override
                    public boolean apply(HivePartition input)
                    {
                        return Objects.equals(input.getName(), partitionName);
                    }
                });
        if (isAuthorizedRead(user, defaultCatalog, schema, tableName)) {
            return Response.ok(previewTableCache.getPreview(
                    Optional.fromNullable(connectorId).or(defaultCatalog),
                    schema,
                    tableName,
                    partition,
                    partitionValue)).build();
        }
        else {
            return Response.status(Response.Status.FORBIDDEN).build();
        }
    }

    /** JSON view of a table plus its usage count over the usage-store window. */
    @Data
    public static class PartitionedTableWithMetaData
    {
        @JsonProperty
        private final String schema;
        @JsonProperty
        private final String tableName;
        @JsonProperty
        private final String partition;
        @JsonProperty
        private final String fqn;
        @JsonProperty
        private final long usages;
        @JsonProperty
        private final int windowCount;
        @JsonProperty
        private final TimeUnit windowUnit;
        @JsonProperty
        private final DateTime lastUpdated;

        public static PartitionedTableWithMetaData fromTable(final Table table,
                                                             final long usages,
                                                             final TimeUnit windowUnit,
                                                             final int windowCount,
                                                             final DateTime lastUpdated)
        {
            return fromPartionedTable(PartitionedTable.fromTable(table),
                    usages,
                    windowUnit,
                    windowCount,
                    lastUpdated);
        }

        public static PartitionedTableWithMetaData fromPartionedTable(final PartitionedTable table,
                                                                      final long usages,
                                                                      final TimeUnit windowUnit,
                                                                      final int windowCount,
                                                                      final DateTime lastUpdated)
        {
            return new PartitionedTableWithMetaData(table.getSchema(),
                    table.getTable(),
                    table.getPartitionName(),
                    format("%s.%s", table.getSchema(), table.getTable()),
                    usages,
                    windowCount,
                    windowUnit,
                    lastUpdated);
        }
    }

    /** Decorates each table with its usage count and last-updated time (may be null). */
    private List<PartitionedTableWithMetaData> createTablesWithMetaData(
            @NonNull final List<Table> tables,
            @NonNull final Map<Table, Long> tableUsageMap,
            @NonNull final Map<PartitionedTable, DateTime> tableUpdateMap)
    {
        final ImmutableList.Builder<PartitionedTableWithMetaData> builder = ImmutableList.builder();
        final Duration usageWindow = usageStore.window();
        for (Table table : tables) {
            PartitionedTable partitionedTable = PartitionedTable.fromTable(table);
            DateTime updatedAt = tableUpdateMap.get(partitionedTable);
            long lastUsage = 0;
            if (tableUsageMap.containsKey(table)) {
                lastUsage = tableUsageMap.get(table);
            }
            builder.add(PartitionedTableWithMetaData.fromTable(
                    table,
                    lastUsage,
                    usageWindow.getUnit(),
                    (int) usageWindow.getQuantity(),
                    updatedAt
            ));
        }
        return builder.build();
    }

    /** Flattens each partition's values into one item per (name, value) pair. */
    private List<HivePartitionItem> getPartitionsWithMetaData(PartitionedTable table)
            throws ExecutionException
    {
        List<HivePartition> partitions = columnCache.getPartitions(table.getSchema(), table.getTable());
        ImmutableList.Builder<HivePartitionItem> partitionItems = ImmutableList.builder();
        for (HivePartition partition : partitions) {
            for (Object value : partition.getValues()) {
                // Removed an unused PartitionedTable local that was computed
                // per value but never read. Last-updated metadata is not
                // tracked, so it is always null.
                partitionItems.add(new HivePartitionItem(partition.getName(), partition.getType(), value, null));
            }
        }
        return partitionItems.build();
    }
}
| 7,367 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/sse/SSEEventSource.java | package com.airbnb.airpal.resources.sse;
import lombok.extern.slf4j.Slf4j;
import org.eclipse.jetty.servlets.EventSource;
import java.io.IOException;
import static com.airbnb.airpal.resources.sse.SSEEventSourceServlet.JobUpdateToSSERelay;
import static com.google.common.base.Preconditions.checkNotNull;
@Slf4j
public class SSEEventSource implements EventSource
{
    private final JobUpdateToSSERelay jobUpdateToSSERelay;
    private Emitter emitter;

    public SSEEventSource(JobUpdateToSSERelay jobUpdateToSSERelay)
    {
        this.jobUpdateToSSERelay = checkNotNull(jobUpdateToSSERelay, "jobUpdateToSSERelay was null");
    }

    @Override
    public void onOpen(Emitter emitter)
            throws IOException
    {
        this.emitter = checkNotNull(emitter, "emitter was null");
    }

    @Override
    public void onClose()
    {
        // Deregister first so no further events are relayed to a dead emitter.
        jobUpdateToSSERelay.removeListener(this);
        this.emitter = null;
    }

    /**
     * Pushes one SSE data frame to the client; on I/O failure the listener
     * is removed from the relay so it is not retried.
     */
    public void emit(String message)
    {
        Emitter target = emitter;
        if (target == null) {
            log.error("Emitter was closed, could not emit message!");
            return;
        }
        try {
            target.data(message);
        }
        catch (IOException e) {
            log.error("Could not send data to SSEEventSource", e);
            jobUpdateToSSERelay.removeListener(this);
        }
    }
}
| 7,368 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources | Create_ds/airpal/src/main/java/com/airbnb/airpal/resources/sse/SSEEventSourceServlet.java | package com.airbnb.airpal.resources.sse;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.event.JobEvent;
import com.airbnb.airpal.api.event.JobFinishedEvent;
import com.airbnb.airpal.api.event.JobUpdateEvent;
import com.airbnb.airpal.core.AirpalUser;
import com.airbnb.airpal.core.AirpalUserFactory;
import com.airbnb.airpal.core.AuthorizationUtil;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Iterables;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import com.google.common.util.concurrent.RateLimiter;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import org.eclipse.jetty.servlets.EventSource;
import org.eclipse.jetty.servlets.EventSourceServlet;
import javax.servlet.http.HttpServletRequest;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import static com.codahale.metrics.MetricRegistry.name;
import static com.google.common.base.Preconditions.checkNotNull;
@Slf4j
public class SSEEventSourceServlet extends EventSourceServlet
{
    private final JobUpdateToSSERelay jobUpdateToSSERelay;
    private final AirpalUserFactory userFactory;

    @Inject
    public SSEEventSourceServlet(ObjectMapper objectMapper,
                                 EventBus eventBus,
                                 @Named("sse") ExecutorService executorService,
                                 MetricRegistry registry,
                                 AirpalUserFactory userFactory)
    {
        this.jobUpdateToSSERelay = new JobUpdateToSSERelay(objectMapper, executorService, registry);
        this.userFactory = userFactory;
        // Subscribe the relay to job events published on the shared bus.
        eventBus.register(jobUpdateToSSERelay);
    }

    /**
     * Creates one SSEEventSource per incoming SSE connection and registers it
     * with the relay under the current user's identity.
     */
    @Override
    protected EventSource newEventSource(HttpServletRequest request)
    {
        SSEEventSource eventSource = new SSEEventSource(jobUpdateToSSERelay);
        jobUpdateToSSERelay.addListener(eventSource, userFactory.provide());
        return eventSource;
    }

    /**
     * Fans job events out to every connected SSE client, after a
     * per-subscriber authorization check performed on a worker thread.
     */
    static class JobUpdateToSSERelay
    {
        private final ObjectMapper objectMapper;
        // Caps broadcast of *update* events at 15/sec; excess updates are
        // dropped (finished events are never rate-limited).
        private final RateLimiter updateLimiter = RateLimiter.create(15.0);
        // Concurrent set/map: listeners are added/removed from servlet and
        // emitter threads while broadcasts iterate.
        private final Set<SSEEventSource> subscribers = Collections.newSetFromMap(new ConcurrentHashMap<SSEEventSource, Boolean>());
        private final Map<SSEEventSource, AirpalUser> eventSourceSubjectMap = new ConcurrentHashMap<>();
        private final ExecutorService executorService;
        private final Timer timer;

        public JobUpdateToSSERelay(ObjectMapper objectMapper, ExecutorService executorService, MetricRegistry registry)
        {
            this.objectMapper = checkNotNull(objectMapper, "objectMapper was null");
            this.executorService = checkNotNull(executorService, "executorService was null");
            // Times the per-broadcast authorization work.
            this.timer = registry.timer(name(AuthorizedEventBroadcast.class, "authorization"));
        }

        public void addListener(SSEEventSource sseEventSource, AirpalUser subject)
        {
            AirpalUser eventSubject = checkNotNull(subject, "subject was null");
            SSEEventSource eventSource = checkNotNull(sseEventSource, "sseEventSource was null");
            subscribers.add(eventSource);
            eventSourceSubjectMap.put(eventSource, eventSubject);
        }

        public void removeListener(SSEEventSource sseEventSource)
        {
            SSEEventSource eventSource = checkNotNull(sseEventSource, "sseEventSource was null");
            subscribers.remove(eventSource);
            eventSourceSubjectMap.remove(eventSource);
        }

        // Serializes the event once, then schedules an authorization-gated
        // send for each subscriber on the executor.
        private void broadcast(JobEvent message)
        {
            try {
                String jsonMessage = objectMapper.writeValueAsString(message);
                for (SSEEventSource subscriber : subscribers) {
                    executorService.submit(
                            new AuthorizedEventBroadcast(subscriber,
                                    eventSourceSubjectMap.get(subscriber),
                                    jsonMessage,
                                    message.getJob(),
                                    timer));
                }
            }
            catch (JsonProcessingException e) {
                log.error("Could not serialize JobEvent as JSON", e);
            }
        }

        @Subscribe
        public void receiveJobUpdate(JobUpdateEvent event) {
            // Drop the update when over the rate limit rather than blocking.
            if (updateLimiter.tryAcquire()) {
                broadcast(event);
            }
        }

        @Subscribe
        public void receiveJobFinished(JobFinishedEvent event) {
            broadcast(event);
        }
    }

    /**
     * Sends one serialized event to one subscriber iff the subject may read
     * every table the job used; authorization time is recorded in the timer.
     */
    @Value
    private static class AuthorizedEventBroadcast implements Runnable
    {
        private final SSEEventSource eventSource;
        private final AirpalUser subject;
        private final String message;
        private final Job job;
        private final Timer timer;

        @Override
        public void run()
        {
            Timer.Context context = timer.time();
            if (Iterables.all(job.getTablesUsed(), new AuthorizationUtil.AuthorizedTablesPredicate(subject))) {
                eventSource.emit(message);
            }
            context.stop();
        }
    }
}
| 7,369 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/Job.java | package com.airbnb.airpal.api;
import com.airbnb.airpal.api.output.PersistentJobOutput;
import com.airbnb.airpal.presto.Table;
import com.facebook.presto.client.Column;
import com.facebook.presto.client.QueryError;
import com.facebook.presto.execution.QueryStats;
import com.fasterxml.jackson.annotation.*;
import com.google.common.collect.Sets;
import com.hubspot.rosetta.StoredAsJson;
import lombok.Data;
import lombok.experimental.Wither;
import org.joda.time.DateTime;
import java.util.List;
import java.util.Set;
import java.util.UUID;
/**
 * A single query execution: who ran what, its live state/stats, the tables it
 * touched, and where its output went. Serialized to clients as JSON and
 * persisted via Rosetta (@StoredAsJson fields are stored as JSON strings).
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@Data
public class Job
{
    @JsonProperty
    @Wither
    private final String user;
    @JsonProperty
    @Wither
    private final String query;
    @JsonProperty
    @Wither
    private final UUID uuid;
    @JsonProperty
    @Wither
    private final PersistentJobOutput output;
    @JsonProperty
    @Wither
    @StoredAsJson
    private QueryStats queryStats;
    @JsonProperty
    @Wither
    private JobState state;
    @JsonProperty
    @StoredAsJson
    private List<Column> columns;
    @JsonProperty
    @Wither
    private Set<Table> tablesUsed;
    // NOTE(review): this field initializer is always overwritten by the
    // canonical constructor below; it only matters for Lombok/other paths.
    @JsonProperty
    @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
    @Wither
    private DateTime queryStarted = new DateTime();
    @JsonProperty
    @StoredAsJson
    private QueryError error;
    // Null while the query is still running.
    @JsonProperty
    @JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss")
    @Wither
    private DateTime queryFinished;

    /** Canonical constructor used by Jackson deserialization. */
    @JsonCreator
    public Job(@JsonProperty("user") final String user,
               @JsonProperty("query") final String query,
               @JsonProperty("uuid") final UUID uuid,
               @JsonProperty("output") final PersistentJobOutput output,
               @JsonProperty("queryStats") final QueryStats queryStats,
               @JsonProperty("state") final JobState state,
               @JsonProperty("columns") final List<Column> columns,
               @JsonProperty("tablesUsed") final Set<Table> tablesUsed,
               @JsonProperty("queryStarted") final DateTime queryStarted,
               @JsonProperty("error") final QueryError error,
               @JsonProperty("queryFinished") final DateTime queryFinished)
    {
        this.user = user;
        this.query = query;
        this.uuid = uuid;
        this.output = output;
        this.queryStats = queryStats;
        this.state = state;
        this.columns = columns;
        this.tablesUsed = tablesUsed;
        this.queryStarted = queryStarted;
        this.error = error;
        this.queryFinished = queryFinished;
    }

    /**
     * Convenience constructor for a freshly started job: starts the clock at
     * "now" and uses a concurrent set for tablesUsed so tables can be added
     * from other threads while the query runs.
     */
    public Job(final String user,
               final String query,
               final UUID uuid,
               final PersistentJobOutput output,
               final QueryStats stats,
               final JobState state,
               final List<Column> columns,
               final QueryError error,
               final DateTime queryFinished)
    {
        this(user,
                query,
                uuid,
                output,
                stats,
                state,
                columns,
                Sets.<Table>newConcurrentHashSet(),
                new DateTime(),
                error,
                queryFinished
        );
    }

    // @JsonIgnore accessors: expose the raw DateTime objects without adding
    // duplicate (unformatted) fields to the JSON output.
    @JsonIgnore
    public DateTime getQueryFinishedDateTime()
    {
        return queryFinished;
    }

    @JsonIgnore
    public DateTime getQueryStartedDateTime()
    {
        return queryStarted;
    }
}
| 7,370 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/EvictingDeque.java | package com.airbnb.airpal.api;
import com.google.common.collect.ForwardingBlockingDeque;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
/**
 * A bounded deque that never blocks on insert: when full, the oldest element
 * (head) is evicted to make room. put/offer-with-timeout deliberately ignore
 * the blocking contract and delegate to the evicting {@link #add}.
 */
public class EvictingDeque<E> extends ForwardingBlockingDeque<E>
{
    private final LinkedBlockingDeque<E> blockingDeque;

    public EvictingDeque(final int capacity)
    {
        this.blockingDeque = new LinkedBlockingDeque<>(capacity);
    }

    @Override
    public void put(E e) throws InterruptedException
    {
        this.add(e);
    }

    @Override
    public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedException
    {
        return this.add(e);
    }

    /**
     * Appends to the tail; when the deque is full, evicts the head and
     * retries until the insert succeeds.
     */
    @Override
    public boolean add(E element)
    {
        final boolean initialResult = blockingDeque.offer(element);
        return initialResult || (evictItem(blockingDeque) && add(element));
    }

    @Override
    protected BlockingDeque<E> delegate()
    {
        return blockingDeque;
    }

    /**
     * Removes the head to make room. Uses poll() rather than remove():
     * remove() throws NoSuchElementException when the deque is empty, which
     * could happen here if another thread drains the deque between the
     * failed offer and the eviction. poll() returns null in that case, and
     * add() then simply retries the insert.
     */
    protected boolean evictItem(LinkedBlockingDeque<E> deque)
    {
        return deque.poll() != null;
    }
}
| 7,371 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/ExecutionRequest.java | package com.airbnb.airpal.api;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Getter;
import lombok.Setter;
/**
 * Immutable request body for executing a query, optionally materializing the
 * result into a temporary table.
 */
public class ExecutionRequest
{
    @JsonProperty
    private final String query;

    @JsonProperty
    private final String tmpTable;

    @JsonCreator
    public ExecutionRequest(@JsonProperty("query") final String query,
                            @JsonProperty("tmpTable") final String tmpTable) {
        this.query = query;
        this.tmpTable = tmpTable;
    }

    // Explicit accessors in place of Lombok @Getter; identical signatures.
    public String getQuery() {
        return query;
    }

    public String getTmpTable() {
        return tmpTable;
    }
}
| 7,372 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/JobState.java | package com.airbnb.airpal.api;
import com.google.common.base.Predicate;
/** Lifecycle states of a {@link Job}; terminal states are flagged as "done". */
public enum JobState {
    /** Query has been accepted and is awaiting execution. */
    QUEUED(false),
    /** Query is being planned. */
    PLANNING(false),
    /** Query execution is being started. */
    STARTING(false),
    /** Query has at least one task in the output stage. */
    RUNNING(false),
    /** Query has finished executing and all output has been consumed. */
    FINISHED_EXECUTION(false),
    /** Job has finished forwarding all output to S3/Hive. */
    FINISHED(true),
    /** Query was canceled by a user. */
    CANCELED(true),
    /** Query execution failed. */
    FAILED(true);

    private final boolean doneState;

    JobState(boolean doneState)
    {
        this.doneState = doneState;
    }

    /** Is this a terminal state. */
    public boolean isDone()
    {
        return doneState;
    }

    /** Predicate form of {@link #isDone()} for use with Guava collections. */
    public static Predicate<JobState> inDoneState()
    {
        return new Predicate<JobState>()
        {
            @Override
            public boolean apply(JobState state)
            {
                return state.isDone();
            }
        };
    }

    /**
     * Maps a Presto statement state name to a JobState. Presto's "FINISHED"
     * means execution finished, which corresponds to FINISHED_EXECUTION here
     * (FINISHED is reserved for output forwarding being complete).
     */
    public static JobState fromStatementState(String statementState) {
        if (statementState.equalsIgnoreCase("FINISHED")) {
            return FINISHED_EXECUTION;
        }
        return JobState.valueOf(statementState);
    }
}
| 7,373 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/queries/SavedQuery.java | package com.airbnb.airpal.api.queries;
import java.util.UUID;
/**
 * A persisted, named query belonging to a user. The query text may contain
 * placeholders (see {@link FeaturedQuery.QueryWithPlaceholders}).
 * Redundant {@code public} modifiers removed — interface members are
 * implicitly public.
 */
public interface SavedQuery
{
    /** Username of the query's owner. */
    String getUser();

    /** Short display name. */
    String getName();

    /** Free-form description. */
    String getDescription();

    /** Stable identifier used for lookup and deletion. */
    UUID getUuid();

    /** The query text together with any extracted placeholders. */
    FeaturedQuery.QueryWithPlaceholders getQueryWithPlaceholders();
}
| 7,374 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/queries/FeaturedQuery.java | package com.airbnb.airpal.api.queries;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import lombok.*;
import org.joda.time.DateTime;
import javax.validation.constraints.NotNull;
import java.util.List;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * A saved query promoted to "featured" status; shares all fields with
 * {@link UserSavedQuery} but defaults the featured flag to true.
 */
@EqualsAndHashCode(callSuper = false)
@AllArgsConstructor
@NoArgsConstructor
public class FeaturedQuery extends UserSavedQuery
{
    @NotNull
    @JsonProperty
    private boolean featured = true;

    public FeaturedQuery(QueryWithPlaceholders queryWithPlaceholders,
                         String user,
                         String name,
                         String description,
                         DateTime createdAt,
                         UUID uuid,
                         boolean featured)
    {
        super(queryWithPlaceholders, user, name, description, createdAt, uuid, featured);
    }

    /** Row/column location of a placeholder within the query text. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class Position
    {
        @JsonProperty
        private int row;
        @JsonProperty
        private int column;
    }

    /** One placeholder occurrence: its length, location, name, and optional type restriction. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class QueryPlaceholder
    {
        @JsonProperty
        private int length;
        @JsonProperty
        private Position position;
        @JsonProperty
        private String name;
        @JsonProperty
        private String typeRestriction;
    }

    // Matches "[[placeholder:name]]" tokens; group 1 is the whole token,
    // group 2 the name. final added: a mutable public static Pattern could be
    // reassigned at runtime.
    public static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("(\\[\\[placeholder:([\\w-]+)\\]\\])",
            Pattern.CASE_INSENSITIVE);

    /** Query text plus the placeholders extracted from it. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class QueryWithPlaceholders
    {
        @JsonProperty
        private String query;
        @JsonProperty
        private List<QueryPlaceholder> placeholders;

        /**
         * Scans {@code query} for placeholder tokens and records each one's
         * length, position, and name (typeRestriction is left null).
         */
        public static QueryWithPlaceholders fromQuery(String query)
        {
            ImmutableList.Builder<QueryPlaceholder> builder = ImmutableList.builder();
            Matcher matcher = PLACEHOLDER_PATTERN.matcher(query);
            String[] queryLines = query.split("\\r?\\n");

            while (matcher.find()) {
                int start = matcher.start();
                int end = matcher.end();
                int line = 0;

                // Line = first line containing the token text; duplicate
                // placeholders therefore all map to the first occurrence's
                // line — NOTE(review): confirm that is intended.
                for (int i = 0; i < queryLines.length; i++) {
                    if (queryLines[i].contains(matcher.group(1))) {
                        line = i;
                        break;
                    }
                }

                // NOTE(review): Position.column is set to the offset within
                // the whole query string (matcher.start()), not the column
                // within the line — confirm consumers expect that.
                builder.add(new QueryPlaceholder((end - start),
                        new Position(line, start),
                        matcher.group(2),
                        null));
            }

            return new QueryWithPlaceholders(query, builder.build());
        }
    }
}
| 7,375 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/queries/CreateSavedQueryBuilder.java | package com.airbnb.airpal.api.queries;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Getter;
import org.joda.time.DateTime;
import java.util.UUID;
import static com.google.common.base.Preconditions.*;
/**
 * Fluent builder for {@link SavedQuery} instances. Obtain one via
 * {@link #featured()} or {@link #notFeatured()}, set user/query/name/
 * description, then call {@link #build()}.
 */
public class CreateSavedQueryBuilder
{
    @JsonProperty
    @Getter
    private String description;
    @JsonProperty
    @Getter
    private String query;
    @JsonProperty
    @Getter
    private String user;
    @JsonProperty
    @Getter
    private String name;
    @Getter
    private final DateTime createdAt = new DateTime();
    @Getter
    private final boolean featured;

    private CreateSavedQueryBuilder(String user,
                                    String query,
                                    String name,
                                    String description,
                                    boolean featured)
    {
        this.user = user;
        this.query = query;
        this.name = name;
        this.description = description;
        this.featured = featured;
    }

    /** Starts a builder for a featured query. */
    public static CreateSavedQueryBuilder featured()
    {
        return new CreateSavedQueryBuilder(null, null, null, null, true);
    }

    /** Starts a builder for a regular (non-featured) saved query. */
    public static CreateSavedQueryBuilder notFeatured()
    {
        return new CreateSavedQueryBuilder(null, null, null, null, false);
    }

    public CreateSavedQueryBuilder user(String user)
    {
        checkNotNull(user, "User can not be null");
        this.user = user;
        return this;
    }

    public CreateSavedQueryBuilder query(String query)
    {
        checkNotNull(query, "Query can not be null");
        this.query = query;
        return this;
    }

    public CreateSavedQueryBuilder name(String name)
    {
        checkNotNull(name, "Name can not be null");
        this.name = name;
        return this;
    }

    public CreateSavedQueryBuilder description(String description)
    {
        checkNotNull(description, "Description can not be null");
        this.description = description;
        return this;
    }

    /**
     * Validates that all required fields were set and materializes either a
     * {@link FeaturedQuery} or a {@link UserSavedQuery} with a fresh UUID.
     *
     * @throws NullPointerException if any required field is missing
     */
    public SavedQuery build()
    {
        checkNotNull(user, "User can not be null");
        checkNotNull(query, "Query can not be null");
        checkNotNull(name, "Name can not be null");
        checkNotNull(description, "Description can not be null");
        FeaturedQuery.QueryWithPlaceholders placeholders = checkNotNull(
                FeaturedQuery.QueryWithPlaceholders.fromQuery(query),
                "Generated query can not be null");

        if (featured) {
            return new FeaturedQuery(placeholders, user, name, description, createdAt, UUID.randomUUID(), true);
        }
        return new UserSavedQuery(placeholders, user, name, description, createdAt, UUID.randomUUID(), false);
    }
}
| 7,376 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/queries/UserSavedQuery.java | package com.airbnb.airpal.api.queries;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.hubspot.rosetta.StoredAsJson;
import lombok.*;
import org.joda.time.DateTime;
import javax.validation.constraints.NotNull;
import java.util.UUID;
/**
 * A user's saved query as persisted to storage. The query-with-placeholders
 * payload is stored as a JSON string (@StoredAsJson).
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class UserSavedQuery implements SavedQuery
{
    @NotNull
    @JsonProperty
    @StoredAsJson
    private FeaturedQuery.QueryWithPlaceholders queryWithPlaceholders;

    @NotNull
    @JsonProperty
    private String user;

    @NotNull
    @JsonProperty
    private String name;

    @NotNull
    @JsonProperty
    private String description;

    // Deliberately lacks @JsonProperty: exposed to JSON only through the
    // ISO-formatted getCreatedAt() below.
    @NotNull
    private DateTime createdAt;

    @NotNull
    @JsonProperty
    private UUID uuid;

    // NOTE(review): @NotNull on a primitive boolean is a no-op — a primitive
    // can never be null.
    @NotNull
    @JsonProperty
    private boolean featured = false;

    /** Creation time serialized as an ISO-8601 string, or null if unset. */
    @JsonProperty
    public String getCreatedAt()
    {
        if (createdAt != null) {
            return createdAt.toDateTimeISO().toString();
        } else {
            return null;
        }
    }
}
| 7,377 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/HiveTablePersistentOutput.java | package com.airbnb.airpal.api.output;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.UUID;
import java.util.regex.Pattern;
import static java.lang.String.format;
/**
 * Job output that materializes query results into a Hive table
 * ("CREATE TABLE schema.name AS ..."). The output's location URI is the
 * fully-qualified "schema.table" string.
 */
@Slf4j
@JsonTypeName("hive")
public class HiveTablePersistentOutput implements PersistentJobOutput
{
    // NOTE(review): declared but never referenced in this class — presumably
    // intended for table-name validation that was never wired up; confirm
    // before relying on any sanitization here.
    private final static Pattern INVALID_TABLE_CHARS = Pattern.compile("\\s");

    private final UUID jobUUID;
    @Getter
    private final String tmpTableName;
    @Getter
    private final String destinationSchema;
    @Getter
    @Setter
    private URI location;

    public HiveTablePersistentOutput(UUID jobUUID,
                                     String tmpTableName,
                                     String destinationSchema)
    {
        this.jobUUID = jobUUID;
        this.tmpTableName = tmpTableName;
        try {
            // Location is the relative URI "schema.table"; a syntax error
            // leaves it null rather than failing construction.
            this.location = new URI(format("%s.%s", destinationSchema, tmpTableName));
        }
        catch (URISyntaxException e) {
            this.location = null;
            log.error("Couldn't create hive output", e);
        }
        this.destinationSchema = destinationSchema;
    }

    /**
     * Jackson deserialization constructor: only the location survives
     * round-tripping; jobUUID/tmpTableName/destinationSchema are null and the
     * "description" parameter is ignored.
     */
    @JsonCreator
    public HiveTablePersistentOutput(@JsonProperty("location") URI location,
                                     @JsonProperty("type") String type,
                                     @JsonProperty("description") String description)
    {
        this((UUID) null, null, null);
        this.location = location;
    }

    @Override
    public String getType()
    {
        return "hive";
    }

    @Override
    public String getDescription()
    {
        return null;
    }

    /**
     * Wraps the user's query in a CREATE TABLE ... AS statement targeting
     * destinationSchema.tmpTableName.
     * NOTE(review): tmpTableName is interpolated without validation or
     * quoting — confirm upstream input is sanitized before this is reached.
     */
    @Override
    public String processQuery(String query)
    {
        String tableFqn = format("%s.%s", destinationSchema, tmpTableName);
        return format("CREATE TABLE %s AS %s", tableFqn, query);
    }
}
| 7,378 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/PersistentJobOutputFactory.java | package com.airbnb.airpal.api.output;
import com.amazonaws.services.s3.AmazonS3;
import com.google.common.base.Strings;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import java.net.URI;
import java.util.UUID;
public class PersistentJobOutputFactory
{
    private final AmazonS3 s3Client;
    private final String s3Bucket;
    private final String createTableDestinationSchema;

    @Inject
    public PersistentJobOutputFactory(AmazonS3 s3Client,
                                      @Named("s3Bucket") String s3Bucket,
                                      @Named("createTableDestinationSchema") String createTableDestinationSchema)
    {
        this.s3Client = s3Client;
        this.s3Bucket = s3Bucket;
        this.createTableDestinationSchema = createTableDestinationSchema;
    }

    /**
     * Builds an output descriptor for a new job: a Hive-table output when a
     * temporary table name is supplied, a CSV output otherwise.
     */
    public PersistentJobOutput create(final String tmpTable,
                                      final UUID jobUUID)
    {
        if (Strings.isNullOrEmpty(tmpTable)) {
            return new CSVPersistentOutput(null, "csv", null);
        }
        return new HiveTablePersistentOutput(jobUUID, tmpTable, createTableDestinationSchema);
    }

    /**
     * Reconstructs an output descriptor from stored fields. An absolute URI
     * denotes a CSV output, a relative one a Hive table; a null location
     * yields null.
     */
    public static PersistentJobOutput create(String type, String description, URI location)
    {
        if (location == null) {
            return null;
        }
        return location.isAbsolute()
                ? new CSVPersistentOutput(location, type, description)
                : new HiveTablePersistentOutput(location, type, description);
    }
}
| 7,379 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/PersistentJobOutput.java | package com.airbnb.airpal.api.output;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import java.net.URI;
/**
 * Describes where the results of a job are persisted (e.g. "csv" or "hive")
 * and how the submitted query must be rewritten to produce that output.
 * Implementations are polymorphically serialized by their type name.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
public interface PersistentJobOutput
{
    // Interface members are implicitly public; redundant modifiers removed.

    /** JSON type discriminator (e.g. "csv", "hive"). */
    String getType();

    /** Human-readable description of the output; may be null. */
    String getDescription();

    /** Location of the persisted output; may be null for invalid targets. */
    URI getLocation();

    URI setLocation = null; /* placeholder removed */ void setLocation(URI location);

    /** Rewrites the query so its results are persisted to this output. */
    String processQuery(String query);
}
| 7,380 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/InvalidQueryException.java | package com.airbnb.airpal.api.output;
/**
 * Thrown when a submitted query cannot be executed against its requested
 * output — e.g. when the destination Hive table name is invalid.
 */
public class InvalidQueryException
        extends Exception
{
    public InvalidQueryException(String message)
    {
        super(message);
    }

    /**
     * Backward-compatible addition: carry the underlying cause so callers
     * can preserve the full failure chain.
     */
    public InvalidQueryException(String message, Throwable cause)
    {
        super(message, cause);
    }
}
| 7,381 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/CSVPersistentOutput.java | package com.airbnb.airpal.api.output;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import lombok.Getter;
import lombok.Setter;
import java.net.URI;
@JsonTypeName("csv")
public class CSVPersistentOutput implements PersistentJobOutput
{
    // Explicit accessors written out instead of Lombok @Getter/@Setter;
    // generated and hand-written getters are behaviorally identical.
    private URI location;
    private final String type;
    private final String description;

    /** Output descriptor for CSV downloads; the query is used unchanged. */
    @JsonCreator
    public CSVPersistentOutput(
            @JsonProperty("location") URI location,
            @JsonProperty("type") String type,
            @JsonProperty("description") String description)
    {
        this.location = location;
        this.type = type;
        this.description = description;
    }

    @Override
    public URI getLocation()
    {
        return location;
    }

    @Override
    public void setLocation(URI location)
    {
        this.location = location;
    }

    @Override
    public String getType()
    {
        return type;
    }

    @Override
    public String getDescription()
    {
        return description;
    }

    @Override
    public String processQuery(String query)
    {
        // CSV output requires no query rewriting.
        return query;
    }
}
| 7,382 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/builders/CsvOutputBuilder.java | package com.airbnb.airpal.api.output.builders;
import com.opencsv.CSVWriter;
import com.facebook.presto.client.Column;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.Lists;
import com.google.common.io.CountingOutputStream;
import lombok.extern.slf4j.Slf4j;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.List;
import java.util.UUID;
import java.util.zip.GZIPOutputStream;
@Slf4j
public class CsvOutputBuilder implements JobOutputBuilder
{
    private static final String FILE_SUFFIX = ".csv";

    @JsonIgnore
    private final File outputFile;
    @JsonIgnore
    private final CSVWriter csvWriter;
    @JsonIgnore
    private final boolean includeHeader;
    @JsonIgnore
    private final CountingOutputStream countingOutputStream;
    @JsonIgnore
    private final long maxFileSizeBytes;
    @JsonIgnore
    private boolean headerWritten = false;
    @JsonIgnore
    private final UUID jobUUID;

    /**
     * Streams job results into a temporary CSV file (optionally gzipped),
     * enforcing a maximum on-disk size.
     *
     * @param includeHeader    whether to emit a header row from the column names
     * @param jobUUID          used as the temp-file name prefix
     * @param maxFileSizeBytes byte limit after which the build is aborted
     * @param compressedOutput gzip the output stream when true
     */
    public CsvOutputBuilder(boolean includeHeader, UUID jobUUID, long maxFileSizeBytes, boolean compressedOutput) throws IOException {
        this.includeHeader = includeHeader;
        this.jobUUID = jobUUID;
        this.outputFile = File.createTempFile(jobUUID.toString(), FILE_SUFFIX);
        this.maxFileSizeBytes = maxFileSizeBytes;

        // Count bytes at the file level so the size check reflects what is
        // actually written to disk (post-compression when gzipping).
        this.countingOutputStream = new CountingOutputStream(new FileOutputStream(this.outputFile));
        OutputStreamWriter writer;
        if (compressedOutput) {
            writer = new OutputStreamWriter(new GZIPOutputStream(this.countingOutputStream));
        }
        else {
            writer = new OutputStreamWriter(this.countingOutputStream);
        }
        this.csvWriter = new CSVWriter(writer);
    }

    /** Appends one data row; null values are rendered as empty strings. */
    @Override
    public void addRow(List<Object> row)
            throws FileTooLargeException
    {
        final String[] values = new String[row.size()];
        for (int i = 0; i < values.length; i++) {
            final Object value = row.get(i);
            values[i] = (value == null) ? "" : value.toString();
        }
        writeCsvRow(values);
    }

    /** Writes the header row exactly once, if headers were requested. */
    @Override
    public void addColumns(List<Column> columns)
            throws FileTooLargeException
    {
        if (!headerWritten && includeHeader) {
            List<String> columnNames = Lists.transform(columns, Column::getName);
            writeCsvRow(columnNames.toArray(new String[columnNames.size()]));
            headerWritten = true;
        }
    }

    /** CSV output requires no query rewriting. */
    @Override
    public String processQuery(String query)
    {
        return query;
    }

    // Writes a row and enforces the size limit: on overflow the writer is
    // closed, the partial file deleted, and FileTooLargeException thrown.
    // NOTE(review): the writer buffers internally, so the counted byte size
    // may lag slightly behind the rows already handed to writeNext().
    private void writeCsvRow(String[] cols)
            throws FileTooLargeException
    {
        csvWriter.writeNext(cols);

        if (countingOutputStream.getCount() > maxFileSizeBytes) {
            try {
                csvWriter.close();
            }
            catch (IOException e) {
                log.error("Caught exception closing csv writer", e);
            }
            delete();
            throw new FileTooLargeException();
        }
    }

    /** Flushes and closes the writer, returning the finished temp file. */
    @Override
    public File build()
    {
        try {
            csvWriter.close();
        }
        catch (IOException e) {
            // BUGFIX: use the class logger instead of e.printStackTrace(),
            // consistent with the error handling in writeCsvRow().
            log.error("Caught exception closing csv writer", e);
        }
        return outputFile;
    }

    @Override
    public void delete()
    {
        log.info("Deleting outputFile {}", outputFile);
        if (!outputFile.delete()) {
            log.error("Failed to delete outputFile {}", outputFile);
        }
    }
}
| 7,383 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/builders/FileTooLargeException.java | package com.airbnb.airpal.api.output.builders;
/**
 * Thrown by a JobOutputBuilder when the output file exceeds its configured
 * maximum size; the partial output is deleted before this is raised.
 */
public class FileTooLargeException extends Exception
{
    public FileTooLargeException()
    {
    }

    /** Backward-compatible addition: allow a descriptive message. */
    public FileTooLargeException(String message)
    {
        super(message);
    }
}
| 7,384 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/builders/HiveTableOutputBuilder.java | package com.airbnb.airpal.api.output.builders;
import com.facebook.presto.client.Column;
import lombok.RequiredArgsConstructor;
import java.io.File;
import java.util.List;
import static java.lang.String.format;
public class HiveTableOutputBuilder
        implements JobOutputBuilder
{
    private final String destinationSchema;
    private final String tmpTableName;

    /**
     * Builder for Hive-table output: it only rewrites the query into a
     * CREATE TABLE AS statement; rows and columns are ignored.
     */
    public HiveTableOutputBuilder(String destinationSchema, String tmpTableName)
    {
        this.destinationSchema = destinationSchema;
        this.tmpTableName = tmpTableName;
    }

    @Override
    public void addRow(List<Object> row)
            throws FileTooLargeException
    {
        // No-op for Hive output.
    }

    @Override
    public void addColumns(List<Column> columns)
            throws FileTooLargeException
    {
        // No-op for Hive output.
    }

    @Override
    public String processQuery(String query)
    {
        return format("CREATE TABLE %s.%s AS %s", destinationSchema, tmpTableName, query);
    }

    @Override
    public File build()
    {
        // There is no local artifact to hand back.
        return null;
    }

    @Override
    public void delete()
    {
    }
}
| 7,385 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/builders/JobOutputBuilder.java | package com.airbnb.airpal.api.output.builders;
import com.facebook.presto.client.Column;
import java.io.File;
import java.util.List;
/**
 * Incrementally receives the results of a running job and produces its
 * persisted output (e.g. a local CSV file; no local artifact for Hive).
 */
public interface JobOutputBuilder
{
    // Interface members are implicitly public; redundant modifiers removed.

    /** Appends one row of results. */
    void addRow(List<Object> row)
            throws FileTooLargeException;

    /** Receives the result column metadata (e.g. for a CSV header). */
    void addColumns(List<Column> columns)
            throws FileTooLargeException;

    /** Rewrites the query as needed for this output type. */
    String processQuery(String query);

    /** Finishes writing and returns the output file, or null if none. */
    File build();

    /** Removes any local artifacts produced by this builder. */
    void delete();
}
| 7,386 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/builders/OutputBuilderFactory.java | package com.airbnb.airpal.api.output.builders;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.output.HiveTablePersistentOutput;
import com.airbnb.airpal.api.output.InvalidQueryException;
import com.airbnb.airpal.api.output.PersistentJobOutput;
import lombok.RequiredArgsConstructor;
import java.io.IOException;
import java.net.URI;
import static java.lang.String.format;
public class OutputBuilderFactory
{
    private final long maxFileSizeBytes;
    private final boolean isCompressedOutput;

    public OutputBuilderFactory(long maxFileSizeBytes, boolean isCompressedOutput)
    {
        this.maxFileSizeBytes = maxFileSizeBytes;
        this.isCompressedOutput = isCompressedOutput;
    }

    /**
     * Selects the output builder that matches the job's output type.
     *
     * @throws InvalidQueryException    if a Hive output has no valid location
     * @throws IllegalArgumentException if the output type is unknown
     */
    public JobOutputBuilder forJob(Job job)
            throws IOException, InvalidQueryException
    {
        PersistentJobOutput output = job.getOutput();
        switch (output.getType()) {
            case "csv":
                return new CsvOutputBuilder(true, job.getUuid(), maxFileSizeBytes, isCompressedOutput);
            case "hive": {
                HiveTablePersistentOutput hiveOutput = (HiveTablePersistentOutput) output;
                // A null location means the table name could not form a URI.
                if (hiveOutput.getLocation() == null) {
                    throw new InvalidQueryException(format("Invalid table name '%s'", hiveOutput.getTmpTableName()));
                }
                return new HiveTableOutputBuilder(hiveOutput.getDestinationSchema(), hiveOutput.getTmpTableName());
            }
            default:
                throw new IllegalArgumentException(format("OutputBuilder for type %s not found", output.getType()));
        }
    }
}
| 7,387 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/persistors/HiveTablePersistor.java | package com.airbnb.airpal.api.output.persistors;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.output.PersistentJobOutput;
import com.airbnb.airpal.api.output.builders.JobOutputBuilder;
import com.airbnb.airpal.core.execution.QueryExecutionAuthorizer;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import java.net.URI;
import java.util.List;
import static com.google.common.base.Preconditions.checkState;
public class HiveTablePersistor
        implements Persistor
{
    private static final Splitter TABLE_SPLITTER = Splitter.on(".").omitEmptyStrings();

    private final URI jobURI;

    public HiveTablePersistor(PersistentJobOutput jobOutput)
    {
        this.jobURI = jobOutput.getLocation();
    }

    @Override
    public boolean canPersist(QueryExecutionAuthorizer authorizer)
    {
        // The location URI is expected to be "schema.table".
        final List<String> parts = TABLE_SPLITTER.splitToList(jobURI.toString());
        checkState(parts.size() == 2, "destination hive table did not have schema and table components");
        // Safe to index directly: checkState guarantees exactly two parts.
        return authorizer.isAuthorizedWrite("hive", parts.get(0), parts.get(1));
    }

    @Override
    public URI persist(JobOutputBuilder outputBuilder, Job job)
    {
        // No separate persistence step for Hive output; nothing to return.
        return null;
    }
}
| 7,388 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/persistors/Persistor.java | package com.airbnb.airpal.api.output.persistors;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.output.builders.JobOutputBuilder;
import com.airbnb.airpal.core.execution.QueryExecutionAuthorizer;
import java.net.URI;
/**
 * Persists the built output of a completed job to its final destination.
 */
public interface Persistor
{
    // Both members made modifier-consistent: interface methods are
    // implicitly public, so the redundant modifier is dropped.

    /** Whether the given authorizer permits writing to this destination. */
    boolean canPersist(QueryExecutionAuthorizer authorizer);

    /** Persists the job's output; returns a URI for retrieval, or null. */
    URI persist(JobOutputBuilder outputBuilder, Job job);
}
| 7,389 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/persistors/CSVPersistorFactory.java | package com.airbnb.airpal.api.output.persistors;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.output.PersistentJobOutput;
import com.airbnb.airpal.core.store.files.ExpiringFileStore;
import com.amazonaws.services.s3.AmazonS3;
import lombok.AllArgsConstructor;
public class CSVPersistorFactory
{
    // Explicit constructor written out instead of Lombok @AllArgsConstructor.
    private boolean useS3Persistor = false;
    private AmazonS3 s3Client;
    private String s3Bucket;
    private ExpiringFileStore expiringFileStore;
    private boolean compressedOutput;

    public CSVPersistorFactory(boolean useS3Persistor,
                               AmazonS3 s3Client,
                               String s3Bucket,
                               ExpiringFileStore expiringFileStore,
                               boolean compressedOutput)
    {
        this.useS3Persistor = useS3Persistor;
        this.s3Client = s3Client;
        this.s3Bucket = s3Bucket;
        this.expiringFileStore = expiringFileStore;
        this.compressedOutput = compressedOutput;
    }

    public Persistor getPersistor(Job job, PersistentJobOutput jobOutput)
    {
        // TODO: Support variable CSV persistor.
        // maxSizeForTextView is fixed at 0 for the S3 persistor here.
        return useS3Persistor
                ? new S3FilePersistor(s3Client, s3Bucket, 0L, compressedOutput)
                : new FlatFilePersistor(expiringFileStore);
    }
}
| 7,390 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/persistors/FlatFilePersistor.java | package com.airbnb.airpal.api.output.persistors;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.output.builders.JobOutputBuilder;
import com.airbnb.airpal.core.execution.QueryExecutionAuthorizer;
import com.airbnb.airpal.core.store.files.ExpiringFileStore;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import static java.lang.String.format;
@Slf4j
public class FlatFilePersistor
        implements Persistor
{
    private final ExpiringFileStore fileStore;

    public FlatFilePersistor(ExpiringFileStore fileStore)
    {
        this.fileStore = fileStore;
    }

    @Override
    public boolean canPersist(QueryExecutionAuthorizer authorizer)
    {
        // Everyone can create files to download.
        return true;
    }

    /**
     * Registers the built file with the expiring store and returns the API
     * path where it can be downloaded.
     */
    @Override
    public URI persist(JobOutputBuilder outputBuilder, Job job)
    {
        final File output = outputBuilder.build();
        try {
            fileStore.addFile(output.getName(), output);
        }
        catch (IOException e) {
            // Best-effort: log and still return the URI.
            log.error("Caught error adding file to local store", e);
        }
        return URI.create(format("/api/files/%s", output.getName()));
    }
}
| 7,391 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/persistors/PersistorFactory.java | package com.airbnb.airpal.api.output.persistors;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.output.PersistentJobOutput;
import lombok.RequiredArgsConstructor;
@RequiredArgsConstructor
public class PersistorFactory
{
    private final CSVPersistorFactory csvPersistorFactory;

    /**
     * Returns the persistor matching the job output's type.
     *
     * @throws IllegalArgumentException if the output type is not "csv" or "hive"
     */
    public Persistor getPersistor(Job job, PersistentJobOutput jobOutput)
    {
        switch (jobOutput.getType()) {
            case "csv":
                return csvPersistorFactory.getPersistor(job, jobOutput);
            case "hive":
                return new HiveTablePersistor(jobOutput);
            default:
                // BUGFIX: include the offending type — the bare
                // IllegalArgumentException gave no diagnostic at all.
                throw new IllegalArgumentException(
                        "No persistor found for output type: " + jobOutput.getType());
        }
    }
}
| 7,392 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/output/persistors/S3FilePersistor.java | package com.airbnb.airpal.api.output.persistors;
import com.airbnb.airpal.api.Job;
import com.airbnb.airpal.api.output.builders.JobOutputBuilder;
import com.airbnb.airpal.core.execution.ExecutionClient;
import com.airbnb.airpal.core.execution.QueryExecutionAuthorizer;
import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.google.common.net.MediaType;
import lombok.RequiredArgsConstructor;
import lombok.val;
import javax.ws.rs.core.UriBuilder;
import java.io.File;
import java.net.URI;
import static com.google.common.base.Preconditions.checkNotNull;
@RequiredArgsConstructor
public class S3FilePersistor
        implements Persistor
{
    private final AmazonS3 s3Client;
    private final String outputBucket;
    private final long maxSizeForTextView;
    private final boolean compressedOutput;

    @Override
    public boolean canPersist(QueryExecutionAuthorizer authorizer)
    {
        // Everyone can write to s3
        return true;
    }

    /** Key under which the job output is stored in the output bucket. */
    private String getOutputKey(String fileBaseName)
    {
        return "airpal/" + fileBaseName;
    }

    /**
     * Uploads the built CSV file to S3 and returns the API URI that serves it.
     * The local temp file is always deleted afterwards, even on failure.
     *
     * @throws ExecutionClient.ExecutionFailureException if the S3 upload fails
     */
    @Override
    public URI persist(JobOutputBuilder outputBuilder, Job job)
    {
        File file = checkNotNull(outputBuilder.build(), "output builder resulting file was null");

        val objectMetaData = new ObjectMetadata();
        objectMetaData.setContentLength(file.length());
        objectMetaData.setContentType(MediaType.CSV_UTF_8.toString());
        if (compressedOutput) {
            // Mark the payload as gzip so clients decompress transparently.
            objectMetaData.setContentEncoding("gzip");
        }

        val putRequest = new PutObjectRequest(
                outputBucket,
                getOutputKey(file.getName()),
                file
        ).withMetadata(objectMetaData);

        try {
            s3Client.putObject(putRequest);
            // BUGFIX: the path must contain a {fileName} template variable for
            // build(file.getName()) to substitute; the literal placeholder
            // string previously here produced a bogus URI.
            return UriBuilder.fromPath("/api/s3/{fileName}").build(file.getName());
        }
        catch (AmazonClientException e) {
            throw new ExecutionClient.ExecutionFailureException(job, "Could not upload CSV to S3", e);
        }
        finally {
            outputBuilder.delete();
        }
    }
}
| 7,393 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/event/JobFinishedEvent.java | package com.airbnb.airpal.api.event;
import com.airbnb.airpal.api.Job;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
 * Event fired when a job finishes; carries the finished job as its payload.
 */
@Data
public class JobFinishedEvent implements JobEvent {
    // The finished job, serialized into the event.
    @JsonProperty
    private final Job job;
}
| 7,394 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/event/JobOutputPersistenceEvent.java | package com.airbnb.airpal.api.event;
import com.facebook.presto.client.QueryError;
import lombok.Data;
import java.util.UUID;
/**
 * Event describing the outcome of persisting a job's output.
 */
@Data
public class JobOutputPersistenceEvent {
    /** Terminal states of a persistence attempt. */
    public enum JobPersistenceStatus {
        COMPLETED,
        FAILED
    }

    // UUID of the job whose output was persisted.
    private final UUID jobUUID;
    // Whether persistence completed or failed.
    private final JobPersistenceStatus status;
    // Error details; presumably null when status is COMPLETED — TODO confirm.
    private final QueryError queryError;
}
| 7,395 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/event/JobEvent.java | package com.airbnb.airpal.api.event;
import com.airbnb.airpal.api.Job;
/**
 * Common interface for events that concern a single job.
 */
public interface JobEvent
{
    // Interface members are implicitly public; redundant modifier removed.

    /** The job this event refers to. */
    Job getJob();
}
| 7,396 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal/api | Create_ds/airpal/src/main/java/com/airbnb/airpal/api/event/JobUpdateEvent.java | package com.airbnb.airpal.api.event;
import com.airbnb.airpal.api.Job;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.net.URI;
import java.util.List;
/**
 * Event carrying an updated job state plus a sample of result rows.
 */
@Data
public class JobUpdateEvent implements JobEvent {
    // The job whose state changed.
    @JsonProperty
    private final Job job;
    // Sample of result rows; each inner list holds one row's column values.
    @JsonProperty
    private final List<List<Object>> sample;
}
| 7,397 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/service/LoginView.java | package com.airbnb.airpal.service;
import io.dropwizard.views.View;
/**
 * Dropwizard view that renders the login page.
 */
public class LoginView extends View
{
    // BUGFIX: the template name is a constant and must be final.
    private static final String TEMPLATE_NAME = "login.ftl";

    public LoginView()
    {
        super(TEMPLATE_NAME);
    }
}
| 7,398 |
0 | Create_ds/airpal/src/main/java/com/airbnb/airpal | Create_ds/airpal/src/main/java/com/airbnb/airpal/modules/DropwizardModule.java | package com.airbnb.airpal.modules;
import com.airbnb.airpal.AirpalConfiguration;
import com.airbnb.airpal.api.output.CSVPersistentOutput;
import com.airbnb.airpal.api.output.HiveTablePersistentOutput;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Singleton;
import com.hubspot.rosetta.Rosetta;
import io.dropwizard.setup.Environment;
public class DropwizardModule extends AbstractModule {
    private final Environment environment;
    private final AirpalConfiguration configuration;

    public DropwizardModule(AirpalConfiguration configuration,
                            Environment environment)
    {
        this.configuration = configuration;
        this.environment = environment;
    }

    @Override
    protected void configure() {
        // Expose Dropwizard's registries to the Guice injector.
        bind(MetricRegistry.class).toInstance(environment.metrics());
        bind(HealthCheckRegistry.class).toInstance(environment.healthChecks());
    }

    @Singleton
    @Provides
    protected ObjectMapper provideObjectMapper()
    {
        final ObjectMapper objectMapper = environment.getObjectMapper();
        // Register the polymorphic job-output subtypes under their type names.
        objectMapper.registerSubtypes(
                new NamedType(CSVPersistentOutput.class, "csv"),
                new NamedType(HiveTablePersistentOutput.class, "hive")
        );
        configureRosettaMapper();
        return objectMapper;
    }

    // Rosetta keeps its own mapper: align date serialization and
    // case-insensitive property matching with it.
    private void configureRosettaMapper()
    {
        final ObjectMapper rosettaMapper = Rosetta.getMapper();
        rosettaMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
        rosettaMapper.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES);
    }
}
| 7,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.