index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/internal/specification/OfficeListReportSpecification.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.internal.specification;
import org.apache.fineract.cn.reporting.api.v1.domain.DisplayableField;
import org.apache.fineract.cn.reporting.api.v1.domain.Header;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import org.apache.fineract.cn.reporting.api.v1.domain.Row;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
import org.apache.fineract.cn.reporting.api.v1.domain.Value;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.spi.CriteriaBuilder;
import org.apache.fineract.cn.reporting.service.spi.DisplayableFieldBuilder;
import org.apache.fineract.cn.reporting.service.spi.Report;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import java.time.Clock;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.fineract.cn.api.util.UserContextHolder;
import org.apache.fineract.cn.lang.DateConverter;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@Report(category = "Organization", identifier = "Office")
public class OfficeListReportSpecification implements ReportSpecification {

  private static final String OFFICE = "Identifier";
  private static final String OFFICE_NAME = "Office";
  private static final String DESCRIPTION = "Description";
  private static final String CREATED_BY = "Created By";
  private static final String ADDRESS = "Address";

  private final Logger logger;
  private final EntityManager entityManager;
  // Displayable field name -> SQL column/expression backing it; the "all" map is the
  // union used by validate(..).
  private final HashMap<String, String> officeColumnMapping = new HashMap<>();
  private final HashMap<String, String> addressColumnMapping = new HashMap<>();
  private final HashMap<String, String> allColumnMapping = new HashMap<>();

  @Autowired
  public OfficeListReportSpecification(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger,
                                       final EntityManager entityManager) {
    super();
    this.logger = logger;
    this.entityManager = entityManager;
    this.initializeMapping();
  }

  /**
   * Describes the "Office Listing" report: identifier, query parameters and the
   * fields a caller may request for display.
   */
  @Override
  public ReportDefinition getReportDefinition() {
    final ReportDefinition reportDefinition = new ReportDefinition();
    reportDefinition.setIdentifier("Office");
    reportDefinition.setName("Office Listing");
    reportDefinition.setDescription("List of all Offices.");
    reportDefinition.setQueryParameters(this.buildQueryParameters());
    reportDefinition.setDisplayableFields(this.buildDisplayableFields());
    return reportDefinition;
  }

  /**
   * Runs the office listing query for one page and assembles a {@link ReportPage}.
   *
   * @param reportRequest requested displayable fields and filter criteria
   * @param pageIndex     zero-based page index
   * @param size          number of rows per page
   */
  @Override
  public ReportPage generateReport(final ReportRequest reportRequest, final int pageIndex, final int size) {
    final ReportDefinition reportDefinition = this.getReportDefinition();
    // Fix: SLF4J placeholders are "{}"; the former "{0}" (MessageFormat style) was
    // emitted verbatim instead of being substituted.
    this.logger.info("Generating report {}.", reportDefinition.getIdentifier());

    final ReportPage reportPage = new ReportPage();
    reportPage.setName(reportDefinition.getName());
    reportPage.setDescription(reportDefinition.getDescription());
    reportPage.setHeader(this.createHeader(reportRequest.getDisplayableFields()));

    final Query officeQuery = this.entityManager.createNativeQuery(this.buildOfficeQuery(reportRequest, pageIndex, size));
    final List<?> officeResultList = officeQuery.getResultList();
    reportPage.setRows(this.buildRows(reportRequest, officeResultList));

    // Probe the next page to decide whether more rows are available.
    reportPage.setHasMore(
        !this.entityManager.createNativeQuery(this.buildOfficeQuery(reportRequest, pageIndex + 1, size))
            .getResultList().isEmpty()
    );

    reportPage.setGeneratedBy(UserContextHolder.checkedGetUser());
    reportPage.setGeneratedOn(DateConverter.toIsoString(LocalDateTime.now(Clock.systemUTC())));
    return reportPage;
  }

  /**
   * Rejects requests referencing query parameters or displayable fields unknown to
   * this report.
   *
   * @throws IllegalArgumentException listing every unknown field name
   */
  @Override
  public void validate(final ReportRequest reportRequest) throws IllegalArgumentException {
    final ArrayList<String> unknownFields = new ArrayList<>();
    reportRequest.getQueryParameters().forEach(queryParameter -> {
      if (!this.allColumnMapping.containsKey(queryParameter.getName())) {
        unknownFields.add(queryParameter.getName());
      }
    });
    reportRequest.getDisplayableFields().forEach(displayableField -> {
      if (!this.allColumnMapping.containsKey(displayableField.getName())) {
        unknownFields.add(displayableField.getName());
      }
    });
    if (!unknownFields.isEmpty()) {
      throw new IllegalArgumentException(
          "Unspecified fields requested: " + String.join(", ", unknownFields)
      );
    }
  }

  // Wires the displayable field names to their SQL expressions. The ADDRESS field is a
  // CONCAT over the address table columns, queried separately per office row.
  private void initializeMapping() {
    this.officeColumnMapping.put(OFFICE, "ho.id");
    this.officeColumnMapping.put(OFFICE_NAME, "ho.a_name");
    this.officeColumnMapping.put(DESCRIPTION, "ho.description");
    this.officeColumnMapping.put(CREATED_BY, "ho.created_by");
    this.addressColumnMapping.put(ADDRESS, "CONCAT(IFNULL(ha.street, ', '), " +
        "IFNULL(ha.postal_code, ', '), IFNULL(ha.city, ', ')," +
        " IFNULL(ha.region, ', '), IFNULL(ha.country, ','))");
    this.allColumnMapping.putAll(officeColumnMapping);
    this.allColumnMapping.putAll(addressColumnMapping);
  }

  // Header is simply the requested field names, in request order.
  private Header createHeader(final List<DisplayableField> displayableFields) {
    final Header header = new Header();
    header.setColumnNames(
        displayableFields
            .stream()
            .map(DisplayableField::getName)
            .collect(Collectors.toList())
    );
    return header;
  }

  // This report currently exposes no filter parameters.
  private List<QueryParameter> buildQueryParameters() {
    return Arrays.asList();
  }

  private List<DisplayableField> buildDisplayableFields() {
    return Arrays.asList(
        DisplayableFieldBuilder.create(OFFICE, Type.TEXT).mandatory().build(),
        DisplayableFieldBuilder.create(OFFICE_NAME, Type.TEXT).mandatory().build(),
        DisplayableFieldBuilder.create(DESCRIPTION, Type.TEXT).mandatory().build(),
        DisplayableFieldBuilder.create(CREATED_BY, Type.TEXT).build(),
        DisplayableFieldBuilder.create(ADDRESS, Type.TEXT).mandatory().build()
    );
  }

  // Converts raw native-query rows into report rows, appending the per-office address
  // value when the ADDRESS field was requested.
  private List<Row> buildRows(final ReportRequest reportRequest, final List<?> officeResultList) {
    final ArrayList<Row> rows = new ArrayList<>();
    officeResultList.forEach(result -> {
      final Row row = new Row();
      row.setValues(new ArrayList<>());
      final String officeIdentifier;
      if (result instanceof Object[]) {
        // Multi-column result: first column is always ho.id (see buildOfficeQuery ordering
        // of officeColumnMapping hits) -- TODO confirm the first requested field is always OFFICE.
        final Object[] resultValues = (Object[]) result;
        officeIdentifier = resultValues[0].toString();
        for (final Object resultValue : resultValues) {
          final Value value = new Value();
          if (resultValue != null) {
            value.setValues(new String[]{resultValue.toString()});
          } else {
            value.setValues(new String[]{});
          }
          row.getValues().add(value);
        }
      } else {
        // Single-column result: the value doubles as the office identifier.
        officeIdentifier = result.toString();
        final Value value = new Value();
        value.setValues(new String[]{result.toString()});
        row.getValues().add(value);
      }
      final String addressQueryString = this.buildAddressQuery(reportRequest, officeIdentifier);
      if (addressQueryString != null) {
        final Query addressQuery = this.entityManager.createNativeQuery(addressQueryString);
        final List<?> resultList = addressQuery.getResultList();
        final Value addressValue = new Value();
        // Fix: an office without an address row previously caused an
        // IndexOutOfBoundsException here; emit an empty value instead.
        if (resultList.isEmpty() || resultList.get(0) == null) {
          addressValue.setValues(new String[]{});
        } else {
          addressValue.setValues(new String[]{resultList.get(0).toString()});
        }
        row.getValues().add(addressValue);
      }
      rows.add(row);
    });
    return rows;
  }

  // Builds the paginated native SQL for the office listing. Filter values are escaped
  // by CriteriaBuilder before being embedded.
  private String buildOfficeQuery(final ReportRequest reportRequest, int pageIndex, int size) {
    final StringBuilder query = new StringBuilder("SELECT ");
    final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
    final ArrayList<String> columns = new ArrayList<>();
    displayableFields.forEach(displayableField -> {
      final String column = this.officeColumnMapping.get(displayableField.getName());
      if (column != null) {
        columns.add(column);
      }
    });
    query.append(String.join(", ", columns))
        .append(" FROM ")
        .append("horus_offices ho ");
    final List<QueryParameter> queryParameters = reportRequest.getQueryParameters();
    if (!queryParameters.isEmpty()) {
      final ArrayList<String> criteria = new ArrayList<>();
      queryParameters.forEach(queryParameter -> {
        if (queryParameter.getValue() != null && !queryParameter.getValue().isEmpty()) {
          criteria.add(
              CriteriaBuilder.buildCriteria(this.officeColumnMapping.get(queryParameter.getName()), queryParameter)
          );
        }
      });
      if (!criteria.isEmpty()) {
        query.append(" WHERE ");
        query.append(String.join(" AND ", criteria));
      }
    }
    query.append(" ORDER BY ho.a_name");
    query.append(" LIMIT ");
    query.append(size);
    if (pageIndex > 0) {
      query.append(" OFFSET ");
      query.append(size * pageIndex);
    }
    return query.toString();
  }

  // Builds the address lookup for one office, or null when ADDRESS was not requested.
  // NOTE(review): officeIdentifier is concatenated into the SQL; it originates from the
  // office query's first column (ho.id), not from user input, but parameterizing it
  // would still be safer.
  private String buildAddressQuery(final ReportRequest reportRequest, final String officeIdentifier) {
    final List<DisplayableField> displayableFields = reportRequest.getDisplayableFields();
    final ArrayList<String> columns = new ArrayList<>();
    displayableFields.forEach(displayableField -> {
      final String column = this.addressColumnMapping.get(displayableField.getName());
      if (column != null) {
        columns.add(column);
      }
    });
    if (!columns.isEmpty()) {
      return "SELECT " + String.join(", ", columns) + " " +
          "FROM horus_addresses ha " +
          "LEFT JOIN horus_offices ho on ha.office_id = ho.id " +
          "WHERE ho.id ='" + officeIdentifier + "' ";
    }
    return null;
  }
}
| 6,100 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/spi/QueryParameterBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.spi;
import org.apache.fineract.cn.reporting.api.v1.domain.AutoCompleteResource;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
import java.util.Arrays;
/**
 * Fluent builder for {@link QueryParameter} instances used in report definitions.
 * Applies defaults on {@link #build()}: operator EQUALS, mandatory FALSE.
 */
public class QueryParameterBuilder {

  private final String name;
  private final Type type;
  private QueryParameter.Operator operator;
  private Boolean mandatory;
  private AutoCompleteResource autoCompleteResource;
  // Fix: removed the unused private field "queryParameter"; name and type are now final
  // since they are only assigned in the constructor.

  private QueryParameterBuilder(final String name, final Type type) {
    super();
    this.name = name;
    this.type = type;
  }

  /** Starts a builder for a parameter with the given name and type. */
  public static QueryParameterBuilder create(final String name, final Type type) {
    return new QueryParameterBuilder(name, type);
  }

  /** Sets the comparison operator; EQUALS is used when this is never called. */
  public QueryParameterBuilder operator(final QueryParameter.Operator operator) {
    this.operator = operator;
    return this;
  }

  /** Marks the parameter as mandatory; parameters are optional by default. */
  public QueryParameterBuilder mandatory() {
    this.mandatory = Boolean.TRUE;
    return this;
  }

  /** Attaches an auto-complete resource with the given path and search terms. */
  public QueryParameterBuilder autoComplete(final String path, final String... terms) {
    final AutoCompleteResource autoCompleteResource = new AutoCompleteResource();
    autoCompleteResource.setPath(path);
    autoCompleteResource.setTerms(Arrays.asList(terms));
    this.autoCompleteResource = autoCompleteResource;
    return this;
  }

  /** Builds the {@link QueryParameter}, filling in defaults for unset values. */
  public QueryParameter build() {
    final QueryParameter queryParameter = new QueryParameter();
    queryParameter.setName(this.name);
    queryParameter.setType(this.type);
    queryParameter.setOperator(this.operator != null ? this.operator : QueryParameter.Operator.EQUALS);
    queryParameter.setMandatory(this.mandatory != null ? this.mandatory : Boolean.FALSE);
    queryParameter.setAutoCompleteResource(this.autoCompleteResource);
    return queryParameter;
  }
}
| 6,101 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/spi/DisplayableFieldBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.spi;
import org.apache.fineract.cn.reporting.api.v1.domain.DisplayableField;
import org.apache.fineract.cn.reporting.api.v1.domain.Type;
/**
 * Fluent builder for {@link DisplayableField} instances.
 * A field is optional unless {@link #mandatory()} is invoked before {@link #build()}.
 */
public class DisplayableFieldBuilder {

  private final String name;
  private final Type type;
  private Boolean mandatory;

  private DisplayableFieldBuilder(final String name, final Type type) {
    super();
    this.name = name;
    this.type = type;
  }

  /** Starts a builder for a field with the given name and type. */
  public static DisplayableFieldBuilder create(final String name, final Type type) {
    return new DisplayableFieldBuilder(name, type);
  }

  /** Marks the field as mandatory. */
  public DisplayableFieldBuilder mandatory() {
    this.mandatory = Boolean.TRUE;
    return this;
  }

  /** Builds the {@link DisplayableField}; mandatory defaults to FALSE. */
  public DisplayableField build() {
    final DisplayableField field = new DisplayableField();
    field.setName(this.name);
    field.setType(this.type);
    field.setMandatory(Boolean.TRUE.equals(this.mandatory));
    return field;
  }
}
| 6,102 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/spi/CriteriaBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.spi;
import org.apache.fineract.cn.reporting.api.v1.domain.QueryParameter;
import org.owasp.esapi.ESAPI;
import org.owasp.esapi.Encoder;
import org.owasp.esapi.codecs.MySQLCodec;
import org.springframework.util.StringUtils;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Builds SQL WHERE-clause fragments for report query parameters, escaping every
 * user-supplied value with the OWASP ESAPI MySQL codec to prevent SQL injection.
 */
public class CriteriaBuilder {
  // https://www.owasp.org/index.php/SQL_Injection_Prevention_Cheat_Sheet
  public static Encoder ENCODER;
  public static MySQLCodec MY_SQL_CODEC;

  static {
    // TODO move this code into bean
    try {
      ENCODER = ESAPI.encoder();
      MY_SQL_CODEC = new MySQLCodec(MySQLCodec.Mode.ANSI);
    } catch (final Exception e) {
      // Fix: the former System.out.println(e.getMessage()) silently dropped the stack
      // trace; report the failure on stderr so the later NullPointerException on a
      // null ENCODER is at least explainable.
      System.err.println("Failed to initialize ESAPI encoder: " + e.getMessage());
      e.printStackTrace();
    }
  }

  private CriteriaBuilder() {
    super();
  }

  /** Escapes a single value for safe embedding into a MySQL string literal. */
  private static String encode(final String value) {
    return ENCODER.encodeForSQL(MY_SQL_CODEC, value);
  }

  /**
   * Renders {@code field <operator> <escaped value(s)>} for the given parameter.
   * IN expects a comma-delimited value list; BETWEEN expects {@code start..end}.
   *
   * @throws IllegalArgumentException if a BETWEEN value does not contain ".."
   */
  public static String buildCriteria(final String field, final QueryParameter queryParameter) {
    final StringBuilder criteria = new StringBuilder(field);
    switch (queryParameter.getOperator()) {
      case EQUALS:
        criteria.append(" = '");
        criteria.append(encode(queryParameter.getValue()));
        criteria.append("'");
        break;
      case LIKE:
        criteria.append(" LIKE '%");
        criteria.append(encode(queryParameter.getValue()));
        criteria.append("%'");
        break;
      case GREATER:
        criteria.append(" > '");
        criteria.append(encode(queryParameter.getValue()));
        criteria.append("'");
        break;
      case LESSER:
        criteria.append(" < '");
        criteria.append(encode(queryParameter.getValue()));
        criteria.append("'");
        break;
      case IN:
        criteria.append(" in (");
        final Set<String> strings = StringUtils.commaDelimitedListToSet(queryParameter.getValue());
        criteria.append(
            strings
                .stream()
                .map(s -> "'" + encode(s) + "'")
                .collect(Collectors.joining(","))
        );
        criteria.append(")");
        break;
      case BETWEEN:
        final String[] splitString = queryParameter.getValue().split("\\.\\.");
        // Fix: a value without ".." previously failed with an
        // ArrayIndexOutOfBoundsException; reject it with a descriptive message.
        if (splitString.length < 2) {
          throw new IllegalArgumentException(
              "BETWEEN requires a value of the form 'start..end', given: " + queryParameter.getValue());
        }
        criteria.append(" BETWEEN '");
        criteria.append(encode(splitString[0]));
        criteria.append("' AND '");
        criteria.append(encode(splitString[1]));
        criteria.append("'");
        break;
    }
    return criteria.toString();
  }
}
| 6,103 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/spi/ReportSpecification.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.spi;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
/**
 * Contract for pluggable report providers. Implementations in this module are
 * annotated with {@code @Report} to declare their category and identifier.
 */
public interface ReportSpecification {
/**
 * Returns the static definition of this report: identifier, name, description,
 * query parameters and displayable fields.
 */
ReportDefinition getReportDefinition();
/**
 * Generates one page of the report.
 *
 * @param reportRequest requested displayable fields and filter criteria
 * @param pageIndex zero-based page index
 * @param size number of rows per page
 */
ReportPage generateReport(final ReportRequest reportRequest, int pageIndex, int size);
/**
 * Verifies that the request only references fields known to this report.
 *
 * @throws IllegalArgumentException if unknown fields are requested
 */
void validate(final ReportRequest reportRequest) throws IllegalArgumentException;
}
| 6,104 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/spi/Report.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.spi;
import org.springframework.stereotype.Component;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a {@link ReportSpecification} implementation as a Spring component and
 * declares the category and identifier under which the report is exposed.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Documented
@Component
public @interface Report {
/** Category the report is grouped under, e.g. "Organization". */
String category();
/** Identifier of the report within its category, e.g. "Office". */
String identifier();
}
| 6,105 |
0 | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service | Create_ds/fineract-cn-reporting/service/src/main/java/org/apache/fineract/cn/reporting/service/rest/ReportingRestController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.cn.reporting.service.rest;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.fineract.cn.reporting.api.v1.EventConstants;
import org.apache.fineract.cn.reporting.api.v1.PermittableGroupIds;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportDefinition;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportPage;
import org.apache.fineract.cn.reporting.api.v1.domain.ReportRequest;
import org.apache.fineract.cn.reporting.service.ServiceConstants;
import org.apache.fineract.cn.reporting.service.internal.provider.ReportSpecificationProvider;
import org.apache.fineract.cn.reporting.service.spi.ReportSpecification;
import java.util.List;
import java.util.Optional;
import org.apache.fineract.cn.anubis.annotation.AcceptedTokenType;
import org.apache.fineract.cn.anubis.annotation.Permittable;
import org.apache.fineract.cn.lang.ApplicationName;
import org.apache.fineract.cn.lang.ServiceException;
import org.apache.fineract.cn.lang.TenantContextHolder;
import org.apache.fineract.cn.lang.config.TenantHeaderFilter;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST entry point for the reporting service: lists report categories and
 * definitions, and generates report pages via the registered
 * {@link ReportSpecification} implementations.
 */
@SuppressWarnings("unused")
@RestController
@RequestMapping("/")
public class ReportingRestController {

  private final Logger logger;
  private final ReportSpecificationProvider reportSpecificationProvider;
  private final ApplicationName applicationName;
  private final JmsTemplate jmsTemplate;

  @Autowired
  public ReportingRestController(@Qualifier(ServiceConstants.LOGGER_NAME) final Logger logger,
                                 final ReportSpecificationProvider reportSpecificationProvider,
                                 final ApplicationName applicationName,
                                 final JmsTemplate jmsTemplate) {
    super();
    this.logger = logger;
    this.reportSpecificationProvider = reportSpecificationProvider;
    this.applicationName = applicationName;
    this.jmsTemplate = jmsTemplate;
  }

  /**
   * System-level hook: publishes an INITIALIZE event (with the tenant header when a
   * tenant context is present) announcing this service version on the JMS topic.
   */
  @Permittable(value = AcceptedTokenType.SYSTEM)
  @RequestMapping(
      value = "/initialize",
      method = RequestMethod.POST,
      consumes = MediaType.ALL_VALUE,
      produces = MediaType.APPLICATION_JSON_VALUE
  )
  public
  @ResponseBody
  ResponseEntity<Void> initialize() {
    final Gson gson = new GsonBuilder().create();
    this.jmsTemplate.convertAndSend(
        gson.toJson(this.applicationName.getVersionString()),
        message -> {
          if (TenantContextHolder.identifier().isPresent()) {
            message.setStringProperty(
                TenantHeaderFilter.TENANT_HEADER,
                TenantContextHolder.checkedGetIdentifier());
          }
          message.setStringProperty(
              EventConstants.SELECTOR_NAME,
              EventConstants.INITIALIZE
          );
          return message;
        }
    );
    return ResponseEntity.ok().build();
  }

  /** Lists all report categories known to the specification provider. */
  @Permittable(value = AcceptedTokenType.TENANT, groupId = PermittableGroupIds.REPORT_MANAGEMENT)
  @RequestMapping(
      value = "/categories",
      method = RequestMethod.GET,
      produces = MediaType.APPLICATION_JSON_VALUE,
      consumes = MediaType.ALL_VALUE
  )
  public
  ResponseEntity<List<String>> fetchCategories() {
    return ResponseEntity.ok(this.reportSpecificationProvider.getAvailableCategories());
  }

  /** Lists the report definitions registered under the given category. */
  @Permittable(value = AcceptedTokenType.TENANT, groupId = PermittableGroupIds.REPORT_MANAGEMENT)
  @RequestMapping(
      value = "categories/{category}",
      method = RequestMethod.GET,
      produces = MediaType.APPLICATION_JSON_VALUE,
      consumes = MediaType.ALL_VALUE)
  public
  ResponseEntity<List<ReportDefinition>> fetchReportDefinitions(@PathVariable("category") final String category) {
    return ResponseEntity.ok(this.reportSpecificationProvider.getAvailableReports(category));
  }

  /**
   * Validates the request against the addressed report and generates one page.
   *
   * @param pageIndex optional zero-based page index; defaults to 0 when omitted
   * @param size      optional page size; defaults to 20 when omitted
   */
  @Permittable(value = AcceptedTokenType.TENANT, groupId = PermittableGroupIds.REPORT_MANAGEMENT)
  @RequestMapping(
      value = "/categories/{category}/reports/{identifier}",
      method = RequestMethod.POST,
      produces = MediaType.APPLICATION_JSON_VALUE,
      consumes = MediaType.APPLICATION_JSON_VALUE
  )
  public
  ResponseEntity<ReportPage> generateReport(@PathVariable("category") final String category,
                                            @PathVariable("identifier") final String identifier,
                                            @RequestBody final ReportRequest reportRequest,
                                            @RequestParam(value = "pageIndex", required = false) final Integer pageIndex,
                                            @RequestParam(value = "size", required = false) final Integer size) {
    final Optional<ReportSpecification> optionalReportSpecification =
        this.reportSpecificationProvider.getReportSpecification(category, identifier);
    if (optionalReportSpecification.isPresent()) {
      final ReportSpecification reportSpecification = optionalReportSpecification.get();
      try {
        reportSpecification.validate(reportRequest);
      } catch (final IllegalArgumentException iaex) {
        throw ServiceException.badRequest(iaex.getMessage());
      }
      // Fix: pageIndex and size are optional (required = false) but were unboxed
      // directly into int parameters, throwing a NullPointerException whenever a
      // client omitted them. Apply explicit defaults instead.
      final int effectivePageIndex = pageIndex != null ? pageIndex : 0;
      final int effectiveSize = size != null ? size : 20;
      return ResponseEntity.ok(reportSpecification.generateReport(reportRequest, effectivePageIndex, effectiveSize));
    } else {
      throw ServiceException.notFound("Report {0} not found.", identifier);
    }
  }

  /** Returns a single report definition, or 404 when it is not registered. */
  @Permittable(value = AcceptedTokenType.TENANT, groupId = PermittableGroupIds.REPORT_MANAGEMENT)
  @RequestMapping(
      value = "categories/{category}/definitions/{identifier}",
      method = RequestMethod.GET,
      produces = MediaType.APPLICATION_JSON_VALUE,
      consumes = MediaType.ALL_VALUE)
  public
  ResponseEntity<ReportDefinition> findReportDefinition(
      @PathVariable("category") final String category,
      @PathVariable("identifier") final String identifier) {
    return ResponseEntity.ok(
        this.reportSpecificationProvider.findReportDefinition(category, identifier)
            .orElseThrow(() -> ServiceException.notFound("Report definition {0} not found.", identifier))
    );
  }
}
| 6,106 |
0 | Create_ds/geronimo-arthur/documentation/src/test/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/test/java/org/apache/geronimo/arthur/documentation/lang/PathPredicatesTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.lang;
import static java.util.Collections.singletonList;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.function.Predicate;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@code PathPredicates.createFilter}: include-only, include+exclude,
 * and the null (accept-everything) cases.
 */
class PathPredicatesTest {

  @Test
  void nullValue() {
    // A null include list accepts every path.
    final Predicate<Path> acceptAll = new PathPredicates().createFilter(null);
    assertTrue(acceptAll.test(Paths.get("foo.adoc")));
    assertTrue(acceptAll.test(Paths.get(".foo.adoc")));
    assertTrue(acceptAll.test(Paths.get(".git")));
    assertTrue(acceptAll.test(Paths.get("whatever")));
  }

  @Test
  void simple() {
    // Include-only filter matching *.adoc file names.
    final Predicate<Path> adocOnly = new PathPredicates().createFilter(singletonList(".+\\.adoc"));
    assertTrue(adocOnly.test(Paths.get("foo.adoc")));
    assertTrue(adocOnly.test(Paths.get(".foo.adoc")));
    assertFalse(adocOnly.test(Paths.get(".git")));
    assertFalse(adocOnly.test(Paths.get("whatever")));

    // Include-only filter matching dot-prefixed names.
    final Predicate<Path> dotPrefixed = new PathPredicates().createFilter(singletonList("^\\..+"));
    assertFalse(dotPrefixed.test(Paths.get("foo.adoc")));
    assertTrue(dotPrefixed.test(Paths.get(".foo.adoc")));
    assertTrue(dotPrefixed.test(Paths.get(".git")));
    assertFalse(dotPrefixed.test(Paths.get("whatever")));
  }

  @Test
  void includeExclude() {
    // Exclusions win over inclusions: .adoc files are accepted unless dot-prefixed.
    final Predicate<Path> adocNotHidden =
        new PathPredicates().createFilter(singletonList(".+\\.adoc"), singletonList("^\\..+"));
    assertTrue(adocNotHidden.test(Paths.get("foo.adoc")));
    assertFalse(adocNotHidden.test(Paths.get(".foo.adoc")));
    assertFalse(adocNotHidden.test(Paths.get(".git")));
    assertFalse(adocNotHidden.test(Paths.get("whatever")));
  }

  @Test
  void includeExcludeNullInclude() {
    // Null include list with an exclude list: accept everything not excluded.
    final Predicate<Path> notHidden =
        new PathPredicates().createFilter(null, singletonList("^\\..+"));
    assertTrue(notHidden.test(Paths.get("foo.adoc")));
    assertFalse(notHidden.test(Paths.get(".foo.adoc")));
    assertFalse(notHidden.test(Paths.get(".git")));
    assertTrue(notHidden.test(Paths.get("whatever")));
  }
}
| 6,107 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/DocumentationGeneratorLauncher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toMap;
import static lombok.AccessLevel.PRIVATE;
import java.beans.PropertyEditorManager;
import java.io.PrintStream;
import java.nio.file.Path;
import java.util.Map;
import java.util.stream.Stream;
import org.apache.geronimo.arthur.documentation.download.Downloads;
import org.apache.geronimo.arthur.documentation.editor.PathEditor;
import org.apache.geronimo.arthur.documentation.io.ConsumedPrintStream;
import org.apache.geronimo.arthur.documentation.io.FolderVisitor;
import org.apache.geronimo.arthur.documentation.lang.PathPredicates;
import org.apache.geronimo.arthur.documentation.mojo.MojoParser;
import org.apache.geronimo.arthur.documentation.renderer.AsciidocRenderer;
import org.tomitribe.crest.Main;
import org.tomitribe.crest.environments.Environment;
import org.tomitribe.crest.environments.SystemEnvironment;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@NoArgsConstructor(access = PRIVATE)
public final class DocumentationGeneratorLauncher {
public static void main(final String[] args) throws Exception {
System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "32");
final Map<Class<?>, Object> services = createServices().collect(toMap(Object::getClass, identity()));
final SystemEnvironment env = new SystemEnvironment(services) {
private final ConsumedPrintStream err = new ConsumedPrintStream(log::error);
private final ConsumedPrintStream out = new ConsumedPrintStream(log::info);
@Override
public PrintStream getOutput() {
return out;
}
@Override
public PrintStream getError() {
return err;
}
};
PropertyEditorManager.registerEditor(Path.class, PathEditor.class);
Environment.ENVIRONMENT_THREAD_LOCAL.set(env);
try {
new Main().main(env, args);
} finally {
// cheap cleanup solution
services.values().stream()
.filter(AutoCloseable.class::isInstance)
.map(AutoCloseable.class::cast)
.forEach(it -> {
try {
it.close();
} catch (final Exception e) {
log.error(e.getMessage(), e);
}
});
Environment.ENVIRONMENT_THREAD_LOCAL.remove();
}
}
private static Stream<Object> createServices() {
final PathPredicates predicates = new PathPredicates();
return Stream.of(
predicates,
new AsciidocRenderer(),
new FolderVisitor(predicates),
new MojoParser(),
new Downloads());
}
}
| 6,108 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/renderer/TemplateConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.renderer;
import java.nio.file.Path;
import org.tomitribe.crest.api.Option;
import org.tomitribe.crest.api.Options;
import lombok.Data;
// crest @Options bean exposing the page template locations (header/footer) as CLI options;
// Lombok @Data generates the getters used by the commands
@Data
@Options
public class TemplateConfiguration {
// template file prepended to each rendered page; presumably null when the option is omitted -- TODO confirm crest's default handling
private final Path header;
// template file appended to each rendered page; same nullability caveat as header
private final Path footer;
public TemplateConfiguration(@Option("header") final Path header,
@Option("footer") final Path footer) {
this.header = header;
this.footer = footer;
}
}
| 6,109 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/renderer/AsciidocRenderer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.renderer;
import static java.util.Collections.singletonMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import org.asciidoctor.Asciidoctor;
import org.asciidoctor.Options;
import org.asciidoctor.ast.DocumentHeader;
import org.asciidoctor.jruby.internal.JRubyAsciidoctor;
/**
 * Thin facade over Asciidoctor(J) initialized asynchronously because the JRuby
 * bootstrap is very slow; every public method blocks until the engine is ready.
 *
 * Fix: the original counted the latch down only after a successful
 * {@code JRubyAsciidoctor.create()}; if creation threw, the latch stayed at 1
 * and every caller of render/close/extractMetadata hung forever. The failure
 * is now remembered and rethrown to the caller instead.
 */
public class AsciidocRenderer implements AutoCloseable {
    private Asciidoctor asciidoctor; // written by the init thread; the latch provides the happens-before
    private volatile Throwable initFailure; // non-null when the background init threw
    private final CountDownLatch latch = new CountDownLatch(1);

    public AsciidocRenderer() {
        new Thread(() -> { // this is insanely slow so let's do it in background
            try {
                asciidoctor = JRubyAsciidoctor.create();
            } catch (final Throwable error) {
                initFailure = error; // surface the failure instead of leaving waiters blocked
            } finally {
                latch.countDown(); // always release await(), even when create() threw
            }
        }, getClass().getName() + '-' + hashCode()).start();
    }

    /** Converts the asciidoc input with the given options, blocking until the engine is up. */
    public String render(final String input, final Options options) {
        await();
        return asciidoctor.convert(input, options);
    }

    @Override
    public void close() {
        await();
        asciidoctor.shutdown();
    }

    // blocks until initialization completed; rethrows a background init failure
    private void await() {
        try {
            latch.await();
        } catch (final InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        if (initFailure != null) {
            throw new IllegalStateException("Asciidoctor initialization failed", initFailure);
        }
    }

    /** Reads the document header and exposes its page title under the "title" key. */
    public Map<String, String> extractMetadata(final String content) {
        await();
        final DocumentHeader header = asciidoctor.readDocumentHeader(content);
        return singletonMap("title", header.getPageTitle());
    }
}
| 6,110 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/io/FolderVisitor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.io;
import static java.util.Objects.requireNonNull;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.function.Consumer;
import java.util.function.Predicate;
import org.apache.geronimo.arthur.documentation.lang.PathPredicates;
import lombok.RequiredArgsConstructor;
@RequiredArgsConstructor
public class FolderVisitor {
    private final PathPredicates predicates;

    /**
     * Walks the configured folder and hands every file whose relative path
     * passes the include/exclude filter to the consumer. Silently does nothing
     * when the folder does not exist.
     */
    public void visit(final FolderConfiguration configuration,
                      final Consumer<Path> consumer) {
        final Path root = requireNonNull(configuration.getLocation(), "Missing location");
        if (!Files.exists(root)) {
            return; // nothing to scan
        }
        final Predicate<Path> matcher = predicates.createFilter(configuration.getIncludes(), configuration.getExcludes());
        try {
            Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) {
                    // filter on the path relative to the scan root, not the absolute path
                    if (matcher.test(root.relativize(file))) {
                        consumer.accept(file);
                    }
                    return FileVisitResult.CONTINUE;
                }
            });
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }
}
| 6,111 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/io/FolderConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.io;
import java.nio.file.Path;
import java.util.List;
import org.tomitribe.crest.api.Default;
import org.tomitribe.crest.api.Option;
import org.tomitribe.crest.api.Options;
import lombok.Data;
// crest @Options bean describing a folder scan: root location plus include/exclude regexes
// (consumed by FolderVisitor); Lombok @Data generates the getters
@Data
@Options
public class FolderConfiguration {
// root folder to walk; the visitor skips the scan entirely when it does not exist
private final Path location;
// regexes a root-relative path must match to be visited; null/empty means "include everything"
private final List<String> includes;
// regexes rejecting root-relative paths; defaults to hiding dot-files
private final List<String> excludes;
public FolderConfiguration(@Option("location") final Path location,
@Option("includes") final List<String> includes,
@Option("excludes") @Default("^\\..+") final List<String> excludes) {
this.location = location;
this.includes = includes;
this.excludes = excludes;
}
}
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/io/ConsumedPrintStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.io;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.util.function.Consumer;
/**
 * A PrintStream that forwards each println-terminated line to a consumer
 * (typically a logger method). Bytes are buffered until one of the println
 * overloads fires; the buffered content is then emitted as UTF-8 text with a
 * single trailing newline stripped. Note: plain print/write calls only buffer,
 * they do not trigger the consumer.
 */
public class ConsumedPrintStream extends PrintStream {
    private final Consumer<String> consumer;
    private final ByteArrayOutputStream buffer;

    public ConsumedPrintStream(final Consumer<String> consumer) {
        super(new ByteArrayOutputStream());
        this.consumer = consumer;
        // 'out' is the stream handed to super(), so this cast always succeeds
        this.buffer = ByteArrayOutputStream.class.cast(out);
    }

    // drains the buffer to the consumer, dropping one trailing '\n' if present
    private void flushLine() {
        final byte[] data = buffer.toByteArray();
        if (data.length == 0) {
            return;
        }
        int length = data.length;
        if (data[length - 1] == '\n') {
            length--;
        }
        consumer.accept(new String(data, 0, length, StandardCharsets.UTF_8));
        buffer.reset();
    }

    @Override
    public void println(final String content) {
        super.println(content);
        flushLine();
    }

    @Override
    public void println() {
        super.println();
        flushLine();
    }

    @Override
    public void println(final boolean x) {
        super.println(x);
        flushLine();
    }

    @Override
    public void println(final char x) {
        super.println(x);
        flushLine();
    }

    @Override
    public void println(final int x) {
        super.println(x);
        flushLine();
    }

    @Override
    public void println(final long x) {
        super.println(x);
        flushLine();
    }

    @Override
    public void println(final float x) {
        super.println(x);
        flushLine();
    }

    @Override
    public void println(final double x) {
        super.println(x);
        flushLine();
    }

    @Override
    public void println(final char[] x) {
        super.println(x);
        flushLine();
    }

    @Override
    public void println(final Object x) {
        super.println(x);
        flushLine();
    }
}
| 6,113 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/lang/PathPredicates.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.lang;
import java.nio.file.Path;
import java.util.List;
import java.util.function.Predicate;
import java.util.regex.Pattern;
/**
 * Builds {@link Predicate}s over {@link Path}s from lists of regular expressions
 * matched against the path's string form.
 */
public class PathPredicates {
    /**
     * Combines include and exclude patterns: a path passes when it matches the
     * includes (null/empty includes accepts everything) and matches none of the
     * excludes.
     *
     * Fix: the original negated {@code createFilter(excludes)}, whose null/empty
     * case is "match everything" — so null/empty excludes rejected every path.
     * Null/empty excludes now correctly excludes nothing; behavior with
     * non-empty excludes is unchanged.
     *
     * @param includes regexes a path must match; null/empty means "include all"
     * @param excludes regexes rejecting a path; null/empty means "exclude none"
     * @return the combined predicate
     */
    public Predicate<Path> createFilter(final List<String> includes, final List<String> excludes) {
        final Predicate<Path> included = createFilter(includes);
        if (excludes == null || excludes.isEmpty()) {
            return included;
        }
        return included.and(createFilter(excludes).negate());
    }

    /**
     * Builds an "any pattern matches" predicate; null/empty patterns match everything.
     *
     * @param patterns regexes applied to the path's string form
     * @return predicate that is true when any pattern finds a match
     */
    public Predicate<Path> createFilter(final List<String> patterns) {
        return patterns == null || patterns.isEmpty() ?
                p -> true :
                patterns.stream()
                        .map(Pattern::compile)
                        .map(Pattern::asPredicate)
                        .map(pattern -> (Predicate<Path>) path -> pattern.test(path.toString()))
                        // identity of Predicate::or is "match nothing"
                        .reduce(p -> false, Predicate::or);
    }
}
| 6,114 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/download/Downloads.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.geronimo.arthur.documentation.download;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static java.util.Optional.ofNullable;
// helper to generate the download table content
/**
 * CLI helper generating the asciidoc download table for the website: reads the
 * released versions from maven-metadata.xml, probes each artifact over HTTP for
 * date/size and the preferred mirror, and prints one table row per artifact.
 */
public class Downloads {
private static final String MVN_BASE = "https://repo.maven.apache.org/maven2/";
private static final String ASF_RELEASE_BASE = "https://repository.apache.org/content/repositories/releases/";
private static final String DIST_RELEASE = "https://dist.apache.org/repos/dist/release/geronimo/arthur/";
private static final String ARCHIVE_RELEASE = "https://archive.apache.org/dist/geronimo/arthur/";
private static final String MIRROR_RELEASE = "https://www.apache.org/dyn/closer.lua/geronimo/arthur/";
private static final long MEGA_RATIO = 1024 * 1024;
private static final long KILO_RATIO = 1024;
// standalone entry point: prints the generated table to stdout
public static void main(final String[] args) {
System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "32");
new Downloads().update(System.out);
}
/**
 * Writes the full asciidoc page (license banner, table header, one row per
 * downloadable artifact sorted by descending release date) to the stream.
 */
public void update(final PrintStream stream) {
final SAXParserFactory factory = SAXParserFactory.newInstance();
factory.setNamespaceAware(false);
factory.setValidating(false);
final DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.of("UTC"));
printHeader(stream);
Stream.of("org/apache/geronimo/arthur/arthur")
.flatMap(it -> toVersions(it, factory))
.map(v -> v.extensions("zip"))
.map(v -> v.classifiers("source-release"))
.flatMap(this::toDownloadable)
.map(this::fillDownloadable)
.filter(Objects::nonNull) // fillDownloadable returns null for missing/unreachable artifacts
.sorted(Comparator.<Download, Instant>comparing(it -> it.date).reversed())
.map(it -> toDownloadLine(dateFormatter, it))
.forEach(stream::println);
stream.println("|===\n");
}
// formats one asciidoc table row with download, checksum and signature links
private String toDownloadLine(final DateTimeFormatter dateFormatter, final Download d) {
return "|" + d.name + (d.classifier.isEmpty() ? "" : (" " + d.classifier))
.replace("source-release", "Source Release") +
"|" + d.version +
"|" + dateFormatter.format(d.date) +
"|" + d.size +
"|" + d.format +
"|" + d.url.replace(ASF_RELEASE_BASE, MVN_BASE) + "[icon:download[] " + d.format + "] " +
(d.sha512 != null ? d.sha512 + "[icon:download[] sha512] " : d.sha1 + "[icon:download[] sha1] ") +
d.asc + "[icon:download[] asc]";
}
// prints the license banner, the page title and the asciidoc table header
private void printHeader(final PrintStream stream) {
stream.println(
"////\n" +
"Licensed to the Apache Software Foundation (ASF) under one or more\n" +
"contributor license agreements. See the NOTICE file distributed with\n" +
"this work for additional information regarding copyright ownership.\n" +
"The ASF licenses this file to You under the Apache License, Version 2.0\n" +
"(the \"License\"); you may not use this file except in compliance with\n" +
"the License. You may obtain a copy of the License at\n" +
"\n" +
"http://www.apache.org/licenses/LICENSE-2.0\n" +
"\n" +
"Unless required by applicable law or agreed to in writing, software\n" +
"distributed under the License is distributed on an \"AS IS\" BASIS,\n" +
"WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" +
"See the License for the specific language governing permissions and\n" +
"limitations under the License.\n" +
"////\n" +
"= Arthur Downloads\n" +
"\n" +
"License under Apache License v2 (ALv2).\n" +
"\n" +
"[.table.table-bordered,options=\"header\"]\n" +
"|===\n" +
"|Name|Version|Date|Size|Type|Links");
}
/**
 * Fetches date and size for the artifact with an HTTP GET on the Maven URL.
 * Returns null when the artifact is absent (404) or the probe fails so the
 * caller can filter it out; other non-200 codes are logged to stderr.
 */
private Download fillDownloadable(final Download download) {
try {
final URL url = new URL(download.mavenCentralUrl);
final HttpURLConnection connection = HttpURLConnection.class.cast(url.openConnection());
connection.setConnectTimeout((int) TimeUnit.SECONDS.toMillis(30));
final int responseCode = connection.getResponseCode();
if (responseCode != HttpURLConnection.HTTP_OK) {
if (HttpURLConnection.HTTP_NOT_FOUND != responseCode) {
System.err.println("Got " + responseCode + " for " + download.url);
}
return null;
}
long lastMod = connection.getHeaderFieldDate("Last-Modified", 0); // 0 (epoch) when the header is missing
download.date = Instant.ofEpochMilli(lastMod);
download.size = toSize(ofNullable(connection.getHeaderField("Content-Length"))
.map(Long::parseLong).orElse(0L), ofNullable(connection.getHeaderField("Accept-Ranges")).orElse("bytes"));
connection.getInputStream().close();
} catch (final IOException e) {
e.printStackTrace();
return null;
}
return download;
}
// renders a byte count as "x MB y kB"; NOTE(review): lengths below 1 kB render as an empty string
private String toSize(final long length, final String bytes) {
if (!"bytes".equalsIgnoreCase(bytes)) {
throw new IllegalArgumentException("Not handled unit: " + bytes);
}
final long meg = length / MEGA_RATIO;
final long kilo = (length - (meg * MEGA_RATIO)) / KILO_RATIO;
return (meg > 0 ? meg + " MB " : "") + (kilo > 0 ? kilo + " kB" : "");
}
/*
private static Stream<Version> versionStream(final String artifactId) {
return Stream.of("org/apache/geronimo/arthur/" + artifactId)
.flatMap(Downloads::toVersions);
}
*/
// expands a version into one Download per (classifier, extension) pair
private Stream<Download> toDownloadable(final Version version) {
final String base = version.base;
final String artifactId = base.substring(base.lastIndexOf('/') + 1);
final String artifactBase = version.base + "/" + version.version + "/" + artifactId + "-" + version.version;
return version.extensions.stream()
.flatMap(e -> (version.classifiers.isEmpty() ? Stream.of(new ArtifactDescription("", e)) : version.classifiers.stream().map(c -> new ArtifactDescription(c, e))))
.map(a -> toDownload(artifactId, a.classifier, version.version, a.extension, artifactBase + (a.classifier.isEmpty() ? '.' + a.extension : ('-' + a.classifier + '.' + a.extension))));
}
/**
 * Resolves the best download location, in order: dist.apache.org (served via
 * the mirror CGI), then archive.apache.org, finally the Maven repository URL.
 * Also probes for a sha512 checksum next to the chosen artifact.
 */
private Download toDownload(final String artifactId, final String classifier, final String version, final String format, String artifactUrl) {
String url = DIST_RELEASE + version + "/" + artifactId + "-" + version + "-" + classifier + "." + format;
String downloadUrl;
String sha512 = null;
if (urlExists(url)) {
// artifact exists on dist.a.o
downloadUrl = MIRROR_RELEASE + version + "/" + artifactId + "-" + version + "-" + classifier + "." + format;
} else {
url = ARCHIVE_RELEASE + version + "/" + artifactId + "-" + version + "-" + classifier + "." + format;
if (urlExists(url)) {
// artifact exists on archive.a.o
downloadUrl = url;
} else {
// falling back to Maven URL
downloadUrl = artifactUrl;
url = artifactUrl;
}
}
if (urlExists(url + ".sha512")) {
sha512 = url + ".sha512";
}
return new Download(
Character.toUpperCase(artifactId.charAt(0)) + artifactId.substring(1).replace('-', ' '),
classifier,
version,
format,
downloadUrl,
url + ".sha1",
sha512,
url + ".asc",
artifactUrl);
}
// HEAD-request probe; NOTE(review): any failure (including timeouts) is wrapped in a RuntimeException and aborts the run
private boolean urlExists(String urlString) {
try {
final URL url = new URL(urlString);
final HttpURLConnection conn = HttpURLConnection.class.cast(url.openConnection());
conn.setRequestMethod("HEAD");
conn.setUseCaches(false);
return conn.getResponseCode() == HttpURLConnection.HTTP_OK;
} catch (final Exception e) {
throw new RuntimeException(e);
}
}
// reads maven-metadata.xml for the given group/artifact path and streams its versions;
// errors are logged and swallowed (empty stream) so one bad metadata file does not abort the table
private Stream<Version> toVersions(final String baseUrl, final SAXParserFactory factory) {
final QuickMvnMetadataParser handler = new QuickMvnMetadataParser();
final String base = ASF_RELEASE_BASE + baseUrl;
try (final InputStream stream = new URL(base + "/maven-metadata.xml").openStream()) {
final SAXParser parser = factory.newSAXParser();
parser.parse(stream, handler);
return handler.foundVersions.stream().map(v -> new Version(base, v)).parallel();
} catch (final Exception e) {
e.printStackTrace();
return Stream.empty();
}
}
// one released version of an artifact plus the classifier/extension combinations to emit
public static class Version {
private final String base;
private final String version;
private final Collection<String> classifiers = new ArrayList<>();
private final Collection<String> extensions = new ArrayList<>();
public Version(final String base, final String version) {
this.base = base;
this.version = version;
}
private Version extensions(final String... values) {
extensions.addAll(asList(values));
return this;
}
private Version classifiers(final String... values) {
classifiers.addAll(asList(values));
return this;
}
}
// (classifier, extension) pair describing one concrete file of a version
public static class ArtifactDescription {
private final String classifier;
private final String extension;
private ArtifactDescription(final String classifier, final String extension) {
this.classifier = classifier;
this.extension = extension;
}
}
// value object carrying everything needed to render one table row
public static class Download {
private final String name;
private final String classifier;
private final String version;
private final String format;
private final String mavenCentralUrl;
private final String url;
private final String sha1;
private final String sha512;
private final String asc;
private Instant date; // filled later by fillDownloadable
private String size; // filled later by fillDownloadable
public Download(final String name, final String classifier, final String version,
final String format, final String url, final String sha1, final String sha512,
final String asc, String mavenCentralUrl) {
this.name = name;
this.classifier = classifier;
this.version = version;
this.format = format;
this.url = url;
this.sha1 = sha1;
this.sha512 = sha512;
this.asc = asc;
this.mavenCentralUrl = mavenCentralUrl;
}
}
// minimal SAX handler collecting <versioning><versions><version> text values
private static class QuickMvnMetadataParser extends DefaultHandler {
private boolean versioning = false;
private boolean versions = false;
private StringBuilder version;
private final Collection<String> foundVersions = new ArrayList<>();
@Override
public void startElement(final String uri, final String localName,
final String name, final Attributes attributes) {
if ("versioning".equalsIgnoreCase(name)) {
versioning = true;
} else if ("versions".equalsIgnoreCase(name)) {
versions = true;
} else if (versioning && versions && "version".equalsIgnoreCase(name)) {
version = new StringBuilder();
}
}
@Override
public void characters(final char[] ch, final int start, final int length) {
if (version != null) {
version.append(new String(ch, start, length));
}
}
// NOTE(review): missing @Override; also assumes every </version> was preceded by a start tag
// inside <versioning><versions> (a top-level <version> element would NPE here) -- confirm metadata shape
public void endElement(final String uri, final String localName, final String name) {
if ("versioning".equalsIgnoreCase(name)) {
versioning = false;
} else if ("versions".equalsIgnoreCase(name)) {
versions = false;
} else if ("version".equalsIgnoreCase(name)) {
foundVersions.add(version.toString());
}
}
}
}
| 6,115 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/mojo/MojoParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.mojo;
import static java.util.stream.Collectors.toList;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.stream.Stream;
import lombok.Data;
/**
 * Convention-based extractor of maven @Parameter metadata (name, type, javadoc,
 * default, property) from this project's mojo sources. It is line-oriented and
 * relies on the project's code style, not on a real Java parser.
 */
public class MojoParser {
// simplified java parser for *our* code
/**
 * Scans the source file for fields annotated with @Parameter and returns one
 * {@link Parameter} per field. When the class extends another mojo (anything
 * but AbstractMojo), the parent source file next to this one is parsed
 * recursively and its parameters appended.
 *
 * @param file mojo source file to scan
 * @return the collected parameters (never null)
 * @throws IOException when a source file cannot be read
 */
public Collection<Parameter> extractParameters(final Path file) throws IOException {
final Collection<Parameter> parameters = new ArrayList<>();
String parent = null;
try (final BufferedReader reader = Files.newBufferedReader(file)) {
String line;
boolean jdoc = false;
final StringBuilder javadoc = new StringBuilder();
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.startsWith("public class ") || line.startsWith("public abstract class ")) {
// remember the parent class so inherited parameters can be collected too
final int ext = line.indexOf("extends ");
if (ext > 0) {
parent = line.substring(ext + "extends ".length(), line.indexOf("{")).trim();
}
if ("AbstractMojo".equals(parent)) {
// the maven base class declares no user-facing parameters
parent = null;
}
} else if (line.startsWith("/**")) {
jdoc = true;
} else if (line.endsWith("*/")) {
jdoc = false;
} else if (jdoc) {
// accumulate the javadoc body, stripping the leading '*' decoration
javadoc.append(line.startsWith("*") ? line.substring(1).trim() : line);
} else if (javadoc.length() > 0 && line.startsWith("@Parameter")) {
// gather the (possibly multi-line) annotation until the field declaration is reached
final StringBuilder annotation = new StringBuilder(line);
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.startsWith("private ") || line.startsWith("protected ")) {
// field line reached: split "<modifier> <type> <name>;" by spaces
final int nameStart = line.lastIndexOf(' ') + 1;
final String config = annotation.toString();
final int prefixLen = line.indexOf(' ') + 1;
String type = line.substring(prefixLen, line.indexOf(" ", prefixLen)).trim();
final int generics = type.indexOf('<');
if (generics > 0) {
// drop the generic arguments, keep the raw type name
type = type.substring(0, generics);
}
type = type.substring(type.indexOf('.') + 1); // nested classes dont need enclosing
parameters.add(new Parameter(
line.substring(nameStart, line.length() - ";".length()),
type,
config.contains("required = true"),
find("property =", config),
javadoc.toString(),
find("defaultValue =", config)));
javadoc.setLength(0);
break;
} else {
annotation.append(line);
}
}
} else if (line.startsWith("package")) { // we have the header before
javadoc.setLength(0);
}
}
}
if (parent != null) {
// assumes the parent source sits in the same folder -- true for this code base
return Stream.concat(
parameters.stream(),
extractParameters(file.getParent().resolve(parent + ".java")).stream())
.collect(toList());
}
return parameters;
}
/**
 * Extracts the double-quoted value following the given attribute prefix inside
 * the annotation text, or "-" when the attribute is absent or unreadable.
 */
private String find(final String prefix, final String value) {
int start = value.indexOf(prefix);
if (start < 0) {
return "-";
}
start += prefix.length();
// skip whitespace and the opening quote
while (Character.isWhitespace(value.charAt(start)) || value.charAt(start) == '"') {
start++;
}
final int end = value.indexOf('"', start);
if (end > 0) {
return value.substring(start, end).trim();
}
return "-";
}
// immutable description of one mojo @Parameter field
@Data
public static class Parameter {
private final String name;
private final String type;
private final boolean required;
private final String property;
private final String description;
private final String defaultValue;
}
}
| 6,116 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/command/Generate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.command;
import org.apache.geronimo.arthur.documentation.download.Downloads;
import org.apache.geronimo.arthur.documentation.io.FolderConfiguration;
import org.apache.geronimo.arthur.documentation.io.FolderVisitor;
import org.apache.geronimo.arthur.documentation.mojo.MojoParser;
import org.apache.geronimo.arthur.documentation.renderer.AsciidocRenderer;
import org.apache.geronimo.arthur.documentation.renderer.TemplateConfiguration;
import org.asciidoctor.Attributes;
import org.asciidoctor.Options;
import org.tomitribe.crest.api.Command;
import org.tomitribe.crest.api.Default;
import org.tomitribe.crest.api.Defaults.DefaultMapping;
import org.tomitribe.crest.api.Err;
import org.tomitribe.crest.api.Option;
import org.tomitribe.crest.api.Out;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
import static java.util.Comparator.comparing;
import static java.util.Locale.ROOT;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.joining;
import static org.asciidoctor.SafeMode.UNSAFE;
/**
 * Crest command generating the Arthur website: renders the asciidoc content to html,
 * copies the static resources (stripping the license header from js/css) and generates
 * the mojo parameter tables included by the documentation pages.
 */
public class Generate {
    /**
     * Generates the website into {@code output}.
     * Work is split into tasks (download page, template compilation, mojo tables,
     * content rendering, static copy) coordinated with latches.
     */
    @Command
    public void generate(
            @DefaultMapping(name = "location", value = "src/content")
            @DefaultMapping(name = "includes", value = "[.+/]?.+\\.adoc$")
            @Option("content-") final FolderConfiguration contentConfiguration,
            @DefaultMapping(name = "location", value = "src/static")
            @Option("static-") final FolderConfiguration staticConfiguration,
            @DefaultMapping(name = "header", value = "src/template/header.html")
            @DefaultMapping(name = "footer", value = "src/template/footer.html")
            @DefaultMapping(name = "nav", value = "src/template/nav.adoc")
            @Option("template-") final TemplateConfiguration templateConfiguration,
            @Option("mojo") final List<Path> mojos,
            @Option("output") final Path output,
            @Option("work-directory") final Path workdir,
            @Option("threads") @Default("${sys.processorCount}") final int threads,
            @Option("download-source") final Path downloadSource,
            @Out final PrintStream stdout,
            @Err final PrintStream stderr,
            final AsciidocRenderer renderer,
            final FolderVisitor visitor,
            final MojoParser mojoParser,
            final Downloads downloads) {
        stdout.println("Generating the website in " + output);
        final Collection<Throwable> errors = new ArrayList<>();
        final Executor executorImpl = threads > 1 ? newThreadPool(output, threads) : Runnable::run;
        // NOTE(review): this wrapper executes tasks inline on the caller thread and never
        // delegates to executorImpl, so the pool above is created/shutdown but effectively
        // unused and "--threads" has no visible effect. The sequential execution however
        // guarantees the latches below are counted down before dependent tasks run (e.g.
        // mojo tables exist before content includes them) — confirm that ordering before
        // "fixing" this into a real asynchronous submit.
        final Executor executor = task -> {
            try {
                task.run();
            } catch (final Throwable err) {
                err.printStackTrace(stderr);
                synchronized (errors) { // errors are accumulated and rethrown at the end
                    errors.add(err);
                }
            }
        };
        // the html header/footer template is computed once; templatize blocks until it is ready
        final CountDownLatch templateLatch = new CountDownLatch(1);
        final AtomicReference<BiFunction<String, String, String>> computedTemplatization = new AtomicReference<>();
        final Supplier<BiFunction<String, String, String>> templatize = () -> {
            try {
                templateLatch.await();
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            return computedTemplatization.get();
        };
        final Options adocOptions = Options.builder()
                .safe(UNSAFE) // UNSAFE allows includes from generated_dir; acceptable for a local build
                .attributes(Attributes.builder()
                        .showTitle(true)
                        .icons("font")
                        .attribute("generated_dir", workdir.toAbsolutePath().toString())
                        .build())
                .build();
        // optionally (re)generate the download page source before the content is rendered
        final CountDownLatch downloadLatch;
        if (downloadSource != null) {
            downloadLatch = new CountDownLatch(1);
            executor.execute(() -> {
                try (final PrintStream stream = new PrintStream(Files.newOutputStream(downloadSource))) {
                    downloads.update(stream);
                } catch (final IOException e) {
                    throw new IllegalStateException(e);
                } finally {
                    downloadLatch.countDown();
                }
            });
        } else {
            downloadLatch = null;
        }
        executor.execute(() -> {
            try {
                computedTemplatization.set(compileTemplate(templateConfiguration));
            } finally {
                templateLatch.countDown(); // always release waiters, even on failure
            }
        });
        // generate the "generated_<mojo>_mojo.adoc" tables included by the documentation
        executor.execute(() -> mojos.forEach(mojo -> {
            try {
                generateMojoDoc(
                        mojo.getFileName().toString().toLowerCase(ROOT).replace("mojo.java", ""),
                        mojoParser, mojo, workdir, stdout);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }));
        final CountDownLatch visitorsFinished = new CountDownLatch(2); // content + static visitors
        try {
            // render each content file to html in the output folder, keeping the folder layout
            executor.execute(() -> {
                try {
                    visitor.visit(contentConfiguration, file -> executor.execute(() -> {
                        final String name = file.getFileName().toString();
                        final int dot = name.lastIndexOf('.');
                        final String targetFilename = dot > 0 ? name.substring(0, dot) + ".html" : name;
                        final Path targetFolder = contentConfiguration.getLocation()
                                .relativize(file)
                                .getParent();
                        final Path target = output.resolve(targetFolder == null ?
                                Paths.get(targetFilename) :
                                targetFolder.resolve(targetFilename));
                        ensureExists(target.getParent());
                        // the download page must have been regenerated before being rendered
                        if (Objects.equals(file.toAbsolutePath(), downloadSource)) {
                            try {
                                downloadLatch.await();
                            } catch (final InterruptedException e) {
                                Thread.currentThread().interrupt();
                            }
                        }
                        try {
                            final String read = read(file);
                            final Map<String, String> metadata = renderer.extractMetadata(read);
                            Files.write(
                                    target,
                                    templatize.get().apply(
                                            metadata.getOrDefault("title", "Arthur"),
                                            renderer.render(read, adocOptions))
                                            .getBytes(StandardCharsets.UTF_8),
                                    StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE);
                        } catch (final IOException e) {
                            throw new IllegalStateException(e);
                        }
                        stdout.println("Created '" + target + "'");
                    }));
                } finally {
                    visitorsFinished.countDown();
                }
            });
            // copy static resources, stripping the 16-line ASF header from js/css files
            executor.execute(() -> {
                try {
                    visitor.visit(staticConfiguration, file -> executor.execute(() -> {
                        final Path target = output.resolve(staticConfiguration.getLocation().relativize(file));
                        ensureExists(target.getParent());
                        final String filename = file.getFileName().toString();
                        try {
                            if (filename.endsWith(".js") || filename.endsWith(".css")) { // strip ASF header
                                try (final BufferedReader reader = Files.newBufferedReader(file);
                                     final InputStream stream = new ByteArrayInputStream(
                                             reader.lines().skip(16/*header*/).collect(joining("\n")).getBytes(StandardCharsets.UTF_8))) {
                                    Files.copy(stream, target, StandardCopyOption.REPLACE_EXISTING);
                                }
                            } else {
                                Files.copy(file, target, StandardCopyOption.REPLACE_EXISTING);
                            }
                        } catch (final IOException e) {
                            throw new IllegalStateException(e);
                        }
                        stdout.println("Copied '" + target + "'");
                    }));
                } finally {
                    visitorsFinished.countDown();
                }
            });
        } finally {
            try {
                visitorsFinished.await();
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            // shutdown the pool if one was created (threads > 1)
            if (ExecutorService.class.isInstance(executorImpl)) {
                final ExecutorService service = ExecutorService.class.cast(executorImpl);
                service.shutdown();
                try {
                    if (!service.awaitTermination(Integer.MAX_VALUE, TimeUnit.SECONDS)) {
                        stderr.println("Exiting without the executor being properly shutdown");
                    }
                } catch (final InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }
        if (!errors.isEmpty()) {
            final IllegalStateException failed = new IllegalStateException("Execution failed");
            errors.forEach(failed::addSuppressed);
            throw failed;
        }
        stdout.println("Website generation done");
    }

    /**
     * Writes an asciidoc table listing the parameters of the given mojo source file
     * to {@code workdir/generated_<marker>_mojo.adoc}.
     */
    private void generateMojoDoc(final String marker, final MojoParser mojoParser, final Path mojo, final Path workdir,
                                 final PrintStream stdout) throws IOException {
        ensureExists(workdir);
        final Collection<MojoParser.Parameter> parameters = mojoParser.extractParameters(mojo);
        try (final Writer writer = Files.newBufferedWriter(workdir.resolve("generated_" + marker + "_mojo.adoc"))) {
            writer.write("[opts=\"header\",role=\"table table-bordered\",cols=\"2,1,3\"]\n" +
                    "|===\n" +
                    "|Name|Type|Description\n\n" +
                    parameters.stream()
                            .sorted(comparing(MojoParser.Parameter::getName))
                            .map(this::toLine)
                            .collect(joining("\n\n")) +
                    "\n|===\n");
        }
        stdout.println("Generated documentation for " + mojo);
    }

    /**
     * Formats one parameter as an asciidoc table row; required parameters get a '*' suffix.
     */
    private String toLine(final MojoParser.Parameter parameter) {
        return "|" + parameter.getName() + (parameter.isRequired() ? "*" : "") +
                "\n|" + parameter.getType() +
                "\na|\n" + parameter.getDescription() +
                "\n\n*Default value*: " + parameter.getDefaultValue() +
                "\n\n*User property*: " + parameter.getProperty();
    }

    /**
     * Reads the whole file as a '\n'-joined string, wrapping IO failures as unchecked.
     */
    private String read(final Path file) {
        try (final Stream<String> lines = Files.lines(file)) {
            return lines.collect(joining("\n"));
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Precompiles the header/footer wrapping; the returned function takes (title, html)
     * and returns the full page with ${arthurTemplateTitle} substituted by the title.
     * Missing header/footer files are simply skipped.
     */
    private BiFunction<String, String, String> compileTemplate(final TemplateConfiguration templateConfiguration) {
        final Collection<Function<String, String>> parts = new ArrayList<>();
        final Path header = templateConfiguration.getHeader();
        if (header != null && Files.exists(header)) {
            final String content = read(header);
            parts.add(s -> content + s); // prepend header
        }
        final Path footer = templateConfiguration.getFooter();
        if (footer != null && Files.exists(footer)) {
            final String content = read(footer);
            parts.add(s -> s + content); // append footer
        }
        final Function<String, String> fn = parts.stream().reduce(identity(), Function::andThen);
        return (title, html) -> fn.apply(html).replace("${arthurTemplateTitle}", title);
    }

    /**
     * Creates the fixed-size pool used when threads > 1; threads are named after the output folder.
     */
    private ExecutorService newThreadPool(@Option("output") Path output, @Default("${sys.processorCount}") @Option("threads") int threads) {
        return new ThreadPoolExecutor(threads, threads, 1, MINUTES, new LinkedBlockingQueue<>(), new ThreadFactory() {
            private final AtomicInteger counter = new AtomicInteger();

            @Override
            public Thread newThread(final Runnable worker) {
                return new Thread(worker, "arthur-generator-" + counter.incrementAndGet() + "-[" + output + "]");
            }
        });
    }

    /**
     * Creates the directory (and parents) if missing, wrapping IO failures as unchecked.
     */
    private void ensureExists(final Path dir) {
        if (!Files.exists(dir)) {
            try {
                Files.createDirectories(dir);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
    }
}
| 6,117 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/interpolation/Sys.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.interpolation;
import java.lang.reflect.Method;
import org.tomitribe.crest.cmds.targets.Target;
import org.tomitribe.crest.contexts.DefaultsContext;
/**
 * Crest defaults context resolving {@code ${sys.xxx}} placeholders used by CLI option defaults.
 */
public class Sys implements DefaultsContext {
    @Override
    public String find(final Target target, final Method method, final String key) {
        // only ".processorCount" is handled: the available CPU count, floored at 1
        if (".processorCount".equals(key)) {
            final int processors = Runtime.getRuntime().availableProcessors();
            return Integer.toString(processors < 1 ? 1 : processors);
        }
        return null; // unknown key -> no default provided by this context
    }
}
| 6,118 |
0 | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation | Create_ds/geronimo-arthur/documentation/src/main/java/org/apache/geronimo/arthur/documentation/editor/PathEditor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.documentation.editor;
import java.nio.file.Paths;
import org.tomitribe.util.editor.AbstractConverter;
/**
 * Converter turning a raw string option into a {@link java.nio.file.Path},
 * enabling Path-typed CLI parameters to be bound.
 */
public class PathEditor extends AbstractConverter {
    @Override
    protected Object toObjectImpl(final String text) {
        return Paths.get(text); // string -> Path
    }

    // NOTE(review): no @Override here while toObjectImpl has one — if AbstractConverter
    // declares toStringImpl, adding @Override would be consistent; confirm against tomitribe-util.
    protected String toStringImpl(final Object value) {
        return String.valueOf(value); // null-safe: yields "null" for a null value
    }
}
| 6,119 |
0 | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven/extension/MavenArthurExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.maven.extension;
import java.util.Collection;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.DynamicProxyModel;
import org.apache.geronimo.arthur.spi.model.ResourceBundleModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
import lombok.RequiredArgsConstructor;
/**
 * Arthur extension fed by the maven plugin: it registers the inline
 * reflection/resource/bundle/dynamic-proxy models configured in the pom.
 * The models are passed through a thread local ({@link #with}) since the
 * extension is instantiated by the SPI and not directly by maven.
 */
public class MavenArthurExtension implements ArthurExtension {
    private static final ThreadLocal<Ctx> CONTEXT = new ThreadLocal<>();

    @Override
    public int order() {
        return 99; // run late so pom-level models can complete what other extensions registered
    }

    /**
     * Registers all models captured for the current thread; no-op when not
     * called through {@link #with}.
     */
    @Override
    public void execute(final Context context) {
        final Ctx ctx = CONTEXT.get();
        if (ctx == null) {
            return; // nothing was contributed for this thread
        }
        if (ctx.reflections != null && !ctx.reflections.isEmpty()) {
            ctx.reflections.forEach(context::register);
        }
        if (ctx.resources != null && !ctx.resources.isEmpty()) {
            ctx.resources.forEach(context::register);
        }
        if (ctx.bundles != null && !ctx.bundles.isEmpty()) {
            ctx.bundles.forEach(context::register);
        }
        if (ctx.dynamicProxies != null && !ctx.dynamicProxies.isEmpty()) {
            ctx.dynamicProxies.forEach(context::register);
        }
    }

    @Override
    public boolean equals(final Object obj) {
        return obj != null && MavenArthurExtension.class == obj.getClass();
    }

    @Override
    public int hashCode() {
        // added to honor the equals/hashCode contract: all instances compare equal
        // so they must share the same hash code
        return MavenArthurExtension.class.hashCode();
    }

    /**
     * Runs {@code task} with the given models visible to any {@code MavenArthurExtension}
     * executed on the same thread.
     */
    public static void with(final Collection<ClassReflectionModel> reflections,
                            final Collection<ResourceModel> resources,
                            final Collection<ResourceBundleModel> bundles,
                            final Collection<DynamicProxyModel> dynamicProxies,
                            final Runnable task) {
        CONTEXT.set(new Ctx(reflections, resources, bundles, dynamicProxies));
        try {
            task.run();
        } finally {
            CONTEXT.remove(); // always clean up to avoid leaking models across builds
        }
    }

    /** Holder for the models captured by {@link #with}. */
    @RequiredArgsConstructor
    private static class Ctx {
        private final Collection<ClassReflectionModel> reflections;
        private final Collection<ResourceModel> resources;
        private final Collection<ResourceBundleModel> bundles;
        private final Collection<DynamicProxyModel> dynamicProxies;
    }
}
| 6,120 |
0 | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven/mojo/ImageMojo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/package org.apache.geronimo.arthur.maven.mojo;
import static java.util.Optional.ofNullable;
import java.util.Optional;
import com.google.cloud.tools.jib.api.Containerizer;
import com.google.cloud.tools.jib.api.ImageReference;
import com.google.cloud.tools.jib.api.InvalidImageReferenceException;
import com.google.cloud.tools.jib.api.RegistryImage;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.settings.crypto.DefaultSettingsDecryptionRequest;
import org.apache.maven.settings.crypto.SettingsDecrypter;
import org.apache.maven.settings.crypto.SettingsDecryptionResult;
/**
 * Alternate mojo to jib:build to avoid to bundle useless files.
 * Can be replaced by vanilla jib when it will support it, see https://github.com/GoogleContainerTools/jib/issues/1857
 */
@Mojo(name = "image", threadSafe = true)
public class ImageMojo extends JibMojo {
    /**
     * Server identifier (in settings.xml) used to authenticate to the remote image registry.
     */
    @Parameter(property = "arthur.serverId")
    private String serverId;

    @Component
    private SettingsDecrypter settingsDecrypter;

    @Parameter(defaultValue = "${session}", readonly = true)
    private MavenSession session;

    /**
     * Builds a registry-backed containerizer for the configured target image,
     * wiring credentials from settings.xml when available.
     */
    @Override
    protected Containerizer createContainer() throws InvalidImageReferenceException {
        final ImageReference imageReference = ImageReference.parse(to);
        final RegistryImage registryImage = RegistryImage.named(imageReference);
        registerCredentials(imageReference, registryImage);
        return Containerizer.to(registryImage);
    }

    /**
     * Looks up the settings.xml server matching either the explicit serverId or,
     * as a fallback, the registry host of the image reference; decrypts it and
     * registers its credentials on the target image. Silently skips when no
     * matching server is found.
     */
    private void registerCredentials(final ImageReference reference, final RegistryImage registryImage) {
        final String id = serverId != null ? serverId : reference.getRegistry();
        ofNullable(id)
                .map(serverKey -> session.getSettings().getServer(serverKey))
                .map(DefaultSettingsDecryptionRequest::new)
                .map(settingsDecrypter::decrypt)
                .map(SettingsDecryptionResult::getServer)
                .ifPresent(server -> registryImage.addCredential(server.getUsername(), server.getPassword()));
    }
}
| 6,121 |
0 | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven/mojo/NativeImageMojo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.maven.mojo;
import lombok.Getter;
import org.apache.geronimo.arthur.impl.nativeimage.ArthurNativeImageConfiguration;
import org.apache.geronimo.arthur.impl.nativeimage.ArthurNativeImageExecutor;
import org.apache.geronimo.arthur.impl.nativeimage.generator.extension.AnnotationExtension;
import org.apache.geronimo.arthur.impl.nativeimage.installer.SdkmanGraalVMInstaller;
import org.apache.geronimo.arthur.maven.extension.MavenArthurExtension;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.DynamicProxyModel;
import org.apache.geronimo.arthur.spi.model.ResourceBundleModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.model.Dependency;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.DefaultDependencyResolutionRequest;
import org.apache.maven.project.DependencyResolutionException;
import org.apache.maven.project.DependencyResolutionRequest;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.apache.maven.project.ProjectDependenciesResolver;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
import org.apache.xbean.finder.AnnotationFinder;
import org.apache.xbean.finder.archive.Archive;
import org.apache.xbean.finder.archive.CompositeArchive;
import org.apache.xbean.finder.archive.FilteredArchive;
import org.apache.xbean.finder.filter.Filter;
import org.apache.xbean.finder.filter.Filters;
import org.eclipse.aether.graph.DependencyNode;
import org.eclipse.aether.graph.DependencyVisitor;
import javax.json.bind.Jsonb;
import javax.json.bind.JsonbBuilder;
import javax.json.bind.JsonbConfig;
import javax.json.bind.config.PropertyOrderStrategy;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static java.lang.ClassLoader.getSystemClassLoader;
import static java.util.Comparator.comparing;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import static lombok.AccessLevel.PROTECTED;
import static org.apache.maven.plugins.annotations.LifecyclePhase.PACKAGE;
import static org.apache.maven.plugins.annotations.ResolutionScope.TEST;
import static org.apache.xbean.finder.archive.ClasspathArchive.archive;
/**
* Generates a native binary from current project.
*/
@Mojo(name = "native-image", defaultPhase = PACKAGE, requiresDependencyResolution = TEST, threadSafe = true)
public class NativeImageMojo extends ArthurMojo {
//
// ArthurNativeImageConfiguration
//
/**
* custom native-image arguments.
*/
@Parameter(property = "arthur.customOptions")
private List<String> customOptions;
/**
* custom pre-built classpath, if not set it defaults on the project dependencies.
*/
@Parameter(property = "arthur.classpath")
private List<String> classpath;
/**
* JSON java.lang.reflect.Proxy configuration.
*/
@Parameter(property = "arthur.dynamicProxyConfigurationFiles")
private List<String> dynamicProxyConfigurationFiles;
/**
* JSON reflection configuration.
*/
@Parameter(property = "arthur.reflectionConfigurationFiles")
private List<String> reflectionConfigurationFiles;
/**
* JSON resources configuration.
*/
@Parameter(property = "arthur.resourcesConfigurationFiles")
private List<String> resourcesConfigurationFiles;
/**
* resource bundle qualified names to include.
*/
@Parameter(property = "arthur.includeResourceBundles")
private List<String> includeResourceBundles;
/**
* Classes to intiialize at run time.
*/
@Parameter(property = "arthur.initializeAtRunTime")
private List<String> initializeAtRunTime;
/**
* Classes to initialize at build time.
*/
@Parameter(property = "arthur.initializeAtBuildTime")
private List<String> initializeAtBuildTime;
/**
* Limit the number of compilable methods.
*/
@Parameter(property = "arthur.maxRuntimeCompileMethods", defaultValue = "1000")
private int maxRuntimeCompileMethods;
/**
* Enforce `maxRuntimeCompileMethods`.
*/
@Parameter(property = "arthur.enforceMaxRuntimeCompileMethods", defaultValue = "true")
private boolean enforceMaxRuntimeCompileMethods;
/**
* Should all charsets be added.
*/
@Parameter(property = "arthur.addAllCharsets", defaultValue = "true")
private boolean addAllCharsets;
/**
* Should exception stacks be reported.
*/
@Parameter(property = "arthur.reportExceptionStackTraces", defaultValue = "true")
private boolean reportExceptionStackTraces;
/**
* Should initialiation of classes be printed - mainly for debug purposes.
*/
@Parameter(property = "arthur.printClassInitialization", defaultValue = "false")
private boolean printClassInitialization;
/**
* Behavior when native compilation fails, it is recommended to keep it to "no".
* Supported values are `no`, `force` and `auto`.
*/
@Parameter(property = "arthur.fallbackMode", defaultValue = "no")
private ArthurNativeImageConfiguration.FallbackMode fallbackMode;
/**
* Should the image be static or dynamic (jvm part).
*/
@Parameter(property = "arthur.buildStaticImage", defaultValue = "true")
private boolean buildStaticImage;
/**
* Should incomplete classpath be tolerated.
*/
@Parameter(property = "arthur.allowIncompleteClasspath")
private Boolean allowIncompleteClasspath;
/**
* Should unsupported element be reported at runtime or not. It is not a recommended option but it is often needed.
*/
@Parameter(property = "arthur.reportUnsupportedElementsAtRuntime", defaultValue = "true")
private boolean reportUnsupportedElementsAtRuntime;
/**
* Should security services be included.
*/
@Parameter(property = "arthur.enableAllSecurityServices")
private Boolean enableAllSecurityServices;
/**
* Which main to compile.
*/
@Parameter(property = "arthur.main", required = true)
private String main;
/**
* Where to put the output binary.
*/
@Parameter(property = "arthur.output", defaultValue = "${project.build.directory}/${project.artifactId}.graal.bin")
private String output;
/**
* The execution will fork native-image process, should IO be inherited from maven process (recommended).
*/
@Getter(PROTECTED)
@Parameter(property = "arthur.inheritIO", defaultValue = "true")
private boolean inheritIO;
/**
* Should graal build server be used (a bit like gradle daemon), it is very discouraged to be used cause invalidation is not yet well handled.
* Deprecated in recent graalvm versions.
*/
@Deprecated
@Parameter(property = "arthur.noServer", defaultValue = "false")
private boolean noServer;
//
// Other maven injections
//
/**
* Inline configuration model (appended to `reflectionConfigurationFiles`).
*/
@Parameter
private List<ClassReflectionModel> reflections;
/**
* Inline resource bundle model (appended to `reflectionConfigurationFiles`).
*/
@Parameter
private List<ResourceBundleModel> bundles;
/**
* Inline resources model (appended to `resourcesConfigurationFiles`).
*/
@Parameter
private List<ResourceModel> resources;
/**
* Inline dynamic proxy configuration (appended to `dynamicProxyConfigurationFiles`).
*/
@Parameter
private List<DynamicProxyModel> dynamicProxies;
/**
* Should this mojo be skipped.
*/
@Parameter(property = "arthur.skip")
private boolean skip;
/**
* Should the build be done with test dependencies (and binaries).
*/
@Parameter(property = "arthur.supportTestArtifacts", defaultValue = "false")
private boolean supportTestArtifacts;
/**
* By default arthur runs the extension with a dedicated classloader built from the project having as parent the JVM,
* this enables to use the mojo as parent instead).
*/
@Parameter(property = "arthur.useTcclAsScanningParentClassLoader", defaultValue = "false")
private boolean useTcclAsScanningParentClassLoader;
/**
* groupId:artifactId list of ignored artifact during the pre-build phase.
*/
@Parameter(property = "arthur.excludedArtifacts")
private List<String> excludedArtifacts;
/**
* Classes or packages (startsWith is used to test entries).
*/
@Parameter(property = "arthur.scanningClassesOrPackagesExcludes")
private List<String> scanningClassesOrPackagesExcludes;
/**
* groupId:artifactId list of ignored artifact during the scanning phase.
* Compared to `excludedArtifacts`, it keeps the jar in the scanning/extension classloader
* but it does not enable to find any annotation in it.
* Note that putting `*` will disable the scanning completely.
*/
@Parameter(property = "arthur.scanningExcludedArtifacts")
private List<String> scanningExcludedArtifacts;
/**
* `<groupId>:<artifactId>[:<extension>[:<classifier>]]:<version>` list of artifacts appended to graal build.
* If you don't want transitive dependencies to be included, you can append to the coordinates `?transitive=false`.
*/
@Parameter(property = "arthur.graalExtensions")
private List<String> graalExtensions;
/**
* List of types used in the build classpath, typically enables to ignore tar.gz/natives for example.
*/
@Parameter(property = "arthur.supportedTypes", defaultValue = "jar,zip")
private List<String> supportedTypes;
/**
* Should jar be used instead of exploded folder (target/classes).
* Note this option disable the support of module test classes.
*/
@Parameter(property = "arthur.usePackagedArtifact", defaultValue = "false")
private boolean usePackagedArtifact;
/**
* Should binary artifact be attached.
*/
@Parameter(property = "arthur.attach", defaultValue = "true")
private boolean attach;
/**
* If `attach` is true, the classifier to use the binary file, `none` will skip the classifier.
*/
@Parameter(property = "arthur.attachClassifier", defaultValue = "arthur")
private String attachClassifier;
/**
* If `attach` is true, the type to use to attach the binary file.
*/
@Parameter(property = "arthur.attachType", defaultValue = "bin")
private String attachType;
/**
* Properties passed to the extensions if needed.
*/
@Parameter
private Map<String, String> extensionProperties;
@Parameter(defaultValue = "${project.packaging}", readonly = true)
private String packaging;
@Parameter(defaultValue = "${project.version}", readonly = true)
private String version;
@Parameter(defaultValue = "${project.artifactId}", readonly = true)
private String artifactId;
@Parameter(defaultValue = "${project.groupId}", readonly = true)
private String groupId;
@Parameter(defaultValue = "${project.build.directory}/${project.build.finalName}.${project.packaging}")
private File jar;
@Parameter(defaultValue = "${project.build.outputDirectory}")
private File classes;
@Parameter(defaultValue = "${project.build.testOutputDirectory}")
private File testClasses;
@Component
private ProjectDependenciesResolver dependenciesResolver;
@Component
private DependencyGraphBuilder graphBuilder;
@Component
private MavenProjectHelper helper;
private String cachedVersion;
/**
 * Main entry point: resolves the build classpath, provisions GraalVM when no
 * native-image binary is configured, runs the Arthur extensions over the scanned
 * classpath to generate the native-image configuration, executes the build and
 * finally exposes/attaches the produced binary.
 */
@Override
public void execute() {
if (skip) {
getLog().info("Skipping execution as requested");
return;
}
if ("pom".equals(packaging)) {
getLog().info("Skipping packaging pom");
return;
}
// artifact -> file mapping; on duplicate artifacts the first occurrence wins
final Map<Artifact, Path> classpathEntries = findClasspathFiles().collect(toMap(Map.Entry::getKey, Map.Entry::getValue, (a, b) -> a));
final ArthurNativeImageConfiguration configuration = getConfiguration(classpathEntries.values());
configuration.complete(graalVersion);
if (nativeImage == null) { // no local native-image configured: provision a GraalVM distribution
final SdkmanGraalVMInstaller graalInstaller = createInstaller();
final Path graalHome = graalInstaller.install();
getLog().info("Using GRAAL: " + graalHome);
configuration.setNativeImage(graalInstaller.installNativeImage().toAbsolutePath().toString());
}
final URL[] urls = classpathEntries.values().stream()
.map(it -> {
try {
return it.toUri().toURL();
} catch (final MalformedURLException e) {
throw new IllegalStateException(e);
}
}).toArray(URL[]::new);
final Thread thread = Thread.currentThread();
final ClassLoader parentLoader = useTcclAsScanningParentClassLoader ?
thread.getContextClassLoader() : getSystemClassLoader();
final ClassLoader oldLoader = thread.getContextClassLoader();
if (getLog().isDebugEnabled()) {
getLog().debug("Classpath:\n" + Stream.of(urls).map(URL::toExternalForm).collect(joining("\n")));
}
// scanning loader isolating the project classpath but delegating slf4j and the arthur
// api/spi/impl packages to the plugin loader so the extension contracts stay compatible
try (final URLClassLoader loader = new URLClassLoader(urls, parentLoader) {
@Override
protected Class<?> loadClass(final String name, final boolean resolve) throws ClassNotFoundException {
if (name != null) {
if (name.startsWith("org.")) {
final String org = name.substring("org.".length());
if (org.startsWith("slf4j.")) {
return oldLoader.loadClass(name);
}
if (org.startsWith("apache.geronimo.arthur.")) {
final String arthur = org.substring("apache.geronimo.arthur.".length());
// NOTE(review): "spi"/"impl" have no trailing dot unlike "api." — presumably intended, verify
if (arthur.startsWith("api.") || arthur.startsWith("spi") || arthur.startsWith("impl")) {
return oldLoader.loadClass(name);
}
}
}
}
return super.loadClass(name, resolve);
}
}; final Jsonb jsonb = JsonbBuilder.create(new JsonbConfig()
.setProperty("johnzon.cdi.activated", false)
.withPropertyOrderStrategy(PropertyOrderStrategy.LEXICOGRAPHICAL))) {
thread.setContextClassLoader(loader);
final Predicate<Artifact> scanningFilter = createScanningFilter();
final Function<Archive, Archive> archiveProcessor = createArchiveFilter();
// annotation finder over the (filtered) classpath, used by extensions to look up annotated members
final AnnotationFinder finder = new AnnotationFinder(archiveProcessor.apply(new CompositeArchive(classpathEntries.entrySet().stream()
.filter(e -> scanningFilter.test(e.getKey()))
.map(Map.Entry::getValue)
.map(path -> {
try {
return archive(loader, path.toUri().toURL());
} catch (final MalformedURLException e) { // unlikely
throw new IllegalStateException(e);
}
})
.collect(toList()))));
final AtomicBoolean finderLinked = new AtomicBoolean();
MavenArthurExtension.with(
reflections, resources, bundles, dynamicProxies,
() -> new ArthurNativeImageExecutor(
ArthurNativeImageExecutor.ExecutorConfiguration.builder()
.jsonSerializer(jsonb::toJson)
.annotatedClassFinder(finder::findAnnotatedClasses)
.annotatedFieldFinder(finder::findAnnotatedFields)
.annotatedMethodFinder(finder::findAnnotatedMethods)
.extensionProperties(getExtensionProperties())
.implementationFinder(p -> {
// lazily enable the costly subclass/implementation scanning on first use only
if (finderLinked.compareAndSet(false, true)) {
finder.enableFindImplementations().enableFindSubclasses();
}
final Class parent = Class.class.cast(p);
final List<Class<?>> implementations = finder.findImplementations(parent);
final List<Class<?>> subclasses = finder.findSubclasses(parent);
if (implementations.size() + subclasses.size() == 0) {
return implementations; // empty
}
final List<Class<?>> output = new ArrayList<>(implementations.size() + subclasses.size());
output.addAll(implementations);
output.addAll(subclasses);
return output;
})
.configuration(configuration)
.workingDirectory(workdir.toPath().resolve("generated_configuration"))
.build()) {
@Override
protected Iterable<ArthurExtension> loadExtensions() {
return Stream.concat(
// classloading bypasses them since TCCL is a fake loader with the JVM as parent
Stream.of(new AnnotationExtension(), new MavenArthurExtension()),
// graalextensions
StreamSupport.stream(super.loadExtensions().spliterator(), false))
// ensure we dont duplicate any extension
.distinct()
.sorted(comparing(ArthurExtension::order))
.collect(toList());
}
}
.run());
} catch (final Exception e) {
throw new IllegalStateException(e);
} finally {
thread.setContextClassLoader(oldLoader); // always restore the caller's TCCL
}
if (propertiesPrefix != null) {
// expose the binary path so downstream mojos (jib/docker) can pick it up
project.getProperties().setProperty(propertiesPrefix + "binary.path", output);
}
if (attach) {
if (!"none".equals(attachClassifier) && attachClassifier != null && !attachClassifier.isEmpty()) {
helper.attachArtifact(project, attachType, attachClassifier, new File(output));
} else {
helper.attachArtifact(project, attachType, new File(output));
}
}
}
/**
 * Builds the decorator applied to the scanning archive: when class/package prefix
 * exclusions are configured, wraps the archive in a {@link FilteredArchive} rejecting them.
 *
 * @return an archive decorator, identity when nothing is excluded.
 */
private Function<Archive, Archive> createArchiveFilter() {
    final boolean nothingExcluded = scanningClassesOrPackagesExcludes == null || scanningClassesOrPackagesExcludes.isEmpty();
    if (nothingExcluded) {
        return Function.identity();
    }
    final String[] prefixes = scanningClassesOrPackagesExcludes.stream()
            .map(String::trim)
            .filter(it -> !it.isEmpty())
            .distinct()
            .toArray(String[]::new);
    final Filter excluding = Filters.invert(Filters.prefixes(prefixes));
    return archive -> new FilteredArchive(archive, excluding);
}
/**
 * Merges user supplied extension properties with built-in defaults; the "classes"
 * key always points at the module build output directory unless overridden.
 *
 * @return mutable map of properties passed to Arthur extensions.
 */
private Map<String, String> getExtensionProperties() {
    final Map<String, String> props = new HashMap<>();
    if (extensionProperties != null) {
        props.putAll(extensionProperties);
    }
    props.putIfAbsent("classes", project.getBuild().getOutputDirectory());
    return props;
}
/**
 * Creates the artifact predicate deciding which classpath entries are scanned:
 * "*" disables scanning completely, an empty/absent list scans everything,
 * otherwise groupId:artifactId coordinates listed in scanningExcludedArtifacts are skipped.
 *
 * @return predicate returning true for artifacts to scan.
 */
private Predicate<Artifact> createScanningFilter() {
    final List<String> exclusions = scanningExcludedArtifacts;
    if (exclusions != null && exclusions.contains("*")) {
        return artifact -> false; // wildcard: scan nothing
    }
    if (exclusions == null || exclusions.isEmpty()) {
        return artifact -> true; // no exclusion: scan everything
    }
    return artifact -> {
        final String coordinates = artifact.getGroupId() + ':' + artifact.getArtifactId();
        return exclusions.stream().noneMatch(it -> it.equals(coordinates));
    };
}
/**
 * Computes the stream of (artifact, file) pairs forming the native-image classpath:
 * the module's own binaries (packaged jar, or classes plus optional test-classes),
 * the project dependencies filtered by exclusions/scope/type, then the configured
 * graal extensions. Non existing files are dropped at the end.
 */
private Stream<? extends Map.Entry<? extends Artifact, Path>> findClasspathFiles() {
// GAV describing the current module itself
final Artifact artifactGav = new org.apache.maven.artifact.DefaultArtifact(
groupId, artifactId, version, "compile", packaging, null, new DefaultArtifactHandler());
return Stream.concat(Stream.concat(
usePackagedArtifact ?
Stream.of(jar).map(j -> new AbstractMap.SimpleImmutableEntry<>(artifactGav, j.toPath())) :
Stream.concat(
Stream.of(classes).map(j -> new AbstractMap.SimpleImmutableEntry<>(artifactGav, j.toPath())),
supportTestArtifacts ? Stream.of(testClasses).<Map.Entry<Artifact, Path>>map(j ->
new AbstractMap.SimpleImmutableEntry<>(new org.apache.maven.artifact.DefaultArtifact(
groupId, artifactId, version, "compile", packaging, "test", new DefaultArtifactHandler()),
j.toPath())) :
Stream.empty()),
project.getArtifacts().stream()
.filter(a -> !excludedArtifacts.contains(a.getGroupId() + ':' + a.getArtifactId()))
.filter(this::handleTestInclusion)
.filter(this::isNotSvm) // substratevm comes with the graal distribution itself
.filter(a -> supportedTypes.contains(a.getType()))
.map(a -> new AbstractMap.SimpleImmutableEntry<>(a, a.getFile().toPath()))),
resolveExtension())
.filter(e -> Files.exists(e.getValue()));
}
/**
 * Keeps test scoped artifacts only when supportTestArtifacts is enabled.
 *
 * @param a candidate dependency artifact.
 * @return true when the artifact must stay on the classpath.
 */
private boolean handleTestInclusion(final Artifact a) {
    if (supportTestArtifacts) {
        return true;
    }
    return !"test".equals(a.getScope());
}
/**
 * Filters out substratevm/native-image artifacts, which are provided by the graal
 * distribution and must not land on the build classpath.
 *
 * @param artifact candidate dependency artifact.
 * @return true when the artifact is not a substratevm one.
 */
private boolean isNotSvm(final Artifact artifact) {
    final String group = artifact.getGroupId();
    return !("com.oracle.substratevm".equals(group) || "org.graalvm.nativeimage".equals(group));
}
/**
 * Resolves the configured graal extensions to (artifact, file) pairs. Each coordinate
 * may carry the "?transitive=false" suffix to skip transitive dependency resolution.
 *
 * @return stream of extension artifacts with their resolved local files.
 */
private Stream<? extends Map.Entry<? extends Artifact, Path>> resolveExtension() {
    if (graalExtensions == null) {
        return Stream.empty();
    }
    final String noTransitiveMarker = "?transitive=false";
    return graalExtensions.stream()
            .map(this::prepareExtension)
            .flatMap(coordinates -> {
                if (coordinates.endsWith(noTransitiveMarker)) {
                    final String gav = coordinates.substring(0, coordinates.length() - noTransitiveMarker.length());
                    return Stream.of(toArtifact(gav));
                }
                return resolveTransitiveDependencies(toArtifact(coordinates));
            })
            .map(resolved -> new AbstractMap.SimpleImmutableEntry<>(
                    toMavenArtifact(resolved),
                    resolve(resolved).getFile().toPath()));
}
/**
 * Resolves the transitive dependency graph of the given extension artifact by building
 * a synthetic maven project having it as single dependency and visiting the resolution graph.
 *
 * @param artifact root extension artifact.
 * @return the resolved artifacts (root included), ready to be added to the classpath.
 * @throws IllegalStateException if dependency resolution fails.
 */
private Stream<org.eclipse.aether.artifact.Artifact> resolveTransitiveDependencies(final org.eclipse.aether.artifact.Artifact artifact) {
final Dependency rootDependency = new Dependency();
rootDependency.setGroupId(artifact.getGroupId());
rootDependency.setArtifactId(artifact.getArtifactId());
rootDependency.setVersion(artifact.getVersion());
rootDependency.setClassifier(artifact.getClassifier());
rootDependency.setType(artifact.getExtension());
// fake project reusing the real project repositories so resolution behaves as usual
final MavenProject fakeProject = new MavenProject();
fakeProject.setRemoteArtifactRepositories(project.getRemoteArtifactRepositories());
fakeProject.setSnapshotArtifactRepository(project.getDistributionManagementArtifactRepository());
fakeProject.setPluginArtifactRepositories(project.getPluginArtifactRepositories());
fakeProject.getDependencies().add(rootDependency);
final DependencyResolutionRequest request = new DefaultDependencyResolutionRequest();
request.setMavenProject(fakeProject);
request.setRepositorySession(repositorySystemSession);
final Collection<org.eclipse.aether.artifact.Artifact> artifacts = new ArrayList<>();
try {
dependenciesResolver.resolve(request).getDependencyGraph().accept(new DependencyVisitor() {
@Override
public boolean visitEnter(org.eclipse.aether.graph.DependencyNode node) {
return true;
}
@Override
public boolean visitLeave(org.eclipse.aether.graph.DependencyNode node) {
final org.eclipse.aether.artifact.Artifact artifact = node.getArtifact();
if (artifact == null) { // artifact-less (virtual) node: collect its direct children instead
if (node.getChildren() != null) {
node.getChildren().stream()
.map(DependencyNode::getArtifact)
.filter(Objects::nonNull)
.forEach(artifacts::add);
} else {
getLog().warn(node + " has no artifact");
}
} else {
artifacts.add(artifact);
}
return true;
}
});
// ensure each collected artifact has a resolved local file
return artifacts.stream().map(NativeImageMojo.this::resolve);
} catch (final DependencyResolutionException e) {
throw new IllegalStateException(e.getMessage(), e);
}
}
/**
 * Expands a "knight" short name into full maven coordinates; anything already
 * containing a ':' is considered a complete coordinate and returned untouched.
 *
 * @param ext user provided extension reference.
 * @return resolvable maven coordinates for the extension.
 */
private String prepareExtension(final String ext) {
    if (ext.contains(":")) {
        return ext; // already a full coordinate
    }
    final String knightArtifactId = ext.endsWith("-knight") ? ext : ext + "-knight";
    return "org.apache.geronimo.arthur.knights:" + knightArtifactId + ':' + lookupVersion();
}
/**
 * Looks up this plugin's own version from its pom.properties descriptor, used as the
 * default version for knight extensions. The value is computed once and cached.
 *
 * @return the arthur-maven-plugin version.
 * @throws IllegalStateException if the descriptor is missing or unreadable.
 */
private String lookupVersion() {
    if (cachedVersion == null) {
        final Properties properties = new Properties();
        try (final InputStream stream = Thread.currentThread().getContextClassLoader()
                .getResourceAsStream("META-INF/maven/org.apache.geronimo.arthur/arthur-maven-plugin/pom.properties")) {
            if (stream == null) { // fail with a meaningful message instead of a NPE from Properties.load
                throw new IllegalStateException(
                        "Missing arthur-maven-plugin pom.properties, can't compute the default knight version");
            }
            properties.load(stream);
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
        cachedVersion = properties.getProperty("version");
    }
    return cachedVersion;
}
/**
 * Converts a resolved aether artifact to the maven-core representation used
 * as key in the classpath entry map.
 *
 * @param it resolved aether artifact.
 * @return equivalent maven artifact, in compile scope.
 */
private org.apache.maven.artifact.DefaultArtifact toMavenArtifact(final org.eclipse.aether.artifact.Artifact it) {
    final String scope = "compile";
    return new org.apache.maven.artifact.DefaultArtifact(
            it.getGroupId(), it.getArtifactId(), it.getVersion(), scope,
            it.getExtension(), it.getClassifier(), new DefaultArtifactHandler());
}
/**
 * Builds the native-image configuration by copying, through reflection, every
 * {@code ArthurNativeImageConfiguration} field annotated with {@code @GraalCommandPart}
 * from the mojo field of the same name (nativeImage living on the parent mojo class).
 *
 * @param classpathFiles classpath used as default when no explicit classpath is configured.
 * @return the configuration driving the native-image execution.
 */
private ArthurNativeImageConfiguration getConfiguration(final Collection<Path> classpathFiles) {
final ArthurNativeImageConfiguration configuration = new ArthurNativeImageConfiguration();
Stream.of(ArthurNativeImageConfiguration.class.getDeclaredFields())
.filter(field -> field.isAnnotationPresent(ArthurNativeImageConfiguration.GraalCommandPart.class))
.map(this::asAccessible)
.forEach(field -> {
// nativeImage is declared on the parent mojo, everything else on this class
final Class<?> fieldHolder;
if ("nativeImage".equals(field.getName())) {
fieldHolder = ArthurMojo.class;
} else {
fieldHolder = NativeImageMojo.class;
}
try {
final Field mojoField = asAccessible(fieldHolder.getDeclaredField(field.getName()));
field.set(configuration, mojoField.get(NativeImageMojo.this));
} catch (final NoSuchFieldException | IllegalAccessException e) {
throw new IllegalArgumentException(e);
}
});
if (configuration.getClasspath() == null || configuration.getClasspath().isEmpty()) {
// default the graal classpath on the computed build classpath
configuration.setClasspath(classpathFiles.stream().map(Path::toAbsolutePath).map(Object::toString).collect(toList()));
}
configuration.setInheritIO(inheritIO);
return configuration;
}
/**
 * Ensures the given reflective field can be read/written by this mojo.
 *
 * @param field field to open.
 * @return the same field instance, now accessible.
 */
private Field asAccessible(final Field field) {
    final boolean alreadyAccessible = field.isAccessible();
    if (!alreadyAccessible) {
        field.setAccessible(true);
    }
    return field;
}
}
| 6,122 |
0 | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven/mojo/DockerMojo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.maven.mojo;
import com.google.cloud.tools.jib.api.Containerizer;
import com.google.cloud.tools.jib.api.DockerDaemonImage;
import com.google.cloud.tools.jib.api.InvalidImageReferenceException;
import org.apache.maven.plugins.annotations.Mojo;
/**
 * Alternate mojo to jib:dockerBuild avoiding to bundle useless files.
 * Can be replaced by vanilla jib when it supports it, see https://github.com/GoogleContainerTools/jib/issues/1857
 */
@Mojo(name = "docker", threadSafe = true)
public class DockerMojo extends JibMojo {
    /**
     * {@inheritDoc} Targets the local docker daemon with the configured image name.
     */
    @Override
    protected Containerizer createContainer() throws InvalidImageReferenceException {
        final DockerDaemonImage image = DockerDaemonImage.named(to);
        return Containerizer.to(image);
    }
}
| 6,123 |
0 | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven/mojo/JibMojo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.maven.mojo;
import com.google.cloud.tools.jib.api.CacheDirectoryCreationException;
import com.google.cloud.tools.jib.api.Containerizer;
import com.google.cloud.tools.jib.api.ImageReference;
import com.google.cloud.tools.jib.api.InvalidImageReferenceException;
import com.google.cloud.tools.jib.api.Jib;
import com.google.cloud.tools.jib.api.JibContainer;
import com.google.cloud.tools.jib.api.JibContainerBuilder;
import com.google.cloud.tools.jib.api.LogEvent;
import com.google.cloud.tools.jib.api.Ports;
import com.google.cloud.tools.jib.api.RegistryException;
import com.google.cloud.tools.jib.api.buildplan.AbsoluteUnixPath;
import com.google.cloud.tools.jib.api.buildplan.FileEntriesLayer;
import com.google.cloud.tools.jib.api.buildplan.FileEntry;
import com.google.cloud.tools.jib.api.buildplan.FilePermissions;
import org.apache.maven.plugins.annotations.Parameter;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Stream;
import static java.util.Collections.singletonList;
import static java.util.Objects.requireNonNull;
import static java.util.Optional.ofNullable;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
public abstract class JibMojo extends ArthurMojo {
/**
* Base image to use. Scratch will ensure it starts from an empty image and is the most minimal option.
* For a partially linked use busybox:glibc.
* Note that using scratch can require you to turn on useLDD flag (not by default since it depends in your build OS).
* On the opposite, using an existing distribution (debian, fedora, ...) enables to not do that at the cost of a bigger overall image.
* However, not only the overall image size is important, the reusable layers can save network time so pick what fits the best your case.
*/
@Parameter(property = "arthur.from", defaultValue = "scratch")
private String from;
/**
* Ports to expose.
*/
@Parameter(property = "arthur.ports")
private List<String> ports;
/**
* Other files to include in the image, note that their permissions will not be executable.
*/
@Parameter(property = "arthur.files")
private List<File> otherFiles;
/**
* Program arguments.
*/
@Parameter(property = "arthur.programArguments")
private List<String> programArguments;
/**
* Image environment.
*/
@Parameter(property = "arthur.environment")
private Map<String, String> environment;
/**
* Image labels.
*/
@Parameter(property = "arthur.labels")
private Map<String, String> labels;
/**
* Timestamp creation for the image, it is recommended to set it fixed for reproducibility.
*/
@Parameter(property = "arthur.creationTimestamp", defaultValue = "1")
private long creationTimestamp;
/**
* Entry point to use.
*/
@Parameter(property = "arthur.entrypoint", defaultValue = "/${project.artifactId}")
private List<String> entrypoint;
/**
* Where is the binary to include. It defaults on native-image output if done before in the same execution
*/
@Parameter(property = "arthur.binarySource", defaultValue = "${project.build.directory}/${project.artifactId}.graal.bin")
private File binarySource;
/**
* Should base images be cached.
*/
@Parameter(property = "arthur.enableCache", defaultValue = "true")
private boolean enableCache;
/**
* Are insecure registries allowed.
*/
@Parameter(property = "arthur.allowInsecureRegistries", defaultValue = "false")
private boolean allowInsecureRegistries;
/**
* Where to cache application layers.
*/
@Parameter(property = "arthur.applicationLayersCache", defaultValue = "${project.build.directory}/arthur_jib_cache/application")
private File applicationLayersCache;
/**
* Where to cache base layers layers (if any).
*/
@Parameter(property = "arthur.baseLayersCache", defaultValue = "${project.build.directory}/arthur_jib_cache/base")
private File baseLayersCache;
/**
* Number of threads used to build.
*/
@Parameter(property = "arthur.threads", defaultValue = "1")
private int threads;
/**
* Build timeout in milliseconds if it is using threads > 1.
*/
@Parameter(property = "arthur.timeout", defaultValue = "3600000")
private long timeout;
/**
* Target image name.
*/
@Parameter(property = "arthur.to", defaultValue = "${project.artifactId}:${project.version}")
protected String to;
/**
* Should JVM native libraries be included, it is useful to get libraries like sunec (security).
* Value can be `false` to disable it (empty or null works too), `true` to include them all
* or a list of lib names like `sunec`.
*/
@Parameter(property = "arthur.includeNatives", defaultValue = "false")
protected List<String> includeNatives;
/**
* When includeNatives, the directory which will contain the natives in the image.
*/
@Parameter(property = "arthur.nativeRootDir", defaultValue = "/native")
protected String nativeRootDir;
/**
* Should cacerts be included.
*/
@Parameter(property = "arthur.includeCacerts", defaultValue = "false")
protected boolean includeCacerts;
/**
* When includeCacerts, the file which will contain the certificates in the image.
*/
@Parameter(property = "arthur.cacertsDir", defaultValue = "/certificates/cacerts")
protected String cacertsTarget;
/**
* If true, the created binary will be passed to ldd to detect the needed libraries.
* It enables to use FROM scratch even when the binary requires dynamic linking.
* Note that if ld-linux libraries is found by that processing it is set as first argument of the entrypoint
* until skipLdLinuxInEntrypoint is set to true.
*/
@Parameter(property = "arthur.useLDD", defaultValue = "false")
protected boolean useLDD;
/**
* If true, and even if useLDD is true, ld-linux will not be in entrypoint.
*/
@Parameter(property = "arthur.skipLdLinuxInEntrypoint", defaultValue = "false")
protected boolean skipLdLinuxInEntrypoint;
/**
 * Creates the jib containerizer targeting the configured image destination
 * (e.g. the local docker daemon in DockerMojo).
 *
 * @return the containerizer used to build {@code to}.
 * @throws InvalidImageReferenceException if the target image reference is malformed.
 */
protected abstract Containerizer createContainer() throws InvalidImageReferenceException;
/**
 * Builds the container image: prepares the jib container then containerizes it,
 * optionally on a thread pool, and exposes the image id/digest as maven project
 * properties when {@code propertiesPrefix} is configured.
 */
@Override
public void execute() {
final JibContainerBuilder prepared = prepare();
withExecutor(es -> {
try {
final Containerizer containerizer = createContainer();
final JibContainer container = prepared.containerize(configure(containerizer, es));
if (propertiesPrefix != null) {
// let downstream plugins/mojos consume the build result
project.getProperties().setProperty(propertiesPrefix + "image.imageId", container.getImageId().getHash());
project.getProperties().setProperty(propertiesPrefix + "image.digest", container.getDigest().getHash());
}
getLog().info("Built '" + to + "'");
} catch (final InterruptedException e) {
Thread.currentThread().interrupt(); // restore the interruption flag and abort quietly
} catch (final RegistryException | IOException | CacheDirectoryCreationException | ExecutionException | InvalidImageReferenceException e) {
throw new IllegalStateException(e);
}
});
}
/**
 * Runs the given action with a build executor: a named fixed pool of {@code threads}
 * threads when parallelism is requested, otherwise {@code null} (jib then picks its default).
 * The pool is always shut down and awaited up to {@code timeout} milliseconds.
 *
 * @param consumer action receiving the executor (may be null).
 */
private void withExecutor(final Consumer<ExecutorService> consumer) {
    if (threads <= 1) {
        consumer.accept(null);
        return;
    }
    final AtomicInteger counter = new AtomicInteger();
    final ThreadFactory factory = r -> new Thread(r, JibMojo.class.getName() + "-" + counter.incrementAndGet());
    final ExecutorService executorService = Executors.newFixedThreadPool(threads, factory);
    try {
        consumer.accept(executorService);
    } finally {
        executorService.shutdown();
        try {
            executorService.awaitTermination(timeout, MILLISECONDS);
        } catch (final InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Applies the mojo configuration (caching, offline mode, insecure registries, executor)
 * to the containerizer and bridges jib log events onto the maven logger.
 *
 * @param to containerizer to configure.
 * @param executorService executor used by jib, may be null to let jib run with its default.
 * @return the configured containerizer (same instance).
 */
private Containerizer configure(final Containerizer to, final ExecutorService executorService) {
to.setAlwaysCacheBaseImage(enableCache);
to.setAllowInsecureRegistries(allowInsecureRegistries);
to.setApplicationLayersCache(applicationLayersCache.toPath());
to.setBaseImageLayersCache(baseLayersCache.toPath());
to.setOfflineMode(offline);
to.setToolName("Arthur " + getClass().getSimpleName().replace("Mojo", ""));
to.setExecutorService(executorService);
// forward jib log events to the maven logger at the closest matching level
to.addEventHandler(LogEvent.class, event -> {
switch (event.getLevel()) {
case INFO:
case LIFECYCLE:
case PROGRESS:
getLog().info(event.getMessage());
break;
case WARN:
getLog().warn(event.getMessage());
break;
case ERROR:
getLog().error(event.getMessage());
break;
case DEBUG:
default:
getLog().debug(event.getMessage());
break;
}
});
return to;
}
/**
 * Prepares the jib container: base image, exposed ports, labels, program arguments,
 * layers (certificates, natives, ldd libraries, other files, the binary itself) and
 * the resulting entrypoint.
 *
 * Fix: the entrypoint is now validated before being dereferenced for the binary layer;
 * previously {@code entrypoint.iterator().next()} ran before the null/empty check and
 * a missing entrypoint surfaced as a NullPointerException instead of the intended error.
 *
 * @return the configured container builder ready to be containerized.
 * @throws IllegalArgumentException when no entrypoint is configured.
 */
private JibContainerBuilder prepare() {
    try {
        final JibContainerBuilder from = Jib.from(ImageReference.parse(this.from));
        if (ports != null) {
            from.setExposedPorts(Ports.parse(ports));
        }
        if (labels != null) {
            from.setLabels(labels);
        }
        if (programArguments != null) {
            from.setProgramArguments(programArguments);
        }
        // negative timestamp means "now", anything else is fixed for reproducible builds
        from.setCreationTime(creationTimestamp < 0 ? Instant.now() : Instant.ofEpochMilli(creationTimestamp));
        if (entrypoint == null || entrypoint.size() < 1) { // validate before using it for the binary layer
            throw new IllegalArgumentException("No entrypoint set");
        }
        final boolean hasNatives = useLDD || (includeNatives != null && !includeNatives.isEmpty() && !singletonList("false").equals(includeNatives));
        final Path source = ofNullable(binarySource)
                .map(File::toPath)
                .orElseGet(() -> Paths.get(requireNonNull(
                        project.getProperties().getProperty(propertiesPrefix + "binary.path"),
                        "No binary path found, ensure to run native-image before or set entrypoint")));
        final Map<String, String> env = environment == null ? new TreeMap<>() : new TreeMap<>(environment);
        final List<FileEntriesLayer> layers = new ArrayList<>(8);
        if (includeCacerts) {
            layers.add(findCertificates());
        }
        String ldLinux = null;
        if (hasNatives) {
            if (!singletonList("false").equals(includeNatives)) {
                layers.add(findNatives());
            }
            if (useLDD) {
                ldLinux = addLddLibsAndFindLdLinux(env, layers);
            }
        }
        if (otherFiles != null && !otherFiles.isEmpty()) {
            layers.add(createOthersLayer());
        }
        // the binary lands at the first entrypoint element's path, executable
        layers.add(FileEntriesLayer.builder()
                .setName("Binary")
                .addEntry(new FileEntry(
                        source, AbsoluteUnixPath.get(entrypoint.iterator().next()), FilePermissions.fromOctalString("755"),
                        getTimestamp(source)))
                .build());
        from.setFileEntriesLayers(layers);
        if (!env.isEmpty()) {
            from.setEnvironment(env);
        }
        // entrypoint = [ld-linux?] + configured entrypoint + [library path?] + [truststore?]
        from.setEntrypoint(Stream.concat(Stream.concat(Stream.concat(
                useLDD && ldLinux != null && !skipLdLinuxInEntrypoint ? Stream.of(ldLinux) : Stream.empty(),
                entrypoint.stream()),
                hasNatives ? Stream.of("-Djava.library.path=" + nativeRootDir) : Stream.empty()),
                includeCacerts ? Stream.of("-Djavax.net.ssl.trustStore=" + cacertsTarget) : Stream.empty())
                .collect(toList()));
        return from;
    } catch (final InvalidImageReferenceException | IOException e) {
        throw new IllegalStateException(e);
    }
}
// todo: enrich to be able to use manual resolution using LD_LIBRARY_PATH or default without doing an exec
/**
 * Runs {@code ldd} on the produced binary to collect the shared libraries it links
 * against, adds them as a "Libraries" layer and exports LD_LIBRARY_PATH in the image env.
 *
 * Fix: the loader-selection comparator returned -1 for both "a ends with .so.2" and
 * "b ends with .so.2", violating the Comparator contract (sgn symmetry) and potentially
 * selecting a non-".so.2" loader; the second branch now returns 1.
 *
 * @param env image environment, enriched with LD_LIBRARY_PATH when libraries are found.
 * @param layers layer list the "Libraries" layer is appended to.
 * @return the absolute container path of the ld-linux loader if one was copied, else null.
 * @throws IOException if ldd can't be launched.
 */
private String addLddLibsAndFindLdLinux(final Map<String, String> env, final List<FileEntriesLayer> layers) throws IOException {
    getLog().info("Running ldd on " + binarySource.getName());
    final Process ldd = new ProcessBuilder("ldd", binarySource.getAbsolutePath()).start();
    try {
        final int status = ldd.waitFor();
        if (status != 0) {
            throw new IllegalArgumentException("LDD failed with status " + status + ": " + slurp(ldd.getErrorStream()));
        }
    } catch (final InterruptedException e) {
        throw new IllegalStateException(e);
    }
    final Collection<Path> files;
    try (final BufferedReader reader = new BufferedReader(new InputStreamReader(ldd.getInputStream(), StandardCharsets.UTF_8))) {
        // interesting ldd lines look like "libfoo.so.1 => /lib/libfoo.so.1 (0x...)"
        files = reader.lines()
                .filter(it -> it.contains("/"))
                .map(it -> {
                    final int start = it.indexOf('/');
                    int end = it.indexOf(' ', start);
                    if (end < 0) {
                        end = it.indexOf('(', start);
                        if (end < 0) {
                            end = it.length();
                        }
                    }
                    return it.substring(start, end);
                })
                .map(Paths::get)
                .filter(Files::exists)
                .collect(toList());
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }
    String ldLinux = null;
    if (!files.isEmpty()) {
        final FileEntriesLayer.Builder libraries = FileEntriesLayer.builder()
                .setName("Libraries");
        // copy libs + tries to extract ld-linux-x86-64.so.2 if present
        ldLinux = files.stream()
                .map(file -> {
                    final String fileName = file.getFileName().toString();
                    try {
                        libraries.addEntry(
                                file, AbsoluteUnixPath.get(nativeRootDir).resolve(fileName),
                                FilePermissions.fromPosixFilePermissions(Files.getPosixFilePermissions(file)), getTimestamp(file));
                    } catch (final IOException e) {
                        throw new IllegalStateException(e);
                    }
                    return fileName;
                })
                .filter(it -> it.startsWith("ld-linux"))
                .min((a, b) -> { // best is "ld-linux-x86-64.so.2"
                    if ("ld-linux-x86-64.so.2".equals(a)) {
                        return -1;
                    }
                    if ("ld-linux-x86-64.so.2".equals(b)) {
                        return 1;
                    }
                    if (a.endsWith(".so.2")) {
                        return -1;
                    }
                    if (b.endsWith(".so.2")) {
                        return 1; // was -1: broke the comparator contract and could pick the wrong loader
                    }
                    return a.compareTo(b);
                })
                .map(it -> AbsoluteUnixPath.get(nativeRootDir).resolve(it).toString()) // make it absolute since it will be added to the entrypoint
                .orElse(null);
        layers.add(libraries.build());
        env.putIfAbsent("LD_LIBRARY_PATH", AbsoluteUnixPath.get(nativeRootDir).toString());
    }
    return ldLinux;
}
/**
 * Drains the whole stream as UTF-8 text, used to surface process error output.
 *
 * @param stream stream to read, may be null.
 * @return the stream content joined with newlines, or "-" when there is no stream.
 */
private String slurp(final InputStream stream) {
    if (stream == null) {
        return "-";
    }
    try (final BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
        final StringBuilder content = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            if (content.length() > 0) {
                content.append('\n');
            }
            content.append(line);
        }
        return content.toString();
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }
}
/**
 * Locates the GraalVM home: two directory levels up from the configured native-image
 * binary (assumes a {@code <home>/bin/native-image} layout — TODO confirm), or the
 * directory of a freshly installed distribution when no binary is configured.
 *
 * @return the GraalVM home directory.
 */
private Path findHome() {
    if (nativeImage != null) {
        final Path nativeImageBinary = Paths.get(nativeImage);
        return nativeImageBinary.getParent().getParent();
    }
    return createInstaller().install();
}
/**
 * Finds the JVM cacerts in the GraalVM home to embed it into the image.
 * Checks the pre-JDK9 layout ({@code jre/lib/security}) first then falls back to the
 * modern layout ({@code lib/security}), consistently with findNatives() folder lookup.
 *
 * @return the "Certificates" layer containing the cacerts file.
 * @throws IllegalArgumentException when no cacerts can be found in the home.
 */
private FileEntriesLayer findCertificates() {
    final Path home = findHome();
    getLog().info("Using certificates from '" + home + "'");
    Path cacerts = home.resolve("jre/lib/security/cacerts");
    if (!Files.exists(cacerts)) {
        cacerts = home.resolve("lib/security/cacerts"); // JDK9+ homes have no jre directory
        if (!Files.exists(cacerts)) {
            throw new IllegalArgumentException("Missing cacerts in '" + home + "'");
        }
    }
    return FileEntriesLayer.builder()
            .setName("Certificates")
            .addEntry(cacerts, AbsoluteUnixPath.get(cacertsTarget))
            .build();
}
/**
 * Collects JVM native libraries from the GraalVM home into a "Natives" layer, handling
 * the windows/cygwin ({@code jre/lib}), older linux ({@code jre/lib/<arch>}) and modern
 * ({@code lib}) home layouts.
 *
 * Fix: the {@code Files.list} stream used to probe for ".so" files was never closed,
 * leaking a directory handle; it is now wrapped in try-with-resources (extracted helper).
 *
 * @return the "Natives" layer with the selected libraries.
 * @throws IllegalArgumentException if a requested native library can't be found.
 */
private FileEntriesLayer findNatives() {
    final Path home = findHome();
    getLog().info("Using natives from '" + home + "'");
    final Path jreLib = home.resolve("jre/lib");
    final boolean isWin = Files.exists(jreLib.resolve("java.lib"));
    Path nativeFolder = isWin ?
            jreLib /* win/cygwin */ :
            jreLib.resolve(System.getProperty("os.arch", "amd64")); // older graalvm, for 20.x it is no more needed
    if (!Files.exists(nativeFolder)) {
        nativeFolder = nativeFolder.getParent();
        try {
            if (!Files.exists(nativeFolder) || !containsSharedObject(nativeFolder)) {
                nativeFolder = home.resolve("lib");
                if (!Files.exists(nativeFolder)) { // java 17 and after
                    throw new IllegalArgumentException("No native folder '" + nativeFolder + "' found.");
                }
            }
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }
    final boolean includeAll = singletonList("true").equals(includeNatives) || singletonList("*").equals(includeNatives);
    final Predicate<Path> include = includeAll ?
            p -> true : path -> {
        final String name = path.getFileName().toString();
        // match the platform specific library file name for each requested short name
        return includeNatives.stream().anyMatch(n -> name.contains(isWin ? (n + ".lib") : ("lib" + n + ".so")));
    };
    final FileEntriesLayer.Builder builder = FileEntriesLayer.builder();
    final Collection<String> collected = new ArrayList<>();
    final Path nativeDir = nativeFolder; // ref for lambda
    try {
        Files.walkFileTree(nativeFolder, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException {
                if (include.test(file)) {
                    collected.add(file.getFileName().toString());
                    builder.addEntry(
                            file, AbsoluteUnixPath.get(nativeRootDir).resolve(nativeDir.relativize(file).toString()),
                            FilePermissions.DEFAULT_FILE_PERMISSIONS, getTimestamp(file));
                }
                return super.visitFile(file, attrs);
            }
        });
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }
    if (!includeAll && collected.size() != includeNatives.size()) {
        throw new IllegalArgumentException("Found " + collected + " but was configured to extract " + includeNatives);
    }
    return builder.setName("Natives").build();
}

/** Tells if the folder directly contains at least one ".so" file, closing the listing stream. */
private boolean containsSharedObject(final Path folder) throws IOException {
    try (final Stream<Path> children = Files.list(folder)) { // Files.list must be closed to release the directory handle
        return children.anyMatch(it -> it.getFileName().toString().endsWith(".so"));
    }
}
/**
 * Computes the layer entry timestamp: the fixed {@code creationTimestamp}, or the file
 * last-modified time when {@code creationTimestamp} is negative.
 *
 * @param source file whose modification time may be used.
 * @return the timestamp to record in the layer entry.
 * @throws IOException if the file attributes can't be read.
 */
private Instant getTimestamp(final Path source) throws IOException {
    if (creationTimestamp < 0) {
        return Files.getLastModifiedTime(source).toInstant();
    }
    return Instant.ofEpochMilli(creationTimestamp);
}
/**
 * Builds the "Others" image layer from the user-configured extra files.
 * Each file is placed in the container at its path relative to the project
 * basedir; files outside the basedir are rejected. Directories are added
 * recursively with default permissions and the configured timestamp.
 */
private FileEntriesLayer createOthersLayer() {
    final FileEntriesLayer.Builder layer = FileEntriesLayer.builder().setName("Others");
    for (final File file : otherFiles) {
        final Path source = file.toPath();
        final Path basedir = project.getBasedir().toPath();
        final AbsoluteUnixPath target = AbsoluteUnixPath.get(basedir.relativize(source).toString());
        // relativize() yields ".." segments when source escapes basedir -> forbidden
        if (target.toString().contains("..")) {
            throw new IllegalArgumentException("You can only include files included in basedir");
        }
        try {
            if (!Files.isDirectory(source)) {
                layer.addEntry(source, target, FilePermissions.DEFAULT_FILE_PERMISSIONS, getTimestamp(source));
            } else {
                layer.addEntryRecursive(
                        source, target,
                        (local, inContainer) -> FilePermissions.DEFAULT_FILE_PERMISSIONS,
                        (local, inContainer) -> {
                            try {
                                return getTimestamp(local);
                            } catch (final IOException e) {
                                throw new IllegalStateException(e);
                            }
                        });
            }
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }
    return layer.build();
}
}
| 6,124 |
0 | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven | Create_ds/geronimo-arthur/arthur-maven-plugin/src/main/java/org/apache/geronimo/arthur/maven/mojo/ArthurMojo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.maven.mojo;
import org.apache.geronimo.arthur.impl.nativeimage.archive.Extractor;
import org.apache.geronimo.arthur.impl.nativeimage.installer.SdkmanGraalVMInstaller;
import org.apache.geronimo.arthur.impl.nativeimage.installer.SdkmanGraalVMInstallerConfiguration;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.installation.InstallRequest;
import org.eclipse.aether.installation.InstallResult;
import org.eclipse.aether.installation.InstallationException;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResolutionException;
import org.eclipse.aether.resolution.ArtifactResult;
import java.io.File;
import java.nio.file.Path;
import java.util.List;
import java.util.stream.Stream;
import static java.util.Collections.emptyMap;
import static java.util.Locale.ROOT;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.joining;
/**
 * Base class for Arthur mojos requiring a GraalVM {@code native-image} executable.
 * When {@link #nativeImage} is not set explicitly, a GraalVM distribution is
 * downloaded (sdkman broker, GitHub CE builds or Oracle downloads depending on
 * {@link #graalVersion}/{@link #graalDownloadUrl}), cached in the local Maven
 * repository under {@link #graalCacheGav} and extracted into {@link #workdir}.
 */
public abstract class ArthurMojo extends AbstractMojo {
    /**
     * native-image binary to use, if not set it will install graal in the local repository.
     */
    @Parameter(property = "arthur.nativeImage")
    protected String nativeImage;

    /**
     * Once built, the binary path is set in maven properties.
     * This enables to configure the prefix to use.
     */
    @Parameter(defaultValue = "arthur.", property = "arthur.propertiesPrefix")
    protected String propertiesPrefix;

    @Parameter(defaultValue = "${project}", readonly = true)
    protected MavenProject project;

    // mirrors the Maven settings offline flag; forwarded to the installer
    @Parameter(defaultValue = "${settings.offline}", readonly = true)
    protected boolean offline;

    //
    // Installer parameters
    //

    /**
     * In case Graal must be downloaded to get native-image, where to take it from.
     */
    @Parameter(property = "arthur.graalDownloadUrl", defaultValue = "auto")
    private String graalDownloadUrl;

    /**
     * In case Graal must be downloaded to get native-image, which version to download.
     * It contains the graal version and can be suffixed by the graal java version prefixed with "r" (as on sdkman).
     * Alternatively, in more recent version you can use "$javaVersion-graalce" or "$javaVersion-graal-oracle" to use the appropriated mirror.
     */
    @Parameter(property = "arthur.graalVersion", defaultValue = "20.3.0.r8")
    protected String graalVersion;

    /**
     * In case Graal must be downloaded to get native-image, which platform to download, auto will handle it for you.
     */
    @Parameter(property = "arthur.graalPlatform", defaultValue = "auto")
    private String graalPlatform;

    /**
     * In case Graal must be downloaded to get native-image, it will be cached in the local repository with this gav.
     */
    @Parameter(property = "arthur.graalCacheGav", defaultValue = "org.apache.geronimo.arthur.cache:graal")
    private String graalCacheGav; // groupId:artifactId

    /**
     * Where the temporary files are created.
     */
    @Parameter(defaultValue = "${project.build.directory}/arthur_workdir")
    protected File workdir;

    @Parameter(defaultValue = "${repositorySystemSession}")
    protected RepositorySystemSession repositorySystemSession;

    @Parameter(defaultValue = "${project.remoteProjectRepositories}")
    private List<RemoteRepository> remoteRepositories;

    @Component
    private RepositorySystem repositorySystem;

    /**
     * @return whether processes launched by the installer inherit this JVM's
     * stdout/stderr; subclasses can override to surface installer output.
     */
    protected boolean isInheritIO() {
        return false;
    }

    /**
     * Creates the installer which downloads (or reuses from the local repository
     * cache) and extracts the GraalVM distribution.
     *
     * @return a configured sdkman-style GraalVM installer.
     */
    protected SdkmanGraalVMInstaller createInstaller() {
        final String graalPlatform = buildPlatform();
        final Extractor extractor = new Extractor();
        final String url = buildDownloadUrl(graalPlatform);
        getLog().debug("Graal URL: " + url);
        return new SdkmanGraalVMInstaller(SdkmanGraalVMInstallerConfiguration.builder()
                .offline(offline)
                .inheritIO(isInheritIO())
                .url(url)
                .version(graalVersion)
                .platform(graalPlatform)
                .gav(buildCacheGav(graalPlatform))
                .workdir(workdir.toPath())
                .resolver(gav -> resolve(toArtifact(gav)).getFile().toPath())
                .installer((gav, file) -> install(file.toFile(), toArtifact(gav)))
                .extractor(extractor::unpack)
                .build());
    }

    /**
     * Parses a {@code groupId:artifactId[:extension[:classifier]]:version} string
     * into an Aether artifact.
     */
    protected org.eclipse.aether.artifact.Artifact toArtifact(final String s) {
        return new DefaultArtifact(s);
    }

    /**
     * Installs {@code file} into the local Maven repository under the coordinates
     * of {@code art}.
     *
     * @param file the downloaded archive to cache.
     * @param art  the coordinates to install it at.
     * @return the path of the installed artifact as resolved back from the repository.
     * @throws IllegalStateException when the installation or resolution fails.
     */
    protected Path install(final File file, final org.eclipse.aether.artifact.Artifact art) {
        // rebuild the artifact with the concrete file attached before installing
        final org.eclipse.aether.artifact.Artifact artifact = new DefaultArtifact(
                art.getGroupId(),
                art.getArtifactId(),
                art.getClassifier(),
                art.getExtension(),
                art.getVersion(),
                emptyMap(),
                file);
        try {
            final InstallResult result = repositorySystem.install(
                    repositorySystemSession,
                    new InstallRequest().addArtifact(artifact));
            if (result.getArtifacts().isEmpty()) {
                throw new IllegalStateException("Can't install " + art);
            }
            // resolve again to get the canonical local-repository location
            return resolve(art).getFile().toPath();
        } catch (final InstallationException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Resolves an artifact against the project remote repositories (and the local one).
     *
     * @throws IllegalStateException when the artifact is missing or resolution fails.
     */
    protected org.eclipse.aether.artifact.Artifact resolve(final org.eclipse.aether.artifact.Artifact art) {
        final ArtifactRequest artifactRequest =
                new ArtifactRequest().setArtifact(art).setRepositories(remoteRepositories);
        try {
            final ArtifactResult result = repositorySystem.resolveArtifact(repositorySystemSession, artifactRequest);
            if (result.isMissing()) {
                throw new IllegalStateException("Can't find " + art);
            }
            return result.getArtifact();
        } catch (final ArtifactResolutionException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    // cache coordinates: windows distributions ship as zip, anything else as tar.gz
    private String buildCacheGav(final String graalPlatform) {
        if (graalPlatform.toLowerCase(ROOT).contains("win")) {
            return graalCacheGav + ":zip:" + graalPlatform + ':' + graalVersion;
        }
        // linux, macos and others
        return graalCacheGav + ":tar.gz:" + graalPlatform + ':' + graalVersion;
    }

    /**
     * Computes the distribution download URL: either the sdkman broker URL
     * template, or a GitHub CE / Oracle download URL inferred from the
     * {@link #graalVersion} scheme ("x.y.z[.rN]", "N-graalce", "N-graal-oracle").
     */
    private String buildDownloadUrl(final String graalPlatform) {
        if (graalDownloadUrl.startsWith("https://api.sdkman.io/2/broker/download/java/")) {
            return graalDownloadUrl
                    .replace("${graalVersion}", graalVersion)
                    .replace("${platform}", graalPlatform);
        }
        // else assume github
        // "20.3.0.r8" style: trailing "rN" segment carries the java version
        final String[] versionSegments = graalVersion.split("\\.");
        final boolean versionIncludesJavaVersion = versionSegments[versionSegments.length - 1].startsWith("r");
        final String graalSimpleVersion = versionIncludesJavaVersion ?
                Stream.of(versionSegments).limit(versionSegments.length - 1).collect(joining(".")) :
                graalVersion;
        String graalJavaVersion = versionIncludesJavaVersion ?
                versionSegments[versionSegments.length - 1].substring(1) :
                System.getProperty("java.version", "1.8").startsWith("8") ? "8" : "11";
        final String githubPlatform = graalPlatform.toLowerCase(ROOT).contains("win")
                ? "windows-amd64"
                : (graalPlatform.toLowerCase(ROOT).contains("mac")
                ? "darwin-amd64"
                : "linux-amd64");
        // backward compat
        String baseUrl = "auto".equals(graalDownloadUrl) ?
                "https://github.com/graalvm/graalvm-ce-builds/releases/download/vm-${graalSimpleVersion}/graalvm-ce-java${graalJavaVersion}-${githubPlatform}-${graalSimpleVersion}.tar.gz" :
                graalDownloadUrl;
        // new naming schemes (graal >= 23 era): "<javaVersion>-graalce" / "<javaVersion>-graal-oracle"
        if (graalVersion.endsWith("-graalce")) {
            graalJavaVersion = graalVersion.substring(0, graalVersion.length() - "-graalce".length());
            baseUrl = "https://github.com/graalvm/graalvm-ce-builds/releases/download/jdk-" + graalJavaVersion + "/graalvm-community-jdk-" + graalJavaVersion + "_${githubPlatform2}_bin.tar.gz";
        } else if (graalVersion.endsWith("-graal-oracle")) {
            graalJavaVersion = graalVersion.substring(0, graalVersion.length() - "-graal-oracle".length());
            baseUrl = "https://download.oracle.com/graalvm/" + graalJavaVersion + "/latest/graalvm-jdk-" + graalJavaVersion + "_${githubPlatform2}_bin.tar.gz";
        }
        return baseUrl
                .replace("${graalSimpleVersion}", graalSimpleVersion)
                .replace("${graalJavaVersion}", graalJavaVersion)
                .replace("${githubPlatform}", githubPlatform)
                .replace("${githubPlatform2}", githubPlatform.replace("amd64", "x64"));
    }

    /**
     * Computes the sdkman platform identifier when {@link #graalPlatform} is
     * "auto", e.g. "linux64" (os.name + data model, lowercased, spaces removed).
     */
    private String buildPlatform() {
        if (!"auto".equals(graalPlatform)) {
            return graalPlatform;
        }
        return (System.getProperty("os.name", "linux") +
                ofNullable(System.getProperty("sun.arch.data.model"))
                        .orElseGet(() -> System.getProperty("os.arch", "64").replace("amd", "")))
                .toLowerCase(ROOT).replace(" ", "");
    }
}
| 6,125 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/test/java/org/apache/geronimo/arthur/impl | Create_ds/geronimo-arthur/arthur-impl/src/test/java/org/apache/geronimo/arthur/impl/nativeimage/CommandGeneratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage;
import lombok.Data;
import org.apache.geronimo.arthur.impl.nativeimage.generator.ConfigurationGenerator;
import org.apache.geronimo.arthur.impl.nativeimage.graal.CommandGenerator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Validates the {@code native-image} command line produced by
 * {@link CommandGenerator} from an {@link ArthurNativeImageConfiguration}.
 */
class CommandGeneratorTest {
    /**
     * Classes registered by an extension at generation time must be written
     * under {@code dynamic_classes} and that folder must land on -classpath.
     */
    @Test
    void dynamicClasses() throws IOException {
        final ArthurNativeImageConfiguration configuration = new ArthurNativeImageConfiguration();
        // disable every boolean flag so only the dynamic-classes classpath shows up
        configuration.setEnforceMaxRuntimeCompileMethods(false);
        configuration.setAddAllCharsets(false);
        configuration.setReportExceptionStackTraces(false);
        configuration.setPrintClassInitialization(false);
        configuration.setBuildStaticImage(false);
        configuration.setAllowIncompleteClasspath(false);
        configuration.setReportExceptionStackTraces(false);
        configuration.setReportUnsupportedElementsAtRuntime(false);
        configuration.setEnableAllSecurityServices(false);
        configuration.setNoServer(false);
        final Path workingDirectory = Paths.get("target/tests/CommandGeneratorTest/workdir");
        new ConfigurationGenerator(
                singletonList(context -> context.registerGeneratedClass("org.foo.Bar", new byte[]{1, 2, 3})),
                configuration, workingDirectory,
                (o, writer) -> {}, k -> emptyList(), k -> emptyList(), k -> emptyList(),
                (Function<Class<?>, Collection<Class<?>>>) k -> emptyList(), emptyMap()
        ).run();
        // the registered bytecode must be dumped as a .class file
        assertArrayEquals(new byte[]{1, 2, 3}, Files.readAllBytes(workingDirectory.resolve("dynamic_classes/org/foo/Bar.class")));
        assertEquals(asList(
                "native-image", "-classpath", workingDirectory.resolve("dynamic_classes").toString(),
                "-H:MaxRuntimeCompileMethods=1000", "--no-fallback", "main", "main.graal.exec"),
                new CommandGenerator().generate(configuration));
    }

    /** Table-driven check of the generated command for several configurations. */
    @ParameterizedTest
    @MethodSource("configurations")
    void generate(final Case useCase) {
        assertEquals(
                useCase.result,
                new CommandGenerator().generate(useCase.configuration));
    }

    // use cases: default config, multi-entry classpath, fully customized config
    static Stream<Case> configurations() {
        final ArthurNativeImageConfiguration emptyConfig = new ArthurNativeImageConfiguration();
        final ArthurNativeImageConfiguration classpathConfig = new ArthurNativeImageConfiguration();
        classpathConfig.setClasspath(asList("foo", "bar"));
        final ArthurNativeImageConfiguration filledConfig = new ArthurNativeImageConfiguration();
        filledConfig.setNativeImage("custom-image");
        filledConfig.setClasspath(singletonList("myclasspath"));
        filledConfig.setMaxRuntimeCompileMethods(5);
        filledConfig.setResourcesConfigurationFiles(singletonList("resources.json"));
        filledConfig.setEnforceMaxRuntimeCompileMethods(false);
        filledConfig.setAddAllCharsets(false);
        filledConfig.setReportExceptionStackTraces(false);
        filledConfig.setPrintClassInitialization(true);
        filledConfig.setFallbackMode(ArthurNativeImageConfiguration.FallbackMode.auto);
        filledConfig.setBuildStaticImage(false);
        filledConfig.setAllowIncompleteClasspath(false);
        filledConfig.setReportExceptionStackTraces(false);
        filledConfig.setReportUnsupportedElementsAtRuntime(false);
        filledConfig.setEnableAllSecurityServices(false);
        filledConfig.setNoServer(false);
        filledConfig.setMain("mysoft");
        filledConfig.setOutput("output.bin");
        return Stream.of(
                new Case(
                        emptyConfig,
                        asList(
                                "native-image", "-classpath",
                                "-H:MaxRuntimeCompileMethods=1000", "-H:+EnforceMaxRuntimeCompileMethods",
                                "-H:+AddAllCharsets", "-H:+ReportExceptionStackTraces",
                                "--no-fallback", "--static",
                                "--report-unsupported-elements-at-runtime",
                                "main", "main.graal.exec")),
                new Case(
                        classpathConfig,
                        asList(
                                // classpath entries are joined with the OS path separator
                                "native-image", "-classpath", "foo" + File.pathSeparator + "bar",
                                "-H:MaxRuntimeCompileMethods=1000", "-H:+EnforceMaxRuntimeCompileMethods",
                                "-H:+AddAllCharsets", "-H:+ReportExceptionStackTraces",
                                "--no-fallback", "--static",
                                "--report-unsupported-elements-at-runtime",
                                "main", "main.graal.exec")),
                new Case(
                        filledConfig,
                        asList(
                                "custom-image", "-classpath", "myclasspath", "-H:ResourceConfigurationFiles=resources.json",
                                "-H:MaxRuntimeCompileMethods=5", "-H:+PrintClassInitialization",
                                "--auto-fallback", "mysoft", "output.bin")));
    }

    /** Pairs a configuration with the command line it is expected to produce. */
    @Data
    private static class Case {
        private final ArthurNativeImageConfiguration configuration;
        private final List<String> result;
    }
}
| 6,126 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/test/java/org/apache/geronimo/arthur/impl/nativeimage | Create_ds/geronimo-arthur/arthur-impl/src/test/java/org/apache/geronimo/arthur/impl/nativeimage/generator/DefautContextTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.generator;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static java.util.stream.Collectors.toList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Predicate;
import org.apache.geronimo.arthur.impl.nativeimage.ArthurNativeImageConfiguration;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
/**
 * Unit tests for {@link DefautContext}: property-driven predicate creation,
 * include/exclude composition and class hierarchy traversal.
 */
class DefautContextTest {
    /**
     * A predicate built from the "includes" property must honor the requested
     * matching mode; an absent property yields an absent (accept-all) predicate.
     */
    @ParameterizedTest
    @CsvSource({
            "null,whatever,EQUALS,true",
            "null,whatever,STARTS_WITH,true",
            "null,whatever,MATCHES,true",
            "foo,foo,EQUALS,true",
            "foo,foo,STARTS_WITH,true",
            "foo,foo,MATCHES,true",
            "foo,foo-,STARTS_WITH,true",
            "foo.*,foo,MATCHES,true",
            "foo,foo-,EQUALS,false",
            "foo,bar,STARTS_WITH,false"
    })
    void predicate(final String prop, final String value, final ArthurExtension.PredicateType type, final boolean result) {
        final Optional<Predicate<String>> predicate = new DefautContext(
                new ArthurNativeImageConfiguration(), null, null, null, null,
                "null".equals(prop) ? emptyMap() : singletonMap("includes", prop))
                .createPredicate("includes", type);
        // no predicate configured == everything is accepted
        assertEquals(result, !predicate.isPresent() || predicate.orElseThrow(IllegalStateException::new).test(value));
    }

    /**
     * Combined includes/excludes predicate: excludes win over includes,
     * "null" in the CSV meaning the corresponding property is not set.
     */
    @ParameterizedTest
    @CsvSource({
            "null,null,whatever,EQUALS,true",
            "null,null,whatever,STARTS_WITH,true",
            "null,null,whatever,MATCHES,true",
            "foo,null,foo,EQUALS,true",
            "foo,null,foo,STARTS_WITH,true",
            "foo,null,foo,MATCHES,true",
            "foo,null,foo-,STARTS_WITH,true",
            "foo.*,null,foo,MATCHES,true",
            "foo,null,foo-,EQUALS,false",
            "foo,null,bar,STARTS_WITH,false",
            "foo,foo,bar,STARTS_WITH,false",
            "bar,whatever,bar,STARTS_WITH,true",
            "null,bar,bar,STARTS_WITH,false"
    })
    void includeExclude(final String includesProp, final String excludesProp, final String value,
                        final ArthurExtension.PredicateType type, final boolean result) {
        final Map<String, String> map = new HashMap<>();
        if (!"null".equals(includesProp)) {
            map.put("includes", includesProp);
        }
        if (!"null".equals(excludesProp)) {
            map.put("excludes", excludesProp);
        }
        final DefautContext context = new DefautContext(new ArthurNativeImageConfiguration(), null, null, null, null, map);
        assertEquals(result, context.createIncludesExcludes("", type).test(value));
    }

    /**
     * findHierarchy must return the class itself followed by its superclasses
     * and implemented interfaces (excluding Object).
     */
    @Test
    void findHierarchy() {
        final ArthurExtension.Context context = new DefautContext(new ArthurNativeImageConfiguration(), null, null, null, null, emptyMap());
        assertEquals(singletonList(StandaloneClass.class), context.findHierarchy(StandaloneClass.class).collect(toList()));
        assertEquals(asList(ChildClass.class, StandaloneClass.class), context.findHierarchy(ChildClass.class).collect(toList()));
        assertEquals(asList(ImplClass.class, StandaloneInterface.class), context.findHierarchy(ImplClass.class).collect(toList()));
        assertEquals(asList(ChildAndImplClass.class, StandaloneClass.class, StandaloneInterface.class), context.findHierarchy(ChildAndImplClass.class).collect(toList()));
    }

    // fixture hierarchy for findHierarchy()
    public static class StandaloneClass {
    }
    public interface StandaloneInterface {
    }
    public static class ChildClass extends StandaloneClass {
    }
    public static class ChildAndImplClass extends StandaloneClass implements StandaloneInterface {
    }
    public static class ImplClass implements StandaloneInterface {
    }
}
| 6,127 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/test/java/org/apache/geronimo/arthur/impl/nativeimage/generator | Create_ds/geronimo-arthur/arthur-impl/src/test/java/org/apache/geronimo/arthur/impl/nativeimage/generator/extension/AnnotationExtensionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.generator.extension;
import org.apache.geronimo.arthur.api.RegisterClass;
import org.apache.geronimo.arthur.api.RegisterClasses;
import org.apache.geronimo.arthur.api.RegisterField;
import org.apache.geronimo.arthur.api.RegisterMethod;
import org.apache.geronimo.arthur.api.RegisterResource;
import org.apache.geronimo.arthur.impl.nativeimage.ArthurNativeImageConfiguration;
import org.apache.geronimo.arthur.impl.nativeimage.generator.DefautContext;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.ResourceBundleModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
import org.apache.xbean.finder.AnnotationFinder;
import org.apache.xbean.finder.archive.ClassesArchive;
import org.apache.xbean.finder.filter.Filter;
import org.apache.xbean.finder.util.Files;
import org.junit.jupiter.api.Test;
import javax.json.bind.Jsonb;
import javax.json.bind.JsonbBuilder;
import javax.json.bind.JsonbConfig;
import javax.json.bind.config.PropertyOrderStrategy;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Collection;
import java.util.Iterator;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static java.util.Collections.singletonMap;
import static java.util.Comparator.comparing;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Tests for {@code AnnotationExtension}: scanning @RegisterClass/@RegisterField/
 * @RegisterMethod/@RegisterResource annotated fixtures (via xbean-finder) and
 * turning them into reflection/resource/bundle native-image configuration.
 */
class AnnotationExtensionTest {
    /**
     * End-to-end scan of all nested fixture classes; expected reflection models
     * are asserted through their canonical (lexicographically ordered) JSON form.
     */
    @Test
    void scan() throws Exception {
        final ClassesArchive archive = new ClassesArchive(AnnotationExtensionTest.class.getClasses());
        final AnnotationFinder finder = new AnnotationFinder(archive);
        final DefautContext context = new DefautContext(new ArthurNativeImageConfiguration(),
                finder::findAnnotatedClasses,
                finder::findAnnotatedMethods,
                finder::findAnnotatedFields,
                p -> Collection.class.cast(finder.findImplementations(p)),
                null);
        new AnnotationExtension().execute(context);
        try (final Jsonb jsonb = JsonbBuilder.create(
                new JsonbConfig().withPropertyOrderStrategy(PropertyOrderStrategy.LEXICOGRAPHICAL))) {
            assertTrue(context.isModified());
            {
                // sort by class name so the iteration order is deterministic
                final Iterator<ClassReflectionModel> reflections = context.getReflections().stream()
                        .sorted(comparing(ClassReflectionModel::getName))
                        .collect(toList())
                        .iterator();
                assertTrue(reflections.hasNext());
                assertEquals("{\"allDeclaredClasses\":true,\"allDeclaredConstructors\":true,\"allDeclaredFields\":true,\"allDeclaredMethods\":true,\"name\":\"" + All.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertTrue(reflections.hasNext());
                assertEquals("{\"allDeclaredFields\":true,\"name\":\"" + AllFields.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertTrue(reflections.hasNext());
                assertEquals("{\"name\":\"" + Child1.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertTrue(reflections.hasNext());
                assertEquals("{\"name\":\"" + Child2.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertTrue(reflections.hasNext());
                // parent registered transitively through its annotated children
                assertEquals("{\"name\":\"" + ChildRegistersIt.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertTrue(reflections.hasNext());
                assertEquals("{\"fields\":[{\"name\":\"name\"}],\"name\":\"" + ExplicitFields.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertTrue(reflections.hasNext());
                assertEquals("{\"methods\":[{\"name\":\"hasExplicitMethod\",\"parameterTypes\":[]}],\"name\":\"" + ExplicitMethod.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertTrue(reflections.hasNext());
                assertEquals("{\"name\":\"" + JustTheClass.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertTrue(reflections.hasNext());
                // out-of-project classes registered via @RegisterClasses.Entry
                assertEquals("{\"name\":\"" + Filter.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertTrue(reflections.hasNext());
                assertEquals("{\"name\":\"" + Files.class.getName() + "\"}", jsonb.toJson(reflections.next()));
                assertFalse(reflections.hasNext());
            }
            assertEquals("myres1,myres2", context.getResources().stream().map(ResourceModel::getPattern).sorted().collect(joining(",")));
            assertEquals("another,org.bundle1,org.foo.2", context.getBundles().stream().map(ResourceBundleModel::getName).sorted().collect(joining(",")));
        }
    }

    /**
     * A user-supplied marker annotation, configured through the
     * "extension.annotation.custom.annotations.class" property with attribute
     * overrides (here allDeclaredFields=true), must also trigger registration.
     */
    @Test
    void customAnnot() throws Exception {
        final ClassesArchive archive = new ClassesArchive(MarkedRegister.class);
        final AnnotationFinder finder = new AnnotationFinder(archive);
        final DefautContext context = new DefautContext(new ArthurNativeImageConfiguration(),
                finder::findAnnotatedClasses,
                finder::findAnnotatedMethods,
                finder::findAnnotatedFields,
                p -> Collection.class.cast(finder.findImplementations(p)),
                singletonMap("extension.annotation.custom.annotations.class", MyRegister.class.getName() + ":allDeclaredFields=true"));
        new AnnotationExtension().execute(context);
        try (final Jsonb jsonb = JsonbBuilder.create(
                new JsonbConfig().withPropertyOrderStrategy(PropertyOrderStrategy.LEXICOGRAPHICAL))) {
            assertTrue(context.isModified());
            final Iterator<ClassReflectionModel> reflections = context.getReflections().iterator();
            assertTrue(reflections.hasNext());
            assertEquals("{\"allDeclaredFields\":true,\"name\":\"org.apache.geronimo.arthur.impl.nativeimage.generator.extension.AnnotationExtensionTest$MarkedRegister\"}", jsonb.toJson(reflections.next()));
            assertFalse(reflections.hasNext());
        }
    }

    // custom marker annotation used by customAnnot()
    @Target(TYPE)
    @Retention(RUNTIME)
    public @interface MyRegister {
    }
    @MyRegister
    public static class MarkedRegister {
    }
    // registers third-party classes (xbean Filter/Files) through repeatable entries
    @RegisterClasses.Entry(clazz = Filter.class)
    @RegisterClasses.Entry(clazz = Files.class)
    public static class OutProjectClassRegistration {
    }
    // contributes resource patterns and bundles (no reflection registration)
    @RegisterResource(
            patterns = {"myres1", "myres2"},
            bundles = {"org.bundle1", "org.foo.2", "another"}
    )
    public static class App {
    }
    // not annotated: must not appear in the reflection models
    public static class IgnoredNormally {
        private String name;
        public String isIgnored() {
            return "test";
        }
    }
    // parent of annotated children: registered through the hierarchy walk
    public static class ChildRegistersIt {
    }
    @RegisterClass
    public static class Child1 extends ChildRegistersIt {
    }
    @RegisterClass
    public static class Child2 extends ChildRegistersIt {
    }
    // bare @RegisterClass: only the class name, no members
    @RegisterClass
    public static class JustTheClass {
        private String name;
        public String isJust() {
            return "test";
        }
    }
    // all=true expands to every allDeclared* flag
    @RegisterClass(all = true)
    public static class All {
        private String name;
        public All() {
            // no-op
        }
        public All(final String name) {
            this.name = name;
        }
        public String isAll() {
            return "test";
        }
    }
    @RegisterClass(allDeclaredFields = true)
    public static class AllFields {
        private String name;
        public String allF() {
            return "test";
        }
    }
    // only the explicitly marked field is listed
    @RegisterClass
    public static class ExplicitFields {
        @RegisterField
        private String name;
        public String hasExplicitField() {
            return "test";
        }
    }
    // only the explicitly marked method is listed
    @RegisterClass
    public static class ExplicitMethod {
        private String name;
        @RegisterMethod
        public String hasExplicitMethod() {
            return "test";
        }
    }
}
| 6,128 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/test/java/org/apache/geronimo/arthur/impl/nativeimage | Create_ds/geronimo-arthur/arthur-impl/src/test/java/org/apache/geronimo/arthur/impl/nativeimage/installer/SdkmanGraalVMInstallerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.installer;
import static java.util.stream.Collectors.joining;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.Function;
import com.sun.net.httpserver.HttpServer;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.apache.geronimo.arthur.impl.nativeimage.archive.Extractor;
import org.apache.geronimo.arthur.impl.nativeimage.installer.SdkmanGraalVMInstaller;
import org.apache.geronimo.arthur.impl.nativeimage.installer.SdkmanGraalVMInstallerConfiguration;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
class SdkmanGraalVMInstallerTest {
@TempDir
Path workdir;
/**
 * Simulates the sdkman broker with a local HTTP server (302 redirect to the
 * archive, as the real broker redirects to github), then checks that the
 * installer downloads, installs in the "repository" once, extracts the
 * distribution, and that a second install() call reuses the cached artifact.
 */
@ParameterizedTest
@CsvSource({
        "19.2.1-grl,linux64,tar.gz", // tar.gz
        "19.2.1-grl,cygwin,zip" // zip
})
// NOTE(review): the CSV rows have 3 columns but the method declares only 2
// parameters; the trailing archive-extension column is silently dropped by
// JUnit (pre-5.10 behavior) — confirm this is intended or bind it to a param.
void install(final String version, final String platform) throws IOException {
    final String handledApi = "/broker/download/java/" + version + "/" + platform;
    final HttpServer server = HttpServer.create(new InetSocketAddress(0), 0);
    server.createContext("/").setHandler(ex -> {
        final String path = ex.getRequestURI().getPath();
        if (path.endsWith(handledApi + "/redirected-as-sdkman-on-github")) {
            // final location: serve the fake archive bytes
            final byte[] bytes = createFakeArchive(version, platform).toByteArray();
            ex.getResponseHeaders().set("content-type", "application/octect-stream");
            ex.sendResponseHeaders(HttpURLConnection.HTTP_OK, bytes.length);
            ex.getResponseBody().write(bytes);
            ex.close();
        } else if (handledApi.equals(path)) {
            // broker endpoint: redirect like sdkman does towards github
            ex.getResponseHeaders().set("location", "http://localhost:" + server.getAddress().getPort() + path + "/redirected-as-sdkman-on-github");
            ex.sendResponseHeaders(HttpURLConnection.HTTP_MOVED_TEMP, 0);
            ex.close();
        } else {
            ex.sendResponseHeaders(HttpURLConnection.HTTP_INTERNAL_ERROR, 0);
            ex.close();
        }
    });
    try {
        server.start();
        final SpiedResolver resolver = new SpiedResolver();
        final SpiedInstaller artifactInstaller = new SpiedInstaller();
        final SdkmanGraalVMInstaller installer = newInstaller(
                workdir, platform, version, "http://localhost:" + server.getAddress().getPort(),
                artifactInstaller, resolver);
        final Path installed = installer.install();
        assertNotNull(installed);
        // the extracted distribution must expose a gu launcher with the archived content
        final Path gu = installed.resolve("bin/gu");
        assertTrue(Files.exists(gu));
        assertEquals("works", Files.lines(gu).collect(joining("\n")));
        assertEquals(1, resolver.counter.get());
        assertEquals(1, artifactInstaller.counter.get());
        // ensure we use the cache
        resolver.result = installed; // ideally this would be the archive but only the parent is checked anyway so not a big deal
        assertEquals(installed, installer.install());
        assertEquals(2, resolver.counter.get());
        assertEquals(1, artifactInstaller.counter.get()); // no second install: cache hit
    } finally {
        server.stop(0);
    }
}
private ByteArrayOutputStream createFakeArchive(final String version, final String platform) throws IOException {
final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
final String rootName = "graal-ce-" + version + "/";
final BiConsumer<ArchiveOutputStream, Function<String, ArchiveEntry>> prepareStructure = (archive, entryFactory) -> {
try {
// root
archive.putArchiveEntry(entryFactory.apply(rootName));
archive.closeArchiveEntry();
// bin folder
archive.putArchiveEntry(entryFactory.apply(rootName + "bin/"));
archive.closeArchiveEntry();
} catch (final IOException ioe) {
throw new IllegalStateException(ioe);
}
};
final byte[] guContent = "works".getBytes(StandardCharsets.UTF_8);
if ("cygwin".equals(platform)) { // zip
try (final ZipArchiveOutputStream archive = new ZipArchiveOutputStream(outputStream)) {
prepareStructure.accept(archive, ZipArchiveEntry::new);
final ZipArchiveEntry gu = new ZipArchiveEntry(rootName + "bin/gu");
gu.setSize(guContent.length);
archive.putArchiveEntry(gu);
archive.write(guContent);
archive.closeArchiveEntry();
}
} else { // tar.gz
try (final TarArchiveOutputStream archive = new TarArchiveOutputStream(new GzipCompressorOutputStream(outputStream))) {
prepareStructure.accept(archive, TarArchiveEntry::new);
final TarArchiveEntry gu = new TarArchiveEntry(rootName + "bin/gu");
gu.setSize(guContent.length);
archive.putArchiveEntry(gu);
archive.write(guContent);
archive.closeArchiveEntry();
}
}
return outputStream;
}
private SdkmanGraalVMInstaller newInstaller(final Path workdir, final String platform, final String version,
final String baseUrl, final SpiedInstaller installer, final SpiedResolver resolver) {
return new SdkmanGraalVMInstaller(
SdkmanGraalVMInstallerConfiguration.builder()
.offline(false)
.inheritIO(true)
.url(baseUrl + "/broker/download/java/" + version + "/" + platform)
.version(version)
.platform(platform)
.gav("org.apache.geronimo.arthur.cache:graal:" + ("cygwin".equals(platform) ? "zip" : "tar.gz") + ":" + platform + ':' + version)
.workdir(workdir)
.resolver(resolver::resolve)
.installer(installer::install)
.extractor(new Extractor()::unpack)
.build());
}
public static class SpiedInstaller {
private final AtomicInteger counter = new AtomicInteger();
public Path install(final String gav, final Path source) {
counter.incrementAndGet();
return source;
}
}
public static class SpiedResolver {
private final AtomicInteger counter = new AtomicInteger();
private Path result;
public Path resolve(final String gav) {
counter.incrementAndGet();
if (result != null) {
return result;
}
throw new IllegalStateException("missing in the test");
}
}
}
| 6,129 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/ArthurNativeImageConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.ArrayList;
import java.util.Collection;
import lombok.Data;
@Data
public class ArthurNativeImageConfiguration {
    // Every @GraalCommandPart field contributes one segment to the generated
    // native-image command line, sorted by its "order" attribute.

    // Path or name of the native-image launcher to execute.
    @GraalCommandPart(order = 0)
    private String nativeImage = "native-image";
    // Raw options appended as-is (passthrough) right after the binary name.
    @GraalCommandPart(order = 1, passthrough = true)
    private Collection<String> customOptions = new ArrayList<>();
    // Classpath entries; the ${File.pathSeparator} placeholder is presumably
    // resolved by the command generator — TODO confirm in CommandGenerator.
    @GraalCommandPart(order = 2, joiner = "${File.pathSeparator}", preParts = "-classpath")
    private Collection<String> classpath = new ArrayList<>();
    @GraalCommandPart(order = 3, joiner = ",", template = "-H:DynamicProxyConfigurationFiles=%s")
    private Collection<String> dynamicProxyConfigurationFiles = new ArrayList<>();
    @GraalCommandPart(order = 4, joiner = ",", template = "-H:ReflectionConfigurationFiles=%s")
    private Collection<String> reflectionConfigurationFiles = new ArrayList<>();
    @GraalCommandPart(order = 5, joiner = ",", template = "-H:ResourceConfigurationFiles=%s")
    private Collection<String> resourcesConfigurationFiles = new ArrayList<>();
    @GraalCommandPart(order = 6, joiner = ",", template = "-H:IncludeResourceBundles=%s")
    private Collection<String> includeResourceBundles = new ArrayList<>();
    @GraalCommandPart(order = 7, joiner = ",", template = "--initialize-at-run-time=%s")
    private Collection<String> initializeAtRunTime = new ArrayList<>();
    @GraalCommandPart(order = 8, joiner = ",", template = "--initialize-at-build-time=%s")
    private Collection<String> initializeAtBuildTime = new ArrayList<>();
    @GraalCommandPart(order = 9, template = "-H:MaxRuntimeCompileMethods=%d")
    private int maxRuntimeCompileMethods = 1000;
    @GraalCommandPart(order = 10, template = "-H:+EnforceMaxRuntimeCompileMethods")
    private boolean enforceMaxRuntimeCompileMethods = true;
    @GraalCommandPart(order = 11, template = "-H:+AddAllCharsets")
    private boolean addAllCharsets = true;
    @GraalCommandPart(order = 12, template = "-H:+ReportExceptionStackTraces")
    private boolean reportExceptionStackTraces = true;
    // note: order 13 is unused in this class.
    @GraalCommandPart(order = 14, template = "-H:+PrintClassInitialization")
    private boolean printClassInitialization = false;
    // Rendered as --no-fallback/--auto-fallback/--force-fallback via the template.
    @GraalCommandPart(order = 15, template = "--%s-fallback")
    private FallbackMode fallbackMode = FallbackMode.no;
    @GraalCommandPart(order = 16, template = "--static")
    private boolean buildStaticImage = true;
    // Boxed Boolean: null means "not set", resolved in complete() per Graal version.
    @GraalCommandPart(order = 17, template = "--allow-incomplete-classpath")
    private Boolean allowIncompleteClasspath; // recent version make it a default
    @GraalCommandPart(order = 18, template = "--report-unsupported-elements-at-runtime")
    private boolean reportUnsupportedElementsAtRuntime = true;
    // Boxed Boolean: null means "not set", resolved in complete() per Graal version.
    @GraalCommandPart(order = 19, template = "--enable-all-security-services")
    private Boolean enableAllSecurityServices = null;
    @GraalCommandPart(order = 20, template = "--no-server")
    private Boolean noServer = null;
    // Fully qualified name of the main class to compile.
    @GraalCommandPart(order = 21)
    private String main;
    // Output path of the produced native binary (last command segment).
    @GraalCommandPart(order = 22)
    private String output;
    // Whether the forked native-image process inherits this JVM's stdio.
    private boolean inheritIO = true;
    /**
     * @param graalVersion the graalvm version used to complete this configuration, in particular allowIncompleteClasspath and enableAllSecurityServices.
     */
    public void complete(final String graalVersion) {
        // 20.x/21.x still require these flags explicitly; later versions default them
        if (graalVersion != null && (graalVersion.startsWith("21.") || graalVersion.startsWith("20."))) {
            if (allowIncompleteClasspath == null) {
                allowIncompleteClasspath = true;
            }
            if (enableAllSecurityServices == null) {
                enableAllSecurityServices = true;
            }
        }
    }
    /** Mirrors native-image's --[no|auto|force]-fallback modes. */
    public enum FallbackMode {
        no, auto, force
    }
    @Target(FIELD)
    @Retention(RUNTIME)
    public @interface GraalCommandPart {
        /**
         * @return the order this command segment must be set to.
         */
        int order();
        /**
         * @return for collections, the separator to use to join the values.
         */
        String joiner() default "";
        /**
         * @return true if the content must be included in the command as it is provided.
         */
        boolean passthrough() default false;
        /**
         * @return command segments to prepend to the part.
         */
        String[] preParts() default {};
        /**
         * @return the template for the resulting value.
         */
        String template() default "%s";
    }
}
| 6,130 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/ArthurNativeImageExecutor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage;
import lombok.Builder;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.geronimo.arthur.impl.nativeimage.generator.ConfigurationGenerator;
import org.apache.geronimo.arthur.impl.nativeimage.graal.CommandGenerator;
import org.apache.geronimo.arthur.impl.nativeimage.process.ProcessExecutor;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import java.io.Writer;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.nio.file.Path;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.function.BiConsumer;
import java.util.function.Function;
import static java.util.Collections.emptyMap;
import static java.util.Optional.ofNullable;
@Slf4j
@RequiredArgsConstructor
public class ArthurNativeImageExecutor implements Runnable {
    private final ExecutorConfiguration configuration;

    /**
     * Lets the registered extensions contribute to the native-image
     * configuration, then builds and forks the native-image command line.
     */
    @Override
    public void run() {
        final ArthurNativeImageConfiguration graalConfiguration = configuration.configuration;

        // 1. generate reflection/resource/proxy configuration through the extensions
        new ConfigurationGenerator(
                loadExtensions(),
                graalConfiguration,
                configuration.workingDirectory,
                configuration.jsonSerializer,
                configuration.annotatedClassFinder,
                configuration.annotatedFieldFinder,
                configuration.annotatedMethodFinder,
                configuration.implementationFinder,
                configuration.extensionProperties)
                .run();

        // 2. render the command and execute the native-image process
        final List<String> command = new CommandGenerator().generate(graalConfiguration);
        final ProcessExecutor processExecutor = new ProcessExecutor(
                graalConfiguration.isInheritIO(), command, emptyMap());
        processExecutor.run();
    }

    /**
     * Discovers {@link ArthurExtension} implementations via {@link ServiceLoader},
     * using the thread context classloader when available.
     */
    protected Iterable<ArthurExtension> loadExtensions() {
        final ClassLoader loader = ofNullable(Thread.currentThread().getContextClassLoader())
                .orElseGet(ClassLoader::getSystemClassLoader);
        return ServiceLoader.load(ArthurExtension.class, loader);
    }

    /** Value holder aggregating everything the executor needs to run. */
    @Builder
    public static class ExecutorConfiguration {
        private final BiConsumer<Object, Writer> jsonSerializer;
        private final Path workingDirectory;
        private final Function<Class<? extends Annotation>, Collection<Class<?>>> annotatedClassFinder;
        private final Function<Class<? extends Annotation>, Collection<Method>> annotatedMethodFinder;
        private final Function<Class<? extends Annotation>, Collection<Field>> annotatedFieldFinder;
        private final Function<Class<?>, Collection<Class<?>>> implementationFinder;
        private final ArthurNativeImageConfiguration configuration;
        private final Map<String, String> extensionProperties;
    }
}
| 6,131 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/archive/Extractor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.archive;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.FileTime;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Predicate;
import java.util.stream.Stream;
import static java.nio.file.attribute.PosixFilePermission.GROUP_EXECUTE;
import static java.nio.file.attribute.PosixFilePermission.GROUP_READ;
import static java.nio.file.attribute.PosixFilePermission.OTHERS_EXECUTE;
import static java.nio.file.attribute.PosixFilePermission.OTHERS_READ;
import static java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE;
import static java.nio.file.attribute.PosixFilePermission.OWNER_READ;
import static java.nio.file.attribute.PosixFilePermission.OWNER_WRITE;
import static java.util.stream.Collectors.toSet;
@Slf4j
public class Extractor {
    /**
     * Unpacks a GraalVM archive (zip if the file name ends with .zip, tar.gz
     * otherwise) into {@code exploded}, stripping the single top-level folder.
     * Symbolic links are recreated when possible, retried once all regular
     * entries are written, and finally degraded to plain copies when the
     * filesystem refuses links.
     */
    public void unpack(final Path archive, final Path exploded) {
        try {
            final boolean isZip = archive.getFileName().toString().endsWith(".zip");
            final InputStream fileStream = new BufferedInputStream(Files.newInputStream(archive));
            final Predicate<ArchiveEntry> isLink = isZip ?
                    e -> ZipArchiveEntry.class.cast(e).isUnixSymlink() :
                    e -> TarArchiveEntry.class.cast(e).isSymbolicLink();
            final BiFunction<ArchiveInputStream, ArchiveEntry, String> linkPath = isZip ?
                    (a, e) -> { // todo: validate this with cygwin
                        try {
                            // zip stores the link target as the entry content; read it with an
                            // explicit charset — the no-arg InputStreamReader used the platform
                            // default, which is wrong on non-UTF-8 hosts (pre-Java 18)
                            return new BufferedReader(new InputStreamReader(a, java.nio.charset.StandardCharsets.UTF_8)).readLine();
                        } catch (final IOException ex) {
                            throw new IllegalStateException(ex);
                        }
                    } :
                    (a, e) -> TarArchiveEntry.class.cast(e).getLinkName();
            final Map<Path, Path> linksToCopy = new HashMap<>();
            final Map<Path, Path> linksToRetry = new HashMap<>();
            try (final ArchiveInputStream archiveInputStream = isZip ?
                    new ZipArchiveInputStream(fileStream) :
                    new TarArchiveInputStream(new GzipCompressorInputStream(fileStream))) {
                ArchiveEntry entry;
                while ((entry = archiveInputStream.getNextEntry()) != null) {
                    final String entryName = entry.getName();
                    if (!archiveInputStream.canReadEntryData(entry)) {
                        log.error("Can't read '" + entryName + "'");
                        continue;
                    }
                    final int sep = entryName.indexOf('/');
                    if (sep == entryName.length() || sep < 0) { // first level folder, skip
                        continue;
                    }
                    // drop the root folder ("graalvm-xxx/") from the target path
                    final Path target = exploded.resolve(entryName.substring(sep + 1));
                    if (entry.isDirectory()) {
                        ensureExists(target);
                    } else {
                        ensureExists(target.getParent());
                        if (isLink.test(entry)) {
                            final Path targetLinked = Paths.get(linkPath.apply(archiveInputStream, entry));
                            if (Files.exists(target.getParent().resolve(targetLinked))) {
                                try {
                                    Files.createSymbolicLink(target, targetLinked);
                                    setExecutableIfNeeded(target);
                                } catch (final IOException ioe) {
                                    // filesystem without symlink support: fall back to a copy
                                    linksToCopy.put(target, targetLinked);
                                }
                            } else {
                                // link target not extracted yet: retry after the full pass
                                linksToRetry.put(target, targetLinked);
                            }
                        } else {
                            Files.copy(archiveInputStream, target, StandardCopyOption.REPLACE_EXISTING);
                            Files.setLastModifiedTime(target, FileTime.fromMillis(entry.getLastModifiedDate().getTime()));
                            setExecutableIfNeeded(target);
                        }
                    }
                }
            }
            // second pass for links whose targets arrived later in the archive
            linksToRetry.forEach((target, targetLinked) -> {
                try {
                    Files.createSymbolicLink(target, targetLinked);
                    setExecutableIfNeeded(target);
                } catch (final IOException ioe) {
                    linksToCopy.put(target, targetLinked);
                }
            });
            // last resort: materialize unresolvable links as file copies
            linksToCopy.forEach((target, targetLinked) -> {
                final Path actualTarget = target.getParent().resolve(targetLinked);
                if (!Files.exists(actualTarget)) {
                    log.warn("No file '" + targetLinked + "' found, skipping link");
                    return;
                }
                try {
                    Files.copy(actualTarget, target, StandardCopyOption.REPLACE_EXISTING);
                    setExecutableIfNeeded(target);
                } catch (final IOException e) {
                    throw new IllegalStateException(e);
                }
            });
        } catch (final IOException e) {
            throw new IllegalStateException("Can't unpack graal archive", e);
        }
    }

    /**
     * Restores the executable bit lost by archive extraction for binaries in
     * bin/ and native libraries/launchers in lib/. Requires a POSIX filesystem.
     */
    private void setExecutableIfNeeded(final Path target) throws IOException {
        final String parentFilename = target.getParent().getFileName().toString();
        final String filename = target.getFileName().toString();
        if ((parentFilename.equals("bin") && !Files.isExecutable(target)) ||
                (parentFilename.equals("lib") && (
                        filename.contains("exec") || filename.startsWith("j") ||
                                (filename.startsWith("lib") && filename.contains(".so"))))) {
            Files.setPosixFilePermissions(
                    target,
                    Stream.of(
                            OWNER_READ, OWNER_EXECUTE, OWNER_WRITE,
                            GROUP_READ, GROUP_EXECUTE,
                            OTHERS_READ, OTHERS_EXECUTE)
                            .collect(toSet()));
        }
    }

    /** Creates the directory (and parents) when missing. */
    private void ensureExists(final Path target) throws IOException {
        if (!Files.exists(target)) {
            Files.createDirectories(target);
        }
    }
}
| 6,132 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/generator/DefautContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.generator;
import lombok.Data;
import org.apache.geronimo.arthur.impl.nativeimage.ArthurNativeImageConfiguration;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.DynamicProxyModel;
import org.apache.geronimo.arthur.spi.model.ResourceBundleModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;
import static java.util.Optional.of;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.toList;
@Data
public class DefautContext implements ArthurExtension.Context {
    private final ArthurNativeImageConfiguration configuration;
    private final Function<Class<? extends Annotation>, Collection<Class<?>>> annotatedClassesFinder;
    private final Function<Class<? extends Annotation>, Collection<Method>> methodFinder;
    private final Function<Class<? extends Annotation>, Collection<Field>> fieldFinder;
    private final Function<Class<?>, Collection<Class<?>>> implementationFinder;
    // Models accumulated by the extensions; serialized to json files later on.
    private final Collection<ClassReflectionModel> reflections = new HashSet<>();
    private final Collection<ResourceModel> resources = new HashSet<>();
    private final Collection<ResourceBundleModel> bundles = new HashSet<>();
    private final Collection<DynamicProxyModel> dynamicProxyModels = new HashSet<>();
    private final Map<String, String> extensionProperties;
    private final Map<String, byte[]> dynamicClasses = new HashMap<>();
    // Set to true whenever an extension registers something, reset per extension.
    private boolean modified;

    @Override
    public <T extends Annotation> Collection<Class<?>> findAnnotatedClasses(final Class<T> annotation) {
        return annotatedClassesFinder.apply(annotation);
    }

    @Override
    public <T extends Annotation> Collection<Method> findAnnotatedMethods(final Class<T> annotation) {
        return methodFinder.apply(annotation);
    }

    @Override
    public <T extends Annotation> Collection<Field> findAnnotatedFields(final Class<T> annotation) {
        return fieldFinder.apply(annotation);
    }

    @Override
    public <T> Collection<Class<? extends T>> findImplementations(final Class<T> parent) {
        return Collection.class.cast(implementationFinder.apply(parent));
    }

    @Override
    public void registerGeneratedClass(final String name, final byte[] bytecode) {
        dynamicClasses.put(name, bytecode);
    }

    /** Registers a reflection model, merging it into an existing one with the same class name. */
    @Override
    public void register(final ClassReflectionModel classReflectionModel) {
        reflections.stream()
                .filter(it -> Objects.equals(classReflectionModel.getName(), it.getName()))
                .findFirst()
                .map(it -> {
                    it.merge(classReflectionModel);
                    return it;
                })
                .orElseGet(() -> {
                    reflections.add(classReflectionModel);
                    return classReflectionModel;
                });
        modified = true;
    }

    @Override
    public void register(final ResourceModel resourceModel) {
        resources.add(resourceModel);
        modified = true;
    }

    /** Registers a bundle once per name; only flags the context modified on actual addition. */
    @Override
    public void register(final ResourceBundleModel resourceBundleModel) {
        bundles.stream()
                .filter(it -> Objects.equals(resourceBundleModel.getName(), it.getName()))
                .findFirst()
                .orElseGet(() -> {
                    modified = true;
                    bundles.add(resourceBundleModel);
                    return resourceBundleModel;
                });
    }

    @Override
    public void register(final DynamicProxyModel dynamicProxyModel) {
        if (dynamicProxyModels.add(dynamicProxyModel)) {
            modified = true;
        }
    }

    @Override
    public void enableAllSecurityServices() {
        configuration.setEnableAllSecurityServices(true);
    }

    @Override
    public void enableAllCharsets() {
        configuration.setAddAllCharsets(true);
    }

    @Override
    public void includeResourceBundle(final String name) {
        if (configuration.getIncludeResourceBundles() == null) {
            configuration.setIncludeResourceBundles(new ArrayList<>());
        }
        configuration.getIncludeResourceBundles().add(name);
    }

    /** Adds classes to --initialize-at-build-time, skipping ones already registered. */
    @Override
    public void initializeAtBuildTime(final String... classes) {
        if (configuration.getInitializeAtBuildTime() == null) {
            configuration.setInitializeAtBuildTime(new ArrayList<>());
        }
        configuration.getInitializeAtBuildTime().addAll(Stream.of(classes)
                .filter(it -> !configuration.getInitializeAtBuildTime().contains(it))
                .collect(toList()));
    }

    /**
     * Adds classes to --initialize-at-run-time, skipping duplicates and classes
     * already pinned at build time.
     */
    @Override
    public void initializeAtRunTime(final String... classes) {
        if (configuration.getInitializeAtRunTime() == null) {
            configuration.setInitializeAtRunTime(new ArrayList<>());
        }
        // bugfix: the previous code only checked the build-time list (copy-paste
        // from initializeAtBuildTime) so repeated calls accumulated duplicates in
        // the run-time list; now dedupe against the run-time list itself while
        // still excluding classes already registered at build time
        configuration.getInitializeAtRunTime().addAll(Stream.of(classes)
                .filter(it -> !configuration.getInitializeAtRunTime().contains(it))
                .filter(it -> configuration.getInitializeAtBuildTime() == null
                        || !configuration.getInitializeAtBuildTime().contains(it))
                .collect(toList()));
    }

    @Override
    public String getProperty(final String key) {
        return extensionProperties == null ? null : extensionProperties.get(key);
    }

    @Override
    public void setProperty(final String key, final String value) {
        extensionProperties.put(key, value);
    }

    @Override
    public void addNativeImageOption(final String option) {
        if (configuration.getCustomOptions() == null) {
            configuration.setCustomOptions(new ArrayList<>());
        }
        configuration.getCustomOptions().add(option);
    }

    @Override
    public Class<?> loadClass(final String name) {
        try {
            return Thread.currentThread().getContextClassLoader().loadClass(name);
        } catch (final ClassNotFoundException e) {
            throw new IllegalStateException(e);
        }
    }

    @Override
    public Stream<Class<?>> findHierarchy(final Class<?> clazz) {
        return doFindHierarchy(clazz, new HashSet<>());
    }

    /**
     * Builds an OR-combined predicate from a comma-separated property, or empty
     * when the property is not set.
     */
    @Override
    public Optional<Predicate<String>> createPredicate(final String property, final ArthurExtension.PredicateType type) {
        return ofNullable(getProperty(property))
                .flatMap(ex -> Stream.of(ex.split(","))
                        .map(String::trim)
                        .filter(it -> !it.isEmpty())
                        .map(it -> of((Predicate<String>) n -> type.test(it, n)))
                        .reduce(Optional.<Predicate<String>>empty(),
                                (opt, p) -> opt.map(e -> of(e.or(p.orElseThrow(IllegalArgumentException::new)))).orElse(p)));
    }

    /**
     * Combines "<base>includes" / "<base>excludes" properties: includes win,
     * then excludes reject, then the default depends on which lists are set.
     */
    @Override
    public Predicate<String> createIncludesExcludes(final String propertyBase, final ArthurExtension.PredicateType type) {
        final Optional<Predicate<String>> includes = createPredicate(propertyBase + "includes", type);
        final Optional<Predicate<String>> excludes = createPredicate(propertyBase + "excludes", type);
        return n -> {
            final boolean hasInclude = includes.isPresent();
            if (hasInclude) {
                if (includes.orElseThrow(IllegalStateException::new).test(n)) {
                    return true;
                }
            }
            final boolean hasExclude = excludes.isPresent();
            if (hasExclude) {
                if (excludes.orElseThrow(IllegalStateException::new).test(n)) {
                    return false;
                }
            }
            if (hasExclude && !hasInclude) {
                return true;
            }
            return !hasExclude && !hasInclude;
        };
    }

    @Override
    public <T> T unwrap(final Class<T> type) {
        if (ArthurNativeImageConfiguration.class == type) {
            return type.cast(configuration);
        }
        if (type.isInstance(this)) {
            return type.cast(this);
        }
        throw new IllegalArgumentException("Unsupported unwrapping: " + type);
    }

    public void addReflectionConfigFile(final String path) {
        if (configuration.getReflectionConfigurationFiles() == null) {
            configuration.setReflectionConfigurationFiles(new ArrayList<>());
        }
        configuration.getReflectionConfigurationFiles().add(path);
    }

    public void addResourcesConfigFile(final String path) {
        if (configuration.getResourcesConfigurationFiles() == null) {
            configuration.setResourcesConfigurationFiles(new ArrayList<>());
        }
        configuration.getResourcesConfigurationFiles().add(path);
    }

    public void addDynamicProxiesConfigFile(final String path) {
        if (configuration.getDynamicProxyConfigurationFiles() == null) {
            configuration.setDynamicProxyConfigurationFiles(new ArrayList<>());
        }
        configuration.getDynamicProxyConfigurationFiles().add(path);
    }

    // Walks class + superclass + interfaces recursively, guarding against cycles.
    private Stream<Class<?>> doFindHierarchy(final Class<?> clazz, final Set<Class<?>> visited) {
        visited.add(clazz);
        return Stream.concat(Stream.concat(
                Stream.of(clazz), Stream.of(clazz.getSuperclass())), Stream.of(clazz.getInterfaces()))
                .filter(it -> Object.class != it && it != null)
                .flatMap(it -> visited.contains(it) ? Stream.of(it) : doFindHierarchy(it, visited))
                .distinct();
    }
}
| 6,133 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/generator/ConfigurationGenerator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.generator;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.geronimo.arthur.impl.nativeimage.ArthurNativeImageConfiguration;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.DynamicProxyModel;
import org.apache.geronimo.arthur.spi.model.ResourcesModel;
import java.io.IOException;
import java.io.Writer;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static java.util.Collections.singletonList;
import static java.util.Comparator.comparing;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.toCollection;
import static java.util.stream.Collectors.toList;
/**
 * Executes all registered {@link ArthurExtension}s against a shared build context, then
 * serializes the collected metadata (reflection, resources/bundles, dynamic proxies,
 * generated classes) as JSON files under {@code workingDirectory} and records the produced
 * file paths back into the {@link ArthurNativeImageConfiguration} so native-image can use them.
 */
@Slf4j
@RequiredArgsConstructor
public class ConfigurationGenerator implements Runnable {
// Extensions to run, kept in the order the supplied Iterable provided them.
// NOTE(review): no sort by ArthurExtension#order() happens here - presumably the caller
// pre-sorts the iterable; confirm before relying on execution order.
private final List<ArthurExtension> extensions;
private final ArthurNativeImageConfiguration configuration;
// Target directory for the generated *.arthur.json files and dumped dynamic classes.
private final Path workingDirectory;
// Serializes a model object as JSON onto the given writer (abstracts the JSON library away).
private final BiConsumer<Object, Writer> jsonSerializer;
// Lookup callbacks backed by the build-time class scanner.
private final Function<Class<? extends Annotation>, Collection<Class<?>>> classFinder;
private final Function<Class<?>, Collection<Class<?>>> implementationFinder;
private final Function<Class<? extends Annotation>, Collection<Method>> methodFinder;
private final Function<Class<? extends Annotation>, Collection<Field>> fieldFinder;
// User-provided key/values exposed to extensions (see Context#getProperty); may be null.
private final Map<String, String> extensionProperties;
/**
 * Convenience constructor accepting any {@link Iterable} of extensions; the iterable is
 * snapshotted into a list at construction time.
 */
public ConfigurationGenerator(final Iterable<ArthurExtension> extensions, final ArthurNativeImageConfiguration configuration,
final Path workingDirectory, final BiConsumer<Object, Writer> jsonSerializer,
final Function<Class<? extends Annotation>, Collection<Class<?>>> classFinder,
final Function<Class<? extends Annotation>, Collection<Field>> fieldFinder,
final Function<Class<? extends Annotation>, Collection<Method>> methodFinder,
final Function<Class<?>, Collection<Class<?>>> implementationFinder,
final Map<String, String> extensionProperties) {
this.extensions = StreamSupport.stream(extensions.spliterator(), false).collect(toList());
this.configuration = configuration;
this.workingDirectory = workingDirectory;
this.jsonSerializer = jsonSerializer;
this.classFinder = classFinder;
this.fieldFinder = fieldFinder;
this.methodFinder = methodFinder;
this.implementationFinder = implementationFinder;
this.extensionProperties = extensionProperties;
}
/**
 * Runs every extension against a fresh context, then flushes the accumulated models to disk.
 */
@Override
public void run() {
// ensure to have a writable instance (see Context#setProperty(String, String))
final HashMap<String, String> properties = ofNullable(this.extensionProperties).map(HashMap::new).orElseGet(HashMap::new);
// expose the working directory so extensions can emit auxiliary files themselves
properties.put("workingDirectory", workingDirectory.toAbsolutePath().toString());
final DefautContext context = new DefautContext(configuration, classFinder, methodFinder, fieldFinder, implementationFinder, properties);
for (final ArthurExtension extension : extensions) {
log.debug("Executing {}", extension);
// reset the dirty flag so each extension's contribution is reported individually
context.setModified(false);
extension.execute(context);
if (context.isModified()) { // todo: loop while it modifies the context?
log.info("Extension {} updated build context", extension.getClass().getName());
}
}
try {
updateConfiguration(context);
} catch (final IOException e) {
throw new IllegalStateException(e);
}
}
/**
 * Writes the context's accumulated models to JSON files and registers their paths on the
 * context/configuration. Each section is skipped entirely when it collected nothing.
 */
private void updateConfiguration(final DefautContext context) throws IOException {
if (!context.getReflections().isEmpty()) {
ensureWorkingDirectoryExists();
final Path json = workingDirectory.resolve("reflection.arthur.json");
log.info("Creating reflection model '{}'", json);
try (final Writer writer = Files.newBufferedWriter(
json, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)) {
// group duplicate entries per class name, merge them, and sort for deterministic output
jsonSerializer.accept(
context.getReflections().stream()
.collect(groupingBy(ClassReflectionModel::getName))
.values().stream()
.map(this::merge)
.sorted(comparing(ClassReflectionModel::getName))
.collect(toList()),
writer);
}
context.addReflectionConfigFile(json.toAbsolutePath().toString());
}
if (!context.getResources().isEmpty() || !context.getBundles().isEmpty()) {
final ResourcesModel resourcesModel = new ResourcesModel();
if (!context.getResources().isEmpty()) {
resourcesModel.setResources(context.getResources());
}
if (!context.getBundles().isEmpty()) {
resourcesModel.setBundles(context.getBundles());
}
ensureWorkingDirectoryExists();
final Path json = workingDirectory.resolve("resources.arthur.json");
log.info("Creating resources model '{}'", json);
try (final Writer writer = Files.newBufferedWriter(
json, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)) {
jsonSerializer.accept(resourcesModel, writer);
}
context.addResourcesConfigFile(json.toAbsolutePath().toString());
}
if (!context.getDynamicProxyModels().isEmpty()) {
// LinkedHashSet drops duplicate interface lists while keeping registration order
final Set<Collection<String>> proxies = context.getDynamicProxyModels().stream()
.map(DynamicProxyModel::getClasses)
.collect(toCollection(LinkedHashSet::new));
ensureWorkingDirectoryExists();
final Path json = workingDirectory.resolve("dynamicproxies.arthur.json");
log.info("Creating dynamic proxy model '{}'", json);
try (final Writer writer = Files.newBufferedWriter(
json, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)) {
jsonSerializer.accept(proxies, writer);
}
context.addDynamicProxiesConfigFile(json.toAbsolutePath().toString());
}
if (!context.getDynamicClasses().isEmpty()) {
// dump extension-generated bytecode as .class files and append the folder to the classpath
final Path dynamicClassesDir = workingDirectory.resolve("dynamic_classes");
context.getDynamicClasses().forEach((name, content) -> {
final Path target = dynamicClassesDir.resolve(name.replace('.', '/') + ".class");
if (!Files.exists(target.getParent())) {
try {
Files.createDirectories(target.getParent());
} catch (final IOException e) {
throw new IllegalStateException(e);
}
}
try {
Files.write(target, content, StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
} catch (final IOException e) {
throw new IllegalStateException(e);
}
});
log.info("Dumped {} generated classes in '{}'", context.getDynamicClasses().size(), dynamicClassesDir);
if (configuration.getClasspath() == null) {
configuration.setClasspath(singletonList(dynamicClassesDir.toString()));
} else {
configuration.setClasspath(Stream.concat(
configuration.getClasspath().stream(),
Stream.of(dynamicClassesDir.toString())
).distinct().collect(toList()));
}
}
}
// Folds all models registered for the same class into one; note the first model of the list
// is mutated in place and returned.
private ClassReflectionModel merge(final List<ClassReflectionModel> classReflectionModels) {
final Iterator<ClassReflectionModel> modelIterator = classReflectionModels.iterator();
final ClassReflectionModel model = modelIterator.next();
while (modelIterator.hasNext()) {
model.merge(modelIterator.next());
}
return model;
}
// Lazily creates the working directory; called just before any file is written into it.
private void ensureWorkingDirectoryExists() throws IOException {
if (!Files.exists(workingDirectory)) {
Files.createDirectories(workingDirectory);
}
}
}
| 6,134 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/generator | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/generator/extension/AnnotationExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.generator.extension;
import org.apache.geronimo.arthur.api.RegisterClass;
import org.apache.geronimo.arthur.api.RegisterClasses;
import org.apache.geronimo.arthur.api.RegisterField;
import org.apache.geronimo.arthur.api.RegisterMethod;
import org.apache.geronimo.arthur.api.RegisterResource;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.ResourceBundleModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
/**
 * Registers reflection and resource metadata derived from the Arthur API annotations
 * ({@link RegisterClass}, {@link RegisterClasses}, {@link RegisterField}, {@link RegisterMethod},
 * {@link RegisterResource}) as well as from user supplied custom annotations configured through
 * the {@code extension.annotation.custom.annotations.class} and
 * {@code extension.annotation.custom.annotations.properties} context properties.
 *
 * <p>This extension is stateless: all instances are considered equal.</p>
 */
public class AnnotationExtension implements ArthurExtension {
    @Override
    public int order() {
        // run early so later extensions can refine what this one registered
        return 100;
    }

    @Override
    public void execute(final Context context) {
        // custom class-level annotations, comma separated;
        // syntax is: <annotation fqn>:<RegisterClass conf>, if only fqn then all=true is assumed
        ofNullable(context.getProperty("extension.annotation.custom.annotations.class"))
                .ifPresent(names -> Stream.of(names.split(","))
                        .map(String::trim)
                        .filter(it -> !it.isEmpty())
                        .forEach(config -> {
                            final int sep = config.indexOf(':');
                            final String fqn = sep < 0 ? config : config.substring(0, sep);
                            final RegisterClass registerClass = newRegisterClass(sep < 0 ? "all=true" : config.substring(sep + 1));
                            context.findAnnotatedClasses(context.loadClass(fqn.trim()).asSubclass(Annotation.class)).stream()
                                    .flatMap(clazz -> register(clazz, registerClass))
                                    .forEach(context::register);
                        }));
        // custom member-level annotations (same syntax): register the declaring class of
        // every annotated field or method
        ofNullable(context.getProperty("extension.annotation.custom.annotations.properties"))
                .ifPresent(names -> Stream.of(names.split(","))
                        .map(String::trim)
                        .filter(it -> !it.isEmpty())
                        .forEach(config -> {
                            final int sep = config.indexOf(':');
                            final String fqn = sep < 0 ? config : config.substring(0, sep);
                            final RegisterClass registerClass = newRegisterClass(sep < 0 ? "all=true" : config.substring(sep + 1));
                            final Class<? extends Annotation> annot = context.loadClass(fqn.trim()).asSubclass(Annotation.class);
                            Stream.concat(
                                    context.findAnnotatedMethods(annot).stream().map(Method::getDeclaringClass),
                                    context.findAnnotatedFields(annot).stream().map(Field::getDeclaringClass))
                                    .distinct()
                                    .flatMap(clazz -> register(clazz, registerClass))
                                    .forEach(context::register);
                        }));
        // classes explicitly annotated with @RegisterClass
        context.findAnnotatedClasses(RegisterClass.class).stream()
                .flatMap(clazz -> register(clazz, clazz.getAnnotation(RegisterClass.class)))
                .forEach(context::register);
        // @RegisterResource: patterns become resource models, bundles become bundle models
        final Collection<RegisterResource> resources = context.findAnnotatedClasses(RegisterResource.class).stream()
                .flatMap(clazz -> Stream.of(clazz.getAnnotation(RegisterResource.class)))
                .collect(toList());
        resources.stream()
                .flatMap(rr -> Stream.of(rr.patterns()))
                .map(pattern -> {
                    final ResourceModel resourceModel = new ResourceModel();
                    resourceModel.setPattern(pattern);
                    return resourceModel;
                })
                .distinct()
                .forEach(context::register);
        resources.stream()
                .flatMap(rr -> Stream.of(rr.bundles()))
                .map(name -> {
                    final ResourceBundleModel bundleModel = new ResourceBundleModel();
                    bundleModel.setName(name);
                    return bundleModel;
                })
                .distinct()
                .forEach(context::register);
        // @RegisterClasses.Entry used directly and through the @RegisterClasses container
        context.findAnnotatedClasses(RegisterClasses.Entry.class).stream()
                .map(it -> it.getAnnotation(RegisterClasses.Entry.class))
                .flatMap(entry -> doRegisterEntry(context, entry))
                .forEach(context::register);
        context.findAnnotatedClasses(RegisterClasses.class).stream()
                .flatMap(it -> Stream.of(it.getAnnotation(RegisterClasses.class).value()))
                .flatMap(entry -> doRegisterEntry(context, entry))
                .forEach(context::register);
    }

    /**
     * Registers the entry's class (by name when {@code className} is set, else by {@code clazz}).
     * Entries whose class cannot be loaded are silently skipped so optional dependencies work.
     */
    private Stream<ClassReflectionModel> doRegisterEntry(final Context context, final RegisterClasses.Entry entry) {
        try {
            return register(!entry.className().isEmpty() ? context.loadClass(entry.className()) : entry.clazz(), entry.registration());
        } catch (final IllegalStateException ise) {
            // class not loadable, ignore
            return Stream.empty();
        }
    }

    /**
     * Builds the reflection model(s) for {@code clazz} from the given configuration, recursing
     * into superclasses (which inherit {@code config} unless they carry their own annotation).
     *
     * @throws IllegalArgumentException when bulk flags are combined with per-member annotations.
     */
    private Stream<ClassReflectionModel> register(final Class<?> clazz, final RegisterClass config) {
        final ClassReflectionModel reflectionModel = new ClassReflectionModel();
        reflectionModel.setName(clazz.getName());
        if (config.all()) {
            reflectionModel.setAllDeclaredClasses(true);
            reflectionModel.setAllDeclaredConstructors(true);
            reflectionModel.setAllDeclaredMethods(true);
            reflectionModel.setAllDeclaredFields(true);
        } else {
            if (config.allDeclaredClasses()) {
                reflectionModel.setAllDeclaredClasses(true);
            }
            if (config.allDeclaredConstructors()) {
                reflectionModel.setAllDeclaredConstructors(true);
            }
            if (config.allDeclaredMethods()) {
                reflectionModel.setAllDeclaredMethods(true);
            }
            if (config.allPublicClasses()) {
                reflectionModel.setAllPublicClasses(true);
            }
            if (config.allPublicConstructors()) {
                reflectionModel.setAllPublicConstructors(true);
            }
            if (config.allPublicMethods()) {
                reflectionModel.setAllPublicMethods(true);
            }
            if (config.allDeclaredFields()) {
                reflectionModel.setAllDeclaredFields(true);
            }
            if (config.allPublicFields()) {
                reflectionModel.setAllPublicFields(true);
            }
        }
        final List<ClassReflectionModel.FieldReflectionModel> registeredFields = Stream.of(clazz.getDeclaredFields())
                .filter(field -> field.isAnnotationPresent(RegisterField.class))
                .map(field -> {
                    final ClassReflectionModel.FieldReflectionModel fieldReflectionModel = new ClassReflectionModel.FieldReflectionModel();
                    fieldReflectionModel.setName(field.getName());
                    if (field.getAnnotation(RegisterField.class).allowWrite()) {
                        fieldReflectionModel.setAllowWrite(true);
                    }
                    return fieldReflectionModel;
                })
                .collect(toList());
        if ((config.allDeclaredFields() || config.allPublicFields()) && !registeredFields.isEmpty()) {
            throw new IllegalArgumentException("Don't use allDeclaredFields and allPublicFields with @RegisterField: " + clazz);
        } else if (!registeredFields.isEmpty()) {
            reflectionModel.setFields(registeredFields);
        }
        final List<ClassReflectionModel.MethodReflectionModel> registeredMethods = Stream.of(clazz.getDeclaredMethods())
                .filter(method -> method.isAnnotationPresent(RegisterMethod.class))
                .map(method -> {
                    final ClassReflectionModel.MethodReflectionModel methodReflectionModel = new ClassReflectionModel.MethodReflectionModel();
                    methodReflectionModel.setName(method.getName());
                    methodReflectionModel.setParameterTypes(asList(method.getParameterTypes()));
                    return methodReflectionModel;
                })
                .collect(toList());
        if ((config.allDeclaredMethods() || config.allPublicMethods()) && !registeredMethods.isEmpty()) {
            // fixed message: it previously referenced the field flags instead of the method ones
            throw new IllegalArgumentException("Don't use allDeclaredMethods and allPublicMethods with @RegisterMethod: " + clazz);
        } else if (!registeredMethods.isEmpty()) {
            reflectionModel.setMethods(registeredMethods);
        }
        final Stream<ClassReflectionModel> model = Stream.of(reflectionModel);
        final Class<?> superclass = clazz.getSuperclass();
        if (superclass != null && superclass != Object.class && superclass != clazz) {
            return Stream.concat(
                    register(superclass, ofNullable(superclass.getAnnotation(RegisterClass.class)).orElse(config)),
                    model);
        }
        return model;
    }

    @Override
    public boolean equals(final Object obj) {
        // stateless extension: any two instances are interchangeable
        return obj != null && AnnotationExtension.class == obj.getClass();
    }

    @Override
    public int hashCode() {
        // required to honour the equals/hashCode contract: all instances are equal,
        // so they must share the same hash
        return AnnotationExtension.class.hashCode();
    }

    /**
     * Builds a synthetic {@link RegisterClass} from an inline configuration string such as
     * {@code all=true|allDeclaredMethods}; flags without an explicit value default to true.
     */
    private RegisterClass newRegisterClass(final String inlineConf) {
        final Map<String, Boolean> confs = Stream.of(inlineConf.split("\\|"))
                .map(it -> it.split("="))
                .collect(toMap(it -> it[0], it -> it.length < 2 || Boolean.parseBoolean(it[1])));
        return new RegisterClass() {
            @Override
            public Class<? extends Annotation> annotationType() {
                return RegisterClass.class;
            }

            @Override
            public boolean allDeclaredConstructors() {
                return confs.getOrDefault("allDeclaredConstructors", false);
            }

            @Override
            public boolean allPublicConstructors() {
                return confs.getOrDefault("allPublicConstructors", false);
            }

            @Override
            public boolean allDeclaredMethods() {
                return confs.getOrDefault("allDeclaredMethods", false);
            }

            @Override
            public boolean allPublicMethods() {
                return confs.getOrDefault("allPublicMethods", false);
            }

            @Override
            public boolean allDeclaredClasses() {
                return confs.getOrDefault("allDeclaredClasses", false);
            }

            @Override
            public boolean allPublicClasses() {
                return confs.getOrDefault("allPublicClasses", false);
            }

            @Override
            public boolean allDeclaredFields() {
                return confs.getOrDefault("allDeclaredFields", false);
            }

            @Override
            public boolean allPublicFields() {
                return confs.getOrDefault("allPublicFields", false);
            }

            @Override
            public boolean all() {
                return confs.getOrDefault("all", false);
            }
        };
    }
}
| 6,135 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/installer/SdkmanGraalVMInstaller.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.installer;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.geronimo.arthur.impl.nativeimage.process.ProcessExecutor;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonMap;
import static java.util.Comparator.comparing;
import static java.util.Objects.requireNonNull;
/**
 * Resolves, downloads, caches and extracts a GraalVM distribution (via the sdkman broker URL)
 * and can then install the {@code native-image} tool into it using {@code gu}.
 * Call {@link #install()} before {@link #installNativeImage()}.
 */
@Slf4j
@RequiredArgsConstructor
public class SdkmanGraalVMInstaller {
private final SdkmanGraalVMInstallerConfiguration configuration;
// GraalVM home resolved by install(); null until install() succeeded
private Path home;
/**
 * Makes a GraalVM distribution available locally and returns its home directory.
 * Resolution order: local resolver first, then (unless offline) download + install.
 */
public Path install() {
Path archive;
try {
archive = configuration.getResolver().apply(configuration.getGav());
} catch (final IllegalStateException ise) {
// resolver could not find it locally: fetch it, unless we are not allowed to go online
if (configuration.isOffline()) {
throw new IllegalStateException("GraalVM was not found and mvn runs offline");
}
try {
if (!Files.exists(configuration.getWorkdir())) {
Files.createDirectories(configuration.getWorkdir());
}
log.info("Downloading GraalVM {}, this can be long", configuration.getVersion());
final Path download = download();
log.info("Installing GraalVM {}", configuration.getGav());
archive = configuration.getInstaller().apply(configuration.getGav(), download);
} catch (final IOException e) {
throw new IllegalStateException("Can't cache graal locally", e);
}
}
if (!Files.exists(archive)) {
throw new IllegalStateException("No graal archive available: " + archive);
}
// extract once next to the archive and reuse the exploded folder on later runs
final Path exploded = archive.getParent().resolve("distribution_exploded");
if (!Files.isDirectory(exploded)) {
configuration.getExtractor().accept(archive, exploded);
}
home = exploded;
// if macos
if (Files.isDirectory(home.resolve("Contents/Home"))) {
home = home.resolve("Contents/Home");
}
return home;
}
/**
 * Ensures the native-image binary exists under {@code <home>/bin}, installing it with
 * {@code gu install native-image} when missing, and returns its path.
 *
 * @throws NullPointerException when {@link #install()} was not called first.
 */
public Path installNativeImage() {
final Path bin = requireNonNull(this.home, "No home, ensure to call install() before installNativeImage()")
.resolve("bin");
try {
if (!findNativeImage(bin).isPresent()) { // likely only UNIx, windows comes with native-image.cmd
log.info("Installing native-image");
new ProcessExecutor(
configuration.isInheritIO(), asList(findGu(bin).toAbsolutePath().toString(), "install", "native-image"),
singletonMap("GRAALVM_HOME", home.toString())).run();
} else {
log.debug("native-image is already available");
}
return findNativeImage(bin)
.orElseThrow(() -> new IllegalArgumentException("No native-image found in " + bin));
} catch (final IOException e) {
throw new IllegalStateException(e);
}
}
// Finds "native-image" or "native-image.<ext>" (windows) under bin; the shortest name wins
// so the plain launcher is preferred over wrappers.
private Optional<Path> findNativeImage(final Path bin) throws IOException {
try (final Stream<Path> list = Files.list(bin)) {
return list.filter(path -> {
final String name = path.getFileName().toString();
return name.equals("native-image") || name.startsWith("native-image.") /*win*/;
}).min(comparing(p -> p.getFileName().toString().length())); // support windows this way (.cmd);
}
}
// Locates the GraalVM updater ("gu" or "gu.<ext>") under bin, failing when absent.
private Path findGu(final Path bin) {
try (final Stream<Path> list = Files.list(bin)) {
return list
.filter(path -> path.getFileName().toString().startsWith("gu"))
.min(comparing(p -> p.getFileName().toString().length()))
.orElseThrow(() -> new IllegalStateException("No gu found in " + bin));
} catch (final IOException e) {
throw new IllegalStateException(e);
}
}
// Downloads the distribution into the workdir cache (named from version + platform + archive
// extension taken from the gav), returning the cached file if it already exists.
// On download failure the partial file is deleted before rethrowing.
private Path download() throws IOException {
final String fname = "graal-" + configuration.getVersion() + "-" + configuration.getPlatform() + "." + configuration.getGav().split(":")[2];
final Path cache = configuration.getWorkdir().resolve(fname);
if (Files.exists(cache)) {
return cache;
}
final URL source = new URL(configuration.getUrl());
final HttpURLConnection connection = HttpURLConnection.class.cast(source.openConnection());
connection.setUseCaches(false);
connection.setInstanceFollowRedirects(true); // sdkman broker redirects on github
try (final InputStream stream = new BufferedInputStream(connection.getInputStream())) {
Files.copy(stream, cache);
} catch (final IOException ioe) {
if (Files.exists(cache)) {
Files.delete(cache);
}
throw ioe;
}
return cache;
}
}
| 6,136 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/installer/SdkmanGraalVMInstallerConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.installer;
import java.nio.file.Path;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Function;
import lombok.Builder;
import lombok.Data;
/**
 * Settings for {@link SdkmanGraalVMInstaller}: where to download GraalVM from, how to cache it
 * locally and the pluggable hooks used to resolve/install/extract the distribution.
 * Built with lombok ({@code @Builder}); field order must not change since {@code @Data}
 * derives the constructor signature from it.
 */
@Data
@Builder
public class SdkmanGraalVMInstallerConfiguration {
// when true, never hit the network: fail if GraalVM is not already resolvable locally
private final boolean offline;
// forward the child process IO (gu install ...) to the current process
private final boolean inheritIO;
// download URL of the GraalVM archive (sdkman broker URL, redirects are followed)
private final String url;
// GraalVM version, used to name the locally cached archive
private final String version;
// platform classifier, also part of the cached archive name
private final String platform;
// maven-like groupId:artifactId:version coordinates; the third segment provides the
// archive extension for the cache file name
private final String gav;
// local cache/work directory for downloads
private final Path workdir;
// resolves the gav to a locally available archive, throwing IllegalStateException when absent
private final Function<String, Path> resolver;
// installs a downloaded file for the gav and returns the installed archive path
private final BiFunction<String, Path, Path> installer;
// extracts (archive, targetDirectory)
private final BiConsumer<Path, Path> extractor;
}
| 6,137 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/process/ProcessExecutor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.process;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
/**
 * Blocking helper which launches an external process, waits for it to finish and fails
 * unless the exit code is zero. Interruption forcibly kills a still-running child and
 * restores the thread's interrupt flag.
 */
@Slf4j
@RequiredArgsConstructor
public class ProcessExecutor implements Runnable {
    // forward the child stdin/stdout/stderr to the current process when true
    private final boolean inheritIO;
    private final List<String> command;
    // extra environment entries merged over the inherited environment; may be null
    private final Map<String, String> env;

    @Override
    public void run() {
        if (log.isDebugEnabled()) {
            log.debug("Launching {}", command);
        }
        final ProcessBuilder processBuilder = new ProcessBuilder(command);
        if (env != null) {
            processBuilder.environment().putAll(env);
        }
        if (inheritIO) {
            processBuilder.inheritIO();
        }
        Process started = null;
        try {
            started = processBuilder.start();
            final int status = started.waitFor();
            if (status != 0) {
                throw new IllegalArgumentException("Invalid exit code: " + status);
            }
        } catch (final InterruptedException e) {
            // don't leave an orphan child behind, then propagate the interruption
            if (started.isAlive()) {
                started.destroyForcibly();
            }
            Thread.currentThread().interrupt();
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }
}
| 6,138 |
0 | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage | Create_ds/geronimo-arthur/arthur-impl/src/main/java/org/apache/geronimo/arthur/impl/nativeimage/graal/CommandGenerator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.impl.nativeimage.graal;
import static java.util.Comparator.comparing;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.toList;
import java.io.File;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.List;
import java.util.stream.Stream;
import org.apache.geronimo.arthur.impl.nativeimage.ArthurNativeImageConfiguration;
/**
 * Turns an {@link ArthurNativeImageConfiguration} into the native-image command line by
 * reflecting over its fields annotated with {@code @GraalCommandPart}, ordered by the
 * annotation's {@code order()}.
 */
public class CommandGenerator {
    public List<String> generate(final ArthurNativeImageConfiguration configuration) {
        return Stream.of(ArthurNativeImageConfiguration.class.getDeclaredFields())
                .filter(it -> it.isAnnotationPresent(ArthurNativeImageConfiguration.GraalCommandPart.class))
                .sorted(comparing(it -> it.getAnnotation(ArthurNativeImageConfiguration.GraalCommandPart.class).order()))
                .flatMap(it -> toParts(it, configuration))
                .collect(toList());
    }

    // Renders one annotated field: its preParts followed by the formatted value.
    // NOTE(review): preParts are emitted even when the value renders to nothing - presumably
    // no annotated field combines preParts with an optional value; confirm against the
    // @GraalCommandPart usages in ArthurNativeImageConfiguration.
    private Stream<String> toParts(final Field field, final ArthurNativeImageConfiguration configuration) {
        final ArthurNativeImageConfiguration.GraalCommandPart part =
                field.getAnnotation(ArthurNativeImageConfiguration.GraalCommandPart.class);
        final Object value = ofNullable(get(field, configuration))
                .orElseGet(() -> getDefaultValueFor(field, configuration));
        return Stream.concat(Stream.of(part.preParts()), toCommand(field, part, field.getGenericType(), value));
    }

    // Fallback values for fields left unset on the configuration.
    private Object getDefaultValueFor(final Field field, final ArthurNativeImageConfiguration config) {
        if ("main".equals(field.getName())) {
            return "main";
        }
        if ("output".equals(field.getName())) {
            // '$' is illegal in an executable name, replace it for inner-class mains
            return ofNullable(config.getMain()).orElse("main").replace("$", "_") + ".graal.exec";
        }
        return null;
    }

    /**
     * Formats a single field value. Passthrough values are emitted verbatim, string
     * collections are joined with the configured joiner into the template, and other
     * ("primitive") values are formatted with the template - skipping nulls and
     * {@code Boolean.FALSE} (disabled flags).
     */
    @SuppressWarnings("unchecked")
    private Stream<? extends String> toCommand(final Field field,
                                               final ArthurNativeImageConfiguration.GraalCommandPart config,
                                               final Type genericType,
                                               final Object instance) {
        final boolean stringCollection = isCollectionOfString(genericType);
        if (config.passthrough()) {
            if (genericType == String.class) {
                return instance == null ? Stream.empty() : Stream.of(instance.toString());
            }
            if (stringCollection) {
                return instance == null || Collection.class.cast(instance).isEmpty()
                        ? Stream.<String>empty()
                        : ((Collection<String>) instance).stream();
            }
            throw new IllegalArgumentException("@GraalCommandPart(passthrough=true) not supported for " + field);
        }
        if (stringCollection) {
            if (instance == null || Collection.class.cast(instance).isEmpty()) {
                return Stream.empty();
            }
            final String joiner = config.joiner().replace("${File.pathSeparator}", File.pathSeparator);
            return Stream.of(String.format(config.template(), String.join(joiner, (Collection<String>) instance)));
        }
        // "primitive" value: skip absent values and disabled boolean flags
        if (instance == null || Boolean.FALSE.equals(instance)) {
            return Stream.empty();
        }
        return Stream.of(String.format(config.template(), instance));
    }

    // True when the generic type is some Collection<String>.
    private boolean isCollectionOfString(final Type genericType) {
        if (!ParameterizedType.class.isInstance(genericType)) {
            return false;
        }
        final ParameterizedType parameterized = ParameterizedType.class.cast(genericType);
        final Type raw = parameterized.getRawType();
        return Class.class.isInstance(raw)
                && Collection.class.isAssignableFrom(Class.class.cast(raw))
                && parameterized.getActualTypeArguments().length == 1
                && String.class == parameterized.getActualTypeArguments()[0];
    }

    // Reflective read of a (possibly private) configuration field.
    private Object get(final Field field, final ArthurNativeImageConfiguration configuration) {
        try {
            if (!field.isAccessible()) {
                field.setAccessible(true);
            }
            return field.get(configuration);
        } catch (final IllegalAccessException e) {
            throw new IllegalStateException(e);
        }
    }
}
| 6,139 |
0 | Create_ds/geronimo-arthur/knights/hsqldb-knight/src/main/java/org/apache/geronimo/arthur/knight | Create_ds/geronimo-arthur/knights/hsqldb-knight/src/main/java/org/apache/geronimo/arthur/knight/hsqldb/HsqldbExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.hsqldb;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.ResourceBundleModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
// don't forget to set -Dhsqldb.reconfig_logging=false in main or runtime
public class HsqldbExtension implements ArthurExtension {
    /**
     * Registers everything HSQLDB needs inside a native image: the driver and database
     * metadata classes for reflection, the localization/resource bundles, and the embedded
     * SQL scripts.
     */
    @Override
    public void execute(final Context context) {
        // reflection: the driver is loaded by name and the DatabaseInformation* classes are
        // instantiated reflectively by the engine
        context.register(new ClassReflectionModel("org.hsqldb.jdbcDriver", null, true, null, null, null, null, null, null, null, null));
        context.register(new ClassReflectionModel("org.hsqldb.dbinfo.DatabaseInformationFull", true, null, null, null, null, null, null, null, null, null));
        context.register(new ClassReflectionModel("org.hsqldb.dbinfo.DatabaseInformationMain", true, null, null, null, null, null, null, null, null, null));
        // resource bundles loaded at runtime (messages, server pages, tar messages, ...)
        for (final String bundle : new String[]{
                "org.hsqldb.resources.info-column-remarks",
                "org.hsqldb.resources.info-table-remarks",
                "org.hsqldb.resources.org_hsqldb_DatabaseClassLoader",
                "org.hsqldb.resources.org_hsqldb_server_Server_messages",
                "org.hsqldb.resources.sql-state-messages",
                "org.hsqldb.resources.webserver-content-types",
                "org.hsqldb.resources.webserver-pages",
                "org.hsqldb.lib.tar.rb"}) {
            context.register(new ResourceBundleModel(bundle));
        }
        // embedded SQL scripts shipped in the jar
        context.register(new ResourceModel("org\\/hsqldb\\/resources\\/.+.sql"));
    }
}
| 6,140 |
0 | Create_ds/geronimo-arthur/knights/openjpa-knight/src/main/java/org/apache/geronimo/arthur/knight | Create_ds/geronimo-arthur/knights/openjpa-knight/src/main/java/org/apache/geronimo/arthur/knight/openjpa/OpenJPAExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.openjpa;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.ResourceBundleModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
import javax.persistence.Embeddable;
import javax.persistence.Entity;
import javax.persistence.MappedSuperclass;
import java.lang.reflect.Field;
import java.util.Objects;
import java.util.stream.Stream;
/**
 * Arthur knight computing the GraalVM native-image metadata OpenJPA needs:
 * reflection entries for JPA entities and OpenJPA SPI implementations,
 * i18n localizer bundles, SQL dictionary resources, build-time collection
 * proxies, enum valueOf access, and optional DBCP2/commons-logging support
 * when those libraries are on the classpath.
 */
public class OpenJPAExtension implements ArthurExtension {
    @Override
    public void execute(final Context context) {
        registerJPAClasses(context);
        registerSPI(context);
        registerI18n(context);
        registerDictionaryResources(context);
        registerPrimitiveWrappers(context);
        registerBuildTimeProxies(context);
        registerEnumForValuesReflection(context);
        registerDBCP2IfPresent(context);
        // revision marker + schema localizer are read outside the generic i18n scan
        context.register(new ResourceModel("META-INF\\/org\\.apache\\.openjpa\\.revision\\.properties"));
        context.register(new ResourceBundleModel("org.apache.openjpa.jdbc.schema.localizer"));
    }

    /** Enums resolved by name/valueOf at runtime (5th flag = allDeclaredMethods-style enum access). */
    private void registerEnumForValuesReflection(final Context context) {
        context.register(new ClassReflectionModel("org.apache.openjpa.persistence.jdbc.FetchMode", null, null, null, true, null, null, null, null, null, null));
        context.register(new ClassReflectionModel("org.apache.openjpa.persistence.jdbc.FetchDirection", null, null, null, true, null, null, null, null, null, null));
        context.register(new ClassReflectionModel("org.apache.openjpa.persistence.jdbc.JoinSyntax", null, null, null, true, null, null, null, null, null, null));
    }

    /**
     * OpenJPA generates proxy subclasses for these JDK collection/date types at build
     * time, named {@code org.apache.openjpa.util.java$util$X$proxy}; they are looked
     * up reflectively so each needs a declared-constructors entry.
     */
    private void registerBuildTimeProxies(final Context context) {
        Stream.of(
                "java.sql.Date", "java.sql.Time", "java.sql.Timestamp",
                "java.util.ArrayList", "java.util.Date", "java.util.EnumMap",
                "java.util.GregorianCalendar", "java.util.HashMap", "java.util.HashSet",
                "java.util.Hashtable", "java.util.IdentityHashMap", "java.util.LinkedHashMap", "java.util.LinkedHashSet",
                "java.util.LinkedList", "java.util.PriorityQueue", "java.util.Properties", "java.util.TreeMap",
                "java.util.TreeSet", "java.util.Vector")
                .map(it -> "org.apache.openjpa.util." + it.replace('.', '$') + "$proxy")
                .map(it -> new ClassReflectionModel(it, null, true, null, null, null, null, null, null, null, null))
                .forEach(context::register);
    }

    // see Options class (stringToObject method)
    private void registerPrimitiveWrappers(final Context context) {
        Stream.of(Boolean.class, Byte.class, Character.class, Double.class, Float.class, Integer.class, Long.class, Short.class)
                .forEach(it -> context.register(new ClassReflectionModel(it.getName(), null, true, null, true /*valueOf*/, null, null, null, null, null, null)));
    }

    /** SQL keyword list and error-state mapping read by the JDBC dictionary layer. */
    private void registerDictionaryResources(final Context context) {
        context.register(new ResourceModel("org\\/apache\\/openjpa\\/jdbc\\/sql\\/sql-keywords\\.rsrc"));
        context.register(new ResourceModel("org\\/apache\\/openjpa\\/jdbc\\/sql\\/sql-error-state-codes\\.xml"));
    }

    // todo: dbcp2-knight and inherit from it automatically?
    /**
     * Best-effort DBCP2 support: only registers anything when commons-pool2 is
     * loadable. Failures are intentionally swallowed — absence of DBCP2 is normal.
     */
    private void registerDBCP2IfPresent(final Context context) {
        try {
            final Class<?> evictionPolicy = context.loadClass("org.apache.commons.pool2.impl.DefaultEvictionPolicy");
            final ClassReflectionModel model = new ClassReflectionModel();
            model.setName(evictionPolicy.getName());
            model.setAllPublicConstructors(true);
            context.register(model);
            context.register(new ResourceBundleModel("org.apache.commons.dbcp2.LocalStrings"));
            // dbcp2 depends on commons-logging in a hardcoded way (don't ask)
            addCommonsLogging(context).forEach(it -> {
                final ClassReflectionModel reflect = new ClassReflectionModel();
                reflect.setName(it.getName());
                reflect.setAllPublicConstructors(true);
                reflect.setAllDeclaredConstructors(true);
                context.register(reflect);
            });
        } catch (final NoClassDefFoundError | Exception e) {
            // no-op: DBCP2 not on the classpath, nothing to register
        }
    }

    // todo: replace that by a ResourceFinder?
    // does not move often so maybe overkill but pattern being stable
    // (org/apache/openjpa/*/localizer.properties) we could
    /**
     * Registers every OpenJPA localizer bundle actually present on the classpath
     * (the list is filtered through the TCCL so partial OpenJPA setups still work).
     */
    private void registerI18n(final Context context) {
        final ClassLoader loader = Thread.currentThread().getContextClassLoader();
        Stream.of(
                "org/apache/openjpa/abstractstore/localizer.properties",
                "org/apache/openjpa/ant/localizer.properties",
                "org/apache/openjpa/conf/localizer.properties",
                "org/apache/openjpa/datacache/localizer.properties",
                "org/apache/openjpa/ee/localizer.properties",
                "org/apache/openjpa/enhance/localizer.properties",
                "org/apache/openjpa/enhance/stats/localizer.properties",
                "org/apache/openjpa/event/kubernetes/localizer.properties",
                "org/apache/openjpa/event/localizer.properties",
                "org/apache/openjpa/instrumentation/jmx/localizer.properties",
                "org/apache/openjpa/jdbc/ant/localizer.properties",
                "org/apache/openjpa/jdbc/conf/localizer.properties",
                "org/apache/openjpa/jdbc/kernel/exps/localizer.properties",
                "org/apache/openjpa/jdbc/kernel/localizer.properties",
                "org/apache/openjpa/jdbc/meta/localizer.properties",
                "org/apache/openjpa/jdbc/meta/strats/localizer.properties",
                "org/apache/openjpa/jdbc/schema/localizer.properties",
                "org/apache/openjpa/jdbc/sql/localizer.properties",
                "org/apache/openjpa/kernel/exps/localizer.properties",
                "org/apache/openjpa/kernel/jpql/localizer.properties",
                "org/apache/openjpa/kernel/localizer.properties",
                "org/apache/openjpa/lib/ant/localizer.properties",
                "org/apache/openjpa/lib/conf/localizer.properties",
                "org/apache/openjpa/lib/graph/localizer.properties",
                "org/apache/openjpa/lib/jdbc/localizer.properties",
                "org/apache/openjpa/lib/log/localizer.properties",
                "org/apache/openjpa/lib/meta/localizer.properties",
                "org/apache/openjpa/lib/rop/localizer.properties",
                "org/apache/openjpa/lib/util/localizer.properties",
                "org/apache/openjpa/lib/xml/localizer.properties",
                "org/apache/openjpa/meta/localizer.properties",
                "org/apache/openjpa/persistence/criteria/localizer.properties",
                "org/apache/openjpa/persistence/jdbc/localizer.properties",
                "org/apache/openjpa/persistence/jest/localizer.properties",
                "org/apache/openjpa/persistence/localizer.properties",
                "org/apache/openjpa/persistence/meta/localizer.properties",
                "org/apache/openjpa/persistence/util/localizer.properties",
                "org/apache/openjpa/persistence/validation/localizer.properties",
                "org/apache/openjpa/slice/jdbc/localizer.properties",
                "org/apache/openjpa/slice/localizer.properties",
                "org/apache/openjpa/slice/transaction/localizer.properties",
                "org/apache/openjpa/util/localizer.properties"
        ).filter(it -> loader.getResource(it) != null).forEach(it -> {
            final ResourceBundleModel model = new ResourceBundleModel();
            // resource path -> bundle name: slashes to dots, drop the .properties suffix
            model.setName(it.replace('/', '.').substring(0, it.length() - ".properties".length()));
            context.register(model);
        });
    }

    /** Full reflective access (ctors + public methods) for every discovered SPI class. */
    private void registerSPI(final Context context) {
        spiClasses(context)
                .distinct()
                .map(Class::getName)
                .forEach(it -> {
                    final ClassReflectionModel model = new ClassReflectionModel();
                    model.setName(it);
                    model.setAllPublicConstructors(true);
                    model.setAllDeclaredConstructors(true);
                    model.setAllPublicMethods(true);
                    context.register(model);
                });
    }

    // todo: cut more of that by a review of the reflection.arthur.json
    // one option is to precompute it for a pure openjpa deployment and just add all user impl only
    /**
     * Expands a curated list of OpenJPA SPI entry points into the closure of
     * loadable classes, their hierarchies and discovered implementations,
     * filtered through {@link #needsReflection(String)}. Missing classes are
     * tolerated (mapped to null and dropped); a few duplicate literals in the
     * seed list are harmless thanks to distinct(). The commons-logging and
     * PostgresDictionary branches probe optional dependencies before keeping
     * the corresponding entries.
     */
    private Stream<? extends Class<?>> spiClasses(final Context context) {
        return Stream.of(
                "org.apache.openjpa.persistence.FetchPlan",
                "org.apache.openjpa.kernel.BrokerFactory",
                "org.apache.openjpa.lib.log.LogFactory",
                "org.apache.openjpa.lib.conf.Configurable",
                "org.apache.openjpa.util.CacheMap",
                "org.apache.openjpa.event.SingleJVMRemoteCommitProvider",
                "org.apache.openjpa.event.SingleJVMRemoteCommitProvider",
                "org.apache.openjpa.persistence.jdbc.PersistenceMappingFactory",
                "org.apache.openjpa.jdbc.kernel.TableJDBCSeq",
                "org.apache.openjpa.jdbc.kernel.ValueTableJDBCSeq",
                "org.apache.openjpa.jdbc.kernel.ClassTableJDBCSeq",
                "org.apache.openjpa.jdbc.kernel.NativeJDBCSeq",
                "org.apache.openjpa.kernel.TimeSeededSeq",
                "org.apache.openjpa.jdbc.kernel.TableJDBCSeq",
                "org.apache.openjpa.jdbc.kernel.ClassTableJDBCSeq",
                "org.apache.openjpa.kernel.TimeSeededSeq",
                "org.apache.openjpa.persistence.EntityManagerFactoryImpl",
                "org.apache.openjpa.jdbc.meta.MappingRepository",
                "org.apache.openjpa.meta.MetaDataRepository",
                "org.apache.openjpa.util.ClassResolverImpl",
                "org.apache.openjpa.datacache.DataCacheManagerImpl",
                "org.apache.openjpa.datacache.DefaultCacheDistributionPolicy",
                "org.apache.openjpa.datacache.ConcurrentDataCache",
                "org.apache.openjpa.datacache.ConcurrentQueryCache",
                "org.apache.openjpa.kernel.NoneLockManager",
                "org.apache.openjpa.kernel.VersionLockManager",
                "org.apache.openjpa.kernel.InverseManager",
                "org.apache.openjpa.kernel.InMemorySavepointManager",
                "org.apache.openjpa.event.LogOrphanedKeyAction",
                "org.apache.openjpa.event.ExceptionOrphanedKeyAction",
                "org.apache.openjpa.event.NoneOrphanedKeyAction",
                "org.apache.openjpa.ee.AutomaticManagedRuntime",
                "org.apache.openjpa.ee.JNDIManagedRuntime",
                "org.apache.openjpa.ee.InvocationManagedRuntime",
                "org.apache.openjpa.util.ProxyManagerImpl",
                "org.apache.openjpa.conf.DetachOptions$Loaded",
                "org.apache.openjpa.conf.DetachOptions$FetchGroups",
                "org.apache.openjpa.conf.DetachOptions$All",
                "org.apache.openjpa.conf.Compatibility",
                "org.apache.openjpa.conf.CallbackOptions",
                "org.apache.openjpa.event.LifecycleEventManager",
                "org.apache.openjpa.validation.ValidatingLifecycleEventManager",
                "org.apache.openjpa.instrumentation.InstrumentationManagerImpl",
                "org.apache.openjpa.audit.AuditLogger",
                "org.apache.openjpa.jdbc.sql.DBDictionary",
                "org.apache.openjpa.jdbc.kernel.AbstractUpdateManager",
                "org.apache.openjpa.jdbc.schema.DriverDataSource",
                "org.apache.openjpa.jdbc.schema.DynamicSchemaFactory",
                "org.apache.openjpa.jdbc.schema.LazySchemaFactory",
                "org.apache.openjpa.jdbc.schema.FileSchemaFactory",
                "org.apache.openjpa.jdbc.schema.TableSchemaFactory",
                "org.apache.openjpa.jdbc.sql.SQLFactoryImpl",
                "org.apache.openjpa.jdbc.meta.MappingDefaultsImpl",
                "org.apache.openjpa.jdbc.kernel.PreparedQueryCacheImpl",
                "org.apache.openjpa.jdbc.kernel.FinderCacheImpl",
                "org.apache.openjpa.jdbc.identifier.DBIdentifierUtilImpl",
                "org.apache.openjpa.lib.log.LogFactoryImpl",
                "org.apache.openjpa.lib.log.SLF4JLogFactory",
                "org.apache.openjpa.lib.log.NoneLogFactory",
                "org.apache.openjpa.slice.DistributionPolicy$Default",
                "org.apache.openjpa.slice.ReplicationPolicy$Default",
                "javax.persistence.spi.PersistenceProvider")
                .distinct()
                .map(it -> {
                    try {
                        return context.loadClass(it);
                    } catch (final IllegalStateException | NoClassDefFoundError ise) {
                        // class not available in this deployment -> dropped below
                        return null;
                    }
                })
                .filter(Objects::nonNull)
                .flatMap(context::findHierarchy)
                .distinct()
                .flatMap(it -> Stream.concat(Stream.of(it), context.findImplementations(it).stream()))
                .distinct()
                .filter(it -> needsReflection(it.getName()))
                .flatMap(it -> {
                    if (it.getName().startsWith("org.apache.commons.logging.impl.")) {
                        try {
                            // instantiation probe: keep commons-logging entries only when it actually works
                            context.loadClass("org.apache.commons.logging.impl.LogFactoryImpl").getConstructor().newInstance();
                            return Stream.concat(Stream.of(it), addCommonsLogging(context));
                        } catch (final NoClassDefFoundError | Exception e) {
                            return Stream.empty();
                        }
                    }
                    return Stream.of(it);
                })
                .filter(it -> {
                    if ("org.apache.openjpa.jdbc.sql.PostgresDictionary".equals(it.getName())) {
                        try {
                            // PostgresDictionary hard-references the pg driver; skip it when absent
                            context.loadClass("org.postgresql.largeobject.LargeObjectManager");
                            return true;
                        } catch (final NoClassDefFoundError | Exception e) {
                            return false;
                        }
                    }
                    return true;
                });
    }

    // todo: extract it in a commons-logging-knight and inherit from it automatically?
    /** Loadable commons-logging classes (missing ones are silently dropped). */
    private Stream<? extends Class<?>> addCommonsLogging(final Context context) {
        return Stream.of(
                "org.apache.commons.logging.LogFactory",
                "org.apache.commons.logging.impl.LogFactoryImpl",
                "org.apache.commons.logging.impl.Jdk14Logger")
                .map(n -> {
                    try {
                        return context.loadClass(n);
                    } catch (final NoClassDefFoundError | Exception ex) {
                        return null;
                    }
                })
                .filter(Objects::nonNull);
    }

    /**
     * Keeps only OpenJPA classes and excludes known-problematic ones:
     * OSGi/WebSphere runtimes, anonymous classes, logging decorator internals
     * and nested comparators.
     */
    private boolean needsReflection(final String name) {
        return name.startsWith("org.apache.openjpa.") &&
                !name.equals("org.apache.openjpa.ee.OSGiManagedRuntime") &&
                !name.startsWith("org.apache.openjpa.ee.WAS") &&
                !name.contains("$1") &&
                !name.startsWith("org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator$") &&
                !(name.endsWith("Comparator") && name.contains("$"));
    }

    /**
     * Registers all @Entity/@MappedSuperclass/@Embeddable classes (and their
     * hierarchies) with full reflective access, plus the field types recorded
     * by the OpenJPA enhancer.
     */
    private void registerJPAClasses(final Context context) {
        Stream.of(Entity.class, MappedSuperclass.class, Embeddable.class)
                .flatMap(it -> context.findAnnotatedClasses(it).stream())
                .flatMap(context::findHierarchy)
                .distinct()
                .flatMap(it -> {
                    final ClassReflectionModel entity = new ClassReflectionModel();
                    entity.setName(it.getName());
                    entity.setAllPublicConstructors(true);
                    entity.setAllPublicMethods(true);
                    entity.setAllDeclaredConstructors(true);
                    entity.setAllDeclaredFields(true);
                    entity.setAllDeclaredMethods(true);
                    return Stream.concat(Stream.of(entity), extractFieldTypesForReflection(it));
                })
                .distinct()
                .forEach(context::register);
    }

    /**
     * Reads the static {@code pcFieldTypes} array the OpenJPA enhancer weaves
     * into entities and turns each type into a (name-only) reflection entry.
     * Returns an empty stream when the entity is not enhanced or access fails.
     */
    private Stream<ClassReflectionModel> extractFieldTypesForReflection(final Class<?> entity) {
        try {
            final Field pcFieldTypes = entity.getDeclaredField("pcFieldTypes");
            pcFieldTypes.setAccessible(true);
            final Object types = pcFieldTypes.get(null);
            return Stream.of(Class[].class.cast(types))
                    .distinct() // todo: filter(it -> !it.isPrimitive())?
                    .map(type -> {
                        final ClassReflectionModel fieldType = new ClassReflectionModel();
                        fieldType.setName(type.getName());
                        return fieldType;
                    });
        } catch (final Exception e) {
            return Stream.empty();
        }
    }
}
| 6,141 |
0 | Create_ds/geronimo-arthur/knights/winegrower-knight/src/main/java/org/apache/geronimo/arthur/knight | Create_ds/geronimo-arthur/knights/winegrower-knight/src/main/java/org/apache/geronimo/arthur/knight/winegrower/WinegrowerExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.winegrower;
import static java.util.Collections.list;
import static java.util.Objects.requireNonNull;
import static java.util.Optional.ofNullable;
import static org.apache.xbean.finder.ClassLoaders.findUrls;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
import org.apache.winegrower.extension.build.common.MetadataBuilder;
import org.apache.xbean.finder.UrlSet;
import org.apache.xbean.finder.util.Files;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import lombok.extern.slf4j.Slf4j;
@Slf4j // todo: handle manifest.mf generation (replace bundle extension?)
/**
 * Arthur knight scanning the classpath for Apache Winegrower (OSGi-lite)
 * deployments: it registers bundle activators and SCR component classes for
 * reflection, includes OSGI-INF/WINEGROWER-INF descriptors as image resources
 * and optionally generates the winegrower index/manifests metadata, exposing
 * them to the native image through -H:WinegrowerIndex/-H:WinegrowerManifests.
 */
public class WinegrowerExtension implements ArthurExtension {
    // lazily initialized XML plumbing shared by all SCR descriptor parses
    private DocumentBuilderFactory documentBuilderFactory;
    private XPath xpath;

    @Override
    public void execute(final Context context) {
        final Predicate<String> userFiler = context.createIncludesExcludes("extension.winegrower.", PredicateType.STARTS_WITH);
        final boolean generateMetadata = Boolean.parseBoolean(ofNullable(context.getProperty("extension.winegrower.metadata.generate")).orElse("true"));
        final boolean metadataAutoFiltering = Boolean.parseBoolean(ofNullable(context.getProperty("extension.winegrower.metadata.autoFiltering")).orElse("true"));
        final String metadataDefaultJarName = context.getProperty("extension.winegrower.metadata.defaultJarName");
        // user filter + hardcoded exclusions of build/tooling jars that are never bundles
        final Predicate<String> filter = name -> userFiler.test(name) &&
                !name.startsWith("plexus-") &&
                !name.startsWith("animal-sniffer") &&
                !name.startsWith("winegrower-build") &&
                !name.startsWith("winegrower-core") &&
                !name.startsWith("winegrower-knight") &&
                !name.startsWith("commons-") &&
                !name.startsWith("xbean-") &&
                !name.startsWith("osgi.");
        final MetadataBuilder metadata = !generateMetadata ? null : new MetadataBuilder(metadataAutoFiltering);
        try {
            final Collection<Class<?>> classes = visitClasspath(context, filter, metadata, metadataDefaultJarName);
            registerClasses(context, classes);
            if (metadata != null && !metadata.getMetadata().isEmpty()) {
                final Path workDir = Paths.get(requireNonNull(context.getProperty("workingDirectory"), "workingDirectory property"));
                if (metadata.getMetadata().containsKey("index")) {
                    context.addNativeImageOption("-H:WinegrowerIndex=" +
                            dump(workDir, "winegrower.index.properties", metadata.getMetadata().get("index")));
                }
                if (metadata.getMetadata().containsKey("manifests")) {
                    context.addNativeImageOption("-H:WinegrowerManifests=" +
                            dump(workDir, "winegrower.manifests.properties", metadata.getMetadata().get("manifests")));
                }
            } else if (generateMetadata) {
                log.info("No winegrower metadata to dump");
            }
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Stores {@code index} as a properties file named {@code name} under
     * {@code workDir} (created if needed) and returns its absolute path.
     */
    private String dump(final Path workDir, final String name, final Properties index) {
        if (!java.nio.file.Files.isDirectory(workDir)) {
            try {
                java.nio.file.Files.createDirectories(workDir);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
        final Path out = workDir.resolve(name);
        try (final OutputStream outputStream = java.nio.file.Files.newOutputStream(out)) {
            index.store(outputStream, name);
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
        log.info("Created '{}'", out);
        return out.toAbsolutePath().toString();
    }

    /** Registers each collected class with ctor/method/field reflective access. */
    private void registerClasses(final Context context, final Collection<Class<?>> classes) {
        final Consumer<Class<?>> logger = log.isDebugEnabled() ? c -> log.debug("Registering '{}'", c) : c -> {};
        classes.stream().peek(logger).map(it -> {
            final ClassReflectionModel model = new ClassReflectionModel();
            model.setName(it.getName());
            model.setAllPublicConstructors(true);
            model.setAllPublicMethods(true);
            model.setAllDeclaredFields(true);
            return model;
        }).forEach(context::register);
    }

    /**
     * Walks all non-JVM classpath entries matching {@code filter}, browsing
     * directories and jars; returns the classes needing reflection registration.
     */
    private Collection<Class<?>> visitClasspath(final Context context, final Predicate<String> filter,
                                                final MetadataBuilder metadata, final String metadataDefaultJarName) throws IOException {
        final Collection<Class<?>> classes = new ArrayList<>();
        new UrlSet(findUrls(Thread.currentThread().getContextClassLoader()))
                .excludeJvm()
                .getUrls()
                .stream()
                .map(Files::toFile)
                .filter(file -> filter.test(file.getName()))
                .map(File::toPath)
                .forEach(jarOrDirectory -> {
                    if (java.nio.file.Files.isDirectory(jarOrDirectory)) {
                        browseDirectory(context, jarOrDirectory, classes, metadata, metadataDefaultJarName);
                    } else if (jarOrDirectory.getFileName().toString().endsWith(".jar")) {
                        browseJar(context, jarOrDirectory, classes, metadata);
                    } else {
                        // unsupported entry: skip without notifying the metadata builder
                        log.info("Ignoring '{}'", jarOrDirectory);
                        return;
                    }
                    if (metadata != null) {
                        metadata.afterJar();
                    }
                });
        return classes;
    }

    /**
     * Browses one jar: registers pre-built winegrower metadata (only when not
     * regenerating it), the Bundle-Activator, and OSGI-INF descriptors/resources.
     */
    private void browseJar(final Context context, final Path jarOrDirectory,
                           final Collection<Class<?>> classes, final MetadataBuilder metadata) {
        try (final JarFile jar = new JarFile(jarOrDirectory.toFile())) {
            if (metadata == null) { // winegrower metadata
                Stream.of("index", "manifests")
                        .map(it -> "WINEGROWER-INF/" + it + ".properties")
                        .map(jar::getEntry)
                        .filter(Objects::nonNull)
                        .forEach(it -> context.register(resource(it.getName())));
            }
            // activator if needed
            final ZipEntry manifestEntry = jar.getEntry("META-INF/MANIFEST.MF");
            if (manifestEntry != null) {
                try (final InputStream inputStream = jar.getInputStream(manifestEntry)) {
                    final Manifest manifest = handleManifest(classes, inputStream, context);
                    if (metadata != null) {
                        metadata.onJar(jarOrDirectory.getFileName().toString(), manifest);
                    }
                } catch (final IOException e) {
                    throw new IllegalStateException(e);
                }
            } else if (metadata != null) {
                metadata.onJar(jarOrDirectory.getFileName().toString(), null);
            }
            list(jar.entries()).stream()
                    .peek(e -> { // register metadata
                        if (metadata != null) {
                            metadata.onFile(e.getName());
                        }
                    })
                    // SCR and friends
                    .filter(e -> e.getName().startsWith("OSGI-INF/"))
                    .filter(e -> isOSGiInfDescriptor(e.getName()))
                    .peek(e -> {
                        if (e.getName().endsWith(".xml")) {
                            try (final InputStream stream = jar.getInputStream(e)) {
                                registerScrComponentsIfNeeded(jar + "#" + e.getName(), stream, classes, context);
                            } catch (final IOException ex) {
                                throw new IllegalStateException(ex);
                            }
                        }
                    })
                    .forEach(it -> context.register(resource("OSGI-INF/" + it.getName())));
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Directory counterpart of {@link #browseJar}: handles exploded modules
     * (typically target/classes) and, when generating metadata, visits the
     * folder with a heuristic jar name derived from the maven layout.
     */
    private void browseDirectory(final Context context, final Path directory,
                                 final Collection<Class<?>> classes, final MetadataBuilder metadata,
                                 final String metadataDefaultJarName) {
        // winegrower metadata
        if (metadata == null) {
            final Path winegrowerInf = directory.resolve("WINEGROWER-INF");
            if (java.nio.file.Files.isDirectory(winegrowerInf)) {
                Stream.of("index", "manifests")
                        .map(it -> it + ".properties")
                        .filter(it -> java.nio.file.Files.exists(winegrowerInf.resolve(it)))
                        .forEach(it -> context.register(resource("WINEGROWER-INF/" + it)));
            }
        }
        // activator if needed
        final Path manifest = directory.resolve("META-INF/MANIFEST.MF");
        if (java.nio.file.Files.exists(manifest)) {
            try (final InputStream inputStream = java.nio.file.Files.newInputStream(manifest)) {
                handleManifest(classes, inputStream, context);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
        // SCR and friends
        final Path osgiInf = directory.resolve("OSGI-INF");
        if (java.nio.file.Files.isDirectory(osgiInf)) {
            // Files.list returns a stream backed by an open directory handle:
            // it must be closed, hence the try-with-resources
            try (final Stream<Path> descriptors = java.nio.file.Files.list(osgiInf)) {
                descriptors
                        .filter(path -> isOSGiInfDescriptor(path.getFileName().toString()))
                        .peek(it -> {
                            if (it.getFileName().toString().endsWith(".xml")) {
                                try (final InputStream stream = java.nio.file.Files.newInputStream(it)) {
                                    registerScrComponentsIfNeeded(it.toString(), stream, classes, context);
                                } catch (final IOException e) {
                                    throw new IllegalStateException(e);
                                }
                            }
                        })
                        .forEach(it -> context.register(resource("OSGI-INF/" + it.getFileName())));
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
        // finally init the metadata if needed
        if (metadata != null) {
            metadata.visitFolder(ofNullable(metadataDefaultJarName).orElseGet(() -> { // bad heuristic to not get a NPE
                final AtomicReference<Path> current = new AtomicReference<>(directory);
                while (Stream.of("classes", "target").anyMatch(it -> it.equals(current.get().getFileName().toString())) &&
                        current.get().getParent() != null &&
                        (java.nio.file.Files.exists(current.get().getParent().resolve("pom.xml")) ||
                                current.get().getParent().getParent() != null &&
                                        java.nio.file.Files.exists(current.get().getParent().getParent().resolve("pom.xml")))) {
                    current.set(current.get().getParent());
                }
                return current.get().getFileName().toString();
            }), directory, new SimpleFileVisitor<Path>() {});
        }
    }

    /** SCR descriptors are xml or properties files under OSGI-INF. */
    private boolean isOSGiInfDescriptor(final String filename) {
        return filename.endsWith(".xml") || filename.endsWith(".properties");
    }

    /**
     * Parses a MANIFEST.MF, collecting its Bundle-Activator class (when
     * loadable) into {@code classes}; returns the parsed manifest.
     */
    private Manifest handleManifest(final Collection<Class<?>> classes, final InputStream inputStream, final Context context) throws IOException {
        final Manifest mf = new Manifest(inputStream);
        ofNullable(mf.getMainAttributes().getValue("Bundle-Activator")).ifPresent(activator -> {
            try {
                classes.add(context.loadClass(activator));
            } catch (final IllegalStateException e) {
                log.info("Missing class: {}", activator);
            }
        });
        return mf;
    }

    /**
     * Extracts the implementation class of an SCR component descriptor and adds
     * its hierarchy to {@code classes}. Unparseable XML and missing classes are
     * logged, not fatal; XML/XPath setup failures are.
     */
    private void registerScrComponentsIfNeeded(final String source, final InputStream stream, final Collection<Class<?>> classes,
                                               final Context context) {
        try {
            ensureXmlIsInitialized();
            final Document document = documentBuilderFactory.newDocumentBuilder().parse(stream);
            xpath.reset();
            final String implementation = xpath.evaluate("/*[local-name()='component']/implementation/@class", document.getDocumentElement());
            if (implementation != null && !implementation.isEmpty()) {
                context.findHierarchy(context.loadClass(implementation)).forEach(classes::add);
            }
        } catch (final XPathExpressionException | ParserConfigurationException | IOException e) {
            throw new IllegalStateException(e);
        } catch (final SAXException sax) {
            log.warn("Can't read xml {}", source);
        } catch (final IllegalStateException e) {
            log.info("Missing class: {}", e.getMessage());
        }
    }

    /** Lazily creates the (hardened, DTD-less) XML factory and the shared XPath. */
    private void ensureXmlIsInitialized() throws ParserConfigurationException {
        if (documentBuilderFactory == null) {
            documentBuilderFactory = DocumentBuilderFactory.newInstance();
            documentBuilderFactory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true);
            documentBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
            xpath = XPathFactory.newInstance().newXPath();
        }
    }

    /** Exact-match (quoted) resource inclusion. */
    private ResourceModel resource(final String name) {
        final ResourceModel resource = new ResourceModel();
        resource.setPattern(Pattern.quote(name));
        return resource;
    }
}
| 6,142 |
0 | Create_ds/geronimo-arthur/knights/winegrower-knight/src/main/java/org/apache/geronimo/arthur/knight/winegrower | Create_ds/geronimo-arthur/knights/winegrower-knight/src/main/java/org/apache/geronimo/arthur/knight/winegrower/feature/WinegrowerFeature.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.winegrower.feature;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Iterator;
import java.util.stream.Stream;
import com.oracle.svm.core.annotate.AutomaticFeature;
import com.oracle.svm.core.jdk.Resources;
import com.oracle.svm.core.option.HostedOptionKey;
import org.graalvm.compiler.options.Option;
import org.graalvm.compiler.options.OptionDescriptor;
import org.graalvm.compiler.options.OptionDescriptors;
import org.graalvm.compiler.options.OptionType;
import org.graalvm.nativeimage.hosted.Feature;
@AutomaticFeature
/**
 * native-image feature embedding the winegrower index/manifests properties
 * files (paths supplied via -H:WinegrowerIndex / -H:WinegrowerManifests)
 * as WINEGROWER-INF resources of the generated image.
 */
public class WinegrowerFeature implements Feature {
    public static final class Options {
        @Option(help = "Winegrower index properties.", type = OptionType.User)
        static final HostedOptionKey<String> WinegrowerIndex = new HostedOptionKey<>(null);

        @Option(help = "Winegrower manifests properties.", type = OptionType.User)
        static final HostedOptionKey<String> WinegrowerManifests = new HostedOptionKey<>(null);
    }

    // org.graalvm.compiler.options.processor is not on central
    public static class WinegrowerOptions implements OptionDescriptors {
        @Override
        public OptionDescriptor get(final String value) {
            if ("WinegrowerIndex".equals(value)) {
                return OptionDescriptor.create(
                        value, OptionType.User, String.class,
                        "Winegrower index properties.",
                        Options.class, value,
                        Options.WinegrowerIndex);
            }
            if ("WinegrowerManifests".equals(value)) {
                return OptionDescriptor.create(
                        value, OptionType.User, String.class,
                        "Winegrower manifests properties.",
                        Options.class, value,
                        Options.WinegrowerManifests);
            }
            return null;
        }

        @Override
        public Iterator<OptionDescriptor> iterator() {
            // expose exactly the two descriptors handled by get()
            return Stream.of("WinegrowerIndex", "WinegrowerManifests").map(this::get).iterator();
        }
    }

    @Override
    public void beforeAnalysis(final BeforeAnalysisAccess access) {
        if (Options.WinegrowerIndex.hasBeenSet()) {
            register(Options.WinegrowerIndex.getValue(), "WINEGROWER-INF/index.properties");
        }
        if (Options.WinegrowerManifests.hasBeenSet()) {
            register(Options.WinegrowerManifests.getValue(), "WINEGROWER-INF/manifests.properties");
        }
    }

    /** Reads the file at {@code path} and registers its bytes under {@code resource}. */
    private void register(final String path, final String resource) {
        try (final InputStream content = Files.newInputStream(Paths.get(path))) {
            Resources.registerResource(resource, content);
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }
}
| 6,143 |
0 | Create_ds/geronimo-arthur/knights/jsch-knight/src/test/java/org/apache/geronimo/arthur/knight | Create_ds/geronimo-arthur/knights/jsch-knight/src/test/java/org/apache/geronimo/arthur/knight/jsch/JschExtensionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.jsch;
import static java.util.Arrays.asList;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Collection;
import org.apache.geronimo.arthur.impl.nativeimage.ArthurNativeImageConfiguration;
import org.apache.geronimo.arthur.impl.nativeimage.generator.DefautContext;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
class JschExtensionTest {
    @Test
    void extension() {
        // Run the knight against a fresh configuration and inspect what it registered.
        final ArthurNativeImageConfiguration configuration = new ArthurNativeImageConfiguration();
        final DefautContext context = new DefautContext(configuration, null, null, null, null, null);
        new JschExtension().execute(context);

        // JSch relies on JCE/JGSS so all security services must be enabled.
        assertTrue(configuration.getEnableAllSecurityServices());

        // JSch itself (and its DEVNULL anonymous class) are initialized at build time.
        assertEquals(
                asList("com.jcraft.jsch.JSch", "com.jcraft.jsch.JSch$1"),
                configuration.getInitializeAtBuildTime().stream().sorted().collect(toList()));

        // Every algorithm implementation from JSch's config table is registered
        // with declared-constructor reflection access.
        final Collection<ClassReflectionModel> reflection = context.getReflections();
        assertEquals(48, reflection.size());
        reflection.stream().map(ClassReflectionModel::getAllDeclaredConstructors).forEach(Assertions::assertTrue);
        final String expected = String.join("\n",
                "com.jcraft.jsch.CipherNone",
                "com.jcraft.jsch.DHEC256",
                "com.jcraft.jsch.DHEC384",
                "com.jcraft.jsch.DHEC521",
                "com.jcraft.jsch.DHG1",
                "com.jcraft.jsch.DHG14",
                "com.jcraft.jsch.DHGEX",
                "com.jcraft.jsch.DHGEX256",
                "com.jcraft.jsch.UserAuthGSSAPIWithMIC",
                "com.jcraft.jsch.UserAuthKeyboardInteractive",
                "com.jcraft.jsch.UserAuthNone",
                "com.jcraft.jsch.UserAuthPassword",
                "com.jcraft.jsch.UserAuthPublicKey",
                "com.jcraft.jsch.jce.AES128CBC",
                "com.jcraft.jsch.jce.AES128CTR",
                "com.jcraft.jsch.jce.AES192CBC",
                "com.jcraft.jsch.jce.AES192CTR",
                "com.jcraft.jsch.jce.AES256CBC",
                "com.jcraft.jsch.jce.AES256CTR",
                "com.jcraft.jsch.jce.ARCFOUR",
                "com.jcraft.jsch.jce.ARCFOUR128",
                "com.jcraft.jsch.jce.ARCFOUR256",
                "com.jcraft.jsch.jce.BlowfishCBC",
                "com.jcraft.jsch.jce.DH",
                "com.jcraft.jsch.jce.ECDHN",
                "com.jcraft.jsch.jce.HMACMD5",
                "com.jcraft.jsch.jce.HMACMD596",
                "com.jcraft.jsch.jce.HMACSHA1",
                "com.jcraft.jsch.jce.HMACSHA196",
                "com.jcraft.jsch.jce.HMACSHA256",
                "com.jcraft.jsch.jce.KeyPairGenDSA",
                "com.jcraft.jsch.jce.KeyPairGenECDSA",
                "com.jcraft.jsch.jce.KeyPairGenRSA",
                "com.jcraft.jsch.jce.MD5",
                "com.jcraft.jsch.jce.PBKDF",
                "com.jcraft.jsch.jce.Random",
                "com.jcraft.jsch.jce.SHA1",
                "com.jcraft.jsch.jce.SHA256",
                "com.jcraft.jsch.jce.SHA384",
                "com.jcraft.jsch.jce.SHA512",
                "com.jcraft.jsch.jce.SignatureDSA",
                "com.jcraft.jsch.jce.SignatureECDSA256",
                "com.jcraft.jsch.jce.SignatureECDSA384",
                "com.jcraft.jsch.jce.SignatureECDSA521",
                "com.jcraft.jsch.jce.SignatureRSA",
                "com.jcraft.jsch.jce.TripleDESCBC",
                "com.jcraft.jsch.jce.TripleDESCTR",
                "com.jcraft.jsch.jgss.GSSContextKrb5");
        assertEquals(expected,
                reflection.stream().map(ClassReflectionModel::getName).sorted().collect(joining("\n")));
    }
}
| 6,144 |
0 | Create_ds/geronimo-arthur/knights/jsch-knight/src/main/java/org/apache/geronimo/arthur/knight | Create_ds/geronimo-arthur/knights/jsch-knight/src/main/java/org/apache/geronimo/arthur/knight/jsch/JschExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.jsch;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.Hashtable;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Stream;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class JschExtension implements ArthurExtension {
@Override
public void execute(final Context context) {
final ClassLoader loader = Thread.currentThread().getContextClassLoader();
final Function<String, Class<?>> load = name -> {
try {
return loader.loadClass(name);
} catch (final ClassNotFoundException e) {
return null;
}
};
try {
final Class<?> jsch = load.apply("com.jcraft.jsch.JSch");
if (jsch == null) {
log.info("JSch no available, skipping");
return;
}
final Field config = jsch.getDeclaredField("config");
if (!config.isAccessible()) {
config.setAccessible(true);
}
final Collection<String> values = Hashtable.class.cast(config.get(null)).values();
values.stream()
.filter(it -> !"com.jcraft.jsch.jcraft.Compression".equalsIgnoreCase(it)) // requires other libs
.map(load)
.filter(Objects::nonNull)
.distinct()
.map(clazz -> {
final ClassReflectionModel model = new ClassReflectionModel();
model.setName(clazz.getName());
model.setAllDeclaredConstructors(true);
return model;
})
.forEach(context::register);
context.enableAllSecurityServices();
context.initializeAtBuildTime(
Stream.of(jsch.getName(), jsch.getName() + "$1") // JSch.DEVNULL
.filter(it -> load.apply(it) != null) // tolerate jsch extraction of DEVNULL in a root class
.toArray(String[]::new));
} catch (final Exception ex) {
throw new IllegalStateException(ex);
}
}
}
| 6,145 |
0 | Create_ds/geronimo-arthur/knights/derby-knight/src/main/java/org/apache/geronimo/arthur/knight | Create_ds/geronimo-arthur/knights/derby-knight/src/main/java/org/apache/geronimo/arthur/knight/derby/DerbyExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.derby;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.ResourceBundleModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Driver;
import java.util.Objects;
import java.util.Properties;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import static java.util.Optional.ofNullable;
public class DerbyExtension implements ArthurExtension {
    /**
     * Prepares an embedded Apache Derby application for GraalVM native-image compilation:
     * embeds the Derby {@code *.properties} resources, registers the module SPI
     * implementations and JDBC drivers, registers the classes Derby instantiates
     * reflectively (format ids, catalog types, constant actions, ...) plus the
     * localized message bundles, and forces the JDBC drivers to be initialized
     * at image run time.
     *
     * @param context the extension context used to register native-image metadata.
     */
    @Override
    public void execute(final Context context) {
        registerProperties(context);
        tryToRegisterSPI(context);
        // JDBC drivers are discovered through ServiceLoader and instantiated reflectively.
        context.findImplementations(Driver.class)
                .forEach(it -> registerClass(context, it.getName()));
        registerI18n(context);
        // Classes Derby loads reflectively, collected from
        // extraDBMSclasses.properties + StoredFormatIds + ClassName + RegisteredFormatIds.
        // Unknown classes are skipped so the list can cover several Derby versions.
        Stream.of(
                "org.apache.derby.catalog.GetProcedureColumns",
                "org.apache.derby.catalog.Java5SystemProcedures",
                "org.apache.derby.catalog.SystemProcedures",
                "org.apache.derby.catalog.TriggerNewTransitionRows",
                "org.apache.derby.catalog.TriggerOldTransitionRows",
                "org.apache.derby.catalog.UUID",
                "org.apache.derby.catalog.types.AggregateAliasInfo",
                "org.apache.derby.catalog.types.BitTypeIdImpl",
                "org.apache.derby.catalog.types.BooleanTypeIdImpl",
                "org.apache.derby.catalog.types.CharTypeIdImpl",
                "org.apache.derby.catalog.types.ClassAliasInfo",
                "org.apache.derby.catalog.types.DecimalTypeIdImpl",
                "org.apache.derby.catalog.types.DefaultInfoImpl",
                "org.apache.derby.catalog.types.DoubleTypeIdImpl",
                "org.apache.derby.catalog.types.IndexDescriptorImpl",
                "org.apache.derby.catalog.types.IntTypeIdImpl",
                "org.apache.derby.catalog.types.LongintTypeIdImpl",
                "org.apache.derby.catalog.types.LongvarbitTypeIdImpl",
                "org.apache.derby.catalog.types.LongvarcharTypeIdImpl",
                "org.apache.derby.catalog.types.MethodAliasInfo",
                "org.apache.derby.catalog.types.NationalCharTypeIdImpl",
                "org.apache.derby.catalog.types.NationalLongVarcharTypeIdImpl",
                "org.apache.derby.catalog.types.NationalVarcharTypeIdImpl",
                "org.apache.derby.catalog.types.RealTypeIdImpl",
                "org.apache.derby.catalog.types.RefTypeIdImpl",
                "org.apache.derby.catalog.types.ReferencedColumnsDescriptorImpl",
                "org.apache.derby.catalog.types.RoutineAliasInfo",
                "org.apache.derby.catalog.types.RowMultiSetImpl",
                "org.apache.derby.catalog.types.SmallintTypeIdImpl",
                "org.apache.derby.catalog.types.StatisticsImpl",
                "org.apache.derby.catalog.types.SynonymAliasInfo",
                "org.apache.derby.catalog.types.TinyintTypeIdImpl",
                "org.apache.derby.catalog.types.TypeDescriptorImpl",
                "org.apache.derby.catalog.types.TypesImplInstanceGetter",
                "org.apache.derby.catalog.types.UDTAliasInfo",
                "org.apache.derby.catalog.types.UserAggregateAliasInfo",
                "org.apache.derby.catalog.types.UserDefinedTypeIdImpl",
                "org.apache.derby.catalog.types.VarbitTypeIdImpl",
                "org.apache.derby.catalog.types.VarcharTypeIdImpl",
                "org.apache.derby.catalog.types.WorkUnitAliasInfo",
                "org.apache.derby.diag.ContainedRoles",
                "org.apache.derby.diag.ErrorLogReader",
                "org.apache.derby.diag.ErrorMessages",
                "org.apache.derby.diag.LockTable",
                "org.apache.derby.diag.SpaceTable",
                "org.apache.derby.diag.StatementCache",
                "org.apache.derby.diag.StatementDuration",
                "org.apache.derby.diag.TransactionTable",
                "org.apache.derby.iapi.db.ConsistencyChecker",
                "org.apache.derby.iapi.db.Factory",
                "org.apache.derby.iapi.db.OptimizerTrace",
                "org.apache.derby.iapi.db.PropertyInfo",
                "org.apache.derby.iapi.error.StandardException",
                "org.apache.derby.iapi.error.ThreadDump",
                "org.apache.derby.iapi.services.cache.ClassSizeCatalogImpl",
                "org.apache.derby.iapi.services.context.Context",
                "org.apache.derby.iapi.services.diag.DiagnosticUtil",
                "org.apache.derby.iapi.services.diag.DiagnosticableGeneric",
                "org.apache.derby.iapi.services.io.FormatableArrayHolder",
                "org.apache.derby.iapi.services.io.FormatableBitSet",
                "org.apache.derby.iapi.services.io.FormatableHashtable",
                "org.apache.derby.iapi.services.io.FormatableIntHolder",
                "org.apache.derby.iapi.services.io.FormatableLongHolder",
                "org.apache.derby.iapi.services.io.FormatableProperties",
                "org.apache.derby.iapi.services.io.Storable",
                "org.apache.derby.iapi.services.io.StoredFormatIds",
                "org.apache.derby.iapi.services.loader.GeneratedByteCode",
                "org.apache.derby.iapi.services.loader.GeneratedClass",
                "org.apache.derby.iapi.services.loader.GeneratedMethod",
                "org.apache.derby.iapi.sql.Activation",
                "org.apache.derby.iapi.sql.LanguageFactory",
                "org.apache.derby.iapi.sql.ParameterValueSet",
                "org.apache.derby.iapi.sql.ParameterValueSetFactory",
                "org.apache.derby.iapi.sql.ResultSet",
                "org.apache.derby.iapi.sql.Row",
                "org.apache.derby.iapi.sql.conn.Authorizer",
                "org.apache.derby.iapi.sql.conn.LanguageConnectionContext",
                "org.apache.derby.iapi.sql.dictionary.DataDictionary",
                "org.apache.derby.iapi.sql.dictionary.IndexRowGenerator",
                "org.apache.derby.iapi.sql.dictionary.TriggerDescriptor",
                "org.apache.derby.iapi.sql.execute.ConstantAction",
                "org.apache.derby.iapi.sql.execute.CursorResultSet",
                "org.apache.derby.iapi.sql.execute.ExecIndexRow",
                "org.apache.derby.iapi.sql.execute.ExecPreparedStatement",
                "org.apache.derby.iapi.sql.execute.ExecRow",
                "org.apache.derby.iapi.sql.execute.ExecRowBuilder",
                "org.apache.derby.iapi.sql.execute.ExecutionFactory",
                "org.apache.derby.iapi.sql.execute.NoPutResultSet",
                "org.apache.derby.iapi.sql.execute.ResultSetFactory",
                "org.apache.derby.iapi.sql.execute.RowFactory",
                "org.apache.derby.iapi.sql.execute.RunTimeStatistics",
                "org.apache.derby.iapi.store.access.Qualifier",
                "org.apache.derby.iapi.types.BitDataValue",
                "org.apache.derby.iapi.types.BitTypeId",
                "org.apache.derby.iapi.types.BooleanDataValue",
                "org.apache.derby.iapi.types.BooleanTypeId",
                "org.apache.derby.iapi.types.CharTypeId",
                "org.apache.derby.iapi.types.ConcatableDataValue",
                "org.apache.derby.iapi.types.DTSClassInfo",
                "org.apache.derby.iapi.types.DataTypeDescriptor",
                "org.apache.derby.iapi.types.DataValueDescriptor",
                "org.apache.derby.iapi.types.DataValueFactory",
                "org.apache.derby.iapi.types.DateTimeDataValue",
                "org.apache.derby.iapi.types.DateTypeId",
                "org.apache.derby.iapi.types.DecimalTypeId",
                "org.apache.derby.iapi.types.DoubleTypeId",
                "org.apache.derby.iapi.types.IntTypeId",
                "org.apache.derby.iapi.types.JSQLType",
                "org.apache.derby.iapi.types.LongintTypeId",
                "org.apache.derby.iapi.types.LongvarbitTypeId",
                "org.apache.derby.iapi.types.LongvarcharTypeId",
                "org.apache.derby.iapi.types.NationalCharTypeId",
                "org.apache.derby.iapi.types.NationalLongvarcharTypeId",
                "org.apache.derby.iapi.types.NationalVarcharTypeId",
                "org.apache.derby.iapi.types.NumberDataValue",
                "org.apache.derby.iapi.types.RealTypeId",
                "org.apache.derby.iapi.types.RefDataValue",
                "org.apache.derby.iapi.types.RefTypeId",
                "org.apache.derby.iapi.types.RowLocation",
                "org.apache.derby.iapi.types.SQLLongint",
                "org.apache.derby.iapi.types.SmallintTypeId",
                "org.apache.derby.iapi.types.StringDataValue",
                "org.apache.derby.iapi.types.TimeTypeId",
                "org.apache.derby.iapi.types.TimestampTypeId",
                "org.apache.derby.iapi.types.TinyintTypeId",
                "org.apache.derby.iapi.types.UserDataValue",
                "org.apache.derby.iapi.types.UserDefinedTypeId",
                "org.apache.derby.iapi.types.UserDefinedTypeIdV2",
                "org.apache.derby.iapi.types.UserDefinedTypeIdV3",
                "org.apache.derby.iapi.types.UserType",
                "org.apache.derby.iapi.types.VarbitTypeId",
                "org.apache.derby.iapi.types.VarcharTypeId",
                "org.apache.derby.iapi.types.VariableSizeDataValue",
                // was "org.apache.derby.iapi.types.XML (implementation of" /
                // "org.apache.derby.iapi.types.XMLDataValue)" — prose copied from the
                // Derby docs, never loadable; fixed to the actual class names.
                "org.apache.derby.iapi.types.XML",
                "org.apache.derby.iapi.types.XMLDataValue",
                "org.apache.derby.impl.io.CPFile",
                "org.apache.derby.impl.io.DirStorageFactory",
                "org.apache.derby.impl.io.InputStreamFile",
                "org.apache.derby.impl.io.JarDBFile",
                "org.apache.derby.impl.io.URLFile",
                "org.apache.derby.impl.io.VFMemoryStorageFactory",
                "org.apache.derby.impl.jdbc.LOBStoredProcedure",
                "org.apache.derby.impl.jdbc.SQLExceptionFactory",
                "org.apache.derby.impl.jdbc.authentication.JNDIAuthenticationSchemeBase",
                "org.apache.derby.impl.jdbc.authentication.JNDIAuthenticationService",
                "org.apache.derby.impl.jdbc.authentication.LDAPAuthenticationSchemeImpl",
                "org.apache.derby.impl.services.monitor.BaseMonitor",
                "org.apache.derby.impl.services.monitor.FileMonitor",
                "org.apache.derby.impl.services.stream.RollingFileStream",
                "org.apache.derby.impl.services.stream.RollingFileStreamProvider",
                "org.apache.derby.impl.services.uuid.BasicUUID",
                "org.apache.derby.impl.services.uuid.BasicUUIDGetter",
                "org.apache.derby.impl.sql.CursorInfo",
                "org.apache.derby.impl.sql.CursorTableReference",
                "org.apache.derby.impl.sql.GenericColumnDescriptor",
                "org.apache.derby.impl.sql.GenericResultDescription",
                "org.apache.derby.impl.sql.GenericStorablePreparedStatement",
                "org.apache.derby.impl.sql.GenericTypeDescriptor",
                "org.apache.derby.impl.sql.GenericTypeId",
                "org.apache.derby.impl.sql.catalog.AliasDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.ColumnDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.ConglomerateDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.ConstraintDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.CoreDDFinderClassInfo",
                "org.apache.derby.impl.sql.catalog.DD_AristotleVersion",
                "org.apache.derby.impl.sql.catalog.DD_BuffyVersion",
                "org.apache.derby.impl.sql.catalog.DD_DB2J72",
                "org.apache.derby.impl.sql.catalog.DD_IvanovaVersion",
                "org.apache.derby.impl.sql.catalog.DD_MulanVersion",
                "org.apache.derby.impl.sql.catalog.DD_PlatoVersion",
                "org.apache.derby.impl.sql.catalog.DD_SocratesVersion",
                "org.apache.derby.impl.sql.catalog.DD_Version",
                "org.apache.derby.impl.sql.catalog.DD_XenaVersion",
                "org.apache.derby.impl.sql.catalog.DataDictionaryDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.DefaultDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.FileInfoFinder",
                "org.apache.derby.impl.sql.catalog.IndexRowGeneratorImpl",
                "org.apache.derby.impl.sql.catalog.OIDImpl",
                "org.apache.derby.impl.sql.catalog.ParameterDescriptorImpl",
                "org.apache.derby.impl.sql.catalog.RowListImpl",
                "org.apache.derby.impl.sql.catalog.SPSDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.SchemaDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.SequenceDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.TableDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.TriggerDescriptor",
                "org.apache.derby.impl.sql.catalog.TriggerDescriptorFinder",
                "org.apache.derby.impl.sql.catalog.ViewDescriptorFinder",
                "org.apache.derby.impl.sql.compile.MaxMinAggregateDefinition",
                "org.apache.derby.impl.sql.compile.OptTraceViewer",
                "org.apache.derby.impl.sql.compile.OptimizerTracer",
                "org.apache.derby.impl.sql.compile.SumAvgAggregateDefinition",
                "org.apache.derby.impl.sql.depend.DepClassInfo",
                "org.apache.derby.impl.sql.execute.AggregatorInfo",
                "org.apache.derby.impl.sql.execute.AggregatorInfoList",
                "org.apache.derby.impl.sql.execute.AvgAggregator",
                "org.apache.derby.impl.sql.execute.BaseActivation",
                "org.apache.derby.impl.sql.execute.BaseExpressionActivation",
                "org.apache.derby.impl.sql.execute.ColumnInfo",
                "org.apache.derby.impl.sql.execute.ConstantActionActivation",
                "org.apache.derby.impl.sql.execute.ConstraintInfo",
                "org.apache.derby.impl.sql.execute.CountAggregator",
                "org.apache.derby.impl.sql.execute.CurrentDatetime",
                "org.apache.derby.impl.sql.execute.CursorActivation",
                "org.apache.derby.impl.sql.execute.DeleteConstantAction",
                "org.apache.derby.impl.sql.execute.FKInfo",
                "org.apache.derby.impl.sql.execute.IndexColumnOrder",
                "org.apache.derby.impl.sql.execute.InsertConstantAction",
                "org.apache.derby.impl.sql.execute.MatchingClauseConstantAction",
                "org.apache.derby.impl.sql.execute.MaxMinAggregator",
                "org.apache.derby.impl.sql.execute.MergeConstantAction",
                "org.apache.derby.impl.sql.execute.OrderableAggregator",
                "org.apache.derby.impl.sql.execute.SavepointConstantAction",
                "org.apache.derby.impl.sql.execute.StdDevPAggregator",
                "org.apache.derby.impl.sql.execute.StdDevSAggregator",
                "org.apache.derby.impl.sql.execute.SumAggregator",
                "org.apache.derby.impl.sql.execute.TransactionConstantAction",
                "org.apache.derby.impl.sql.execute.TriggerInfo",
                "org.apache.derby.impl.sql.execute.UpdatableVTIConstantAction",
                "org.apache.derby.impl.sql.execute.UpdateConstantAction",
                "org.apache.derby.impl.sql.execute.UserDefinedAggregator",
                "org.apache.derby.impl.sql.execute.VarPAggregator",
                "org.apache.derby.impl.sql.execute.VarSAggregator",
                "org.apache.derby.impl.store.access.ConglomerateDirectory",
                "org.apache.derby.impl.store.access.PC_XenaVersion",
                "org.apache.derby.impl.store.access.PropertyConglomerate",
                "org.apache.derby.impl.store.access.StorableFormatId",
                "org.apache.derby.impl.store.access.btree.BranchControlRow",
                "org.apache.derby.impl.store.access.btree.LeafControlRow",
                "org.apache.derby.impl.store.access.btree.index.B2I",
                "org.apache.derby.impl.store.access.btree.index.B2IStaticCompiledInfo",
                "org.apache.derby.impl.store.access.btree.index.B2IUndo",
                "org.apache.derby.impl.store.access.btree.index.B2I_10_3",
                "org.apache.derby.impl.store.access.btree.index.B2I_v10_2",
                "org.apache.derby.impl.store.access.heap.Heap",
                "org.apache.derby.impl.store.access.heap.HeapClassInfo",
                "org.apache.derby.impl.store.access.heap.Heap_v10_2",
                "org.apache.derby.impl.store.raw.data.AllocPage",
                "org.apache.derby.impl.store.raw.data.AllocPageOperation",
                "org.apache.derby.impl.store.raw.data.ChainAllocPageOperation",
                "org.apache.derby.impl.store.raw.data.CompressSpacePageOperation",
                "org.apache.derby.impl.store.raw.data.CompressSpacePageOperation10_2",
                "org.apache.derby.impl.store.raw.data.ContainerOperation",
                "org.apache.derby.impl.store.raw.data.ContainerUndoOperation",
                "org.apache.derby.impl.store.raw.data.CopyRowsOperation",
                "org.apache.derby.impl.store.raw.data.DeleteOperation",
                "org.apache.derby.impl.store.raw.data.EncryptContainerOperation",
                "org.apache.derby.impl.store.raw.data.EncryptContainerUndoOperation",
                "org.apache.derby.impl.store.raw.data.FileContainer",
                "org.apache.derby.impl.store.raw.data.InitPageOperation",
                "org.apache.derby.impl.store.raw.data.InsertOperation",
                "org.apache.derby.impl.store.raw.data.InvalidatePageOperation",
                "org.apache.derby.impl.store.raw.data.LogicalUndoOperation",
                "org.apache.derby.impl.store.raw.data.PhysicalUndoOperation",
                "org.apache.derby.impl.store.raw.data.PurgeOperation",
                "org.apache.derby.impl.store.raw.data.RemoveFileOperation",
                "org.apache.derby.impl.store.raw.data.SetReservedSpaceOperation",
                "org.apache.derby.impl.store.raw.data.StoredPage",
                "org.apache.derby.impl.store.raw.data.StreamFileContainer",
                "org.apache.derby.impl.store.raw.data.UpdateFieldOperation",
                "org.apache.derby.impl.store.raw.data.UpdateOperation",
                "org.apache.derby.impl.store.raw.log.CheckpointOperation",
                "org.apache.derby.impl.store.raw.log.ChecksumOperation",
                "org.apache.derby.impl.store.raw.log.LogCounter",
                "org.apache.derby.impl.store.raw.log.LogRecord",
                "org.apache.derby.impl.store.raw.log.LogToFile",
                "org.apache.derby.impl.store.raw.xact.BeginXact",
                "org.apache.derby.impl.store.raw.xact.EndXact",
                "org.apache.derby.impl.store.raw.xact.GlobalXactId",
                "org.apache.derby.impl.store.raw.xact.TransactionTable",
                "org.apache.derby.impl.store.raw.xact.TransactionTableEntry",
                "org.apache.derby.impl.store.raw.xact.XAXactId",
                "org.apache.derby.impl.store.raw.xact.XactId",
                "org.apache.derby.jdbc.BasicEmbeddedConnectionPoolDataSource40",
                "org.apache.derby.jdbc.BasicEmbeddedDataSource40",
                "org.apache.derby.jdbc.BasicEmbeddedXADataSource40",
                "org.apache.derby.jdbc.EmbeddedConnectionPoolDataSource",
                "org.apache.derby.jdbc.EmbeddedConnectionPoolDataSource40",
                "org.apache.derby.jdbc.EmbeddedDataSource",
                "org.apache.derby.jdbc.EmbeddedDataSource40",
                "org.apache.derby.jdbc.EmbeddedDriver",
                "org.apache.derby.jdbc.EmbeddedXADataSource",
                "org.apache.derby.jdbc.EmbeddedXADataSource40",
                "org.apache.derby.mbeans.Management",
                "org.apache.derby.osgi.EmbeddedActivator",
                "org.apache.derby.shared.common.sanity.ThreadDump",
                "org.apache.derby.tools.sysinfo",
                "org.apache.derby.vti.ForeignTableVTI",
                "org.apache.derby.vti.StringColumnVTI",
                "org.apache.derby.vti.UpdatableVTITemplate",
                "org.apache.derby.vti.VTICosting",
                "org.apache.derby.vti.VTIMetaDataTemplate",
                "org.apache.derby.vti.XmlVTI"
        ).distinct().forEach(it -> {
            try {
                registerClass(context, context.loadClass(it).getName());
            } catch (final IllegalStateException | NoClassDefFoundError ise) {
                // no-op: optional class, absent in this Derby version/classpath
            }
        });
        // ContextManager is accessed reflectively but needs no constructor registration.
        context.register(new ClassReflectionModel("org.apache.derby.iapi.services.context.ContextManager", null, null, null, null, null, null, null, null, null, null));
        // Driver static initializers touch the engine/filesystem, so they must run at image runtime.
        Stream.of(
                "org.apache.derby.jdbc.AutoloadedDriver",
                "org.apache.derby.jdbc.EmbeddedDriver"
        ).forEach(it -> {
            try {
                context.initializeAtRunTime(context.loadClass(it).getName());
            } catch (final IllegalStateException ise) {
                // no-op: driver class absent from the classpath
            }
        });
    }

    // Registers the localized Derby message bundles (org.apache.derby.loc.m0 .. m49).
    private void registerI18n(final Context context) {
        IntStream.rangeClosed(0, 49).forEach(it -> context.register(new ResourceBundleModel("org.apache.derby.loc.m" + it)));
    }

    // Makes 'name' instantiable reflectively in the native image (all constructors).
    private void registerClass(final Context context, final String name) {
        final ClassReflectionModel model = new ClassReflectionModel();
        model.setName(name);
        model.setAllPublicConstructors(true);
        model.setAllDeclaredConstructors(true);
        context.register(model);
    }

    // Reads Derby's module registry (org/apache/derby/modules.properties) and registers
    // every listed implementation - plus its type hierarchy - for constructor reflection.
    private void tryToRegisterSPI(final Context context) {
        final ClassLoader loader = Thread.currentThread().getContextClassLoader();
        ofNullable(loader.getResourceAsStream("org/apache/derby/modules.properties"))
                .map(res -> {
                    final Properties properties = new Properties();
                    try (final InputStream s = res) {
                        properties.load(s);
                    } catch (final IOException e) {
                        throw new IllegalStateException(e);
                    }
                    return properties;
                })
                .ifPresent(props -> props.stringPropertyNames().stream()
                        .map(props::getProperty)
                        .flatMap(it -> Stream.of(it.split(",")))
                        .map(String::trim)
                        .filter(it -> !it.isEmpty())
                        .map(it -> {
                            try {
                                return context.loadClass(it);
                            } catch (final IllegalStateException ise) {
                                return null; // value is not a class name (module parameter) or class is absent
                            }
                        })
                        .filter(Objects::nonNull)
                        .flatMap(context::findHierarchy)
                        .forEach(it -> {
                            final ClassReflectionModel model = new ClassReflectionModel();
                            model.setName(it.getName());
                            model.setAllDeclaredConstructors(true);
                            context.register(model);
                        }));
    }

    // Bundles every Derby *.properties file (configuration, messages, module registry) in the image.
    private void registerProperties(final Context context) {
        final ResourceModel resourceModel = new ResourceModel();
        resourceModel.setPattern("org\\/apache\\/derby\\/.+\\.properties");
        context.register(resourceModel);
    }
}
| 6,146 |
0 | Create_ds/geronimo-arthur/knights/openwebbeans-knight/src/main/java/org/apache/geronimo/arthur/knight | Create_ds/geronimo-arthur/knights/openwebbeans-knight/src/main/java/org/apache/geronimo/arthur/knight/openwebbeans/OpenWebBeansExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.openwebbeans;
import lombok.extern.slf4j.Slf4j;
import org.apache.geronimo.arthur.spi.ArthurExtension;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.DynamicProxyModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
import org.apache.openwebbeans.se.CDISeScannerService;
import org.apache.openwebbeans.se.PreScannedCDISeScannerService;
import org.apache.webbeans.component.AbstractProducerBean;
import org.apache.webbeans.component.InjectionTargetBean;
import org.apache.webbeans.component.ManagedBean;
import org.apache.webbeans.config.OpenWebBeansConfiguration;
import org.apache.webbeans.config.WebBeansContext;
import org.apache.webbeans.container.BeanManagerImpl;
import org.apache.webbeans.conversation.ConversationImpl;
import org.apache.webbeans.corespi.DefaultSingletonService;
import org.apache.webbeans.corespi.se.DefaultScannerService;
import org.apache.webbeans.corespi.se.SimpleApplicationBoundaryService;
import org.apache.webbeans.corespi.se.StandaloneContextsService;
import org.apache.webbeans.inject.impl.InjectionPointImpl;
import org.apache.webbeans.intercept.ApplicationScopedBeanInterceptorHandler;
import org.apache.webbeans.intercept.NormalScopedBeanInterceptorHandler;
import org.apache.webbeans.intercept.RequestScopedBeanInterceptorHandler;
import org.apache.webbeans.intercept.SessionScopedBeanInterceptorHandler;
import org.apache.webbeans.lifecycle.StandaloneLifeCycle;
import org.apache.webbeans.logger.WebBeansLoggerFacade;
import org.apache.webbeans.portable.BaseProducerProducer;
import org.apache.webbeans.portable.ProducerFieldProducer;
import org.apache.webbeans.portable.ProducerMethodProducer;
import org.apache.webbeans.portable.events.ExtensionLoader;
import org.apache.webbeans.service.ClassLoaderProxyService;
import org.apache.webbeans.service.DefaultLoaderService;
import org.apache.webbeans.spi.ApplicationBoundaryService;
import org.apache.webbeans.spi.BeanArchiveService;
import org.apache.webbeans.spi.ContainerLifecycle;
import org.apache.webbeans.spi.ContextsService;
import org.apache.webbeans.spi.DefiningClassService;
import org.apache.webbeans.spi.InjectionPointService;
import org.apache.webbeans.spi.JNDIService;
import org.apache.webbeans.spi.LoaderService;
import org.apache.webbeans.spi.ResourceInjectionService;
import org.apache.webbeans.spi.ScannerService;
import org.apache.webbeans.spi.SecurityService;
import org.apache.xbean.finder.AnnotationFinder;
import org.apache.xbean.finder.filter.Filter;
import javax.annotation.Priority;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.context.ConversationScoped;
import javax.enterprise.context.Dependent;
import javax.enterprise.context.Destroyed;
import javax.enterprise.context.Initialized;
import javax.enterprise.context.NormalScope;
import javax.enterprise.context.RequestScoped;
import javax.enterprise.event.Observes;
import javax.enterprise.event.ObservesAsync;
import javax.enterprise.event.Reception;
import javax.enterprise.event.TransactionPhase;
import javax.enterprise.inject.Default;
import javax.enterprise.inject.se.SeContainer;
import javax.enterprise.inject.se.SeContainerInitializer;
import javax.enterprise.inject.spi.AnnotatedField;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.Interceptor;
import javax.inject.Qualifier;
import javax.interceptor.InterceptorBinding;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonList;
import static java.util.Objects.requireNonNull;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static org.apache.geronimo.arthur.spi.ArthurExtension.PredicateType.EQUALS;
@Slf4j
public class OpenWebBeansExtension implements ArthurExtension {
    /**
     * Boots the CDI SE container at build time and derives every piece of native-image
     * metadata the application needs: generated proxy classes, reflection entries for
     * beans/interceptors/extensions/producers, OWB SPI services, resource bundles and
     * build-time initialization hints. System properties are snapshotted and restored
     * because the container boot mutates them.
     *
     * Fix: step 6 previously registered {@code allDeclaredMethods} twice (duplicate
     * statement removed).
     */
    @Override
    public void execute(final Context context) {
        final Properties original = new Properties();
        original.putAll(System.getProperties());
        try (final SeContainer container = configureInitializer(context, SeContainerInitializer.newInstance()).initialize()) {
            final WebBeansContext webBeansContext = WebBeansContext.currentInstance();
            final BeanManagerImpl beanManager = webBeansContext.getBeanManagerImpl();
            final Set<Bean<?>> beans = beanManager.getBeans();
            final Collection<javax.enterprise.inject.spi.Interceptor<?>> interceptors = webBeansContext.getInterceptorsManager().getCdiInterceptors();
            final Predicate<String> classFilter = context.createIncludesExcludes(
                    "extension.openwebbeans.classes.filter.", PredicateType.STARTS_WITH);
            // 1. capture all proxies
            dumpProxies(context, webBeansContext, beans, classFilter);
            // 2. register all classes which will require reflection + proxies
            final String beanClassesList = registerBeansForReflection(context, beans, classFilter, interceptors);
            getProxies(webBeansContext).keySet().stream()
                    .filter(classFilter)
                    .flatMap(it -> {
                        try {
                            return hierarchy(context.loadClass(it))
                                    .map(Class::getName);
                        } catch (final RuntimeException re) {
                            // class not loadable at build time: register the raw name anyway
                            return Stream.of(it);
                        }
                    })
                    .sorted()
                    .forEach(name -> {
                        final ClassReflectionModel model = new ClassReflectionModel();
                        model.setName(name);
                        model.setAllDeclaredConstructors(true);
                        model.setAllDeclaredFields(true);
                        model.setAllDeclaredMethods(true);
                        context.register(model);
                    });
            // 3. dump owb properties for runtime
            final Properties properties = initProperties(context, webBeansContext.getOpenWebBeansConfiguration(), beanClassesList);
            // 4. register CDI/OWB API which require some reflection
            // 4.1 SPI (interface)
            Stream.of(
                    ScannerService.class, LoaderService.class, BeanArchiveService.class, SecurityService.class,
                    ContainerLifecycle.class, JNDIService.class, ApplicationBoundaryService.class, ContextsService.class,
                    InjectionPointService.class, ResourceInjectionService.class, DefiningClassService.class,
                    Filter.class)
                    .forEach(clazz -> {
                        final ClassReflectionModel model = new ClassReflectionModel();
                        model.setName(clazz.getName());
                        model.setAllPublicMethods(true);
                        context.register(model);
                    });
            // 4.2 classes which must be instantiable
            Stream.concat(Stream.of(
                    ClassLoaderProxyService.LoadOnly.class, StandaloneLifeCycle.class, StandaloneContextsService.class,
                    DefaultLoaderService.class, InjectionPointImpl.class, ConversationImpl.class, SimpleApplicationBoundaryService.class,
                    ApplicationScopedBeanInterceptorHandler.class, RequestScopedBeanInterceptorHandler.class,
                    SessionScopedBeanInterceptorHandler.class, NormalScopedBeanInterceptorHandler.class,
                    CDISeScannerService.class, PreScannedCDISeScannerService.class, DefaultScannerService.class),
                    findServices(properties))
                    .distinct()
                    .forEach(clazz -> {
                        final ClassReflectionModel model = new ClassReflectionModel();
                        model.setName(clazz.getName());
                        model.setAllDeclaredConstructors(true);
                        context.register(model);
                    });
            // 4.3 needed by prescanned scanner
            final ClassReflectionModel owbFinder = new ClassReflectionModel();
            owbFinder.setName(AnnotationFinder.class.getName());
            final ClassReflectionModel.FieldReflectionModel owbFinderLinking = new ClassReflectionModel.FieldReflectionModel();
            owbFinderLinking.setAllowWrite(true);
            owbFinderLinking.setName("linking");
            owbFinder.setFields(singletonList(owbFinderLinking));
            context.register(owbFinder);
            // 5 annotations
            final Collection<Class<?>> customAnnotations = Stream.concat(
                    context.findAnnotatedClasses(Qualifier.class).stream(),
                    context.findAnnotatedClasses(NormalScope.class).stream())
                    .collect(toList());
            Stream.concat(Stream.concat(Stream.of(
                    Initialized.class, Destroyed.class, NormalScope.class, ApplicationScoped.class, Default.class,
                    Dependent.class, ConversationScoped.class, RequestScoped.class, Observes.class, ObservesAsync.class,
                    Qualifier.class, InterceptorBinding.class, Priority.class),
                    beanManager.getAdditionalQualifiers().stream()),
                    customAnnotations.stream())
                    .distinct()
                    .map(Class::getName)
                    .sorted()
                    .forEach(clazz -> {
                        final ClassReflectionModel model = new ClassReflectionModel();
                        model.setName(clazz);
                        model.setAllDeclaredMethods(true);
                        context.register(model);
                    });
            customAnnotations.stream() // DefaultAnnotation.of
                    .filter(it -> !it.getName().startsWith("javax.") && !it.getName().startsWith("jakarta."))
                    .map(Class::getName)
                    .sorted()
                    .map(it -> {
                        final DynamicProxyModel proxyModel = new DynamicProxyModel();
                        proxyModel.setClasses(singleton(it));
                        return proxyModel;
                    })
                    .forEach(context::register);
            // 6 extensions - normally taken by graalvm service loader but we need a bit more reflection
            final ExtensionLoader extensionLoader = webBeansContext.getExtensionLoader();
            try {
                final Field extensionClasses = ExtensionLoader.class.getDeclaredField("extensionClasses");
                if (!extensionClasses.isAccessible()) {
                    extensionClasses.setAccessible(true);
                }
                final Predicate<String> extensionFilter = context.createPredicate("extension.openwebbeans.extension.excludes", PredicateType.STARTS_WITH)
                        .map(Predicate::negate)
                        .orElseGet(() -> n -> true);
                final Set<Class<?>> classes = (Set<Class<?>>) extensionClasses.get(extensionLoader);
                classes.stream()
                        .filter(it -> extensionFilter.test(it.getName()))
                        .flatMap(this::hierarchy)
                        .distinct()
                        .map(Class::getName)
                        .filter(classFilter)
                        .sorted()
                        .forEach(clazz -> {
                            final ClassReflectionModel model = new ClassReflectionModel();
                            model.setName(clazz);
                            model.setAllDeclaredConstructors(true);
                            // NOTE(review): the original set allDeclaredMethods twice; if extensions also
                            // need field reflection, add setAllDeclaredFields(true) here instead.
                            model.setAllDeclaredMethods(true);
                            context.register(model);
                        });
            } catch (final NoSuchFieldException | IllegalAccessException e) {
                throw new IllegalStateException("Incompatible OpenWebBeans version", e);
            }
            // 7. producer types must be reflection friendly
            findProducedClasses(beans)
                    .map(Class::getName)
                    .sorted()
                    .forEach(name -> {
                        final ClassReflectionModel model = new ClassReflectionModel();
                        model.setName(name);
                        model.setAllDeclaredConstructors(true);
                        model.setAllDeclaredFields(true);
                        model.setAllDeclaredMethods(true);
                        context.register(model);
                    });
            // 8. enforce some build time init for annotations and some specific classes
            context.initializeAtBuildTime(
                    Reception.class.getName(),
                    TransactionPhase.class.getName(),
                    DefaultSingletonService.class.getName(),
                    WebBeansLoggerFacade.class.getName());
            try { // openwebbeans-slf4j is an optional module
                final Class<?> logger = context.loadClass("org.apache.openwebbeans.slf4j.Slf4jLogger");
                context.initializeAtBuildTime(logger.getName());
            } catch (final RuntimeException e) {
                // ignore, not there
            }
            // 9. we add the resource bundle + the bundle as resource for some extensions (thank you JUL)
            context.includeResourceBundle("openwebbeans/Messages");
            final ResourceModel resourceModel = new ResourceModel();
            resourceModel.setPattern("openwebbeans/Messages\\.properties");
            context.register(resourceModel);
            // 10. OWB creates proxies on TypeVariable (generics checks) so enable it
            final DynamicProxyModel typeVariableProxyModel = new DynamicProxyModel();
            typeVariableProxyModel.setClasses(singleton(TypeVariable.class.getName()));
            context.register(typeVariableProxyModel);
            // 11. interceptor bindings
            context.findAnnotatedClasses(InterceptorBinding.class).forEach(clazz -> {
                final ClassReflectionModel model = new ClassReflectionModel();
                model.setName(clazz.getName());
                model.setAllPublicMethods(true);
                context.register(model);
            });
        } finally {
            System.setProperties(original);
        }
    }

    /**
     * Finds the concrete classes produced by producer methods/fields so they can be
     * registered for reflection (step 7).
     */
    private Stream<Class<?>> findProducedClasses(final Set<Bean<?>> beans) {
        return beans.stream()
                .filter(it -> AbstractProducerBean.class.isInstance(it) && BaseProducerProducer.class.isInstance(AbstractProducerBean.class.cast(it).getProducer()))
                .flatMap(it -> {
                    final BaseProducerProducer bpp = BaseProducerProducer.class.cast(AbstractProducerBean.class.cast(it).getProducer());
                    final Collection<Type> types = it.getTypes();
                    if (ProducerMethodProducer.class.isInstance(bpp)) {
                        return concat(types, get(bpp, "producerMethod", Method.class).getReturnType());
                    }
                    if (ProducerFieldProducer.class.isInstance(bpp)) {
                        return concat(types, get(bpp, "producerField", AnnotatedField.class).getJavaMember().getType());
                    }
                    return null; // filtered out below
                })
                .filter(Objects::nonNull);
    }

    /** Expands {@code type} plus all {@code Class} entries of {@code types} into their full hierarchies. */
    private Stream<Class<?>> concat(final Collection<Type> types, final Class<?> type) {
        return Stream.concat(Stream.of(type), types.stream().filter(Class.class::isInstance).map(Class.class::cast))
                .distinct() // if types includes type, avoids to do twice the hierarchy
                .flatMap(this::hierarchy)
                .distinct();
    }

    /** Reads a private field of a producer by reflection (no public accessor in OWB). */
    private <T> T get(final BaseProducerProducer p, final String field, final Class<T> type) {
        try {
            final Field declaredField = p.getClass().getDeclaredField(field);
            if (!declaredField.isAccessible()) {
                declaredField.setAccessible(true);
            }
            return type.cast(declaredField.get(p));
        } catch (final Exception e) {
            throw new IllegalStateException(e);
        }
    }

    /** Resolves OWB SPI service implementations configured in {@code properties} to classes. */
    private Stream<? extends Class<?>> findServices(final Properties properties) {
        final ClassLoader loader = Thread.currentThread().getContextClassLoader();
        return properties.stringPropertyNames().stream()
                .filter(it -> it.startsWith("org.apache.webbeans.spi.") || it.equals(Filter.class.getName()))
                .map(properties::getProperty)
                .map(it -> {
                    try {
                        return loader.loadClass(it);
                    } catch (final ClassNotFoundException e) {
                        if (it.contains(".")) {
                            log.warn(e.getMessage(), e);
                        } // else can be "false" so just ignore
                        return null;
                    }
                })
                .filter(Objects::nonNull);
    }

    /**
     * Registers bean and interceptor classes (full hierarchies) for reflection and,
     * optionally, as resources; returns the comma-joined class name list used to feed
     * the pre-scanned scanner.
     */
    private String registerBeansForReflection(final Context context, final Set<Bean<?>> beans, final Predicate<String> classFilter,
                                              final Collection<javax.enterprise.inject.spi.Interceptor<?>> interceptors) {
        final boolean includeClassResources = Boolean.parseBoolean(context.getProperty("extension.openwebbeans.classes.includeAsResources"));
        return Stream.concat(
                beans.stream().filter(ManagedBean.class::isInstance).map(Bean::getBeanClass),
                interceptors.stream().map(Interceptor::getBeanClass))
                .flatMap(this::hierarchy)
                .distinct()
                .map(Class::getName)
                .filter(classFilter)
                .sorted()
                .peek(clazz -> { // registration side effect while collecting the name list
                    final ClassReflectionModel model = new ClassReflectionModel();
                    model.setName(clazz);
                    model.setAllDeclaredConstructors(true);
                    model.setAllDeclaredMethods(true);
                    model.setAllDeclaredFields(true);
                    context.register(model);
                    if (includeClassResources) {
                        final ResourceModel resourceModel = new ResourceModel();
                        resourceModel.setPattern(Pattern.quote(clazz.replace('.', '/') + ".class"));
                        context.register(resourceModel);
                    }
                })
                .collect(joining(","));
    }

    /**
     * Forces proxy generation (interceptor/decorator + normal-scope proxies) and registers
     * the captured bytecode as generated classes for the native image.
     */
    private void dumpProxies(final Context context, final WebBeansContext webBeansContext, final Set<Bean<?>> beans,
                             final Predicate<String> classFilter) {
        // interceptors/decorators
        beans.stream()
                .filter(InjectionTargetBean.class::isInstance)
                .map(InjectionTargetBean.class::cast)
                .forEach(InjectionTargetBean::defineInterceptorsIfNeeded);
        // normal scope
        beans.stream()
                .filter(it -> webBeansContext.getBeanManagerImpl().isNormalScope(it.getScope()))
                .forEach(webBeansContext.getNormalScopeProxyFactory()::createNormalScopeProxy);
        final Map<String, byte[]> proxies = getProxies(webBeansContext);
        log.debug("Proxies: {}", proxies.keySet());
        if (proxies.isEmpty()) {
            log.info("No proxy found for this application");
        } else {
            proxies.entrySet().stream()
                    .filter(it -> classFilter.test(it.getKey()))
                    .sorted(Map.Entry.comparingByKey())
                    .forEach(e -> {
                        context.registerGeneratedClass(e.getKey(), e.getValue());
                        log.info("Registered proxy '{}'", e.getKey());
                    });
        }
    }

    /** Returns the proxy bytecode captured by the spying {@link DefiningClassService}. */
    private Map<String, byte[]> getProxies(final WebBeansContext webBeansContext) {
        return ClassLoaderProxyService.Spy.class.cast(webBeansContext.getService(DefiningClassService.class)).getProxies();
    }

    /** Streams {@code it} plus its whole superclass/interface hierarchy (excluding Object). */
    private Stream<Class<?>> hierarchy(final Class<?> it) {
        return it == null || it == Object.class ?
                Stream.empty() :
                Stream.concat(
                        Stream.concat(Stream.of(it), hierarchy(it.getSuperclass())),
                        Stream.of(it.getInterfaces()).flatMap(this::hierarchy));
    }

    /**
     * Tunes the OWB configuration for runtime (pre-scanned classes, optimizations), drops
     * build-only properties, then dumps the result to the working directory and points the
     * native-image {@code -H:OpenWebBeansProperties} option at it.
     */
    private Properties initProperties(final Context context, final OpenWebBeansConfiguration configuration,
                                      final String beanClassesList) {
        try {
            final Field field = OpenWebBeansConfiguration.class.getDeclaredField("configProperties");
            field.setAccessible(true);
            final Properties properties = Properties.class.cast(field.get(configuration));
            enrichProperties(properties, true);
            if (!Boolean.parseBoolean(context.getProperty("extension.openwebbeans.services.ignoreScannerService"))) {
                properties.put("org.apache.webbeans.spi.ScannerService", "org.apache.openwebbeans.se.PreScannedCDISeScannerService");
            }
            properties.putIfAbsent("org.apache.openwebbeans.se.PreScannedCDISeScannerService.classes",
                    properties.getProperty("org.apache.openwebbeans.se.PreScannedCDISeScannerService.classesTemplate", "${classes}")
                            .replace("${classes}", beanClassesList));
            // NOTE(review): this extension.* flag is looked up in the OWB properties, not in
            // context.getProperty(...) like the others — confirm which source is intended.
            if (!Boolean.parseBoolean(properties.getProperty("extension.openwebbeans.runtime.properties.skipOptimizations"))) {
                properties.putIfAbsent("org.apache.webbeans.spi.deployer.skipValidations", "true");
                properties.putIfAbsent("org.apache.webbeans.spi.deployer.skipVetoedOnPackages", "true");
            }
            final Predicate<String> droppedProperties = context.createPredicate("extension.openwebbeans.container.se.properties.runtime.excludes", EQUALS)
                    .orElseGet(() -> asList("configuration.ordinal", "org.apache.webbeans.lifecycle.standalone.fireApplicationScopeEvents")::contains);
            properties.stringPropertyNames().stream().filter(droppedProperties).forEach(properties::remove);
            final StringWriter writer = new StringWriter();
            try (final StringWriter w = writer) {
                properties.store(w, "Generated by Geronimo Arthur");
            }
            final Path workDir = Paths.get(requireNonNull(context.getProperty("workingDirectory"), "workingDirectory property"));
            context.addNativeImageOption("-H:OpenWebBeansProperties=" +
                    dump(workDir, "openwebbeans.properties", writer.toString().replaceAll("(?m)^#.*", "")));
            return properties;
        } catch (final Exception e) {
            throw new IllegalStateException("Incompatible OWB version", e);
        }
    }

    /**
     * Forces deterministic proxy naming and the proxy-service flavor: the spying service at
     * build time ({@code runtime == false}) and the load-only service at runtime.
     */
    private void enrichProperties(final Properties properties, final boolean runtime) {
        properties.setProperty("configuration.ordinal", "10000");
        properties.setProperty("org.apache.webbeans.proxy.useStaticNames", "true");
        properties.setProperty("org.apache.webbeans.proxy.staticNames.useXxHash64", "true");
        properties.setProperty("org.apache.webbeans.spi.DefiningClassService", runtime ?
                "org.apache.webbeans.service.ClassLoaderProxyService$LoadOnly" :
                "org.apache.webbeans.service.ClassLoaderProxyService$Spy");
        properties.setProperty("org.apache.webbeans.spi.ApplicationBoundaryService",
                "org.apache.webbeans.corespi.se.SimpleApplicationBoundaryService");
    }

    /** Applies extension.openwebbeans.container.se.* configuration to the SE initializer. */
    private SeContainerInitializer configureInitializer(final Context context, final SeContainerInitializer initializer) {
        final Properties config = new Properties();
        enrichProperties(config, false); // before starting ensure we use a deterministic proxy generation config
        config.stringPropertyNames().forEach(k -> initializer.addProperty(k, config.getProperty(k)));
        if (Boolean.parseBoolean(context.getProperty("extension.openwebbeans.container.se.disableDiscovery"))) {
            initializer.disableDiscovery();
        }
        final ClassLoader loader = Thread.currentThread().getContextClassLoader();
        initializer.setClassLoader(loader);
        ofNullable(context.getProperty("extension.openwebbeans.container.se.properties"))
                .ifPresent(props -> {
                    final Properties properties = readProps(props);
                    properties.stringPropertyNames().forEach(k -> initializer.addProperty(k, properties.getProperty(k)));
                });
        ofNullable(context.getProperty("extension.openwebbeans.container.se.services"))
                .ifPresent(props -> {
                    final Properties properties = readProps(props);
                    properties.stringPropertyNames().forEach(k -> {
                        try {
                            initializer.addProperty(k, loader.loadClass(properties.getProperty(k).trim()));
                        } catch (final ClassNotFoundException e) {
                            throw new IllegalArgumentException(e);
                        }
                    });
                });
        ofNullable(context.getProperty("extension.openwebbeans.container.se.classes"))
                .ifPresent(classes -> initializer.addBeanClasses(Stream.of(classes.split(","))
                        .map(String::trim)
                        .filter(it -> !it.isEmpty())
                        .map(it -> {
                            try {
                                return loader.loadClass(it);
                            } catch (final ClassNotFoundException e) {
                                throw new IllegalArgumentException(e);
                            }
                        })
                        .toArray(Class<?>[]::new)));
        return initializer;
    }

    /** Parses an inline properties-format string. */
    private Properties readProps(final String props) {
        final Properties properties = new Properties();
        try (final StringReader reader = new StringReader(props)) {
            properties.load(reader);
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
        return properties;
    }

    /** Writes {@code value} to {@code workDir/name} (creating the directory) and returns its absolute path. */
    private String dump(final Path workDir, final String name, final String value) {
        if (!Files.isDirectory(workDir)) {
            try {
                Files.createDirectories(workDir);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
        final Path out = workDir.resolve(name);
        try {
            Files.write(
                    out, value.getBytes(StandardCharsets.UTF_8),
                    StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
        log.info("Created '{}'", out);
        return out.toAbsolutePath().toString();
    }
}
| 6,147 |
0 | Create_ds/geronimo-arthur/knights/openwebbeans-knight/src/main/java/org/apache/geronimo/arthur/knight/openwebbeans | Create_ds/geronimo-arthur/knights/openwebbeans-knight/src/main/java/org/apache/geronimo/arthur/knight/openwebbeans/replacement/OWBInitializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.openwebbeans.replacement;
import com.oracle.svm.core.annotate.Substitute;
import com.oracle.svm.core.annotate.TargetClass;
import org.apache.openwebbeans.se.CDISeScannerService;
import org.apache.openwebbeans.se.PreScannedCDISeScannerService;
// GraalVM substitution class: at image build time SVM replaces the body of
// org.apache.openwebbeans.se.OWBInitializer#createDefaultScannerService with the one below,
// so the native image uses the pre-scanned scanner instead of scanning the classpath at runtime.
@TargetClass(org.apache.openwebbeans.se.OWBInitializer.class)
public final class OWBInitializer {
    // Substituted body: return the build-time-prepared scanner rather than a fresh CDISeScannerService.
    @Substitute
    protected CDISeScannerService createDefaultScannerService() {
        return new PreScannedCDISeScannerService();
    }
}
| 6,148 |
0 | Create_ds/geronimo-arthur/knights/openwebbeans-knight/src/main/java/org/apache/geronimo/arthur/knight/openwebbeans | Create_ds/geronimo-arthur/knights/openwebbeans-knight/src/main/java/org/apache/geronimo/arthur/knight/openwebbeans/feature/OpenWebBeansFeature.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.openwebbeans.feature;
import com.oracle.svm.core.annotate.AutomaticFeature;
import com.oracle.svm.core.jdk.Resources;
import com.oracle.svm.core.option.HostedOptionKey;
import org.graalvm.compiler.options.Option;
import org.graalvm.compiler.options.OptionDescriptor;
import org.graalvm.compiler.options.OptionDescriptors;
import org.graalvm.compiler.options.OptionType;
import org.graalvm.nativeimage.hosted.Feature;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Iterator;
import java.util.stream.Stream;
@AutomaticFeature
public class OpenWebBeansFeature implements Feature {
    /** Holder for the hosted (build-time) options this feature understands. */
    public static final class Options {
        @Option(help = "OpenWebBeans properties.", type = OptionType.User)
        static final HostedOptionKey<String> OpenWebBeansProperties = new HostedOptionKey<>(null);
    }

    // Descriptors are hand-written because org.graalvm.compiler.options.processor is not on central.
    public static class OpenWebBeansOptions implements OptionDescriptors {
        private static final String PROPERTIES_OPTION = "OpenWebBeansProperties";

        @Override
        public OptionDescriptor get(final String value) {
            if (!PROPERTIES_OPTION.equals(value)) {
                return null;
            }
            return OptionDescriptor.create(
                    value, OptionType.User, String.class,
                    "OpenWebBeans properties.",
                    Options.class, value,
                    Options.OpenWebBeansProperties);
        }

        @Override
        public Iterator<OptionDescriptor> iterator() {
            return Stream.of(get(PROPERTIES_OPTION)).iterator();
        }
    }

    /** Embeds the configured properties file as the in-image openwebbeans.properties resource. */
    @Override
    public void beforeAnalysis(final BeforeAnalysisAccess access) {
        if (!Options.OpenWebBeansProperties.hasBeenSet()) {
            return;
        }
        register(Options.OpenWebBeansProperties.getValue(), "META-INF/openwebbeans/openwebbeans.properties");
    }

    /** Registers the file at {@code path} as the native-image resource {@code resource}. */
    private void register(final String path, final String resource) {
        try (final InputStream in = Files.newInputStream(Paths.get(path))) {
            Resources.registerResource(resource, in);
        } catch (final IOException e) {
            throw new IllegalStateException(e);
        }
    }
}
| 6,149 |
0 | Create_ds/geronimo-arthur/knights/slf4j-knight/src/main/java/org/apache/geronimo/arthur/knight | Create_ds/geronimo-arthur/knights/slf4j-knight/src/main/java/org/apache/geronimo/arthur/knight/slf4j/Slf4jExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.knight.slf4j;
import org.apache.geronimo.arthur.spi.ArthurExtension;
public class Slf4jExtension implements ArthurExtension {
    // Class names kept as strings: maven itself runs on slf4j and loading the real
    // classes here could trigger a NoClassDefFoundError.
    private static final String[] BUILD_TIME_CLASSES = {
            "org.slf4j.impl.JDK14LoggerAdapter",
            "org.slf4j.impl.StaticLoggerBinder",
            "org.slf4j.LoggerFactory",
    };

    /** Marks the slf4j binding classes for native-image build-time initialization. */
    @Override
    public void execute(final Context context) {
        context.initializeAtBuildTime(BUILD_TIME_CLASSES);
    }

    /** Low order so logging configuration happens before other knights run. */
    @Override
    public int order() {
        return 10;
    }
}
| 6,150 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/jsch/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/jsch/src/main/java/org/apache/geronimo/arthur/integrationtests/JschMain.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
public final class JschMain {
    private JschMain() {
        // noop
    }

    /**
     * Opens an SSH session to localhost on the port given by the {@code MavenTest.jsch.port}
     * system property (user "test"/"testpwd"), executes the remote {@code ping} command and
     * mirrors its output to stdout until the channel disconnects.
     *
     * Fixes: fail fast with a clear message when the port property is unset (previously an
     * opaque unboxing NPE); removed an unused ByteArrayOutputStream local.
     */
    public static void main(final String[] args) {
        final Integer port = Integer.getInteger("MavenTest.jsch.port");
        if (port == null) {
            throw new IllegalStateException("System property 'MavenTest.jsch.port' is not set");
        }
        Session session;
        try {
            session = new JSch().getSession("test", "localhost", port);
            session.setPassword("testpwd");
            session.setConfig("StrictHostKeyChecking", "no");
            session.setConfig("PreferredAuthentications", "password");
            session.connect();
        } catch (final JSchException e) {
            throw new IllegalStateException(e);
        }
        try {
            final ChannelExec channelExec = ChannelExec.class.cast(session.openChannel("exec"));
            channelExec.setCommand("ping");
            channelExec.setInputStream(System.in, true);
            channelExec.setOutputStream(System.out, true);
            channelExec.setErrStream(System.err, true);
            channelExec.connect();
            final InputStream inputStream = channelExec.getInputStream();
            final byte[] buffer = new byte[1024];
            int length;
            // stop as soon as the channel drops or the remote stream ends
            while (channelExec.isConnected() && (length = inputStream.read(buffer)) != -1) {
                System.out.write(buffer, 0, length);
            }
        } catch (final JSchException | IOException e) {
            throw new IllegalStateException(e);
        } finally {
            session.disconnect();
        }
    }
}
| 6,151 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/scr/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/scr/src/main/java/org/apache/geronimo/arthur/integrationtests/Application.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
@Component(service = Application.class, immediate = true)
public class Application {
    /** Lifecycle callback invoked by SCR once this immediate component is activated. */
    @Activate
    public void init() {
        final String componentName = getClass().getName();
        System.out.println("Starting " + componentName);
    }
}
| 6,152 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/scr/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/scr/src/main/java/org/apache/geronimo/arthur/integrationtests/ScrMain.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests;
import java.io.IOException;
import org.apache.winegrower.Ripener;
public final class ScrMain {
    private ScrMain() {
        // noop
    }

    // Ripener.main(String[]) would work too, but an explicit entry point reads better for an IT.
    //
    // debugging the native binary:
    // $ ./target/scr.graal.bin -Dorg.slf4j.simpleLogger.logFile=System.out -Dorg.slf4j.simpleLogger.defaultLogLevel=DEBUG
    public static void main(final String[] args) throws IOException {
        setIfMissing("org.slf4j.simpleLogger.logFile", "System.out");
        setIfMissing("org.slf4j.simpleLogger.defaultLogLevel", "WARN");
        setIfMissing("ds.service.changecount.timeout", "1"); // otherwise shutdown will wait 5s for nothing here
        final Ripener.Configuration ripenerConfig = new Ripener.Configuration();
        ripenerConfig.setJarFilter(it -> true); // we built the metadata so no scanning
        try (final Ripener ripener = new Ripener.Impl(ripenerConfig).start()) {
            // nothing to do: the deployment itself prints
            // "Starting org.apache.geronimo.arthur.integrationtests.Application"
        }
    }

    /** Sets {@code key} to {@code value} unless the user already supplied it. */
    private static void setIfMissing(final String key, final String value) {
        final String effective = System.getProperty(key, value);
        System.setProperty(key, effective);
    }
}
| 6,153 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/owb/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/owb/src/main/java/org/apache/geronimo/arthur/integrationtests/OWB.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.context.Initialized;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.se.SeContainer;
import javax.enterprise.inject.se.SeContainerInitializer;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
public final class OWB {
    private OWB() {
        // noop
    }

    /**
     * Boots a CDI SE container and checks two things: the {@link Starter} observer ran exactly
     * once on application-scope initialization, and the normal-scope proxy for {@link Proxied}
     * works. Fails with IllegalStateException otherwise.
     *
     * Fix: removed the unused private setIfMissing(String, String) helper (dead code).
     *
     * @throws IOException kept in the signature for compatibility with the original contract
     */
    public static void main(final String[] args) throws IOException {
        Logger.getLogger("org.apache").setLevel(Level.WARNING);
        try (final SeContainer container = SeContainerInitializer.newInstance().initialize()) {
            // starter is launched automatically by the @Initialized(ApplicationScoped.class) event
            final int counter = container.select(Starter.class).get().getCounter();
            if (counter != 1) {
                throw new IllegalStateException("Starter didn't start: " + counter);
            }
            final Proxied proxied = container.select(Proxied.class).get();
            final String proxyValue = proxied.getAnything();
            if (!"from proxy".equals(proxyValue)) {
                throw new IllegalStateException(proxied + ": " + proxyValue);
            }
            System.out.println("counter=" + counter + ", from proxy=" + proxyValue);
        }
    }

    /** Application-scoped bean counting application-scope initialization events. */
    @ApplicationScoped
    public static class Starter {
        private int counter = 0;

        public int getCounter() {
            return counter;
        }

        public void onStart(@Observes @Initialized(ApplicationScoped.class) final Object start,
                            final Proxied proxied) {
            counter++;
            Logger.getLogger(getClass().getName() + // who
                    " started: proxy_class=" + proxied.getClass().getName() + ", " + // uses proxy class
                    proxied.getAnything()); // proxy works
        }
    }

    /** Normal-scoped bean used to assert proxy generation works in the native image. */
    @ApplicationScoped
    public static class Proxied {
        public String getAnything() {
            return "from proxy";
        }
    }
}
| 6,154 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/openjpa/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/openjpa/src/main/java/org/apache/geronimo/arthur/integrationtests/OpenJPAMain.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.geronimo.arthur.integrationtests.entities.Child;
import org.apache.geronimo.arthur.integrationtests.entities.Root;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction;
import javax.persistence.SharedCacheMode;
import javax.persistence.ValidationMode;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.spi.ClassTransformer;
import javax.persistence.spi.PersistenceProvider;
import javax.persistence.spi.PersistenceUnitInfo;
import javax.persistence.spi.PersistenceUnitTransactionType;
import javax.sql.DataSource;
import java.net.URL;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.ServiceLoader;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
public final class OpenJPAMain {

    private OpenJPAMain() {
        // utility entry point, never instantiated
    }

    /**
     * Boots OpenJPA without any persistence.xml (the unit is described programmatically by
     * {@link #newInfo(DataSource, Properties)}), persists a small Root/Child graph and reads
     * it back twice: once via {@code EntityManager.find} and once through the criteria API.
     *
     * Sample output (ids depend on the configured class-table sequence):
     * [main] INFO org.apache.geronimo.arthur.integrationtests.OpenJPAMain - findbyid =&gt; root:id=10000,name=root_1,children=[child:id=10001,name=child_2, child:id=10000,name=child_1]
     * [main] INFO org.apache.geronimo.arthur.integrationtests.OpenJPAMain - criteria builder =&gt; root:id=10000,name=root_1,children=[child:id=10001,name=child_2, child:id=10000,name=child_1]
     */
    public static void main(final String... args) throws SQLException {
        // quiet hsqldb/slf4j unless the caller configured them explicitly
        setIfMissing("hsqldb.reconfig_logging", "false");
        setIfMissing("org.slf4j.simpleLogger.logFile", "System.out");
        setIfMissing("org.slf4j.simpleLogger.defaultLogLevel", "WARN");
        setIfMissing("org.slf4j.simpleLogger.log.org.apache.geronimo.arthur.integrationtests", "INFO");
        final BasicDataSource dataSource = createDataSource();
        final Map<String, Object> map = new HashMap<>();
        final Properties properties = new Properties();
        properties.setProperty("javax.persistence.schema-generation.database.action", "drop-and-create");
        properties.setProperty("openjpa.Log", "DefaultLevel=WARN, Runtime=WARN, Tool=WARN"); // SQL=TRACE for debugging purposes
        properties.setProperty("openjpa.Sequence", "class-table(Increment=20, InitialValue=1)");
        final EntityManagerFactory factory = ServiceLoader.load(PersistenceProvider.class).iterator().next()
                // use no xml option for now
                .createContainerEntityManagerFactory(newInfo(dataSource, properties), map);
        final Logger logger = LoggerFactory.getLogger(OpenJPAMain.class);
        try {
            final long rootId = createGraph(factory.createEntityManager());
            final EntityManager findByIdEm = factory.createEntityManager();
            logger.info("findbyid => " + findByIdEm.find(Root.class, rootId).toString());
            findByIdEm.close();
            final EntityManager criteriaBuilderEm = factory.createEntityManager();
            final CriteriaBuilder cb = criteriaBuilderEm.getCriteriaBuilder();
            final CriteriaQuery<Root> query = cb.createQuery(Root.class);
            final javax.persistence.criteria.Root<Root> from = query.from(Root.class);
            final CriteriaQuery<Root> criteriaQuery = query.select(from).where(cb.equal(from.get("id"), rootId));
            logger.info("criteria builder => " + criteriaBuilderEm.createQuery(criteriaQuery).getSingleResult());
            criteriaBuilderEm.close();
        } finally {
            factory.close();
            dataSource.close();
        }
        System.out.flush();
    }

    /**
     * Persists one Root with two Children in a single resource-local transaction.
     * On failure the transaction is rolled back; the EntityManager is always closed
     * (the original leaked both when persist/commit threw).
     *
     * @param entityManager manager to use; closed before returning.
     * @return the generated id of the persisted root.
     */
    private static long createGraph(final EntityManager entityManager) {
        final EntityTransaction transaction = entityManager.getTransaction();
        try {
            transaction.begin();
            final Root root = new Root();
            root.setName("root_1");
            entityManager.persist(root);
            final Child child1 = new Child();
            child1.setName("child_1");
            child1.setRoot(root);
            entityManager.persist(child1);
            final Child child2 = new Child();
            child2.setName("child_2");
            child2.setRoot(root);
            entityManager.persist(child2);
            transaction.commit();
            return root.getId();
        } finally {
            if (transaction.isActive()) { // commit not reached or failed
                transaction.rollback();
            }
            entityManager.close();
        }
    }

    /**
     * Creates an in-memory HSQLDB pool (MVCC transaction mode).
     * minIdle=1 keeps a connection open — presumably so the mem: database is not
     * discarded between borrows; confirm against HSQLDB semantics.
     */
    private static BasicDataSource createDataSource() throws SQLException {
        final BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName("org.hsqldb.jdbcDriver");
        dataSource.setUrl("jdbc:hsqldb:mem:arthur;hsqldb.tx=MVCC");
        dataSource.setUsername("SA");
        dataSource.setPassword("");
        dataSource.setMinIdle(1);
        return dataSource;
    }

    /**
     * Programmatic replacement for persistence.xml: resource-local unit named "arthur"
     * listing the two entities explicitly and backed by the given datasource.
     */
    private static PersistenceUnitInfo newInfo(final DataSource dataSource, final Properties properties) {
        return new PersistenceUnitInfo() {
            @Override
            public String getPersistenceUnitName() {
                return "arthur";
            }

            @Override
            public String getPersistenceProviderClassName() {
                return "org.apache.openjpa.persistence.PersistenceProviderImpl";
            }

            @Override
            public PersistenceUnitTransactionType getTransactionType() {
                return PersistenceUnitTransactionType.RESOURCE_LOCAL;
            }

            @Override
            public DataSource getJtaDataSource() {
                return dataSource;
            }

            @Override
            public DataSource getNonJtaDataSource() {
                return dataSource;
            }

            @Override
            public List<String> getMappingFileNames() {
                return emptyList();
            }

            @Override
            public List<URL> getJarFileUrls() {
                return emptyList();
            }

            @Override
            public URL getPersistenceUnitRootUrl() {
                return null; // no scanning: classes are listed explicitly below
            }

            @Override
            public List<String> getManagedClassNames() {
                return asList(Root.class.getName(), Child.class.getName());
            }

            @Override
            public boolean excludeUnlistedClasses() {
                return true;
            }

            @Override
            public SharedCacheMode getSharedCacheMode() {
                return SharedCacheMode.UNSPECIFIED;
            }

            @Override
            public ValidationMode getValidationMode() {
                return ValidationMode.AUTO;
            }

            @Override
            public Properties getProperties() {
                return properties;
            }

            @Override
            public String getPersistenceXMLSchemaVersion() {
                return "2.0";
            }

            @Override
            public ClassLoader getClassLoader() {
                return Thread.currentThread().getContextClassLoader();
            }

            @Override
            public void addTransformer(final ClassTransformer transformer) {
                // no-op: entities are expected to be enhanced at build time
            }

            @Override
            public ClassLoader getNewTempClassLoader() {
                return getClassLoader();
            }
        };
    }

    private static void setIfMissing(final String key, final String value) {
        System.setProperty(key, System.getProperty(key, value));
    }
}
| 6,155 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/openjpa/src/main/java/org/apache/geronimo/arthur/integrationtests | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/openjpa/src/main/java/org/apache/geronimo/arthur/integrationtests/entities/Root.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests.entities;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import java.util.List;
import java.util.Objects;
/**
 * Parent JPA entity of the test graph; owns a one-to-many relationship to
 * {@link Child}, ordered by child name descending.
 */
@Entity
public class Root {
    @Id
    @GeneratedValue
    private long id;

    private String name;

    @OrderBy("name DESC")
    @OneToMany(mappedBy = "root")
    private List<Child> children;

    public long getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setName(final String name) {
        this.name = name;
    }

    public List<Child> getChildren() {
        return children;
    }

    public void setChildren(final List<Child> children) {
        this.children = children;
    }

    @Override
    public String toString() {
        return "root:id=" + id + ",name=" + name + ",children=" + children;
    }

    /**
     * Identity is the generated id only.
     * NOTE(review): strict getClass() comparison may reject provider-generated
     * subclasses if the JPA provider proxies entities — confirm for OpenJPA.
     */
    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        final Root other = (Root) o;
        return other.id == id;
    }

    @Override
    public int hashCode() {
        return Objects.hash(id);
    }
}
| 6,156 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/openjpa/src/main/java/org/apache/geronimo/arthur/integrationtests | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/openjpa/src/main/java/org/apache/geronimo/arthur/integrationtests/entities/Child.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests.entities;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import java.util.Objects;
/**
 * Child JPA entity of the test graph; owning side of the many-to-one
 * relationship back to {@link Root}.
 */
@Entity
public class Child {
    @Id
    @GeneratedValue
    private long id;

    private String name;

    @ManyToOne
    private Root root;

    public long getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setName(final String name) {
        this.name = name;
    }

    public Root getRoot() {
        return root;
    }

    public void setRoot(final Root root) {
        this.root = root;
    }

    @Override
    public String toString() {
        return "child:id=" + id + ",name=" + name;
    }

    /** Identity is the generated id only (see note on Root#equals). */
    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        final Child other = (Child) o;
        return other.id == id;
    }

    @Override
    public int hashCode() {
        return Objects.hash(id);
    }
}
| 6,157 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/cuilliere/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/integration-test/src/test/resources/integration-tests/cuilliere/src/main/java/org/apache/geronimo/arthur/integrationtests/Cuillere.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests;
/**
* Simple test just printing some output.
*/
/**
 * Minimal smoke-test entry point: prints a single line so the native-image
 * integration test can assert on the program output.
 */
public final class Cuillere {
    private Cuillere() {
        // utility class, never instantiated
    }

    public static void main(final String[] args) {
        final String message = "Cui-yère";
        System.out.println(message);
    }
}
| 6,158 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/integration-test/src/test/java/org/apache/geronimo/arthur/integrationtests/MavenTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests;
import org.apache.geronimo.arthur.integrationtests.container.MavenContainer;
import org.apache.geronimo.arthur.integrationtests.junit5.Invocation;
import org.apache.geronimo.arthur.integrationtests.junit5.Spec;
import org.apache.sshd.common.file.virtualfs.VirtualFileSystemFactory;
import org.apache.sshd.server.SshServer;
import org.apache.sshd.server.auth.BuiltinUserAuthFactories;
import org.apache.sshd.server.command.AbstractCommandSupport;
import org.apache.sshd.server.keyprovider.SimpleGeneratorHostKeyProvider;
import org.junit.jupiter.api.Test;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.Objects;
import static java.util.Collections.singletonList;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Integration tests: each {@link Spec}-annotated method names a sample project
 * (by default integration-tests/&lt;methodName&gt;) that the extension copies into the
 * shared Maven container, builds into a native image and executes, asserting on
 * the binary's output. Kept byte-identical apart from documentation: the SSH
 * server lifecycle and the system-property handshake are order-sensitive.
 */
@Testcontainers
class MavenTest {
    // single shared Maven-in-Docker container reused by all tests of this class
    @Container
    private static final MavenContainer MVN = new MavenContainer();

    /** Simplest case: plain main() printing one line. */
    @Test
    @Spec(expectedOutput = "Cui-yère")
    void cuilliere() {}

    /** OpenWebBeans: startup observer ran once and the normal-scoped proxy works. */
    @Test
    @Spec(expectedOutput = "counter=1, from proxy=from proxy")
    void owb() {}

    /** OSGi SCR application start log line. */
    @Test
    @Spec(expectedOutput = "Starting org.apache.geronimo.arthur.integrationtests.Application")
    void scr() {}

    /** OpenJPA: find-by-id and criteria query both resolve the persisted graph. */
    @Test
    @Spec(expectedOutput = "" +
            "[main] INFO org.apache.geronimo.arthur.integrationtests.OpenJPAMain" +
            " - findbyid => root:id=1,name=root_1,children=[child:id=2,name=child_2, child:id=1,name=child_1]\n" +
            "[main] INFO org.apache.geronimo.arthur.integrationtests.OpenJPAMain" +
            " - criteria builder => root:id=1,name=root_1,children=[child:id=2,name=child_2, child:id=1,name=child_1]")
    void openjpa() {}

    /**
     * JSch: starts an in-JVM SSH server answering "pong" to the "ping" command, then
     * runs the containerized binary manually (via the injected Invocation) so the
     * dynamically chosen server port can be forwarded to it through the
     * MavenTest.jsch.port system property before the build/run happens.
     * NOTE(review): relies on the container using host networking so the binary can
     * reach the test JVM's port — see MavenContainer's network mode.
     */
    @Test
    @Spec(expectedOutput = "pong", forwardedExecutionSystemProperties = {
            "MavenTest.jsch.port", "java.library.path", "javax.net.ssl.trustStore"
    })
    void jsch(final Invocation invocation) {
        final SshServer ssh = SshServer.setUpDefaultServer();
        ssh.setFileSystemFactory(new VirtualFileSystemFactory(Paths.get("target/missing")));
        ssh.setPort(0); // pick any free port; actual value read after start()
        ssh.setKeyPairProvider(new SimpleGeneratorHostKeyProvider());
        ssh.setUserAuthFactories(singletonList(BuiltinUserAuthFactories.PASSWORD.create()));
        ssh.setPasswordAuthenticator((username, password, session) -> Objects.equals("test", username) && Objects.equals("testpwd", password));
        ssh.setCommandFactory((channel, command) -> {
            if ("ping".equals(command)) {
                return new AbstractCommandSupport("ping", null) {
                    @Override
                    public void run() {
                        try {
                            getOutputStream().write("pong".getBytes(StandardCharsets.UTF_8));
                            getExitCallback().onExit(0);
                        } catch (final IOException e) {
                            throw new IllegalStateException(e);
                        }
                    }
                };
            }
            throw new IllegalArgumentException(command);
        });
        try {
            ssh.start();
            // must be set before invocation.run(): the extension forwards it to the binary
            System.setProperty("MavenTest.jsch.port", Integer.toString(ssh.getPort()));
            invocation.run();
        } catch (final IOException e) {
            fail(e);
        } finally {
            System.clearProperty("MavenTest.jsch.port");
            if (ssh.isStarted()) {
                try {
                    ssh.close(true).await();
                } catch (final IOException e) {
                    throw new IllegalStateException(e);
                }
            }
        }
    }
}
| 6,159 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/java/org/apache/geronimo/arthur/integrationtests | Create_ds/geronimo-arthur/integration-test/src/test/java/org/apache/geronimo/arthur/integrationtests/container/MavenContainer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests.container;
import com.github.dockerjava.api.DockerClient;
import com.github.dockerjava.api.exception.NotFoundException;
import lombok.extern.slf4j.Slf4j;
import org.testcontainers.DockerClientFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.images.builder.ImageFromDockerfile;
import java.util.Optional;
/**
 * Testcontainers container providing Maven plus the native-image build toolchain.
 * The base image is either taken from -Darthur.container.maven.image or lazily
 * built once (maven image + gcc/g++/libc/zlib dev packages) and cached locally.
 */
@Slf4j
public class MavenContainer extends GenericContainer<MavenContainer> {
    public MavenContainer() {
        super(findImage());
        setWorkingDirectory("/opt/geronimo/arthur/integration-test");
        setCommand("sleep", "infinity");
        // share the host repository so artifact downloads are cached across runs
        withFileSystemBind(System.getProperty("arthur.m2.repository"), "/root/.m2/repository");
        // enable to start a server in a test and connect on it from the mvn container
        setNetworkMode(System.getProperty("arthur.container.maven.network", "host"));
    }

    private static String findImage() {
        final String configured = System.getProperty("arthur.container.maven.image", "auto");
        if (!"auto".equals(configured)) {
            return configured;
        }
        return getOrCreateAutoBaseImage();
    }

    /**
     * Installing the toolchain at start() time would be slow, so it is baked into a
     * cached image instead. The image is kept between runs to speed tests up; pass
     * -Darthur.container.maven.deleteOnExit=true to remove it automatically.
     */
    private static String getOrCreateAutoBaseImage() {
        final String fromImage = System.getProperty("arthur.container.maven.baseimage", "maven:3.6.3-jdk-8-slim");
        // derive the tag from the source image so several maven/jdk combinations can coexist
        final String tag = fromImage.split(":")[1];
        final String targetImage = "apache/geronimo/arthur/maven-test-base:" + tag;
        final DockerClient client = DockerClientFactory.instance().client();
        try {
            client.inspectImageCmd(targetImage).exec();
            return targetImage; // already built by a previous run
        } catch (final NotFoundException e) {
            log.info("Didn't find '{}', creating it from '{}'", targetImage, fromImage);
            return new ImageFromDockerfile(
                    targetImage, Boolean.getBoolean("arthur.container.maven.deleteOnExit"))
                    .withDockerfileFromBuilder(builder -> builder.from(fromImage)
                            .run("apt update && apt install -y gcc g++ libc6-dev zlib1g-dev")
                            .label("org.apache.geronimo.arthur.environment", "integration-tests")
                            .label("org.apache.geronimo.arthur.baseImage", fromImage)
                            .label("org.apache.geronimo.arthur.tag", tag))
                    .get();
        }
    }
}
| 6,160 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/java/org/apache/geronimo/arthur/integrationtests | Create_ds/geronimo-arthur/integration-test/src/test/java/org/apache/geronimo/arthur/integrationtests/junit5/Invocation.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests.junit5;
/**
 * Strongly typed {@link Runnable} representing one test execution: the Maven
 * build inside the container followed by the run of the produced native binary.
 * Tests that need init/destroy phases around the build declare it as a method
 * parameter and invoke it themselves, which lets them wrap this phase with
 * their own setup and teardown.
 */
public interface Invocation extends Runnable {
}
| 6,161 |
0 | Create_ds/geronimo-arthur/integration-test/src/test/java/org/apache/geronimo/arthur/integrationtests | Create_ds/geronimo-arthur/integration-test/src/test/java/org/apache/geronimo/arthur/integrationtests/junit5/Spec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.integrationtests.junit5;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.geronimo.arthur.integrationtests.container.MavenContainer;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.ParameterResolutionException;
import org.junit.jupiter.api.extension.ParameterResolver;
import org.junit.platform.commons.util.AnnotationUtils;
import org.testcontainers.containers.Container.ExecResult;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.utility.MountableFile;
import java.io.IOException;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.stream.Stream;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static java.util.Objects.requireNonNull;
import static java.util.Optional.of;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.joining;
import static org.apache.geronimo.arthur.integrationtests.junit5.Spec.ExpectedType.EQUALS;
import static org.apache.ziplock.JarLocation.jarFromResource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Declares, on a test method, the sample project to build with the Arthur Maven
 * plugin inside the {@link MavenContainer} and the expectations on the produced
 * native binary (exit code and output). {@link Impl} performs the copy/build/run.
 */
@Target(METHOD)
@Retention(RUNTIME)
@ExtendWith(Spec.Impl.class)
public @interface Spec {
    /** Project folder (classpath resource); defaults to integration-tests/&lt;testMethodName&gt;. */
    String project() default "";

    /** Path of the produced binary inside the container working directory. */
    String binary() default "./target/${project.artifactId}.graal.bin";

    /** Expected exit code of the binary execution. */
    int exitCode() default 0;

    /** Expected stdout+stderr, compared according to {@link #expectedType()}. */
    String expectedOutput() default "";

    ExpectedType expectedType() default EQUALS;

    /** System properties forwarded from the test JVM to the binary execution command line. */
    String[] forwardedExecutionSystemProperties() default {};

    /** Comparison strategies for {@link #expectedOutput()}. */
    enum ExpectedType {
        EQUALS(Assertions::assertEquals),
        EQUALS_TRIMMED((a, b) -> assertEquals(a, b.trim(), b)),
        MATCHES((a, b) -> assertTrue(a.matches(b), b));

        private final BiConsumer<String, String> assertFn;

        ExpectedType(final BiConsumer<String, String> assertFn) {
            this.assertFn = assertFn;
        }
    }

    @Slf4j
    // todo: make it parallelisable?
    class Impl implements BeforeEachCallback, AfterEachCallback, ParameterResolver {
        public static final ExtensionContext.Namespace NAMESPACE = ExtensionContext.Namespace.create(Impl.class);

        /**
         * Copies the sample project into the container, builds it and runs the binary,
         * then asserts exit code and output. When the test method declares an
         * {@link Invocation} parameter the execution is stored and deferred to the test,
         * which wraps it with its own setup/teardown.
         */
        @Override
        public void beforeEach(final ExtensionContext context) throws Exception {
            final Method method = context.getRequiredTestMethod();
            final Optional<Spec> specOpt = AnnotationUtils.findAnnotation(method, Spec.class);
            if (!specOpt.isPresent()) {
                return;
            }
            final MavenContainer mvn = findContainer(context);
            final Spec spec = specOpt.orElseThrow(IllegalStateException::new);
            final ExtensionContext.Store store = context.getStore(NAMESPACE);
            store.put(Spec.class, spec);
            store.put(MavenContainer.class, mvn);
            final Invocation invocation = () -> {
                final String project = of(spec.project())
                        .filter(it -> !it.isEmpty())
                        .orElseGet(() -> "integration-tests/" + context.getRequiredTestMethod().getName());
                final Path root = jarFromResource(project).toPath().resolve(project);
                final Collection<String> files = copyProject(mvn, root, spec);
                store.put(CopiedFiles.class, new CopiedFiles(mvn, files));
                log.info("Compiling the project '" + project.substring(project.lastIndexOf('/') + 1) + "'");
                final ExecResult result = buildAndRun(
                        mvn, spec.binary().replace("${project.artifactId}", findArtifactId(root.resolve("pom.xml"))),
                        spec.forwardedExecutionSystemProperties());
                log.info("Exit code: {}", result.getExitCode());
                log.info("Stdout:\n>{}<", result.getStdout());
                log.info("Stderr:\n>{}<", result.getStderr());
                store.put(ExecResult.class, result);
                assertEquals(spec.exitCode(), result.getExitCode(), () -> result.getStdout() + result.getStderr());
                spec.expectedType().assertFn.accept(
                        spec.expectedOutput(),
                        String.join("\n", result.getStdout(), result.getStderr()).trim());
            };
            if (Stream.of(method.getParameterTypes()).noneMatch(it -> it == Invocation.class)) {
                invocation.run();
            } else { // the test calls it itself since it requires some custom init/destroy
                store.put(Invocation.class, invocation);
            }
        }

        /** Ensures the build actually ran and removes the copied project from the container. */
        @Override
        public void afterEach(final ExtensionContext context) {
            final Optional<CopiedFiles> copiedFiles = ofNullable(context.getStore(NAMESPACE).get(CopiedFiles.class, CopiedFiles.class));
            assertTrue(copiedFiles.isPresent(), "Maven build not executed");
            copiedFiles
                    .filter(f -> !f.files.isEmpty())
                    .ifPresent(this::cleanFolder);
        }

        /** Extracts the first &lt;artifactId&gt; of the pom, used to resolve the binary path. */
        private String findArtifactId(final Path pom) {
            final String start = " <artifactId>";
            // try-with-resources: the stream returned by Files.lines holds the file open
            // and was previously leaked
            try (Stream<String> lines = Files.lines(pom)) {
                return lines
                        .filter(it -> it.startsWith(start))
                        .map(it -> it.substring(it.indexOf(start) + start.length(), it.indexOf('<', start.length() + 1)))
                        .findFirst()
                        .orElseThrow(() -> new IllegalArgumentException("No artifactId found in " + pom));
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }

        private void cleanFolder(final CopiedFiles files) {
            try {
                files.mvn.execInContainer(Stream.concat(
                        Stream.of("rm", "-Rf", "target"),
                        files.files.stream().map(it -> it.replace("\"", "\\\""))
                ).toArray(String[]::new));
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }

        /** Locates the static @Container MavenContainer field on the test class. */
        private MavenContainer findContainer(ExtensionContext context) throws IllegalAccessException {
            final Object instance = context.getRequiredTestInstance();
            final Field containerField = AnnotationUtils.findAnnotatedFields(instance.getClass(), Container.class, i -> true).stream()
                    .filter(it -> MavenContainer.class == it.getType() && Modifier.isStatic(it.getModifiers()))
                    .findFirst()
                    .orElseThrow(IllegalStateException::new);
            if (!containerField.isAccessible()) {
                containerField.setAccessible(true);
            }
            return MavenContainer.class.cast(containerField.get(null));
        }

        /**
         * Recursively copies the sample project into the container working directory.
         *
         * @return the container paths of the copied files, for later cleanup.
         */
        private Collection<String> copyProject(final MavenContainer mvn, final Path root, final Spec spec) {
            final Collection<String> files = new ArrayList<>();
            try {
                Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
                    @Override
                    public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException {
                        final String target = Paths.get(requireNonNull(mvn.getWorkingDirectory(), "mvn workdir is null"))
                                .resolve(root.relativize(file)).toString();
                        mvn.copyFileToContainer(
                                MountableFile.forHostPath(file),
                                target);
                        files.add(target);
                        log.debug("Copied '{}' to container '{}'", file, target);
                        return super.visitFile(file, attrs);
                    }
                });
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
            return files;
        }

        /** Runs the Maven build then executes the binary with the forwarded system properties. */
        private ExecResult buildAndRun(final MavenContainer mvn, final String binary,
                                       final String[] systemProps) {
            try {
                final ExecResult build = mvn.execInContainer("mvn", "-e", "package", "arthur:native-image");
                if (log.isDebugEnabled()) {
                    log.debug("Exit status: {}, Output:\n{}", build.getExitCode(), toMvnOutput(build));
                }
                assertEquals(0, build.getExitCode(), () -> toMvnOutput(build));
                final String[] command = Stream.concat(
                        Stream.of(binary),
                        Stream.of(systemProps).map(it -> "-D" + it + '=' + lookupSystemProperty(it)
                                .replace("$JAVA_HOME", "/usr/local/openjdk-8")))
                        .toArray(String[]::new);
                return mvn.execInContainer(command);
            } catch (final InterruptedException ie) {
                Thread.currentThread().interrupt();
                throw new IllegalStateException(ie);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }

        /** Maps a few JVM-location-dependent properties to their in-container values. */
        private String lookupSystemProperty(final String it) {
            switch (it) {
                case "java.library.path":
                    return "$JAVA_HOME/jre/lib/amd64";
                case "javax.net.ssl.trustStore":
                    return "$JAVA_HOME/jre/lib/security/cacerts";
                default:
                    return System.getProperty(it);
            }
        }

        private String toMvnOutput(final ExecResult mvnResult) {
            return Stream.of(mvnResult.getStdout(), mvnResult.getStderr())
                    .map(it -> it
                            // workaround an issue with mvn/slf4j output through testcontainers
                            .replace("\n", "")
                            .replace("[INFO] ", "\n[INFO] ")
                            .replace("[WARNING] ", "\n[WARNING] ")
                            .replace("[ERROR] ", "\n[ERROR] ")
                            .replace(" at", "\n at")
                            .replace("Caused by:", "\nCaused by:")
                            .replace("ms[", "ms\n["))
                    .collect(joining("\n"));
        }

        @Override
        public boolean supportsParameter(final ParameterContext parameterContext, final ExtensionContext extensionContext) throws ParameterResolutionException {
            return resolveParameter(parameterContext, extensionContext) != null;
        }

        @Override
        public Object resolveParameter(final ParameterContext parameterContext, final ExtensionContext extensionContext) throws ParameterResolutionException {
            final Class<?> type = parameterContext.getParameter().getType();
            return extensionContext.getStore(NAMESPACE).get(type, type);
        }

        /** Container + copied file list, kept for cleanup in afterEach. */
        @RequiredArgsConstructor
        private static final class CopiedFiles {
            private final MavenContainer mvn;
            private final Collection<String> files;
        }
    }
}
| 6,162 |
0 | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi/ArthurExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.spi;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiPredicate;
import java.util.function.Predicate;
import java.util.stream.Stream;
import org.apache.geronimo.arthur.spi.model.ClassReflectionModel;
import org.apache.geronimo.arthur.spi.model.DynamicProxyModel;
import org.apache.geronimo.arthur.spi.model.ResourceBundleModel;
import org.apache.geronimo.arthur.spi.model.ResourceModel;
/**
 * Enables an extension to enrich the native image build with some metadata
 * (reflection, resources, proxies, initialization options...).
 */
public interface ArthurExtension {
    /**
     * @return the priority of this extension (natural order sorting).
     */
    default int order() {
        return 0;
    }

    /**
     * Executes this extension against the current build.
     *
     * @param context current build context.
     */
    void execute(Context context);

    /**
     * Enables to mutate the native image command generation and to manipulate current "context" (classloader).
     */
    interface Context {
        /**
         * Find classes based on annotation on classes.
         *
         * @param annotation the marker annotation to look for.
         * @param <T> the annotation type.
         * @return the list of classes with this annotation.
         */
        <T extends Annotation> Collection<Class<?>> findAnnotatedClasses(Class<T> annotation);

        /**
         * Similar to {@link #findAnnotatedClasses(Class)} but at method level.
         *
         * @param annotation the marker annotation to look for.
         * @param <T> the annotation type.
         * @return the list of methods with this annotation.
         */
        <T extends Annotation> Collection<Method> findAnnotatedMethods(Class<T> annotation);

        /**
         * Similar to {@link #findAnnotatedMethods(Class)} but at field level.
         *
         * @param annotation the marker annotation to look for.
         * @param <T> the annotation type.
         * @return the list of fields with this annotation.
         */
        <T extends Annotation> Collection<Field> findAnnotatedFields(Class<T> annotation);

        /**
         * Find subclasses and implementation of a parent.
         *
         * @param parent the parent class to use as marker.
         * @param <T> the type of the parent.
         * @return the list of children classes.
         */
        <T> Collection<Class<? extends T>> findImplementations(Class<T> parent);

        /**
         * Adds a reflection model in the context, if it already exists it is replaced.
         *
         * @param classReflectionModel the instance to register.
         */
        void register(ClassReflectionModel classReflectionModel);

        /**
         * Adds a resource model in the context, if it already exists it is replaced.
         *
         * @param resourceModel the instance to register.
         */
        void register(ResourceModel resourceModel);

        /**
         * Adds a bundle model in the context, if it already exists it is replaced.
         *
         * @param resourceModel the instance to register.
         */
        void register(ResourceBundleModel resourceModel);

        /**
         * Adds a proxy model in the context, if it already exists it is replaced.
         *
         * @param dynamicProxyModel the instance to register.
         */
        void register(DynamicProxyModel dynamicProxyModel);

        /**
         * Enables java security in the native image.
         */
        void enableAllSecurityServices();

        /**
         * Includes charsets in the native image.
         */
        void enableAllCharsets();

        /**
         * Includes a resource bundle (directly from the CLI since graal has some bugs on that as of today).
         *
         * @param name the bundle name to include.
         */
        void includeResourceBundle(String name);

        /**
         * Forces classes to be initialized during the build and not at run time.
         *
         * @param classes classes to initialize.
         */
        void initializeAtBuildTime(String... classes);

        /**
         * Forces classes to be initialized during the run only, not the build.
         *
         * @param classes classes to initialize.
         */
        void initializeAtRunTime(String... classes);

        /**
         * Retrieves a context property, used to configure an extension.
         *
         * @param key the key to read.
         * @return the value or null if missing.
         */
        String getProperty(String key);

        /**
         * Sets a property in the context, it can be used if extensions are chained for example.
         *
         * @param key the property key.
         * @param value the value to set for the specified key.
         */
        void setProperty(String key, String value);

        /**
         * Adds a specific native image option to the command line. Useful for custom graal extension options.
         *
         * @param option the command line option to set.
         */
        void addNativeImageOption(String option);

        /**
         * Loads a class in current context.
         *
         * @param name the class name to load.
         * @return the loaded class.
         * @throws IllegalStateException if the class can't be found.
         */
        Class<?> loadClass(String name);

        /**
         * Enables to add to native-image execution custom classes generated before the native-image execution.
         * It can be common for proxies and classes created at runtime normally.
         *
         * @param name the class name.
         * @param bytecode the class bytecode.
         */
        void registerGeneratedClass(final String name, final byte[] bytecode);

        /**
         * Creates a stream of all classes (class, super classes and interfaces) for the specified class.
         *
         * @param from the class to look the hierarchy for.
         * @return the class hierarchy.
         */
        Stream<Class<?>> findHierarchy(Class<?> from);

        /**
         * Creates a predicate from the specified context property (key). It uses the value as a comma separated list.
         *
         * @param property the key to read the property from in the context.
         * @param type matching type.
         * @return an optional predicate for the specified property.
         */
        Optional<Predicate<String>> createPredicate(String property, PredicateType type);

        /**
         * Uses a base property suffixed with "includes" and "excludes" to create a matching predicate.
         * It relies on {@link #createPredicate(String, PredicateType)} and combines both in a single predicate.
         *
         * @param propertyBase the prefix to use to read predicate properties.
         * @param type the type of matching.
         * @return the predicate for that properties pair.
         */
        Predicate<String> createIncludesExcludes(String propertyBase, PredicateType type);

        /**
         * Tries to unwrap current context in another type.
         *
         * @param type type to extract.
         * @param <T> type to extract.
         * @return the extracted instance.
         */
        <T> T unwrap(Class<T> type);
    }

    /**
     * Matching strategies usable by {@link Context#createPredicate(String, PredicateType)};
     * first argument is the configured value, second is the tested item.
     */
    enum PredicateType implements BiPredicate<String, String> {
        EQUALS {
            @Override
            public boolean test(final String value, final String item) {
                return Objects.equals(value, item);
            }
        },
        STARTS_WITH {
            @Override
            public boolean test(final String value, final String item) {
                return item != null && item.startsWith(value);
            }
        },
        MATCHES {
            @Override
            public boolean test(final String value, final String item) {
                // value is interpreted as a regex, see String#matches.
                return item != null && item.matches(value);
            }
        }
    }
}
| 6,163 |
0 | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi/model/ResourceBundleModel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Model describing a resource bundle to include in the native image
 * (see {@code ArthurExtension.Context#register(ResourceBundleModel)}).
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ResourceBundleModel {
    // Fully qualified bundle name.
    private String name;
}
| 6,164 |
0 | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi/model/ResourceModel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Model describing a resource to include in the native image
 * (see {@code ArthurExtension.Context#register(ResourceModel)}).
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ResourceModel {
    // Resource matching pattern — presumably a regex as in GraalVM resource-config; TODO confirm.
    private String pattern;
}
| 6,165 |
0 | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi/model/DynamicProxyModel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Collection;
/**
 * Model describing a dynamic proxy to pre-generate for the native image
 * (see {@code ArthurExtension.Context#register(DynamicProxyModel)}).
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class DynamicProxyModel {
    // Names of the interfaces the proxy implements.
    private Collection<String> classes;
}
| 6,166 |
0 | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi/model/ClassReflectionModel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Collection;
import java.util.Objects;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toList;
/**
 * Reflection registration model for a single class (per-class entry of the
 * native image reflection configuration).
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ClassReflectionModel {
    private String name;
    private Condition condition; // >= GraalVM 22.2
    private Boolean allDeclaredConstructors;
    private Boolean allPublicConstructors;
    private Boolean allDeclaredMethods;
    private Boolean allPublicMethods;
    private Boolean allDeclaredClasses;
    private Boolean allPublicClasses;
    private Boolean allDeclaredFields;
    private Boolean allPublicFields;
    private Boolean queryAllDeclaredMethods;
    private Boolean queryAllDeclaredConstructors;
    private Boolean queryAllPublicMethods;
    private Boolean queryAllPublicConstructors;
    private Boolean unsafeAllocated;
    private Collection<FieldReflectionModel> fields;
    private Collection<MethodReflectionModel> methods;
    private Collection<MethodReflectionModel> queriedMethods;

    /**
     * Backward compatible constructor predating the {@code condition}, {@code query*}
     * and {@code unsafeAllocated} attributes.
     */
    public ClassReflectionModel(final String name,
                                final Boolean allDeclaredConstructors, final Boolean allPublicConstructors,
                                final Boolean allDeclaredMethods, final Boolean allPublicMethods,
                                final Boolean allDeclaredClasses, final Boolean allPublicClasses,
                                final Boolean allDeclaredFields, final Boolean allPublicFields,
                                final Collection<FieldReflectionModel> fields, final Collection<MethodReflectionModel> methods) {
        this(name, null, allDeclaredConstructors, allPublicConstructors, allDeclaredMethods, allPublicMethods, allDeclaredClasses, allPublicClasses, allDeclaredFields, allPublicFields, null, null, null, null, null, fields, methods, null);
    }

    /**
     * @param name the class to register.
     * @return a model registering all public constructors, methods and fields of {@code name}.
     */
    public ClassReflectionModel allPublic(final String name) {
        return new ClassReflectionModel(name, null, null, true, null, true, null, null, null, true, null, null, null, null, null, null, null, null);
    }

    /**
     * @param name the class to register.
     * @return a model registering all public constructors of {@code name}.
     */
    public ClassReflectionModel allPublicConstructors(final String name) {
        return new ClassReflectionModel(name, null, null, true, null, null, null, null, null, null, null, null, null, null, null, null, null, null);
    }

    /**
     * @param name the class to register.
     * @return a model registering all declared constructors of {@code name}.
     */
    public ClassReflectionModel allDeclaredConstructors(final String name) {
        // FIX: the flag was previously passed as the 5th constructor argument
        // (allDeclaredMethods); the allDeclaredConstructors flag is the 3rd argument,
        // consistent with allDeclared(String) below.
        return new ClassReflectionModel(name, null, true, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);
    }

    /**
     * @param name the class to register.
     * @return a model registering all declared constructors, methods and fields of {@code name}.
     */
    public ClassReflectionModel allDeclared(final String name) {
        return new ClassReflectionModel(name, null, true, null, true, null, null, null, true, null, null, null, null, null, null, null, null, null);
    }

    /**
     * Merges {@code other} into this model: boolean flags are only ever promoted to
     * {@code true} (never reset), member collections are concatenated keeping distinct entries.
     *
     * @param other the model to merge into this one.
     */
    public void merge(final ClassReflectionModel other) {
        if (other.getAllDeclaredClasses() != null && other.getAllDeclaredClasses()) {
            setAllDeclaredClasses(true);
        }
        if (other.getAllDeclaredFields() != null && other.getAllDeclaredFields()) {
            setAllDeclaredFields(true);
        }
        if (other.getAllDeclaredConstructors() != null && other.getAllDeclaredConstructors()) {
            setAllDeclaredConstructors(true);
        }
        if (other.getAllDeclaredMethods() != null && other.getAllDeclaredMethods()) {
            setAllDeclaredMethods(true);
        }
        if (other.getAllPublicMethods() != null && other.getAllPublicMethods()) {
            setAllPublicMethods(true);
        }
        if (other.getAllPublicFields() != null && other.getAllPublicFields()) {
            setAllPublicFields(true);
        }
        if (other.getAllPublicConstructors() != null && other.getAllPublicConstructors()) {
            setAllPublicConstructors(true);
        }
        if (other.getAllPublicClasses() != null && other.getAllPublicClasses()) {
            setAllPublicClasses(true);
        }
        if (other.getQueryAllDeclaredMethods() != null && other.getQueryAllDeclaredMethods()) {
            setQueryAllDeclaredMethods(true);
        }
        if (other.getQueryAllDeclaredConstructors() != null && other.getQueryAllDeclaredConstructors()) {
            setQueryAllDeclaredConstructors(true);
        }
        if (other.getQueryAllPublicMethods() != null && other.getQueryAllPublicMethods()) {
            setQueryAllPublicMethods(true);
        }
        if (other.getQueryAllPublicConstructors() != null && other.getQueryAllPublicConstructors()) {
            setQueryAllPublicConstructors(true);
        }
        if (other.getUnsafeAllocated() != null && other.getUnsafeAllocated()) {
            setUnsafeAllocated(true);
        }
        setFields(merge(other.getFields(), getFields()));
        setMethods(merge(other.getMethods(), getMethods()));
        setQueriedMethods(merge(other.getQueriedMethods(), getQueriedMethods()));
    }

    /** Null-safe distinct union of two collections; null when both inputs are null. */
    private <T> Collection<T> merge(final Collection<T> v1, final Collection<T> v2) {
        if (v1 == null && v2 == null) {
            return null;
        }
        return Stream.of(v1, v2).filter(Objects::nonNull).flatMap(Collection::stream).distinct().collect(toList());
    }

    /** Reflection registration of a single field. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class FieldReflectionModel {
        private String name;
        private Boolean allowWrite;
        private Boolean allowUnsafeAccess;

        /** Backward compatible constructor predating {@code allowUnsafeAccess}. */
        public FieldReflectionModel(final String name, final Boolean allowWrite) {
            this.name = name;
            this.allowWrite = allowWrite;
        }
    }

    /** Reflection registration of a single method, identified by name and parameter types. */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class MethodReflectionModel {
        private String name;
        private Collection<Class<?>> parameterTypes;
    }

    /** Conditional registration support (>= GraalVM 22.2). */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class Condition {
        // Registration only applies once this type is reachable.
        private String typeReachable;
    }
}
| 6,167 |
0 | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi | Create_ds/geronimo-arthur/arthur-spi/src/main/java/org/apache/geronimo/arthur/spi/model/ResourcesModel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.spi.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Collection;
/**
 * Aggregate of resource and resource bundle registrations
 * (mirrors the shape of a resources configuration document).
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ResourcesModel {
    // Individual resource patterns to include.
    private Collection<ResourceModel> resources;
    // Resource bundles to include.
    private Collection<ResourceBundleModel> bundles;
}
| 6,168 |
0 | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur/api/RegisterField.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.api;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
 * Marks a field for reflection registration in the native image
 * (presumably consumed through {@code ArthurExtension.Context#findAnnotatedFields} — confirm).
 */
@Target(FIELD)
@Retention(RUNTIME)
public @interface RegisterField {
    /**
     * @return true if the field should also be registered as writable through reflection.
     */
    boolean allowWrite() default false;
}
| 6,169 |
0 | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur/api/RegisterClasses.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.api;
import java.lang.annotation.Repeatable;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Enables registering classes which are not in the project itself.
 */
@Target(TYPE)
@Retention(RUNTIME)
public @interface RegisterClasses {
    /**
     * @return the classes to register.
     */
    Entry[] value();

    /**
     * Represents a class registered through {@link RegisterClasses}.
     * Use either {@link #clazz()} or {@link #className()} to identify the class.
     */
    @Retention(RUNTIME)
    @Repeatable(RegisterClasses.class)
    @interface Entry {
        /**
         * @return the class ref if accessible in the project.
         */
        Class<?> clazz() default Object.class;

        /**
         * @return the classname if not always accessible in the project.
         */
        String className() default "";

        /**
         * @return how to register the class in the graal build.
         */
        RegisterClass registration() default @RegisterClass;
    }
}
| 6,170 |
0 | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur/api/RegisterClass.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.api;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Describes how a class is registered for reflection in the native image.
 * Each flag mirrors a reflection-configuration attribute of the same name.
 */
@Target(TYPE)
@Retention(RUNTIME)
public @interface RegisterClass {
    boolean allDeclaredConstructors() default false;

    boolean allPublicConstructors() default false;

    boolean allDeclaredMethods() default false;

    boolean allPublicMethods() default false;

    boolean allDeclaredClasses() default false;

    boolean allPublicClasses() default false;

    boolean allDeclaredFields() default false;

    boolean allPublicFields() default false;

    /**
     * @return alias for allDeclared*.
     */
    boolean all() default false;
}
| 6,171 |
0 | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur/api/RegisterMethod.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.api;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
 * Marks a method for reflection registration in the native image
 * (presumably consumed through {@code ArthurExtension.Context#findAnnotatedMethods} — confirm).
 */
@Target(METHOD)
@Retention(RUNTIME)
public @interface RegisterMethod {
}
| 6,172 |
0 | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur | Create_ds/geronimo-arthur/arthur-api/src/main/java/org/apache/geronimo/arthur/api/RegisterResource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.arthur.api;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
 * Declares resources and resource bundles to embed in the native image.
 */
@Target(TYPE)
@Retention(RUNTIME)
public @interface RegisterResource {
    /**
     * @return resource path patterns to include.
     */
    String[] patterns() default {};

    /**
     * @return resource bundle names to include.
     */
    String[] bundles() default {};
}
| 6,173 |
0 | Create_ds/amazon-ivs-react-native-player/example/android/app/src/androidTest/java/com/example | Create_ds/amazon-ivs-react-native-player/example/android/app/src/androidTest/java/com/example/amazonivsreactnativeplayer/DetoxTest.java | package com.example.amazonivsreactnativeplayer;
import com.wix.detox.Detox;
import com.wix.detox.config.DetoxConfig;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.LargeTest;
import androidx.test.rule.ActivityTestRule;
/** Instrumentation entry point running the Detox end-to-end suite. */
@RunWith(AndroidJUnit4.class)
@LargeTest
public class DetoxTest {
    // Launch is deferred (launchActivity=false): Detox starts the activity itself.
    @Rule
    public ActivityTestRule<MainActivity> mActivityRule = new ActivityTestRule<>(MainActivity.class, false, false);

    @Test
    public void runDetoxTests() {
        // Same timeouts as before, assembled step by step for readability.
        final DetoxConfig config = new DetoxConfig();
        config.idlePolicyConfig.masterTimeoutSec = 90;
        config.idlePolicyConfig.idleResourceTimeoutSec = 60;
        // Debug bundles load much slower, so allow a longer RN context timeout there.
        config.rnContextLoadTimeoutSec = com.amazonaws.ivs.reactnative.player.BuildConfig.DEBUG ? 180 : 60;
        Detox.runTests(mActivityRule, config);
    }
}
| 6,174 |
0 | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example/amazonivsreactnativeplayer/MainApplication.java | package com.example.amazonivsreactnativeplayer;
import android.app.Application;
import android.content.Context;
import com.facebook.react.PackageList;
import com.facebook.react.ReactApplication;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.react.config.ReactFeatureFlags;
import com.facebook.react.ReactInstanceManager;
import com.facebook.soloader.SoLoader;
import com.example.amazonivsreactnativeplayer.newarchitecture.MainApplicationReactNativeHost;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
import com.amazonaws.ivs.reactnative.player.AmazonIvsPackage;
import com.facebook.react.bridge.JSIModulePackage;
import com.swmansion.reanimated.ReanimatedJSIModulePackage;
/**
 * Application entry point wiring the React Native hosts (classic and New Architecture)
 * for the example app.
 */
public class MainApplication extends Application implements ReactApplication {
    /** Host used when the app runs with the classic (pre-Fabric) architecture. */
    private final ReactNativeHost mReactNativeHost =
        new ReactNativeHost(this) {
            @Override
            public boolean getUseDeveloperSupport() {
                return BuildConfig.DEBUG;
            }

            @Override
            protected List<ReactPackage> getPackages() {
                @SuppressWarnings("UnnecessaryLocalVariable")
                List<ReactPackage> packages = new PackageList(this).getPackages();
                // Packages that cannot be autolinked yet can be added manually here, for AmazonIvsExample:
                // packages.add(new MyReactNativePackage());
                packages.add(new AmazonIvsPackage());
                return packages;
            }

            @Override
            protected String getJSMainModuleName() {
                return "index";
            }

            @Override
            protected JSIModulePackage getJSIModulePackage() {
                // Reanimated installs its JSI bindings through this package.
                return new ReanimatedJSIModulePackage();
            }
        };

    /** Host used when the New Architecture (Fabric/TurboModules) is enabled. */
    private final ReactNativeHost mNewArchitectureNativeHost = new MainApplicationReactNativeHost(this);

    @Override
    public ReactNativeHost getReactNativeHost() {
        if (BuildConfig.IS_NEW_ARCHITECTURE_ENABLED) {
            return mNewArchitectureNativeHost;
        } else {
            return mReactNativeHost;
        }
    }

    @Override
    public void onCreate() {
        super.onCreate();
        // If you opted-in for the New Architecture, we enable the TurboModule system
        ReactFeatureFlags.useTurboModules = BuildConfig.IS_NEW_ARCHITECTURE_ENABLED;
        SoLoader.init(this, /* native exopackage */ false);
    }

    /**
     * Loads Flipper in React Native templates.
     *
     * <p>NOTE(review): this helper is never invoked from this class; React Native templates
     * usually call it from {@code onCreate()} — confirm whether the omission is intended.
     *
     * @param context the application context.
     * @param reactInstanceManager the instance manager handed to Flipper.
     */
    private static void initializeFlipper(Context context, ReactInstanceManager reactInstanceManager) {
        if (BuildConfig.DEBUG) {
            try {
                /*
                We use reflection here to pick up the class that initializes Flipper,
                since Flipper library is not available in release mode
                */
                Class<?> aClass = Class.forName("com.amazonaws.ivs.reactnative.playerExample.ReactNativeFlipper");
                aClass
                    .getMethod("initializeFlipper", Context.class, ReactInstanceManager.class)
                    .invoke(null, context, reactInstanceManager);
            } catch (ReflectiveOperationException e) {
                // Single multi-purpose catch: ClassNotFoundException, NoSuchMethodException,
                // IllegalAccessException and InvocationTargetException all extend this type and
                // were previously handled by four identical catch blocks.
                e.printStackTrace();
            }
        }
    }
}
| 6,175 |
0 | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example/amazonivsreactnativeplayer/MainActivity.java | package com.example.amazonivsreactnativeplayer;
import android.Manifest;
import android.content.pm.PackageManager;
import android.os.Bundle;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.facebook.react.ReactActivity;
import com.facebook.react.ReactActivityDelegate;
import com.facebook.react.ReactRootView;
/**
 * Single-activity host for the React Native "AmazonIvsExample" app.
 */
public class MainActivity extends ReactActivity {
    /**
     * Returns the name of the main component registered from JavaScript. This is used to schedule
     * rendering of the component.
     */
    @Override
    protected String getMainComponentName() {
        return "AmazonIvsExample";
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Deliberately passes null instead of savedInstanceState — presumably to skip restoring
        // fragment/view state on recreation (common with react-native-screens); TODO confirm.
        super.onCreate(null);
    }

    /**
     * Returns the instance of the {@link ReactActivityDelegate}. There the RootView is created and
     * you can specify the rendered you wish to use (Fabric or the older renderer).
     */
    @Override
    protected ReactActivityDelegate createReactActivityDelegate() {
        return new MainActivityDelegate(this, getMainComponentName());
    }

    /** Delegate toggling Fabric/Concurrent Root based on the build-time architecture flag. */
    public static class MainActivityDelegate extends ReactActivityDelegate {
        public MainActivityDelegate(ReactActivity activity, String mainComponentName) {
            super(activity, mainComponentName);
        }

        @Override
        protected ReactRootView createRootView() {
            ReactRootView reactRootView = new ReactRootView(getContext());
            // If you opted-in for the New Architecture, we enable the Fabric Renderer.
            reactRootView.setIsFabric(BuildConfig.IS_NEW_ARCHITECTURE_ENABLED);
            return reactRootView;
        }

        @Override
        protected boolean isConcurrentRootEnabled() {
            // If you opted-in for the New Architecture, we enable Concurrent Root (i.e. React 18).
            // More on this on https://reactjs.org/blog/2022/03/29/react-v18.html
            return BuildConfig.IS_NEW_ARCHITECTURE_ENABLED;
        }
    }
}
| 6,176 |
0 | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example/amazonivsreactnativeplayer | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example/amazonivsreactnativeplayer/newarchitecture/MainApplicationReactNativeHost.java | package com.example.amazonivsreactnativeplayer.newarchitecture;
import android.app.Application;
import androidx.annotation.NonNull;
import com.facebook.react.PackageList;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.react.ReactPackageTurboModuleManagerDelegate;
import com.facebook.react.bridge.JSIModulePackage;
import com.facebook.react.bridge.JSIModuleProvider;
import com.facebook.react.bridge.JSIModuleSpec;
import com.facebook.react.bridge.JSIModuleType;
import com.facebook.react.bridge.JavaScriptContextHolder;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.UIManager;
import com.facebook.react.fabric.ComponentFactory;
import com.facebook.react.fabric.CoreComponentsRegistry;
import com.facebook.react.fabric.FabricJSIModuleProvider;
import com.facebook.react.fabric.ReactNativeConfig;
import com.facebook.react.uimanager.ViewManagerRegistry;
import com.example.amazonivsreactnativeplayer.BuildConfig;
import com.example.amazonivsreactnativeplayer.newarchitecture.components.MainComponentRegistry;
import com.example.amazonivsreactnativeplayer.newarchitecture.modules.MainApplicationTurboModuleManagerDelegate;
import java.util.ArrayList;
import java.util.List;
/**
* A {@link ReactNativeHost} that helps you load everything needed for the New Architecture, both
* TurboModule delegates and the Fabric Renderer.
*
* <p>Please note that this class is used ONLY if you opt-in for the New Architecture (see the
* `newArchEnabled` property). Is ignored otherwise.
*/
public class MainApplicationReactNativeHost extends ReactNativeHost {
  public MainApplicationReactNativeHost(Application application) {
    super(application);
  }

  /** Developer support (dev menu, reload, etc.) is only enabled in debug builds. */
  @Override
  public boolean getUseDeveloperSupport() {
    return BuildConfig.DEBUG;
  }

  @Override
  protected List<ReactPackage> getPackages() {
    // Autolinked packages are collected by the build-time generated PackageList.
    List<ReactPackage> packages = new PackageList(this).getPackages();
    // Packages that cannot be autolinked yet can be added manually here, for example:
    // packages.add(new MyReactNativePackage());
    // TurboModules must also be loaded here providing a valid TurboReactPackage implementation:
    // packages.add(new TurboReactPackage() { ... });
    // If you have custom Fabric Components, their ViewManagers should also be loaded here
    // inside a ReactPackage.
    return packages;
  }

  /** Name of the JS entry module bundled by Metro. */
  @Override
  protected String getJSMainModuleName() {
    return "index";
  }

  @NonNull
  @Override
  protected ReactPackageTurboModuleManagerDelegate.Builder
      getReactPackageTurboModuleManagerDelegateBuilder() {
    // Here we provide the ReactPackageTurboModuleManagerDelegate Builder. This is necessary
    // for the new architecture and to use TurboModules correctly.
    return new MainApplicationTurboModuleManagerDelegate.Builder();
  }

  /**
   * Supplies the JSI modules for the New Architecture; here that is a single
   * spec wiring up the Fabric UIManager.
   */
  @Override
  protected JSIModulePackage getJSIModulePackage() {
    return new JSIModulePackage() {
      @Override
      public List<JSIModuleSpec> getJSIModules(
          final ReactApplicationContext reactApplicationContext,
          final JavaScriptContextHolder jsContext) {
        final List<JSIModuleSpec> specs = new ArrayList<>();
        // Here we provide a new JSIModuleSpec that will be responsible of providing the
        // custom Fabric Components.
        specs.add(
            new JSIModuleSpec() {
              @Override
              public JSIModuleType getJSIModuleType() {
                return JSIModuleType.UIManager;
              }

              @Override
              public JSIModuleProvider<UIManager> getJSIModuleProvider() {
                final ComponentFactory componentFactory = new ComponentFactory();
                // Core components must be registered before the app's own registry.
                CoreComponentsRegistry.register(componentFactory);
                // Here we register a Components Registry.
                // The one that is generated with the template contains no components
                // and just provides you the one from React Native core.
                MainComponentRegistry.register(componentFactory);
                final ReactInstanceManager reactInstanceManager = getReactInstanceManager();
                ViewManagerRegistry viewManagerRegistry =
                    new ViewManagerRegistry(
                        reactInstanceManager.getOrCreateViewManagers(reactApplicationContext));
                return new FabricJSIModuleProvider(
                    reactApplicationContext,
                    componentFactory,
                    ReactNativeConfig.DEFAULT_CONFIG,
                    viewManagerRegistry);
              }
            });
        return specs;
      }
    };
  }
}
| 6,177 |
0 | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example/amazonivsreactnativeplayer/newarchitecture | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example/amazonivsreactnativeplayer/newarchitecture/components/MainComponentRegistry.java | package com.example.amazonivsreactnativeplayer.newarchitecture.components;
import com.facebook.jni.HybridData;
import com.facebook.proguard.annotations.DoNotStrip;
import com.facebook.react.fabric.ComponentFactory;
import com.facebook.soloader.SoLoader;
/**
* Class responsible to load the custom Fabric Components. This class has native methods and needs a
* corresponding C++ implementation/header file to work correctly (already placed inside the jni/
* folder for you).
*
* <p>Please note that this class is used ONLY if you opt-in for the New Architecture (see the
* `newArchEnabled` property). Is ignored otherwise.
*/
@DoNotStrip
public class MainComponentRegistry {
  static {
    // The native counterpart of this class is implemented in libfabricjni.
    SoLoader.loadLibrary("fabricjni");
  }

  // Holds the paired C++ object alive; accessed from native code (hence @DoNotStrip).
  @DoNotStrip private final HybridData mHybridData;

  @DoNotStrip
  private native HybridData initHybrid(ComponentFactory componentFactory);

  @DoNotStrip
  private MainComponentRegistry(ComponentFactory componentFactory) {
    mHybridData = initHybrid(componentFactory);
  }

  /** Registers the app's custom Fabric components with the given component factory. */
  @DoNotStrip
  public static MainComponentRegistry register(ComponentFactory componentFactory) {
    return new MainComponentRegistry(componentFactory);
  }
}
| 6,178 |
0 | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example/amazonivsreactnativeplayer/newarchitecture | Create_ds/amazon-ivs-react-native-player/example/android/app/src/main/java/com/example/amazonivsreactnativeplayer/newarchitecture/modules/MainApplicationTurboModuleManagerDelegate.java | package com.example.amazonivsreactnativeplayer.newarchitecture.modules;
import com.facebook.jni.HybridData;
import com.facebook.react.ReactPackage;
import com.facebook.react.ReactPackageTurboModuleManagerDelegate;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.soloader.SoLoader;
import java.util.List;
/**
* Class responsible to load the TurboModules. This class has native methods and needs a
* corresponding C++ implementation/header file to work correctly (already placed inside the jni/
* folder for you).
*
* <p>Please note that this class is used ONLY if you opt-in for the New Architecture (see the
* `newArchEnabled` property). Is ignored otherwise.
*/
public class MainApplicationTurboModuleManagerDelegate
    extends ReactPackageTurboModuleManagerDelegate {

  // Guards the .so load so it happens at most once per process; volatile for
  // cross-thread visibility of the flag.
  private static volatile boolean sIsSoLibraryLoaded;

  protected MainApplicationTurboModuleManagerDelegate(
      ReactApplicationContext reactApplicationContext, List<ReactPackage> packages) {
    super(reactApplicationContext, packages);
  }

  // Implemented in the app's native library (see maybeLoadOtherSoLibraries).
  protected native HybridData initHybrid();

  native boolean canCreateTurboModule(String moduleName);

  /** Builder used by the ReactNativeHost to construct this delegate. */
  public static class Builder extends ReactPackageTurboModuleManagerDelegate.Builder {
    protected MainApplicationTurboModuleManagerDelegate build(
        ReactApplicationContext context, List<ReactPackage> packages) {
      return new MainApplicationTurboModuleManagerDelegate(context, packages);
    }
  }

  @Override
  protected synchronized void maybeLoadOtherSoLibraries() {
    if (!sIsSoLibraryLoaded) {
      // If you change the name of your application .so file in the Android.mk file,
      // make sure you update the name here as well.
      SoLoader.loadLibrary("example_amazonivsreactnativeplayer_appmodules");
      sIsSoLibraryLoaded = true;
    }
  }
}
0 | Create_ds/amazon-ivs-react-native-player/example/android/app/src/debug/java/com/example | Create_ds/amazon-ivs-react-native-player/example/android/app/src/debug/java/com/example/amazonivsreactnativeplayer/ReactNativeFlipper.java | /**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* <p>This source code is licensed under the MIT license found in the LICENSE file in the root
* directory of this source tree.
*/
package com.example.amazonivsreactnativeplayer;
import android.content.Context;
import com.facebook.flipper.android.AndroidFlipperClient;
import com.facebook.flipper.android.utils.FlipperUtils;
import com.facebook.flipper.core.FlipperClient;
import com.facebook.flipper.plugins.crashreporter.CrashReporterPlugin;
import com.facebook.flipper.plugins.databases.DatabasesFlipperPlugin;
import com.facebook.flipper.plugins.fresco.FrescoFlipperPlugin;
import com.facebook.flipper.plugins.inspector.DescriptorMapping;
import com.facebook.flipper.plugins.inspector.InspectorFlipperPlugin;
import com.facebook.flipper.plugins.network.FlipperOkhttpInterceptor;
import com.facebook.flipper.plugins.network.NetworkFlipperPlugin;
import com.facebook.flipper.plugins.react.ReactFlipperPlugin;
import com.facebook.react.ReactInstanceEventListener;
import com.facebook.flipper.plugins.sharedpreferences.SharedPreferencesFlipperPlugin;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.modules.network.NetworkingModule;
import okhttp3.OkHttpClient;
public class ReactNativeFlipper {
  /**
   * Attaches the Flipper debugging client and its standard plugins.
   * Does nothing when Flipper should not be enabled for this build/device.
   */
  public static void initializeFlipper(Context context, ReactInstanceManager reactInstanceManager) {
    if (!FlipperUtils.shouldEnableFlipper(context)) {
      return;
    }

    final FlipperClient flipperClient = AndroidFlipperClient.getInstance(context);
    flipperClient.addPlugin(new InspectorFlipperPlugin(context, DescriptorMapping.withDefaults()));
    flipperClient.addPlugin(new ReactFlipperPlugin());
    flipperClient.addPlugin(new DatabasesFlipperPlugin(context));
    flipperClient.addPlugin(new SharedPreferencesFlipperPlugin(context));
    flipperClient.addPlugin(CrashReporterPlugin.getInstance());

    // Route all OkHttp traffic through Flipper's network inspector.
    final NetworkFlipperPlugin networkPlugin = new NetworkFlipperPlugin();
    NetworkingModule.setCustomClientBuilder(
        new NetworkingModule.CustomClientBuilder() {
          @Override
          public void apply(OkHttpClient.Builder clientBuilder) {
            clientBuilder.addNetworkInterceptor(new FlipperOkhttpInterceptor(networkPlugin));
          }
        });
    flipperClient.addPlugin(networkPlugin);
    flipperClient.start();

    // The Fresco plugin needs ImagePipelineFactory to be initialized, so it is
    // attached only after all native modules have been initialized.
    ReactContext currentContext = reactInstanceManager.getCurrentReactContext();
    if (currentContext != null) {
      flipperClient.addPlugin(new FrescoFlipperPlugin());
    } else {
      reactInstanceManager.addReactInstanceEventListener(
          new ReactInstanceEventListener() {
            @Override
            public void onReactContextInitialized(ReactContext reactContext) {
              reactInstanceManager.removeReactInstanceEventListener(this);
              reactContext.runOnNativeModulesQueueThread(
                  () -> flipperClient.addPlugin(new FrescoFlipperPlugin()));
            }
          });
    }
  }
}
| 6,180 |
0 | Create_ds/accumulo-wikisearch/ingest/src/test/hadoop2/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/test/hadoop2/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.reader;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileWriter;
import java.io.StringReader;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
public class AggregatingRecordReaderTest {

  /**
   * SAX error handler that suppresses all parser diagnostics. Several tests
   * feed deliberately malformed XML to the parser and only assert on the
   * outcome, so the console noise is not useful.
   */
  public static class MyErrorHandler implements ErrorHandler {
    @Override
    public void error(SAXParseException exception) throws SAXException {
      // Intentionally ignored.
    }

    @Override
    public void fatalError(SAXParseException exception) throws SAXException {
      // Intentionally ignored.
    }

    @Override
    public void warning(SAXParseException exception) throws SAXException {
      // Intentionally ignored.
    }
  }

  // Three complete records, each preceded by its own XML declaration.
  private static final String xml1 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<doc>\n" + " <a>A</a>\n" + " <b>B</b>\n" + "</doc>\n"
      + "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<doc>\n" + " <a>C</a>\n" + " <b>D</b>\n" + "</doc>\n" + "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
      + "<doc>\n" + " <a>E</a>\n" + " <b>F</b>\n" + "</doc>\n";
  // Starts mid-record: the leading fragment has no opening <doc> token.
  private static final String xml2 = " <b>B</b>\n" + "</doc>\n" + "<doc>\n" + " <a>C</a>\n" + " <b>D</b>\n" + "</doc>\n" + "<doc>\n" + " <a>E</a>\n"
      + " <b>F</b>\n" + "</doc>\n";
  // Ends mid-record: the final <doc> is never closed.
  private static final String xml3 = "<doc>\n" + " <a>A</a>\n" + " <b>B</b>\n" + "</doc>\n" + "<doc>\n" + " <a>C</a>\n" + " <b>D</b>\n" + "</doc>\n"
      + "<doc>\n" + " <a>E</a>\n";
  // Three complete records with no newlines at all (single physical line).
  private static final String xml4 = "<doc>" + " <a>A</a>" + " <b>B</b>" + "</doc>" + "<doc>" + " <a>C</a>" + " <b>D</b>" + "</doc>" + "<doc>"
      + " <a>E</a>" + " <b>F</b>" + "</doc>";
  // Mixes normal records with attribute-only, self-closing <doc/> elements.
  private static final String xml5 = "<doc attr=\"G\">" + " <a>A</a>" + " <b>B</b>" + "</doc>" + "<doc>" + " <a>C</a>" + " <b>D</b>" + "</doc>"
      + "<doc attr=\"H\"/>" + "<doc>" + " <a>E</a>" + " <b>F</b>" + "</doc>" + "<doc attr=\"I\"/>";

  private Configuration conf = null;
  private TaskAttemptContext ctx = null;
  private static DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
  private XPathFactory xpFactory = XPathFactory.newInstance();
  private XPathExpression EXPR_A = null;
  private XPathExpression EXPR_B = null;
  private XPathExpression EXPR_ATTR = null;

  @Before
  public void setUp() throws Exception {
    // Aggregate everything between "<doc" and "</doc>", and (by default for
    // these tests) return partial, unterminated records as well.
    conf = new Configuration();
    conf.set(AggregatingRecordReader.START_TOKEN, "<doc");
    conf.set(AggregatingRecordReader.END_TOKEN, "</doc>");
    conf.set(AggregatingRecordReader.RETURN_PARTIAL_MATCHES, Boolean.toString(true));
    TaskAttemptID id = new TaskAttemptID();
    ctx = new TaskAttemptContextImpl(conf, id);
    XPath xp = xpFactory.newXPath();
    EXPR_A = xp.compile("/doc/a");
    EXPR_B = xp.compile("/doc/b");
    EXPR_ATTR = xp.compile("/doc/@attr");
  }

  /**
   * Writes {@code data} to a temp file that is removed on JVM exit.
   * Uses try-with-resources so the writer is closed even if write() fails
   * (the original leaked the FileWriter on failure).
   */
  public File createFile(String data) throws Exception {
    File f = File.createTempFile("aggReaderTest", ".xml");
    f.deleteOnExit();
    try (FileWriter writer = new FileWriter(f)) {
      writer.write(data);
      writer.flush();
    }
    return f;
  }

  /**
   * Builds a fresh InputSource over {@code xml}. DOM parsing and each XPath
   * evaluation consume the source, so a new one is needed per use.
   */
  private InputSource toSource(Text xml) {
    return new InputSource(new StringReader(xml.toString()));
  }

  /**
   * Asserts that {@code xml} is well-formed and that /doc/a, /doc/b and
   * /doc/@attr evaluate to the given values (empty string = absent).
   */
  private void testXML(Text xml, String aValue, String bValue, String attrValue) throws Exception {
    DocumentBuilder parser = factory.newDocumentBuilder();
    parser.setErrorHandler(new MyErrorHandler());
    Document root = parser.parse(toSource(xml));
    assertNotNull(root);
    assertEquals(EXPR_A.evaluate(toSource(xml)), aValue);
    assertEquals(EXPR_B.evaluate(toSource(xml)), bValue);
    assertEquals(EXPR_ATTR.evaluate(toSource(xml)), attrValue);
  }

  @Test
  public void testIncorrectArgs() throws Exception {
    File f = createFile(xml1);
    // Create FileSplit
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    AggregatingRecordReader reader = new AggregatingRecordReader();
    try {
      // Clear the values for BEGIN and STOP TOKEN; initialize() must reject this.
      conf.set(AggregatingRecordReader.START_TOKEN, null);
      conf.set(AggregatingRecordReader.END_TOKEN, null);
      reader.initialize(split, ctx);
      // If we got here, then the code didn't throw an exception.
      fail();
    } catch (Exception e) {
      // Expected: missing tokens are a configuration error.
    }
    reader.close();
  }

  @Test
  public void testCorrectXML() throws Exception {
    File f = createFile(xml1);
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    // All three records come back intact, in order.
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "E", "F", "");
    assertTrue(!reader.nextKeyValue());
  }

  @Test
  public void testPartialXML() throws Exception {
    // xml2 opens mid-record; only the two complete records are returned.
    File f = createFile(xml2);
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "E", "F", "");
    assertTrue(!reader.nextKeyValue());
  }

  /**
   * With RETURN_PARTIAL_MATCHES disabled, the unterminated trailing record of
   * xml3 must be suppressed. NOTE: this method was missing its {@code @Test}
   * annotation and therefore never ran; restored here.
   */
  @Test
  public void testPartialXML2WithNoPartialRecordsReturned() throws Exception {
    conf.set(AggregatingRecordReader.RETURN_PARTIAL_MATCHES, Boolean.toString(false));
    File f = createFile(xml3);
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue(!reader.nextKeyValue());
  }

  @Test
  public void testPartialXML2() throws Exception {
    File f = createFile(xml3);
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    // Partial matches are enabled, so the unterminated fragment is returned,
    // but it is not parseable XML.
    assertTrue(reader.nextKeyValue());
    try {
      testXML(reader.getCurrentValue(), "E", "", "");
      fail("Fragment returned, and it somehow passed XML parsing.");
    } catch (SAXParseException e) {
      // ignore: the fragment is expected to be unparseable
    }
    assertTrue(!reader.nextKeyValue());
  }

  @Test
  public void testLineSplitting() throws Exception {
    // Records must still be found when the whole input is one physical line.
    File f = createFile(xml4);
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "E", "F", "");
    assertTrue(!reader.nextKeyValue());
  }

  @Test
  public void testNoEndTokenHandling() throws Exception {
    // Self-closing <doc .../> elements have no separate end token but must
    // still be returned as their own records, in document order.
    File f = createFile(xml5);
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "G");
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "", "", "H");
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "E", "F", "");
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "", "", "I");
    assertTrue("Too many records returned.", !reader.nextKeyValue());
  }
}
| 6,181 |
0 | Create_ds/accumulo-wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaInputSplitTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.junit.Assert;
import org.junit.Test;
public class WikipediaInputSplitTest {
  /**
   * Round-trips a WikipediaInputSplit through its Writable serialization and
   * verifies that every field (the partition plus the wrapped FileSplit's
   * path, start, length and locations) survives intact.
   */
  @Test
  public void testSerialization() throws IOException {
    Path testPath = new Path("/foo/bar");
    String[] hosts = new String[] {"abcd", "efgh"};
    FileSplit fSplit = new FileSplit(testPath, 1, 2, hosts);
    WikipediaInputSplit split = new WikipediaInputSplit(fSplit, 7);

    // Serialize; try-with-resources closes the stream even if write() fails.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(baos)) {
      split.write(out);
    }

    // Deserialize into a fresh instance.
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    WikipediaInputSplit split2 = new WikipediaInputSplit();
    try (ObjectInputStream in = new ObjectInputStream(bais)) {
      split2.readFields(in);
      // readFields must consume exactly the bytes that write() produced.
      Assert.assertEquals(0, bais.available());
    }

    // assertEquals/assertArrayEquals (rather than assertTrue(a == b)) report
    // the mismatching values on failure.
    Assert.assertEquals(split.getPartition(), split2.getPartition());
    FileSplit fSplit2 = split2.getFileSplit();
    Assert.assertEquals(fSplit.getPath(), fSplit2.getPath());
    Assert.assertEquals(fSplit.getStart(), fSplit2.getStart());
    Assert.assertEquals(fSplit.getLength(), fSplit2.getLength());
    Assert.assertArrayEquals(hosts, fSplit2.getLocations());
  }
}
| 6,182 |
0 | Create_ds/accumulo-wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/iterator/GlobalIndexUidTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.iterator;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.Combiner;
import org.apache.accumulo.examples.wikisearch.protobuf.Uid;
import org.apache.accumulo.examples.wikisearch.protobuf.Uid.List.Builder;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for GlobalIndexUidCombiner, which merges protobuf Uid.List values.
 * The assertions below establish the contract: up to MAX UIDs are kept
 * verbatim; past MAX (or once IGNORE has been seen) the UID list is dropped
 * and only the aggregate COUNT is retained.
 */
public class GlobalIndexUidTest {
  private GlobalIndexUidCombiner combiner;
  private List<Value> values;

  @Before
  public void setup() throws Exception {
    combiner = new GlobalIndexUidCombiner();
    // Option "all"="true" — presumably equivalent to setCombineAllColumns
    // (cf. testInvalidValueType below); TODO confirm against Combiner docs.
    combiner.init(null, Collections.singletonMap("all", "true"), null);
    values = new ArrayList<Value>();
  }

  // Convenience factory for a fresh Uid.List protobuf builder.
  private Uid.List.Builder createNewUidList() {
    return Uid.List.newBuilder();
  }

  @Test
  public void testSingleUid() {
    // Reducing a single value must return it byte-for-byte unchanged.
    Builder b = createNewUidList();
    b.setCOUNT(1);
    b.setIGNORE(false);
    b.addUID(UUID.randomUUID().toString());
    Uid.List uidList = b.build();
    Value val = new Value(uidList.toByteArray());
    values.add(val);
    Value result = combiner.reduce(new Key(), values.iterator());
    assertTrue(val.compareTo(result.get()) == 0);
  }

  @Test
  public void testLessThanMax() throws Exception {
    // Below the MAX threshold, every UID is retained and IGNORE stays false.
    List<String> savedUUIDs = new ArrayList<String>();
    for (int i = 0; i < GlobalIndexUidCombiner.MAX - 1; i++) {
      Builder b = createNewUidList();
      b.setIGNORE(false);
      String uuid = UUID.randomUUID().toString();
      savedUUIDs.add(uuid);
      b.setCOUNT(i);
      b.addUID(uuid);
      Uid.List uidList = b.build();
      Value val = new Value(uidList.toByteArray());
      values.add(val);
    }
    Value result = combiner.reduce(new Key(), values.iterator());
    Uid.List resultList = Uid.List.parseFrom(result.get());
    assertTrue(resultList.getIGNORE() == false);
    assertTrue(resultList.getUIDCount() == (GlobalIndexUidCombiner.MAX - 1));
    List<String> resultListUUIDs = resultList.getUIDList();
    for (String s : savedUUIDs)
      assertTrue(resultListUUIDs.contains(s));
  }

  @Test
  public void testEqualsMax() throws Exception {
    // Exactly MAX UIDs is still within the retention limit.
    List<String> savedUUIDs = new ArrayList<String>();
    for (int i = 0; i < GlobalIndexUidCombiner.MAX; i++) {
      Builder b = createNewUidList();
      b.setIGNORE(false);
      String uuid = UUID.randomUUID().toString();
      savedUUIDs.add(uuid);
      b.setCOUNT(i);
      b.addUID(uuid);
      Uid.List uidList = b.build();
      Value val = new Value(uidList.toByteArray());
      values.add(val);
    }
    Value result = combiner.reduce(new Key(), values.iterator());
    Uid.List resultList = Uid.List.parseFrom(result.get());
    assertTrue(resultList.getIGNORE() == false);
    assertTrue(resultList.getUIDCount() == (GlobalIndexUidCombiner.MAX));
    List<String> resultListUUIDs = resultList.getUIDList();
    for (String s : savedUUIDs)
      assertTrue(resultListUUIDs.contains(s));
  }

  @Test
  public void testMoreThanMax() throws Exception {
    // Past MAX the combiner drops the UID list, sets IGNORE, and keeps only
    // the aggregate count.
    List<String> savedUUIDs = new ArrayList<String>();
    for (int i = 0; i < GlobalIndexUidCombiner.MAX + 10; i++) {
      Builder b = createNewUidList();
      b.setIGNORE(false);
      String uuid = UUID.randomUUID().toString();
      savedUUIDs.add(uuid);
      b.setCOUNT(1);
      b.addUID(uuid);
      Uid.List uidList = b.build();
      Value val = new Value(uidList.toByteArray());
      values.add(val);
    }
    Value result = combiner.reduce(new Key(), values.iterator());
    Uid.List resultList = Uid.List.parseFrom(result.get());
    assertTrue(resultList.getIGNORE() == true);
    assertTrue(resultList.getUIDCount() == 0);
    assertTrue(resultList.getCOUNT() == (GlobalIndexUidCombiner.MAX + 10));
  }

  @Test
  public void testSeenIgnore() throws Exception {
    // IGNORE is sticky: once any input has it set, the merged result keeps it
    // and carries no UIDs, only the count.
    Builder b = createNewUidList();
    b.setIGNORE(true);
    b.setCOUNT(0);
    Uid.List uidList = b.build();
    Value val = new Value(uidList.toByteArray());
    values.add(val);
    b = createNewUidList();
    b.setIGNORE(false);
    b.setCOUNT(1);
    b.addUID(UUID.randomUUID().toString());
    uidList = b.build();
    val = new Value(uidList.toByteArray());
    values.add(val);
    Value result = combiner.reduce(new Key(), values.iterator());
    Uid.List resultList = Uid.List.parseFrom(result.get());
    assertTrue(resultList.getIGNORE() == true);
    assertTrue(resultList.getUIDCount() == 0);
    assertTrue(resultList.getCOUNT() == 1);
  }

  @Test
  public void testInvalidValueType() throws Exception {
    // A value that is not a parsable Uid.List yields an empty result in lossy
    // mode (log output silenced below since the parse failure is expected).
    Combiner comb = new GlobalIndexUidCombiner();
    IteratorSetting setting = new IteratorSetting(1, GlobalIndexUidCombiner.class);
    GlobalIndexUidCombiner.setCombineAllColumns(setting, true);
    GlobalIndexUidCombiner.setLossyness(setting, true);
    comb.init(null, setting.getOptions(), null);
    Logger.getLogger(GlobalIndexUidCombiner.class).setLevel(Level.OFF);
    Value val = new Value(UUID.randomUUID().toString().getBytes());
    values.add(val);
    Value result = comb.reduce(new Key(), values.iterator());
    Uid.List resultList = Uid.List.parseFrom(result.get());
    assertTrue(resultList.getIGNORE() == false);
    assertTrue(resultList.getUIDCount() == 0);
    assertTrue(resultList.getCOUNT() == 0);
  }

  @Test
  public void testCount() throws Exception {
    // The same UID seen five times appears once in the UID list but
    // contributes five to COUNT.
    UUID uuid = UUID.randomUUID();
    // Collect the same UUID five times.
    for (int i = 0; i < 5; i++) {
      Builder b = createNewUidList();
      b.setCOUNT(1);
      b.setIGNORE(false);
      b.addUID(uuid.toString());
      Uid.List uidList = b.build();
      Value val = new Value(uidList.toByteArray());
      values.add(val);
    }
    Value result = combiner.reduce(new Key(), values.iterator());
    Uid.List resultList = Uid.List.parseFrom(result.get());
    assertTrue(resultList.getIGNORE() == false);
    assertTrue(resultList.getUIDCount() == 1);
    assertTrue(resultList.getCOUNT() == 5);
  }
}
| 6,183 |
0 | Create_ds/accumulo-wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/iterator/TextIndexTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.iterator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.examples.wikisearch.protobuf.TermWeight;
import org.apache.accumulo.examples.wikisearch.protobuf.TermWeight.Info.Builder;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.google.protobuf.InvalidProtocolBufferException;
public class TextIndexTest {
private TextIndexCombiner combiner;
private List<Value> values;
@Before
public void setup() throws Exception {
  // Fresh combiner and value list per test; option "all"="true" is presumably
  // the combine-all-columns switch (cf. setCombineAllColumns elsewhere in
  // these tests) — TODO confirm against the Combiner documentation.
  combiner = new TextIndexCombiner();
  combiner.init(null, Collections.singletonMap("all", "true"), null);
  values = new ArrayList<>();
}
@After
public void cleanup() {
  // Nothing to release; kept as a placeholder mirroring setup().
}
// Convenience factory for a fresh TermWeight.Info protobuf builder.
private TermWeight.Info.Builder createBuilder() {
  return TermWeight.Info.newBuilder();
}
@Test
public void testSingleValue() throws InvalidProtocolBufferException {
  // Encode one term-weight entry: offsets {1, 5}, normalized frequency 0.1.
  Builder single = createBuilder();
  single.addWordOffset(1);
  single.addWordOffset(5);
  single.setNormalizedTermFrequency(0.1f);
  values.add(new Value(single.build().toByteArray()));

  // Reducing a single value must decode back to exactly the same content.
  Value reduced = combiner.reduce(new Key(), values.iterator());
  TermWeight.Info decoded = TermWeight.Info.parseFrom(reduced.get());
  Assert.assertTrue(decoded.getNormalizedTermFrequency() == 0.1f);
  List<Integer> positions = decoded.getWordOffsetList();
  Assert.assertTrue(positions.size() == 2);
  Assert.assertTrue(positions.get(0) == 1);
  Assert.assertTrue(positions.get(1) == 5);
}
@Test
public void testAggregateTwoValues() throws InvalidProtocolBufferException {
Builder builder = createBuilder();
builder.addWordOffset(1);
builder.addWordOffset(5);
builder.setNormalizedTermFrequency(0.1f);
values.add(new Value(builder.build().toByteArray()));
builder = createBuilder();
builder.addWordOffset(3);
builder.setNormalizedTermFrequency(0.05f);
values.add(new Value(builder.build().toByteArray()));
Value result = combiner.reduce(new Key(), values.iterator());
TermWeight.Info info = TermWeight.Info.parseFrom(result.get());
Assert.assertTrue(info.getNormalizedTermFrequency() == 0.15f);
List<Integer> offsets = info.getWordOffsetList();
Assert.assertTrue(offsets.size() == 3);
Assert.assertTrue(offsets.get(0) == 1);
Assert.assertTrue(offsets.get(1) == 3);
Assert.assertTrue(offsets.get(2) == 5);
}
@Test
public void testAggregateManyValues() throws InvalidProtocolBufferException {
Builder builder = createBuilder();
builder.addWordOffset(13);
builder.addWordOffset(15);
builder.addWordOffset(19);
builder.setNormalizedTermFrequency(0.12f);
values.add(new Value(builder.build().toByteArray()));
builder = createBuilder();
builder.addWordOffset(1);
builder.addWordOffset(5);
builder.setNormalizedTermFrequency(0.1f);
values.add(new Value(builder.build().toByteArray()));
builder = createBuilder();
builder.addWordOffset(3);
builder.setNormalizedTermFrequency(0.05f);
values.add(new Value(builder.build().toByteArray()));
Value result = combiner.reduce(new Key(), values.iterator());
TermWeight.Info info = TermWeight.Info.parseFrom(result.get());
Assert.assertTrue(info.getNormalizedTermFrequency() == 0.27f);
List<Integer> offsets = info.getWordOffsetList();
Assert.assertTrue(offsets.size() == 6);
Assert.assertTrue(offsets.get(0) == 1);
Assert.assertTrue(offsets.get(1) == 3);
Assert.assertTrue(offsets.get(2) == 5);
Assert.assertTrue(offsets.get(3) == 13);
Assert.assertTrue(offsets.get(4) == 15);
Assert.assertTrue(offsets.get(5) == 19);
}
@Test
public void testEmptyValue() throws InvalidProtocolBufferException {
Builder builder = createBuilder();
builder.addWordOffset(13);
builder.addWordOffset(15);
builder.addWordOffset(19);
builder.setNormalizedTermFrequency(0.12f);
values.add(new Value("".getBytes()));
values.add(new Value(builder.build().toByteArray()));
values.add(new Value("".getBytes()));
builder = createBuilder();
builder.addWordOffset(1);
builder.addWordOffset(5);
builder.setNormalizedTermFrequency(0.1f);
values.add(new Value(builder.build().toByteArray()));
values.add(new Value("".getBytes()));
builder = createBuilder();
builder.addWordOffset(3);
builder.setNormalizedTermFrequency(0.05f);
values.add(new Value(builder.build().toByteArray()));
values.add(new Value("".getBytes()));
Value result = combiner.reduce(new Key(), values.iterator());
TermWeight.Info info = TermWeight.Info.parseFrom(result.get());
Assert.assertTrue(info.getNormalizedTermFrequency() == 0.27f);
List<Integer> offsets = info.getWordOffsetList();
Assert.assertTrue(offsets.size() == 6);
Assert.assertTrue(offsets.get(0) == 1);
Assert.assertTrue(offsets.get(1) == 3);
Assert.assertTrue(offsets.get(2) == 5);
Assert.assertTrue(offsets.get(3) == 13);
Assert.assertTrue(offsets.get(4) == 15);
Assert.assertTrue(offsets.get(5) == 19);
}
}
| 6,184 |
0 | Create_ds/accumulo-wikisearch/ingest/src/test/hadoop1/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/test/hadoop1/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.reader;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileWriter;
import java.io.StringReader;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
public class AggregatingRecordReaderTest {

  /**
   * SAX error handler that suppresses all parser diagnostics; the tests assert on parse
   * results rather than on reported warnings/errors.
   */
  public static class MyErrorHandler implements ErrorHandler {
    @Override
    public void error(SAXParseException exception) throws SAXException {
      // intentionally ignored — diagnostics are irrelevant to these tests
    }

    @Override
    public void fatalError(SAXParseException exception) throws SAXException {
      // intentionally ignored — diagnostics are irrelevant to these tests
    }

    @Override
    public void warning(SAXParseException exception) throws SAXException {
      // intentionally ignored — diagnostics are irrelevant to these tests
    }
  }

  // Three complete documents, each preceded by its own XML declaration.
  private static final String xml1 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<doc>\n" + "  <a>A</a>\n" + "  <b>B</b>\n" + "</doc>\n"
      + "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<doc>\n" + "  <a>C</a>\n" + "  <b>D</b>\n" + "</doc>\n" + "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
      + "<doc>\n" + "  <a>E</a>\n" + "  <b>F</b>\n" + "</doc>\n";
  // Starts mid-record: the leading fragment lacks its opening <doc>.
  private static final String xml2 = "  <b>B</b>\n" + "</doc>\n" + "<doc>\n" + "  <a>C</a>\n" + "  <b>D</b>\n" + "</doc>\n" + "<doc>\n" + "  <a>E</a>\n"
      + "  <b>F</b>\n" + "</doc>\n";
  // Ends mid-record: the trailing <doc> is never closed.
  private static final String xml3 = "<doc>\n" + "  <a>A</a>\n" + "  <b>B</b>\n" + "</doc>\n" + "<doc>\n" + "  <a>C</a>\n" + "  <b>D</b>\n" + "</doc>\n"
      + "<doc>\n" + "  <a>E</a>\n";
  // All records on a single line (no newlines between tokens).
  private static final String xml4 = "<doc>" + "  <a>A</a>" + "  <b>B</b>" + "</doc>" + "<doc>" + "  <a>C</a>" + "  <b>D</b>" + "</doc>" + "<doc>"
      + "  <a>E</a>" + "  <b>F</b>" + "</doc>";
  // Mixes attributed and self-closing <doc/> elements that have no end token.
  private static final String xml5 = "<doc attr=\"G\">" + "  <a>A</a>" + "  <b>B</b>" + "</doc>" + "<doc>" + "  <a>C</a>" + "  <b>D</b>" + "</doc>"
      + "<doc attr=\"H\"/>" + "<doc>" + "  <a>E</a>" + "  <b>F</b>" + "</doc>" + "<doc attr=\"I\"/>";
  private Configuration conf = null;
  private TaskAttemptContext ctx = null;
  private static DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
  private XPathFactory xpFactory = XPathFactory.newInstance();
  private XPathExpression EXPR_A = null;
  private XPathExpression EXPR_B = null;
  private XPathExpression EXPR_ATTR = null;

  @Before
  public void setUp() throws Exception {
    conf = new Configuration();
    conf.set(AggregatingRecordReader.START_TOKEN, "<doc");
    conf.set(AggregatingRecordReader.END_TOKEN, "</doc>");
    conf.set(AggregatingRecordReader.RETURN_PARTIAL_MATCHES, Boolean.toString(true));
    TaskAttemptID id = new TaskAttemptID();
    ctx = new TaskAttemptContext(conf, id);
    XPath xp = xpFactory.newXPath();
    EXPR_A = xp.compile("/doc/a");
    EXPR_B = xp.compile("/doc/b");
    EXPR_ATTR = xp.compile("/doc/@attr");
  }

  /**
   * Writes {@code data} to a temporary file (deleted on JVM exit) and returns it.
   */
  public File createFile(String data) throws Exception {
    // Write out test file
    File f = File.createTempFile("aggReaderTest", ".xml");
    f.deleteOnExit();
    // try-with-resources guarantees the writer is closed (and flushed) even if
    // write() throws; the previous flush()/close() sequence leaked on failure.
    try (FileWriter writer = new FileWriter(f)) {
      writer.write(data);
    }
    return f;
  }

  /**
   * Parses {@code xml} and asserts the /doc/a, /doc/b and /doc/@attr values match the
   * expected strings (XPath evaluates a missing node to the empty string).
   */
  private void testXML(Text xml, String aValue, String bValue, String attrValue) throws Exception {
    StringReader reader = new StringReader(xml.toString());
    InputSource source = new InputSource(reader);
    DocumentBuilder parser = factory.newDocumentBuilder();
    parser.setErrorHandler(new MyErrorHandler());
    Document root = parser.parse(source);
    assertNotNull(root);
    // Each XPath evaluation consumes the source, so re-create it every time.
    reader = new StringReader(xml.toString());
    source = new InputSource(reader);
    assertEquals(EXPR_A.evaluate(source), aValue);
    reader = new StringReader(xml.toString());
    source = new InputSource(reader);
    assertEquals(EXPR_B.evaluate(source), bValue);
    reader = new StringReader(xml.toString());
    source = new InputSource(reader);
    assertEquals(EXPR_ATTR.evaluate(source), attrValue);
  }

  @Test
  public void testIncorrectArgs() throws Exception {
    File f = createFile(xml1);
    // Create FileSplit
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    AggregatingRecordReader reader = new AggregatingRecordReader();
    try {
      // Clear the values for BEGIN and STOP TOKEN
      conf.set(AggregatingRecordReader.START_TOKEN, null);
      conf.set(AggregatingRecordReader.END_TOKEN, null);
      reader.initialize(split, ctx);
      // If we got here, then the code didn't throw an exception
      fail();
    } catch (Exception e) {
      // expected: initialize must reject missing start/end tokens
    }
    reader.close();
  }

  @Test
  public void testCorrectXML() throws Exception {
    File f = createFile(xml1);
    // Create FileSplit
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    // Initialize the RecordReader
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "E", "F", "");
    assertTrue(!reader.nextKeyValue());
  }

  @Test
  public void testPartialXML() throws Exception {
    File f = createFile(xml2);
    // Create FileSplit
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    // Initialize the RecordReader
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    // The leading fragment has no start token, so only the two complete records return.
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "E", "F", "");
    assertTrue(!reader.nextKeyValue());
  }

  // NOTE(review): this method has no @Test annotation, so JUnit never runs it. That looks
  // accidental (it mirrors testPartialXML2 with RETURN_PARTIAL_MATCHES=false), but it is left
  // un-annotated here because its expected behavior could not be verified — confirm whether
  // the omission is deliberate and re-enable if not.
  public void testPartialXML2WithNoPartialRecordsReturned() throws Exception {
    conf.set(AggregatingRecordReader.RETURN_PARTIAL_MATCHES, Boolean.toString(false));
    File f = createFile(xml3);
    // Create FileSplit
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    // Initialize the RecordReader
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue(!reader.nextKeyValue());
  }

  @Test
  public void testPartialXML2() throws Exception {
    File f = createFile(xml3);
    // Create FileSplit
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    // Initialize the RecordReader
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    // With RETURN_PARTIAL_MATCHES=true the trailing fragment is returned, but it is
    // not well-formed XML, so parsing it must fail.
    assertTrue(reader.nextKeyValue());
    try {
      testXML(reader.getCurrentValue(), "E", "", "");
      fail("Fragment returned, and it somehow passed XML parsing.");
    } catch (SAXParseException e) {
      // ignore
    }
    assertTrue(!reader.nextKeyValue());
  }

  @Test
  public void testLineSplitting() throws Exception {
    File f = createFile(xml4);
    // Create FileSplit
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    // Initialize the RecordReader
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue(reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "E", "F", "");
    assertTrue(!reader.nextKeyValue());
  }

  @Test
  public void testNoEndTokenHandling() throws Exception {
    File f = createFile(xml5);
    // Create FileSplit
    Path p = new Path(f.toURI().toString());
    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
    // Initialize the RecordReader
    AggregatingRecordReader reader = new AggregatingRecordReader();
    reader.initialize(split, ctx);
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "A", "B", "G");
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "C", "D", "");
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "", "", "H");
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "E", "F", "");
    assertTrue("Not enough records returned.", reader.nextKeyValue());
    testXML(reader.getCurrentValue(), "", "", "I");
    assertTrue("Too many records returned.", !reader.nextKeyValue());
  }
}
| 6,185 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/reader/LongLineRecordReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.reader;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;
import org.apache.hadoop.util.LineReader;
/**
* A copy of {@link LineRecordReader} which does not discard lines longer than "mapred.linerecordreader.maxlength". Instead, it returns them, leaving it to the
* mapper to decide what to do with it. It also does not treat '\r' (CR) characters as new lines -- it uses {@link LfLineReader} instead of {@link LineReader}
* to read lines.
*/
public class LongLineRecordReader extends RecordReader<LongWritable,Text> {
  // Factory used to detect whether the split's file is compressed.
  private CompressionCodecFactory compressionCodecs = null;
  // Byte offset where this reader starts (adjusted past any partial first line).
  private long start;
  // Current byte position within the file.
  private long pos;
  // Byte offset where this split ends; Long.MAX_VALUE for compressed input,
  // which cannot be split and must be read to EOF.
  private long end;
  private LfLineReader in;
  // Maximum number of bytes stored into 'value' per line; the rest of an
  // over-long line is silently discarded (see LfLineReader.readLine).
  private int maxLineLength;
  private LongWritable key = null;
  private Text value = null;

  /**
   * Opens the split's file, wraps it in an {@link LfLineReader} (decompressing if a codec
   * matches), and positions the reader at the first line that starts inside this split.
   *
   * Any split that does not begin at offset 0 skips its first (partial) line, because that
   * line belongs to the previous split, which reads one byte past its own end.
   */
  @Override
  public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
    FileSplit split = (FileSplit) genericSplit;
    Configuration job = context.getConfiguration();
    this.maxLineLength = job.getInt("mapred.linerecordreader.maxlength", Integer.MAX_VALUE);
    start = split.getStart();
    end = start + split.getLength();
    final Path file = split.getPath();
    compressionCodecs = new CompressionCodecFactory(job);
    final CompressionCodec codec = compressionCodecs.getCodec(file);
    // open the file and seek to the start of the split
    FileSystem fs = file.getFileSystem(job);
    FSDataInputStream fileIn = fs.open(split.getPath());
    boolean skipFirstLine = false;
    if (codec != null) {
      // Compressed input cannot be seeked into; read the whole stream.
      in = new LfLineReader(codec.createInputStream(fileIn), job);
      end = Long.MAX_VALUE;
    } else {
      if (start != 0) {
        // Back up one byte so a split boundary that falls exactly on a '\n'
        // still causes the (empty remainder of the) first line to be skipped.
        skipFirstLine = true;
        --start;
        fileIn.seek(start);
      }
      in = new LfLineReader(fileIn, job);
    }
    if (skipFirstLine) { // skip first line and re-establish "start".
      start += in.readLine(new Text(), 0, (int) Math.min(Integer.MAX_VALUE, end - start));
    }
    this.pos = start;
  }

  /**
   * Reads the next line into key (byte offset) and value (line contents, without the
   * trailing newline). Returns false at the end of the split, nulling key and value.
   */
  @Override
  public boolean nextKeyValue() throws IOException {
    if (key == null) {
      key = new LongWritable();
    }
    key.set(pos);
    if (value == null) {
      value = new Text();
    }
    int newSize = 0;
    if (pos < end) {
      // maxBytesToConsume is at least maxLineLength so a line straddling the split
      // boundary is still fully consumed by this reader rather than the next one.
      newSize = in.readLine(value, maxLineLength, Math.max((int) Math.min(Integer.MAX_VALUE, end - pos), maxLineLength));
      if (newSize != 0) {
        pos += newSize;
      }
    }
    if (newSize == 0) {
      // End of split (or EOF): clear state so getCurrentKey/Value return null.
      key = null;
      value = null;
      return false;
    } else {
      return true;
    }
  }

  @Override
  public LongWritable getCurrentKey() {
    return key;
  }

  @Override
  public Text getCurrentValue() {
    return value;
  }

  /**
   * Get the progress within the split
   */
  @Override
  public float getProgress() {
    if (start == end) {
      return 0.0f;
    } else {
      return Math.min(1.0f, (pos - start) / (float) (end - start));
    }
  }

  @Override
  public synchronized void close() throws IOException {
    if (in != null) {
      in.close();
    }
  }
}
| 6,186 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/reader/LfLineReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.reader;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
/**
* A class that provides a line reader from an input stream.
*/
public class LfLineReader {
  private static final int DEFAULT_BUFFER_SIZE = 64 * 1024;
  private int bufferSize = DEFAULT_BUFFER_SIZE;
  private InputStream in;
  private byte[] buffer;
  // the number of bytes of real data in the buffer
  private int bufferLength = 0;
  // the current position in the buffer
  private int bufferPosn = 0;
  // Only '\n' terminates a line; '\r' is treated as ordinary data
  // (this is the difference from Hadoop's LineReader).
  private static final byte LF = '\n';
  /**
   * Create a line reader that reads from the given stream using the default buffer-size (64k).
   *
   * @param in
   *          The input stream
   */
  public LfLineReader(InputStream in) {
    this(in, DEFAULT_BUFFER_SIZE);
  }
  /**
   * Create a line reader that reads from the given stream using the given buffer-size.
   *
   * @param in
   *          The input stream
   * @param bufferSize
   *          Size of the read buffer
   */
  public LfLineReader(InputStream in, int bufferSize) {
    this.in = in;
    this.bufferSize = bufferSize;
    this.buffer = new byte[this.bufferSize];
  }
  /**
   * Create a line reader that reads from the given stream using the
   * <code>io.file.buffer.size</code> specified in the given <code>Configuration</code>.
   *
   * @param in
   *          input stream
   * @param conf
   *          configuration
   */
  public LfLineReader(InputStream in, Configuration conf) throws IOException {
    this(in, conf.getInt("io.file.buffer.size", DEFAULT_BUFFER_SIZE));
  }
  /**
   * Close the underlying stream.
   */
  public void close() throws IOException {
    in.close();
  }
  /**
   * Read one line from the InputStream into the given Text. A line can be terminated by '\n' (LF).
   * EOF also terminates an otherwise unterminated line.
   *
   * @param str
   *          the object to store the given line (without newline)
   * @param maxLineLength
   *          the maximum number of bytes to store into str; the rest of the line is silently
   *          discarded.
   * @param maxBytesToConsume
   *          the maximum number of bytes to consume in this call. This is only a hint, because if
   *          the line cross this threshold, we allow it to happen. It can overshoot potentially by
   *          as much as one buffer length.
   *
   * @return the number of bytes read including the (longest) newline found.
   *
   * @throws IOException
   *           if the underlying stream throws
   */
  public int readLine(Text str, int maxLineLength, int maxBytesToConsume) throws IOException {
    /*
     * We're reading data from in, but the head of the stream may be already buffered in buffer, so
     * we have several cases: 1. No newline characters are in the buffer, so we need to copy
     * everything and read another buffer from the stream. 2. An unambiguously terminated line is in
     * buffer, so we just copy to str.
     */
    str.clear();
    int txtLength = 0; // tracks str.getLength(), as an optimization
    int newlineLength = 0; // length of terminating newline
    long bytesConsumed = 0;
    do {
      int startPosn = bufferPosn; // starting from where we left off the last time
      if (bufferPosn >= bufferLength) {
        // Buffer exhausted — refill from the underlying stream.
        startPosn = bufferPosn = 0;
        bufferLength = in.read(buffer);
        if (bufferLength <= 0) {
          break; // EOF
        }
      }
      for (; bufferPosn < bufferLength; ++bufferPosn) { // search for newline
        if (buffer[bufferPosn] == LF) {
          newlineLength = 1;
          ++bufferPosn; // at next invocation proceed from following byte
          break;
        }
      }
      int readLength = bufferPosn - startPosn;
      bytesConsumed += readLength;
      // Bytes actually appended to str exclude the newline and are capped so that
      // str never grows beyond maxLineLength; the overflow is silently dropped.
      int appendLength = readLength - newlineLength;
      if (appendLength > maxLineLength - txtLength) {
        appendLength = maxLineLength - txtLength;
      }
      if (appendLength > 0) {
        str.append(buffer, startPosn, appendLength);
        txtLength += appendLength;
      }
    } while (newlineLength == 0 && bytesConsumed < maxBytesToConsume);
    if (bytesConsumed > Integer.MAX_VALUE) {
      throw new IOException("Too many bytes before newline: " + bytesConsumed);
    }
    return (int) bytesConsumed;
  }
  /**
   * Read from the InputStream into the given Text.
   *
   * @param str
   *          the object to store the given line
   * @param maxLineLength
   *          the maximum number of bytes to store into str.
   * @return the number of bytes read including the newline
   * @throws IOException
   *           if the underlying stream throws
   */
  public int readLine(Text str, int maxLineLength) throws IOException {
    return readLine(str, maxLineLength, Integer.MAX_VALUE);
  }
  /**
   * Read from the InputStream into the given Text.
   *
   * @param str
   *          the object to store the given line
   * @return the number of bytes read including the newline
   * @throws IOException
   *           if the underlying stream throws
   */
  public int readLine(Text str) throws IOException {
    return readLine(str, Integer.MAX_VALUE, Integer.MAX_VALUE);
  }
}
| 6,187 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.reader;
import java.io.IOException;
import org.apache.accumulo.examples.wikisearch.ingest.WikipediaConfiguration;
import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
import org.apache.accumulo.examples.wikisearch.util.TextUtil;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
/**
* This class aggregates Text values based on a start and end filter. An example use case for this
* would be XML data. This will not work with data that has nested start and stop tokens.
*
*/
public class AggregatingRecordReader extends LongLineRecordReader {
  /** Configuration key for the token that marks the start of a record (e.g. {@code "<doc"}). */
  public static final String START_TOKEN = "aggregating.token.start";
  /** Configuration key for the token that marks the end of a record (e.g. {@code "</doc>"}). */
  public static final String END_TOKEN = "aggregating.token.end";
  /** Configuration key (boolean) controlling whether incomplete records are returned. */
  public static final String RETURN_PARTIAL_MATCHES = "aggregating.allow.partial";

  private LongWritable key = new LongWritable();
  private String startToken = null;
  private String endToken = null;
  // Number of aggregated records returned so far; doubles as the record key.
  private long counter = 0;
  // Accumulates the text of the record currently being assembled.
  private Text aggValue = new Text();
  // True once startToken has been seen for the record in progress.
  private boolean startFound = false;
  // Unconsumed input carried over between lines / calls to process().
  private StringBuilder remainder = new StringBuilder(0);
  private boolean returnPartialMatches = false;

  @Override
  public LongWritable getCurrentKey() {
    key.set(counter);
    return key;
  }

  @Override
  public Text getCurrentValue() {
    return aggValue;
  }

  @Override
  public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
    super.initialize(((WikipediaInputSplit) genericSplit).getFileSplit(), context);
    // NOTE(review): despite its name, isNull() appears to act as a "look up and fail fast
    // if unset" accessor for the configured tokens — confirm against WikipediaConfiguration.
    this.startToken =
        WikipediaConfiguration.isNull(context.getConfiguration(), START_TOKEN, String.class);
    this.endToken =
        WikipediaConfiguration.isNull(context.getConfiguration(), END_TOKEN, String.class);
    this.returnPartialMatches =
        context.getConfiguration().getBoolean(RETURN_PARTIAL_MATCHES, false);
    /*
     * Text-appending works almost exactly like the + operator on Strings- it creates a byte array
     * exactly the size of [prefix + suffix] and dumps the bytes into the new array. This module
     * works by doing lots of little additions, one line at a time. With most XML, the documents are
     * partitioned on line boundaries, so we will generally have lots of additions. Setting a large
     * default byte array for a text object can avoid this and give us StringBuilder-like
     * functionality for Text objects.
     */
    byte[] txtBuffer = new byte[2048];
    aggValue.set(txtBuffer);
  }

  /**
   * Advances to the next aggregated record, assembling it line by line via
   * {@link #process(Text)}. Returns true when a record is available in
   * {@link #getCurrentValue()}; false when the input is exhausted.
   */
  @Override
  public boolean nextKeyValue() throws IOException {
    aggValue.clear();
    boolean hasNext = false;
    boolean finished = false;
    // Find the start token. Keep consuming lines while there is more input, and keep
    // draining 'remainder' after the underlying reader is exhausted.
    while (!finished && ((hasNext = super.nextKeyValue()) || remainder.length() > 0)) {
      if (hasNext) {
        finished = process(super.getCurrentValue());
      } else {
        // No more lines: let process() drain whatever is left in remainder.
        finished = process(null);
      }
      if (finished) {
        startFound = false;
        counter++;
        return true;
      }
    }
    // If we have anything loaded in the agg value (and we found a start)
    // then we ran out of data before finding the end. Just return the
    // data we have and if it's not valid, downstream parsing of the data
    // will fail.
    if (returnPartialMatches && startFound && aggValue.getLength() > 0) {
      startFound = false;
      counter++;
      return true;
    }
    return false;
  }

  /**
   * Populates aggValue with the contents of the Text object.
   *
   * @return true if aggValue is complete, else false and needs more data.
   */
  private boolean process(Text t) {
    if (null != t) {
      remainder.append(t.toString());
    }
    while (remainder.length() > 0) {
      if (!startFound) {
        // If found, then begin aggregating at the start offset
        int start = remainder.indexOf(startToken);
        if (-1 != start) {
          // Append the start token to the aggregate value
          TextUtil.textAppendNoNull(aggValue,
              remainder.substring(start, start + startToken.length()), false);
          // Remove to the end of the start token from the remainder
          remainder.delete(0, start + startToken.length());
          startFound = true;
        } else {
          // If we are looking for the start and have not found it, then remove
          // the bytes
          remainder.delete(0, remainder.length());
        }
      } else {
        // Try to find the end
        int end = remainder.indexOf(endToken);
        // Also try to find the start
        int start = remainder.indexOf(startToken);
        if (-1 == end) {
          if (returnPartialMatches && start >= 0) {
            // End token not found, but another start token was found...
            // The amount to copy is up to the beginning of the next start token
            TextUtil.textAppendNoNull(aggValue, remainder.substring(0, start), false);
            remainder.delete(0, start);
            return true;
          } else {
            // Not found, aggregate the entire remainder
            TextUtil.textAppendNoNull(aggValue, remainder.toString(), false);
            // Delete all chars from remainder
            remainder.delete(0, remainder.length());
          }
        } else {
          if (returnPartialMatches && start >= 0 && start < end) {
            // We found the end token, but found another start token first, so
            // deal with that.
            TextUtil.textAppendNoNull(aggValue, remainder.substring(0, start), false);
            remainder.delete(0, start);
            return true;
          } else {
            // END_TOKEN was found. Extract to the end of END_TOKEN
            TextUtil.textAppendNoNull(aggValue, remainder.substring(0, end + endToken.length()),
                false);
            // Remove from remainder up to the end of END_TOKEN
            remainder.delete(0, end + endToken.length());
            return true;
          }
        }
      }
    }
    return false;
  }
}
| 6,188 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/util/TextUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.util;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import org.apache.accumulo.core.iterators.user.SummingCombiner;
import org.apache.hadoop.io.Text;
public class TextUtil {

  // Single zero byte used as a field separator between appended components.
  private static final byte[] nullByte = {0};

  /**
   * Appends a null byte followed by the UTF-8 bytes of the given string to the given {@link Text}
   *
   * @param text
   *          the Text to which to append
   * @param string
   *          the String to append
   */
  public static void textAppend(Text text, String string) {
    appendNullByte(text);
    textAppendNoNull(text, string);
  }

  /**
   * Appends a null byte followed by the UTF-8 bytes of the given string, optionally replacing
   * malformed input instead of throwing.
   */
  public static void textAppend(Text text, String string, boolean replaceBadChar) {
    appendNullByte(text);
    textAppendNoNull(text, string, replaceBadChar);
  }

  /**
   * Appends a null byte followed by the 8-byte fixed-length encoding of the given long.
   */
  public static void textAppend(Text t, long s) {
    appendNullByte(t);
    t.append(SummingCombiner.FIXED_LEN_ENCODER.encode(s), 0, 8);
  }

  /**
   * Appends a null byte to the given text
   *
   * @param text
   *          the text to which to append the null byte
   */
  public static void appendNullByte(Text text) {
    text.append(nullByte, 0, nullByte.length);
  }

  /**
   * Appends the UTF-8 bytes of the given string to the given {@link Text}
   *
   * @param t
   *          the Text to which to append
   * @param s
   *          the String to append
   */
  public static void textAppendNoNull(Text t, String s) {
    textAppendNoNull(t, s, false);
  }

  /**
   * Appends the UTF-8 bytes of the given string to the given {@link Text}
   */
  public static void textAppendNoNull(Text t, String s, boolean replaceBadChar) {
    final ByteBuffer encoded;
    try {
      encoded = Text.encode(s, replaceBadChar);
    } catch (CharacterCodingException cce) {
      // Surface malformed input as an unchecked argument error.
      throw new IllegalArgumentException(cce);
    }
    t.append(encoded.array(), 0, encoded.limit());
  }

  /**
   * Converts the given string its UTF-8 bytes. This uses Hadoop's method for converting string to
   * UTF-8 and is much faster than calling {@link String#getBytes(String)}.
   *
   * @param string
   *          the string to convert
   * @return the UTF-8 representation of the string
   */
  public static byte[] toUtf8(String string) {
    try {
      ByteBuffer buffer = Text.encode(string, false);
      byte[] bytes = new byte[buffer.limit()];
      System.arraycopy(buffer.array(), 0, bytes, 0, bytes.length);
      return bytes;
    } catch (CharacterCodingException cce) {
      throw new IllegalArgumentException(cce);
    }
  }
}
| 6,189 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/LRUOutputCombiner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * A fixed-capacity, access-ordered map that combines ("folds") values offered under the same key
 * and spills the least-recently-used entry to an {@link Output} sink once the capacity is
 * exceeded. {@link #flush()} drains all remaining entries to the sink.
 *
 * <p>Not thread-safe (inherits {@link LinkedHashMap}'s single-threaded contract).
 */
public class LRUOutputCombiner<Key,Value> extends LinkedHashMap<Key,Value> {
  private static final long serialVersionUID = 1L;

  /** Strategy for merging a value already cached under a key with a newly offered value. */
  public static abstract class Fold<Value> {
    public abstract Value fold(Value oldValue, Value newValue);
  }

  /** Sink that receives entries as they are evicted or flushed. */
  public static abstract class Output<Key,Value> {
    public abstract void output(Key key, Value value);
  }

  private final int capacity;
  private final Fold<Value> fold;
  private final Output<Key,Value> output;
  private long cacheHits = 0;
  private long cacheMisses = 0;

  /**
   * @param capacity
   *          maximum number of entries held before the eldest is evicted to the output
   * @param fold
   *          strategy used to merge values offered under an existing key
   * @param output
   *          sink receiving evicted and flushed entries
   */
  public LRUOutputCombiner(int capacity, Fold<Value> fold, Output<Key,Value> output) {
    // capacity + 1 with load factor 1.1 keeps the backing table from rehashing;
    // 'true' selects access order so the eldest entry is the least recently used.
    super(capacity + 1, 1.1f, true);
    this.capacity = capacity;
    this.fold = fold;
    this.output = output;
  }

  @Override
  protected boolean removeEldestEntry(Map.Entry<Key,Value> eldest) {
    // Called by LinkedHashMap after every insertion: spill the LRU entry to the
    // output instead of silently dropping it.
    if (size() > capacity) {
      output.output(eldest.getKey(), eldest.getValue());
      return true;
    }
    return false;
  }

  /**
   * Inserts the value, folding it with any value already cached under {@code key}.
   *
   * @return always {@code null} (the previous value is consumed by the fold, not returned)
   */
  @Override
  public Value put(Key key, Value value) {
    // get() both fetches the existing value and promotes the key in access order.
    Value val = get(key);
    if (val != null) {
      value = fold.fold(val, value);
      cacheHits++;
    } else {
      cacheMisses++;
    }
    super.put(key, value);
    return null;
  }

  /** Emits every cached entry to the output (in access order) and empties the cache. */
  public void flush() {
    for (Map.Entry<Key,Value> e : entrySet()) {
      output.output(e.getKey(), e.getValue());
    }
    clear();
  }

  /** @return number of puts that folded into an existing entry */
  public long getCacheHits() {
    return cacheHits;
  }

  /** @return number of puts that created a new entry */
  public long getCacheMisses() {
    return cacheMisses;
  }
}
| 6,190 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/ArticleExtractor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Reader;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.Map;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.apache.accumulo.examples.wikisearch.normalizer.LcNoDiacriticsNormalizer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
/**
 * Extracts {@link Article} objects from the Wikipedia XML export format using a streaming (StAX)
 * parser, so large page elements can be processed without building a DOM.
 */
public class ArticleExtractor {
  // NOTE(review): SimpleDateFormat is not thread-safe; sharing this public static instance across
  // threads would corrupt parses — confirm each consumer uses it from a single thread.
  public final static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'Z");
  // Normalizer shared by all Article instances for lower-cased, diacritic-free field values.
  private static LcNoDiacriticsNormalizer lcdn = new LcNoDiacriticsNormalizer();
  /**
   * Value object for a single wiki article; implements Hadoop's {@link Writable} so instances can
   * be moved between the partitioner and ingest jobs.
   */
  public static class Article implements Writable {
    int id;
    String title;
    long timestamp;
    String comments;
    String text;
    // No-arg constructor required by Writable deserialization.
    public Article(){}
    private Article(int id, String title, long timestamp, String comments, String text) {
      super();
      this.id = id;
      this.title = title;
      this.timestamp = timestamp;
      this.comments = comments;
      this.text = text;
    }
    public int getId() {
      return id;
    }
    public String getTitle() {
      return title;
    }
    public String getComments() {
      return comments;
    }
    public String getText() {
      return text;
    }
    public long getTimestamp() {
      return timestamp;
    }
    /** Returns the raw (un-normalized) metadata fields keyed by field name; TEXT is excluded. */
    public Map<String,Object> getFieldValues() {
      Map<String,Object> fields = new HashMap<String,Object>();
      fields.put("ID", this.id);
      fields.put("TITLE", this.title);
      fields.put("TIMESTAMP", this.timestamp);
      fields.put("COMMENTS", this.comments);
      return fields;
    }
    /**
     * Returns the metadata fields as strings for indexing: TITLE and COMMENTS are normalized
     * (lower-cased, diacritics removed) while ID and TIMESTAMP keep their numeric string form.
     */
    public Map<String,String> getNormalizedFieldValues() {
      Map<String,String> fields = new HashMap<String,String>();
      //fields.put("ID", nn.normalizeFieldValue("ID", this.id));
      fields.put("ID", Integer.toString(this.id));
      fields.put("TITLE", lcdn.normalizeFieldValue("TITLE", this.title));
      //fields.put("TIMESTAMP", nn.normalizeFieldValue("TIMESTAMP", this.timestamp));
      fields.put("TIMESTAMP", Long.toString(this.timestamp));
      fields.put("COMMENTS", lcdn.normalizeFieldValue("COMMENTS", this.comments));
      return fields;
    }
    // Deserialization order must mirror write(): id, title, timestamp, comments, text.
    @Override
    public void readFields(DataInput in) throws IOException {
      id = in.readInt();
      Text foo = new Text();
      foo.readFields(in);
      title = foo.toString();
      timestamp = in.readLong();
      foo.readFields(in);
      comments = foo.toString();
      foo.readFields(in);
      text = foo.toString();
    }
    @Override
    public void write(DataOutput out) throws IOException {
      out.writeInt(id);
      (new Text(title)).write(out);
      out.writeLong(timestamp);
      (new Text(comments)).write(out);
      (new Text(text)).write(out);
    }
  }
  public ArticleExtractor() {}
  private static XMLInputFactory xmlif = XMLInputFactory.newInstance();
  static
  {
    // Expand entity references inline so element text arrives as plain character events.
    xmlif.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, Boolean.TRUE);
  }
  /**
   * Parses the stream and returns the first complete article found, or {@code null} when the
   * stream ends first or the timestamp cannot be parsed. The method returns as soon as the
   * closing {@code </text>} element is seen, so the id/title/timestamp/comment elements are
   * assumed to precede the text element (as they do in the Wikipedia export format).
   *
   * @param reader character stream positioned at Wikipedia export XML
   * @throws RuntimeException wrapping any {@link XMLStreamException} from the parser
   */
  public Article extract(Reader reader) {
    XMLStreamReader xmlr = null;
    try {
      xmlr = xmlif.createXMLStreamReader(reader);
    } catch (XMLStreamException e1) {
      throw new RuntimeException(e1);
    }
    QName titleName = QName.valueOf("title");
    QName textName = QName.valueOf("text");
    QName revisionName = QName.valueOf("revision");
    QName timestampName = QName.valueOf("timestamp");
    QName commentName = QName.valueOf("comment");
    QName idName = QName.valueOf("id");
    // One reusable accumulation buffer per element of interest.
    Map<QName,StringBuilder> tags = new HashMap<QName,StringBuilder>();
    for (QName tag : new QName[] {titleName, textName, timestampName, commentName, idName}) {
      tags.put(tag, new StringBuilder());
    }
    StringBuilder articleText = tags.get(textName);
    StringBuilder titleText = tags.get(titleName);
    StringBuilder timestampText = tags.get(timestampName);
    StringBuilder commentText = tags.get(commentName);
    StringBuilder idText = tags.get(idName);
    // Buffer currently being filled with character data, or null when between elements.
    StringBuilder current = null;
    boolean inRevision = false;
    while (true) {
      try {
        if (!xmlr.hasNext())
          break;
        xmlr.next();
      } catch (XMLStreamException e) {
        throw new RuntimeException(e);
      }
      QName currentName = null;
      if (xmlr.hasName()) {
        currentName = xmlr.getName();
      }
      if (xmlr.isStartElement() && tags.containsKey(currentName)) {
        // Inside a <revision>, skip its nested <id> so it does not clobber the page <id>.
        if (!inRevision || (!currentName.equals(revisionName) && !currentName.equals(idName))) {
          current = tags.get(currentName);
          current.setLength(0);
        }
      } else if (xmlr.isStartElement() && currentName.equals(revisionName)) {
        inRevision = true;
      } else if (xmlr.isEndElement() && currentName.equals(revisionName)) {
        inRevision = false;
      } else if (xmlr.isEndElement() && current != null) {
        if (textName.equals(currentName)) {
          // </text> marks the end of the article body: assemble and return the Article.
          String title = titleText.toString();
          String text = articleText.toString();
          String comment = commentText.toString();
          int id = Integer.parseInt(idText.toString());
          long timestamp;
          try {
            // Dump timestamps end in a literal 'Z'; append an explicit +0000 offset so the
            // trailing RFC-822 zone pattern in dateFormat parses them as UTC.
            timestamp = dateFormat.parse(timestampText.append("+0000").toString()).getTime();
            return new Article(id, title, timestamp, comment, text);
          } catch (ParseException e) {
            return null;
          }
        }
        current = null;
      } else if (current != null && xmlr.hasText()) {
        // Accumulate character data for the element currently being captured.
        current.append(xmlr.getText());
      }
    }
    return null;
  }
}
| 6,191 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.lucene.analysis.Analyzer;
/**
 * Central accessor for the Hadoop {@link Configuration} keys used by the wikisearch ingest jobs.
 * Each getter returns a default, returns {@code null}, or fails fast when a required key is
 * missing, as documented per method.
 */
public class WikipediaConfiguration {
  public final static String INSTANCE_NAME = "wikipedia.accumulo.instance_name";
  public final static String USER = "wikipedia.accumulo.user";
  public final static String PASSWORD = "wikipedia.accumulo.password";
  public final static String TABLE_NAME = "wikipedia.accumulo.table";
  public final static String ZOOKEEPERS = "wikipedia.accumulo.zookeepers";
  public final static String NAMESPACES_FILENAME = "wikipedia.namespaces.filename";
  public final static String LANGUAGES_FILENAME = "wikipedia.languages.filename";
  public final static String WORKING_DIRECTORY = "wikipedia.ingest.working";
  public final static String ANALYZER = "wikipedia.index.analyzer";
  public final static String NUM_PARTITIONS = "wikipedia.ingest.partitions";
  public final static String NUM_GROUPS = "wikipedia.ingest.groups";
  public final static String PARTITIONED_ARTICLES_DIRECTORY = "wikipedia.partitioned.directory";
  public final static String RUN_PARTITIONER = "wikipedia.run.partitioner";
  public final static String RUN_INGEST = "wikipedia.run.ingest";
  public final static String BULK_INGEST = "wikipedia.bulk.ingest";
  public final static String BULK_INGEST_DIR = "wikipedia.bulk.ingest.dir";
  public final static String BULK_INGEST_FAILURE_DIR = "wikipedia.bulk.ingest.failure.dir";
  public final static String BULK_INGEST_BUFFER_SIZE = "wikipedia.bulk.ingest.buffer.size";
  public final static String PARTITIONED_INPUT_MIN_SPLIT_SIZE = "wikipedia.min.input.split.size";

  /** @return the Accumulo user name, or {@code null} if unset */
  public static String getUser(Configuration conf) {
    return conf.get(USER);
  }

  /** @return the Accumulo password as UTF-8 bytes, or {@code null} if unset */
  public static byte[] getPassword(Configuration conf) {
    String pass = conf.get(PASSWORD);
    if (pass == null) {
      return null;
    }
    // Encode explicitly: the no-arg getBytes() depends on the JVM's platform-default charset.
    return pass.getBytes(StandardCharsets.UTF_8);
  }

  /**
   * @return the configured data table name
   * @throws RuntimeException if the table name is not configured
   */
  public static String getTableName(Configuration conf) {
    String tablename = conf.get(TABLE_NAME);
    if (tablename == null) {
      throw new RuntimeException("No data table name specified in " + TABLE_NAME);
    }
    return tablename;
  }

  /** @return the Accumulo instance name, or {@code null} if unset */
  public static String getInstanceName(Configuration conf) {
    return conf.get(INSTANCE_NAME);
  }

  /**
   * @return the ZooKeeper quorum string
   * @throws RuntimeException if the quorum is not configured
   */
  public static String getZookeepers(Configuration conf) {
    String zookeepers = conf.get(ZOOKEEPERS);
    if (zookeepers == null) {
      throw new RuntimeException("No zookeepers specified in " + ZOOKEEPERS);
    }
    return zookeepers;
  }

  /** @return the namespaces file path; defaults to {@code <working dir>/namespaces.dat} */
  public static Path getNamespacesFile(Configuration conf) {
    String filename = conf.get(NAMESPACES_FILENAME,
        new Path(getWorkingDirectory(conf), "namespaces.dat").toString());
    return new Path(filename);
  }

  /** @return the languages file path; defaults to {@code <working dir>/languages.txt} */
  public static Path getLanguagesFile(Configuration conf) {
    String filename = conf.get(LANGUAGES_FILENAME,
        new Path(getWorkingDirectory(conf), "languages.txt").toString());
    return new Path(filename);
  }

  /** @return the ingest working directory (required; NPE inside Path if unset) */
  public static Path getWorkingDirectory(Configuration conf) {
    String filename = conf.get(WORKING_DIRECTORY);
    return new Path(filename);
  }

  /** @return a live Accumulo {@link Connector} built from the configured credentials */
  public static Connector getConnector(Configuration conf)
      throws AccumuloException, AccumuloSecurityException {
    return getInstance(conf).getConnector(getUser(conf), getPassword(conf));
  }

  /** @return a {@link ZooKeeperInstance} for the configured instance name and quorum */
  public static Instance getInstance(Configuration conf) {
    return new ZooKeeperInstance(getInstanceName(conf), getZookeepers(conf));
  }

  /** @return number of shard partitions; default 25 */
  public static int getNumPartitions(Configuration conf) {
    return conf.getInt(NUM_PARTITIONS, 25);
  }

  /** @return number of mapper groups; default 1 */
  public static int getNumGroups(Configuration conf) {
    return conf.getInt(NUM_GROUPS, 1);
  }

  /** @return directory holding the partitioned articles (required key) */
  public static Path getPartitionedArticlesPath(Configuration conf) {
    return new Path(conf.get(PARTITIONED_ARTICLES_DIRECTORY));
  }

  /** @return minimum input split size in bytes; default 128 MiB */
  public static long getMinInputSplitSize(Configuration conf) {
    return conf.getLong(PARTITIONED_INPUT_MIN_SPLIT_SIZE, 1L << 27);
  }

  /** @return whether the partitioner job should run; default false */
  public static boolean runPartitioner(Configuration conf) {
    return conf.getBoolean(RUN_PARTITIONER, false);
  }

  /** @return whether the ingest job should run; default true */
  public static boolean runIngest(Configuration conf) {
    return conf.getBoolean(RUN_INGEST, true);
  }

  /** @return whether to bulk-ingest RFiles instead of live-writing mutations; default true */
  public static boolean bulkIngest(Configuration conf) {
    return conf.getBoolean(BULK_INGEST, true);
  }

  /** @return bulk ingest staging directory, or {@code null} if unset */
  public static String bulkIngestDir(Configuration conf) {
    return conf.get(BULK_INGEST_DIR);
  }

  /** @return bulk ingest failure directory, or {@code null} if unset */
  public static String bulkIngestFailureDir(Configuration conf) {
    return conf.get(BULK_INGEST_FAILURE_DIR);
  }

  /** @return bulk ingest buffer size in bytes; default 256 MiB */
  public static long bulkIngestBufferSize(Configuration conf) {
    return conf.getLong(BULK_INGEST_BUFFER_SIZE, 1L << 28);
  }

  /**
   * Helper method to get properties from Hadoop configuration. (Despite the name, this returns
   * the parsed value; "isNull" refers to the absence check it performs first.)
   *
   * @throws IllegalArgumentException
   *           if property is not defined, null, or empty. Or if resultClass is not handled.
   * @return value of property
   */
  @SuppressWarnings("unchecked")
  public static <T> T isNull(Configuration conf, String propertyName, Class<T> resultClass) {
    String p = conf.get(propertyName);
    if (StringUtils.isEmpty(p)) {
      throw new IllegalArgumentException(propertyName + " must be specified");
    }
    if (resultClass.equals(String.class)) {
      return (T) p;
    } else if (resultClass.equals(String[].class)) {
      return (T) conf.getStrings(propertyName);
    } else if (resultClass.equals(Boolean.class)) {
      return (T) Boolean.valueOf(p);
    } else if (resultClass.equals(Long.class)) {
      return (T) Long.valueOf(p);
    } else if (resultClass.equals(Integer.class)) {
      return (T) Integer.valueOf(p);
    } else if (resultClass.equals(Float.class)) {
      return (T) Float.valueOf(p);
    } else if (resultClass.equals(Double.class)) {
      return (T) Double.valueOf(p);
    } else {
      throw new IllegalArgumentException(resultClass.getSimpleName() + " is unhandled.");
    }
  }
}
| 6,192 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.nio.charset.Charset;
import java.util.HashSet;
import java.util.IllegalFormatException;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.examples.wikisearch.ingest.ArticleExtractor.Article;
import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
import org.apache.accumulo.examples.wikisearch.normalizer.LcNoDiacriticsNormalizer;
import org.apache.accumulo.examples.wikisearch.protobuf.Uid;
import org.apache.accumulo.examples.wikisearch.protobuf.Uid.List.Builder;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.wikipedia.WikipediaTokenizer;
import org.apache.lucene.util.Attribute;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
/**
 * Mapper that turns raw Wikipedia page XML into Accumulo mutations for the shard table, the
 * global forward/reverse index tables, and the metadata table. Output key is the destination
 * table name; output value is the mutation.
 */
public class WikipediaMapper extends Mapper<LongWritable,Text,Text,Mutation> {
  private static final Logger log = Logger.getLogger(WikipediaMapper.class);
  public final static Charset UTF8 = Charset.forName("UTF-8");
  public static final String DOCUMENT_COLUMN_FAMILY = "d";
  public static final String METADATA_EVENT_COLUMN_FAMILY = "e";
  public static final String METADATA_INDEX_COLUMN_FAMILY = "i";
  public static final String TOKENS_FIELD_NAME = "TEXT";
  // Matches e.g. "enwiki-latest-pages.xml" or "pt_br...xml.bz2"; group 1 is the language code.
  // Dots are escaped so the suffix really is ".xml"/".xml.bz2" rather than any character.
  private final static Pattern languagePattern = Pattern.compile("([a-z_]+).*\\.xml(\\.bz2)?");
  private static final Value NULL_VALUE = new Value(new byte[0]);
  private static final String cvPrefix = "all|";
  private ArticleExtractor extractor;
  private String language;
  private int numPartitions = 0;
  private ColumnVisibility cv = null;
  private int myGroup = -1;
  private int numGroups = -1;
  private Text tablename = null;
  private Text indexTableName = null;
  private Text reverseIndexTableName = null;
  private Text metadataTableName = null;

  /**
   * Derives the table names, this mapper's group, the shard count, and the language (from the
   * input file name) before any records are mapped.
   *
   * @throws RuntimeException if the input file name does not encode a language
   */
  @Override
  public void setup(Context context) {
    Configuration conf = context.getConfiguration();
    tablename = new Text(WikipediaConfiguration.getTableName(conf));
    indexTableName = new Text(tablename + "Index");
    reverseIndexTableName = new Text(tablename + "ReverseIndex");
    metadataTableName = new Text(tablename + "Metadata");
    WikipediaInputSplit wiSplit = (WikipediaInputSplit) context.getInputSplit();
    myGroup = wiSplit.getPartition();
    numGroups = WikipediaConfiguration.getNumGroups(conf);
    FileSplit split = wiSplit.getFileSplit();
    String fileName = split.getPath().getName();
    Matcher matcher = languagePattern.matcher(fileName);
    if (matcher.matches()) {
      language = matcher.group(1).replace('_', '-').toLowerCase();
    } else {
      throw new RuntimeException("Unknown ingest language! " + fileName);
    }
    extractor = new ArticleExtractor();
    numPartitions = WikipediaConfiguration.getNumPartitions(conf);
    cv = new ColumnVisibility(cvPrefix + language);
  }

  /**
   * We will partition the documents based on the document id
   *
   * @return The number of the partition for a given article.
   */
  public static int getPartitionId(Article article, int numPartitions)
      throws IllegalFormatException {
    return article.getId() % numPartitions;
  }

  // Per-JVM dedup of metadata mutations so each (field, language) pair is emitted only once.
  // NOTE(review): shared unsynchronized static — assumes one mapper thread per JVM; confirm.
  static HashSet<String> metadataSent = new HashSet<>();

  /**
   * Parses one page's XML into an {@link Article} and, when the article belongs to this mapper's
   * group, writes its event, index, reverse-index, and metadata mutations.
   */
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    // Text.getBytes() exposes the backing array, which may be longer than the logical
    // content; bound the stream by getLength() to avoid reading stale trailing bytes.
    Article article = extractor.extract(new InputStreamReader(
        new ByteArrayInputStream(value.getBytes(), 0, value.getLength()), UTF8));
    String NULL_BYTE = "\u0000";
    String colfPrefix = language + NULL_BYTE;
    String indexPrefix = "fi" + NULL_BYTE;
    if (article != null) {
      // Each mapper group handles a disjoint subset of article ids.
      int groupId = WikipediaMapper.getPartitionId(article, numGroups);
      if (groupId != myGroup) {
        return;
      }
      Text partitionId =
          new Text(Integer.toString(WikipediaMapper.getPartitionId(article, numPartitions)));
      // Create the mutations for the document.
      // Row is partition id, colf is language0articleid, colq is fieldName\0fieldValue
      Mutation m = new Mutation(partitionId);
      for (Entry<String,Object> entry : article.getFieldValues().entrySet()) {
        m.put(colfPrefix + article.getId(),
            entry.getKey() + NULL_BYTE + entry.getValue().toString(), cv, article.getTimestamp(),
            NULL_VALUE);
        // Create mutations for the metadata table.
        String metadataKey = entry.getKey() + METADATA_EVENT_COLUMN_FAMILY + language;
        if (!metadataSent.contains(metadataKey)) {
          Mutation mm = new Mutation(entry.getKey());
          mm.put(METADATA_EVENT_COLUMN_FAMILY, language, cv, article.getTimestamp(), NULL_VALUE);
          context.write(metadataTableName, mm);
          metadataSent.add(metadataKey);
        }
      }
      // Tokenize the content
      Set<String> tokens = getTokens(article);
      // We are going to put the fields to be indexed into a multimap. This allows us to iterate
      // over the entire set once.
      Multimap<String,String> indexFields = HashMultimap.create();
      // Add the normalized field values
      LcNoDiacriticsNormalizer normalizer = new LcNoDiacriticsNormalizer();
      for (Entry<String,String> index : article.getNormalizedFieldValues().entrySet()) {
        indexFields.put(index.getKey(), index.getValue());
      }
      // Add the tokens
      for (String token : tokens) {
        indexFields.put(TOKENS_FIELD_NAME, normalizer.normalizeFieldValue("", token));
      }
      for (Entry<String,String> index : indexFields.entries()) {
        // Create mutations for the in partition index
        // Row is partition id, colf is 'fi'\0fieldName, colq is fieldValue\0language\0article id
        m.put(indexPrefix + index.getKey(),
            index.getValue() + NULL_BYTE + colfPrefix + article.getId(), cv, article.getTimestamp(),
            NULL_VALUE);
        // Create mutations for the global index
        // Create a UID object for the Value
        Builder uidBuilder = Uid.List.newBuilder();
        uidBuilder.setIGNORE(false);
        uidBuilder.setCOUNT(1);
        uidBuilder.addUID(Integer.toString(article.getId()));
        Uid.List uidList = uidBuilder.build();
        Value val = new Value(uidList.toByteArray());
        // Create mutations for the global index
        // Row is field value, colf is field name, colq is partitionid\0language, value is Uid.List
        // object
        Mutation gm = new Mutation(index.getValue());
        gm.put(index.getKey(), partitionId + NULL_BYTE + language, cv, article.getTimestamp(), val);
        context.write(indexTableName, gm);
        // Create mutations for the global reverse index
        Mutation grm = new Mutation(StringUtils.reverse(index.getValue()));
        grm.put(index.getKey(), partitionId + NULL_BYTE + language, cv, article.getTimestamp(),
            val);
        context.write(reverseIndexTableName, grm);
        // Create mutations for the metadata table.
        String metadataKey = index.getKey() + METADATA_INDEX_COLUMN_FAMILY + language;
        if (!metadataSent.contains(metadataKey)) {
          Mutation mm = new Mutation(index.getKey());
          mm.put(METADATA_INDEX_COLUMN_FAMILY,
              language + NULL_BYTE + LcNoDiacriticsNormalizer.class.getName(), cv,
              article.getTimestamp(), NULL_VALUE);
          context.write(metadataTableName, mm);
          metadataSent.add(metadataKey);
        }
      }
      // Add the entire text to the document section of the table.
      // row is the partition, colf is 'd', colq is language\0articleid, value is Base64 encoded
      // GZIP'd document
      m.put(DOCUMENT_COLUMN_FAMILY, colfPrefix + article.getId(), cv, article.getTimestamp(),
          new Value(Base64.encodeBase64(article.getText().getBytes(UTF8))));
      context.write(tablename, m);
    } else {
      context.getCounter("wikipedia", "invalid articles").increment(1);
    }
    context.progress();
  }

  /**
   * Tokenize the wikipedia content.
   *
   * <p>Properly consumes the Lucene token stream (reset / incrementToken / end) and collects the
   * distinct term texts; the previous implementation added the tokenizer's own toString() once
   * instead of the tokens.
   */
  static Set<String> getTokens(Article article) {
    Set<String> tokenList = new HashSet<>();
    try (WikipediaTokenizer tok = new WikipediaTokenizer(new StringReader(article.getText()))) {
      CharTermAttribute term = tok.addAttribute(CharTermAttribute.class);
      tok.reset();
      while (tok.incrementToken()) {
        tokenList.add(term.toString());
      }
      tok.end();
    } catch (IOException e) {
      log.error("Error tokenizing text", e);
    }
    return tokenList;
  }
}
| 6,193 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaPartitionedIngester.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.IteratorSetting.Column;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
import org.apache.accumulo.core.iterators.user.SummingCombiner;
import org.apache.accumulo.examples.wikisearch.ingest.ArticleExtractor.Article;
import org.apache.accumulo.examples.wikisearch.iterator.GlobalIndexUidCombiner;
import org.apache.accumulo.examples.wikisearch.iterator.TextIndexCombiner;
import org.apache.accumulo.examples.wikisearch.output.SortingRFileOutputFormat;
import org.apache.accumulo.examples.wikisearch.reader.AggregatingRecordReader;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;
public class WikipediaPartitionedIngester extends Configured implements Tool {
private static final Logger log = Logger.getLogger(WikipediaPartitionedIngester.class);
public final static String INGEST_LANGUAGE = "wikipedia.ingest_language";
public final static String SPLIT_FILE = "wikipedia.split_file";
public final static String TABLE_NAME = "wikipedia.table";
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new WikipediaPartitionedIngester(), args);
System.exit(res);
}
  /**
   * Idempotently creates the shard, forward-index, reverse-index, and metadata tables (derived
   * from {@code tableName}) and attaches their combiner iterators. Existing tables are left
   * untouched, so iterators are only configured on freshly created tables.
   *
   * @param tops Accumulo table-operations handle
   * @param tableName base name of the shard table; the other names are suffixed from it
   */
  private void createTables(TableOperations tops, String tableName) throws AccumuloException,
      AccumuloSecurityException, TableNotFoundException, TableExistsException {
    // Create the shard table
    String indexTableName = tableName + "Index";
    String reverseIndexTableName = tableName + "ReverseIndex";
    String metadataTableName = tableName + "Metadata";
    // create the shard table
    if (!tops.exists(tableName)) {
      // Set a text index combiner on the given field names. No combiner is set if the option is not
      // supplied
      String textIndexFamilies = WikipediaMapper.TOKENS_FIELD_NAME;
      tops.create(tableName);
      if (textIndexFamilies.length() > 0) {
        System.out.println("Adding content combiner on the fields: " + textIndexFamilies);
        IteratorSetting setting = new IteratorSetting(10, TextIndexCombiner.class);
        List<Column> columns = new ArrayList<>();
        for (String family : StringUtils.split(textIndexFamilies, ',')) {
          // In-partition index families are prefixed with "fi" + null byte by the mapper.
          columns.add(new Column("fi\0" + family));
        }
        TextIndexCombiner.setColumns(setting, columns);
        TextIndexCombiner.setLossyness(setting, true);
        tops.attachIterator(tableName, setting, EnumSet.allOf(IteratorScope.class));
      }
      // Set the locality group for the full content column family
      tops.setLocalityGroups(tableName, Collections.singletonMap("WikipediaDocuments",
          Collections.singleton(new Text(WikipediaMapper.DOCUMENT_COLUMN_FAMILY))));
    }
    if (!tops.exists(indexTableName)) {
      tops.create(indexTableName);
      // Add the UID combiner
      IteratorSetting setting =
          new IteratorSetting(19, "UIDAggregator", GlobalIndexUidCombiner.class);
      GlobalIndexUidCombiner.setCombineAllColumns(setting, true);
      GlobalIndexUidCombiner.setLossyness(setting, true);
      tops.attachIterator(indexTableName, setting, EnumSet.allOf(IteratorScope.class));
    }
    if (!tops.exists(reverseIndexTableName)) {
      tops.create(reverseIndexTableName);
      // Add the UID combiner
      IteratorSetting setting =
          new IteratorSetting(19, "UIDAggregator", GlobalIndexUidCombiner.class);
      GlobalIndexUidCombiner.setCombineAllColumns(setting, true);
      GlobalIndexUidCombiner.setLossyness(setting, true);
      tops.attachIterator(reverseIndexTableName, setting, EnumSet.allOf(IteratorScope.class));
    }
    if (!tops.exists(metadataTableName)) {
      // Add the SummingCombiner with VARLEN encoding for the frequency column
      tops.create(metadataTableName);
      IteratorSetting setting = new IteratorSetting(10, SummingCombiner.class);
      SummingCombiner.setColumns(setting, Collections.singletonList(new Column("f")));
      SummingCombiner.setEncodingType(setting, SummingCombiner.Type.VARLEN);
      tops.attachIterator(metadataTableName, setting, EnumSet.allOf(IteratorScope.class));
    }
  }
@Override
public int run(String[] args) throws Exception {
  // Drives the two-phase ingest: an optional partitioning job followed by an
  // optional ingest job. A non-zero result from any phase aborts the pipeline.
  final Configuration conf = getConf();

  if (WikipediaConfiguration.runPartitioner(conf)) {
    final int partitionResult = runPartitionerJob();
    if (partitionResult != 0) {
      return partitionResult;
    }
  }

  if (!WikipediaConfiguration.runIngest(conf)) {
    return 0;
  }

  final int ingestResult = runIngestJob();
  if (ingestResult != 0) {
    return ingestResult;
  }

  // In bulk mode the ingest job only writes RFiles; they still must be imported.
  return WikipediaConfiguration.bulkIngest(conf) ? loadBulkFiles() : 0;
}
/**
 * Runs the MapReduce job that reads raw wikipedia XML dumps and writes
 * partitioned {@code Article} records as compressed SequenceFiles.
 *
 * @return 0 on success, 1 if the job failed
 * @throws Exception if job setup or execution fails
 */
private int runPartitionerJob() throws Exception {
  // Job.getInstance replaces the deprecated Job(Configuration, String) constructor
  // and matches how runIngestJob() creates its job.
  Job partitionerJob = Job.getInstance(getConf(), "Partition Wikipedia");
  Configuration partitionerConf = partitionerJob.getConfiguration();
  // Speculative execution would emit duplicate records for the same split.
  partitionerConf.set("mapred.map.tasks.speculative.execution", "false");
  configurePartitionerJob(partitionerJob);
  List<Path> inputPaths = new ArrayList<>();
  SortedSet<String> languages = new TreeSet<>();
  FileSystem fs = FileSystem.get(partitionerConf);
  Path parent = new Path(partitionerConf.get("wikipedia.input"));
  listFiles(parent, fs, inputPaths, languages);
  System.out.println("Input files in " + parent + ":" + inputPaths.size());
  Path[] inputPathsArray = new Path[inputPaths.size()];
  inputPaths.toArray(inputPathsArray);
  System.out.println("Languages:" + languages.size());
  // setup input format
  WikipediaInputFormat.setInputPaths(partitionerJob, inputPathsArray);
  partitionerJob.setMapperClass(WikipediaPartitioner.class);
  partitionerJob.setNumReduceTasks(0);
  // setup output format: map-only job writing Text -> Article sequence files
  partitionerJob.setMapOutputKeyClass(Text.class);
  partitionerJob.setMapOutputValueClass(Article.class);
  partitionerJob.setOutputKeyClass(Text.class);
  partitionerJob.setOutputValueClass(Article.class);
  partitionerJob.setOutputFormatClass(SequenceFileOutputFormat.class);
  Path outputDir = WikipediaConfiguration.getPartitionedArticlesPath(partitionerConf);
  SequenceFileOutputFormat.setOutputPath(partitionerJob, outputDir);
  SequenceFileOutputFormat.setCompressOutput(partitionerJob, true);
  SequenceFileOutputFormat.setOutputCompressionType(partitionerJob, CompressionType.RECORD);
  return partitionerJob.waitForCompletion(true) ? 0 : 1;
}
/**
 * Runs the MapReduce job that reads the partitioned articles (SequenceFiles
 * produced by {@link #runPartitionerJob()}) and writes Accumulo mutations,
 * either directly via AccumuloOutputFormat or as sorted RFiles for bulk import.
 *
 * @return 0 on success, 1 on job failure or missing bulk-ingest directory
 * @throws Exception if job setup or execution fails
 */
private int runIngestJob() throws Exception {
  Job ingestJob = Job.getInstance(getConf(), "Ingest Partitioned Wikipedia");
  Configuration ingestConf = ingestJob.getConfiguration();
  // Speculative execution would write duplicate mutations / duplicate RFiles.
  ingestConf.set("mapred.map.tasks.speculative.execution", "false");
  configureIngestJob(ingestJob);
  String tablename = WikipediaConfiguration.getTableName(ingestConf);
  Connector connector = WikipediaConfiguration.getConnector(ingestConf);
  TableOperations tops = connector.tableOperations();
  // Ensure the shard, index, reverse-index, and metadata tables exist.
  createTables(tops, tablename);
  ingestJob.setMapperClass(WikipediaPartitionedMapper.class);
  ingestJob.setNumReduceTasks(0);
  // setup input format
  ingestJob.setInputFormatClass(SequenceFileInputFormat.class);
  SequenceFileInputFormat.setInputPaths(ingestJob,
      WikipediaConfiguration.getPartitionedArticlesPath(ingestConf));
  // TODO make split size configurable
  SequenceFileInputFormat.setMinInputSplitSize(ingestJob,
      WikipediaConfiguration.getMinInputSplitSize(ingestConf));
  // setup output format
  ingestJob.setMapOutputKeyClass(Text.class);
  ingestJob.setMapOutputValueClass(Mutation.class);
  if (WikipediaConfiguration.bulkIngest(ingestConf)) {
    // Bulk path: sort mutations into RFiles on HDFS; loadBulkFiles() imports them later.
    ingestJob.setOutputFormatClass(SortingRFileOutputFormat.class);
    SortingRFileOutputFormat.setMaxBufferSize(ingestConf,
        WikipediaConfiguration.bulkIngestBufferSize(ingestConf));
    String bulkIngestDir = WikipediaConfiguration.bulkIngestDir(ingestConf);
    if (bulkIngestDir == null) {
      log.error("Bulk ingest dir not set");
      return 1;
    }
    SortingRFileOutputFormat.setPathName(ingestConf,
        WikipediaConfiguration.bulkIngestDir(ingestConf));
  } else {
    // Live path: stream mutations straight into Accumulo over Zookeeper.
    ingestJob.setOutputFormatClass(AccumuloOutputFormat.class);
    ClientConfiguration clientConfig = ClientConfiguration.create();
    clientConfig.setProperty(ClientProperty.INSTANCE_NAME,
        WikipediaConfiguration.getInstanceName(ingestConf));
    clientConfig.setProperty(ClientProperty.INSTANCE_ZK_HOST,
        WikipediaConfiguration.getZookeepers(ingestConf));
    String user = WikipediaConfiguration.getUser(ingestConf);
    byte[] password = WikipediaConfiguration.getPassword(ingestConf);
    AccumuloOutputFormat.setConnectorInfo(ingestJob, user, new PasswordToken(password));
    AccumuloOutputFormat.setZooKeeperInstance(ingestJob, clientConfig);
  }
  return ingestJob.waitForCompletion(true) ? 0 : 1;
}
/**
 * Imports the RFiles produced by a bulk-ingest run into Accumulo. Each
 * subdirectory of the bulk-ingest directory is named after the table it
 * targets and is imported via {@code importDirectory}; a per-table failure
 * directory is created alongside.
 *
 * @return 0 (failures surface as exceptions)
 */
private int loadBulkFiles()
    throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException {
  Configuration conf = getConf();
  Connector connector = WikipediaConfiguration.getConnector(conf);
  FileSystem fs = FileSystem.get(conf);
  String directory = WikipediaConfiguration.bulkIngestDir(conf);
  String failureDirectory = WikipediaConfiguration.bulkIngestFailureDir(conf);
  for (FileStatus status : fs.listStatus(new Path(directory))) {
    // Only per-table subdirectories are importable; skip stray files.
    if (!status.isDir()) {
      continue;
    }
    Path dir = status.getPath();
    Path failPath = new Path(failureDirectory + "/" + dir.getName());
    fs.mkdirs(failPath);
    // Directory name doubles as the destination table name.
    connector.tableOperations().importDirectory(dir.getName(), dir.toString(),
        failPath.toString(), true);
  }
  return 0;
}
public final static PathFilter partFilter = path -> path.getName().startsWith("part");
/**
 * Configures the partitioner job: job jar, input format, and the XML tokens
 * that delimit one wikipedia article record.
 */
protected void configurePartitionerJob(Job job) {
  job.setJarByClass(WikipediaPartitionedIngester.class);
  job.setInputFormatClass(WikipediaInputFormat.class);
  final Configuration jobConf = job.getConfiguration();
  jobConf.set(AggregatingRecordReader.START_TOKEN, "<page>");
  jobConf.set(AggregatingRecordReader.END_TOKEN, "</page>");
}
/** Configures the ingest job; currently only sets the job jar. */
protected void configureIngestJob(Job job) {
  job.setJarByClass(WikipediaPartitionedIngester.class);
}
protected static final Pattern filePattern = Pattern.compile("([a-z_]+).*.xml(.bz2)?");
/**
 * Recursively walks {@code path}, collecting every file whose name matches
 * {@link #filePattern} into {@code files} and recording the language prefix
 * (capture group 1) into {@code languages}.
 */
protected void listFiles(Path path, FileSystem fs, List<Path> files, Set<String> languages)
    throws IOException {
  for (FileStatus entry : fs.listStatus(path)) {
    Path child = entry.getPath();
    if (entry.isDir()) {
      listFiles(child, fs, files, languages);
      continue;
    }
    Matcher m = filePattern.matcher(child.getName());
    if (!m.matches()) {
      continue;
    }
    languages.add(m.group(1));
    files.add(child);
  }
}
}
| 6,194 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaInputFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.accumulo.examples.wikisearch.reader.AggregatingRecordReader;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
/**
 * Input format that multiplies each file split into {@code numGroups} copies,
 * one per partition group, so that several mapper waves can each process the
 * subset of articles hashed to their group.
 */
public class WikipediaInputFormat extends TextInputFormat {
  /**
   * A {@link FileSplit} tagged with a partition (group) number. Writable so it
   * can be shipped to tasks; the serialization order in {@link #write} and
   * {@link #readFields} must stay in sync.
   */
  public static class WikipediaInputSplit extends InputSplit implements Writable {
    // No-arg constructor required by the Writable deserialization contract.
    public WikipediaInputSplit(){}
    public WikipediaInputSplit(FileSplit fileSplit, int partition)
    {
      this.fileSplit = fileSplit;
      this.partition = partition;
    }
    private FileSplit fileSplit = null;
    private int partition = -1;
    public int getPartition()
    {
      return partition;
    }
    public FileSplit getFileSplit()
    {
      return fileSplit;
    }
    @Override
    public long getLength() throws IOException, InterruptedException {
      return fileSplit.getLength();
    }
    @Override
    public String[] getLocations() throws IOException, InterruptedException {
      // for highly replicated files, returning all of the locations can lead to bunching
      // TODO replace this with a subset of the locations
      return fileSplit.getLocations();
    }
    @Override
    public void readFields(DataInput in) throws IOException {
      // Mirrors write(): path, start, length, optional host list, partition.
      Path file = new Path(in.readUTF());
      long start = in.readLong();
      long length = in.readLong();
      String [] hosts = null;
      if(in.readBoolean())
      {
        int numHosts = in.readInt();
        hosts = new String[numHosts];
        for(int i = 0; i < numHosts; i++)
          hosts[i] = in.readUTF();
      }
      fileSplit = new FileSplit(file, start, length, hosts);
      partition = in.readInt();
    }
    @Override
    public void write(DataOutput out) throws IOException {
      out.writeUTF(fileSplit.getPath().toString());
      out.writeLong(fileSplit.getStart());
      out.writeLong(fileSplit.getLength());
      String [] hosts = fileSplit.getLocations();
      // A boolean flag distinguishes "no host list" from an empty one.
      if(hosts == null)
      {
        out.writeBoolean(false);
      }
      else
      {
        out.writeBoolean(true);
        out.writeInt(hosts.length);
        for(String host:hosts)
          out.writeUTF(host);
      }
      out.writeInt(partition);
    }
  }
  @Override
  public List<InputSplit> getSplits(JobContext job) throws IOException {
    // Duplicate every underlying split once per group; each mapper later skips
    // articles that do not hash to its own group.
    List<InputSplit> superSplits = super.getSplits(job);
    List<InputSplit> splits = new ArrayList<InputSplit>();
    int numGroups = WikipediaConfiguration.getNumGroups(job.getConfiguration());
    for(int group = 0; group < numGroups; group++)
    {
      for(InputSplit split:superSplits)
      {
        FileSplit fileSplit = (FileSplit)split;
        splits.add(new WikipediaInputSplit(fileSplit,group));
      }
    }
    return splits;
  }
  @Override
  public RecordReader<LongWritable,Text> createRecordReader(InputSplit split, TaskAttemptContext context) {
    // Aggregates raw text lines into whole <page>...</page> records.
    return new AggregatingRecordReader();
  }
}
| 6,195 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaPartitionedMapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.HashSet;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.MultiTableBatchWriter;
import org.apache.accumulo.core.client.MutationsRejectedException;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.examples.wikisearch.ingest.ArticleExtractor.Article;
import org.apache.accumulo.examples.wikisearch.iterator.GlobalIndexUidCombiner;
import org.apache.accumulo.examples.wikisearch.normalizer.LcNoDiacriticsNormalizer;
import org.apache.accumulo.examples.wikisearch.protobuf.Uid;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
/**
 * Mapper that turns partitioned {@code Article} records into Accumulo
 * mutations. Shard-table mutations are emitted through the MapReduce context;
 * global index, reverse index, and metadata mutations are written directly via
 * a {@link MultiTableBatchWriter}, with {@link LRUOutputCombiner}s pre-merging
 * duplicate entries to reduce write volume.
 */
public class WikipediaPartitionedMapper extends Mapper<Text,Article,Text,Mutation> {
  // private static final Logger log = Logger.getLogger(WikipediaPartitionedMapper.class);
  public final static Charset UTF8 = Charset.forName("UTF-8");
  public static final String DOCUMENT_COLUMN_FAMILY = "d";
  public static final String METADATA_EVENT_COLUMN_FAMILY = "e";
  public static final String METADATA_INDEX_COLUMN_FAMILY = "i";
  public static final String TOKENS_FIELD_NAME = "TEXT";
  private static final Value NULL_VALUE = new Value(new byte[0]);
  private static final String cvPrefix = "all|";
  private int numPartitions = 0;
  private Text tablename = null;
  private Text indexTableName = null;
  private Text reverseIndexTableName = null;
  private Text metadataTableName = null;
  /**
   * Immutable key describing one mutation (row/colfam/colqual/visibility/ts),
   * used as the LRU-combiner map key so identical entries can be folded.
   */
  private static class MutationInfo {
    final String row;
    final String colfam;
    final String colqual;
    final ColumnVisibility cv;
    final long timestamp;
    public MutationInfo(String row, String colfam, String colqual, ColumnVisibility cv, long timestamp) {
      super();
      this.row = row;
      this.colfam = colfam;
      this.colqual = colqual;
      this.cv = cv;
      this.timestamp = timestamp;
    }
    @Override
    public boolean equals(Object obj) {
      // NOTE(review): no null/instanceof guard — a non-MutationInfo argument
      // would throw ClassCastException. Appears safe here because these keys
      // are only compared against each other inside the LRU combiner maps.
      MutationInfo other = (MutationInfo)obj;
      return (row == other.row || row.equals(other.row)) &&
          (colfam == other.colfam || colfam.equals(other.colfam)) &&
          colqual.equals(other.colqual) &&
          (cv == other.cv || cv.equals(other.cv)) &&
          timestamp == other.timestamp;
    }
    @Override
    public int hashCode() {
      return row.hashCode() ^ colfam.hashCode() ^ colqual.hashCode() ^ cv.hashCode() ^ (int)timestamp;
    }
  }
  private LRUOutputCombiner<MutationInfo,CountAndSet> wikiIndexOutput;
  private LRUOutputCombiner<MutationInfo,CountAndSet> wikiReverseIndexOutput;
  private LRUOutputCombiner<MutationInfo,Value> wikiMetadataOutput;
  /**
   * Running UID aggregate for a global-index entry: a document count plus the
   * document-id set (nulled out once it exceeds GlobalIndexUidCombiner.MAX).
   */
  private static class CountAndSet
  {
    public int count;
    public HashSet<String> set;
    public CountAndSet(String entry)
    {
      set = new HashSet<String>();
      set.add(entry);
      count = 1;
    }
  }
  MultiTableBatchWriter mtbw;
  @Override
  public void setup(final Context context) {
    Configuration conf = context.getConfiguration();
    tablename = new Text(WikipediaConfiguration.getTableName(conf));
    indexTableName = new Text(tablename + "Index");
    reverseIndexTableName = new Text(tablename + "ReverseIndex");
    metadataTableName = new Text(tablename + "Metadata");
    try {
      // 10 MB buffer, 1 s max latency, 10 writer threads.
      mtbw = WikipediaConfiguration.getConnector(conf).createMultiTableBatchWriter(10000000, 1000, 10);
    } catch (AccumuloException e) {
      throw new RuntimeException(e);
    } catch (AccumuloSecurityException e) {
      throw new RuntimeException(e);
    }
    final Text metadataTableNameFinal = metadataTableName;
    final Text indexTableNameFinal = indexTableName;
    final Text reverseIndexTableNameFinal = reverseIndexTableName;
    numPartitions = WikipediaConfiguration.getNumPartitions(conf);
    // Fold rule shared by both index combiners: sum counts, union UID sets,
    // and drop the set (count-only) once it grows past MAX.
    LRUOutputCombiner.Fold<CountAndSet> indexFold =
      new LRUOutputCombiner.Fold<CountAndSet>() {
      @Override
      public CountAndSet fold(CountAndSet oldValue, CountAndSet newValue) {
        oldValue.count += newValue.count;
        if(oldValue.set == null || newValue.set == null)
        {
          oldValue.set = null;
          return oldValue;
        }
        oldValue.set.addAll(newValue.set);
        if(oldValue.set.size() > GlobalIndexUidCombiner.MAX)
          oldValue.set = null;
        return oldValue;
      }
    };
    // On eviction/flush, serialize the aggregate as a protobuf Uid.List and
    // write it to the global index table.
    LRUOutputCombiner.Output<MutationInfo,CountAndSet> indexOutput =
      new LRUOutputCombiner.Output<WikipediaPartitionedMapper.MutationInfo,CountAndSet>() {
      @Override
      public void output(MutationInfo key, CountAndSet value)
      {
        Uid.List.Builder builder = Uid.List.newBuilder();
        builder.setCOUNT(value.count);
        if (value.set == null) {
          builder.setIGNORE(true);
          builder.clearUID();
        } else {
          builder.setIGNORE(false);
          builder.addAllUID(value.set);
        }
        Uid.List list = builder.build();
        Value val = new Value(list.toByteArray());
        Mutation m = new Mutation(key.row);
        m.put(key.colfam, key.colqual, key.cv, key.timestamp, val);
        try {
          mtbw.getBatchWriter(indexTableNameFinal.toString()).addMutation(m);
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    };
    // Same serialization, targeting the reverse index table.
    LRUOutputCombiner.Output<MutationInfo,CountAndSet> reverseIndexOutput =
      new LRUOutputCombiner.Output<WikipediaPartitionedMapper.MutationInfo,CountAndSet>() {
      @Override
      public void output(MutationInfo key, CountAndSet value)
      {
        Uid.List.Builder builder = Uid.List.newBuilder();
        builder.setCOUNT(value.count);
        if (value.set == null) {
          builder.setIGNORE(true);
          builder.clearUID();
        } else {
          builder.setIGNORE(false);
          builder.addAllUID(value.set);
        }
        Uid.List list = builder.build();
        Value val = new Value(list.toByteArray());
        Mutation m = new Mutation(key.row);
        m.put(key.colfam, key.colqual, key.cv, key.timestamp, val);
        try {
          mtbw.getBatchWriter(reverseIndexTableNameFinal.toString()).addMutation(m);
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
    };
    wikiIndexOutput = new LRUOutputCombiner<WikipediaPartitionedMapper.MutationInfo,CountAndSet>(10000,indexFold,indexOutput);
    wikiReverseIndexOutput = new LRUOutputCombiner<WikipediaPartitionedMapper.MutationInfo,CountAndSet>(10000, indexFold,reverseIndexOutput);
    // Metadata entries are idempotent, so the fold simply keeps the first value.
    wikiMetadataOutput = new LRUOutputCombiner<WikipediaPartitionedMapper.MutationInfo,Value>(10000,
        new LRUOutputCombiner.Fold<Value>() {
          @Override
          public Value fold(Value oldValue, Value newValue) {
            return oldValue;
          }},
        new LRUOutputCombiner.Output<MutationInfo,Value>() {
          @Override
          public void output(MutationInfo key, Value value) {
            Mutation m = new Mutation(key.row);
            m.put(key.colfam, key.colqual, key.cv, key.timestamp, value);
            try {
              mtbw.getBatchWriter(metadataTableNameFinal.toString()).addMutation(m);
            } catch (Exception e) {
              throw new RuntimeException(e);
            }
          }});
  }
  @Override
  protected void cleanup(Context context) throws IOException, InterruptedException {
    // Flush the combiners before closing the batch writer so buffered entries
    // are not lost.
    wikiIndexOutput.flush();
    wikiMetadataOutput.flush();
    wikiReverseIndexOutput.flush();
    try {
      mtbw.close();
    } catch (MutationsRejectedException e) {
      throw new RuntimeException(e);
    }
  }
  @Override
  protected void map(Text language, Article article, Context context) throws IOException, InterruptedException {
    String NULL_BYTE = "\u0000";
    String colfPrefix = language.toString() + NULL_BYTE;
    String indexPrefix = "fi" + NULL_BYTE;
    ColumnVisibility cv = new ColumnVisibility(cvPrefix + language);
    if (article != null) {
      Text partitionId = new Text(Integer.toString(WikipediaMapper.getPartitionId(article, numPartitions)));
      // Create the mutations for the document.
      // Row is partition id, colf is language0articleid, colq is fieldName\0fieldValue
      Mutation m = new Mutation(partitionId);
      for (Entry<String,Object> entry : article.getFieldValues().entrySet()) {
        m.put(colfPrefix + article.getId(), entry.getKey() + NULL_BYTE + entry.getValue().toString(), cv, article.getTimestamp(), NULL_VALUE);
        // Create mutations for the metadata table.
        MutationInfo mm = new MutationInfo(entry.getKey(), METADATA_EVENT_COLUMN_FAMILY, language.toString(), cv, article.getTimestamp());
        wikiMetadataOutput.put(mm, NULL_VALUE);
      }
      // Tokenize the content
      Set<String> tokens = WikipediaMapper.getTokens(article);
      // We are going to put the fields to be indexed into a multimap. This allows us to iterate
      // over the entire set once.
      Multimap<String,String> indexFields = HashMultimap.create();
      // Add the normalized field values
      LcNoDiacriticsNormalizer normalizer = new LcNoDiacriticsNormalizer();
      for (Entry<String,String> index : article.getNormalizedFieldValues().entrySet())
        indexFields.put(index.getKey(), index.getValue());
      // Add the tokens
      for (String token : tokens)
        indexFields.put(TOKENS_FIELD_NAME, normalizer.normalizeFieldValue("", token));
      for (Entry<String,String> index : indexFields.entries()) {
        // Create mutations for the in partition index
        // Row is partition id, colf is 'fi'\0fieldName, colq is fieldValue\0language\0article id
        m.put(indexPrefix + index.getKey(), index.getValue() + NULL_BYTE + colfPrefix + article.getId(), cv, article.getTimestamp(), NULL_VALUE);
        // Create mutations for the global index
        // Row is field value, colf is field name, colq is partitionid\0language, value is Uid.List object
        MutationInfo gm = new MutationInfo(index.getValue(),index.getKey(),partitionId + NULL_BYTE + language, cv, article.getTimestamp());
        wikiIndexOutput.put(gm, new CountAndSet(Integer.toString(article.getId())));
        // Create mutations for the global reverse index
        MutationInfo grm = new MutationInfo(StringUtils.reverse(index.getValue()),index.getKey(),partitionId + NULL_BYTE + language, cv, article.getTimestamp());
        wikiReverseIndexOutput.put(grm, new CountAndSet(Integer.toString(article.getId())));
        // Create mutations for the metadata table.
        MutationInfo mm = new MutationInfo(index.getKey(),METADATA_INDEX_COLUMN_FAMILY, language + NULL_BYTE + LcNoDiacriticsNormalizer.class.getName(), cv, article.getTimestamp());
        wikiMetadataOutput.put(mm, NULL_VALUE);
      }
      // Add the entire text to the document section of the table.
      // row is the partition, colf is 'd', colq is language\0articleid, value is Base64 encoded GZIP'd document
      m.put(DOCUMENT_COLUMN_FAMILY, colfPrefix + article.getId(), cv, article.getTimestamp(), new Value(Base64.encodeBase64(article.getText().getBytes())));
      context.write(tablename, m);
    } else {
      context.getCounter("wikipedia", "invalid articles").increment(1);
    }
    context.progress();
  }
}
| 6,196 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaPartitioner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.accumulo.examples.wikisearch.ingest.ArticleExtractor.Article;
import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
/**
 * Mapper for the partitioning phase: parses raw wikipedia XML pages into
 * {@code Article} objects and emits only the articles whose partition id
 * matches this task's group, keyed by language.
 */
public class WikipediaPartitioner extends Mapper<LongWritable,Text,Text,Article> {
  // private static final Logger log = Logger.getLogger(WikipediaPartitioner.class);
  public final static Charset UTF8 = Charset.forName("UTF-8");
  public static final String DOCUMENT_COLUMN_FAMILY = "d";
  public static final String METADATA_EVENT_COLUMN_FAMILY = "e";
  public static final String METADATA_INDEX_COLUMN_FAMILY = "i";
  public static final String TOKENS_FIELD_NAME = "TEXT";
  // Extracts the language prefix from dump file names.
  // NOTE(review): the dots before "xml"/"bz2" are unescaped regex wildcards.
  private final static Pattern languagePattern = Pattern.compile("([a-z_]+).*.xml(.bz2)?");
  private ArticleExtractor extractor;
  private String language;
  // This task's partition group and the total group count, from the input split.
  private int myGroup = -1;
  private int numGroups = -1;
  @Override
  public void setup(Context context) {
    Configuration conf = context.getConfiguration();
    WikipediaInputSplit wiSplit = (WikipediaInputSplit)context.getInputSplit();
    myGroup = wiSplit.getPartition();
    numGroups = WikipediaConfiguration.getNumGroups(conf);
    FileSplit split = wiSplit.getFileSplit();
    String fileName = split.getPath().getName();
    Matcher matcher = languagePattern.matcher(fileName);
    if (matcher.matches()) {
      // e.g. "en_us" -> "en-us"
      language = matcher.group(1).replace('_', '-').toLowerCase();
    } else {
      throw new RuntimeException("Unknown ingest language! " + fileName);
    }
    extractor = new ArticleExtractor();
  }
  @Override
  protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    Article article = extractor.extract(new InputStreamReader(new ByteArrayInputStream(value.getBytes()), UTF8));
    if (article != null) {
      // Only emit articles hashed to this task's group; other groups handle the rest.
      int groupId = WikipediaMapper.getPartitionId(article, numGroups);
      if(groupId != myGroup)
        return;
      context.write(new Text(language), article);
    } else {
      context.getCounter("wikipedia", "invalid articles").increment(1);
      context.progress();
    }
  }
}
| 6,197 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.ingest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.IteratorSetting.Column;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
import org.apache.accumulo.core.iterators.user.SummingCombiner;
import org.apache.accumulo.examples.wikisearch.iterator.GlobalIndexUidCombiner;
import org.apache.accumulo.examples.wikisearch.iterator.TextIndexCombiner;
import org.apache.accumulo.examples.wikisearch.reader.AggregatingRecordReader;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
public class WikipediaIngester extends Configured implements Tool {
public final static String INGEST_LANGUAGE = "wikipedia.ingest_language";
public final static String SPLIT_FILE = "wikipedia.split_file";
public final static String TABLE_NAME = "wikipedia.table";
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new WikipediaIngester(), args);
System.exit(res);
}
public static void createTables(TableOperations tops, String tableName,
boolean configureLocalityGroups) throws AccumuloException, AccumuloSecurityException,
TableNotFoundException, TableExistsException {
// Create the shard table
String indexTableName = tableName + "Index";
String reverseIndexTableName = tableName + "ReverseIndex";
String metadataTableName = tableName + "Metadata";
// create the shard table
if (!tops.exists(tableName)) {
// Set a text index combiner on the given field names. No combiner is set if the option is not
// supplied
String textIndexFamilies = WikipediaMapper.TOKENS_FIELD_NAME;
tops.create(tableName);
if (textIndexFamilies.length() > 0) {
System.out.println("Adding content combiner on the fields: " + textIndexFamilies);
IteratorSetting setting = new IteratorSetting(10, TextIndexCombiner.class);
List<Column> columns = new ArrayList<>();
for (String family : StringUtils.split(textIndexFamilies, ',')) {
columns.add(new Column("fi\0" + family));
}
TextIndexCombiner.setColumns(setting, columns);
TextIndexCombiner.setLossyness(setting, true);
tops.attachIterator(tableName, setting, EnumSet.allOf(IteratorScope.class));
}
// Set the locality group for the full content column family
if (configureLocalityGroups) {
tops.setLocalityGroups(tableName, Collections.singletonMap("WikipediaDocuments",
Collections.singleton(new Text(WikipediaMapper.DOCUMENT_COLUMN_FAMILY))));
}
}
if (!tops.exists(indexTableName)) {
tops.create(indexTableName);
// Add the UID combiner
IteratorSetting setting =
new IteratorSetting(19, "UIDAggregator", GlobalIndexUidCombiner.class);
GlobalIndexUidCombiner.setCombineAllColumns(setting, true);
GlobalIndexUidCombiner.setLossyness(setting, true);
tops.attachIterator(indexTableName, setting, EnumSet.allOf(IteratorScope.class));
}
if (!tops.exists(reverseIndexTableName)) {
tops.create(reverseIndexTableName);
// Add the UID combiner
IteratorSetting setting =
new IteratorSetting(19, "UIDAggregator", GlobalIndexUidCombiner.class);
GlobalIndexUidCombiner.setCombineAllColumns(setting, true);
GlobalIndexUidCombiner.setLossyness(setting, true);
tops.attachIterator(reverseIndexTableName, setting, EnumSet.allOf(IteratorScope.class));
}
if (!tops.exists(metadataTableName)) {
// Add the SummingCombiner with VARLEN encoding for the frequency column
tops.create(metadataTableName);
IteratorSetting setting = new IteratorSetting(10, SummingCombiner.class);
SummingCombiner.setColumns(setting, Collections.singletonList(new Column("f")));
SummingCombiner.setEncodingType(setting, SummingCombiner.Type.VARLEN);
tops.attachIterator(metadataTableName, setting, EnumSet.allOf(IteratorScope.class));
}
}
@Override
public int run(String[] args) throws Exception {
Job job = new Job(getConf(), "Ingest Wikipedia");
Configuration conf = job.getConfiguration();
conf.set("mapred.map.tasks.speculative.execution", "false");
String tablename = WikipediaConfiguration.getTableName(conf);
ClientConfiguration clientConfig = ClientConfiguration.create();
clientConfig.setProperty(ClientProperty.INSTANCE_NAME,
WikipediaConfiguration.getInstanceName(conf));
clientConfig.setProperty(ClientProperty.INSTANCE_ZK_HOST,
WikipediaConfiguration.getZookeepers(conf));
String user = WikipediaConfiguration.getUser(conf);
byte[] password = WikipediaConfiguration.getPassword(conf);
Connector connector = WikipediaConfiguration.getConnector(conf);
TableOperations tops = connector.tableOperations();
createTables(tops, tablename, true);
configureJob(job);
List<Path> inputPaths = new ArrayList<>();
SortedSet<String> languages = new TreeSet<>();
FileSystem fs = FileSystem.get(conf);
Path parent = new Path(conf.get("wikipedia.input"));
listFiles(parent, fs, inputPaths, languages);
System.out.println("Input files in " + parent + ":" + inputPaths.size());
Path[] inputPathsArray = new Path[inputPaths.size()];
inputPaths.toArray(inputPathsArray);
System.out.println("Languages:" + languages.size());
FileInputFormat.setInputPaths(job, inputPathsArray);
job.setMapperClass(WikipediaMapper.class);
job.setNumReduceTasks(0);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(Mutation.class);
job.setOutputFormatClass(AccumuloOutputFormat.class);
AccumuloOutputFormat.setConnectorInfo(job, user, new PasswordToken(password));
AccumuloOutputFormat.setZooKeeperInstance(job, clientConfig);
return job.waitForCompletion(true) ? 0 : 1;
}
public final static PathFilter partFilter = path -> path.getName().startsWith("part");
/**
 * Configures the input side of the ingest job: the job jar, the Wikipedia
 * input format, and the XML tokens that delimit one record.
 */
protected void configureJob(Job job) {
  job.setJarByClass(WikipediaIngester.class);
  job.setInputFormatClass(WikipediaInputFormat.class);
  // Each <page>...</page> element is aggregated into a single input record.
  Configuration jobConf = job.getConfiguration();
  jobConf.set(AggregatingRecordReader.START_TOKEN, "<page>");
  jobConf.set(AggregatingRecordReader.END_TOKEN, "</page>");
}
protected static final Pattern filePattern = Pattern.compile("([a-z_]+).*.xml(.bz2)?");
/**
 * Recursively walks {@code path}, adding every file whose name matches
 * {@link #filePattern} to {@code files} and recording the file's language
 * prefix (capture group 1 of the pattern) in {@code languages}.
 *
 * @param path directory (or file) to scan
 * @param fs filesystem to list against
 * @param files output list receiving matching file paths
 * @param languages output set receiving language prefixes seen
 * @throws IOException if a filesystem listing fails
 */
protected void listFiles(Path path, FileSystem fs, List<Path> files, Set<String> languages)
    throws IOException {
  for (FileStatus status : fs.listStatus(path)) {
    // isDirectory() replaces the deprecated isDir().
    if (status.isDirectory()) {
      listFiles(status.getPath(), fs, files, languages);
    } else {
      Path p = status.getPath();
      Matcher matcher = filePattern.matcher(p.getName());
      if (matcher.matches()) {
        languages.add(matcher.group(1));
        files.add(p);
      }
    }
  }
}
}
| 6,198 |
0 | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch | Create_ds/accumulo-wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/output/SortingRFileOutputFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.wikisearch.output;
import java.io.IOException;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.data.Mutation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
/**
 * An {@link OutputFormat} that hands mutations to a
 * {@link BufferingRFileRecordWriter}, which buffers them up to a configured
 * size limit. Configured via two keys: an output path and a maximum buffer
 * size in bytes.
 */
public class SortingRFileOutputFormat extends OutputFormat<Text,Mutation> {

  /** Configuration key naming the output path. */
  public static final String PATH_NAME = "sortingrfileoutputformat.path";

  /** Configuration key holding the maximum in-memory buffer size. */
  public static final String MAX_BUFFER_SIZE = "sortingrfileoutputformat.max.buffer.size";

  public static void setPathName(Configuration conf, String path) {
    conf.set(PATH_NAME, path);
  }

  public static String getPathName(Configuration conf) {
    return conf.get(PATH_NAME);
  }

  public static void setMaxBufferSize(Configuration conf, long maxBufferSize) {
    conf.setLong(MAX_BUFFER_SIZE, maxBufferSize);
  }

  /** Returns the configured maximum buffer size, or -1 when unset. */
  public static long getMaxBufferSize(Configuration conf) {
    return conf.getLong(MAX_BUFFER_SIZE, -1);
  }

  @Override
  public void checkOutputSpecs(JobContext job) throws IOException, InterruptedException {
    // TODO make sure the path is writable?
    // TODO make sure the max buffer size is set and is reasonable
  }

  @Override
  public OutputCommitter getOutputCommitter(TaskAttemptContext context)
      throws IOException, InterruptedException {
    // No commit protocol is needed: the record writer produces its output
    // directly, so every hook below is a no-op and tasks never need commit.
    return new OutputCommitter() {
      @Override
      public void setupJob(JobContext jobContext) throws IOException {}

      @Override
      public void cleanupJob(JobContext jobContext) throws IOException {}

      @Override
      public void setupTask(TaskAttemptContext taskContext) throws IOException {}

      @Override
      public void commitTask(TaskAttemptContext taskContext) throws IOException {}

      @Override
      public void abortTask(TaskAttemptContext taskContext) throws IOException {}

      @Override
      public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException {
        return false;
      }
    };
  }

  @Override
  public RecordWriter<Text,Mutation> getRecordWriter(TaskAttemptContext attempt)
      throws IOException, InterruptedException {
    final Configuration configuration = attempt.getConfiguration();
    final long bufferLimit = getMaxBufferSize(configuration);
    return new BufferingRFileRecordWriter(bufferLimit, configuration);
  }
}
| 6,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.