_id
stringlengths
2
7
title
stringlengths
3
140
partition
stringclasses
3 values
text
stringlengths
73
34.1k
language
stringclasses
1 value
meta_information
dict
q167200
GTFS.export
validation
/**
 * Export the tables for the given feed namespace to a GTFS file at the given path.
 *
 * @param feedId     namespace of the feed to export
 * @param outFile    destination file path for the exported GTFS
 * @param dataSource JDBC data source for the GTFS database
 * @param fromEditor whether the feed being exported comes from the editor
 * @return summary of the export operation
 */
public static FeedLoadResult export (String feedId, String outFile, DataSource dataSource, boolean fromEditor) {
    // Delegate the full export to the JDBC exporter and return its summary directly.
    return new JdbcGtfsExporter(feedId, outFile, dataSource, fromEditor).exportTables();
}
java
{ "resource": "" }
q167201
GTFS.load
validation
/**
 * Load the GTFS file at the given path into the database reachable through the data source.
 *
 * @param filePath   path to the GTFS zip file to load
 * @param dataSource JDBC data source for the target database
 * @return summary of the load operation
 */
public static FeedLoadResult load (String filePath, DataSource dataSource) {
    // Delegate the table loading to the JDBC loader and return its summary directly.
    return new JdbcGtfsLoader(filePath, dataSource).loadTables();
}
java
{ "resource": "" }
q167202
GTFS.validate
validation
/**
 * Run validation on the feed stored under the given namespace.
 *
 * @param feedId     namespace of the feed to validate
 * @param dataSource JDBC data source for the GTFS database
 * @return the result of validating the feed
 */
public static ValidationResult validate (String feedId, DataSource dataSource) {
    // Wrap the namespace in a Feed object and let it run its own validation.
    return new Feed(dataSource, feedId).validate();
}
java
{ "resource": "" }
q167203
GTFS.delete
validation
public static void delete (String feedId, DataSource dataSource) throws SQLException, InvalidNamespaceException { LOG.info("Deleting all tables (dropping schema) for {} feed namespace.", feedId); Connection connection = null; try { connection = dataSource.getConnection(); ensureValidNamespace(feedId); // Mark entry in feeds table as deleted. String deleteFeedEntrySql = "update feeds set deleted = true where namespace = ?"; PreparedStatement deleteFeedStatement = connection.prepareStatement(deleteFeedEntrySql); deleteFeedStatement.setString(1, feedId); deleteFeedStatement.executeUpdate(); // Drop all tables bearing the feedId namespace. // Note: It does not appear to be possible to use prepared statements with "drop schema." String dropSchemaSql = String.format("DROP SCHEMA %s CASCADE", feedId); Statement statement = connection.createStatement(); statement.executeUpdate(dropSchemaSql); // Commit the changes. connection.commit(); } catch (InvalidNamespaceException | SQLException e) { LOG.error(String.format("Could not drop feed for namespace %s", feedId), e); throw e; } finally { if (connection != null) DbUtils.closeQuietly(connection); } }
java
{ "resource": "" }
q167204
GTFS.createDataSource
validation
/**
 * Creates a pooled JDBC DataSource for the given URL and credentials.
 * Fails fast if the JVM default charset is not UTF-8, since the database connection
 * requires it. Auto-commit is disabled pool-wide (needed for bulk inserts and cursor-based fetches).
 *
 * @param url      JDBC connection URL
 * @param username database user (may be null for host-based auth)
 * @param password database password (may be null for host-based auth)
 * @return a pooling DataSource wrapping a commons-pool connection pool
 */
public static DataSource createDataSource (String url, String username, String password) {
    String characterEncoding = Charset.defaultCharset().toString();
    LOG.debug("Default character encoding: {}", characterEncoding);
    if (!Charset.defaultCharset().equals(StandardCharsets.UTF_8)) {
        // Character encoding must be set to UTF-8 in order for the database connection to work without error.
        // To override default encoding at runtime, run application jar with encoding environment variable set to
        // UTF-8 (or update IDE settings). TODO we should also check that JDBC and the database know to use UTF-8.
        throw new RuntimeException("Your system's default encoding (" + characterEncoding + ") is not supported. Please set it to UTF-8. Example: java -Dfile.encoding=UTF-8 application.jar");
    }
    // ConnectionFactory can handle null username and password (for local host-based authentication)
    ConnectionFactory connectionFactory = new DriverManagerConnectionFactory(url, username, password);
    PoolableConnectionFactory poolableConnectionFactory = new PoolableConnectionFactory(connectionFactory, null);
    GenericObjectPool connectionPool = new GenericObjectPool(poolableConnectionFactory);
    // TODO: set other options on connectionPool?
    // Pool sizing: up to 300 concurrent connections, keeping 2-4 idle ones warm.
    connectionPool.setMaxTotal(300);
    connectionPool.setMaxIdle(4);
    connectionPool.setMinIdle(2);
    poolableConnectionFactory.setPool(connectionPool);
    // We also want auto-commit switched off for bulk inserts, and also because fetches are super-slow with
    // auto-commit turned on. Apparently it interferes with result cursors.
    poolableConnectionFactory.setDefaultAutoCommit(false);
    return new PoolingDataSource(connectionPool);
    // We might want already-loaded feeds to be treated as read-only.
    // But we need to call this on the connection, not the connectionSource.
    // connection.setReadOnly();
    // Not sure we need to close cursors - closing the pool-wrapped connection when we're done with it should also close cursors.
    // will this help? https://stackoverflow.com/a/18300252
    // connection.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT);
}
java
{ "resource": "" }
q167205
NewGTFSError.addInfo
validation
/**
 * Attaches an extra key/value info pair to this error.
 *
 * @return this error, to allow fluent chaining
 */
public NewGTFSError addInfo (String key, String value) {
    errorInfo.put(key, value);
    return this;
}
java
{ "resource": "" }
q167206
NewGTFSError.forLine
validation
/**
 * Factory for an error tied to a specific line of a specific GTFS table.
 *
 * @param table      the table in which the error occurred
 * @param lineNumber the 1-based source line containing the error
 * @param errorType  the kind of error detected
 * @param badValue   the offending value, if any
 * @return a new error describing the bad line
 */
public static NewGTFSError forLine (Table table, int lineNumber, NewGTFSErrorType errorType, String badValue) {
    NewGTFSError lineError = new NewGTFSError(table.getEntityClass(), errorType);
    lineError.lineNumber = lineNumber;
    lineError.badValue = badValue;
    return lineError;
}
java
{ "resource": "" }
q167207
NewGTFSError.forEntity
validation
/**
 * Factory for an error tied to an already-loaded entity; copies the entity's line number,
 * ID and sequence number onto the error for later reporting.
 *
 * @param entity    the entity at fault
 * @param errorType the kind of error detected
 * @return a new error describing the entity
 */
public static NewGTFSError forEntity(Entity entity, NewGTFSErrorType errorType) {
    NewGTFSError entityError = new NewGTFSError(entity.getClass(), errorType);
    // Entity's numeric id doubles as the source line number.
    entityError.lineNumber = entity.id;
    entityError.entityId = entity.getId();
    entityError.entitySequenceNumber = entity.getSequenceNumber();
    return entityError;
}
java
{ "resource": "" }
q167208
NewGTFSError.forFeed
validation
/**
 * Factory for a feed-level error (no table, no entity), carrying only a bad value.
 *
 * @param errorType the kind of error detected
 * @param badValue  the offending value, if any
 * @return a new feed-scoped error
 */
public static NewGTFSError forFeed (NewGTFSErrorType errorType, String badValue) {
    // A null entity class marks this error as applying to the whole feed.
    NewGTFSError feedError = new NewGTFSError(null, errorType);
    return feedError.setBadValue(badValue);
}
java
{ "resource": "" }
q167209
PatternStats.getPatternSpeed
validation
/**
 * Average speed for a pattern's trips active on the given date, restricted to trips
 * starting within [from, to].
 *
 * @param pattern_id pattern to measure
 * @param date       service date to select trips for
 * @param from       start of the time window
 * @param to         end of the time window
 * @return average speed, or -1 if no trips qualify
 */
public double getPatternSpeed (String pattern_id, LocalDate date, LocalTime from, LocalTime to) {
    return getAverageSpeedForTrips(getTripsForDate(pattern_id, date), from, to);
}
java
{ "resource": "" }
q167210
PatternStats.getAverageSpeedForTrips
validation
public double getAverageSpeedForTrips (Collection<Trip> trips, LocalTime from, LocalTime to) { TDoubleList speeds = new TDoubleArrayList(); for (Trip trip : trips) { StopTime firstStopTime = feed.stop_times.ceilingEntry(Fun.t2(trip.trip_id, null)).getValue(); LocalTime tripBeginTime = LocalTime.ofSecondOfDay(firstStopTime.departure_time % 86399); // convert 24hr+ seconds to 0 - 86399 // skip trip if begin time is before or after specified time period if (tripBeginTime.isAfter(to) || tripBeginTime.isBefore(from)) { continue; } // TODO: swap straight lines for actual geometry? double speed = feed.getTripSpeed(trip.trip_id, true); if (!Double.isNaN(speed)) { speeds.add(speed); } } if (speeds.isEmpty()) return -1; return speeds.sum() / speeds.size(); }
java
{ "resource": "" }
q167211
PatternStats.getStartTimeForTrips
validation
public LocalTime getStartTimeForTrips (Collection<Trip> trips) { int earliestDeparture = Integer.MAX_VALUE; for (Trip trip : trips) { StopTime st = feed.getOrderedStopTimesForTrip(trip.trip_id).iterator().next(); int dep = st.departure_time; // these trips begin on the next day, so we need to cast them to 0 - 86399 if (dep > 86399) { dep = dep % 86399; } if (dep <= earliestDeparture) { earliestDeparture = dep; } } return LocalTime.ofSecondOfDay(earliestDeparture); }
java
{ "resource": "" }
q167212
PatternStats.getEndTimeForTrips
validation
public LocalTime getEndTimeForTrips (Collection<Trip> trips) { int latestArrival = Integer.MIN_VALUE; for (Trip trip : trips) { StopTime st = feed.getOrderedStopTimesForTrip(trip.trip_id).iterator().next(); if (st.arrival_time >= latestArrival) { latestArrival = st.arrival_time; } } // return end time as 2:00 am if last arrival occurs after midnight return LocalTime.ofSecondOfDay(latestArrival % 86399); }
java
{ "resource": "" }
q167213
PatternStats.getPatternDistance
validation
/**
 * Distance of a pattern, measured along one of its associated trips.
 *
 * @param pattern_id pattern to measure
 * @return distance of an arbitrary trip of the pattern (straight-line segments, not shapes)
 */
public double getPatternDistance (String pattern_id) {
    Pattern pattern = feed.patterns.get(pattern_id);
    // Any trip of the pattern shares the same stop sequence, so the first one suffices.
    String representativeTripId = pattern.associatedTrips.iterator().next();
    return feed.getTripDistance(representativeTripId, false);
}
java
{ "resource": "" }
q167214
PatternStats.getAverageStopSpacing
validation
/**
 * Average distance between consecutive stops of the pattern.
 *
 * NOTE(review): this divides the total pattern distance by the number of stops, but a
 * pattern with N stops has N-1 inter-stop segments — confirm whether dividing by
 * {@code size() - 1} was intended before changing callers' expectations.
 *
 * @param pattern_id pattern to measure
 * @return pattern distance divided by the stop count
 */
public double getAverageStopSpacing (String pattern_id) { Pattern pattern = feed.patterns.get(pattern_id); return getPatternDistance(pattern_id) / pattern.orderedStops.size(); }
java
{ "resource": "" }
q167215
JdbcGTFSFeedConverter.copyEntityToSql
validation
private <E extends Entity> void copyEntityToSql(Iterable<E> entities, Table table) throws SQLException { table.createSqlTable(connection, namespace, true); String entityInsertSql = table.generateInsertSql(namespace, true); PreparedStatement insertStatement = connection.prepareStatement(entityInsertSql); // Iterate over agencies and add to prepared statement int count = 0, batchSize = 0; for (E entity : entities) { entity.setStatementParameters(insertStatement, true); insertStatement.addBatch(); count++; batchSize++; // FIXME: Add batching execute on n if (batchSize > JdbcGtfsLoader.INSERT_BATCH_SIZE) { insertStatement.executeBatch(); batchSize = 0; } } // Handle remaining insertStatement.executeBatch(); LOG.info("Inserted {} {}", count, table.name); // FIXME: Should some tables not have indexes? table.createIndexes(connection, namespace); }
java
{ "resource": "" }
q167216
ValidateFieldResult.from
validation
/**
 * Converts an arbitrary ValidateFieldResult into a String-typed one by stringifying its
 * clean value and copying its accumulated errors.
 *
 * @param result source result (any value type)
 * @return a String-typed result with the same errors
 */
public static ValidateFieldResult<String> from(ValidateFieldResult result) {
    ValidateFieldResult<String> converted = new ValidateFieldResult<>();
    // String.valueOf also tolerates a null clean value (yielding "null"), matching prior behavior.
    converted.clean = String.valueOf(result.clean);
    converted.errors.addAll(result.errors);
    return converted;
}
java
{ "resource": "" }
q167217
SQLErrorStorage.getErrorCount
validation
public int getErrorCount () { try { // Ensure any outstanding inserts are committed so that count is accurate. this.commit(); Statement statement = connection.createStatement(); statement.execute(String.format("select count(*) from %serrors", tablePrefix)); ResultSet resultSet = statement.getResultSet(); resultSet.next(); int count = resultSet.getInt(1); return count; } catch (SQLException ex) { throw new StorageException(ex); } }
java
{ "resource": "" }
q167218
SQLErrorStorage.commit
validation
private void commit() { try { // Execute any remaining batch inserts and commit the transaction. insertError.executeBatch(); insertInfo.executeBatch(); connection.commit(); } catch (SQLException ex) { throw new StorageException(ex); } }
java
{ "resource": "" }
q167219
TimeField.validateAndConvert
validation
/**
 * Validates an HH:MM:SS time string and converts it to a String-typed result carrying
 * the parsed seconds value plus any validation errors.
 *
 * @param hhmmss time string to parse
 * @return String-typed validation result
 */
@Override
public ValidateFieldResult<String> validateAndConvert(String hhmmss) {
    // Parse into seconds, then adapt the typed result into a String result.
    return ValidateFieldResult.from(getSeconds(hhmmss));
}
java
{ "resource": "" }
q167220
ReferentialIntegrityError.compareTo
validation
/**
 * Orders referential-integrity errors first by the base GTFSError ordering, then by the
 * bad reference string as a tiebreaker.
 *
 * @param o error to compare against (expected to be a ReferentialIntegrityError)
 * @return negative/zero/positive per Comparable contract
 */
@Override
public int compareTo (GTFSError o) {
    int baseOrder = super.compareTo(o);
    if (baseOrder != 0) {
        return baseOrder;
    }
    // Same base ordering: fall back to comparing the offending reference values.
    ReferentialIntegrityError other = (ReferentialIntegrityError) o;
    return this.badReference.compareTo(other.badReference);
}
java
{ "resource": "" }
q167221
GeoUtils.getDistance
validation
/**
 * Sums the great-circle (orthodromic) distances between consecutive points of the
 * given line string, in meters, on the WGS84 ellipsoid.
 *
 * @param tripGeometry line string whose length to measure
 * @return total distance along the line string
 * @throws RuntimeException wrapping any TransformException from the distance computation
 */
public static double getDistance(LineString tripGeometry) {
    double total = 0;
    int lastSegmentStart = tripGeometry.getNumPoints() - 1;
    for (int i = 0; i < lastSegmentStart; i++) {
        try {
            total += JTS.orthodromicDistance(
                tripGeometry.getCoordinateN(i),
                tripGeometry.getCoordinateN(i + 1),
                DefaultGeographicCRS.WGS84);
        } catch (TransformException e) {
            // Surface geodetic failures as unchecked; callers treat them as fatal.
            throw new RuntimeException(e);
        }
    }
    return total;
}
java
{ "resource": "" }
q167222
RowCountFetcher.field
validation
/**
 * Builds an Int-typed GraphQL field whose value is the row count of the given table.
 *
 * @param fieldName name to expose the field under
 * @param tableName table whose rows are counted
 * @return the constructed field definition
 */
public static GraphQLFieldDefinition field (String fieldName, String tableName) {
    return newFieldDefinition()
        .name(fieldName)
        .type(GraphQLInt)
        .dataFetcher(new RowCountFetcher(tableName))
        .build();
}
java
{ "resource": "" }
q167223
RowCountFetcher.groupedField
validation
/**
 * Builds a GraphQL field returning row counts of {@code tableName} grouped by
 * {@code groupByColumn}, exposed as a list of group-count objects.
 *
 * Fix: {@code .type(groupCountType)} was called and then immediately overwritten by
 * {@code .type(new GraphQLList(groupCountType))}; the dead first call is removed.
 *
 * @param tableName     table whose rows are counted
 * @param groupByColumn column to group counts by (also used as the field name)
 * @return the constructed field definition
 */
public static GraphQLFieldDefinition groupedField(String tableName, String groupByColumn) {
    return newFieldDefinition()
        .name(groupByColumn)
        .argument(stringArg("pattern_id"))
        // The field yields one group-count entry per distinct value of groupByColumn.
        .type(new GraphQLList(groupCountType))
        .dataFetcher(new RowCountFetcher(tableName, null, groupByColumn))
        .build();
}
java
{ "resource": "" }
q167224
Deduplicator.deduplicateIntArray
validation
/**
 * Returns a canonical instance of the given int array: arrays with equal contents share
 * one stored copy, reducing memory for repeated sequences.
 *
 * @param original array to deduplicate (may be null)
 * @return the canonical array with the same contents, or null if original was null
 */
public int[] deduplicateIntArray(int[] original) {
    if (original == null) return null;
    IntArray wrapper = new IntArray(original);
    IntArray existing = canonicalIntArrays.get(wrapper);
    if (existing != null) {
        return existing.array;
    }
    // First occurrence of this content: register it as the canonical instance.
    canonicalIntArrays.put(wrapper, wrapper);
    return wrapper.array;
}
java
{ "resource": "" }
q167225
StopStats.getTripsForDate
validation
public List<Trip> getTripsForDate (String stop_id, LocalDate date) { List<String> tripIds = stats.getTripsForDate(date).stream() .map(trip -> trip.trip_id) .collect(Collectors.toList()); return feed.getDistinctTripsForStop(stop_id).stream() .filter(t -> tripIds.contains(t.trip_id)) // filter by trip_id list for date .collect(Collectors.toList()); }
java
{ "resource": "" }
q167226
StopStats.getAverageHeadwayForStop
validation
/**
 * Average headway (seconds between vehicles) at a stop for trips active on the given
 * date within the [from, to] window.
 *
 * @param stop_id stop to measure
 * @param date    service date
 * @param from    start of the time window
 * @param to      end of the time window
 * @return average headway in seconds
 */
public int getAverageHeadwayForStop (String stop_id, LocalDate date, LocalTime from, LocalTime to) {
    return getStopHeadwayForTrips(stop_id, getTripsForDate(stop_id, date), from, to);
}
java
{ "resource": "" }
q167227
StopStats.getRouteHeadwaysForStop
validation
/**
 * Headway per route at the given stop for the given date and time window.
 *
 * Fix: when several patterns of the same route serve the stop, the route appeared multiple
 * times in the list and its headway was recomputed for each occurrence (each overwrite
 * producing the same value). Duplicates are now skipped before the expensive computation.
 *
 * @param stop_id stop to measure
 * @param date    service date
 * @param from    start of the time window
 * @param to      end of the time window
 * @return map of route_id to headway in seconds
 */
public Map<String, Integer> getRouteHeadwaysForStop (String stop_id, LocalDate date, LocalTime from, LocalTime to) {
    Map<String, Integer> routeHeadwayMap = new HashMap<>();
    // Routes whose patterns include this stop (may contain duplicates across patterns).
    List<Route> routes = feed.patterns.values().stream()
            .filter(p -> p.orderedStops.contains(stop_id))
            .map(p -> feed.routes.get(p.route_id))
            .collect(Collectors.toList());
    for (Route route : routes) {
        // Skip routes already computed via another pattern.
        if (routeHeadwayMap.containsKey(route.route_id)) continue;
        routeHeadwayMap.put(route.route_id,
                getHeadwayForStopByRoute(stop_id, route.route_id, date, from, to));
    }
    return routeHeadwayMap;
}
java
{ "resource": "" }
q167228
StopStats.getHeadwayForStopByRoute
validation
/**
 * Headway at a stop restricted to one route's trips that are active on the given date.
 *
 * @param stop_id  stop to measure
 * @param route_id route whose trips are considered
 * @param date     service date
 * @param from     start of the time window
 * @param to       end of the time window
 * @return headway in seconds for the qualifying trips
 */
public int getHeadwayForStopByRoute (String stop_id, String route_id, LocalDate date, LocalTime from, LocalTime to) {
    // Of the stop's distinct trips, keep those on the requested route with service active on the date.
    List<Trip> qualifyingTrips = feed.getDistinctTripsForStop(stop_id).stream()
            .filter(trip -> feed.trips.get(trip.trip_id).route_id.equals(route_id))
            .filter(trip -> feed.services.get(trip.service_id).activeOn(date))
            .collect(Collectors.toList());
    return getStopHeadwayForTrips(stop_id, qualifyingTrips, from, to);
}
java
{ "resource": "" }
q167229
JdbcTableWriter.getJsonNode
validation
/**
 * Parses a JSON string into a tree, logging and rethrowing on malformed input.
 *
 * @param json JSON text to parse
 * @return parsed JSON tree
 * @throws IOException if the text is not valid JSON
 */
private static JsonNode getJsonNode (String json) throws IOException {
    try {
        return mapper.readTree(json);
    } catch (IOException e) {
        // Log the syntax failure before propagating so callers see it in context.
        LOG.error("Bad JSON syntax", e);
        throw e;
    }
}
java
{ "resource": "" }
q167230
JdbcTableWriter.create
validation
/**
 * Creates a new entity from the given JSON. Implemented as an update with a null id,
 * which the update path interprets as an insert.
 *
 * @param json       entity JSON payload
 * @param autoCommit whether to commit immediately
 * @return resulting entity JSON
 * @throws SQLException on database errors
 * @throws IOException  on malformed JSON
 */
@Override
public String create(String json, boolean autoCommit) throws SQLException, IOException {
    // A null id signals "insert" to the shared update implementation.
    return update(null, json, autoCommit);
}
java
{ "resource": "" }
q167231
JdbcTableWriter.updateStopTimesForPatternStop
validation
/**
 * Updates arrival/departure times on all stop_times rows matching one pattern stop
 * (every trip of the pattern, at that stop sequence), deriving the arrival from the
 * cumulative travel time so far plus this stop's default travel time.
 *
 * @param patternStop        JSON object with default_travel_time, default_dwell_time,
 *                           pattern_id and stop_sequence
 * @param previousTravelTime cumulative seconds from trip start up to the previous stop
 * @return cumulative seconds including this stop's travel and dwell time, for the next call
 * @throws SQLException on database errors
 */
private int updateStopTimesForPatternStop(ObjectNode patternStop, int previousTravelTime) throws SQLException {
    // Join stop_times to trips so the pattern_id filter can be applied per trip.
    String sql = String.format(
        "update %s.stop_times st set arrival_time = ?, departure_time = ? from %s.trips t " +
        "where st.trip_id = t.trip_id AND t.pattern_id = ? AND st.stop_sequence = ?",
        tablePrefix,
        tablePrefix
    );
    // Prepare the statement and set statement parameters
    PreparedStatement statement = connection.prepareStatement(sql);
    int oneBasedIndex = 1;
    int travelTime = patternStop.get("default_travel_time").asInt();
    // Arrival = cumulative time to previous stop + travel from there to this stop.
    int arrivalTime = previousTravelTime + travelTime;
    statement.setInt(oneBasedIndex++, arrivalTime);
    int dwellTime = patternStop.get("default_dwell_time").asInt();
    // Departure = arrival + dwell at this stop.
    statement.setInt(oneBasedIndex++, arrivalTime + dwellTime);
    // Set "where clause" with value for pattern_id and stop_sequence
    statement.setString(oneBasedIndex++, patternStop.get("pattern_id").asText());
    // In the editor, we can depend on stop_times#stop_sequence matching pattern_stops#stop_sequence because we
    // normalize stop sequence values for stop times during snapshotting for the editor.
    statement.setInt(oneBasedIndex++, patternStop.get("stop_sequence").asInt());
    // Log query, execute statement, and log result.
    LOG.debug(statement.toString());
    int entitiesUpdated = statement.executeUpdate();
    LOG.debug("{} stop_time arrivals/departures updated", entitiesUpdated);
    return travelTime + dwellTime;
}
java
{ "resource": "" }
q167232
JdbcTableWriter.verifyReferencesExist
validation
private void verifyReferencesExist(String referringTableName, Multimap<Table, String> referencesPerTable) throws SQLException { for (Table referencedTable: referencesPerTable.keySet()) { LOG.info("Checking {} references to {}", referringTableName, referencedTable.name); Collection<String> referenceStrings = referencesPerTable.get(referencedTable); String referenceFieldName = referencedTable.getKeyFieldName(); String questionMarks = String.join(", ", Collections.nCopies(referenceStrings.size(), "?")); String checkCountSql = String.format( "select %s from %s.%s where %s in (%s)", referenceFieldName, tablePrefix, referencedTable.name, referenceFieldName, questionMarks); PreparedStatement preparedStatement = connection.prepareStatement(checkCountSql); int oneBasedIndex = 1; for (String ref : referenceStrings) { preparedStatement.setString(oneBasedIndex++, ref); } LOG.info(preparedStatement.toString()); ResultSet resultSet = preparedStatement.executeQuery(); Set<String> foundReferences = new HashSet<>(); while (resultSet.next()) { String referenceValue = resultSet.getString(1); foundReferences.add(referenceValue); } // Determine if any references were not found. referenceStrings.removeAll(foundReferences); if (referenceStrings.size() > 0) { throw new SQLException( String.format( "%s entities must contain valid %s references. (Invalid references: %s)", referringTableName, referenceFieldName, String.join(", ", referenceStrings))); } else { LOG.info("All {} {} {} references are valid.", foundReferences.size(), referencedTable.name, referenceFieldName); } } }
java
{ "resource": "" }
q167233
JdbcTableWriter.verifyInteriorStopsAreUnchanged
validation
/**
 * Checks that the stops BETWEEN the first and last differing indices are unchanged
 * relative to the original list (allowing for the single displaced stop), enforcing the
 * rule that a pattern update may only add, delete, or transpose one stop.
 *
 * @param originalStopIds     stop IDs before the edit
 * @param newStops            pattern stops after the edit
 * @param firstDifferentIndex first index where old and new lists diverge
 * @param lastDifferentIndex  last index where old and new lists diverge
 * @param movedRight          true if the displaced stop moved toward the end of the list
 * @throws IllegalStateException if any interior stop's order changed
 */
private static void verifyInteriorStopsAreUnchanged(
    List<String> originalStopIds,
    List<PatternStop> newStops,
    int firstDifferentIndex,
    int lastDifferentIndex,
    boolean movedRight
) {
    //Stops mapped to list of stop IDs simply for easier viewing/comparison with original IDs while debugging with
    // breakpoints.
    List<String> newStopIds = newStops.stream().map(s -> s.stop_id).collect(Collectors.toList());
    // Determine the bounds of the region that should be identical between the two lists.
    // A rightward move shifts the interior region left by one; a leftward move shifts it right.
    int beginRegion = movedRight ? firstDifferentIndex : firstDifferentIndex + 1;
    int endRegion = movedRight ? lastDifferentIndex - 1 : lastDifferentIndex;
    for (int i = beginRegion; i <= endRegion; i++) {
        // Shift index when selecting stop from original list to account for displaced stop.
        int shiftedIndex = movedRight ? i + 1 : i - 1;
        String newStopId = newStopIds.get(i);
        String originalStopId = originalStopIds.get(shiftedIndex);
        if (!newStopId.equals(originalStopId)) {
            // If stop ID for new stop at the given index does not match the original stop ID, the order of at least
            // one stop within the changed region has been changed, which is illegal according to the rule enforcing
            // only a single addition, deletion, or transposition per update.
            throw new IllegalStateException(RECONCILE_STOPS_ERROR_MSG);
        }
    }
}
java
{ "resource": "" }
q167234
JdbcTableWriter.insertBlankStopTimes
validation
private void insertBlankStopTimes( List<String> tripIds, List<PatternStop> newStops, int startingStopSequence, int stopTimesToAdd, Connection connection ) throws SQLException { if (tripIds.isEmpty()) { // There is no need to insert blank stop times if there are no trips for the pattern. return; } String insertSql = Table.STOP_TIMES.generateInsertSql(tablePrefix, true); PreparedStatement insertStatement = connection.prepareStatement(insertSql); int count = 0; int totalRowsUpdated = 0; // Create a new stop time for each sequence value (times each trip ID) that needs to be inserted. for (int i = startingStopSequence; i < stopTimesToAdd + startingStopSequence; i++) { PatternStop patternStop = newStops.get(i); StopTime stopTime = new StopTime(); stopTime.stop_id = patternStop.stop_id; stopTime.drop_off_type = patternStop.drop_off_type; stopTime.pickup_type = patternStop.pickup_type; stopTime.timepoint = patternStop.timepoint; stopTime.shape_dist_traveled = patternStop.shape_dist_traveled; stopTime.stop_sequence = i; // Update stop time with each trip ID and add to batch. for (String tripId : tripIds) { stopTime.trip_id = tripId; stopTime.setStatementParameters(insertStatement, true); insertStatement.addBatch(); if (count % INSERT_BATCH_SIZE == 0) { int[] rowsUpdated = insertStatement.executeBatch(); totalRowsUpdated += rowsUpdated.length; } } } int[] rowsUpdated = insertStatement.executeBatch(); totalRowsUpdated += rowsUpdated.length; LOG.info("{} blank stop times inserted", totalRowsUpdated); }
java
{ "resource": "" }
q167235
JdbcTableWriter.delete
validation
@Override public int delete(Integer id, boolean autoCommit) throws SQLException { try { // Handle "cascading" delete or constraints on deleting entities that other entities depend on // (e.g., keep a calendar from being deleted if trips reference it). // FIXME: actually add "cascading"? Currently, it just deletes one level down. deleteFromReferencingTables(tablePrefix, specTable, id); // Next, delete the actual record specified by id. PreparedStatement statement = connection.prepareStatement(specTable.generateDeleteSql(tablePrefix)); statement.setInt(1, id); LOG.info(statement.toString()); // Execute query int result = statement.executeUpdate(); if (result == 0) { LOG.error("Could not delete {} entity with id: {}", specTable.name, id); throw new SQLException("Could not delete entity"); } if (autoCommit) connection.commit(); // FIXME: change return message based on result value return result; } catch (Exception e) { LOG.error("Could not delete {} entity with id: {}", specTable.name, id); e.printStackTrace(); // Rollback changes if errors encountered. connection.rollback(); throw e; } finally { // Always close connection if auto-committing. Otherwise, leave open (for potential further updates). if (autoCommit) connection.close(); } }
java
{ "resource": "" }
q167236
JdbcTableWriter.handleStatementExecution
validation
private static long handleStatementExecution(PreparedStatement statement, boolean isCreating) throws SQLException { // Log the SQL for the prepared statement LOG.info(statement.toString()); int affectedRows = statement.executeUpdate(); // Determine operation-specific action for any error messages String messageAction = isCreating ? "Creating" : "Updating"; if (affectedRows == 0) { // No update occurred. // TODO: add some clarity on cause (e.g., where clause found no entity with provided ID)? throw new SQLException(messageAction + " entity failed, no rows affected."); } try (ResultSet generatedKeys = statement.getGeneratedKeys()) { if (generatedKeys.next()) { // Get the auto-generated ID from the update execution long newId = generatedKeys.getLong(1); return newId; } else { throw new SQLException(messageAction + " entity failed, no ID obtained."); } } catch (SQLException e) { e.printStackTrace(); throw e; } }
java
{ "resource": "" }
q167237
JdbcTableWriter.getRowCount
validation
private static int getRowCount(String tableName, Connection connection) throws SQLException { String rowCountSql = String.format("SELECT COUNT(*) FROM %s", tableName); LOG.info(rowCountSql); // Create statement for counting rows selected Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery(rowCountSql); if (resultSet.next()) return resultSet.getInt(1); else return 0; }
java
{ "resource": "" }
q167238
JdbcTableWriter.getReferencingTables
validation
private static Set<Table> getReferencingTables(Table table) { Set<Table> referencingTables = new HashSet<>(); for (Table gtfsTable : Table.tablesInOrder) { // IMPORTANT: Skip the table for the entity we're modifying or if loop table does not have field. if (table.name.equals(gtfsTable.name)) continue; for (Field field : gtfsTable.fields) { if (field.isForeignReference() && field.referenceTable.name.equals(table.name)) { // If any of the table's fields are foreign references to the specified table, add to the return set. referencingTables.add(gtfsTable); } } } return referencingTables; }
java
{ "resource": "" }
q167239
JdbcTableWriter.getValueForId
validation
/**
 * Fetches a single column value from the row with the given id.
 *
 * Fix: Statement and ResultSet were never closed; now managed with try-with-resources.
 *
 * @param id         row id to look up
 * @param fieldName  column to read (trusted, code-supplied — not user input)
 * @param namespace  schema namespace
 * @param table      table to read from
 * @param connection connection to query through
 * @return the column value as a string, or null if no row matched
 * @throws SQLException on database errors
 */
private static String getValueForId(int id, String fieldName, String namespace, Table table, Connection connection) throws SQLException {
    String tableName = String.join(".", namespace, table.name);
    String selectIdSql = String.format("select %s from %s where id = %d", fieldName, tableName, id);
    LOG.info(selectIdSql);
    try (Statement selectIdStatement = connection.createStatement();
         ResultSet selectResults = selectIdStatement.executeQuery(selectIdSql)) {
        String value = null;
        // id is expected unique; the loop keeps the last match, preserving prior behavior.
        while (selectResults.next()) {
            value = selectResults.getString(1);
        }
        return value;
    }
}
java
{ "resource": "" }
q167240
BaseGTFSCache.put
validation
/**
 * Stores the given feed file in the cache under an explicit id.
 *
 * @param id       explicit cache key for the feed
 * @param feedFile GTFS file to store
 * @return the cached feed object
 * @throws Exception on any storage failure
 */
public T put (String id, File feedFile) throws Exception {
    // No id generator: the caller-provided id is used directly.
    return put(id, feedFile, null);
}
java
{ "resource": "" }
q167241
BaseGTFSCache.put
validation
/**
 * Stores the given feed file in the cache, deriving the cache key from the loaded feed
 * via the supplied generator.
 *
 * @param idGenerator function producing the cache key from the loaded feed
 * @param feedFile    GTFS file to store
 * @return the cached feed object
 * @throws Exception on any storage failure
 */
public T put (Function<GTFSFeed, String> idGenerator, File feedFile) throws Exception {
    // No explicit id: the generator computes it after the feed is loaded.
    return put(null, feedFile, idGenerator);
}
java
{ "resource": "" }
q167242
Service.activeOn
validation
/**
 * Determines whether this service runs on the given date.
 * Calendar-date exceptions take precedence over the regular calendar: exception_type 1
 * means added service, anything else means removed. With no exception and no calendar,
 * the service is inactive. Otherwise the calendar's validity range and day-of-week flags decide.
 *
 * @param date service date to test
 * @return true if the service is active on the date
 */
public boolean activeOn (LocalDate date) {
    // first check for exceptions
    CalendarDate exception = calendar_dates.get(date);
    if (exception != null)
        // exception_type 1 = service added on this date; any other value = removed.
        return exception.exception_type == 1;
    else if (calendar == null)
        // No exception and no regular calendar: service never runs.
        return false;
    else {
        boolean outsideValidityRange = date.isAfter(calendar.end_date) || date.isBefore(calendar.start_date);
        if (outsideValidityRange) return false;
        // Within range: the per-weekday flag (1 = runs) decides.
        switch (date.getDayOfWeek()) {
            case MONDAY: return calendar.monday == 1;
            case TUESDAY: return calendar.tuesday == 1;
            case WEDNESDAY: return calendar.wednesday == 1;
            case THURSDAY: return calendar.thursday == 1;
            case FRIDAY: return calendar.friday == 1;
            case SATURDAY: return calendar.saturday == 1;
            case SUNDAY: return calendar.sunday == 1;
            default: throw new IllegalArgumentException("unknown day of week constant!");
        }
    }
}
java
{ "resource": "" }
q167243
GTFSGraphQL.initialize
validation
/**
 * Initializes the GraphQL singleton: stores the data source and builds the GraphQL
 * engine from the feed-based schema.
 *
 * @param dataSource JDBC data source GraphQL resolvers will query through
 */
public static void initialize (DataSource dataSource) {
    GTFSGraphQL.dataSource = dataSource;
    // Build the engine once from the static feed-based schema.
    GRAPHQL = GraphQL
        .newGraphQL(GraphQLGtfsSchema.feedBasedSchema)
        .build();
}
java
{ "resource": "" }
q167244
Table.editorFields
validation
/**
 * Fields used by the editor: REQUIRED, OPTIONAL, and EDITOR-only fields
 * (extension fields are excluded).
 *
 * @return editor-relevant fields in declaration order
 */
public List<Field> editorFields() {
    List<Field> result = new ArrayList<>();
    for (Field f : fields) {
        boolean usedByEditor =
            f.requirement == REQUIRED || f.requirement == OPTIONAL || f.requirement == EDITOR;
        if (usedByEditor) {
            result.add(f);
        }
    }
    return result;
}
java
{ "resource": "" }
q167245
Table.requiredFields
validation
public List<Field> requiredFields () { // Filter out fields not used in editor (i.e., extension fields). List<Field> requiredFields = new ArrayList<>(); for (Field f : fields) if (f.requirement == REQUIRED) requiredFields.add(f); return requiredFields; }
java
{ "resource": "" }
q167246
Table.specFields
validation
/**
 * Fields defined by the GTFS spec: REQUIRED plus OPTIONAL (no editor-only or extension fields).
 *
 * @return spec fields in declaration order
 */
public List<Field> specFields () {
    List<Field> result = new ArrayList<>();
    for (Field f : fields) {
        boolean inSpec = f.requirement == REQUIRED || f.requirement == OPTIONAL;
        if (inSpec) {
            result.add(f);
        }
    }
    return result;
}
java
{ "resource": "" }
q167247
Table.createSqlTable
validation
/**
 * Drops and recreates this table in SQL, with an id column plus one column per field,
 * and optionally a composite primary key.
 *
 * @param connection       connection to execute DDL through
 * @param namespace        schema namespace; null for an unqualified table name
 * @param makeIdSerial     true to declare the id column as serial (auto-increment), else bigint
 * @param primaryKeyFields columns composing the primary key, or null for none
 * @return the result of Statement#execute for the create statement
 * @throws StorageException wrapping any failure during drop/create
 */
public boolean createSqlTable (Connection connection, String namespace, boolean makeIdSerial, String[] primaryKeyFields) {
    // Optionally join namespace and name to create full table name if namespace is not null (i.e., table object is
    // a spec table).
    String tableName = namespace != null ? String.join(".", namespace, name) : name;
    // One SQL column declaration per field, joined into the create statement.
    String fieldDeclarations = Arrays.stream(fields)
        .map(Field::getSqlDeclaration)
        .collect(Collectors.joining(", "));
    if (primaryKeyFields != null) {
        fieldDeclarations += String.format(", primary key (%s)", String.join(", ", primaryKeyFields));
    }
    String dropSql = String.format("drop table if exists %s", tableName);
    // Adding the unlogged keyword gives about 12 percent speedup on loading, but is non-standard.
    String idFieldType = makeIdSerial ? "serial" : "bigint";
    String createSql = String.format("create table %s (id %s not null, %s)", tableName, idFieldType, fieldDeclarations);
    try {
        Statement statement = connection.createStatement();
        LOG.info(dropSql);
        statement.execute(dropSql);
        LOG.info(createSql);
        return statement.execute(createSql);
    } catch (Exception ex) {
        // Wrap any DDL failure in the project's storage exception type.
        throw new StorageException(ex);
    }
}
java
{ "resource": "" }
q167248
Table.generateInsertSql
validation
/**
 * Builds an INSERT statement for this table covering the id column and every editor field,
 * with one "?" placeholder per field.
 *
 * Fix: {@code editorFields()} was invoked twice (once for names, once for the placeholder
 * count), rebuilding the filtered list; it is now computed once.
 *
 * @param namespace    schema namespace; null for an unqualified table name
 * @param setDefaultId true to let the database assign the id (DEFAULT), false for a "?" placeholder
 * @return the INSERT SQL string
 */
public String generateInsertSql (String namespace, boolean setDefaultId) {
    String tableName = namespace == null ? name : String.join(".", namespace, name);
    // Compute the editor field list once and reuse it for both names and placeholders.
    List<Field> insertFields = editorFields();
    String joinedFieldNames = commaSeparatedNames(insertFields);
    String idValue = setDefaultId ? "DEFAULT" : "?";
    return String.format(
        "insert into %s (id, %s) values (%s, %s)",
        tableName,
        joinedFieldNames,
        idValue,
        String.join(", ", Collections.nCopies(insertFields.size(), "?"))
    );
}
java
{ "resource": "" }
q167249
Table.commaSeparatedNames
validation
public static String commaSeparatedNames(List<Field> fieldsToJoin, String prefix, boolean csvOutput) { return fieldsToJoin.stream() // NOTE: This previously only prefixed fields that were foreign refs or key fields. However, this // caused an issue where shared fields were ambiguously referenced in a select query (specifically, // wheelchair_accessible in routes and trips). So this filter has been removed. .map(f -> f.getColumnExpression(prefix, csvOutput)) .collect(Collectors.joining(", ")); }
java
{ "resource": "" }
q167250
Table.generateUpdateSql
validation
public String generateUpdateSql (String namespace, int id) { // Collect field names for string joining from JsonObject. String joinedFieldNames = editorFields().stream() // If updating, add suffix for use in set clause .map(field -> field.name + " = ?") .collect(Collectors.joining(", ")); String tableName = namespace == null ? name : String.join(".", namespace, name); return String.format("update %s set %s where id = %d", tableName, joinedFieldNames, id); }
java
{ "resource": "" }
q167251
Table.generateSelectSql
validation
/**
 * Builds a SELECT over this table whose column list depends on the minimum field
 * requirement: EDITOR selects editor fields, OPTIONAL selects spec fields, REQUIRED
 * selects required fields only, anything else selects *.
 *
 * @param namespace          schema namespace to qualify the table name
 * @param minimumRequirement lowest field requirement level to include
 * @return the SELECT SQL string
 */
public String generateSelectSql (String namespace, Requirement minimumRequirement) {
    String tableName = String.join(".", namespace, name);
    // Fully qualify columns to avoid ambiguity when this select is joined with others.
    String fieldPrefix = tableName + ".";
    String fieldsString;
    if (minimumRequirement.equals(EDITOR)) {
        fieldsString = commaSeparatedNames(editorFields(), fieldPrefix, true);
    } else if (minimumRequirement.equals(OPTIONAL)) {
        fieldsString = commaSeparatedNames(specFields(), fieldPrefix, true);
    } else if (minimumRequirement.equals(REQUIRED)) {
        fieldsString = commaSeparatedNames(requiredFields(), fieldPrefix, true);
    } else {
        fieldsString = "*";
    }
    return String.format("select %s from %s", fieldsString, tableName);
}
java
{ "resource": "" }
q167252
Table.generateSelectAllExistingFieldsSql
validation
/**
 * Builds a SELECT listing exactly the columns that actually exist in the database for this
 * table (queried from information_schema), so tables with non-standard/missing columns can
 * still be exported.
 *
 * @param connection connection used to inspect the schema
 * @param namespace  schema namespace containing the table
 * @return the SELECT SQL string covering only existing columns
 * @throws SQLException on database errors
 */
public String generateSelectAllExistingFieldsSql(Connection connection, String namespace) throws SQLException {
    // select all columns from table
    // FIXME This is postgres-specific and needs to be made generic for non-postgres databases.
    PreparedStatement statement = connection.prepareStatement(
        "SELECT column_name FROM information_schema.columns WHERE table_schema = ? AND table_name = ?"
    );
    statement.setString(1, namespace);
    statement.setString(2, name);
    ResultSet result = statement.executeQuery();
    // get result and add fields that are defined in this table
    List<Field> existingFields = new ArrayList<>();
    while (result.next()) {
        String columnName = result.getString(1);
        existingFields.add(getFieldForName(columnName));
    }
    String tableName = String.join(".", namespace, name);
    // Qualify columns with the table name to avoid ambiguity in joined queries.
    String fieldPrefix = tableName + ".";
    return String.format(
        "select %s from %s",
        commaSeparatedNames(existingFields, fieldPrefix, true),
        tableName
    );
}
java
{ "resource": "" }
q167253
Table.generateDeleteSql
validation
/**
 * Builds a parameterized DELETE for this table, keyed on the given field (defaulting to id).
 *
 * @param namespace schema namespace to qualify the table name
 * @param fieldName column for the WHERE clause, or null to delete by id
 * @return the DELETE SQL string with one "?" placeholder
 */
public String generateDeleteSql (String namespace, String fieldName) {
    // Default to deleting by primary id when no field is specified.
    String whereField = fieldName == null ? "id" : fieldName;
    String qualifiedName = String.join(".", namespace, name);
    return String.format("delete from %s where %s = ?", qualifiedName, whereField);
}
java
{ "resource": "" }
q167254
Table.createSqlTableFrom
validation
/**
 * Clones an existing table (data included, indexes excluded) into this table's
 * name on the given connection. stop_times tables get their stop sequences
 * normalized during the copy. If the clone fails, rolls back and attempts to
 * create an empty table from scratch instead.
 *
 * @param connection   open JDBC connection (transactional; rolled back on failure)
 * @param tableToClone fully qualified name of the source table
 * @return true if either the clone or the fallback table creation succeeded
 */
public boolean createSqlTableFrom(Connection connection, String tableToClone) {
    long startTime = System.currentTimeMillis();
    try {
        Statement statement = connection.createStatement();
        // Drop target table to avoid a conflict.
        String dropSql = String.format("drop table if exists %s", name);
        LOG.info(dropSql);
        statement.execute(dropSql);
        if (tableToClone.endsWith("stop_times")) {
            // stop_times are copied via a special path that rewrites stop_sequence values.
            normalizeAndCloneStopTimes(statement, name, tableToClone);
        } else {
            // Adding the unlogged keyword gives about 12 percent speedup on loading, but is non-standard.
            // FIXME: Which create table operation is more efficient?
            String createTableAsSql = String.format("create table %s as table %s", name, tableToClone);
            // Create table in the image of the table we're copying (indexes are not included).
            LOG.info(createTableAsSql);
            statement.execute(createTableAsSql);
        }
        applyAutoIncrementingSequence(statement);
        // FIXME: Is there a need to add primary key constraint here?
        if (usePrimaryKey) {
            // Add primary key to ID column for any tables that require it.
            String addPrimaryKeySql = String.format("ALTER TABLE %s ADD PRIMARY KEY (id)", name);
            LOG.info(addPrimaryKeySql);
            statement.execute(addPrimaryKeySql);
        }
        return true;
    } catch (SQLException ex) {
        LOG.error("Error cloning table {}: {}", name, ex.getSQLState());
        LOG.error("details: ", ex);
        try {
            // Undo any partial clone work before falling back.
            connection.rollback();
            // It is likely that if cloning the table fails, the reason was that the table did not already exist.
            // Try to create the table here from scratch.
            // FIXME: Maybe we should check that the reason the clone failed was that the table already exists.
            createSqlTable(connection, true);
            return true;
        } catch (SQLException e) {
            e.printStackTrace();
            return false;
        }
    } finally {
        // Logged on every exit path, including the fallback.
        LOG.info("Cloned table {} as {} in {} ms", tableToClone, name, System.currentTimeMillis() - startTime);
    }
}
java
{ "resource": "" }
q167255
Table.normalizeAndCloneStopTimes
validation
/**
 * Copies tableToClone into {@code name}, rewriting each trip's stop_sequence
 * values to a zero-based, gapless sequence (0, 1, 2, ...) ordered by the
 * original stop_sequence. Indexes are deliberately not copied so the bulk
 * insert stays fast.
 *
 * @param statement    open statement on the target connection
 * @param name         target table name to create
 * @param tableToClone fully qualified source stop_times table
 * @throws SQLException on any DDL/DML failure
 */
private void normalizeAndCloneStopTimes(Statement statement, String name, String tableToClone) throws SQLException {
    // Create table with matching columns first and then insert all rows with a special select query that
    // normalizes the stop sequences before inserting.
    // "Create table like" can optionally include indexes, but we want to avoid creating the indexes beforehand
    // because this will slow down our massive insert for stop times.
    String createTableLikeSql = String.format("create table %s (like %s)", name, tableToClone);
    LOG.info(createTableLikeSql);
    statement.execute(createTableLikeSql);
    long normalizeStartTime = System.currentTimeMillis();
    LOG.info("Normalizing stop sequences");
    // First get the column names (to account for any non-standard fields that may be present)
    List<String> columns = new ArrayList<>();
    ResultSet resultSet = statement.executeQuery(String.format("select * from %s limit 1", tableToClone));
    ResultSetMetaData metadata = resultSet.getMetaData();
    int nColumns = metadata.getColumnCount();
    for (int i = 1; i <= nColumns; i++) {
        columns.add(metadata.getColumnName(i));
    }
    // Replace stop sequence column with the normalized sequence values.
    // row_number() is 1-based, hence the "-1 +" to produce zero-based sequences.
    columns.set(columns.indexOf("stop_sequence"), "-1 + row_number() over (partition by trip_id order by stop_sequence) as stop_sequence");
    String insertAllSql = String.format("insert into %s (select %s from %s)", name, String.join(", ", columns), tableToClone);
    LOG.info(insertAllSql);
    statement.execute(insertAllSql);
    LOG.info("Normalized stop times sequences in {} ms", System.currentTimeMillis() - normalizeStartTime);
}
java
{ "resource": "" }
q167256
Table.getKeyFieldIndex
validation
/**
 * Finds the position of this table's key field within the provided column
 * array, or -1 when the key field is not present.
 */
public int getKeyFieldIndex(Field[] fields) {
    return Field.getFieldIndex(fields, getKeyFieldName());
}
java
{ "resource": "" }
q167257
Util.ensureValidNamespace
validation
/**
 * Rejects namespace strings containing anything other than letters, digits,
 * underscore, or dot — guarding against SQL injection through identifiers
 * that cannot be bound as prepared-statement parameters.
 *
 * @throws InvalidNamespaceException if any disallowed character is present
 */
public static void ensureValidNamespace(String namespace) throws InvalidNamespaceException {
    // Whole-string whitelist match; equivalent to searching for any character
    // outside [a-zA-Z0-9_.]. The empty string is accepted, as before.
    if (!namespace.matches("[a-zA-Z0-9_.]*")) {
        throw new InvalidNamespaceException();
    }
}
java
{ "resource": "" }
q167258
Field.getFieldIndex
validation
public static int getFieldIndex (Field[] fields, String name) { // Linear search, assuming a small number of fields per table. for (int i = 0; i < fields.length; i++) if (fields[i].name.equals(name)) return i; return -1; }
java
{ "resource": "" }
q167259
ShapeMap.keySet
validation
@Override public Set<Integer> keySet() { // use a linkedhashset so values come out in order Set<Integer> ret = new LinkedHashSet<>(); for (Tuple2<String, Integer> t : wrapped.keySet()) { ret.add(t.b); } // Don't let the user modify the set as it won't do what they expect (change the map) return Collections.unmodifiableSet(ret); }
java
{ "resource": "" }
q167260
GTFSFeed.fromFile
validation
/**
 * Loads a GTFS feed from a zip file on disk.
 *
 * @param file   path to the GTFS zip
 * @param feedId feed id to assign, or null to let the loader derive one
 * @return the populated feed
 * @throws RuntimeException wrapping any loading failure
 */
public static GTFSFeed fromFile(String file, String feedId) {
    GTFSFeed feed = new GTFSFeed();
    // try-with-resources closes the zip on every path; the previous version
    // only closed it on success, leaking the file handle when loading threw.
    try (ZipFile zip = new ZipFile(file)) {
        if (feedId == null) {
            feed.loadFromFile(zip);
        } else {
            feed.loadFromFile(zip, feedId);
        }
        return feed;
    } catch (Exception e) {
        LOG.error("Error loading GTFS: {}", e.getMessage());
        throw new RuntimeException(e);
    }
}
java
{ "resource": "" }
q167261
GTFSFeed.findPatterns
validation
/**
 * Derives trip patterns (groups of trips sharing a stop sequence) from this
 * feed's trips and stores them in the patterns map keyed by pattern id.
 */
public void findPatterns () {
    PatternFinder patternFinder = new PatternFinder();
    // Iterate over trips and process each trip and its stop times.
    for (Trip trip : this.trips.values()) {
        Iterable<StopTime> orderedStopTimesForTrip = this.getOrderedStopTimesForTrip(trip.trip_id);
        patternFinder.processTrip(trip, orderedStopTimesForTrip);
    }
    // Null error storage: this MapDB code path does not record validation errors.
    Map<TripPatternKey, Pattern> patternObjects = patternFinder.createPatternObjects(this.stops, null);
    // Re-index by pattern id for storage in this feed.
    this.patterns.putAll(patternObjects.values().stream()
            .collect(Collectors.toMap(Pattern::getId, pattern -> pattern)));
}
java
{ "resource": "" }
q167262
GTFSFeed.getTripDistance
validation
public double getTripDistance (String trip_id, boolean straightLine) { return straightLine ? GeoUtils.getDistance(this.getStraightLineForStops(trip_id)) : GeoUtils.getDistance(this.getTripGeometry(trip_id)); }
java
{ "resource": "" }
q167263
GTFSFeed.getTripSpeed
validation
public double getTripSpeed (String trip_id, boolean straightLine) { StopTime firstStopTime = this.stop_times.ceilingEntry(Fun.t2(trip_id, null)).getValue(); StopTime lastStopTime = this.stop_times.floorEntry(Fun.t2(trip_id, Fun.HI)).getValue(); // ensure that stopTime returned matches trip id (i.e., that the trip has stoptimes) if (!firstStopTime.trip_id.equals(trip_id) || !lastStopTime.trip_id.equals(trip_id)) { return Double.NaN; } double distance = getTripDistance(trip_id, straightLine); // trip time (in seconds) int time = lastStopTime.arrival_time - firstStopTime.departure_time; return distance / time; // meters per second }
java
{ "resource": "" }
q167264
GTFSFeed.getStopTimesForStop
validation
/**
 * Returns all stop times that reference the given stop.
 *
 * @param stop_id stop to look up
 * @return stop times for the stop, in index order
 */
public List<StopTime> getStopTimesForStop (String stop_id) {
    // MapDB range-scan idiom: tuple bounds (stop_id, null) .. (stop_id, Fun.HI)
    // select every index entry whose first element equals stop_id.
    SortedSet<Tuple2<String, Tuple2>> index = this.stopStopTimeSet
            .subSet(new Tuple2<>(stop_id, null), new Tuple2(stop_id, Fun.HI));
    // Resolve each index entry (its second element is a stop_times key) to the record.
    return index.stream()
            .map(tuple -> this.stop_times.get(tuple.b))
            .collect(Collectors.toList());
}
java
{ "resource": "" }
q167265
GTFSFeed.getServicesForDate
validation
/**
 * Returns all services active on the given calendar date.
 *
 * @param date service date to look up
 * @return services running on that date
 */
public List<Service> getServicesForDate (LocalDate date) {
    // Format with the feed's dateFormatter to match the index's key encoding.
    String dateString = date.format(dateFormatter);
    // MapDB range-scan idiom: (dateString, null) .. (dateString, Fun.HI) covers
    // every (date, service_id) entry for this date.
    SortedSet<Tuple2<String, String>> index = this.servicesPerDate
            .subSet(new Tuple2<>(dateString, null), new Tuple2(dateString, Fun.HI));
    return index.stream()
            .map(tuple -> this.services.get(tuple.b))
            .collect(Collectors.toList());
}
java
{ "resource": "" }
q167266
GTFSFeed.getAgencyTimeZoneForStop
validation
/**
 * Returns the timezone of the agency serving the given stop, found by
 * following any one stop time at the stop to its trip, route, and agency.
 *
 * NOTE(review): throws NoSuchElementException if no stop times reference this
 * stop — confirm callers guard against that.
 */
public ZoneId getAgencyTimeZoneForStop (String stop_id) {
    StopTime stopTime = getStopTimesForStop(stop_id).iterator().next();
    Trip trip = this.trips.get(stopTime.trip_id);
    Route route = this.routes.get(trip.route_id);
    // NOTE(review): the fallback passes an autoboxed Integer key to the agency
    // map; if that map is keyed by String this always yields null — verify.
    Agency agency = route.agency_id != null ? this.agency.get(route.agency_id) : this.agency.get(0);
    return ZoneId.of(agency.agency_timezone);
}
java
{ "resource": "" }
q167267
PatternFinder.createPatternObjects
validation
/**
 * Converts the accumulated trip groupings into Pattern objects, assigning
 * sequential integer pattern ids and human-readable names.
 *
 * @param stopById     stops keyed by id, used when naming patterns
 * @param errorStorage error sink, or null when called via the MapDB code path
 * @return patterns keyed by their trip-pattern key
 */
public Map<TripPatternKey, Pattern> createPatternObjects(Map<String, Stop> stopById, SQLErrorStorage errorStorage) {
    // Make pattern ID one-based to avoid any JS type confusion between an ID of zero vs. null value.
    int nextPatternId = 1;
    // Create an in-memory list of Patterns because we will later rename them before inserting them into storage.
    Map<TripPatternKey, Pattern> patterns = new HashMap<>();
    // TODO assign patterns sequential small integer IDs (may include route)
    for (TripPatternKey key : tripsForPattern.keySet()) {
        Collection<Trip> trips = tripsForPattern.get(key);
        Pattern pattern = new Pattern(key.stops, trips, null);
        // Overwrite long UUID with sequential integer pattern ID
        pattern.pattern_id = Integer.toString(nextPatternId++);
        // FIXME: Should associated shapes be a single entry?
        pattern.associatedShapes = new HashSet<>();
        trips.stream().forEach(trip -> pattern.associatedShapes.add(trip.shape_id));
        if (pattern.associatedShapes.size() > 1 && errorStorage != null) {
            // Store an error if there is more than one shape per pattern. Note: error storage is null if called via
            // MapDB implementation.
            // TODO: Should shape ID be added to trip pattern key?
            errorStorage.storeError(NewGTFSError.forEntity(
                    pattern,
                    NewGTFSErrorType.MULTIPLE_SHAPES_FOR_PATTERN)
                    .setBadValue(pattern.associatedShapes.toString()));
        }
        patterns.put(key, pattern);
    }
    // Name patterns before storing in SQL database.
    renamePatterns(patterns.values(), stopById);
    LOG.info("Total patterns: {}", tripsForPattern.keySet().size());
    return patterns;
}
java
{ "resource": "" }
q167268
IdleReceiver.setAnywayTimer
validation
/**
 * Schedules a wakeup alarm that broadcasts ACTION_TRIGGER_IDLE to this
 * receiver INACTIVITY_ANYWAY_THRESHOLD from now, so idleness is re-evaluated
 * even without any other triggering event.
 */
private static void setAnywayTimer(Context context) {
    AlarmManager am = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
    Intent intent = new Intent(context, IdleReceiver.class);
    intent.setAction(ACTION_TRIGGER_IDLE);
    PendingIntent pendingIntent = PendingIntent.getBroadcast(context, 0, intent, 0);
    // Elapsed-realtime clock: unaffected by wall-clock changes.
    final long nowElapsed = SystemClock.elapsedRealtime();
    final long when = nowElapsed + INACTIVITY_ANYWAY_THRESHOLD;
    // ELAPSED_REALTIME_WAKEUP wakes the device; the window slop lets the OS batch alarms.
    compatSetWindow(am, AlarmManager.ELAPSED_REALTIME_WAKEUP, when, IDLE_WINDOW_SLOP, pendingIntent);
}
java
{ "resource": "" }
q167269
IdleReceiver.enableReceiver
validation
/**
 * Dynamically registers the idle receiver on the application context so it
 * outlives any single activity.
 */
public static void enableReceiver(Context context) {
    sReceiver = new IdleReceiver();
    context.getApplicationContext().registerReceiver(sReceiver, getFilter());
}
java
{ "resource": "" }
q167270
IdleReceiver.reportNewIdleState
validation
/**
 * Propagates an idle-state change to every tracked job's idle-constraint flag,
 * then starts the job service wakefully so newly-runnable jobs execute.
 *
 * @param context caller context
 * @param isIdle  new device idle state
 */
void reportNewIdleState(Context context, boolean isIdle) {
    JobStore jobStore = JobStore.initAndGet(context);
    // Lock the store while flipping flags so the job set is not mutated underneath us.
    synchronized (jobStore) {
        ArraySet<JobStatus> jobs = jobStore.getJobs();
        for (int i = 0; i < jobs.size(); i++) {
            JobStatus ts = jobs.valueAt(i);
            ts.idleConstraintSatisfied.set(isIdle);
        }
    }
    // Wakeful start keeps the device awake until the service finishes.
    startWakefulService(context, JobServiceCompat.maybeRunJobs(context));
}
java
{ "resource": "" }
q167271
JobStore.add
validation
/**
 * Adds or replaces a job in the store, updating boot-session bookkeeping for
 * non-persisted jobs and scheduling an async write-back.
 *
 * @return true if an equivalent job was replaced, false if this is a new entry
 */
public boolean add(JobStatus jobStatus) {
    // Remove any existing equivalent entry first so we can report replacement.
    final boolean replacedExisting = mJobSet.remove(jobStatus);
    mJobSet.add(jobStatus);
    if (!jobStatus.isPersisted()) {
        // Non-persisted jobs only survive the current boot session.
        markForBootSession(jobStatus);
    }
    maybeWriteStatusToDiskAsync();
    return replacedExisting;
}
java
{ "resource": "" }
q167272
JobStore.containsJobId
validation
/**
 * Returns whether any tracked job matches the given job id.
 */
public boolean containsJobId(int jobId) {
    // Reverse scan over the set, matching the iteration style of sibling methods.
    int i = mJobSet.size();
    while (--i >= 0) {
        if (mJobSet.valueAt(i).matches(jobId)) {
            return true;
        }
    }
    return false;
}
java
{ "resource": "" }
q167273
JobStore.remove
validation
/**
 * Removes a job from the store. Boot-session bookkeeping and the async disk
 * write-back only happen when something was actually removed.
 *
 * @return true if the job was present and removed
 */
public boolean remove(JobStatus jobStatus) {
    if (!mJobSet.remove(jobStatus)) {
        // Nothing removed: leave bookkeeping and persisted state untouched.
        return false;
    }
    if (!jobStatus.isPersisted()) {
        unmarkForBootSession(jobStatus);
    }
    maybeWriteStatusToDiskAsync();
    return true;
}
java
{ "resource": "" }
q167274
JobSchedulerService.rescheduleFailedJob
validation
private JobStatus rescheduleFailedJob(JobStatus job) { if (job.hasIdleConstraint()) { // Don't need to modify time on idle job, it will run whenever the next idle period is. return job; } final long elapsedNowMillis = SystemClock.elapsedRealtime(); final JobInfo jobInfo = job.getJob(); final long initialBackoffMillis = jobInfo.getInitialBackoffMillis(); final int backoffAttemps = job.getNumFailures() + 1; long delayMillis; switch (job.getJob().getBackoffPolicy()) { case JobInfo.BACKOFF_POLICY_LINEAR: delayMillis = initialBackoffMillis * backoffAttemps; break; default: case JobInfo.BACKOFF_POLICY_EXPONENTIAL: delayMillis = (long) Math.scalb(initialBackoffMillis, backoffAttemps - 1); break; } delayMillis = Math.min(delayMillis, JobInfo.MAX_BACKOFF_DELAY_MILLIS); return new JobStatus(job, elapsedNowMillis + delayMillis, JobStatus.NO_LATEST_RUNTIME, backoffAttemps); }
java
{ "resource": "" }
q167275
ArraySet.clear
validation
/**
 * Empties the set: hands the backing arrays to freeArrays (for reuse or
 * cleanup) and resets to the shared empty arrays. No-op when already empty.
 */
@Override
public void clear() {
    if (mSize != 0) {
        freeArrays(mHashes, mArray, mSize);
        mHashes = EmptyArray.INT;
        mArray = EmptyArray.OBJECT;
        mSize = 0;
    }
}
java
{ "resource": "" }
q167276
MainActivity.onReceivedStartJob
validation
/**
 * UI callback when a job starts: flashes the "start" indicator (reverting
 * after one second via a delayed handler message) and displays the job's id
 * and extras.
 */
public void onReceivedStartJob(JobParameters params) {
    mShowStartView.setBackgroundColor(startJobColor);
    Message m = Message.obtain(mHandler, MSG_UNCOLOUR_START);
    mHandler.sendMessageDelayed(m, 1000L); // uncolour in 1 second.
    mParamsTextView.setText("Executing: " + params.getJobId() + " " + params.getExtras());
}
java
{ "resource": "" }
q167277
MainActivity.onReceivedStopJob
validation
/**
 * UI callback when a job stops: flashes the "stop" indicator (reverting after
 * two seconds via a delayed handler message) and clears the parameter text.
 */
public void onReceivedStopJob() {
    mShowStopView.setBackgroundColor(stopJobColor);
    Message m = Message.obtain(mHandler, MSG_UNCOLOUR_STOP);
    mHandler.sendMessageDelayed(m, 2000L); // uncolour in 2 seconds (stale comment said 1 second).
    mParamsTextView.setText("");
}
java
{ "resource": "" }
q167278
TimeReceiver.canStopTrackingJob
validation
/**
 * A job can leave time tracking once every time-based constraint it actually
 * has (delay and/or deadline) is satisfied; absent constraints count as met.
 */
private static boolean canStopTrackingJob(JobStatus job) {
    boolean delayDone = !job.hasTimingDelayConstraint() || job.timeDelayConstraintSatisfied.get();
    boolean deadlineDone = !job.hasDeadlineConstraint() || job.deadlineConstraintSatisfied.get();
    return delayDone && deadlineDone;
}
java
{ "resource": "" }
q167279
PersistableBundle.putString
validation
/** Stores a String value; thin delegate to PersistableBundleCompat on the wrapped bundle. */
public void putString(String key, String value) {
    PersistableBundleCompat.putString(bundle, key, value);
}
java
{ "resource": "" }
q167280
PersistableBundle.putStringArray
validation
/** Stores a String[] value; thin delegate to PersistableBundleCompat on the wrapped bundle. */
public void putStringArray(String key, String[] value) {
    PersistableBundleCompat.putStringArray(bundle, key, value);
}
java
{ "resource": "" }
q167281
PersistableBundle.getString
validation
/** Reads a String value, returning defaultValue when absent; delegates to PersistableBundleCompat. */
public String getString(String key, String defaultValue) {
    return PersistableBundleCompat.getString(bundle, key, defaultValue);
}
java
{ "resource": "" }
q167282
PersistableBundle.putPersistableBundle
validation
/** Stores a nested bundle; unwraps the inner compat bundle before delegating. */
public void putPersistableBundle(String key, PersistableBundle value) {
    PersistableBundleCompat.putPersistableBundle(bundle, key, value.bundle);
}
java
{ "resource": "" }
q167283
Snack.writeToParcel
validation
/**
 * Serializes this Snack into a Parcel. Field order here must exactly match
 * the order the CREATOR reads them back in.
 */
public void writeToParcel(Parcel out, int flags) {
    out.writeString(mMessage);
    out.writeString(mActionMessage);
    out.writeInt(mActionIcon);
    out.writeParcelable(mToken, 0);
    // NOTE(review): mDuration is narrowed to int here — confirm its values always fit.
    out.writeInt((int) mDuration);
    out.writeParcelable(mBtnTextColor, 0);
    out.writeParcelable(mBackgroundColor, 0);
    out.writeInt(mHeight);
    out.writeValue(mTypeface);
}
java
{ "resource": "" }
q167284
SnackBar.getHeight
validation
/**
 * Measures the parent view (exact current width, height bounded by its
 * current height) and returns the resulting measured height.
 */
public int getHeight() {
    mParentView.measure(View.MeasureSpec.makeMeasureSpec(mParentView.getWidth(), View.MeasureSpec.EXACTLY),
            View.MeasureSpec.makeMeasureSpec(mParentView.getHeight(), View.MeasureSpec.AT_MOST));
    return mParentView.getMeasuredHeight();
}
java
{ "resource": "" }
q167285
BasicLTIUtil.dPrint
validation
/**
 * Debug print helper: mirrors the message to stdout only when verbose
 * printing is enabled, and always logs it at FINE level.
 */
public static void dPrint(String str) {
    if (verbosePrint) {
        System.out.println(str);
    }
    M_log.fine(str);
}
java
{ "resource": "" }
q167286
BasicLTIUtil.checkProperties
validation
/**
 * Validates an OAuth-signed request given as java.util.Properties.
 * Convenience overload: converts to a Map and delegates to the Map-based
 * implementation.
 */
public static boolean checkProperties(Properties postProp, String url, String method,
        String oauth_consumer_key, String oauth_consumer_secret) {
    return checkProperties(convertToMap(postProp), url, method,
            oauth_consumer_key, oauth_consumer_secret);
}
java
{ "resource": "" }
q167287
BasicLTIUtil.checkProperties
validation
/**
 * Validates the OAuth signature of a request described by its parameter map,
 * URL, and HTTP method against the given consumer key/secret.
 *
 * @return true if the signature validates, false on any failure (details are
 *         logged at WARNING, including the signature base string when available)
 */
public static boolean checkProperties(
        Map<String, String> postProp, String url, String method,
        String oauth_consumer_key, String oauth_consumer_secret) {
    OAuthMessage oam = new OAuthMessage(method, url, postProp.entrySet());
    OAuthConsumer cons = new OAuthConsumer("about:blank", oauth_consumer_key,
            oauth_consumer_secret, null);
    OAuthValidator oav = new SimpleOAuthValidator();
    OAuthAccessor acc = new OAuthAccessor(cons);
    // Capture the base string up front so it can be logged if validation fails.
    String base_string = null;
    try {
        base_string = OAuthSignatureMethod.getBaseString(oam);
    } catch (Exception e) {
        M_log.warning(e.getLocalizedMessage());
        base_string = null;
        return false;
    }
    try {
        oav.validateMessage(oam, acc);
    } catch (Exception e) {
        M_log.warning("Provider failed to validate message");
        M_log.warning(e.getLocalizedMessage());
        if (base_string != null) {
            M_log.warning(base_string);
        }
        return false;
    }
    return true;
}
java
{ "resource": "" }
q167288
BasicLTIUtil.prepareForExport
validation
/**
 * Parses a BasicLTI descriptor, strips the x-secure section (so secrets are
 * never exported), and returns the re-serialized XML. Returns null on any
 * parse failure, with a warning logged.
 */
public static String prepareForExport(String descriptor) {
    Map<String, Object> tm = null;
    try {
        tm = XMLMap.getFullMap(descriptor.trim());
    } catch (Exception e) {
        M_log.warning("BasicLTIUtil exception parsing BasicLTI descriptor" + e.getMessage());
        return null;
    }
    if (tm == null) {
        M_log.warning("Unable to parse XML in prepareForExport");
        return null;
    }
    // Remove the secure section before serializing back out.
    XMLMap.removeSubMap(tm, "/basic_lti_link/x-secure");
    String retval = XMLMap.getXML(tm, true);
    return retval;
}
java
{ "resource": "" }
q167289
BasicLTIUtil.setProperty
validation
/**
 * Sets a property only when the value is meaningful: null and
 * whitespace-only values are silently skipped.
 */
public static void setProperty(Properties props, String key, String value) {
    if (value == null || value.trim().length() < 1) {
        return;
    }
    props.setProperty(key, value);
}
java
{ "resource": "" }
q167290
BasicLTIUtil.htmlspecialchars
validation
/**
 * Escapes characters significant in HTML/XML markup ('&', '"', '<', '>', '=').
 * '&' is escaped first so later entities are not double-escaped.
 * Returns null for null input.
 */
public static String htmlspecialchars(String input) {
    if (input == null) {
        return null;
    }
    String retval = input.replace("&", "&amp;");
    retval = retval.replace("\"", "&quot;");
    retval = retval.replace("<", "&lt;");
    // The '>' replacement previously appeared twice; the second pass was a
    // harmless but redundant no-op and has been removed.
    retval = retval.replace(">", "&gt;");
    retval = retval.replace("=", "&#61;");
    return retval;
}
java
{ "resource": "" }
q167291
BasicLTIUtil.getRealPath
validation
static public String getRealPath(String servletUrl, String extUrl) { Pattern pat = Pattern.compile("^https??://[^/]*"); // Deal with potential bad extUrl formats Matcher m = pat.matcher(extUrl); if (m.find()) { extUrl = m.group(0); } String retval = pat.matcher(servletUrl).replaceFirst(extUrl); return retval; }
java
{ "resource": "" }
q167292
LtiOauthVerifier.verify
validation
/**
 * Verifies the OAuth 1.0a signature of an incoming LTI launch request using
 * the supplied shared secret.
 *
 * @return a successful result wrapping the launch data, or a BAD_REQUEST
 *         result describing why validation failed
 */
@Override
public LtiVerificationResult verify(HttpServletRequest request, String secret) throws LtiVerificationException {
    OAuthMessage oam = OAuthServlet.getMessage(request, OAuthServlet.getRequestURL(request));
    String oauth_consumer_key = null;
    try {
        oauth_consumer_key = oam.getConsumerKey();
    } catch (Exception e) {
        return new LtiVerificationResult(false, LtiError.BAD_REQUEST, "Unable to find consumer key in message");
    }
    OAuthValidator oav = new SimpleOAuthValidator();
    // Callback URL and consumer properties are not needed for signature validation.
    OAuthConsumer cons = new OAuthConsumer(null, oauth_consumer_key, secret, null);
    OAuthAccessor acc = new OAuthAccessor(cons);
    try {
        oav.validateMessage(oam, acc);
    } catch (Exception e) {
        return new LtiVerificationResult(false, LtiError.BAD_REQUEST, "Failed to validate: " + e.getLocalizedMessage());
    }
    return new LtiVerificationResult(true, new LtiLaunch(request));
}
java
{ "resource": "" }
q167293
LtiOauthVerifier.verifyParameters
validation
/**
 * Verifies the OAuth 1.0a signature over an explicit parameter map (rather
 * than an HttpServletRequest), taking the consumer key from the parameters.
 *
 * @return a successful result wrapping the launch data, or a BAD_REQUEST
 *         result including the failing parameters for diagnosis
 */
@Override
public LtiVerificationResult verifyParameters(Map<String, String> parameters, String url, String method, String secret) throws LtiVerificationException {
    OAuthMessage oam = new OAuthMessage(method, url, parameters.entrySet());
    // Consumer key comes from the request parameters themselves.
    OAuthConsumer cons = new OAuthConsumer(null, parameters.get(OAUTH_KEY_PARAMETER), secret, null);
    OAuthValidator oav = new SimpleOAuthValidator();
    OAuthAccessor acc = new OAuthAccessor(cons);
    try {
        oav.validateMessage(oam, acc);
    } catch (Exception e) {
        return new LtiVerificationResult(false, LtiError.BAD_REQUEST, "Failed to validate: " + e.getLocalizedMessage() + ", Parameters: " + Arrays.toString(parameters.entrySet().toArray()));
    }
    return new LtiVerificationResult(true, new LtiLaunch(parameters));
}
java
{ "resource": "" }
q167294
IMSPOXRequest.validateRequest
validation
/**
 * Convenience overload of validateRequest.
 * NOTE(review): the fourth argument's meaning is defined by the four-arg
 * overload; it is simply passed as null here.
 */
public void validateRequest(String oauth_consumer_key, String oauth_secret, HttpServletRequest request) {
    validateRequest(oauth_consumer_key, oauth_secret, request, null);
}
java
{ "resource": "" }
q167295
LTI2Servlet.doLaunch
validation
/**
 * Builds and emits an auto-submitting LTI 2 launch page using the previously
 * registered tool proxy profile stored under PERSIST["profile"]. Writes
 * "Missing profile" if no registration has happened yet.
 */
@SuppressWarnings("unused")
protected void doLaunch(HttpServletRequest request, HttpServletResponse response) {
    String profile = PERSIST.get("profile");
    response.setContentType("text/html");
    String output = null;
    if ( profile == null ) {
        output = "Missing profile";
    } else {
        // Parse the stored tool proxy and pull out its tool entries.
        JSONObject providerProfile = (JSONObject) JSONValue.parse(profile);
        List<Properties> profileTools = new ArrayList<Properties> ();
        Properties info = new Properties();
        String retval = LTI2Util.parseToolProfile(profileTools, info, providerProfile);
        // Keep the launch URL and custom-parameter string of the last tool found.
        String launch = null;
        String parameter = null;
        for ( Properties profileTool : profileTools ) {
            launch = (String) profileTool.get("launch");
            parameter = (String) profileTool.get("parameter");
        }
        // The shared secret negotiated at registration lives in the security contract.
        JSONObject security_contract = (JSONObject) providerProfile.get(LTI2Constants.SECURITY_CONTRACT);
        String shared_secret = (String) security_contract.get(LTI2Constants.SHARED_SECRET);
        System.out.println("launch="+launch);
        System.out.println("shared_secret="+shared_secret);
        // Start from sample launch data and mark it as an LTI 2 launch.
        Properties ltiProps = LTI2SampleData.getLaunch();
        ltiProps.setProperty(BasicLTIConstants.LTI_VERSION,BasicLTIConstants.LTI_VERSION_2);
        // Build substitution variables pointing at this servlet's settings/result services.
        Properties lti2subst = LTI2SampleData.getSubstitution();
        String settings_url = getServiceURL(request) + SVC_Settings + "/";
        lti2subst.setProperty("LtiLink.custom.url", settings_url + LTI2Util.SCOPE_LtiLink + "/"
            + ltiProps.getProperty(BasicLTIConstants.RESOURCE_LINK_ID));
        lti2subst.setProperty("ToolProxyBinding.custom.url", settings_url + LTI2Util.SCOPE_ToolProxyBinding + "/"
            + ltiProps.getProperty(BasicLTIConstants.CONTEXT_ID));
        lti2subst.setProperty("ToolProxy.custom.url", settings_url + LTI2Util.SCOPE_ToolProxy + "/"
            + TEST_KEY);
        lti2subst.setProperty("Result.url", getServiceURL(request) + SVC_Result + "/"
            + ltiProps.getProperty(BasicLTIConstants.RESOURCE_LINK_ID));
        // Do the substitutions
        Properties custom = new Properties();
        LTI2Util.mergeLTI2Parameters(custom, parameter);
        LTI2Util.substituteCustom(custom, lti2subst);
        // Place the custom values into the launch
        LTI2Util.addCustomToLaunch(ltiProps, custom);
        // OAuth-sign the launch parameters and render the auto-submitting form.
        ltiProps = BasicLTIUtil.signProperties(ltiProps, launch, "POST",
            TEST_KEY, shared_secret, null, null, null);
        boolean dodebug = true;
        output = BasicLTIUtil.postLaunchHTML(ltiProps, launch, dodebug);
    }
    try {
        PrintWriter out = response.getWriter();
        out.println(output);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
java
{ "resource": "" }
q167296
LTI2Servlet.buildToolConsumerProfile
validation
/**
 * Assembles the Tool Consumer profile advertised to registering tools:
 * the sample configuration, the capabilities this consumer offers, and the
 * REST service endpoints (registration, results, and the three settings scopes).
 */
protected ToolConsumer buildToolConsumerProfile(HttpServletRequest request, Map<String, Object> deploy, String profile_id) {
    // Load the configuration data
    LTI2Config cnf = new org.imsglobal.lti2.LTI2ConfigSample();
    ToolConsumer consumer = new ToolConsumer(profile_id, "LTI-2p0", getServiceURL(request), cnf);
    // Normally we would check permissions before we offer capabilities
    List<String> capabilities = consumer.getCapability_offered();
    LTI2Util.allowEmail(capabilities);
    LTI2Util.allowName(capabilities);
    LTI2Util.allowSettings(capabilities);
    LTI2Util.allowResult(capabilities);
    // Normally we would check permissions before we offer services
    List<ServiceOffered> services = consumer.getService_offered();
    services.add(StandardServices.LTI2Registration(getServiceURL(request) +
        SVC_tc_registration + "/" + profile_id));
    services.add(StandardServices.LTI2ResultItem(getServiceURL(request) +
        SVC_Result + "/{" + BasicLTIConstants.LIS_RESULT_SOURCEDID + "}"));
    services.add(StandardServices.LTI2LtiLinkSettings(getServiceURL(request) +
        SVC_Settings + "/" + LTI2Util.SCOPE_LtiLink + "/{" + BasicLTIConstants.RESOURCE_LINK_ID + "}"));
    services.add(StandardServices.LTI2ToolProxySettings(getServiceURL(request) +
        SVC_Settings + "/" + LTI2Util.SCOPE_ToolProxyBinding + "/{" + BasicLTIConstants.CONTEXT_ID + "}"));
    services.add(StandardServices.LTI2ToolProxySettings(getServiceURL(request) +
        SVC_Settings + "/" + LTI2Util.SCOPE_ToolProxy + "/{" + LTI2Constants.TOOL_PROXY_GUID + "}"));
    return consumer;
}
java
{ "resource": "" }
q167297
LTI2Util.validateServices
validation
/**
 * Checks that every service the provider's security contract requests is
 * among the services this consumer offered.
 *
 * @return null when all requested services are allowed; otherwise an English
 *         error message describing the first problem found
 */
public static String validateServices(ToolConsumer consumer, JSONObject providerProfile) {
    // Mostly to catch casting errors from bad JSON
    try {
        JSONObject security_contract = (JSONObject) providerProfile.get(LTI2Constants.SECURITY_CONTRACT);
        if ( security_contract == null ) {
            return "JSON missing security_contract";
        }
        JSONArray tool_services = (JSONArray) security_contract.get(LTI2Constants.TOOL_SERVICE);
        List<ServiceOffered> services_offered = consumer.getService_offered();
        if ( tool_services != null ) for (Object o : tool_services) {
            JSONObject tool_service = (JSONObject) o;
            String json_service = (String) tool_service.get(LTI2Constants.SERVICE);
            // Requested service must match one of the offered endpoints exactly.
            boolean found = false;
            for (ServiceOffered service : services_offered ) {
                String service_endpoint = service.getEndpoint();
                if ( service_endpoint.equals(json_service) ) {
                    found = true;
                    break;
                }
            }
            if ( ! found ) return "Service not allowed: "+json_service;
        }
        return null;
    } catch (Exception e) {
        return "Exception:"+ e.getLocalizedMessage();
    }
}
java
{ "resource": "" }
q167298
LTI2Util.validateCapabilities
validation
/**
 * Checks that every capability each tool in the provider profile enables is
 * among the capabilities this consumer offered.
 *
 * @return null when all requested capabilities are permitted; otherwise an
 *         English error message describing the first problem found
 */
public static String validateCapabilities(ToolConsumer consumer, JSONObject providerProfile) {
    List<Properties> theTools = new ArrayList<Properties> ();
    Properties info = new Properties();
    // Mostly to catch casting errors from bad JSON
    try {
        String retval = parseToolProfile(theTools, info, providerProfile);
        if ( retval != null ) return retval;
        if ( theTools.size() < 1 ) return "No tools found in profile";
        // Check all the capabilities requested by all the tools comparing against consumer
        List<String> capabilities = consumer.getCapability_offered();
        for ( Properties theTool : theTools ) {
            // Each tool's enabled capabilities arrive as a JSON array string.
            String ec = (String) theTool.get("enabled_capability");
            JSONArray enabled_capability = (JSONArray) JSONValue.parse(ec);
            if ( enabled_capability != null ) for (Object o : enabled_capability) {
                ec = (String) o;
                if ( capabilities.contains(ec) ) continue;
                return "Capability not permitted="+ec;
            }
        }
        return null;
    } catch (Exception e ) {
        return "Exception:"+ e.getLocalizedMessage();
    }
}
java
{ "resource": "" }
q167299
LTI2Util.addCustomToLaunch
validation
/**
 * Copies every custom property into the launch property set under a
 * "custom_" key prefix, skipping blank values via setProperty's rules.
 */
public static void addCustomToLaunch(Properties ltiProps, Properties custom) {
    for (Enumeration<?> names = custom.propertyNames(); names.hasMoreElements(); ) {
        String key = (String) names.nextElement();
        setProperty(ltiProps, "custom_" + key, custom.getProperty(key));
    }
}
java
{ "resource": "" }