idx int64 0 165k | question stringlengths 73 4.15k | target stringlengths 5 918 | len_question int64 21 890 | len_target int64 3 255 |
|---|---|---|---|---|
8,000 | public static boolean copyToFile ( InputStream inputStream , File destFile ) { try { OutputStream out = new FileOutputStream ( destFile ) ; try { byte [ ] buffer = new byte [ 4096 ] ; int bytesRead ; while ( ( bytesRead = inputStream . read ( buffer ) ) >= 0 ) { out . write ( buffer , 0 , bytesRead ) ; } } finally { out . close ( ) ; } return true ; } catch ( IOException e ) { return false ; } } | Copy data from a source stream to destFile . Return true if succeed return false if failed . | 106 | 19 |
8,001 | public static String readTextFile ( File file , int max , String ellipsis ) throws IOException { InputStream input = new FileInputStream ( file ) ; try { if ( max > 0 ) { // "head" mode: read the first N bytes byte [ ] data = new byte [ max + 1 ] ; int length = input . read ( data ) ; if ( length <= 0 ) return "" ; if ( length <= max ) return new String ( data , 0 , length ) ; if ( ellipsis == null ) return new String ( data , 0 , max ) ; return new String ( data , 0 , max ) + ellipsis ; } else if ( max < 0 ) { // "tail" mode: read it all, keep the last N int len ; boolean rolled = false ; byte [ ] last = null , data = null ; do { if ( last != null ) rolled = true ; byte [ ] tmp = last ; last = data ; data = tmp ; if ( data == null ) data = new byte [ - max ] ; len = input . read ( data ) ; } while ( len == data . length ) ; if ( last == null && len <= 0 ) return "" ; if ( last == null ) return new String ( data , 0 , len ) ; if ( len > 0 ) { rolled = true ; System . arraycopy ( last , len , last , 0 , last . length - len ) ; System . arraycopy ( data , 0 , last , last . length - len , len ) ; } if ( ellipsis == null || ! rolled ) return new String ( last ) ; return ellipsis + new String ( last ) ; } else { // "cat" mode: read it all ByteArrayOutputStream contents = new ByteArrayOutputStream ( ) ; int len ; byte [ ] data = new byte [ 1024 ] ; do { len = input . read ( data ) ; if ( len > 0 ) contents . write ( data , 0 , len ) ; } while ( len == data . length ) ; return contents . toString ( ) ; } } finally { input . close ( ) ; } } | Read a text file into a String optionally limiting the length . | 449 | 12 |
8,002 | public void setFilePermissions ( FilePermissions filePermissions ) { if ( isWritable ( ) ) { meta_data . put ( PERMISSIONS_KEY . KEY , gson . toJson ( filePermissions , FilePermissions . class ) ) ; } } | Sets new file permissions for this object . | 60 | 9 |
8,003 | protected void setOwnerId ( Integer id ) { if ( getOwnerId ( ) != null ) { logger . warn ( "Attempting to set owner id for an object where it as previously been set. Ignoring new id" ) ; return ; } meta_put ( OWNER_ID_KEY , id ) ; // dont allow null } | accessable to subclasses and this class stupid java . | 71 | 11 |
8,004 | public DAOArray fromDao ( DAO [ ] daoList ) throws DAOArrayException { for ( DAO dao : daoList ) { add ( dao ) ; } return this ; } | convert a list of dao into this format | 45 | 10 |
8,005 | public DAO [ ] convert ( ) { int size = data . size ( ) ; DAO [ ] daoList = new DAO [ size ] ; for ( int i = 0 ; i < size ; i ++ ) { Object [ ] dat = data . get ( i ) ; DAO dao = new DAO ( modelName ) ; for ( int j = 0 ; j < attributes . length ; j ++ ) { dao . set_Value ( attributes [ j ] , dat [ j ] ) ; } daoList [ i ] = dao ; } return daoList ; } | From this format convert back to DAO | 126 | 8 |
8,006 | public List < T > getSortedKeys ( ) { List < T > l = Lists . newArrayList ( counts . keySet ( ) ) ; Collections . sort ( l , Ordering . natural ( ) . reverse ( ) . onResultOf ( Functions . forMap ( counts . getBaseMap ( ) ) ) ) ; return l ; } | Gets the keys in this sorted order from the highest count to the lowest count . | 73 | 17 |
8,007 | public static ListSupertaggedSentence createWithUnobservedSupertags ( List < String > words , List < String > pos ) { return new ListSupertaggedSentence ( WordAndPos . createExample ( words , pos ) , Collections . nCopies ( words . size ( ) , Collections . < HeadedSyntacticCategory > emptyList ( ) ) , Collections . nCopies ( words . size ( ) , Collections . < Double > emptyList ( ) ) ) ; } | Creates a supertagged sentence where the supertags for each word are unobserved . Using this sentence during CCG parsing allows any syntactic category to be assigned to each word . | 104 | 37 |
8,008 | @ Override public Object next ( ) { assertGeneratorStarted ( ) ; if ( ! hasNext ( ) ) { throw new NoSuchElementException ( "No more object to generate" ) ; } Object object = currentFixtureGenerator . next ( ) ; logger . debug ( "Generated {}" , object ) ; extractorDelegate . extractEntity ( object ) ; return object ; } | Returns the next entity to load . | 84 | 7 |
8,009 | public static MapReduceExecutor getMapReduceExecutor ( ) { if ( executor == null ) { // Default to using a local executor with one thread per CPU. executor = new LocalMapReduceExecutor ( Runtime . getRuntime ( ) . availableProcessors ( ) , 20 ) ; } return executor ; } | Gets the global map - reduce executor . | 71 | 10 |
8,010 | public List < Assignment > getNonzeroAssignments ( ) { Iterator < Outcome > outcomeIter = outcomeIterator ( ) ; List < Assignment > assignments = Lists . newArrayList ( ) ; while ( outcomeIter . hasNext ( ) ) { Outcome outcome = outcomeIter . next ( ) ; if ( outcome . getProbability ( ) != 0.0 ) { assignments . add ( outcome . getAssignment ( ) ) ; } } return assignments ; } | Gets all assignments to this factor which have non - zero weight . | 100 | 14 |
8,011 | private double getPartitionFunction ( ) { if ( partitionFunction != - 1.0 ) { return partitionFunction ; } partitionFunction = 0.0 ; Iterator < Outcome > outcomeIterator = outcomeIterator ( ) ; while ( outcomeIterator . hasNext ( ) ) { partitionFunction += outcomeIterator . next ( ) . getProbability ( ) ; } return partitionFunction ; } | Get the partition function = denominator = total sum probability of all assignments . | 80 | 15 |
8,012 | public static < A , B > Map < A , B > fromLists ( List < A > keys , List < B > values ) { Preconditions . checkArgument ( keys . size ( ) == values . size ( ) ) ; Map < A , B > map = Maps . newHashMap ( ) ; for ( int i = 0 ; i < keys . size ( ) ; i ++ ) { map . put ( keys . get ( i ) , values . get ( i ) ) ; } return map ; } | Returns a map where the ith element of keys maps to the ith element of values . | 110 | 19 |
8,013 | public static Type inferType ( Expression2 expression , Type rootType , TypeDeclaration typeDeclaration ) { Map < Integer , Type > subexpressionTypeMap = inferTypeMap ( expression , rootType , typeDeclaration ) ; return subexpressionTypeMap . get ( 0 ) ; } | Implementation of type inference that infers types for expressions using the basic type information in typeDeclaration . | 59 | 21 |
8,014 | public static boolean eventDateValid ( String eventDate ) { boolean result = false ; if ( extractDate ( eventDate ) != null ) { result = true ; } else { Interval interval = extractInterval ( eventDate ) ; if ( interval != null ) { if ( interval . getStart ( ) . isBefore ( interval . getEnd ( ) ) ) { result = true ; } } } return result ; } | Test to see whether an eventDate contains a string in an expected ISO format . | 87 | 16 |
8,015 | public static Map < String , String > extractDateFromVerbatim ( String verbatimEventDate ) { return extractDateFromVerbatim ( verbatimEventDate , DateUtils . YEAR_BEFORE_SUSPECT ) ; } | Attempt to extract a date or date range in standard format from a provided verbatim date string . | 53 | 20 |
8,016 | public static boolean isRange ( String eventDate ) { boolean isRange = false ; if ( eventDate != null ) { String [ ] dateBits = eventDate . split ( "/" ) ; if ( dateBits != null && dateBits . length == 2 ) { //probably a range. DateTimeParser [ ] parsers = { DateTimeFormat . forPattern ( "yyyy-MM" ) . getParser ( ) , DateTimeFormat . forPattern ( "yyyy" ) . getParser ( ) , ISODateTimeFormat . dateOptionalTimeParser ( ) . getParser ( ) } ; DateTimeFormatter formatter = new DateTimeFormatterBuilder ( ) . append ( null , parsers ) . toFormatter ( ) ; try { // must be at least a 4 digit year. if ( dateBits [ 0 ] . length ( ) > 3 && dateBits [ 1 ] . length ( ) > 3 ) { DateMidnight startDate = LocalDate . parse ( dateBits [ 0 ] , formatter ) . toDateMidnight ( ) ; DateMidnight endDate = LocalDate . parse ( dateBits [ 1 ] , formatter ) . toDateMidnight ( ) ; // both start date and end date must parse as dates. isRange = true ; } } catch ( Exception e ) { // not a date range e . printStackTrace ( ) ; logger . debug ( e . getMessage ( ) ) ; } } else if ( dateBits != null && dateBits . length == 1 ) { logger . debug ( dateBits [ 0 ] ) ; // Date bits does not contain a / // Is eventDate in the form yyyy-mm-dd, if so, not a range DateTimeParser [ ] parsers = { DateTimeFormat . forPattern ( "yyyy-MM-dd" ) . getParser ( ) , } ; DateTimeFormatter formatter = new DateTimeFormatterBuilder ( ) . append ( null , parsers ) . toFormatter ( ) ; try { DateMidnight date = DateMidnight . parse ( eventDate , formatter ) ; isRange = false ; } catch ( Exception e ) { logger . debug ( e . getMessage ( ) ) ; // not parsable with the yyyy-mm-dd parser. DateTimeParser [ ] parsers2 = { DateTimeFormat . forPattern ( "yyyy-MM" ) . getParser ( ) , DateTimeFormat . forPattern ( "yyyy" ) . getParser ( ) , } ; formatter = new DateTimeFormatterBuilder ( ) . append ( null , parsers2 ) . toFormatter ( ) ; try { // must be at least a 4 digit year. if ( dateBits [ 0 ] . 
length ( ) > 3 ) { DateMidnight startDate = DateMidnight . parse ( dateBits [ 0 ] , formatter ) ; // date must parse as either year or year and month dates. isRange = true ; } } catch ( Exception e1 ) { // not a date range } } } } return isRange ; } | Test to see if a string appears to represent a date range of more than one day . | 661 | 18 |
8,017 | public static Interval extractInterval ( String eventDate ) { Interval result = null ; DateTimeParser [ ] parsers = { DateTimeFormat . forPattern ( "yyyy-MM" ) . getParser ( ) , DateTimeFormat . forPattern ( "yyyy" ) . getParser ( ) , ISODateTimeFormat . dateOptionalTimeParser ( ) . getParser ( ) } ; DateTimeFormatter formatter = new DateTimeFormatterBuilder ( ) . append ( null , parsers ) . toFormatter ( ) ; if ( eventDate != null && eventDate . contains ( "/" ) && isRange ( eventDate ) ) { String [ ] dateBits = eventDate . split ( "/" ) ; try { // must be at least a 4 digit year. if ( dateBits [ 0 ] . length ( ) > 3 && dateBits [ 1 ] . length ( ) > 3 ) { DateMidnight startDate = DateMidnight . parse ( dateBits [ 0 ] , formatter ) ; DateTime endDate = DateTime . parse ( dateBits [ 1 ] , formatter ) ; logger . debug ( startDate ) ; logger . debug ( endDate ) ; if ( dateBits [ 1 ] . length ( ) == 4 ) { result = new Interval ( startDate , endDate . plusMonths ( 12 ) . minus ( 1l ) ) ; } else if ( dateBits [ 1 ] . length ( ) == 7 ) { result = new Interval ( startDate , endDate . plusMonths ( 1 ) . minus ( 1l ) ) ; } else { result = new Interval ( startDate , endDate . plusDays ( 1 ) . minus ( 1l ) ) ; } logger . debug ( result ) ; } } catch ( Exception e ) { // not a date range logger . error ( e . getMessage ( ) ) ; } } else { try { DateMidnight startDate = DateMidnight . parse ( eventDate , formatter ) ; logger . debug ( startDate ) ; if ( eventDate . length ( ) == 4 ) { DateTime endDate = startDate . toDateTime ( ) . plusMonths ( 12 ) . minus ( 1l ) ; result = new Interval ( startDate , endDate ) ; logger . debug ( result ) ; } else if ( eventDate . length ( ) == 7 ) { DateTime endDate = startDate . toDateTime ( ) . plusMonths ( 1 ) . minus ( 1l ) ; result = new Interval ( startDate , endDate ) ; logger . debug ( result ) ; } else { DateTime endDate = startDate . toDateTime ( ) . plusDays ( 1 ) . 
minus ( 1l ) ; result = new Interval ( startDate , endDate ) ; logger . debug ( result ) ; } } catch ( Exception e ) { // not a date logger . error ( e . getMessage ( ) ) ; } } return result ; } | Given a string that may be a date or a date range extract a interval of dates from that date range up to the end milisecond of the last day . | 635 | 33 |
8,018 | public static DateMidnight extractDate ( String eventDate ) { DateMidnight result = null ; DateTimeParser [ ] parsers = { DateTimeFormat . forPattern ( "yyyy-MM" ) . getParser ( ) , DateTimeFormat . forPattern ( "yyyy" ) . getParser ( ) , DateTimeFormat . forPattern ( "yyyy-MM-dd/yyyy-MM-dd" ) . getParser ( ) , ISODateTimeFormat . dateOptionalTimeParser ( ) . getParser ( ) , ISODateTimeFormat . date ( ) . getParser ( ) } ; DateTimeFormatter formatter = new DateTimeFormatterBuilder ( ) . append ( null , parsers ) . toFormatter ( ) ; try { result = DateMidnight . parse ( eventDate , formatter ) ; logger . debug ( result ) ; } catch ( Exception e ) { // not a date logger . error ( e . getMessage ( ) ) ; } return result ; } | Extract a single joda date from an event date . | 212 | 12 |
8,019 | public static boolean isConsistent ( String eventDate , String startDayOfYear , String endDayOfYear , String year , String month , String day ) { if ( isEmpty ( eventDate ) || ( isEmpty ( startDayOfYear ) && isEmpty ( endDayOfYear ) && isEmpty ( year ) && isEmpty ( month ) && isEmpty ( day ) ) ) { return true ; } // TODO: Add support for eventTime boolean result = false ; result = isConsistent ( eventDate , year , month , day ) ; logger . debug ( result ) ; if ( ( result || ( ! isEmpty ( eventDate ) && isEmpty ( year ) && isEmpty ( month ) && isEmpty ( day ) ) ) && ( ! isEmpty ( startDayOfYear ) || ! isEmpty ( endDayOfYear ) ) ) { if ( endDayOfYear == null || endDayOfYear . trim ( ) . length ( ) == 0 || startDayOfYear . trim ( ) . equals ( endDayOfYear . trim ( ) ) ) { int startDayInt = - 1 ; try { startDayInt = Integer . parseInt ( startDayOfYear ) ; } catch ( NumberFormatException e ) { logger . debug ( e . getMessage ( ) ) ; logger . debug ( startDayOfYear + " is not an integer." ) ; result = false ; } if ( DateUtils . extractDate ( eventDate ) != null && DateUtils . extractDate ( eventDate ) . getDayOfYear ( ) == startDayInt ) { result = true ; } else { result = false ; } } else { int startDayInt = - 1 ; int endDayInt = - 1 ; try { startDayInt = Integer . parseInt ( startDayOfYear ) ; endDayInt = Integer . parseInt ( endDayOfYear ) ; } catch ( NumberFormatException e ) { logger . debug ( e . getMessage ( ) ) ; result = false ; } Interval eventDateInterval = DateUtils . extractDateInterval ( eventDate ) ; logger . debug ( eventDateInterval ) ; int endDayOfInterval = eventDateInterval . getEnd ( ) . getDayOfYear ( ) ; // midnight on the next day, so subtract 1 to get the same integer day. if ( eventDateInterval . getStart ( ) . getDayOfYear ( ) == startDayInt && endDayOfInterval == endDayInt ) { result = true ; } else { result = false ; } } } return result ; } | Identify whether an event date is consistent with its atomic parts . | 543 | 13 |
8,020 | public static boolean isEmpty ( String aString ) { boolean result = true ; if ( aString != null && aString . trim ( ) . length ( ) > 0 ) { if ( ! aString . trim ( ) . toUpperCase ( ) . equals ( "NULL" ) ) { result = false ; } } return result ; } | Does a string contain a non - blank value . | 72 | 10 |
8,021 | public static boolean specificToDay ( String eventDate ) { boolean result = false ; if ( ! isEmpty ( eventDate ) ) { Interval eventDateInterval = extractInterval ( eventDate ) ; logger . debug ( eventDateInterval ) ; logger . debug ( eventDateInterval . toDuration ( ) ) ; if ( eventDateInterval . toDuration ( ) . getStandardDays ( ) < 1l ) { result = true ; } else if ( eventDateInterval . toDuration ( ) . getStandardDays ( ) == 1l && eventDateInterval . getStart ( ) . getDayOfYear ( ) == eventDateInterval . getEnd ( ) . getDayOfYear ( ) ) { result = true ; } } return result ; } | Test if an event date specifies a duration of one day or less . | 162 | 14 |
8,022 | public static boolean specificToDecadeScale ( String eventDate ) { boolean result = false ; if ( ! isEmpty ( eventDate ) ) { Interval eventDateInterval = extractDateInterval ( eventDate ) ; if ( eventDateInterval . toDuration ( ) . getStandardDays ( ) <= 3650l ) { result = true ; } } return result ; } | Test if an event date specifies a duration of 10 years or less . | 79 | 14 |
8,023 | protected static String instantToStringTime ( Instant instant ) { String result = "" ; if ( instant != null ) { StringBuffer time = new StringBuffer ( ) ; time . append ( String . format ( "%02d" , instant . get ( DateTimeFieldType . hourOfDay ( ) ) ) ) ; time . append ( ":" ) . append ( String . format ( "%02d" , instant . get ( DateTimeFieldType . minuteOfHour ( ) ) ) ) ; time . append ( ":" ) . append ( String . format ( "%02d" , instant . get ( DateTimeFieldType . secondOfMinute ( ) ) ) ) ; time . append ( "." ) . append ( String . format ( "%03d" , instant . get ( DateTimeFieldType . millisOfSecond ( ) ) ) ) ; String timeZone = instant . getZone ( ) . getID ( ) ; if ( timeZone . equals ( "UTC" ) ) { time . append ( "Z" ) ; } else { time . append ( timeZone ) ; } result = time . toString ( ) ; } return result ; } | Given an instant return the time within one day that it represents as a string . | 242 | 16 |
8,024 | public static int countLeapDays ( String eventDate ) { int result = 0 ; if ( ! DateUtils . isEmpty ( eventDate ) && DateUtils . eventDateValid ( eventDate ) ) { Interval interval = extractInterval ( eventDate ) ; Integer sYear = interval . getStart ( ) . getYear ( ) ; Integer eYear = interval . getEnd ( ) . getYear ( ) ; String startYear = Integer . toString ( sYear ) . trim ( ) ; String endYear = Integer . toString ( eYear ) . trim ( ) ; String leapDay = startYear + "-02-29" ; logger . debug ( leapDay ) ; if ( DateUtils . eventDateValid ( leapDay ) ) { if ( interval . contains ( DateUtils . extractInterval ( leapDay ) ) ) { result = 1 ; } } // Range spanning more than one year, check last year if ( ! endYear . equals ( startYear ) ) { leapDay = endYear + "-02-29" ; logger . debug ( leapDay ) ; if ( DateUtils . eventDateValid ( leapDay ) ) { if ( interval . contains ( DateUtils . extractInterval ( leapDay ) ) ) { result ++ ; } } } // Ranges of more than two years, check intermediate years if ( eYear > sYear + 1 ) { for ( int testYear = sYear + 1 ; testYear < eYear ; testYear ++ ) { leapDay = Integer . toString ( testYear ) . trim ( ) + "-02-29" ; logger . debug ( leapDay ) ; if ( DateUtils . eventDateValid ( leapDay ) ) { if ( interval . contains ( DateUtils . extractInterval ( leapDay ) ) ) { result ++ ; } } } } } return result ; } | Count the number of leap days present in an event date | 392 | 11 |
8,025 | @ Override public void store ( final RandomRoutingTable . Snapshot snapshot ) throws IOException { try { saveString ( serialize ( snapshot ) ) ; } catch ( final JsonGenerationException e ) { throw new IOException ( "Error serializing routing snapshot" , e ) ; } catch ( final JsonMappingException e ) { throw new IOException ( "Error serializing routing snapshot" , e ) ; } } | Store routing table information via this mechanism . | 91 | 8 |
8,026 | private < T > String buildClassListTag ( final T t ) { return ( exportClassFullName != null ) ? exportClassFullName : t . getClass ( ) . getSimpleName ( ) + exportClassEnding ; } | Build XML list tag determined by fullname status and ending class phrase | 49 | 13 |
8,027 | private HashMap < Class , String > buildDefaultDataTypeMap ( ) { return new HashMap < Class , String > ( ) { { put ( boolean . class , "BOOLEAN" ) ; put ( Boolean . class , "BOOLEAN" ) ; put ( byte . class , "BYTE" ) ; put ( Byte . class , "BYTE" ) ; put ( short . class , "INT" ) ; put ( Short . class , "INT" ) ; put ( int . class , "INT" ) ; put ( Integer . class , "INT" ) ; put ( long . class , "BIGINT" ) ; put ( Long . class , "BIGINT" ) ; put ( float . class , "DOUBLE PRECISION" ) ; put ( Float . class , "DOUBLE PRECISION" ) ; put ( double . class , "DOUBLE PRECISION" ) ; put ( Double . class , "DOUBLE PRECISION" ) ; put ( char . class , "CHAR" ) ; put ( Character . class , "CHAR" ) ; put ( Date . class , "BIGINT" ) ; put ( String . class , "VARCHAR" ) ; put ( Object . class , "VARCHAR" ) ; put ( Timestamp . class , "TIMESTAMP" ) ; put ( LocalDate . class , "TIMESTAMP" ) ; put ( LocalTime . class , "TIMESTAMP" ) ; put ( LocalDateTime . class , "TIMESTAMP" ) ; } } ; } | Build default data types | 341 | 4 |
8,028 | private String buildCreateTableQuery ( final IClassContainer container , final String primaryKeyField ) { final StringBuilder builder = new StringBuilder ( "CREATE TABLE IF NOT EXISTS " ) . append ( container . getExportClassName ( ) . toLowerCase ( ) ) . append ( "(\n" ) ; final String resultValues = container . getFormatSupported ( Format . SQL ) . entrySet ( ) . stream ( ) . map ( e -> "\t" + buildInsertNameTypeQuery ( e . getValue ( ) . getExportName ( ) , container ) ) . collect ( Collectors . joining ( ",\n" ) ) ; builder . append ( resultValues ) ; // Write primary key constraint return builder . append ( ",\n" ) . append ( "\tPRIMARY KEY (" ) . append ( primaryKeyField ) . append ( ")\n);\n" ) . toString ( ) ; } | Create String of Create Table Query | 196 | 6 |
8,029 | private String buildInsertNameTypeQuery ( final String finalFieldName , final IClassContainer container ) { final Class < ? > exportFieldType = container . getField ( finalFieldName ) . getType ( ) ; switch ( container . getContainer ( finalFieldName ) . getType ( ) ) { case ARRAY : case COLLECTION : final Class < ? > type = exportFieldType . getComponentType ( ) ; return finalFieldName + "\t" + translateJavaTypeToSqlType ( type ) + "[]" ; case ARRAY_2D : final Class < ? > type2D = exportFieldType . getComponentType ( ) . getComponentType ( ) ; return finalFieldName + "\t" + translateJavaTypeToSqlType ( type2D ) + "[][]" ; default : return finalFieldName + "\t" + translateJavaTypeToSqlType ( exportFieldType ) ; } } | Creates String of Create Table Insert Quert | 196 | 9 |
8,030 | private < T > String buildInsertQuery ( final T t , final IClassContainer container ) { final List < ExportContainer > exportContainers = extractExportContainers ( t , container ) ; final StringBuilder builder = new StringBuilder ( "INSERT INTO " ) . append ( container . getExportClassName ( ) . toLowerCase ( ) ) . append ( " (" ) ; final String names = exportContainers . stream ( ) . map ( ExportContainer :: getExportName ) . collect ( Collectors . joining ( ", " ) ) ; return builder . append ( names ) . append ( ") " ) . append ( "VALUES\n" ) . toString ( ) ; } | Build insert query part with values | 145 | 6 |
8,031 | private < T > String format ( final T t , final IClassContainer container ) { final List < ExportContainer > exportContainers = extractExportContainers ( t , container ) ; final String resultValues = exportContainers . stream ( ) . map ( c -> convertFieldValue ( container . getField ( c . getExportName ( ) ) , c ) ) . collect ( Collectors . joining ( ", " ) ) ; return "(" + resultValues + ")" ; } | Creates insert query field name | 100 | 6 |
8,032 | private boolean isTypeTimestampConvertible ( final Field field ) { return dataTypes . entrySet ( ) . stream ( ) . anyMatch ( e -> e . getValue ( ) . equals ( "TIMESTAMP" ) && e . getKey ( ) . equals ( field . getType ( ) ) ) ; } | Check data types for field class compatibility with Timestamp class | 69 | 11 |
8,033 | private String convertFieldValue ( final Field field , final ExportContainer container ) { final boolean isArray2D = ( container . getType ( ) == FieldContainer . Type . ARRAY_2D ) ; if ( field . getType ( ) . equals ( String . class ) ) { return wrapWithComma ( container . getExportValue ( ) ) ; } else if ( isTypeTimestampConvertible ( field ) ) { return wrapWithComma ( String . valueOf ( convertFieldValueToTimestamp ( field , container ) ) ) ; } else if ( container . getType ( ) == FieldContainer . Type . ARRAY || isArray2D || container . getType ( ) == FieldContainer . Type . COLLECTION ) { final Class < ? > componentType = extractType ( container . getType ( ) , field ) ; final String sqlType = dataTypes . getOrDefault ( componentType , "VARCHAR" ) ; final String result = ( sqlType . equals ( "VARCHAR" ) || sqlType . equals ( "CHAR" ) ) ? container . getExportValue ( ) . replace ( "[" , "{\"" ) . replace ( "]" , "\"}" ) . replace ( "," , "\",\"" ) . replace ( " " , "" ) : container . getExportValue ( ) . replace ( "[" , "{" ) . replace ( "]" , "}" ) ; return wrapWithComma ( result ) ; } return container . getExportValue ( ) ; } | Convert container value to Sql specific value type | 321 | 10 |
8,034 | private Timestamp convertFieldValueToTimestamp ( final Field field , final ExportContainer exportContainer ) { if ( field . getType ( ) . equals ( LocalDateTime . class ) ) { return convertToTimestamp ( parseDateTime ( exportContainer . getExportValue ( ) ) ) ; } else if ( field . getType ( ) . equals ( LocalDate . class ) ) { return convertToTimestamp ( parseDate ( exportContainer . getExportValue ( ) ) ) ; } else if ( field . getType ( ) . equals ( LocalTime . class ) ) { return convertToTimestamp ( parseTime ( exportContainer . getExportValue ( ) ) ) ; } else if ( field . getType ( ) . equals ( Date . class ) ) { return convertToTimestamp ( parseSimpleDateLong ( exportContainer . getExportValue ( ) ) ) ; } else if ( field . getType ( ) . equals ( Timestamp . class ) ) { return Timestamp . valueOf ( exportContainer . getExportValue ( ) ) ; } return null ; } | Convert container export value to timestamp value type | 224 | 9 |
8,035 | public final List < DiscreteVariable > getDiscreteVariables ( ) { List < DiscreteVariable > discreteVars = new ArrayList < DiscreteVariable > ( ) ; for ( int i = 0 ; i < vars . length ; i ++ ) { if ( vars [ i ] instanceof DiscreteVariable ) { discreteVars . add ( ( DiscreteVariable ) vars [ i ] ) ; } } return discreteVars ; } | Get the discrete variables in this map ordered by variable index . | 95 | 12 |
8,036 | private final void checkCompatibility ( VariableNumMap other ) { int i = 0 , j = 0 ; int [ ] otherNums = other . nums ; String [ ] otherNames = other . names ; Variable [ ] otherVars = other . vars ; while ( i < nums . length && j < otherNums . length ) { if ( nums [ i ] < otherNums [ j ] ) { i ++ ; } else if ( nums [ i ] > otherNums [ j ] ) { j ++ ; } else { // Equal Preconditions . checkArgument ( names [ i ] . equals ( otherNames [ j ] ) ) ; Preconditions . checkArgument ( vars [ i ] . getName ( ) . equals ( otherVars [ j ] . getName ( ) ) ) ; i ++ ; j ++ ; } } } | Ensures that all variable numbers which are shared between other and this are mapped to the same variables . | 186 | 21 |
8,037 | public Assignment outcomeToAssignment ( Object [ ] outcome ) { Preconditions . checkArgument ( outcome . length == nums . length , "outcome %s cannot be assigned to %s (wrong number of values)" , outcome , this ) ; return Assignment . fromSortedArrays ( nums , outcome ) ; } | Get the assignment corresponding to a particular setting of the variables in this factor . | 69 | 15 |
8,038 | public static VariableNumMap unionAll ( Collection < VariableNumMap > varNumMaps ) { VariableNumMap curMap = EMPTY ; for ( VariableNumMap varNumMap : varNumMaps ) { curMap = curMap . union ( varNumMap ) ; } return curMap ; } | Returns the union of all of the passed - in maps which may not contain conflicting mappings for any variable number . | 61 | 23 |
8,039 | public static Credentials getUserByEmail ( DAO serverDao , String email ) throws DAO . DAOException { Query query = new QueryBuilder ( ) . select ( ) . from ( Credentials . class ) . where ( Credentials . EMAIL_KEY , OPERAND . EQ , email ) . build ( ) ; TransientObject to = ( TransientObject ) ObjectUtils . get1stOrNull ( serverDao . query ( query ) ) ; if ( to == null ) { return null ; } else { return to ( Credentials . class , to ) ; } } | Convience method to do a Credentials query against an email address | 130 | 15 |
8,040 | public static Credentials getUserById ( DAO serverDao , String userId ) throws DAO . DAOException { Query query = new QueryBuilder ( ) . select ( ) . from ( Credentials . class ) . where ( Credentials . OWNER_ID_KEY , OPERAND . EQ , userId ) . build ( ) ; TransientObject to = ( TransientObject ) ObjectUtils . get1stOrNull ( serverDao . query ( query ) ) ; if ( to == null ) { return null ; } else { return to ( Credentials . class , to ) ; } } | Convience method to do a Credentials query against an user id . | 132 | 16 |
8,041 | private static String getSalt ( byte [ ] value ) { byte [ ] salt = new byte [ Generate . SALT_BYTES ] ; System . arraycopy ( value , 0 , salt , 0 , salt . length ) ; return ByteArray . toBase64 ( salt ) ; } | Retrieves the salt from the given value . | 61 | 10 |
8,042 | private static byte [ ] getHash ( byte [ ] value ) { byte [ ] hash = new byte [ value . length - Generate . SALT_BYTES ] ; System . arraycopy ( value , Generate . SALT_BYTES , hash , 0 , hash . length ) ; return hash ; } | Retrieves the hash from the given value . | 67 | 10 |
8,043 | @ Override public SufficientStatistics getNewSufficientStatistics ( ) { List < SufficientStatistics > lexiconParameterList = Lists . newArrayList ( ) ; List < String > lexiconParameterNames = Lists . newArrayList ( ) ; for ( int i = 0 ; i < lexiconFamilies . size ( ) ; i ++ ) { ParametricCcgLexicon lexiconFamily = lexiconFamilies . get ( i ) ; lexiconParameterList . add ( lexiconFamily . getNewSufficientStatistics ( ) ) ; lexiconParameterNames . add ( Integer . toString ( i ) ) ; } SufficientStatistics lexiconParameters = new ListSufficientStatistics ( lexiconParameterNames , lexiconParameterList ) ; List < SufficientStatistics > lexiconScorerParameterList = Lists . newArrayList ( ) ; List < String > lexiconScorerParameterNames = Lists . newArrayList ( ) ; for ( int i = 0 ; i < lexiconScorerFamilies . size ( ) ; i ++ ) { ParametricLexiconScorer lexiconScorerFamily = lexiconScorerFamilies . get ( i ) ; lexiconScorerParameterList . add ( lexiconScorerFamily . getNewSufficientStatistics ( ) ) ; lexiconScorerParameterNames . add ( Integer . toString ( i ) ) ; } SufficientStatistics lexiconScorerParameters = new ListSufficientStatistics ( lexiconScorerParameterNames , lexiconScorerParameterList ) ; SufficientStatistics wordSkipParameters = ListSufficientStatistics . empty ( ) ; if ( wordSkipFamily != null ) { wordSkipParameters = wordSkipFamily . getNewSufficientStatistics ( ) ; } SufficientStatistics dependencyParameters = dependencyFamily . getNewSufficientStatistics ( ) ; SufficientStatistics wordDistanceParameters = wordDistanceFamily . getNewSufficientStatistics ( ) ; SufficientStatistics puncDistanceParameters = puncDistanceFamily . getNewSufficientStatistics ( ) ; SufficientStatistics verbDistanceParameters = verbDistanceFamily . getNewSufficientStatistics ( ) ; SufficientStatistics syntaxParameters = syntaxFamily . getNewSufficientStatistics ( ) ; SufficientStatistics unaryRuleParameters = unaryRuleFamily . 
getNewSufficientStatistics ( ) ; SufficientStatistics headedBinaryRuleParameters = headedBinaryRuleFamily . getNewSufficientStatistics ( ) ; SufficientStatistics rootSyntaxParameters = rootSyntaxFamily . getNewSufficientStatistics ( ) ; SufficientStatistics headedRootSyntaxParameters = headedRootSyntaxFamily . getNewSufficientStatistics ( ) ; return new ListSufficientStatistics ( STATISTIC_NAME_LIST , Arrays . asList ( lexiconParameters , lexiconScorerParameters , wordSkipParameters , dependencyParameters , wordDistanceParameters , puncDistanceParameters , verbDistanceParameters , syntaxParameters , unaryRuleParameters , headedBinaryRuleParameters , rootSyntaxParameters , headedRootSyntaxParameters ) ) ; } | Gets a new all - zero parameter vector . | 622 | 10 |
8,044 | public static synchronized < BackendType extends Backend > BackendType init ( Config < BackendType > config ) { // if(initialized) throw new RuntimeException("Backend already initialized!"); logger . debug ( "Initializing... " + config ) ; Guice . createInjector ( config . getModule ( ) ) ; return injector . getInstance ( config . getModuleType ( ) ) ; } | Initialization point for divide . Returns an instance of the Divide object . Only one instance may exist at a time . | 87 | 23 |
8,045 | public static Type getGenericType ( final Type type , final int paramNumber ) { try { final ParameterizedType parameterizedType = ( ( ParameterizedType ) type ) ; return ( parameterizedType . getActualTypeArguments ( ) . length < paramNumber ) ? Object . class : parameterizedType . getActualTypeArguments ( ) [ paramNumber ] ; } catch ( Exception e ) { return Object . class ; } } | Extracts generic type | 94 | 5 |
8,046 | public static boolean areEquals ( final Class < ? > firstClass , final Class < ? > secondClass ) { final boolean isFirstShort = firstClass . isAssignableFrom ( Short . class ) ; final boolean isSecondShort = secondClass . isAssignableFrom ( Short . class ) ; if ( isFirstShort && isSecondShort || isFirstShort && secondClass . equals ( short . class ) || firstClass . equals ( short . class ) && isSecondShort ) return true ; final boolean isFirstByte = firstClass . isAssignableFrom ( Byte . class ) ; final boolean isSecondByte = secondClass . isAssignableFrom ( Byte . class ) ; if ( isFirstByte && isSecondByte || isFirstByte && secondClass . equals ( byte . class ) || firstClass . equals ( byte . class ) && isSecondByte ) return true ; final boolean isFirstInt = firstClass . isAssignableFrom ( Integer . class ) ; final boolean isSecondInt = secondClass . isAssignableFrom ( Integer . class ) ; if ( isFirstInt && isSecondInt || isFirstInt && secondClass . equals ( int . class ) || firstClass . equals ( int . class ) && isSecondInt ) return true ; final boolean isFirstLong = firstClass . isAssignableFrom ( Long . class ) ; final boolean isSecondLong = secondClass . isAssignableFrom ( Long . class ) ; if ( isFirstLong && isSecondLong || isFirstLong && secondClass . equals ( long . class ) || firstClass . equals ( long . class ) && isSecondLong ) return true ; final boolean isFirstDouble = firstClass . isAssignableFrom ( Double . class ) ; final boolean isSecondDouble = secondClass . isAssignableFrom ( Double . class ) ; if ( isFirstDouble && isSecondDouble || isFirstDouble && secondClass . equals ( double . class ) || firstClass . equals ( double . class ) && isSecondDouble ) return true ; final boolean isFirstFloat = firstClass . isAssignableFrom ( Float . class ) ; final boolean isSecondFloat = secondClass . isAssignableFrom ( Float . class ) ; if ( isFirstFloat && isSecondFloat || isFirstFloat && secondClass . equals ( float . class ) || firstClass . 
equals ( float . class ) && isSecondFloat ) return true ; final boolean isFirstChar = firstClass . isAssignableFrom ( Character . class ) ; final boolean isSecondChar = secondClass . isAssignableFrom ( Character . class ) ; if ( isFirstChar && isSecondChar || isFirstChar && secondClass . equals ( char . class ) || firstClass . equals ( char . class ) && isSecondChar ) return true ; final boolean isFirstBool = firstClass . isAssignableFrom ( Boolean . class ) ; final boolean isSecondBool = secondClass . isAssignableFrom ( Boolean . class ) ; if ( isFirstBool && isSecondBool || isFirstChar && secondClass . equals ( boolean . class ) || firstClass . equals ( boolean . class ) && isSecondChar ) return true ; return firstClass . equals ( secondClass ) ; } | Check if objects have equals types even if they are primitive | 687 | 11 |
8,047 | @ Override public < T extends DAO > T insert ( DAO dao , boolean excludePrimaryKeys ) throws DatabaseException { ModelDef model = db . getModelMetaDataDefinition ( ) . getDefinition ( dao . getModelName ( ) ) ; if ( excludePrimaryKeys ) { dao . add_IgnoreColumn ( model . getPrimaryAttributes ( ) ) ; } return insertRecord ( dao , model , true ) ; } | The primary keys should have defaults on the database to make this work | 93 | 13 |
8,048 | @ Override public < T extends DAO > T insertNoChangeLog ( DAO dao , ModelDef model ) throws DatabaseException { DAO ret = db . insert ( dao , null , model , null ) ; Class < ? extends DAO > clazz = getDaoClass ( dao . getModelName ( ) ) ; return cast ( clazz , ret ) ; } | Insert the record without bothering changelog to avoid infinite method recursive calls when inserting changelogs into record_changelog table | 82 | 26 |
8,049 | @ Override public Fixture addObjects ( Object ... objectsToAdd ) { if ( 0 < objectsToAdd . length ) { Collections . addAll ( objects , objectsToAdd ) ; } return this ; } | Add entities to the current list of entities to load . | 45 | 11 |
8,050 | public String digest ( String message ) { try { Mac mac = Mac . getInstance ( algorithm ) ; SecretKeySpec macKey = new SecretKeySpec ( key , algorithm ) ; mac . init ( macKey ) ; byte [ ] digest = mac . doFinal ( ByteArray . fromString ( message ) ) ; return ByteArray . toHex ( digest ) ; } catch ( NoSuchAlgorithmException e ) { throw new IllegalStateException ( "Algorithm unavailable: " + algorithm , e ) ; } catch ( InvalidKeyException e ) { throw new IllegalArgumentException ( "Unable to construct key for " + algorithm + ". Please check the value passed in when this class was initialised." , e ) ; } } | Computes an HMAC for the given message using the key passed to the constructor . | 152 | 17 |
8,051 | public void reweightRootEntries ( CcgChart chart ) { int spanStart = 0 ; int spanEnd = chart . size ( ) - 1 ; int numChartEntries = chart . getNumChartEntriesForSpan ( spanStart , spanEnd ) ; // Apply unary rules. ChartEntry [ ] entries = CcgBeamSearchChart . copyChartEntryArray ( chart . getChartEntriesForSpan ( spanStart , spanEnd ) , numChartEntries ) ; double [ ] probs = ArrayUtils . copyOf ( chart . getChartEntryProbsForSpan ( spanStart , spanEnd ) , numChartEntries ) ; chart . clearChartEntriesForSpan ( spanStart , spanEnd ) ; for ( int i = 0 ; i < entries . length ; i ++ ) { chart . addChartEntryForSpan ( entries [ i ] , probs [ i ] , spanStart , spanEnd , syntaxVarType ) ; applyUnaryRules ( chart , entries [ i ] , probs [ i ] , spanStart , spanEnd ) ; } chart . doneAddingChartEntriesForSpan ( spanStart , spanEnd ) ; // Apply root factor. numChartEntries = chart . getNumChartEntriesForSpan ( spanStart , spanEnd ) ; entries = CcgBeamSearchChart . copyChartEntryArray ( chart . getChartEntriesForSpan ( spanStart , spanEnd ) , numChartEntries ) ; probs = ArrayUtils . copyOf ( chart . getChartEntryProbsForSpan ( spanStart , spanEnd ) , numChartEntries ) ; chart . clearChartEntriesForSpan ( spanStart , spanEnd ) ; for ( int i = 0 ; i < entries . length ; i ++ ) { ChartEntry entry = entries [ i ] ; double rootProb = scoreRootEntry ( entry , chart ) ; chart . addChartEntryForSpan ( entry , probs [ i ] * rootProb , spanStart , spanEnd , syntaxVarType ) ; } chart . doneAddingChartEntriesForSpan ( spanStart , spanEnd ) ; } | Updates entries in the beam for the root node with a factor for the root syntactic category and any unary rules . | 460 | 25 |
8,052 | public < B extends BackendObject > Observable < Void > send ( final Collection < B > objects ) { return getWebService ( ) . save ( isLoggedIn ( ) , objects ) . subscribeOn ( config . subscribeOn ( ) ) . observeOn ( config . observeOn ( ) ) ; } | Function used to save objects on remote server . | 65 | 9 |
8,053 | public < B extends BackendObject > Observable < Collection < B > > get ( final Class < B > type , final Collection < String > objects ) { return Observable . create ( new Observable . OnSubscribe < Collection < B > > ( ) { @ Override public void call ( Subscriber < ? super Collection < B > > observer ) { try { observer . onNext ( convertRequest ( getArrayType ( type ) , getWebService ( ) . get ( isLoggedIn ( ) , Query . safeTable ( type ) , objects ) ) ) ; observer . onCompleted ( ) ; } catch ( Exception e ) { observer . onError ( e ) ; } } } ) . subscribeOn ( config . subscribeOn ( ) ) . observeOn ( config . observeOn ( ) ) ; } | Function used to return specific objects corresponding to the object keys provided . | 170 | 15 |
8,054 | public < B extends BackendObject > Observable < Integer > count ( final Class < B > type ) { return getWebService ( ) . count ( isLoggedIn ( ) , Query . safeTable ( type ) ) . subscribeOn ( config . subscribeOn ( ) ) . observeOn ( config . observeOn ( ) ) ; } | Functin used to perform a count query against remote sever for specifed type . | 71 | 18 |
8,055 | public static Set < HeadedSyntacticCategory > getSyntacticCategoryClosure ( Collection < HeadedSyntacticCategory > syntacticCategories ) { Set < String > featureValues = Sets . newHashSet ( ) ; for ( HeadedSyntacticCategory cat : syntacticCategories ) { getAllFeatureValues ( cat . getSyntax ( ) , featureValues ) ; } // Compute the closure of syntactic categories, assuming the only // operations are function application and feature assignment. Queue < HeadedSyntacticCategory > unprocessed = new LinkedList < HeadedSyntacticCategory > ( ) ; unprocessed . addAll ( syntacticCategories ) ; Set < HeadedSyntacticCategory > allCategories = Sets . newHashSet ( ) ; while ( unprocessed . size ( ) > 0 ) { HeadedSyntacticCategory cat = unprocessed . poll ( ) ; Preconditions . checkArgument ( cat . isCanonicalForm ( ) ) ; allCategories . addAll ( canonicalizeCategories ( cat . getSubcategories ( featureValues ) ) ) ; if ( ! cat . isAtomic ( ) ) { HeadedSyntacticCategory ret = cat . getReturnType ( ) . getCanonicalForm ( ) ; if ( ! allCategories . contains ( ret ) && ! unprocessed . contains ( ret ) ) { unprocessed . offer ( ret ) ; } HeadedSyntacticCategory arg = cat . getArgumentType ( ) . getCanonicalForm ( ) ; if ( ! allCategories . contains ( arg ) && ! unprocessed . contains ( arg ) ) { unprocessed . offer ( arg ) ; } } } // XXX: jayantk 1/8/2016 I think this loop does exactly the same thing as // the previous one. 
/* Set<HeadedSyntacticCategory> allCategories = Sets.newHashSet(); for (HeadedSyntacticCategory cat : syntacticCategories) { Preconditions.checkArgument(cat.isCanonicalForm()); allCategories.addAll(canonicalizeCategories(cat.getSubcategories(featureValues))); while (!cat.getSyntax().isAtomic()) { allCategories.addAll(canonicalizeCategories(cat.getArgumentType().getCanonicalForm().getSubcategories(featureValues))); allCategories.addAll(canonicalizeCategories(cat.getReturnType().getCanonicalForm().getSubcategories(featureValues))); cat = cat.getReturnType(); } } */ return allCategories ; } | Gets the closure of a set of syntactic categories under function application and feature assignment . | 571 | 18 |
8,056 | public static DiscreteFactor buildUnrestrictedBinaryDistribution ( DiscreteVariable syntaxType , Iterable < CcgBinaryRule > rules , boolean allowComposition ) { List < HeadedSyntacticCategory > allCategories = syntaxType . getValuesWithCast ( HeadedSyntacticCategory . class ) ; Set < List < Object > > validOutcomes = Sets . newHashSet ( ) ; Set < Combinator > combinators = Sets . newHashSet ( ) ; // Compute function application rules. for ( HeadedSyntacticCategory functionCat : allCategories ) { for ( HeadedSyntacticCategory argumentCat : allCategories ) { appendApplicationRules ( functionCat , argumentCat , syntaxType , validOutcomes , combinators ) ; } } if ( allowComposition ) { // Compute function composition rules. for ( HeadedSyntacticCategory functionCat : allCategories ) { for ( HeadedSyntacticCategory argumentCat : allCategories ) { appendCompositionRules ( functionCat , argumentCat , syntaxType , validOutcomes , combinators ) ; } } } appendBinaryRules ( rules , syntaxType , validOutcomes , combinators ) ; return buildSyntaxDistribution ( syntaxType , validOutcomes , combinators ) ; } | Constructs a distribution over binary combination rules for CCG given a set of syntactic categories . This method compiles out all of the possible ways to combine two adjacent CCG categories using function application composition and any other binary rules . | 274 | 46 |
8,057 | private LinkedList < Class > getClassesToDelete ( ) { LinkedHashSet < Class > classesToDelete = new LinkedHashSet < Class > ( ) ; for ( Object object : getObjects ( ) ) { classesToDelete . add ( object . getClass ( ) ) ; } return new LinkedList < Class > ( classesToDelete ) ; } | Returns the list of mapping classes representing the tables to truncate . | 79 | 13 |
8,058 | public static SparseTensor diagonal ( int [ ] dimensionNumbers , int [ ] dimensionSizes , double value ) { int minDimensionSize = Ints . min ( dimensionSizes ) ; double [ ] values = new double [ minDimensionSize ] ; Arrays . fill ( values , value ) ; return diagonal ( dimensionNumbers , dimensionSizes , values ) ; } | Creates a tensor whose only non - zero entries are on its main diagonal . | 79 | 17 |
8,059 | public static CcgCategory fromSyntaxLf ( HeadedSyntacticCategory cat , Expression2 lf ) { String head = lf . toString ( ) ; head = head . replaceAll ( " " , "_" ) ; List < String > subjects = Lists . newArrayList ( ) ; List < Integer > argumentNums = Lists . newArrayList ( ) ; List < Integer > objects = Lists . newArrayList ( ) ; List < HeadedSyntacticCategory > argumentCats = Lists . newArrayList ( cat . getArgumentTypes ( ) ) ; Collections . reverse ( argumentCats ) ; for ( int i = 0 ; i < argumentCats . size ( ) ; i ++ ) { subjects . add ( head ) ; argumentNums . add ( i + 1 ) ; objects . add ( argumentCats . get ( i ) . getHeadVariable ( ) ) ; } List < Set < String > > assignments = Lists . newArrayList ( ) ; for ( int i = 0 ; i < cat . getUniqueVariables ( ) . length ; i ++ ) { assignments . add ( Collections . < String > emptySet ( ) ) ; } int headVar = cat . getHeadVariable ( ) ; assignments . set ( headVar , Sets . newHashSet ( head ) ) ; return new CcgCategory ( cat , lf , subjects , argumentNums , objects , assignments ) ; } | Generates a CCG category with head and dependency information automatically populated from the syntactic category and logical form . The logical form itself is used as the semantic head of the returned category . | 303 | 37 |
8,060 | public List < String > getSemanticHeads ( ) { int headSemanticVariable = syntax . getHeadVariable ( ) ; int [ ] allSemanticVariables = getSemanticVariables ( ) ; for ( int i = 0 ; i < allSemanticVariables . length ; i ++ ) { if ( allSemanticVariables [ i ] == headSemanticVariable ) { return Lists . newArrayList ( variableAssignments . get ( i ) ) ; } } return Collections . emptyList ( ) ; } | The semantic head of this category i . e . the assignment to the semantic variable at the root of the syntactic tree . | 111 | 25 |
8,061 | public Combinator . Type getDerivingCombinatorType ( ) { if ( combinator == null ) { return Combinator . Type . OTHER ; } else { return combinator . getType ( ) ; } } | Gets the type of the combinator used to produce this chart entry . | 46 | 15 |
8,062 | public SparseTensor getOldKeyIndicatorTensor ( ) { double [ ] values = new double [ oldKeyNums . length ] ; Arrays . fill ( values , 1.0 ) ; return SparseTensor . fromUnorderedKeyValues ( oldTensor . getDimensionNumbers ( ) , oldTensor . getDimensionSizes ( ) , oldKeyNums , values ) ; } | Gets a tensor of indicator variables for the old key values in this . The returned tensor has value 1 for all keys in this and 0 for all other keys . | 87 | 35 |
8,063 | public static SExpression readProgram ( List < String > filenames , IndexedList < String > symbolTable ) { StringBuilder programBuilder = new StringBuilder ( ) ; programBuilder . append ( "(begin " ) ; for ( String filename : filenames ) { for ( String line : IoUtils . readLines ( filename ) ) { line = line . replaceAll ( "^[ \t]*;.*" , "" ) ; programBuilder . append ( line ) ; programBuilder . append ( " " ) ; } } programBuilder . append ( " )" ) ; String program = programBuilder . toString ( ) ; ExpressionParser < SExpression > parser = ExpressionParser . sExpression ( symbolTable ) ; SExpression programExpression = parser . parse ( program ) ; return programExpression ; } | Reads a program from a list of files . Lines starting with any amount of whitespace followed by ; are ignored as comments . | 175 | 26 |
8,064 | public static boolean hasRecords ( String hostName , String dnsType ) throws DNSLookupException { return DNSLookup . doLookup ( hostName , dnsType ) > 0 ; } | Checks if a host name has a valid record . | 42 | 11 |
8,065 | public static int doLookup ( String hostName , String dnsType ) throws DNSLookupException { // JNDI cannot take two-byte chars, so we convert the hostname into Punycode hostName = UniPunyCode . toPunycodeIfPossible ( hostName ) ; Hashtable < String , String > env = new Hashtable < String , String > ( ) ; env . put ( "java.naming.factory.initial" , "com.sun.jndi.dns.DnsContextFactory" ) ; DirContext ictx ; try { ictx = new InitialDirContext ( env ) ; } catch ( NamingException e ) { throw new DNSInitialContextException ( e ) ; } Attributes attrs ; try { attrs = ictx . getAttributes ( hostName , new String [ ] { dnsType } ) ; } catch ( NameNotFoundException e ) { // The hostname was not found or is invalid return - 1 ; } catch ( InvalidAttributeIdentifierException e ) { // The DNS type is invalid throw new DNSInvalidTypeException ( e ) ; } catch ( NamingException e ) { // Unknown reason throw new DNSLookupException ( e ) ; } Attribute attr = attrs . get ( dnsType ) ; if ( attr == null ) { return 0 ; } return attr . size ( ) ; } | Counts the number of records found for hostname and the specific type . Outputs 0 if no record is found or - 1 if the hostname is unknown invalid! | 300 | 34 |
8,066 | public static void shutdown ( ) { if ( MetricsManager . executorService != null ) { MetricsManager . executorService . shutdown ( ) ; MetricsManager . executorService = null ; } if ( MetricsManager . instance != null ) { MetricsManager . instance = null ; MetricsManager . poolManager . shutdown ( ) ; MetricsManager . poolManager = null ; MetricsManager . httpClient = null ; MetricsManager . rootMetricsLogger = null ; } } | Shutdown MetricsManager clean up resources This method is not thread - safe only use it to clean up resources when your application is shutting down . | 105 | 29 |
8,067 | public static MetricsLogger getMetricsLogger ( final String dimensions ) { if ( MetricsManager . instance != null ) { final Map < String , String > dimensionsMap = DimensionsUtils . parseDimensions ( dimensions ) ; if ( ! dimensionsMap . isEmpty ( ) ) { dimensionsMap . put ( "service" , MetricsManager . instance . serviceName ) ; if ( MetricsManager . instance . env . length ( ) > 0 ) { dimensionsMap . put ( "env" , MetricsManager . instance . env ) ; } return MetricsManager . instance . metricsLoggers . computeIfAbsent ( DimensionsUtils . serializeDimensionsToString ( dimensionsMap ) , key -> new MetricsLogger ( dimensionsMap ) ) ; } else { throw new IllegalArgumentException ( "Dimensions must be valid and non-empty" ) ; } } return dummyLogger ; } | Get MetricsLogger to start collecting metrics . MetricsLogger can be used to collect Counter Timer or Recorder | 191 | 25 |
8,068 | public static void flushAll ( long now ) { if ( MetricsManager . instance != null ) { MetricsManager . instance . metricsLoggers . values ( ) . forEach ( MetricsManager :: flushMetricsLogger ) ; flushToServer ( now ) ; } } | Flush all metrics which have been collected so far by all MetricsLoggers . Metrics can also be flushed by each MetricsLogger individually . | 58 | 31 |
8,069 | static void flushToServer ( long now ) { LOG . debug ( "Flush to BeeInstant Server" ) ; Collection < String > readyToSubmit = new ArrayList <> ( ) ; metricsQueue . drainTo ( readyToSubmit ) ; StringBuilder builder = new StringBuilder ( ) ; readyToSubmit . forEach ( string -> { builder . append ( string ) ; builder . append ( "\n" ) ; } ) ; if ( ! readyToSubmit . isEmpty ( ) && beeInstantHost != null ) { try { final String body = builder . toString ( ) ; StringEntity entity = new StringEntity ( body ) ; entity . setContentType ( "text/plain" ) ; String uri = "/PutMetric" ; final String signature = sign ( entity ) ; if ( ! signature . isEmpty ( ) ) { uri += "?signature=" + URLEncoder . encode ( signature , "UTF-8" ) ; uri += "&publicKey=" + URLEncoder . encode ( publicKey , "UTF-8" ) ; uri += "×tamp=" + now ; HttpPost putMetricCommand = new HttpPost ( uri ) ; try { putMetricCommand . setEntity ( entity ) ; HttpResponse response = httpClient . execute ( beeInstantHost , putMetricCommand ) ; LOG . info ( "Response: " + response . getStatusLine ( ) . getStatusCode ( ) ) ; } finally { putMetricCommand . releaseConnection ( ) ; } } } catch ( Throwable e ) { LOG . error ( "Fail to emit metrics" , e ) ; } } } | Flush metrics to BeeInstant Server | 352 | 7 |
8,070 | static void reportError ( final String errorMessage ) { if ( MetricsManager . instance != null ) { MetricsManager . rootMetricsLogger . incCounter ( METRIC_ERRORS , 1 ) ; } LOG . error ( errorMessage ) ; } | Report errors during metric data collecting process . Report in two forms a host level metric which counts number of errors and a log line with message for each error . Will be used by MetricsLogger to report errors . | 54 | 43 |
8,071 | static void flushMetricsLogger ( final MetricsLogger metricsLogger ) { metricsLogger . flushToString ( MetricsManager :: queue ) ; MetricsManager . rootMetricsLogger . flushToString ( MetricsManager :: queue ) ; } | Flush metrics collected by MetricsLogger to log files . Will be used by MetricsLogger to flush itself . | 55 | 25 |
8,072 | public static < A > Mapper < A , A > identity ( ) { return new Mapper < A , A > ( ) { @ Override public A map ( A item ) { return item ; } } ; } | Gets the identity mapper . | 46 | 7 |
8,073 | public KeyPair unwrapKeyPair ( String wrappedPrivateKey , String encodedPublicKey ) { PrivateKey privateKey = unwrapPrivateKey ( wrappedPrivateKey ) ; PublicKey publicKey = decodePublicKey ( encodedPublicKey ) ; return new KeyPair ( publicKey , privateKey ) ; } | Convenience method to unwrap a public - private key pair in a single call . | 64 | 18 |
8,074 | public static final DimensionSpec mergeDimensions ( int [ ] firstDimensionNums , int [ ] firstDimensionSizes , int [ ] secondDimensionNums , int [ ] secondDimensionSizes ) { SortedSet < Integer > first = Sets . newTreeSet ( Ints . asList ( firstDimensionNums ) ) ; SortedSet < Integer > second = Sets . newTreeSet ( Ints . asList ( secondDimensionNums ) ) ; SortedSet < Integer > all = Sets . newTreeSet ( first ) ; all . addAll ( second ) ; int [ ] resultDims = Ints . toArray ( all ) ; int [ ] resultSizes = new int [ resultDims . length ] ; for ( int i = 0 ; i < resultDims . length ; i ++ ) { int dim = resultDims [ i ] ; if ( first . contains ( dim ) && second . contains ( dim ) ) { int firstIndex = Ints . indexOf ( firstDimensionNums , dim ) ; int secondIndex = Ints . indexOf ( secondDimensionNums , dim ) ; int firstSize = firstDimensionSizes [ firstIndex ] ; int secondSize = secondDimensionSizes [ secondIndex ] ; Preconditions . checkArgument ( firstSize == secondSize , "Dimension sizes do not match: dim %s, sizes %s and %s." , dim , firstSize , secondSize ) ; resultSizes [ i ] = firstSize ; } else if ( first . contains ( dim ) ) { int firstIndex = Ints . indexOf ( firstDimensionNums , dim ) ; int firstSize = firstDimensionSizes [ firstIndex ] ; resultSizes [ i ] = firstSize ; } else { int secondIndex = Ints . indexOf ( secondDimensionNums , dim ) ; int secondSize = secondDimensionSizes [ secondIndex ] ; resultSizes [ i ] = secondSize ; } } return new DimensionSpec ( resultDims , resultSizes ) ; } | Merges the given sets of dimensions verifying that any dimensions in both sets have the same size . | 443 | 19 |
8,075 | @ Override public T next ( ) { if ( ! hasNext ( ) ) { throw new NoSuchElementException ( toString ( ) + " ended" ) ; } current = iterator . next ( ) ; return current ( ) ; } | Returns the next value in list . | 50 | 7 |
8,076 | < T > IClassContainer buildClassContainer ( final List < T > list ) { return ( BasicCollectionUtils . isNotEmpty ( list ) ) ? buildClassContainer ( list . get ( 0 ) ) : null ; } | Build class container with export entity parameters | 48 | 7 |
8,077 | IWriter buildWriter ( final IClassContainer classContainer ) { try { return new BufferedFileWriter ( classContainer . getExportClassName ( ) , path , format . getExtension ( ) ) ; } catch ( IOException e ) { logger . warning ( e . getMessage ( ) ) ; return null ; } } | Build buffered writer for export | 68 | 6 |
8,078 | < T > List < ExportContainer > extractExportContainers ( final T t , final IClassContainer classContainer ) { final List < ExportContainer > exports = new ArrayList <> ( ) ; // Using only SIMPLE values containers classContainer . getFormatSupported ( format ) . forEach ( ( k , v ) -> { try { k . setAccessible ( true ) ; final String exportFieldName = v . getExportName ( ) ; final Object exportFieldValue = k . get ( t ) ; exports . add ( buildContainer ( exportFieldName , exportFieldValue , v . getType ( ) ) ) ; k . setAccessible ( false ) ; } catch ( Exception ex ) { logger . warning ( ex . getMessage ( ) ) ; } } ) ; return exports ; } | Generates class export field - value map | 166 | 8 |
8,079 | < T > boolean isExportEntityInvalid ( final List < T > t ) { return ( BasicCollectionUtils . isEmpty ( t ) || isExportEntityInvalid ( t . get ( 0 ) ) ) ; } | Validate export arguments | 45 | 4 |
8,080 | public static ExpressionSimplifier lambdaCalculus ( ) { List < ExpressionReplacementRule > rules = Lists . newArrayList ( ) ; rules . add ( new LambdaApplicationReplacementRule ( ) ) ; rules . add ( new VariableCanonicalizationReplacementRule ( ) ) ; return new ExpressionSimplifier ( rules ) ; } | Default simplifier for lambda calculus expressions . This simplifier performs beta reduction of lambda expressions and canonicalizes variable names . | 72 | 23 |
8,081 | public static boolean is_email ( String email , boolean checkDNS ) throws DNSLookupException { return ( is_email_verbose ( email , checkDNS ) . getState ( ) == GeneralState . OK ) ; } | Checks the syntax of an email address . | 49 | 9 |
8,082 | private static String replaceCharAt ( String s , int pos , char c ) { return s . substring ( 0 , pos ) + c + s . substring ( pos + 1 ) ; } | Replaces a char in a String | 41 | 7 |
8,083 | public void updateOutsideEntry ( int spanStart , int spanEnd , double [ ] values , Factor factor , VariableNumMap var ) { if ( sumProduct ) { updateEntrySumProduct ( outsideChart [ spanStart ] [ spanEnd ] , values , factor . coerceToDiscrete ( ) . getWeights ( ) , var . getOnlyVariableNum ( ) ) ; } else { updateEntryMaxProduct ( outsideChart [ spanStart ] [ spanEnd ] , values , factor . coerceToDiscrete ( ) . getWeights ( ) , var . getOnlyVariableNum ( ) ) ; } } | Update an entry of the outside chart with a new production . Depending on the type of the chart this performs either a sum or max over productions of the same type in the same entry . | 127 | 37 |
8,084 | public Factor getInsideEntries ( int spanStart , int spanEnd ) { Tensor entries = new DenseTensor ( parentVar . getVariableNumsArray ( ) , parentVar . getVariableSizes ( ) , insideChart [ spanStart ] [ spanEnd ] ) ; return new TableFactor ( parentVar , entries ) ; } | Get the inside unnormalized probabilities over productions at a particular span in the tree . | 71 | 17 |
8,085 | public Factor getOutsideEntries ( int spanStart , int spanEnd ) { Tensor entries = new DenseTensor ( parentVar . getVariableNumsArray ( ) , parentVar . getVariableSizes ( ) , outsideChart [ spanStart ] [ spanEnd ] ) ; return new TableFactor ( parentVar , entries ) ; } | Get the outside unnormalized probabilities over productions at a particular span in the tree . | 71 | 17 |
8,086 | public Factor getMarginalEntries ( int spanStart , int spanEnd ) { return getOutsideEntries ( spanStart , spanEnd ) . product ( getInsideEntries ( spanStart , spanEnd ) ) ; } | Get the marginal unnormalized probabilities over productions at a particular node in the tree . | 45 | 17 |
8,087 | public CfgParseTree getBestParseTree ( ) { Factor rootMarginal = getMarginalEntries ( 0 , chartSize ( ) - 1 ) ; Assignment bestAssignment = rootMarginal . getMostLikelyAssignments ( 1 ) . get ( 0 ) ; return getBestParseTree ( bestAssignment . getOnlyValue ( ) ) ; } | Gets the best parse tree spanning the entire sentence . | 76 | 11 |
8,088 | public CfgParseTree getBestParseTreeWithSpan ( Object root , int spanStart , int spanEnd ) { Preconditions . checkState ( ! sumProduct ) ; Assignment rootAssignment = parentVar . outcomeArrayToAssignment ( root ) ; int rootNonterminalNum = parentVar . assignmentToIntArray ( rootAssignment ) [ 0 ] ; double prob = insideChart [ spanStart ] [ spanEnd ] [ rootNonterminalNum ] * outsideChart [ spanStart ] [ spanEnd ] [ rootNonterminalNum ] ; if ( prob == 0.0 ) { return null ; } int splitInd = splitBackpointers [ spanStart ] [ spanEnd ] [ rootNonterminalNum ] ; if ( splitInd < 0 ) { long terminalKey = backpointers [ spanStart ] [ spanEnd ] [ rootNonterminalNum ] ; int positiveSplitInd = ( - 1 * splitInd ) - 1 ; int terminalSpanStart = positiveSplitInd / numTerminals ; int terminalSpanEnd = positiveSplitInd % numTerminals ; // This is a really sucky way to transform the keys back to objects. VariableNumMap vars = parentVar . union ( ruleTypeVar ) ; int [ ] dimKey = TableFactor . zero ( vars ) . getWeights ( ) . keyNumToDimKey ( terminalKey ) ; Assignment a = vars . intArrayToAssignment ( dimKey ) ; Object ruleType = a . getValue ( ruleTypeVar . getOnlyVariableNum ( ) ) ; List < Object > terminalList = Lists . newArrayList ( ) ; terminalList . addAll ( terminals . subList ( terminalSpanStart , terminalSpanEnd + 1 ) ) ; return new CfgParseTree ( root , ruleType , terminalList , prob , spanStart , spanEnd ) ; } else { long binaryRuleKey = backpointers [ spanStart ] [ spanEnd ] [ rootNonterminalNum ] ; int [ ] binaryRuleComponents = binaryRuleDistribution . coerceToDiscrete ( ) . getWeights ( ) . keyNumToDimKey ( binaryRuleKey ) ; Assignment best = binaryRuleDistribution . getVars ( ) . intArrayToAssignment ( binaryRuleComponents ) ; Object leftRoot = best . getValue ( leftVar . getOnlyVariableNum ( ) ) ; Object rightRoot = best . getValue ( rightVar . getOnlyVariableNum ( ) ) ; Object ruleType = best . getValue ( ruleTypeVar . 
getOnlyVariableNum ( ) ) ; Preconditions . checkArgument ( spanStart + splitInd != spanEnd , "CFG parse decoding error: %s %s %s" , spanStart , spanEnd , splitInd ) ; CfgParseTree leftTree = getBestParseTreeWithSpan ( leftRoot , spanStart , spanStart + splitInd ) ; CfgParseTree rightTree = getBestParseTreeWithSpan ( rightRoot , spanStart + splitInd + 1 , spanEnd ) ; Preconditions . checkState ( leftTree != null ) ; Preconditions . checkState ( rightTree != null ) ; return new CfgParseTree ( root , ruleType , leftTree , rightTree , prob ) ; } } | If this tree contains max - marginals recover the best parse subtree for a given symbol with the specified span . | 697 | 23 |
8,089 | public int get ( long idx ) { if ( idx < 0 || idx >= size ) { return 0 ; } return elements [ SafeCast . safeLongToInt ( idx + start ) ] ; } | Gets the idx th entry in the vector . | 45 | 11 |
8,090 | protected Element findTag ( String tagName , Element element ) { Node result = element . getFirstChild ( ) ; while ( result != null ) { if ( result instanceof Element && ( tagName . equals ( ( ( Element ) result ) . getNodeName ( ) ) || tagName . equals ( ( ( Element ) result ) . getLocalName ( ) ) ) ) { break ; } result = result . getNextSibling ( ) ; } return ( Element ) result ; } | Find the child element whose tag matches the specified tag name . | 101 | 12 |
8,091 | protected NodeList getTagChildren ( String tagName , Element element ) { return element . getNamespaceURI ( ) == null ? element . getElementsByTagName ( tagName ) : element . getElementsByTagNameNS ( element . getNamespaceURI ( ) , tagName ) ; } | Returns the children under the specified tag . Compensates for namespace usage . | 64 | 15 |
8,092 | protected void addProperties ( Element element , BeanDefinitionBuilder builder ) { NamedNodeMap attributes = element . getAttributes ( ) ; for ( int i = 0 ; i < attributes . getLength ( ) ; i ++ ) { Node node = attributes . item ( i ) ; String attrName = getNodeName ( node ) ; attrName = "class" . equals ( attrName ) ? "clazz" : attrName ; builder . addPropertyValue ( attrName , node . getNodeValue ( ) ) ; } } | Adds all attributes of the specified elements as properties in the current builder . | 114 | 14 |
8,093 | protected String getNodeName ( Node node ) { String result = node . getLocalName ( ) ; return result == null ? node . getNodeName ( ) : result ; } | Returns the node name . First tries local name . If this is null returns instead the full node name . | 37 | 21 |
8,094 | protected Object fromXml ( String xml , String tagName ) throws Exception { Document document = XMLUtil . parseXMLFromString ( xml ) ; NodeList nodeList = document . getElementsByTagName ( tagName ) ; if ( nodeList == null || nodeList . getLength ( ) != 1 ) { throw new DOMException ( DOMException . NOT_FOUND_ERR , "Top level tag '" + tagName + "' was not found." ) ; } Element element = ( Element ) nodeList . item ( 0 ) ; Class < ? > beanClass = getBeanClass ( element ) ; BeanDefinitionBuilder builder = BeanDefinitionBuilder . rootBeanDefinition ( beanClass ) ; doParse ( element , builder ) ; DefaultListableBeanFactory factory = new DefaultListableBeanFactory ( ) ; factory . setParentBeanFactory ( SpringUtil . getAppContext ( ) ) ; AbstractBeanDefinition beanDefinition = builder . getBeanDefinition ( ) ; factory . registerBeanDefinition ( tagName , beanDefinition ) ; return factory . getBean ( tagName ) ; } | Parses an xml extension from an xml string . | 237 | 11 |
8,095 | protected String getResourcePath ( ParserContext parserContext ) { if ( parserContext != null ) { try { Resource resource = parserContext . getReaderContext ( ) . getResource ( ) ; return resource == null ? null : resource . getURL ( ) . getPath ( ) ; } catch ( IOException e ) { } } return null ; } | Return the path of the resource being parsed . | 73 | 9 |
8,096 | protected void removeAction ( ) { component . removeEventListener ( eventName , this ) ; if ( component . getAttribute ( attrName ) == this ) { component . removeAttribute ( attrName ) ; } } | Remove this listener from its associated component . | 46 | 8 |
8,097 | public boolean isHelpSetFile ( String fileName ) { if ( helpSetFilter == null ) { helpSetFilter = new WildcardFileFilter ( helpSetPattern ) ; } return helpSetFilter . accept ( new File ( fileName ) ) ; } | Returns true if the file name matches the pattern specified for the main help set file . | 53 | 17 |
8,098 | public IResourceIterator load ( String archiveName ) throws Exception { File file = new File ( archiveName ) ; if ( file . isDirectory ( ) ) { return new DirectoryIterator ( file ) ; } return iteratorClass . getConstructor ( String . class ) . newInstance ( archiveName ) ; } | Returns a resource iterator instance for the given archive name . | 63 | 11 |
8,099 | public void setUrl ( String url ) { this . url = url ; if ( child != null ) { child . destroy ( ) ; child = null ; } if ( url . startsWith ( "http" ) || ! url . endsWith ( ".fsp" ) ) { child = new Iframe ( ) ; ( ( Iframe ) child ) . setSrc ( url ) ; } else { child = new Import ( ) ; ( ( Import ) child ) . setSrc ( url ) ; } fullSize ( child ) ; root . addChild ( child ) ; } | Sets the URL of the content to be retrieved . If the URL starts with http it is fetched into an iframe . Otherwise an include component is created and used to fetch the content . | 121 | 39 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.