idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
148,000
protected void markStatementsForInsertion ( StatementDocument currentDocument , List < Statement > addStatements ) { for ( Statement statement : addStatements ) { addStatement ( statement , true ) ; } for ( StatementGroup sg : currentDocument . getStatementGroups ( ) ) { if ( this . toKeep . containsKey ( sg . getProperty ( ) ) ) { for ( Statement statement : sg ) { if ( ! this . toDelete . contains ( statement . getStatementId ( ) ) ) { addStatement ( statement , false ) ; } } } } }
Marks a given list of statements for insertion into the current document . Inserted statements can have an id if they should update an existing statement or use an empty string as id if they should be added . The method removes duplicates and avoids unnecessary modifications by checking the current content of the given document before marking statements for being written .
122
66
148,001
protected void addStatement ( Statement statement , boolean isNew ) { PropertyIdValue pid = statement . getMainSnak ( ) . getPropertyId ( ) ; // This code maintains the following properties: // (1) the toKeep structure does not contain two statements with the // same statement id // (2) the toKeep structure does not contain two statements that can // be merged if ( this . toKeep . containsKey ( pid ) ) { List < StatementWithUpdate > statements = this . toKeep . get ( pid ) ; for ( int i = 0 ; i < statements . size ( ) ; i ++ ) { Statement currentStatement = statements . get ( i ) . statement ; boolean currentIsNew = statements . get ( i ) . write ; if ( ! "" . equals ( currentStatement . getStatementId ( ) ) && currentStatement . getStatementId ( ) . equals ( statement . getStatementId ( ) ) ) { // Same, non-empty id: ignore existing statement as if // deleted return ; } Statement newStatement = mergeStatements ( statement , currentStatement ) ; if ( newStatement != null ) { boolean writeNewStatement = ( isNew || ! newStatement . equals ( statement ) ) && ( currentIsNew || ! newStatement . equals ( currentStatement ) ) ; // noWrite: (newS == statement && !isNew) // || (newS == cur && !curIsNew) // Write: (newS != statement || isNew ) // && (newS != cur || curIsNew) statements . set ( i , new StatementWithUpdate ( newStatement , writeNewStatement ) ) ; // Impossible with default merge code: // Kept here for future extensions that may choose to not // reuse this id. if ( ! "" . equals ( statement . getStatementId ( ) ) && ! newStatement . getStatementId ( ) . equals ( statement . getStatementId ( ) ) ) { this . toDelete . add ( statement . getStatementId ( ) ) ; } if ( ! "" . equals ( currentStatement . getStatementId ( ) ) && ! newStatement . getStatementId ( ) . equals ( currentStatement . getStatementId ( ) ) ) { this . toDelete . add ( currentStatement . getStatementId ( ) ) ; } return ; } } statements . add ( new StatementWithUpdate ( statement , isNew ) ) ; } else { List < StatementWithUpdate > statements = new ArrayList <> ( ) ; statements . add ( new StatementWithUpdate ( statement , isNew ) ) ; this . toKeep . put ( pid , statements ) ; } }
Adds one statement to the list of statements to be kept possibly merging it with other statements to be kept if possible . When two existing statements are merged one of them will be updated and the other will be marked for deletion .
546
44
148,002
protected List < Reference > mergeReferences ( List < ? extends Reference > references1 , List < ? extends Reference > references2 ) { List < Reference > result = new ArrayList <> ( ) ; for ( Reference reference : references1 ) { addBestReferenceToList ( reference , result ) ; } for ( Reference reference : references2 ) { addBestReferenceToList ( reference , result ) ; } return result ; }
Merges two lists of references eliminating duplicates in the process .
87
13
148,003
protected boolean equivalentClaims ( Claim claim1 , Claim claim2 ) { return claim1 . getMainSnak ( ) . equals ( claim2 . getMainSnak ( ) ) && isSameSnakSet ( claim1 . getAllQualifiers ( ) , claim2 . getAllQualifiers ( ) ) ; }
Checks if two claims are equivalent in the sense that they have the same main snak and the same qualifiers but possibly in a different order .
67
29
148,004
protected boolean isSameSnakSet ( Iterator < Snak > snaks1 , Iterator < Snak > snaks2 ) { ArrayList < Snak > snakList1 = new ArrayList <> ( 5 ) ; while ( snaks1 . hasNext ( ) ) { snakList1 . add ( snaks1 . next ( ) ) ; } int snakCount2 = 0 ; while ( snaks2 . hasNext ( ) ) { snakCount2 ++ ; Snak snak2 = snaks2 . next ( ) ; boolean found = false ; for ( int i = 0 ; i < snakList1 . size ( ) ; i ++ ) { if ( snak2 . equals ( snakList1 . get ( i ) ) ) { snakList1 . set ( i , null ) ; found = true ; break ; } } if ( ! found ) { return false ; } } return snakCount2 == snakList1 . size ( ) ; }
Compares two sets of snaks given by iterators . The method is optimised for short lists of snaks as they are typically found in claims and references .
212
33
148,005
protected long getRevisionIdFromResponse ( JsonNode response ) throws JsonMappingException { if ( response == null ) { throw new JsonMappingException ( "API response is null" ) ; } JsonNode entity = null ; if ( response . has ( "entity" ) ) { entity = response . path ( "entity" ) ; } else if ( response . has ( "pageinfo" ) ) { entity = response . path ( "pageinfo" ) ; } if ( entity != null && entity . has ( "lastrevid" ) ) { return entity . path ( "lastrevid" ) . asLong ( ) ; } throw new JsonMappingException ( "The last revision id could not be found in API response" ) ; }
Extracts the last revision id from the JSON response returned by the API after an edit
163
18
148,006
protected < T > T getDatamodelObjectFromResponse ( JsonNode response , List < String > path , Class < T > targetClass ) throws JsonProcessingException { if ( response == null ) { throw new JsonMappingException ( "The API response is null" ) ; } JsonNode currentNode = response ; for ( String field : path ) { if ( ! currentNode . has ( field ) ) { throw new JsonMappingException ( "Field '" + field + "' not found in API response." ) ; } currentNode = currentNode . path ( field ) ; } return mapper . treeToValue ( currentNode , targetClass ) ; }
Extracts a particular data model instance from a JSON response returned by MediaWiki . The location is described by a list of successive fields to use from the root to the target object .
145
37
148,007
public static void findSomeStringProperties ( ApiConnection connection ) throws MediaWikiApiErrorException , IOException { WikibaseDataFetcher wbdf = new WikibaseDataFetcher ( connection , siteIri ) ; wbdf . getFilter ( ) . excludeAllProperties ( ) ; wbdf . getFilter ( ) . setLanguageFilter ( Collections . singleton ( "en" ) ) ; ArrayList < PropertyIdValue > stringProperties = new ArrayList <> ( ) ; System . out . println ( "*** Trying to find string properties for the example ... " ) ; int propertyNumber = 1 ; while ( stringProperties . size ( ) < 5 ) { ArrayList < String > fetchProperties = new ArrayList <> ( ) ; for ( int i = propertyNumber ; i < propertyNumber + 10 ; i ++ ) { fetchProperties . add ( "P" + i ) ; } propertyNumber += 10 ; Map < String , EntityDocument > results = wbdf . getEntityDocuments ( fetchProperties ) ; for ( EntityDocument ed : results . values ( ) ) { PropertyDocument pd = ( PropertyDocument ) ed ; if ( DatatypeIdValue . DT_STRING . equals ( pd . getDatatype ( ) . getIri ( ) ) && pd . getLabels ( ) . containsKey ( "en" ) ) { stringProperties . add ( pd . getEntityId ( ) ) ; System . out . println ( "* Found string property " + pd . getEntityId ( ) . getId ( ) + " (" + pd . getLabels ( ) . get ( "en" ) + ")" ) ; } } } stringProperty1 = stringProperties . get ( 0 ) ; stringProperty2 = stringProperties . get ( 1 ) ; stringProperty3 = stringProperties . get ( 2 ) ; stringProperty4 = stringProperties . get ( 3 ) ; stringProperty5 = stringProperties . get ( 4 ) ; System . out . println ( "*** Done." ) ; }
Finds properties of datatype string on test . wikidata . org . Since the test site changes all the time we cannot hardcode a specific property here . Instead we just look through all properties starting from P1 to find the first few properties of type string that have an English label . These properties are used for testing in this code .
448
70
148,008
public long findPosition ( long nOccurrence ) { updateCount ( ) ; if ( nOccurrence <= 0 ) { return RankedBitVector . NOT_FOUND ; } int findPos = ( int ) ( nOccurrence / this . blockSize ) ; if ( findPos < this . positionArray . length ) { long pos0 = this . positionArray [ findPos ] ; long leftOccurrences = nOccurrence - ( findPos * this . blockSize ) ; if ( leftOccurrences == 0 ) { return pos0 ; } for ( long index = pos0 + 1 ; index < this . bitVector . size ( ) ; index ++ ) { if ( this . bitVector . getBit ( index ) == this . bit ) { leftOccurrences -- ; } if ( leftOccurrences == 0 ) { return index ; } } } return RankedBitVector . NOT_FOUND ; }
Returns the position for a given number of occurrences or NOT_FOUND if this value is not found .
192
21
148,009
void writeNoValueRestriction ( RdfWriter rdfWriter , String propertyUri , String rangeUri , String subject ) throws RDFHandlerException { Resource bnodeSome = rdfWriter . getFreshBNode ( ) ; rdfWriter . writeTripleValueObject ( subject , RdfWriter . RDF_TYPE , RdfWriter . OWL_CLASS ) ; rdfWriter . writeTripleValueObject ( subject , RdfWriter . OWL_COMPLEMENT_OF , bnodeSome ) ; rdfWriter . writeTripleValueObject ( bnodeSome , RdfWriter . RDF_TYPE , RdfWriter . OWL_RESTRICTION ) ; rdfWriter . writeTripleUriObject ( bnodeSome , RdfWriter . OWL_ON_PROPERTY , propertyUri ) ; rdfWriter . writeTripleUriObject ( bnodeSome , RdfWriter . OWL_SOME_VALUES_FROM , rangeUri ) ; }
Writes no - value restriction .
215
7
148,010
public StatementBuilder withQualifierValue ( PropertyIdValue propertyIdValue , Value value ) { withQualifier ( factory . getValueSnak ( propertyIdValue , value ) ) ; return getThis ( ) ; }
Adds a qualifier with the given property and value to the constructed statement .
45
14
148,011
protected void countStatements ( UsageStatistics usageStatistics , StatementDocument statementDocument ) { // Count Statement data: for ( StatementGroup sg : statementDocument . getStatementGroups ( ) ) { // Count Statements: usageStatistics . countStatements += sg . size ( ) ; // Count uses of properties in Statements: countPropertyMain ( usageStatistics , sg . getProperty ( ) , sg . size ( ) ) ; for ( Statement s : sg ) { for ( SnakGroup q : s . getQualifiers ( ) ) { countPropertyQualifier ( usageStatistics , q . getProperty ( ) , q . size ( ) ) ; } for ( Reference r : s . getReferences ( ) ) { usageStatistics . countReferencedStatements ++ ; for ( SnakGroup snakGroup : r . getSnakGroups ( ) ) { countPropertyReference ( usageStatistics , snakGroup . getProperty ( ) , snakGroup . size ( ) ) ; } } } } }
Count the statements and property uses of an item or property document .
211
13
148,012
private void writeFinalResults ( ) { // Print a final report: printStatus ( ) ; // Store property counts in files: writePropertyStatisticsToFile ( this . itemStatistics , "item-property-counts.csv" ) ; writePropertyStatisticsToFile ( this . propertyStatistics , "property-property-counts.csv" ) ; // Store site link statistics in file: try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( "site-link-counts.csv" ) ) ) { out . println ( "Site key,Site links" ) ; for ( Entry < String , Integer > entry : this . siteLinkStatistics . entrySet ( ) ) { out . println ( entry . getKey ( ) + "," + entry . getValue ( ) ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } // Store term statistics in file: writeTermStatisticsToFile ( this . itemStatistics , "item-term-counts.csv" ) ; writeTermStatisticsToFile ( this . propertyStatistics , "property-term-counts.csv" ) ; }
Prints and stores final result of the processing . This should be called after finishing the processing of a dump . It will print the statistics gathered during processing and it will write a CSV file with usage counts for every property .
246
44
148,013
private void writePropertyStatisticsToFile ( UsageStatistics usageStatistics , String fileName ) { try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( fileName ) ) ) { out . println ( "Property id,in statements,in qualifiers,in references,total" ) ; for ( Entry < PropertyIdValue , Integer > entry : usageStatistics . propertyCountsMain . entrySet ( ) ) { int qCount = usageStatistics . propertyCountsQualifier . get ( entry . getKey ( ) ) ; int rCount = usageStatistics . propertyCountsReferences . get ( entry . getKey ( ) ) ; int total = entry . getValue ( ) + qCount + rCount ; out . println ( entry . getKey ( ) . getId ( ) + "," + entry . getValue ( ) + "," + qCount + "," + rCount + "," + total ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } }
Stores the gathered usage statistics about property uses to a CSV file .
218
14
148,014
private void writeTermStatisticsToFile ( UsageStatistics usageStatistics , String fileName ) { // Make sure all keys are present in label count map: for ( String key : usageStatistics . aliasCounts . keySet ( ) ) { countKey ( usageStatistics . labelCounts , key , 0 ) ; } for ( String key : usageStatistics . descriptionCounts . keySet ( ) ) { countKey ( usageStatistics . labelCounts , key , 0 ) ; } try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( fileName ) ) ) { out . println ( "Language,Labels,Descriptions,Aliases" ) ; for ( Entry < String , Integer > entry : usageStatistics . labelCounts . entrySet ( ) ) { countKey ( usageStatistics . aliasCounts , entry . getKey ( ) , 0 ) ; int aCount = usageStatistics . aliasCounts . get ( entry . getKey ( ) ) ; countKey ( usageStatistics . descriptionCounts , entry . getKey ( ) , 0 ) ; int dCount = usageStatistics . descriptionCounts . get ( entry . getKey ( ) ) ; out . println ( entry . getKey ( ) + "," + entry . getValue ( ) + "," + dCount + "," + aCount ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } }
Stores the gathered usage statistics about term uses by language to a CSV file .
304
16
148,015
private void printStatistics ( UsageStatistics usageStatistics , String entityLabel ) { System . out . println ( "Processed " + usageStatistics . count + " " + entityLabel + ":" ) ; System . out . println ( " * Labels: " + usageStatistics . countLabels + ", descriptions: " + usageStatistics . countDescriptions + ", aliases: " + usageStatistics . countAliases ) ; System . out . println ( " * Statements: " + usageStatistics . countStatements + ", with references: " + usageStatistics . countReferencedStatements ) ; }
Prints a report about the statistics stored in the given data object .
124
14
148,016
private void countPropertyMain ( UsageStatistics usageStatistics , PropertyIdValue property , int count ) { addPropertyCounters ( usageStatistics , property ) ; usageStatistics . propertyCountsMain . put ( property , usageStatistics . propertyCountsMain . get ( property ) + count ) ; }
Counts additional occurrences of a property as the main property of statements .
60
14
148,017
private void addPropertyCounters ( UsageStatistics usageStatistics , PropertyIdValue property ) { if ( ! usageStatistics . propertyCountsMain . containsKey ( property ) ) { usageStatistics . propertyCountsMain . put ( property , 0 ) ; usageStatistics . propertyCountsQualifier . put ( property , 0 ) ; usageStatistics . propertyCountsReferences . put ( property , 0 ) ; } }
Initializes the counters for a property to zero if not done yet .
84
14
148,018
private void countKey ( Map < String , Integer > map , String key , int count ) { if ( map . containsKey ( key ) ) { map . put ( key , map . get ( key ) + count ) ; } else { map . put ( key , count ) ; } }
Helper method that stores in a hash map how often a certain key occurs . If the key has not been encountered yet a new entry is created for it in the map . Otherwise the existing value for the key is incremented .
61
45
148,019
public void addSite ( String siteKey ) { ValueMap gv = new ValueMap ( siteKey ) ; if ( ! this . valueMaps . contains ( gv ) ) { this . valueMaps . add ( gv ) ; } }
Registers a new site for specific data collection . If null is used as a site key then all data is collected .
51
24
148,020
private void countCoordinateStatement ( Statement statement , ItemDocument itemDocument ) { Value value = statement . getValue ( ) ; if ( ! ( value instanceof GlobeCoordinatesValue ) ) { return ; } GlobeCoordinatesValue coordsValue = ( GlobeCoordinatesValue ) value ; if ( ! this . globe . equals ( ( coordsValue . getGlobe ( ) ) ) ) { return ; } int xCoord = ( int ) ( ( ( coordsValue . getLongitude ( ) + 180.0 ) / 360.0 ) * this . width ) % this . width ; int yCoord = ( int ) ( ( ( coordsValue . getLatitude ( ) + 90.0 ) / 180.0 ) * this . height ) % this . height ; if ( xCoord < 0 || yCoord < 0 || xCoord >= this . width || yCoord >= this . height ) { System . out . println ( "Dropping out-of-range coordinate: " + coordsValue ) ; return ; } countCoordinates ( xCoord , yCoord , itemDocument ) ; this . count += 1 ; if ( this . count % 100000 == 0 ) { reportProgress ( ) ; writeImages ( ) ; } }
Counts the coordinates stored in a single statement for the relevant property if they are actually given and valid .
272
21
148,021
private void countCoordinates ( int xCoord , int yCoord , ItemDocument itemDocument ) { for ( String siteKey : itemDocument . getSiteLinks ( ) . keySet ( ) ) { Integer count = this . siteCounts . get ( siteKey ) ; if ( count == null ) { this . siteCounts . put ( siteKey , 1 ) ; } else { this . siteCounts . put ( siteKey , count + 1 ) ; } } for ( ValueMap vm : this . valueMaps ) { vm . countCoordinates ( xCoord , yCoord , itemDocument ) ; } }
Counts a single pair of coordinates in all datasets .
134
11
148,022
private void writeImages ( ) { for ( ValueMap gv : this . valueMaps ) { gv . writeImage ( ) ; } try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( "map-site-count.csv" ) ) ) { out . println ( "Site key,Number of geo items" ) ; out . println ( "wikidata total," + this . count ) ; for ( Entry < String , Integer > entry : this . siteCounts . entrySet ( ) ) { out . println ( entry . getKey ( ) + "," + entry . getValue ( ) ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } }
Writes image files for all data that was collected and the statistics file for all sites .
159
18
148,023
private int getColor ( int value ) { if ( value == 0 ) { return 0 ; } double scale = Math . log10 ( value ) / Math . log10 ( this . topValue ) ; double lengthScale = Math . min ( 1.0 , scale ) * ( colors . length - 1 ) ; int index = 1 + ( int ) lengthScale ; if ( index == colors . length ) { index -- ; } double partScale = lengthScale - ( index - 1 ) ; int r = ( int ) ( colors [ index - 1 ] [ 0 ] + partScale * ( colors [ index ] [ 0 ] - colors [ index - 1 ] [ 0 ] ) ) ; int g = ( int ) ( colors [ index - 1 ] [ 1 ] + partScale * ( colors [ index ] [ 1 ] - colors [ index - 1 ] [ 1 ] ) ) ; int b = ( int ) ( colors [ index - 1 ] [ 2 ] + partScale * ( colors [ index ] [ 2 ] - colors [ index - 1 ] [ 2 ] ) ) ; r = Math . min ( 255 , r ) ; b = Math . min ( 255 , b ) ; g = Math . min ( 255 , g ) ; return ( r << 16 ) | ( g << 8 ) | b ; }
Returns a color for a given absolute number that is to be shown on the map .
274
17
148,024
public ReferenceBuilder withPropertyValue ( PropertyIdValue propertyIdValue , Value value ) { getSnakList ( propertyIdValue ) . add ( factory . getValueSnak ( propertyIdValue , value ) ) ; return getThis ( ) ; }
Adds the given property and value to the constructed reference .
52
11
148,025
private static void close ( Closeable closeable ) { if ( closeable != null ) { try { closeable . close ( ) ; } catch ( IOException ignored ) { logger . error ( "Failed to close output stream: " + ignored . getMessage ( ) ) ; } } }
Closes a Closeable and swallows any exceptions that might occur in the process .
61
17
148,026
public static void configureLogging ( ) { // Create the appender that will write log messages to the console. ConsoleAppender consoleAppender = new ConsoleAppender ( ) ; // Define the pattern of log messages. // Insert the string "%c{1}:%L" to also show class name and line. String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n" ; consoleAppender . setLayout ( new PatternLayout ( pattern ) ) ; // Change to Level.ERROR for fewer messages: consoleAppender . setThreshold ( Level . INFO ) ; consoleAppender . activateOptions ( ) ; Logger . getRootLogger ( ) . addAppender ( consoleAppender ) ; }
Defines how messages should be logged . This method can be modified to restrict the logging messages that are shown on the console or to change their formatting . See the documentation of Log4J for details on how to do this .
162
45
148,027
public static void processEntitiesFromWikidataDump ( EntityDocumentProcessor entityDocumentProcessor ) { // Controller object for processing dumps: DumpProcessingController dumpProcessingController = new DumpProcessingController ( "wikidatawiki" ) ; dumpProcessingController . setOfflineMode ( OFFLINE_MODE ) ; // // Optional: Use another download directory: // dumpProcessingController.setDownloadDirectory(System.getProperty("user.dir")); // Should we process historic revisions or only current ones? boolean onlyCurrentRevisions ; switch ( DUMP_FILE_MODE ) { case ALL_REVS : case ALL_REVS_WITH_DAILIES : onlyCurrentRevisions = false ; break ; case CURRENT_REVS : case CURRENT_REVS_WITH_DAILIES : case JSON : case JUST_ONE_DAILY_FOR_TEST : default : onlyCurrentRevisions = true ; } // Subscribe to the most recent entity documents of type wikibase item: dumpProcessingController . registerEntityDocumentProcessor ( entityDocumentProcessor , null , onlyCurrentRevisions ) ; // Also add a timer that reports some basic progress information: EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor ( TIMEOUT_SEC ) ; dumpProcessingController . registerEntityDocumentProcessor ( entityTimerProcessor , null , onlyCurrentRevisions ) ; MwDumpFile dumpFile = null ; try { // Start processing (may trigger downloads where needed): switch ( DUMP_FILE_MODE ) { case ALL_REVS : case CURRENT_REVS : dumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . FULL ) ; break ; case ALL_REVS_WITH_DAILIES : case CURRENT_REVS_WITH_DAILIES : MwDumpFile fullDumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . FULL ) ; MwDumpFile incrDumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . DAILY ) ; lastDumpFileName = fullDumpFile . getProjectName ( ) + "-" + incrDumpFile . getDateStamp ( ) + "." + fullDumpFile . getDateStamp ( ) ; dumpProcessingController . processAllRecentRevisionDumps ( ) ; break ; case JSON : dumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . JSON ) ; break ; case JUST_ONE_DAILY_FOR_TEST : dumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . DAILY ) ; break ; default : throw new RuntimeException ( "Unsupported dump processing type " + DUMP_FILE_MODE ) ; } if ( dumpFile != null ) { lastDumpFileName = dumpFile . getProjectName ( ) + "-" + dumpFile . getDateStamp ( ) ; dumpProcessingController . processDump ( dumpFile ) ; } } catch ( TimeoutException e ) { // The timer caused a time out. Continue and finish normally. } // Print final timer results: entityTimerProcessor . close ( ) ; }
Processes all entities in a Wikidata dump using the given entity processor . By default the most recent JSON dump will be used . In offline mode only the most recent previously downloaded file is considered .
698
40
148,028
void addValue ( V value , Resource resource ) { this . valueQueue . add ( value ) ; this . valueSubjectQueue . add ( resource ) ; }
Adds the given value to the list of values that should still be serialized . The given RDF resource will be used as a subject .
33
28
148,029
protected void processAliases ( List < MonolingualTextValue > addAliases , List < MonolingualTextValue > deleteAliases ) { for ( MonolingualTextValue val : addAliases ) { addAlias ( val ) ; } for ( MonolingualTextValue val : deleteAliases ) { deleteAlias ( val ) ; } }
Processes changes on aliases updating the planned state of the item .
74
13
148,030
protected void deleteAlias ( MonolingualTextValue alias ) { String lang = alias . getLanguageCode ( ) ; AliasesWithUpdate currentAliases = newAliases . get ( lang ) ; if ( currentAliases != null ) { currentAliases . aliases . remove ( alias ) ; currentAliases . deleted . add ( alias ) ; currentAliases . write = true ; } }
Deletes an individual alias
82
5
148,031
protected void addAlias ( MonolingualTextValue alias ) { String lang = alias . getLanguageCode ( ) ; AliasesWithUpdate currentAliasesUpdate = newAliases . get ( lang ) ; NameWithUpdate currentLabel = newLabels . get ( lang ) ; // If there isn't any label for that language, put the alias there if ( currentLabel == null ) { newLabels . put ( lang , new NameWithUpdate ( alias , true ) ) ; // If the new alias is equal to the current label, skip it } else if ( ! currentLabel . value . equals ( alias ) ) { if ( currentAliasesUpdate == null ) { currentAliasesUpdate = new AliasesWithUpdate ( new ArrayList < MonolingualTextValue > ( ) , true ) ; } List < MonolingualTextValue > currentAliases = currentAliasesUpdate . aliases ; if ( ! currentAliases . contains ( alias ) ) { currentAliases . add ( alias ) ; currentAliasesUpdate . added . add ( alias ) ; currentAliasesUpdate . write = true ; } newAliases . put ( lang , currentAliasesUpdate ) ; } }
Adds an individual alias . It will be merged with the current list of aliases or added as a label if there is no label for this item in this language yet .
246
33
148,032
protected void processDescriptions ( List < MonolingualTextValue > descriptions ) { for ( MonolingualTextValue description : descriptions ) { NameWithUpdate currentValue = newDescriptions . get ( description . getLanguageCode ( ) ) ; // only mark the description as added if the value we are writing is different from the current one if ( currentValue == null || ! currentValue . value . equals ( description ) ) { newDescriptions . put ( description . getLanguageCode ( ) , new NameWithUpdate ( description , true ) ) ; } } }
Adds descriptions to the item .
119
6
148,033
protected void processLabels ( List < MonolingualTextValue > labels ) { for ( MonolingualTextValue label : labels ) { String lang = label . getLanguageCode ( ) ; NameWithUpdate currentValue = newLabels . get ( lang ) ; if ( currentValue == null || ! currentValue . value . equals ( label ) ) { newLabels . put ( lang , new NameWithUpdate ( label , true ) ) ; // Delete any alias that matches the new label AliasesWithUpdate currentAliases = newAliases . get ( lang ) ; if ( currentAliases != null && currentAliases . aliases . contains ( label ) ) { deleteAlias ( label ) ; } } } }
Adds labels to the item
149
5
148,034
@ JsonProperty ( "labels" ) @ JsonInclude ( Include . NON_EMPTY ) public Map < String , TermImpl > getLabelUpdates ( ) { return getMonolingualUpdatedValues ( newLabels ) ; }
Label accessor provided for JSON serialization only .
52
10
148,035
@ JsonProperty ( "descriptions" ) @ JsonInclude ( Include . NON_EMPTY ) public Map < String , TermImpl > getDescriptionUpdates ( ) { return getMonolingualUpdatedValues ( newDescriptions ) ; }
Description accessor provided for JSON serialization only .
54
10
148,036
@ JsonProperty ( "aliases" ) @ JsonInclude ( Include . NON_EMPTY ) public Map < String , List < TermImpl > > getAliasUpdates ( ) { Map < String , List < TermImpl > > updatedValues = new HashMap <> ( ) ; for ( Map . Entry < String , AliasesWithUpdate > entry : newAliases . entrySet ( ) ) { AliasesWithUpdate update = entry . getValue ( ) ; if ( ! update . write ) { continue ; } List < TermImpl > convertedAliases = new ArrayList <> ( ) ; for ( MonolingualTextValue alias : update . aliases ) { convertedAliases . add ( monolingualToJackson ( alias ) ) ; } updatedValues . put ( entry . getKey ( ) , convertedAliases ) ; } return updatedValues ; }
Alias accessor provided for JSON serialization only
182
9
148,037
protected Map < String , TermImpl > getMonolingualUpdatedValues ( Map < String , NameWithUpdate > updates ) { Map < String , TermImpl > updatedValues = new HashMap <> ( ) ; for ( NameWithUpdate update : updates . values ( ) ) { if ( ! update . write ) { continue ; } updatedValues . put ( update . value . getLanguageCode ( ) , monolingualToJackson ( update . value ) ) ; } return updatedValues ; }
Helper to format term updates as expected by the Wikibase API
102
13
148,038
protected RdfSerializer createRdfSerializer ( ) throws IOException { String outputDestinationFinal ; if ( this . outputDestination != null ) { outputDestinationFinal = this . outputDestination ; } else { outputDestinationFinal = "{PROJECT}" + this . taskName + "{DATE}" + ".nt" ; } OutputStream exportOutputStream = getOutputStream ( this . useStdOut , insertDumpInformation ( outputDestinationFinal ) , this . compressionType ) ; RdfSerializer serializer = new RdfSerializer ( RDFFormat . NTRIPLES , exportOutputStream , this . sites , PropertyRegister . getWikidataPropertyRegister ( ) ) ; serializer . setTasks ( this . tasks ) ; return serializer ; }
Creates a new RDF serializer based on the current configuration of this object .
167
17
148,039
private void setTasks ( String tasks ) { for ( String task : tasks . split ( "," ) ) { if ( KNOWN_TASKS . containsKey ( task ) ) { this . tasks |= KNOWN_TASKS . get ( task ) ; this . taskName += ( this . taskName . isEmpty ( ) ? "" : "-" ) + task ; } else { logger . warn ( "Unsupported RDF serialization task \"" + task + "\". Run without specifying any tasks for help." ) ; } } }
Sets the RDF serialization tasks based on the given string value .
116
15
148,040
void resizeArray ( int newArraySize ) { long [ ] newArray = new long [ newArraySize ] ; System . arraycopy ( this . arrayOfBits , 0 , newArray , 0 , Math . min ( this . arrayOfBits . length , newArraySize ) ) ; this . arrayOfBits = newArray ; }
Resizes the array that represents this bit vector .
73
10
148,041
public ItemDocument updateStatements ( ItemIdValue itemIdValue , List < Statement > addStatements , List < Statement > deleteStatements , String summary ) throws MediaWikiApiErrorException , IOException { ItemDocument currentDocument = ( ItemDocument ) this . wikibaseDataFetcher . getEntityDocument ( itemIdValue . getId ( ) ) ; return updateStatements ( currentDocument , addStatements , deleteStatements , summary ) ; }
Updates the statements of the item document identified by the given item id . The updates are computed with respect to the current data found online making sure that no redundant deletions or duplicate insertions happen . The references of duplicate statements will be merged .
97
49
148,042
public ItemDocument updateTermsStatements ( ItemIdValue itemIdValue , List < MonolingualTextValue > addLabels , List < MonolingualTextValue > addDescriptions , List < MonolingualTextValue > addAliases , List < MonolingualTextValue > deleteAliases , List < Statement > addStatements , List < Statement > deleteStatements , String summary ) throws MediaWikiApiErrorException , IOException { ItemDocument currentDocument = ( ItemDocument ) this . wikibaseDataFetcher . getEntityDocument ( itemIdValue . getId ( ) ) ; return updateTermsStatements ( currentDocument , addLabels , addDescriptions , addAliases , deleteAliases , addStatements , deleteStatements , summary ) ; }
Updates the terms and statements of the item document identified by the given item id . The updates are computed with respect to the current data found online making sure that no redundant deletions or duplicate insertions happen . The references of duplicate statements will be merged . The labels and aliases in a given language are kept distinct .
167
63
148,043
@ SuppressWarnings ( "unchecked" ) public < T extends TermedStatementDocument > T updateTermsStatements ( T currentDocument , List < MonolingualTextValue > addLabels , List < MonolingualTextValue > addDescriptions , List < MonolingualTextValue > addAliases , List < MonolingualTextValue > deleteAliases , List < Statement > addStatements , List < Statement > deleteStatements , String summary ) throws MediaWikiApiErrorException , IOException { TermStatementUpdate termStatementUpdate = new TermStatementUpdate ( currentDocument , addStatements , deleteStatements , addLabels , addDescriptions , addAliases , deleteAliases ) ; termStatementUpdate . setGuidGenerator ( guidGenerator ) ; return ( T ) termStatementUpdate . performEdit ( wbEditingAction , editAsBot , summary ) ; }
Updates the terms and statements of the current document . The updates are computed with respect to the current data in the document making sure that no redundant deletions or duplicate insertions happen . The references of duplicate statements will be merged . The labels and aliases in a given language are kept distinct .
192
58
148,044
public < T extends StatementDocument > void nullEdit ( ItemIdValue itemId ) throws IOException , MediaWikiApiErrorException { ItemDocument currentDocument = ( ItemDocument ) this . wikibaseDataFetcher . getEntityDocument ( itemId . getId ( ) ) ; nullEdit ( currentDocument ) ; }
Performs a null edit on an item . This has some effects on Wikibase such as refreshing the labels of the referred items in the UI .
68
30
148,045
public < T extends StatementDocument > void nullEdit ( PropertyIdValue propertyId ) throws IOException , MediaWikiApiErrorException { PropertyDocument currentDocument = ( PropertyDocument ) this . wikibaseDataFetcher . getEntityDocument ( propertyId . getId ( ) ) ; nullEdit ( currentDocument ) ; }
Performs a null edit on a property . This has some effects on Wikibase such as refreshing the labels of the referred items in the UI .
68
30
148,046
@ SuppressWarnings ( "unchecked" ) public < T extends StatementDocument > T nullEdit ( T currentDocument ) throws IOException , MediaWikiApiErrorException { StatementUpdate statementUpdate = new StatementUpdate ( currentDocument , Collections . < Statement > emptyList ( ) , Collections . < Statement > emptyList ( ) ) ; statementUpdate . setGuidGenerator ( guidGenerator ) ; return ( T ) this . wbEditingAction . wbEditEntity ( currentDocument . getEntityId ( ) . getId ( ) , null , null , null , statementUpdate . getJsonUpdateString ( ) , false , this . editAsBot , currentDocument . getRevisionId ( ) , null ) ; }
Performs a null edit on an entity . This has some effects on Wikibase such as refreshing the labels of the referred items in the UI .
155
30
148,047
public static void main ( String [ ] args ) throws IOException { ExampleHelpers . configureLogging ( ) ; JsonSerializationProcessor . printDocumentation ( ) ; JsonSerializationProcessor jsonSerializationProcessor = new JsonSerializationProcessor ( ) ; ExampleHelpers . processEntitiesFromWikidataDump ( jsonSerializationProcessor ) ; jsonSerializationProcessor . close ( ) ; }
Runs the example program .
91
6
148,048
public void close ( ) throws IOException { System . out . println ( "Serialized " + this . jsonSerializer . getEntityDocumentCount ( ) + " item documents to JSON file " + OUTPUT_FILE_NAME + "." ) ; this . jsonSerializer . close ( ) ; }
Closes the output . Should be called after the JSON serialization was finished .
63
16
148,049
private boolean includeDocument ( ItemDocument itemDocument ) { for ( StatementGroup sg : itemDocument . getStatementGroups ( ) ) { // "P19" is "place of birth" on Wikidata if ( ! "P19" . equals ( sg . getProperty ( ) . getId ( ) ) ) { continue ; } for ( Statement s : sg ) { if ( s . getMainSnak ( ) instanceof ValueSnak ) { Value v = s . getValue ( ) ; // "Q1731" is "Dresden" on Wikidata if ( v instanceof ItemIdValue && "Q1731" . equals ( ( ( ItemIdValue ) v ) . getId ( ) ) ) { return true ; } } } } return false ; }
Returns true if the given document should be included in the serialization .
168
14
148,050
public static String insertDumpInformation ( String pattern , String dateStamp , String project ) { if ( pattern == null ) { return null ; } else { return pattern . replace ( "{DATE}" , dateStamp ) . replace ( "{PROJECT}" , project ) ; } }
Inserts the information about the dateStamp of a dump and the project name into a pattern .
60
20
148,051
private List < DumpProcessingAction > handleArguments ( String [ ] args ) { CommandLine cmd ; CommandLineParser parser = new GnuParser ( ) ; try { cmd = parser . parse ( options , args ) ; } catch ( ParseException e ) { logger . error ( "Failed to parse arguments: " + e . getMessage ( ) ) ; return Collections . emptyList ( ) ; } // Stop processing if a help text is to be printed: if ( ( cmd . hasOption ( CMD_OPTION_HELP ) ) || ( args . length == 0 ) ) { return Collections . emptyList ( ) ; } List < DumpProcessingAction > configuration = new ArrayList <> ( ) ; handleGlobalArguments ( cmd ) ; if ( cmd . hasOption ( CMD_OPTION_ACTION ) ) { DumpProcessingAction action = handleActionArguments ( cmd ) ; if ( action != null ) { configuration . add ( action ) ; } } if ( cmd . hasOption ( CMD_OPTION_CONFIG_FILE ) ) { try { List < DumpProcessingAction > configFile = readConfigFile ( cmd . getOptionValue ( CMD_OPTION_CONFIG_FILE ) ) ; configuration . addAll ( configFile ) ; } catch ( IOException e ) { logger . error ( "Failed to read configuration file \"" + cmd . getOptionValue ( CMD_OPTION_CONFIG_FILE ) + "\": " + e . toString ( ) ) ; } } return configuration ; }
This function interprets the arguments of the main function . By doing this it will set flags for the dump generation . See in the help text for more specific information about the options .
334
36
148,052
private void handleGlobalArguments ( CommandLine cmd ) { if ( cmd . hasOption ( CMD_OPTION_DUMP_LOCATION ) ) { this . dumpDirectoryLocation = cmd . getOptionValue ( CMD_OPTION_DUMP_LOCATION ) ; } if ( cmd . hasOption ( CMD_OPTION_OFFLINE_MODE ) ) { this . offlineMode = true ; } if ( cmd . hasOption ( CMD_OPTION_QUIET ) ) { this . quiet = true ; } if ( cmd . hasOption ( CMD_OPTION_CREATE_REPORT ) ) { this . reportFilename = cmd . getOptionValue ( CMD_OPTION_CREATE_REPORT ) ; } if ( cmd . hasOption ( OPTION_FILTER_LANGUAGES ) ) { setLanguageFilters ( cmd . getOptionValue ( OPTION_FILTER_LANGUAGES ) ) ; } if ( cmd . hasOption ( OPTION_FILTER_SITES ) ) { setSiteFilters ( cmd . getOptionValue ( OPTION_FILTER_SITES ) ) ; } if ( cmd . hasOption ( OPTION_FILTER_PROPERTIES ) ) { setPropertyFilters ( cmd . getOptionValue ( OPTION_FILTER_PROPERTIES ) ) ; } if ( cmd . hasOption ( CMD_OPTION_LOCAL_DUMPFILE ) ) { this . inputDumpLocation = cmd . getOptionValue ( OPTION_LOCAL_DUMPFILE ) ; } }
Analyses the command - line arguments which are relevant for the serialization process in general . It fills out the class arguments with this data .
340
28
148,053
private void handleGlobalArguments ( Section section ) { for ( String key : section . keySet ( ) ) { switch ( key ) { case OPTION_OFFLINE_MODE : if ( section . get ( key ) . toLowerCase ( ) . equals ( "true" ) ) { this . offlineMode = true ; } break ; case OPTION_QUIET : if ( section . get ( key ) . toLowerCase ( ) . equals ( "true" ) ) { this . quiet = true ; } break ; case OPTION_CREATE_REPORT : this . reportFilename = section . get ( key ) ; break ; case OPTION_DUMP_LOCATION : this . dumpDirectoryLocation = section . get ( key ) ; break ; case OPTION_FILTER_LANGUAGES : setLanguageFilters ( section . get ( key ) ) ; break ; case OPTION_FILTER_SITES : setSiteFilters ( section . get ( key ) ) ; break ; case OPTION_FILTER_PROPERTIES : setPropertyFilters ( section . get ( key ) ) ; break ; case OPTION_LOCAL_DUMPFILE : this . inputDumpLocation = section . get ( key ) ; break ; default : logger . warn ( "Unrecognized option: " + key ) ; } } }
Analyses the content of the general section of an ini configuration file and fills out the class arguments with this data .
287
24
148,054
private void checkDuplicateStdOutOutput ( DumpProcessingAction newAction ) { if ( newAction . useStdOut ( ) ) { if ( this . quiet ) { logger . warn ( "Multiple actions are using stdout as output destination." ) ; } this . quiet = true ; } }
Checks if a newly created action wants to write output to stdout and logs a warning if other actions are doing the same .
66
26
148,055
private void setLanguageFilters ( String filters ) { this . filterLanguages = new HashSet <> ( ) ; if ( ! "-" . equals ( filters ) ) { Collections . addAll ( this . filterLanguages , filters . split ( "," ) ) ; } }
Sets the set of language filters based on the given string .
59
13
148,056
private void setSiteFilters ( String filters ) { this . filterSites = new HashSet <> ( ) ; if ( ! "-" . equals ( filters ) ) { Collections . addAll ( this . filterSites , filters . split ( "," ) ) ; } }
Sets the set of site filters based on the given string .
59
13
148,057
private void setPropertyFilters ( String filters ) { this . filterProperties = new HashSet <> ( ) ; if ( ! "-" . equals ( filters ) ) { for ( String pid : filters . split ( "," ) ) { this . filterProperties . add ( Datamodel . makeWikidataPropertyIdValue ( pid ) ) ; } } }
Sets the set of property filters based on the given string .
79
13
148,058
public void writeFinalResults ( ) { printStatus ( ) ; try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( "gender-ratios.csv" ) ) ) { out . print ( "Site key,pages total,pages on humans,pages on humans with gender" ) ; for ( EntityIdValue gender : this . genderNamesList ) { out . print ( "," + this . genderNames . get ( gender ) + " (" + gender . getId ( ) + ")" ) ; } out . println ( ) ; List < SiteRecord > siteRecords = new ArrayList <> ( this . siteRecords . values ( ) ) ; Collections . sort ( siteRecords , new SiteRecordComparator ( ) ) ; for ( SiteRecord siteRecord : siteRecords ) { out . print ( siteRecord . siteKey + "," + siteRecord . pageCount + "," + siteRecord . humanPageCount + "," + siteRecord . humanGenderPageCount ) ; for ( EntityIdValue gender : this . genderNamesList ) { if ( siteRecord . genderCounts . containsKey ( gender ) ) { out . print ( "," + siteRecord . genderCounts . get ( gender ) ) ; } else { out . print ( ",0" ) ; } } out . println ( ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } }
Writes the results of the processing to a CSV file .
308
12
148,059
public static void printDocumentation ( ) { System . out . println ( "********************************************************************" ) ; System . out . println ( "*** Wikidata Toolkit: GenderRatioProcessor" ) ; System . out . println ( "*** " ) ; System . out . println ( "*** This program will download and process dumps from Wikidata." ) ; System . out . println ( "*** It will compute the numbers of articles about humans across" ) ; System . out . println ( "*** Wikimedia projects, and in particular it will count the articles" ) ; System . out . println ( "*** for each sex/gender. Results will be stored in a CSV file." ) ; System . out . println ( "*** See source code for further details." ) ; System . out . println ( "********************************************************************" ) ; }
Prints some basic documentation about this program .
176
9
148,060
private boolean containsValue ( StatementGroup statementGroup , Value value ) { for ( Statement s : statementGroup ) { if ( value . equals ( s . getValue ( ) ) ) { return true ; } } return false ; }
Checks if the given group of statements contains the given value as the value of a main snak of some statement .
47
24
148,061
private void addNewGenderName ( EntityIdValue entityIdValue , String name ) { this . genderNames . put ( entityIdValue , name ) ; this . genderNamesList . add ( entityIdValue ) ; }
Adds a new gender item and an initial name .
46
10
148,062
private SiteRecord getSiteRecord ( String siteKey ) { SiteRecord siteRecord = this . siteRecords . get ( siteKey ) ; if ( siteRecord == null ) { siteRecord = new SiteRecord ( siteKey ) ; this . siteRecords . put ( siteKey , siteRecord ) ; } return siteRecord ; }
Returns a site record for the site of the given name creating a new one if it does not exist yet .
69
22
148,063
private void countGender ( EntityIdValue gender , SiteRecord siteRecord ) { Integer curValue = siteRecord . genderCounts . get ( gender ) ; if ( curValue == null ) { siteRecord . genderCounts . put ( gender , 1 ) ; } else { siteRecord . genderCounts . put ( gender , curValue + 1 ) ; } }
Counts a single page of the specified gender . If this is the first page of that gender on this site a suitable key is added to the list of the site s genders .
76
36
148,064
public List < WbSearchEntitiesResult > wbSearchEntities ( String search , String language , Boolean strictLanguage , String type , Long limit , Long offset ) throws MediaWikiApiErrorException { Map < String , String > parameters = new HashMap < String , String > ( ) ; parameters . put ( ApiConnection . PARAM_ACTION , "wbsearchentities" ) ; if ( search != null ) { parameters . put ( "search" , search ) ; } else { throw new IllegalArgumentException ( "Search parameter must be specified for this action." ) ; } if ( language != null ) { parameters . put ( "language" , language ) ; } else { throw new IllegalArgumentException ( "Language parameter must be specified for this action." ) ; } if ( strictLanguage != null ) { parameters . put ( "strictlanguage" , Boolean . toString ( strictLanguage ) ) ; } if ( type != null ) { parameters . put ( "type" , type ) ; } if ( limit != null ) { parameters . put ( "limit" , Long . toString ( limit ) ) ; } if ( offset != null ) { parameters . put ( "continue" , Long . toString ( offset ) ) ; } List < WbSearchEntitiesResult > results = new ArrayList <> ( ) ; try { JsonNode root = this . connection . sendJsonRequest ( "POST" , parameters ) ; JsonNode entities = root . path ( "search" ) ; for ( JsonNode entityNode : entities ) { try { JacksonWbSearchEntitiesResult ed = mapper . treeToValue ( entityNode , JacksonWbSearchEntitiesResult . class ) ; results . add ( ed ) ; } catch ( JsonProcessingException e ) { LOGGER . error ( "Error when reading JSON for entity " + entityNode . path ( "id" ) . asText ( "UNKNOWN" ) + ": " + e . toString ( ) ) ; } } } catch ( IOException e ) { LOGGER . error ( "Could not retrive data: " + e . toString ( ) ) ; } return results ; }
Executes the API action wbsearchentity for the given parameters . Searches for entities using labels and aliases . Returns a label and description for the entity in the user language if possible . Returns details of the matched term . The matched term text is also present in the aliases key if different from the display label .
464
66
148,065
public ItemDocumentBuilder withSiteLink ( String title , String siteKey , ItemIdValue ... badges ) { withSiteLink ( factory . getSiteLink ( title , siteKey , Arrays . asList ( badges ) ) ) ; return this ; }
Adds an additional site link to the constructed document .
52
10
148,066
public Resource addReference ( Reference reference ) { Resource resource = this . rdfWriter . getUri ( Vocabulary . getReferenceUri ( reference ) ) ; this . referenceQueue . add ( reference ) ; this . referenceSubjectQueue . add ( resource ) ; return resource ; }
Adds the given reference to the list of references that should still be serialized and returns the RDF resource that will be used as a subject .
59
29
148,067
public void writeReferences ( ) throws RDFHandlerException { Iterator < Reference > referenceIterator = this . referenceQueue . iterator ( ) ; for ( Resource resource : this . referenceSubjectQueue ) { final Reference reference = referenceIterator . next ( ) ; if ( this . declaredReferences . add ( resource ) ) { writeReference ( reference , resource ) ; } } this . referenceSubjectQueue . clear ( ) ; this . referenceQueue . clear ( ) ; this . snakRdfConverter . writeAuxiliaryTriples ( ) ; }
Writes references that have been added recently . Auxiliary triples that are generated for serializing snaks in references will be written right afterwards . This will also trigger any other auxiliary triples to be written that the snak converter object may have buffered .
114
52
148,068
public Sites getSitesInformation ( ) throws IOException { MwDumpFile sitesTableDump = getMostRecentDump ( DumpContentType . SITES ) ; if ( sitesTableDump == null ) { return null ; } // Create a suitable processor for such dumps and process the file: MwSitesDumpFileProcessor sitesDumpFileProcessor = new MwSitesDumpFileProcessor ( ) ; sitesDumpFileProcessor . processDumpFileContents ( sitesTableDump . getDumpFileStream ( ) , sitesTableDump ) ; return sitesDumpFileProcessor . getSites ( ) ; }
Processes the most recent dump of the sites table to extract information about registered sites .
141
17
148,069
@ Deprecated public void processMostRecentDump ( DumpContentType dumpContentType , MwDumpFileProcessor dumpFileProcessor ) { MwDumpFile dumpFile = getMostRecentDump ( dumpContentType ) ; if ( dumpFile != null ) { processDumpFile ( dumpFile , dumpFileProcessor ) ; } }
Processes the most recent dump of the given type using the given dump processor .
75
16
148,070
void processDumpFile ( MwDumpFile dumpFile , MwDumpFileProcessor dumpFileProcessor ) { try ( InputStream inputStream = dumpFile . getDumpFileStream ( ) ) { dumpFileProcessor . processDumpFileContents ( inputStream , dumpFile ) ; } catch ( FileAlreadyExistsException e ) { logger . error ( "Dump file " + dumpFile . toString ( ) + " could not be processed since file " + e . getFile ( ) + " already exists. Try deleting the file or dumpfile directory to attempt a new download." ) ; } catch ( IOException e ) { logger . error ( "Dump file " + dumpFile . toString ( ) + " could not be processed: " + e . toString ( ) ) ; } }
Processes one dump file with the given dump file processor handling exceptions appropriately .
173
15
148,071
public MwDumpFile findMostRecentDump ( DumpContentType dumpContentType ) { List < MwDumpFile > dumps = findAllDumps ( dumpContentType ) ; for ( MwDumpFile dump : dumps ) { if ( dump . isAvailable ( ) ) { return dump ; } } return null ; }
Finds the most recent dump of the given type that is actually available .
72
15
148,072
List < MwDumpFile > mergeDumpLists ( List < MwDumpFile > localDumps , List < MwDumpFile > onlineDumps ) { List < MwDumpFile > result = new ArrayList <> ( localDumps ) ; HashSet < String > localDateStamps = new HashSet <> ( ) ; for ( MwDumpFile dumpFile : localDumps ) { localDateStamps . add ( dumpFile . getDateStamp ( ) ) ; } for ( MwDumpFile dumpFile : onlineDumps ) { if ( ! localDateStamps . contains ( dumpFile . getDateStamp ( ) ) ) { result . add ( dumpFile ) ; } } result . sort ( Collections . reverseOrder ( new MwDumpFile . DateComparator ( ) ) ) ; return result ; }
Merges a list of local and online dumps . For dumps available both online and locally only the local version is included . The list is order with most recent dump date first .
187
35
148,073
List < MwDumpFile > findDumpsLocally ( DumpContentType dumpContentType ) { String directoryPattern = WmfDumpFile . getDumpFileDirectoryName ( dumpContentType , "*" ) ; List < String > dumpFileDirectories ; try { dumpFileDirectories = this . dumpfileDirectoryManager . getSubdirectories ( directoryPattern ) ; } catch ( IOException e ) { logger . error ( "Unable to access dump directory: " + e . toString ( ) ) ; return Collections . emptyList ( ) ; } List < MwDumpFile > result = new ArrayList <> ( ) ; for ( String directory : dumpFileDirectories ) { String dateStamp = WmfDumpFile . getDateStampFromDumpFileDirectoryName ( dumpContentType , directory ) ; if ( dateStamp . matches ( WmfDumpFileManager . DATE_STAMP_PATTERN ) ) { WmfLocalDumpFile dumpFile = new WmfLocalDumpFile ( dateStamp , this . projectName , dumpfileDirectoryManager , dumpContentType ) ; if ( dumpFile . isAvailable ( ) ) { result . add ( dumpFile ) ; } else { logger . error ( "Incomplete local dump file data. Maybe delete " + dumpFile . getDumpfileDirectory ( ) + " to attempt fresh download." ) ; } } // else: silently ignore directories that don't match } result . sort ( Collections . reverseOrder ( new MwDumpFile . DateComparator ( ) ) ) ; logger . info ( "Found " + result . size ( ) + " local dumps of type " + dumpContentType + ": " + result ) ; return result ; }
Finds out which dump files of the given type have been downloaded already . The result is a list of objects that describe the available dump files in descending order by their date . Not all of the dumps included might be actually available .
376
46
148,074
List < MwDumpFile > findDumpsOnline ( DumpContentType dumpContentType ) { List < String > dumpFileDates = findDumpDatesOnline ( dumpContentType ) ; List < MwDumpFile > result = new ArrayList <> ( ) ; for ( String dateStamp : dumpFileDates ) { if ( dumpContentType == DumpContentType . DAILY ) { result . add ( new WmfOnlineDailyDumpFile ( dateStamp , this . projectName , this . webResourceFetcher , this . dumpfileDirectoryManager ) ) ; } else if ( dumpContentType == DumpContentType . JSON ) { result . add ( new JsonOnlineDumpFile ( dateStamp , this . projectName , this . webResourceFetcher , this . dumpfileDirectoryManager ) ) ; } else { result . add ( new WmfOnlineStandardDumpFile ( dateStamp , this . projectName , this . webResourceFetcher , this . dumpfileDirectoryManager , dumpContentType ) ) ; } } logger . info ( "Found " + result . size ( ) + " online dumps of type " + dumpContentType + ": " + result ) ; return result ; }
Finds out which dump files of the given type are available for download . The result is a list of objects that describe the available dump files in descending order by their date . Not all of the dumps included might be actually available .
268
46
148,075
@ Override public void processItemDocument ( ItemDocument itemDocument ) { this . countItems ++ ; // Do some printing for demonstration/debugging. // Only print at most 50 items (or it would get too slow). if ( this . countItems < 10 ) { System . out . println ( itemDocument ) ; } else if ( this . countItems == 10 ) { System . out . println ( "*** I won't print any further items.\n" + "*** We will never finish if we print all the items.\n" + "*** Maybe remove this debug output altogether." ) ; } }
Processes one item document . This is often the main workhorse that gathers the data you are interested in . You can modify this code as you wish .
128
31
148,076
static EntityIdValue fromId ( String id , String siteIri ) { switch ( guessEntityTypeFromId ( id ) ) { case EntityIdValueImpl . JSON_ENTITY_TYPE_ITEM : return new ItemIdValueImpl ( id , siteIri ) ; case EntityIdValueImpl . JSON_ENTITY_TYPE_PROPERTY : return new PropertyIdValueImpl ( id , siteIri ) ; case EntityIdValueImpl . JSON_ENTITY_TYPE_LEXEME : return new LexemeIdValueImpl ( id , siteIri ) ; case EntityIdValueImpl . JSON_ENTITY_TYPE_FORM : return new FormIdValueImpl ( id , siteIri ) ; case EntityIdValueImpl . JSON_ENTITY_TYPE_SENSE : return new SenseIdValueImpl ( id , siteIri ) ; default : throw new IllegalArgumentException ( "Entity id \"" + id + "\" is not supported." ) ; } }
Parses an item id
208
6
148,077
static String guessEntityTypeFromId ( String id ) { if ( id . isEmpty ( ) ) { throw new IllegalArgumentException ( "Entity ids should not be empty." ) ; } switch ( id . charAt ( 0 ) ) { case ' ' : if ( id . contains ( "-F" ) ) { return JSON_ENTITY_TYPE_FORM ; } else if ( id . contains ( "-S" ) ) { return JSON_ENTITY_TYPE_SENSE ; } else { return JSON_ENTITY_TYPE_LEXEME ; } case ' ' : return JSON_ENTITY_TYPE_PROPERTY ; case ' ' : return JSON_ENTITY_TYPE_ITEM ; default : throw new IllegalArgumentException ( "Entity id \"" + id + "\" is not supported." ) ; } }
RReturns the entity type of the id like item or property
178
12
148,078
@ Override public void close ( ) { logger . info ( "Finished processing." ) ; this . timer . stop ( ) ; this . lastSeconds = ( int ) ( timer . getTotalWallTime ( ) / 1000000000 ) ; printStatus ( ) ; }
Stops the processing and prints the final time .
57
10
148,079
private void countEntity ( ) { if ( ! this . timer . isRunning ( ) ) { startTimer ( ) ; } this . entityCount ++ ; if ( this . entityCount % 100 == 0 ) { timer . stop ( ) ; int seconds = ( int ) ( timer . getTotalWallTime ( ) / 1000000000 ) ; if ( seconds >= this . lastSeconds + this . reportInterval ) { this . lastSeconds = seconds ; printStatus ( ) ; if ( this . timeout > 0 && seconds > this . timeout ) { logger . info ( "Timeout. Aborting processing." ) ; throw new TimeoutException ( ) ; } } timer . start ( ) ; } }
Counts one entity . Every once in a while the current time is checked so as to print an intermediate report roughly every ten seconds .
146
27
148,080
public static String implodeObjects ( Iterable < ? > objects ) { StringBuilder builder = new StringBuilder ( ) ; boolean first = true ; for ( Object o : objects ) { if ( first ) { first = false ; } else { builder . append ( "|" ) ; } builder . append ( o . toString ( ) ) ; } return builder . toString ( ) ; }
Builds a string that serializes a list of objects separated by the pipe character . The toString methods are used to turn objects into strings . This operation is commonly used to build parameter lists for API requests .
83
42
148,081
public void logout ( ) throws IOException { if ( this . loggedIn ) { Map < String , String > params = new HashMap <> ( ) ; params . put ( "action" , "logout" ) ; params . put ( "format" , "json" ) ; // reduce the output try { sendJsonRequest ( "POST" , params ) ; } catch ( MediaWikiApiErrorException e ) { throw new IOException ( e . getMessage ( ) , e ) ; //TODO: we should throw a better exception } this . loggedIn = false ; this . username = "" ; this . password = "" ; } }
Logs the current user out .
139
7
148,082
String fetchToken ( String tokenType ) throws IOException , MediaWikiApiErrorException { Map < String , String > params = new HashMap <> ( ) ; params . put ( ApiConnection . PARAM_ACTION , "query" ) ; params . put ( "meta" , "tokens" ) ; params . put ( "type" , tokenType ) ; try { JsonNode root = this . sendJsonRequest ( "POST" , params ) ; return root . path ( "query" ) . path ( "tokens" ) . path ( tokenType + "token" ) . textValue ( ) ; } catch ( IOException | MediaWikiApiErrorException e ) { logger . error ( "Error when trying to fetch token: " + e . toString ( ) ) ; } return null ; }
Executes a API query action to get a new token . The method only executes the action without doing any checks first . If errors occur they are logged and null is returned .
177
35
148,083
public InputStream sendRequest ( String requestMethod , Map < String , String > parameters ) throws IOException { String queryString = getQueryString ( parameters ) ; URL url = new URL ( this . apiBaseUrl ) ; HttpURLConnection connection = ( HttpURLConnection ) WebResourceFetcherImpl . getUrlConnection ( url ) ; setupConnection ( requestMethod , queryString , connection ) ; OutputStreamWriter writer = new OutputStreamWriter ( connection . getOutputStream ( ) ) ; writer . write ( queryString ) ; writer . flush ( ) ; writer . close ( ) ; int rc = connection . getResponseCode ( ) ; if ( rc != 200 ) { logger . warn ( "Error: API request returned response code " + rc ) ; } InputStream iStream = connection . getInputStream ( ) ; fillCookies ( connection . getHeaderFields ( ) ) ; return iStream ; }
Sends a request to the API with the given parameters and the given request method and returns the result string . It automatically fills the cookie map with cookies in the result header after the request .
191
38
148,084
List < String > getWarnings ( JsonNode root ) { ArrayList < String > warnings = new ArrayList <> ( ) ; if ( root . has ( "warnings" ) ) { JsonNode warningNode = root . path ( "warnings" ) ; Iterator < Map . Entry < String , JsonNode > > moduleIterator = warningNode . fields ( ) ; while ( moduleIterator . hasNext ( ) ) { Map . Entry < String , JsonNode > moduleNode = moduleIterator . next ( ) ; Iterator < JsonNode > moduleOutputIterator = moduleNode . getValue ( ) . elements ( ) ; while ( moduleOutputIterator . hasNext ( ) ) { JsonNode moduleOutputNode = moduleOutputIterator . next ( ) ; if ( moduleOutputNode . isTextual ( ) ) { warnings . add ( "[" + moduleNode . getKey ( ) + "]: " + moduleOutputNode . textValue ( ) ) ; } else if ( moduleOutputNode . isArray ( ) ) { Iterator < JsonNode > messageIterator = moduleOutputNode . elements ( ) ; while ( messageIterator . hasNext ( ) ) { JsonNode messageNode = messageIterator . next ( ) ; warnings . add ( "[" + moduleNode . getKey ( ) + "]: " + messageNode . path ( "html" ) . path ( "*" ) . asText ( messageNode . toString ( ) ) ) ; } } else { warnings . add ( "[" + moduleNode . getKey ( ) + "]: " + "Warning was not understood. Please report this to Wikidata Toolkit. JSON source: " + moduleOutputNode . toString ( ) ) ; } } } } return warnings ; }
Extracts warnings that are returned in an API response .
373
12
148,085
String getQueryString ( Map < String , String > params ) { StringBuilder builder = new StringBuilder ( ) ; try { boolean first = true ; for ( Map . Entry < String , String > entry : params . entrySet ( ) ) { if ( first ) { first = false ; } else { builder . append ( "&" ) ; } builder . append ( URLEncoder . encode ( entry . getKey ( ) , "UTF-8" ) ) ; builder . append ( "=" ) ; builder . append ( URLEncoder . encode ( entry . getValue ( ) , "UTF-8" ) ) ; } } catch ( UnsupportedEncodingException e ) { throw new RuntimeException ( "Your Java version does not support UTF-8 encoding." ) ; } return builder . toString ( ) ; }
Returns the query string of a URL from a parameter list .
174
12
148,086
public static < T > T load ( String resourcePath , BeanSpec spec ) { return load ( resourcePath , spec , false ) ; }
Load resource content from given path into variable with type specified by spec .
29
14
148,087
protected void printCenter ( String format , Object ... args ) { String text = S . fmt ( format , args ) ; info ( S . center ( text , 80 ) ) ; }
Print formatted string in the center of 80 chars line left and right padded .
38
15
148,088
protected void printCenterWithLead ( String lead , String format , Object ... args ) { String text = S . fmt ( format , args ) ; int len = 80 - lead . length ( ) ; info ( S . concat ( lead , S . center ( text , len ) ) ) ; }
Print the lead string followed by centered formatted string . The whole length of the line is 80 chars .
62
20
148,089
public String convert ( BufferedImage image , boolean favicon ) { // Reset statistics before anything statsArray = new int [ 12 ] ; // Begin the timer dStart = System . nanoTime ( ) ; // Scale the image image = scale ( image , favicon ) ; // The +1 is for the newline characters StringBuilder sb = new StringBuilder ( ( image . getWidth ( ) + 1 ) * image . getHeight ( ) ) ; for ( int y = 0 ; y < image . getHeight ( ) ; y ++ ) { // At the end of each line, add a newline character if ( sb . length ( ) != 0 ) sb . append ( "\n" ) ; for ( int x = 0 ; x < image . getWidth ( ) ; x ++ ) { // Color pixelColor = new Color ( image . getRGB ( x , y ) , true ) ; int alpha = pixelColor . getAlpha ( ) ; boolean isTransient = alpha < 0.1 ; double gValue = isTransient ? 250 : ( ( double ) pixelColor . getRed ( ) * 0.2989 + ( double ) pixelColor . getBlue ( ) * 0.5870 + ( double ) pixelColor . getGreen ( ) * 0.1140 ) / ( ( double ) alpha / ( double ) 250 ) ; final char s = gValue < 130 ? darkGrayScaleMap ( gValue ) : lightGrayScaleMap ( gValue ) ; sb . append ( s ) ; } } imgArray = sb . toString ( ) . toCharArray ( ) ; dEnd = System . nanoTime ( ) ; return sb . toString ( ) ; }
The main conversion method .
357
5
148,090
private static BufferedImage scale ( BufferedImage imageToScale , int dWidth , int dHeight , double fWidth , double fHeight ) { BufferedImage dbi = null ; // Needed to create a new BufferedImage object int imageType = imageToScale . getType ( ) ; if ( imageToScale != null ) { dbi = new BufferedImage ( dWidth , dHeight , imageType ) ; Graphics2D g = dbi . createGraphics ( ) ; AffineTransform at = AffineTransform . getScaleInstance ( fWidth , fHeight ) ; g . drawRenderedImage ( imageToScale , at ) ; } return dbi ; }
Image scale method
143
3
148,091
public EventBus emit ( Enum < ? > event , Object ... args ) { return _emitWithOnceBus ( eventContext ( event , args ) ) ; }
Emit an enum event with parameters supplied .
35
9
148,092
public EventBus emit ( String event , Object ... args ) { return _emitWithOnceBus ( eventContext ( event , args ) ) ; }
Emit a string event with parameters .
31
8
148,093
public EventBus emit ( EventObject event , Object ... args ) { return _emitWithOnceBus ( eventContext ( event , args ) ) ; }
Emit an event object with parameters .
32
8
148,094
public EventBus emitAsync ( Enum < ? > event , Object ... args ) { return _emitWithOnceBus ( eventContextAsync ( event , args ) ) ; }
Emit an enum event with parameters and force all listeners to be called asynchronously .
37
18
148,095
public EventBus emitAsync ( String event , Object ... args ) { return _emitWithOnceBus ( eventContextAsync ( event , args ) ) ; }
Emit a string event with parameters and force all listeners to be called asynchronously .
33
18
148,096
public EventBus emitAsync ( EventObject event , Object ... args ) { return _emitWithOnceBus ( eventContextAsync ( event , args ) ) ; }
Emit a event object with parameters and force all listeners to be called asynchronously .
34
18
148,097
public EventBus emitSync ( Enum < ? > event , Object ... args ) { return _emitWithOnceBus ( eventContextSync ( event , args ) ) ; }
Emit an enum event with parameters and force all listener to be called synchronously .
37
17
148,098
public EventBus emitSync ( String event , Object ... args ) { return _emitWithOnceBus ( eventContextSync ( event , args ) ) ; }
Emit a string event with parameters and force all listener to be called synchronously .
33
17
148,099
public EventBus emitSync ( EventObject event , Object ... args ) { return _emitWithOnceBus ( eventContextSync ( event , args ) ) ; }
Emit a event object with parameters and force all listeners to be called synchronously .
34
17