idx int64 0 41.2k | question stringlengths 83 4.15k | target stringlengths 5 715 |
|---|---|---|
24,600 | protected < T > T getDatamodelObjectFromResponse ( JsonNode response , List < String > path , Class < T > targetClass ) throws JsonProcessingException { if ( response == null ) { throw new JsonMappingException ( "The API response is null" ) ; } JsonNode currentNode = response ; for ( String field : path ) { if ( ! currentNode . has ( field ) ) { throw new JsonMappingException ( "Field '" + field + "' not found in API response." ) ; } currentNode = currentNode . path ( field ) ; } return mapper . treeToValue ( currentNode , targetClass ) ; } | Extracts a particular data model instance from a JSON response returned by MediaWiki . The location is described by a list of successive fields to use from the root to the target object . |
24,601 | public static void findSomeStringProperties ( ApiConnection connection ) throws MediaWikiApiErrorException , IOException { WikibaseDataFetcher wbdf = new WikibaseDataFetcher ( connection , siteIri ) ; wbdf . getFilter ( ) . excludeAllProperties ( ) ; wbdf . getFilter ( ) . setLanguageFilter ( Collections . singleton ( "en" ) ) ; ArrayList < PropertyIdValue > stringProperties = new ArrayList < > ( ) ; System . out . println ( "*** Trying to find string properties for the example ... " ) ; int propertyNumber = 1 ; while ( stringProperties . size ( ) < 5 ) { ArrayList < String > fetchProperties = new ArrayList < > ( ) ; for ( int i = propertyNumber ; i < propertyNumber + 10 ; i ++ ) { fetchProperties . add ( "P" + i ) ; } propertyNumber += 10 ; Map < String , EntityDocument > results = wbdf . getEntityDocuments ( fetchProperties ) ; for ( EntityDocument ed : results . values ( ) ) { PropertyDocument pd = ( PropertyDocument ) ed ; if ( DatatypeIdValue . DT_STRING . equals ( pd . getDatatype ( ) . getIri ( ) ) && pd . getLabels ( ) . containsKey ( "en" ) ) { stringProperties . add ( pd . getEntityId ( ) ) ; System . out . println ( "* Found string property " + pd . getEntityId ( ) . getId ( ) + " (" + pd . getLabels ( ) . get ( "en" ) + ")" ) ; } } } stringProperty1 = stringProperties . get ( 0 ) ; stringProperty2 = stringProperties . get ( 1 ) ; stringProperty3 = stringProperties . get ( 2 ) ; stringProperty4 = stringProperties . get ( 3 ) ; stringProperty5 = stringProperties . get ( 4 ) ; System . out . println ( "*** Done." ) ; } | Finds properties of datatype string on test . wikidata . org . Since the test site changes all the time we cannot hardcode a specific property here . Instead we just look through all properties starting from P1 to find the first few properties of type string that have an English label . These properties are used for testing in this code . |
24,602 | public long findPosition ( long nOccurrence ) { updateCount ( ) ; if ( nOccurrence <= 0 ) { return RankedBitVector . NOT_FOUND ; } int findPos = ( int ) ( nOccurrence / this . blockSize ) ; if ( findPos < this . positionArray . length ) { long pos0 = this . positionArray [ findPos ] ; long leftOccurrences = nOccurrence - ( findPos * this . blockSize ) ; if ( leftOccurrences == 0 ) { return pos0 ; } for ( long index = pos0 + 1 ; index < this . bitVector . size ( ) ; index ++ ) { if ( this . bitVector . getBit ( index ) == this . bit ) { leftOccurrences -- ; } if ( leftOccurrences == 0 ) { return index ; } } } return RankedBitVector . NOT_FOUND ; } | Returns the position for a given number of occurrences or NOT_FOUND if this value is not found . |
24,603 | void writeNoValueRestriction ( RdfWriter rdfWriter , String propertyUri , String rangeUri , String subject ) throws RDFHandlerException { Resource bnodeSome = rdfWriter . getFreshBNode ( ) ; rdfWriter . writeTripleValueObject ( subject , RdfWriter . RDF_TYPE , RdfWriter . OWL_CLASS ) ; rdfWriter . writeTripleValueObject ( subject , RdfWriter . OWL_COMPLEMENT_OF , bnodeSome ) ; rdfWriter . writeTripleValueObject ( bnodeSome , RdfWriter . RDF_TYPE , RdfWriter . OWL_RESTRICTION ) ; rdfWriter . writeTripleUriObject ( bnodeSome , RdfWriter . OWL_ON_PROPERTY , propertyUri ) ; rdfWriter . writeTripleUriObject ( bnodeSome , RdfWriter . OWL_SOME_VALUES_FROM , rangeUri ) ; } | Writes no - value restriction . |
24,604 | public StatementBuilder withQualifierValue ( PropertyIdValue propertyIdValue , Value value ) { withQualifier ( factory . getValueSnak ( propertyIdValue , value ) ) ; return getThis ( ) ; } | Adds a qualifier with the given property and value to the constructed statement . |
24,605 | protected void countStatements ( UsageStatistics usageStatistics , StatementDocument statementDocument ) { for ( StatementGroup sg : statementDocument . getStatementGroups ( ) ) { usageStatistics . countStatements += sg . size ( ) ; countPropertyMain ( usageStatistics , sg . getProperty ( ) , sg . size ( ) ) ; for ( Statement s : sg ) { for ( SnakGroup q : s . getQualifiers ( ) ) { countPropertyQualifier ( usageStatistics , q . getProperty ( ) , q . size ( ) ) ; } for ( Reference r : s . getReferences ( ) ) { usageStatistics . countReferencedStatements ++ ; for ( SnakGroup snakGroup : r . getSnakGroups ( ) ) { countPropertyReference ( usageStatistics , snakGroup . getProperty ( ) , snakGroup . size ( ) ) ; } } } } } | Count the statements and property uses of an item or property document . |
24,606 | private void writeFinalResults ( ) { printStatus ( ) ; writePropertyStatisticsToFile ( this . itemStatistics , "item-property-counts.csv" ) ; writePropertyStatisticsToFile ( this . propertyStatistics , "property-property-counts.csv" ) ; try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( "site-link-counts.csv" ) ) ) { out . println ( "Site key,Site links" ) ; for ( Entry < String , Integer > entry : this . siteLinkStatistics . entrySet ( ) ) { out . println ( entry . getKey ( ) + "," + entry . getValue ( ) ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } writeTermStatisticsToFile ( this . itemStatistics , "item-term-counts.csv" ) ; writeTermStatisticsToFile ( this . propertyStatistics , "property-term-counts.csv" ) ; } | Prints and stores final result of the processing . This should be called after finishing the processing of a dump . It will print the statistics gathered during processing and it will write a CSV file with usage counts for every property . |
24,607 | private void writePropertyStatisticsToFile ( UsageStatistics usageStatistics , String fileName ) { try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( fileName ) ) ) { out . println ( "Property id,in statements,in qualifiers,in references,total" ) ; for ( Entry < PropertyIdValue , Integer > entry : usageStatistics . propertyCountsMain . entrySet ( ) ) { int qCount = usageStatistics . propertyCountsQualifier . get ( entry . getKey ( ) ) ; int rCount = usageStatistics . propertyCountsReferences . get ( entry . getKey ( ) ) ; int total = entry . getValue ( ) + qCount + rCount ; out . println ( entry . getKey ( ) . getId ( ) + "," + entry . getValue ( ) + "," + qCount + "," + rCount + "," + total ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } } | Stores the gathered usage statistics about property uses to a CSV file . |
24,608 | private void writeTermStatisticsToFile ( UsageStatistics usageStatistics , String fileName ) { for ( String key : usageStatistics . aliasCounts . keySet ( ) ) { countKey ( usageStatistics . labelCounts , key , 0 ) ; } for ( String key : usageStatistics . descriptionCounts . keySet ( ) ) { countKey ( usageStatistics . labelCounts , key , 0 ) ; } try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( fileName ) ) ) { out . println ( "Language,Labels,Descriptions,Aliases" ) ; for ( Entry < String , Integer > entry : usageStatistics . labelCounts . entrySet ( ) ) { countKey ( usageStatistics . aliasCounts , entry . getKey ( ) , 0 ) ; int aCount = usageStatistics . aliasCounts . get ( entry . getKey ( ) ) ; countKey ( usageStatistics . descriptionCounts , entry . getKey ( ) , 0 ) ; int dCount = usageStatistics . descriptionCounts . get ( entry . getKey ( ) ) ; out . println ( entry . getKey ( ) + "," + entry . getValue ( ) + "," + dCount + "," + aCount ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } } | Stores the gathered usage statistics about term uses by language to a CSV file . |
24,609 | private void printStatistics ( UsageStatistics usageStatistics , String entityLabel ) { System . out . println ( "Processed " + usageStatistics . count + " " + entityLabel + ":" ) ; System . out . println ( " * Labels: " + usageStatistics . countLabels + ", descriptions: " + usageStatistics . countDescriptions + ", aliases: " + usageStatistics . countAliases ) ; System . out . println ( " * Statements: " + usageStatistics . countStatements + ", with references: " + usageStatistics . countReferencedStatements ) ; } | Prints a report about the statistics stored in the given data object . |
24,610 | private void countPropertyMain ( UsageStatistics usageStatistics , PropertyIdValue property , int count ) { addPropertyCounters ( usageStatistics , property ) ; usageStatistics . propertyCountsMain . put ( property , usageStatistics . propertyCountsMain . get ( property ) + count ) ; } | Counts additional occurrences of a property as the main property of statements . |
24,611 | private void addPropertyCounters ( UsageStatistics usageStatistics , PropertyIdValue property ) { if ( ! usageStatistics . propertyCountsMain . containsKey ( property ) ) { usageStatistics . propertyCountsMain . put ( property , 0 ) ; usageStatistics . propertyCountsQualifier . put ( property , 0 ) ; usageStatistics . propertyCountsReferences . put ( property , 0 ) ; } } | Initializes the counters for a property to zero if not done yet . |
24,612 | private void countKey ( Map < String , Integer > map , String key , int count ) { if ( map . containsKey ( key ) ) { map . put ( key , map . get ( key ) + count ) ; } else { map . put ( key , count ) ; } } | Helper method that stores in a hash map how often a certain key occurs . If the key has not been encountered yet a new entry is created for it in the map . Otherwise the existing value for the key is incremented . |
24,613 | public void addSite ( String siteKey ) { ValueMap gv = new ValueMap ( siteKey ) ; if ( ! this . valueMaps . contains ( gv ) ) { this . valueMaps . add ( gv ) ; } } | Registers a new site for specific data collection . If null is used as a site key then all data is collected . |
24,614 | private void countCoordinateStatement ( Statement statement , ItemDocument itemDocument ) { Value value = statement . getValue ( ) ; if ( ! ( value instanceof GlobeCoordinatesValue ) ) { return ; } GlobeCoordinatesValue coordsValue = ( GlobeCoordinatesValue ) value ; if ( ! this . globe . equals ( ( coordsValue . getGlobe ( ) ) ) ) { return ; } int xCoord = ( int ) ( ( ( coordsValue . getLongitude ( ) + 180.0 ) / 360.0 ) * this . width ) % this . width ; int yCoord = ( int ) ( ( ( coordsValue . getLatitude ( ) + 90.0 ) / 180.0 ) * this . height ) % this . height ; if ( xCoord < 0 || yCoord < 0 || xCoord >= this . width || yCoord >= this . height ) { System . out . println ( "Dropping out-of-range coordinate: " + coordsValue ) ; return ; } countCoordinates ( xCoord , yCoord , itemDocument ) ; this . count += 1 ; if ( this . count % 100000 == 0 ) { reportProgress ( ) ; writeImages ( ) ; } } | Counts the coordinates stored in a single statement for the relevant property if they are actually given and valid . |
24,615 | private void countCoordinates ( int xCoord , int yCoord , ItemDocument itemDocument ) { for ( String siteKey : itemDocument . getSiteLinks ( ) . keySet ( ) ) { Integer count = this . siteCounts . get ( siteKey ) ; if ( count == null ) { this . siteCounts . put ( siteKey , 1 ) ; } else { this . siteCounts . put ( siteKey , count + 1 ) ; } } for ( ValueMap vm : this . valueMaps ) { vm . countCoordinates ( xCoord , yCoord , itemDocument ) ; } } | Counts a single pair of coordinates in all datasets . |
24,616 | private void writeImages ( ) { for ( ValueMap gv : this . valueMaps ) { gv . writeImage ( ) ; } try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( "map-site-count.csv" ) ) ) { out . println ( "Site key,Number of geo items" ) ; out . println ( "wikidata total," + this . count ) ; for ( Entry < String , Integer > entry : this . siteCounts . entrySet ( ) ) { out . println ( entry . getKey ( ) + "," + entry . getValue ( ) ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } } | Writes image files for all data that was collected and the statistics file for all sites . |
24,617 | private int getColor ( int value ) { if ( value == 0 ) { return 0 ; } double scale = Math . log10 ( value ) / Math . log10 ( this . topValue ) ; double lengthScale = Math . min ( 1.0 , scale ) * ( colors . length - 1 ) ; int index = 1 + ( int ) lengthScale ; if ( index == colors . length ) { index -- ; } double partScale = lengthScale - ( index - 1 ) ; int r = ( int ) ( colors [ index - 1 ] [ 0 ] + partScale * ( colors [ index ] [ 0 ] - colors [ index - 1 ] [ 0 ] ) ) ; int g = ( int ) ( colors [ index - 1 ] [ 1 ] + partScale * ( colors [ index ] [ 1 ] - colors [ index - 1 ] [ 1 ] ) ) ; int b = ( int ) ( colors [ index - 1 ] [ 2 ] + partScale * ( colors [ index ] [ 2 ] - colors [ index - 1 ] [ 2 ] ) ) ; r = Math . min ( 255 , r ) ; b = Math . min ( 255 , b ) ; g = Math . min ( 255 , g ) ; return ( r << 16 ) | ( g << 8 ) | b ; } | Returns a color for a given absolute number that is to be shown on the map . |
24,618 | public ReferenceBuilder withPropertyValue ( PropertyIdValue propertyIdValue , Value value ) { getSnakList ( propertyIdValue ) . add ( factory . getValueSnak ( propertyIdValue , value ) ) ; return getThis ( ) ; } | Adds the given property and value to the constructed reference . |
24,619 | private static void close ( Closeable closeable ) { if ( closeable != null ) { try { closeable . close ( ) ; } catch ( IOException ignored ) { logger . error ( "Failed to close output stream: " + ignored . getMessage ( ) ) ; } } } | Closes a Closeable and swallows any exceptions that might occur in the process . |
24,620 | public static void configureLogging ( ) { ConsoleAppender consoleAppender = new ConsoleAppender ( ) ; String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n" ; consoleAppender . setLayout ( new PatternLayout ( pattern ) ) ; consoleAppender . setThreshold ( Level . INFO ) ; consoleAppender . activateOptions ( ) ; Logger . getRootLogger ( ) . addAppender ( consoleAppender ) ; } | Defines how messages should be logged . This method can be modified to restrict the logging messages that are shown on the console or to change their formatting . See the documentation of Log4J for details on how to do this . |
24,621 | public static void processEntitiesFromWikidataDump ( EntityDocumentProcessor entityDocumentProcessor ) { DumpProcessingController dumpProcessingController = new DumpProcessingController ( "wikidatawiki" ) ; dumpProcessingController . setOfflineMode ( OFFLINE_MODE ) ; boolean onlyCurrentRevisions ; switch ( DUMP_FILE_MODE ) { case ALL_REVS : case ALL_REVS_WITH_DAILIES : onlyCurrentRevisions = false ; break ; case CURRENT_REVS : case CURRENT_REVS_WITH_DAILIES : case JSON : case JUST_ONE_DAILY_FOR_TEST : default : onlyCurrentRevisions = true ; } dumpProcessingController . registerEntityDocumentProcessor ( entityDocumentProcessor , null , onlyCurrentRevisions ) ; EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor ( TIMEOUT_SEC ) ; dumpProcessingController . registerEntityDocumentProcessor ( entityTimerProcessor , null , onlyCurrentRevisions ) ; MwDumpFile dumpFile = null ; try { switch ( DUMP_FILE_MODE ) { case ALL_REVS : case CURRENT_REVS : dumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . FULL ) ; break ; case ALL_REVS_WITH_DAILIES : case CURRENT_REVS_WITH_DAILIES : MwDumpFile fullDumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . FULL ) ; MwDumpFile incrDumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . DAILY ) ; lastDumpFileName = fullDumpFile . getProjectName ( ) + "-" + incrDumpFile . getDateStamp ( ) + "." + fullDumpFile . getDateStamp ( ) ; dumpProcessingController . processAllRecentRevisionDumps ( ) ; break ; case JSON : dumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . JSON ) ; break ; case JUST_ONE_DAILY_FOR_TEST : dumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . DAILY ) ; break ; default : throw new RuntimeException ( "Unsupported dump processing type " + DUMP_FILE_MODE ) ; } if ( dumpFile != null ) { lastDumpFileName = dumpFile . getProjectName ( ) + "-" + dumpFile . 
getDateStamp ( ) ; dumpProcessingController . processDump ( dumpFile ) ; } } catch ( TimeoutException e ) { } entityTimerProcessor . close ( ) ; } | Processes all entities in a Wikidata dump using the given entity processor . By default the most recent JSON dump will be used . In offline mode only the most recent previously downloaded file is considered . |
24,622 | void addValue ( V value , Resource resource ) { this . valueQueue . add ( value ) ; this . valueSubjectQueue . add ( resource ) ; } | Adds the given value to the list of values that should still be serialized . The given RDF resource will be used as a subject . |
24,623 | protected void processAliases ( List < MonolingualTextValue > addAliases , List < MonolingualTextValue > deleteAliases ) { for ( MonolingualTextValue val : addAliases ) { addAlias ( val ) ; } for ( MonolingualTextValue val : deleteAliases ) { deleteAlias ( val ) ; } } | Processes changes on aliases updating the planned state of the item . |
24,624 | protected void deleteAlias ( MonolingualTextValue alias ) { String lang = alias . getLanguageCode ( ) ; AliasesWithUpdate currentAliases = newAliases . get ( lang ) ; if ( currentAliases != null ) { currentAliases . aliases . remove ( alias ) ; currentAliases . deleted . add ( alias ) ; currentAliases . write = true ; } } | Deletes an individual alias |
24,625 | protected void addAlias ( MonolingualTextValue alias ) { String lang = alias . getLanguageCode ( ) ; AliasesWithUpdate currentAliasesUpdate = newAliases . get ( lang ) ; NameWithUpdate currentLabel = newLabels . get ( lang ) ; if ( currentLabel == null ) { newLabels . put ( lang , new NameWithUpdate ( alias , true ) ) ; } else if ( ! currentLabel . value . equals ( alias ) ) { if ( currentAliasesUpdate == null ) { currentAliasesUpdate = new AliasesWithUpdate ( new ArrayList < MonolingualTextValue > ( ) , true ) ; } List < MonolingualTextValue > currentAliases = currentAliasesUpdate . aliases ; if ( ! currentAliases . contains ( alias ) ) { currentAliases . add ( alias ) ; currentAliasesUpdate . added . add ( alias ) ; currentAliasesUpdate . write = true ; } newAliases . put ( lang , currentAliasesUpdate ) ; } } | Adds an individual alias . It will be merged with the current list of aliases or added as a label if there is no label for this item in this language yet . |
24,626 | protected void processDescriptions ( List < MonolingualTextValue > descriptions ) { for ( MonolingualTextValue description : descriptions ) { NameWithUpdate currentValue = newDescriptions . get ( description . getLanguageCode ( ) ) ; if ( currentValue == null || ! currentValue . value . equals ( description ) ) { newDescriptions . put ( description . getLanguageCode ( ) , new NameWithUpdate ( description , true ) ) ; } } } | Adds descriptions to the item . |
24,627 | protected void processLabels ( List < MonolingualTextValue > labels ) { for ( MonolingualTextValue label : labels ) { String lang = label . getLanguageCode ( ) ; NameWithUpdate currentValue = newLabels . get ( lang ) ; if ( currentValue == null || ! currentValue . value . equals ( label ) ) { newLabels . put ( lang , new NameWithUpdate ( label , true ) ) ; AliasesWithUpdate currentAliases = newAliases . get ( lang ) ; if ( currentAliases != null && currentAliases . aliases . contains ( label ) ) { deleteAlias ( label ) ; } } } } | Adds labels to the item |
24,628 | @ JsonProperty ( "labels" ) @ JsonInclude ( Include . NON_EMPTY ) public Map < String , TermImpl > getLabelUpdates ( ) { return getMonolingualUpdatedValues ( newLabels ) ; } | Label accessor provided for JSON serialization only . |
24,629 | @ JsonProperty ( "descriptions" ) @ JsonInclude ( Include . NON_EMPTY ) public Map < String , TermImpl > getDescriptionUpdates ( ) { return getMonolingualUpdatedValues ( newDescriptions ) ; } | Description accessor provided for JSON serialization only . |
24,630 | @ JsonProperty ( "aliases" ) @ JsonInclude ( Include . NON_EMPTY ) public Map < String , List < TermImpl > > getAliasUpdates ( ) { Map < String , List < TermImpl > > updatedValues = new HashMap < > ( ) ; for ( Map . Entry < String , AliasesWithUpdate > entry : newAliases . entrySet ( ) ) { AliasesWithUpdate update = entry . getValue ( ) ; if ( ! update . write ) { continue ; } List < TermImpl > convertedAliases = new ArrayList < > ( ) ; for ( MonolingualTextValue alias : update . aliases ) { convertedAliases . add ( monolingualToJackson ( alias ) ) ; } updatedValues . put ( entry . getKey ( ) , convertedAliases ) ; } return updatedValues ; } | Alias accessor provided for JSON serialization only |
24,631 | protected Map < String , TermImpl > getMonolingualUpdatedValues ( Map < String , NameWithUpdate > updates ) { Map < String , TermImpl > updatedValues = new HashMap < > ( ) ; for ( NameWithUpdate update : updates . values ( ) ) { if ( ! update . write ) { continue ; } updatedValues . put ( update . value . getLanguageCode ( ) , monolingualToJackson ( update . value ) ) ; } return updatedValues ; } | Helper to format term updates as expected by the Wikibase API |
24,632 | protected RdfSerializer createRdfSerializer ( ) throws IOException { String outputDestinationFinal ; if ( this . outputDestination != null ) { outputDestinationFinal = this . outputDestination ; } else { outputDestinationFinal = "{PROJECT}" + this . taskName + "{DATE}" + ".nt" ; } OutputStream exportOutputStream = getOutputStream ( this . useStdOut , insertDumpInformation ( outputDestinationFinal ) , this . compressionType ) ; RdfSerializer serializer = new RdfSerializer ( RDFFormat . NTRIPLES , exportOutputStream , this . sites , PropertyRegister . getWikidataPropertyRegister ( ) ) ; serializer . setTasks ( this . tasks ) ; return serializer ; } | Creates a new RDF serializer based on the current configuration of this object . |
24,633 | private void setTasks ( String tasks ) { for ( String task : tasks . split ( "," ) ) { if ( KNOWN_TASKS . containsKey ( task ) ) { this . tasks |= KNOWN_TASKS . get ( task ) ; this . taskName += ( this . taskName . isEmpty ( ) ? "" : "-" ) + task ; } else { logger . warn ( "Unsupported RDF serialization task \"" + task + "\". Run without specifying any tasks for help." ) ; } } } | Sets the RDF serialization tasks based on the given string value . |
24,634 | void resizeArray ( int newArraySize ) { long [ ] newArray = new long [ newArraySize ] ; System . arraycopy ( this . arrayOfBits , 0 , newArray , 0 , Math . min ( this . arrayOfBits . length , newArraySize ) ) ; this . arrayOfBits = newArray ; } | Resizes the array that represents this bit vector . |
24,635 | public ItemDocument updateStatements ( ItemIdValue itemIdValue , List < Statement > addStatements , List < Statement > deleteStatements , String summary ) throws MediaWikiApiErrorException , IOException { ItemDocument currentDocument = ( ItemDocument ) this . wikibaseDataFetcher . getEntityDocument ( itemIdValue . getId ( ) ) ; return updateStatements ( currentDocument , addStatements , deleteStatements , summary ) ; } | Updates the statements of the item document identified by the given item id . The updates are computed with respect to the current data found online making sure that no redundant deletions or duplicate insertions happen . The references of duplicate statements will be merged . |
24,636 | public ItemDocument updateTermsStatements ( ItemIdValue itemIdValue , List < MonolingualTextValue > addLabels , List < MonolingualTextValue > addDescriptions , List < MonolingualTextValue > addAliases , List < MonolingualTextValue > deleteAliases , List < Statement > addStatements , List < Statement > deleteStatements , String summary ) throws MediaWikiApiErrorException , IOException { ItemDocument currentDocument = ( ItemDocument ) this . wikibaseDataFetcher . getEntityDocument ( itemIdValue . getId ( ) ) ; return updateTermsStatements ( currentDocument , addLabels , addDescriptions , addAliases , deleteAliases , addStatements , deleteStatements , summary ) ; } | Updates the terms and statements of the item document identified by the given item id . The updates are computed with respect to the current data found online making sure that no redundant deletions or duplicate insertions happen . The references of duplicate statements will be merged . The labels and aliases in a given language are kept distinct . |
24,637 | @ SuppressWarnings ( "unchecked" ) public < T extends TermedStatementDocument > T updateTermsStatements ( T currentDocument , List < MonolingualTextValue > addLabels , List < MonolingualTextValue > addDescriptions , List < MonolingualTextValue > addAliases , List < MonolingualTextValue > deleteAliases , List < Statement > addStatements , List < Statement > deleteStatements , String summary ) throws MediaWikiApiErrorException , IOException { TermStatementUpdate termStatementUpdate = new TermStatementUpdate ( currentDocument , addStatements , deleteStatements , addLabels , addDescriptions , addAliases , deleteAliases ) ; termStatementUpdate . setGuidGenerator ( guidGenerator ) ; return ( T ) termStatementUpdate . performEdit ( wbEditingAction , editAsBot , summary ) ; } | Updates the terms and statements of the current document . The updates are computed with respect to the current data in the document making sure that no redundant deletions or duplicate insertions happen . The references of duplicate statements will be merged . The labels and aliases in a given language are kept distinct . |
24,638 | public < T extends StatementDocument > void nullEdit ( ItemIdValue itemId ) throws IOException , MediaWikiApiErrorException { ItemDocument currentDocument = ( ItemDocument ) this . wikibaseDataFetcher . getEntityDocument ( itemId . getId ( ) ) ; nullEdit ( currentDocument ) ; } | Performs a null edit on an item . This has some effects on Wikibase such as refreshing the labels of the referred items in the UI . |
24,639 | public < T extends StatementDocument > void nullEdit ( PropertyIdValue propertyId ) throws IOException , MediaWikiApiErrorException { PropertyDocument currentDocument = ( PropertyDocument ) this . wikibaseDataFetcher . getEntityDocument ( propertyId . getId ( ) ) ; nullEdit ( currentDocument ) ; } | Performs a null edit on a property . This has some effects on Wikibase such as refreshing the labels of the referred items in the UI . |
24,640 | @ SuppressWarnings ( "unchecked" ) public < T extends StatementDocument > T nullEdit ( T currentDocument ) throws IOException , MediaWikiApiErrorException { StatementUpdate statementUpdate = new StatementUpdate ( currentDocument , Collections . < Statement > emptyList ( ) , Collections . < Statement > emptyList ( ) ) ; statementUpdate . setGuidGenerator ( guidGenerator ) ; return ( T ) this . wbEditingAction . wbEditEntity ( currentDocument . getEntityId ( ) . getId ( ) , null , null , null , statementUpdate . getJsonUpdateString ( ) , false , this . editAsBot , currentDocument . getRevisionId ( ) , null ) ; } | Performs a null edit on an entity . This has some effects on Wikibase such as refreshing the labels of the referred items in the UI . |
24,641 | public static void main ( String [ ] args ) throws IOException { ExampleHelpers . configureLogging ( ) ; JsonSerializationProcessor . printDocumentation ( ) ; JsonSerializationProcessor jsonSerializationProcessor = new JsonSerializationProcessor ( ) ; ExampleHelpers . processEntitiesFromWikidataDump ( jsonSerializationProcessor ) ; jsonSerializationProcessor . close ( ) ; } | Runs the example program . |
24,642 | public void close ( ) throws IOException { System . out . println ( "Serialized " + this . jsonSerializer . getEntityDocumentCount ( ) + " item documents to JSON file " + OUTPUT_FILE_NAME + "." ) ; this . jsonSerializer . close ( ) ; } | Closes the output . Should be called after the JSON serialization was finished . |
24,643 | private boolean includeDocument ( ItemDocument itemDocument ) { for ( StatementGroup sg : itemDocument . getStatementGroups ( ) ) { if ( ! "P19" . equals ( sg . getProperty ( ) . getId ( ) ) ) { continue ; } for ( Statement s : sg ) { if ( s . getMainSnak ( ) instanceof ValueSnak ) { Value v = s . getValue ( ) ; if ( v instanceof ItemIdValue && "Q1731" . equals ( ( ( ItemIdValue ) v ) . getId ( ) ) ) { return true ; } } } } return false ; } | Returns true if the given document should be included in the serialization . |
24,644 | public static String insertDumpInformation ( String pattern , String dateStamp , String project ) { if ( pattern == null ) { return null ; } else { return pattern . replace ( "{DATE}" , dateStamp ) . replace ( "{PROJECT}" , project ) ; } } | Inserts the information about the dateStamp of a dump and the project name into a pattern . |
24,645 | private List < DumpProcessingAction > handleArguments ( String [ ] args ) { CommandLine cmd ; CommandLineParser parser = new GnuParser ( ) ; try { cmd = parser . parse ( options , args ) ; } catch ( ParseException e ) { logger . error ( "Failed to parse arguments: " + e . getMessage ( ) ) ; return Collections . emptyList ( ) ; } if ( ( cmd . hasOption ( CMD_OPTION_HELP ) ) || ( args . length == 0 ) ) { return Collections . emptyList ( ) ; } List < DumpProcessingAction > configuration = new ArrayList < > ( ) ; handleGlobalArguments ( cmd ) ; if ( cmd . hasOption ( CMD_OPTION_ACTION ) ) { DumpProcessingAction action = handleActionArguments ( cmd ) ; if ( action != null ) { configuration . add ( action ) ; } } if ( cmd . hasOption ( CMD_OPTION_CONFIG_FILE ) ) { try { List < DumpProcessingAction > configFile = readConfigFile ( cmd . getOptionValue ( CMD_OPTION_CONFIG_FILE ) ) ; configuration . addAll ( configFile ) ; } catch ( IOException e ) { logger . error ( "Failed to read configuration file \"" + cmd . getOptionValue ( CMD_OPTION_CONFIG_FILE ) + "\": " + e . toString ( ) ) ; } } return configuration ; } | This function interprets the arguments of the main function . By doing this it will set flags for the dump generation . See in the help text for more specific information about the options . |
24,646 | private void handleGlobalArguments ( CommandLine cmd ) { if ( cmd . hasOption ( CMD_OPTION_DUMP_LOCATION ) ) { this . dumpDirectoryLocation = cmd . getOptionValue ( CMD_OPTION_DUMP_LOCATION ) ; } if ( cmd . hasOption ( CMD_OPTION_OFFLINE_MODE ) ) { this . offlineMode = true ; } if ( cmd . hasOption ( CMD_OPTION_QUIET ) ) { this . quiet = true ; } if ( cmd . hasOption ( CMD_OPTION_CREATE_REPORT ) ) { this . reportFilename = cmd . getOptionValue ( CMD_OPTION_CREATE_REPORT ) ; } if ( cmd . hasOption ( OPTION_FILTER_LANGUAGES ) ) { setLanguageFilters ( cmd . getOptionValue ( OPTION_FILTER_LANGUAGES ) ) ; } if ( cmd . hasOption ( OPTION_FILTER_SITES ) ) { setSiteFilters ( cmd . getOptionValue ( OPTION_FILTER_SITES ) ) ; } if ( cmd . hasOption ( OPTION_FILTER_PROPERTIES ) ) { setPropertyFilters ( cmd . getOptionValue ( OPTION_FILTER_PROPERTIES ) ) ; } if ( cmd . hasOption ( CMD_OPTION_LOCAL_DUMPFILE ) ) { this . inputDumpLocation = cmd . getOptionValue ( OPTION_LOCAL_DUMPFILE ) ; } } | Analyses the command - line arguments which are relevant for the serialization process in general . It fills out the class arguments with this data . |
24,647 | private void handleGlobalArguments ( Section section ) { for ( String key : section . keySet ( ) ) { switch ( key ) { case OPTION_OFFLINE_MODE : if ( section . get ( key ) . toLowerCase ( ) . equals ( "true" ) ) { this . offlineMode = true ; } break ; case OPTION_QUIET : if ( section . get ( key ) . toLowerCase ( ) . equals ( "true" ) ) { this . quiet = true ; } break ; case OPTION_CREATE_REPORT : this . reportFilename = section . get ( key ) ; break ; case OPTION_DUMP_LOCATION : this . dumpDirectoryLocation = section . get ( key ) ; break ; case OPTION_FILTER_LANGUAGES : setLanguageFilters ( section . get ( key ) ) ; break ; case OPTION_FILTER_SITES : setSiteFilters ( section . get ( key ) ) ; break ; case OPTION_FILTER_PROPERTIES : setPropertyFilters ( section . get ( key ) ) ; break ; case OPTION_LOCAL_DUMPFILE : this . inputDumpLocation = section . get ( key ) ; break ; default : logger . warn ( "Unrecognized option: " + key ) ; } } } | Analyses the content of the general section of an ini configuration file and fills out the class arguments with this data . |
24,648 | private void checkDuplicateStdOutOutput ( DumpProcessingAction newAction ) { if ( newAction . useStdOut ( ) ) { if ( this . quiet ) { logger . warn ( "Multiple actions are using stdout as output destination." ) ; } this . quiet = true ; } } | Checks if a newly created action wants to write output to stdout and logs a warning if other actions are doing the same . |
24,649 | private void setLanguageFilters ( String filters ) { this . filterLanguages = new HashSet < > ( ) ; if ( ! "-" . equals ( filters ) ) { Collections . addAll ( this . filterLanguages , filters . split ( "," ) ) ; } } | Sets the set of language filters based on the given string . |
24,650 | private void setSiteFilters ( String filters ) { this . filterSites = new HashSet < > ( ) ; if ( ! "-" . equals ( filters ) ) { Collections . addAll ( this . filterSites , filters . split ( "," ) ) ; } } | Sets the set of site filters based on the given string . |
24,651 | private void setPropertyFilters ( String filters ) { this . filterProperties = new HashSet < > ( ) ; if ( ! "-" . equals ( filters ) ) { for ( String pid : filters . split ( "," ) ) { this . filterProperties . add ( Datamodel . makeWikidataPropertyIdValue ( pid ) ) ; } } } | Sets the set of property filters based on the given string . |
24,652 | public void writeFinalResults ( ) { printStatus ( ) ; try ( PrintStream out = new PrintStream ( ExampleHelpers . openExampleFileOuputStream ( "gender-ratios.csv" ) ) ) { out . print ( "Site key,pages total,pages on humans,pages on humans with gender" ) ; for ( EntityIdValue gender : this . genderNamesList ) { out . print ( "," + this . genderNames . get ( gender ) + " (" + gender . getId ( ) + ")" ) ; } out . println ( ) ; List < SiteRecord > siteRecords = new ArrayList < > ( this . siteRecords . values ( ) ) ; Collections . sort ( siteRecords , new SiteRecordComparator ( ) ) ; for ( SiteRecord siteRecord : siteRecords ) { out . print ( siteRecord . siteKey + "," + siteRecord . pageCount + "," + siteRecord . humanPageCount + "," + siteRecord . humanGenderPageCount ) ; for ( EntityIdValue gender : this . genderNamesList ) { if ( siteRecord . genderCounts . containsKey ( gender ) ) { out . print ( "," + siteRecord . genderCounts . get ( gender ) ) ; } else { out . print ( ",0" ) ; } } out . println ( ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; } } | Writes the results of the processing to a CSV file . |
24,653 | public static void printDocumentation ( ) { System . out . println ( "********************************************************************" ) ; System . out . println ( "*** Wikidata Toolkit: GenderRatioProcessor" ) ; System . out . println ( "*** " ) ; System . out . println ( "*** This program will download and process dumps from Wikidata." ) ; System . out . println ( "*** It will compute the numbers of articles about humans across" ) ; System . out . println ( "*** Wikimedia projects, and in particular it will count the articles" ) ; System . out . println ( "*** for each sex/gender. Results will be stored in a CSV file." ) ; System . out . println ( "*** See source code for further details." ) ; System . out . println ( "********************************************************************" ) ; } | Prints some basic documentation about this program . |
24,654 | private boolean containsValue ( StatementGroup statementGroup , Value value ) { for ( Statement s : statementGroup ) { if ( value . equals ( s . getValue ( ) ) ) { return true ; } } return false ; } | Checks if the given group of statements contains the given value as the value of a main snak of some statement . |
24,655 | private void addNewGenderName ( EntityIdValue entityIdValue , String name ) { this . genderNames . put ( entityIdValue , name ) ; this . genderNamesList . add ( entityIdValue ) ; } | Adds a new gender item and an initial name . |
24,656 | private SiteRecord getSiteRecord ( String siteKey ) { SiteRecord siteRecord = this . siteRecords . get ( siteKey ) ; if ( siteRecord == null ) { siteRecord = new SiteRecord ( siteKey ) ; this . siteRecords . put ( siteKey , siteRecord ) ; } return siteRecord ; } | Returns a site record for the site of the given name creating a new one if it does not exist yet . |
24,657 | private void countGender ( EntityIdValue gender , SiteRecord siteRecord ) { Integer curValue = siteRecord . genderCounts . get ( gender ) ; if ( curValue == null ) { siteRecord . genderCounts . put ( gender , 1 ) ; } else { siteRecord . genderCounts . put ( gender , curValue + 1 ) ; } } | Counts a single page of the specified gender . If this is the first page of that gender on this site a suitable key is added to the list of the site s genders . |
24,658 | public List < WbSearchEntitiesResult > wbSearchEntities ( String search , String language , Boolean strictLanguage , String type , Long limit , Long offset ) throws MediaWikiApiErrorException { Map < String , String > parameters = new HashMap < String , String > ( ) ; parameters . put ( ApiConnection . PARAM_ACTION , "wbsearchentities" ) ; if ( search != null ) { parameters . put ( "search" , search ) ; } else { throw new IllegalArgumentException ( "Search parameter must be specified for this action." ) ; } if ( language != null ) { parameters . put ( "language" , language ) ; } else { throw new IllegalArgumentException ( "Language parameter must be specified for this action." ) ; } if ( strictLanguage != null ) { parameters . put ( "strictlanguage" , Boolean . toString ( strictLanguage ) ) ; } if ( type != null ) { parameters . put ( "type" , type ) ; } if ( limit != null ) { parameters . put ( "limit" , Long . toString ( limit ) ) ; } if ( offset != null ) { parameters . put ( "continue" , Long . toString ( offset ) ) ; } List < WbSearchEntitiesResult > results = new ArrayList < > ( ) ; try { JsonNode root = this . connection . sendJsonRequest ( "POST" , parameters ) ; JsonNode entities = root . path ( "search" ) ; for ( JsonNode entityNode : entities ) { try { JacksonWbSearchEntitiesResult ed = mapper . treeToValue ( entityNode , JacksonWbSearchEntitiesResult . class ) ; results . add ( ed ) ; } catch ( JsonProcessingException e ) { LOGGER . error ( "Error when reading JSON for entity " + entityNode . path ( "id" ) . asText ( "UNKNOWN" ) + ": " + e . toString ( ) ) ; } } } catch ( IOException e ) { LOGGER . error ( "Could not retrive data: " + e . toString ( ) ) ; } return results ; } | Executes the API action wbsearchentity for the given parameters . Searches for entities using labels and aliases . Returns a label and description for the entity in the user language if possible . Returns details of the matched term . 
The matched term text is also present in the aliases key if different from the display label . |
24,659 | public ItemDocumentBuilder withSiteLink ( String title , String siteKey , ItemIdValue ... badges ) { withSiteLink ( factory . getSiteLink ( title , siteKey , Arrays . asList ( badges ) ) ) ; return this ; } | Adds an additional site link to the constructed document . |
24,660 | public Resource addReference ( Reference reference ) { Resource resource = this . rdfWriter . getUri ( Vocabulary . getReferenceUri ( reference ) ) ; this . referenceQueue . add ( reference ) ; this . referenceSubjectQueue . add ( resource ) ; return resource ; } | Adds the given reference to the list of references that should still be serialized and returns the RDF resource that will be used as a subject . |
24,661 | public void writeReferences ( ) throws RDFHandlerException { Iterator < Reference > referenceIterator = this . referenceQueue . iterator ( ) ; for ( Resource resource : this . referenceSubjectQueue ) { final Reference reference = referenceIterator . next ( ) ; if ( this . declaredReferences . add ( resource ) ) { writeReference ( reference , resource ) ; } } this . referenceSubjectQueue . clear ( ) ; this . referenceQueue . clear ( ) ; this . snakRdfConverter . writeAuxiliaryTriples ( ) ; } | Writes references that have been added recently . Auxiliary triples that are generated for serializing snaks in references will be written right afterwards . This will also trigger any other auxiliary triples to be written that the snak converter object may have buffered . |
24,662 | public Sites getSitesInformation ( ) throws IOException { MwDumpFile sitesTableDump = getMostRecentDump ( DumpContentType . SITES ) ; if ( sitesTableDump == null ) { return null ; } MwSitesDumpFileProcessor sitesDumpFileProcessor = new MwSitesDumpFileProcessor ( ) ; sitesDumpFileProcessor . processDumpFileContents ( sitesTableDump . getDumpFileStream ( ) , sitesTableDump ) ; return sitesDumpFileProcessor . getSites ( ) ; } | Processes the most recent dump of the sites table to extract information about registered sites . |
24,663 | public void processMostRecentDump ( DumpContentType dumpContentType , MwDumpFileProcessor dumpFileProcessor ) { MwDumpFile dumpFile = getMostRecentDump ( dumpContentType ) ; if ( dumpFile != null ) { processDumpFile ( dumpFile , dumpFileProcessor ) ; } } | Processes the most recent dump of the given type using the given dump processor . |
24,664 | void processDumpFile ( MwDumpFile dumpFile , MwDumpFileProcessor dumpFileProcessor ) { try ( InputStream inputStream = dumpFile . getDumpFileStream ( ) ) { dumpFileProcessor . processDumpFileContents ( inputStream , dumpFile ) ; } catch ( FileAlreadyExistsException e ) { logger . error ( "Dump file " + dumpFile . toString ( ) + " could not be processed since file " + e . getFile ( ) + " already exists. Try deleting the file or dumpfile directory to attempt a new download." ) ; } catch ( IOException e ) { logger . error ( "Dump file " + dumpFile . toString ( ) + " could not be processed: " + e . toString ( ) ) ; } } | Processes one dump file with the given dump file processor handling exceptions appropriately . |
24,665 | public MwDumpFile findMostRecentDump ( DumpContentType dumpContentType ) { List < MwDumpFile > dumps = findAllDumps ( dumpContentType ) ; for ( MwDumpFile dump : dumps ) { if ( dump . isAvailable ( ) ) { return dump ; } } return null ; } | Finds the most recent dump of the given type that is actually available . |
24,666 | List < MwDumpFile > mergeDumpLists ( List < MwDumpFile > localDumps , List < MwDumpFile > onlineDumps ) { List < MwDumpFile > result = new ArrayList < > ( localDumps ) ; HashSet < String > localDateStamps = new HashSet < > ( ) ; for ( MwDumpFile dumpFile : localDumps ) { localDateStamps . add ( dumpFile . getDateStamp ( ) ) ; } for ( MwDumpFile dumpFile : onlineDumps ) { if ( ! localDateStamps . contains ( dumpFile . getDateStamp ( ) ) ) { result . add ( dumpFile ) ; } } result . sort ( Collections . reverseOrder ( new MwDumpFile . DateComparator ( ) ) ) ; return result ; } | Merges a list of local and online dumps . For dumps available both online and locally only the local version is included . The list is order with most recent dump date first . |
24,667 | List < MwDumpFile > findDumpsLocally ( DumpContentType dumpContentType ) { String directoryPattern = WmfDumpFile . getDumpFileDirectoryName ( dumpContentType , "*" ) ; List < String > dumpFileDirectories ; try { dumpFileDirectories = this . dumpfileDirectoryManager . getSubdirectories ( directoryPattern ) ; } catch ( IOException e ) { logger . error ( "Unable to access dump directory: " + e . toString ( ) ) ; return Collections . emptyList ( ) ; } List < MwDumpFile > result = new ArrayList < > ( ) ; for ( String directory : dumpFileDirectories ) { String dateStamp = WmfDumpFile . getDateStampFromDumpFileDirectoryName ( dumpContentType , directory ) ; if ( dateStamp . matches ( WmfDumpFileManager . DATE_STAMP_PATTERN ) ) { WmfLocalDumpFile dumpFile = new WmfLocalDumpFile ( dateStamp , this . projectName , dumpfileDirectoryManager , dumpContentType ) ; if ( dumpFile . isAvailable ( ) ) { result . add ( dumpFile ) ; } else { logger . error ( "Incomplete local dump file data. Maybe delete " + dumpFile . getDumpfileDirectory ( ) + " to attempt fresh download." ) ; } } } result . sort ( Collections . reverseOrder ( new MwDumpFile . DateComparator ( ) ) ) ; logger . info ( "Found " + result . size ( ) + " local dumps of type " + dumpContentType + ": " + result ) ; return result ; } | Finds out which dump files of the given type have been downloaded already . The result is a list of objects that describe the available dump files in descending order by their date . Not all of the dumps included might be actually available . |
24,668 | List < MwDumpFile > findDumpsOnline ( DumpContentType dumpContentType ) { List < String > dumpFileDates = findDumpDatesOnline ( dumpContentType ) ; List < MwDumpFile > result = new ArrayList < > ( ) ; for ( String dateStamp : dumpFileDates ) { if ( dumpContentType == DumpContentType . DAILY ) { result . add ( new WmfOnlineDailyDumpFile ( dateStamp , this . projectName , this . webResourceFetcher , this . dumpfileDirectoryManager ) ) ; } else if ( dumpContentType == DumpContentType . JSON ) { result . add ( new JsonOnlineDumpFile ( dateStamp , this . projectName , this . webResourceFetcher , this . dumpfileDirectoryManager ) ) ; } else { result . add ( new WmfOnlineStandardDumpFile ( dateStamp , this . projectName , this . webResourceFetcher , this . dumpfileDirectoryManager , dumpContentType ) ) ; } } logger . info ( "Found " + result . size ( ) + " online dumps of type " + dumpContentType + ": " + result ) ; return result ; } | Finds out which dump files of the given type are available for download . The result is a list of objects that describe the available dump files in descending order by their date . Not all of the dumps included might be actually available . |
24,669 | public void processItemDocument ( ItemDocument itemDocument ) { this . countItems ++ ; if ( this . countItems < 10 ) { System . out . println ( itemDocument ) ; } else if ( this . countItems == 10 ) { System . out . println ( "*** I won't print any further items.\n" + "*** We will never finish if we print all the items.\n" + "*** Maybe remove this debug output altogether." ) ; } } | Processes one item document . This is often the main workhorse that gathers the data you are interested in . You can modify this code as you wish . |
24,670 | static EntityIdValue fromId ( String id , String siteIri ) { switch ( guessEntityTypeFromId ( id ) ) { case EntityIdValueImpl . JSON_ENTITY_TYPE_ITEM : return new ItemIdValueImpl ( id , siteIri ) ; case EntityIdValueImpl . JSON_ENTITY_TYPE_PROPERTY : return new PropertyIdValueImpl ( id , siteIri ) ; case EntityIdValueImpl . JSON_ENTITY_TYPE_LEXEME : return new LexemeIdValueImpl ( id , siteIri ) ; case EntityIdValueImpl . JSON_ENTITY_TYPE_FORM : return new FormIdValueImpl ( id , siteIri ) ; case EntityIdValueImpl . JSON_ENTITY_TYPE_SENSE : return new SenseIdValueImpl ( id , siteIri ) ; default : throw new IllegalArgumentException ( "Entity id \"" + id + "\" is not supported." ) ; } } | Parses an entity id ( item , property , lexeme , form , or sense ) and returns the corresponding typed id value . |
24,671 | static String guessEntityTypeFromId ( String id ) { if ( id . isEmpty ( ) ) { throw new IllegalArgumentException ( "Entity ids should not be empty." ) ; } switch ( id . charAt ( 0 ) ) { case 'L' : if ( id . contains ( "-F" ) ) { return JSON_ENTITY_TYPE_FORM ; } else if ( id . contains ( "-S" ) ) { return JSON_ENTITY_TYPE_SENSE ; } else { return JSON_ENTITY_TYPE_LEXEME ; } case 'P' : return JSON_ENTITY_TYPE_PROPERTY ; case 'Q' : return JSON_ENTITY_TYPE_ITEM ; default : throw new IllegalArgumentException ( "Entity id \"" + id + "\" is not supported." ) ; } } | Returns the entity type of the id , like item or property |
24,672 | public void close ( ) { logger . info ( "Finished processing." ) ; this . timer . stop ( ) ; this . lastSeconds = ( int ) ( timer . getTotalWallTime ( ) / 1000000000 ) ; printStatus ( ) ; } | Stops the processing and prints the final time . |
24,673 | private void countEntity ( ) { if ( ! this . timer . isRunning ( ) ) { startTimer ( ) ; } this . entityCount ++ ; if ( this . entityCount % 100 == 0 ) { timer . stop ( ) ; int seconds = ( int ) ( timer . getTotalWallTime ( ) / 1000000000 ) ; if ( seconds >= this . lastSeconds + this . reportInterval ) { this . lastSeconds = seconds ; printStatus ( ) ; if ( this . timeout > 0 && seconds > this . timeout ) { logger . info ( "Timeout. Aborting processing." ) ; throw new TimeoutException ( ) ; } } timer . start ( ) ; } } | Counts one entity . Every once in a while the current time is checked so as to print an intermediate report roughly every ten seconds . |
24,674 | public static String implodeObjects ( Iterable < ? > objects ) { StringBuilder builder = new StringBuilder ( ) ; boolean first = true ; for ( Object o : objects ) { if ( first ) { first = false ; } else { builder . append ( "|" ) ; } builder . append ( o . toString ( ) ) ; } return builder . toString ( ) ; } | Builds a string that serializes a list of objects separated by the pipe character . The toString methods are used to turn objects into strings . This operation is commonly used to build parameter lists for API requests . |
24,675 | public void logout ( ) throws IOException { if ( this . loggedIn ) { Map < String , String > params = new HashMap < > ( ) ; params . put ( "action" , "logout" ) ; params . put ( "format" , "json" ) ; try { sendJsonRequest ( "POST" , params ) ; } catch ( MediaWikiApiErrorException e ) { throw new IOException ( e . getMessage ( ) , e ) ; } this . loggedIn = false ; this . username = "" ; this . password = "" ; } } | Logs the current user out . |
24,676 | String fetchToken ( String tokenType ) throws IOException , MediaWikiApiErrorException { Map < String , String > params = new HashMap < > ( ) ; params . put ( ApiConnection . PARAM_ACTION , "query" ) ; params . put ( "meta" , "tokens" ) ; params . put ( "type" , tokenType ) ; try { JsonNode root = this . sendJsonRequest ( "POST" , params ) ; return root . path ( "query" ) . path ( "tokens" ) . path ( tokenType + "token" ) . textValue ( ) ; } catch ( IOException | MediaWikiApiErrorException e ) { logger . error ( "Error when trying to fetch token: " + e . toString ( ) ) ; } return null ; } | Executes a API query action to get a new token . The method only executes the action without doing any checks first . If errors occur they are logged and null is returned . |
24,677 | public InputStream sendRequest ( String requestMethod , Map < String , String > parameters ) throws IOException { String queryString = getQueryString ( parameters ) ; URL url = new URL ( this . apiBaseUrl ) ; HttpURLConnection connection = ( HttpURLConnection ) WebResourceFetcherImpl . getUrlConnection ( url ) ; setupConnection ( requestMethod , queryString , connection ) ; OutputStreamWriter writer = new OutputStreamWriter ( connection . getOutputStream ( ) ) ; writer . write ( queryString ) ; writer . flush ( ) ; writer . close ( ) ; int rc = connection . getResponseCode ( ) ; if ( rc != 200 ) { logger . warn ( "Error: API request returned response code " + rc ) ; } InputStream iStream = connection . getInputStream ( ) ; fillCookies ( connection . getHeaderFields ( ) ) ; return iStream ; } | Sends a request to the API with the given parameters and the given request method and returns the result as an input stream . It automatically fills the cookie map with cookies in the result header after the request . |
24,678 | List < String > getWarnings ( JsonNode root ) { ArrayList < String > warnings = new ArrayList < > ( ) ; if ( root . has ( "warnings" ) ) { JsonNode warningNode = root . path ( "warnings" ) ; Iterator < Map . Entry < String , JsonNode > > moduleIterator = warningNode . fields ( ) ; while ( moduleIterator . hasNext ( ) ) { Map . Entry < String , JsonNode > moduleNode = moduleIterator . next ( ) ; Iterator < JsonNode > moduleOutputIterator = moduleNode . getValue ( ) . elements ( ) ; while ( moduleOutputIterator . hasNext ( ) ) { JsonNode moduleOutputNode = moduleOutputIterator . next ( ) ; if ( moduleOutputNode . isTextual ( ) ) { warnings . add ( "[" + moduleNode . getKey ( ) + "]: " + moduleOutputNode . textValue ( ) ) ; } else if ( moduleOutputNode . isArray ( ) ) { Iterator < JsonNode > messageIterator = moduleOutputNode . elements ( ) ; while ( messageIterator . hasNext ( ) ) { JsonNode messageNode = messageIterator . next ( ) ; warnings . add ( "[" + moduleNode . getKey ( ) + "]: " + messageNode . path ( "html" ) . path ( "*" ) . asText ( messageNode . toString ( ) ) ) ; } } else { warnings . add ( "[" + moduleNode . getKey ( ) + "]: " + "Warning was not understood. Please report this to Wikidata Toolkit. JSON source: " + moduleOutputNode . toString ( ) ) ; } } } } return warnings ; } | Extracts warnings that are returned in an API response . |
24,679 | String getQueryString ( Map < String , String > params ) { StringBuilder builder = new StringBuilder ( ) ; try { boolean first = true ; for ( Map . Entry < String , String > entry : params . entrySet ( ) ) { if ( first ) { first = false ; } else { builder . append ( "&" ) ; } builder . append ( URLEncoder . encode ( entry . getKey ( ) , "UTF-8" ) ) ; builder . append ( "=" ) ; builder . append ( URLEncoder . encode ( entry . getValue ( ) , "UTF-8" ) ) ; } } catch ( UnsupportedEncodingException e ) { throw new RuntimeException ( "Your Java version does not support UTF-8 encoding." ) ; } return builder . toString ( ) ; } | Returns the query string of a URL from a parameter list . |
24,680 | public static < T > T load ( String resourcePath , BeanSpec spec ) { return load ( resourcePath , spec , false ) ; } | Load resource content from given path into variable with type specified by spec . |
24,681 | protected void printCenter ( String format , Object ... args ) { String text = S . fmt ( format , args ) ; info ( S . center ( text , 80 ) ) ; } | Print formatted string in the center of 80 chars line left and right padded . |
24,682 | protected void printCenterWithLead ( String lead , String format , Object ... args ) { String text = S . fmt ( format , args ) ; int len = 80 - lead . length ( ) ; info ( S . concat ( lead , S . center ( text , len ) ) ) ; } | Print the lead string followed by centered formatted string . The whole length of the line is 80 chars . |
24,683 | public String convert ( BufferedImage image , boolean favicon ) { statsArray = new int [ 12 ] ; dStart = System . nanoTime ( ) ; image = scale ( image , favicon ) ; StringBuilder sb = new StringBuilder ( ( image . getWidth ( ) + 1 ) * image . getHeight ( ) ) ; for ( int y = 0 ; y < image . getHeight ( ) ; y ++ ) { if ( sb . length ( ) != 0 ) sb . append ( "\n" ) ; for ( int x = 0 ; x < image . getWidth ( ) ; x ++ ) { Color pixelColor = new Color ( image . getRGB ( x , y ) , true ) ; int alpha = pixelColor . getAlpha ( ) ; boolean isTransient = alpha < 0.1 ; double gValue = isTransient ? 250 : ( ( double ) pixelColor . getRed ( ) * 0.2989 + ( double ) pixelColor . getBlue ( ) * 0.5870 + ( double ) pixelColor . getGreen ( ) * 0.1140 ) / ( ( double ) alpha / ( double ) 250 ) ; final char s = gValue < 130 ? darkGrayScaleMap ( gValue ) : lightGrayScaleMap ( gValue ) ; sb . append ( s ) ; } } imgArray = sb . toString ( ) . toCharArray ( ) ; dEnd = System . nanoTime ( ) ; return sb . toString ( ) ; } | The main conversion method . |
24,684 | private static BufferedImage scale ( BufferedImage imageToScale , int dWidth , int dHeight , double fWidth , double fHeight ) { BufferedImage dbi = null ; int imageType = imageToScale . getType ( ) ; if ( imageToScale != null ) { dbi = new BufferedImage ( dWidth , dHeight , imageType ) ; Graphics2D g = dbi . createGraphics ( ) ; AffineTransform at = AffineTransform . getScaleInstance ( fWidth , fHeight ) ; g . drawRenderedImage ( imageToScale , at ) ; } return dbi ; } | Image scale method |
24,685 | public EventBus emit ( Enum < ? > event , Object ... args ) { return _emitWithOnceBus ( eventContext ( event , args ) ) ; } | Emit an enum event with parameters supplied . |
24,686 | public EventBus emit ( String event , Object ... args ) { return _emitWithOnceBus ( eventContext ( event , args ) ) ; } | Emit a string event with parameters . |
24,687 | public EventBus emit ( EventObject event , Object ... args ) { return _emitWithOnceBus ( eventContext ( event , args ) ) ; } | Emit an event object with parameters . |
24,688 | public EventBus emitAsync ( Enum < ? > event , Object ... args ) { return _emitWithOnceBus ( eventContextAsync ( event , args ) ) ; } | Emit an enum event with parameters and force all listeners to be called asynchronously . |
24,689 | public EventBus emitAsync ( String event , Object ... args ) { return _emitWithOnceBus ( eventContextAsync ( event , args ) ) ; } | Emit a string event with parameters and force all listeners to be called asynchronously . |
24,690 | public EventBus emitAsync ( EventObject event , Object ... args ) { return _emitWithOnceBus ( eventContextAsync ( event , args ) ) ; } | Emit a event object with parameters and force all listeners to be called asynchronously . |
24,691 | public EventBus emitSync ( Enum < ? > event , Object ... args ) { return _emitWithOnceBus ( eventContextSync ( event , args ) ) ; } | Emit an enum event with parameters and force all listener to be called synchronously . |
24,692 | public EventBus emitSync ( String event , Object ... args ) { return _emitWithOnceBus ( eventContextSync ( event , args ) ) ; } | Emit a string event with parameters and force all listener to be called synchronously . |
24,693 | public EventBus emitSync ( EventObject event , Object ... args ) { return _emitWithOnceBus ( eventContextSync ( event , args ) ) ; } | Emit a event object with parameters and force all listeners to be called synchronously . |
24,694 | public static String common ( ) { String common = SysProps . get ( KEY_COMMON_CONF_TAG ) ; if ( S . blank ( common ) ) { common = "common" ; } return common ; } | Return the common configuration set name . By default it is common |
24,695 | public static String confSetName ( ) { String profile = SysProps . get ( AppConfigKey . PROFILE . key ( ) ) ; if ( S . blank ( profile ) ) { profile = Act . mode ( ) . name ( ) . toLowerCase ( ) ; } return profile ; } | Return the name of the current conf set |
24,696 | private static Map < String , Object > processConf ( Map < String , ? > conf ) { Map < String , Object > m = new HashMap < String , Object > ( conf . size ( ) ) ; for ( String s : conf . keySet ( ) ) { Object o = conf . get ( s ) ; if ( s . startsWith ( "act." ) ) s = s . substring ( 4 ) ; m . put ( s , o ) ; m . put ( Config . canonical ( s ) , o ) ; } return m ; } | trim act . from conf keys |
24,697 | public File curDir ( ) { File file = session ( ) . attribute ( ATTR_PWD ) ; if ( null == file ) { file = new File ( System . getProperty ( "user.dir" ) ) ; session ( ) . attribute ( ATTR_PWD , file ) ; } return file ; } | Return the current working directory |
24,698 | private String getRowLineBuf ( int colCount , List < Integer > colMaxLenList , String [ ] [ ] data ) { S . Buffer rowBuilder = S . buffer ( ) ; int colWidth ; for ( int i = 0 ; i < colCount ; i ++ ) { colWidth = colMaxLenList . get ( i ) + 3 ; for ( int j = 0 ; j < colWidth ; j ++ ) { if ( j == 0 ) { rowBuilder . append ( "+" ) ; } else if ( ( i + 1 == colCount && j + 1 == colWidth ) ) { rowBuilder . append ( "-+" ) ; } else { rowBuilder . append ( "-" ) ; } } } return rowBuilder . append ( "\n" ) . toString ( ) ; } | Each string item rendering requires the border and a space on both sides . |
24,699 | public byte [ ] toByteArray ( ) { try { ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; ObjectOutputStream oos = new ObjectOutputStream ( baos ) ; oos . writeObject ( this ) ; return baos . toByteArray ( ) ; } catch ( IOException e ) { throw E . ioException ( e ) ; } } | Serialize this AppDescriptor and output byte array |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.