idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
147,900
/**
 * Gets read-only metadata (max version, current version, or storage format)
 * for the given stores from each of the given nodes and prints it to stdout.
 * Invalid metadata keys are reported and skipped.
 *
 * @param adminClient admin client used to query the cluster
 * @param nodeIds ids of the nodes to query
 * @param storeNames names of the read-only stores to query
 * @param metaKeys read-only metadata keys to fetch
 * @throws IOException if an admin operation fails
 */
public static void doMetaGetRO(AdminClient adminClient,
                               Collection<Integer> nodeIds,
                               List<String> storeNames,
                               List<String> metaKeys) throws IOException {
    for (String key : metaKeys) {
        System.out.println("Metadata: " + key);
        if (!key.equals(KEY_MAX_VERSION) && !key.equals(KEY_CURRENT_VERSION)
                && !key.equals(KEY_STORAGE_FORMAT)) {
            System.out.println(" Invalid read-only metadata key: " + key);
        } else {
            for (Integer nodeId : nodeIds) {
                String hostName = adminClient.getAdminClientCluster()
                                             .getNodeById(nodeId)
                                             .getHost();
                System.out.println(" Node: " + hostName + ":" + nodeId);
                // All three branches fetch a store -> value map and print it
                // the same way; the duplicated loop lives in one helper now.
                if (key.equals(KEY_MAX_VERSION)) {
                    printStoreValueMap(adminClient.readonlyOps.getROMaxVersion(nodeId, storeNames));
                } else if (key.equals(KEY_CURRENT_VERSION)) {
                    printStoreValueMap(adminClient.readonlyOps.getROCurrentVersion(nodeId, storeNames));
                } else if (key.equals(KEY_STORAGE_FORMAT)) {
                    printStoreValueMap(adminClient.readonlyOps.getROStorageFormat(nodeId, storeNames));
                }
            }
        }
        System.out.println();
    }
}

/** Prints each store name and its associated metadata value, one per line. */
private static void printStoreValueMap(Map<String, ?> storeToValue) {
    for (Map.Entry<String, ?> entry : storeToValue.entrySet()) {
        System.out.println(" " + entry.getKey() + ":" + entry.getValue());
    }
}
Gets read - only metadata .
492
7
147,901
/**
 * Compares old and new store definitions and bumps the metadata version on
 * all cluster nodes for every store that was added, removed, or modified.
 *
 * @param adminClient admin client used to reach the cluster
 * @param oldStoreDefs store definitions before the change
 * @param newStoreDefs store definitions after the change
 */
public static void doMetaUpdateVersionsOnStores(AdminClient adminClient,
                                                List<StoreDefinition> oldStoreDefs,
                                                List<StoreDefinition> newStoreDefs) {
    Set<String> storeNamesUnion = new HashSet<String>();
    Map<String, StoreDefinition> oldStoreDefinitionMap = new HashMap<String, StoreDefinition>();
    Map<String, StoreDefinition> newStoreDefinitionMap = new HashMap<String, StoreDefinition>();
    List<String> storesChanged = new ArrayList<String>();

    // Index both definition lists by store name, collecting every name seen.
    for (StoreDefinition storeDef : oldStoreDefs) {
        String storeName = storeDef.getName();
        storeNamesUnion.add(storeName);
        oldStoreDefinitionMap.put(storeName, storeDef);
    }
    for (StoreDefinition storeDef : newStoreDefs) {
        String storeName = storeDef.getName();
        storeNamesUnion.add(storeName);
        newStoreDefinitionMap.put(storeName, storeDef);
    }

    // A store changed if it exists on only one side, or both sides disagree.
    for (String storeName : storeNamesUnion) {
        StoreDefinition oldStoreDef = oldStoreDefinitionMap.get(storeName);
        StoreDefinition newStoreDef = newStoreDefinitionMap.get(storeName);
        if (oldStoreDef == null && newStoreDef != null
                || oldStoreDef != null && newStoreDef == null
                || oldStoreDef != null && newStoreDef != null && !oldStoreDef.equals(newStoreDef)) {
            storesChanged.add(storeName);
        }
    }

    System.out.println("Updating metadata version for the following stores: " + storesChanged);
    try {
        adminClient.metadataMgmtOps.updateMetadataversion(
                adminClient.getAdminClientCluster().getNodeIds(), storesChanged);
    } catch (Exception e) {
        // Bug fix: the original swallowed the failure cause entirely, leaving
        // no way to diagnose the error. Surface the message and stack trace.
        System.err.println("Error while updating metadata version for the specified store: "
                + e.getMessage());
        e.printStackTrace();
    }
}
Updates metadata versions on stores .
413
7
147,902
public static void executeCommand ( String [ ] args ) throws IOException { OptionParser parser = getParser ( ) ; String url = null ; Boolean confirm = false ; // parse command-line input OptionSet options = parser . parse ( args ) ; if ( options . has ( AdminParserUtils . OPT_HELP ) ) { printHelp ( System . out ) ; return ; } AdminParserUtils . checkRequired ( options , AdminParserUtils . OPT_URL ) ; url = ( String ) options . valueOf ( AdminParserUtils . OPT_URL ) ; if ( options . has ( AdminParserUtils . OPT_CONFIRM ) ) { confirm = true ; } // print summary System . out . println ( "Synchronize metadata versions across all nodes" ) ; System . out . println ( "Location:" ) ; System . out . println ( " bootstrap url = " + url ) ; System . out . println ( " node = all nodes" ) ; AdminClient adminClient = AdminToolUtils . getAdminClient ( url ) ; AdminToolUtils . assertServerNotInRebalancingState ( adminClient ) ; Versioned < Properties > versionedProps = mergeAllVersions ( adminClient ) ; printVersions ( versionedProps ) ; // execute command if ( ! AdminToolUtils . askConfirm ( confirm , "do you want to synchronize metadata versions to all node" ) ) return ; adminClient . metadataMgmtOps . setMetadataVersion ( versionedProps ) ; }
Parses command - line and synchronizes metadata versions across all nodes .
324
15
147,903
public static void executeCommand ( String [ ] args ) throws IOException { OptionParser parser = getParser ( ) ; // declare parameters String url = null ; // parse command-line input OptionSet options = parser . parse ( args ) ; if ( options . has ( AdminParserUtils . OPT_HELP ) ) { printHelp ( System . out ) ; return ; } // check required options and/or conflicting options AdminParserUtils . checkRequired ( options , AdminParserUtils . OPT_URL ) ; // load parameters url = ( String ) options . valueOf ( AdminParserUtils . OPT_URL ) ; // execute command AdminClient adminClient = AdminToolUtils . getAdminClient ( url ) ; doMetaCheckVersion ( adminClient ) ; }
Parses command - line and verifies metadata versions on all the cluster nodes
161
16
147,904
/**
 * Determines which partition on the local node hosts the given key,
 * failing fast if the store instance cannot resolve one.
 *
 * @param key the raw key bytes
 * @return the hosting partition id (never null)
 */
private Integer getKeyPartitionId(byte[] key) {
    final Integer partitionId = storeInstance.getNodesPartitionIdForKey(nodeId, key);
    Utils.notNull(partitionId);
    return partitionId;
}
Given the key figures out which partition on the local node hosts the key .
57
15
147,905
/**
 * Decides whether an entry should be streamed. In normal mode a key is
 * accepted when it is needed by this fetch; in orphan mode a key is accepted
 * exactly when it does NOT belong to this node per the routing plan.
 *
 * @param key the raw key bytes
 * @return true if the entry should be included
 */
protected boolean isItemAccepted(byte[] key) {
    if (fetchOrphaned) {
        return !StoreRoutingPlan.checkKeyBelongsToNode(key, nodeId, initialCluster, storeDef);
    }
    return isKeyNeeded(key);
}
Determines if entry is accepted . For normal usage this means confirming that the key is needed . For orphan usage this simply means confirming the key belongs to the node .
93
34
147,906
/**
 * Accounts for a key having been fetched: bumps the global fetch counter,
 * reports to the streaming stats if present, and, when per-partition limits
 * are enabled, bumps the per-partition counter and retires the partition
 * from the to-fetch set once its quota is reached.
 *
 * @param key the raw key bytes that were just fetched
 */
protected void accountForFetchedKey(byte[] key) {
    fetched++;
    if (streamStats != null) {
        streamStats.reportStreamingFetch(operation);
    }
    // recordsPerPartition <= 0 means "no per-partition limit": nothing to track.
    if (recordsPerPartition <= 0) {
        return;
    }
    Integer keyPartitionId = getKeyPartitionId(key);
    Long partitionFetch = partitionFetches.get(keyPartitionId);
    Utils.notNull(partitionFetch);
    partitionFetch++;
    partitionFetches.put(keyPartitionId, partitionFetch);
    // NOTE(review): if recordsPerPartition is a boxed Long, the '==' below
    // compares references, not values — confirm it is a primitive long/int.
    if (partitionFetch == recordsPerPartition) {
        if (partitionsToFetch.contains(keyPartitionId)) {
            partitionsToFetch.remove(keyPartitionId);
        } else {
            logger.warn("Partitions to fetch did not contain expected partition ID: "
                    + keyPartitionId);
        }
    } else if (partitionFetch > recordsPerPartition) {
        logger.warn("Partition fetch count larger than expected for partition ID "
                + keyPartitionId + " : " + partitionFetch);
    }
}
Account for key being fetched .
235
7
147,907
/**
 * Reports whether the stream handler should keep WRITING or is COMPLETE:
 * the fetch is complete once the key iterator is exhausted or every
 * partition quota has been met. Completion is logged and reported.
 *
 * @param itemTag label for the kind of items being fetched (used in logs)
 * @return WRITING while there is more to fetch, COMPLETE otherwise
 */
protected StreamRequestHandlerState determineRequestHandlerState(String itemTag) {
    boolean moreToFetch = keyIterator.hasNext() && !fetchedEnoughForAllPartitions();
    if (!moreToFetch) {
        logger.info("Finished fetch " + itemTag + " for store '" + storageEngine.getName()
                + "' with partitions " + partitionIds);
        progressInfoMessage("Fetch " + itemTag + " (end of scan)");
        return StreamRequestHandlerState.COMPLETE;
    }
    return StreamRequestHandlerState.WRITING;
}
Determines if still WRITING or COMPLETE .
118
12
147,908
/**
 * Computes which of the multi-put values are obsolete with respect to what is
 * already in storage, using vector-clock comparison. The final list of
 * versions to keep is {@code valuesInStorage}, which is modified IN PLACE:
 * stored versions dominated by an incoming value are removed, and accepted
 * incoming values are appended.
 *
 * @param valuesInStorage versions currently stored; mutated into the final list
 * @param multiPutValues incoming versions to evaluate
 * @return the incoming values that turned out to be obsolete
 */
protected List<Versioned<V>> resolveAndConstructVersionsToPersist(List<Versioned<V>> valuesInStorage,
                                                                  List<Versioned<V>> multiPutValues) {
    List<Versioned<V>> obsoleteVals = new ArrayList<Versioned<V>>(multiPutValues.size());
    // Go over all the values and determine whether the version is acceptable
    for (Versioned<V> value : multiPutValues) {
        Iterator<Versioned<V>> iter = valuesInStorage.iterator();
        boolean obsolete = false;
        // Compare the current version with a set of accepted versions
        while (iter.hasNext()) {
            Versioned<V> curr = iter.next();
            Occurred occurred = value.getVersion().compare(curr.getVersion());
            if (occurred == Occurred.BEFORE) {
                // An existing version dominates the incoming one: reject it.
                obsolete = true;
                break;
            } else if (occurred == Occurred.AFTER) {
                // The incoming version dominates this stored one: drop it.
                iter.remove();
            }
        }
        if (obsolete) {
            // add to return value if obsolete
            obsoleteVals.add(value);
        } else {
            // else update the set of accepted versions
            valuesInStorage.add(value);
        }
    }
    return obsoleteVals;
}
Computes the final list of versions to be stored on top of what is currently being stored . Final list is valuesInStorage modified in place
266
28
147,909
/**
 * Installs a provider either in the scope or in the pool of unbound
 * providers. Named providers are always treated as bound; unnamed providers
 * go to the bound or unbound pool depending on {@code isBound}.
 *
 * @return the provider that ends up installed
 */
private <T> InternalProviderImpl installInternalProvider(Class<T> clazz,
                                                         String bindingName,
                                                         InternalProviderImpl<? extends T> internalProvider,
                                                         boolean isBound,
                                                         boolean isTestProvider) {
    if (bindingName != null) {
        return installNamedProvider(mapClassesToNamedBoundProviders, clazz, bindingName,
                internalProvider, isTestProvider);
    }
    if (isBound) {
        return installUnNamedProvider(mapClassesToUnNamedBoundProviders, clazz,
                internalProvider, isTestProvider);
    }
    return installUnNamedProvider(mapClassesToUnNamedUnBoundProviders, clazz,
            internalProvider, isTestProvider);
}
Installs a provider either in the scope or the pool of unbound providers .
155
16
147,910
/**
 * Resets the state of the scope: clears all bound providers (named and
 * unnamed), forgets any installed test modules, and re-installs the scope's
 * own bindings. Useful in automated tests to start from a clean scope.
 */
@Override
protected void reset() {
    super.reset();
    mapClassesToNamedBoundProviders.clear();
    mapClassesToUnNamedBoundProviders.clear();
    hasTestModules = false;
    // Re-install the scope's bindings last, after stale providers are gone.
    installBindingForScope();
}
Resets the state of the scope . Useful for automation testing when we want to reset the scope used to install test modules .
60
25
147,911
/**
 * Returns the instance managed by this provider, resolving it from (in
 * order): a cached instance, a provider instance, an instance factory, or a
 * provider factory. Singleton results are cached, and references that are no
 * longer needed are nulled out so they can be garbage collected.
 *
 * @param scope the scope passed to factories when creating instances
 * @return the provided instance
 * @throws IllegalStateException if neither an instance, a provider, a
 *         factory, nor a provider factory is configured
 */
public synchronized T get(Scope scope) {
    // Fast path: a singleton instance was already created and cached.
    if (instance != null) {
        return instance;
    }
    if (providerInstance != null) {
        if (isProvidingSingletonInScope) {
            // The provided object is a singleton: cache it, drop the provider.
            instance = providerInstance.get();
            //gc
            providerInstance = null;
            return instance;
        }
        return providerInstance.get();
    }
    // Lazily locate the instance factory, then drop the class reference.
    if (factoryClass != null && factory == null) {
        factory = FactoryLocator.getFactory(factoryClass);
        //gc
        factoryClass = null;
    }
    if (factory != null) {
        if (!factory.hasScopeAnnotation() && !isCreatingSingletonInScope) {
            // Not a singleton: create a fresh instance on every call.
            return factory.createInstance(scope);
        }
        instance = factory.createInstance(scope);
        //gc
        factory = null;
        return instance;
    }
    // Lazily locate the provider factory, then drop the class reference.
    if (providerFactoryClass != null && providerFactory == null) {
        providerFactory = FactoryLocator.getFactory(providerFactoryClass);
        //gc
        providerFactoryClass = null;
    }
    if (providerFactory != null) {
        if (providerFactory.hasProvidesSingletonInScopeAnnotation() || isProvidingSingletonInScope) {
            // The provided object is a singleton: cache what the provider returns.
            instance = providerFactory.createInstance(scope).get();
            //gc
            providerFactory = null;
            return instance;
        }
        if (providerFactory.hasScopeAnnotation() || isCreatingSingletonInScope) {
            // The provider itself is a singleton: cache the provider, not the result.
            providerInstance = providerFactory.createInstance(scope);
            //gc
            providerFactory = null;
            return providerInstance.get();
        }
        return providerFactory.createInstance(scope).get();
    }
    throw new IllegalStateException("A provider can only be used with an instance, a provider, a factory or a provider factory. Should not happen.");
}
Returns the instance managed by this provider, resolving it lazily from an instance, a provider, a factory, or a provider factory, and caching singletons.
354
6
147,912
/**
 * Detaches a scope from its parent and removes it (and all of its
 * sub-scopes) from the global registry, allowing them to be garbage
 * collected if they are not referenced outside of Toothpick. Closing a name
 * with no registered scope is a no-op.
 *
 * @param name the key under which the scope was registered
 */
public static void closeScope(Object name) {
    //we remove the scope first, so that other threads don't see it, and see the next snapshot of the tree
    ScopeNode scope = (ScopeNode) MAP_KEY_TO_SCOPE.remove(name);
    if (scope != null) {
        ScopeNode parentScope = scope.getParentScope();
        if (parentScope != null) {
            parentScope.removeChild(scope);
        } else {
            // A root scope was closed: notify that the scope forest changed.
            ConfigurationHolder.configuration.onScopeForestReset();
        }
        removeScopeAndChildrenFromMap(scope);
    }
}
Detach a scope from its parent ; this will trigger the garbage collection of this scope and its sub - scopes if they are not referenced outside of Toothpick .
123
33
147,913
/**
 * Clears every registered scope and the pool of unbound providers.
 * Intended for tests, so no state leaks between test cases.
 */
public static void reset() {
    for (Object scopeName : Collections.list(MAP_KEY_TO_SCOPE.keys())) {
        closeScope(scopeName);
    }
    ConfigurationHolder.configuration.onScopeForestReset();
    ScopeImpl.resetUnBoundProviders();
}
Clears all scopes . Useful for testing and not getting any leak ...
64
15
147,914
/**
 * Notifies every subscribed revision processor whose currency flag and
 * content model match the given revision. Null revisions and revisions
 * without a positive page id are ignored.
 *
 * @param mwRevision the revision to dispatch
 * @param isCurrent whether this is the current revision of its page
 */
void notifyMwRevisionProcessors(MwRevision mwRevision, boolean isCurrent) {
    if (mwRevision == null || mwRevision.getPageId() <= 0) {
        return;
    }
    for (MwRevisionProcessorBroker.RevisionSubscription subscription : this.revisionSubscriptions) {
        if (subscription.onlyCurrentRevisions != isCurrent) {
            continue;
        }
        boolean modelMatches = subscription.model == null
                || mwRevision.getModel().equals(subscription.model);
        if (modelMatches) {
            subscription.mwRevisionProcessor.processRevision(mwRevision);
        }
    }
}
Notifies all interested subscribers of the given revision .
133
10
147,915
/**
 * Parses an item id from an entity IRI by splitting it into a site IRI
 * prefix and the local id part after the last separator character.
 *
 * NOTE(review): the separator search uses ' ' (a space character); for
 * standard entity IRIs like "http://www.wikidata.org/entity/Q42" one would
 * expect '/' here — confirm this is intentional and not a mangled literal.
 *
 * @param iri the full entity IRI
 * @return the parsed item id value
 * @throws IllegalArgumentException if the IRI is not a valid item IRI
 */
static ItemIdValueImpl fromIri(String iri) {
    int separator = iri.lastIndexOf(' ') + 1;
    try {
        return new ItemIdValueImpl(iri.substring(separator), iri.substring(0, separator));
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Invalid Wikibase entity IRI: " + iri, e);
    }
}
Parses an item IRI
99
7
147,916
void resetCurrentRevisionData ( ) { this . revisionId = NO_REVISION_ID ; // impossible as an id in MediaWiki this . parentRevisionId = NO_REVISION_ID ; this . text = null ; this . comment = null ; this . format = null ; this . timeStamp = null ; this . model = null ; }
Resets all member fields that hold information about the revision that is currently being processed .
78
17
147,917
/**
 * Ensures a term is represented as a {@code TermImpl} so that it serializes
 * correctly; instances that already have the right type are returned as-is.
 *
 * @param term the term to normalize
 * @return an equivalent term of the serializable type
 */
private static MonolingualTextValue toTerm(MonolingualTextValue term) {
    if (term instanceof TermImpl) {
        return term;
    }
    return new TermImpl(term.getLanguageCode(), term.getText());
}
We need to make sure the terms are of the right type otherwise they will not be serialized correctly .
48
21
147,918
/**
 * Writes the collected property statistics to "properties.csv", sorted by
 * the usage-record comparator. I/O problems are reported, not rethrown.
 */
private void writePropertyData() {
    try (PrintStream out = new PrintStream(ExampleHelpers.openExampleFileOuputStream("properties.csv"))) {
        out.println("Id" + ",Label" + ",Description" + ",URL" + ",Datatype"
                + ",Uses in statements" + ",Items with such statements"
                + ",Uses in statements with qualifiers" + ",Uses in qualifiers"
                + ",Uses in references" + ",Uses total" + ",Related properties");
        List<Entry<PropertyIdValue, PropertyRecord>> sortedRecords =
                new ArrayList<Entry<PropertyIdValue, PropertyRecord>>(this.propertyRecords.entrySet());
        Collections.sort(sortedRecords, new UsageRecordComparator());
        for (Entry<PropertyIdValue, PropertyRecord> record : sortedRecords) {
            printPropertyRecord(out, record.getValue(), record.getKey());
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Writes the data collected about properties to a file .
227
11
147,919
/**
 * Writes the collected class statistics to "classes.csv", sorted by the
 * class-usage comparator. Classes with no direct instances and no direct
 * subclasses are skipped. I/O problems are reported, not rethrown.
 */
private void writeClassData() {
    try (PrintStream out = new PrintStream(ExampleHelpers.openExampleFileOuputStream("classes.csv"))) {
        out.println("Id" + ",Label" + ",Description" + ",URL" + ",Image"
                + ",Number of direct instances" + ",Number of direct subclasses"
                + ",Direct superclasses" + ",All superclasses" + ",Related properties");
        List<Entry<EntityIdValue, ClassRecord>> sortedRecords =
                new ArrayList<>(this.classRecords.entrySet());
        Collections.sort(sortedRecords, new ClassUsageRecordComparator());
        for (Entry<EntityIdValue, ClassRecord> record : sortedRecords) {
            ClassRecord classRecord = record.getValue();
            if (classRecord.itemCount > 0 || classRecord.subclassCount > 0) {
                printClassRecord(out, classRecord, record.getKey());
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Writes the data collected about classes to a file .
229
11
147,920
/**
 * Prints one CSV line for a single class: terms, image, instance/subclass
 * counts, direct superclasses, transitive superclasses, and related
 * properties.
 */
private void printClassRecord(PrintStream out, ClassRecord classRecord, EntityIdValue entityIdValue) {
    printTerms(out, classRecord.itemDocument, entityIdValue,
            "\"" + getClassLabel(entityIdValue) + "\"");
    printImage(out, classRecord.itemDocument);
    out.print("," + classRecord.itemCount + "," + classRecord.subclassCount);
    // Direct superclasses first, then the transitive closure.
    printClassList(out, classRecord.superClasses);
    HashSet<EntityIdValue> allSuperClasses = new HashSet<>();
    for (EntityIdValue directSuperClass : classRecord.superClasses) {
        addSuperClasses(directSuperClass, allSuperClasses);
    }
    printClassList(out, allSuperClasses);
    printRelatedProperties(out, classRecord);
    out.println("");
}
Prints the data for a single class to the given stream . This will be a single line in CSV .
185
22
147,921
private void printImage ( PrintStream out , ItemDocument itemDocument ) { String imageFile = null ; if ( itemDocument != null ) { for ( StatementGroup sg : itemDocument . getStatementGroups ( ) ) { boolean isImage = "P18" . equals ( sg . getProperty ( ) . getId ( ) ) ; if ( ! isImage ) { continue ; } for ( Statement s : sg ) { if ( s . getMainSnak ( ) instanceof ValueSnak ) { Value value = s . getMainSnak ( ) . getValue ( ) ; if ( value instanceof StringValue ) { imageFile = ( ( StringValue ) value ) . getString ( ) ; break ; } } } if ( imageFile != null ) { break ; } } } if ( imageFile == null ) { out . print ( ",\"http://commons.wikimedia.org/w/thumb.php?f=MA_Route_blank.svg&w=50\"" ) ; } else { try { String imageFileEncoded ; imageFileEncoded = URLEncoder . encode ( imageFile . replace ( " " , "_" ) , "utf-8" ) ; // Keep special title symbols unescaped: imageFileEncoded = imageFileEncoded . replace ( "%3A" , ":" ) . replace ( "%2F" , "/" ) ; out . print ( "," + csvStringEscape ( "http://commons.wikimedia.org/w/thumb.php?f=" + imageFileEncoded ) + "&w=50" ) ; } catch ( UnsupportedEncodingException e ) { throw new RuntimeException ( "Your JRE does not support UTF-8 encoding. Srsly?!" , e ) ; } } }
Prints the URL of a thumbnail for the given item document to the output or a default image if no image is given for the item .
387
28
147,922
/**
 * Prints one CSV line for a single property: terms, datatype, the various
 * usage counts, their total, and related properties.
 */
private void printPropertyRecord(PrintStream out, PropertyRecord propertyRecord, PropertyIdValue propertyIdValue) {
    printTerms(out, propertyRecord.propertyDocument, propertyIdValue, null);

    String datatype = propertyRecord.propertyDocument == null
            ? "Unknown"
            : getDatatypeLabel(propertyRecord.propertyDocument.getDatatype());

    out.print("," + datatype
            + "," + propertyRecord.statementCount
            + "," + propertyRecord.itemCount
            + "," + propertyRecord.statementWithQualifierCount
            + "," + propertyRecord.qualifierCount
            + "," + propertyRecord.referenceCount
            + "," + (propertyRecord.statementCount + propertyRecord.qualifierCount
                    + propertyRecord.referenceCount));
    printRelatedProperties(out, propertyRecord);
    out.println("");
}
Prints the data of one property to the given output . This will be a single line in CSV .
189
21
147,923
private String getDatatypeLabel ( DatatypeIdValue datatype ) { if ( datatype . getIri ( ) == null ) { // TODO should be redundant once the // JSON parsing works return "Unknown" ; } switch ( datatype . getIri ( ) ) { case DatatypeIdValue . DT_COMMONS_MEDIA : return "Commons media" ; case DatatypeIdValue . DT_GLOBE_COORDINATES : return "Globe coordinates" ; case DatatypeIdValue . DT_ITEM : return "Item" ; case DatatypeIdValue . DT_QUANTITY : return "Quantity" ; case DatatypeIdValue . DT_STRING : return "String" ; case DatatypeIdValue . DT_TIME : return "Time" ; case DatatypeIdValue . DT_URL : return "URL" ; case DatatypeIdValue . DT_PROPERTY : return "Property" ; case DatatypeIdValue . DT_EXTERNAL_ID : return "External identifier" ; case DatatypeIdValue . DT_MATH : return "Math" ; case DatatypeIdValue . DT_MONOLINGUAL_TEXT : return "Monolingual Text" ; default : throw new RuntimeException ( "Unknown datatype " + datatype . getIri ( ) ) ; } }
Returns an English label for a given datatype .
303
11
147,924
/**
 * Returns a label to use for the given property: its stored label when a
 * property document was recorded, or the bare property id otherwise.
 */
private String getPropertyLabel(PropertyIdValue propertyIdValue) {
    PropertyRecord record = this.propertyRecords.get(propertyIdValue);
    boolean hasDocument = record != null && record.propertyDocument != null;
    return hasDocument
            ? getLabel(propertyIdValue, record.propertyDocument)
            : propertyIdValue.getId();
}
Returns a string that should be used as a label for the given property .
78
15
147,925
/**
 * Returns a label to use for the given class item, ensuring each label is
 * used by only one class: a later class with a duplicate label gets its QID
 * appended for disambiguation.
 */
private String getClassLabel(EntityIdValue entityIdValue) {
    ClassRecord classRecord = this.classRecords.get(entityIdValue);
    String label = (classRecord == null || classRecord.itemDocument == null)
            ? entityIdValue.getId()
            : getLabel(entityIdValue, classRecord.itemDocument);

    EntityIdValue labelOwner = this.labels.get(label);
    if (labelOwner == null) {
        // First class to claim this label.
        this.labels.put(label, entityIdValue);
        return label;
    }
    if (labelOwner.equals(entityIdValue)) {
        return label;
    }
    // Label already taken by another class: disambiguate with the QID.
    return label + " (" + entityIdValue.getId() + ")";
}
Returns a string that should be used as a label for the given item . The method also ensures that each label is used for only one class . Other classes with the same label will have their QID added for disambiguation .
162
47
147,926
/**
 * Writes the auxiliary RDF triples that encode the given time value: its
 * type, the time literal, its precision, timezone offset, and calendar model.
 *
 * @param value the time value to encode
 * @param resource the RDF resource the triples are attached to
 * @throws RDFHandlerException if writing a triple fails
 */
@Override
public void writeValue(TimeValue value, Resource resource) throws RDFHandlerException {
    RdfWriter writer = this.rdfWriter;
    writer.writeTripleValueObject(resource, RdfWriter.RDF_TYPE, RdfWriter.WB_TIME_VALUE);
    writer.writeTripleValueObject(resource, RdfWriter.WB_TIME,
            TimeValueConverter.getTimeLiteral(value, writer));
    writer.writeTripleIntegerObject(resource, RdfWriter.WB_TIME_PRECISION, value.getPrecision());
    writer.writeTripleIntegerObject(resource, RdfWriter.WB_TIME_TIMEZONE, value.getTimezoneOffset());
    writer.writeTripleUriObject(resource, RdfWriter.WB_TIME_CALENDAR_MODEL,
            value.getPreferredCalendarModel());
}
Write the auxiliary RDF data for encoding the given value .
214
12
147,927
/**
 * Entry point: configures logging, prints the bot's documentation, runs the
 * bot over a Wikidata dump, finalizes it, and reports completion.
 */
public static void main(String[] args) throws LoginFailedException, IOException, MediaWikiApiErrorException {
    ExampleHelpers.configureLogging();
    printDocumentation();

    SetLabelsForNumbersBot bot = new SetLabelsForNumbersBot();
    ExampleHelpers.processEntitiesFromWikidataDump(bot);
    bot.finish();

    System.out.println("*** Done.");
}
Main method to run the bot .
96
7
147,928
/**
 * Fetches the current online version of the given item and adds labels equal
 * to its numeric value (P1181) for every covered Arabic-numeral language that
 * does not yet have a label. Items that cannot be fetched, lack a unique
 * integer value, or are not in a known class of numbers are skipped with a
 * console message. API and I/O errors are printed, not rethrown.
 *
 * @param itemIdValue the item to (potentially) edit
 */
protected void addLabelForNumbers(ItemIdValue itemIdValue) {
    String qid = itemIdValue.getId();
    try {
        // Fetch the online version of the item to make sure we edit the
        // current version:
        ItemDocument currentItemDocument = (ItemDocument) dataFetcher.getEntityDocument(qid);
        if (currentItemDocument == null) {
            System.out.println("*** " + qid + " could not be fetched. Maybe it has been deleted.");
            return;
        }
        // Check if we still have exactly one numeric value:
        QuantityValue number = currentItemDocument.findStatementQuantityValue("P1181");
        if (number == null) {
            System.out.println("*** No unique numeric value for " + qid);
            return;
        }
        // Check if the item is in a known numeric class:
        if (!currentItemDocument.hasStatementValue("P31", numberClasses)) {
            System.out.println("*** " + qid + " is not in a known class of integer numbers. Skipping.");
            return;
        }
        // Check if the value is integer and build label string:
        String numberString;
        try {
            BigInteger intValue = number.getNumericValue().toBigIntegerExact();
            numberString = intValue.toString();
        } catch (ArithmeticException e) {
            System.out.println("*** Numeric value for " + qid + " is not an integer: "
                    + number.getNumericValue());
            return;
        }
        // Construct data to write:
        ItemDocumentBuilder itemDocumentBuilder = ItemDocumentBuilder.forItemId(itemIdValue)
                .withRevisionId(currentItemDocument.getRevisionId());
        ArrayList<String> languages = new ArrayList<>(arabicNumeralLanguages.length);
        for (int i = 0; i < arabicNumeralLanguages.length; i++) {
            // Only add labels for languages that do not have one yet.
            if (!currentItemDocument.getLabels().containsKey(arabicNumeralLanguages[i])) {
                itemDocumentBuilder.withLabel(numberString, arabicNumeralLanguages[i]);
                languages.add(arabicNumeralLanguages[i]);
            }
        }
        if (languages.size() == 0) {
            System.out.println("*** Labels already complete for " + qid);
            return;
        }
        logEntityModification(currentItemDocument.getEntityId(), numberString, languages);
        dataEditor.editItemDocument(itemDocumentBuilder.build(), false,
                "Set labels to numeric value (Task MB1)");
    } catch (MediaWikiApiErrorException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Fetches the current online data for the given item and adds numerical labels if necessary .
619
18
147,929
/**
 * Returns true if the given item document lacks a label for at least one of
 * the covered Arabic-numeral languages.
 *
 * @param itemDocument the document whose labels are inspected
 * @return true if any covered language has no label
 */
protected boolean lacksSomeLanguage(ItemDocument itemDocument) {
    for (int index = 0; index < arabicNumeralLanguages.length; index++) {
        boolean hasLabel = itemDocument.getLabels().containsKey(arabicNumeralLanguages[index]);
        if (!hasLabel) {
            return true;
        }
    }
    return false;
}
Returns true if the given item document lacks a label for at least one of the languages covered .
72
19
147,930
/**
 * Writes the average life span and sample size per index to
 * "life-expectancies.csv", skipping indices with no data. I/O problems are
 * reported, not rethrown.
 */
public void writeFinalResults() {
    printStatus();
    try (PrintStream out = new PrintStream(
            ExampleHelpers.openExampleFileOuputStream("life-expectancies.csv"))) {
        for (int age = 0; age < lifeSpans.length; age++) {
            if (peopleCount[age] == 0) {
                continue;
            }
            double averageLifeSpan = (double) lifeSpans[age] / peopleCount[age];
            out.println(age + "," + averageLifeSpan + "," + peopleCount[age]);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Writes the results of the processing to a file .
134
11
147,931
/**
 * Creates a directory at the given path if it does not exist yet. An
 * existing directory is tolerated; an existing non-directory is an error.
 *
 * @param path where the directory should exist
 * @throws IOException if creation fails, or a non-directory occupies the path
 */
private static void createDirectory(Path path) throws IOException {
    try {
        Files.createDirectory(path);
    } catch (FileAlreadyExistsException alreadyExists) {
        // Only tolerate the collision when the existing entry is a directory.
        if (!Files.isDirectory(path)) {
            throw alreadyExists;
        }
    }
}
Create a directory at the given path if it does not exist yet .
52
14
147,932
/**
 * Opens a new FileOutputStream for a file of the given name inside the given
 * result directory, replacing any existing file of that name. The caller is
 * responsible for eventually closing the stream.
 *
 * @param resultDirectory directory the file is created in
 * @param filename name of the file to (re)create
 * @return an open stream positioned at the start of an empty file
 * @throws IOException if the file cannot be opened
 */
public static FileOutputStream openResultFileOuputStream(Path resultDirectory, String filename)
        throws IOException {
    File resultFile = resultDirectory.resolve(filename).toFile();
    return new FileOutputStream(resultFile);
}
Opens a new FileOutputStream for a file of the given name in the given result directory . Any file of this name that exists already will be replaced . The caller is responsible for eventually closing the stream .
52
42
147,933
/**
 * Recursively adds the given superclass and all of its transitive
 * superclasses to the superclass set of the given class record. An already
 * recorded superclass stops the recursion, which also guards against cycles;
 * a superclass without its own record contributes nothing further.
 */
private void addSuperClasses(Integer directSuperClass, ClassRecord subClassRecord) {
    boolean alreadyKnown = subClassRecord.superClasses.contains(directSuperClass);
    if (alreadyKnown) {
        return;
    }
    subClassRecord.superClasses.add(directSuperClass);
    ClassRecord superClassRecord = getClassRecord(directSuperClass);
    if (superClassRecord != null) {
        for (Integer transitiveSuperClass : superClassRecord.directSuperClasses) {
            addSuperClasses(transitiveSuperClass, subClassRecord);
        }
    }
}
Recursively add indirect superclasses to a class record .
113
12
147,934
/**
 * Extracts the numeric part of an entity id, given either a full Wikidata
 * entity URI or a short id such as "Q42"/"P31". URIs outside the Wikidata
 * entity namespace yield 0.
 *
 * @param idString the id string or entity URI
 * @param isUri whether idString is a full URI rather than a short id
 * @return the numeric id, or 0 for URIs outside the entity namespace
 */
private Integer getNumId(String idString, boolean isUri) {
    String digits;
    if (!isUri) {
        digits = idString.substring(1);
    } else {
        if (!idString.startsWith("http://www.wikidata.org/entity/")) {
            return 0;
        }
        // Skip the namespace prefix plus the one-letter entity type prefix.
        digits = idString.substring("http://www.wikidata.org/entity/Q".length());
    }
    return Integer.parseInt(digits);
}
Extracts a numeric id from a string which can be either a Wikidata entity URI or a short entity or property id .
113
27
147,935
/**
 * Counts, for the given document, every property that co-occurs with
 * {@code thisPropertyIdValue} (which itself is excluded so properties do not
 * count themselves), accumulating into the usage record's co-count map.
 */
private void countCooccurringProperties(StatementDocument statementDocument,
                                        UsageRecord usageRecord,
                                        PropertyIdValue thisPropertyIdValue) {
    for (StatementGroup sg : statementDocument.getStatementGroups()) {
        if (sg.getProperty().equals(thisPropertyIdValue)) {
            continue;
        }
        Integer propertyId = getNumId(sg.getProperty().getId(), false);
        Integer previousCount = usageRecord.propertyCoCounts.get(propertyId);
        if (previousCount == null) {
            usageRecord.propertyCoCounts.put(propertyId, 1);
        } else {
            usageRecord.propertyCoCounts.put(propertyId, previousCount + 1);
        }
    }
}
Counts each property for which there is a statement in the given item document ignoring the property thisPropertyIdValue to avoid properties counting themselves .
162
28
147,936
/**
 * Executes the given SPARQL query against the Wikidata query service and
 * returns a stream with the result in JSON format. The caller is
 * responsible for consuming (and thereby closing) the stream.
 *
 * @param query the SPARQL query text
 * @return a stream of the JSON response body
 * @throws IOException if the HTTP request fails
 */
private InputStream runSparqlQuery(String query) throws IOException {
    try {
        String endpoint = "https://query.wikidata.org/sparql?" + "query="
                + URLEncoder.encode(query, "UTF-8") + "&format=json";
        HttpURLConnection connection = (HttpURLConnection) new URL(endpoint).openConnection();
        connection.setRequestMethod("GET");
        return connection.getInputStream();
    } catch (UnsupportedEncodingException | MalformedURLException e) {
        // Cannot happen: the charset name and base URL are compile-time constants.
        throw new RuntimeException(e.getMessage(), e);
    }
}
Executes a given SPARQL query and returns a stream with the result in JSON format .
149
19
147,937
/**
 * Writes all data collected about properties to "properties.json" as one
 * JSON object keyed by numeric property id. I/O problems are reported, not
 * rethrown.
 */
private void writePropertyData() {
    try (PrintStream out = new PrintStream(
            openResultFileOuputStream(resultDirectory, "properties.json"))) {
        out.println("{");
        int serializedCount = 0;
        for (Entry<Integer, PropertyRecord> propertyEntry : this.propertyRecords.entrySet()) {
            if (serializedCount > 0) {
                out.println(",");
            }
            out.print("\"" + propertyEntry.getKey() + "\":");
            mapper.writeValue(out, propertyEntry.getValue());
            serializedCount++;
        }
        out.println("\n}");
        System.out.println(" Serialized information for " + serializedCount + " properties.");
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Writes all data that was collected about properties to a json file .
180
14
147,938
/**
 * Writes all data collected about classes to "classes.json" as one JSON
 * object keyed by numeric class id. Classes with neither instances nor
 * subclasses are omitted. Before serializing, each nonempty class is
 * registered as a direct subclass of its direct superclasses.
 */
private void writeClassData() {
    try (PrintStream out = new PrintStream(openResultFileOuputStream(resultDirectory, "classes.json"))) {
        out.println("{");
        // Add direct subclass information:
        for (Entry<Integer, ClassRecord> classEntry : this.classRecords.entrySet()) {
            if (classEntry.getValue().subclassCount == 0 && classEntry.getValue().itemCount == 0) {
                continue;
            }
            for (Integer superClass : classEntry.getValue().directSuperClasses) {
                // NOTE(review): assumes every direct superclass has its own
                // record in classRecords; a missing record would NPE here —
                // confirm records are always created for superclasses.
                this.classRecords.get(superClass).nonemptyDirectSubclasses.add(classEntry.getKey().toString());
            }
        }
        int count = 0;
        int countNoLabel = 0;
        for (Entry<Integer, ClassRecord> classEntry : this.classRecords.entrySet()) {
            if (classEntry.getValue().subclassCount == 0 && classEntry.getValue().itemCount == 0) {
                continue;
            }
            if (classEntry.getValue().label == null) {
                countNoLabel++;
            }
            if (count > 0) {
                out.println(",");
            }
            out.print("\"" + classEntry.getKey() + "\":");
            mapper.writeValue(out, classEntry.getValue());
            count++;
        }
        out.println("\n}");
        System.out.println(" Serialized information for " + count + " class items.");
        System.out.println(" -- class items with missing label: " + countNoLabel);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
Writes all data that was collected about classes to a json file .
380
14
147,939
public static String formatTimeISO8601 ( TimeValue value ) { StringBuilder builder = new StringBuilder ( ) ; DecimalFormat yearForm = new DecimalFormat ( FORMAT_YEAR ) ; DecimalFormat timeForm = new DecimalFormat ( FORMAT_OTHER ) ; if ( value . getYear ( ) > 0 ) { builder . append ( "+" ) ; } builder . append ( yearForm . format ( value . getYear ( ) ) ) ; builder . append ( "-" ) ; builder . append ( timeForm . format ( value . getMonth ( ) ) ) ; builder . append ( "-" ) ; builder . append ( timeForm . format ( value . getDay ( ) ) ) ; builder . append ( "T" ) ; builder . append ( timeForm . format ( value . getHour ( ) ) ) ; builder . append ( ":" ) ; builder . append ( timeForm . format ( value . getMinute ( ) ) ) ; builder . append ( ":" ) ; builder . append ( timeForm . format ( value . getSecond ( ) ) ) ; builder . append ( "Z" ) ; return builder . toString ( ) ; }
Returns a representation of the date from the value attributes as ISO 8601 encoding .
248
16
147,940
public static String formatBigDecimal ( BigDecimal number ) { if ( number . signum ( ) != - 1 ) { return "+" + number . toString ( ) ; } else { return number . toString ( ) ; } }
Returns a signed string representation of the given number .
51
10
147,941
private static DumpContentType guessDumpContentType ( String fileName ) { String lcDumpName = fileName . toLowerCase ( ) ; if ( lcDumpName . contains ( ".json.gz" ) ) { return DumpContentType . JSON ; } else if ( lcDumpName . contains ( ".json.bz2" ) ) { return DumpContentType . JSON ; } else if ( lcDumpName . contains ( ".sql.gz" ) ) { return DumpContentType . SITES ; } else if ( lcDumpName . contains ( ".xml.bz2" ) ) { if ( lcDumpName . contains ( "daily" ) ) { return DumpContentType . DAILY ; } else if ( lcDumpName . contains ( "current" ) ) { return DumpContentType . CURRENT ; } else { return DumpContentType . FULL ; } } else { logger . warn ( "Could not guess type of the dump file \"" + fileName + "\". Defaulting to json.gz." ) ; return DumpContentType . JSON ; } }
Guess the type of the given dump from its filename .
248
12
147,942
private static String guessDumpDate ( String fileName ) { Pattern p = Pattern . compile ( "([0-9]{8})" ) ; Matcher m = p . matcher ( fileName ) ; if ( m . find ( ) ) { return m . group ( 1 ) ; } else { logger . info ( "Could not guess date of the dump file \"" + fileName + "\". Defaulting to YYYYMMDD." ) ; return "YYYYMMDD" ; } }
Guess the date of the dump from the given dump file name .
107
14
147,943
public void add ( StatementRank rank , Resource subject ) { if ( this . bestRank == rank ) { subjects . add ( subject ) ; } else if ( bestRank == StatementRank . NORMAL && rank == StatementRank . PREFERRED ) { //We found a preferred statement subjects . clear ( ) ; bestRank = StatementRank . PREFERRED ; subjects . add ( subject ) ; } }
Adds a Statement .
83
4
147,944
public void writeAuxiliaryTriples ( ) throws RDFHandlerException { for ( PropertyRestriction pr : this . someValuesQueue ) { writeSomeValueRestriction ( pr . propertyUri , pr . rangeUri , pr . subject ) ; } this . someValuesQueue . clear ( ) ; this . valueRdfConverter . writeAuxiliaryTriples ( ) ; }
Writes all auxiliary triples that have been buffered recently . This includes OWL property restrictions but it also includes any auxiliary triples required by complex values that were used in snaks .
83
38
147,945
void writeSomeValueRestriction ( String propertyUri , String rangeUri , Resource bnode ) throws RDFHandlerException { this . rdfWriter . writeTripleValueObject ( bnode , RdfWriter . RDF_TYPE , RdfWriter . OWL_RESTRICTION ) ; this . rdfWriter . writeTripleUriObject ( bnode , RdfWriter . OWL_ON_PROPERTY , propertyUri ) ; this . rdfWriter . writeTripleUriObject ( bnode , RdfWriter . OWL_SOME_VALUES_FROM , rangeUri ) ; }
Writes a buffered some - value restriction .
136
10
147,946
String getRangeUri ( PropertyIdValue propertyIdValue ) { String datatype = this . propertyRegister . getPropertyType ( propertyIdValue ) ; if ( datatype == null ) return null ; switch ( datatype ) { case DatatypeIdValue . DT_MONOLINGUAL_TEXT : this . rdfConversionBuffer . addDatatypeProperty ( propertyIdValue ) ; return Vocabulary . RDF_LANG_STRING ; case DatatypeIdValue . DT_STRING : case DatatypeIdValue . DT_EXTERNAL_ID : case DatatypeIdValue . DT_MATH : this . rdfConversionBuffer . addDatatypeProperty ( propertyIdValue ) ; return Vocabulary . XSD_STRING ; case DatatypeIdValue . DT_COMMONS_MEDIA : case DatatypeIdValue . DT_GLOBE_COORDINATES : case DatatypeIdValue . DT_ITEM : case DatatypeIdValue . DT_PROPERTY : case DatatypeIdValue . DT_LEXEME : case DatatypeIdValue . DT_FORM : case DatatypeIdValue . DT_SENSE : case DatatypeIdValue . DT_TIME : case DatatypeIdValue . DT_URL : case DatatypeIdValue . DT_GEO_SHAPE : case DatatypeIdValue . DT_TABULAR_DATA : case DatatypeIdValue . DT_QUANTITY : this . rdfConversionBuffer . addObjectProperty ( propertyIdValue ) ; return Vocabulary . OWL_THING ; default : return null ; } }
Returns the class of datatype URI that best characterizes the range of the given property based on its datatype .
361
25
147,947
void addSomeValuesRestriction ( Resource subject , String propertyUri , String rangeUri ) { this . someValuesQueue . add ( new PropertyRestriction ( subject , propertyUri , rangeUri ) ) ; }
Adds the given some - value restriction to the list of restrictions that should still be serialized . The given resource will be used as a subject .
47
29
147,948
Map < String , EntityDocument > getEntityDocumentMap ( int numOfEntities , WbGetEntitiesActionData properties ) throws MediaWikiApiErrorException , IOException { if ( numOfEntities == 0 ) { return Collections . emptyMap ( ) ; } configureProperties ( properties ) ; return this . wbGetEntitiesAction . wbGetEntities ( properties ) ; }
Creates a map of identifiers or page titles to documents retrieved via the APIs .
84
16
147,949
private void setRequestProps ( WbGetEntitiesActionData properties ) { StringBuilder builder = new StringBuilder ( ) ; builder . append ( "info|datatype" ) ; if ( ! this . filter . excludeAllLanguages ( ) ) { builder . append ( "|labels|aliases|descriptions" ) ; } if ( ! this . filter . excludeAllProperties ( ) ) { builder . append ( "|claims" ) ; } if ( ! this . filter . excludeAllSiteLinks ( ) ) { builder . append ( "|sitelinks" ) ; } properties . props = builder . toString ( ) ; }
Sets the value for the API s props parameter based on the current settings .
141
16
147,950
private void setRequestLanguages ( WbGetEntitiesActionData properties ) { if ( this . filter . excludeAllLanguages ( ) || this . filter . getLanguageFilter ( ) == null ) { return ; } properties . languages = ApiConnection . implodeObjects ( this . filter . getLanguageFilter ( ) ) ; }
Sets the value for the API s languages parameter based on the current settings .
71
16
147,951
private void setRequestSitefilter ( WbGetEntitiesActionData properties ) { if ( this . filter . excludeAllSiteLinks ( ) || this . filter . getSiteLinkFilter ( ) == null ) { return ; } properties . sitefilter = ApiConnection . implodeObjects ( this . filter . getSiteLinkFilter ( ) ) ; }
Sets the value for the API s sitefilter parameter based on the current settings .
74
17
147,952
void processSiteRow ( String siteRow ) { String [ ] row = getSiteRowFields ( siteRow ) ; String filePath = "" ; String pagePath = "" ; String dataArray = row [ 8 ] . substring ( row [ 8 ] . indexOf ( ' ' ) , row [ 8 ] . length ( ) - 2 ) ; // Explanation for the regular expression below: // "'{' or ';'" followed by either // "NOT: ';', '{', or '}'" repeated one or more times; or // "a single '}'" // The first case matches ";s:5:\"paths\"" // but also ";a:2:" in "{s:5:\"paths\";a:2:{s:9:\ ...". // The second case matches ";}" which terminates (sub)arrays. Matcher matcher = Pattern . compile ( "[{;](([^;}{][^;}{]*)|[}])" ) . matcher ( dataArray ) ; String prevString = "" ; String curString = "" ; String path = "" ; boolean valuePosition = false ; while ( matcher . find ( ) ) { String match = matcher . group ( ) . substring ( 1 ) ; if ( match . length ( ) == 0 ) { valuePosition = false ; continue ; } if ( match . charAt ( 0 ) == ' ' ) { valuePosition = ! valuePosition && ! "" . equals ( prevString ) ; curString = match . substring ( match . indexOf ( ' ' ) + 1 , match . length ( ) - 2 ) ; } else if ( match . charAt ( 0 ) == ' ' ) { valuePosition = false ; path = path + "/" + prevString ; } else if ( "}" . equals ( match ) ) { valuePosition = false ; path = path . substring ( 0 , path . lastIndexOf ( ' ' ) ) ; } if ( valuePosition && "file_path" . equals ( prevString ) && "/paths" . equals ( path ) ) { filePath = curString ; } else if ( valuePosition && "page_path" . equals ( prevString ) && "/paths" . equals ( path ) ) { pagePath = curString ; } prevString = curString ; curString = "" ; } MwSitesDumpFileProcessor . logger . debug ( "Found site data \"" + row [ 1 ] + "\" (group \"" + row [ 3 ] + "\", language \"" + row [ 5 ] + "\", type \"" + row [ 2 ] + "\")" ) ; this . sites . setSiteInformation ( row [ 1 ] , row [ 3 ] , row [ 5 ] , row [ 2 ] , filePath , pagePath ) ; }
Processes a row of the sites table and stores the site information found therein .
601
16
147,953
public synchronized void start ( ) { if ( ( todoFlags & RECORD_CPUTIME ) != 0 ) { currentStartCpuTime = getThreadCpuTime ( threadId ) ; } else { currentStartCpuTime = - 1 ; } if ( ( todoFlags & RECORD_WALLTIME ) != 0 ) { currentStartWallTime = System . nanoTime ( ) ; } else { currentStartWallTime = - 1 ; } isRunning = true ; }
Start the timer .
102
4
147,954
public static void startNamedTimer ( String timerName , int todoFlags , long threadId ) { getNamedTimer ( timerName , todoFlags , threadId ) . start ( ) ; }
Start a timer of the given string name for the current thread . If no such timer exists yet then it will be newly created .
43
26
147,955
public static long stopNamedTimer ( String timerName , int todoFlags ) { return stopNamedTimer ( timerName , todoFlags , Thread . currentThread ( ) . getId ( ) ) ; }
Stop a timer of the given string name for the current thread . If no such timer exists - 1 will be returned . Otherwise the return value is the CPU time that was measured .
45
36
147,956
public static void resetNamedTimer ( String timerName , int todoFlags , long threadId ) { getNamedTimer ( timerName , todoFlags , threadId ) . reset ( ) ; }
Reset a timer of the given string name for the given thread . If no such timer exists yet then it will be newly created .
43
27
147,957
public static Timer getNamedTimer ( String timerName , int todoFlags ) { return getNamedTimer ( timerName , todoFlags , Thread . currentThread ( ) . getId ( ) ) ; }
Get a timer of the given string name and todos for the current thread . If no such timer exists yet then it will be newly created .
46
29
147,958
public static Timer getNamedTimer ( String timerName , int todoFlags , long threadId ) { Timer key = new Timer ( timerName , todoFlags , threadId ) ; registeredTimers . putIfAbsent ( key , key ) ; return registeredTimers . get ( key ) ; }
Get a timer of the given string name for the given thread . If no such timer exists yet then it will be newly created .
67
26
147,959
public static Timer getNamedTotalTimer ( String timerName ) { long totalCpuTime = 0 ; long totalSystemTime = 0 ; int measurements = 0 ; int timerCount = 0 ; int todoFlags = RECORD_NONE ; Timer previousTimer = null ; for ( Map . Entry < Timer , Timer > entry : registeredTimers . entrySet ( ) ) { if ( entry . getValue ( ) . name . equals ( timerName ) ) { previousTimer = entry . getValue ( ) ; timerCount += 1 ; totalCpuTime += previousTimer . totalCpuTime ; totalSystemTime += previousTimer . totalWallTime ; measurements += previousTimer . measurements ; todoFlags |= previousTimer . todoFlags ; } } if ( timerCount == 1 ) { return previousTimer ; } else { Timer result = new Timer ( timerName , todoFlags , 0 ) ; result . totalCpuTime = totalCpuTime ; result . totalWallTime = totalSystemTime ; result . measurements = measurements ; result . threadCount = timerCount ; return result ; } }
Collect the total times measured by all known named timers of the given name . This is useful to add up times that were collected across separate threads .
235
29
147,960
public void performActions ( ) { if ( this . clientConfiguration . getActions ( ) . isEmpty ( ) ) { this . clientConfiguration . printHelp ( ) ; return ; } this . dumpProcessingController . setOfflineMode ( this . clientConfiguration . getOfflineMode ( ) ) ; if ( this . clientConfiguration . getDumpDirectoryLocation ( ) != null ) { try { this . dumpProcessingController . setDownloadDirectory ( this . clientConfiguration . getDumpDirectoryLocation ( ) ) ; } catch ( IOException e ) { logger . error ( "Could not set download directory to " + this . clientConfiguration . getDumpDirectoryLocation ( ) + ": " + e . getMessage ( ) ) ; logger . error ( "Aborting" ) ; return ; } } dumpProcessingController . setLanguageFilter ( this . clientConfiguration . getFilterLanguages ( ) ) ; dumpProcessingController . setSiteLinkFilter ( this . clientConfiguration . getFilterSiteKeys ( ) ) ; dumpProcessingController . setPropertyFilter ( this . clientConfiguration . getFilterProperties ( ) ) ; MwDumpFile dumpFile = this . clientConfiguration . getLocalDumpFile ( ) ; if ( dumpFile == null ) { dumpFile = dumpProcessingController . getMostRecentDump ( DumpContentType . JSON ) ; } else { if ( ! dumpFile . isAvailable ( ) ) { logger . error ( "Dump file not found or not readable: " + dumpFile . toString ( ) ) ; return ; } } this . clientConfiguration . setProjectName ( dumpFile . getProjectName ( ) ) ; this . clientConfiguration . setDateStamp ( dumpFile . getDateStamp ( ) ) ; boolean hasReadyProcessor = false ; for ( DumpProcessingAction props : this . clientConfiguration . getActions ( ) ) { if ( ! props . isReady ( ) ) { continue ; } if ( props . needsSites ( ) ) { prepareSites ( ) ; if ( this . sites == null ) { // sites unavailable continue ; } props . setSites ( this . sites ) ; } props . setDumpInformation ( dumpFile . getProjectName ( ) , dumpFile . getDateStamp ( ) ) ; this . dumpProcessingController . 
registerEntityDocumentProcessor ( props , null , true ) ; hasReadyProcessor = true ; } if ( ! hasReadyProcessor ) { return ; // silent; non-ready action should report its problem // directly } if ( ! this . clientConfiguration . isQuiet ( ) ) { EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor ( 0 ) ; this . dumpProcessingController . registerEntityDocumentProcessor ( entityTimerProcessor , null , true ) ; } openActions ( ) ; this . dumpProcessingController . processDump ( dumpFile ) ; closeActions ( ) ; try { writeReport ( ) ; } catch ( IOException e ) { logger . error ( "Could not print report file: " + e . getMessage ( ) ) ; } }
Performs all actions that have been configured .
659
9
147,961
private void initializeLogging ( ) { // Since logging is static, make sure this is done only once even if // multiple clients are created (e.g., during tests) if ( consoleAppender != null ) { return ; } consoleAppender = new ConsoleAppender ( ) ; consoleAppender . setLayout ( new PatternLayout ( LOG_PATTERN ) ) ; consoleAppender . setThreshold ( Level . INFO ) ; LevelRangeFilter filter = new LevelRangeFilter ( ) ; filter . setLevelMin ( Level . TRACE ) ; filter . setLevelMax ( Level . INFO ) ; consoleAppender . addFilter ( filter ) ; consoleAppender . activateOptions ( ) ; org . apache . log4j . Logger . getRootLogger ( ) . addAppender ( consoleAppender ) ; errorAppender = new ConsoleAppender ( ) ; errorAppender . setLayout ( new PatternLayout ( LOG_PATTERN ) ) ; errorAppender . setThreshold ( Level . WARN ) ; errorAppender . setTarget ( ConsoleAppender . SYSTEM_ERR ) ; errorAppender . activateOptions ( ) ; org . apache . log4j . Logger . getRootLogger ( ) . addAppender ( errorAppender ) ; }
Sets up Log4J to write log messages to the console . Low - priority messages are logged to stdout while high - priority messages go to stderr .
271
34
147,962
public static void main ( String [ ] args ) throws ParseException , IOException { Client client = new Client ( new DumpProcessingController ( "wikidatawiki" ) , args ) ; client . performActions ( ) ; }
Launches the client with the specified parameters .
52
9
147,963
public void writeBasicDeclarations ( ) throws RDFHandlerException { for ( Map . Entry < String , String > uriType : Vocabulary . getKnownVocabularyTypes ( ) . entrySet ( ) ) { this . rdfWriter . writeTripleUriObject ( uriType . getKey ( ) , RdfWriter . RDF_TYPE , uriType . getValue ( ) ) ; } }
Writes OWL declarations for all basic vocabulary elements used in the dump .
89
15
147,964
void writeInterPropertyLinks ( PropertyDocument document ) throws RDFHandlerException { Resource subject = this . rdfWriter . getUri ( document . getEntityId ( ) . getIri ( ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_DIRECT_CLAIM_PROP ) , Vocabulary . getPropertyUri ( document . getEntityId ( ) , PropertyContext . DIRECT ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_CLAIM_PROP ) , Vocabulary . getPropertyUri ( document . getEntityId ( ) , PropertyContext . STATEMENT ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_STATEMENT_PROP ) , Vocabulary . getPropertyUri ( document . getEntityId ( ) , PropertyContext . VALUE_SIMPLE ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_STATEMENT_VALUE_PROP ) , Vocabulary . getPropertyUri ( document . getEntityId ( ) , PropertyContext . VALUE ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_QUALIFIER_PROP ) , Vocabulary . getPropertyUri ( document . getEntityId ( ) , PropertyContext . QUALIFIER_SIMPLE ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_QUALIFIER_VALUE_PROP ) , Vocabulary . getPropertyUri ( document . getEntityId ( ) , PropertyContext . QUALIFIER ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_REFERENCE_PROP ) , Vocabulary . getPropertyUri ( document . getEntityId ( ) , PropertyContext . REFERENCE_SIMPLE ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_REFERENCE_VALUE_PROP ) , Vocabulary . getPropertyUri ( document . getEntityId ( ) , PropertyContext . REFERENCE ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_NO_VALUE_PROP ) , Vocabulary . 
getPropertyUri ( document . getEntityId ( ) , PropertyContext . NO_VALUE ) ) ; this . rdfWriter . writeTripleUriObject ( subject , this . rdfWriter . getUri ( Vocabulary . WB_NO_QUALIFIER_VALUE_PROP ) , Vocabulary . getPropertyUri ( document . getEntityId ( ) , PropertyContext . NO_QUALIFIER_VALUE ) ) ; // TODO something more with NO_VALUE }
Writes triples which conect properties with there corresponding rdf properties for statements simple statements qualifiers reference attributes and values .
705
24
147,965
void writeBestRankTriples ( ) { for ( Resource resource : this . rankBuffer . getBestRankedStatements ( ) ) { try { this . rdfWriter . writeTripleUriObject ( resource , RdfWriter . RDF_TYPE , RdfWriter . WB_BEST_RANK . toString ( ) ) ; } catch ( RDFHandlerException e ) { throw new RuntimeException ( e . getMessage ( ) , e ) ; } } this . rankBuffer . clear ( ) ; }
Writes triples to determine the statements with the highest rank .
109
13
147,966
String getUriStringForRank ( StatementRank rank ) { switch ( rank ) { case NORMAL : return Vocabulary . WB_NORMAL_RANK ; case PREFERRED : return Vocabulary . WB_PREFERRED_RANK ; case DEPRECATED : return Vocabulary . WB_DEPRECATED_RANK ; default : throw new IllegalArgumentException ( ) ; } }
Returns an URI which represents the statement rank in a triple .
84
12
147,967
public static String fixLanguageCodeIfDeprecated ( String wikimediaLanguageCode ) { if ( DEPRECATED_LANGUAGE_CODES . containsKey ( wikimediaLanguageCode ) ) { return DEPRECATED_LANGUAGE_CODES . get ( wikimediaLanguageCode ) ; } else { return wikimediaLanguageCode ; } }
Translate a Wikimedia language code to its preferred value if this code is deprecated or return it untouched if the string is not a known deprecated Wikimedia language code
77
30
147,968
public T withLabel ( String text , String languageCode ) { withLabel ( factory . getMonolingualTextValue ( text , languageCode ) ) ; return getThis ( ) ; }
Adds an additional label to the constructed document .
39
9
147,969
public T withDescription ( String text , String languageCode ) { withDescription ( factory . getMonolingualTextValue ( text , languageCode ) ) ; return getThis ( ) ; }
Adds an additional description to the constructed document .
39
9
147,970
public T withAlias ( String text , String languageCode ) { withAlias ( factory . getMonolingualTextValue ( text , languageCode ) ) ; return getThis ( ) ; }
Adds an additional alias to the constructed document .
39
9
147,971
public T withStatement ( Statement statement ) { PropertyIdValue pid = statement . getMainSnak ( ) . getPropertyId ( ) ; ArrayList < Statement > pidStatements = this . statements . get ( pid ) ; if ( pidStatements == null ) { pidStatements = new ArrayList < Statement > ( ) ; this . statements . put ( pid , pidStatements ) ; } pidStatements . add ( statement ) ; return getThis ( ) ; }
Adds an additional statement to the constructed document .
99
9
147,972
protected void logIncompatibleValueError ( PropertyIdValue propertyIdValue , String datatype , String valueType ) { logger . warn ( "Property " + propertyIdValue . getId ( ) + " has type \"" + datatype + "\" but a value of type " + valueType + ". Data ignored." ) ; }
Logs a message for a case where the value of a property does not fit to its declared datatype .
71
23
147,973
@ JsonInclude ( Include . NON_EMPTY ) @ JsonProperty ( "id" ) public String getJsonId ( ) { if ( ! EntityIdValue . SITE_LOCAL . equals ( this . siteIri ) ) { return this . entityId ; } else { return null ; } }
Returns the string id of the entity that this document refers to . Only for use by Jackson during serialization .
68
22
147,974
public Map < String , EntityDocument > wbGetEntities ( WbGetEntitiesActionData properties ) throws MediaWikiApiErrorException , IOException { return wbGetEntities ( properties . ids , properties . sites , properties . titles , properties . props , properties . languages , properties . sitefilter ) ; }
Creates a map of identifiers or page titles to documents retrieved via the API URL
69
16
147,975
public static String getStatementUri ( Statement statement ) { int i = statement . getStatementId ( ) . indexOf ( ' ' ) + 1 ; return PREFIX_WIKIDATA_STATEMENT + statement . getSubject ( ) . getId ( ) + "-" + statement . getStatementId ( ) . substring ( i ) ; }
Get the URI for the given statement .
74
8
147,976
public static String getPropertyUri ( PropertyIdValue propertyIdValue , PropertyContext propertyContext ) { switch ( propertyContext ) { case DIRECT : return PREFIX_PROPERTY_DIRECT + propertyIdValue . getId ( ) ; case STATEMENT : return PREFIX_PROPERTY + propertyIdValue . getId ( ) ; case VALUE_SIMPLE : return PREFIX_PROPERTY_STATEMENT + propertyIdValue . getId ( ) ; case VALUE : return PREFIX_PROPERTY_STATEMENT_VALUE + propertyIdValue . getId ( ) ; case QUALIFIER : return PREFIX_PROPERTY_QUALIFIER_VALUE + propertyIdValue . getId ( ) ; case QUALIFIER_SIMPLE : return PREFIX_PROPERTY_QUALIFIER + propertyIdValue . getId ( ) ; case REFERENCE : return PREFIX_PROPERTY_REFERENCE_VALUE + propertyIdValue . getId ( ) ; case REFERENCE_SIMPLE : return PREFIX_PROPERTY_REFERENCE + propertyIdValue . getId ( ) ; case NO_VALUE : return PREFIX_WIKIDATA_NO_VALUE + propertyIdValue . getId ( ) ; case NO_QUALIFIER_VALUE : return PREFIX_WIKIDATA_NO_QUALIFIER_VALUE + propertyIdValue . getId ( ) ; default : return null ; } }
Get the URI for the given property in the given context .
313
12
147,977
public StatementGroup findStatementGroup ( String propertyIdValue ) { if ( this . claims . containsKey ( propertyIdValue ) ) { return new StatementGroupImpl ( this . claims . get ( propertyIdValue ) ) ; } return null ; }
Find a statement group by its property id without checking for equality with the site IRI . More efficient implementation than the default one .
51
26
147,978
protected static Map < String , List < Statement > > addStatementToGroups ( Statement statement , Map < String , List < Statement > > claims ) { Map < String , List < Statement > > newGroups = new HashMap <> ( claims ) ; String pid = statement . getMainSnak ( ) . getPropertyId ( ) . getId ( ) ; if ( newGroups . containsKey ( pid ) ) { List < Statement > newGroup = new ArrayList <> ( newGroups . get ( pid ) . size ( ) ) ; boolean statementReplaced = false ; for ( Statement existingStatement : newGroups . get ( pid ) ) { if ( existingStatement . getStatementId ( ) . equals ( statement . getStatementId ( ) ) && ! existingStatement . getStatementId ( ) . isEmpty ( ) ) { statementReplaced = true ; newGroup . add ( statement ) ; } else { newGroup . add ( existingStatement ) ; } } if ( ! statementReplaced ) { newGroup . add ( statement ) ; } newGroups . put ( pid , newGroup ) ; } else { newGroups . put ( pid , Collections . singletonList ( statement ) ) ; } return newGroups ; }
Adds a Statement to a given collection of statement groups . If the statement id is not null and matches that of an existing statement this statement will be replaced .
263
31
147,979
protected static Map < String , List < Statement > > removeStatements ( Set < String > statementIds , Map < String , List < Statement > > claims ) { Map < String , List < Statement > > newClaims = new HashMap <> ( claims . size ( ) ) ; for ( Entry < String , List < Statement > > entry : claims . entrySet ( ) ) { List < Statement > filteredStatements = new ArrayList <> ( ) ; for ( Statement s : entry . getValue ( ) ) { if ( ! statementIds . contains ( s . getStatementId ( ) ) ) { filteredStatements . add ( s ) ; } } if ( ! filteredStatements . isEmpty ( ) ) { newClaims . put ( entry . getKey ( ) , filteredStatements ) ; } } return newClaims ; }
Removes statement ids from a collection of statement groups .
180
12
147,980
public static String getDatatypeIriFromJsonDatatype ( String jsonDatatype ) { switch ( jsonDatatype ) { case JSON_DT_ITEM : return DT_ITEM ; case JSON_DT_PROPERTY : return DT_PROPERTY ; case JSON_DT_GLOBE_COORDINATES : return DT_GLOBE_COORDINATES ; case JSON_DT_URL : return DT_URL ; case JSON_DT_COMMONS_MEDIA : return DT_COMMONS_MEDIA ; case JSON_DT_TIME : return DT_TIME ; case JSON_DT_QUANTITY : return DT_QUANTITY ; case JSON_DT_STRING : return DT_STRING ; case JSON_DT_MONOLINGUAL_TEXT : return DT_MONOLINGUAL_TEXT ; default : if ( ! JSON_DATATYPE_PATTERN . matcher ( jsonDatatype ) . matches ( ) ) { throw new IllegalArgumentException ( "Invalid JSON datatype \"" + jsonDatatype + "\"" ) ; } String [ ] parts = jsonDatatype . split ( "-" ) ; for ( int i = 0 ; i < parts . length ; i ++ ) { parts [ i ] = StringUtils . capitalize ( parts [ i ] ) ; } return "http://wikiba.se/ontology#" + StringUtils . join ( parts ) ; } }
Returns the WDTK datatype IRI for the property datatype as represented by the given JSON datatype string .
319
27
147,981
public static String getJsonDatatypeFromDatatypeIri ( String datatypeIri ) { switch ( datatypeIri ) { case DatatypeIdValue . DT_ITEM : return DatatypeIdImpl . JSON_DT_ITEM ; case DatatypeIdValue . DT_GLOBE_COORDINATES : return DatatypeIdImpl . JSON_DT_GLOBE_COORDINATES ; case DatatypeIdValue . DT_URL : return DatatypeIdImpl . JSON_DT_URL ; case DatatypeIdValue . DT_COMMONS_MEDIA : return DatatypeIdImpl . JSON_DT_COMMONS_MEDIA ; case DatatypeIdValue . DT_TIME : return DatatypeIdImpl . JSON_DT_TIME ; case DatatypeIdValue . DT_QUANTITY : return DatatypeIdImpl . JSON_DT_QUANTITY ; case DatatypeIdValue . DT_STRING : return DatatypeIdImpl . JSON_DT_STRING ; case DatatypeIdValue . DT_MONOLINGUAL_TEXT : return DatatypeIdImpl . JSON_DT_MONOLINGUAL_TEXT ; case DatatypeIdValue . DT_PROPERTY : return DatatypeIdImpl . JSON_DT_PROPERTY ; default : //We apply the reverse algorithm of JacksonDatatypeId::getDatatypeIriFromJsonDatatype Matcher matcher = DATATYPE_ID_PATTERN . matcher ( datatypeIri ) ; if ( ! matcher . matches ( ) ) { throw new IllegalArgumentException ( "Unknown datatype: " + datatypeIri ) ; } StringBuilder jsonDatatypeBuilder = new StringBuilder ( ) ; for ( char ch : StringUtils . uncapitalize ( matcher . group ( 1 ) ) . toCharArray ( ) ) { if ( Character . isUpperCase ( ch ) ) { jsonDatatypeBuilder . append ( ' ' ) . append ( Character . toLowerCase ( ch ) ) ; } else { jsonDatatypeBuilder . append ( ch ) ; } } return jsonDatatypeBuilder . toString ( ) ; } }
Returns the JSON datatype for the property datatype as represented by the given WDTK datatype IRI string .
494
27
147,982
protected InputStream getCompressorInputStream ( InputStream inputStream , CompressionType compressionType ) throws IOException { switch ( compressionType ) { case NONE : return inputStream ; case GZIP : return new GZIPInputStream ( inputStream ) ; case BZ2 : return new BZip2CompressorInputStream ( new BufferedInputStream ( inputStream ) ) ; default : throw new IllegalArgumentException ( "Unsupported compression type: " + compressionType ) ; } }
Returns an input stream that applies the required decompression to the given input stream .
104
16
147,983
void createDirectory ( Path path ) throws IOException { if ( Files . exists ( path ) && Files . isDirectory ( path ) ) { return ; } if ( this . readOnly ) { throw new FileNotFoundException ( "The requested directory \"" + path . toString ( ) + "\" does not exist and we are in read-only mode, so it cannot be created." ) ; } Files . createDirectory ( path ) ; }
Creates a directory at the given path if it does not exist yet and if the directory manager was not configured for read - only access .
93
28
147,984
public static String getDumpFilePostfix ( DumpContentType dumpContentType ) { if ( WmfDumpFile . POSTFIXES . containsKey ( dumpContentType ) ) { return WmfDumpFile . POSTFIXES . get ( dumpContentType ) ; } else { throw new IllegalArgumentException ( "Unsupported dump type " + dumpContentType ) ; } }
Returns the ending used by the Wikimedia - provided dumpfile names of the given type .
84
17
147,985
public static String getDumpFileWebDirectory ( DumpContentType dumpContentType , String projectName ) { if ( dumpContentType == DumpContentType . JSON ) { if ( "wikidatawiki" . equals ( projectName ) ) { return WmfDumpFile . DUMP_SITE_BASE_URL + WmfDumpFile . WEB_DIRECTORY . get ( dumpContentType ) + "wikidata" + "/" ; } else { throw new RuntimeException ( "Wikimedia Foundation uses non-systematic directory names for this type of dump file." + " I don't know where to find dumps of project " + projectName ) ; } } else if ( WmfDumpFile . WEB_DIRECTORY . containsKey ( dumpContentType ) ) { return WmfDumpFile . DUMP_SITE_BASE_URL + WmfDumpFile . WEB_DIRECTORY . get ( dumpContentType ) + projectName + "/" ; } else { throw new IllegalArgumentException ( "Unsupported dump type " + dumpContentType ) ; } }
Returns the absolute directory on the Web site where dumpfiles of the given type can be found .
245
19
147,986
public static CompressionType getDumpFileCompressionType ( String fileName ) { if ( fileName . endsWith ( ".gz" ) ) { return CompressionType . GZIP ; } else if ( fileName . endsWith ( ".bz2" ) ) { return CompressionType . BZ2 ; } else { return CompressionType . NONE ; } }
Returns the compression type of this kind of dump file using file suffixes
81
14
147,987
/**
 * Returns the name of the local directory where the dump file of the given
 * type and date should be stored, e.g. "json-20240101".
 *
 * @param dumpContentType the type of dump
 * @param dateStamp the date stamp of the dump, e.g. "20240101"
 * @return the directory name
 */
public static String getDumpFileDirectoryName(DumpContentType dumpContentType, String dateStamp) {
	// Use a locale-independent lowercase conversion so directory names are
	// identical regardless of the JVM default locale (e.g. the Turkish
	// dotless-i problem would otherwise corrupt names containing 'I').
	return dumpContentType.toString().toLowerCase(java.util.Locale.ROOT)
			+ "-" + dateStamp;
}
Returns the name of the directory where the dumpfile of the given type and date should be stored .
48
20
147,988
/**
 * Returns the name under which this dump file is published. This is the
 * name used online and also locally when downloading the file. JSON dumps
 * omit the project name; all other dumps prefix it.
 *
 * @param dumpContentType the type of dump
 * @param projectName the project name, e.g. "wikidatawiki"
 * @param dateStamp the date stamp of the dump
 * @return the dump file name
 */
public static String getDumpFileName(DumpContentType dumpContentType, String projectName, String dateStamp) {
	String postfix = WmfDumpFile.getDumpFilePostfix(dumpContentType);
	if (dumpContentType == DumpContentType.JSON) {
		return dateStamp + postfix;
	}
	return projectName + "-" + dateStamp + postfix;
}
Returns the name under which this dump file is published. This is the name used online and also locally when downloading the file.
100
23
147,989
/**
 * Returns true if the given dump file type contains page revisions, and
 * false if it does not. Dumps that do not contain pages hold auxiliary
 * information such as linked sites.
 *
 * @param dumpContentType the type of dump
 * @return true if the dump contains page revisions
 * @throws IllegalArgumentException if the dump type is not supported
 */
public static boolean isRevisionDumpFile(DumpContentType dumpContentType) {
	// Single map lookup instead of containsKey() followed by get().
	// (Assumes REVISION_DUMP contains no null values, which holds for a
	// constant lookup table of booleans.)
	Boolean isRevisionDump = WmfDumpFile.REVISION_DUMP.get(dumpContentType);
	if (isRevisionDump == null) {
		throw new IllegalArgumentException("Unsupported dump type "
				+ dumpContentType);
	}
	return isRevisionDump;
}
Returns true if the given dump file type contains page revisions and false if it does not . Dumps that do not contain pages are for auxiliary information such as linked sites .
90
34
147,990
/**
 * Processes dump file data from the given input stream in recovery mode.
 * The stream is assumed to contain the JSON serialization of a list of
 * entities, one entity per line. Since a previous error may have left the
 * stream mid-line, the first (partial) line is skipped, then each remaining
 * line is parsed as one {@link EntityDocument} and handed to
 * {@code handleDocument}. Per-line parse failures are logged and skipped,
 * so a single bad record does not abort the rest of the file.
 *
 * @param inputStream the stream positioned somewhere inside the JSON list
 * @throws IOException if reading from the stream fails
 */
private void processDumpFileContentsRecovery(InputStream inputStream) throws IOException {
	JsonDumpFileProcessor.logger.warn("Entering recovery mode to parse rest of file. This might be slightly slower.");
	// NOTE(review): InputStreamReader uses the platform default charset
	// here; dump files are UTF-8 — confirm this is handled upstream.
	BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
	String line = br.readLine();
	if (line == null) {
		// can happen if iterator already has consumed all
		// the stream
		return;
	}
	// Truncate very long lines before logging them (keep first 100 and
	// last 50 characters only).
	if (line.length() >= 100) {
		line = line.substring(0, 100) + "[...]"
				+ line.substring(line.length() - 50);
	}
	JsonDumpFileProcessor.logger.warn("Skipping rest of current line: " + line);
	line = br.readLine();
	// Stop at EOF or at a trivially short line (e.g. the closing "]" of
	// the JSON list).
	while (line != null && line.length() > 1) {
		try {
			EntityDocument document;
			// Strip a single trailing character before parsing
			// (presumably the separator between list entries — verify
			// against the dump format).
			if (line.charAt(line.length() - 1) == ' ') {
				document = documentReader.readValue(line.substring(0,
						line.length() - 1));
			} else {
				document = documentReader.readValue(line);
			}
			handleDocument(document);
		} catch (JsonProcessingException e) {
			// Log and continue with the next line; only a prefix of the
			// offending line is reported.
			logJsonProcessingException(e);
			JsonDumpFileProcessor.logger.error("Problematic line was: "
					+ line.substring(0, Math.min(50, line.length()))
					+ "...");
		}
		line = br.readLine();
	}
}
Processes dump file data from the given input stream. The method can recover from an error that occurred while processing an input stream, which is assumed to contain the JSON serialization of a list of JSON entities, with each entity serialized on one line. To recover from the previous error, the first line is skipped.
344
60
147,991
/**
 * Reports a given exception as a {@link RuntimeException}, since the
 * implemented interface does not allow us to throw checked exceptions
 * directly. The original exception is logged and preserved as the cause.
 *
 * @param e the exception to report
 */
private void reportException(Exception e) {
	String details = e.toString();
	logger.error("Failed to write JSON export: " + details);
	throw new RuntimeException(details, e);
}
Reports a given exception as a RuntimeException since the interface does not allow us to throw checked exceptions directly .
47
21
147,992
/**
 * Serializes the given object to a JSON string. In case of a serialization
 * error, null is returned and the error is logged; in particular this
 * happens if the object is not based on a Jackson-annotated class.
 *
 * @param object the object to serialize
 * @return the JSON string, or null on failure
 */
protected static String jacksonObjectToString(Object object) {
	String result;
	try {
		result = mapper.writeValueAsString(object);
	} catch (JsonProcessingException e) {
		logger.error("Failed to serialize JSON data: " + e.toString());
		result = null;
	}
	return result;
}
Serializes the given object in JSON and returns the resulting string . In case of errors null is returned . In particular this happens if the object is not based on a Jackson - annotated class . An error is logged in this case .
66
47
147,993
/**
 * Returns a human-readable string for a {@link TimeValue} precision
 * constant, e.g. "day" for {@link TimeValue#PREC_DAY}. Unknown constants
 * yield a diagnostic string rather than an exception.
 *
 * @param precision one of the TimeValue.PREC_* constants
 * @return a short English description of the precision
 */
protected static String getTimePrecisionString(byte precision) {
	switch (precision) {
	case TimeValue.PREC_SECOND:
		return "sec";
	case TimeValue.PREC_MINUTE:
		return "min";
	case TimeValue.PREC_HOUR:
		return "hour";
	case TimeValue.PREC_DAY:
		return "day";
	case TimeValue.PREC_MONTH:
		return "month";
	case TimeValue.PREC_YEAR:
		return "year";
	case TimeValue.PREC_DECADE:
		return "decade";
	case TimeValue.PREC_100Y:
		return "100 years";
	case TimeValue.PREC_1KY:
		return "1000 years";
	case TimeValue.PREC_10KY:
		return "10K years";
	case TimeValue.PREC_100KY:
		return "100K years";
	case TimeValue.PREC_1MY:
		return "1 million years";
	case TimeValue.PREC_10MY:
		return "10 million years";
	case TimeValue.PREC_100MY:
		return "100 million years";
	case TimeValue.PREC_1GY:
		return "1000 million years";
	default:
		// Deliberately non-throwing: this is used for display output.
		return "Unsupported precision " + precision;
	}
}
Returns a human - readable string representation of a reference to a precision that is used for a time value .
270
21
147,994
/**
 * Executes the API action "wbsetlabel" for the given parameters.
 *
 * @param id entity id to modify, or null
 * @param site site of the page to modify, or null
 * @param title title of the page to modify, or null
 * @param newEntity type of new entity to create, or null
 * @param language language code of the label (must not be null)
 * @param value the new label text, or null to clear the label
 * @param bot whether to mark the edit as a bot edit
 * @param baserevid base revision id for edit-conflict detection
 * @param summary edit summary, or null
 * @return the JSON response of the API
 * @throws IOException if there was an IO problem
 * @throws MediaWikiApiErrorException if the API returned an error
 */
public JsonNode wbSetLabel(String id, String site, String title,
		String newEntity, String language, String value, boolean bot,
		long baserevid, String summary) throws IOException,
		MediaWikiApiErrorException {
	Validate.notNull(language,
			"Language parameter cannot be null when setting a label");
	Map<String, String> parameters = new HashMap<>();
	parameters.put("language", language);
	// A null value means "remove the label", so "value" is only sent
	// when present.
	if (value != null) {
		parameters.put("value", value);
	}
	return performAPIAction("wbsetlabel", id, site, title, newEntity,
			parameters, summary, baserevid, bot);
}
Executes the API action wbsetlabel for the given parameters .
161
14
147,995
/**
 * Executes the API action "wbsetaliases" for the given parameters.
 *
 * @param id entity id to modify, or null
 * @param site site of the page to modify, or null
 * @param title title of the page to modify, or null
 * @param newEntity type of new entity to create, or null
 * @param language language code of the aliases (must not be null)
 * @param add aliases to add, or null
 * @param remove aliases to remove, or null
 * @param set aliases to set (replacing all others); mutually exclusive
 *            with add/remove
 * @param bot whether to mark the edit as a bot edit
 * @param baserevid base revision id for edit-conflict detection
 * @param summary edit summary, or null
 * @return the JSON response of the API
 * @throws IOException if there was an IO problem
 * @throws MediaWikiApiErrorException if the API returned an error
 * @throws IllegalArgumentException if "set" is combined with "add" or
 *             "remove"
 */
public JsonNode wbSetAliases(String id, String site, String title,
		String newEntity, String language, List<String> add,
		List<String> remove, List<String> set, boolean bot,
		long baserevid, String summary) throws IOException,
		MediaWikiApiErrorException {
	Validate.notNull(language,
			"Language parameter cannot be null when setting aliases");
	Map<String, String> parameters = new HashMap<>();
	parameters.put("language", language);
	if (set != null) {
		// "set" replaces all aliases, so combining it with incremental
		// add/remove edits is rejected.
		if (add != null || remove != null) {
			throw new IllegalArgumentException(
					"Cannot use parameters \"add\" or \"remove\" when using \"set\" to edit aliases");
		}
		parameters.put("set", ApiConnection.implodeObjects(set));
	}
	if (add != null) {
		parameters.put("add", ApiConnection.implodeObjects(add));
	}
	if (remove != null) {
		parameters.put("remove", ApiConnection.implodeObjects(remove));
	}
	return performAPIAction("wbsetaliases", id, site, title, newEntity,
			parameters, summary, baserevid, bot);
}
Executes the API action wbsetaliases for the given parameters .
287
15
147,996
/**
 * Executes the API action "wbsetclaim" for the given parameters, adding or
 * changing a single statement.
 *
 * @param statement the JSON serialization of the statement (must not be
 *            null)
 * @param bot whether to mark the edit as a bot edit
 * @param baserevid base revision id for edit-conflict detection
 * @param summary edit summary, or null
 * @return the JSON response of the API
 * @throws IOException if there was an IO problem
 * @throws MediaWikiApiErrorException if the API returned an error
 */
public JsonNode wbSetClaim(String statement, boolean bot, long baserevid,
		String summary) throws IOException, MediaWikiApiErrorException {
	Validate.notNull(statement,
			"Statement parameter cannot be null when adding or changing a statement");
	Map<String, String> parameters = new HashMap<>();
	parameters.put("claim", statement);
	// No entity id/site/title/newEntity needed: the statement carries its
	// own identity.
	return performAPIAction("wbsetclaim", null, null, null, null,
			parameters, summary, baserevid, bot);
}
Executes the API action wbsetclaim for the given parameters .
120
14
147,997
/**
 * Executes the API action "wbremoveclaims" for the given parameters,
 * deleting up to 50 statements in a single request.
 *
 * @param statementIds the non-empty list of statement ids to delete (at
 *            most 50)
 * @param bot whether to mark the edit as a bot edit
 * @param baserevid base revision id for edit-conflict detection
 * @param summary edit summary, or null
 * @return the JSON response of the API
 * @throws IOException if there was an IO problem
 * @throws MediaWikiApiErrorException if the API returned an error
 */
public JsonNode wbRemoveClaims(List<String> statementIds, boolean bot,
		long baserevid, String summary) throws IOException,
		MediaWikiApiErrorException {
	Validate.notNull(statementIds,
			"statementIds parameter cannot be null when deleting statements");
	Validate.notEmpty(statementIds,
			"statement ids to delete must be non-empty when deleting statements");
	// The MediaWiki API caps this action at 50 ids per request.
	Validate.isTrue(statementIds.size() <= 50,
			"At most 50 statements can be deleted at once");
	Map<String, String> parameters = new HashMap<>();
	parameters.put("claim", String.join("|", statementIds));
	return performAPIAction("wbremoveclaims", null, null, null, null,
			parameters, summary, baserevid, bot);
}
Executes the API action wbremoveclaims for the given parameters .
194
15
147,998
/**
 * Fetches the current online data for the given item and fixes the
 * precision of integer quantities for the given property if necessary:
 * every statement whose quantity matches {@code isPlusMinusOneValue} is
 * rewritten with an exact value (lower bound = upper bound = numeric
 * value), keeping its id, qualifiers, references, and rank. If nothing
 * needs fixing, or the item/statements are gone, the method reports this
 * on stdout and returns. API and IO errors are printed, not rethrown.
 *
 * @param itemIdValue the item to inspect and possibly edit
 * @param propertyId the id of the property whose values are checked
 */
protected void fixIntegerPrecisions(ItemIdValue itemIdValue, String propertyId) {
	String qid = itemIdValue.getId();
	try {
		// Fetch the online version of the item to make sure we edit the
		// current version:
		ItemDocument currentItemDocument = (ItemDocument) dataFetcher
				.getEntityDocument(qid);
		if (currentItemDocument == null) {
			System.out.println("*** " + qid
					+ " could not be fetched. Maybe it has been deleted.");
			return;
		}
		// Get the current statements for the property we want to fix:
		StatementGroup editPropertyStatements = currentItemDocument
				.findStatementGroup(propertyId);
		if (editPropertyStatements == null) {
			System.out.println("*** " + qid
					+ " no longer has any statements for " + propertyId);
			return;
		}
		PropertyIdValue property = Datamodel
				.makeWikidataPropertyIdValue(propertyId);
		// Collect replacement statements; only +/-1-imprecise quantities
		// are rewritten.
		List<Statement> updateStatements = new ArrayList<>();
		for (Statement s : editPropertyStatements) {
			QuantityValue qv = (QuantityValue) s.getValue();
			if (qv != null && isPlusMinusOneValue(qv)) {
				// Exact value: lower and upper bound equal the number.
				QuantityValue exactValue = Datamodel.makeQuantityValue(
						qv.getNumericValue(), qv.getNumericValue(),
						qv.getNumericValue());
				// Reusing the statement id makes this an in-place edit
				// rather than an addition.
				Statement exactStatement = StatementBuilder
						.forSubjectAndProperty(itemIdValue, property)
						.withValue(exactValue).withId(s.getStatementId())
						.withQualifiers(s.getQualifiers())
						.withReferences(s.getReferences())
						.withRank(s.getRank()).build();
				updateStatements.add(exactStatement);
			}
		}
		if (updateStatements.size() == 0) {
			System.out.println("*** " + qid + " quantity values for "
					+ propertyId + " already fixed");
			return;
		}
		logEntityModification(currentItemDocument.getEntityId(),
				updateStatements, propertyId);
		// No statements are deleted; only replacements are pushed.
		dataEditor.updateStatements(currentItemDocument, updateStatements,
				Collections.<Statement> emptyList(),
				"Set exact values for [[Property:" + propertyId + "|"
						+ propertyId + "]] integer quantities (Task MB2)");
	} catch (MediaWikiApiErrorException e) {
		e.printStackTrace();
	} catch (IOException e) {
		e.printStackTrace();
	}
}
Fetches the current online data for the given item and fixes the precision of integer quantities if necessary .
568
21
147,999
/**
 * Marks the given list of statements for deletion by adding their ids to
 * {@code toDelete}. It is verified that the current document actually
 * contains each statement before doing so; the check is based on exact
 * statement equality, including qualifier order and statement id. If a
 * statement is absent, a warning is logged instead — including, when
 * available, a same-id statement with different content (a likely
 * concurrent edit).
 *
 * @param currentDocument the document whose statement groups are searched
 * @param deleteStatements the statements that should be deleted
 */
protected void markStatementsForDeletion(StatementDocument currentDocument,
		List<Statement> deleteStatements) {
	for (Statement statement : deleteStatements) {
		boolean found = false;
		for (StatementGroup sg : currentDocument.getStatementGroups()) {
			// Only scan the group for this statement's property.
			if (!sg.getProperty().equals(
					statement.getMainSnak().getPropertyId())) {
				continue;
			}
			Statement changedStatement = null;
			for (Statement existingStatement : sg) {
				if (existingStatement.equals(statement)) {
					found = true;
					toDelete.add(statement.getStatementId());
				} else if (existingStatement.getStatementId().equals(
						statement.getStatementId())) {
					// (we assume all existing statement ids to be nonempty
					// here)
					changedStatement = existingStatement;
					break;
				}
			}
			if (!found) {
				// Not found by exact equality: warn, and include the
				// same-id-but-different statement if one was seen.
				StringBuilder warning = new StringBuilder();
				warning.append("Cannot delete statement (id ")
						.append(statement.getStatementId())
						.append(") since it is not present in data. Statement was:\n")
						.append(statement);
				if (changedStatement != null) {
					warning.append(
							"\nThe data contains another statement with the same id: maybe it has been edited? Other statement was:\n")
							.append(changedStatement);
				}
				logger.warn(warning.toString());
			}
		}
	}
}
Marks the given list of statements for deletion . It is verified that the current document actually contains the statements before doing so . This check is based on exact statement equality including qualifier order and statement id .
312
40