_id stringlengths 2 7 | title stringlengths 3 140 | partition stringclasses 3
values | text stringlengths 73 34.1k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q4800 | SchemaUsageAnalyzer.createDirectory | train | // Creates the directory at the given path. An already-existing directory
// counts as success; the exception is rethrown only if the existing path
// is not a directory (e.g. a regular file).
private static void createDirectory(Path path) throws IOException {
try {
Files.createDirectory(path);
} catch (FileAlreadyExistsException e) {
// Tolerate a directory created by an earlier/concurrent run.
if (!Files.isDirectory(path)) {
throw e;
}
}
} | java | {
"resource": ""
} |
q4801 | SchemaUsageAnalyzer.openResultFileOuputStream | train | // Opens a FileOutputStream for the named result file inside the given
// result directory. The caller is responsible for closing the stream.
// NOTE(review): "Ouput" in the method name is a typo, but renaming would
// break callers, so it is kept.
public static FileOutputStream openResultFileOuputStream(
Path resultDirectory, String filename) throws IOException {
Path filePath = resultDirectory.resolve(filename);
return new FileOutputStream(filePath.toFile());
} | java | {
"resource": ""
} |
q4802 | SchemaUsageAnalyzer.addSuperClasses | train | // Recursively adds directSuperClass and all of its (transitive) superclasses
// to subClassRecord.superClasses. The membership check at the top doubles as
// a termination guard against cyclic subclass-of chains.
private void addSuperClasses(Integer directSuperClass,
ClassRecord subClassRecord) {
if (subClassRecord.superClasses.contains(directSuperClass)) {
// Already processed (or cycle detected): stop recursion.
return;
}
subClassRecord.superClasses.add(directSuperClass);
ClassRecord superClassRecord = getClassRecord(directSuperClass);
if (superClassRecord == null) {
// No record for this class id; nothing further to traverse.
return;
}
for (Integer superClass : superClassRecord.directSuperClasses) {
addSuperClasses(superClass, subClassRecord);
}
} | java | {
"resource": ""
} |
q4803 | SchemaUsageAnalyzer.getNumId | train | // Extracts the numeric part of an entity id, either from a full Wikidata
// entity URI (isUri == true) or from a plain id string such as "Q42"/"P31".
// Returns 0 for URIs outside the Wikidata entity namespace.
// NOTE(review): the URI branch skips one extra character (the "Q"),
// assuming the id after "entity/" starts with a single letter; a
// non-numeric remainder would make Integer.parseInt throw
// NumberFormatException -- confirm inputs are always well-formed.
private Integer getNumId(String idString, boolean isUri) {
String numString;
if (isUri) {
if (!idString.startsWith("http://www.wikidata.org/entity/")) {
return 0;
}
numString = idString.substring("http://www.wikidata.org/entity/Q"
.length());
} else {
// Plain id: drop the leading letter ("Q", "P", ...).
numString = idString.substring(1);
}
return Integer.parseInt(numString);
} | java | {
"resource": ""
} |
q4804 | SchemaUsageAnalyzer.countCooccurringProperties | train | private void countCooccurringProperties(
StatementDocument statementDocument, UsageRecord usageRecord,
PropertyIdValue thisPropertyIdValue) {
for (StatementGroup sg : statementDocument.getStatementGroups()) {
if (!sg.getProperty().equals(thisPropertyIdValue)) {
Integer propertyId = getNumId(sg.getProperty().getId(), false);
if (!usageRecord.propertyCoCounts.containsKey(propertyId)) {
usageRecord.propertyCoCounts.put(propertyId, 1);
} else {
usageRecord.propertyCoCounts.put(propertyId,
usageRecord.propertyCoCounts.get(propertyId) + 1);
}
}
}
} | java | {
"resource": ""
} |
q4805 | SchemaUsageAnalyzer.runSparqlQuery | train | // Runs the given SPARQL query against the Wikidata Query Service and
// returns the raw JSON response stream. The caller must close the stream.
// NOTE(review): the HTTP response code is never checked; a server error
// surfaces only as an IOException from getInputStream() -- confirm this
// is acceptable for callers.
private InputStream runSparqlQuery(String query) throws IOException {
try {
String queryString = "query=" + URLEncoder.encode(query, "UTF-8")
+ "&format=json";
URL url = new URL("https://query.wikidata.org/sparql?"
+ queryString);
HttpURLConnection connection = (HttpURLConnection) url
.openConnection();
connection.setRequestMethod("GET");
return connection.getInputStream();
} catch (UnsupportedEncodingException | MalformedURLException e) {
// Both exceptions indicate a programming error (UTF-8 always
// exists; the URL is built from constants), so fail fast.
throw new RuntimeException(e.getMessage(), e);
}
} | java | {
"resource": ""
} |
q4806 | SchemaUsageAnalyzer.writePropertyData | train | // Serializes all collected property records as a single JSON object
// ("properties.json" in the result directory), mapping numeric property
// id to record. The JSON object is written by hand (braces and commas)
// around per-record Jackson serialization.
private void writePropertyData() {
try (PrintStream out = new PrintStream(openResultFileOuputStream(
resultDirectory, "properties.json"))) {
out.println("{");
int count = 0;
for (Entry<Integer, PropertyRecord> propertyEntry : this.propertyRecords
.entrySet()) {
// Comma before every record except the first.
if (count > 0) {
out.println(",");
}
out.print("\"" + propertyEntry.getKey() + "\":");
mapper.writeValue(out, propertyEntry.getValue());
count++;
}
out.println("\n}");
System.out.println(" Serialized information for " + count
+ " properties.");
} catch (IOException e) {
e.printStackTrace();
}
} | java | {
"resource": ""
} |
q4807 | SchemaUsageAnalyzer.writeClassData | train | // Serializes all non-empty class records (those with at least one subclass
// or one instance) as a single JSON object ("classes.json"). Before
// writing, it back-fills each superclass record's nonemptyDirectSubclasses
// from the non-empty classes' directSuperClasses lists.
private void writeClassData() {
try (PrintStream out = new PrintStream(openResultFileOuputStream(
resultDirectory, "classes.json"))) {
out.println("{");
// Add direct subclass information:
for (Entry<Integer, ClassRecord> classEntry : this.classRecords
.entrySet()) {
if (classEntry.getValue().subclassCount == 0
&& classEntry.getValue().itemCount == 0) {
continue;
}
// NOTE(review): assumes every referenced superclass id has a
// record in classRecords; a missing entry would NPE -- confirm.
for (Integer superClass : classEntry.getValue().directSuperClasses) {
this.classRecords.get(superClass).nonemptyDirectSubclasses
.add(classEntry.getKey().toString());
}
}
int count = 0;
int countNoLabel = 0;
for (Entry<Integer, ClassRecord> classEntry : this.classRecords
.entrySet()) {
// Skip empty classes (no subclasses, no instances).
if (classEntry.getValue().subclassCount == 0
&& classEntry.getValue().itemCount == 0) {
continue;
}
if (classEntry.getValue().label == null) {
countNoLabel++;
}
if (count > 0) {
out.println(",");
}
out.print("\"" + classEntry.getKey() + "\":");
mapper.writeValue(out, classEntry.getValue());
count++;
}
out.println("\n}");
System.out.println(" Serialized information for " + count
+ " class items.");
System.out.println(" -- class items with missing label: "
+ countNoLabel);
} catch (IOException e) {
e.printStackTrace();
}
} | java | {
"resource": ""
} |
q4808 | DataFormatter.formatTimeISO8601 | train | // Formats a TimeValue as an ISO 8601-style timestamp string
// ("+YYYY-MM-DDThh:mm:ssZ"), using FORMAT_YEAR for the (possibly signed)
// year and FORMAT_OTHER for the two-digit fields. Positive years get an
// explicit leading "+"; negative years are signed by DecimalFormat itself.
public static String formatTimeISO8601(TimeValue value) {
StringBuilder builder = new StringBuilder();
DecimalFormat yearForm = new DecimalFormat(FORMAT_YEAR);
DecimalFormat timeForm = new DecimalFormat(FORMAT_OTHER);
if (value.getYear() > 0) {
builder.append("+");
}
builder.append(yearForm.format(value.getYear()));
builder.append("-");
builder.append(timeForm.format(value.getMonth()));
builder.append("-");
builder.append(timeForm.format(value.getDay()));
builder.append("T");
builder.append(timeForm.format(value.getHour()));
builder.append(":");
builder.append(timeForm.format(value.getMinute()));
builder.append(":");
builder.append(timeForm.format(value.getSecond()));
builder.append("Z");
return builder.toString();
} | java | {
"resource": ""
} |
q4809 | DataFormatter.formatBigDecimal | train | // Renders a BigDecimal in the Wikibase convention: non-negative numbers
// (zero included) carry an explicit leading "+"; negatives keep the "-"
// that BigDecimal.toString() already produces.
public static String formatBigDecimal(BigDecimal number) {
String rendered = number.toString();
return number.signum() >= 0 ? "+" + rendered : rendered;
} | java | {
"resource": ""
} |
q4810 | MwLocalDumpFile.guessDumpContentType | train | // Guesses the dump content type from a file name by looking at extension
// and name fragments; falls back to JSON (with a warning) when nothing
// matches.
private static DumpContentType guessDumpContentType(String fileName) {
String lcDumpName = fileName.toLowerCase();
if (lcDumpName.contains(".json.gz")) {
return DumpContentType.JSON;
} else if (lcDumpName.contains(".json.bz2")) {
return DumpContentType.JSON;
} else if (lcDumpName.contains(".sql.gz")) {
return DumpContentType.SITES;
} else if (lcDumpName.contains(".xml.bz2")) {
// XML dumps: distinguish daily/current/full by name fragments.
if (lcDumpName.contains("daily")) {
return DumpContentType.DAILY;
} else if (lcDumpName.contains("current")) {
return DumpContentType.CURRENT;
} else {
return DumpContentType.FULL;
}
} else {
logger.warn("Could not guess type of the dump file \"" + fileName
+ "\". Defaulting to json.gz.");
return DumpContentType.JSON;
}
} | java | {
"resource": ""
} |
q4811 | MwLocalDumpFile.guessDumpDate | train | // Guesses the dump date stamp from a file name by extracting the first
// run of exactly eight digits; returns the literal placeholder "YYYYMMDD"
// (with an info log) when none is found.
private static String guessDumpDate(String fileName) {
Pattern p = Pattern.compile("([0-9]{8})");
Matcher m = p.matcher(fileName);
if (m.find()) {
return m.group(1);
} else {
logger.info("Could not guess date of the dump file \"" + fileName
+ "\". Defaulting to YYYYMMDD.");
return "YYYYMMDD";
}
} | java | {
"resource": ""
} |
q4812 | RankBuffer.add | train | // Records a statement subject for best-rank tracking. Subjects with the
// current best rank are accumulated; the first PREFERRED statement seen
// while the best rank is NORMAL resets the buffer, since PREFERRED
// supersedes NORMAL. Other ranks (e.g. DEPRECATED) are ignored.
// NOTE(review): the initial value of bestRank is not visible here --
// presumably NORMAL; confirm in the field declaration.
public void add(StatementRank rank, Resource subject) {
if (this.bestRank == rank) {
subjects.add(subject);
} else if(bestRank == StatementRank.NORMAL && rank == StatementRank.PREFERRED) {
//We found a preferred statement
subjects.clear();
bestRank = StatementRank.PREFERRED;
subjects.add(subject);
}
} | java | {
"resource": ""
} |
q4813 | SnakRdfConverter.writeAuxiliaryTriples | train | // Flushes all queued some-value restrictions as OWL triples, empties the
// queue, and delegates to the value converter for its own auxiliary
// triples.
public void writeAuxiliaryTriples() throws RDFHandlerException {
for (PropertyRestriction pr : this.someValuesQueue) {
writeSomeValueRestriction(pr.propertyUri, pr.rangeUri, pr.subject);
}
this.someValuesQueue.clear();
this.valueRdfConverter.writeAuxiliaryTriples();
} | java | {
"resource": ""
} |
q4814 | SnakRdfConverter.writeSomeValueRestriction | train | // Writes the three triples of an OWL someValuesFrom restriction on the
// given blank node: rdf:type owl:Restriction, owl:onProperty propertyUri,
// and owl:someValuesFrom rangeUri.
void writeSomeValueRestriction(String propertyUri, String rangeUri,
Resource bnode) throws RDFHandlerException {
this.rdfWriter.writeTripleValueObject(bnode, RdfWriter.RDF_TYPE,
RdfWriter.OWL_RESTRICTION);
this.rdfWriter.writeTripleUriObject(bnode, RdfWriter.OWL_ON_PROPERTY,
propertyUri);
this.rdfWriter.writeTripleUriObject(bnode,
RdfWriter.OWL_SOME_VALUES_FROM, rangeUri);
} | java | {
"resource": ""
} |
q4815 | SnakRdfConverter.getRangeUri | train | // Returns the RDF range URI for the given property based on its registered
// datatype, and, as a side effect, registers the property as a datatype or
// object property in the conversion buffer. Returns null for unknown
// datatypes or when the property type cannot be resolved.
String getRangeUri(PropertyIdValue propertyIdValue) {
String datatype = this.propertyRegister
.getPropertyType(propertyIdValue);
if (datatype == null)
return null;
switch (datatype) {
case DatatypeIdValue.DT_MONOLINGUAL_TEXT:
this.rdfConversionBuffer.addDatatypeProperty(propertyIdValue);
return Vocabulary.RDF_LANG_STRING;
case DatatypeIdValue.DT_STRING:
case DatatypeIdValue.DT_EXTERNAL_ID:
case DatatypeIdValue.DT_MATH:
this.rdfConversionBuffer.addDatatypeProperty(propertyIdValue);
return Vocabulary.XSD_STRING;
case DatatypeIdValue.DT_COMMONS_MEDIA:
case DatatypeIdValue.DT_GLOBE_COORDINATES:
case DatatypeIdValue.DT_ITEM:
case DatatypeIdValue.DT_PROPERTY:
case DatatypeIdValue.DT_LEXEME:
case DatatypeIdValue.DT_FORM:
case DatatypeIdValue.DT_SENSE:
case DatatypeIdValue.DT_TIME:
case DatatypeIdValue.DT_URL:
case DatatypeIdValue.DT_GEO_SHAPE:
case DatatypeIdValue.DT_TABULAR_DATA:
case DatatypeIdValue.DT_QUANTITY:
// All complex-value datatypes map to object properties.
this.rdfConversionBuffer.addObjectProperty(propertyIdValue);
return Vocabulary.OWL_THING;
default:
return null;
}
} | java | {
"resource": ""
} |
q4816 | SnakRdfConverter.addSomeValuesRestriction | train | // Queues a someValuesFrom restriction for later serialization by
// writeAuxiliaryTriples().
void addSomeValuesRestriction(Resource subject, String propertyUri,
String rangeUri) {
this.someValuesQueue.add(new PropertyRestriction(subject, propertyUri,
rangeUri));
} | java | {
"resource": ""
} |
q4817 | WikibaseDataFetcher.getEntityDocumentMap | train | // Fetches entity documents via the wbgetentities API action, applying the
// configured filter properties. Short-circuits to an empty map when no
// entities were requested, avoiding a pointless API call.
Map<String, EntityDocument> getEntityDocumentMap(int numOfEntities,
WbGetEntitiesActionData properties)
throws MediaWikiApiErrorException, IOException {
if (numOfEntities == 0) {
return Collections.emptyMap();
}
configureProperties(properties);
return this.wbGetEntitiesAction.wbGetEntities(properties);
} | java | {
"resource": ""
} |
q4818 | WikibaseDataFetcher.setRequestProps | train | // Assembles the "props" parameter for the wbgetentities request from the
// current document filter. "info" and "datatype" are always requested;
// terms, claims, and sitelinks are added unless fully filtered out.
private void setRequestProps(WbGetEntitiesActionData properties) {
String props = "info|datatype";
if (!this.filter.excludeAllLanguages()) {
props += "|labels|aliases|descriptions";
}
if (!this.filter.excludeAllProperties()) {
props += "|claims";
}
if (!this.filter.excludeAllSiteLinks()) {
props += "|sitelinks";
}
properties.props = props;
} | java | {
"resource": ""
} |
q4819 | WikibaseDataFetcher.setRequestLanguages | train | // Sets the "languages" request parameter from the filter's language set.
// Left unset when all languages are excluded (no terms requested anyway)
// or when no language filter is configured (fetch all languages).
private void setRequestLanguages(WbGetEntitiesActionData properties) {
if (this.filter.excludeAllLanguages()
|| this.filter.getLanguageFilter() == null) {
return;
}
properties.languages = ApiConnection.implodeObjects(this.filter
.getLanguageFilter());
} | java | {
"resource": ""
} |
q4820 | WikibaseDataFetcher.setRequestSitefilter | train | // Sets the "sitefilter" request parameter from the filter's site-link
// set; analogous to setRequestLanguages. Left unset when site links are
// fully excluded or no site filter is configured.
private void setRequestSitefilter(WbGetEntitiesActionData properties) {
if (this.filter.excludeAllSiteLinks()
|| this.filter.getSiteLinkFilter() == null) {
return;
}
properties.sitefilter = ApiConnection.implodeObjects(this.filter
.getSiteLinkFilter());
} | java | {
"resource": ""
} |
q4821 | MwSitesDumpFileProcessor.processSiteRow | train | // Parses one row of the MediaWiki "sites" SQL dump and registers the site
// with the Sites object. The row's 9th field contains a PHP-serialized
// array from which the file_path and page_path values under the "paths"
// key are extracted with a small hand-written tokenizer (see regex
// explanation below) rather than a full PHP deserializer.
// NOTE(review): Pattern.compile runs on every call; hoisting it to a
// static final field would avoid recompilation -- cannot be done within
// this method alone.
void processSiteRow(String siteRow) {
String[] row = getSiteRowFields(siteRow);
String filePath = "";
String pagePath = "";
// Strip the leading "a:N:" and the trailing quote/paren from field 8,
// keeping only the serialized array body.
String dataArray = row[8].substring(row[8].indexOf('{'),
row[8].length() - 2);
// Explanation for the regular expression below:
// "'{' or ';'" followed by either
// "NOT: ';', '{', or '}'" repeated one or more times; or
// "a single '}'"
// The first case matches ";s:5:\"paths\""
// but also ";a:2:" in "{s:5:\"paths\";a:2:{s:9:\ ...".
// The second case matches ";}" which terminates (sub)arrays.
Matcher matcher = Pattern.compile("[{;](([^;}{][^;}{]*)|[}])").matcher(
dataArray);
String prevString = "";
String curString = "";
String path = "";
boolean valuePosition = false;
while (matcher.find()) {
String match = matcher.group().substring(1);
if (match.length() == 0) {
valuePosition = false;
continue;
}
if (match.charAt(0) == 's') {
// String token: alternates between key and value positions.
valuePosition = !valuePosition && !"".equals(prevString);
curString = match.substring(match.indexOf('"') + 1,
match.length() - 2);
} else if (match.charAt(0) == 'a') {
// Start of a nested array: descend into the keyed sub-path.
valuePosition = false;
path = path + "/" + prevString;
} else if ("}".equals(match)) {
// End of a nested array: pop one path segment.
valuePosition = false;
path = path.substring(0, path.lastIndexOf('/'));
}
if (valuePosition && "file_path".equals(prevString)
&& "/paths".equals(path)) {
filePath = curString;
} else if (valuePosition && "page_path".equals(prevString)
&& "/paths".equals(path)) {
pagePath = curString;
}
prevString = curString;
curString = "";
}
MwSitesDumpFileProcessor.logger.debug("Found site data \"" + row[1]
+ "\" (group \"" + row[3] + "\", language \"" + row[5]
+ "\", type \"" + row[2] + "\")");
this.sites.setSiteInformation(row[1], row[3], row[5], row[2], filePath,
pagePath);
} | java | {
"resource": ""
} |
q4822 | Timer.start | train | // Starts (or restarts) this timer, recording the current CPU time and/or
// wall-clock time depending on which flags are set in todoFlags.
// Unmeasured start values are marked with -1.
public synchronized void start() {
if ((todoFlags & RECORD_CPUTIME) != 0) {
currentStartCpuTime = getThreadCpuTime(threadId);
} else {
currentStartCpuTime = -1;
}
if ((todoFlags & RECORD_WALLTIME) != 0) {
currentStartWallTime = System.nanoTime();
} else {
currentStartWallTime = -1;
}
isRunning = true;
} | java | {
"resource": ""
} |
q4823 | Timer.startNamedTimer | train | // Starts the registered timer of the given name, flags, and thread id,
// creating and registering it first if necessary.
public static void startNamedTimer(String timerName, int todoFlags,
long threadId) {
getNamedTimer(timerName, todoFlags, threadId).start();
} | java | {
"resource": ""
} |
q4824 | Timer.stopNamedTimer | train | // Stops the named timer for the current thread; convenience overload that
// supplies Thread.currentThread().getId() as the thread id.
public static long stopNamedTimer(String timerName, int todoFlags) {
return stopNamedTimer(timerName, todoFlags, Thread.currentThread()
.getId());
} | java | {
"resource": ""
} |
q4825 | Timer.resetNamedTimer | train | // Resets the registered timer of the given name, flags, and thread id,
// creating and registering it first if necessary.
public static void resetNamedTimer(String timerName, int todoFlags,
long threadId) {
getNamedTimer(timerName, todoFlags, threadId).reset();
} | java | {
"resource": ""
} |
q4826 | Timer.getNamedTimer | train | // Returns the registered timer for the current thread; convenience
// overload that supplies Thread.currentThread().getId() as the thread id.
public static Timer getNamedTimer(String timerName, int todoFlags) {
return getNamedTimer(timerName, todoFlags, Thread.currentThread()
.getId());
} | java | {
"resource": ""
} |
q4827 | Timer.getNamedTimer | train | // Returns the registered timer for (name, flags, thread id), registering
// a new one atomically if absent. A fresh Timer doubles as the map key;
// putIfAbsent ensures concurrent callers share one instance.
public static Timer getNamedTimer(String timerName, int todoFlags,
long threadId) {
Timer key = new Timer(timerName, todoFlags, threadId);
registeredTimers.putIfAbsent(key, key);
return registeredTimers.get(key);
} | java | {
"resource": ""
} |
q4828 | Timer.getNamedTotalTimer | train | // Aggregates all registered timers with the given name (across threads)
// into a single totals timer. If exactly one timer matches, it is returned
// directly; otherwise a new unregistered Timer (thread id 0) carrying the
// summed CPU/wall times, measurement count, and timer count is returned.
public static Timer getNamedTotalTimer(String timerName) {
long totalCpuTime = 0;
long totalSystemTime = 0;
int measurements = 0;
int timerCount = 0;
int todoFlags = RECORD_NONE;
Timer previousTimer = null;
for (Map.Entry<Timer, Timer> entry : registeredTimers.entrySet()) {
if (entry.getValue().name.equals(timerName)) {
previousTimer = entry.getValue();
timerCount += 1;
totalCpuTime += previousTimer.totalCpuTime;
totalSystemTime += previousTimer.totalWallTime;
measurements += previousTimer.measurements;
todoFlags |= previousTimer.todoFlags;
}
}
if (timerCount == 1) {
// Only one matching timer: return it as-is.
return previousTimer;
} else {
Timer result = new Timer(timerName, todoFlags, 0);
result.totalCpuTime = totalCpuTime;
result.totalWallTime = totalSystemTime;
result.measurements = measurements;
result.threadCount = timerCount;
return result;
}
} | java | {
"resource": ""
} |
q4829 | Client.performActions | train | // Main driver of the client: configures the dump processing controller
// from the client configuration (offline mode, download directory,
// language/site/property filters), selects the dump file (explicit local
// dump or most recent JSON dump), wires up all ready actions as entity
// document processors, processes the dump, and writes the report.
// Returns early (after printing help or logging an error) on
// misconfiguration.
public void performActions() {
if (this.clientConfiguration.getActions().isEmpty()) {
this.clientConfiguration.printHelp();
return;
}
this.dumpProcessingController.setOfflineMode(this.clientConfiguration
.getOfflineMode());
if (this.clientConfiguration.getDumpDirectoryLocation() != null) {
try {
this.dumpProcessingController
.setDownloadDirectory(this.clientConfiguration
.getDumpDirectoryLocation());
} catch (IOException e) {
logger.error("Could not set download directory to "
+ this.clientConfiguration.getDumpDirectoryLocation()
+ ": " + e.getMessage());
logger.error("Aborting");
return;
}
}
dumpProcessingController.setLanguageFilter(this.clientConfiguration
.getFilterLanguages());
dumpProcessingController.setSiteLinkFilter(this.clientConfiguration
.getFilterSiteKeys());
dumpProcessingController.setPropertyFilter(this.clientConfiguration
.getFilterProperties());
// Prefer an explicitly configured local dump; otherwise fetch the
// most recent JSON dump.
MwDumpFile dumpFile = this.clientConfiguration.getLocalDumpFile();
if (dumpFile == null) {
dumpFile = dumpProcessingController
.getMostRecentDump(DumpContentType.JSON);
} else {
if (!dumpFile.isAvailable()) {
logger.error("Dump file not found or not readable: "
+ dumpFile.toString());
return;
}
}
this.clientConfiguration.setProjectName(dumpFile.getProjectName());
this.clientConfiguration.setDateStamp(dumpFile.getDateStamp());
boolean hasReadyProcessor = false;
for (DumpProcessingAction props : this.clientConfiguration.getActions()) {
if (!props.isReady()) {
continue;
}
if (props.needsSites()) {
prepareSites();
if (this.sites == null) { // sites unavailable
continue;
}
props.setSites(this.sites);
}
props.setDumpInformation(dumpFile.getProjectName(),
dumpFile.getDateStamp());
this.dumpProcessingController.registerEntityDocumentProcessor(
props, null, true);
hasReadyProcessor = true;
}
if (!hasReadyProcessor) {
return; // silent; non-ready action should report its problem
// directly
}
// Progress/timing output unless the user requested quiet mode.
if (!this.clientConfiguration.isQuiet()) {
EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor(
0);
this.dumpProcessingController.registerEntityDocumentProcessor(
entityTimerProcessor, null, true);
}
openActions();
this.dumpProcessingController.processDump(dumpFile);
closeActions();
try {
writeReport();
} catch (IOException e) {
logger.error("Could not print report file: " + e.getMessage());
}
} | java | {
"resource": ""
} |
q4830 | Client.initializeLogging | train | // Configures log4j with two console appenders: TRACE..INFO messages go to
// stdout, WARN and above go to stderr. Idempotent: logging is static, so
// repeated calls (e.g. from tests creating multiple clients) are no-ops.
private void initializeLogging() {
// Since logging is static, make sure this is done only once even if
// multiple clients are created (e.g., during tests)
if (consoleAppender != null) {
return;
}
consoleAppender = new ConsoleAppender();
consoleAppender.setLayout(new PatternLayout(LOG_PATTERN));
consoleAppender.setThreshold(Level.INFO);
// Cap the stdout appender at INFO so warnings/errors appear only on
// stderr.
LevelRangeFilter filter = new LevelRangeFilter();
filter.setLevelMin(Level.TRACE);
filter.setLevelMax(Level.INFO);
consoleAppender.addFilter(filter);
consoleAppender.activateOptions();
org.apache.log4j.Logger.getRootLogger().addAppender(consoleAppender);
errorAppender = new ConsoleAppender();
errorAppender.setLayout(new PatternLayout(LOG_PATTERN));
errorAppender.setThreshold(Level.WARN);
errorAppender.setTarget(ConsoleAppender.SYSTEM_ERR);
errorAppender.activateOptions();
org.apache.log4j.Logger.getRootLogger().addAppender(errorAppender);
} | java | {
"resource": ""
} |
q4831 | Client.main | train | // Command-line entry point: builds a client for the "wikidatawiki"
// project from the given arguments and runs the configured actions.
public static void main(String[] args) throws ParseException, IOException {
Client client = new Client(
new DumpProcessingController("wikidatawiki"), args);
client.performActions();
} | java | {
"resource": ""
} |
q4832 | RdfConverter.writeBasicDeclarations | train | // Writes one rdf:type triple per known vocabulary URI, declaring the type
// of each term in the ontology.
public void writeBasicDeclarations() throws RDFHandlerException {
for (Map.Entry<String, String> uriType : Vocabulary
.getKnownVocabularyTypes().entrySet()) {
this.rdfWriter.writeTripleUriObject(uriType.getKey(),
RdfWriter.RDF_TYPE, uriType.getValue());
}
} | java | {
"resource": ""
} |
q4833 | RdfConverter.writeInterPropertyLinks | train | // Writes, for a single property document, the triples that link the
// property entity to each of its context-specific predicate URIs (direct
// claim, claim, statement, statement value, qualifier, qualifier value,
// reference, reference value, no-value, no-qualifier-value) following the
// Wikibase RDF mapping.
void writeInterPropertyLinks(PropertyDocument document)
throws RDFHandlerException {
Resource subject = this.rdfWriter.getUri(document.getEntityId()
.getIri());
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_DIRECT_CLAIM_PROP), Vocabulary
.getPropertyUri(document.getEntityId(),
PropertyContext.DIRECT));
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_CLAIM_PROP), Vocabulary.getPropertyUri(
document.getEntityId(), PropertyContext.STATEMENT));
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_STATEMENT_PROP), Vocabulary
.getPropertyUri(document.getEntityId(),
PropertyContext.VALUE_SIMPLE));
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_STATEMENT_VALUE_PROP),
Vocabulary.getPropertyUri(document.getEntityId(),
PropertyContext.VALUE));
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_QUALIFIER_PROP), Vocabulary
.getPropertyUri(document.getEntityId(),
PropertyContext.QUALIFIER_SIMPLE));
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_QUALIFIER_VALUE_PROP), Vocabulary
.getPropertyUri(document.getEntityId(),
PropertyContext.QUALIFIER));
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_REFERENCE_PROP), Vocabulary
.getPropertyUri(document.getEntityId(),
PropertyContext.REFERENCE_SIMPLE));
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_REFERENCE_VALUE_PROP), Vocabulary
.getPropertyUri(document.getEntityId(),
PropertyContext.REFERENCE));
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_NO_VALUE_PROP), Vocabulary
.getPropertyUri(document.getEntityId(),
PropertyContext.NO_VALUE));
this.rdfWriter.writeTripleUriObject(subject, this.rdfWriter
.getUri(Vocabulary.WB_NO_QUALIFIER_VALUE_PROP), Vocabulary
.getPropertyUri(document.getEntityId(),
PropertyContext.NO_QUALIFIER_VALUE));
// TODO something more with NO_VALUE
} | java | {
"resource": ""
} |
q4834 | RdfConverter.writeBestRankTriples | train | // Marks each best-ranked statement resource with rdf:type wb:BestRank and
// clears the rank buffer. RDFHandlerExceptions are wrapped as unchecked
// because this method's callers cannot throw them.
void writeBestRankTriples() {
for (Resource resource : this.rankBuffer.getBestRankedStatements()) {
try {
this.rdfWriter.writeTripleUriObject(resource,
RdfWriter.RDF_TYPE, RdfWriter.WB_BEST_RANK.toString());
} catch (RDFHandlerException e) {
throw new RuntimeException(e.getMessage(), e);
}
}
this.rankBuffer.clear();
} | java | {
"resource": ""
} |
q4835 | RdfConverter.getUriStringForRank | train | // Maps a statement rank to its vocabulary URI; throws
// IllegalArgumentException for unknown ranks (keeps the method total even
// if the enum grows).
String getUriStringForRank(StatementRank rank) {
switch (rank) {
case NORMAL:
return Vocabulary.WB_NORMAL_RANK;
case PREFERRED:
return Vocabulary.WB_PREFERRED_RANK;
case DEPRECATED:
return Vocabulary.WB_DEPRECATED_RANK;
default:
throw new IllegalArgumentException();
}
} | java | {
"resource": ""
} |
q4836 | WikimediaLanguageCodes.fixLanguageCodeIfDeprecated | train | // Maps a deprecated Wikimedia language code to its current replacement;
// returns the input unchanged when the code is not deprecated.
public static String fixLanguageCodeIfDeprecated(String wikimediaLanguageCode) {
// Single map lookup instead of containsKey() followed by get(); the
// map stores no null values, so null means "not deprecated".
String replacement = DEPRECATED_LANGUAGE_CODES.get(wikimediaLanguageCode);
return replacement == null ? wikimediaLanguageCode : replacement;
} | java | {
"resource": ""
} |
q4837 | EntityDocumentBuilder.withLabel | train | // Adds a label in the given language; convenience overload that wraps the
// text in a MonolingualTextValue. Returns this builder for chaining.
public T withLabel(String text, String languageCode) {
withLabel(factory.getMonolingualTextValue(text, languageCode));
return getThis();
} | java | {
"resource": ""
} |
q4838 | EntityDocumentBuilder.withDescription | train | // Adds a description in the given language; convenience overload that
// wraps the text in a MonolingualTextValue. Returns this builder.
public T withDescription(String text, String languageCode) {
withDescription(factory.getMonolingualTextValue(text, languageCode));
return getThis();
} | java | {
"resource": ""
} |
q4839 | EntityDocumentBuilder.withAlias | train | // Adds an alias in the given language; convenience overload that wraps
// the text in a MonolingualTextValue. Returns this builder.
public T withAlias(String text, String languageCode) {
withAlias(factory.getMonolingualTextValue(text, languageCode));
return getThis();
} | java | {
"resource": ""
} |
q4840 | EntityDocumentBuilder.withStatement | train | // Adds a statement to the builder, grouped by the main snak's property id.
// A new group list is created lazily on first use of a property. Returns
// this builder for chaining.
public T withStatement(Statement statement) {
PropertyIdValue pid = statement.getMainSnak()
.getPropertyId();
ArrayList<Statement> pidStatements = this.statements.get(pid);
if (pidStatements == null) {
pidStatements = new ArrayList<Statement>();
this.statements.put(pid, pidStatements);
}
pidStatements.add(statement);
return getThis();
} | java | {
"resource": ""
} |
q4841 | AbstractValueConverter.logIncompatibleValueError | train | // Logs a warning that a property's declared datatype does not match the
// type of a value encountered for it; the value is skipped by the caller.
protected void logIncompatibleValueError(PropertyIdValue propertyIdValue,
String datatype, String valueType) {
logger.warn("Property " + propertyIdValue.getId() + " has type \""
+ datatype + "\" but a value of type " + valueType
+ ". Data ignored.");
} | java | {
"resource": ""
} |
q4842 | EntityDocumentImpl.getJsonId | train | // JSON serialization helper for the "id" field: returns the entity id,
// or null (omitted via NON_EMPTY) for site-local ids, which have no
// meaningful global identifier.
@JsonInclude(Include.NON_EMPTY)
@JsonProperty("id")
public String getJsonId() {
if (!EntityIdValue.SITE_LOCAL.equals(this.siteIri)) {
return this.entityId;
} else {
return null;
}
} | java | {
"resource": ""
} |
q4843 | WbGetEntitiesAction.wbGetEntities | train | // Executes a wbgetentities API call using the fields of the given
// parameter object; convenience overload delegating to the explicit-
// parameter variant.
public Map<String, EntityDocument> wbGetEntities(
WbGetEntitiesActionData properties)
throws MediaWikiApiErrorException, IOException {
return wbGetEntities(properties.ids, properties.sites,
properties.titles, properties.props, properties.languages,
properties.sitefilter);
} | java | {
"resource": ""
} |
q4844 | Vocabulary.getStatementUri | train | // Builds the RDF URI for a statement: prefix + subject id + "-" + the
// part of the statement id after the "$" separator.
// NOTE(review): if the statement id contains no '$', indexOf returns -1
// and i becomes 0, so the whole id is appended -- presumably intentional
// as a fallback; confirm.
public static String getStatementUri(Statement statement) {
int i = statement.getStatementId().indexOf('$') + 1;
return PREFIX_WIKIDATA_STATEMENT
+ statement.getSubject().getId() + "-"
+ statement.getStatementId().substring(i);
} | java | {
"resource": ""
} |
q4845 | Vocabulary.getPropertyUri | train | // Returns the predicate URI for a property in the given usage context by
// prepending the context-specific namespace prefix to the property id;
// returns null for unknown contexts.
public static String getPropertyUri(PropertyIdValue propertyIdValue,
PropertyContext propertyContext) {
switch (propertyContext) {
case DIRECT:
return PREFIX_PROPERTY_DIRECT + propertyIdValue.getId();
case STATEMENT:
return PREFIX_PROPERTY + propertyIdValue.getId();
case VALUE_SIMPLE:
return PREFIX_PROPERTY_STATEMENT + propertyIdValue.getId();
case VALUE:
return PREFIX_PROPERTY_STATEMENT_VALUE + propertyIdValue.getId();
case QUALIFIER:
return PREFIX_PROPERTY_QUALIFIER_VALUE + propertyIdValue.getId();
case QUALIFIER_SIMPLE:
return PREFIX_PROPERTY_QUALIFIER + propertyIdValue.getId();
case REFERENCE:
return PREFIX_PROPERTY_REFERENCE_VALUE + propertyIdValue.getId();
case REFERENCE_SIMPLE:
return PREFIX_PROPERTY_REFERENCE + propertyIdValue.getId();
case NO_VALUE:
return PREFIX_WIKIDATA_NO_VALUE + propertyIdValue.getId();
case NO_QUALIFIER_VALUE:
return PREFIX_WIKIDATA_NO_QUALIFIER_VALUE + propertyIdValue.getId();
default:
return null;
}
} | java | {
"resource": ""
} |
q4846 | StatementDocumentImpl.findStatementGroup | train | // Returns the statement group for the given property id string, wrapping
// the stored statement list on demand; null when the document has no
// statements for that property.
public StatementGroup findStatementGroup(String propertyIdValue) {
if (this.claims.containsKey(propertyIdValue)) {
return new StatementGroupImpl(this.claims.get(propertyIdValue));
}
return null;
} | java | {
"resource": ""
} |
q4847 | StatementDocumentImpl.addStatementToGroups | train | // Returns a copy of the claims map with the given statement added to its
// property's group. If an existing statement has the same non-empty
// statement id, it is replaced in place (preserving its position);
// otherwise the statement is appended. The input map is not modified.
protected static Map<String, List<Statement>> addStatementToGroups(Statement statement, Map<String, List<Statement>> claims) {
Map<String, List<Statement>> newGroups = new HashMap<>(claims);
String pid = statement.getMainSnak().getPropertyId().getId();
if(newGroups.containsKey(pid)) {
List<Statement> newGroup = new ArrayList<>(newGroups.get(pid).size());
boolean statementReplaced = false;
for(Statement existingStatement : newGroups.get(pid)) {
// Replace only on matching, non-empty statement ids; empty
// ids denote new statements and must never match each other.
if(existingStatement.getStatementId().equals(statement.getStatementId()) &&
!existingStatement.getStatementId().isEmpty()) {
statementReplaced = true;
newGroup.add(statement);
} else {
newGroup.add(existingStatement);
}
}
if(!statementReplaced) {
newGroup.add(statement);
}
newGroups.put(pid, newGroup);
} else {
newGroups.put(pid, Collections.singletonList(statement));
}
return newGroups;
} | java | {
"resource": ""
} |
q4848 | StatementDocumentImpl.removeStatements | train | // Returns a copy of the claims map with every statement whose id appears
// in statementIds removed. Property groups that become empty are dropped
// entirely. The input map is not modified.
protected static Map<String, List<Statement>> removeStatements(Set<String> statementIds, Map<String, List<Statement>> claims) {
Map<String, List<Statement>> result = new HashMap<>(claims.size());
for(Entry<String, List<Statement>> group : claims.entrySet()) {
List<Statement> kept = new ArrayList<>();
for(Statement candidate : group.getValue()) {
boolean remove = statementIds.contains(candidate.getStatementId());
if(!remove) {
kept.add(candidate);
}
}
// Drop groups that lost all of their statements.
if(!kept.isEmpty()) {
result.put(group.getKey(), kept);
}
}
return result;
} | java | {
"resource": ""
} |
q4849 | DatatypeIdImpl.getDatatypeIriFromJsonDatatype | train | // Translates a JSON datatype string (e.g. "wikibase-item") to its
// ontology IRI. Known datatypes are mapped explicitly; any other valid
// hyphenated name is converted generically by capitalizing each segment
// and prepending the wikiba.se ontology namespace. Invalid names throw
// IllegalArgumentException.
public static String getDatatypeIriFromJsonDatatype(String jsonDatatype) {
switch (jsonDatatype) {
case JSON_DT_ITEM:
return DT_ITEM;
case JSON_DT_PROPERTY:
return DT_PROPERTY;
case JSON_DT_GLOBE_COORDINATES:
return DT_GLOBE_COORDINATES;
case JSON_DT_URL:
return DT_URL;
case JSON_DT_COMMONS_MEDIA:
return DT_COMMONS_MEDIA;
case JSON_DT_TIME:
return DT_TIME;
case JSON_DT_QUANTITY:
return DT_QUANTITY;
case JSON_DT_STRING:
return DT_STRING;
case JSON_DT_MONOLINGUAL_TEXT:
return DT_MONOLINGUAL_TEXT;
default:
if(!JSON_DATATYPE_PATTERN.matcher(jsonDatatype).matches()) {
throw new IllegalArgumentException("Invalid JSON datatype \"" + jsonDatatype + "\"");
}
// Generic fallback: "foo-bar" -> ".../ontology#FooBar".
String[] parts = jsonDatatype.split("-");
for(int i = 0; i < parts.length; i++) {
parts[i] = StringUtils.capitalize(parts[i]);
}
return "http://wikiba.se/ontology#" + StringUtils.join(parts);
}
} | java | {
"resource": ""
} |
q4850 | DatatypeIdImpl.getJsonDatatypeFromDatatypeIri | train | // Translates a datatype IRI back to its JSON datatype string; the exact
// inverse of getDatatypeIriFromJsonDatatype. Known IRIs are mapped
// explicitly; other ontology IRIs are converted generically by
// lower-casing the CamelCase local name into a hyphenated form.
// Unrecognized IRIs throw IllegalArgumentException.
public static String getJsonDatatypeFromDatatypeIri(String datatypeIri) {
switch (datatypeIri) {
case DatatypeIdValue.DT_ITEM:
return DatatypeIdImpl.JSON_DT_ITEM;
case DatatypeIdValue.DT_GLOBE_COORDINATES:
return DatatypeIdImpl.JSON_DT_GLOBE_COORDINATES;
case DatatypeIdValue.DT_URL:
return DatatypeIdImpl.JSON_DT_URL;
case DatatypeIdValue.DT_COMMONS_MEDIA:
return DatatypeIdImpl.JSON_DT_COMMONS_MEDIA;
case DatatypeIdValue.DT_TIME:
return DatatypeIdImpl.JSON_DT_TIME;
case DatatypeIdValue.DT_QUANTITY:
return DatatypeIdImpl.JSON_DT_QUANTITY;
case DatatypeIdValue.DT_STRING:
return DatatypeIdImpl.JSON_DT_STRING;
case DatatypeIdValue.DT_MONOLINGUAL_TEXT:
return DatatypeIdImpl.JSON_DT_MONOLINGUAL_TEXT;
case DatatypeIdValue.DT_PROPERTY:
return DatatypeIdImpl.JSON_DT_PROPERTY;
default:
//We apply the reverse algorithm of JacksonDatatypeId::getDatatypeIriFromJsonDatatype
Matcher matcher = DATATYPE_ID_PATTERN.matcher(datatypeIri);
if(!matcher.matches()) {
throw new IllegalArgumentException("Unknown datatype: " + datatypeIri);
}
// "FooBar" -> "foo-bar": uncapitalize, then split on upper case.
StringBuilder jsonDatatypeBuilder = new StringBuilder();
for(char ch : StringUtils.uncapitalize(matcher.group(1)).toCharArray()) {
if(Character.isUpperCase(ch)) {
jsonDatatypeBuilder
.append('-')
.append(Character.toLowerCase(ch));
} else {
jsonDatatypeBuilder.append(ch);
}
}
return jsonDatatypeBuilder.toString();
}
} | java | {
"resource": ""
} |
q4851 | DirectoryManagerImpl.getCompressorInputStream | train | // Wraps the given stream in a decompressor matching the compression type
// (identity for NONE, GZIP, or buffered BZ2); throws for unsupported
// types. The caller owns and must close the returned stream.
protected InputStream getCompressorInputStream(InputStream inputStream,
CompressionType compressionType) throws IOException {
switch (compressionType) {
case NONE:
return inputStream;
case GZIP:
return new GZIPInputStream(inputStream);
case BZ2:
// BZip2 decoding benefits from buffering the raw input.
return new BZip2CompressorInputStream(new BufferedInputStream(
inputStream));
default:
throw new IllegalArgumentException("Unsupported compression type: "
+ compressionType);
}
} | java | {
"resource": ""
} |
q4852 | DirectoryManagerImpl.createDirectory | train | // Ensures the given directory exists, creating it unless the manager is
// read-only (in which case a missing directory is a FileNotFoundException).
// NOTE(review): the exists/create sequence is not atomic; a concurrent
// creation between the check and createDirectory would throw
// FileAlreadyExistsException -- confirm single-threaded use.
void createDirectory(Path path) throws IOException {
if (Files.exists(path) && Files.isDirectory(path)) {
return;
}
if (this.readOnly) {
throw new FileNotFoundException(
"The requested directory \""
+ path.toString()
+ "\" does not exist and we are in read-only mode, so it cannot be created.");
}
Files.createDirectory(path);
} | java | {
"resource": ""
} |
q4853 | WmfDumpFile.getDumpFilePostfix | train | // Returns the file-name postfix for the given dump content type from the
// static POSTFIXES table; throws for unsupported types.
public static String getDumpFilePostfix(DumpContentType dumpContentType) {
if (WmfDumpFile.POSTFIXES.containsKey(dumpContentType)) {
return WmfDumpFile.POSTFIXES.get(dumpContentType);
} else {
throw new IllegalArgumentException("Unsupported dump type "
+ dumpContentType);
}
} | java | {
"resource": ""
} |
q4854 | WmfDumpFile.getDumpFileWebDirectory | train | // Returns the download-site directory URL for the given dump type and
// project. JSON dumps are special-cased: WMF publishes them under the
// fixed "wikidata" directory and only for the wikidatawiki project; any
// other project requesting JSON fails explicitly.
public static String getDumpFileWebDirectory(
DumpContentType dumpContentType, String projectName) {
if (dumpContentType == DumpContentType.JSON) {
if ("wikidatawiki".equals(projectName)) {
return WmfDumpFile.DUMP_SITE_BASE_URL
+ WmfDumpFile.WEB_DIRECTORY.get(dumpContentType)
+ "wikidata" + "/";
} else {
throw new RuntimeException(
"Wikimedia Foundation uses non-systematic directory names for this type of dump file."
+ " I don't know where to find dumps of project "
+ projectName);
}
} else if (WmfDumpFile.WEB_DIRECTORY.containsKey(dumpContentType)) {
return WmfDumpFile.DUMP_SITE_BASE_URL
+ WmfDumpFile.WEB_DIRECTORY.get(dumpContentType)
+ projectName + "/";
} else {
throw new IllegalArgumentException("Unsupported dump type "
+ dumpContentType);
}
} | java | {
"resource": ""
} |
q4855 | WmfDumpFile.getDumpFileCompressionType | train | // Infers the compression type from the file-name extension: ".gz" means
// GZIP, ".bz2" means BZ2, anything else is treated as uncompressed.
public static CompressionType getDumpFileCompressionType(String fileName) {
if (fileName.endsWith(".gz")) {
return CompressionType.GZIP;
} else if (fileName.endsWith(".bz2")) {
return CompressionType.BZ2;
} else {
return CompressionType.NONE;
}
} | java | {
"resource": ""
} |
q4856 | WmfDumpFile.getDumpFileDirectoryName | train | // Returns the local directory name used for a dump, e.g. "json-20150101".
public static String getDumpFileDirectoryName(
DumpContentType dumpContentType, String dateStamp) {
// Use a fixed locale: default-locale toLowerCase() turns "I" into a
// dotless "ı" under Turkish locales, yielding broken directory names.
return dumpContentType.toString().toLowerCase(java.util.Locale.ROOT) + "-" + dateStamp;
} | java | {
"resource": ""
} |
q4857 | WmfDumpFile.getDumpFileName | train | // Builds the dump file name. WMF names JSON dumps by date only
// ("YYYYMMDD<postfix>"); all other dump types are prefixed with the
// project name ("project-YYYYMMDD<postfix>").
public static String getDumpFileName(DumpContentType dumpContentType,
String projectName, String dateStamp) {
if (dumpContentType == DumpContentType.JSON) {
return dateStamp + WmfDumpFile.getDumpFilePostfix(dumpContentType);
} else {
return projectName + "-" + dateStamp
+ WmfDumpFile.getDumpFilePostfix(dumpContentType);
}
} | java | {
"resource": ""
} |
q4858 | WmfDumpFile.isRevisionDumpFile | train | // Returns whether the given dump type contains page revisions, looked up
// in the static REVISION_DUMP table; throws for unsupported types.
public static boolean isRevisionDumpFile(DumpContentType dumpContentType) {
if (WmfDumpFile.REVISION_DUMP.containsKey(dumpContentType)) {
return WmfDumpFile.REVISION_DUMP.get(dumpContentType);
} else {
throw new IllegalArgumentException("Unsupported dump type "
+ dumpContentType);
}
} | java | {
"resource": ""
} |
q4859 | JsonDumpFileProcessor.processDumpFileContentsRecovery | train | // Fallback parser used after the streaming JSON iterator fails: skips the
// rest of the broken line, then reads the dump line by line (one JSON
// entity document per line, with an optional trailing comma from the
// enclosing JSON array) until a short line (e.g. the closing "]") or EOF.
// Individual lines that still fail to parse are logged and skipped.
private void processDumpFileContentsRecovery(InputStream inputStream)
throws IOException {
JsonDumpFileProcessor.logger
.warn("Entering recovery mode to parse rest of file. This might be slightly slower.");
BufferedReader br = new BufferedReader(new InputStreamReader(
inputStream));
String line = br.readLine();
if (line == null) { // can happen if iterator already has consumed all
// the stream
return;
}
// Truncate very long lines for the log message.
if (line.length() >= 100) {
line = line.substring(0, 100) + "[...]"
+ line.substring(line.length() - 50);
}
JsonDumpFileProcessor.logger.warn("Skipping rest of current line: "
+ line);
line = br.readLine();
while (line != null && line.length() > 1) {
try {
EntityDocument document;
// Strip the trailing comma that separates array elements.
if (line.charAt(line.length() - 1) == ',') {
document = documentReader.readValue(line.substring(0,
line.length() - 1));
} else {
document = documentReader.readValue(line);
}
handleDocument(document);
} catch (JsonProcessingException e) {
logJsonProcessingException(e);
JsonDumpFileProcessor.logger.error("Problematic line was: "
+ line.substring(0, Math.min(50, line.length()))
+ "...");
}
line = br.readLine();
}
} | java | {
"resource": ""
} |
q4860 | JsonSerializer.reportException | train | private void reportException(Exception e) {
logger.error("Failed to write JSON export: " + e.toString());
throw new RuntimeException(e.toString(), e);
} | java | {
"resource": ""
} |
q4861 | JsonSerializer.jacksonObjectToString | train | protected static String jacksonObjectToString(Object object) {
try {
return mapper.writeValueAsString(object);
} catch (JsonProcessingException e) {
logger.error("Failed to serialize JSON data: " + e.toString());
return null;
}
} | java | {
"resource": ""
} |
q4862 | ToString.getTimePrecisionString | train | protected static String getTimePrecisionString(byte precision) {
switch (precision) {
case TimeValue.PREC_SECOND:
return "sec";
case TimeValue.PREC_MINUTE:
return "min";
case TimeValue.PREC_HOUR:
return "hour";
case TimeValue.PREC_DAY:
return "day";
case TimeValue.PREC_MONTH:
return "month";
case TimeValue.PREC_YEAR:
return "year";
case TimeValue.PREC_DECADE:
return "decade";
case TimeValue.PREC_100Y:
return "100 years";
case TimeValue.PREC_1KY:
return "1000 years";
case TimeValue.PREC_10KY:
return "10K years";
case TimeValue.PREC_100KY:
return "100K years";
case TimeValue.PREC_1MY:
return "1 million years";
case TimeValue.PREC_10MY:
return "10 million years";
case TimeValue.PREC_100MY:
return "100 million years";
case TimeValue.PREC_1GY:
return "1000 million years";
default:
return "Unsupported precision " + precision;
}
} | java | {
"resource": ""
} |
q4863 | WbEditingAction.wbSetLabel | train | public JsonNode wbSetLabel(String id, String site, String title,
String newEntity, String language, String value,
boolean bot, long baserevid, String summary)
throws IOException, MediaWikiApiErrorException {
Validate.notNull(language,
"Language parameter cannot be null when setting a label");
Map<String, String> parameters = new HashMap<String, String>();
parameters.put("language", language);
if (value != null) {
parameters.put("value", value);
}
JsonNode response = performAPIAction("wbsetlabel", id, site, title, newEntity,
parameters, summary, baserevid, bot);
return response;
} | java | {
"resource": ""
} |
q4864 | WbEditingAction.wbSetAliases | train | public JsonNode wbSetAliases(String id, String site, String title,
String newEntity, String language, List<String> add,
List<String> remove, List<String> set,
boolean bot, long baserevid, String summary)
throws IOException, MediaWikiApiErrorException {
Validate.notNull(language,
"Language parameter cannot be null when setting aliases");
Map<String, String> parameters = new HashMap<String, String>();
parameters.put("language", language);
if (set != null) {
if (add != null || remove != null) {
throw new IllegalArgumentException(
"Cannot use parameters \"add\" or \"remove\" when using \"set\" to edit aliases");
}
parameters.put("set", ApiConnection.implodeObjects(set));
}
if (add != null) {
parameters.put("add", ApiConnection.implodeObjects(add));
}
if (remove != null) {
parameters.put("remove", ApiConnection.implodeObjects(remove));
}
JsonNode response = performAPIAction("wbsetaliases", id, site, title, newEntity, parameters, summary, baserevid, bot);
return response;
} | java | {
"resource": ""
} |
q4865 | WbEditingAction.wbSetClaim | train | public JsonNode wbSetClaim(String statement,
boolean bot, long baserevid, String summary)
throws IOException, MediaWikiApiErrorException {
Validate.notNull(statement,
"Statement parameter cannot be null when adding or changing a statement");
Map<String, String> parameters = new HashMap<String, String>();
parameters.put("claim", statement);
return performAPIAction("wbsetclaim", null, null, null, null, parameters, summary, baserevid, bot);
} | java | {
"resource": ""
} |
q4866 | WbEditingAction.wbRemoveClaims | train | public JsonNode wbRemoveClaims(List<String> statementIds,
boolean bot, long baserevid, String summary)
throws IOException, MediaWikiApiErrorException {
Validate.notNull(statementIds,
"statementIds parameter cannot be null when deleting statements");
Validate.notEmpty(statementIds,
"statement ids to delete must be non-empty when deleting statements");
Validate.isTrue(statementIds.size() <= 50,
"At most 50 statements can be deleted at once");
Map<String, String> parameters = new HashMap<String, String>();
parameters.put("claim", String.join("|", statementIds));
return performAPIAction("wbremoveclaims", null, null, null, null, parameters, summary, baserevid, bot);
} | java | {
"resource": ""
} |
q4867 | FixIntegerQuantityPrecisionsBot.fixIntegerPrecisions | train | protected void fixIntegerPrecisions(ItemIdValue itemIdValue,
String propertyId) {
String qid = itemIdValue.getId();
try {
// Fetch the online version of the item to make sure we edit the
// current version:
ItemDocument currentItemDocument = (ItemDocument) dataFetcher
.getEntityDocument(qid);
if (currentItemDocument == null) {
System.out.println("*** " + qid
+ " could not be fetched. Maybe it has been deleted.");
return;
}
// Get the current statements for the property we want to fix:
StatementGroup editPropertyStatements = currentItemDocument
.findStatementGroup(propertyId);
if (editPropertyStatements == null) {
System.out.println("*** " + qid
+ " no longer has any statements for " + propertyId);
return;
}
PropertyIdValue property = Datamodel
.makeWikidataPropertyIdValue(propertyId);
List<Statement> updateStatements = new ArrayList<>();
for (Statement s : editPropertyStatements) {
QuantityValue qv = (QuantityValue) s.getValue();
if (qv != null && isPlusMinusOneValue(qv)) {
QuantityValue exactValue = Datamodel.makeQuantityValue(
qv.getNumericValue(), qv.getNumericValue(),
qv.getNumericValue());
Statement exactStatement = StatementBuilder
.forSubjectAndProperty(itemIdValue, property)
.withValue(exactValue).withId(s.getStatementId())
.withQualifiers(s.getQualifiers())
.withReferences(s.getReferences())
.withRank(s.getRank()).build();
updateStatements.add(exactStatement);
}
}
if (updateStatements.size() == 0) {
System.out.println("*** " + qid + " quantity values for "
+ propertyId + " already fixed");
return;
}
logEntityModification(currentItemDocument.getEntityId(),
updateStatements, propertyId);
dataEditor.updateStatements(currentItemDocument, updateStatements,
Collections.<Statement> emptyList(),
"Set exact values for [[Property:" + propertyId + "|"
+ propertyId + "]] integer quantities (Task MB2)");
} catch (MediaWikiApiErrorException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
} | java | {
"resource": ""
} |
q4868 | StatementUpdate.markStatementsForDeletion | train | protected void markStatementsForDeletion(StatementDocument currentDocument,
List<Statement> deleteStatements) {
for (Statement statement : deleteStatements) {
boolean found = false;
for (StatementGroup sg : currentDocument.getStatementGroups()) {
if (!sg.getProperty().equals(statement.getMainSnak().getPropertyId())) {
continue;
}
Statement changedStatement = null;
for (Statement existingStatement : sg) {
if (existingStatement.equals(statement)) {
found = true;
toDelete.add(statement.getStatementId());
} else if (existingStatement.getStatementId().equals(
statement.getStatementId())) {
// (we assume all existing statement ids to be nonempty
// here)
changedStatement = existingStatement;
break;
}
}
if (!found) {
StringBuilder warning = new StringBuilder();
warning.append("Cannot delete statement (id ")
.append(statement.getStatementId())
.append(") since it is not present in data. Statement was:\n")
.append(statement);
if (changedStatement != null) {
warning.append(
"\nThe data contains another statement with the same id: maybe it has been edited? Other statement was:\n")
.append(changedStatement);
}
logger.warn(warning.toString());
}
}
}
} | java | {
"resource": ""
} |
q4869 | StatementUpdate.markStatementsForInsertion | train | protected void markStatementsForInsertion(
StatementDocument currentDocument, List<Statement> addStatements) {
for (Statement statement : addStatements) {
addStatement(statement, true);
}
for (StatementGroup sg : currentDocument.getStatementGroups()) {
if (this.toKeep.containsKey(sg.getProperty())) {
for (Statement statement : sg) {
if (!this.toDelete.contains(statement.getStatementId())) {
addStatement(statement, false);
}
}
}
}
} | java | {
"resource": ""
} |
q4870 | StatementUpdate.addStatement | train | protected void addStatement(Statement statement, boolean isNew) {
PropertyIdValue pid = statement.getMainSnak().getPropertyId();
// This code maintains the following properties:
// (1) the toKeep structure does not contain two statements with the
// same statement id
// (2) the toKeep structure does not contain two statements that can
// be merged
if (this.toKeep.containsKey(pid)) {
List<StatementWithUpdate> statements = this.toKeep.get(pid);
for (int i = 0; i < statements.size(); i++) {
Statement currentStatement = statements.get(i).statement;
boolean currentIsNew = statements.get(i).write;
if (!"".equals(currentStatement.getStatementId())
&& currentStatement.getStatementId().equals(
statement.getStatementId())) {
// Same, non-empty id: ignore existing statement as if
// deleted
return;
}
Statement newStatement = mergeStatements(statement,
currentStatement);
if (newStatement != null) {
boolean writeNewStatement = (isNew || !newStatement
.equals(statement))
&& (currentIsNew || !newStatement
.equals(currentStatement));
// noWrite: (newS == statement && !isNew)
// || (newS == cur && !curIsNew)
// Write: (newS != statement || isNew )
// && (newS != cur || curIsNew)
statements.set(i, new StatementWithUpdate(newStatement,
writeNewStatement));
// Impossible with default merge code:
// Kept here for future extensions that may choose to not
// reuse this id.
if (!"".equals(statement.getStatementId())
&& !newStatement.getStatementId().equals(
statement.getStatementId())) {
this.toDelete.add(statement.getStatementId());
}
if (!"".equals(currentStatement.getStatementId())
&& !newStatement.getStatementId().equals(
currentStatement.getStatementId())) {
this.toDelete.add(currentStatement.getStatementId());
}
return;
}
}
statements.add(new StatementWithUpdate(statement, isNew));
} else {
List<StatementWithUpdate> statements = new ArrayList<>();
statements.add(new StatementWithUpdate(statement, isNew));
this.toKeep.put(pid, statements);
}
} | java | {
"resource": ""
} |
q4871 | StatementUpdate.mergeReferences | train | protected List<Reference> mergeReferences(
List<? extends Reference> references1,
List<? extends Reference> references2) {
List<Reference> result = new ArrayList<>();
for (Reference reference : references1) {
addBestReferenceToList(reference, result);
}
for (Reference reference : references2) {
addBestReferenceToList(reference, result);
}
return result;
} | java | {
"resource": ""
} |
q4872 | StatementUpdate.equivalentClaims | train | protected boolean equivalentClaims(Claim claim1, Claim claim2) {
return claim1.getMainSnak().equals(claim2.getMainSnak())
&& isSameSnakSet(claim1.getAllQualifiers(),
claim2.getAllQualifiers());
} | java | {
"resource": ""
} |
q4873 | StatementUpdate.isSameSnakSet | train | protected boolean isSameSnakSet(Iterator<Snak> snaks1, Iterator<Snak> snaks2) {
ArrayList<Snak> snakList1 = new ArrayList<>(5);
while (snaks1.hasNext()) {
snakList1.add(snaks1.next());
}
int snakCount2 = 0;
while (snaks2.hasNext()) {
snakCount2++;
Snak snak2 = snaks2.next();
boolean found = false;
for (int i = 0; i < snakList1.size(); i++) {
if (snak2.equals(snakList1.get(i))) {
snakList1.set(i, null);
found = true;
break;
}
}
if (!found) {
return false;
}
}
return snakCount2 == snakList1.size();
} | java | {
"resource": ""
} |
q4874 | StatementUpdate.getRevisionIdFromResponse | train | protected long getRevisionIdFromResponse(JsonNode response) throws JsonMappingException {
if(response == null) {
throw new JsonMappingException("API response is null");
}
JsonNode entity = null;
if(response.has("entity")) {
entity = response.path("entity");
} else if(response.has("pageinfo")) {
entity = response.path("pageinfo");
}
if(entity != null && entity.has("lastrevid")) {
return entity.path("lastrevid").asLong();
}
throw new JsonMappingException("The last revision id could not be found in API response");
} | java | {
"resource": ""
} |
q4875 | StatementUpdate.getDatamodelObjectFromResponse | train | protected <T> T getDatamodelObjectFromResponse(JsonNode response, List<String> path, Class<T> targetClass) throws JsonProcessingException {
if(response == null) {
throw new JsonMappingException("The API response is null");
}
JsonNode currentNode = response;
for(String field : path) {
if (!currentNode.has(field)) {
throw new JsonMappingException("Field '"+field+"' not found in API response.");
}
currentNode = currentNode.path(field);
}
return mapper.treeToValue(currentNode, targetClass);
} | java | {
"resource": ""
} |
q4876 | EditOnlineDataExample.findSomeStringProperties | train | public static void findSomeStringProperties(ApiConnection connection)
throws MediaWikiApiErrorException, IOException {
WikibaseDataFetcher wbdf = new WikibaseDataFetcher(connection, siteIri);
wbdf.getFilter().excludeAllProperties();
wbdf.getFilter().setLanguageFilter(Collections.singleton("en"));
ArrayList<PropertyIdValue> stringProperties = new ArrayList<>();
System.out
.println("*** Trying to find string properties for the example ... ");
int propertyNumber = 1;
while (stringProperties.size() < 5) {
ArrayList<String> fetchProperties = new ArrayList<>();
for (int i = propertyNumber; i < propertyNumber + 10; i++) {
fetchProperties.add("P" + i);
}
propertyNumber += 10;
Map<String, EntityDocument> results = wbdf
.getEntityDocuments(fetchProperties);
for (EntityDocument ed : results.values()) {
PropertyDocument pd = (PropertyDocument) ed;
if (DatatypeIdValue.DT_STRING.equals(pd.getDatatype().getIri())
&& pd.getLabels().containsKey("en")) {
stringProperties.add(pd.getEntityId());
System.out.println("* Found string property "
+ pd.getEntityId().getId() + " ("
+ pd.getLabels().get("en") + ")");
}
}
}
stringProperty1 = stringProperties.get(0);
stringProperty2 = stringProperties.get(1);
stringProperty3 = stringProperties.get(2);
stringProperty4 = stringProperties.get(3);
stringProperty5 = stringProperties.get(4);
System.out.println("*** Done.");
} | java | {
"resource": ""
} |
q4877 | FindPositionArray.findPosition | train | public long findPosition(long nOccurrence) {
updateCount();
if (nOccurrence <= 0) {
return RankedBitVector.NOT_FOUND;
}
int findPos = (int) (nOccurrence / this.blockSize);
if (findPos < this.positionArray.length) {
long pos0 = this.positionArray[findPos];
long leftOccurrences = nOccurrence - (findPos * this.blockSize);
if (leftOccurrences == 0) {
return pos0;
}
for (long index = pos0 + 1; index < this.bitVector.size(); index++) {
if (this.bitVector.getBit(index) == this.bit) {
leftOccurrences--;
}
if (leftOccurrences == 0) {
return index;
}
}
}
return RankedBitVector.NOT_FOUND;
} | java | {
"resource": ""
} |
q4878 | OwlDeclarationBuffer.writeNoValueRestriction | train | void writeNoValueRestriction(RdfWriter rdfWriter, String propertyUri,
String rangeUri, String subject) throws RDFHandlerException {
Resource bnodeSome = rdfWriter.getFreshBNode();
rdfWriter.writeTripleValueObject(subject, RdfWriter.RDF_TYPE,
RdfWriter.OWL_CLASS);
rdfWriter.writeTripleValueObject(subject, RdfWriter.OWL_COMPLEMENT_OF,
bnodeSome);
rdfWriter.writeTripleValueObject(bnodeSome, RdfWriter.RDF_TYPE,
RdfWriter.OWL_RESTRICTION);
rdfWriter.writeTripleUriObject(bnodeSome, RdfWriter.OWL_ON_PROPERTY,
propertyUri);
rdfWriter.writeTripleUriObject(bnodeSome,
RdfWriter.OWL_SOME_VALUES_FROM, rangeUri);
} | java | {
"resource": ""
} |
q4879 | StatementBuilder.withQualifierValue | train | public StatementBuilder withQualifierValue(PropertyIdValue propertyIdValue,
Value value) {
withQualifier(factory.getValueSnak(propertyIdValue, value));
return getThis();
} | java | {
"resource": ""
} |
q4880 | EntityStatisticsProcessor.countStatements | train | protected void countStatements(UsageStatistics usageStatistics,
StatementDocument statementDocument) {
// Count Statement data:
for (StatementGroup sg : statementDocument.getStatementGroups()) {
// Count Statements:
usageStatistics.countStatements += sg.size();
// Count uses of properties in Statements:
countPropertyMain(usageStatistics, sg.getProperty(), sg.size());
for (Statement s : sg) {
for (SnakGroup q : s.getQualifiers()) {
countPropertyQualifier(usageStatistics, q.getProperty(), q.size());
}
for (Reference r : s.getReferences()) {
usageStatistics.countReferencedStatements++;
for (SnakGroup snakGroup : r.getSnakGroups()) {
countPropertyReference(usageStatistics,
snakGroup.getProperty(), snakGroup.size());
}
}
}
}
} | java | {
"resource": ""
} |
q4881 | EntityStatisticsProcessor.writeFinalResults | train | private void writeFinalResults() {
// Print a final report:
printStatus();
// Store property counts in files:
writePropertyStatisticsToFile(this.itemStatistics,
"item-property-counts.csv");
writePropertyStatisticsToFile(this.propertyStatistics,
"property-property-counts.csv");
// Store site link statistics in file:
try (PrintStream out = new PrintStream(
ExampleHelpers
.openExampleFileOuputStream("site-link-counts.csv"))) {
out.println("Site key,Site links");
for (Entry<String, Integer> entry : this.siteLinkStatistics
.entrySet()) {
out.println(entry.getKey() + "," + entry.getValue());
}
} catch (IOException e) {
e.printStackTrace();
}
// Store term statistics in file:
writeTermStatisticsToFile(this.itemStatistics, "item-term-counts.csv");
writeTermStatisticsToFile(this.propertyStatistics,
"property-term-counts.csv");
} | java | {
"resource": ""
} |
q4882 | EntityStatisticsProcessor.writePropertyStatisticsToFile | train | private void writePropertyStatisticsToFile(UsageStatistics usageStatistics,
String fileName) {
try (PrintStream out = new PrintStream(
ExampleHelpers.openExampleFileOuputStream(fileName))) {
out.println("Property id,in statements,in qualifiers,in references,total");
for (Entry<PropertyIdValue, Integer> entry : usageStatistics.propertyCountsMain
.entrySet()) {
int qCount = usageStatistics.propertyCountsQualifier.get(entry
.getKey());
int rCount = usageStatistics.propertyCountsReferences.get(entry
.getKey());
int total = entry.getValue() + qCount + rCount;
out.println(entry.getKey().getId() + "," + entry.getValue()
+ "," + qCount + "," + rCount + "," + total);
}
} catch (IOException e) {
e.printStackTrace();
}
} | java | {
"resource": ""
} |
q4883 | EntityStatisticsProcessor.writeTermStatisticsToFile | train | private void writeTermStatisticsToFile(UsageStatistics usageStatistics,
String fileName) {
// Make sure all keys are present in label count map:
for (String key : usageStatistics.aliasCounts.keySet()) {
countKey(usageStatistics.labelCounts, key, 0);
}
for (String key : usageStatistics.descriptionCounts.keySet()) {
countKey(usageStatistics.labelCounts, key, 0);
}
try (PrintStream out = new PrintStream(
ExampleHelpers.openExampleFileOuputStream(fileName))) {
out.println("Language,Labels,Descriptions,Aliases");
for (Entry<String, Integer> entry : usageStatistics.labelCounts
.entrySet()) {
countKey(usageStatistics.aliasCounts, entry.getKey(), 0);
int aCount = usageStatistics.aliasCounts.get(entry.getKey());
countKey(usageStatistics.descriptionCounts, entry.getKey(), 0);
int dCount = usageStatistics.descriptionCounts.get(entry
.getKey());
out.println(entry.getKey() + "," + entry.getValue() + ","
+ dCount + "," + aCount);
}
} catch (IOException e) {
e.printStackTrace();
}
} | java | {
"resource": ""
} |
q4884 | EntityStatisticsProcessor.printStatistics | train | private void printStatistics(UsageStatistics usageStatistics,
String entityLabel) {
System.out.println("Processed " + usageStatistics.count + " "
+ entityLabel + ":");
System.out.println(" * Labels: " + usageStatistics.countLabels
+ ", descriptions: " + usageStatistics.countDescriptions
+ ", aliases: " + usageStatistics.countAliases);
System.out.println(" * Statements: " + usageStatistics.countStatements
+ ", with references: "
+ usageStatistics.countReferencedStatements);
} | java | {
"resource": ""
} |
q4885 | EntityStatisticsProcessor.countPropertyMain | train | private void countPropertyMain(UsageStatistics usageStatistics,
PropertyIdValue property, int count) {
addPropertyCounters(usageStatistics, property);
usageStatistics.propertyCountsMain.put(property,
usageStatistics.propertyCountsMain.get(property) + count);
} | java | {
"resource": ""
} |
q4886 | EntityStatisticsProcessor.addPropertyCounters | train | private void addPropertyCounters(UsageStatistics usageStatistics,
PropertyIdValue property) {
if (!usageStatistics.propertyCountsMain.containsKey(property)) {
usageStatistics.propertyCountsMain.put(property, 0);
usageStatistics.propertyCountsQualifier.put(property, 0);
usageStatistics.propertyCountsReferences.put(property, 0);
}
} | java | {
"resource": ""
} |
q4887 | EntityStatisticsProcessor.countKey | train | private void countKey(Map<String, Integer> map, String key, int count) {
if (map.containsKey(key)) {
map.put(key, map.get(key) + count);
} else {
map.put(key, count);
}
} | java | {
"resource": ""
} |
q4888 | WorldMapProcessor.addSite | train | public void addSite(String siteKey) {
ValueMap gv = new ValueMap(siteKey);
if (!this.valueMaps.contains(gv)) {
this.valueMaps.add(gv);
}
} | java | {
"resource": ""
} |
q4889 | WorldMapProcessor.countCoordinateStatement | train | private void countCoordinateStatement(Statement statement,
ItemDocument itemDocument) {
Value value = statement.getValue();
if (!(value instanceof GlobeCoordinatesValue)) {
return;
}
GlobeCoordinatesValue coordsValue = (GlobeCoordinatesValue) value;
if (!this.globe.equals((coordsValue.getGlobe()))) {
return;
}
int xCoord = (int) (((coordsValue.getLongitude() + 180.0) / 360.0) * this.width)
% this.width;
int yCoord = (int) (((coordsValue.getLatitude() + 90.0) / 180.0) * this.height)
% this.height;
if (xCoord < 0 || yCoord < 0 || xCoord >= this.width
|| yCoord >= this.height) {
System.out.println("Dropping out-of-range coordinate: "
+ coordsValue);
return;
}
countCoordinates(xCoord, yCoord, itemDocument);
this.count += 1;
if (this.count % 100000 == 0) {
reportProgress();
writeImages();
}
} | java | {
"resource": ""
} |
q4890 | WorldMapProcessor.countCoordinates | train | private void countCoordinates(int xCoord, int yCoord,
ItemDocument itemDocument) {
for (String siteKey : itemDocument.getSiteLinks().keySet()) {
Integer count = this.siteCounts.get(siteKey);
if (count == null) {
this.siteCounts.put(siteKey, 1);
} else {
this.siteCounts.put(siteKey, count + 1);
}
}
for (ValueMap vm : this.valueMaps) {
vm.countCoordinates(xCoord, yCoord, itemDocument);
}
} | java | {
"resource": ""
} |
q4891 | WorldMapProcessor.writeImages | train | private void writeImages() {
for (ValueMap gv : this.valueMaps) {
gv.writeImage();
}
try (PrintStream out = new PrintStream(
ExampleHelpers.openExampleFileOuputStream("map-site-count.csv"))) {
out.println("Site key,Number of geo items");
out.println("wikidata total," + this.count);
for (Entry<String, Integer> entry : this.siteCounts.entrySet()) {
out.println(entry.getKey() + "," + entry.getValue());
}
} catch (IOException e) {
e.printStackTrace();
}
} | java | {
"resource": ""
} |
q4892 | WorldMapProcessor.getColor | train | private int getColor(int value) {
if (value == 0) {
return 0;
}
double scale = Math.log10(value) / Math.log10(this.topValue);
double lengthScale = Math.min(1.0, scale) * (colors.length - 1);
int index = 1 + (int) lengthScale;
if (index == colors.length) {
index--;
}
double partScale = lengthScale - (index - 1);
int r = (int) (colors[index - 1][0] + partScale
* (colors[index][0] - colors[index - 1][0]));
int g = (int) (colors[index - 1][1] + partScale
* (colors[index][1] - colors[index - 1][1]));
int b = (int) (colors[index - 1][2] + partScale
* (colors[index][2] - colors[index - 1][2]));
r = Math.min(255, r);
b = Math.min(255, b);
g = Math.min(255, g);
return (r << 16) | (g << 8) | b;
} | java | {
"resource": ""
} |
q4893 | ReferenceBuilder.withPropertyValue | train | public ReferenceBuilder withPropertyValue(PropertyIdValue propertyIdValue,
Value value) {
getSnakList(propertyIdValue).add(
factory.getValueSnak(propertyIdValue, value));
return getThis();
} | java | {
"resource": ""
} |
q4894 | DumpProcessingOutputAction.close | train | private static void close(Closeable closeable) {
if (closeable != null) {
try {
closeable.close();
} catch (IOException ignored) {
logger.error("Failed to close output stream: "
+ ignored.getMessage());
}
}
} | java | {
"resource": ""
} |
q4895 | ExampleHelpers.configureLogging | train | public static void configureLogging() {
// Create the appender that will write log messages to the console.
ConsoleAppender consoleAppender = new ConsoleAppender();
// Define the pattern of log messages.
// Insert the string "%c{1}:%L" to also show class name and line.
String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n";
consoleAppender.setLayout(new PatternLayout(pattern));
// Change to Level.ERROR for fewer messages:
consoleAppender.setThreshold(Level.INFO);
consoleAppender.activateOptions();
Logger.getRootLogger().addAppender(consoleAppender);
} | java | {
"resource": ""
} |
q4896 | ExampleHelpers.processEntitiesFromWikidataDump | train | public static void processEntitiesFromWikidataDump(
EntityDocumentProcessor entityDocumentProcessor) {
// Controller object for processing dumps:
DumpProcessingController dumpProcessingController = new DumpProcessingController(
"wikidatawiki");
dumpProcessingController.setOfflineMode(OFFLINE_MODE);
// // Optional: Use another download directory:
// dumpProcessingController.setDownloadDirectory(System.getProperty("user.dir"));
// Should we process historic revisions or only current ones?
boolean onlyCurrentRevisions;
switch (DUMP_FILE_MODE) {
case ALL_REVS:
case ALL_REVS_WITH_DAILIES:
onlyCurrentRevisions = false;
break;
case CURRENT_REVS:
case CURRENT_REVS_WITH_DAILIES:
case JSON:
case JUST_ONE_DAILY_FOR_TEST:
default:
onlyCurrentRevisions = true;
}
// Subscribe to the most recent entity documents of type wikibase item:
dumpProcessingController.registerEntityDocumentProcessor(
entityDocumentProcessor, null, onlyCurrentRevisions);
// Also add a timer that reports some basic progress information:
EntityTimerProcessor entityTimerProcessor = new EntityTimerProcessor(
TIMEOUT_SEC);
dumpProcessingController.registerEntityDocumentProcessor(
entityTimerProcessor, null, onlyCurrentRevisions);
MwDumpFile dumpFile = null;
try {
// Start processing (may trigger downloads where needed):
switch (DUMP_FILE_MODE) {
case ALL_REVS:
case CURRENT_REVS:
dumpFile = dumpProcessingController
.getMostRecentDump(DumpContentType.FULL);
break;
case ALL_REVS_WITH_DAILIES:
case CURRENT_REVS_WITH_DAILIES:
MwDumpFile fullDumpFile = dumpProcessingController
.getMostRecentDump(DumpContentType.FULL);
MwDumpFile incrDumpFile = dumpProcessingController
.getMostRecentDump(DumpContentType.DAILY);
lastDumpFileName = fullDumpFile.getProjectName() + "-"
+ incrDumpFile.getDateStamp() + "."
+ fullDumpFile.getDateStamp();
dumpProcessingController.processAllRecentRevisionDumps();
break;
case JSON:
dumpFile = dumpProcessingController
.getMostRecentDump(DumpContentType.JSON);
break;
case JUST_ONE_DAILY_FOR_TEST:
dumpFile = dumpProcessingController
.getMostRecentDump(DumpContentType.DAILY);
break;
default:
throw new RuntimeException("Unsupported dump processing type "
+ DUMP_FILE_MODE);
}
if (dumpFile != null) {
lastDumpFileName = dumpFile.getProjectName() + "-"
+ dumpFile.getDateStamp();
dumpProcessingController.processDump(dumpFile);
}
} catch (TimeoutException e) {
// The timer caused a time out. Continue and finish normally.
}
// Print final timer results:
entityTimerProcessor.close();
} | java | {
"resource": ""
} |
q4897 | BufferedValueConverter.addValue | train | void addValue(V value, Resource resource) {
this.valueQueue.add(value);
this.valueSubjectQueue.add(resource);
} | java | {
"resource": ""
} |
q4898 | TermStatementUpdate.processAliases | train | protected void processAliases(List<MonolingualTextValue> addAliases, List<MonolingualTextValue> deleteAliases) {
for(MonolingualTextValue val : addAliases) {
addAlias(val);
}
for(MonolingualTextValue val : deleteAliases) {
deleteAlias(val);
}
} | java | {
"resource": ""
} |
q4899 | TermStatementUpdate.deleteAlias | train | protected void deleteAlias(MonolingualTextValue alias) {
String lang = alias.getLanguageCode();
AliasesWithUpdate currentAliases = newAliases.get(lang);
if (currentAliases != null) {
currentAliases.aliases.remove(alias);
currentAliases.deleted.add(alias);
currentAliases.write = true;
}
} | java | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.