proj_name
stringclasses 131
values | relative_path
stringlengths 30
228
| class_name
stringlengths 1
68
| func_name
stringlengths 1
48
| masked_class
stringlengths 78
9.82k
| func_body
stringlengths 46
9.61k
| len_input
int64 29
2.01k
| len_output
int64 14
1.94k
| total
int64 55
2.05k
| relevant_context
stringlengths 0
38.4k
|
|---|---|---|---|---|---|---|---|---|---|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/ExtractorPDF.java
|
ExtractorPDF
|
shouldExtract
|
class ExtractorPDF extends ContentExtractor {
@SuppressWarnings("unused")
private static final long serialVersionUID = 3L;
private static final Logger LOGGER =
Logger.getLogger(ExtractorPDF.class.getName());
{
setMaxSizeToParse(10*1024*1024L); // 10MB
}
public long getMaxSizeToParse() {
return (Long) kp.get("maxSizeToParse");
}
/**
* The maximum size of PDF files to consider. PDFs larger than this
* maximum will not be searched for links.
*/
public void setMaxSizeToParse(long threshold) {
kp.put("maxSizeToParse",threshold);
}
public ExtractorPDF() {
}
@Override
protected boolean shouldExtract(CrawlURI uri) {<FILL_FUNCTION_BODY>}
protected boolean innerExtract(CrawlURI curi){
File tempFile;
int sn;
Thread thread = Thread.currentThread();
if (thread instanceof SinkHandlerLogThread) {
sn = ((SinkHandlerLogThread)thread).getSerialNumber();
} else {
sn = System.identityHashCode(thread);
}
try {
tempFile = File.createTempFile("tt" + sn , "tmp.pdf");
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
ArrayList<String> uris;
try {
curi.getRecorder().copyContentBodyTo(tempFile);
try (PDFParser parser = new PDFParser(tempFile.getAbsolutePath())){
uris = parser.extractURIs();
}
} catch (IOException e) {
curi.getNonFatalFailures().add(e);
return false;
} catch (RuntimeException e) {
// Truncated/corrupt PDFs may generate ClassCast exceptions, or
// other problems
curi.getNonFatalFailures().add(e);
return false;
} finally {
FileUtils.deleteSoonerOrLater(tempFile);
}
if (uris == null) {
return true;
}
for (String uri: uris) {
try {
UURI src = curi.getUURI();
UURI dest = UURIFactory.getInstance(uri);
LinkContext lc = LinkContext.NAVLINK_MISC;
Hop hop = Hop.NAVLINK;
addOutlink(curi, dest, lc, hop);
} catch (URIException e1) {
// There may not be a controller (e.g. If we're being run
// by the extractor tool).
logUriError(e1, curi.getUURI(), uri);
}
}
numberOfLinksExtracted.addAndGet(uris.size());
LOGGER.fine(curi+" has "+uris.size()+" links.");
// Set flag to indicate that link extraction is completed.
return true;
}
}
|
long max = getMaxSizeToParse();
if (uri.getRecorder().getRecordedInput().getSize() > max) {
return false;
}
String ct = uri.getContentType();
return (ct != null) && (ct.startsWith("application/pdf"));
| 796
| 78
| 874
|
<methods>public non-sealed void <init>() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/ExtractorRobotsTxt.java
|
ExtractorRobotsTxt
|
parseRobotsTxt
|
class ExtractorRobotsTxt extends ContentExtractor {
private static final Logger LOGGER = Logger
.getLogger(ExtractorRobotsTxt.class.getName());
private static final Pattern ROBOTS_PATTERN = Pattern
.compile("^https?://[^/]+/robots.txt$");
private static final Pattern SITEMAP_PATTERN = Pattern
.compile("(?i)Sitemap:\\s*(.+)$");
public static final String ANNOTATION_IS_SITEMAP = "isSitemap";
@Override
protected boolean shouldExtract(CrawlURI uri) {
boolean shouldExtract = false;
if (uri.isPrerequisite()) {
shouldExtract = ROBOTS_PATTERN.matcher(uri.getURI()).matches();
LOGGER.finest("Checked prerequisite " + uri + " GOT " + shouldExtract);
}
return shouldExtract;
}
public List<String> parseRobotsTxt(InputStream input) {<FILL_FUNCTION_BODY>}
@Override
protected boolean innerExtract(CrawlURI curi) {
try {
// Parse the robots for the sitemaps.
List<String> links = parseRobotsTxt(
curi.getRecorder()
.getContentReplayInputStream());
LOGGER.finest("Checked " + curi + " GOT " + links);
// Get the max outlinks (needed by add method):
int max = getExtractorParameters().getMaxOutlinks();
// Accrue links:
for (String link : links) {
try {
// We've found a sitemap:
LOGGER.fine("Found site map: " + link);
numberOfLinksExtracted.incrementAndGet();
// Add links but using the cloned CrawlURI as the crawl
// context.
CrawlURI newCuri = addRelativeToBase(curi, max, link,
LinkContext.MANIFEST_MISC, Hop.MANIFEST);
if (newCuri == null) {
continue;
}
// Annotate as a Site Map:
newCuri.getAnnotations().add(
ExtractorRobotsTxt.ANNOTATION_IS_SITEMAP);
} catch (URIException e) {
logUriError(e, curi.getUURI(), link);
}
}
// Return number of links discovered:
return !links.isEmpty();
} catch (IOException e) {
LOGGER.log(Level.WARNING, curi.getURI(), e);
curi.getNonFatalFailures().add(e);
}
return false;
}
}
|
ArrayList<String> links = new ArrayList<>();
BufferedReader reader = new BufferedReader(new InputStreamReader(input));
try {
String line;
Matcher matcher;
while ((line = reader.readLine()) != null) {
matcher = SITEMAP_PATTERN.matcher(line);
if (matcher.matches()) {
links.add(matcher.group(1));
}
}
} catch (IOException e) {
LOGGER.warning(e.toString());
}
return links;
| 784
| 159
| 943
|
<methods>public non-sealed void <init>() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/ExtractorSWF.java
|
ExtractorTagParser
|
parsePlaceObject2
|
class ExtractorTagParser extends TagParser {
protected ExtractorTagParser(SWFTagTypes tagtypes) {
super(tagtypes);
}
protected void parseDefineBits(InStream in) throws IOException {
// DO NOTHING - no URLs to be found in bits
}
protected void parseDefineBitsJPEG3(InStream in) throws IOException {
// DO NOTHING - no URLs to be found in bits
}
protected void parseDefineBitsLossless(InStream in, int length, boolean hasAlpha) throws IOException {
// DO NOTHING - no URLs to be found in bits
}
protected void parseDefineButtonSound(InStream in) throws IOException {
// DO NOTHING - no URLs to be found in sound
}
protected void parseDefineFont(InStream in) throws IOException {
// DO NOTHING - no URLs to be found in font
}
protected void parseDefineJPEG2(InStream in, int length) throws IOException {
// DO NOTHING - no URLs to be found in jpeg
}
protected void parseDefineJPEGTables(InStream in) throws IOException {
// DO NOTHING - no URLs to be found in jpeg
}
protected void parseDefineShape(int type, InStream in) throws IOException {
// DO NOTHING - no URLs to be found in shape
}
protected void parseDefineSound(InStream in) throws IOException {
// DO NOTHING - no URLs to be found in sound
}
protected void parseFontInfo(InStream in, int length, boolean isFI2) throws IOException {
// DO NOTHING - no URLs to be found in font info
}
protected void parseDefineFont2(InStream in) throws IOException {
// DO NOTHING - no URLs to be found in bits
}
// heritrix: Overridden to use our TagParser and SWFReader. The rest of
// the code is the same.
@Override
protected void parseDefineSprite(InStream in) throws IOException {
int id = in.readUI16();
in.readUI16(); // frame count
SWFTagTypes sstt = mTagtypes.tagDefineSprite(id);
if (sstt == null)
return;
// heritrix: only these two lines differ from
// super.parseDefineSprite()
TagParser parser = new ExtractorTagParser(sstt);
SWFReader reader = new SWFReader(parser, in);
reader.readTags();
}
// Overridden to read 32 bit clip event flags when flash version >= 6.
// All the rest of the code is copied directly. Fixes HER-1509.
@Override
protected void parsePlaceObject2( InStream in ) throws IOException
{<FILL_FUNCTION_BODY>}
}
|
boolean hasClipActions = in.readUBits(1) != 0;
boolean hasClipDepth = in.readUBits(1) != 0;
boolean hasName = in.readUBits(1) != 0;
boolean hasRatio = in.readUBits(1) != 0;
boolean hasColorTransform = in.readUBits(1) != 0;
boolean hasMatrix = in.readUBits(1) != 0;
boolean hasCharacter = in.readUBits(1) != 0;
boolean isMove = in.readUBits(1) != 0;
int depth = in.readUI16();
int charId = hasCharacter ? in.readUI16() : 0;
Matrix matrix = hasMatrix ? new Matrix( in ) : null;
AlphaTransform cxform = hasColorTransform ? new AlphaTransform( in ) : null;
int ratio = hasRatio ? in.readUI16() : -1;
String name = hasName ? in.readString(mStringEncoding) : null;
int clipDepth = hasClipDepth ? in.readUI16() : 0;
int clipEventFlags = 0;
if (hasClipActions) {
in.readUI16(); // reserved
// heritrix: flags size changed in swf version 6
clipEventFlags = mFlashVersion < 6 ? in.readUI16() : in.readSI32();
}
SWFActions actions = mTagtypes.tagPlaceObject2(isMove, clipDepth,
depth, charId, matrix, cxform, ratio, name, clipEventFlags);
if (hasClipActions && actions != null) {
int flags = 0;
// heritrix: flags size changed in swf version 6
while ((flags = mFlashVersion < 6 ? in.readUI16() : in.readSI32()) != 0) {
in.readUI32(); // length
actions.start(flags);
ActionParser parser = new ActionParser(actions, mFlashVersion);
parser.parse(in);
}
actions.done();
}
| 743
| 569
| 1,312
|
<methods>public non-sealed void <init>() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/ExtractorURI.java
|
ExtractorURI
|
extractLink
|
class ExtractorURI extends Extractor {
@SuppressWarnings("unused")
private static final long serialVersionUID = 3L;
private static Logger LOGGER =
Logger.getLogger(ExtractorURI.class.getName());
protected static final String ABS_HTTP_URI_PATTERN = "^https?://[^\\s<>]*$";
/**
* Constructor
*/
public ExtractorURI() {
}
@Override
protected boolean shouldProcess(CrawlURI uri) {
return true;
}
/**
* Perform usual extraction on a CrawlURI
*
* @param curi Crawl URI to process.
*/
@Override
public void extract(CrawlURI curi) {
for (CrawlURI link : curi.getOutLinks()) {
extractLink(curi, link);
}
}
/**
* Consider a single Link for internal URIs
*
* @param curi CrawlURI to add discoveries to
* @param wref Link to examine for internal URIs
*/
protected void extractLink(CrawlURI curi, CrawlURI wref) {<FILL_FUNCTION_BODY>}
/**
* Look for URIs inside the supplied UURI.
*
* Static for ease of testing or outside use.
*
* @param source UURI to example
* @return List of discovered String URIs.
*/
protected static List<String> extractQueryStringLinks(UURI source) {
List<String> results = new ArrayList<String>();
String decodedQuery;
try {
decodedQuery = source.getQuery();
} catch (URIException e1) {
// shouldn't happen
return results;
}
if(decodedQuery==null) {
return results;
}
// check if full query-string appears to be http(s) URI
Matcher m = TextUtils.getMatcher(ABS_HTTP_URI_PATTERN,decodedQuery);
if(m.matches()) {
TextUtils.recycleMatcher(m);
results.add(decodedQuery);
}
// split into params, see if any param value is http(s) URI
String rawQuery = new String(source.getRawQuery());
String[] params = rawQuery.split("&");
for (String param : params) {
String[] keyVal = param.split("=");
if(keyVal.length==2) {
String candidate;
try {
candidate = LaxURLCodec.DEFAULT.decode(keyVal[1]);
} catch (DecoderException e) {
continue;
}
// TODO: use other non-UTF8 codecs when appropriate
m.reset(candidate);
if(m.matches()) {
results.add(candidate);
}
}
}
return results;
}
}
|
UURI source = null;
try {
source = UURIFactory.getInstance(wref.getURI());
} catch (URIException e) {
LOGGER.log(Level.FINE,"bad URI",e);
}
if(source == null) {
// shouldn't happen
return;
}
List<String> found = extractQueryStringLinks(source);
for (String uri : found) {
try {
UURI dest = UURIFactory.getInstance(uri);
LinkContext lc = LinkContext.SPECULATIVE_MISC;
Hop hop = Hop.SPECULATIVE;
addOutlink(curi, dest, lc, hop);
numberOfLinksExtracted.incrementAndGet();
} catch (URIException e) {
LOGGER.log(Level.FINE, "bad URI", e);
}
}
// TODO: consider path URIs too
| 762
| 243
| 1,005
|
<methods>public non-sealed void <init>() ,public static void add(org.archive.modules.CrawlURI, int, java.lang.String, org.archive.modules.extractor.LinkContext, org.archive.modules.extractor.Hop) throws URIException,public static org.archive.modules.CrawlURI addRelativeToBase(org.archive.modules.CrawlURI, int, java.lang.CharSequence, org.archive.modules.extractor.LinkContext, org.archive.modules.extractor.Hop) throws URIException,public static org.archive.modules.CrawlURI addRelativeToVia(org.archive.modules.CrawlURI, int, java.lang.String, org.archive.modules.extractor.LinkContext, org.archive.modules.extractor.Hop) throws URIException,public org.archive.modules.extractor.ExtractorParameters getExtractorParameters() ,public org.archive.modules.extractor.UriErrorLoggerModule getLoggerModule() ,public void logUriError(URIException, org.archive.net.UURI, java.lang.CharSequence) ,public java.lang.String report() ,public void setExtractorParameters(org.archive.modules.extractor.ExtractorParameters) ,public void setLoggerModule(org.archive.modules.extractor.UriErrorLoggerModule) <variables>public static final org.archive.modules.extractor.ExtractorParameters DEFAULT_PARAMETERS,private static final java.util.logging.Logger LOGGER,protected transient org.archive.modules.extractor.ExtractorParameters extractorParameters,private static final java.util.logging.Logger logger,protected transient org.archive.modules.extractor.UriErrorLoggerModule loggerModule,protected java.util.concurrent.atomic.AtomicLong numberOfLinksExtracted
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/ExtractorXML.java
|
ExtractorXML
|
innerExtract
|
class ExtractorXML extends ContentExtractor {
@SuppressWarnings("unused")
private static final long serialVersionUID = 3L;
private static Logger logger =
Logger.getLogger(ExtractorXML.class.getName());
static final Pattern XML_URI_EXTRACTOR = Pattern
.compile("(?s)[\"\'>]\\s*(?:<!\\[CDATA\\[)?([^<>\\s\'\"@]+)\\s*(?:\\]\\]>)?[\"\'<]");
// GROUPS:
// (G1) possible uri
public ExtractorXML() {
}
@Override
protected boolean shouldExtract(CrawlURI curi) {
String mimeType = curi.getContentType();
// first check for xml mimetype or file extension
// application/vnd.openxmlformats.* seem to be zip archives
if (mimeType != null
&& (mimeType.toLowerCase().indexOf("xml") >= 0 && !mimeType
.matches("(?i)application/vnd.openxmlformats.*"))
|| curi.toString().toLowerCase().endsWith(".rss")
|| curi.toString().toLowerCase().endsWith(".xml")) {
return true;
}
// check if content starts with xml preamble "<?xml" and does not
// contain "<!doctype html" or "<html" early in the content
String contentStartingChunk = curi.getRecorder().getContentReplayPrefixString(400);
if (contentStartingChunk.matches("(?is)[\\ufeff]?<\\?xml\\s.*")
&& !contentStartingChunk.matches("(?is).*(?:<!doctype\\s+html|<html[>\\s]).*")) {
return true;
}
return false;
}
/**
* @param curi Crawl URI to process.
*/
@Override
protected boolean innerExtract(CrawlURI curi) {<FILL_FUNCTION_BODY>}
protected Charset getContentDeclaredCharset(CrawlURI curi, String contentPrefix) {
Matcher m = TextUtils.getMatcher("(?s)<\\?xml\\s+[^>]*encoding=['\"]([^'\"]+)['\"]", contentPrefix);
String charsetName = null;
try {
if(m.find()) {
charsetName = m.group(1);
return Charset.forName(charsetName);
}
} catch (IllegalArgumentException iae) {
logger.log(Level.INFO,"Unknown content-encoding '"+charsetName+"' declared; using default");
curi.getAnnotations().add("unsatisfiableCharsetInXML:"+charsetName);
} finally {
TextUtils.recycleMatcher(m);
}
return null;
}
public static long processXml(Extractor ext,
CrawlURI curi, CharSequence cs) {
long foundLinks = 0;
Matcher matcher = XML_URI_EXTRACTOR.matcher(cs);
while (matcher.find()) {
String xmlUri = StringEscapeUtils.unescapeXml(matcher.group(1));
if (UriUtils.isVeryLikelyUri(xmlUri)) {
foundLinks++;
try {
// treat as speculative, as whether context really
// intends to create a followable/fetchable URI is
// unknown
int max = ext.getExtractorParameters().getMaxOutlinks();
addRelativeToBase(curi, max, xmlUri,
LinkContext.SPECULATIVE_MISC, Hop.SPECULATIVE);
} catch (URIException e) {
// There may not be a controller (e.g. If we're being run
// by the extractor tool).
ext.logUriError(e, curi.getUURI(), xmlUri);
}
}
}
return foundLinks;
}
}
|
ReplayCharSequence cs = null;
Charset contentDeclaredEncoding = null;
try {
// if charset not spec'd in http header look for <?xml encoding=""?>
if (!curi.containsContentTypeCharsetDeclaration()) {
String contentPrefix = curi.getRecorder().getContentReplayPrefixString(50);
contentDeclaredEncoding = getContentDeclaredCharset(curi, contentPrefix);
if(!curi.getRecorder().getCharset().equals(contentDeclaredEncoding) && contentDeclaredEncoding!=null) {
String newContentPrefix = curi.getRecorder().getContentReplayPrefixString(50,contentDeclaredEncoding);
Charset reflexiveCharset = getContentDeclaredCharset(curi, newContentPrefix);
if(contentDeclaredEncoding.equals(reflexiveCharset)) {
// content-declared charset is self-consistent; use
curi.getAnnotations().add("usingCharsetInXML:"+contentDeclaredEncoding);
curi.getRecorder().setCharset(contentDeclaredEncoding);
} else {
// error: declared charset not evident once put into effect
curi.getAnnotations().add("inconsistentCharsetInXML:"+contentDeclaredEncoding);
// so, ignore in favor of original default
}
}
}
cs = curi.getRecorder().getContentReplayCharSequence();
numberOfLinksExtracted.addAndGet(processXml(this, curi, cs));
// Set flag to indicate that link extraction is completed.
return true;
} catch (IOException e) {
logger.severe("Failed getting ReplayCharSequence: " + e.getMessage());
}
return false;
| 1,053
| 430
| 1,483
|
<methods>public non-sealed void <init>() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/HTMLLinkContext.java
|
HTMLLinkContext
|
get
|
class HTMLLinkContext extends LinkContext {
private static final long serialVersionUID = 1L;
final public static HTMLLinkContext META = new HTMLLinkContext("meta");
final public static HTMLLinkContext A_HREF = new HTMLLinkContext("a", "href");
final public static HTMLLinkContext IMG_SRC = new HTMLLinkContext("img", "src");
final public static HTMLLinkContext IMG_SRCSET = new HTMLLinkContext("img", "srcset");
final public static HTMLLinkContext SOURCE_SRCSET = new HTMLLinkContext("source", "srcset");
final public static HTMLLinkContext IMG_DATA_SRC = new HTMLLinkContext("img", "data-src");
final public static HTMLLinkContext IMG_DATA_SRCSET = new HTMLLinkContext("img", "data-srcset");
final public static HTMLLinkContext IMG_DATA_ORIGINAL = new HTMLLinkContext("img", "data-original");
final public static HTMLLinkContext IMG_DATA_ORIGINAL_SET = new HTMLLinkContext("source", "data-original-set");
final public static HTMLLinkContext SOURCE_DATA_ORIGINAL_SET = new HTMLLinkContext("source", "data-original-set");
final public static HTMLLinkContext SCRIPT_SRC = new HTMLLinkContext("script", "src");
final public static HTMLLinkContext META_HREF = new HTMLLinkContext("meta", "href");
/**
* The HTML path to the URL.
*/
private final String path;
/**
* return an instance of HTMLLinkContext for attribute {@code attr} in
* element {@code el}. returns pre-allocated shared instance for common case,
* or new instance for others.
* @param el element name
* @param attr attribute name
* @return instance of HTMLLinkContext
*/
public static HTMLLinkContext get(CharSequence el, CharSequence attr) {
if (attr.equals("href") || attr.equals("HREF")) {
if (el.equals("a") || el.equals("A")) return A_HREF;
if (el.equals("meta") || el.equals("META")) return META_HREF;
} else if (attr.equals("src") || attr.equals("SRC")) {
if (el.equals("img") || attr.equals("IMG")) return IMG_SRC;
if (el.equals("script") || attr.equals("SCRIPT")) return SCRIPT_SRC;
} else if (attr.equals("srcset") || attr.equals("SRCSET")) {
if (el.equals("img") || attr.equals("IMG")) return IMG_SRCSET;
if (el.equals("source") || attr.equals("SOURCE")) return SOURCE_SRCSET;
} else if (attr.equals("data-src") || attr.equals("DATA-SRC")) {
if (el.equals("img") || attr.equals("IMG")) return IMG_DATA_SRC;
} else if (attr.equals("data-srcset") || attr.equals("DATA-SRCSET")) {
if (el.equals("img") || attr.equals("IMG")) return IMG_DATA_SRCSET;
} else if (attr.equals("data-original") || attr.equals("DATA-ORIGINAL")) {
if (el.equals("img") || attr.equals("IMG")) return IMG_DATA_ORIGINAL;
} else if (attr.equals("data-original-set") || attr.equals("DATA-ORIGINAL-SET")) {
if (el.equals("img") || attr.equals("IMG")) return IMG_DATA_ORIGINAL_SET;
if (el.equals("source") || attr.equals("SOURCE")) return SOURCE_DATA_ORIGINAL_SET;
}
return new HTMLLinkContext(el, attr);
}
/**
* return an instance of HTMLLinkContext for path {@code path}.
* returns pre-allocated shared instance for common case, or new instance for others.
* <p>TODO: most code calling this method builds path by concatenating element name
* and attribute name. consider changing such code to call {@link #get(CharSequence, CharSequence)}
* instead.</p>
* @param path element and attribute in XLink-like path notation
* @return instance of HTMLLinkContext
*/
public static HTMLLinkContext get(String path) {<FILL_FUNCTION_BODY>}
/**
* Constructor.
*
* @param path an XPath-like context, eg "A\@HREF"
*/
protected HTMLLinkContext(String path) {
// FIXME: Verify that path really is XPath-like
this.path = path;
}
protected HTMLLinkContext(CharSequence element, CharSequence attribute) {
if (attribute == null) {
this.path = "";
} else {
this.path = element + "/@" + attribute;
}
}
@Override
public String toString() {
return path;
}
}
|
if (path.equalsIgnoreCase("a/@href")) return A_HREF;
if (path.equalsIgnoreCase("meta/@href")) return META_HREF;
if (path.equalsIgnoreCase("img/@src")) return IMG_SRC;
if (path.equalsIgnoreCase("img/@srcset")) return IMG_SRCSET;
if (path.equalsIgnoreCase("source/@srcset")) return SOURCE_SRCSET;
if (path.equalsIgnoreCase("script/@src")) return SCRIPT_SRC;
if (path.equalsIgnoreCase("img/@data-src")) return IMG_DATA_SRC;
if (path.equalsIgnoreCase("img/@data-srcset")) return IMG_DATA_SRCSET;
if (path.equalsIgnoreCase("img/@data-original")) return IMG_DATA_ORIGINAL;
if (path.equalsIgnoreCase("img/@data-original-set")) return IMG_DATA_ORIGINAL_SET;
if (path.equalsIgnoreCase("source/@data-original-set")) return SOURCE_DATA_ORIGINAL_SET;
return new HTMLLinkContext(path);
| 1,341
| 299
| 1,640
|
<methods>public non-sealed void <init>() ,public boolean equals(java.lang.Object) ,public int hashCode() <variables>public static final org.archive.modules.extractor.LinkContext EMBED_MISC,public static final org.archive.modules.extractor.LinkContext INFERRED_MISC,public static final org.archive.modules.extractor.LinkContext JS_MISC,public static final org.archive.modules.extractor.LinkContext MANIFEST_MISC,public static final org.archive.modules.extractor.LinkContext NAVLINK_MISC,public static final org.archive.modules.extractor.LinkContext PREREQ_MISC,public static final org.archive.modules.extractor.LinkContext SPECULATIVE_MISC,private static final long serialVersionUID
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/HTTPContentDigest.java
|
HTTPContentDigest
|
innerProcess
|
class HTTPContentDigest extends Processor {
@SuppressWarnings("unused")
private static final long serialVersionUID = 3L;
private static Logger logger =
Logger.getLogger(HTTPContentDigest.class.getName());
{
setStripRegex("");
}
public String getStripRegex() {
return (String) kp.get("stripRegex");
}
/**
* A regular expression that matches those portions of downloaded documents
* that need to be ignored when calculating the content digest. Segments
* matching this expression will be rewritten with the blank character for
* the content digest.
*/
public void setStripRegex(String regex) {
kp.put("stripRegex",regex);
}
{
setMaxSizeToDigest(1*1024*1024L); // 1MB
}
public long getMaxSizeToDigest() {
return (Long) kp.get("maxSizeToDigest");
}
/** Maximum file size for - longer files will be ignored. -1 = unlimited*/
public void setMaxSizeToDigest(long threshold) {
kp.put("maxSizeToDigest",threshold);
}
private static final String SHA1 = "SHA1";
/**
* Constructor.
*/
public HTTPContentDigest() {
}
protected boolean shouldProcess(CrawlURI uri) {
if (!uri.getContentType().startsWith("text")) {
return false;
}
long maxSize = getMaxSizeToDigest();
if ((maxSize > - 1) && (maxSize < uri.getContentSize())) {
return false;
}
return true;
}
protected void innerProcess(CrawlURI curi) throws InterruptedException {<FILL_FUNCTION_BODY>}
}
|
// Ok, if we got this far we need to calculate the content digest.
// Get the regex
String regex = getStripRegex();
// Get a replay of the document character seq.
ReplayCharSequence cs = null;
try {
cs = curi.getRecorder().getContentReplayCharSequence();
// Create a MessageDigest
MessageDigest digest = null;
try {
digest = MessageDigest.getInstance(SHA1);
} catch (NoSuchAlgorithmException e1) {
e1.printStackTrace();
return;
}
digest.reset();
String s = null;
if (StringUtils.isEmpty(regex)) {
s = cs.toString();
} else {
// Process the document
Matcher m = TextUtils.getMatcher(regex, cs);
s = m.replaceAll(" ");
TextUtils.recycleMatcher(m);
}
digest.update(s.getBytes());
// Get the new digest value
byte[] newDigestValue = digest.digest();
// Save new digest value
curi.setContentDigest(SHA1, newDigestValue);
} catch (Exception e) {
curi.getNonFatalFailures().add(e);
logger.warning("Failed get of replay char sequence " +
curi.toString() + " " + e.getMessage() + " " +
Thread.currentThread().getName());
return; // Can't proceed if this happens.
}
| 491
| 391
| 882
|
<methods>public non-sealed void <init>() ,public void doCheckpoint(org.archive.checkpointing.Checkpoint) throws java.io.IOException,public void finishCheckpoint(org.archive.checkpointing.Checkpoint) ,public static java.lang.String flattenVia(org.archive.modules.CrawlURI) ,public java.lang.String getBeanName() ,public boolean getEnabled() ,public org.archive.spring.KeyedProperties getKeyedProperties() ,public static long getRecordedSize(org.archive.modules.CrawlURI) ,public org.archive.modules.deciderules.DecideRule getShouldProcessRule() ,public long getURICount() ,public static boolean hasHttpAuthenticationCredential(org.archive.modules.CrawlURI) ,public boolean isRunning() ,public static boolean isSuccess(org.archive.modules.CrawlURI) ,public org.archive.modules.ProcessResult process(org.archive.modules.CrawlURI) throws java.lang.InterruptedException,public java.lang.String report() ,public void setBeanName(java.lang.String) ,public void setEnabled(boolean) ,public void setRecoveryCheckpoint(org.archive.checkpointing.Checkpoint) ,public void setShouldProcessRule(org.archive.modules.deciderules.DecideRule) ,public void start() ,public void startCheckpoint(org.archive.checkpointing.Checkpoint) ,public void stop() <variables>protected java.lang.String beanName,protected boolean isRunning,protected org.archive.spring.KeyedProperties kp,protected org.archive.checkpointing.Checkpoint recoveryCheckpoint,protected java.util.concurrent.atomic.AtomicLong uriCount
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/LinkContext.java
|
SimpleLinkContext
|
equals
|
class SimpleLinkContext extends LinkContext {
private static final long serialVersionUID = 1L;
private String desc;
public SimpleLinkContext(String desc) {
this.desc = desc;
}
public String toString() {
return desc;
}
}
/** Stand-in value for embeds without other context. */
final public static LinkContext EMBED_MISC
= new SimpleLinkContext("=EMBED_MISC");
/** Stand-in value for JavaScript-discovered urls without other context. */
final public static LinkContext JS_MISC
= new SimpleLinkContext("=JS_MISC");
/** Stand-in value for navlink urls without other context. */
final public static LinkContext NAVLINK_MISC
= new SimpleLinkContext("=NAVLINK_MISC");
/**
* Stand-in value for speculative/aggressively extracted urls without
* other context.
*/
final public static LinkContext SPECULATIVE_MISC
= new SimpleLinkContext("=SPECULATIVE_MISC");
/**
* Stand-in value for inferred urls without other context.
*/
final public static LinkContext INFERRED_MISC
= new SimpleLinkContext("=INFERRED_MISC");
/** Stand-in value for prerequisite urls without other context. */
final public static LinkContext PREREQ_MISC
= new SimpleLinkContext("=PREREQ_MISC");
/** Stand-in value for prerequisite urls without other context. */
final public static LinkContext MANIFEST_MISC
= new SimpleLinkContext("=MANIFEST_MISC");
public boolean equals(Object o) {<FILL_FUNCTION_BODY>
|
if (o == this) {
return true;
}
if (!(o instanceof LinkContext)) {
return false;
}
return o.toString().equals(toString());
| 474
| 50
| 524
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/PDFParser.java
|
PDFParser
|
extractURIs
|
class PDFParser implements Closeable {
protected ArrayList<String> foundURIs;
protected PDDocument documentReader;
protected byte[] document;
public PDFParser(String doc) throws IOException {
resetState();
getInFromFile(doc);
initialize();
}
public PDFParser(byte[] doc) throws IOException{
resetState();
document = doc;
initialize();
}
/** Reinitialize the object as though a new one were created.
*/
protected void resetState(){
foundURIs = new ArrayList<String>();
documentReader = null;
document = null;
}
/**
* Reset the object and initialize it with a new byte array (the document).
* @param doc
* @throws IOException
*/
public void resetState(byte[] doc) throws IOException{
resetState();
document = doc;
initialize();
}
/** Reinitialize the object as though a new one were created, complete
* with a valid pointer to a document that can be read
* @param doc
* @throws IOException
*/
public void resetState(String doc) throws IOException{
resetState();
getInFromFile(doc);
initialize();
}
/**
* Read a file named 'doc' and store its' bytes for later processing.
* @param doc
* @throws IOException
*/
protected void getInFromFile(String doc) throws IOException{
File documentOnDisk = new File(doc);
documentReader = Loader.loadPDF(documentOnDisk);
}
/**
* Get a list of URIs retrieved from the Pdf during the
* extractURIs operation.
* @return A list of URIs retrieved from the Pdf during the
* extractURIs operation.
*/
public ArrayList<String> getURIs(){
return foundURIs;
}
/**
* Initialize opens the document for reading. This is done implicitly
* by the constuctor. This should only need to be called directly following
* a reset.
* @throws IOException
*/
protected void initialize() throws IOException{
if(document != null){
documentReader = Loader.loadPDF(document);
}
}
/**
* Extract URIs from all objects found in a Pdf document's catalog.
* Returns an array list representing all URIs found in the document catalog tree.
* @return URIs from all objects found in a Pdf document's catalog.
*/
public ArrayList<String> extractURIs() throws IOException {<FILL_FUNCTION_BODY>}
@Override
public void close() throws IOException {
if (documentReader != null) {
documentReader.close();
}
}
public static void main(String[] argv){
try {
PDFParser parser = new PDFParser("/tmp/pdfspec.pdf");
ArrayList<String> uris = parser.extractURIs();
Iterator<String> i = uris.iterator();
while(i.hasNext()){
String uri = (String)i.next();
System.out.println("got uri: " + uri);
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
|
for (PDPage page : documentReader.getPages()) {
for (PDAnnotation annotation : page.getAnnotations()) {
if (annotation instanceof PDAnnotationLink) {
PDAnnotationLink link = (PDAnnotationLink) annotation;
PDAction action = link.getAction();
if (action instanceof PDActionURI) {
PDActionURI uri = (PDActionURI) action;
foundURIs.add(uri.getURI());
}
}
}
}
return getURIs();
| 824
| 131
| 955
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/StringExtractorTestBase.java
|
TestData
|
testExtraction
|
class TestData {
public CrawlURI uri;
public CrawlURI expectedResult;
public TestData(CrawlURI uri, CrawlURI expectedResult) {
this.uri = uri;
this.expectedResult = expectedResult;
}
}
/**
* Returns an array of valid test data pairs. The pairs consist of text
* to be processed followed by
*
* @return the test data
*/
protected abstract String[] getValidTestData();
protected abstract Collection<TestData> makeData(String text, String uri)
throws Exception;
/**
* Tests each text/URI pair in the test data array.
*
* @throws Exception just in case
*/
public void testExtraction() throws Exception {<FILL_FUNCTION_BODY>
|
try {
String[] valid = getValidTestData();
for (int i = 0; i < valid.length; i += 2) {
testOne(valid[i], valid[i + 1]);
}
} catch (Exception e) {
e.printStackTrace(); // I hate maven.
throw e;
}
| 217
| 87
| 304
|
<methods>public non-sealed void <init>() ,public static Recorder createRecorder(java.lang.String) throws java.io.IOException,public static Recorder createRecorder(java.lang.String, java.lang.String) throws java.io.IOException,public final void setUp() ,public void testFinished() throws java.lang.Exception,public void testZeroContent() throws java.lang.Exception<variables>protected org.archive.modules.extractor.Extractor extractor
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/extractor/TrapSuppressExtractor.java
|
TrapSuppressExtractor
|
innerExtract
|
class TrapSuppressExtractor extends ContentExtractor {
@SuppressWarnings("unused")
private static final long serialVersionUID = -1028783453022579530L;
/** ALIst attribute key for carrying-forward content-digest from 'via'*/
public static String A_VIA_DIGEST = "via-digest";
protected long numberOfCURIsHandled = 0;
protected long numberOfCURIsSuppressed = 0;
/**
* Usual constructor.
*/
public TrapSuppressExtractor() {
}
protected boolean shouldExtract(CrawlURI uri) {
return true;
}
protected boolean innerExtract(CrawlURI curi){<FILL_FUNCTION_BODY>}
}
|
numberOfCURIsHandled++;
String currentDigest = curi.getContentDigestSchemeString();
String viaDigest = null;
if(curi.containsDataKey(A_VIA_DIGEST)) {
viaDigest = (String) curi.getData().get(A_VIA_DIGEST);
}
if(currentDigest!=null) {
curi.makeHeritable(A_VIA_DIGEST);
if(currentDigest.equals(viaDigest)) {
curi.getAnnotations().add("trapSuppressExtractor");
numberOfCURIsSuppressed++;
// mark as already-extracted -- suppressing further extraction
return true;
}
// already consulted; so clobber with current value to be
// inherited
curi.getData().put(A_VIA_DIGEST, currentDigest);
}
return false;
| 213
| 241
| 454
|
<methods>public non-sealed void <init>() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/AbstractCookieStore.java
|
LimitedCookieStoreFacade
|
clearExpired
|
class LimitedCookieStoreFacade implements CookieStore {
private List<Cookie> cookies;
protected LimitedCookieStoreFacade(List<Cookie> cookies) {
this.cookies = cookies;
}
@Override
public List<Cookie> getCookies() {
return cookies;
}
@Override
public boolean clearExpired(Date date) {<FILL_FUNCTION_BODY>}
@Override
public void clear() {
throw new RuntimeException("not implemented");
}
@Override
public void addCookie(Cookie cookie) {
AbstractCookieStore.this.addCookie(cookie);
}
}
|
int expiredCount = 0;
for( Cookie c : cookies) {
boolean expired = AbstractCookieStore.this.expireCookie(c, date);
if( expired ) {
logger.fine("Expired cookie: " + c + " for date: " + date);
expiredCount++;
}
}
if( expiredCount > 0 ) {
logger.fine("Expired " + expiredCount + " cookies for date: " + date);
return true;
} else {
return false;
}
| 174
| 142
| 316
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/BasicExecutionAwareEntityEnclosingRequest.java
|
BasicExecutionAwareEntityEnclosingRequest
|
expectContinue
|
class BasicExecutionAwareEntityEnclosingRequest extends
BasicExecutionAwareRequest implements HttpEntityEnclosingRequest {
private HttpEntity entity;
public BasicExecutionAwareEntityEnclosingRequest(final String method,
final String uri) {
super(method, uri);
}
public BasicExecutionAwareEntityEnclosingRequest(final String method,
final String uri, final ProtocolVersion ver) {
super(method, uri, ver);
}
public BasicExecutionAwareEntityEnclosingRequest(RequestLine requestline) {
super(requestline);
}
@Override
public boolean expectContinue() {<FILL_FUNCTION_BODY>}
@Override
public void setEntity(HttpEntity entity) {
this.entity = entity;
}
@Override
public HttpEntity getEntity() {
return this.entity;
}
}
|
Header expect = getFirstHeader(HTTP.EXPECT_DIRECTIVE);
return expect != null
&& HTTP.EXPECT_CONTINUE.equalsIgnoreCase(expect.getValue());
| 226
| 49
| 275
|
<methods>public void <init>(java.lang.String, java.lang.String) ,public void <init>(java.lang.String, java.lang.String, ProtocolVersion) ,public void <init>(RequestLine) ,public ProtocolVersion getProtocolVersion() ,public RequestLine getRequestLine() ,public java.lang.String toString() <variables>private final non-sealed java.lang.String method,private RequestLine requestline,private final non-sealed java.lang.String uri
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/BasicExecutionAwareRequest.java
|
BasicExecutionAwareRequest
|
getRequestLine
|
class BasicExecutionAwareRequest extends AbstractExecutionAwareRequest {
private final String method;
private final String uri;
private RequestLine requestline;
/**
* Creates an instance of this class using the given request method
* and URI.
*
* @param method request method.
* @param uri request URI.
*/
public BasicExecutionAwareRequest(final String method, final String uri) {
super();
this.method = Args.notNull(method, "Method name");
this.uri = Args.notNull(uri, "Request URI");
this.requestline = null;
}
/**
* Creates an instance of this class using the given request method, URI
* and the HTTP protocol version.
*
* @param method request method.
* @param uri request URI.
* @param ver HTTP protocol version.
*/
public BasicExecutionAwareRequest(final String method, final String uri, final ProtocolVersion ver) {
this(new BasicRequestLine(method, uri, ver));
}
/**
* Creates an instance of this class using the given request line.
*
* @param requestline request line.
*/
public BasicExecutionAwareRequest(final RequestLine requestline) {
super();
this.requestline = Args.notNull(requestline, "Request line");
this.method = requestline.getMethod();
this.uri = requestline.getUri();
}
/**
* Returns the HTTP protocol version to be used for this request.
*
* @see BasicHttpRequest#BasicHttpRequest(String, String)
*/
public ProtocolVersion getProtocolVersion() {
return getRequestLine().getProtocolVersion();
}
/**
* Returns the request line of this request.
*
* @see BasicHttpRequest#BasicHttpRequest(String, String)
*/
public RequestLine getRequestLine() {<FILL_FUNCTION_BODY>}
@Override
public String toString() {
return this.method + " " + this.uri + " " + this.headergroup;
}
}
|
if (this.requestline == null) {
this.requestline = new BasicRequestLine(this.method, this.uri, HttpVersion.HTTP_1_1);
}
return this.requestline;
| 534
| 55
| 589
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/BdbCookieStore.java
|
RestrictedCollectionWrappedList
|
prepare
|
class RestrictedCollectionWrappedList<T> implements List<T> {
private Collection<T> wrapped;
public RestrictedCollectionWrappedList(Collection<T> wrapped) { this.wrapped = wrapped; }
@Override public int size() { return wrapped.size(); }
@Override public boolean isEmpty() { throw new RuntimeException("not implemented"); }
@Override public boolean contains(Object o) { throw new RuntimeException("not implemented"); }
@Override public Iterator<T> iterator() { return wrapped.iterator(); }
@Override public Object[] toArray() { return IteratorUtils.toArray(wrapped.iterator()); }
@SuppressWarnings({"hiding", "unchecked"}) @Override public <T> T[] toArray(T[] a) { return (T[]) IteratorUtils.toArray(iterator(), a.getClass()); }
@Override public boolean add(T e) { throw new RuntimeException("immutable list"); }
@Override public boolean remove(Object o) { throw new RuntimeException("immutable list"); }
@Override public boolean containsAll(Collection<?> c) { return wrapped.containsAll(c); }
@Override public boolean addAll(Collection<? extends T> c) { throw new RuntimeException("immutable list"); }
@Override public boolean addAll(int index, Collection<? extends T> c) { throw new RuntimeException("immutable list"); }
@Override public boolean removeAll(Collection<?> c) { throw new RuntimeException("immutable list"); }
@Override public boolean retainAll(Collection<?> c) { throw new RuntimeException("immutable list"); }
@Override public void clear() { throw new RuntimeException("immutable list"); }
@Override public T get(int index) { throw new RuntimeException("not implemented"); }
@Override public T set(int index, T element) { throw new RuntimeException("immutable list"); }
@Override public void add(int index, T element) { throw new RuntimeException("immutable list"); }
@Override public T remove(int index) { throw new RuntimeException("immutable list"); }
@Override public int indexOf(Object o) { throw new RuntimeException("not implemented"); }
@Override public int lastIndexOf(Object o) { throw new RuntimeException("not implemented"); }
@Override public ListIterator<T> listIterator() { throw new RuntimeException("not implemented"); }
@Override public ListIterator<T> listIterator(int index) { throw new RuntimeException("not implemented"); }
@Override public List<T> subList(int fromIndex, int toIndex) { throw new RuntimeException("not implemented"); }
}
protected BdbModule bdb;
@Autowired
public void setBdbModule(BdbModule bdb) {
this.bdb = bdb;
}
public static String COOKIEDB_NAME = "hc_httpclient_cookies";
private transient Database cookieDb;
private transient StoredSortedMap<byte[],Cookie> cookies;
public void prepare() {<FILL_FUNCTION_BODY>
|
try {
StoredClassCatalog classCatalog = bdb.getClassCatalog();
BdbModule.BdbConfig dbConfig = new BdbModule.BdbConfig();
dbConfig.setTransactional(false);
dbConfig.setAllowCreate(true);
dbConfig.setSortedDuplicates(false);
cookieDb = bdb.openDatabase(COOKIEDB_NAME, dbConfig,
isCheckpointRecovery);
cookies = new StoredSortedMap<byte[],Cookie>(cookieDb,
new ByteArrayBinding(),
new SerialBinding<Cookie>(classCatalog, Cookie.class),
true);
} catch (DatabaseException e) {
throw new RuntimeException(e);
}
| 741
| 189
| 930
|
<methods>public non-sealed void <init>() ,public void addCookie(Cookie) ,public abstract void clear() ,public CookieStore cookieStoreFor(org.archive.modules.CrawlURI) throws URIException,public abstract boolean expireCookie(Cookie, java.util.Date) ,public org.archive.spring.ConfigFile getCookiesLoadFile() ,public org.archive.spring.ConfigPath getCookiesSaveFile() ,public boolean isCookieCountMaxedForDomain(java.lang.String) ,public boolean isRunning() ,public void saveCookies() ,public void saveCookies(java.lang.String) ,public void setCookiesLoadFile(org.archive.spring.ConfigFile) ,public void setCookiesSaveFile(org.archive.spring.ConfigPath) ,public void start() ,public void stop() <variables>public static final int MAX_COOKIES_FOR_DOMAIN,protected static final Comparator<Cookie> cookieComparator,protected org.archive.spring.ConfigFile cookiesLoadFile,protected org.archive.spring.ConfigPath cookiesSaveFile,protected boolean isRunning,protected final java.util.logging.Logger logger
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/DefaultServerCache.java
|
DefaultServerCache
|
getHostFor
|
class DefaultServerCache extends ServerCache implements Closeable, Serializable {
private static final long serialVersionUID = 1L;
@SuppressWarnings("unused")
private static Logger logger =
Logger.getLogger(DefaultServerCache.class.getName());
/**
* hostname[:port] -> CrawlServer.
* Set in the initialization.
*/
protected ObjectIdentityCache<CrawlServer> servers = null;
/**
* hostname -> CrawlHost.
* Set in the initialization.
*/
protected ObjectIdentityCache<CrawlHost> hosts = null;
/**
* Constructor.
*/
public DefaultServerCache() {
this(
new ObjectIdentityMemCache<CrawlServer>(),
new ObjectIdentityMemCache<CrawlHost>());
}
public DefaultServerCache(ObjectIdentityCache<CrawlServer> servers,
ObjectIdentityCache<CrawlHost> hosts) {
this.servers = servers;
this.hosts = hosts;
}
/**
* Get the {@link CrawlServer} associated with <code>name</code>.
* @param serverKey Server name we're to return server for.
* @return CrawlServer instance that matches the passed server name.
*/
public CrawlServer getServerFor(final String serverKey) {
CrawlServer cserver = servers.getOrUse(
serverKey,
new Supplier<CrawlServer>() {
public CrawlServer get() {
String skey = new String(serverKey); // ensure private minimal key
return new CrawlServer(skey);
}});
return cserver;
}
/**
* Get the {@link CrawlHost} associated with <code>name</code>.
* @param hostname Host name we're to return Host for.
* @return CrawlHost instance that matches the passed Host name.
*/
public CrawlHost getHostFor(final String hostname) {<FILL_FUNCTION_BODY>}
/**
* @param serverKey Key to use doing lookup.
* @return True if a server instance exists.
*/
public boolean containsServer(String serverKey) {
return (CrawlServer) servers.get(serverKey) != null;
}
/**
* @param hostKey Key to use doing lookup.
* @return True if a host instance exists.
*/
public boolean containsHost(String hostKey) {
return (CrawlHost) hosts.get(hostKey) != null;
}
/**
* Called when shutting down the cache so we can do clean up.
*/
public void close() {
if (this.hosts != null) {
// If we're using a bdb bigmap, the call to clear will
// close down the bdb database.
this.hosts.close();
this.hosts = null;
}
if (this.servers != null) {
this.servers.close();
this.servers = null;
}
}
/**
* NOTE: Should not mutate the CrawlHost instance so retrieved; depending on
* the hostscache implementation, the change may not be reliably persistent.
*
* @see org.archive.modules.net.ServerCache#forAllHostsDo(org.apache.commons.collections.Closure)
*/
public void forAllHostsDo(Closure c) {
for(String host : hosts.keySet()) {
c.execute(hosts.get(host));
}
}
public Set<String> hostKeys() {
return hosts.keySet();
}
}
|
if (hostname == null || hostname.length() == 0) {
return null;
}
CrawlHost host = hosts.getOrUse(
hostname,
new Supplier<CrawlHost>() {
public CrawlHost get() {
String hkey = new String(hostname); // ensure private minimal key
return new CrawlHost(hkey);
}});
if (host != null && host.getIP() != null
&& "0.0.0.0".equals(host.getIP().getHostAddress())) {
throw new IllegalStateException("got suspicious value 0.0.0.0 for " + hostname);
}
return host;
| 967
| 178
| 1,145
|
<methods>public non-sealed void <init>() ,public abstract void forAllHostsDo(Closure) ,public abstract org.archive.modules.net.CrawlHost getHostFor(java.lang.String) ,public org.archive.modules.net.CrawlHost getHostFor(org.archive.net.UURI) ,public abstract org.archive.modules.net.CrawlServer getServerFor(java.lang.String) ,public org.archive.modules.net.CrawlServer getServerFor(org.archive.net.UURI) ,public abstract Set<java.lang.String> hostKeys() <variables>private static java.util.logging.Logger logger
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/FetchFTP.java
|
SocketFactoryWithTimeout
|
fetch
|
class SocketFactoryWithTimeout extends SocketFactory {
protected int connectTimeoutMs = 0;
public int getConnectTimeoutMs() {
return connectTimeoutMs;
}
public void setConnectTimeoutMs(int connectTimeoutMs) {
this.connectTimeoutMs = connectTimeoutMs;
}
public Socket createSocket() {
return new Socket();
}
public Socket createSocket(String host, int port) throws IOException,
UnknownHostException {
Socket sock = createSocket();
sock.connect(new InetSocketAddress(host, port), connectTimeoutMs);
return sock;
}
public Socket createSocket(InetAddress host, int port)
throws IOException {
Socket sock = createSocket();
sock.connect(new InetSocketAddress(host, port), connectTimeoutMs);
return sock;
}
public Socket createSocket(String host, int port,
InetAddress localHost, int localPort) throws IOException,
UnknownHostException {
Socket sock = createSocket();
sock.bind(new InetSocketAddress(localHost, localPort));
sock.connect(new InetSocketAddress(host, port), connectTimeoutMs);
return sock;
}
public Socket createSocket(InetAddress address, int port,
InetAddress localAddress, int localPort) throws IOException {
Socket sock = createSocket();
sock.bind(new InetSocketAddress(localAddress, localPort));
sock.connect(new InetSocketAddress(address, port), connectTimeoutMs);
return sock;
}
}
protected SocketFactoryWithTimeout socketFactory;
/**
* Fetches a document from an FTP server.
*
* @param curi the URI of the document to fetch
* @param client the FTPClient to use for the fetch
* @param recorder the recorder to preserve the document in
* @throws IOException if a network or protocol error occurs
* @throws InterruptedException if the thread is interrupted
*/
private void fetch(CrawlURI curi, ClientFTP client, Recorder recorder)
throws IOException, InterruptedException {<FILL_FUNCTION_BODY>
|
// Connect to the FTP server.
UURI uuri = curi.getUURI();
int port = uuri.getPort();
if (port == -1) {
port = 21;
}
if (socketFactory == null) {
socketFactory = new SocketFactoryWithTimeout();
}
socketFactory.setConnectTimeoutMs(getSoTimeoutMs());
client.setSocketFactory(socketFactory);
client.setConnectTimeout(getSoTimeoutMs());
client.setDefaultTimeout(getSoTimeoutMs());
client.setDataTimeout(getSoTimeoutMs());
client.connect(uuri.getHost(), port);
client.setSoTimeout(getSoTimeoutMs()); // must be after connect()
// Authenticate.
String[] auth = getAuth(curi);
client.login(auth[0], auth[1]);
// The given resource may or may not be a directory.
// To figure out which is which, execute a CD command to
// the UURI's path. If CD works, it's a directory.
boolean isDirectory = client.changeWorkingDirectory(uuri.getPath());
// Get a data socket. This will either be the result of a NLST
// command for a directory, or a RETR command for a file.
int command;
String path;
if (isDirectory) {
curi.getAnnotations().add("ftpDirectoryList");
command = FTPCommand.NLST;
client.setFileType(FTP.ASCII_FILE_TYPE);
path = ".";
} else {
command = FTPCommand.RETR;
client.setFileType(FTP.BINARY_FILE_TYPE);
path = uuri.getPath();
}
client.enterLocalPassiveMode();
Socket socket = null;
try {
socket = client.openDataConnection(command, path);
// if "227 Entering Passive Mode" these will get reset later
curi.setFetchStatus(client.getReplyCode());
curi.getData().put(A_FTP_FETCH_STATUS, client.getReplyStrings()[0]);
} catch (IOException e) {
// try it again, see AbstractFrontier.needsRetrying()
curi.setFetchStatus(FetchStatusCodes.S_CONNECT_LOST);
}
// Save the streams in the CURI, where downstream processors
// expect to find them.
if (socket != null) {
if (socket.getSoTimeout() != getSoTimeoutMs()) {
logger.warning("data socket timeout " + socket.getSoTimeout() + "ms is not expected value " + getSoTimeoutMs() + "ms");
}
// Shall we get a digest on the content downloaded?
boolean digestContent = getDigestContent();
String algorithm = null;
if (digestContent) {
algorithm = getDigestAlgorithm();
recorder.getRecordedInput().setDigest(algorithm);
recorder.getRecordedInput().startDigest();
} else {
// clear
recorder.getRecordedInput().setDigest((MessageDigest)null);
}
curi.setServerIP(socket.getInetAddress().getHostAddress());
try {
saveToRecorder(curi, socket, recorder);
} finally {
recorder.close();
client.closeDataConnection(); // does socket.close()
curi.setContentSize(recorder.getRecordedInput().getSize());
// "226 Transfer complete."
client.getReply();
curi.setFetchStatus(client.getReplyCode());
curi.getData().put(A_FTP_FETCH_STATUS, client.getReplyStrings()[0]);
if (isDirectory) {
curi.setContentType("text/plain");
} else {
curi.setContentType("application/octet-stream");
}
if (logger.isLoggable(Level.FINE)) {
logger.fine("read " + recorder.getRecordedInput().getSize()
+ " bytes from ftp data socket");
}
if (digestContent) {
curi.setContentDigest(algorithm,
recorder.getRecordedInput().getDigestValue());
}
}
if (isDirectory) {
extract(curi, recorder);
}
} else {
// no data - without this, content size is -1
curi.setContentSize(0);
}
addParent(curi);
| 569
| 1,181
| 1,750
|
<methods>public non-sealed void <init>() ,public void doCheckpoint(org.archive.checkpointing.Checkpoint) throws java.io.IOException,public void finishCheckpoint(org.archive.checkpointing.Checkpoint) ,public static java.lang.String flattenVia(org.archive.modules.CrawlURI) ,public java.lang.String getBeanName() ,public boolean getEnabled() ,public org.archive.spring.KeyedProperties getKeyedProperties() ,public static long getRecordedSize(org.archive.modules.CrawlURI) ,public org.archive.modules.deciderules.DecideRule getShouldProcessRule() ,public long getURICount() ,public static boolean hasHttpAuthenticationCredential(org.archive.modules.CrawlURI) ,public boolean isRunning() ,public static boolean isSuccess(org.archive.modules.CrawlURI) ,public org.archive.modules.ProcessResult process(org.archive.modules.CrawlURI) throws java.lang.InterruptedException,public java.lang.String report() ,public void setBeanName(java.lang.String) ,public void setEnabled(boolean) ,public void setRecoveryCheckpoint(org.archive.checkpointing.Checkpoint) ,public void setShouldProcessRule(org.archive.modules.deciderules.DecideRule) ,public void start() ,public void startCheckpoint(org.archive.checkpointing.Checkpoint) ,public void stop() <variables>protected java.lang.String beanName,protected boolean isRunning,protected org.archive.spring.KeyedProperties kp,protected org.archive.checkpointing.Checkpoint recoveryCheckpoint,protected java.util.concurrent.atomic.AtomicLong uriCount
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/FetchStats.java
|
FetchStats
|
tally
|
class FetchStats extends CrawledBytesHistotable implements Serializable, FetchStatusCodes, Reporter {
private static final long serialVersionUID = 2l;
public enum Stage {SCHEDULED, RELOCATED, RETRIED, SUCCEEDED, DISREGARDED, FAILED};
public static final String TOTAL_SCHEDULED = "totalScheduled"; // anything initially scheduled
// (totalScheduled - (fetchSuccesses + fetchFailures)
public static final String FETCH_SUCCESSES = "fetchSuccesses"; // anything disposed-success
// (HTTP 2XX response codes, other non-errors)
public static final String FETCH_FAILURES = "fetchFailures"; // anything disposed-failure
public static final String FETCH_DISREGARDS = "fetchDisregards";// anything disposed-disregard
public static final String FETCH_RESPONSES = "fetchResponses"; // all positive responses (incl. 3XX, 4XX, 5XX)
public static final String ROBOTS_DENIALS = "robotsDenials"; // all robots-precluded failures
public static final String SUCCESS_BYTES = "successBytes"; // total size of all success responses
public static final String TOTAL_BYTES = "totalBytes"; // total size of all responses
public static final String FETCH_NONRESPONSES = "fetchNonResponses"; // processing attempts resulting in no response
// (both failures and temp deferrals)
public interface HasFetchStats {
public FetchStats getSubstats();
}
public interface CollectsFetchStats {
public void tally(CrawlURI curi, Stage stage);
}
protected long lastSuccessTime;
public synchronized void tally(CrawlURI curi, Stage stage) {<FILL_FUNCTION_BODY>}
public long getFetchSuccesses() {
return get(FETCH_SUCCESSES);
}
public long getFetchResponses() {
return get(FETCH_RESPONSES);
}
public long getSuccessBytes() {
return get(SUCCESS_BYTES);
}
public long getTotalBytes() {
return get(TOTAL_BYTES);
}
public long getFetchNonResponses() {
return get(FETCH_NONRESPONSES);
}
public long getTotalScheduled() {
return get(TOTAL_SCHEDULED);
}
public long getFetchDisregards() {
return get(FETCH_DISREGARDS);
}
public long getRobotsDenials() {
return get(ROBOTS_DENIALS);
}
public long getRemaining() {
return get(TOTAL_SCHEDULED) - (get(FETCH_SUCCESSES) + get(FETCH_FAILURES)+ get(FETCH_DISREGARDS));
}
public long getRecordedFinishes() {
return get(FETCH_SUCCESSES) + get(FETCH_FAILURES);
}
public long getNovelBytes() {
return get(NOVEL);
}
public long getNovelUrls() {
return get(NOVELCOUNT);
}
public long getNotModifiedBytes() {
return get(NOTMODIFIED);
}
public long getNotModifiedUrls() {
return get(NOTMODIFIEDCOUNT);
}
public long getDupByHashBytes() {
return get(DUPLICATE);
}
public long getDupByHashUrls() {
return get(DUPLICATECOUNT);
}
public long getOtherDupBytes() {
return get(OTHERDUPLICATE);
}
public long getOtherDupUrls() {
return get(OTHERDUPLICATECOUNT);
}
/* (non-Javadoc)
* @see org.archive.util.Reporter#reportTo(java.io.PrintWriter)
*/
@Override // Reporter
public void reportTo(PrintWriter writer) {
writer.println(shortReportLegend());
shortReportLineTo(writer);
}
@Override
public String shortReportLegend() {
return "totalScheduled fetchSuccesses fetchFailures fetchDisregards " +
"fetchResponses robotsDenials successBytes totalBytes " +
"fetchNonResponses lastSuccessTime";
}
public String shortReportLine() {
return ReportUtils.shortReportLine(this);
}
@Override
public void shortReportLineTo(PrintWriter writer) {
writer.print(get(TOTAL_SCHEDULED));
writer.print(" ");
writer.print(get(FETCH_SUCCESSES));
writer.print(" ");
writer.print(get(FETCH_FAILURES));
writer.print(" ");
writer.print(get(FETCH_DISREGARDS));
writer.print(" ");
writer.print(get(FETCH_RESPONSES));
writer.print(" ");
writer.print(get(ROBOTS_DENIALS));
writer.print(" ");
writer.print(get(SUCCESS_BYTES));
writer.print(" ");
writer.print(get(TOTAL_BYTES));
writer.print(" ");
writer.print(get(FETCH_NONRESPONSES));
writer.print(" ");
writer.print(ArchiveUtils.getLog17Date(lastSuccessTime));
}
@Override
public Map<String, Object> shortReportMap() {
Map<String,Object> map = new LinkedHashMap<String, Object>(this);
map.put("lastSuccessTime",lastSuccessTime);
return map;
}
public long getLastSuccessTime() {
return lastSuccessTime;
}
}
|
switch(stage) {
case SCHEDULED:
tally(TOTAL_SCHEDULED, 1);
break;
case RETRIED:
if(curi.getFetchStatus()<=0) {
tally(FETCH_NONRESPONSES, 1);
}
break;
case SUCCEEDED:
tally(FETCH_SUCCESSES, 1);
tally(FETCH_RESPONSES, 1);
tally(TOTAL_BYTES, curi.getContentSize());
tally(SUCCESS_BYTES, curi.getContentSize());
lastSuccessTime = curi.getFetchCompletedTime();
break;
case DISREGARDED:
tally(FETCH_DISREGARDS, 1);
if(curi.getFetchStatus()==S_ROBOTS_PRECLUDED) {
tally(ROBOTS_DENIALS, 1);
}
break;
case FAILED:
if(curi.getFetchStatus()<=0) {
tally(FETCH_NONRESPONSES, 1);
} else {
tally(FETCH_RESPONSES, 1);
tally(TOTAL_BYTES, curi.getContentSize());
}
tally(FETCH_FAILURES, 1);
break;
default:
break;
}
if (curi.getFetchStatus() > 0) {
this.accumulate(curi);
}
| 1,529
| 421
| 1,950
|
<methods>public void <init>() ,public void accumulate(org.archive.modules.CrawlURI) ,public long getTotalBytes() ,public long getTotalUrls() ,public java.lang.String summary() <variables>public static final java.lang.String DUPLICATE,public static final java.lang.String DUPLICATECOUNT,public static final java.lang.String NOTMODIFIED,public static final java.lang.String NOTMODIFIEDCOUNT,public static final java.lang.String NOVEL,public static final java.lang.String NOVELCOUNT,public static final java.lang.String OTHERDUPLICATE,public static final java.lang.String OTHERDUPLICATECOUNT,public static final java.lang.String WARC_NOVEL_CONTENT_BYTES,public static final java.lang.String WARC_NOVEL_URLS,private static final long serialVersionUID
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/SimpleCookieStore.java
|
SimpleCookieStore
|
expireCookie
|
class SimpleCookieStore extends AbstractCookieStore implements CookieStore {
protected BasicCookieStore cookies;
@Override
protected void prepare() {
cookies = new BasicCookieStore();
}
@Override
public void startCheckpoint(Checkpoint checkpointInProgress) {
throw new RuntimeException("not implemented");
}
@Override
public void doCheckpoint(Checkpoint checkpointInProgress)
throws IOException {
throw new RuntimeException("not implemented");
}
@Override
public void finishCheckpoint(Checkpoint checkpointInProgress) {
throw new RuntimeException("not implemented");
}
@Override
@Autowired(required=false)
public void setRecoveryCheckpoint(Checkpoint recoveryCheckpoint) {
throw new RuntimeException("not implemented");
}
@Override
public List<Cookie> getCookies() {
return cookies.getCookies();
}
@Override
public boolean clearExpired(Date date) {
return cookies.clearExpired(date);
}
@Override
public CookieStore cookieStoreFor(String host) {
return this;
}
@Override
public void addCookieImpl(Cookie cookie) {
cookies.addCookie(cookie);
}
@Override
public boolean expireCookie(Cookie cookie, Date date) {<FILL_FUNCTION_BODY>}
@Override
public void clear() {
cookies.clear();
}
}
|
if( cookie.isExpired(date)) {
cookies.getCookies().remove(cookie);
return true;
} else {
return false;
}
| 380
| 47
| 427
|
<methods>public non-sealed void <init>() ,public void addCookie(Cookie) ,public abstract void clear() ,public CookieStore cookieStoreFor(org.archive.modules.CrawlURI) throws URIException,public abstract boolean expireCookie(Cookie, java.util.Date) ,public org.archive.spring.ConfigFile getCookiesLoadFile() ,public org.archive.spring.ConfigPath getCookiesSaveFile() ,public boolean isCookieCountMaxedForDomain(java.lang.String) ,public boolean isRunning() ,public void saveCookies() ,public void saveCookies(java.lang.String) ,public void setCookiesLoadFile(org.archive.spring.ConfigFile) ,public void setCookiesSaveFile(org.archive.spring.ConfigPath) ,public void start() ,public void stop() <variables>public static final int MAX_COOKIES_FOR_DOMAIN,protected static final Comparator<Cookie> cookieComparator,protected org.archive.spring.ConfigFile cookiesLoadFile,protected org.archive.spring.ConfigPath cookiesSaveFile,protected boolean isRunning,protected final java.util.logging.Logger logger
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/SocksSSLSocketFactory.java
|
SocksSSLSocketFactory
|
createSocket
|
class SocksSSLSocketFactory extends SSLConnectionSocketFactory {
public SocksSSLSocketFactory(final SSLContext sslContext) {
// TODO: site certificate ignore needs implementing here if Heritrix supports it
super(sslContext);
}
@Override
public Socket createSocket(final HttpContext context) throws IOException {<FILL_FUNCTION_BODY>}
}
|
InetSocketAddress socksAddress = (InetSocketAddress) context.getAttribute("socks.address");
Proxy proxy = new Proxy(Proxy.Type.SOCKS, socksAddress);
return new Socket(proxy);
| 94
| 60
| 154
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/fetcher/SocksSocketFactory.java
|
SocksSocketFactory
|
createSocket
|
class SocksSocketFactory extends PlainConnectionSocketFactory {
@Override
public Socket createSocket(final HttpContext context) throws IOException {<FILL_FUNCTION_BODY>}
}
|
InetSocketAddress socksAddress = (InetSocketAddress) context.getAttribute("socks.address");
Proxy proxy = new Proxy(Proxy.Type.SOCKS, socksAddress);
return new Socket(proxy);
| 47
| 60
| 107
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/forms/ExtractorHTMLForms.java
|
ExtractorHTMLForms
|
findAttributeValueGroup
|
class ExtractorHTMLForms extends Extractor {
@SuppressWarnings("unused")
private static final long serialVersionUID = 2L;
public static final String A_HTML_FORM_OBJECTS = "html-form-objects";
private static Logger logger =
Logger.getLogger(ExtractorHTMLForms.class.getName());
{
setExtractAllForms(false);
}
public boolean getExtractAllForms() {
return (Boolean) kp.get("extractAllForms");
}
/**
* If true, report all FORMs. If false, report only those that
* appear to be a login-enabling FORM.
* Default is false.
*/
public void setExtractAllForms(boolean extractAllForms) {
kp.put("extractAllForms",extractAllForms);
}
public ExtractorHTMLForms() {
}
protected boolean shouldProcess(CrawlURI uri) {
return uri.containsDataKey(ExtractorHTML.A_FORM_OFFSETS);
}
public void extract(CrawlURI curi) {
try {
ReplayCharSequence cs = curi.getRecorder().getContentReplayCharSequence();
analyze(curi, cs);
} catch (IOException e) {
curi.getNonFatalFailures().add(e);
logger.log(Level.WARNING,"Failed get of replay char sequence in " +
Thread.currentThread().getName(), e);
}
}
/**
* Run analysis: find form METHOD, ACTION, and all INPUT names/values
*
* Log as configured.
*
* @param curi CrawlURI we're processing.
* @param cs Sequence from underlying ReplayCharSequence. This
* is TRANSIENT data. Make a copy if you want the data to live outside
* of this extractors' lifetime.
*/
protected void analyze(CrawlURI curi, CharSequence cs) {
for (Object offset : curi.getDataList(ExtractorHTML.A_FORM_OFFSETS)) {
int offsetInt = (Integer) offset;
CharSequence relevantSequence = cs.subSequence(offsetInt, cs.length());
String method = findAttributeValueGroup("(?i)^[^>]*\\smethod\\s*=\\s*([^>\\s]{1,50000})[^>]*>",1,relevantSequence);
String action = findAttributeValueGroup("(?i)^[^>]*\\saction\\s*=\\s*([^>\\s]{1,50000})[^>]*>",1,relevantSequence);
String enctype = findAttributeValueGroup("(?i)^[^>]*\\senctype\\s*=\\s*([^>\\s]{1,50000})[^>]*>",1,relevantSequence);
HTMLForm form = new HTMLForm();
form.setMethod(method);
form.setAction(action);
form.setEnctype(enctype);
for(CharSequence input : findGroups("(?i)(<input\\s[^>]*>)|(</?form>)",1,relevantSequence)) {
String type = findAttributeValueGroup("(?i)^[^>]*\\stype\\s*=\\s*([^>\\s]{1,50000})[^>]*>",1,input);
String name = findAttributeValueGroup("(?i)^[^>]*\\sname\\s*=\\s*([^>\\s]{1,50000})[^>]*>",1,input);
String value = findAttributeValueGroup("(?i)^[^>]*\\svalue\\s*=\\s*([^>\\s]{1,50000})[^>]*>",1,input);
Matcher m = TextUtils.getMatcher("(?i)^[^>]*\\schecked\\s*[^>]*>", input);
boolean checked = false;
try {
checked = m.find();
} finally {
TextUtils.recycleMatcher(m);
}
form.addField(type, name, value, checked);
}
if (form.seemsLoginForm() || getExtractAllForms()) {
curi.getDataList(A_HTML_FORM_OBJECTS).add(form);
curi.getAnnotations().add(form.asAnnotation());
}
}
}
protected List<CharSequence> findGroups(String pattern, int groupNumber, CharSequence cs) {
ArrayList<CharSequence> groups = new ArrayList<CharSequence>();
Matcher m = TextUtils.getMatcher(pattern, cs);
try {
while(m.find()) {
if(m.group(groupNumber)!=null) {
groups.add(cs.subSequence(m.start(groupNumber),m.end(groupNumber)));
} else {
// group not found: end find condition
break;
}
}
return groups;
} finally {
TextUtils.recycleMatcher(m);
}
}
protected String findAttributeValueGroup(String pattern, int groupNumber, CharSequence cs) {<FILL_FUNCTION_BODY>}
}
|
Matcher m = TextUtils.getMatcher(pattern, cs);
try {
if(m.find()) {
String value = m.group(groupNumber);
/*
* In a case like this <input name="foo"/> the group here will
* be "foo"/ ... it's difficult to adjust the regex to avoid
* slurping that trailing slash, so handle it here
*/
value = StringUtils.removeEnd(value, "'/");
value = StringUtils.removeEnd(value, "\"/");
value = StringUtils.strip(value, "\'\""); // strip quotes if present
return value;
} else {
return null;
}
} finally {
TextUtils.recycleMatcher(m);
}
| 1,357
| 195
| 1,552
|
<methods>public non-sealed void <init>() ,public static void add(org.archive.modules.CrawlURI, int, java.lang.String, org.archive.modules.extractor.LinkContext, org.archive.modules.extractor.Hop) throws URIException,public static org.archive.modules.CrawlURI addRelativeToBase(org.archive.modules.CrawlURI, int, java.lang.CharSequence, org.archive.modules.extractor.LinkContext, org.archive.modules.extractor.Hop) throws URIException,public static org.archive.modules.CrawlURI addRelativeToVia(org.archive.modules.CrawlURI, int, java.lang.String, org.archive.modules.extractor.LinkContext, org.archive.modules.extractor.Hop) throws URIException,public org.archive.modules.extractor.ExtractorParameters getExtractorParameters() ,public org.archive.modules.extractor.UriErrorLoggerModule getLoggerModule() ,public void logUriError(URIException, org.archive.net.UURI, java.lang.CharSequence) ,public java.lang.String report() ,public void setExtractorParameters(org.archive.modules.extractor.ExtractorParameters) ,public void setLoggerModule(org.archive.modules.extractor.UriErrorLoggerModule) <variables>public static final org.archive.modules.extractor.ExtractorParameters DEFAULT_PARAMETERS,private static final java.util.logging.Logger LOGGER,protected transient org.archive.modules.extractor.ExtractorParameters extractorParameters,private static final java.util.logging.Logger logger,protected transient org.archive.modules.extractor.UriErrorLoggerModule loggerModule,protected java.util.concurrent.atomic.AtomicLong numberOfLinksExtracted
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/forms/HTMLForm.java
|
FormInput
|
presumedUsernameInput
|
class FormInput {
public String type;
public String name;
public String value;
public boolean checked = false;
@Override
public String toString() {
String str = "input[@type='" + type+"'][@name='" + name + "'][@value='" + value + "']";
if (checked) {
str = str + "[@checked]";
}
return str;
}
}
protected String method;
protected String action;
protected String enctype;
protected List<FormInput> allInputs = new ArrayList<FormInput>();
protected List<FormInput> candidateUsernameInputs = new ArrayList<FormInput>();
protected List<FormInput> candidatePasswordInputs = new ArrayList<FormInput>();
/**
* Add a discovered INPUT, tracking it as potential
* username/password receiver.
* @param type
* @param name
* @param value
* @param checked true if "checked" attribute is present (for radio buttons and checkboxes)
*/
public void addField(String type, String name, String value, boolean checked) {
FormInput input = new FormInput();
input.type = type;
if (isMultipleFormSubmitInputs(type)) {
return;
}
// default input type is text per html standard
if (input.type == null) {
input.type = "text";
}
input.name = name;
input.value = value;
input.checked = checked;
allInputs.add(input);
if("text".equalsIgnoreCase(input.type) || "email".equalsIgnoreCase(input.type)) {
candidateUsernameInputs.add(input);
} else if ("password".equalsIgnoreCase(type)) {
candidatePasswordInputs.add(input);
}
}
public boolean isMultipleFormSubmitInputs(String type) {
if (type != null && !type.toLowerCase().equals("submit"))
return false;
for (FormInput input : allInputs) {
if (input.type.toLowerCase().equals("submit")) {
return true;
}
}
return false;
}
/**
* Add a discovered INPUT, tracking it as potential
* username/password receiver.
* @param type
* @param name
* @param value
*/
public void addField(String type, String name, String value) {
addField(type, name, value, false);
}
public void setMethod(String method) {
this.method = method;
}
public String getAction() {
return action;
}
public void setAction(String action) {
this.action = action;
}
public String getEnctype() {
return enctype;
}
public void setEnctype(String enctype) {
this.enctype = enctype;
}
/**
* For now, we consider a POST form with only 1 password
* field and 1 potential username field (type text or email)
* to be a likely login form.
*
* @return boolean likely login form
*/
public boolean seemsLoginForm() {
return "post".equalsIgnoreCase(method)
&& candidatePasswordInputs.size() == 1
&& presumedUsernameInput() != null;
}
protected FormInput presumedUsernameInput() {<FILL_FUNCTION_BODY>
|
if (candidateUsernameInputs.size() < 1) {
return null;
} else if (candidateUsernameInputs.size() == 1) {
return candidateUsernameInputs.get(0);
} else {
// more than one candidate; if there is exactly one whose name
// contains the string "username", choose that one
FormInput choice = null;
for (FormInput input: candidateUsernameInputs) {
if (input.name != null && input.name.toLowerCase().indexOf("username") != -1) {
if (choice == null) {
choice = input;
} else {
return null;
}
}
}
return choice;
}
| 902
| 183
| 1,085
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/net/BdbServerCache.java
|
BdbServerCache
|
start
|
class BdbServerCache extends DefaultServerCache
implements Lifecycle, Checkpointable {
private static final long serialVersionUID = 1L;
protected BdbModule bdb;
@Autowired
public void setBdbModule(BdbModule bdb) {
this.bdb = bdb;
}
public BdbServerCache() {
}
public void start() {<FILL_FUNCTION_BODY>}
protected boolean isRunning = false;
protected boolean isCheckpointRecovery = false;
public boolean isRunning() {
return isRunning;
}
public void stop() {
isRunning = false;
// TODO: release bigmaps?
}
@Override
public void startCheckpoint(Checkpoint checkpointInProgress) {
}
@Override
public void doCheckpoint(Checkpoint checkpointInProgress)
throws IOException {
// handled in BdbModule
}
@Override
public void finishCheckpoint(Checkpoint checkpointInProgress) {
}
@Override
@Autowired(required = false)
public void setRecoveryCheckpoint(Checkpoint recoveryCheckpoint) {
// just remember that we are doing checkpoint-recovery;
// actual state recovery happens via BdbModule
isCheckpointRecovery = true;
}
}
|
if(isRunning()) {
return;
}
try {
this.servers = bdb.getObjectCache("servers", isCheckpointRecovery, CrawlServer.class, CrawlServer.class);
this.hosts = bdb.getObjectCache("hosts", isCheckpointRecovery, CrawlHost.class, CrawlHost.class);
} catch (DatabaseException e) {
throw new IllegalStateException(e);
}
isRunning = true;
| 348
| 124
| 472
|
<methods>public void <init>() ,public void <init>(ObjectIdentityCache<org.archive.modules.net.CrawlServer>, ObjectIdentityCache<org.archive.modules.net.CrawlHost>) ,public void close() ,public boolean containsHost(java.lang.String) ,public boolean containsServer(java.lang.String) ,public void forAllHostsDo(Closure) ,public org.archive.modules.net.CrawlHost getHostFor(java.lang.String) ,public org.archive.modules.net.CrawlServer getServerFor(java.lang.String) ,public Set<java.lang.String> hostKeys() <variables>protected ObjectIdentityCache<org.archive.modules.net.CrawlHost> hosts,private static java.util.logging.Logger logger,private static final long serialVersionUID,protected ObjectIdentityCache<org.archive.modules.net.CrawlServer> servers
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/net/DefaultTempDirProvider.java
|
DefaultTempDirProvider
|
makeTempDir
|
class DefaultTempDirProvider implements TempDirProvider {
private static final long serialVersionUID = 1L;
final private static File TEMP_DIR = makeTempDir();
protected static File makeTempDir() {<FILL_FUNCTION_BODY>}
public File getScratchDisk() {
return TEMP_DIR;
}
}
|
File f;
try {
f = File.createTempFile("xxx", null);
File r = f.getParentFile();
f.delete();
return r;
} catch (IOException e) {
return new File("temp");
}
| 95
| 69
| 164
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/net/FirstNamedRobotsPolicy.java
|
FirstNamedRobotsPolicy
|
allows
|
class FirstNamedRobotsPolicy extends RobotsPolicy {
/** list of user-agents to try; if any are allowed, a URI will be crawled */
protected List<String> candidateUserAgents = new LinkedList<String>();
public List<String> getCandidateUserAgents() {
return candidateUserAgents;
}
public void setCandidateUserAgents(List<String> candidateUserAgents) {
this.candidateUserAgents = candidateUserAgents;
}
/** whether to adopt the user-agent that is allowed for the fetch */
protected boolean shouldMasquerade = true;
public boolean getShouldMasquerade() {
return shouldMasquerade;
}
public void setShouldMasquerade(boolean shouldMasquerade) {
this.shouldMasquerade = shouldMasquerade;
}
/** whether to obey the 'nofollow' directive in an HTML META ROBOTS element */
protected boolean obeyMetaRobotsNofollow = true;
public boolean isObeyMetaRobotsNofollow() {
return obeyMetaRobotsNofollow;
}
public void setObeyMetaRobotsNofollow(boolean obeyMetaRobotsNofollow) {
this.obeyMetaRobotsNofollow = obeyMetaRobotsNofollow;
}
@Override
public boolean allows(String userAgent, CrawlURI curi, Robotstxt robotstxt) {<FILL_FUNCTION_BODY>}
@Override
public boolean obeyMetaRobotsNofollow() {
return obeyMetaRobotsNofollow;
}
}
|
RobotsDirectives directives = robotstxt.getDirectivesFor(userAgent, false);
if(directives!=null) {
return directives.allows(getPathQuery(curi));
}
for(String candidate : candidateUserAgents) {
directives = robotstxt.getDirectivesFor(candidate, false);
if(directives!=null) {
if(shouldMasquerade) {
curi.setUserAgent(candidate);
}
return directives.allows(getPathQuery(curi));
}
}
return robotstxt.getDirectivesFor(userAgent).allows(getPathQuery(curi));
| 423
| 174
| 597
|
<methods>public non-sealed void <init>() ,public abstract boolean allows(java.lang.String, org.archive.modules.CrawlURI, org.archive.modules.net.Robotstxt) ,public java.lang.String getPathQuery(org.archive.modules.CrawlURI) ,public abstract boolean obeyMetaRobotsNofollow() <variables>public static Map<java.lang.String,org.archive.modules.net.RobotsPolicy> STANDARD_POLICIES
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/net/MostFavoredRobotsPolicy.java
|
MostFavoredRobotsPolicy
|
allows
|
class MostFavoredRobotsPolicy extends RobotsPolicy {
/** list of user-agents to try; if any are allowed, a URI will be crawled */
protected List<String> candidateUserAgents = new LinkedList<String>();
public List<String> getCandidateUserAgents() {
return candidateUserAgents;
}
public void setCandidateUserAgents(List<String> candidateUserAgents) {
this.candidateUserAgents = candidateUserAgents;
}
/** whether to adopt the user-agent that is allowed for the fetch */
protected boolean shouldMasquerade = true;
public boolean getShouldMasquerade() {
return shouldMasquerade;
}
public void setShouldMasquerade(boolean shouldMasquerade) {
this.shouldMasquerade = shouldMasquerade;
}
/** whether to obey the 'nofollow' directive in an HTML META ROBOTS element */
protected boolean obeyMetaRobotsNofollow = true;
public boolean isObeyMetaRobotsNofollow() {
return obeyMetaRobotsNofollow;
}
public void setObeyMetaRobotsNofollow(boolean obeyMetaRobotsNofollow) {
this.obeyMetaRobotsNofollow = obeyMetaRobotsNofollow;
}
@Override
public boolean allows(String userAgent, CrawlURI curi, Robotstxt robotstxt) {<FILL_FUNCTION_BODY>}
@Override
public boolean obeyMetaRobotsNofollow() {
return obeyMetaRobotsNofollow;
}
}
|
if (robotstxt.getDirectivesFor(userAgent).allows(getPathQuery(curi))) {
return true;
}
for(String candidate : candidateUserAgents) {
if (robotstxt.getDirectivesFor(candidate).allows(getPathQuery(curi))) {
if(shouldMasquerade) {
curi.setUserAgent(candidate);
}
return true;
}
}
// TODO: expand to offer option of following other rules in site's
// robots.txt, even if they don't match any of candidate set.
// TBD: which user-agent to use in that case.
return false;
| 425
| 175
| 600
|
<methods>public non-sealed void <init>() ,public abstract boolean allows(java.lang.String, org.archive.modules.CrawlURI, org.archive.modules.net.Robotstxt) ,public java.lang.String getPathQuery(org.archive.modules.CrawlURI) ,public abstract boolean obeyMetaRobotsNofollow() <variables>public static Map<java.lang.String,org.archive.modules.net.RobotsPolicy> STANDARD_POLICIES
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/net/RobotsDirectives.java
|
RobotsDirectives
|
longestPrefixLength
|
class RobotsDirectives implements Serializable {
private static final long serialVersionUID = 5386542759286155383L;
protected ConcurrentSkipListSet<String> disallows = new ConcurrentSkipListSet<String>();
protected ConcurrentSkipListSet<String> allows = new ConcurrentSkipListSet<String>();
protected float crawlDelay = -1;
public transient boolean hasDirectives = false;
public boolean allows(String path) {
return !(longestPrefixLength(disallows, path) > longestPrefixLength(allows, path));
}
/**
* @param prefixSet
* @param str
* @return length of longest entry in {@code prefixSet} that prefixes {@code str}, or zero
* if no entry prefixes {@code str}
*/
protected int longestPrefixLength(ConcurrentSkipListSet<String> prefixSet,
String str) {<FILL_FUNCTION_BODY>}
public void addDisallow(String path) {
hasDirectives = true;
if(path.length()==0) {
// ignore empty-string disallows
// (they really mean allow, when alone)
return;
}
disallows.add(path);
}
public void addAllow(String path) {
hasDirectives = true;
allows.add(path);
}
public void setCrawlDelay(float i) {
hasDirectives = true;
crawlDelay=i;
}
public float getCrawlDelay() {
return crawlDelay;
}
// Kryo support
public static void autoregisterTo(AutoKryo kryo) {
kryo.register(RobotsDirectives.class, new ReferenceFieldSerializer(kryo, RobotsDirectives.class));
kryo.autoregister(ConcurrentSkipListSet.class); // now used instead of PrefixSet in RobotsDirectives
kryo.setRegistrationOptional(true);
}
}
|
String possiblePrefix = prefixSet.floor(str);
if (possiblePrefix != null && str.startsWith(possiblePrefix)) {
return possiblePrefix.length();
} else {
return 0;
}
| 552
| 58
| 610
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/net/RobotsPolicy.java
|
RobotsPolicy
|
getPathQuery
|
class RobotsPolicy {
public static Map<String, RobotsPolicy> STANDARD_POLICIES = new HashMap<String,RobotsPolicy>();
static {
STANDARD_POLICIES.put("obey", ObeyRobotsPolicy.INSTANCE);
// the obey policy has also historically been called 'classic'
STANDARD_POLICIES.put("classic", ObeyRobotsPolicy.INSTANCE);
STANDARD_POLICIES.put("ignore", IgnoreRobotsPolicy.INSTANCE);
STANDARD_POLICIES.put("robotsTxtOnly", RobotsTxtOnlyPolicy.INSTANCE);
}
public abstract boolean allows(String userAgent, CrawlURI curi, Robotstxt robotstxt);
public abstract boolean obeyMetaRobotsNofollow();
public String getPathQuery(CrawlURI curi) {<FILL_FUNCTION_BODY>}
}
|
try {
return curi.getUURI().getPathQuery();
} catch (URIException e) {
// unlikely
return "";
}
| 239
| 42
| 281
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/net/ServerCache.java
|
ServerCache
|
getServerFor
|
class ServerCache {
public abstract CrawlHost getHostFor(String host);
public abstract CrawlServer getServerFor(String serverKey);
/**
* Utility for performing an action on every CrawlHost.
*
* @param action 1-argument Closure to apply to each CrawlHost
*/
public abstract void forAllHostsDo(Closure action);
private static Logger logger =
Logger.getLogger(ServerCache.class.getName());
/**
* Get the {@link CrawlHost} associated with <code>curi</code>.
* @param uuri CandidateURI we're to return Host for.
* @return CandidateURI instance that matches the passed Host name.
*/
public CrawlHost getHostFor(UURI uuri) {
CrawlHost h = null;
try {
if (uuri.getScheme().equals("dns")) {
h = getHostFor("dns:");
} else if (uuri.getScheme().equals("whois")) {
h = getHostFor("whois:");
} else {
h = getHostFor(uuri.getReferencedHost());
}
} catch (URIException e) {
logger.log(Level.SEVERE, uuri.toString(), e);
}
return h;
}
/**
* Get the {@link CrawlServer} associated with <code>curi</code>.
* @param uuri CandidateURI we're to get server from.
* @return CrawlServer instance that matches the passed CandidateURI.
*/
public CrawlServer getServerFor(UURI uuri) {<FILL_FUNCTION_BODY>}
abstract public Set<String> hostKeys();
}
|
CrawlServer cs = null;
try {
String key = CrawlServer.getServerKey(uuri);
// TODOSOMEDAY: make this robust against those rare cases
// where authority is not a hostname.
if (key != null) {
cs = getServerFor(key);
}
} catch (URIException e) {
logger.log(
Level.FINE, "No server key obtainable: "+uuri.toString(), e);
} catch (NullPointerException npe) {
logger.log(
Level.FINE, "No server key obtainable: "+uuri.toString(), npe);
}
return cs;
| 462
| 177
| 639
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/recrawl/AbstractContentDigestHistory.java
|
AbstractContentDigestHistory
|
persistKeyFor
|
class AbstractContentDigestHistory {
/**
* Looks up the history by key {@code persistKeyFor(curi)} and loads it into
* {@code curi.getContentDigestHistory()}.
*
* @param curi
*/
public abstract void load(CrawlURI curi);
/**
* Stores {@code curi.getContentDigestHistory()} for the key
* {@code persistKeyFor(curi)}.
*
* @param curi
*/
public abstract void store(CrawlURI curi);
/**
*
* @param curi
* @return {@code curi.getContentDigestSchemeString()}
* @throws IllegalStateException if {@code curi.getContentDigestSchemeString()} is null
*/
protected String persistKeyFor(CrawlURI curi) {<FILL_FUNCTION_BODY>}
}
|
String key = curi.getContentDigestSchemeString();
if (key == null) {
throw new IllegalStateException("cannot load content digest history, CrawlURI does not have content digest value for " + curi);
}
return key;
| 234
| 66
| 300
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/recrawl/AbstractPersistProcessor.java
|
AbstractPersistProcessor
|
shouldStore
|
class AbstractPersistProcessor extends Processor {
/** @see RecrawlAttributeConstants#A_WRITE_TAG */
protected boolean onlyStoreIfWriteTagPresent = true;
public boolean getOnlyStoreIfWriteTagPresent() {
return onlyStoreIfWriteTagPresent;
}
public void setOnlyStoreIfWriteTagPresent(boolean onlyStoreIfWriteTagPresent) {
this.onlyStoreIfWriteTagPresent = onlyStoreIfWriteTagPresent;
}
/**
* Whether the current CrawlURI's state should be persisted (to log or
* direct to database)
*
* @param curi
* CrawlURI
* @return true if state should be stored; false to skip persistence
*/
protected boolean shouldStore(CrawlURI curi) {<FILL_FUNCTION_BODY>}
/**
* @param uri
* @return true if {@code curi} has WRITE_TAG in the latest fetch history (i.e. this crawl).
*/
protected boolean hasWriteTag(CrawlURI uri) {
return uri.getFetchHistory() != null
&& uri.getFetchHistory()[0] != null
&& uri.getFetchHistory()[0].containsKey(A_WRITE_TAG);
}
/**
* Whether the current CrawlURI's state should be loaded
*
* @param curi CrawlURI
* @return true if state should be loaded; false to skip loading
*/
protected boolean shouldLoad(CrawlURI curi) {
// TODO: don't load some (prereqs?)
return true;
}
}
|
// do this first for quick decision on CURLs postponed by prerequisite
if (!curi.isSuccess()) {
return false;
}
// DNS query need not be persisted
String scheme = curi.getUURI().getScheme();
if (!(scheme.equals("http") || scheme.equals("https") || scheme.equals("ftp") || scheme.equals("sftp"))) {
return false;
}
if (getOnlyStoreIfWriteTagPresent() && !hasWriteTag(curi)) {
return false;
}
return true;
| 418
| 156
| 574
|
<methods>public non-sealed void <init>() ,public void doCheckpoint(org.archive.checkpointing.Checkpoint) throws java.io.IOException,public void finishCheckpoint(org.archive.checkpointing.Checkpoint) ,public static java.lang.String flattenVia(org.archive.modules.CrawlURI) ,public java.lang.String getBeanName() ,public boolean getEnabled() ,public org.archive.spring.KeyedProperties getKeyedProperties() ,public static long getRecordedSize(org.archive.modules.CrawlURI) ,public org.archive.modules.deciderules.DecideRule getShouldProcessRule() ,public long getURICount() ,public static boolean hasHttpAuthenticationCredential(org.archive.modules.CrawlURI) ,public boolean isRunning() ,public static boolean isSuccess(org.archive.modules.CrawlURI) ,public org.archive.modules.ProcessResult process(org.archive.modules.CrawlURI) throws java.lang.InterruptedException,public java.lang.String report() ,public void setBeanName(java.lang.String) ,public void setEnabled(boolean) ,public void setRecoveryCheckpoint(org.archive.checkpointing.Checkpoint) ,public void setShouldProcessRule(org.archive.modules.deciderules.DecideRule) ,public void start() ,public void startCheckpoint(org.archive.checkpointing.Checkpoint) ,public void stop() <variables>protected java.lang.String beanName,protected boolean isRunning,protected org.archive.spring.KeyedProperties kp,protected org.archive.checkpointing.Checkpoint recoveryCheckpoint,protected java.util.concurrent.atomic.AtomicLong uriCount
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/recrawl/BdbContentDigestHistory.java
|
BdbContentDigestHistory
|
load
|
class BdbContentDigestHistory extends AbstractContentDigestHistory implements Lifecycle {
private static final Logger logger =
Logger.getLogger(BdbContentDigestHistory.class.getName());
protected BdbModule bdb;
@Autowired
public void setBdbModule(BdbModule bdb) {
this.bdb = bdb;
}
protected String historyDbName = "contentDigestHistory";
public String getHistoryDbName() {
return this.historyDbName;
}
public void setHistoryDbName(String name) {
this.historyDbName = name;
}
@SuppressWarnings("rawtypes")
protected StoredSortedMap<String, Map> store;
protected Database historyDb;
@Override
@SuppressWarnings("rawtypes")
public void start() {
if (isRunning()) {
return;
}
StoredSortedMap<String, Map> historyMap;
try {
StoredClassCatalog classCatalog = bdb.getClassCatalog();
historyDb = bdb.openDatabase(getHistoryDbName(), historyDbConfig(), true);
historyMap = new StoredSortedMap<String, Map>(
historyDb,
new StringBinding(),
new SerialBinding<Map>(classCatalog, Map.class),
true);
} catch (DatabaseException e) {
throw new RuntimeException(e);
}
store = historyMap;
}
@Override
public boolean isRunning() {
return historyDb != null;
}
@Override
public void stop() {
if (!isRunning()) {
return;
}
// leave other cleanup to BdbModule
historyDb = null;
}
protected transient BdbModule.BdbConfig historyDbConfig;
protected BdbModule.BdbConfig historyDbConfig() {
if (historyDbConfig == null) {
historyDbConfig = new BdbModule.BdbConfig();
historyDbConfig.setTransactional(false);
historyDbConfig.setAllowCreate(true);
historyDbConfig.setDeferredWrite(true);
}
return historyDbConfig;
}
public void load(CrawlURI curi) {<FILL_FUNCTION_BODY>}
public void store(CrawlURI curi) {
if (!curi.hasContentDigestHistory()
|| curi.getContentDigestHistory().isEmpty()) {
return;
}
if (logger.isLoggable(Level.FINER)) {
logger.finer("storing history by digest " + persistKeyFor(curi)
+ " for uri " + curi + " - "
+ curi.getContentDigestHistory());
}
store.put(persistKeyFor(curi), curi.getContentDigestHistory());
}
}
|
// make this call in all cases so that the value is initialized and
// WARCWriterProcessor knows it should put the info in there
HashMap<String, Object> contentDigestHistory = curi.getContentDigestHistory();
@SuppressWarnings("unchecked")
Map<String, Object> loadedHistory = store.get(persistKeyFor(curi));
if (loadedHistory != null) {
if (logger.isLoggable(Level.FINER)) {
logger.finer("loaded history by digest " + persistKeyFor(curi)
+ " for uri " + curi + " - " + loadedHistory);
}
contentDigestHistory.putAll(loadedHistory);
}
| 732
| 181
| 913
|
<methods>public non-sealed void <init>() ,public abstract void load(org.archive.modules.CrawlURI) ,public abstract void store(org.archive.modules.CrawlURI) <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/recrawl/ContentDigestHistoryLoader.java
|
ContentDigestHistoryLoader
|
innerProcess
|
class ContentDigestHistoryLoader extends Processor {
protected AbstractContentDigestHistory contentDigestHistory;
@Autowired
public void setContentDigestHistory(
AbstractContentDigestHistory contentDigestHistory) {
this.contentDigestHistory = contentDigestHistory;
}
@Override
protected boolean shouldProcess(CrawlURI uri) {
return uri.getContentDigest() != null && uri.getContentLength() > 0;
}
@Override
protected void innerProcess(CrawlURI curi) throws InterruptedException {<FILL_FUNCTION_BODY>}
}
|
contentDigestHistory.load(curi);
if (!curi.getContentDigestHistory().isEmpty()) {
IdenticalPayloadDigestRevisit revisit =
new IdenticalPayloadDigestRevisit(curi.getContentDigestSchemeString());
revisit.setRefersToDate((String)curi.getContentDigestHistory().get(A_ORIGINAL_DATE));
revisit.setRefersToTargetURI((String)curi.getContentDigestHistory().get(A_ORIGINAL_URL));
String warcRecordId= (String)curi.getContentDigestHistory().get(A_WARC_RECORD_ID);
if (warcRecordId!=null) {
revisit.setRefersToRecordID(warcRecordId);
}
curi.setRevisitProfile(revisit);
curi.getAnnotations().add("duplicate:digest");
}
| 155
| 249
| 404
|
<methods>public non-sealed void <init>() ,public void doCheckpoint(org.archive.checkpointing.Checkpoint) throws java.io.IOException,public void finishCheckpoint(org.archive.checkpointing.Checkpoint) ,public static java.lang.String flattenVia(org.archive.modules.CrawlURI) ,public java.lang.String getBeanName() ,public boolean getEnabled() ,public org.archive.spring.KeyedProperties getKeyedProperties() ,public static long getRecordedSize(org.archive.modules.CrawlURI) ,public org.archive.modules.deciderules.DecideRule getShouldProcessRule() ,public long getURICount() ,public static boolean hasHttpAuthenticationCredential(org.archive.modules.CrawlURI) ,public boolean isRunning() ,public static boolean isSuccess(org.archive.modules.CrawlURI) ,public org.archive.modules.ProcessResult process(org.archive.modules.CrawlURI) throws java.lang.InterruptedException,public java.lang.String report() ,public void setBeanName(java.lang.String) ,public void setEnabled(boolean) ,public void setRecoveryCheckpoint(org.archive.checkpointing.Checkpoint) ,public void setShouldProcessRule(org.archive.modules.deciderules.DecideRule) ,public void start() ,public void startCheckpoint(org.archive.checkpointing.Checkpoint) ,public void stop() <variables>protected java.lang.String beanName,protected boolean isRunning,protected org.archive.spring.KeyedProperties kp,protected org.archive.checkpointing.Checkpoint recoveryCheckpoint,protected java.util.concurrent.atomic.AtomicLong uriCount
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/recrawl/ContentDigestHistoryStorer.java
|
ContentDigestHistoryStorer
|
shouldProcess
|
class ContentDigestHistoryStorer extends Processor {
protected AbstractContentDigestHistory contentDigestHistory;
@Autowired
public void setContentDigestHistory(
AbstractContentDigestHistory contentDigestHistory) {
this.contentDigestHistory = contentDigestHistory;
}
@Override
protected boolean shouldProcess(CrawlURI uri) {<FILL_FUNCTION_BODY>}
@Override
protected void innerProcess(CrawlURI curi) throws InterruptedException {
contentDigestHistory.store(curi);
}
}
|
return uri.getContentDigest() != null && uri.getContentLength() > 0
&& uri.hasContentDigestHistory()
&& !uri.getContentDigestHistory().isEmpty();
| 145
| 50
| 195
|
<methods>public non-sealed void <init>() ,public void doCheckpoint(org.archive.checkpointing.Checkpoint) throws java.io.IOException,public void finishCheckpoint(org.archive.checkpointing.Checkpoint) ,public static java.lang.String flattenVia(org.archive.modules.CrawlURI) ,public java.lang.String getBeanName() ,public boolean getEnabled() ,public org.archive.spring.KeyedProperties getKeyedProperties() ,public static long getRecordedSize(org.archive.modules.CrawlURI) ,public org.archive.modules.deciderules.DecideRule getShouldProcessRule() ,public long getURICount() ,public static boolean hasHttpAuthenticationCredential(org.archive.modules.CrawlURI) ,public boolean isRunning() ,public static boolean isSuccess(org.archive.modules.CrawlURI) ,public org.archive.modules.ProcessResult process(org.archive.modules.CrawlURI) throws java.lang.InterruptedException,public java.lang.String report() ,public void setBeanName(java.lang.String) ,public void setEnabled(boolean) ,public void setRecoveryCheckpoint(org.archive.checkpointing.Checkpoint) ,public void setShouldProcessRule(org.archive.modules.deciderules.DecideRule) ,public void start() ,public void startCheckpoint(org.archive.checkpointing.Checkpoint) ,public void stop() <variables>protected java.lang.String beanName,protected boolean isRunning,protected org.archive.spring.KeyedProperties kp,protected org.archive.checkpointing.Checkpoint recoveryCheckpoint,protected java.util.concurrent.atomic.AtomicLong uriCount
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/recrawl/FetchHistoryProcessor.java
|
FetchHistoryProcessor
|
historyRealloc
|
class FetchHistoryProcessor extends Processor {
@SuppressWarnings("unused")
private static final long serialVersionUID = 1L;
/** Desired history array length. */
protected int historyLength = 2;
public int getHistoryLength() {
return this.historyLength;
}
public void setHistoryLength(int length) {
this.historyLength = length;
}
// key description: "Number of previous fetch entries to retain in the URI " +
// "history. The current fetch becomes a history entry at " +
// "this Processor step, so the smallest useful value is " +
// "'2' (including the current fetch). Default is '2'."
// class description: "FetchHistoryProcessor. Maintain a history of fetch " +
// "information inside the CrawlURI's attributes.."
public FetchHistoryProcessor() {
}
@Override
protected void innerProcess(CrawlURI puri) throws InterruptedException {
CrawlURI curi = (CrawlURI) puri;
HashMap<String, Object> latestFetch = new HashMap<String, Object>();
// save status
latestFetch.put(A_STATUS, curi.getFetchStatus());
// save fetch start time
latestFetch.put(A_FETCH_BEGAN_TIME, curi.getFetchBeginTime());
// save digest
String digest = curi.getContentDigestSchemeString();
if (digest != null) {
latestFetch.put(A_CONTENT_DIGEST, digest);
}
// save relevant HTTP headers, if available
if (curi.isHttpTransaction()) {
saveHeader(curi, latestFetch, A_ETAG_HEADER);
saveHeader(curi, latestFetch, A_LAST_MODIFIED_HEADER);
// save reference length (real or virtual)
long referenceLength;
if (curi.containsDataKey(A_REFERENCE_LENGTH)) {
// reuse previous length if available (see FetchHTTP#setSizes).
referenceLength = (Long) curi.getData().get(A_REFERENCE_LENGTH);
} else {
// normally, use content-length
referenceLength = curi.getContentLength();
}
latestFetch.put(A_REFERENCE_LENGTH, referenceLength);
}
HashMap<String, Object>[] history = historyRealloc(curi);
// rotate all history entries up one slot, insert new at [0]
for (int i = history.length - 1; i > 0; i--) {
history[i] = history[i - 1];
}
history[0] = latestFetch;
curi.setFetchHistory(history);
if (curi.getFetchStatus() == 304) {
if( history.length >= 2 && history[1] != null && history[1].containsKey(A_CONTENT_DIGEST)) {
// Copy forward the content digest as the current digest is simply of an empty response
latestFetch.put(A_CONTENT_DIGEST, history[1].get(A_CONTENT_DIGEST));
// Create revisit profile
curi.getAnnotations().add("duplicate:server-not-modified");
ServerNotModifiedRevisit revisit = new ServerNotModifiedRevisit();
revisit.setETag((String) latestFetch.get(A_ETAG_HEADER));
revisit.setLastModified((String) latestFetch.get(A_LAST_MODIFIED_HEADER));
revisit.setPayloadDigest((String)latestFetch.get(A_CONTENT_DIGEST));
curi.setRevisitProfile(revisit);
}
} else if (hasIdenticalDigest(curi)) {
curi.getAnnotations().add("duplicate:digest");
IdenticalPayloadDigestRevisit revisit =
new IdenticalPayloadDigestRevisit((String)history[1].get(A_CONTENT_DIGEST));
revisit.setRefersToTargetURI(curi.getURI()); // Matches are always on the same URI
revisit.setRefersToDate((Long)history[1].get(A_FETCH_BEGAN_TIME));
curi.setRevisitProfile(revisit);
}
}
/**
* Utility method for testing if a CrawlURI's last two history
* entries (one being the most recent fetch) have identical
* content-digest information.
*
* @param curi CrawlURI to test
* @return true if last two history entries have identical digests,
* otherwise false
*/
public static boolean hasIdenticalDigest(CrawlURI curi) {
Map<String,Object>[] history = curi.getFetchHistory();
return history != null
&& history[0] != null
&& history[0].containsKey(A_CONTENT_DIGEST)
&& history[1] != null
&& history[1].containsKey(A_CONTENT_DIGEST)
&& history[0].get(A_CONTENT_DIGEST).equals(history[1].get(A_CONTENT_DIGEST));
}
/** Get or create proper-sized history array */
@SuppressWarnings("unchecked")
protected HashMap<String, Object>[] historyRealloc(CrawlURI curi) {<FILL_FUNCTION_BODY>}
/** Save a header from the given HTTP operation into the Map. */
protected void saveHeader(CrawlURI curi, Map<String,Object> map,
String key) {
String value = curi.getHttpResponseHeader(key);
if (value != null) {
map.put(key, value);
}
}
@Override
protected boolean shouldProcess(CrawlURI curi) {
// only process if curi contains evidence of fetch attempt
return curi.containsDataKey(A_FETCH_BEGAN_TIME);
}
}
|
int targetHistoryLength = getHistoryLength();
HashMap<String, Object>[] history = curi.getFetchHistory();
if (history == null) {
history = new HashMap[targetHistoryLength];
}
if (history.length != targetHistoryLength) {
HashMap<String, Object>[] newHistory = new HashMap[targetHistoryLength];
System.arraycopy(history, 0, newHistory, 0,
Math.min(history.length, newHistory.length));
history = newHistory;
}
return history;
| 1,550
| 141
| 1,691
|
<methods>public non-sealed void <init>() ,public void doCheckpoint(org.archive.checkpointing.Checkpoint) throws java.io.IOException,public void finishCheckpoint(org.archive.checkpointing.Checkpoint) ,public static java.lang.String flattenVia(org.archive.modules.CrawlURI) ,public java.lang.String getBeanName() ,public boolean getEnabled() ,public org.archive.spring.KeyedProperties getKeyedProperties() ,public static long getRecordedSize(org.archive.modules.CrawlURI) ,public org.archive.modules.deciderules.DecideRule getShouldProcessRule() ,public long getURICount() ,public static boolean hasHttpAuthenticationCredential(org.archive.modules.CrawlURI) ,public boolean isRunning() ,public static boolean isSuccess(org.archive.modules.CrawlURI) ,public org.archive.modules.ProcessResult process(org.archive.modules.CrawlURI) throws java.lang.InterruptedException,public java.lang.String report() ,public void setBeanName(java.lang.String) ,public void setEnabled(boolean) ,public void setRecoveryCheckpoint(org.archive.checkpointing.Checkpoint) ,public void setShouldProcessRule(org.archive.modules.deciderules.DecideRule) ,public void start() ,public void startCheckpoint(org.archive.checkpointing.Checkpoint) ,public void stop() <variables>protected java.lang.String beanName,protected boolean isRunning,protected org.archive.spring.KeyedProperties kp,protected org.archive.checkpointing.Checkpoint recoveryCheckpoint,protected java.util.concurrent.atomic.AtomicLong uriCount
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/recrawl/PersistLoadProcessor.java
|
PersistLoadProcessor
|
start
|
class PersistLoadProcessor extends PersistOnlineProcessor {
@SuppressWarnings("unused")
private static final long serialVersionUID = -1917169316015093131L;
private static final Logger logger =
Logger.getLogger(PersistLoadProcessor.class.getName());
// class description: "PersistLoadProcessor. Loads CrawlURI attributes " +
// "from a previous crawl for current consultation."
/**
* A source (either log file or BDB directory) from which to copy history
* information into the current store at startup. (Whenever possible, it
* would be better to ensure the original history DB is in its own
* independent BDB environment, and then copy and reuse that environment in
* the followup crawl(s).) Only one of {link @preloadSource} and
* {link @preloadSourceUrl} may be specified.
*/
protected ConfigPath preloadSource =
new ConfigFile("preload source","");
public ConfigPath getPreloadSource() {
return preloadSource;
}
public void setPreloadSource(ConfigPath preloadSource) {
this.preloadSource = preloadSource;
}
public PersistLoadProcessor() {
}
/**
* A log file source url from which to copy history information into the
* current store at startup. (Whenever possible, it would be better to
* ensure the original history DB is in its own independent BDB environment,
* and then copy and reuse that environment in the followup crawl(s).)
* Only one of {@link preloadSource} and {@link preloadSourceUrl} may be specified.
*/
protected String preloadSourceUrl = "";
public String getPreloadSourceUrl() {
return preloadSourceUrl;
}
public void setPreloadSourceUrl(String preloadSourceUrl) {
this.preloadSourceUrl = preloadSourceUrl;
}
@Override
protected void innerProcess(CrawlURI curi) throws InterruptedException {
String pkey = persistKeyFor(curi);
@SuppressWarnings("unchecked")
Map<String, Object> prior =
(Map<String,Object>) store.get(pkey);
if(prior!=null) {
// merge in keys
prior.keySet().removeAll(curi.getData().keySet());
curi.getData().putAll(prior);
}
}
@Override
protected boolean shouldProcess(CrawlURI uri) {
return shouldLoad(uri);
}
@Override
public void start() {<FILL_FUNCTION_BODY>}
}
|
if (isRunning()) {
return;
}
super.start();
if (StringUtils.isNotBlank(getPreloadSourceUrl()) && StringUtils.isNotBlank(getPreloadSource().getPath())) {
logger.log(Level.SEVERE, "Both preloadSource and preloadSourceUrl are set - using preloadSource " + getPreloadSource().getFile());
}
String source = null;
Integer count = null;
try {
if (StringUtils.isNotBlank(getPreloadSource().getPath())) {
source = preloadSource.getPath();
count = PersistProcessor.copyPersistSourceToHistoryMap(preloadSource.getFile(), store);
} else if (StringUtils.isNotBlank(getPreloadSourceUrl())) {
source = getPreloadSourceUrl();
count = PersistProcessor.copyPersistSourceToHistoryMap(new URL(source), store);
}
if (count != null) {
logger.info("Loaded deduplication information for " + count + " previously fetched urls from " + source);
}
} catch (IOException ioe) {
logger.log(Level.SEVERE, "Problem loading " + source + ", proceeding without deduplication. " + ioe);
} catch(DatabaseException de) {
logger.log(Level.SEVERE, "Problem loading " + source + ", proceeding without deduplication. " + de);
} catch(IllegalArgumentException iae) {
logger.log(Level.SEVERE, "Problem loading " + source + ", proceeding without deduplication. " + iae);
}
| 672
| 406
| 1,078
|
<methods>public void <init>() ,public java.lang.String getHistoryDbName() ,public boolean isRunning() ,public void setBdbModule(org.archive.bdb.BdbModule) ,public void setHistoryDbName(java.lang.String) ,public void start() ,public void stop() <variables>protected org.archive.bdb.BdbModule bdb,protected Database historyDb,protected java.lang.String historyDbName,private static final long serialVersionUID,protected StoredSortedMap<java.lang.String,Map#RAW> store
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/recrawl/PersistLogProcessor.java
|
PersistLogProcessor
|
start
|
class PersistLogProcessor extends PersistProcessor
implements Checkpointable, Lifecycle {
@SuppressWarnings("unused")
private static final long serialVersionUID = 1678691994065439346L;
protected CrawlerJournal log;
// description: "Filename to which to log URI persistence information. " +
// "Default is 'logs/persistlog.txtser.gz'. "
protected ConfigPath logFile = new ConfigPath("URI persistence log file","${launchId}/logs/persistlog.txtser.gz");
public ConfigPath getLogFile() {
return this.logFile;
}
public void setLogFile(ConfigPath path) {
this.logFile = path;
}
// class description: "PersistLogProcessor. Logs CrawlURI attributes " +
// "from latest fetch for consultation by a later recrawl."
public PersistLogProcessor() {
}
public void start() {<FILL_FUNCTION_BODY>}
public boolean isRunning() {
return log != null;
}
public void stop() {
if(!isRunning()) {
return;
}
// XXX happens at finish; move to teardown?
log.close();
log = null;
}
@Override
protected void innerProcess(CrawlURI curi) {
log.writeLine(persistKeyFor(curi), " ",
new String(Base64.encodeBase64(
SerializationUtils.serialize((Serializable)curi.getData()))));
}
public void startCheckpoint(Checkpoint checkpointInProgress) {}
public void doCheckpoint(Checkpoint checkpointInProgress) throws IOException {
// rotate log
log.rotateForCheckpoint(checkpointInProgress);
}
public void finishCheckpoint(Checkpoint checkpointInProgress) {}
@Override
protected boolean shouldProcess(CrawlURI uri) {
return shouldStore(uri);
}
}
|
if (isRunning()) {
return;
}
try {
File logFile = getLogFile().getFile();
log = new CrawlerJournal(logFile);
} catch (IOException e) {
// TODO Auto-generated catch block
throw new RuntimeException(e);
}
| 536
| 79
| 615
|
<methods>public void <init>() ,public static int copyPersistSourceToHistoryMap(java.io.File, StoredSortedMap<java.lang.String,Map#RAW>) throws DatabaseException, java.io.IOException,public static int copyPersistSourceToHistoryMap(java.net.URL, StoredSortedMap<java.lang.String,Map#RAW>) throws DatabaseException, java.io.IOException,public static void main(java.lang.String[]) throws DatabaseException, java.io.IOException,public static java.lang.String persistKeyFor(org.archive.modules.CrawlURI) ,public static java.lang.String persistKeyFor(java.lang.String) ,public static int populatePersistEnv(java.lang.String, java.io.File) throws java.io.IOException,public static org.archive.util.bdbje.EnhancedEnvironment setupCopyEnvironment(java.io.File) throws DatabaseException,public static org.archive.util.bdbje.EnhancedEnvironment setupCopyEnvironment(java.io.File, boolean) throws DatabaseException<variables>public static final non-sealed org.archive.bdb.BdbModule.BdbConfig HISTORY_DB_CONFIG,public static final java.lang.String URI_HISTORY_DBNAME,private static final java.util.logging.Logger logger,private static final long serialVersionUID
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/recrawl/PersistOnlineProcessor.java
|
PersistOnlineProcessor
|
stop
|
class PersistOnlineProcessor extends PersistProcessor
implements Lifecycle {
@SuppressWarnings("unused")
private static final long serialVersionUID = -666479480942267268L;
protected BdbModule bdb;
@Autowired
public void setBdbModule(BdbModule bdb) {
this.bdb = bdb;
}
protected String historyDbName = "uri_history";
public String getHistoryDbName() {
return this.historyDbName;
}
public void setHistoryDbName(String name) {
this.historyDbName = name;
}
@SuppressWarnings("unchecked")
protected StoredSortedMap<String,Map> store;
protected Database historyDb;
public PersistOnlineProcessor() {
}
@SuppressWarnings("unchecked")
public void start() {
// TODO: share single store instance between Load and Store processors
// (shared context? EnhancedEnvironment?)
if (isRunning()) {
return;
}
StoredSortedMap<String,Map> historyMap;
try {
StoredClassCatalog classCatalog = bdb.getClassCatalog();
BdbModule.BdbConfig dbConfig = HISTORY_DB_CONFIG;
historyDb = bdb.openDatabase(getHistoryDbName(), dbConfig, true);
historyMap =
new StoredSortedMap<String,Map>(
historyDb,
new StringBinding(),
new SerialBinding<Map>(classCatalog,Map.class),
true);
} catch (DatabaseException e) {
throw new RuntimeException(e);
}
store = historyMap;
}
public boolean isRunning() {
return historyDb != null;
}
public void stop() {<FILL_FUNCTION_BODY>}
}
|
if (!isRunning()) {
return;
}
// leave other cleanup to BdbModule
historyDb = null;
| 497
| 36
| 533
|
<methods>public void <init>() ,public static int copyPersistSourceToHistoryMap(java.io.File, StoredSortedMap<java.lang.String,Map#RAW>) throws DatabaseException, java.io.IOException,public static int copyPersistSourceToHistoryMap(java.net.URL, StoredSortedMap<java.lang.String,Map#RAW>) throws DatabaseException, java.io.IOException,public static void main(java.lang.String[]) throws DatabaseException, java.io.IOException,public static java.lang.String persistKeyFor(org.archive.modules.CrawlURI) ,public static java.lang.String persistKeyFor(java.lang.String) ,public static int populatePersistEnv(java.lang.String, java.io.File) throws java.io.IOException,public static org.archive.util.bdbje.EnhancedEnvironment setupCopyEnvironment(java.io.File) throws DatabaseException,public static org.archive.util.bdbje.EnhancedEnvironment setupCopyEnvironment(java.io.File, boolean) throws DatabaseException<variables>public static final non-sealed org.archive.bdb.BdbModule.BdbConfig HISTORY_DB_CONFIG,public static final java.lang.String URI_HISTORY_DBNAME,private static final java.util.logging.Logger logger,private static final long serialVersionUID
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/revisit/AbstractProfile.java
|
AbstractProfile
|
getWarcHeaders
|
class AbstractProfile implements RevisitProfile {
protected String refersToDate;
protected String refersToRecordID;
@Override
public Map<String, String> getWarcHeaders() {<FILL_FUNCTION_BODY>}
/**
* Set the refers to date
* @param refersToDate Must be a string representation of a data conforming to
* W3C/ISO8601 format, assuming UTC. Format is yyyy-MM-dd'T'HH:mm:ss'Z'
*
* @see ArchiveUtils#getLog14Date(java.util.Date)
*/
public void setRefersToDate(String refersToDate) {
this.refersToDate = refersToDate;
}
public String getRefersToDate() {
return refersToDate;
}
/**
* Set the refers to date
* @param refersToDate
*/
public void setRefersToDate(long refersToDate) {
this.refersToDate = ArchiveUtils.getLog14Date(refersToDate);
}
public String getRefersToRecordID() {
return refersToRecordID;
}
public void setRefersToRecordID(String refersToRecordID) {
this.refersToRecordID = refersToRecordID;
}
}
|
Map<String, String> headers = new HashMap<String, String>();
if (refersToDate!=null) {
headers.put(HEADER_KEY_REFERS_TO_DATE, refersToDate);
}
if (refersToRecordID!=null) {
headers.put(HEADER_KEY_REFERS_TO, "<" + refersToRecordID + ">");
}
return headers;
| 379
| 131
| 510
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/revisit/IdenticalPayloadDigestRevisit.java
|
IdenticalPayloadDigestRevisit
|
getWarcHeaders
|
class IdenticalPayloadDigestRevisit extends AbstractProfile {
protected final String payloadDigest;
protected String refersToTargetURI;
/**
* Minimal constructor.
* @param payloadDigest The digest of the original record
*/
public IdenticalPayloadDigestRevisit(String payloadDigest) {
if (payloadDigest==null) {
throw new NullPointerException("PayloadDigest may not be null");
}
this.payloadDigest = payloadDigest;
}
@Override
public String getProfileName() {
return WARCConstants.PROFILE_REVISIT_IDENTICAL_DIGEST;
}
@Override
public Map<String, String> getWarcHeaders() {<FILL_FUNCTION_BODY>}
public String getRefersToTargetURI() {
return refersToTargetURI;
}
public void setRefersToTargetURI(String refersToTargetURI) {
this.refersToTargetURI = refersToTargetURI;
}
public String getPayloadDigest() {
return payloadDigest;
}
}
|
Map<String, String> headers = super.getWarcHeaders();
headers.put(HEADER_KEY_PAYLOAD_DIGEST, payloadDigest);
if (refersToTargetURI!=null) {
headers.put(HEADER_KEY_REFERS_TO_TARGET_URI, refersToTargetURI);
}
return headers;
| 322
| 115
| 437
|
<methods>public non-sealed void <init>() ,public java.lang.String getRefersToDate() ,public java.lang.String getRefersToRecordID() ,public Map<java.lang.String,java.lang.String> getWarcHeaders() ,public void setRefersToDate(java.lang.String) ,public void setRefersToDate(long) ,public void setRefersToRecordID(java.lang.String) <variables>protected java.lang.String refersToDate,protected java.lang.String refersToRecordID
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/revisit/ServerNotModifiedRevisit.java
|
ServerNotModifiedRevisit
|
getWarcHeaders
|
class ServerNotModifiedRevisit extends AbstractProfile {
// From HTTP response
protected String eTag;
protected String lastModified;
// Optional. Digest of original capture
protected String payloadDigest;
/**
* Minimal constructor.
*/
public ServerNotModifiedRevisit() {
}
@Override
public String getProfileName() {
return WARCConstants.PROFILE_REVISIT_NOT_MODIFIED;
}
@Override
public Map<String, String> getWarcHeaders() {<FILL_FUNCTION_BODY>}
public String getETag() {
return eTag;
}
public void setETag(String eTag) {
this.eTag = eTag;
}
public String getLastModified() {
return lastModified;
}
public void setLastModified(String lastModified) {
this.lastModified = lastModified;
}
public String getPayloadDigest() {
return payloadDigest;
}
public void setPayloadDigest(String payloadDigest) {
this.payloadDigest = payloadDigest;
}
}
|
Map<String, String> headers = super.getWarcHeaders();
if (payloadDigest!=null) {
headers.put(HEADER_KEY_PAYLOAD_DIGEST, payloadDigest);
}
if (eTag!=null) {
headers.put(HEADER_KEY_ETAG, eTag);
}
if (lastModified!=null) {
headers.put(HEADER_KEY_LAST_MODIFIED, lastModified);
}
return headers;
| 355
| 160
| 515
|
<methods>public non-sealed void <init>() ,public java.lang.String getRefersToDate() ,public java.lang.String getRefersToRecordID() ,public Map<java.lang.String,java.lang.String> getWarcHeaders() ,public void setRefersToDate(java.lang.String) ,public void setRefersToDate(long) ,public void setRefersToRecordID(java.lang.String) <variables>protected java.lang.String refersToDate,protected java.lang.String refersToRecordID
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/seeds/TextSeedModule.java
|
TextSeedModule
|
addSeed
|
class TextSeedModule extends SeedModule
implements ReadSource {
private static final long serialVersionUID = 3L;
private static final Logger logger =
Logger.getLogger(TextSeedModule.class.getName());
/**
* Text from which to extract seeds
*/
protected ReadSource textSource = null;
public ReadSource getTextSource() {
return textSource;
}
@Required
public void setTextSource(ReadSource seedsSource) {
this.textSource = seedsSource;
}
/**
* Number of lines of seeds-source to read on initial load before proceeding
* with crawl. Default is -1, meaning all. Any other value will cause that
* number of lines to be loaded before fetching begins, while all extra
* lines continue to be processed in the background. Generally, this should
* only be changed when working with very large seed lists, and scopes that
* do *not* depend on reading all seeds.
*/
protected int blockAwaitingSeedLines = -1;
public int getBlockAwaitingSeedLines() {
return blockAwaitingSeedLines;
}
public void setBlockAwaitingSeedLines(int blockAwaitingSeedLines) {
this.blockAwaitingSeedLines = blockAwaitingSeedLines;
}
public TextSeedModule() {
}
/**
* Announce all seeds from configured source to SeedListeners
* (including nonseed lines mixed in).
* @see org.archive.modules.seeds.SeedModule#announceSeeds()
*/
public void announceSeeds() {
if(getBlockAwaitingSeedLines()>-1) {
final CountDownLatch latch = new CountDownLatch(getBlockAwaitingSeedLines());
new Thread(){
@Override
public void run() {
announceSeeds(latch);
while(latch.getCount()>0) {
latch.countDown();
}
}
}.start();
try {
latch.await();
} catch (InterruptedException e) {
// do nothing
}
} else {
announceSeeds(null);
}
}
protected void announceSeeds(CountDownLatch latchOrNull) {
BufferedReader reader = new BufferedReader(textSource.obtainReader());
try {
announceSeedsFromReader(reader,latchOrNull);
} finally {
IOUtils.closeQuietly(reader);
}
}
/**
* Announce all seeds (and nonseed possible-directive lines) from
* the given Reader
* @param reader source of seed/directive lines
* @param latchOrNull if non-null, sent countDown after each line, allowing
* another thread to proceed after a configurable number of lines processed
*/
protected void announceSeedsFromReader(BufferedReader reader, CountDownLatch latchOrNull) {
String s;
Iterator<String> iter =
new RegexLineIterator(
new LineReadingIterator(reader),
RegexLineIterator.COMMENT_LINE,
RegexLineIterator.NONWHITESPACE_ENTRY_TRAILING_COMMENT,
RegexLineIterator.ENTRY);
int count = 0;
while (iter.hasNext()) {
s = (String) iter.next();
if(Character.isLetterOrDigit(s.charAt(0))) {
// consider a likely URI
seedLine(s);
count++;
if(count%20000==0) {
System.runFinalization();
}
} else {
// report just in case it's a useful directive
nonseedLine(s);
}
if(latchOrNull!=null) {
latchOrNull.countDown();
}
}
publishConcludedSeedBatch();
}
/**
* Handle a read line that is probably a seed.
*
* @param uri String seed-containing line
*/
protected void seedLine(String uri) {
String originalUri = uri;
if (!uri.matches("[a-zA-Z][\\w+\\-]+:.*")) { // Rfc2396 s3.1 scheme,
// minus '.'
// Does not begin with scheme, so try http://
uri = "http://" + uri;
}
try {
UURI uuri = UURIFactory.getInstance(uri);
CrawlURI curi = new CrawlURI(uuri);
curi.setSeed(true);
curi.setSchedulingDirective(SchedulingConstants.MEDIUM);
if (getSourceTagSeeds()) {
curi.setSourceTag(originalUri);
}
publishAddedSeed(curi);
} catch (URIException e) {
// try as nonseed line as fallback
nonseedLine(uri);
}
}
/**
* Handle a read line that is not a seed, but may still have
* meaning to seed-consumers (such as scoping beans).
*/
protected void nonseedLine(String line) {
publishNonSeedLine(line);
}
/**
* Treat the given file as a source of additional seeds,
* announcing to SeedListeners.
*
* @see org.archive.modules.seeds.SeedModule#actOn(java.io.File)
*/
public void actOn(File f) {
BufferedReader reader = null;
try {
reader = ArchiveUtils.getBufferedReader(f);
announceSeedsFromReader(reader, null);
} catch (IOException ioe) {
logger.log(Level.SEVERE,"problem reading seed file "+f,ioe);
} finally {
IOUtils.closeQuietly(reader);
}
}
/**
* Add a new seed to scope. By default, simply appends
* to seeds file, though subclasses may handle differently.
*
* <p>This method is *not* sufficient to get the new seed
* scheduled in the Frontier for crawling -- it only
* affects the Scope's seed record (and decisions which
* flow from seeds).
*
* @param curi CandidateUri to add
*/
@Override
public synchronized void addSeed(final CrawlURI curi) {<FILL_FUNCTION_BODY>}
public Reader obtainReader() {
return textSource.obtainReader();
}
}
|
if(!(textSource instanceof WriteTarget)) {
// TODO: do something else to log seed update
logger.warning("nowhere to log added seed: "+curi);
} else {
// TODO: determine if this modification to seeds file means
// TextSeedModule should (again) be Checkpointable
try {
Writer fw = ((WriteTarget)textSource).obtainWriter(true);
// Write to new (last) line the URL.
fw.write("\n");
fw.write("# Heritrix added seed " +
((curi.getVia() != null) ? "redirect from " + curi.getVia():
"(JMX)") + ".\n");
fw.write(curi.toString());
fw.flush();
fw.close();
} catch (IOException e) {
DevUtils.warnHandle(e, "problem writing new seed");
}
}
publishAddedSeed(curi);
| 1,698
| 248
| 1,946
|
<methods>public void <init>() ,public abstract void actOn(java.io.File) ,public abstract void addSeed(org.archive.modules.CrawlURI) ,public void addSeedListener(org.archive.modules.seeds.SeedListener) ,public abstract void announceSeeds() ,public Set<org.archive.modules.seeds.SeedListener> getSeedListeners() ,public boolean getSourceTagSeeds() ,public void setSeedListeners(Set<org.archive.modules.seeds.SeedListener>) ,public void setSourceTagSeeds(boolean) <variables>protected Set<org.archive.modules.seeds.SeedListener> seedListeners,private static final long serialVersionUID,protected boolean sourceTagSeeds
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/warc/BaseWARCRecordBuilder.java
|
BaseWARCRecordBuilder
|
generateRecordID
|
class BaseWARCRecordBuilder implements WARCRecordBuilder {
public static URI generateRecordID() {<FILL_FUNCTION_BODY>}
}
|
try {
return new URI("urn:uuid:" + UUID.randomUUID());
} catch (URISyntaxException e) {
throw new RuntimeException(e); // impossible
}
| 39
| 50
| 89
|
<no_super_class>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/warc/DnsResponseRecordBuilder.java
|
DnsResponseRecordBuilder
|
buildRecord
|
class DnsResponseRecordBuilder extends BaseWARCRecordBuilder {
@Override
public boolean shouldBuildRecord(CrawlURI curi) {
return "dns".equals(curi.getUURI().getScheme().toLowerCase());
}
@Override
public WARCRecordInfo buildRecord(CrawlURI curi, URI concurrentTo) throws IOException {<FILL_FUNCTION_BODY>}
}
|
final String timestamp =
ArchiveUtils.getLog14Date(curi.getFetchBeginTime());
WARCRecordInfo recordInfo = new WARCRecordInfo();
recordInfo.setRecordId(generateRecordID());
if (concurrentTo != null) {
recordInfo.addExtraHeader(HEADER_KEY_CONCURRENT_TO,
'<' + concurrentTo.toString() + '>');
}
recordInfo.setType(WARCRecordType.response);
recordInfo.setUrl(curi.toString());
recordInfo.setCreate14DigitDate(timestamp);
recordInfo.setMimetype(curi.getContentType());
recordInfo.setContentLength(curi.getRecorder().getRecordedInput().getSize());
recordInfo.setEnforceLength(true);
if (curi.getServerIP() != null) {
recordInfo.addExtraHeader(HEADER_KEY_IP, curi.getServerIP());
}
ReplayInputStream ris =
curi.getRecorder().getRecordedInput().getReplayInputStream();
recordInfo.setContentStream(ris);
return recordInfo;
| 109
| 301
| 410
|
<methods>public non-sealed void <init>() ,public static java.net.URI generateRecordID() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/warc/FtpControlConversationRecordBuilder.java
|
FtpControlConversationRecordBuilder
|
buildRecord
|
class FtpControlConversationRecordBuilder extends BaseWARCRecordBuilder {
@Override
public boolean shouldBuildRecord(CrawlURI curi) {
return "ftp".equalsIgnoreCase(curi.getUURI().getScheme()) || "sftp".equalsIgnoreCase(curi.getUURI().getScheme());
}
@Override
public WARCRecordInfo buildRecord(CrawlURI curi, URI concurrentTo) throws IOException {<FILL_FUNCTION_BODY>}
}
|
final String timestamp =
ArchiveUtils.getLog14Date(curi.getFetchBeginTime());
String controlConversation =
curi.getData().get(A_FTP_CONTROL_CONVERSATION).toString();
WARCRecordInfo recordInfo = new WARCRecordInfo();
recordInfo.setRecordId(generateRecordID());
if (concurrentTo != null) {
recordInfo.addExtraHeader(HEADER_KEY_CONCURRENT_TO,
'<' + concurrentTo.toString() + '>');
}
recordInfo.setCreate14DigitDate(timestamp);
recordInfo.setUrl(curi.toString());
recordInfo.setMimetype(FTP_CONTROL_CONVERSATION_MIMETYPE);
recordInfo.setEnforceLength(true);
recordInfo.setType(WARCRecordType.metadata);
if (curi.getServerIP() != null) {
recordInfo.addExtraHeader(HEADER_KEY_IP, curi.getServerIP());
}
byte[] b = controlConversation.getBytes("UTF-8");
recordInfo.setContentStream(new ByteArrayInputStream(b));
recordInfo.setContentLength((long) b.length);
return recordInfo;
| 130
| 330
| 460
|
<methods>public non-sealed void <init>() ,public static java.net.URI generateRecordID() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/warc/FtpResponseRecordBuilder.java
|
FtpResponseRecordBuilder
|
buildRecord
|
class FtpResponseRecordBuilder extends BaseWARCRecordBuilder {
@Override
public boolean shouldBuildRecord(CrawlURI curi) {
return !curi.isRevisit()
&& ("ftp".equalsIgnoreCase(curi.getUURI().getScheme()) || "sftp".equalsIgnoreCase(curi.getUURI().getScheme()));
}
@Override
public WARCRecordInfo buildRecord(CrawlURI curi, URI concurrentTo) throws IOException {<FILL_FUNCTION_BODY>}
}
|
final String timestamp =
ArchiveUtils.getLog14Date(curi.getFetchBeginTime());
WARCRecordInfo recordInfo = new WARCRecordInfo();
recordInfo.setRecordId(generateRecordID());
if (concurrentTo != null) {
recordInfo.addExtraHeader(HEADER_KEY_CONCURRENT_TO,
'<' + concurrentTo.toString() + '>');
}
recordInfo.setType(WARCRecordType.response);
recordInfo.setUrl(curi.toString());
recordInfo.setCreate14DigitDate(timestamp);
recordInfo.setMimetype(curi.getContentType());
recordInfo.setContentLength(curi.getRecorder().getRecordedInput().getSize());
recordInfo.setEnforceLength(true);
if (curi.getServerIP() != null) {
recordInfo.addExtraHeader(HEADER_KEY_IP, curi.getServerIP());
}
ReplayInputStream ris =
curi.getRecorder().getRecordedInput().getReplayInputStream();
recordInfo.setContentStream(ris);
return recordInfo;
| 141
| 300
| 441
|
<methods>public non-sealed void <init>() ,public static java.net.URI generateRecordID() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/warc/HttpRequestRecordBuilder.java
|
HttpRequestRecordBuilder
|
buildRecord
|
class HttpRequestRecordBuilder extends BaseWARCRecordBuilder {
@Override
public boolean shouldBuildRecord(CrawlURI curi) {
return curi.getUURI().getScheme().toLowerCase().startsWith("http");
}
@Override
public WARCRecordInfo buildRecord(CrawlURI curi, URI concurrentTo)
throws IOException {<FILL_FUNCTION_BODY>}
}
|
final String timestamp =
ArchiveUtils.getLog14Date(curi.getFetchBeginTime());
WARCRecordInfo recordInfo = new WARCRecordInfo();
recordInfo.setRecordId(generateRecordID());
if (concurrentTo != null) {
recordInfo.addExtraHeader(HEADER_KEY_CONCURRENT_TO,
'<' + concurrentTo.toString() + '>');
}
recordInfo.setType(WARCRecordType.request);
recordInfo.setUrl(curi.toString());
recordInfo.setCreate14DigitDate(timestamp);
recordInfo.setMimetype(HTTP_REQUEST_MIMETYPE);
recordInfo.setContentLength(curi.getRecorder().getRecordedOutput().getSize());
recordInfo.setEnforceLength(true);
ReplayInputStream
ris = curi.getRecorder().getRecordedOutput().getReplayInputStream();
recordInfo.setContentStream(ris);
return recordInfo;
| 110
| 258
| 368
|
<methods>public non-sealed void <init>() ,public static java.net.URI generateRecordID() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/warc/HttpResponseRecordBuilder.java
|
HttpResponseRecordBuilder
|
buildRecord
|
class HttpResponseRecordBuilder extends BaseWARCRecordBuilder {
@Override
public boolean shouldBuildRecord(CrawlURI curi) {
return !curi.isRevisit()
&& curi.getUURI().getScheme().toLowerCase().startsWith("http");
}
@Override
public WARCRecordInfo buildRecord(CrawlURI curi, URI concurrentTo) throws IOException {<FILL_FUNCTION_BODY>}
}
|
final String timestamp =
ArchiveUtils.getLog14Date(curi.getFetchBeginTime());
WARCRecordInfo recordInfo = new WARCRecordInfo();
recordInfo.setRecordId(generateRecordID());
if (concurrentTo != null) {
recordInfo.addExtraHeader(HEADER_KEY_CONCURRENT_TO,
'<' + concurrentTo.toString() + '>');
}
recordInfo.setType(WARCRecordType.response);
recordInfo.setUrl(curi.toString());
recordInfo.setCreate14DigitDate(timestamp);
recordInfo.setMimetype(HTTP_RESPONSE_MIMETYPE);
recordInfo.setContentLength(
curi.getRecorder().getRecordedInput().getSize());
recordInfo.setEnforceLength(true);
if (curi.getContentDigest() != null) {
recordInfo.addExtraHeader(HEADER_KEY_PAYLOAD_DIGEST,
curi.getContentDigestSchemeString());
}
if (curi.getServerIP() != null) {
recordInfo.addExtraHeader(HEADER_KEY_IP, curi.getServerIP());
}
// Check for truncated annotation
String value = null;
Collection<String> anno = curi.getAnnotations();
if (anno.contains(TIMER_TRUNC)) {
value = NAMED_FIELD_TRUNCATED_VALUE_TIME;
} else if (anno.contains(LENGTH_TRUNC)) {
value = NAMED_FIELD_TRUNCATED_VALUE_LENGTH;
} else if (anno.contains(HEADER_TRUNC)) {
value = NAMED_FIELD_TRUNCATED_VALUE_HEAD;
}
// TODO: Add annotation for TRUNCATED_VALUE_UNSPECIFIED
if (value != null) {
recordInfo.addExtraHeader(HEADER_KEY_TRUNCATED, value);
}
if (curi.getData().containsKey(A_WARC_RESPONSE_HEADERS)) {
for (Object headerObj: curi.getDataList(A_WARC_RESPONSE_HEADERS)) {
String[] kv = StringUtils.split(((String) headerObj), ":", 2);
recordInfo.addExtraHeader(kv[0].trim(), kv[1].trim());
}
}
ReplayInputStream ris =
curi.getRecorder().getRecordedInput().getReplayInputStream();
recordInfo.setContentStream(ris);
return recordInfo;
| 120
| 676
| 796
|
<methods>public non-sealed void <init>() ,public static java.net.URI generateRecordID() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/warc/MetadataRecordBuilder.java
|
MetadataRecordBuilder
|
buildRecord
|
class MetadataRecordBuilder extends BaseWARCRecordBuilder {
/**
* If you don't want metadata records, take this class out of the chain.
*/
@Override
public boolean shouldBuildRecord(CrawlURI curi) {
String scheme = curi.getUURI().getScheme().toLowerCase();
return scheme.startsWith("http") || "ftp".equals(scheme) || "sftp".equals(scheme);
}
@Override
public WARCRecordInfo buildRecord(CrawlURI curi, URI concurrentTo) throws IOException {<FILL_FUNCTION_BODY>}
}
|
final String timestamp =
ArchiveUtils.getLog14Date(curi.getFetchBeginTime());
WARCRecordInfo recordInfo = new WARCRecordInfo();
recordInfo.setType(WARCRecordType.metadata);
recordInfo.setRecordId(generateRecordID());
if (concurrentTo != null) {
recordInfo.addExtraHeader(HEADER_KEY_CONCURRENT_TO,
"<" + concurrentTo + ">");
}
recordInfo.setUrl(curi.toString());
recordInfo.setCreate14DigitDate(timestamp);
recordInfo.setMimetype(ANVLRecord.MIMETYPE);
recordInfo.setEnforceLength(true);
// Get some metadata from the curi.
// TODO: Get all curi metadata.
// TODO: Use other than ANVL (or rename ANVL as NameValue or use
// RFC822 (commons-httpclient?).
ANVLRecord r = new ANVLRecord();
if (curi.isSeed()) {
r.addLabel("seed");
} else {
if (curi.forceFetch()) {
r.addLabel("force-fetch");
}
if(StringUtils.isNotBlank(curi.getVia().toString())) {
r.addLabelValue("via", curi.getVia().toString());
}
if(StringUtils.isNotBlank(curi.getPathFromSeed())) {
r.addLabelValue("hopsFromSeed", curi.getPathFromSeed());
}
if (curi.containsDataKey(A_SOURCE_TAG)) {
r.addLabelValue("sourceTag",
(String)curi.getData().get(A_SOURCE_TAG));
}
}
long duration = curi.getFetchCompletedTime() - curi.getFetchBeginTime();
if (duration > -1) {
r.addLabelValue("fetchTimeMs", Long.toString(duration));
}
if (curi.getData().containsKey(A_FTP_FETCH_STATUS)) {
r.addLabelValue("ftpFetchStatus", curi.getData().get(A_FTP_FETCH_STATUS).toString());
}
if (curi.getRecorder() != null && curi.getRecorder().getCharset() != null) {
r.addLabelValue("charsetForLinkExtraction", curi.getRecorder().getCharset().name());
}
for (String annotation: curi.getAnnotations()) {
if (annotation.startsWith("usingCharsetIn") || annotation.startsWith("inconsistentCharsetIn")) {
String[] kv = annotation.split(":", 2);
r.addLabelValue(kv[0], kv[1]);
}
}
// Add outlinks though they are effectively useless without anchor text.
Collection<CrawlURI> links = curi.getOutLinks();
if (links != null && links.size() > 0) {
for (CrawlURI link: links) {
r.addLabelValue("outlink", link.getURI()+" "+link.getLastHop()+" "+link.getViaContext());
}
}
// TODO: Other curi fields to write to metadata.
//
// Credentials
//
// fetch-began-time: 1154569278774
// fetch-completed-time: 1154569281816
//
// Annotations.
byte [] b = r.getUTF8Bytes();
recordInfo.setContentStream(new ByteArrayInputStream(b));
recordInfo.setContentLength((long) b.length);
return recordInfo;
| 160
| 977
| 1,137
|
<methods>public non-sealed void <init>() ,public static java.net.URI generateRecordID() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/warc/RevisitRecordBuilder.java
|
RevisitRecordBuilder
|
buildRecord
|
class RevisitRecordBuilder extends BaseWARCRecordBuilder {
@Override
public boolean shouldBuildRecord(CrawlURI curi) {
String scheme = curi.getUURI().getScheme().toLowerCase();
return curi.isRevisit()
&& (scheme.startsWith("http") || scheme.equals("ftp") || scheme.equals("sftp"));
}
@Override
public WARCRecordInfo buildRecord(CrawlURI curi, URI concurrentTo) throws IOException {<FILL_FUNCTION_BODY>}
}
|
final String timestamp =
ArchiveUtils.getLog14Date(curi.getFetchBeginTime());
long revisedLength = 0; // By default, truncate all data
if (curi.getRevisitProfile().getProfileName().equals(PROFILE_REVISIT_IDENTICAL_DIGEST)) {
// Save response from identical digest matches
revisedLength = curi.getRecorder().getRecordedInput().getContentBegin();
revisedLength = revisedLength > 0
? revisedLength
: curi.getRecorder().getRecordedInput().getSize();
}
WARCRecordInfo recordInfo = new WARCRecordInfo();
recordInfo.setRecordId(generateRecordID());
if (concurrentTo != null) {
recordInfo.addExtraHeader(HEADER_KEY_CONCURRENT_TO,
'<' + concurrentTo.toString() + '>');
}
recordInfo.setType(WARCRecordType.revisit);
recordInfo.setUrl(curi.toString());
recordInfo.setCreate14DigitDate(timestamp);
String scheme = curi.getUURI().getScheme().toLowerCase();
if (scheme.startsWith("http")) {
recordInfo.setMimetype(HTTP_RESPONSE_MIMETYPE);
}
recordInfo.setContentLength(revisedLength);
recordInfo.setEnforceLength(false);
RevisitProfile revisitProfile = curi.getRevisitProfile();
recordInfo.addExtraHeader(HEADER_KEY_PROFILE,
revisitProfile.getProfileName());
recordInfo.addExtraHeader(HEADER_KEY_TRUNCATED,
NAMED_FIELD_TRUNCATED_VALUE_LENGTH);
Map<String, String> revisitHeaders = revisitProfile.getWarcHeaders();
for (Entry<String, String> entry: revisitHeaders.entrySet()) {
recordInfo.addExtraHeader(entry.getKey(), entry.getValue());
}
ReplayInputStream ris = curi.getRecorder().getRecordedInput().getReplayInputStream();
recordInfo.setContentStream(ris);
return recordInfo;
| 145
| 558
| 703
|
<methods>public non-sealed void <init>() ,public static java.net.URI generateRecordID() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/warc/WhoisResponseRecordBuilder.java
|
WhoisResponseRecordBuilder
|
buildRecord
|
class WhoisResponseRecordBuilder extends BaseWARCRecordBuilder {
@Override
public boolean shouldBuildRecord(CrawlURI curi) {
return "whois".equals(curi.getUURI().getScheme().toLowerCase());
}
@Override
public WARCRecordInfo buildRecord(CrawlURI curi, URI concurrentTo) throws IOException {<FILL_FUNCTION_BODY>}
}
|
final String timestamp =
ArchiveUtils.getLog14Date(curi.getFetchBeginTime());
WARCRecordInfo recordInfo = new WARCRecordInfo();
recordInfo.setRecordId(generateRecordID());
if (concurrentTo != null) {
recordInfo.addExtraHeader(HEADER_KEY_CONCURRENT_TO,
'<' + concurrentTo.toString() + '>');
}
recordInfo.setType(WARCRecordType.response);
recordInfo.setUrl(curi.toString());
recordInfo.setCreate14DigitDate(timestamp);
recordInfo.setMimetype(curi.getContentType());
recordInfo.setContentLength(curi.getRecorder().getRecordedInput().getSize());
recordInfo.setEnforceLength(true);
if (curi.getServerIP() != null) {
recordInfo.addExtraHeader(HEADER_KEY_IP, curi.getServerIP());
}
ReplayInputStream ris =
curi.getRecorder().getRecordedInput().getReplayInputStream();
recordInfo.setContentStream(ris);
return recordInfo;
| 109
| 300
| 409
|
<methods>public non-sealed void <init>() ,public static java.net.URI generateRecordID() <variables>
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/writer/ARCWriterProcessor.java
|
ARCWriterProcessor
|
innerProcessResult
|
class ARCWriterProcessor extends WriterPoolProcessor {
final static private String METADATA_TEMPLATE = readMetadataTemplate();
@SuppressWarnings("unused")
private static final long serialVersionUID = 3L;
private static final Logger logger =
Logger.getLogger(ARCWriterProcessor.class.getName());
public long getDefaultMaxFileSize() {
return 100000000L; // 100 SI mega-bytes (10^8 bytes)
}
public List<ConfigPath> getDefaultStorePaths() {
List<ConfigPath> paths = new ArrayList<ConfigPath>();
paths.add(new ConfigPath("arcs default store path", "arcs"));
return paths;
}
private transient List<String> cachedMetadata;
public ARCWriterProcessor() {
}
@Override
protected void setupPool(AtomicInteger serialNo) {
setPool(new ARCWriterPool(serialNo, this, getPoolMaxActive(), getMaxWaitForIdleMs()));
}
/**
* Writes a CrawlURI and its associated data to store file.
*
* Currently this method understands the following uri types: dns, http,
* and https.
*
* @param curi CrawlURI to process.
*/
protected ProcessResult innerProcessResult(CrawlURI curi) {<FILL_FUNCTION_BODY>}
protected ProcessResult write(CrawlURI curi, long recordLength,
InputStream in, String ip)
throws IOException {
WriterPoolMember writer = getPool().borrowFile();
long position = writer.getPosition();
// See if we need to open a new file because we've exceeded maxBytes.
// Call to checkFileSize will open new file if we're at maximum for
// current file.
writer.checkSize();
if (writer.getPosition() != position) {
// We just closed the file because it was larger than maxBytes.
// Add to the totalBytesWritten the size of the first record
// in the file, if any.
addTotalBytesWritten(writer.getPosition() - position);
position = writer.getPosition();
}
ARCWriter w = (ARCWriter)writer;
try {
if (in instanceof ReplayInputStream) {
w.write(curi.toString(), curi.getContentType(),
ip, curi.getFetchBeginTime(),
recordLength, (ReplayInputStream)in);
} else {
w.write(curi.toString(), curi.getContentType(),
ip, curi.getFetchBeginTime(),
recordLength, in);
}
} catch (IOException e) {
// Invalidate this file (It gets a '.invalid' suffix).
getPool().invalidateFile(writer);
// Set the writer to null otherwise the pool accounting
// of how many active writers gets skewed if we subsequently
// do a returnWriter call on this object in the finally block.
writer = null;
throw e;
} finally {
if (writer != null) {
addTotalBytesWritten(writer.getPosition() - position);
getPool().returnFile(writer);
String filename = writer.getFile().getName();
if (filename.endsWith(ArchiveFileConstants.OCCUPIED_SUFFIX)) {
filename = filename.substring(0, filename.length() - ArchiveFileConstants.OCCUPIED_SUFFIX.length());
}
curi.addExtraInfo("arcFilename", filename);
Map<String,Object>[] history = curi.getFetchHistory();
if (history != null && history[0] != null) {
history[0].put(A_WRITE_TAG, filename);
}
}
}
return checkBytesWritten();
}
public List<String> getMetadata() {
if (METADATA_TEMPLATE == null) {
return null;
}
if (cachedMetadata != null) {
return cachedMetadata;
}
String meta = METADATA_TEMPLATE;
meta = replace(meta, "${VERSION}", ArchiveUtils.VERSION);
meta = replace(meta, "${HOST}", getHostName());
meta = replace(meta, "${IP}", getHostAddress());
if (meta != null) {
meta = replace(meta, "${JOB_NAME}", getMetadataProvider().getJobName());
meta = replace(meta, "${DESCRIPTION}", getMetadataProvider().getDescription());
meta = replace(meta, "${OPERATOR}", getMetadataProvider().getOperator());
// TODO: fix this to match job-start-date (from UI or operator setting)
// in the meantime, don't include a slightly-off date
// meta = replace(meta, "${DATE}", GMT());
meta = replace(meta, "${USER_AGENT}", getMetadataProvider().getUserAgent());
meta = replace(meta, "${FROM}", getMetadataProvider().getOperatorFrom());
meta = replace(meta, "${ROBOTS}", getMetadataProvider().getRobotsPolicyName());
}
this.cachedMetadata = Collections.singletonList(meta);
return this.cachedMetadata;
// ${VERSION}
// ${HOST}
// ${IP}
// ${JOB_NAME}
// ${DESCRIPTION}
// ${OPERATOR}
// ${DATE}
// ${USER_AGENT}
// ${FROM}
// ${ROBOTS}
}
private static String replace(String meta, String find, String replace) {
replace = StringUtils.defaultString(replace);
replace = StringEscapeUtils.escapeXml(replace);
return meta.replace(find, replace);
}
private static String getHostName() {
try {
return InetAddress.getLocalHost().getCanonicalHostName();
} catch (UnknownHostException e) {
logger.log(Level.SEVERE, "Could not get local host name.", e);
return "localhost";
}
}
private static String getHostAddress() {
try {
return InetAddress.getLocalHost().getHostAddress();
} catch (UnknownHostException e) {
logger.log(Level.SEVERE, "Could not get local host address.", e);
return "localhost";
}
}
private static String readMetadataTemplate() {
InputStream input = ARCWriterProcessor.class.getResourceAsStream(
"arc_metadata_template.xml");
if (input == null) {
logger.severe("No metadata template.");
return null;
}
try {
return IOUtils.toString(input);
} catch (IOException e) {
throw new IllegalStateException(e);
} finally {
IOUtils.closeQuietly(input);
}
}
}
|
long recordLength = getRecordedSize(curi);
ReplayInputStream ris = null;
try {
if (shouldWrite(curi)) {
ris = curi.getRecorder().getRecordedInput()
.getReplayInputStream();
return write(curi, recordLength, ris, getHostAddress(curi));
} else {
logger.info("does not write " + curi.toString());
copyForwardWriteTagIfDupe(curi);
}
} catch (IOException e) {
curi.getNonFatalFailures().add(e);
logger.log(Level.SEVERE, "Failed write of Record: " +
curi.toString(), e);
} finally {
IOUtils.closeQuietly(ris);
}
return ProcessResult.PROCEED;
| 1,784
| 210
| 1,994
|
<methods>public void <init>() ,public List<java.io.File> calcOutputDirs() ,public void doCheckpoint(org.archive.checkpointing.Checkpoint) throws java.io.IOException,public boolean getCompress() ,public org.archive.spring.ConfigPath getDirectory() ,public boolean getFrequentFlushes() ,public long getMaxFileSizeBytes() ,public long getMaxTotalBytesToWrite() ,public int getMaxWaitForIdleMs() ,public abstract List<java.lang.String> getMetadata() ,public org.archive.modules.CrawlMetadata getMetadataProvider() ,public int getPoolMaxActive() ,public java.lang.String getPrefix() ,public org.archive.modules.net.ServerCache getServerCache() ,public boolean getSkipIdenticalDigests() ,public boolean getStartNewFilesOnCheckpoint() ,public List<org.archive.spring.ConfigPath> getStorePaths() ,public java.lang.String getTemplate() ,public int getWriteBufferSize() ,public void setCompress(boolean) ,public void setDirectory(org.archive.spring.ConfigPath) ,public void setFrequentFlushes(boolean) ,public void setMaxFileSizeBytes(long) ,public void setMaxTotalBytesToWrite(long) ,public void setMaxWaitForIdleMs(int) ,public void setMetadataProvider(org.archive.modules.CrawlMetadata) ,public void setPoolMaxActive(int) ,public void setPrefix(java.lang.String) ,public void setServerCache(org.archive.modules.net.ServerCache) ,public void setSkipIdenticalDigests(boolean) ,public void setStartNewFilesOnCheckpoint(boolean) ,public void setStorePaths(List<org.archive.spring.ConfigPath>) ,public void setTemplate(java.lang.String) ,public void setWriteBufferSize(int) ,public synchronized void start() ,public void stop() <variables>protected static final java.lang.String ANNOTATION_UNWRITTEN,protected boolean compress,protected org.archive.spring.ConfigPath directory,protected boolean frequentFlushes,private static final java.util.logging.Logger logger,protected long maxFileSizeBytes,protected long maxTotalBytesToWrite,protected int maxWaitForIdleMs,private transient WriterPool pool,protected int 
poolMaxActive,protected java.lang.String prefix,private java.util.concurrent.atomic.AtomicInteger serial,private static final long serialVersionUID,protected transient org.archive.modules.net.ServerCache serverCache,protected boolean skipIdenticalDigests,protected boolean startNewFilesOnCheckpoint,protected List<org.archive.spring.ConfigPath> storePaths,protected java.lang.String template,private java.util.concurrent.atomic.AtomicLong totalBytesWritten,protected int writeBufferSize
|
internetarchive_heritrix3
|
heritrix3/modules/src/main/java/org/archive/modules/writer/WARCWriterChainProcessor.java
|
WARCWriterChainProcessor
|
writeRecords
|
class WARCWriterChainProcessor extends BaseWARCWriterProcessor implements HasKeyedProperties {
private static final Logger logger =
Logger.getLogger(WARCWriterChainProcessor.class.getName());
{
setChain(Arrays.asList(
new DnsResponseRecordBuilder(),
new HttpResponseRecordBuilder(),
new WhoisResponseRecordBuilder(),
new FtpControlConversationRecordBuilder(),
new FtpResponseRecordBuilder(),
new RevisitRecordBuilder(),
new HttpRequestRecordBuilder(),
new MetadataRecordBuilder()));
}
@SuppressWarnings("unchecked")
public List<? extends WARCRecordBuilder> getChain() {
return (List<WARCRecordBuilder>) kp.get("chain");
}
public void setChain(List<? extends WARCRecordBuilder> chain) {
kp.put("chain", chain);
}
@Override
protected boolean shouldWrite(CrawlURI curi) {
if (getSkipIdenticalDigests()
&& IdenticalDigestDecideRule.hasIdenticalDigest(curi)) {
curi.getAnnotations().add(ANNOTATION_UNWRITTEN
+ ":identicalDigest");
return false;
}
// WARCWriterProcessor has seemingly unnecessarily complicated logic
if (curi.getFetchStatus() <= 0) {
curi.getAnnotations().add(ANNOTATION_UNWRITTEN + ":status");
return false;
}
return true;
}
@Override
protected ProcessResult innerProcessResult(CrawlURI curi) {
try {
if (shouldWrite(curi)) {
return write(curi);
} else {
copyForwardWriteTagIfDupe(curi);
}
} catch (IOException e) {
curi.getNonFatalFailures().add(e);
logger.log(Level.SEVERE, "Failed write of Records: " +
curi.toString(), e);
}
return ProcessResult.PROCEED;
}
protected ProcessResult write(final CrawlURI curi)
throws IOException {
WARCWriter writer = (WARCWriter) getPool().borrowFile();
// Reset writer temp stats so they reflect only this set of records.
writer.resetTmpStats();
writer.resetTmpRecordLog();
long position = writer.getPosition();
try {
// Roll over to new warc file if we've exceeded maxBytes.
writer.checkSize();
if (writer.getPosition() != position) {
// We rolled over to a new warc and wrote a warcinfo record.
// Tally stats and reset temp stats, to avoid including warcinfo
// record in stats for current url.
addTotalBytesWritten(writer.getPosition() - position);
addStats(writer.getTmpStats());
writer.resetTmpStats();
writer.resetTmpRecordLog();
position = writer.getPosition();
}
writeRecords(curi, writer);
} catch (IOException e) {
// Invalidate this file (It gets a '.invalid' suffix).
getPool().invalidateFile(writer);
// Set the writer to null otherwise the pool accounting
// of how many active writers gets skewed if we subsequently
// do a returnWriter call on this object in the finally block.
writer = null;
throw e;
} finally {
if (writer != null) {
updateMetadataAfterWrite(curi, writer, position);
getPool().returnFile(writer);
}
}
// XXX this looks wrong, check should happen *before* writing the
// record, the way checkBytesWritten() currently works
return checkBytesWritten();
}
protected void writeRecords(CrawlURI curi, WARCWriter writer) throws IOException {<FILL_FUNCTION_BODY>}
@Override
protected JSONObject toCheckpointJson() throws JSONException {
JSONObject json = super.toCheckpointJson();
json.put("urlsWritten", urlsWritten);
json.put("stats", stats);
return json;
}
@Override
protected void fromCheckpointJson(JSONObject json) throws JSONException {
super.fromCheckpointJson(json);
// conditionals below are for backward compatibility with old checkpoints
if (json.has("urlsWritten")) {
urlsWritten.set(json.getLong("urlsWritten"));
}
if (json.has("stats")) {
HashMap<String, Map<String, Long>> cpStats = new HashMap<String, Map<String, Long>>();
JSONObject jsonStats = json.getJSONObject("stats");
if (JSONObject.getNames(jsonStats) != null) {
for (String key1: JSONObject.getNames(jsonStats)) {
JSONObject jsonSubstats = jsonStats.getJSONObject(key1);
if (!cpStats.containsKey(key1)) {
cpStats.put(key1, new HashMap<String, Long>());
}
Map<String, Long> substats = cpStats.get(key1);
for (String key2: JSONObject.getNames(jsonSubstats)) {
long value = jsonSubstats.getLong(key2);
substats.put(key2, value);
}
}
addStats(cpStats);
}
}
}
}
|
URI concurrentTo = null;
for (WARCRecordBuilder recordBuilder: getChain()) {
if (recordBuilder.shouldBuildRecord(curi)) {
WARCRecordInfo record = recordBuilder.buildRecord(curi, concurrentTo);
if (record != null) {
writer.writeRecord(record);
InputStream is = null;
try {
is = record.getContentStream();
is.close();
}
catch (Exception e){
logger.log(Level.WARNING, "problem closing Warc Record Content Stream " + e);
}
finally {
IOUtils.closeQuietly(record.getContentStream()); //Closing one way or the other seems to leave some file handles open. Calling close() and using closeQuietly() handles both FileStreams and FileChannels
}
if (concurrentTo == null) {
concurrentTo = record.getRecordId();
}
}
}
}
| 1,385
| 242
| 1,627
|
<methods>public non-sealed void <init>() ,public long getDefaultMaxFileSize() ,public List<org.archive.spring.ConfigPath> getDefaultStorePaths() ,public List<java.lang.String> getMetadata() ,public RecordIDGenerator getRecordIDGenerator() ,public ConcurrentMap<java.lang.String,ConcurrentMap<java.lang.String,java.util.concurrent.atomic.AtomicLong>> getStats() ,public java.lang.String report() ,public void setRecordIDGenerator(RecordIDGenerator) <variables>private transient List<java.lang.String> cachedMetadata,protected RecordIDGenerator generator,private static final java.util.logging.Logger logger,protected ConcurrentMap<java.lang.String,ConcurrentMap<java.lang.String,java.util.concurrent.atomic.AtomicLong>> stats,protected java.util.concurrent.atomic.AtomicLong urlsWritten
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/AlerterWorkerPool.java
|
AlerterWorkerPool
|
initWorkExecutor
|
class AlerterWorkerPool {
private ThreadPoolExecutor workerExecutor;
public AlerterWorkerPool() {
initWorkExecutor();
}
private void initWorkExecutor() {<FILL_FUNCTION_BODY>}
/**
* Run the alerter task
* @param runnable task
* @throws RejectedExecutionException when The thread pool is full of
*/
public void executeJob(Runnable runnable) throws RejectedExecutionException {
workerExecutor.execute(runnable);
}
}
|
ThreadFactory threadFactory = new ThreadFactoryBuilder()
.setUncaughtExceptionHandler((thread, throwable) -> {
log.error("workerExecutor has uncaughtException.");
log.error(throwable.getMessage(), throwable);
})
.setDaemon(true)
.setNameFormat("alerter-worker-%d")
.build();
workerExecutor = new ThreadPoolExecutor(6,
10,
10,
TimeUnit.SECONDS,
new SynchronousQueue<>(),
threadFactory,
new ThreadPoolExecutor.AbortPolicy());
| 139
| 149
| 288
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/controller/AlertConvergeController.java
|
AlertConvergeController
|
getAlertConverge
|
class AlertConvergeController {
@Autowired
private AlertConvergeService alertConvergeService;
@PostMapping
@Operation(summary = "New Alarm Converge", description = "Added an alarm Converge")
public ResponseEntity<Message<Void>> addNewAlertConverge(@Valid @RequestBody AlertConverge alertConverge) {
alertConvergeService.validate(alertConverge, false);
alertConvergeService.addAlertConverge(alertConverge);
return ResponseEntity.ok(Message.success("Add success"));
}
@PutMapping
@Operation(summary = "Modifying an Alarm Converge", description = "Modify an existing alarm Converge")
public ResponseEntity<Message<Void>> modifyAlertConverge(@Valid @RequestBody AlertConverge alertConverge) {
alertConvergeService.validate(alertConverge, true);
alertConvergeService.modifyAlertConverge(alertConverge);
return ResponseEntity.ok(Message.success("Modify success"));
}
@GetMapping(path = "/{id}")
@Operation(summary = "Querying Alarm Converge",
description = "You can obtain alarm Converge information based on the alarm Converge ID")
public ResponseEntity<Message<AlertConverge>> getAlertConverge(
@Parameter(description = "Alarm Converge ID", example = "6565463543") @PathVariable("id") long id) {<FILL_FUNCTION_BODY>}
}
|
AlertConverge alertConverge = alertConvergeService.getAlertConverge(id);
if (alertConverge == null) {
return ResponseEntity.ok(Message.fail(MONITOR_NOT_EXIST_CODE, "AlertConverge not exist."));
} else {
return ResponseEntity.ok(Message.success(alertConverge));
}
| 403
| 100
| 503
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/controller/AlertConvergesController.java
|
AlertConvergesController
|
getAlertConverges
|
class AlertConvergesController {
@Autowired
private AlertConvergeService alertConvergeService;
@GetMapping
@Operation(summary = "Query the alarm converge list",
description = "You can obtain the list of alarm converge by querying filter items")
public ResponseEntity<Message<Page<AlertConverge>>> getAlertConverges(
@Parameter(description = "Alarm Converge ID", example = "6565463543") @RequestParam(required = false) List<Long> ids,
@Parameter(description = "Search Name", example = "x") @RequestParam(required = false) String search,
@Parameter(description = "Sort field, default id", example = "id") @RequestParam(defaultValue = "id") String sort,
@Parameter(description = "Sort mode: asc: ascending, desc: descending", example = "desc") @RequestParam(defaultValue = "desc") String order,
@Parameter(description = "List current page", example = "0") @RequestParam(defaultValue = "0") int pageIndex,
@Parameter(description = "Number of list pages", example = "8") @RequestParam(defaultValue = "8") int pageSize) {<FILL_FUNCTION_BODY>}
@DeleteMapping
@Operation(summary = "Delete alarm converge in batches",
description = "Delete alarm converge in batches based on the alarm converge ID list")
public ResponseEntity<Message<Void>> deleteAlertDefines(
@Parameter(description = "Alarm Converge IDs", example = "6565463543") @RequestParam(required = false) List<Long> ids
) {
if (ids != null && !ids.isEmpty()) {
alertConvergeService.deleteAlertConverges(new HashSet<>(ids));
}
return ResponseEntity.ok(Message.success());
}
}
|
Specification<AlertConverge> specification = (root, query, criteriaBuilder) -> {
List<Predicate> andList = new ArrayList<>();
if (ids != null && !ids.isEmpty()) {
CriteriaBuilder.In<Long> inPredicate = criteriaBuilder.in(root.get("id"));
for (long id : ids) {
inPredicate.value(id);
}
andList.add(inPredicate);
}
if (StringUtils.hasText(search)) {
Predicate predicate = criteriaBuilder.or(
criteriaBuilder.like(
criteriaBuilder.lower(root.get("name")),
"%" + search.toLowerCase() + "%"
)
);
andList.add(predicate);
}
Predicate[] predicates = new Predicate[andList.size()];
return criteriaBuilder.and(andList.toArray(predicates));
};
Sort sortExp = Sort.by(new Sort.Order(Sort.Direction.fromString(order), sort));
PageRequest pageRequest = PageRequest.of(pageIndex, pageSize, sortExp);
Page<AlertConverge> alertConvergePage = alertConvergeService.getAlertConverges(specification, pageRequest);
return ResponseEntity.ok(Message.success(alertConvergePage));
| 479
| 338
| 817
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/controller/AlertDefineController.java
|
AlertDefineController
|
getAlertDefineMonitorsBind
|
class AlertDefineController {
@Autowired
private AlertDefineService alertDefineService;
@PostMapping
@Operation(summary = "New Alarm Definition", description = "Added an alarm definition")
public ResponseEntity<Message<Void>> addNewAlertDefine(@Valid @RequestBody AlertDefine alertDefine) {
// Verify request data
alertDefineService.validate(alertDefine, false);
alertDefineService.addAlertDefine(alertDefine);
return ResponseEntity.ok(Message.success("Add success"));
}
@PutMapping
@Operation(summary = "Modifying an Alarm Definition", description = "Modify an existing alarm definition")
public ResponseEntity<Message<Void>> modifyAlertDefine(@Valid @RequestBody AlertDefine alertDefine) {
// Verify request data
alertDefineService.validate(alertDefine, true);
alertDefineService.modifyAlertDefine(alertDefine);
return ResponseEntity.ok(Message.success("Modify success"));
}
@GetMapping(path = "/{id}")
@Operation(summary = "Querying Alarm Definitions",
description = "You can obtain alarm definition information based on the alarm definition ID")
public ResponseEntity<Message<AlertDefine>> getAlertDefine(
@Parameter(description = "Alarm Definition ID", example = "6565463543") @PathVariable("id") long id) {
// Obtaining Monitoring Information
AlertDefine alertDefine = alertDefineService.getAlertDefine(id);
if (alertDefine == null) {
return ResponseEntity.ok(Message.fail(MONITOR_NOT_EXIST_CODE, "AlertDefine not exist."));
} else {
return ResponseEntity.ok(Message.success(alertDefine));
}
}
@DeleteMapping(path = "/{id}")
@Operation(summary = "Deleting an Alarm Definition",
description = "If the alarm definition does not exist, the alarm is deleted successfully")
public ResponseEntity<Message<Void>> deleteAlertDefine(
@Parameter(description = "Alarm Definition ID", example = "6565463543") @PathVariable("id") long id) {
// If the alarm definition does not exist or is deleted successfully, the deletion succeeds
alertDefineService.deleteAlertDefine(id);
return ResponseEntity.ok(Message.success("Delete success"));
}
@PostMapping(path = "/{alertDefineId}/monitors")
@Operation(summary = "Application alarm definition is associated with monitoring",
description = "Applies the association between specified alarm definitions and monitoring")
public ResponseEntity<Message<Void>> applyAlertDefineMonitorsBind(
@Parameter(description = "Alarm Definition ID", example = "6565463543") @PathVariable("alertDefineId") long alertDefineId,
@RequestBody List<AlertDefineMonitorBind> alertDefineMonitorBinds) {
alertDefineService.applyBindAlertDefineMonitors(alertDefineId, alertDefineMonitorBinds);
return ResponseEntity.ok(Message.success("Apply success"));
}
@GetMapping(path = "/{alertDefineId}/monitors")
@Operation(summary = "Application alarm definition is associated with monitoring",
description = "Applies the association between specified alarm definitions and monitoring")
public ResponseEntity<Message<List<AlertDefineMonitorBind>>> getAlertDefineMonitorsBind(
@Parameter(description = "Alarm Definition ID", example = "6565463543") @PathVariable("alertDefineId") long alertDefineId) {<FILL_FUNCTION_BODY>}
}
|
List<AlertDefineMonitorBind> defineBinds = alertDefineService.getBindAlertDefineMonitors(alertDefineId);
defineBinds = defineBinds.stream().filter(item -> item.getMonitor() != null).collect(Collectors.toList());
return ResponseEntity.ok(Message.success(defineBinds));
| 940
| 87
| 1,027
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/controller/AlertDefinesController.java
|
AlertDefinesController
|
getAlertDefines
|
class AlertDefinesController {
@Autowired
private AlertDefineService alertDefineService;
@GetMapping
@Operation(summary = "Example Query the alarm definition list",
description = "You can obtain the list of alarm definitions by querying filter items")
public ResponseEntity<Message<Page<AlertDefine>>> getAlertDefines(
@Parameter(description = "Alarm Definition ID", example = "6565463543") @RequestParam(required = false) List<Long> ids,
@Parameter(description = "Search-Target Expr Template", example = "x") @RequestParam(required = false) String search,
@Parameter(description = "Alarm Definition Severity", example = "6565463543") @RequestParam(required = false) Byte priority,
@Parameter(description = "Sort field, default id", example = "id") @RequestParam(defaultValue = "id") String sort,
@Parameter(description = "Sort mode: asc: ascending, desc: descending", example = "desc") @RequestParam(defaultValue = "desc") String order,
@Parameter(description = "List current page", example = "0") @RequestParam(defaultValue = "0") int pageIndex,
@Parameter(description = "Number of list pages", example = "8") @RequestParam(defaultValue = "8") int pageSize) {<FILL_FUNCTION_BODY>}
@DeleteMapping
@Operation(summary = "Delete alarm definitions in batches",
description = "Delete alarm definitions in batches based on the alarm definition ID list")
public ResponseEntity<Message<Void>> deleteAlertDefines(
@Parameter(description = "Alarm Definition IDs", example = "6565463543") @RequestParam(required = false) List<Long> ids
) {
if (ids != null && !ids.isEmpty()) {
alertDefineService.deleteAlertDefines(new HashSet<>(ids));
}
return ResponseEntity.ok(Message.success());
}
@GetMapping("/export")
@Operation(summary = "export alertDefine config", description = "export alarm definition configuration")
public void export(
@Parameter(description = "AlertDefine ID List", example = "656937901") @RequestParam List<Long> ids,
@Parameter(description = "Export Type:JSON,EXCEL,YAML") @RequestParam(defaultValue = "JSON") String type,
HttpServletResponse res) throws Exception {
alertDefineService.export(ids, type, res);
}
@PostMapping("/import")
@Operation(summary = "import alertDefine config", description = "import alarm definition configuration")
public ResponseEntity<Message<Void>> importDefines(MultipartFile file) throws Exception {
alertDefineService.importConfig(file);
return ResponseEntity.ok(Message.success("Import success"));
}
}
|
Specification<AlertDefine> specification = (root, query, criteriaBuilder) -> {
List<Predicate> andList = new ArrayList<>();
if (ids != null && !ids.isEmpty()) {
CriteriaBuilder.In<Long> inPredicate = criteriaBuilder.in(root.get("id"));
for (long id : ids) {
inPredicate.value(id);
}
andList.add(inPredicate);
}
if (StringUtils.hasText(search)) {
Predicate predicate = criteriaBuilder.or(
criteriaBuilder.like(
criteriaBuilder.lower(root.get("app")),
"%" + search.toLowerCase() + "%"
),
criteriaBuilder.like(
criteriaBuilder.lower(root.get("metric")),
"%" + search.toLowerCase() + "%"
),
criteriaBuilder.like(
criteriaBuilder.lower(root.get("field")),
"%" + search.toLowerCase() + "%"
),
criteriaBuilder.like(
criteriaBuilder.lower(root.get("expr")),
"%" + search.toLowerCase() + "%"
),
criteriaBuilder.like(
criteriaBuilder.lower(root.get("template")),
"%" + search.toLowerCase() + "%"
)
);
andList.add(predicate);
}
if (priority != null) {
Predicate predicate = criteriaBuilder.equal(root.get("priority"), priority);
andList.add(predicate);
}
Predicate[] predicates = new Predicate[andList.size()];
return criteriaBuilder.and(andList.toArray(predicates));
};
Sort sortExp = Sort.by(new Sort.Order(Sort.Direction.fromString(order), sort));
PageRequest pageRequest = PageRequest.of(pageIndex, pageSize, sortExp);
Page<AlertDefine> alertDefinePage = alertDefineService.getAlertDefines(specification, pageRequest);
return ResponseEntity.ok(Message.success(alertDefinePage));
| 728
| 528
| 1,256
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/controller/AlertReportController.java
|
AlertReportController
|
addNewAlertReportFromCloud
|
class AlertReportController {
@Autowired
private AlertService alertService;
@PostMapping("/{cloud}")
@Operation(summary = "Interface for reporting external alarm information of cloud service")
public ResponseEntity<Message<Void>> addNewAlertReportFromCloud(@PathVariable("cloud") String cloudServiceName,
@RequestBody String alertReport) {<FILL_FUNCTION_BODY>}
@PostMapping
@Operation(summary = "Interface for reporting external and general alarm information",
description = "The interface is used to report external and general alarm information")
public ResponseEntity<Message<Void>> addNewAlertReport(@RequestBody GeneralCloudAlertReport alertReport) {
alertReport.refreshAlertTime();
alertService.addNewAlertReport(alertReport);
return ResponseEntity.ok(Message.success("Add report success"));
}
}
|
CloudServiceAlarmInformationEnum cloudService = CloudServiceAlarmInformationEnum
.getEnumFromCloudServiceName(cloudServiceName);
AlertReport alert = null;
if (cloudService != null) {
try {
CloudAlertReportAbstract cloudAlertReport = JsonUtil
.fromJson(alertReport, cloudService.getCloudServiceAlarmInformationEntity());
assert cloudAlertReport != null;
alert = AlertReport.builder()
.content(cloudAlertReport.getContent())
.alertName(cloudAlertReport.getAlertName())
.alertTime(cloudAlertReport.getAlertTime())
.alertDuration(cloudAlertReport.getAlertDuration())
.priority(cloudAlertReport.getPriority())
.reportType(cloudAlertReport.getReportType())
.labels(cloudAlertReport.getLabels())
.annotations(cloudAlertReport.getAnnotations())
.build();
} catch (Exception e) {
log.error("[alert report] parse cloud service alarm content failed! cloud service: {} conrent: {}",
cloudService.name(), alertReport);
}
} else {
alert = AlertReport.builder()
.content("error do not has cloud service api")
.alertName("/api/alerts/report/" + cloudServiceName)
.alertTime(new Date().getTime())
.priority(1)
.reportType(1)
.build();
}
Optional.ofNullable(alert).ifPresent(alertReportPresent ->
alertService.addNewAlertReport(alertReportPresent));
return ResponseEntity.ok(Message.success("Add report success"));
| 219
| 416
| 635
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/controller/AlertSilenceController.java
|
AlertSilenceController
|
getAlertSilence
|
class AlertSilenceController {
@Autowired
private AlertSilenceService alertSilenceService;
@PostMapping
@Operation(summary = "New Alarm Silence", description = "Added an alarm Silence")
public ResponseEntity<Message<Void>> addNewAlertSilence(@Valid @RequestBody AlertSilence alertSilence) {
alertSilenceService.validate(alertSilence, false);
alertSilenceService.addAlertSilence(alertSilence);
return ResponseEntity.ok(Message.success("Add success"));
}
@PutMapping
@Operation(summary = "Modifying an Alarm Silence", description = "Modify an existing alarm Silence")
public ResponseEntity<Message<Void>> modifyAlertSilence(@Valid @RequestBody AlertSilence alertSilence) {
alertSilenceService.validate(alertSilence, true);
alertSilenceService.modifyAlertSilence(alertSilence);
return ResponseEntity.ok(Message.success("Modify success"));
}
@GetMapping(path = "/{id}")
@Operation(summary = "Querying Alarm Silence",
description = "You can obtain alarm Silence information based on the alarm Silence ID")
public ResponseEntity<Message<AlertSilence>> getAlertSilence(
@Parameter(description = "Alarm Silence ID", example = "6565463543") @PathVariable("id") long id) {<FILL_FUNCTION_BODY>}
}
|
AlertSilence alertSilence = alertSilenceService.getAlertSilence(id);
if (alertSilence == null) {
return ResponseEntity.ok(Message.fail(MONITOR_NOT_EXIST_CODE, "AlertSilence not exist."));
} else {
return ResponseEntity.ok(Message.success(alertSilence));
}
| 374
| 93
| 467
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/controller/AlertSilencesController.java
|
AlertSilencesController
|
deleteAlertDefines
|
class AlertSilencesController {
@Autowired
private AlertSilenceService alertSilenceService;
@GetMapping
@Operation(summary = "Query the alarm silence list",
description = "You can obtain the list of alarm silence by querying filter items")
public ResponseEntity<Message<Page<AlertSilence>>> getAlertSilences(
@Parameter(description = "Alarm Silence ID", example = "6565463543") @RequestParam(required = false) List<Long> ids,
@Parameter(description = "Search Name", example = "x") @RequestParam(required = false) String search,
@Parameter(description = "Sort field, default id", example = "id") @RequestParam(defaultValue = "id") String sort,
@Parameter(description = "Sort mode: asc: ascending, desc: descending", example = "desc") @RequestParam(defaultValue = "desc") String order,
@Parameter(description = "List current page", example = "0") @RequestParam(defaultValue = "0") int pageIndex,
@Parameter(description = "Number of list pages", example = "8") @RequestParam(defaultValue = "8") int pageSize) {
Specification<AlertSilence> specification = (root, query, criteriaBuilder) -> {
List<Predicate> andList = new ArrayList<>();
if (ids != null && !ids.isEmpty()) {
CriteriaBuilder.In<Long> inPredicate= criteriaBuilder.in(root.get("id"));
for (long id : ids) {
inPredicate.value(id);
}
andList.add(inPredicate);
}
if (StringUtils.hasText(search)) {
Predicate predicate = criteriaBuilder.or(
criteriaBuilder.like(
criteriaBuilder.lower(root.get("name")),
"%" + search.toLowerCase() + "%"
)
);
andList.add(predicate);
}
Predicate[] predicates = new Predicate[andList.size()];
return criteriaBuilder.and(andList.toArray(predicates));
};
Sort sortExp = Sort.by(new Sort.Order(Sort.Direction.fromString(order), sort));
PageRequest pageRequest = PageRequest.of(pageIndex, pageSize, sortExp);
Page<AlertSilence> alertSilencePage = alertSilenceService.getAlertSilences(specification, pageRequest);
Message<Page<AlertSilence>> message = Message.success(alertSilencePage);
return ResponseEntity.ok(message);
}
@DeleteMapping
@Operation(summary = "Delete alarm silence in batches",
description = "Delete alarm silence in batches based on the alarm silence ID list")
public ResponseEntity<Message<Void>> deleteAlertDefines(
@Parameter(description = "Alarm Silence IDs", example = "6565463543") @RequestParam(required = false) List<Long> ids
) {<FILL_FUNCTION_BODY>}
}
|
if (ids != null && !ids.isEmpty()) {
alertSilenceService.deleteAlertSilences(new HashSet<>(ids));
}
Message<Void> message = Message.success();
return ResponseEntity.ok(message);
| 759
| 64
| 823
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/controller/AlertsController.java
|
AlertsController
|
getAlerts
|
class AlertsController {
@Autowired
private AlertService alertService;
@GetMapping
@Operation(summary = "Get a list of alarm information based on query filter items", description = "according to the query filter items to obtain a list of alarm information")
public ResponseEntity<Message<Page<Alert>>> getAlerts(
@Parameter(description = "Alarm ID List", example = "6565466456") @RequestParam(required = false) List<Long> ids,
@Parameter(description = "Alarm monitor object ID", example = "6565463543") @RequestParam(required = false) Long monitorId,
@Parameter(description = "Alarm level", example = "6565463543") @RequestParam(required = false) Byte priority,
@Parameter(description = "Alarm Status", example = "6565463543") @RequestParam(required = false) Byte status,
@Parameter(description = "Alarm content fuzzy query", example = "linux") @RequestParam(required = false) String content,
@Parameter(description = "Sort field, default id", example = "name") @RequestParam(defaultValue = "id") String sort,
@Parameter(description = "Sort Type", example = "desc") @RequestParam(defaultValue = "desc") String order,
@Parameter(description = "List current page", example = "0") @RequestParam(defaultValue = "0") int pageIndex,
@Parameter(description = "Number of list pagination", example = "8") @RequestParam(defaultValue = "8") int pageSize) {<FILL_FUNCTION_BODY>}
@DeleteMapping
@Operation(summary = "Delete alarms in batches", description = "according to the alarm ID list to delete the alarm information in batches")
public ResponseEntity<Message<Void>> deleteAlerts(
@Parameter(description = "Alarm List ID", example = "6565463543") @RequestParam(required = false) List<Long> ids) {
if (ids != null && !ids.isEmpty()) {
alertService.deleteAlerts(new HashSet<>(ids));
}
Message<Void> message = Message.success();
return ResponseEntity.ok(message);
}
@DeleteMapping("/clear")
@Operation(summary = "Delete alarms in batches", description = "delete all alarm information")
public ResponseEntity<Message<Void>> clearAllAlerts() {
alertService.clearAlerts();
Message<Void> message = Message.success();
return ResponseEntity.ok(message);
}
@PutMapping(path = "/status/{status}")
@Operation(summary = "Batch modify alarm status, set read and unread", description = "Batch modify alarm status, set read and unread")
public ResponseEntity<Message<Void>> applyAlertDefinesStatus(
@Parameter(description = "Alarm status value", example = "0") @PathVariable Byte status,
@Parameter(description = "Alarm List IDS", example = "6565463543") @RequestParam(required = false) List<Long> ids) {
if (ids != null && status != null && !ids.isEmpty()) {
alertService.editAlertStatus(status, ids);
}
Message<Void> message = Message.success();
return ResponseEntity.ok(message);
}
@GetMapping(path = "/summary")
@Operation(summary = "Get alarm statistics", description = "Get alarm statistics information")
public ResponseEntity<Message<AlertSummary>> getAlertsSummary() {
AlertSummary alertSummary = alertService.getAlertsSummary();
Message<AlertSummary> message = Message.success(alertSummary);
return ResponseEntity.ok(message);
}
}
|
Specification<Alert> specification = (root, query, criteriaBuilder) -> {
List<Predicate> andList = new ArrayList<>();
if (ids != null && !ids.isEmpty()) {
CriteriaBuilder.In<Long> inPredicate = criteriaBuilder.in(root.get("id"));
for (long id : ids) {
inPredicate.value(id);
}
andList.add(inPredicate);
}
if (monitorId != null) {
Predicate predicate = criteriaBuilder.like(root.get("tags").as(String.class), "%" + monitorId + "%");
andList.add(predicate);
}
if (priority != null) {
Predicate predicate = criteriaBuilder.equal(root.get("priority"), priority);
andList.add(predicate);
}
if (status != null) {
Predicate predicate = criteriaBuilder.equal(root.get("status"), status);
andList.add(predicate);
}
if (content != null && !content.isEmpty()) {
Predicate predicateContent = criteriaBuilder.like(root.get("content"), "%" + content + "%");
andList.add(predicateContent);
}
Predicate[] predicates = new Predicate[andList.size()];
return criteriaBuilder.and(andList.toArray(predicates));
};
Sort sortExp = Sort.by(new Sort.Order(Sort.Direction.fromString(order), sort));
PageRequest pageRequest = PageRequest.of(pageIndex, pageSize, sortExp);
Page<Alert> alertPage = alertService.getAlerts(specification, pageRequest);
Message<Page<Alert>> message = Message.success(alertPage);
return ResponseEntity.ok(message);
| 947
| 451
| 1,398
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/dto/GeneralCloudAlertReport.java
|
GeneralCloudAlertReport
|
refreshAlertTime
|
class GeneralCloudAlertReport extends AlertReport {
/**
* Alarm date and time
*/
private String alertDateTime;
/**
* DATE TIME FORMAT
*/
private String dateTimeFormat;
/**
* You can refresh the timestamp of the alarm time with enhanced properties
*/
public void refreshAlertTime() {<FILL_FUNCTION_BODY>}
}
|
if (getAlertTime() != 0L) {
return;
}
if (StringUtils.isNotBlank(alertDateTime)) {
Long timeStamp = null;
if (StringUtils.isNotBlank(dateTimeFormat)) {
timeStamp = DateUtil.getTimeStampFromFormat(alertDateTime, dateTimeFormat);
}
if (timeStamp == null) {
timeStamp = DateUtil.getTimeStampFromSomeFormats(alertDateTime);
}
if (timeStamp != null) {
setAlertTime(timeStamp);
return;
}
}
throw new RuntimeException("parse alarm time error");
| 106
| 173
| 279
|
<methods>public non-sealed void <init>() <variables>private java.lang.Integer alertDuration,private java.lang.String alertId,private java.lang.String alertName,private long alertTime,private Map<java.lang.String,java.lang.String> annotations,private java.lang.String content,private Map<java.lang.String,java.lang.String> labels,private java.lang.Integer priority,private java.lang.Integer reportType
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/dto/TenCloudAlertReport.java
|
Conditions
|
getContent
|
class Conditions {
// alarm metrics parameters
private String metricName;
private String metricShowName;
private String calcType;
private String calcValue;
private String calcUnit;
private String currentValue;
private String historyValue;
private String unit;
private String period;
private String periodNum;
private String alarmNotifyType;
private long alarmNotifyPeriod;
// alarm event parameters
private String productName;
private String productShowName;
private String eventName;
private String eventShowName;
}
@Override
public String getAlertName() {
return "TenCloud|腾讯云";
}
@Override
public Integer getAlertDuration() {
return this.durationTime;
}
@Override
public long getAlertTime() {
return DateUtil.getTimeStampFromFormat(getFirstOccurTime(), "yyyy-MM-dd HH:mm:ss");
}
@Override
public Integer getPriority() {
return 1;
}
@Override
public Integer getReportType() {
return 1;
}
@Override
public Map<String, String> getLabels() {
return Map.of("app", "TenCloud");
}
@Override
public Map<String, String> getAnnotations() {
return Map.of("app", "TenCloud");
}
/**
* Transaction alarm
*/
private static final String EVENT = "event";
/**
* Indicator alarm
*/
private static final String METRIC = "metric";
/**
* If the following alarm types increase, the entity class can be divided into a parent class and multiple subclasses, and then the method can be implemented in the subclass
* Since there are only two, it will not be split for the time being
*/
@Override
public String getContent() {<FILL_FUNCTION_BODY>
|
StringBuilder contentBuilder = new StringBuilder();
// 判断类型
if (EVENT.equals(getAlarmType())) {
contentBuilder
.append("[")
.append("告警状态 | ")
.append("0".equals(alarmStatus) ? "恢复" : "告警")
.append("]\n")
.append("[")
.append("告警对象信息 | ")
.append(getAlarmObjInfo().getRegion() == null ? "" : "region:" + getAlarmObjInfo().getRegion())
.append(";").append("appId:").append(getAlarmObjInfo().getAppId())
.append(";").append("uni:").append(getAlarmObjInfo().getUin())
.append(";").append("unInstanceId:").append(getAlarmObjInfo().getDimensions().getUnInstanceId())
.append("]\n")
.append("[")
.append("告警策略组信息 | ")
.append("名称:").append(getAlarmPolicyInfo().getPolicyName())
.append(";")
.append("策略类型展示名称:").append(getAlarmPolicyInfo().getConditions().getProductName())
.append(",").append(getAlarmPolicyInfo().getConditions().getProductShowName())
.append(";")
.append("事件告警名称:").append(getAlarmPolicyInfo().getConditions().getEventName())
.append(",").append(getAlarmPolicyInfo().getConditions().getEventShowName())
.append("]");
} else if (METRIC.equals(getAlarmType())) {
contentBuilder
.append("[")
.append("告警对象:")
.append(getAlarmObjInfo().getRegion() == null ? "" : getAlarmObjInfo().getRegion())
.append(getAlarmObjInfo().getRegion() == null ? "" : "|")
.append(getAlarmObjInfo().getNamespace())
.append("]")
.append("[")
.append("告警内容:")
.append(getAlarmPolicyInfo().getPolicyTypeCname()).append("|")
.append(getAlarmPolicyInfo().getConditions().getMetricShowName()).append("|")
.append(getAlarmPolicyInfo().getConditions().getMetricName())
.append(getAlarmPolicyInfo().getConditions().getCalcType())
.append(getAlarmPolicyInfo().getConditions().getCalcValue())
.append(getAlarmPolicyInfo().getConditions().getCalcUnit())
.append("]")
.append("[")
.append("当前数据")
.append(getAlarmPolicyInfo().getConditions().getCurrentValue())
.append(getAlarmPolicyInfo().getConditions().getCalcUnit())
.append("]");
}
return contentBuilder.toString();
| 495
| 726
| 1,221
|
<methods>public non-sealed void <init>() ,public abstract java.lang.Integer getAlertDuration() ,public abstract java.lang.String getAlertName() ,public abstract long getAlertTime() ,public abstract Map<java.lang.String,java.lang.String> getAnnotations() ,public abstract java.lang.String getContent() ,public abstract Map<java.lang.String,java.lang.String> getLabels() ,public abstract java.lang.Integer getPriority() ,public abstract java.lang.Integer getReportType() <variables>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/reduce/AlarmCommonReduce.java
|
AlarmCommonReduce
|
reduceAndSendAlarm
|
class AlarmCommonReduce {
private final AlarmSilenceReduce alarmSilenceReduce;
private final AlarmConvergeReduce alarmConvergeReduce;
private final CommonDataQueue dataQueue;
private final AlertMonitorDao alertMonitorDao;
public void reduceAndSendAlarm(Alert alert) {<FILL_FUNCTION_BODY>}
}
|
alert.setTimes(1);
Map<String, String> tags = alert.getTags();
if (tags == null) {
tags = new HashMap<>(8);
alert.setTags(tags);
}
String monitorIdStr = tags.get(CommonConstants.TAG_MONITOR_ID);
if (monitorIdStr == null) {
log.debug("receiver extern alarm message: {}", alert);
} else {
long monitorId = Long.parseLong(monitorIdStr);
List<Tag> tagList = alertMonitorDao.findMonitorIdBindTags(monitorId);
for (Tag tag : tagList) {
if (!tags.containsKey(tag.getName())) {
tags.put(tag.getName(), tag.getValue());
}
}
}
// converge -> silence
if (alarmConvergeReduce.filterConverge(alert) && alarmSilenceReduce.filterSilence(alert)) {
dataQueue.sendAlertsData(alert);
}
| 102
| 255
| 357
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/reduce/AlarmConvergeReduce.java
|
AlarmConvergeReduce
|
filterConverge
|
class AlarmConvergeReduce {
private final AlertConvergeDao alertConvergeDao;
private final Map<Integer, Alert> converageAlertMap;
public AlarmConvergeReduce(AlertConvergeDao alertConvergeDao) {
this.alertConvergeDao = alertConvergeDao;
this.converageAlertMap = new ConcurrentHashMap<>(16);
}
/**
* currentAlert converge filter data
*
* @param currentAlert currentAlert
* @return true when not filter
*/
@SuppressWarnings("unchecked")
public boolean filterConverge(Alert currentAlert) {<FILL_FUNCTION_BODY>}
}
|
// ignore monitor status auto recover notice
if (currentAlert.getTags() != null && currentAlert.getTags().containsKey(CommonConstants.IGNORE)) {
return true;
}
if (currentAlert.getStatus() == CommonConstants.ALERT_STATUS_CODE_RESTORED) {
// restored alert
int alertHash = Objects.hash(CommonConstants.ALERT_PRIORITY_CODE_CRITICAL)
+ Arrays.hashCode(currentAlert.getTags().keySet().toArray(new String[0]))
+ Arrays.hashCode(currentAlert.getTags().values().toArray(new String[0]));
converageAlertMap.remove(alertHash);
alertHash = Objects.hash(CommonConstants.ALERT_PRIORITY_CODE_EMERGENCY)
+ Arrays.hashCode(currentAlert.getTags().keySet().toArray(new String[0]))
+ Arrays.hashCode(currentAlert.getTags().values().toArray(new String[0]));
converageAlertMap.remove(alertHash);
alertHash = Objects.hash(CommonConstants.ALERT_PRIORITY_CODE_WARNING)
+ Arrays.hashCode(currentAlert.getTags().keySet().toArray(new String[0]))
+ Arrays.hashCode(currentAlert.getTags().values().toArray(new String[0]));
converageAlertMap.remove(alertHash);
return true;
}
CommonCacheService<String, Object> convergeCache = CacheFactory.getAlertConvergeCache();
List<AlertConverge> alertConvergeList = (List<AlertConverge>) convergeCache.get(CommonConstants.CACHE_ALERT_CONVERGE);
if (alertConvergeList == null) {
alertConvergeList = alertConvergeDao.findAll();
// matchAll is in the last
alertConvergeList.sort((item1, item2) -> {
if (item1.isMatchAll()) {
return 1;
} else if (item2.isMatchAll()) {
return -1;
} else {
return 0;
}
});
convergeCache.put(CommonConstants.CACHE_ALERT_CONVERGE, alertConvergeList);
}
for (AlertConverge alertConverge : alertConvergeList) {
if (!alertConverge.isEnable()) {
continue;
}
boolean match = alertConverge.isMatchAll();
if (!match) {
List<TagItem> tags = alertConverge.getTags();
if (currentAlert.getTags() != null && !currentAlert.getTags().isEmpty()) {
Map<String, String> alertTagMap = currentAlert.getTags();
match = tags.stream().anyMatch(item -> {
if (alertTagMap.containsKey(item.getName())) {
String tagValue = alertTagMap.get(item.getName());
if (tagValue == null && item.getValue() == null) {
return true;
} else {
return tagValue != null && tagValue.equals(item.getValue());
}
} else {
return false;
}
});
} else {
match = true;
}
if (match && alertConverge.getPriorities() != null && !alertConverge.getPriorities().isEmpty()) {
match = alertConverge.getPriorities().stream().anyMatch(item -> item != null && item == currentAlert.getPriority());
}
}
if (match) {
long evalInterval = alertConverge.getEvalInterval() * 1000;
long now = System.currentTimeMillis();
if (evalInterval <= 0) {
return true;
}
int alertHash = Objects.hash(currentAlert.getPriority())
+ Arrays.hashCode(currentAlert.getTags().keySet().toArray(new String[0]))
+ Arrays.hashCode(currentAlert.getTags().values().toArray(new String[0]));
Alert preAlert = converageAlertMap.get(alertHash);
if (preAlert == null) {
currentAlert.setTimes(1);
currentAlert.setFirstAlarmTime(now);
currentAlert.setLastAlarmTime(now);
converageAlertMap.put(alertHash, currentAlert.clone());
return true;
} else {
if (now - preAlert.getFirstAlarmTime() < evalInterval) {
preAlert.setTimes(preAlert.getTimes() + 1);
preAlert.setLastAlarmTime(now);
return false;
} else {
currentAlert.setTimes(preAlert.getTimes());
if (preAlert.getTimes() == 1) {
currentAlert.setFirstAlarmTime(now);
} else {
currentAlert.setFirstAlarmTime(preAlert.getFirstAlarmTime());
}
currentAlert.setLastAlarmTime(now);
preAlert.setFirstAlarmTime(now);
preAlert.setLastAlarmTime(now);
preAlert.setTimes(1);
return true;
}
}
}
}
return true;
| 197
| 1,361
| 1,558
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/reduce/AlarmSilenceReduce.java
|
AlarmSilenceReduce
|
filterSilence
|
class AlarmSilenceReduce {
private final AlertSilenceDao alertSilenceDao;
/**
* alert silence filter data
* @param alert alert
* @return true when not filter
*/
@SuppressWarnings("unchecked")
public boolean filterSilence(Alert alert) {<FILL_FUNCTION_BODY>}
}
|
CommonCacheService<String, Object> silenceCache = CacheFactory.getAlertSilenceCache();
List<AlertSilence> alertSilenceList = (List<AlertSilence>) silenceCache.get(CommonConstants.CACHE_ALERT_SILENCE);
if (alertSilenceList == null) {
alertSilenceList = alertSilenceDao.findAll();
silenceCache.put(CommonConstants.CACHE_ALERT_SILENCE, alertSilenceList);
}
for (AlertSilence alertSilence : alertSilenceList) {
if (!alertSilence.isEnable()) {
continue;
}
// if match the silence rule, return
boolean match = alertSilence.isMatchAll();
if (!match) {
List<TagItem> tags = alertSilence.getTags();
if (alert.getTags() != null && !alert.getTags().isEmpty()) {
Map<String, String> alertTagMap = alert.getTags();
match = tags.stream().anyMatch(item -> {
if (alertTagMap.containsKey(item.getName())) {
String tagValue = alertTagMap.get(item.getName());
if (tagValue == null && item.getValue() == null) {
return true;
} else {
return tagValue != null && tagValue.equals(item.getValue());
}
} else {
return false;
}
});
} else {
match = true;
}
if (match && alertSilence.getPriorities() != null && !alertSilence.getPriorities().isEmpty()) {
match = alertSilence.getPriorities().stream().anyMatch(item -> item != null && item == alert.getPriority());
}
}
if (match) {
LocalDateTime nowDate = LocalDateTime.now();
if (alertSilence.getType() == 0) {
// once time
boolean startMatch = alertSilence.getPeriodStart() == null ||
nowDate.isAfter(alertSilence.getPeriodStart().toLocalDateTime());
boolean endMatch = alertSilence.getPeriodEnd() == null ||
nowDate.isBefore(alertSilence.getPeriodEnd().toLocalDateTime());
if (startMatch && endMatch) {
int times = Optional.ofNullable(alertSilence.getTimes()).orElse(0);
alertSilence.setTimes(times + 1);
alertSilenceDao.save(alertSilence);
return false;
}
} else if (alertSilence.getType() == 1) {
// cyc time
int currentDayOfWeek = nowDate.toLocalDate().getDayOfWeek().getValue();
if (alertSilence.getDays() != null && !alertSilence.getDays().isEmpty()) {
boolean dayMatch = alertSilence.getDays().stream().anyMatch(item -> item == currentDayOfWeek);
if (dayMatch) {
LocalTime nowTime = nowDate.toLocalTime();
boolean startMatch = alertSilence.getPeriodStart() == null ||
nowTime.isAfter(alertSilence.getPeriodStart().toLocalTime());
boolean endMatch = alertSilence.getPeriodEnd() == null ||
nowTime.isBefore(alertSilence.getPeriodEnd().toLocalTime());
if (startMatch && endMatch) {
int times = Optional.ofNullable(alertSilence.getTimes()).orElse(0);
alertSilence.setTimes(times + 1);
alertSilenceDao.save(alertSilence);
return false;
}
}
}
}
}
}
return true;
| 95
| 911
| 1,006
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/service/impl/AlertDefineAbstractImExportServiceImpl.java
|
AlertDefineAbstractImExportServiceImpl
|
importConfig
|
/**
 * Template base class for alert-define import/export. The concrete transport
 * format (JSON/YAML/Excel) is supplied by subclasses via {@link #parseImport}
 * and {@link #writeOs}; this class handles entity &lt;-&gt; DTO mapping.
 *
 * NOTE(review): the {@code <FILL_FUNCTION_BODY>} token below is a dataset
 * masking placeholder, not Java source — preserved verbatim on purpose.
 */
class AlertDefineAbstractImExportServiceImpl implements AlertDefineImExportService {
    // @Lazy breaks the circular wiring: AlertDefineService itself collects the
    // im/export service beans in its constructor.
    @Resource
    @Lazy
    private AlertDefineService alertDefineService;
    // Parses the stream with the subclass parser and persists each entry (masked).
    @Override
    public void importConfig(InputStream is) {<FILL_FUNCTION_BODY>}
    /**
     * Exports the alert defines identified by {@code configList}: each id is
     * resolved through the service, mapped to its DTO form, and handed to the
     * format-specific writer.
     */
    @Override
    public void exportConfig(OutputStream os, List<Long> configList) {
        var monitorList = configList.stream()
                .map(it -> alertDefineService.getAlertDefine(it))
                .map(this::convert)
                .collect(Collectors.toUnmodifiableList());
        writeOs(monitorList, os);
    }
    /**
     * Parsing an input stream into a form
     *
     * @param is input stream
     * @return form list
     */
    abstract List<ExportAlertDefineDTO> parseImport(InputStream is);
    /**
     * Export Configuration to Output Stream
     * @param exportAlertDefineList configuration list
     * @param os output stream
     */
    abstract void writeOs(List<ExportAlertDefineDTO> exportAlertDefineList, OutputStream os);
    // Entity -> export DTO (shallow property copy via Spring BeanUtils).
    private ExportAlertDefineDTO convert(AlertDefine alertDefine) {
        var exportAlertDefine = new ExportAlertDefineDTO();
        var alertDefineDTO = new AlertDefineDTO();
        BeanUtils.copyProperties(alertDefine, alertDefineDTO);
        exportAlertDefine.setAlertDefine(alertDefineDTO);
        return exportAlertDefine;
    }
    // Export DTO -> entity (shallow property copy). The DTO carries no id
    // field, so the entity's id is left unset.
    private AlertDefine convert(ExportAlertDefineDTO exportAlertDefineDTO) {
        var alertDefine = new AlertDefine();
        var alertDefineDTO = exportAlertDefineDTO.getAlertDefine();
        BeanUtils.copyProperties(alertDefineDTO, alertDefine);
        return alertDefine;
    }
    // Common export-file name prefix, e.g. "hertzbeat_alertDefine_2024-01-01".
    protected String fileNamePrefix() {
        return "hertzbeat_alertDefine_" + LocalDate.now();
    }
    /**
     * Export data transfer objects for alert configurations
     */
    @Data
    @JsonInclude(JsonInclude.Include.NON_NULL)
    @JsonIgnoreProperties(ignoreUnknown = true)
    @ExcelTarget(value = "ExportAlertDefineDTO")
    protected static class ExportAlertDefineDTO {
        @Excel(name = "AlertDefine")
        private AlertDefineDTO alertDefine;
    }
    /**
     * Data transfer object for alert configuration
     */
    @Data
    @JsonInclude(JsonInclude.Include.NON_NULL)
    @JsonIgnoreProperties(ignoreUnknown = true)
    @ExcelTarget(value = "AlertDefineDTO")
    protected static class AlertDefineDTO {
        @Excel(name = "App")
        private String app;
        @Excel(name = "Metric")
        private String metric;
        @Excel(name = "Field")
        private String field;
        @Excel(name = "Preset")
        private Boolean preset;
        @Excel(name = "Expr")
        private String expr;
        @Excel(name = "Priority")
        private Byte priority;
        @Excel(name = "Times")
        private Integer times;
        @Excel(name = "Tags")
        private List<TagItem> tags;
        @Excel(name = "Enable")
        private Boolean enable;
        @Excel(name = "RecoverNotice")
        private Boolean recoverNotice;
        @Excel(name = "Template")
        private String template;
    }
}
|
var formList = parseImport(is)
.stream()
.map(this::convert)
.collect(Collectors.toUnmodifiableList());
if (!CollectionUtils.isEmpty(formList)) {
formList.forEach(alertDefine -> {
alertDefineService.validate(alertDefine, false);
alertDefineService.addAlertDefine(alertDefine);
});
}
| 915
| 106
| 1,021
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/service/impl/AlertDefineJsonImExportServiceImpl.java
|
AlertDefineJsonImExportServiceImpl
|
parseImport
|
/**
 * JSON flavor of the alert-define import/export service (Jackson based).
 * {@code <FILL_FUNCTION_BODY>} below is a dataset masking token, kept verbatim.
 */
class AlertDefineJsonImExportServiceImpl extends AlertDefineAbstractImExportServiceImpl {
    public static final String TYPE = "JSON";
    public static final String FILE_SUFFIX = ".json";
    // final with no constructor in view — presumably injected through a
    // Lombok-generated constructor; TODO confirm.
    private final ObjectMapper objectMapper;
    // Registry key for this implementation.
    @Override
    public String type() {
        return TYPE;
    }
    // e.g. "hertzbeat_alertDefine_2024-01-01.json"
    @Override
    public String getFileName() {
        return fileNamePrefix() + FILE_SUFFIX;
    }
    // Deserializes the uploaded stream into export DTOs (masked).
    @Override
    List<ExportAlertDefineDTO> parseImport(InputStream is) {<FILL_FUNCTION_BODY>}
    // Serializes the DTO list as JSON to the response stream.
    // NOTE(review): the rethrow drops the IOException cause — consider
    // new RuntimeException("export alertDefine failed", ex).
    @Override
    void writeOs(List<ExportAlertDefineDTO> exportAlertDefineList, OutputStream os) {
        try {
            objectMapper.writeValue(os, exportAlertDefineList);
        } catch (IOException ex) {
            log.error("export alertDefine failed.", ex);
            throw new RuntimeException("export alertDefine failed");
        }
    }
}
|
try {
return objectMapper.readValue(is, new TypeReference<>() {
});
} catch (IOException ex) {
log.error("import alertDefine failed.", ex);
throw new RuntimeException("import alertDefine failed");
}
| 242
| 66
| 308
|
<methods>public non-sealed void <init>() ,public void exportConfig(java.io.OutputStream, List<java.lang.Long>) ,public void importConfig(java.io.InputStream) <variables>private org.apache.hertzbeat.alert.service.AlertDefineService alertDefineService
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/service/impl/AlertDefineServiceImpl.java
|
AlertDefineServiceImpl
|
importConfig
|
/**
 * Alert-define (threshold rule) management service. Also a registry of the
 * format-specific import/export implementations, keyed by their type string.
 * {@code <FILL_FUNCTION_BODY>} below is a dataset masking token, kept verbatim.
 */
class AlertDefineServiceImpl implements AlertDefineService {
    @Autowired
    private AlertDefineDao alertDefineDao;
    @Autowired
    private AlertDefineBindDao alertDefineBindDao;
    // type string (e.g. "JSON", "YAML", "EXCEL") -> im/export implementation
    private final Map<String, AlertDefineImExportService> alertDefineImExportServiceMap = new HashMap<>();
    public AlertDefineServiceImpl(List<AlertDefineImExportService> alertDefineImExportServiceList) {
        alertDefineImExportServiceList.forEach(it -> alertDefineImExportServiceMap.put(it.type(), it));
    }
    // Only the Aviator expression is validated today; other fields are not checked.
    @Override
    public void validate(AlertDefine alertDefine, boolean isModify) throws IllegalArgumentException {
        // todo
        if (StringUtils.hasText(alertDefine.getExpr())) {
            try {
                AviatorEvaluator.compile(alertDefine.getExpr(), false);
            } catch (Exception e) {
                throw new IllegalArgumentException("alert expr error: " + e.getMessage());
            }
        }
    }
    @Override
    public void addAlertDefine(AlertDefine alertDefine) throws RuntimeException {
        alertDefineDao.save(alertDefine);
    }
    @Override
    public void modifyAlertDefine(AlertDefine alertDefine) throws RuntimeException {
        alertDefineDao.save(alertDefine);
    }
    @Override
    public void deleteAlertDefine(long alertId) throws RuntimeException {
        alertDefineDao.deleteById(alertId);
    }
    @Override
    public AlertDefine getAlertDefine(long alertId) throws RuntimeException {
        Optional<AlertDefine> optional = alertDefineDao.findById(alertId);
        return optional.orElse(null);
    }
    @Override
    public void deleteAlertDefines(Set<Long> alertIds) throws RuntimeException {
        alertDefineDao.deleteAlertDefinesByIdIn(alertIds);
    }
    @Override
    public Page<AlertDefine> getMonitorBindAlertDefines(Specification<AlertDefine> specification, PageRequest pageRequest) {
        return alertDefineDao.findAll(specification, pageRequest);
    }
    // Replaces all monitor bindings of an alert define.
    // NOTE(review): the existsById guard deletes bindings only when the id does
    // NOT exist, and the same delete then runs again unconditionally — the
    // guarded call looks redundant or inverted; confirm intent.
    @Override
    public void applyBindAlertDefineMonitors(Long alertId, List<AlertDefineMonitorBind> alertDefineBinds) {
        // todo checks whether the alarm definition and monitoring exist
        if (!alertDefineBindDao.existsById(alertId)){
            alertDefineBindDao.deleteAlertDefineBindsByAlertDefineIdEquals(alertId);
        }
        // Delete all associations of this alarm
        alertDefineBindDao.deleteAlertDefineBindsByAlertDefineIdEquals(alertId);
        // Save the associated
        alertDefineBindDao.saveAll(alertDefineBinds);
    }
    // Merges monitor-specific defines with enabled presets, keeps only rows
    // that target a field, and groups them by field sorted by priority.
    @Override
    public Map<String, List<AlertDefine>> getMonitorBindAlertDefines(long monitorId, String app, String metrics) {
        List<AlertDefine> defines = alertDefineDao.queryAlertDefinesByMonitor(monitorId, app, metrics);
        List<AlertDefine> defaultDefines = alertDefineDao.queryAlertDefinesByAppAndMetricAndPresetTrueAndEnableTrue(app, metrics);
        defines.addAll(defaultDefines);
        Set<AlertDefine> defineSet = defines.stream().filter(item -> item.getField() != null).collect(Collectors.toSet());
        // The alarm thresholds are defined in ascending order of the alarm severity from 0 to 3.
        // The lower the number, the higher the alarm is. That is, the alarm is calculated from the highest alarm threshold
        return defineSet.stream().sorted(Comparator.comparing(AlertDefine::getPriority))
                .collect(Collectors.groupingBy(AlertDefine::getField));
    }
    // Returns the first matching availability define (monitor-specific wins by order).
    @Override
    public AlertDefine getMonitorBindAlertAvaDefine(long monitorId, String app, String metrics) {
        List<AlertDefine> defines = alertDefineDao.queryAlertDefinesByMonitor(monitorId, app, metrics);
        List<AlertDefine> defaultDefines = alertDefineDao.queryAlertDefinesByAppAndMetricAndPresetTrueAndEnableTrue(app, metrics);
        defines.addAll(defaultDefines);
        return defines.stream().findFirst().orElse(null);
    }
    @Override
    public Page<AlertDefine> getAlertDefines(Specification<AlertDefine> specification, PageRequest pageRequest) {
        return alertDefineDao.findAll(specification, pageRequest);
    }
    @Override
    public List<AlertDefineMonitorBind> getBindAlertDefineMonitors(long alertDefineId) {
        return alertDefineBindDao.getAlertDefineBindsByAlertDefineIdEquals(alertDefineId);
    }
    // Streams the chosen format to the HTTP response as a file download.
    @Override
    public void export(List<Long> ids, String type, HttpServletResponse res) throws Exception {
        var imExportService = alertDefineImExportServiceMap.get(type);
        if (imExportService == null) {
            throw new IllegalArgumentException("not support export type: " + type);
        }
        var fileName = imExportService.getFileName();
        res.setHeader("content-type", "application/octet-stream;charset=UTF-8");
        res.setContentType("application/octet-stream;charset=UTF-8");
        res.setHeader(HttpHeaders.CONTENT_DISPOSITION, "attachment;filename=" + URLEncoder.encode(fileName, StandardCharsets.UTF_8));
        res.setHeader("Access-Control-Expose-Headers", "Content-Disposition");
        imExportService.exportConfig(res.getOutputStream(), ids);
    }
    // Dispatches an uploaded file to the matching im/export service (masked).
    @Override
    public void importConfig(MultipartFile file) throws Exception {<FILL_FUNCTION_BODY>}
}
|
var fileName = file.getOriginalFilename();
if (!StringUtils.hasText(fileName)) {
return;
}
var type = "";
if (fileName.toLowerCase().endsWith(AlertDefineJsonImExportServiceImpl.FILE_SUFFIX)) {
type = AlertDefineJsonImExportServiceImpl.TYPE;
}
if (fileName.toLowerCase().endsWith(AlertDefineExcelImExportServiceImpl.FILE_SUFFIX)) {
type = AlertDefineExcelImExportServiceImpl.TYPE;
}
if (fileName.toLowerCase().endsWith(AlertDefineYamlImExportServiceImpl.FILE_SUFFIX)) {
type = AlertDefineYamlImExportServiceImpl.TYPE;
}
if (!alertDefineImExportServiceMap.containsKey(type)) {
throw new RuntimeException("file " + fileName + " is not supported.");
}
var imExportService = alertDefineImExportServiceMap.get(type);
imExportService.importConfig(file.getInputStream());
| 1,504
| 269
| 1,773
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/service/impl/AlertDefineYamlImExportServiceImpl.java
|
AlertDefineYamlImExportServiceImpl
|
writeOs
|
/**
 * YAML flavor of the alert-define import/export service (SnakeYAML based).
 * {@code <FILL_FUNCTION_BODY>} below is a dataset masking token, kept verbatim.
 */
class AlertDefineYamlImExportServiceImpl extends AlertDefineAbstractImExportServiceImpl {
    public static final String TYPE = "YAML";
    public static final String FILE_SUFFIX = ".yaml";
    // Registry key for this implementation.
    @Override
    public String type() {
        return TYPE;
    }
    // e.g. "hertzbeat_alertDefine_2024-01-01.yaml"
    @Override
    public String getFileName() {
        return fileNamePrefix() + FILE_SUFFIX;
    }
    // Loads the whole stream as one YAML document. A fresh Yaml instance per
    // call — SnakeYAML's Yaml is not thread-safe.
    @Override
    List<ExportAlertDefineDTO> parseImport(InputStream is) {
        Yaml yaml = new Yaml();
        return yaml.load(is);
    }
    // Dumps the DTO list as YAML to the output stream (masked).
    @Override
    void writeOs(List<ExportAlertDefineDTO> exportAlertDefineList, OutputStream os) {<FILL_FUNCTION_BODY>}
}
|
DumperOptions options = new DumperOptions();
options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
options.setIndent(2);
options.setPrettyFlow(true);
Yaml yaml = new Yaml(options);
yaml.dump(exportAlertDefineList, new OutputStreamWriter(os, StandardCharsets.UTF_8));
| 199
| 97
| 296
|
<methods>public non-sealed void <init>() ,public void exportConfig(java.io.OutputStream, List<java.lang.Long>) ,public void importConfig(java.io.InputStream) <variables>private org.apache.hertzbeat.alert.service.AlertDefineService alertDefineService
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/service/impl/AlertServiceImpl.java
|
AlertServiceImpl
|
getAlertsSummary
|
/**
 * Alarm management service: CRUD over alerts, summary statistics, and
 * ingestion of external alert reports into the common reduce pipeline.
 * {@code <FILL_FUNCTION_BODY>} below is a dataset masking token, kept verbatim.
 */
class AlertServiceImpl implements AlertService {
    @Autowired
    private AlertDao alertDao;
    @Autowired
    private AlarmCommonReduce alarmCommonReduce;
    @Override
    public void addAlert(Alert alert) throws RuntimeException {
        alertDao.save(alert);
    }
    @Override
    public Page<Alert> getAlerts(Specification<Alert> specification, PageRequest pageRequest) {
        return alertDao.findAll(specification, pageRequest);
    }
    @Override
    public void deleteAlerts(HashSet<Long> ids) {
        alertDao.deleteAlertsByIdIn(ids);
    }
    @Override
    public void clearAlerts() {
        alertDao.deleteAll();
    }
    @Override
    public void editAlertStatus(Byte status, List<Long> ids) {
        alertDao.updateAlertsStatus(status, ids);
    }
    // Builds the dashboard summary (per-priority counts, deal rate) — masked.
    @Override
    public AlertSummary getAlertsSummary() {<FILL_FUNCTION_BODY>}
    // Converts an external report and pushes it into the alarm reduce pipeline.
    @Override
    public void addNewAlertReport(AlertReport alertReport) {
        alarmCommonReduce.reduceAndSendAlarm(buildAlertData(alertReport));
    }
    @Override
    public List<Alert> getAlerts(Specification<Alert> specification) {
        return alertDao.findAll(specification);
    }
    /**
     * The external alarm information is converted to Alert
     * @param alertReport alarm body
     * @return Alert entity
     */
    private Alert buildAlertData(AlertReport alertReport){
        Map<String, String> annotations = alertReport.getAnnotations();
        StringBuilder sb = new StringBuilder();
        // No explicit content: fall back to "key:value" lines from annotations.
        if (alertReport.getContent() == null || alertReport.getContent().length() <= 0){
            StringBuilder finalSb = sb;
            annotations.forEach((k, v) -> {
                finalSb.append(k).append(":").append(v).append("\n");
            });
        }else {
            sb = new StringBuilder(alertReport.getContent());
        }
        // Report time is epoch millis; rendered in the server's default zone.
        LocalDateTime dateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(alertReport.getAlertTime()),
                ZoneId.systemDefault());
        return Alert.builder()
                .content("Alert Center\n" + sb)
                .priority(alertReport.getPriority().byteValue())
                .status(CommonConstants.ALERT_STATUS_CODE_PENDING)
                .tags(alertReport.getLabels())
                .target(alertReport.getAlertName())
                .triggerTimes(1)
                .firstAlarmTime(alertReport.getAlertTime())
                .lastAlarmTime(alertReport.getAlertTime())
                .gmtCreate(dateTime)
                .gmtUpdate(dateTime)
                .build();
    }
}
|
AlertSummary alertSummary = new AlertSummary();
// Statistics on the alarm information in the alarm state
List<AlertPriorityNum> priorityNums = alertDao.findAlertPriorityNum();
if (priorityNums != null) {
for (AlertPriorityNum priorityNum : priorityNums) {
switch (priorityNum.getPriority()) {
case CommonConstants
.ALERT_PRIORITY_CODE_WARNING:
alertSummary.setPriorityWarningNum(priorityNum.getNum());
break;
case CommonConstants.ALERT_PRIORITY_CODE_CRITICAL:
alertSummary.setPriorityCriticalNum(priorityNum.getNum());
break;
case CommonConstants.ALERT_PRIORITY_CODE_EMERGENCY:
alertSummary.setPriorityEmergencyNum(priorityNum.getNum());
break;
default:
break;
}
}
}
long total = alertDao.count();
alertSummary.setTotal(total);
long dealNum = total - alertSummary.getPriorityCriticalNum()
- alertSummary.getPriorityEmergencyNum() - alertSummary.getPriorityWarningNum();
alertSummary.setDealNum(dealNum);
try {
if (total == 0) {
alertSummary.setRate(100);
} else {
float rate = BigDecimal.valueOf(100 * (float) dealNum / total)
.setScale(2, RoundingMode.HALF_UP)
.floatValue();
alertSummary.setRate(rate);
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
return alertSummary;
| 732
| 432
| 1,164
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/service/impl/AlertSilenceServiceImpl.java
|
AlertSilenceServiceImpl
|
validate
|
/**
 * Alert-silence rule management. Every mutation clears the silence cache so
 * the alarm pipeline reloads fresh rules on its next pass.
 * {@code <FILL_FUNCTION_BODY>} below is a dataset masking token, kept verbatim.
 */
class AlertSilenceServiceImpl implements AlertSilenceService {
    @Autowired
    private AlertSilenceDao alertSilenceDao;
    // Normalizes the rule before save (masked body).
    @Override
    public void validate(AlertSilence alertSilence, boolean isModify) throws IllegalArgumentException {<FILL_FUNCTION_BODY>}
    @Override
    public void addAlertSilence(AlertSilence alertSilence) throws RuntimeException {
        alertSilenceDao.save(alertSilence);
        clearAlertSilencesCache();
    }
    @Override
    public void modifyAlertSilence(AlertSilence alertSilence) throws RuntimeException {
        alertSilenceDao.save(alertSilence);
        clearAlertSilencesCache();
    }
    @Override
    public AlertSilence getAlertSilence(long silenceId) throws RuntimeException {
        return alertSilenceDao.findById(silenceId).orElse(null);
    }
    @Override
    public void deleteAlertSilences(Set<Long> silenceIds) throws RuntimeException {
        alertSilenceDao.deleteAlertSilencesByIdIn(silenceIds);
        clearAlertSilencesCache();
    }
    @Override
    public Page<AlertSilence> getAlertSilences(Specification<AlertSilence> specification, PageRequest pageRequest) {
        return alertSilenceDao.findAll(specification, pageRequest);
    }
    // Invalidates the shared silence-rule cache entry.
    private void clearAlertSilencesCache() {
        CommonCacheService<String, Object> silenceCache = CacheFactory.getAlertSilenceCache();
        silenceCache.remove(CommonConstants.CACHE_ALERT_SILENCE);
    }
}
|
// todo
// if the alarm silent selection date set in periodic situations is empty, it will be deemed to be all checked.
if (alertSilence.getType() == 1 && alertSilence.getDays() == null) {
alertSilence.setDays(Arrays.asList((byte) 7, (byte) 1, (byte) 2, (byte) 3, (byte) 4, (byte) 5, (byte) 6));
}
| 413
| 118
| 531
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/util/AlertTemplateUtil.java
|
AlertTemplateUtil
|
render
|
/**
 * Renders alert notification templates by substituting {@code ${key}}
 * placeholders with values from a data map.
 * {@code <FILL_FUNCTION_BODY>} below is a dataset masking token, kept verbatim.
 */
class AlertTemplateUtil {
    /**
     * Match the variable ${key}
     * eg: Alert, the instance: ${instance} metrics: ${metrics} is over flow.
     */
    private static final Pattern PATTERN = Pattern.compile("\\$\\{(\\w+)\\}");
    // Replaces every ${key} occurrence with replaceData's value (masked body).
    public static String render(String template, Map<String, Object> replaceData) {<FILL_FUNCTION_BODY>}
}
|
if (template == null) {
return null;
}
try {
Matcher matcher = PATTERN.matcher(template);
StringBuilder builder = new StringBuilder();
while (matcher.find()) {
Object objectValue = replaceData.getOrDefault(matcher.group(1), "NullValue");
String value = objectValue.toString();
matcher.appendReplacement(builder, Matcher.quoteReplacement(value));
}
matcher.appendTail(builder);
return builder.toString();
} catch (Exception e) {
log.error(e.getMessage(), e);
return template;
}
| 111
| 165
| 276
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/alerter/src/main/java/org/apache/hertzbeat/alert/util/DateUtil.java
|
DateUtil
|
getTimeStampFromSomeFormats
|
/**
 * Date-string parsing helpers for alert timestamps.
 * {@code <FILL_FUNCTION_BODY>} below is a dataset masking token, kept verbatim.
 */
class DateUtil {
    // Candidate patterns tried in order (ISO-8601 with millis/'Z', then plain).
    private static final String[] DATE_FORMATS = {
            "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'",
            "yyyy-MM-dd HH:mm:ss"};
    /**
     * convert date to timestamp
     * @param date date
     */
    public static Long getTimeStampFromSomeFormats(String date) {<FILL_FUNCTION_BODY>}
    /**
     * convert format data to timestamp
     */
    // Returns epoch millis, or null on parse failure (error is logged).
    // A fresh SimpleDateFormat per call — the class is not thread-safe.
    public static Long getTimeStampFromFormat(String date, String format) {
        SimpleDateFormat sdf = new SimpleDateFormat(format);
        try {
            return sdf.parse(date).getTime();
        } catch (Exception e) {
            log.error(e.getMessage());
        }
        return null;
    }
}
|
SimpleDateFormat sdf;
for (String dateFormat : DATE_FORMATS) {
try {
sdf = new SimpleDateFormat(dateFormat);
return sdf.parse(date).getTime();
} catch (ParseException e) {
log.error(e.getMessage());
}
}
return null;
| 211
| 87
| 298
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/common/cache/CacheIdentifier.java
|
CacheIdentifier
|
toString
|
/**
 * Key object identifying a cached collector connection by endpoint,
 * credentials and an implementation-specific custom argument.
 * {@code <FILL_FUNCTION_BODY>} below is a dataset masking token, kept verbatim.
 */
class CacheIdentifier {
    private String ip;
    private String port;
    private String username;
    private String password;
    private String customArg;
    // Renders the identifier for logging; credentials are reduced to a hash so
    // plain-text secrets never reach log output (masked body).
    @Override
    public String toString() {<FILL_FUNCTION_BODY>}
}
|
return "CacheIdentifier {" +
"ip='" + ip + '\'' +
", port='" + port + '\'' +
", username+password=>hash='" + Objects.hash(username, password) + '\'' +
", customArg='" + customArg + '\'' +
'}';
| 69
| 78
| 147
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/common/cache/ConnectionCommonCache.java
|
ConnectionCommonCache
|
addCache
|
/**
 * Singleton LRU-style cache of reusable collector connections. Evicted or
 * expired values implementing {@link CacheCloseable} are closed on removal.
 * {@code <FILL_FUNCTION_BODY>} below is a dataset masking token, kept verbatim.
 */
class ConnectionCommonCache {
    /**
     * default cache time 200s
     */
    private static final long DEFAULT_CACHE_TIMEOUT = 200 * 1000L;
    /**
     * default max cache num
     */
    private static final int DEFAULT_MAX_CAPACITY = 10000;
    /**
     * cacheTime length
     */
    private static final int CACHE_TIME_LENGTH = 2;
    /**
     * cache timeout map
     */
    // key -> [startTimeMillis, timeToLiveMillis]
    private Map<Object, Long[]> timeoutMap;
    /**
     * object cache
     */
    private ConcurrentLinkedHashMap<Object, Object> cacheMap;
    /**
     * the executor who clean cache when timeout
     */
    private ThreadPoolExecutor timeoutCleanerExecutor;
    private ConnectionCommonCache() {
        init();
    }
    private void init() {
        // Capacity-bound map; the eviction listener also drops the timeout
        // entry and closes closeable connections.
        cacheMap = new ConcurrentLinkedHashMap
                .Builder<>()
                .maximumWeightedCapacity(DEFAULT_MAX_CAPACITY)
                .listener((key, value) -> {
                    timeoutMap.remove(key);
                    if (value instanceof CacheCloseable) {
                        ((CacheCloseable) value).close();
                    }
                    log.info("connection common cache discard key: {}, value: {}.", key, value);
                }).build();
        timeoutMap = new ConcurrentHashMap<>(DEFAULT_MAX_CAPACITY >> 6);
        // last-first-coverage algorithm, run the first and last thread, discard mid
        timeoutCleanerExecutor = new ThreadPoolExecutor(1, 1, 1, TimeUnit.SECONDS,
                new ArrayBlockingQueue<>(1),
                r -> new Thread(r, "connection-cache-timeout-cleaner"),
                new ThreadPoolExecutor.DiscardOldestPolicy());
        // init monitor available detector cyc task
        ThreadFactory threadFactory = new ThreadFactoryBuilder()
                .setNameFormat("connection-cache-ava-detector-%d")
                .setDaemon(true)
                .build();
        ScheduledThreadPoolExecutor scheduledExecutor = new ScheduledThreadPoolExecutor(1, threadFactory);
        scheduledExecutor.scheduleWithFixedDelay(this::detectCacheAvailable, 2, 20, TimeUnit.MINUTES);
    }
    /**
     * detect all cache available, cleanup not ava connection
     */
    private void detectCacheAvailable() {
        try {
            cacheMap.forEach((key, value) -> {
                Long[] cacheTime = timeoutMap.get(key);
                long currentTime = System.currentTimeMillis();
                // Missing/malformed timing metadata or an expired TTL both
                // force removal and close of the connection.
                if (cacheTime == null || cacheTime.length != CACHE_TIME_LENGTH
                        || cacheTime[0] + cacheTime[1] < currentTime) {
                    cacheMap.remove(key);
                    timeoutMap.remove(key);
                    if (value instanceof CacheCloseable) {
                        ((CacheCloseable) value).close();
                    }
                }
            });
        } catch (Exception e) {
            log.error("connection common cache detect cache available error: {}.", e.getMessage(), e);
        }
    }
    /**
     * clean timeout cache
     */
    private void cleanTimeoutCache() {
        try {
            cacheMap.forEach((key, value) -> {
                // index 0 is startTime, 1 is timeDiff
                Long[] cacheTime = timeoutMap.get(key);
                long currentTime = System.currentTimeMillis();
                if (cacheTime == null || cacheTime.length != CACHE_TIME_LENGTH) {
                    timeoutMap.put(key, new Long[]{currentTime, DEFAULT_CACHE_TIMEOUT});
                } else if (cacheTime[0] + cacheTime[1] < currentTime) {
                    // timeout, remove this object cache
                    log.warn("[connection common cache] clean the timeout cache, key {}", key);
                    timeoutMap.remove(key);
                    cacheMap.remove(key);
                    if (value instanceof CacheCloseable) {
                        log.warn("[connection common cache] close the timeout cache, key {}", key);
                        ((CacheCloseable) value).close();
                    }
                }
            });
            // Throttle the cleaner task; with the single-slot queue above,
            // intermediate submissions are discarded while this sleeps.
            Thread.sleep(20 * 1000);
        } catch (Exception e) {
            log.error("[connection common cache] clean timeout cache error: {}.", e.getMessage(), e);
        }
    }
    /**
     * add update cache
     *
     * @param key cache key
     * @param value cache value
     * @param timeDiff cache time millis
     */
    public void addCache(Object key, Object value, Long timeDiff) {<FILL_FUNCTION_BODY>}
    /**
     * add update cache
     *
     * @param key cache key
     * @param value cache value
     */
    public void addCache(Object key, Object value) {
        addCache(key, value, DEFAULT_CACHE_TIMEOUT);
    }
    /**
     * get cache by key
     *
     * @param key cache key
     * @param refreshCache is refresh cache
     * @return cache object
     */
    public Optional<Object> getCache(Object key, boolean refreshCache) {
        Long[] cacheTime = timeoutMap.get(key);
        if (cacheTime == null || cacheTime.length != CACHE_TIME_LENGTH) {
            log.info("[connection common cache] not hit the cache, key {}.", key);
            return Optional.empty();
        }
        if (cacheTime[0] + cacheTime[1] < System.currentTimeMillis()) {
            log.warn("[connection common cache] is timeout, remove it, key {}.", key);
            timeoutMap.remove(key);
            cacheMap.remove(key);
            return Optional.empty();
        }
        Object value = cacheMap.get(key);
        if (value == null) {
            log.error("[connection common cache] value is null, remove it, key {}.", key);
            cacheMap.remove(key);
            timeoutMap.remove(key);
        } else if (refreshCache) {
            // Sliding expiration: restart the TTL window on access.
            cacheTime[0] = System.currentTimeMillis();
            timeoutMap.put(key, cacheTime);
        }
        return Optional.ofNullable(value);
    }
    /**
     * remove cache by key
     *
     * @param key key
     */
    public void removeCache(Object key) {
        timeoutMap.remove(key);
        Object value = cacheMap.remove(key);
        if (value instanceof CacheCloseable) {
            ((CacheCloseable) value).close();
        }
    }
    /**
     * get common cache instance
     *
     * @return connection common cache
     */
    public static ConnectionCommonCache getInstance() {
        return SingleInstance.INSTANCE;
    }
    /**
     * static single instance
     */
    // Initialization-on-demand holder idiom: INSTANCE is created lazily and
    // thread-safely by class loading.
    private static class SingleInstance {
        private static final ConnectionCommonCache INSTANCE = new ConnectionCommonCache();
    }
}
|
removeCache(key);
if (timeDiff == null) {
timeDiff = DEFAULT_CACHE_TIMEOUT;
}
cacheMap.put(key, value);
timeoutMap.put(key, new Long[]{System.currentTimeMillis(), timeDiff});
timeoutCleanerExecutor.execute(this::cleanTimeoutCache);
| 1,712
| 85
| 1,797
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/common/cache/JdbcConnect.java
|
JdbcConnect
|
close
|
/**
 * Cache entry wrapping a JDBC {@link Connection}; closed when evicted from the
 * connection cache. {@code <FILL_FUNCTION_BODY>} is a dataset masking token.
 */
class JdbcConnect implements CacheCloseable {
    private final Connection connection;
    public JdbcConnect(Connection connection) {
        this.connection = connection;
    }
    // Best-effort close: errors are logged, never propagated (masked body).
    @Override
    public void close() {<FILL_FUNCTION_BODY>}
    public Connection getConnection() {
        return connection;
    }
}
|
try {
if (connection != null) {
connection.close();
}
} catch (Exception e) {
log.error("[connection common cache] close jdbc connect error: {}", e.getMessage());
}
| 84
| 61
| 145
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/common/cache/JmxConnect.java
|
JmxConnect
|
close
|
/**
 * Cache entry wrapping a {@link JMXConnector}; closed when evicted from the
 * connection cache. {@code <FILL_FUNCTION_BODY>} is a dataset masking token.
 */
class JmxConnect implements CacheCloseable {
    private final JMXConnector connection;
    public JmxConnect(JMXConnector connection) {
        this.connection = connection;
    }
    // Best-effort close: errors are logged, never propagated (masked body).
    @Override
    public void close() {<FILL_FUNCTION_BODY>}
    public JMXConnector getConnection() {
        return connection;
    }
}
|
try {
if (connection != null) {
connection.close();
}
} catch (Exception e) {
log.error("[connection common cache] close jmx connect error: {}", e.getMessage());
}
| 97
| 61
| 158
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/common/cache/MongodbConnect.java
|
MongodbConnect
|
close
|
/**
 * Cache entry wrapping a {@link MongoClient}; closed when evicted from the
 * connection cache. {@code <FILL_FUNCTION_BODY>} is a dataset masking token.
 */
class MongodbConnect implements CacheCloseable {
    private final MongoClient mongoClient;
    public MongodbConnect(MongoClient mongoClient) {
        this.mongoClient = mongoClient;
    }
    // Best-effort close: errors are logged, never propagated (masked body).
    @Override
    public void close() {<FILL_FUNCTION_BODY>}
    public MongoClient getMongoClient() {
        return mongoClient;
    }
}
|
try {
if (this.mongoClient != null) {
this.mongoClient.close();
}
} catch (Exception e) {
log.error("[connection common cache] close mongodb connect error: {}", e.getMessage());
}
| 103
| 69
| 172
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/common/cache/RedisConnect.java
|
RedisConnect
|
close
|
/**
 * Cache entry wrapping a Lettuce {@link StatefulConnection}; closed when
 * evicted from the connection cache. {@code <FILL_FUNCTION_BODY>} is a
 * dataset masking token.
 */
class RedisConnect implements CacheCloseable {
    private final StatefulConnection<String, String> connection;
    public RedisConnect(StatefulConnection<String, String> connection) {
        this.connection = connection;
    }
    // Best-effort close: errors are logged, never propagated (masked body).
    @Override
    public void close() {<FILL_FUNCTION_BODY>}
    public StatefulConnection<String, String> getConnection() {
        return connection;
    }
}
|
try {
if (connection != null) {
connection.closeAsync();
}
} catch (Exception e) {
log.error("[connection common cache] close redis connect error: {}", e.getMessage());
}
| 105
| 62
| 167
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/common/cache/SshConnect.java
|
SshConnect
|
close
|
/**
 * Cache entry wrapping an Apache MINA SSHD {@link ClientSession}; closed when
 * evicted from the connection cache. {@code <FILL_FUNCTION_BODY>} is a
 * dataset masking token.
 */
class SshConnect implements CacheCloseable {
    private final ClientSession clientSession;
    public SshConnect(ClientSession clientSession) {
        this.clientSession = clientSession;
    }
    // Best-effort close: errors are logged, never propagated (masked body).
    @Override
    public void close() {<FILL_FUNCTION_BODY>}
    public ClientSession getConnection() {
        return clientSession;
    }
}
|
try {
if (clientSession != null) {
clientSession.close();
}
} catch (Exception e) {
log.error("[connection common cache] close ssh connect error: {}", e.getMessage());
}
| 91
| 62
| 153
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/common/http/CommonHttpClient.java
|
CommonHttpClient
|
checkServerTrusted
|
/**
 * Shared, pooled Apache HttpClient used by the collectors, configured once in
 * a static initializer. {@code <FILL_FUNCTION_BODY>} below is a dataset
 * masking token, kept verbatim.
 */
class CommonHttpClient {
    private static CloseableHttpClient httpClient;
    private static PoolingHttpClientConnectionManager connectionManager;
    /**
     * all max total connection
     */
    private static final int MAX_TOTAL_CONNECTIONS = 50000;
    /**
     * peer route max total connection
     */
    private static final int MAX_PER_ROUTE_CONNECTIONS = 80;
    /**
     * timeout for get connect from pool(ms)
     */
    private static final int REQUIRE_CONNECT_TIMEOUT = 4000;
    /**
     * tcp connect timeout(ms)
     */
    private static final int CONNECT_TIMEOUT = 4000;
    /**
     * socket read timeout(ms)
     */
    private static final int SOCKET_TIMEOUT = 60000;
    /**
     * validated time for idle connection. if when reuse this connection after this time, we will check it available.
     */
    private static final int INACTIVITY_VALIDATED_TIME = 10000;
    /**
     * ssl supported version
     */
    private static final String[] SUPPORTED_SSL = {"TLSv1", "TLSv1.1", "TLSv1.2", "SSLv3"};
    static {
        try {
            SSLContext sslContext = SSLContexts.createDefault();
            // NOTE(review): this trust manager skips certificate-chain and
            // issuer validation (getAcceptedIssuers returns null, client side
            // is a no-op) and hostname verification is disabled below via
            // NoopHostnameVerifier. Presumably deliberate so self-signed
            // monitoring targets can be scraped — confirm this is acceptable.
            X509TrustManager x509TrustManager = new X509TrustManager() {
                @Override
                public void checkClientTrusted(X509Certificate[] x509Certificates, String s) { }
                // Server check is masked in this dataset row.
                @Override
                public void checkServerTrusted(X509Certificate[] x509Certificates, String s) throws CertificateException {<FILL_FUNCTION_BODY>}
                @Override
                public X509Certificate[] getAcceptedIssuers() { return null; }
            };
            sslContext.init(null, new TrustManager[]{x509TrustManager}, null);
            SSLConnectionSocketFactory sslFactory = new SSLConnectionSocketFactory(sslContext, SUPPORTED_SSL, null, new NoopHostnameVerifier());
            Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
                    .register("http", PlainConnectionSocketFactory.INSTANCE)
                    .register("https", sslFactory)
                    .build();
            RequestConfig requestConfig = RequestConfig.custom()
                    .setConnectionRequestTimeout(REQUIRE_CONNECT_TIMEOUT)
                    .setConnectTimeout(CONNECT_TIMEOUT)
                    .setSocketTimeout(SOCKET_TIMEOUT)
                    // auto redirect when 301 302 response status
                    .setRedirectsEnabled(true)
                    .build();
            // connection pool
            connectionManager = new PoolingHttpClientConnectionManager(registry);
            connectionManager.setMaxTotal(MAX_TOTAL_CONNECTIONS);
            connectionManager.setDefaultMaxPerRoute(MAX_PER_ROUTE_CONNECTIONS);
            connectionManager.setValidateAfterInactivity(INACTIVITY_VALIDATED_TIME);
            httpClient = HttpClients.custom()
                    .setConnectionManager(connectionManager)
                    .setDefaultRequestConfig(requestConfig)
                    // clean up unavailable expired connections
                    .evictExpiredConnections()
                    // clean up available but idle connections
                    .evictIdleConnections(100, TimeUnit.SECONDS)
                    .build();
            ThreadFactory threadFactory = new ThreadFactoryBuilder()
                    .setNameFormat("http-connection-pool-cleaner-%d")
                    .setDaemon(true)
                    .build();
            // Periodic sweep of expired/idle pooled connections.
            ScheduledExecutorService scheduledExecutor = Executors.newScheduledThreadPool(1, threadFactory);
            scheduledExecutor.scheduleWithFixedDelay(() -> {
                connectionManager.closeExpiredConnections();
                connectionManager.closeIdleConnections(100, TimeUnit.SECONDS);
            }, 40L, 40L, TimeUnit.SECONDS);
        // NOTE(review): any init failure is swallowed, leaving httpClient null
        // and surfacing later as an NPE in callers — consider logging here.
        } catch (Exception ignored) {}
    }
    public static CloseableHttpClient getHttpClient() {
        return httpClient;
    }
}
|
// check server ssl certificate expired
Date now = new Date();
if (x509Certificates != null) {
for (X509Certificate certificate : x509Certificates) {
Date deadline = certificate.getNotAfter();
if (deadline != null && now.after(deadline)) {
throw new CertificateExpiredException();
}
}
}
| 1,039
| 104
| 1,143
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/database/JdbcSpiLoader.java
|
JdbcSpiLoader
|
run
|
/**
 * Startup hook (Spring {@code CommandLineRunner}) that pre-loads JDBC driver
 * classes so they register with {@code DriverManager} before any collection
 * runs. {@code <FILL_FUNCTION_BODY>} is a dataset masking token, kept verbatim.
 */
class JdbcSpiLoader implements CommandLineRunner {
    @Override
    public void run(String... args) throws Exception {<FILL_FUNCTION_BODY>}
}
|
log.info("start load jdbc drivers");
try {
Class.forName("org.postgresql.Driver");
Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
Class.forName("dm.jdbc.driver.DmDriver");
Class.forName("com.clickhouse.jdbc.ClickHouseDriver");
} catch (Exception e) {
log.error("load jdbc error: {}", e.getMessage());
}
log.info("end load jdbc drivers");
| 45
| 133
| 178
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/dns/DnsCollectImpl.java
|
DnsCollectImpl
|
dig
|
class DnsCollectImpl extends AbstractCollect {
/*
each part of dig command output
*/
private static final String HEADER = "header";
private static final String QUESTION = "question";
private static final String ANSWER = "answer";
private static final String AUTHORITY = "authority";
private static final String ADDITIONAL = "additional";
/*
* used for header key
* example:
* ;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 3221
* ;; flags: qr rd ra ; qd: 1 an: 1 au: 0 ad: 0
*
*
* opcode -> opcode
* status -> status
* flags -> flags
* qd -> questionRowCount
* an -> answerRowCount
* au -> authorityRowCount
* ad -> additionalRowCount
*/
private static final String RESPONSE_TIME = "responseTime";
private static final String OP_CODE = "opcode";
private static final String STATUS = "status";
private static final String FLAGS = "flags";
private static final String QUESTION_ROW_COUNT = "questionRowCount";
private static final String ANSWER_ROW_COUNT = "answerRowCount";
private static final String AUTHORITY_ROW_COUNT = "authorityRowCount";
private static final String ADDITIONAL_ROW_COUNT = "additionalRowCount";
@Override
public void collect(CollectRep.MetricsData.Builder builder, long monitorId, String app, Metrics metrics) {
// check params
if (checkDnsProtocolFailed(metrics.getDns())) {
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg("DNS collect must have a valid DNS protocol param! ");
return;
}
DnsResolveResult dnsResolveResult;
try {
// run dig command
dnsResolveResult = dig(metrics.getDns());
} catch (IOException e) {
log.info(CommonUtil.getMessageFromThrowable(e));
builder.setCode(CollectRep.Code.UN_CONNECTABLE);
builder.setMsg(e.getMessage());
return;
} catch (Exception e) {
String errorMsg = CommonUtil.getMessageFromThrowable(e);
log.warn("[dns collect] error: {}", e.getMessage(), e);
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg(errorMsg);
return;
}
// build dns metrics data
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
if (StringUtils.equals(HEADER, metrics.getName())) {
// add header columns
Map<String, String> headerInfo = dnsResolveResult.getHeaderInfo();
metrics.getAliasFields().forEach(field -> valueRowBuilder.addColumns(headerInfo.getOrDefault(field, CommonConstants.NULL_VALUE)));
}else {
// add question/answer/authority/additional columns
List<String> currentMetricsResolveResultList = dnsResolveResult.getList(metrics.getName());
for (int index = 0; index < metrics.getAliasFields().size(); index++) {
valueRowBuilder.addColumns(index >= currentMetricsResolveResultList.size()
? CommonConstants.NULL_VALUE
: currentMetricsResolveResultList.get(index));
}
}
builder.addValues(valueRowBuilder.build());
}
@Override
public String supportProtocol() {
return DispatchConstants.PROTOCOL_DNS;
}
private boolean checkDnsProtocolFailed(DnsProtocol dnsProtocol) {
return Objects.isNull(dnsProtocol) || dnsProtocol.isInvalid();
}
/**
* run dig command
*/
private DnsResolveResult dig(DnsProtocol dns) throws IOException {<FILL_FUNCTION_BODY>}
private DnsResolveResult resolve(Message message, Long responseTime) {
return DnsResolveResult.builder()
.headerInfo(getHeaderInfo(message, responseTime))
.questionList(getSectionInfo(message, Section.QUESTION))
.answerList(getSectionInfo(message, Section.ANSWER))
.authorityList(getSectionInfo(message, Section.AUTHORITY))
.additionalList(getSectionInfo(message, Section.ADDITIONAL))
.build();
}
private Map<String, String> getHeaderInfo(Message message, Long responseTime) {
Map<String, String> resultMap = Maps.newHashMap();
resultMap.put(RESPONSE_TIME, String.valueOf(responseTime));
resultMap.put(OP_CODE, Opcode.string(message.getHeader().getOpcode()));
resultMap.put(STATUS, Rcode.string(message.getHeader().getRcode()));
resultMap.put(FLAGS, message.getHeader().printFlags());
resultMap.put(QUESTION_ROW_COUNT, String.valueOf(message.getHeader().getCount(Section.QUESTION)));
resultMap.put(ANSWER_ROW_COUNT, String.valueOf(message.getHeader().getCount(Section.ANSWER)));
resultMap.put(AUTHORITY_ROW_COUNT, String.valueOf(message.getHeader().getCount(Section.AUTHORITY)));
resultMap.put(ADDITIONAL_ROW_COUNT, String.valueOf(message.getHeader().getCount(Section.ADDITIONAL)));
return resultMap;
}
private List<String> getSectionInfo(Message message, int section) {
List<RRset> currentSetList = message.getSectionRRsets(section);
if (currentSetList == null || currentSetList.size() <= 0) {
return Lists.newArrayList();
}
List<String> infoList = Lists.newArrayListWithCapacity(currentSetList.size());
currentSetList.forEach(res -> infoList.add(res.toString()));
return infoList;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
private static class DnsResolveResult {
private Map<String, String> headerInfo;
/** example: www.google.com. 140 IN A 192.133.77.133 **/
private List<String> questionList;
private List<String> answerList;
private List<String> authorityList;
private List<String> additionalList;
public List<String> getList(String metricsName) {
switch (metricsName) {
case QUESTION: return questionList;
case ANSWER: return answerList;
case AUTHORITY: return authorityList;
case ADDITIONAL: return additionalList;
default: return Collections.emptyList();
}
}
}
}
|
StopWatch responseTimeStopWatch = new StopWatch("responseTime");
responseTimeStopWatch.start();
Name name = Name.fromString(dns.getAddress(), Name.root);
Message query = Message.newQuery(Record.newRecord(name, Type.ANY, DClass.ANY));
Resolver res = new SimpleResolver(dns.getDnsServerIP());
res.setTimeout(Duration.of(Long.parseLong(dns.getTimeout()), ChronoUnit.MILLIS));
res.setTCP(Boolean.parseBoolean(dns.getTcp()));
res.setPort(Integer.parseInt(dns.getPort()));
Message response = res.send(query);
responseTimeStopWatch.stop();
return resolve(response, responseTimeStopWatch.getLastTaskTimeMillis());
| 1,756
| 204
| 1,960
|
<methods>public non-sealed void <init>() ,public abstract void collect(org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder, long, java.lang.String, org.apache.hertzbeat.common.entity.job.Metrics) ,public abstract java.lang.String supportProtocol() <variables>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/ftp/FtpCollectImpl.java
|
FtpCollectImpl
|
collect
|
class FtpCollectImpl extends AbstractCollect {
private static final String ANONYMOUS = "anonymous";
private static final String PASSWORD = "password";
@Override
public void collect(CollectRep.MetricsData.Builder builder, long monitorId, String app, Metrics metrics) {<FILL_FUNCTION_BODY>}
/**
* collect data: key-value
* Please modify this, if you want to add some metrics.
*/
private Map<String, String> collectValue(FTPClient ftpClient, FtpProtocol ftpProtocol) {
boolean isActive;
String responseTime;
try {
long startTime = System.currentTimeMillis();
connect(ftpClient, ftpProtocol);
login(ftpClient, ftpProtocol);
// In here, we can do some extended operation without changing the architecture
isActive = ftpClient.changeWorkingDirectory(ftpProtocol.getDirection());
long endTime = System.currentTimeMillis();
responseTime = String.valueOf(endTime - startTime);
ftpClient.disconnect();
} catch (Exception e) {
log.info("[FTPClient] error: {}", CommonUtil.getMessageFromThrowable(e), e);
throw new IllegalArgumentException(e.getMessage());
}
return new HashMap<>(8) {
{
put("isActive", Boolean.toString(isActive));
put("responseTime", responseTime);
}
};
}
/**
* login
*/
private void login(FTPClient ftpClient, FtpProtocol ftpProtocol) {
try {
// username: not empty, password: not empty
if (StringUtils.hasText(ftpProtocol.getUsername()) && StringUtils.hasText(ftpProtocol.getPassword())) {
if (!ftpClient.login(ftpProtocol.getUsername(), ftpProtocol.getPassword())) {
throw new IllegalArgumentException("The username or password may be wrong.");
}
return;
}
// anonymous access
if (!ftpClient.login(ANONYMOUS, PASSWORD)) {
throw new IllegalArgumentException("The server may not allow anonymous access, we need to username and password.");
}
} catch (Exception e) {
log.info("[ftp login] error: {}", CommonUtil.getMessageFromThrowable(e), e);
throw new IllegalArgumentException(e.getMessage());
}
}
/**
* connect
*/
private void connect(FTPClient ftpClient, FtpProtocol ftpProtocol) {
try {
ftpClient.connect(ftpProtocol.getHost(), Integer.parseInt(ftpProtocol.getPort()));
} catch (Exception e) {
log.info("[ftp connection] error: {}", CommonUtil.getMessageFromThrowable(e), e);
throw new IllegalArgumentException("The host or port may be wrong.");
}
}
/**
* preCheck params
*/
private void preCheck(Metrics metrics) {
if (metrics == null || metrics.getFtp() == null) {
throw new IllegalArgumentException("Ftp collect must has ftp params.");
}
FtpProtocol ftpProtocol = metrics.getFtp();
Assert.hasText(ftpProtocol.getHost(), "Ftp Protocol host is required.");
Assert.hasText(ftpProtocol.getPort(), "Ftp Protocol port is required.");
Assert.hasText(ftpProtocol.getDirection(), "Ftp Protocol direction is required.");
}
@Override
public String supportProtocol() {
return DispatchConstants.PROTOCOL_FTP;
}
}
|
FTPClient ftpClient = new FTPClient();
FtpProtocol ftpProtocol = metrics.getFtp();
// Set timeout
ftpClient.setControlKeepAliveReplyTimeout(Integer.parseInt(ftpProtocol.getTimeout()));
// Judge whether the basic information is wrong
try {
preCheck(metrics);
} catch (Exception e) {
log.info("[FtpProtocol] error: {}", CommonUtil.getMessageFromThrowable(e), e);
builder.setCode(CollectRep.Code.UN_CONNECTABLE);
builder.setMsg(e.getMessage());
return;
}
// Collect data to load in CollectRep.ValueRow.Builder's object
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
Map<String, String> valueMap;
try {
valueMap = collectValue(ftpClient, ftpProtocol);
metrics.getAliasFields().forEach(it -> {
if (valueMap.containsKey(it)) {
String fieldValue = valueMap.get(it);
valueRowBuilder.addColumns(Objects.requireNonNullElse(fieldValue, CommonConstants.NULL_VALUE));
} else {
valueRowBuilder.addColumns(CommonConstants.NULL_VALUE);
}
});
} catch (Exception e) {
builder.setCode(CollectRep.Code.UN_CONNECTABLE);
builder.setMsg(e.getMessage());
return;
}
builder.addValues(valueRowBuilder.build());
| 904
| 384
| 1,288
|
<methods>public non-sealed void <init>() ,public abstract void collect(org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder, long, java.lang.String, org.apache.hertzbeat.common.entity.job.Metrics) ,public abstract java.lang.String supportProtocol() <variables>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/http/SslCertificateCollectImpl.java
|
SslCertificateCollectImpl
|
validateParams
|
class SslCertificateCollectImpl extends AbstractCollect {
private static final String NAME_SUBJECT = "subject";
private static final String NAME_EXPIRED = "expired";
private static final String NAME_START_TIME = "start_time";
private static final String NAME_START_TIMESTAMP = "start_timestamp";
private static final String NAME_END_TIME = "end_time";
private static final String NAME_END_TIMESTAMP = "end_timestamp";
public SslCertificateCollectImpl() {}
@Override
public void collect(CollectRep.MetricsData.Builder builder,
long monitorId, String app, Metrics metrics) {
long startTime = System.currentTimeMillis();
try {
validateParams(metrics);
} catch (Exception e) {
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg(e.getMessage());
return;
}
HttpProtocol httpProtocol = metrics.getHttp();
HttpsURLConnection urlConnection = null;
try {
String uri = "";
if (IpDomainUtil.isHasSchema(httpProtocol.getHost())) {
uri = httpProtocol.getHost() + ":" + httpProtocol.getPort();
} else {
uri = "https://" + httpProtocol.getHost() + ":" + httpProtocol.getPort();
}
urlConnection = (HttpsURLConnection) new URL(uri).openConnection();
urlConnection.connect();
Certificate[] certificates = urlConnection.getServerCertificates();
if (certificates == null || certificates.length == 0) {
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg("Ssl certificate does not exist.");
return;
}
long responseTime = System.currentTimeMillis() - startTime;
for (Certificate certificate : urlConnection.getServerCertificates()) {
X509Certificate x509Certificate = (X509Certificate) certificate;
Date now = new Date();
Date deadline = x509Certificate.getNotAfter();
boolean expired = deadline != null && now.after(deadline);
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
for (String alias : metrics.getAliasFields()) {
if (CollectorConstants.RESPONSE_TIME.equalsIgnoreCase(alias)) {
valueRowBuilder.addColumns(Long.toString(responseTime));
} else if (NAME_SUBJECT.equalsIgnoreCase(alias)) {
valueRowBuilder.addColumns(x509Certificate.getSubjectDN().getName());
} else if (NAME_EXPIRED.equalsIgnoreCase(alias)) {
valueRowBuilder.addColumns(Boolean.toString(expired));
} else if (NAME_START_TIME.equalsIgnoreCase(alias)) {
valueRowBuilder.addColumns(x509Certificate.getNotBefore().toLocaleString());
} else if (NAME_START_TIMESTAMP.equalsIgnoreCase(alias)) {
valueRowBuilder.addColumns(String.valueOf(x509Certificate.getNotBefore().getTime()));
} else if (NAME_END_TIME.equalsIgnoreCase(alias)) {
valueRowBuilder.addColumns(x509Certificate.getNotAfter().toLocaleString());
} else if (NAME_END_TIMESTAMP.equalsIgnoreCase(alias)) {
valueRowBuilder.addColumns(String.valueOf(x509Certificate.getNotAfter().getTime()));
} else {
valueRowBuilder.addColumns(CommonConstants.NULL_VALUE);
}
}
builder.addValues(valueRowBuilder.build());
}
} catch (SSLPeerUnverifiedException e1) {
String errorMsg = "Ssl certificate does not exist.";
if (e1.getMessage() != null) {
errorMsg = e1.getMessage();
log.error(errorMsg);
}
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg(errorMsg);
} catch (UnknownHostException e2) {
String errorMsg = CommonUtil.getMessageFromThrowable(e2);
log.info(errorMsg);
builder.setCode(CollectRep.Code.UN_REACHABLE);
builder.setMsg("unknown host:" + errorMsg);
} catch (InterruptedIOException | ConnectException | SSLException e3) {
String errorMsg = CommonUtil.getMessageFromThrowable(e3);
log.info(errorMsg);
builder.setCode(CollectRep.Code.UN_CONNECTABLE);
builder.setMsg(errorMsg);
} catch (IOException e4) {
String errorMsg = CommonUtil.getMessageFromThrowable(e4);
log.info(errorMsg);
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg(errorMsg);
} catch (Exception e) {
String errorMsg = CommonUtil.getMessageFromThrowable(e);
log.error(errorMsg, e);
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg(errorMsg);
} finally {
if (urlConnection != null) {
urlConnection.disconnect();
}
}
}
@Override
public String supportProtocol() {
return DispatchConstants.PROTOCOL_SSL_CERT;
}
private void validateParams(Metrics metrics) throws Exception {<FILL_FUNCTION_BODY>}
}
|
if (metrics == null || metrics.getHttp() == null) {
throw new Exception("Http/Https collect must has http params");
}
HttpProtocol httpProtocol = metrics.getHttp();
if (httpProtocol.getUrl() == null
|| "".equals(httpProtocol.getUrl())
|| !httpProtocol.getUrl().startsWith(RIGHT_DASH)) {
httpProtocol.setUrl(httpProtocol.getUrl() == null ? RIGHT_DASH : RIGHT_DASH + httpProtocol.getUrl().trim());
}
| 1,363
| 139
| 1,502
|
<methods>public non-sealed void <init>() ,public abstract void collect(org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder, long, java.lang.String, org.apache.hertzbeat.common.entity.job.Metrics) ,public abstract java.lang.String supportProtocol() <variables>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/http/promethus/AbstractPrometheusParse.java
|
AbstractPrometheusParse
|
handle
|
class AbstractPrometheusParse {
/**
* Downstream node
*/
private AbstractPrometheusParse prometheusParse;
AbstractPrometheusParse() {
}
public AbstractPrometheusParse setInstance(AbstractPrometheusParse prometheusParse) {
this.prometheusParse = prometheusParse;
return this;
}
/**
* prom response type check: string, matrix, vector, scalar
* todo:string、scalar类型响应未实现
* @param responseStr 返回字符串
* @return return
*/
abstract Boolean checkType(String responseStr);
/**
* Parse the prom interface response data
* @param resp 返回数据
* @param aliasFields alias fields
* @param http httpProtocol
* @param builder builder
*/
abstract void parse(String resp, List<String> aliasFields, HttpProtocol http,
CollectRep.MetricsData.Builder builder);
/**
* Processing prom interface response data
* @param resp resp
* @param aliasFields alias fields
* @param http http
* @param builder builder
*/
public void handle(String resp, List<String> aliasFields, HttpProtocol http,
CollectRep.MetricsData.Builder builder) {<FILL_FUNCTION_BODY>}
}
|
if (checkType(resp)) {
parse(resp, aliasFields, http,
builder);
} else {
prometheusParse.handle(resp, aliasFields, http,
builder);
}
| 335
| 56
| 391
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/http/promethus/PrometheusLastParser.java
|
PrometheusLastParser
|
checkType
|
class PrometheusLastParser extends AbstractPrometheusParse {
@Override
public Boolean checkType(String responseStr) {<FILL_FUNCTION_BODY>}
@Override
public void parse(String resp, List<String> aliasFields, HttpProtocol http, CollectRep.MetricsData.Builder builder) {
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
aliasFields.forEach(aliasField -> valueRowBuilder.addColumns(CommonConstants.NULL_VALUE));
}
}
|
log.error("prometheus response data:{} ,no adaptive parser", responseStr);
return true;
| 129
| 30
| 159
|
<methods>public void handle(java.lang.String, List<java.lang.String>, org.apache.hertzbeat.common.entity.job.protocol.HttpProtocol, org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder) ,public org.apache.hertzbeat.collector.collect.http.promethus.AbstractPrometheusParse setInstance(org.apache.hertzbeat.collector.collect.http.promethus.AbstractPrometheusParse) <variables>private org.apache.hertzbeat.collector.collect.http.promethus.AbstractPrometheusParse prometheusParse
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/http/promethus/PrometheusMatrixParser.java
|
PrometheusMatrixParser
|
parse
|
class PrometheusMatrixParser extends AbstractPrometheusParse {
@Override
public Boolean checkType(String responseStr) {
try {
PromVectorOrMatrix promVectorOrMatrix = JsonUtil.fromJson(responseStr, PromVectorOrMatrix.class);
if (promVectorOrMatrix != null && promVectorOrMatrix.getData() != null) {
return DispatchConstants.PARSE_PROM_QL_MATRIX.equals(promVectorOrMatrix.getData().getResultType());
}
return false;
} catch (Exception e) {
return false;
}
}
@Override
public void parse(String resp, List<String> aliasFields, HttpProtocol http, CollectRep.MetricsData.Builder builder) {<FILL_FUNCTION_BODY>}
}
|
PromVectorOrMatrix promVectorOrMatrix = JsonUtil.fromJson(resp, PromVectorOrMatrix.class);
if (promVectorOrMatrix == null){
return;
}
List<PromVectorOrMatrix.Result> result = promVectorOrMatrix.getData().getResult();
for (PromVectorOrMatrix.Result r : result) {
for (List<Object> value : r.getValues()) {
boolean setTimeFlag = false;
boolean setValueFlag = false;
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
for (String aliasField : aliasFields) {
if (!CollectUtil.assertPromRequireField(aliasField)) {
JsonElement jsonElement = r.getMetric().get(aliasField);
if (jsonElement != null) {
valueRowBuilder.addColumns(jsonElement.getAsString());
} else {
valueRowBuilder.addColumns(CommonConstants.NULL_VALUE);
}
} else {
if (CommonConstants.PROM_TIME.equals(aliasField)) {
for (Object o : value) {
if (o instanceof Double) {
valueRowBuilder.addColumns(String.valueOf(BigDecimal.valueOf((Double) o * 1000)));
setTimeFlag = true;
}
}
if (!setTimeFlag) {
valueRowBuilder.addColumns(CommonConstants.NULL_VALUE);
}
} else {
for (Object o : value) {
if (o instanceof String) {
valueRowBuilder.addColumns((String) o);
setValueFlag = true;
}
}
if (!setValueFlag) {
valueRowBuilder.addColumns(CommonConstants.NULL_VALUE);
}
}
}
}
builder.addValues(valueRowBuilder);
}
}
| 197
| 460
| 657
|
<methods>public void handle(java.lang.String, List<java.lang.String>, org.apache.hertzbeat.common.entity.job.protocol.HttpProtocol, org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder) ,public org.apache.hertzbeat.collector.collect.http.promethus.AbstractPrometheusParse setInstance(org.apache.hertzbeat.collector.collect.http.promethus.AbstractPrometheusParse) <variables>private org.apache.hertzbeat.collector.collect.http.promethus.AbstractPrometheusParse prometheusParse
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/http/promethus/PrometheusVectorParser.java
|
PrometheusVectorParser
|
parse
|
class PrometheusVectorParser extends AbstractPrometheusParse {
@Override
public Boolean checkType(String responseStr) {
try {
PromVectorOrMatrix promVectorOrMatrix = JsonUtil.fromJson(responseStr, PromVectorOrMatrix.class);
if (promVectorOrMatrix != null && promVectorOrMatrix.getData() != null) {
return DispatchConstants.PARSE_PROM_QL_VECTOR.equals(promVectorOrMatrix.getData().getResultType());
}
return false;
} catch (Exception e) {
return false;
}
}
@Override
public void parse(String resp, List<String> aliasFields, HttpProtocol http, CollectRep.MetricsData.Builder builder) {<FILL_FUNCTION_BODY>}
}
|
boolean setTimeFlag = false;
boolean setValueFlag = false;
PromVectorOrMatrix promVectorOrMatrix = JsonUtil.fromJson(resp, PromVectorOrMatrix.class);
if (promVectorOrMatrix == null){
return;
}
List<PromVectorOrMatrix.Result> result = promVectorOrMatrix.getData().getResult();
for (PromVectorOrMatrix.Result r : result) {
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
for (String aliasField : aliasFields) {
if (!CollectUtil.assertPromRequireField(aliasField)) {
JsonElement jsonElement = r.getMetric().get(aliasField);
if (jsonElement != null) {
valueRowBuilder.addColumns(jsonElement.getAsString());
} else {
valueRowBuilder.addColumns(CommonConstants.NULL_VALUE);
}
} else {
if (CommonConstants.PROM_TIME.equals(aliasField)) {
for (Object o : r.getValue()) {
if (o instanceof Double) {
valueRowBuilder.addColumns(String.valueOf(BigDecimal.valueOf((Double) o * 1000)));
setTimeFlag = true;
}
}
if (!setTimeFlag) {
valueRowBuilder.addColumns(CommonConstants.NULL_VALUE);
}
} else {
for (Object o : r.getValue()) {
if (o instanceof String) {
valueRowBuilder.addColumns((String) o);
setValueFlag = true;
}
}
if (!setValueFlag) {
valueRowBuilder.addColumns(CommonConstants.NULL_VALUE);
}
}
}
}
builder.addValues(valueRowBuilder);
}
| 196
| 444
| 640
|
<methods>public void handle(java.lang.String, List<java.lang.String>, org.apache.hertzbeat.common.entity.job.protocol.HttpProtocol, org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder) ,public org.apache.hertzbeat.collector.collect.http.promethus.AbstractPrometheusParse setInstance(org.apache.hertzbeat.collector.collect.http.promethus.AbstractPrometheusParse) <variables>private org.apache.hertzbeat.collector.collect.http.promethus.AbstractPrometheusParse prometheusParse
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/httpsd/HttpsdImpl.java
|
HttpsdImpl
|
collect
|
class HttpsdImpl extends AbstractCollect {
private final static String SERVER = "server";
private final DiscoveryClientManagement discoveryClientManagement = new DiscoveryClientManagement();
@Override
public void collect(CollectRep.MetricsData.Builder builder, long monitorId, String app, Metrics metrics) {<FILL_FUNCTION_BODY>}
private void collectMetrics(CollectRep.MetricsData.Builder builder, Metrics metrics, DiscoveryClient discoveryClient) {
long beginTime = System.currentTimeMillis();
// Available and Server monitor
if (StringUtils.equals(metrics.getName(), SERVER)) {
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
ServerInfo serverInfo = discoveryClient.getServerInfo();
metrics.getAliasFields().forEach(fieldName -> {
if (StringUtils.equalsAnyIgnoreCase(CollectorConstants.RESPONSE_TIME, fieldName)) {
valueRowBuilder.addColumns(String.valueOf(System.currentTimeMillis() - beginTime));
}else {
addColumnIfMatched(fieldName, serverInfo, valueRowBuilder);
}
});
builder.addValues(valueRowBuilder.build());
}else {
// Service instances monitor
discoveryClient.getServices().forEach(serviceInstance -> {
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
metrics.getAliasFields().forEach(fieldName -> addColumnIfMatched(fieldName, serviceInstance, valueRowBuilder));
builder.addValues(valueRowBuilder.build());
});
}
}
@Override
public String supportProtocol() {
return DispatchConstants.PROTOCOL_HTTP_SD;
}
private boolean checkParamsFailed(HttpsdProtocol httpsd) {
return Objects.isNull(httpsd) || httpsd.isInvalid();
}
private void addColumnIfMatched(String fieldName, Object sourceObj, CollectRep.ValueRow.Builder valueRowBuilder) {
String columnValue = null;
try {
Field declaredField = sourceObj.getClass().getDeclaredField(fieldName);
declaredField.setAccessible(Boolean.TRUE);
columnValue = (String) declaredField.get(sourceObj);
} catch (NoSuchFieldException | IllegalAccessException e) {
log.warn("No such field for {}", fieldName);
}
valueRowBuilder.addColumns(StringUtils.isBlank(columnValue)
? CommonConstants.NULL_VALUE
: columnValue);
}
}
|
HttpsdProtocol httpsdProtocol = metrics.getHttpsd();
// check params
if (checkParamsFailed(httpsdProtocol)) {
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg("http_sd collect must have a valid http_sd protocol param! ");
return;
}
try (DiscoveryClient discoveryClient = discoveryClientManagement.getClient(httpsdProtocol)) {
collectMetrics(builder, metrics, discoveryClient);
} catch (TransportException e1) {
String errorMsg = "Consul " + CommonUtil.getMessageFromThrowable(e1);
log.error(errorMsg);
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg(errorMsg);
} catch (Exception e) {
String errorMsg = CommonUtil.getMessageFromThrowable(e);
log.error(errorMsg, e);
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg(errorMsg);
}
| 636
| 250
| 886
|
<methods>public non-sealed void <init>() ,public abstract void collect(org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder, long, java.lang.String, org.apache.hertzbeat.common.entity.job.Metrics) ,public abstract java.lang.String supportProtocol() <variables>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/httpsd/discovery/DiscoveryClientManagement.java
|
DiscoveryClientManagement
|
doCreateClient
|
class DiscoveryClientManagement {
public DiscoveryClient getClient(HttpsdProtocol httpsdProtocol) {
return createClient(httpsdProtocol, DiscoveryClientInstance.getByName(httpsdProtocol.getDiscoveryClientTypeName()));
}
private DiscoveryClient createClient(HttpsdProtocol httpsdProtocol, DiscoveryClientInstance discoveryClientInstance) {
if (Objects.equals(discoveryClientInstance, DiscoveryClientInstance.NOT_SUPPORT)) {
return null;
}
return doCreateClient(httpsdProtocol, discoveryClientInstance);
}
private DiscoveryClient doCreateClient(HttpsdProtocol httpsdProtocol, DiscoveryClientInstance discoveryClientInstance) {<FILL_FUNCTION_BODY>}
}
|
DiscoveryClient discoveryClient;
switch (discoveryClientInstance) {
case CONSUL:
discoveryClient = new ConsulDiscoveryClient();
break;
case NACOS:
discoveryClient = new NacosDiscoveryClient();
break;
default:
return null;
}
discoveryClient.initClient(discoveryClient.buildConnectConfig(httpsdProtocol));
return discoveryClient;
| 183
| 107
| 290
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/httpsd/discovery/impl/ConsulDiscoveryClient.java
|
ConsulDiscoveryClient
|
getServices
|
class ConsulDiscoveryClient implements DiscoveryClient {
private ConsulClient consulClient;
@Override
public ConnectConfig buildConnectConfig(HttpsdProtocol httpsdProtocol) {
return ConnectConfig.builder()
.host(httpsdProtocol.getHost())
.port(Integer.parseInt(httpsdProtocol.getPort()))
.build();
}
@Override
public void initClient(ConnectConfig connectConfig) {
consulClient = new ConsulClient(connectConfig.getHost(), connectConfig.getPort());
}
@Override
public ServerInfo getServerInfo() {
Self self = consulClient.getAgentSelf().getValue();
return ServerInfo.builder()
.address(self.getMember().getAddress())
.port(String.valueOf(self.getMember().getPort()))
.build();
}
@Override
public List<ServiceInstance> getServices() {<FILL_FUNCTION_BODY>}
@Override
public void close() {
}
private String getHealthStatus(String serviceId, Collection<Check> healthCheckList) {
return healthCheckList.stream()
.filter(healthCheck -> StringUtils.equals(healthCheck.getServiceId(), serviceId))
.findFirst()
.map(check -> check.getStatus().name())
.orElse("");
}
}
|
Map<String, Service> serviceMap = consulClient.getAgentServices().getValue();
List<ServiceInstance> serviceInstanceList = Lists.newArrayListWithExpectedSize(serviceMap.size());
Collection<Check> healthCheckList = consulClient.getAgentChecks().getValue().values();
serviceMap.forEach((serviceId, instance) -> serviceInstanceList.add(ServiceInstance.builder()
.serviceId(serviceId)
.serviceName(instance.getService())
.address(instance.getAddress())
.port(String.valueOf(instance.getPort()))
.healthStatus(getHealthStatus(serviceId, healthCheckList))
.build()));
return serviceInstanceList;
| 345
| 173
| 518
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/httpsd/discovery/impl/NacosDiscoveryClient.java
|
NacosDiscoveryClient
|
getServices
|
class NacosDiscoveryClient implements DiscoveryClient {
private NamingService namingService;
private ConnectConfig localConnectConfig;
@Override
public ConnectConfig buildConnectConfig(HttpsdProtocol httpsdProtocol) {
return ConnectConfig.builder()
.host(httpsdProtocol.getHost())
.port(Integer.parseInt(httpsdProtocol.getPort()))
.build();
}
@Override
public void initClient(ConnectConfig connectConfig) {
try {
localConnectConfig = connectConfig;
namingService = NamingFactory.createNamingService(connectConfig.getHost() + ":" + connectConfig.getPort());
}catch (NacosException exception) {
throw new RuntimeException("Failed to init namingService");
}
}
@Override
public ServerInfo getServerInfo() {
if (Objects.isNull(namingService)) {
throw new NullPointerException("NamingService is null");
}
String serverStatus = namingService.getServerStatus();
return switch (serverStatus) {
case "UP" -> ServerInfo.builder()
.address(localConnectConfig.getHost())
.port(String.valueOf(localConnectConfig.getPort()))
.build();
case "DOWN" -> throw new RuntimeException("Nacos connection failed");
default -> throw new RuntimeException("ServerStatus must be UP or DOWN");
};
}
@Override
public List<ServiceInstance> getServices() {<FILL_FUNCTION_BODY>}
@Override
public void close() {
if (namingService == null) {
return;
}
try {
namingService.shutDown();
}catch (NacosException ignore) {
}
}
}
|
if (Objects.isNull(namingService)) {
return Collections.emptyList();
}
List<ServiceInstance> serviceInstanceList = Lists.newArrayList();
try {
for (String serviceName : namingService.getServicesOfServer(0, 9999).getData()) {
namingService.getAllInstances(serviceName).forEach(instance ->
serviceInstanceList.add(ServiceInstance.builder()
.serviceId(instance.getInstanceId())
.serviceName(instance.getServiceName())
.address(instance.getIp())
.port(String.valueOf(instance.getPort()))
.healthStatus(instance.isHealthy() ? "UP" : "DOWN")
.build()));
}
} catch (NacosException e) {
throw new RuntimeException("Failed to fetch instance info");
}
return serviceInstanceList;
| 450
| 225
| 675
|
<no_super_class>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/icmp/IcmpCollectImpl.java
|
IcmpCollectImpl
|
collect
|
class IcmpCollectImpl extends AbstractCollect {
public IcmpCollectImpl(){}
@Override
public void collect(CollectRep.MetricsData.Builder builder, long monitorId, String app, Metrics metrics) {<FILL_FUNCTION_BODY>}
@Override
public String supportProtocol() {
return DispatchConstants.PROTOCOL_ICMP;
}
}
|
        long startTime = System.currentTimeMillis();
        // Basic validation: the icmp parameter section is mandatory.
        if (metrics == null || metrics.getIcmp() == null) {
            builder.setCode(CollectRep.Code.FAIL);
            builder.setMsg("ICMP collect must has icmp params");
            return;
        }
        IcmpProtocol icmp = metrics.getIcmp();
        // Timeout defaults to 6000 ms when the configured value is missing or unparsable.
        int timeout = 6000;
        try {
            timeout = Integer.parseInt(icmp.getTimeout());
        } catch (Exception e) {
            log.warn(e.getMessage());
        }
        try {
            // todo need root java jcm to use ICMP, else it telnet the peer server 7 port available
            // todo the JVM needs root privileges to send real ICMP; otherwise isReachable falls back to a TCP probe of echo port 7
            // todo https://stackoverflow.com/questions/11506321/how-to-ping-an-ip-address
            boolean status = InetAddress.getByName(icmp.getHost()).isReachable(timeout);
            long responseTime = System.currentTimeMillis() - startTime;
            if (status) {
                // Reachable: emit one row, filling only the responseTime column.
                CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
                for (String alias : metrics.getAliasFields()) {
                    if (CollectorConstants.RESPONSE_TIME.equalsIgnoreCase(alias)) {
                        valueRowBuilder.addColumns(Long.toString(responseTime));
                    } else {
                        valueRowBuilder.addColumns(CommonConstants.NULL_VALUE);
                    }
                }
                builder.addValues(valueRowBuilder.build());
            } else {
                builder.setCode(CollectRep.Code.UN_REACHABLE);
                builder.setMsg("Un Reachable, Timeout " + timeout + "ms");
                return;
            }
        } catch (UnknownHostException unknownHostException) {
            String errorMsg = CommonUtil.getMessageFromThrowable(unknownHostException);
            builder.setCode(CollectRep.Code.UN_REACHABLE);
            builder.setMsg("UnknownHost " + errorMsg);
        } catch (IOException ioException) {
            String errorMsg = CommonUtil.getMessageFromThrowable(ioException);
            builder.setCode(CollectRep.Code.UN_REACHABLE);
            builder.setMsg("IOException " + errorMsg);
        } catch (Exception e) {
            String errorMsg = CommonUtil.getMessageFromThrowable(e);
            log.error(errorMsg, e);
            builder.setCode(CollectRep.Code.FAIL);
            builder.setMsg(errorMsg);
        }
| 99
| 664
| 763
|
<methods>public non-sealed void <init>() ,public abstract void collect(org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder, long, java.lang.String, org.apache.hertzbeat.common.entity.job.Metrics) ,public abstract java.lang.String supportProtocol() <variables>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/jmx/JmxCollectImpl.java
|
JmxCollectImpl
|
collect
|
class JmxCollectImpl extends AbstractCollect {
    // Prefix/suffix used to assemble a standard JMX-over-RMI service URL.
    private static final String JMX_URL_PREFIX = "service:jmx:rmi:///jndi/rmi://";
    private static final String JMX_URL_SUFFIX = "/jmxrmi";
    // User-supplied JMX URLs containing "/stub/" are rejected (security hardening).
    private static final String IGNORED_STUB = "/stub/";
    // Separator between an attribute name and a CompositeData sub-key in alias fields.
    private static final String SUB_ATTRIBUTE = "->";
    public JmxCollectImpl() {
    }
    @Override
    public void collect(CollectRep.MetricsData.Builder builder, long monitorId, String app, Metrics metrics) {<FILL_FUNCTION_BODY>}
    @Override
    public String supportProtocol() {
        // Registers this collector for the "jmx" protocol in the dispatcher.
        return DispatchConstants.PROTOCOL_JMX;
    }
private Map<String, String> extractAttributeValue(AttributeList attributeList) {
if (attributeList == null || attributeList.isEmpty()) {
throw new RuntimeException("attributeList is empty");
}
Map<String, String> attributeValueMap = new HashMap<>(attributeList.size());
for (Attribute attribute : attributeList.asList()) {
Object value = attribute.getValue();
if (value == null) {
log.info("attribute {} value is null.", attribute.getName());
continue;
}
if (value instanceof Number || value instanceof String || value instanceof ObjectName
|| value instanceof Boolean || value instanceof Date || value instanceof TimeUnit) {
attributeValueMap.put(attribute.getName(), value.toString());
} else if (value instanceof CompositeData) {
CompositeData compositeData = (CompositeData) value;
CompositeType compositeType = compositeData.getCompositeType();
for (String typeKey : compositeType.keySet()) {
Object fieldValue = compositeData.get(typeKey);
attributeValueMap.put(attribute.getName() + SUB_ATTRIBUTE + typeKey, fieldValue.toString());
}
} else if (value instanceof String[]) {
String[] values = (String[]) value;
StringBuilder builder = new StringBuilder();
for (int index = 0; index < values.length; index++) {
builder.append(values[index]);
if (index < values.length - 1) {
builder.append(",");
}
}
attributeValueMap.put(attribute.getName(), builder.toString());
} else {
log.warn("attribute value type {} not support.", value.getClass().getName());
}
}
return attributeValueMap;
}
private void validateParams(Metrics metrics) throws IllegalArgumentException {
if (metrics == null || metrics.getJmx() == null) {
throw new IllegalArgumentException("JMX collect must has jmx params");
}
if (StringUtils.hasText(metrics.getJmx().getUrl())) {
if (metrics.getJmx().getUrl().contains(IGNORED_STUB)) {
throw new IllegalArgumentException("JMX url prohibit contains stub, please check");
}
}
}
    /**
     * Returns a live JMXConnector for the given protocol settings, reusing a
     * cached connection when possible and creating (and caching) a new one
     * otherwise.
     */
    private JMXConnector getConnectSession(JmxProtocol jmxProtocol) throws IOException {
        // Cache key is (ip, port, username, password).
        CacheIdentifier identifier = CacheIdentifier.builder().ip(jmxProtocol.getHost())
                .port(jmxProtocol.getPort()).username(jmxProtocol.getUsername())
                .password(jmxProtocol.getPassword()).build();
        Optional<Object> cacheOption = ConnectionCommonCache.getInstance().getCache(identifier, true);
        JMXConnector conn = null;
        if (cacheOption.isPresent()) {
            JmxConnect jmxConnect = (JmxConnect) cacheOption.get();
            conn = jmxConnect.getConnection();
            try {
                // Cheap liveness probe: throws if the remote side closed the connection.
                conn.getMBeanServerConnection();
            } catch (Exception e) {
                // Stale connection: drop it from the cache and reconnect below.
                conn = null;
                ConnectionCommonCache.getInstance().removeCache(identifier);
            }
        }
        if (conn != null) {
            return conn;
        }
        // Either use the caller-supplied JMX URL or build the standard RMI one.
        String url;
        if (jmxProtocol.getUrl() != null) {
            url = jmxProtocol.getUrl();
        } else {
            url = JMX_URL_PREFIX + jmxProtocol.getHost() + ":" + jmxProtocol.getPort() + JMX_URL_SUFFIX;
        }
        Map<String, Object> environment = new HashMap<>(4);
        if (StringUtils.hasText(jmxProtocol.getUsername()) && StringUtils.hasText(jmxProtocol.getPassword())) {
            String[] credential = new String[] {jmxProtocol.getUsername(), jmxProtocol.getPassword()};
            environment.put(javax.management.remote.JMXConnector.CREDENTIALS, credential);
        }
        if (Boolean.TRUE.toString().equals(jmxProtocol.getSsl())) {
            // Enable SSL for both the JNDI lookup and the RMI client sockets.
            environment.put(Context.SECURITY_PROTOCOL, "ssl");
            SslRMIClientSocketFactory clientSocketFactory = new SslRMIClientSocketFactory();
            environment.put(RMIConnectorServer.RMI_CLIENT_SOCKET_FACTORY_ATTRIBUTE, clientSocketFactory);
            environment.put("com.sun.jndi.rmi.factory.socket", clientSocketFactory);
        }
        JMXServiceURL jmxServiceUrl = new JMXServiceURL(url);
        conn = JMXConnectorFactory.connect(jmxServiceUrl, environment);
        ConnectionCommonCache.getInstance().addCache(identifier, new JmxConnect(conn));
        return conn;
    }
}
|
try {
JmxProtocol jmxProtocol = metrics.getJmx();
validateParams(metrics);
// Create a jndi remote connection
JMXConnector jmxConnector = getConnectSession(jmxProtocol);
MBeanServerConnection serverConnection = jmxConnector.getMBeanServerConnection();
ObjectName objectName = new ObjectName(jmxProtocol.getObjectName());
Set<ObjectInstance> objectInstanceSet = serverConnection.queryMBeans(objectName, null);
Set<String> attributeNameSet = metrics.getAliasFields().stream()
.map(field -> field.split(SUB_ATTRIBUTE)[0]).collect(Collectors.toSet());
for (ObjectInstance objectInstance : objectInstanceSet) {
ObjectName currentObjectName = objectInstance.getObjectName();
MBeanInfo beanInfo = serverConnection.getMBeanInfo(currentObjectName);
MBeanAttributeInfo[] attrInfos = beanInfo.getAttributes();
String[] attributes = new String[attributeNameSet.size()];
attributes = Arrays.stream(attrInfos)
.filter(item -> item.isReadable() && attributeNameSet.contains(item.getName()))
.map(MBeanFeatureInfo::getName)
.collect(Collectors.toList()).toArray(attributes);
AttributeList attributeList = serverConnection.getAttributes(currentObjectName, attributes);
Map<String, String> attributeValueMap = extractAttributeValue(attributeList);
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
for (String aliasField : metrics.getAliasFields()) {
String fieldValue = attributeValueMap.get(aliasField);
valueRowBuilder.addColumns(fieldValue != null ? fieldValue : CommonConstants.NULL_VALUE);
}
builder.addValues(valueRowBuilder.build());
}
} catch (IOException exception) {
String errorMsg = CommonUtil.getMessageFromThrowable(exception);
log.error("JMX IOException :{}", errorMsg);
builder.setCode(CollectRep.Code.UN_CONNECTABLE);
builder.setMsg(errorMsg);
} catch (Exception e) {
String errorMsg = CommonUtil.getMessageFromThrowable(e);
log.error("JMX Error :{}", errorMsg);
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg(errorMsg);
}
| 1,356
| 599
| 1,955
|
<methods>public non-sealed void <init>() ,public abstract void collect(org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder, long, java.lang.String, org.apache.hertzbeat.common.entity.job.Metrics) ,public abstract java.lang.String supportProtocol() <variables>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/memcached/MemcachedCollectImpl.java
|
MemcachedCollectImpl
|
collect
|
class MemcachedCollectImpl extends AbstractCollect {
    public MemcachedCollectImpl() {
    }
    // Raw memcached text-protocol commands used to gather statistics.
    private static final String STATS = "stats";
    private static final String STATS_SETTINGS = "stats settings";
    private static final String STATS_ITEMS = "stats items";
    private static final String STATS_SIZES = "stats sizes";
    // Every stats response is terminated by an "END" line.
    private static final String STATS_END_RSP = "END";
    @Override
    public void collect(CollectRep.MetricsData.Builder builder, long monitorId, String app, Metrics metrics) {<FILL_FUNCTION_BODY>}
    /**
     * Sends one stats command and reads "STAT <key> <value>" lines into the
     * map until the terminating END line.
     */
    private static void parseCmdResponse(Map<String, String> statsMap,
                                         BufferedReader in,
                                         PrintWriter out,
                                         String cmd) throws IOException {
        out.println(cmd);
        String line;
        while ((line = in.readLine()) != null && !line.equals(STATS_END_RSP)) {
            // Parse each line and store the key/value pair into the map.
            String[] parts = line.split(" ");
            if (parts.length == 3) {
                statsMap.put(parts[1], parts[2]);
            }
        }
    }
private static void parseSizesOutput(Map<String, String> statsMap,
BufferedReader in,
PrintWriter out) throws IOException {
out.println(STATS_SIZES);
String line;
while ((line = in.readLine()) != null && !line.equals(STATS_END_RSP)) {
String[] parts = line.split("\\s+");
// 提取 slab size 和 slab count,并放入HashMap
if (parts.length >= 3 && "STAT".equals(parts[0])) {
statsMap.put("item_size", parts[1]);
statsMap.put("item_count", parts[2]);
}
}
}
    @Override
    public String supportProtocol() {
        // Registers this collector for the "memcached" protocol in the dispatcher.
        return DispatchConstants.PROTOCOL_MEMCACHED;
    }
}
|
        long startTime = System.currentTimeMillis();
        // The memcached parameter section is mandatory.
        if (metrics == null || metrics.getMemcached() == null) {
            builder.setCode(CollectRep.Code.FAIL);
            builder.setMsg("Memcached collect must has Memcached params");
            return;
        }
        MemcachedProtocol memcachedProtocol = metrics.getMemcached();
        String memcachedHost = memcachedProtocol.getHost();
        String memcachedPort = memcachedProtocol.getPort();
        Socket socket = null;
        try {
            socket = new Socket();
            SocketAddress socketAddress = new InetSocketAddress(memcachedHost, Integer.parseInt(memcachedPort));
            socket.connect(socketAddress);
            if (socket.isConnected()) {
                long responseTime = System.currentTimeMillis() - startTime;
                PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
                BufferedReader in = new BufferedReader(new InputStreamReader(socket.getInputStream()));
                // Send the stats commands and merge all parsed key/value pairs.
                Map<String, String> resultMap = new HashMap<>(128);
                parseCmdResponse(resultMap, in, out, STATS);
                parseCmdResponse(resultMap, in, out, STATS_SETTINGS);
                parseSizesOutput(resultMap, in, out);
                resultMap.put(CollectorConstants.RESPONSE_TIME, Long.toString(responseTime));
                // Close the streams and the socket connection.
                in.close();
                out.close();
                socket.close();
                List<String> aliasFields = metrics.getAliasFields();
                CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
                for (String field : aliasFields) {
                    String fieldValue = resultMap.get(field);
                    valueRowBuilder.addColumns(Objects.requireNonNullElse(fieldValue, CommonConstants.NULL_VALUE));
                }
                builder.addValues(valueRowBuilder.build());
            } else {
                builder.setCode(CollectRep.Code.UN_CONNECTABLE);
                builder.setMsg("Peer connect failed:");
            }
        } catch (UnknownHostException unknownHostException) {
            String errorMsg = CommonUtil.getMessageFromThrowable(unknownHostException);
            log.info(errorMsg);
            builder.setCode(CollectRep.Code.UN_CONNECTABLE);
            builder.setMsg("UnknownHost:" + errorMsg);
        } catch (SocketTimeoutException socketTimeoutException) {
            String errorMsg = CommonUtil.getMessageFromThrowable(socketTimeoutException);
            log.info(errorMsg);
            builder.setCode(CollectRep.Code.UN_CONNECTABLE);
            builder.setMsg("Socket connect timeout: " + errorMsg);
        } catch (IOException ioException) {
            String errorMsg = CommonUtil.getMessageFromThrowable(ioException);
            log.info(errorMsg);
            builder.setCode(CollectRep.Code.UN_CONNECTABLE);
            builder.setMsg("Connect fail:" + errorMsg);
        } finally {
            // Defensive close: covers every error path above (a second close on
            // the success path is a harmless no-op).
            if (socket != null) {
                try {
                    socket.close();
                } catch (Exception e) {
                    log.error(e.getMessage());
                }
            }
        }
| 503
| 802
| 1,305
|
<methods>public non-sealed void <init>() ,public abstract void collect(org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder, long, java.lang.String, org.apache.hertzbeat.common.entity.job.Metrics) ,public abstract java.lang.String supportProtocol() <variables>
|
apache_hertzbeat
|
hertzbeat/collector/src/main/java/org/apache/hertzbeat/collector/collect/nebulagraph/NebulaGraphCollectImpl.java
|
NebulaGraphCollectImpl
|
collect
|
class NebulaGraphCollectImpl extends AbstractCollect {
    private final static int SUCCESS_CODE = 200;
    // Supported statistics windows (seconds) for the stats API.
    private final static String[] TIME_RANGE = new String[]{"5", "60", "600", "3600"};
    // Template used to build the ".<period>=" matcher for stats lines.
    private final static String REGEX = "\\.%s\\=";
    private final static String STR_SPLIT = "\n";
    private final static String STORAGE_SPLIT_KEY_VALUE = "=";
    // Endpoints of the two supported NebulaGraph metric APIs.
    private final static String GRAPH_API = "/stats";
    private final static String STORAGE_API = "/rocksdb_stats";
    @Override
    public void collect(CollectRep.MetricsData.Builder builder, long monitorId, String app, Metrics metrics) {<FILL_FUNCTION_BODY>}
    @Override
    public String supportProtocol() {
        // Registers this collector for the "nebulagraph" protocol in the dispatcher.
        return DispatchConstants.PROTOCOL_NEBULAGRAPH;
    }
private HttpContext createHttpContext(String host, String port) {
HttpHost httpHost = new HttpHost(host, Integer.parseInt(port));
HttpClientContext httpClientContext = new HttpClientContext();
httpClientContext.setTargetHost(httpHost);
return httpClientContext;
}
    /**
     * Builds a GET request against host:port + url, handling schema-prefixed
     * hosts and IPv6 literals, and applying connect/socket timeouts.
     */
    private HttpUriRequest createHttpRequest(String host, String port, String url, String timeoutStr) {
        RequestBuilder requestBuilder = RequestBuilder.get();
        // uri
        String uri = CollectUtil.replaceUriSpecialChar(url);
        if (IpDomainUtil.isHasSchema(host)) {
            // Host already carries a scheme (http:// or https://): use it as-is.
            requestBuilder.setUri(host + ":" + port + uri);
        } else {
            String ipAddressType = IpDomainUtil.checkIpAddressType(host);
            // IPv6 addresses must be bracketed, e.g. "[::1]:19669/stats".
            String baseUri = CollectorConstants.IPV6.equals(ipAddressType)
                    ? String.format("[%s]:%s", host, port + uri)
                    : String.format("%s:%s", host, port + uri);
            requestBuilder.setUri(CollectorConstants.HTTP_HEADER + baseUri);
        }
        requestBuilder.addHeader(HttpHeaders.CONNECTION, "keep-alive");
        requestBuilder.addHeader(HttpHeaders.USER_AGENT, "Mozilla/5.0 (Windows NT 6.1; WOW64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.76 Safari/537.36");
        requestBuilder.addHeader(HttpHeaders.ACCEPT, "text/plain");
        // A non-positive timeout means "use the client defaults".
        int timeout = Integer.parseInt(timeoutStr);
        if (timeout > 0) {
            RequestConfig requestConfig = RequestConfig.custom()
                    .setConnectTimeout(timeout)
                    .setSocketTimeout(timeout)
                    .setRedirectsEnabled(true)
                    .build();
            requestBuilder.setConfig(requestConfig);
        }
        return requestBuilder.build();
    }
    /**
     * Parse the stats response, keeping only entries that match the requested
     * time period.
     *
     * @param responseBody raw response body
     * @param timePeriod   statistics window in seconds (5/60/600/3600)
     * @param resultMap    map the matching metric name/value pairs are written into
     */
    private void parseStatsResponse(String responseBody, String timePeriod, HashMap<String, String> resultMap) {
        // Build the ".<period>=" matcher, e.g. ".60=" for the 60-second window.
        String timeRegex = String.format(REGEX, timePeriod);
        Pattern pattern = Pattern.compile(timeRegex);
        String[] strArray = responseBody.split(STR_SPLIT);
        for (String str : strArray) {
            Matcher matcher = pattern.matcher(str);
            if (matcher.find()) {
                // Split on the matched suffix: left part is the metric name,
                // right part is its value.
                String[] split = str.split(timeRegex);
                resultMap.put(split[0], split[1]);
            }
        }
    }
/**
* 解析Storage响应通过时间间隔进行筛选
*
* @param responseBody 响应体
*/
private void parseStorageResponse(String responseBody, HashMap<String, String> resultMap) {
String[] strArray = responseBody.split(STR_SPLIT);
for (String str : strArray) {
String[] split = str.split(STORAGE_SPLIT_KEY_VALUE);
resultMap.put(split[0], split[1]);
}
}
}
|
long startTime = System.currentTimeMillis();
if (metrics == null || metrics.getNebulaGraph() == null) {
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg("NebulaGraph collect must has NebulaGraph params");
return;
}
NebulaGraphProtocol nebulaGraph = metrics.getNebulaGraph();
String timePeriod = nebulaGraph.getTimePeriod();
if (!Objects.isNull(nebulaGraph.getTimePeriod())&&!Arrays.asList(TIME_RANGE).contains(timePeriod)) {
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg("The time range for metric statistics, currently supporting 5 seconds, 60 seconds, 600 seconds, and 3600 seconds.");
return;
}
if (nebulaGraph.getHost() == null || nebulaGraph.getHost().isEmpty()) {
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg("The host of NebulaGraph must be set");
return;
}
String resp;
long responseTime;
HashMap<String, String> resultMap = new HashMap<>(64);
CloseableHttpResponse response;
HttpContext httpContext = createHttpContext(nebulaGraph.getHost(), nebulaGraph.getPort());
HttpUriRequest request = createHttpRequest(nebulaGraph.getHost(), nebulaGraph.getPort(),
nebulaGraph.getUrl(), nebulaGraph.getTimeout());
try {
// 发起http请求,获取响应数据
response = CommonHttpClient.getHttpClient().execute(request, httpContext);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode != SUCCESS_CODE) {
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg("StatusCode " + statusCode);
return;
}
resp = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8);
responseTime = System.currentTimeMillis() - startTime;
resultMap.put(CollectorConstants.RESPONSE_TIME, Long.toString(responseTime));
// 根据API进行不同解析
if (GRAPH_API.equals(nebulaGraph.getUrl())) {
parseStatsResponse(resp, nebulaGraph.getTimePeriod(), resultMap);
} else if (STORAGE_API.equals(nebulaGraph.getUrl())) {
parseStorageResponse(resp, resultMap);
}
List<String> aliasFields = metrics.getAliasFields();
CollectRep.ValueRow.Builder valueRowBuilder = CollectRep.ValueRow.newBuilder();
for (String field : aliasFields) {
String fieldValue = resultMap.get(field);
valueRowBuilder.addColumns(Objects.requireNonNullElse(fieldValue, CommonConstants.NULL_VALUE));
}
builder.addValues(valueRowBuilder.build());
} catch (IOException e) {
String errorMsg = CommonUtil.getMessageFromThrowable(e);
log.info(errorMsg);
builder.setCode(CollectRep.Code.FAIL);
builder.setMsg(errorMsg);
}
| 1,071
| 814
| 1,885
|
<methods>public non-sealed void <init>() ,public abstract void collect(org.apache.hertzbeat.common.entity.message.CollectRep.MetricsData.Builder, long, java.lang.String, org.apache.hertzbeat.common.entity.job.Metrics) ,public abstract java.lang.String supportProtocol() <variables>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.