Dataset Preview
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code: DatasetGenerationError
Exception: TypeError
Message: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
Traceback: Traceback (most recent call last):
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2011, in _prepare_split_single
writer.write_table(table)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 585, in write_table
pa_table = table_cast(pa_table, self._schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2302, in table_cast
return cast_table_to_schema(table, schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in cast_table_to_schema
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in <listcomp>
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in wrapper
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in <listcomp>
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2122, in cast_array_to_feature
raise TypeError(f"Couldn't cast array of type\n{_short_str(array.type)}\nto\n{_short_str(feature)}")
TypeError: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
parquet_operations = convert_to_parquet(builder)
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
builder.download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
self._download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
self._prepare_split(split_generator, **prepare_split_kwargs)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
for job_id, done, content in self._prepare_split_single(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
raise DatasetGenerationError("An error occurred while generating the dataset") from e
datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
| text (string) | meta (dict) |
|---|---|
#import <DevToolsCore/PBXProjectItem.h>

// NOTE(review): declaration-only header in class-dump style (`id`/`argN`
// parameter types, no nullability annotations). Presumably regenerated from the
// private DevToolsCore framework -- any behavioral assumptions should be
// verified against the framework binary itself.

@class NSMutableArray, NSString, PBXFileType, PBXTarget;

// A build rule belonging to a PBXTarget. Judging from the declared accessors it
// pairs an input selector (a file type or a file-name pattern string) with an
// action (a compiler specification identifier or a script) and a list of
// output files. NSCopying-style and scripting (objectSpecifier) support appear
// below; semantics beyond the declarations are not visible here.
@interface PBXBuildRule : PBXProjectItem
{
// Backing storage for the accessors declared below.
NSString *_name;
PBXTarget *_target;
PBXFileType *_fileType;
NSString *_filePatterns;
NSString *_compilerSpecIdentifier;
NSString *_script;
NSMutableArray *_outputFiles;
BOOL _isEditable;
}

// --- Class methods: loading, lookup, and system-wide rule sets ---
+ (id)_loadBuildRulesAtPath:(id)arg1;
+ (id)archivableAttributes;
+ (id)buildPhaseSpecificBuildRulesForBuildPhaseIdentifier:(id)arg1 platform:(id)arg2;
+ (id)buildRuleWithName:(id)arg1;
+ (id)buildRulesFromBuildRuleActionSpecification:(id)arg1;
+ (id)buildRulesFromCompilationTaskSpecification:(id)arg1;
+ (id)compilerSpecIsScriptProxy;
+ (id)fileTypeIsPatternProxy;
+ (id)findFirstBuildRuleInArray:(id)arg1 thatAppliesToInputFileNamed:(id)arg2 ofType:(id)arg3 withDesiredOutputTypes:(id)arg4 inContext:(id)arg5 platformDomain:(id)arg6;
+ (void)initialize;
+ (id)loadBuildRulesFromPropertyListsInDirectory:(id)arg1;
+ (id)systemBuildRules;
+ (id)systemBuildRulesForPlatform:(id)arg1;

// --- Private instance methods ---
- (id)_compilerSpec;
- (id)_defaultName;
- (BOOL)_filePath:(const char *)arg1 matchesPatternInPatternString:(id)arg2;
- (id)_fileType;
- (void)_setCompilerSpec:(id)arg1;
- (void)_setFileType:(id)arg1;

// --- Rule matching and tool-spec resolution ---
- (void)addRelevantToolSpecificationsForFileType:(id)arg1 inPropertyExpansionContext:(id)arg2 andPlatformDomain:(id)arg3 toSet:(id)arg4;
- (BOOL)appliesToInputFileNamed:(id)arg1 ofType:(id)arg2 withDesiredOutputTypes:(id)arg3 inContext:(id)arg4 platformDomain:(id)arg5;
- (id)compilerSpecForIdentifier:(id)arg1;
- (id)compilerSpecificationIdentifier;
- (id)compilerSpecificationInDomain:(id)arg1;
- (id)container;
- (id)copyWithZone:(struct _NSZone *)arg1;
- (id)createDependencyGraphSnapshotForPlatform:(id)arg1;
- (void)dealloc;
- (Class)dependencyGraphSnapshotClass;
- (id)description;
- (id)errorRegexes;

// --- Accessors (getters and setters for the ivars above) ---
- (id)filePatterns;
- (id)fileType;
- (id)fileTypeForIdentifier:(id)arg1;
- (id)ignoredRegexes;
- (id)init;
- (id)initWithName:(id)arg1;
- (id)initWithPropertyListDictionary:(id)arg1;
- (id)innerDescription;
- (unsigned long long)inputFileParameterNumber;
- (void)insertOutputFile:(id)arg1 atIndex:(unsigned long long)arg2;
- (BOOL)isEditable;
- (id)name;
- (id)objectSpecifier;
- (id)outputFileAtIndex:(unsigned long long)arg1;
- (id)outputFiles;
- (id)outputParseRules;
- (void)removeOutputFileAtIndex:(unsigned long long)arg1;
- (void)replaceOutputFileAtIndex:(unsigned long long)arg1 withFile:(id)arg2;
- (id)script;
- (void)setCompilerSpecificationIdentifier:(id)arg1;
- (void)setFilePatterns:(id)arg1;
- (void)setFileType:(id)arg1;
- (void)setIsEditable:(BOOL)arg1;
- (void)setName:(id)arg1;
- (void)setOutputFiles:(id)arg1;
- (void)setScript:(id)arg1;
- (void)setTarget:(id)arg1;
- (id)statusMessageFormat;
- (id)target;
- (id)warningRegexes;
- (void)willChange;
@end
|
{
"content_hash": "321e7af506f6006556ac579947c63539",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 169,
"avg_line_length": 34.19277108433735,
"alnum_prop": 0.766384778012685,
"repo_name": "larsxschneider/Xcode-Scripting-Interface",
"id": "991755d659239346de40b9310bfc8029562dc88a",
"size": "2978",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/Libraries/DevToolsCoreHeader/DevToolsCore/PBXBuildRule.h",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "13512"
},
{
"name": "Objective-C",
"bytes": "729411"
}
]
}
|
package org.apache.nifi.accumulo.processors;
import org.apache.accumulo.core.client.AccumuloClient;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.MultiTableBatchWriter;
import org.apache.accumulo.core.client.MutationsRejectedException;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;
import org.apache.nifi.annotation.behavior.DynamicProperties;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnDisabled;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.Validator;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.record.path.FieldValue;
import org.apache.nifi.record.path.RecordPath;
import org.apache.nifi.record.path.RecordPathResult;
import org.apache.nifi.record.path.util.RecordPathCache;
import org.apache.nifi.serialization.RecordReader;
import org.apache.nifi.serialization.RecordReaderFactory;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.util.StringUtils;
import org.apache.nifi.accumulo.controllerservices.BaseAccumuloService;
import org.apache.nifi.accumulo.data.AccumuloRecordConfiguration;
import javax.xml.bind.DatatypeConverter;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@EventDriven
@SupportsBatching
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@Tags({"hadoop", "accumulo", "put", "record"})
@DynamicProperties({
@DynamicProperty(name = "visibility.<COLUMN FAMILY>", description = "Visibility label for everything under that column family " +
"when a specific label for a particular column qualifier is not available.", expressionLanguageScope = ExpressionLanguageScope.FLOWFILE_ATTRIBUTES,
value = "visibility label for <COLUMN FAMILY>"
),
@DynamicProperty(name = "visibility.<COLUMN FAMILY>.<COLUMN QUALIFIER>", description = "Visibility label for the specified column qualifier " +
"qualified by a configured column family.", expressionLanguageScope = ExpressionLanguageScope.FLOWFILE_ATTRIBUTES,
value = "visibility label for <COLUMN FAMILY>:<COLUMN QUALIFIER>."
)
})
/**
* Purpose and Design: Requires a connector be defined by way of an AccumuloService object. This class
* simply extens BaseAccumuloProcessor to extract records from a flow file. The location of a record field value can be
* placed into the value or part of the column qualifier ( this can/may change )
*
* Supports deletes. If the delete flag is used we'll delete keys found within that flow file.
*/
public class PutAccumuloRecord extends BaseAccumuloProcessor {
protected static final PropertyDescriptor MEMORY_SIZE = new PropertyDescriptor.Builder()
.name("Memory Size")
.description("The maximum memory size Accumulo at any one time from the record set.")
.required(true)
.addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
.defaultValue("10 MB")
.build();
protected static final PropertyDescriptor COLUMN_FAMILY = new PropertyDescriptor.Builder()
.name("Column Family")
.description("The Column Family to use when inserting data into Accumulo")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(Validator.VALID)
.build();
protected static final PropertyDescriptor COLUMN_FAMILY_FIELD = new PropertyDescriptor.Builder()
.name("Column Family Field")
.description("Field name used as the column family if one is not specified above.")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(Validator.VALID)
.build();
protected static final PropertyDescriptor DELETE_KEY = new PropertyDescriptor.Builder()
.name("delete-key")
.displayName("Delete Key")
.description("Deletes the key")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
protected static final PropertyDescriptor RECORD_IN_QUALIFIER = new PropertyDescriptor.Builder()
.name("record-value-in-qualifier")
.displayName("Record Value In Qualifier")
.description("Places the record value into the column qualifier instead of the value.")
.required(false)
.defaultValue("False")
.allowableValues("True", "False")
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
.build();
protected static final PropertyDescriptor FLUSH_ON_FLOWFILE = new PropertyDescriptor.Builder()
.name("flush-on-flow-file")
.displayName("Flush Every FlowFile")
.description("Flushes the table writer on every flow file.")
.required(true)
.defaultValue("True")
.allowableValues("True", "False")
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
.build();
protected static final PropertyDescriptor FIELD_DELIMITER_AS_HEX = new PropertyDescriptor.Builder()
.name("field-delimiter-as-hex")
.displayName("Hex Encode Field Delimiter")
.description("Allows you to hex encode the delimiter as a character. So 0x00 places a null character between the record name and value.")
.required(false)
.defaultValue("False")
.allowableValues("True", "False")
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
.build();
protected static final PropertyDescriptor FIELD_DELIMITER = new PropertyDescriptor.Builder()
.name("field-delimiter")
.displayName("Field Delimiter")
.description("Delimiter between the record value and name. ")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
protected static final PropertyDescriptor RECORD_READER_FACTORY = new PropertyDescriptor.Builder()
.name("record-reader")
.displayName("Record Reader")
.description("Specifies the Controller Service to use for parsing incoming data and determining the data's schema")
.identifiesControllerService(RecordReaderFactory.class)
.required(true)
.build();
protected static final PropertyDescriptor ROW_FIELD_NAME = new PropertyDescriptor.Builder()
.name("Row Identifier Field Name")
.description("Specifies the name of a record field whose value should be used as the row id for the given record." +
" If EL defines a value that is not a field name that will be used as the row identifier.")
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
protected static final PropertyDescriptor TIMESTAMP_FIELD = new PropertyDescriptor.Builder()
.name("timestamp-field")
.displayName("Timestamp Field")
.description("Specifies the name of a record field whose value should be used as the timestamp. If empty a timestamp will be recorded as the time of insertion")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
protected static final PropertyDescriptor VISIBILITY_PATH = new PropertyDescriptor.Builder()
.name("visibility-path")
.displayName("Visibility String Record Path Root")
.description("A record path that points to part of the record which contains a path to a mapping of visibility strings to record paths")
.required(false)
.addValidator(Validator.VALID)
.build();
protected static final PropertyDescriptor DEFAULT_VISIBILITY = new PropertyDescriptor.Builder()
.name("default-visibility")
.displayName("Default Visibility")
.description("Default visibility when VISIBILITY_PATH is not defined. ")
.required(false)
.addValidator(Validator.VALID)
.build();
public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("A FlowFile is routed to this relationship after it has been successfully stored in Accumulo")
.build();
public static final Relationship REL_FAILURE = new Relationship.Builder()
.name("failure")
.description("A FlowFile is routed to this relationship if it cannot be sent to Accumulo")
.build();
/**
* Connector service which provides us a connector if the configuration is correct.
*/
protected BaseAccumuloService accumuloConnectorService;
/**
* Connector that we need to persist while we are operational.
*/
protected AccumuloClient client;
/**
* Table writer that will close when we shutdown or upon error.
*/
private MultiTableBatchWriter tableWriter = null;
/**
* Record path cache
*/
protected RecordPathCache recordPathCache;
/**
* Flushes the tableWriter on every flow file if true.
*/
protected boolean flushOnEveryFlow;
@Override
public Set<Relationship> getRelationships() {
final Set<Relationship> rels = new HashSet<>();
rels.add(REL_SUCCESS);
rels.add(REL_FAILURE);
return rels;
}
@Override
protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
Collection<ValidationResult> set = new HashSet<>();
if (!validationContext.getProperty(COLUMN_FAMILY).isSet() && !validationContext.getProperty(COLUMN_FAMILY_FIELD).isSet())
set.add(new ValidationResult.Builder().explanation("Column Family OR Column family field name must be defined").build());
else if (validationContext.getProperty(COLUMN_FAMILY).isSet() && validationContext.getProperty(COLUMN_FAMILY_FIELD).isSet())
set.add(new ValidationResult.Builder().explanation("Column Family OR Column family field name must be defined, but not both").build());
return set;
}
@OnScheduled
public void onScheduled(final ProcessContext context) {
accumuloConnectorService = context.getProperty(ACCUMULO_CONNECTOR_SERVICE).asControllerService(BaseAccumuloService.class);
final Double maxBytes = context.getProperty(MEMORY_SIZE).asDataSize(DataUnit.B);
this.client = accumuloConnectorService.getClient();
BatchWriterConfig writerConfig = new BatchWriterConfig();
writerConfig.setMaxWriteThreads(context.getProperty(THREADS).asInteger());
writerConfig.setMaxMemory(maxBytes.longValue());
writerConfig.setTimeout(context.getProperty(ACCUMULO_TIMEOUT).asTimePeriod(TimeUnit.SECONDS).longValue(), TimeUnit.SECONDS);
tableWriter = client.createMultiTableBatchWriter(writerConfig);
flushOnEveryFlow = context.getProperty(FLUSH_ON_FLOWFILE).asBoolean();
if (!flushOnEveryFlow){
writerConfig.setMaxLatency(60, TimeUnit.SECONDS);
}
if (context.getProperty(CREATE_TABLE).asBoolean() && !context.getProperty(TABLE_NAME).isExpressionLanguagePresent()) {
final Map<String, String> flowAttributes = new HashMap<>();
final String table = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowAttributes).getValue();
final TableOperations tableOps = this.client.tableOperations();
if (!tableOps.exists(table)) {
getLogger().info("Creating " + table + " table.");
try {
tableOps.create(table);
} catch (TableExistsException te) {
// can safely ignore
} catch (AccumuloSecurityException | AccumuloException e) {
getLogger().info("Accumulo or Security error creating. Continuing... " + table + ". ", e);
}
}
}
}
@OnUnscheduled
@OnDisabled
public synchronized void shutdown(){
/**
* Close the writer when we are shut down.
*/
if (null != tableWriter){
try {
tableWriter.close();
} catch (MutationsRejectedException e) {
getLogger().error("Mutations were rejected",e);
}
tableWriter = null;
}
}
@Override
public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>(baseProperties);
properties.add(RECORD_READER_FACTORY);
properties.add(ROW_FIELD_NAME);
properties.add(ROW_FIELD_NAME);
properties.add(COLUMN_FAMILY);
properties.add(COLUMN_FAMILY_FIELD);
properties.add(DELETE_KEY);
properties.add(FLUSH_ON_FLOWFILE);
properties.add(FIELD_DELIMITER);
properties.add(FIELD_DELIMITER_AS_HEX);
properties.add(MEMORY_SIZE);
properties.add(RECORD_IN_QUALIFIER);
properties.add(TIMESTAMP_FIELD);
properties.add(VISIBILITY_PATH);
properties.add(DEFAULT_VISIBILITY);
return properties;
}
@Override
public void onTrigger(ProcessContext processContext, ProcessSession processSession) throws ProcessException {
final FlowFile flowFile = processSession.get();
if (flowFile == null) {
return;
}
final RecordReaderFactory recordParserFactory = processContext.getProperty(RECORD_READER_FACTORY)
.asControllerService(RecordReaderFactory.class);
final String recordPathText = processContext.getProperty(VISIBILITY_PATH).getValue();
final String defaultVisibility = processContext.getProperty(DEFAULT_VISIBILITY).isSet() ? processContext.getProperty(DEFAULT_VISIBILITY).getValue() : null;
final String tableName = processContext.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
accumuloConnectorService.renewTgtIfNecessary();
// create the table if EL is present, create table is true and the table does not exist.
if (processContext.getProperty(TABLE_NAME).isExpressionLanguagePresent() && processContext.getProperty(CREATE_TABLE).asBoolean()) {
final TableOperations tableOps = this.client.tableOperations();
if (!tableOps.exists(tableName)) {
getLogger().info("Creating " + tableName + " table.");
try {
tableOps.create(tableName);
} catch (TableExistsException te) {
// can safely ignore, though we shouldn't arrive here due to table.exists called, but it's possible
// that with multiple threads two could attempt table creation concurrently. We don't want that
// to be a failure.
} catch (AccumuloSecurityException | AccumuloException e) {
throw new ProcessException("Accumulo or Security error creating. Continuing... " + tableName + ". ",e);
}
}
}
AccumuloRecordConfiguration builder = AccumuloRecordConfiguration.Builder.newBuilder()
.setTableName(tableName)
.setColumnFamily(processContext.getProperty(COLUMN_FAMILY).evaluateAttributeExpressions(flowFile).getValue())
.setColumnFamilyField(processContext.getProperty(COLUMN_FAMILY_FIELD).evaluateAttributeExpressions(flowFile).getValue())
.setRowField(processContext.getProperty(ROW_FIELD_NAME).evaluateAttributeExpressions(flowFile).getValue())
.setEncodeFieldDelimiter(processContext.getProperty(FIELD_DELIMITER_AS_HEX).asBoolean())
.setFieldDelimiter(processContext.getProperty(FIELD_DELIMITER).isSet() ? processContext.getProperty(FIELD_DELIMITER).evaluateAttributeExpressions(flowFile).getValue() : "")
.setQualifierInKey(processContext.getProperty(RECORD_IN_QUALIFIER).isSet() ? processContext.getProperty(RECORD_IN_QUALIFIER).asBoolean() : false)
.setDelete(processContext.getProperty(DELETE_KEY).isSet() ? processContext.getProperty(DELETE_KEY).evaluateAttributeExpressions(flowFile).asBoolean() : false)
.setTimestampField(processContext.getProperty(TIMESTAMP_FIELD).evaluateAttributeExpressions(flowFile).getValue()).build();
RecordPath recordPath = null;
if (recordPathCache != null && !StringUtils.isEmpty(recordPathText)) {
recordPath = recordPathCache.getCompiled(recordPathText);
}
boolean failed = false;
Mutation prevMutation=null;
try (final InputStream in = processSession.read(flowFile);
final RecordReader reader = recordParserFactory.createRecordReader(flowFile, in, getLogger())) {
Record record;
/**
* HBase supports a restart point. This may be something that we can/should add if needed.
*/
while ((record = reader.nextRecord()) != null) {
prevMutation = createMutation(prevMutation, processContext, record, reader.getSchema(), recordPath, flowFile,defaultVisibility, builder);
}
addMutation(builder.getTableName(),prevMutation);
} catch (Exception ex) {
getLogger().error("Failed to put records to Accumulo.", ex);
failed = true;
}
if (flushOnEveryFlow){
try {
tableWriter.flush();
} catch (MutationsRejectedException e) {
throw new ProcessException(e);
}
}
if (failed) {
processSession.transfer(processSession.penalize(flowFile), REL_FAILURE);
} else {
processSession.transfer(flowFile, REL_SUCCESS);
}
}
/**
* Adapted from HBASEUtils. Their approach seemed ideal for what our intent is here.
* @param columnFamily column family from which to extract the visibility or to execute an expression against
* @param columnQualifier column qualifier from which to extract the visibility or to execute an expression against
* @param flowFile flow file being written
* @param context process context
* @return Visibility
*/
public static String produceVisibility(String columnFamily, String columnQualifier, FlowFile flowFile, ProcessContext context) {
if (org.apache.commons.lang3.StringUtils.isNotEmpty(columnFamily)) {
return null;
}
String lookupKey = String.format("visibility.%s%s%s", columnFamily, !org.apache.commons.lang3.StringUtils.isNotEmpty(columnQualifier) ? "." : "", columnQualifier);
String fromAttribute = flowFile.getAttribute(lookupKey);
if (fromAttribute == null && !org.apache.commons.lang3.StringUtils.isBlank(columnQualifier)) {
String lookupKeyFam = String.format("visibility.%s", columnFamily);
fromAttribute = flowFile.getAttribute(lookupKeyFam);
}
if (fromAttribute != null) {
return fromAttribute;
} else {
PropertyValue descriptor = context.getProperty(lookupKey);
if (descriptor == null || !descriptor.isSet()) {
descriptor = context.getProperty(String.format("visibility.%s", columnFamily));
}
String retVal = descriptor != null ? descriptor.evaluateAttributeExpressions(flowFile).getValue() : null;
return retVal;
}
}
private void addMutation(final String tableName, final Mutation m) throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
tableWriter.getBatchWriter(tableName).addMutation(m);
}
/**
* Returns the row provided the record schema
* @param record record against which we are evaluating
* @param schema Record schema
* @param rowOrFieldName Row identifier or field name
* @return Text object containing the resulting row.
*/
private Text getRow(final Record record,
final RecordSchema schema,
final String rowOrFieldName){
if ( !schema.getFieldNames().contains(rowOrFieldName) ){
return new Text(rowOrFieldName);
} else{
return new Text(record.getAsString(rowOrFieldName));
}
}
/**
* Creates a mutation with the provided arguments
* @param prevMutation previous mutation, to append to if in the same row.
* @param context process context.
* @param record record object extracted from the flow file
* @param schema schema for this record
* @param recordPath record path for visibility extraction
* @param flowFile flow file
* @param defaultVisibility default visibility
* @param config configuration of this instance.
* @return Returns the Mutation to insert
* @throws AccumuloSecurityException Error accessing Accumulo
* @throws AccumuloException Non security ( or table ) related Accumulo exceptions writing to the store.
* @throws TableNotFoundException Table not found on the cluster
*/
protected Mutation createMutation(final Mutation prevMutation,
final ProcessContext context,
final Record record,
final RecordSchema schema,
final RecordPath recordPath,
final FlowFile flowFile,
final String defaultVisibility,
AccumuloRecordConfiguration config) throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
Mutation m=null;
if (record != null) {
final Long timestamp;
Set<String> fieldsToSkip = new HashSet<>();
if (!StringUtils.isBlank(config.getTimestampField())) {
try {
timestamp = record.getAsLong(config.getTimestampField());
fieldsToSkip.add(config.getTimestampField());
} catch (Exception e) {
throw new AccumuloException("Could not convert " + config.getTimestampField() + " to a long", e);
}
if (timestamp == null) {
getLogger().warn("The value of timestamp field " + config.getTimestampField() + " was null, record will be inserted with latest timestamp");
}
} else {
timestamp = null;
}
RecordField visField = null;
Map visSettings = null;
if (recordPath != null) {
final RecordPathResult result = recordPath.evaluate(record);
FieldValue fv = result.getSelectedFields().findFirst().get();
visField = fv.getField();
if (null != visField)
fieldsToSkip.add(visField.getFieldName());
visSettings = (Map)fv.getValue();
}
if (null != prevMutation){
Text row = new Text(prevMutation.getRow());
Text curRow = getRow(record,schema,config.getRowField());
if (row.equals(curRow)){
m = prevMutation;
} else{
m = new Mutation(curRow);
addMutation(config.getTableName(),prevMutation);
}
} else{
Text row = getRow(record,schema,config.getRowField());
m = new Mutation(row);
}
fieldsToSkip.add(config.getRowField());
String columnFamily = config.getColumnFamily();
if (StringUtils.isBlank(columnFamily) && !StringUtils.isBlank(config.getColumnFamilyField())) {
final String cfField = config.getColumnFamilyField();
columnFamily = record.getAsString(cfField);
fieldsToSkip.add(cfField);
} else if (StringUtils.isBlank(columnFamily) && StringUtils.isBlank(config.getColumnFamilyField())){
throw new IllegalArgumentException("Invalid configuration for column family " + columnFamily + " and " + config.getColumnFamilyField());
}
final Text cf = new Text(columnFamily);
for (String name : schema.getFieldNames().stream().filter(p->!fieldsToSkip.contains(p)).collect(Collectors.toList())) {
String visString = (visField != null && visSettings != null && visSettings.containsKey(name))
? (String)visSettings.get(name) : defaultVisibility;
Text cq = new Text(name);
final Value value;
String recordValue = record.getAsString(name);
if (config.getQualifierInKey()){
final String delim = config.getFieldDelimiter();
if (!StringUtils.isEmpty(delim)) {
if (config.getEncodeDelimiter()) {
byte [] asHex = DatatypeConverter.parseHexBinary(delim);
cq.append(asHex, 0, asHex.length);
}else{
cq.append(delim.getBytes(), 0, delim.length());
}
}
cq.append(recordValue.getBytes(),0,recordValue.length());
value = new Value();
} else{
value = new Value(recordValue.getBytes());
}
if (StringUtils.isBlank(visString)) {
visString = produceVisibility(cf.toString(), cq.toString(), flowFile, context);
}
ColumnVisibility cv = new ColumnVisibility();
if (StringUtils.isBlank(visString)) {
if (!StringUtils.isBlank(defaultVisibility)) {
cv = new ColumnVisibility(defaultVisibility);
}
} else {
cv = new ColumnVisibility(visString);
}
if (null != timestamp) {
if (config.isDeleteKeys()) {
m.putDelete(cf, cq, cv, timestamp);
} else {
m.put(cf, cq, cv, timestamp, value);
}
} else{
if (config.isDeleteKeys())
m.putDelete(cf, cq, cv);
else
m.put(cf, cq, cv, value);
}
}
}
return m;
}
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
    // Dynamic properties of the form "visibility.<family>" or
    // "visibility.<family>.<qualifier>" declare default visibility labels.
    // Adapted from HBase puts. This is a good approach and one that we should adopt here, too.
    if (!propertyDescriptorName.startsWith("visibility.")) {
        return null;
    }
    final String[] parts = propertyDescriptorName.split("\\.");
    final String displayName;
    final String description;
    switch (parts.length) {
        case 2:
            displayName = String.format("Column Family %s Default Visibility", parts[1]);
            description = String.format("Default visibility setting for %s", parts[1]);
            break;
        case 3:
            displayName = String.format("Column Qualifier %s.%s Default Visibility", parts[1], parts[2]);
            description = String.format("Default visibility setting for %s.%s", parts[1], parts[2]);
            break;
        default:
            // Anything other than 2 or 3 dot-separated segments is not a
            // recognized visibility property.
            return null;
    }
    return new PropertyDescriptor.Builder()
            .name(propertyDescriptorName)
            .displayName(displayName)
            .description(description)
            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .dynamic(true)
            .build();
}
}
|
{
"content_hash": "b8958304429b0dad0c162b309ed7b5c8",
"timestamp": "",
"source": "github",
"line_count": 641,
"max_line_length": 188,
"avg_line_length": 47.42277691107644,
"alnum_prop": 0.6455358905191131,
"repo_name": "MikeThomsen/nifi",
"id": "97062e4df9c2ad747e4311d02ba1dccac794ff71",
"size": "31200",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "nifi-nar-bundles/nifi-accumulo-bundle/nifi-accumulo-processors/src/main/java/org/apache/nifi/accumulo/processors/PutAccumuloRecord.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "44090"
},
{
"name": "C++",
"bytes": "652"
},
{
"name": "CSS",
"bytes": "284471"
},
{
"name": "Clojure",
"bytes": "3993"
},
{
"name": "Dockerfile",
"bytes": "24591"
},
{
"name": "GAP",
"bytes": "30934"
},
{
"name": "Groovy",
"bytes": "2346453"
},
{
"name": "HTML",
"bytes": "1014100"
},
{
"name": "Handlebars",
"bytes": "38554"
},
{
"name": "Java",
"bytes": "53038782"
},
{
"name": "JavaScript",
"bytes": "4015073"
},
{
"name": "Lua",
"bytes": "983"
},
{
"name": "Mustache",
"bytes": "2438"
},
{
"name": "PLpgSQL",
"bytes": "1211"
},
{
"name": "Python",
"bytes": "26583"
},
{
"name": "Ruby",
"bytes": "23018"
},
{
"name": "SCSS",
"bytes": "20988"
},
{
"name": "Shell",
"bytes": "164305"
},
{
"name": "XSLT",
"bytes": "7835"
}
]
}
|
#region Copyright
//
// DotNetNuke® - https://www.dnnsoftware.com
// Copyright (c) 2002-2018
// by DotNetNuke Corporation
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
// documentation files (the "Software"), to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
// to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions
// of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
// CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
#endregion
#region Usings
using System.Web.UI.WebControls;
#endregion
namespace DotNetNuke.Web.UI.WebControls.Internal
{
///<summary>
/// A grid template column; adds no behavior of its own beyond the base
/// ASP.NET <see cref="TemplateField"/> — it exists as a DNN-named alias.
///</summary>
///<remarks>
/// This control is only for internal use, please don't reference it in any other place as it may be removed in future.
/// </remarks>
public class DnnGridTemplateColumn : TemplateField
{
}
}
|
{
"content_hash": "646a49194ec977223f27e2107e5bdda7",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 123,
"avg_line_length": 43.30555555555556,
"alnum_prop": 0.7466324567030147,
"repo_name": "RichardHowells/Dnn.Platform",
"id": "dd3dfedc3cbdd8bc53dd7df40f5bb4a11d0cde96",
"size": "1562",
"binary": false,
"copies": "2",
"ref": "refs/heads/development",
"path": "DNN Platform/DotNetNuke.Web/UI/WebControls/Internal/DnnGridTemplateColumn.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "1325033"
},
{
"name": "Batchfile",
"bytes": "374"
},
{
"name": "C#",
"bytes": "20877202"
},
{
"name": "CSS",
"bytes": "1028965"
},
{
"name": "Erlang",
"bytes": "2168"
},
{
"name": "HTML",
"bytes": "629809"
},
{
"name": "JavaScript",
"bytes": "3825173"
},
{
"name": "PHP",
"bytes": "2199"
},
{
"name": "Smalltalk",
"bytes": "66184"
},
{
"name": "Visual Basic",
"bytes": "139461"
},
{
"name": "XSLT",
"bytes": "16560"
}
]
}
|
Listlist::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  config.cache_classes = true

  # Configure static asset server for tests with Cache-Control for performance
  config.serve_static_assets = true
  config.static_cache_control = "public, max-age=3600"

  # Show full error reports and disable caching
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Raise exceptions instead of rendering exception templates
  config.action_dispatch.show_exceptions = false

  # Disable request forgery protection in test environment
  config.action_controller.allow_forgery_protection = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test
  config.action_mailer.default_url_options = { :host => 'localhost:3000' }

  # Raise exception on mass assignment protection for Active Record models
  config.active_record.mass_assignment_sanitizer = :strict

  # Print deprecation notices to the stderr
  config.active_support.deprecation = :stderr

  # Do not eager load code on boot; tests load only the code they touch.
  config.eager_load = false
end
|
{
"content_hash": "a132b5bdfe366d1b8943df25719cd688",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 84,
"avg_line_length": 41.5945945945946,
"alnum_prop": 0.7667316439246263,
"repo_name": "cyrusinnovation/cyrus-lists",
"id": "7a5ae49b38344483ae8f4e31482e82deefcba55d",
"size": "1539",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "config/environments/test.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3287"
},
{
"name": "CoffeeScript",
"bytes": "537"
},
{
"name": "HTML",
"bytes": "14619"
},
{
"name": "JavaScript",
"bytes": "662"
},
{
"name": "Ruby",
"bytes": "124378"
}
]
}
|
[](https://travis-ci.org/remind101/formatted-metrics) [](https://codeclimate.com/github/remind101/formatted-metrics)
Easily produce metrics that can be consumed by [l2met](https://github.com/ryandotsmith/l2met) or statsd.
## Installation
Add this line to your application's Gemfile:
```ruby
gem 'formatted-metrics'
```
## Usage
### Rack
If you want to instrument rack request times:
```ruby
use Rack::Instrumentation
```
### Instrument
Use `Metrics.instrument` to instrument events to STDOUT.
```ruby
Metrics.instrument 'rack.request' do
@app.call(env)
end
# => 'source=app measure.rack.request=50ms'
Metrics.instrument 'workers.busy', 10, units: 'workers'
# => 'source=app measure.workers.busy=10workers'
Metrics.instrument 'sidekiq.queue', source: 'background' do
yield
end
# => 'source=app.background measure.sidekiq.queue=500ms'
Metrics.group 'sidekiq' do |group|
group.instrument 'queues.process', 100, units: 'jobs'
group.instrument 'workers.busy', 10, units: 'workers'
group.instrument 'queue.time', source: 'worker.1' do
sleep 1
end
end
# => 'source=app measure.sidekiq.queues.process=100jobs measure.sidekiq.workers.busy=10workers'
# => 'source=app.worker.1 measure.sidekiq.queue.time=1000.00ms'
```
## TODO
* Add Rack middleware for outputting rack performance metrics.
* Instrument some default rails stuff.
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
|
{
"content_hash": "97b779b4001dabd328f0651a1d04a9a0",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 265,
"avg_line_length": 26.753846153846155,
"alnum_prop": 0.738355376653249,
"repo_name": "remind101/formatted-metrics",
"id": "77f450216d8bc5ee7259770fe479da124b88779c",
"size": "1760",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "33441"
}
]
}
|
"""Command-line tools for authenticating via OAuth 2.0
Do the OAuth 2.0 Web Server dance for a command line application. Stores the
generated credentials in a common file that is used by other example apps in
the same directory.
"""
from __future__ import print_function
import logging
import socket
import sys
from six.moves import BaseHTTPServer
from six.moves import http_client
from six.moves import input
from six.moves import urllib
from oauth2client import client
from oauth2client import util
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = ['argparser', 'run_flow', 'message_if_missing']
_CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
{file_path}
with information from the APIs Console <https://code.google.com/apis/console>.
"""
_FAILED_START_MESSAGE = """
Failed to start a local webserver listening on either port 8080
or port 8090. Please check your firewall settings and locally
running programs that may be blocking or using those ports.
Falling back to --noauth_local_webserver and continuing with
authorization.
"""
_BROWSER_OPENED_MESSAGE = """
Your browser has been opened to visit:
{address}
If your browser is on a different machine then exit and re-run this
application with the command-line parameter
--noauth_local_webserver
"""
_GO_TO_LINK_MESSAGE = """
Go to the following link in your browser:
{address}
"""
def _CreateArgumentParser():
try:
import argparse
except ImportError: # pragma: NO COVER
return None
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--auth_host_name', default='localhost',
help='Hostname when running a local web server.')
parser.add_argument('--noauth_local_webserver', action='store_true',
default=False, help='Do not run a local web server.')
parser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
nargs='*', help='Port web server should listen on.')
parser.add_argument(
'--logging_level', default='ERROR',
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
help='Set the logging level of detail.')
return parser
# argparser is an ArgumentParser that contains command-line options expected
# by tools.run(). Pass it in as part of the 'parents' argument to your own
# ArgumentParser. May be None when the argparse module is unavailable.
argparser = _CreateArgumentParser()
class ClientRedirectServer(BaseHTTPServer.HTTPServer):
    """A server to handle OAuth 2.0 redirects back to localhost.

    Waits for a single request and parses the query parameters
    into query_params and then stops serving.
    """
    # Overwritten per-instance by ClientRedirectHandler.do_GET with the
    # parsed query string of the redirect request (e.g. {'code': ...}).
    query_params = {}
class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """A handler for OAuth 2.0 redirects back to localhost.

    Waits for a single request and parses the query parameters
    into the servers query_params and then stops serving.
    """

    def do_GET(self):
        """Handle a GET request.

        Parses the query parameters and prints a message
        if the flow has completed. Note that we can't detect
        if an error occurred.
        """
        self.send_response(http_client.OK)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        # Everything after the first '?' is the query string; without a '?'
        # this yields the whole path and parse_qsl returns no pairs.
        query = self.path.split('?', 1)[-1]
        query = dict(urllib.parse.parse_qsl(query))
        # Publish the parsed parameters on the server object where
        # run_flow() reads them after handle_request() returns.
        self.server.query_params = query
        # Response body must be bytes for Python 3 compatibility.
        self.wfile.write(
            b"<html><head><title>Authentication Status</title></head>")
        self.wfile.write(
            b"<body><p>The authentication flow has completed.</p>")
        self.wfile.write(b"</body></html>")

    def log_message(self, format, *args):
        """Do not log messages to stdout while running as cmd. line program."""
@util.positional(3)
def run_flow(flow, storage, flags=None, http=None):
    """Core code for a command-line application.

    The ``run()`` function is called from your application and runs
    through all the steps to obtain credentials. It takes a ``Flow``
    argument and attempts to open an authorization server page in the
    user's default web browser. The server asks the user to grant your
    application access to the user's data. If the user grants access,
    the ``run()`` function returns new credentials. The new credentials
    are also stored in the ``storage`` argument, which updates the file
    associated with the ``Storage`` object.

    It presumes it is run from a command-line application and supports the
    following flags:

    ``--auth_host_name`` (string, default: ``localhost``)
       Host name to use when running a local web server to handle
       redirects during OAuth authorization.

    ``--auth_host_port`` (integer, default: ``[8080, 8090]``)
       Port to use when running a local web server to handle redirects
       during OAuth authorization. Repeat this option to specify a list
       of values.

    ``--[no]auth_local_webserver`` (boolean, default: ``True``)
       Run a local web server to handle redirects during OAuth
       authorization.

    The tools module defines an ``ArgumentParser`` the already contains the
    flag definitions that ``run()`` requires. You can pass that
    ``ArgumentParser`` to your ``ArgumentParser`` constructor::

        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            parents=[tools.argparser])
        flags = parser.parse_args(argv)

    Args:
        flow: Flow, an OAuth 2.0 Flow to step through.
        storage: Storage, a ``Storage`` to store the credential in.
        flags: ``argparse.Namespace``, (Optional) The command-line flags. This
               is the object returned from calling ``parse_args()`` on
               ``argparse.ArgumentParser`` as described above. Defaults
               to ``argparser.parse_args()``.
        http: An instance of ``httplib2.Http.request`` or something that
              acts like it.

    Returns:
        Credentials, the obtained credential.
    """
    if flags is None:
        flags = argparser.parse_args()
    logging.getLogger().setLevel(getattr(logging, flags.logging_level))
    if not flags.noauth_local_webserver:
        success = False
        port_number = 0
        # Try each candidate port in order until one can be bound.
        for port in flags.auth_host_port:
            port_number = port
            try:
                httpd = ClientRedirectServer((flags.auth_host_name, port),
                                             ClientRedirectHandler)
            except socket.error:
                pass
            else:
                success = True
                break
        # No bindable port: fall back to the manual copy/paste flow.
        flags.noauth_local_webserver = not success
        if not success:
            print(_FAILED_START_MESSAGE)

    if not flags.noauth_local_webserver:
        oauth_callback = 'http://{host}:{port}/'.format(
            host=flags.auth_host_name, port=port_number)
    else:
        # Out-of-band callback: the provider shows the user a code to
        # paste back into this program.
        oauth_callback = client.OOB_CALLBACK_URN
    flow.redirect_uri = oauth_callback
    authorize_url = flow.step1_get_authorize_url()

    if not flags.noauth_local_webserver:
        import webbrowser
        webbrowser.open(authorize_url, new=1, autoraise=True)
        print(_BROWSER_OPENED_MESSAGE.format(address=authorize_url))
    else:
        print(_GO_TO_LINK_MESSAGE.format(address=authorize_url))

    code = None
    if not flags.noauth_local_webserver:
        # Block until the provider redirects the browser back to our
        # local server; the handler fills in httpd.query_params.
        httpd.handle_request()
        if 'error' in httpd.query_params:
            sys.exit('Authentication request was rejected.')
        if 'code' in httpd.query_params:
            code = httpd.query_params['code']
        else:
            print('Failed to find "code" in the query parameters '
                  'of the redirect.')
            sys.exit('Try running with --noauth_local_webserver.')
    else:
        code = input('Enter verification code: ').strip()

    try:
        # Exchange the one-time authorization code for credentials.
        credential = flow.step2_exchange(code, http=http)
    except client.FlowExchangeError as e:
        sys.exit('Authentication has failed: {0}'.format(e))

    # Persist the credential and bind it to storage so future token
    # refreshes are written back automatically.
    storage.put(credential)
    credential.set_store(storage)
    print('Authentication successful.')
    return credential
def message_if_missing(filename):
    """Helpful message to display if the CLIENT_SECRETS file is missing.

    Args:
        filename: string, path to the missing client_secrets.json file.

    Returns:
        string, the warning template with the file path substituted in.
    """
    return _CLIENT_SECRETS_MESSAGE.format(file_path=filename)
|
{
"content_hash": "919ceea7e5b93868a5b4b07a9df54956",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 79,
"avg_line_length": 35.1198347107438,
"alnum_prop": 0.6576067772679138,
"repo_name": "axbaretto/beam",
"id": "89471574f4ab3e6d326099103224ea94dfa5d7b5",
"size": "9097",
"binary": false,
"copies": "19",
"ref": "refs/heads/master",
"path": "sdks/python/.tox/docs/lib/python2.7/site-packages/oauth2client/tools.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1598"
},
{
"name": "Batchfile",
"bytes": "3220"
},
{
"name": "C",
"bytes": "1339873"
},
{
"name": "C++",
"bytes": "1132901"
},
{
"name": "CSS",
"bytes": "124283"
},
{
"name": "Dockerfile",
"bytes": "23950"
},
{
"name": "FreeMarker",
"bytes": "7428"
},
{
"name": "Go",
"bytes": "2795906"
},
{
"name": "Groovy",
"bytes": "187109"
},
{
"name": "HTML",
"bytes": "238575"
},
{
"name": "Java",
"bytes": "39085315"
},
{
"name": "JavaScript",
"bytes": "1221326"
},
{
"name": "Jupyter Notebook",
"bytes": "7396"
},
{
"name": "Makefile",
"bytes": "354938"
},
{
"name": "Python",
"bytes": "51449019"
},
{
"name": "Roff",
"bytes": "70716"
},
{
"name": "Ruby",
"bytes": "4159"
},
{
"name": "Shell",
"bytes": "351541"
},
{
"name": "TeX",
"bytes": "70920"
},
{
"name": "Thrift",
"bytes": "1118"
}
]
}
|
/***
* Inferno Engine v4 2015-2017
* Written by Tomasz "Rex Dex" Jonarski
*
* [#filter: rtti\types #]
***/
#include "build.h"
#include "rttiEnumType.h"
#include "streamTextReader.h"
#include "streamTextWriter.h"
#include "streamBinaryWriter.h"
#include "streamBinaryReader.h"
#include "base/containers/include/stringConversions.h"
namespace base
{
namespace rtti
{
EnumType::EnumType(const StringID& name, const Uint32 size, const Uint64 nativeHash, const Bool scripted)
    : IType(name)
    , m_minValue(std::numeric_limits<Int64>::max())  // sentinel: first add() narrows the range
    , m_maxValue(std::numeric_limits<Int64>::min())
{
    // Only sizes of supported underlying integer types are legal.
    ASSERT_EX(size == 1 || size == 2 || size == 4 || size == 8, "Unsupported enum size");
    m_traits.m_metaType = MetaType::Enum;
    m_traits.m_scripted = scripted;
    m_traits.m_convClass = TypeConversionClass::TypeEnum;
    m_traits.m_size = size;
    m_traits.m_alignment = 1;
    m_traits.m_nativeHash = nativeHash;
    // Plain integer payload: zero memory is a valid value, no special
    // construction/destruction needed, and trivial copy/compare is safe.
    m_traits.m_initializedFromZeroMem = true;
    m_traits.m_requiresConstructor = false;
    m_traits.m_requiresDestructor = false;
    m_traits.m_simpleCopyCompare = true;
}
void EnumType::clear()
{
    // Drop every registered option and restore the min/max sentinels so a
    // subsequent add() recomputes the value range from scratch.
    m_options.reset();
    m_values.reset();
    m_minValue = std::numeric_limits<Int64>::max();
    m_maxValue = std::numeric_limits<Int64>::min();
}
void EnumType::add(const StringID& name, const Int64 value)
{
    // Options must be unique by numeric value...
    for (const auto existingVal : m_values)
    {
        ASSERT_EX(existingVal != value, "Duplicated enum value");
    }
    // ...and by name. (Fixed: this assert previously reported
    // "Duplicated enum value" for a duplicated *name*.)
    for (const auto existingName : m_options)
    {
        ASSERT_EX(existingName != name, "Duplicated enum name");
    }
    // Names and values are stored as parallel arrays: same index, same option.
    m_options.pushBack(name);
    m_values.pushBack(value);
    // Keep the observed numeric range up to date.
    m_minValue = std::min(m_minValue, value);
    m_maxValue = std::max(m_maxValue, value);
}
const Bool EnumType::findValue(const StringID& name, Int64& outValue) const
{
    // Linear scan over the parallel name/value arrays.
    const Uint32 count = m_options.size();
    for (Uint32 index = 0; index < count; ++index)
    {
        if (m_options[index] != name)
            continue;
        outValue = m_values[index];
        return true;
    }
    return false;
}
const Bool EnumType::findName(const Int64 value, StringID& outName) const
{
    // Linear scan over the parallel value/name arrays.
    const Uint32 count = m_values.size();
    for (Uint32 index = 0; index < count; ++index)
    {
        if (m_values[index] != value)
            continue;
        outName = m_options[index];
        return true;
    }
    return false;
}
const Bool EnumType::compare(const void* data1, const void* data2) const
{
    // Widen both operands to 64 bits so enums of any underlying size
    // compare consistently.
    Int64 left = 0;
    Int64 right = 0;
    readInt64(data1, left);
    readInt64(data2, right);
    return left == right;
}
void EnumType::copy(void* dest, const void* src) const
{
Int64 val;
readInt64(src, val);
writeInt64(dest, val);
}
//----
const Bool EnumType::toString(const void* value, StringBuf& outName) const
{
    // Map the stored numeric value to its registered option name, if any.
    Int64 numeric = 0;
    readInt64(value, numeric);
    StringID option;
    if (findName(numeric, option))
    {
        outName = option.getBuffer();
        return true;
    }
    return false;
}
// Maps the stored numeric value back to its option name; returns false
// when the value has no registered option.
const Bool EnumType::toStringID(const void* value, StringID& outNam) const
{
    Int64 val = 0;
    readInt64(value, val);
    return findName(val, outNam);
}
// Raw numeric readout of the enum payload; always succeeds since no
// name lookup is involved.
const Bool EnumType::toNumber(const void* value, Int64& outNumber) const
{
    readInt64(value, outNumber);
    return true;
}
const Bool EnumType::fromString(const StringBuf& name, void* outValue) const
{
    // Intern the text as a StringID, resolve it to a numeric value, and
    // store it only on success.
    Int64 numeric = 0;
    const Bool resolved = findValue(StringID(name.c_str()), numeric);
    if (resolved)
        writeInt64(outValue, numeric);
    return resolved;
}
const Bool EnumType::fromStringID(const StringID& name, void* outValue) const
{
    // Resolve the option name to its numeric value; store only on success.
    Int64 numeric = 0;
    const Bool resolved = findValue(name, numeric);
    if (resolved)
        writeInt64(outValue, numeric);
    return resolved;
}
// Stores a raw numeric value without validating it against the registered
// options; always succeeds.
const Bool EnumType::fromNumber(const Int64 number, void* outValue) const
{
    writeInt64(outValue, number);
    return true;
}
//----
// Serializes the enum by option name (not numeric value) so saved data
// survives renumbering of the enum. Fails when the current value has no
// registered name.
const Bool EnumType::writeBinary(const TypeSerializationContext& typeContext, stream::IBinaryWriter& file, const void* data, const void* defaultData) const
{
    Int64 val = 0;
    readInt64(data, val);
    StringID optionName;
    if (!findName(val, optionName))
    {
        // Fixed: the message mixed a printf-style "%ld" into the {}-style
        // placeholders used by every other TRACE_ERROR in this file, so the
        // value was never substituted.
        TRACE_ERROR("Missing option name for enum {} in {}, the value would be lost", val, getName());
        return false;
    }
    file << optionName;
    return true;
}
// Deserializes an enum saved by writeBinary: reads the option name and maps
// it back to the current numeric value. Fails when the name is no longer a
// registered option.
const Bool EnumType::readBinary(const TypeSerializationContext& typeContext, stream::IBinaryReader& file, void* data) const
{
    StringID optionName;
    file >> optionName;
    Int64 val = 0;
    if (!findValue(optionName, val))
    {
        TRACE_ERROR("Failed to find numerical value for option '{}' in enum '{}'", optionName, getName());
        return false;
    }
    writeInt64(data, val);
    return true;
}
// Text serialization: writes the option name for the current value. Fails
// when the value has no registered name.
const Bool EnumType::writeText(const TypeSerializationContext& typeContext, stream::ITextWriter& stream, const void* data, const void* defaultData) const
{
    Int64 val = 0;
    readInt64(data, val);
    StringID optionName;
    if (!findName(val, optionName))
    {
        // Fixed: the message used printf-style "%lld" while TRACE_ERROR
        // elsewhere in this file uses {}-style placeholders, so the value
        // was never substituted.
        TRACE_ERROR("Unable to assign enum option to value {}", val);
        return false;
    }
    stream.writeValue(optionName.c_str());
    return true;
}
// Text deserialization: reads a single token and maps it back to the
// numeric value of the matching option. Fails on a non-scalar value or an
// unknown option name.
const Bool EnumType::readText(const TypeSerializationContext& typeContext, stream::ITextReader& stream, void* data) const
{
    StringView<AnsiChar> optionName;
    if (!stream.readValue(optionName))
    {
        TRACE_ERROR("Enum can only be parsed from single value");
        return false;
    }
    Int64 val = 0;
    if (!findValue(StringID(optionName), val))
    {
        TRACE_ERROR("Unable to assign value to enum option '{}'", optionName);
        return false;
    }
    writeInt64(data, val);
    return true;
}
// Folds the widened 64-bit value into the CRC so equal values hash the
// same regardless of the enum's underlying byte size.
void EnumType::calcHash(const void* data, CRC64& crc) const
{
    Int64 val = 0;
    readInt64(data, val);
    crc << val;
}
// Sign-extending read of the enum's underlying integer into a 64-bit value.
// NOTE(review): a size other than 1/2/4/8 leaves outValue untouched; the
// constructor asserts those are the only legal sizes, so callers pre-zero
// the output as a belt-and-braces default.
void EnumType::readInt64(const void* data, Int64& outValue) const
{
    switch (getSize())
    {
        case 1: outValue = (Int64) *(const Int8*)data; break;
        case 2: outValue = (Int64) *(const Int16*)data; break;
        case 4: outValue = (Int64) *(const Int32*)data; break;
        case 8: outValue = (Int64) *(const Int64*)data; break;
    }
}
// Truncating write of a 64-bit value into the enum's underlying integer.
// NOTE(review): a size other than 1/2/4/8 writes nothing; the constructor
// asserts those are the only legal sizes.
void EnumType::writeInt64(void* data, const Int64 value) const
{
    switch (getSize())
    {
        case 1: *(Int8*)data = (Int8)value; break;
        case 2: *(Int16*)data = (Int16)value; break;
        case 4: *(Int32*)data = (Int32)value; break;
        case 8: *(Int64*)data = value; break;
    }
}
const AnsiChar* GetEnumValueName(const rtti::EnumType* enumType, const Int64 enumValue)
{
    // Tolerate a null type and an unknown value by returning sentinel
    // strings instead of failing.
    if (nullptr == enumType)
        return "InvalidType";
    StringID valueName;
    const bool known = enumType->findName(enumValue, valueName);
    return known ? valueName.c_str() : "UnknownEnumOption";
}
const Bool GetEnumNameValue(const rtti::EnumType* enumType, const StringID name, Int64& outEnumValue)
{
    // A null type simply reports failure; otherwise delegate the lookup.
    return enumType ? enumType->findValue(name, outEnumValue) : false;
}
} // rtti
} // base
|
{
"content_hash": "371fa958a75c1a7de1aebea22d7c18ff",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 163,
"avg_line_length": 29.75347222222222,
"alnum_prop": 0.5085774302719104,
"repo_name": "InfernoEngine/engine",
"id": "412322b7a76bb0356dc039412c6119aaae4bd24c",
"size": "8569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dev/src/base/serialization/src/rttiEnumType.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2330"
},
{
"name": "C",
"bytes": "3093219"
},
{
"name": "C++",
"bytes": "16712786"
},
{
"name": "CSS",
"bytes": "41369"
},
{
"name": "Lua",
"bytes": "27464"
},
{
"name": "Shell",
"bytes": "3813"
},
{
"name": "Yacc",
"bytes": "72956"
}
]
}
|
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is the Netscape Portable Runtime (NSPR).
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1998-2000
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef prcmon_h___
#define prcmon_h___
/*
** Interface to cached monitors. Cached monitors use an address to find a
** given PR monitor. In this way a monitor can be associated with another
** object without preallocating a monitor for all objects.
**
** A hash table is used to quickly map addresses to individual monitors
** and the system automatically grows the hash table as needed.
**
** Cache monitors are about 5 times slower to use than uncached monitors.
*/
#include "prmon.h"
#include "prinrval.h"
PR_BEGIN_EXTERN_C
/**
** Like PR_EnterMonitor except use the "address" to find a monitor in the
** monitor cache. If successful, returns the PRMonitor now associated
** with "address". Note that you must PR_CExitMonitor the address to
** release the monitor cache entry (otherwise the monitor cache will fill
** up). This call will return NULL if the monitor cache needs to be
** expanded and the system is out of memory.
*/
NSPR_API(PRMonitor*) PR_CEnterMonitor(void *address);
/*
** Like PR_ExitMonitor except use the "address" to find a monitor in the
** monitor cache.
*/
NSPR_API(PRStatus) PR_CExitMonitor(void *address);
/*
** Like PR_Wait except use the "address" to find a monitor in the
** monitor cache.
*/
NSPR_API(PRStatus) PR_CWait(void *address, PRIntervalTime timeout);
/*
** Like PR_Notify except use the "address" to find a monitor in the
** monitor cache.
*/
NSPR_API(PRStatus) PR_CNotify(void *address);
/*
** Like PR_NotifyAll except use the "address" to find a monitor in the
** monitor cache.
*/
NSPR_API(PRStatus) PR_CNotifyAll(void *address);
/*
** Set a callback to be invoked each time a monitor is recycled from the cache
** freelist, with the monitor's cache-key passed in address.
*/
NSPR_API(void) PR_CSetOnMonitorRecycle(void (PR_CALLBACK *callback)(void *address));
PR_END_EXTERN_C
#endif /* prcmon_h___ */
|
{
"content_hash": "e3f8c44e9ba6aa369187b88ba30d03bb",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 84,
"avg_line_length": 37.38775510204081,
"alnum_prop": 0.728438864628821,
"repo_name": "leighpauls/k2cro4",
"id": "f61cf56a0badc8a13512dbe2c119f2c9435495a1",
"size": "3664",
"binary": false,
"copies": "71",
"ref": "refs/heads/master",
"path": "third_party/xulrunner-sdk/win/sdk/include/prcmon.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "3062"
},
{
"name": "AppleScript",
"bytes": "25392"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "68131038"
},
{
"name": "C",
"bytes": "242794338"
},
{
"name": "C#",
"bytes": "11024"
},
{
"name": "C++",
"bytes": "353525184"
},
{
"name": "Common Lisp",
"bytes": "3721"
},
{
"name": "D",
"bytes": "1931"
},
{
"name": "Emacs Lisp",
"bytes": "1639"
},
{
"name": "F#",
"bytes": "4992"
},
{
"name": "FORTRAN",
"bytes": "10404"
},
{
"name": "Java",
"bytes": "3845159"
},
{
"name": "JavaScript",
"bytes": "39146656"
},
{
"name": "Lua",
"bytes": "13768"
},
{
"name": "Matlab",
"bytes": "22373"
},
{
"name": "Objective-C",
"bytes": "21887598"
},
{
"name": "PHP",
"bytes": "2344144"
},
{
"name": "Perl",
"bytes": "49033099"
},
{
"name": "Prolog",
"bytes": "2926122"
},
{
"name": "Python",
"bytes": "39863959"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Racket",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "304063"
},
{
"name": "Scheme",
"bytes": "14853"
},
{
"name": "Shell",
"bytes": "9195117"
},
{
"name": "Tcl",
"bytes": "1919771"
},
{
"name": "Verilog",
"bytes": "3092"
},
{
"name": "Visual Basic",
"bytes": "1430"
},
{
"name": "eC",
"bytes": "5079"
}
]
}
|
// Copyright (c) 2013 GitHub, Inc.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.

// Defines Atom's compile-time version numbers and helper macros for
// building version strings and comparing against a minimum version.
#ifndef ATOM_VERSION_H
#define ATOM_VERSION_H

// Individual version components for this build.
#define ATOM_MAJOR_VERSION 0
#define ATOM_MINOR_VERSION 30
#define ATOM_PATCH_VERSION 0

// Non-zero for release builds; zero appends a "-pre" suffix to the
// version string (see ATOM_VERSION_STRING below).
#define ATOM_VERSION_IS_RELEASE 1

// Optional extra tag text appended after "major.minor.patch".
// Empty unless overridden at compile time.
#ifndef ATOM_TAG
# define ATOM_TAG ""
#endif

// Two-level stringification so that macro arguments are expanded
// before being turned into string literals.
#ifndef ATOM_STRINGIFY
#define ATOM_STRINGIFY(n) ATOM_STRINGIFY_HELPER(n)
#define ATOM_STRINGIFY_HELPER(n) #n
#endif

// "major.minor.patch" plus ATOM_TAG; non-release builds also get "-pre".
#if ATOM_VERSION_IS_RELEASE
# define ATOM_VERSION_STRING ATOM_STRINGIFY(ATOM_MAJOR_VERSION) "." \
                             ATOM_STRINGIFY(ATOM_MINOR_VERSION) "." \
                             ATOM_STRINGIFY(ATOM_PATCH_VERSION) \
                             ATOM_TAG
#else
# define ATOM_VERSION_STRING ATOM_STRINGIFY(ATOM_MAJOR_VERSION) "." \
                             ATOM_STRINGIFY(ATOM_MINOR_VERSION) "." \
                             ATOM_STRINGIFY(ATOM_PATCH_VERSION) \
                             ATOM_TAG "-pre"
#endif

// Full display version, e.g. "v0.30.0".
#define ATOM_VERSION "v" ATOM_VERSION_STRING

// Evaluates to true when the compile-time version is at least
// major.minor.patch (lexicographic comparison of the three components).
#define ATOM_VERSION_AT_LEAST(major, minor, patch) \
  (( (major) < ATOM_MAJOR_VERSION) \
  || ((major) == ATOM_MAJOR_VERSION && (minor) < ATOM_MINOR_VERSION) \
  || ((major) == ATOM_MAJOR_VERSION && (minor) == ATOM_MINOR_VERSION && (patch) <= ATOM_PATCH_VERSION))

#endif /* ATOM_VERSION_H */
|
{
"content_hash": "b8c51e04b0807972cb8a8ac847df08b1",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 103,
"avg_line_length": 31.74418604651163,
"alnum_prop": 0.6131868131868132,
"repo_name": "yalexx/electron",
"id": "64d0c7bfd73d66ebcaf6651d1b34162eeffd16be",
"size": "1365",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "atom/common/atom_version.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3671"
},
{
"name": "C++",
"bytes": "1276084"
},
{
"name": "CoffeeScript",
"bytes": "181682"
},
{
"name": "HTML",
"bytes": "11184"
},
{
"name": "JavaScript",
"bytes": "15568"
},
{
"name": "Objective-C",
"bytes": "10123"
},
{
"name": "Objective-C++",
"bytes": "116519"
},
{
"name": "Python",
"bytes": "74739"
},
{
"name": "Shell",
"bytes": "2593"
}
]
}
|
<?php
/**
 * Xmlconnect Config data upgrade
 *
 * Creates the 'xmlconnect_config_data' table that stores per-application
 * configuration values keyed by (application_id, category, path).
 *
 * @category    Mage
 * @package     Mage_Xmlconnect
 * @author      Magento Core Team <core@magentocommerce.com>
 */

/** @var $installer Mage_Core_Model_Resource_Setup */
$installer = $this;
$installer->startSetup();

$connection           = $installer->getConnection();
$configTableName      = $installer->getTable('xmlconnect/configData');
$applicationTableName = $installer->getTable('xmlconnect/application');

/**
 * Create table 'xmlconnect_config_data'
 */
$configTable = $connection->newTable($configTableName);

// Columns: the first three form the logical key, 'value' holds the data.
$configTable->addColumn('application_id', Varien_Db_Ddl_Table::TYPE_SMALLINT, null, array(
    'unsigned'  => true,
    'nullable'  => false,
), 'Application Id');
$configTable->addColumn('category', Varien_Db_Ddl_Table::TYPE_TEXT, 60, array(
    'nullable'  => false,
    'default'   => 'default',
), 'Category');
$configTable->addColumn('path', Varien_Db_Ddl_Table::TYPE_TEXT, 250, array(
    'nullable'  => false,
), 'Path');
$configTable->addColumn('value', Varien_Db_Ddl_Table::TYPE_TEXT, '64k', array(
    'nullable'  => false,
), 'Value');

// Enforce one value per (application_id, category, path).
$uniqueIndexColumns = array('application_id', 'category', 'path');
$configTable->addIndex(
    $installer->getIdxName(
        $configTableName,
        $uniqueIndexColumns,
        Varien_Db_Adapter_Interface::INDEX_TYPE_UNIQUE
    ),
    $uniqueIndexColumns,
    array('type' => Varien_Db_Adapter_Interface::INDEX_TYPE_UNIQUE)
);

// Remove configuration rows automatically when the owning application goes away.
$configTable->addForeignKey(
    $installer->getFkName(
        $configTableName,
        'application_id',
        $applicationTableName,
        'application_id'
    ),
    'application_id',
    $applicationTableName,
    'application_id',
    Varien_Db_Ddl_Table::ACTION_CASCADE,
    Varien_Db_Ddl_Table::ACTION_CASCADE
);
$configTable->setComment('Xmlconnect Configuration Data');

$connection->createTable($configTable);
$installer->endSetup();
|
{
"content_hash": "8aaadee45c409878e09ef52f14ac63f3",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 82,
"avg_line_length": 31.688524590163933,
"alnum_prop": 0.5897568546301086,
"repo_name": "krattai/ss-middleware",
"id": "5ac2854e9e631b27cc3b919378caf782af3e6c2f",
"size": "2892",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "mageplus/app/code/core/Mage/XmlConnect/sql/xmlconnect_setup/mysql4-upgrade-1.4.0.13-1.4.0.14.php",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ActionScript",
"bytes": "19946"
},
{
"name": "ApacheConf",
"bytes": "5851"
},
{
"name": "Batchfile",
"bytes": "5104"
},
{
"name": "CSS",
"bytes": "993063"
},
{
"name": "HTML",
"bytes": "4180718"
},
{
"name": "Java",
"bytes": "17605"
},
{
"name": "JavaScript",
"bytes": "846402"
},
{
"name": "Makefile",
"bytes": "5580"
},
{
"name": "PHP",
"bytes": "42505354"
},
{
"name": "Python",
"bytes": "329268"
},
{
"name": "Shell",
"bytes": "2731"
},
{
"name": "XSLT",
"bytes": "2135"
}
]
}
|
<object-stream>
<com.fatwire.realtime.ResourceDataImpl>
<id class="string">OpenMarket/Xcelerate/AssetType/AdvCols/UnPackRuleDetails</id>
<resourceType>ELEMENTCATALOG</resourceType>
<deps class="empty-set"/>
<data class="com.openmarket.xcelerate.common.ElementCatalogEntry" serialization="custom">
<com.openmarket.xcelerate.common.ElementCatalogEntry>
<int>1</int>
<string>OpenMarket/Xcelerate/AssetType/AdvCols/UnPackRuleDetails</string>
<string>rules to variables</string>
<string></string>
<string></string>
<string>OpenMarket/Xcelerate/AssetType/AdvCols/UnPackRuleDetails.xml</string>
<string></string>
<string></string>
</com.openmarket.xcelerate.common.ElementCatalogEntry>
</data>
</com.fatwire.realtime.ResourceDataImpl>
</object-stream>
<!--==================___ METADATA ___================
<dependencies>
<list/>
</dependencies>
==================______________________==================
#
#Mon Dec 16 23:39:37 UTC 2013
storable0=/ELEMENTS/OpenMarket/Xcelerate/AssetType/AdvCols/UnPackRuleDetails.xml|null|_storable_type\=%2FELEMENTS%2FOpenMarket%2FXcelerate%2FAssetType%2FAdvCols%2FUnPackRuleDetails.xml
@hashval=-1826001403
key=@ELEMENTCATALOG-OpenMarket/Xcelerate/AssetType/AdvCols/UnPackRuleDetails
type=ELEMENTCATALOG
==================___ END METADATA ___==================-->
|
{
"content_hash": "af91fcea5aa9da084b96e9f7288c57da",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 184,
"avg_line_length": 43.625,
"alnum_prop": 0.670487106017192,
"repo_name": "sciabarra/AgileSites",
"id": "6dc335d2a4681809c8bf80453ecb555a3453dd17",
"size": "1396",
"binary": false,
"copies": "2",
"ref": "refs/heads/1.8.1",
"path": "export/envision/Demo-11.6/src/_metadata/ELEMENTS/OpenMarket/Xcelerate/AssetType/AdvCols/UnPackRuleDetails.xml.main.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3000"
},
{
"name": "CSS",
"bytes": "28291"
},
{
"name": "HTML",
"bytes": "23822"
},
{
"name": "Java",
"bytes": "906893"
},
{
"name": "JavaScript",
"bytes": "12920"
},
{
"name": "Scala",
"bytes": "61202"
},
{
"name": "Shell",
"bytes": "2956"
}
]
}
|
using Markdig.Syntax.Inlines;
namespace Markdig.Renderers.Normalize.Inlines
{
/// <summary>
/// A Normalize renderer for a <see cref="LinkInline"/>.
/// </summary>
/// <seealso cref="NormalizeObjectRenderer{LinkInline}" />
public class LinkInlineRenderer : NormalizeObjectRenderer<LinkInline>
{
protected override void Write(NormalizeRenderer renderer, LinkInline link)
{
if (link.IsImage)
{
renderer.Write('!');
}
renderer.Write('[');
renderer.WriteChildren(link);
renderer.Write(']');
if (link.Label != null)
{
if (link.FirstChild is LiteralInline literal && literal.Content.Length == link.Label.Length && literal.Content.Match(link.Label))
{
// collapsed reference and shortcut links
if (!link.IsShortcut)
{
renderer.Write("[]");
}
}
else
{
// full link
renderer.Write('[').Write(link.Label).Write(']');
}
}
else
{
if (!string.IsNullOrEmpty(link.Url))
{
renderer.Write('(').Write(link.Url);
if (!string.IsNullOrEmpty(link.Title))
{
renderer.Write(" \"");
renderer.Write(link.Title.Replace(@"""", @"\"""));
renderer.Write("\"");
}
renderer.Write(')');
}
}
}
}
}
|
{
"content_hash": "4dcab4c4b9dc5568cd2a39365e3e904c",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 145,
"avg_line_length": 31.618181818181817,
"alnum_prop": 0.4284071305347901,
"repo_name": "lunet-io/markdig",
"id": "1357ce93c04ea28799b802df956cae32ea084eff",
"size": "1923",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Markdig/Renderers/Normalize/Inlines/LinkInlineRenderer.cs",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C#",
"bytes": "2087944"
},
{
"name": "PowerShell",
"bytes": "4550"
}
]
}
|
<?xml version="1.0" encoding="UTF-8"?>
<Tokens version="1.0">
<File path="Classes/AWSKinesisHashKeyRange.html">
<Token>
<TokenIdentifier>//apple_ref/occ/cl/AWSKinesisHashKeyRange</TokenIdentifier>
<Abstract type="html"><p>The range of possible hash key values for the shard, which is a set of ordered contiguous positive integers.</p>
Required parameters: [StartingHashKey, EndingHashKey]</Abstract>
<DeclaredIn>AWSKinesisModel.h</DeclaredIn>
<NodeRef refid="968"/>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instm/AWSKinesisHashKeyRange/setEndingHashKey:</TokenIdentifier>
<Abstract type="html"><p>The ending hash key of the hash key range.</p></Abstract>
<DeclaredIn>AWSKinesisModel.h</DeclaredIn>
<Declaration>@property (nonatomic, strong) NSString *endingHashKey</Declaration>
<Anchor>//api/name/endingHashKey</Anchor>
<NodeRef refid="968"/>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instm/AWSKinesisHashKeyRange/endingHashKey</TokenIdentifier>
<Abstract type="html"><p>The ending hash key of the hash key range.</p></Abstract>
<DeclaredIn>AWSKinesisModel.h</DeclaredIn>
<Declaration>@property (nonatomic, strong) NSString *endingHashKey</Declaration>
<Anchor>//api/name/endingHashKey</Anchor>
<NodeRef refid="968"/>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instp/AWSKinesisHashKeyRange/endingHashKey</TokenIdentifier>
<Abstract type="html"><p>The ending hash key of the hash key range.</p></Abstract>
<DeclaredIn>AWSKinesisModel.h</DeclaredIn>
<Declaration>@property (nonatomic, strong) NSString *endingHashKey</Declaration>
<Anchor>//api/name/endingHashKey</Anchor>
<NodeRef refid="968"/>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instm/AWSKinesisHashKeyRange/setStartingHashKey:</TokenIdentifier>
<Abstract type="html"><p>The starting hash key of the hash key range.</p></Abstract>
<DeclaredIn>AWSKinesisModel.h</DeclaredIn>
<Declaration>@property (nonatomic, strong) NSString *startingHashKey</Declaration>
<Anchor>//api/name/startingHashKey</Anchor>
<NodeRef refid="968"/>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instm/AWSKinesisHashKeyRange/startingHashKey</TokenIdentifier>
<Abstract type="html"><p>The starting hash key of the hash key range.</p></Abstract>
<DeclaredIn>AWSKinesisModel.h</DeclaredIn>
<Declaration>@property (nonatomic, strong) NSString *startingHashKey</Declaration>
<Anchor>//api/name/startingHashKey</Anchor>
<NodeRef refid="968"/>
</Token>
<Token>
<TokenIdentifier>//apple_ref/occ/instp/AWSKinesisHashKeyRange/startingHashKey</TokenIdentifier>
<Abstract type="html"><p>The starting hash key of the hash key range.</p></Abstract>
<DeclaredIn>AWSKinesisModel.h</DeclaredIn>
<Declaration>@property (nonatomic, strong) NSString *startingHashKey</Declaration>
<Anchor>//api/name/startingHashKey</Anchor>
<NodeRef refid="968"/>
</Token>
</File>
</Tokens>
|
{
"content_hash": "71e6a68cff6361e1187a2e2e7990e5e8",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 152,
"avg_line_length": 35.64835164835165,
"alnum_prop": 0.6954377311960542,
"repo_name": "davidbutz/ChristmasFamDuels",
"id": "48002c7d9fec57be63b59066c23f9e0f7e75d8b2",
"size": "3244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "iOS/frameworks/aws-ios-sdk-2.4.1/documentation/com.amazon.aws.ios.docset/Contents/Resources/Tokens968.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "211"
},
{
"name": "CSS",
"bytes": "77"
},
{
"name": "HTML",
"bytes": "330"
},
{
"name": "JavaScript",
"bytes": "212297"
},
{
"name": "Objective-C",
"bytes": "3465743"
},
{
"name": "Ruby",
"bytes": "461"
},
{
"name": "Shell",
"bytes": "2569"
},
{
"name": "Swift",
"bytes": "325330"
}
]
}
|
FROM jenkins/jenkins:2.112
LABEL maintainer="mstewart@riotgames.com"

# Create the Jenkins log folder and hand ownership to the jenkins user.
# Done in a single RUN to avoid an extra image layer; mkdir -p makes the
# step idempotent if the directory already exists.
USER root
RUN mkdir -p /var/log/jenkins \
    && chown -R jenkins:jenkins /var/log/jenkins
USER jenkins

# Set default JVM heap and Jenkins startup options (log to the folder above).
ENV JAVA_OPTS="-Xmx8192m"
ENV JENKINS_OPTS="--handlerCountMax=300 --logfile=/var/log/jenkins/jenkins.log"
|
{
"content_hash": "126172b4ba524ad1c115420425fe4b83",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 79,
"avg_line_length": 26.916666666666668,
"alnum_prop": 0.7832817337461301,
"repo_name": "maxfields2000/dockerjenkins_tutorial",
"id": "d8b100bdfed21203753332d67c63ca073458f87f",
"size": "323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tutorial_02/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "17125"
},
{
"name": "Groovy",
"bytes": "8063"
},
{
"name": "Makefile",
"bytes": "3726"
},
{
"name": "Shell",
"bytes": "40467"
}
]
}
|
package org.apache.archiva.rest.services.interceptors;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
/**
 * Class to set up the Jackson JSON configuration.
 *
 * @author Olivier Lamy
 * @since 1.4-M3
 */
@Service("archivaJacksonJsonConfigurator")
public class JacksonJsonConfigurator
{
    private final Logger log = LoggerFactory.getLogger( getClass() );

    /**
     * Configures the shared Jackson mapper so that unknown JSON properties
     * are ignored during deserialization instead of raising an error.
     *
     * @param objectMapper the injected Jackson mapper to configure
     */
    @Inject
    public JacksonJsonConfigurator( ObjectMapper objectMapper )
    {
        log.info( "configure jackson ObjectMapper" );
        objectMapper.configure( DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false );
    }
}
|
{
"content_hash": "bf61049cce3b25b2afb8540eae160be7",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 91,
"avg_line_length": 26.2,
"alnum_prop": 0.7659033078880407,
"repo_name": "Altiscale/archiva",
"id": "4137eb74ffd3af84e4bdf62476cacc12e2154855",
"size": "1593",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/interceptors/JacksonJsonConfigurator.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "168555"
},
{
"name": "HTML",
"bytes": "216486"
},
{
"name": "Java",
"bytes": "4514350"
},
{
"name": "JavaScript",
"bytes": "824502"
},
{
"name": "Shell",
"bytes": "1301"
}
]
}
|
import {MessageBasedPlatformLocation} from './platform_location';
import {CONST_EXPR} from 'angular2/src/facade/lang';
import {BrowserPlatformLocation} from 'angular2/src/router/browser_platform_location';
import {APP_INITIALIZER, Provider, Injector, NgZone} from 'angular2/core';
/**
 * Providers for router support on the render side of a web-worker app:
 * the message-based platform location, its browser-backed implementation,
 * and an APP_INITIALIZER hook that starts the location listeners once the
 * injector is available (see initRouterListeners below).
 */
export const WORKER_RENDER_ROUTER = CONST_EXPR([
  MessageBasedPlatformLocation,
  BrowserPlatformLocation,
  CONST_EXPR(
      new Provider(APP_INITIALIZER,
                   {useFactory: initRouterListeners, multi: true, deps: CONST_EXPR([Injector])}))
]);
function initRouterListeners(injector: Injector): () => void {
return () => {
let zone = injector.get(NgZone);
zone.run(() => injector.get(MessageBasedPlatformLocation).start());
};
}
|
{
"content_hash": "9f609b45b323cae27b534b1691bf5339",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 97,
"avg_line_length": 37,
"alnum_prop": 0.722972972972973,
"repo_name": "hdeshev/angular",
"id": "21805ee2e045a971b66bf8d978986cc8cacf05f4",
"size": "740",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "modules/angular2/src/web_workers/ui/router_providers.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "62917"
},
{
"name": "Dart",
"bytes": "660795"
},
{
"name": "HTML",
"bytes": "67612"
},
{
"name": "JavaScript",
"bytes": "96667"
},
{
"name": "Protocol Buffer",
"bytes": "4818"
},
{
"name": "Python",
"bytes": "3535"
},
{
"name": "Shell",
"bytes": "29200"
},
{
"name": "TypeScript",
"bytes": "3372059"
}
]
}
|
var App = require('app');
/**
 * View for the host-details block shown on the charts heatmap page.
 * The `details` hash holds sample values ('test node', '192.168.0.0', …)
 * — presumably placeholders replaced with live host metrics elsewhere;
 * TODO(review): confirm against the code that updates this view.
 */
App.MainChartsHeatmapHostDetailView = Em.View.extend({
  templateName: require('templates/main/charts/heatmap/heatmap_host_detail'),

  /** @private */ classNames:['heatmap_host_details'],

  /** @private */ elementId:'heatmapDetailsBlock',

  /** @private placeholder host/metric values rendered by the template */
  details:{
    hostName:'test node',
    publicHostName:'test node',
    osType: 'OS',
    ip: '192.168.0.0',
    metricName: 'metric-name',
    metricValue: 'metric-value',
    diskUsage: '10',
    cpuUsage: '10',
    memoryUsage: '10',
    hostComponents: 'host components'
  }
});
|
{
"content_hash": "896beaa13a3d1cfbba30f0d465363608",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 77,
"avg_line_length": 27.047619047619047,
"alnum_prop": 0.647887323943662,
"repo_name": "keedio/ambari-web",
"id": "db2fc41ddbd97bf8b7180d7e6a513b79dd79ed16",
"size": "1374",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "app/views/main/charts/heatmap/heatmap_host_detail.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "454646"
},
{
"name": "CoffeeScript",
"bytes": "3291"
},
{
"name": "HTML",
"bytes": "479881"
},
{
"name": "JavaScript",
"bytes": "11003846"
},
{
"name": "Shell",
"bytes": "3467"
}
]
}
|
GMRES solves the problem $Ax = b$ approximately for $x$, where $A$ is a general linear operator and $b$ is the right-hand side vector. The method is optimal in the sense that it selects the solution with the minimal residual from a Krylov subspace, but the price of optimality is storage and computational effort that grow with each iteration. Restarts are necessary to cap these costs.
## Usage
```@docs
gmres
gmres!
```
## Implementation details
The implementation pre-allocates a matrix $V$ of size `n` by `restart` whose columns form an orthonormal basis for the Krylov subspace. This allows BLAS2 operations when updating the solution vector $x$. The Hessenberg matrix is also pre-allocated.
Modified Gram-Schmidt is used to orthogonalize the columns of $V$.
The computation of the residual norm is implemented in a non-standard way, namely keeping track of a vector $\gamma$ in the null-space of $H_k^*$, which is the adjoint of the $(k + 1) \times k$ Hessenberg matrix $H_k$ at the $k$th iteration. Only when $x$ needs to be updated is the Hessenberg matrix mutated with Givens rotations.
!!! tip
GMRES can be used as an [iterator](@ref Iterators). This makes it possible to access the Hessenberg matrix and Krylov basis vectors during the iterations.
|
{
"content_hash": "a0547384d4e1e8f14e18fa86d9450818",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 372,
"avg_line_length": 66.05263157894737,
"alnum_prop": 0.7673306772908367,
"repo_name": "JuliaLang/IterativeSolvers.jl",
"id": "5f5574d2ab86f8c1b169bb70275e72d1563d29a8",
"size": "1287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/src/linear_systems/gmres.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Julia",
"bytes": "126805"
}
]
}
|
<?xml version="1.0" encoding="utf-8"?>
<TextView xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:padding="20dp"
android:textSize="18sp"
android:textColor="#800080">
</TextView>
|
{
"content_hash": "627afff555ae5d197930cc7a8c9aa7de",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 68,
"avg_line_length": 35.75,
"alnum_prop": 0.7062937062937062,
"repo_name": "AndroidDevLog/AndroidDevLog",
"id": "538eb9be57e763b1c46e2e34932618823922b0e6",
"size": "286",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "118.RunTimeAddItemtoListView/app/src/main/res/layout/list.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "262"
},
{
"name": "Java",
"bytes": "1360389"
},
{
"name": "PHP",
"bytes": "11775"
}
]
}
|
import * as fs from "fs";
import * as path from "path";
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-var-requires
const { marked } = require("marked");
import TerminalRenderer = require("marked-terminal");
import { checkMinRequiredVersion } from "../checkMinRequiredVersion";
import { Command } from "../command";
import { Config } from "../config";
import { FirebaseError } from "../error";
import { promptOnce } from "../prompt";
import { logger } from "../logger";
import * as npmDependencies from "../init/features/functions/npm-dependencies";
marked.setOptions({
renderer: new TerminalRenderer(),
});
const TEMPLATE_ROOT = path.resolve(__dirname, "../../templates/extensions/");
const FUNCTIONS_ROOT = path.resolve(__dirname, "../../templates/init/functions/");
/**
 * Reads the extension template files shared by both language flavors
 * (spec, pre/post-install docs, and changelog) from TEMPLATE_ROOT.
 */
function readCommonTemplates() {
  const read = (fileName: string): string =>
    fs.readFileSync(path.join(TEMPLATE_ROOT, fileName), "utf8");
  return {
    extSpecTemplate: read("extension.yaml"),
    preinstallTemplate: read("PREINSTALL.md"),
    postinstallTemplate: read("POSTINSTALL.md"),
    changelogTemplate: read("CHANGELOG.md"),
  };
}
/**
 * Command for setting up boilerplate code for a new extension.
 *
 * Prompts for the Cloud Functions language, writes the matching template
 * files into the current directory (or `options.cwd`), offers to install
 * npm dependencies, and prints the language-specific welcome text.
 */
export const command = new Command("ext:dev:init")
  .description("initialize files for writing an extension in the current directory")
  .before(checkMinRequiredVersion, "extDevMinVersion")
  .action(async (options: any) => {
    // Operate on the user-supplied working directory when given.
    const cwd = options.cwd || process.cwd();
    const config = new Config({}, { projectDir: cwd, cwd: cwd });

    try {
      const lang = await promptOnce({
        type: "list",
        name: "language",
        message: "In which language do you want to write the Cloud Functions for your extension?",
        default: "javascript",
        choices: [
          {
            name: "JavaScript",
            value: "javascript",
          },
          {
            name: "TypeScript",
            value: "typescript",
          },
        ],
      });
      // Dispatch to the language-specific scaffolding routine.
      switch (lang) {
        case "javascript": {
          await javascriptSelected(config);
          break;
        }
        case "typescript": {
          await typescriptSelected(config);
          break;
        }
        default: {
          throw new FirebaseError(`${lang} is not supported.`);
        }
      }
      await npmDependencies.askInstallDependencies({}, config);
      // Show the per-language welcome notes rendered for the terminal.
      const welcome = fs.readFileSync(path.join(TEMPLATE_ROOT, lang, "WELCOME.md"), "utf8");
      return logger.info("\n" + marked(welcome));
    } catch (err: any) {
      // Wrap unexpected errors so the CLI reports a consistent error type;
      // FirebaseErrors are re-thrown untouched.
      if (!(err instanceof FirebaseError)) {
        throw new FirebaseError(
          `Error occurred when initializing files for new extension: ${err.message}`,
          {
            original: err,
          }
        );
      }
      throw err;
    }
  });
/**
 * Sets up TypeScript boilerplate code for a new extension.
 *
 * Reads the typescript-flavored templates, asks whether to enable ESLint,
 * then offers to write each project file in turn.
 *
 * @param {Config} config configuration options
 */
async function typescriptSelected(config: Config): Promise<void> {
  // Helper for templates that live under templates/extensions/typescript/.
  const readExtTemplate = (fileName: string): string =>
    fs.readFileSync(path.join(TEMPLATE_ROOT, "typescript", fileName), "utf8");

  const packageLintingTemplate = readExtTemplate("package.lint.json");
  const packageNoLintingTemplate = readExtTemplate("package.nolint.json");
  const tsconfigTemplate = readExtTemplate("tsconfig.json");
  const tsconfigDevTemplate = readExtTemplate("tsconfig.dev.json");
  const indexTemplate = readExtTemplate("index.ts");
  const gitignoreTemplate = readExtTemplate("_gitignore");
  // The eslintrc template is shared with `firebase init functions`.
  const eslintTemplate = fs.readFileSync(
    path.join(FUNCTIONS_ROOT, "typescript", "_eslintrc"),
    "utf8"
  );

  const lint = await promptOnce({
    name: "lint",
    type: "confirm",
    message: "Do you want to use ESLint to catch probable bugs and enforce style?",
    default: true,
  });

  const templates = readCommonTemplates();
  await config.askWriteProjectFile("extension.yaml", templates.extSpecTemplate);
  await config.askWriteProjectFile("PREINSTALL.md", templates.preinstallTemplate);
  await config.askWriteProjectFile("POSTINSTALL.md", templates.postinstallTemplate);
  await config.askWriteProjectFile("CHANGELOG.md", templates.changelogTemplate);
  await config.askWriteProjectFile("functions/src/index.ts", indexTemplate);

  // package.json always gets written; which template depends on linting.
  const packageTemplate = lint ? packageLintingTemplate : packageNoLintingTemplate;
  await config.askWriteProjectFile("functions/package.json", packageTemplate);
  if (lint) {
    await config.askWriteProjectFile("functions/.eslintrc.js", eslintTemplate);
  }
  await config.askWriteProjectFile("functions/tsconfig.json", tsconfigTemplate);
  if (lint) {
    await config.askWriteProjectFile("functions/tsconfig.dev.json", tsconfigDevTemplate);
  }
  await config.askWriteProjectFile("functions/.gitignore", gitignoreTemplate);
}
/**
 * Sets up JavaScript boilerplate code for a new extension.
 *
 * Reads the javascript-flavored templates, asks whether to enable ESLint,
 * then offers to write each project file in turn.
 *
 * @param {Config} config configuration options
 */
async function javascriptSelected(config: Config): Promise<void> {
  // Helper for templates that live under templates/extensions/javascript/.
  const readExtTemplate = (fileName: string): string =>
    fs.readFileSync(path.join(TEMPLATE_ROOT, "javascript", fileName), "utf8");

  const indexTemplate = readExtTemplate("index.js");
  const packageLintingTemplate = readExtTemplate("package.lint.json");
  const packageNoLintingTemplate = readExtTemplate("package.nolint.json");
  const gitignoreTemplate = readExtTemplate("_gitignore");
  // The eslintrc template is shared with `firebase init functions`.
  const eslintTemplate = fs.readFileSync(
    path.join(FUNCTIONS_ROOT, "javascript", "_eslintrc"),
    "utf8"
  );

  const lint = await promptOnce({
    name: "lint",
    type: "confirm",
    message: "Do you want to use ESLint to catch probable bugs and enforce style?",
    default: false,
  });

  const templates = readCommonTemplates();
  await config.askWriteProjectFile("extension.yaml", templates.extSpecTemplate);
  await config.askWriteProjectFile("PREINSTALL.md", templates.preinstallTemplate);
  await config.askWriteProjectFile("POSTINSTALL.md", templates.postinstallTemplate);
  await config.askWriteProjectFile("CHANGELOG.md", templates.changelogTemplate);
  await config.askWriteProjectFile("functions/index.js", indexTemplate);

  // package.json always gets written; which template depends on linting.
  const packageTemplate = lint ? packageLintingTemplate : packageNoLintingTemplate;
  await config.askWriteProjectFile("functions/package.json", packageTemplate);
  if (lint) {
    await config.askWriteProjectFile("functions/.eslintrc.js", eslintTemplate);
  }
  await config.askWriteProjectFile("functions/.gitignore", gitignoreTemplate);
}
|
{
"content_hash": "57d9096f72e4b810bc2fbb5ed4c29efc",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 103,
"avg_line_length": 36.99465240641711,
"alnum_prop": 0.6869037294015612,
"repo_name": "firebase/firebase-tools",
"id": "86277dd87d0339ee9a23a2f66e8ae28faf7fe585",
"size": "6918",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/commands/ext-dev-init.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3817"
},
{
"name": "Dockerfile",
"bytes": "1085"
},
{
"name": "Go",
"bytes": "899"
},
{
"name": "HTML",
"bytes": "12558"
},
{
"name": "JavaScript",
"bytes": "130080"
},
{
"name": "Shell",
"bytes": "15843"
},
{
"name": "TypeScript",
"bytes": "4750085"
}
]
}
|
/**
 * well_formed: checks an (X)HTML file for well-formedness using Gumbo.
 *
 * Usage: well_formed <html filename>
 *
 * Reads the file into memory, normalizes it (strips a leading <?xml ...?>
 * declaration and inserts a basic doctype when missing), echoes the
 * normalized source, then parses it with XHTML rules and prints every
 * parse error with its position plus a caret diagnostic.
 */
int main(int argc, char** argv) {
  if (argc != 2) {
    std::cout << "well_formed <html filename>\n";
    exit(EXIT_FAILURE);
  }
  const char* filename = argv[1];
  std::ifstream in(filename, std::ios::in | std::ios::binary);
  if (!in) {
    std::cout << "File " << filename << " not found!\n";
    exit(EXIT_FAILURE);
  }

  // Slurp the whole file into a string.
  std::string contents;
  in.seekg(0, std::ios::end);
  contents.resize(static_cast<size_t>(in.tellg()));
  in.seekg(0, std::ios::beg);
  in.read(&contents[0], contents.size());
  in.close();

  // Remove any xml header line and the whitespace that follows it.
  if (contents.compare(0, 5, "<?xml") == 0) {
    size_t end = contents.find_first_of('>', 0);
    end = contents.find_first_not_of("\n\r\t\v\f ", end + 1);
    contents.erase(0, end);
  }

  // Add in a basic doctype if missing.
  if ((contents.compare(0, 9, "<!DOCTYPE") != 0) && (contents.compare(0, 9, "<!doctype") != 0)) {
    contents.insert(0, "<!DOCTYPE html>\n");
  }

  fprintf(stdout, "%s", contents.c_str());
  fprintf(stdout, "\n--------\n");

  GumboOptions myoptions = kGumboDefaultOptions;
  myoptions.use_xhtml_rules = true;
  // leave this as false to prevent pre-mature stopping when no error exists
  myoptions.stop_on_first_error = false;

  GumboOutput* output = gumbo_parse_with_options(&myoptions, contents.data(), contents.length());

  // Report every recorded parse error with its location.
  const GumboVector* errors = &output->errors;
  // GumboVector::length is unsigned; use an unsigned index to avoid a
  // signed/unsigned comparison.
  for (unsigned int i = 0; i < errors->length; ++i) {
    GumboError* er = static_cast<GumboError*>(errors->data[i]);
    unsigned int linenum = er->position.line;
    unsigned int colnum = er->position.column;
    unsigned int typenum = er->type;
    GumboStringBuffer text;
    gumbo_string_buffer_init(&text);
    gumbo_error_to_string(er, &text);
    std::string errmsg(text.data, text.length);
    // %u matches the unsigned arguments (the original used %d).
    fprintf(stdout, "line: %u col: %u type %u %s\n", linenum, colnum, typenum, errmsg.c_str());
    gumbo_string_buffer_destroy(&text);
    gumbo_print_caret_diagnostic(er, contents.c_str());
  }
  gumbo_destroy_output(output);
  return EXIT_SUCCESS;
}
|
{
"content_hash": "a0189bbb8edab57a5ea174462ebc1a95",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 97,
"avg_line_length": 34.03448275862069,
"alnum_prop": 0.6337386018237082,
"repo_name": "Sigil-Ebook/sigil-gumbo",
"id": "3a567a965de1b4ec3382a39bf2acdb5ddba489e9",
"size": "2894",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/well_formed.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1210967"
},
{
"name": "C++",
"bytes": "138756"
},
{
"name": "CMake",
"bytes": "3609"
},
{
"name": "HTML",
"bytes": "9914178"
},
{
"name": "M4",
"bytes": "928"
},
{
"name": "Makefile",
"bytes": "4287"
},
{
"name": "Python",
"bytes": "64138"
},
{
"name": "Ragel",
"bytes": "128067"
},
{
"name": "Shell",
"bytes": "990"
}
]
}
|
// Template Source: BaseEntityRequestBuilder.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
package com.microsoft.graph.requests;
import com.microsoft.graph.http.IRequestBuilder;
import com.microsoft.graph.core.ClientException;
import com.microsoft.graph.models.IosLobApp;
import com.microsoft.graph.models.MobileAppAssignment;
import java.util.Arrays;
import java.util.EnumSet;
import javax.annotation.Nullable;
import javax.annotation.Nonnull;
import com.microsoft.graph.core.IBaseClient;
import com.microsoft.graph.http.BaseRequestBuilder;
import com.microsoft.graph.models.MobileAppAssignParameterSet;
// **NOTE** This file was generated by a tool and any changes will be overwritten.
/**
* The class for the Ios Lob App Request Builder.
*/
public class IosLobAppRequestBuilder extends BaseRequestBuilder<IosLobApp> {
/**
* The request builder for the IosLobApp
*
* @param requestUrl the request URL
* @param client the service client
* @param requestOptions the options for this request
*/
public IosLobAppRequestBuilder(@Nonnull final String requestUrl, @Nonnull final IBaseClient<?> client, @Nullable final java.util.List<? extends com.microsoft.graph.options.Option> requestOptions) {
super(requestUrl, client, requestOptions);
}
/**
* Creates the request
*
* @param requestOptions the options for this request
* @return the IosLobAppRequest instance
*/
@Nonnull
public IosLobAppRequest buildRequest(@Nullable final com.microsoft.graph.options.Option... requestOptions) {
return buildRequest(getOptions(requestOptions));
}
/**
* Creates the request with specific requestOptions instead of the existing requestOptions
*
* @param requestOptions the options for this request
* @return the IosLobAppRequest instance
*/
@Nonnull
public IosLobAppRequest buildRequest(@Nonnull final java.util.List<? extends com.microsoft.graph.options.Option> requestOptions) {
return new com.microsoft.graph.requests.IosLobAppRequest(getRequestUrl(), getClient(), requestOptions);
}
/**
* Gets a request builder for the MobileAppAssignment collection
*
* @return the collection request builder
*/
@Nonnull
public com.microsoft.graph.requests.MobileAppAssignmentCollectionRequestBuilder assignments() {
return new com.microsoft.graph.requests.MobileAppAssignmentCollectionRequestBuilder(getRequestUrlWithAdditionalSegment("assignments"), getClient(), null);
}
/**
* Gets a request builder for the MobileAppAssignment item
*
* @return the request builder
* @param id the item identifier
*/
@Nonnull
public com.microsoft.graph.requests.MobileAppAssignmentRequestBuilder assignments(@Nonnull final String id) {
return new com.microsoft.graph.requests.MobileAppAssignmentRequestBuilder(getRequestUrlWithAdditionalSegment("assignments") + "/" + id, getClient(), null);
}
/**
* Gets a request builder for the MobileAppCategory collection
*
* @return the collection request builder
*/
@Nonnull
public com.microsoft.graph.requests.MobileAppCategoryCollectionWithReferencesRequestBuilder categories() {
return new com.microsoft.graph.requests.MobileAppCategoryCollectionWithReferencesRequestBuilder(getRequestUrlWithAdditionalSegment("categories"), getClient(), null);
}
/**
 * Gets a request builder for the MobileAppCategory item
 *
 * @return the request builder
 * @param id the item identifier
 */
@Nonnull
public com.microsoft.graph.requests.MobileAppCategoryWithReferenceRequestBuilder categories(@Nonnull final String id) {
    // Address a single category reference by appending its id to the collection URL.
    final String itemUrl = getRequestUrlWithAdditionalSegment("categories") + "/" + id;
    return new com.microsoft.graph.requests.MobileAppCategoryWithReferenceRequestBuilder(itemUrl, getClient(), null);
}
/**
 * Gets a request builder for the MobileAppContent collection
 *
 * @return the collection request builder
 */
@Nonnull
public com.microsoft.graph.requests.MobileAppContentCollectionRequestBuilder contentVersions() {
    // Content versions are exposed under the "contentVersions" navigation segment.
    final String collectionUrl = getRequestUrlWithAdditionalSegment("contentVersions");
    return new com.microsoft.graph.requests.MobileAppContentCollectionRequestBuilder(collectionUrl, getClient(), null);
}
/**
 * Gets a request builder for the MobileAppContent item
 *
 * @return the request builder
 * @param id the item identifier
 */
@Nonnull
public com.microsoft.graph.requests.MobileAppContentRequestBuilder contentVersions(@Nonnull final String id) {
    // Address a single content version by appending its id to the collection URL.
    final String itemUrl = getRequestUrlWithAdditionalSegment("contentVersions") + "/" + id;
    return new com.microsoft.graph.requests.MobileAppContentRequestBuilder(itemUrl, getClient(), null);
}
/**
 * Gets a builder to execute the method
 * @return the request builder
 * @param parameters the parameters for the service method
 */
@Nonnull
public MobileAppAssignRequestBuilder assign(@Nonnull final MobileAppAssignParameterSet parameters) {
    // The Graph action is addressed by its fully-qualified name segment.
    final String actionUrl = getRequestUrlWithAdditionalSegment("microsoft.graph.assign");
    return new MobileAppAssignRequestBuilder(actionUrl, getClient(), null, parameters);
}
}
|
{
"content_hash": "2bee8a425232c53c41d42de949046d52",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 201,
"avg_line_length": 42.083969465648856,
"alnum_prop": 0.7157627426083802,
"repo_name": "microsoftgraph/msgraph-sdk-java",
"id": "567cf219362b01a7b50a4dd89526a1885d8685a9",
"size": "5513",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "src/main/java/com/microsoft/graph/requests/IosLobAppRequestBuilder.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "27286837"
},
{
"name": "PowerShell",
"bytes": "5635"
}
]
}
|
# Makefile.in generated by automake 1.11.3 from Makefile.am.
# Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software
# Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
# Automake file
pkgdatadir = $(datadir)/gtest
pkgincludedir = $(includedir)/gtest
pkglibdir = $(libdir)/gtest
pkglibexecdir = $(libexecdir)/gtest
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-unknown-linux-gnu
host_triplet = x86_64-unknown-linux-gnu
TESTS = samples/sample1_unittest$(EXEEXT) \
samples/sample10_unittest$(EXEEXT) \
test/gtest_all_test$(EXEEXT) $(am__EXEEXT_1)
check_PROGRAMS = samples/sample1_unittest$(EXEEXT) \
samples/sample10_unittest$(EXEEXT) \
test/gtest_all_test$(EXEEXT) $(am__EXEEXT_1)
am__append_1 = test/fused_gtest_test
am__append_2 = test/fused_gtest_test
subdir = .
DIST_COMMON = README $(am__configure_deps) $(pkginclude_HEADERS) \
$(pkginclude_internal_HEADERS) $(srcdir)/Makefile.am \
$(srcdir)/Makefile.in $(top_srcdir)/build-aux/config.h.in \
$(top_srcdir)/configure $(top_srcdir)/scripts/gtest-config.in \
build-aux/config.guess build-aux/config.sub build-aux/depcomp \
build-aux/install-sh build-aux/ltmain.sh build-aux/missing
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/m4/acx_pthread.m4 $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
configure.lineno config.status.lineno
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = $(top_builddir)/build-aux/config.h
CONFIG_CLEAN_FILES = scripts/gtest-config
CONFIG_CLEAN_VPATH_FILES =
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(m4datadir)" \
"$(DESTDIR)$(pkgincludedir)" \
"$(DESTDIR)$(pkginclude_internaldir)"
LTLIBRARIES = $(lib_LTLIBRARIES) $(noinst_LTLIBRARIES)
lib_libgtest_la_LIBADD =
am__dirstamp = $(am__leading_dot)dirstamp
am_lib_libgtest_la_OBJECTS = src/gtest-all.lo
lib_libgtest_la_OBJECTS = $(am_lib_libgtest_la_OBJECTS)
lib_libgtest_main_la_DEPENDENCIES = lib/libgtest.la
am_lib_libgtest_main_la_OBJECTS = src/gtest_main.lo
lib_libgtest_main_la_OBJECTS = $(am_lib_libgtest_main_la_OBJECTS)
samples_libsamples_la_LIBADD =
am_samples_libsamples_la_OBJECTS = samples/sample1.lo \
samples/sample2.lo samples/sample4.lo
samples_libsamples_la_OBJECTS = $(am_samples_libsamples_la_OBJECTS)
am__EXEEXT_1 = test/fused_gtest_test$(EXEEXT)
am_samples_sample10_unittest_OBJECTS = \
samples/sample10_unittest.$(OBJEXT)
samples_sample10_unittest_OBJECTS = \
$(am_samples_sample10_unittest_OBJECTS)
samples_sample10_unittest_DEPENDENCIES = lib/libgtest.la
am_samples_sample1_unittest_OBJECTS = \
samples/sample1_unittest.$(OBJEXT)
samples_sample1_unittest_OBJECTS = \
$(am_samples_sample1_unittest_OBJECTS)
samples_sample1_unittest_DEPENDENCIES = lib/libgtest_main.la \
lib/libgtest.la samples/libsamples.la
am__test_fused_gtest_test_SOURCES_DIST = fused-src/gtest/gtest-all.cc \
fused-src/gtest/gtest.h fused-src/gtest/gtest_main.cc \
samples/sample1.cc samples/sample1_unittest.cc
am__objects_1 = \
fused-src/gtest/test_fused_gtest_test-gtest-all.$(OBJEXT) \
fused-src/gtest/test_fused_gtest_test-gtest_main.$(OBJEXT)
am_test_fused_gtest_test_OBJECTS = $(am__objects_1) \
samples/test_fused_gtest_test-sample1.$(OBJEXT) \
samples/test_fused_gtest_test-sample1_unittest.$(OBJEXT)
test_fused_gtest_test_OBJECTS = $(am_test_fused_gtest_test_OBJECTS)
test_fused_gtest_test_LDADD = $(LDADD)
am_test_gtest_all_test_OBJECTS = test/gtest_all_test.$(OBJEXT)
test_gtest_all_test_OBJECTS = $(am_test_gtest_all_test_OBJECTS)
test_gtest_all_test_DEPENDENCIES = lib/libgtest_main.la \
lib/libgtest.la
DEFAULT_INCLUDES = -I. -I$(top_builddir)/build-aux
depcomp = $(SHELL) $(top_srcdir)/build-aux/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
LTCXXCOMPILE = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
--mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
CXXLD = $(CXX)
CXXLINK = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
--mode=link $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \
$(LDFLAGS) -o $@
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
--mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
CCLD = $(CC)
LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
--mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \
$(LDFLAGS) -o $@
SOURCES = $(lib_libgtest_la_SOURCES) $(lib_libgtest_main_la_SOURCES) \
$(samples_libsamples_la_SOURCES) \
$(samples_sample10_unittest_SOURCES) \
$(samples_sample1_unittest_SOURCES) \
$(test_fused_gtest_test_SOURCES) \
$(test_gtest_all_test_SOURCES)
DIST_SOURCES = $(lib_libgtest_la_SOURCES) \
$(lib_libgtest_main_la_SOURCES) \
$(samples_libsamples_la_SOURCES) \
$(samples_sample10_unittest_SOURCES) \
$(samples_sample1_unittest_SOURCES) \
$(am__test_fused_gtest_test_SOURCES_DIST) \
$(test_gtest_all_test_SOURCES)
DATA = $(m4data_DATA)
HEADERS = $(pkginclude_HEADERS) $(pkginclude_internal_HEADERS)
ETAGS = etags
CTAGS = ctags
am__tty_colors = \
red=; grn=; lgn=; blu=; std=
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
distdir = $(PACKAGE)-$(VERSION)
top_distdir = $(distdir)
am__remove_distdir = \
if test -d "$(distdir)"; then \
find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \
&& rm -rf "$(distdir)" \
|| { sleep 5 && rm -rf "$(distdir)"; }; \
else :; fi
DIST_ARCHIVES = $(distdir).tar.gz $(distdir).tar.bz2 $(distdir).zip
GZIP_ENV = --best
distuninstallcheck_listfiles = find . -type f -print
am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \
| sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$'
distcleancheck_listfiles = find . -type f -print
ACLOCAL = ${SHELL} /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0/build-aux/missing --run aclocal-1.11
AMTAR = $${TAR-tar}
AR = ar
AUTOCONF = ${SHELL} /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0/build-aux/missing --run autoconf
AUTOHEADER = ${SHELL} /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0/build-aux/missing --run autoheader
AUTOMAKE = ${SHELL} /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0/build-aux/missing --run automake-1.11
AWK = mawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -g -O2
CYGPATH_W = echo
DEFS = -DHAVE_CONFIG_H
DEPDIR = .deps
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
FGREP = /bin/grep -F
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
MAKEINFO = ${SHELL} /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0/build-aux/missing --run makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = gtest
PACKAGE_BUGREPORT = googletestframework@googlegroups.com
PACKAGE_NAME = Google C++ Testing Framework
PACKAGE_STRING = Google C++ Testing Framework 1.7.0
PACKAGE_TARNAME = gtest
PACKAGE_URL =
PACKAGE_VERSION = 1.7.0
PATH_SEPARATOR = :
PTHREAD_CC = gcc
PTHREAD_CFLAGS = -pthread
PTHREAD_LIBS =
PYTHON = /usr/bin/python
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 1.7.0
abs_builddir = /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0
abs_srcdir = /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0
abs_top_builddir = /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0
abs_top_srcdir = /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
acx_pthread_config =
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-unknown-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = unknown
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-unknown-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = unknown
htmldir = ${docdir}
includedir = ${prefix}/include
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/gene.ge/personal/github/lib/unit_testing/gtest-1.7.0/build-aux/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = /bin/mkdir -p
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix =
top_builddir = .
top_srcdir = .
ACLOCAL_AMFLAGS = -I m4
# Nonstandard package files for distribution
# Sample files that we don't compile.
# C++ test files that we don't compile directly.
# Python tests that we don't run.
# CMake script
# MSVC project files
# xcode project files
# xcode sample files
# C++Builder project files
EXTRA_DIST = CHANGES CONTRIBUTORS LICENSE \
include/gtest/gtest-param-test.h.pump \
include/gtest/internal/gtest-param-util-generated.h.pump \
include/gtest/internal/gtest-tuple.h.pump \
include/gtest/internal/gtest-type-util.h.pump make/Makefile \
scripts/fuse_gtest_files.py scripts/gen_gtest_pred_impl.py \
scripts/pump.py scripts/test/Makefile $(GTEST_SRC) \
samples/prime_tables.h samples/sample2_unittest.cc \
samples/sample3_unittest.cc samples/sample4_unittest.cc \
samples/sample5_unittest.cc samples/sample6_unittest.cc \
samples/sample7_unittest.cc samples/sample8_unittest.cc \
samples/sample9_unittest.cc test/gtest-death-test_ex_test.cc \
test/gtest-death-test_test.cc test/gtest-filepath_test.cc \
test/gtest-linked_ptr_test.cc test/gtest-listener_test.cc \
test/gtest-message_test.cc test/gtest-options_test.cc \
test/gtest-param-test2_test.cc test/gtest-param-test2_test.cc \
test/gtest-param-test_test.cc test/gtest-param-test_test.cc \
test/gtest-param-test_test.h test/gtest-port_test.cc \
test/gtest_premature_exit_test.cc test/gtest-printers_test.cc \
test/gtest-test-part_test.cc test/gtest-tuple_test.cc \
test/gtest-typed-test2_test.cc test/gtest-typed-test_test.cc \
test/gtest-typed-test_test.h test/gtest-unittest-api_test.cc \
test/gtest_break_on_failure_unittest_.cc \
test/gtest_catch_exceptions_test_.cc test/gtest_color_test_.cc \
test/gtest_env_var_test_.cc test/gtest_environment_test.cc \
test/gtest_filter_unittest_.cc test/gtest_help_test_.cc \
test/gtest_list_tests_unittest_.cc test/gtest_main_unittest.cc \
test/gtest_no_test_unittest.cc test/gtest_output_test_.cc \
test/gtest_pred_impl_unittest.cc test/gtest_prod_test.cc \
test/gtest_repeat_test.cc test/gtest_shuffle_test_.cc \
test/gtest_sole_header_test.cc test/gtest_stress_test.cc \
test/gtest_throw_on_failure_ex_test.cc \
test/gtest_throw_on_failure_test_.cc \
test/gtest_uninitialized_test_.cc test/gtest_unittest.cc \
test/gtest_unittest.cc test/gtest_xml_outfile1_test_.cc \
test/gtest_xml_outfile2_test_.cc \
test/gtest_xml_output_unittest_.cc test/production.cc \
test/production.h test/gtest_break_on_failure_unittest.py \
test/gtest_catch_exceptions_test.py test/gtest_color_test.py \
test/gtest_env_var_test.py test/gtest_filter_unittest.py \
test/gtest_help_test.py test/gtest_list_tests_unittest.py \
test/gtest_output_test.py \
test/gtest_output_test_golden_lin.txt \
test/gtest_shuffle_test.py test/gtest_test_utils.py \
test/gtest_throw_on_failure_test.py \
test/gtest_uninitialized_test.py \
test/gtest_xml_outfiles_test.py \
test/gtest_xml_output_unittest.py test/gtest_xml_test_utils.py \
CMakeLists.txt cmake/internal_utils.cmake msvc/gtest-md.sln \
msvc/gtest-md.vcproj msvc/gtest.sln msvc/gtest.vcproj \
msvc/gtest_main-md.vcproj msvc/gtest_main.vcproj \
msvc/gtest_prod_test-md.vcproj msvc/gtest_prod_test.vcproj \
msvc/gtest_unittest-md.vcproj msvc/gtest_unittest.vcproj \
xcode/Config/DebugProject.xcconfig \
xcode/Config/FrameworkTarget.xcconfig \
xcode/Config/General.xcconfig \
xcode/Config/ReleaseProject.xcconfig \
xcode/Config/StaticLibraryTarget.xcconfig \
xcode/Config/TestTarget.xcconfig xcode/Resources/Info.plist \
xcode/Scripts/runtests.sh xcode/Scripts/versiongenerate.py \
xcode/gtest.xcodeproj/project.pbxproj \
xcode/Samples/FrameworkSample/Info.plist \
xcode/Samples/FrameworkSample/WidgetFramework.xcodeproj/project.pbxproj \
xcode/Samples/FrameworkSample/runtests.sh \
xcode/Samples/FrameworkSample/widget.cc \
xcode/Samples/FrameworkSample/widget.h \
xcode/Samples/FrameworkSample/widget_test.cc \
codegear/gtest.cbproj codegear/gtest.groupproj \
codegear/gtest_all.cc codegear/gtest_link.cc \
codegear/gtest_main.cbproj codegear/gtest_unittest.cbproj \
$(m4data_DATA)
# gtest source files that we don't compile directly. They are
# #included by gtest-all.cc.
GTEST_SRC = \
src/gtest-death-test.cc \
src/gtest-filepath.cc \
src/gtest-internal-inl.h \
src/gtest-port.cc \
src/gtest-printers.cc \
src/gtest-test-part.cc \
src/gtest-typed-test.cc \
src/gtest.cc
# Distribute and install M4 macro
m4datadir = $(datadir)/aclocal
m4data_DATA = m4/gtest.m4
# We define the global AM_CPPFLAGS as everything we compile includes from these
# directories.
AM_CPPFLAGS = -I$(srcdir) -I$(srcdir)/include
#AM_CXXFLAGS = -DGTEST_HAS_PTHREAD=0
# Modifies compiler and linker flags for pthreads compatibility.
AM_CXXFLAGS = -pthread -DGTEST_HAS_PTHREAD=1
AM_LIBS =
# Build rules for libraries.
lib_LTLIBRARIES = lib/libgtest.la lib/libgtest_main.la
lib_libgtest_la_SOURCES = src/gtest-all.cc
pkginclude_HEADERS = \
include/gtest/gtest-death-test.h \
include/gtest/gtest-message.h \
include/gtest/gtest-param-test.h \
include/gtest/gtest-printers.h \
include/gtest/gtest-spi.h \
include/gtest/gtest-test-part.h \
include/gtest/gtest-typed-test.h \
include/gtest/gtest.h \
include/gtest/gtest_pred_impl.h \
include/gtest/gtest_prod.h
pkginclude_internaldir = $(pkgincludedir)/internal
pkginclude_internal_HEADERS = \
include/gtest/internal/gtest-death-test-internal.h \
include/gtest/internal/gtest-filepath.h \
include/gtest/internal/gtest-internal.h \
include/gtest/internal/gtest-linked_ptr.h \
include/gtest/internal/gtest-param-util-generated.h \
include/gtest/internal/gtest-param-util.h \
include/gtest/internal/gtest-port.h \
include/gtest/internal/gtest-string.h \
include/gtest/internal/gtest-tuple.h \
include/gtest/internal/gtest-type-util.h
lib_libgtest_main_la_SOURCES = src/gtest_main.cc
lib_libgtest_main_la_LIBADD = lib/libgtest.la
# Build rules for samples and tests. Automake's naming for some of
# these variables isn't terribly obvious, so this is a brief
# reference:
#
# TESTS -- Programs run automatically by "make check"
# check_PROGRAMS -- Programs built by "make check" but not necessarily run
noinst_LTLIBRARIES = samples/libsamples.la
samples_libsamples_la_SOURCES = \
samples/sample1.cc \
samples/sample1.h \
samples/sample2.cc \
samples/sample2.h \
samples/sample3-inl.h \
samples/sample4.cc \
samples/sample4.h
TESTS_ENVIRONMENT = GTEST_SOURCE_DIR="$(srcdir)/test" \
GTEST_BUILD_DIR="$(top_builddir)/test"
samples_sample1_unittest_SOURCES = samples/sample1_unittest.cc
samples_sample1_unittest_LDADD = lib/libgtest_main.la \
lib/libgtest.la \
samples/libsamples.la
samples_sample10_unittest_SOURCES = samples/sample10_unittest.cc
samples_sample10_unittest_LDADD = lib/libgtest.la
test_gtest_all_test_SOURCES = test/gtest_all_test.cc
test_gtest_all_test_LDADD = lib/libgtest_main.la \
lib/libgtest.la
# Tests that fused gtest files compile and work.
FUSED_GTEST_SRC = \
fused-src/gtest/gtest-all.cc \
fused-src/gtest/gtest.h \
fused-src/gtest/gtest_main.cc
test_fused_gtest_test_SOURCES = $(FUSED_GTEST_SRC) \
samples/sample1.cc samples/sample1_unittest.cc
test_fused_gtest_test_CPPFLAGS = -I"$(srcdir)/fused-src"
# Death tests may produce core dumps in the build directory. In case
# this happens, clean them to keep distcleancheck happy.
CLEANFILES = core
all: all-am
.SUFFIXES:
.SUFFIXES: .cc .lo .o .obj
am--refresh: Makefile
@:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \
$(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \
&& exit 0; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign Makefile
.PRECIOUS: Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
echo ' $(SHELL) ./config.status'; \
$(SHELL) ./config.status;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
$(SHELL) ./config.status --recheck
$(top_srcdir)/configure: $(am__configure_deps)
$(am__cd) $(srcdir) && $(AUTOCONF)
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
$(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
$(am__aclocal_m4_deps):
build-aux/config.h: build-aux/stamp-h1
@if test ! -f $@; then rm -f build-aux/stamp-h1; else :; fi
@if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) build-aux/stamp-h1; else :; fi
build-aux/stamp-h1: $(top_srcdir)/build-aux/config.h.in $(top_builddir)/config.status
@rm -f build-aux/stamp-h1
cd $(top_builddir) && $(SHELL) ./config.status build-aux/config.h
$(top_srcdir)/build-aux/config.h.in: $(am__configure_deps)
($(am__cd) $(top_srcdir) && $(AUTOHEADER))
rm -f build-aux/stamp-h1
touch $@
distclean-hdr:
-rm -f build-aux/config.h build-aux/stamp-h1
scripts/gtest-config: $(top_builddir)/config.status $(top_srcdir)/scripts/gtest-config.in
cd $(top_builddir) && $(SHELL) ./config.status $@
install-libLTLIBRARIES: $(lib_LTLIBRARIES)
@$(NORMAL_INSTALL)
test -z "$(libdir)" || $(MKDIR_P) "$(DESTDIR)$(libdir)"
@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
list2=; for p in $$list; do \
if test -f $$p; then \
list2="$$list2 $$p"; \
else :; fi; \
done; \
test -z "$$list2" || { \
echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \
$(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \
}
uninstall-libLTLIBRARIES:
@$(NORMAL_UNINSTALL)
@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
for p in $$list; do \
$(am__strip_dir) \
echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$f'"; \
$(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$f"; \
done
clean-libLTLIBRARIES:
-test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES)
@list='$(lib_LTLIBRARIES)'; for p in $$list; do \
dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
test "$$dir" != "$$p" || dir=.; \
echo "rm -f \"$${dir}/so_locations\""; \
rm -f "$${dir}/so_locations"; \
done
clean-noinstLTLIBRARIES:
-test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES)
@list='$(noinst_LTLIBRARIES)'; for p in $$list; do \
dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
test "$$dir" != "$$p" || dir=.; \
echo "rm -f \"$${dir}/so_locations\""; \
rm -f "$${dir}/so_locations"; \
done
src/$(am__dirstamp):
@$(MKDIR_P) src
@: > src/$(am__dirstamp)
src/$(DEPDIR)/$(am__dirstamp):
@$(MKDIR_P) src/$(DEPDIR)
@: > src/$(DEPDIR)/$(am__dirstamp)
src/gtest-all.lo: src/$(am__dirstamp) src/$(DEPDIR)/$(am__dirstamp)
lib/$(am__dirstamp):
@$(MKDIR_P) lib
@: > lib/$(am__dirstamp)
lib/libgtest.la: $(lib_libgtest_la_OBJECTS) $(lib_libgtest_la_DEPENDENCIES) $(EXTRA_lib_libgtest_la_DEPENDENCIES) lib/$(am__dirstamp)
$(CXXLINK) -rpath $(libdir) $(lib_libgtest_la_OBJECTS) $(lib_libgtest_la_LIBADD) $(LIBS)
src/gtest_main.lo: src/$(am__dirstamp) src/$(DEPDIR)/$(am__dirstamp)
lib/libgtest_main.la: $(lib_libgtest_main_la_OBJECTS) $(lib_libgtest_main_la_DEPENDENCIES) $(EXTRA_lib_libgtest_main_la_DEPENDENCIES) lib/$(am__dirstamp)
$(CXXLINK) -rpath $(libdir) $(lib_libgtest_main_la_OBJECTS) $(lib_libgtest_main_la_LIBADD) $(LIBS)
samples/$(am__dirstamp):
@$(MKDIR_P) samples
@: > samples/$(am__dirstamp)
samples/$(DEPDIR)/$(am__dirstamp):
@$(MKDIR_P) samples/$(DEPDIR)
@: > samples/$(DEPDIR)/$(am__dirstamp)
samples/sample1.lo: samples/$(am__dirstamp) \
samples/$(DEPDIR)/$(am__dirstamp)
samples/sample2.lo: samples/$(am__dirstamp) \
samples/$(DEPDIR)/$(am__dirstamp)
samples/sample4.lo: samples/$(am__dirstamp) \
samples/$(DEPDIR)/$(am__dirstamp)
samples/libsamples.la: $(samples_libsamples_la_OBJECTS) $(samples_libsamples_la_DEPENDENCIES) $(EXTRA_samples_libsamples_la_DEPENDENCIES) samples/$(am__dirstamp)
$(CXXLINK) $(samples_libsamples_la_OBJECTS) $(samples_libsamples_la_LIBADD) $(LIBS)
clean-checkPROGRAMS:
@list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
echo " rm -f" $$list; \
rm -f $$list || exit $$?; \
test -n "$(EXEEXT)" || exit 0; \
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
echo " rm -f" $$list; \
rm -f $$list
samples/sample10_unittest.$(OBJEXT): samples/$(am__dirstamp) \
samples/$(DEPDIR)/$(am__dirstamp)
samples/sample10_unittest$(EXEEXT): $(samples_sample10_unittest_OBJECTS) $(samples_sample10_unittest_DEPENDENCIES) $(EXTRA_samples_sample10_unittest_DEPENDENCIES) samples/$(am__dirstamp)
@rm -f samples/sample10_unittest$(EXEEXT)
$(CXXLINK) $(samples_sample10_unittest_OBJECTS) $(samples_sample10_unittest_LDADD) $(LIBS)
samples/sample1_unittest.$(OBJEXT): samples/$(am__dirstamp) \
samples/$(DEPDIR)/$(am__dirstamp)
samples/sample1_unittest$(EXEEXT): $(samples_sample1_unittest_OBJECTS) $(samples_sample1_unittest_DEPENDENCIES) $(EXTRA_samples_sample1_unittest_DEPENDENCIES) samples/$(am__dirstamp)
@rm -f samples/sample1_unittest$(EXEEXT)
$(CXXLINK) $(samples_sample1_unittest_OBJECTS) $(samples_sample1_unittest_LDADD) $(LIBS)
fused-src/gtest/$(am__dirstamp):
@$(MKDIR_P) fused-src/gtest
@: > fused-src/gtest/$(am__dirstamp)
fused-src/gtest/$(DEPDIR)/$(am__dirstamp):
@$(MKDIR_P) fused-src/gtest/$(DEPDIR)
@: > fused-src/gtest/$(DEPDIR)/$(am__dirstamp)
fused-src/gtest/test_fused_gtest_test-gtest-all.$(OBJEXT): \
fused-src/gtest/$(am__dirstamp) \
fused-src/gtest/$(DEPDIR)/$(am__dirstamp)
fused-src/gtest/test_fused_gtest_test-gtest_main.$(OBJEXT): \
fused-src/gtest/$(am__dirstamp) \
fused-src/gtest/$(DEPDIR)/$(am__dirstamp)
samples/test_fused_gtest_test-sample1.$(OBJEXT): \
samples/$(am__dirstamp) samples/$(DEPDIR)/$(am__dirstamp)
samples/test_fused_gtest_test-sample1_unittest.$(OBJEXT): \
samples/$(am__dirstamp) samples/$(DEPDIR)/$(am__dirstamp)
test/$(am__dirstamp):
@$(MKDIR_P) test
@: > test/$(am__dirstamp)
test/fused_gtest_test$(EXEEXT): $(test_fused_gtest_test_OBJECTS) $(test_fused_gtest_test_DEPENDENCIES) $(EXTRA_test_fused_gtest_test_DEPENDENCIES) test/$(am__dirstamp)
@rm -f test/fused_gtest_test$(EXEEXT)
$(CXXLINK) $(test_fused_gtest_test_OBJECTS) $(test_fused_gtest_test_LDADD) $(LIBS)
test/$(DEPDIR)/$(am__dirstamp):
@$(MKDIR_P) test/$(DEPDIR)
@: > test/$(DEPDIR)/$(am__dirstamp)
test/gtest_all_test.$(OBJEXT): test/$(am__dirstamp) \
test/$(DEPDIR)/$(am__dirstamp)
test/gtest_all_test$(EXEEXT): $(test_gtest_all_test_OBJECTS) $(test_gtest_all_test_DEPENDENCIES) $(EXTRA_test_gtest_all_test_DEPENDENCIES) test/$(am__dirstamp)
@rm -f test/gtest_all_test$(EXEEXT)
$(CXXLINK) $(test_gtest_all_test_OBJECTS) $(test_gtest_all_test_LDADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
-rm -f fused-src/gtest/test_fused_gtest_test-gtest-all.$(OBJEXT)
-rm -f fused-src/gtest/test_fused_gtest_test-gtest_main.$(OBJEXT)
-rm -f samples/sample1.$(OBJEXT)
-rm -f samples/sample1.lo
-rm -f samples/sample10_unittest.$(OBJEXT)
-rm -f samples/sample1_unittest.$(OBJEXT)
-rm -f samples/sample2.$(OBJEXT)
-rm -f samples/sample2.lo
-rm -f samples/sample4.$(OBJEXT)
-rm -f samples/sample4.lo
-rm -f samples/test_fused_gtest_test-sample1.$(OBJEXT)
-rm -f samples/test_fused_gtest_test-sample1_unittest.$(OBJEXT)
-rm -f src/gtest-all.$(OBJEXT)
-rm -f src/gtest-all.lo
-rm -f src/gtest_main.$(OBJEXT)
-rm -f src/gtest_main.lo
-rm -f test/gtest_all_test.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
include fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest-all.Po
include fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest_main.Po
include samples/$(DEPDIR)/sample1.Plo
include samples/$(DEPDIR)/sample10_unittest.Po
include samples/$(DEPDIR)/sample1_unittest.Po
include samples/$(DEPDIR)/sample2.Plo
include samples/$(DEPDIR)/sample4.Plo
include samples/$(DEPDIR)/test_fused_gtest_test-sample1.Po
include samples/$(DEPDIR)/test_fused_gtest_test-sample1_unittest.Po
include src/$(DEPDIR)/gtest-all.Plo
include src/$(DEPDIR)/gtest_main.Plo
include test/$(DEPDIR)/gtest_all_test.Po
.cc.o:
depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\
$(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
$(am__mv) $$depbase.Tpo $$depbase.Po
# source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXXCOMPILE) -c -o $@ $<
.cc.obj:
depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`;\
$(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ `$(CYGPATH_W) '$<'` &&\
$(am__mv) $$depbase.Tpo $$depbase.Po
# source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.cc.lo:
depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`;\
$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
$(am__mv) $$depbase.Tpo $$depbase.Plo
# source='$<' object='$@' libtool=yes \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(LTCXXCOMPILE) -c -o $@ $<
fused-src/gtest/test_fused_gtest_test-gtest-all.o: fused-src/gtest/gtest-all.cc
$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT fused-src/gtest/test_fused_gtest_test-gtest-all.o -MD -MP -MF fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest-all.Tpo -c -o fused-src/gtest/test_fused_gtest_test-gtest-all.o `test -f 'fused-src/gtest/gtest-all.cc' || echo '$(srcdir)/'`fused-src/gtest/gtest-all.cc
$(am__mv) fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest-all.Tpo fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest-all.Po
# source='fused-src/gtest/gtest-all.cc' object='fused-src/gtest/test_fused_gtest_test-gtest-all.o' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o fused-src/gtest/test_fused_gtest_test-gtest-all.o `test -f 'fused-src/gtest/gtest-all.cc' || echo '$(srcdir)/'`fused-src/gtest/gtest-all.cc
fused-src/gtest/test_fused_gtest_test-gtest-all.obj: fused-src/gtest/gtest-all.cc
$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT fused-src/gtest/test_fused_gtest_test-gtest-all.obj -MD -MP -MF fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest-all.Tpo -c -o fused-src/gtest/test_fused_gtest_test-gtest-all.obj `if test -f 'fused-src/gtest/gtest-all.cc'; then $(CYGPATH_W) 'fused-src/gtest/gtest-all.cc'; else $(CYGPATH_W) '$(srcdir)/fused-src/gtest/gtest-all.cc'; fi`
$(am__mv) fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest-all.Tpo fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest-all.Po
# source='fused-src/gtest/gtest-all.cc' object='fused-src/gtest/test_fused_gtest_test-gtest-all.obj' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o fused-src/gtest/test_fused_gtest_test-gtest-all.obj `if test -f 'fused-src/gtest/gtest-all.cc'; then $(CYGPATH_W) 'fused-src/gtest/gtest-all.cc'; else $(CYGPATH_W) '$(srcdir)/fused-src/gtest/gtest-all.cc'; fi`
fused-src/gtest/test_fused_gtest_test-gtest_main.o: fused-src/gtest/gtest_main.cc
$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT fused-src/gtest/test_fused_gtest_test-gtest_main.o -MD -MP -MF fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest_main.Tpo -c -o fused-src/gtest/test_fused_gtest_test-gtest_main.o `test -f 'fused-src/gtest/gtest_main.cc' || echo '$(srcdir)/'`fused-src/gtest/gtest_main.cc
$(am__mv) fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest_main.Tpo fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest_main.Po
# source='fused-src/gtest/gtest_main.cc' object='fused-src/gtest/test_fused_gtest_test-gtest_main.o' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o fused-src/gtest/test_fused_gtest_test-gtest_main.o `test -f 'fused-src/gtest/gtest_main.cc' || echo '$(srcdir)/'`fused-src/gtest/gtest_main.cc
fused-src/gtest/test_fused_gtest_test-gtest_main.obj: fused-src/gtest/gtest_main.cc
$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT fused-src/gtest/test_fused_gtest_test-gtest_main.obj -MD -MP -MF fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest_main.Tpo -c -o fused-src/gtest/test_fused_gtest_test-gtest_main.obj `if test -f 'fused-src/gtest/gtest_main.cc'; then $(CYGPATH_W) 'fused-src/gtest/gtest_main.cc'; else $(CYGPATH_W) '$(srcdir)/fused-src/gtest/gtest_main.cc'; fi`
$(am__mv) fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest_main.Tpo fused-src/gtest/$(DEPDIR)/test_fused_gtest_test-gtest_main.Po
# source='fused-src/gtest/gtest_main.cc' object='fused-src/gtest/test_fused_gtest_test-gtest_main.obj' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o fused-src/gtest/test_fused_gtest_test-gtest_main.obj `if test -f 'fused-src/gtest/gtest_main.cc'; then $(CYGPATH_W) 'fused-src/gtest/gtest_main.cc'; else $(CYGPATH_W) '$(srcdir)/fused-src/gtest/gtest_main.cc'; fi`
samples/test_fused_gtest_test-sample1.o: samples/sample1.cc
$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT samples/test_fused_gtest_test-sample1.o -MD -MP -MF samples/$(DEPDIR)/test_fused_gtest_test-sample1.Tpo -c -o samples/test_fused_gtest_test-sample1.o `test -f 'samples/sample1.cc' || echo '$(srcdir)/'`samples/sample1.cc
$(am__mv) samples/$(DEPDIR)/test_fused_gtest_test-sample1.Tpo samples/$(DEPDIR)/test_fused_gtest_test-sample1.Po
# source='samples/sample1.cc' object='samples/test_fused_gtest_test-sample1.o' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o samples/test_fused_gtest_test-sample1.o `test -f 'samples/sample1.cc' || echo '$(srcdir)/'`samples/sample1.cc
samples/test_fused_gtest_test-sample1.obj: samples/sample1.cc
$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT samples/test_fused_gtest_test-sample1.obj -MD -MP -MF samples/$(DEPDIR)/test_fused_gtest_test-sample1.Tpo -c -o samples/test_fused_gtest_test-sample1.obj `if test -f 'samples/sample1.cc'; then $(CYGPATH_W) 'samples/sample1.cc'; else $(CYGPATH_W) '$(srcdir)/samples/sample1.cc'; fi`
$(am__mv) samples/$(DEPDIR)/test_fused_gtest_test-sample1.Tpo samples/$(DEPDIR)/test_fused_gtest_test-sample1.Po
# source='samples/sample1.cc' object='samples/test_fused_gtest_test-sample1.obj' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o samples/test_fused_gtest_test-sample1.obj `if test -f 'samples/sample1.cc'; then $(CYGPATH_W) 'samples/sample1.cc'; else $(CYGPATH_W) '$(srcdir)/samples/sample1.cc'; fi`
samples/test_fused_gtest_test-sample1_unittest.o: samples/sample1_unittest.cc
$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT samples/test_fused_gtest_test-sample1_unittest.o -MD -MP -MF samples/$(DEPDIR)/test_fused_gtest_test-sample1_unittest.Tpo -c -o samples/test_fused_gtest_test-sample1_unittest.o `test -f 'samples/sample1_unittest.cc' || echo '$(srcdir)/'`samples/sample1_unittest.cc
$(am__mv) samples/$(DEPDIR)/test_fused_gtest_test-sample1_unittest.Tpo samples/$(DEPDIR)/test_fused_gtest_test-sample1_unittest.Po
# source='samples/sample1_unittest.cc' object='samples/test_fused_gtest_test-sample1_unittest.o' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o samples/test_fused_gtest_test-sample1_unittest.o `test -f 'samples/sample1_unittest.cc' || echo '$(srcdir)/'`samples/sample1_unittest.cc
samples/test_fused_gtest_test-sample1_unittest.obj: samples/sample1_unittest.cc
$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT samples/test_fused_gtest_test-sample1_unittest.obj -MD -MP -MF samples/$(DEPDIR)/test_fused_gtest_test-sample1_unittest.Tpo -c -o samples/test_fused_gtest_test-sample1_unittest.obj `if test -f 'samples/sample1_unittest.cc'; then $(CYGPATH_W) 'samples/sample1_unittest.cc'; else $(CYGPATH_W) '$(srcdir)/samples/sample1_unittest.cc'; fi`
$(am__mv) samples/$(DEPDIR)/test_fused_gtest_test-sample1_unittest.Tpo samples/$(DEPDIR)/test_fused_gtest_test-sample1_unittest.Po
# source='samples/sample1_unittest.cc' object='samples/test_fused_gtest_test-sample1_unittest.obj' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(test_fused_gtest_test_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o samples/test_fused_gtest_test-sample1_unittest.obj `if test -f 'samples/sample1_unittest.cc'; then $(CYGPATH_W) 'samples/sample1_unittest.cc'; else $(CYGPATH_W) '$(srcdir)/samples/sample1_unittest.cc'; fi`
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
-rm -rf lib/.libs lib/_libs
-rm -rf samples/.libs samples/_libs
-rm -rf src/.libs src/_libs
-rm -rf test/.libs test/_libs
distclean-libtool:
-rm -f libtool config.lt
install-m4dataDATA: $(m4data_DATA)
@$(NORMAL_INSTALL)
test -z "$(m4datadir)" || $(MKDIR_P) "$(DESTDIR)$(m4datadir)"
@list='$(m4data_DATA)'; test -n "$(m4datadir)" || list=; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(m4datadir)'"; \
$(INSTALL_DATA) $$files "$(DESTDIR)$(m4datadir)" || exit $$?; \
done
uninstall-m4dataDATA:
@$(NORMAL_UNINSTALL)
@list='$(m4data_DATA)'; test -n "$(m4datadir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(m4datadir)'; $(am__uninstall_files_from_dir)
install-pkgincludeHEADERS: $(pkginclude_HEADERS)
@$(NORMAL_INSTALL)
test -z "$(pkgincludedir)" || $(MKDIR_P) "$(DESTDIR)$(pkgincludedir)"
@list='$(pkginclude_HEADERS)'; test -n "$(pkgincludedir)" || list=; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(pkgincludedir)'"; \
$(INSTALL_HEADER) $$files "$(DESTDIR)$(pkgincludedir)" || exit $$?; \
done
uninstall-pkgincludeHEADERS:
@$(NORMAL_UNINSTALL)
@list='$(pkginclude_HEADERS)'; test -n "$(pkgincludedir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(pkgincludedir)'; $(am__uninstall_files_from_dir)
install-pkginclude_internalHEADERS: $(pkginclude_internal_HEADERS)
@$(NORMAL_INSTALL)
test -z "$(pkginclude_internaldir)" || $(MKDIR_P) "$(DESTDIR)$(pkginclude_internaldir)"
@list='$(pkginclude_internal_HEADERS)'; test -n "$(pkginclude_internaldir)" || list=; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(pkginclude_internaldir)'"; \
$(INSTALL_HEADER) $$files "$(DESTDIR)$(pkginclude_internaldir)" || exit $$?; \
done
uninstall-pkginclude_internalHEADERS:
@$(NORMAL_UNINSTALL)
@list='$(pkginclude_internal_HEADERS)'; test -n "$(pkginclude_internaldir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(pkginclude_internaldir)'; $(am__uninstall_files_from_dir)
ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) '{ files[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in files) print i; }; }'`; \
mkid -fID $$unique
tags: TAGS
TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
set x; \
here=`pwd`; \
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) '{ files[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in files) print i; }; }'`; \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: CTAGS
CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) '{ files[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in files) print i; }; }'`; \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
check-TESTS: $(TESTS)
@failed=0; all=0; xfail=0; xpass=0; skip=0; \
srcdir=$(srcdir); export srcdir; \
list=' $(TESTS) '; \
$(am__tty_colors); \
if test -n "$$list"; then \
for tst in $$list; do \
if test -f ./$$tst; then dir=./; \
elif test -f $$tst; then dir=; \
else dir="$(srcdir)/"; fi; \
if $(TESTS_ENVIRONMENT) $${dir}$$tst; then \
all=`expr $$all + 1`; \
case " $(XFAIL_TESTS) " in \
*[\ \ ]$$tst[\ \ ]*) \
xpass=`expr $$xpass + 1`; \
failed=`expr $$failed + 1`; \
col=$$red; res=XPASS; \
;; \
*) \
col=$$grn; res=PASS; \
;; \
esac; \
elif test $$? -ne 77; then \
all=`expr $$all + 1`; \
case " $(XFAIL_TESTS) " in \
*[\ \ ]$$tst[\ \ ]*) \
xfail=`expr $$xfail + 1`; \
col=$$lgn; res=XFAIL; \
;; \
*) \
failed=`expr $$failed + 1`; \
col=$$red; res=FAIL; \
;; \
esac; \
else \
skip=`expr $$skip + 1`; \
col=$$blu; res=SKIP; \
fi; \
echo "$${col}$$res$${std}: $$tst"; \
done; \
if test "$$all" -eq 1; then \
tests="test"; \
All=""; \
else \
tests="tests"; \
All="All "; \
fi; \
if test "$$failed" -eq 0; then \
if test "$$xfail" -eq 0; then \
banner="$$All$$all $$tests passed"; \
else \
if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \
banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \
fi; \
else \
if test "$$xpass" -eq 0; then \
banner="$$failed of $$all $$tests failed"; \
else \
if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \
banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \
fi; \
fi; \
dashes="$$banner"; \
skipped=""; \
if test "$$skip" -ne 0; then \
if test "$$skip" -eq 1; then \
skipped="($$skip test was not run)"; \
else \
skipped="($$skip tests were not run)"; \
fi; \
test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \
dashes="$$skipped"; \
fi; \
report=""; \
if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \
report="Please report to $(PACKAGE_BUGREPORT)"; \
test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \
dashes="$$report"; \
fi; \
dashes=`echo "$$dashes" | sed s/./=/g`; \
if test "$$failed" -eq 0; then \
col="$$grn"; \
else \
col="$$red"; \
fi; \
echo "$${col}$$dashes$${std}"; \
echo "$${col}$$banner$${std}"; \
test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \
test -z "$$report" || echo "$${col}$$report$${std}"; \
echo "$${col}$$dashes$${std}"; \
test "$$failed" -eq 0; \
else :; fi
distdir: $(DISTFILES)
$(am__remove_distdir)
test -d "$(distdir)" || mkdir "$(distdir)"
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
-test -n "$(am__skip_mode_fix)" \
|| find "$(distdir)" -type d ! -perm -755 \
-exec chmod u+rwx,go+rx {} \; -o \
! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
! -type d ! -perm -400 -exec chmod a+r {} \; -o \
! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \
|| chmod -R a+r "$(distdir)"
dist-gzip: distdir
tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
$(am__remove_distdir)
dist-bzip2: distdir
tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2
$(am__remove_distdir)
dist-lzip: distdir
tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz
$(am__remove_distdir)
dist-lzma: distdir
tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma
$(am__remove_distdir)
dist-xz: distdir
tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz
$(am__remove_distdir)
dist-tarZ: distdir
tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z
$(am__remove_distdir)
dist-shar: distdir
shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz
$(am__remove_distdir)
dist-zip: distdir
-rm -f $(distdir).zip
zip -rq $(distdir).zip $(distdir)
$(am__remove_distdir)
dist dist-all: distdir
tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2
-rm -f $(distdir).zip
zip -rq $(distdir).zip $(distdir)
$(am__remove_distdir)
# This target untars the dist file and tries a VPATH configuration. Then
# it guarantees that the distribution is self-contained by making another
# tarfile.
distcheck: dist
case '$(DIST_ARCHIVES)' in \
*.tar.gz*) \
GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\
*.tar.bz2*) \
bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\
*.tar.lzma*) \
lzma -dc $(distdir).tar.lzma | $(am__untar) ;;\
*.tar.lz*) \
lzip -dc $(distdir).tar.lz | $(am__untar) ;;\
*.tar.xz*) \
xz -dc $(distdir).tar.xz | $(am__untar) ;;\
*.tar.Z*) \
uncompress -c $(distdir).tar.Z | $(am__untar) ;;\
*.shar.gz*) \
GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\
*.zip*) \
unzip $(distdir).zip ;;\
esac
chmod -R a-w $(distdir); chmod a+w $(distdir)
mkdir $(distdir)/_build
mkdir $(distdir)/_inst
chmod a-w $(distdir)
test -d $(distdir)/_build || exit 0; \
dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \
&& dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
&& am__cwd=`pwd` \
&& $(am__cd) $(distdir)/_build \
&& ../configure --srcdir=.. --prefix="$$dc_install_base" \
$(AM_DISTCHECK_CONFIGURE_FLAGS) \
$(DISTCHECK_CONFIGURE_FLAGS) \
&& $(MAKE) $(AM_MAKEFLAGS) \
&& $(MAKE) $(AM_MAKEFLAGS) dvi \
&& $(MAKE) $(AM_MAKEFLAGS) check \
&& $(MAKE) $(AM_MAKEFLAGS) install \
&& $(MAKE) $(AM_MAKEFLAGS) installcheck \
&& $(MAKE) $(AM_MAKEFLAGS) uninstall \
&& $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
distuninstallcheck \
&& chmod -R a-w "$$dc_install_base" \
&& ({ \
(cd ../.. && umask 077 && mkdir "$$dc_destdir") \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
} || { rm -rf "$$dc_destdir"; exit 1; }) \
&& rm -rf "$$dc_destdir" \
&& $(MAKE) $(AM_MAKEFLAGS) dist \
&& rm -rf $(DIST_ARCHIVES) \
&& $(MAKE) $(AM_MAKEFLAGS) distcleancheck \
&& cd "$$am__cwd" \
|| exit 1
$(am__remove_distdir)
@(echo "$(distdir) archives ready for distribution: "; \
list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \
sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x'
distuninstallcheck:
@test -n '$(distuninstallcheck_dir)' || { \
echo 'ERROR: trying to run $@ with an empty' \
'$$(distuninstallcheck_dir)' >&2; \
exit 1; \
}; \
$(am__cd) '$(distuninstallcheck_dir)' || { \
echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \
exit 1; \
}; \
test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \
|| { echo "ERROR: files left after uninstall:" ; \
if test -n "$(DESTDIR)"; then \
echo " (check DESTDIR support)"; \
fi ; \
$(distuninstallcheck_listfiles) ; \
exit 1; } >&2
distcleancheck: distclean
@if test '$(srcdir)' = . ; then \
echo "ERROR: distcleancheck can only run from a VPATH build" ; \
exit 1 ; \
fi
@test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
|| { echo "ERROR: files left in build directory after distclean:" ; \
$(distcleancheck_listfiles) ; \
exit 1; } >&2
check-am: all-am
$(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS)
$(MAKE) $(AM_MAKEFLAGS) check-TESTS
check: check-am
all-am: Makefile $(LTLIBRARIES) $(DATA) $(HEADERS)
installdirs:
for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(m4datadir)" "$(DESTDIR)$(pkgincludedir)" "$(DESTDIR)$(pkginclude_internaldir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
-test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
-rm -f fused-src/gtest/$(DEPDIR)/$(am__dirstamp)
-rm -f fused-src/gtest/$(am__dirstamp)
-rm -f lib/$(am__dirstamp)
-rm -f samples/$(DEPDIR)/$(am__dirstamp)
-rm -f samples/$(am__dirstamp)
-rm -f src/$(DEPDIR)/$(am__dirstamp)
-rm -f src/$(am__dirstamp)
-rm -f test/$(DEPDIR)/$(am__dirstamp)
-rm -f test/$(am__dirstamp)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
#maintainer-clean-local:
clean: clean-am
clean-am: clean-checkPROGRAMS clean-generic clean-libLTLIBRARIES \
clean-libtool clean-noinstLTLIBRARIES mostlyclean-am
distclean: distclean-am
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
-rm -rf fused-src/gtest/$(DEPDIR) samples/$(DEPDIR) src/$(DEPDIR) test/$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-hdr distclean-libtool distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am: install-data-local install-m4dataDATA \
install-pkgincludeHEADERS install-pkginclude_internalHEADERS
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am: install-exec-local install-libLTLIBRARIES
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
-rm -rf $(top_srcdir)/autom4te.cache
-rm -rf fused-src/gtest/$(DEPDIR) samples/$(DEPDIR) src/$(DEPDIR) test/$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic \
maintainer-clean-local
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-libLTLIBRARIES uninstall-m4dataDATA \
uninstall-pkgincludeHEADERS \
uninstall-pkginclude_internalHEADERS
.MAKE: check-am install-am install-strip
.PHONY: CTAGS GTAGS all all-am am--refresh check check-TESTS check-am \
clean clean-checkPROGRAMS clean-generic clean-libLTLIBRARIES \
clean-libtool clean-noinstLTLIBRARIES ctags dist dist-all \
dist-bzip2 dist-gzip dist-lzip dist-lzma dist-shar dist-tarZ \
dist-xz dist-zip distcheck distclean distclean-compile \
distclean-generic distclean-hdr distclean-libtool \
distclean-tags distcleancheck distdir distuninstallcheck dvi \
dvi-am html html-am info info-am install install-am \
install-data install-data-am install-data-local install-dvi \
install-dvi-am install-exec install-exec-am install-exec-local \
install-html install-html-am install-info install-info-am \
install-libLTLIBRARIES install-m4dataDATA install-man \
install-pdf install-pdf-am install-pkgincludeHEADERS \
install-pkginclude_internalHEADERS install-ps install-ps-am \
install-strip installcheck installcheck-am installdirs \
maintainer-clean maintainer-clean-generic \
maintainer-clean-local mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags uninstall uninstall-am uninstall-libLTLIBRARIES \
uninstall-m4dataDATA uninstall-pkgincludeHEADERS \
uninstall-pkginclude_internalHEADERS
# Build rules for putting fused Google Test files into the distribution
# package. The user can also create those files by manually running
# scripts/fuse_gtest_files.py.
$(test_fused_gtest_test_SOURCES): fused-gtest
fused-gtest: $(pkginclude_HEADERS) $(pkginclude_internal_HEADERS) \
$(GTEST_SRC) src/gtest-all.cc src/gtest_main.cc \
scripts/fuse_gtest_files.py
mkdir -p "$(srcdir)/fused-src"
chmod -R u+w "$(srcdir)/fused-src"
rm -f "$(srcdir)/fused-src/gtest/gtest-all.cc"
rm -f "$(srcdir)/fused-src/gtest/gtest.h"
"$(srcdir)/scripts/fuse_gtest_files.py" "$(srcdir)/fused-src"
cp -f "$(srcdir)/src/gtest_main.cc" "$(srcdir)/fused-src/gtest/"
maintainer-clean-local:
rm -rf "$(srcdir)/fused-src"
# Disables 'make install' as installing a compiled version of Google
# Test can lead to undefined behavior due to violation of the
# One-Definition Rule.
install-exec-local:
echo "'make install' is dangerous and not supported. Instead, see README for how to integrate Google Test into your build system."
false
install-data-local:
echo "'make install' is dangerous and not supported. Instead, see README for how to integrate Google Test into your build system."
false
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
|
{
"content_hash": "edfb32333ede921a3a69962e97130be0",
"timestamp": "",
"source": "github",
"line_count": 1360,
"max_line_length": 472,
"avg_line_length": 41.766176470588235,
"alnum_prop": 0.6619133129115172,
"repo_name": "geshuning/lib",
"id": "7d2ae68ca6d9fc51a94b5e5dd0eb314e1d7ee1f3",
"size": "56802",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unit_testing/gtest-1.7.0/Makefile",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "278863"
},
{
"name": "C++",
"bytes": "3084722"
},
{
"name": "Python",
"bytes": "193870"
},
{
"name": "Shell",
"bytes": "642820"
}
]
}
|
package org.apache.camel.component.xmlsecurity.api;

/**
 * Raised when verification of an XML signature does not succeed.
 */
public class XmlSignatureInvalidException extends XmlSignatureException {

    private static final long serialVersionUID = 1L;

    public XmlSignatureInvalidException(String message) {
        super(message);
    }

    public XmlSignatureInvalidException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
{
"content_hash": "9fcf1d00946db7a700169208801d5e52",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 74,
"avg_line_length": 25.5,
"alnum_prop": 0.7407407407407407,
"repo_name": "apache/camel",
"id": "9f4a033fbb46adef85b403ae9204c3e3a393fbed",
"size": "1261",
"binary": false,
"copies": "14",
"ref": "refs/heads/main",
"path": "components/camel-xmlsecurity/src/main/java/org/apache/camel/component/xmlsecurity/api/XmlSignatureInvalidException.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Apex",
"bytes": "6695"
},
{
"name": "Batchfile",
"bytes": "2353"
},
{
"name": "CSS",
"bytes": "5472"
},
{
"name": "Dockerfile",
"bytes": "5676"
},
{
"name": "Elm",
"bytes": "10852"
},
{
"name": "FreeMarker",
"bytes": "8015"
},
{
"name": "Groovy",
"bytes": "412301"
},
{
"name": "HTML",
"bytes": "213802"
},
{
"name": "Java",
"bytes": "114936384"
},
{
"name": "JavaScript",
"bytes": "103655"
},
{
"name": "Jsonnet",
"bytes": "1734"
},
{
"name": "Kotlin",
"bytes": "41869"
},
{
"name": "Mustache",
"bytes": "525"
},
{
"name": "RobotFramework",
"bytes": "8461"
},
{
"name": "Ruby",
"bytes": "88"
},
{
"name": "Shell",
"bytes": "15327"
},
{
"name": "Tcl",
"bytes": "4974"
},
{
"name": "Thrift",
"bytes": "6979"
},
{
"name": "XQuery",
"bytes": "699"
},
{
"name": "XSLT",
"bytes": "276597"
}
]
}
|
module <%= moduleName %> {
  'use strict';

  // Registers an (initially empty) config block on the generated module.
  angular
    .module('<% if (parentModuleName) { %><%= parentModuleName %>.<% } %><%= moduleName %>')
    .config(config);

  function config() {
  }
}
|
{
"content_hash": "bcdbc13676f628c869bb913b2149f52d",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 92,
"avg_line_length": 19.6,
"alnum_prop": 0.5510204081632653,
"repo_name": "Ins87/generator-ng-poly",
"id": "09c99583af8311842746b2d2eb96a9a283abfd9a",
"size": "241",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "module/templates/_module-routes.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "258"
},
{
"name": "CoffeeScript",
"bytes": "12835"
},
{
"name": "HTML",
"bytes": "8443"
},
{
"name": "JavaScript",
"bytes": "194632"
},
{
"name": "TypeScript",
"bytes": "15808"
}
]
}
|
/*
Fresh is a command line tool that builds and (re)starts your web application everytime you save a go or template file.
If the web framework you are using supports the Fresh runner, it will show build errors on your browser.
It currently works with Traffic (https://github.com/pilu/traffic), Martini (https://github.com/codegangsta/martini) and gocraft/web (https://github.com/gocraft/web).
Fresh will watch for file events, and every time you create/modifiy/delete a file it will build and restart the application.
If `go build` returns an error, it will logs it in the tmp folder.
Traffic (https://github.com/pilu/traffic) already has a middleware that shows the content of that file if it is present. This middleware is automatically added if you run a Traffic web app in dev mode with Fresh.
*/
package main
import (
"flag"
"fmt"
"os"
"github.com/bom-d-van/fresh/runner"
)
// main parses the optional -c flag, validates that the referenced config
// file exists, exports its path for the runner via RUNNER_CONFIG_PATH,
// and hands control to the build-and-restart loop.
func main() {
	configPath := flag.String("c", "", "config file path")
	flag.Parse()

	if *configPath != "" {
		if _, err := os.Stat(*configPath); err != nil {
			// Diagnostics belong on stderr so they are not mixed into
			// the program's normal output stream.
			fmt.Fprintf(os.Stderr, "Can't find config file `%s`\n", *configPath)
			os.Exit(1)
		}
		os.Setenv("RUNNER_CONFIG_PATH", *configPath)
	}

	runner.Start()
}
|
{
"content_hash": "02232c6ccd4da8fff3c933c21a493d54",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 212,
"avg_line_length": 32.648648648648646,
"alnum_prop": 0.7160596026490066,
"repo_name": "bom-d-van/fresh",
"id": "d046965d561566dd21ec6e4d1aee516e7ee1dc34",
"size": "1208",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "14332"
}
]
}
|
::
:: Licensed under the Apache License, Version 2.0 (the "License");
:: you may not use this file except in compliance with the License.
:: You may obtain a copy of the License at
::
:: http://www.apache.org/licenses/LICENSE-2.0
::
:: Unless required by applicable law or agreed to in writing, software
:: distributed under the License is distributed on an "AS IS" BASIS,
:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
:: See the License for the specific language governing permissions and
:: limitations under the License.
::
::
:: Build script example for inside the windows docker container
::
:: C:\build is the out-of-tree build directory
:: C:\install is the location where artifacts are placed
:: C:\thrift is where the sources are
::
:: Make and go into the out-of-tree directory
IF NOT EXIST C:\build (MKDIR C:\build)
cd c:\build

:: Generate the out-of-tree build files
cmake^
 -DBOOST_ROOT=C:\Libraries\boost_1_69_0^
 -DBOOST_LIBRARYDIR=C:\Libraries\boost_1_69_0\lib64-msvc-14.1^
 -DBUILD_LIBRARIES=OFF^
 -DCMAKE_BUILD_TYPE=Release^
 -DCMAKE_INSTALL_PREFIX=C:\install^
 -DWITH_MT=ON^
 c:\thrift || EXIT /B

:: Build
cmake --build . --target thrift-compiler --config Release || EXIT /B

:: Test
:: NOTE(review): unlike the build step, no --config Release is passed here;
:: with a multi-config (Visual Studio) generator this may build the default
:: configuration instead -- confirm intended behavior.
cmake --build . --target check || EXIT /B

:: Install (propagate failure like every other step instead of ignoring it)
cmake --build . --target install || EXIT /B
|
{
"content_hash": "436874d36061d5b5b21438c6b9804972",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 75,
"avg_line_length": 30.318181818181817,
"alnum_prop": 0.7151424287856072,
"repo_name": "bforbis/thrift",
"id": "5534428b446122b9c7a0df5b522913eaf7e7527c",
"size": "1334",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "build/docker/msvc2017/build-compiler.bat",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "890"
},
{
"name": "ActionScript",
"bytes": "75794"
},
{
"name": "Batchfile",
"bytes": "53982"
},
{
"name": "C",
"bytes": "909705"
},
{
"name": "C#",
"bytes": "1253801"
},
{
"name": "C++",
"bytes": "4827485"
},
{
"name": "CMake",
"bytes": "126517"
},
{
"name": "CSS",
"bytes": "1070"
},
{
"name": "Common Lisp",
"bytes": "39679"
},
{
"name": "D",
"bytes": "649593"
},
{
"name": "Dart",
"bytes": "181338"
},
{
"name": "Dockerfile",
"bytes": "57262"
},
{
"name": "Emacs Lisp",
"bytes": "5361"
},
{
"name": "Erlang",
"bytes": "322716"
},
{
"name": "Go",
"bytes": "479056"
},
{
"name": "HTML",
"bytes": "39395"
},
{
"name": "Haskell",
"bytes": "141452"
},
{
"name": "Haxe",
"bytes": "311348"
},
{
"name": "Java",
"bytes": "1013199"
},
{
"name": "JavaScript",
"bytes": "418781"
},
{
"name": "Lex",
"bytes": "10881"
},
{
"name": "Lua",
"bytes": "81257"
},
{
"name": "M4",
"bytes": "170643"
},
{
"name": "Makefile",
"bytes": "216894"
},
{
"name": "OCaml",
"bytes": "39269"
},
{
"name": "PHP",
"bytes": "351752"
},
{
"name": "Pascal",
"bytes": "462629"
},
{
"name": "Perl",
"bytes": "132516"
},
{
"name": "Python",
"bytes": "466118"
},
{
"name": "Ruby",
"bytes": "414945"
},
{
"name": "Rust",
"bytes": "328233"
},
{
"name": "Shell",
"bytes": "59140"
},
{
"name": "Smalltalk",
"bytes": "22944"
},
{
"name": "Swift",
"bytes": "143590"
},
{
"name": "Thrift",
"bytes": "394052"
},
{
"name": "TypeScript",
"bytes": "61760"
},
{
"name": "Vim script",
"bytes": "2846"
},
{
"name": "Yacc",
"bytes": "27391"
}
]
}
|
package gaia3d.controller.view;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import gaia3d.domain.widget.Widget;
import gaia3d.service.WidgetService;
import gaia3d.utils.DateUtils;
import gaia3d.utils.FormatUtils;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Controller
@RequestMapping("/widget")
public class WidgetController {

    @Autowired
    private WidgetService widgetService;

    /**
     * Renders the widget modify page.
     *
     * Loads every widget via {@link WidgetService#getListWidget} and adds
     * date helper strings the view uses for display.
     *
     * @param request current HTTP request (unused; kept for signature compatibility)
     * @param model   view model populated with the widget list and date attributes
     * @return logical view name of the widget modify page
     */
    @GetMapping(value = "/modify")
    public String modify(HttpServletRequest request, Model model) {
        Widget widget = new Widget();
        List<Widget> widgetList = widgetService.getListWidget(widget);

        // Pattern defined by FormatUtils.VIEW_YEAR_MONTH_DAY_TIME —
        // assumed "yyyy-MM-dd HH:mm:ss"-style; the substring offsets below rely on it.
        String today = DateUtils.getToday(FormatUtils.VIEW_YEAR_MONTH_DAY_TIME);
        // Compact "yyyyMMdd" form built by dropping the date separators.
        String yearMonthDay = today.substring(0, 4) + today.substring(5, 7) + today.substring(8, 10);

        model.addAttribute("today", today);
        // NOTE(review): the "yearMonthDay" attribute is the dashed "yyyy-MM-dd"
        // prefix of today, NOT the compact yearMonthDay local above — confirm
        // the view expects the dashed form.
        model.addAttribute("yearMonthDay", today.subSequence(0, 10));
        model.addAttribute("thisYear", yearMonthDay.subSequence(0, 4));
        model.addAttribute(widget);
        model.addAttribute(widgetList);
        return "/widget/modify";
    }
}
|
{
"content_hash": "2f4df15f4b2883a4a67761ba68f03a19",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 93,
"avg_line_length": 29.11111111111111,
"alnum_prop": 0.7893129770992366,
"repo_name": "Gaia3D/mago3d",
"id": "f7308eb5ec2495beb6f655aeb8ca7bad6a59e775",
"size": "1310",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "mago3d-admin/src/main/java/gaia3d/controller/view/WidgetController.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "853699"
},
{
"name": "HTML",
"bytes": "2164643"
},
{
"name": "Java",
"bytes": "2158477"
},
{
"name": "JavaScript",
"bytes": "22316674"
},
{
"name": "Less",
"bytes": "356476"
},
{
"name": "SCSS",
"bytes": "391356"
},
{
"name": "Shell",
"bytes": "587"
}
]
}
|
<?xml version="1.0" encoding="utf-8"?>
<!-- 64dp load-more list footer. Contains a spinner (hidden by default via
     android:visibility="gone") and a "load more" label; presumably the adapter
     toggles their visibility while the next page loads — confirm in code. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    android:layout_width="match_parent"
    android:layout_height="64dp"
    android:gravity="center"
    android:orientation="vertical"
    android:background="@android:color/white">

    <!-- Indeterminate material progress wheel, gone until shown by code. -->
    <com.pnikosis.materialishprogress.ProgressWheel
        android:visibility="gone"
        android:id="@+id/item_load_more_icon_loading"
        android:layout_width="wrap_content"
        android:layout_height="match_parent"
        app:matProg_barColor="@color/color_accent"
        app:matProg_progressIndeterminate="true"
        app:matProg_barWidth="3dp" />

    <!-- "Load more" prompt text. -->
    <TextView
        android:id="@+id/item_load_more_icon_finish"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="@string/load_more"
        android:textColor="@color/text_color_secondary"
        android:textSize="16sp" />

</LinearLayout>
|
{
"content_hash": "bef422144f493256ea3e91d433a86961",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 72,
"avg_line_length": 37.96296296296296,
"alnum_prop": 0.6780487804878049,
"repo_name": "GKerison/CNode-Material-Design",
"id": "4ebb980dec7758d69cb06577ad0f39f2df1cda83",
"size": "1025",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/activity_item_load_more.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3697"
},
{
"name": "Java",
"bytes": "203749"
}
]
}
|
import {Component, OnInit} from '@angular/core';

/**
 * Parent component exposing a list of raw name strings to its template.
 * The list intentionally contains a blank entry and a whitespace-padded
 * entry — presumably to exercise trimming in a child component or pipe;
 * confirm against name-parent.component.html.
 */
@Component({
  selector: 'app-name-parent',
  templateUrl: './name-parent.component.html',
  styleUrls: ['./name-parent.component.css']
})
export class NameParentComponent implements OnInit {
  // Raw names, including empty ('  ') and padded (' Bombasto ') values.
  names = ['Mr. IQ', ' ', ' Bombasto '];

  constructor() {
  }

  ngOnInit() {
  }
}
|
{
"content_hash": "14525029fc9a56a11110de382a3cf42d",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 52,
"avg_line_length": 18.555555555555557,
"alnum_prop": 0.6377245508982036,
"repo_name": "scp504677840/Angular",
"id": "1272afdb914c32ff5ee1ee96bf0c6eddf9ee5509",
"size": "334",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ComponentInteraction/src/app/name-parent/name-parent.component.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11184"
},
{
"name": "HTML",
"bytes": "93656"
},
{
"name": "JavaScript",
"bytes": "46077"
},
{
"name": "TypeScript",
"bytes": "478165"
}
]
}
|
from django.urls import path

from .admin import site

# Mount the package-local custom admin site (not django.contrib.admin's
# default site) under the /admin/ prefix.
urlpatterns = [
    path('admin/', site.urls),
]
|
{
"content_hash": "8f612c7bdcd83ed333a23f4b0d073aef",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 30,
"avg_line_length": 14.857142857142858,
"alnum_prop": 0.6826923076923077,
"repo_name": "wkschwartz/django",
"id": "13910ef99935a6e3b5a2c497ab9c152d63ea18cd",
"size": "104",
"binary": false,
"copies": "55",
"ref": "refs/heads/stable/3.2.x",
"path": "tests/proxy_models/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "43253"
},
{
"name": "HTML",
"bytes": "171790"
},
{
"name": "JavaScript",
"bytes": "105066"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11050239"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
]
}
|
-- Registry of active debug shapes consumed by drawDebugThings.
local spawnedDebugThings = {}
-- Default lifetime (seconds) of a spawned debug shape.
local defaultTime = 5
-- Default RGBA draw color for debug shapes (semi-transparent magenta).
local defaultColor = {255,0,255,150}
-- x,y centered
-- Spawns a temporary circle *outline* centered on (x, y) with radius r on the
-- debug layer, removed again after t seconds (defaults: defaultTime seconds,
-- defaultColor border).
spawnDebugCircle = function (x, y, r, t, color)
	local duration = t or defaultTime
	local borderColor = color or defaultColor
	local view = the.app.view
	-- Fill positions by top-left corner, so shift the center by the radius.
	local marker = Fill:new{
		shape = "circle",
		x = x - r,
		y = y - r,
		width = r * 2,
		height = r * 2,
		border = borderColor,
		fill = {0,0,0,0},
	}
	view.layers.debug:add(marker)
	view.timer:after(duration, function()
		the.app.view.layers.debug:remove(marker)
	end)
end
-- Spawns a temporary *filled* dot centered on (x, y) on the debug layer,
-- removed after t seconds (defaults: radius 3, defaultTime, defaultColor).
spawnDebugPoint = function (x, y, r, t, color)
	local duration = t or defaultTime
	local dotColor = color or defaultColor
	local radius = r or 3
	-- Fill positions by top-left corner, so shift the center by the radius.
	local dot = Fill:new{
		shape = "circle",
		x = x - radius,
		y = y - radius,
		width = radius * 2,
		height = radius * 2,
		fill = dotColor,
	}
	local view = the.app.view
	view.layers.debug:add(dot)
	view.timer:after(duration, function()
		the.app.view.layers.debug:remove(dot)
	end)
end
-- x,y left-top
-- Spawns a temporary rectangle outline with top-left corner (x, y) and size
-- w x h on the debug layer, removed after t seconds (defaults: defaultTime,
-- defaultColor border).
spawnDebugRect = function (x,y,w,h,t,color)
	t = t or defaultTime
	color = color or defaultColor
	-- BUG FIX: the original wrote "y = y-r", but this function has no
	-- parameter r — the global r was nil, so every call crashed with an
	-- arithmetic-on-nil error. (x, y) is already the top-left corner
	-- (see the siblings, which only offset by r because they are centered),
	-- so no offset belongs here.
	local d = Fill:new{ x = x, y = y, width = w, height = h, border = color, fill = {0,0,0,0} }
	the.app.view.layers.debug:add(d)
	the.app.view.timer:after(t, function()
		the.app.view.layers.debug:remove(d)
	end)
end
-- Draws every entry of spawnedDebugThings with love.graphics ("circle"
-- entries as outlines, "point" entries filled), then clears the table once
-- there is nothing left to draw.
drawDebugThings = function()
	local drawCount = 0
	for k,v in pairs(spawnedDebugThings) do
		if v.shape == "circle" then
			love.graphics.setColor(v.color)
			love.graphics.circle("line", v.x, v.y, v.radius, 10)
			-- BUG FIX: drawCount was declared and tested but never
			-- incremented, so the table was reset every call even while
			-- entries were still being drawn.
			drawCount = drawCount + 1
		end
		if v.shape == "point" then
			love.graphics.setColor(v.color)
			love.graphics.circle("fill", v.x, v.y, v.radius, 10)
			drawCount = drawCount + 1
		end
	end
	-- Reset the registry only when this frame drew nothing.
	if drawCount == 0 then spawnedDebugThings = {} end
end
-- Monkey-patches sprite.draw so that, when config.draw_debug_info is set,
-- the sprite is drawn with extra markers: a red dot at its origin, a green
-- dot at its center, and a blue outline of its bounding box. With the flag
-- unset the wrapper just delegates to the original draw.
drawDebugWrapper = function (sprite)
	local oldDraw = sprite.draw
	sprite.draw = function(self,x,y)
		-- Debug drawing disabled: defer to the original draw untouched.
		if not config.draw_debug_info then oldDraw(self,x,y) return end
		-- Snap to whole pixels; fall back to the sprite's own position.
		x = math.floor(x or self.x)
		y = math.floor(y or self.y)
		local w = self.width or 1
		local h = self.height or 1
		oldDraw(self,x,y)
		-- Save the current draw color so it can be restored afterwards.
		local c = {love.graphics.getColor()}
		love.graphics.setColor(255, 0, 0)
		love.graphics.circle("fill", x, y, 3, 10) -- red: origin
		love.graphics.setColor(0, 255, 0)
		love.graphics.circle("fill", x+w/2, y+h/2, 3, 10) -- green: center
		love.graphics.setColor(0, 0, 255)
		love.graphics.rectangle("line", x, y, w, h ) -- blue: bounding box
		love.graphics.setColor(c)
	end
end
-- 32x32 image tile used to mark a point of interest while debugging.
-- NOTE(review): Tile:extend is a framework helper defined elsewhere in the
-- project — confirm its construction semantics before changing fields.
DebugPoint = Tile:extend
{
	width = 32,
	height = 32,
	image = '/assets/graphics/debugpoint.png',
}
|
{
"content_hash": "4f33585d65c3d8d4df6ad553ba4c72ec",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 116,
"avg_line_length": 25.344444444444445,
"alnum_prop": 0.6650591845681718,
"repo_name": "dbltnk/macro-prototype",
"id": "83c3247be9e5060565091b5155a3d984b496c111",
"size": "2297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "debug_utils.lua",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3565076"
},
{
"name": "C++",
"bytes": "763745"
},
{
"name": "JavaScript",
"bytes": "10760"
},
{
"name": "Lua",
"bytes": "1183392"
},
{
"name": "Shell",
"bytes": "3081"
}
]
}
|
<?xml version="1.0" encoding="utf-8"?>
<!-- 48dp navigation-drawer row holding a single vertically-centered title. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_height="48dp"
    android:layout_width="match_parent">

    <!-- Row label. Marked non-focusable — presumably so clicks are handled
         by the row container/adapter rather than the TextView; confirm. -->
    <TextView
        android:id="@+id/item_name"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:layout_marginLeft="16dp"
        android:layout_marginStart="16dp"
        android:layout_marginRight="16dp"
        android:drawablePadding="16dp"
        android:layout_marginEnd="16dp"
        android:textSize="17sp"
        android:alpha="0.87"
        android:focusable="false"
        android:focusableInTouchMode="false"
        android:gravity="center_vertical"
        android:textColor="@color/grey_600"/>

</LinearLayout>
|
{
"content_hash": "dab1449ad121f555da9eaa3d33de3878",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 72,
"avg_line_length": 33.166666666666664,
"alnum_prop": 0.6407035175879398,
"repo_name": "ab27/news",
"id": "3a51ceecfbaaaeab1a970097bf1f49cf6d60a586",
"size": "796",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/drawer_row_title.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "182037"
}
]
}
|
End of preview.
No dataset card yet
- Downloads last month
- 4