text
stringlengths
2
1.04M
meta
dict
package org.apache.geode.management.internal.cli.functions; import org.apache.commons.lang3.StringUtils; import org.apache.geode.cache.execute.FunctionContext; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.internal.InternalLocator; import org.apache.geode.internal.cache.InternalCache; import org.apache.geode.internal.cache.execute.InternalFunction; import org.apache.geode.management.internal.configuration.domain.SharedConfigurationStatus; import org.apache.geode.management.internal.functions.CliFunctionResult; public class FetchSharedConfigurationStatusFunction implements InternalFunction<Void> { private static final long serialVersionUID = 1L; private static final String ID = "org.apache.geode.management.internal.cli.functions.FetchSharedConfigurationStatusFunction"; @Override public String getId() { return ID; } @Override public void execute(FunctionContext<Void> context) { InternalLocator locator = InternalLocator.getLocator(); InternalCache cache = (InternalCache) context.getCache(); DistributedMember member = cache.getDistributedSystem().getDistributedMember(); SharedConfigurationStatus status = locator.getSharedConfigurationStatus().getStatus(); String memberId = member.getName(); if (StringUtils.isBlank(memberId)) { memberId = member.getId(); } CliFunctionResult result = new CliFunctionResult(memberId, status.name(), null); context.getResultSender().lastResult(result); } }
{ "content_hash": "003cf3f2c96492e945b98fe547418cf7", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 98, "avg_line_length": 38.125, "alnum_prop": 0.7934426229508197, "repo_name": "jdeppe-pivotal/geode", "id": "c7518122b7ca4f03f5a3dcc3788391d16bebf646", "size": "2314", "binary": false, "copies": "4", "ref": "refs/heads/develop", "path": "geode-gfsh/src/main/java/org/apache/geode/management/internal/cli/functions/FetchSharedConfigurationStatusFunction.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "104031" }, { "name": "Dockerfile", "bytes": "15956" }, { "name": "Go", "bytes": "40709" }, { "name": "Groovy", "bytes": "41916" }, { "name": "HTML", "bytes": "4037680" }, { "name": "Java", "bytes": "33151406" }, { "name": "JavaScript", "bytes": "1780821" }, { "name": "Python", "bytes": "29801" }, { "name": "Ruby", "bytes": "1801" }, { "name": "SCSS", "bytes": "2677" }, { "name": "Shell", "bytes": "275617" } ], "symlink_target": "" }
package com.welyab.jcpa.classfile.attribute;

/**
 * Marker interface for a class file attribute named "SourceFile".
 * <p>
 * Presumably models the JVM class-file {@code SourceFile} attribute, which
 * records the name of the source file a class was compiled from — no members
 * are declared here yet, so confirm against the implementing class.
 */
public interface SourceFileAttribute extends Attribute {
}
{ "content_hash": "c815ba59c55b26c757b8f8b1b14934b8", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 56, "avg_line_length": 17.833333333333332, "alnum_prop": 0.822429906542056, "repo_name": "ayszu/ayszu", "id": "31181682d1065b07a506bea29262d57995bb0469", "size": "107", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/welyab/jcpa/classfile/attribute/SourceFileAttribute.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "76" }, { "name": "HTML", "bytes": "8462" }, { "name": "Java", "bytes": "156220" } ], "symlink_target": "" }
"use strict";
// Implements Promise.reduce / Promise.prototype.reduce and the machinery
// shared with Promise.each (selected via the `_each` flag).
// NOTE(review): RESOLVE_CALL_METHOD and FUNCTION_ERROR are bare identifiers
// here — presumably build-time macro constants substituted by the bluebird
// packager; confirm against the generated output.
module.exports = function(Promise,
                          PromiseArray,
                          apiRejection,
                          tryConvertToPromise,
                          INTERNAL,
                          debug) {
var util = require("./util");
var tryCatch = util.tryCatch;

// PromiseArray subclass that folds the input values left-to-right through
// `fn`, threading an accumulator. When `_each === INTERNAL` it instead
// collects per-item results into `_eachValues` (Promise.each mode).
function ReductionPromiseArray(promises, fn, initialValue, _each) {
    this.constructor$(promises);
    var context = Promise._getContext();
    this._fn = util.contextBind(context, fn);
    if (initialValue !== undefined) {
        // Wrap the seed and register for cancellation so cancelling the
        // seed cancels the whole reduction.
        initialValue = Promise.resolve(initialValue);
        initialValue._attachCancellationCallback(this);
    }
    this._initialValue = initialValue;
    this._currentCancellable = null;
    if (_each === INTERNAL) {
        // Promise.each mode: collect one result per input value.
        this._eachValues = Array(this._length);
    } else if (_each === 0) {
        this._eachValues = null;
    } else {
        // Plain reduce mode: no per-item collection.
        this._eachValues = undefined;
    }
    this._promise._captureStackTrace();
    this._init$(undefined, RESOLVE_CALL_METHOD);
}
util.inherits(ReductionPromiseArray, PromiseArray);

// Records an accumulator value in each-mode; INTERNAL marks "no value".
ReductionPromiseArray.prototype._gotAccum = function(accum) {
    if (this._eachValues !== undefined &&
        this._eachValues !== null &&
        accum !== INTERNAL) {
        this._eachValues.push(accum);
    }
};

// Called with the final value in each-mode; returns the collected values.
ReductionPromiseArray.prototype._eachComplete = function(value) {
    if (this._eachValues !== null) {
        this._eachValues.push(value);
    }
    return this._eachValues;
};

// Override: initialization is deferred to _iterate.
ReductionPromiseArray.prototype._init = function() {};

// Override: an empty input resolves to the collected values (each-mode)
// or to the initial value (reduce-mode).
ReductionPromiseArray.prototype._resolveEmptyArray = function() {
    this._resolve(this._eachValues !== undefined ? this._eachValues
                                                 : this._initialValue);
};

// Override
ReductionPromiseArray.prototype.shouldCopyValues = function () {
    return false;
};

// Override
ReductionPromiseArray.prototype._resolve = function(value) {
    this._promise._resolveCallback(value);
    this._values = null;
};

// Override: propagate cancellation to whichever promise is pending now,
// plus the initial value if it is itself a promise.
ReductionPromiseArray.prototype._resultCancelled = function(sender) {
    if (sender === this._initialValue) return this._cancel();
    if (this._isResolved()) return;
    this._resultCancelled$();
    if (this._currentCancellable instanceof Promise) {
        this._currentCancellable.cancel();
    }
    if (this._initialValue instanceof Promise) {
        this._initialValue.cancel();
    }
};

// Override: builds the promise chain that performs the reduction.
ReductionPromiseArray.prototype._iterate = function (values) {
    this._values = values;
    var value;
    var i;
    var length = values.length;
    if (this._initialValue !== undefined) {
        // Explicit seed: fold every element.
        value = this._initialValue;
        i = 0;
    } else {
        // No seed: the first element becomes the initial accumulator.
        value = Promise.resolve(values[0]);
        i = 1;
    }

    this._currentCancellable = value;

    // Rejections of later values are surfaced via the chain below, so
    // suppress their individual unhandled-rejection warnings.
    for (var j = i; j < length; ++j) {
        var maybePromise = values[j];
        if (maybePromise instanceof Promise) {
            maybePromise.suppressUnhandledRejections();
        }
    }

    if (!value.isRejected()) {
        for (; i < length; ++i) {
            // Per-step context object consumed by gotAccum/gotValue below.
            var ctx = {
                accum: null,
                value: values[i],
                index: i,
                length: length,
                array: this
            };

            value = value._then(gotAccum, undefined, undefined, ctx, undefined);

            // Too many promises chained with asyncGuaranteed will result in
            // stack overflow. Break up long chains to reset stack.
            if ((i & 127) === 0) {
                value._setNoAsyncGuarantee();
            }
        }
    }

    if (this._eachValues !== undefined) {
        value = value
            ._then(this._eachComplete, undefined, undefined, this, undefined);
    }
    value._then(completed, completed, undefined, value, this);
};

Promise.prototype.reduce = function (fn, initialValue) {
    return reduce(this, fn, initialValue, null);
};

Promise.reduce = function (promises, fn, initialValue, _each) {
    return reduce(promises, fn, initialValue, _each);
};

// Terminal callback: `this` is the settled chain promise, `array` the
// ReductionPromiseArray to resolve or reject with its value/reason.
function completed(valueOrReason, array) {
    if (this.isFulfilled()) {
        array._resolve(valueOrReason);
    } else {
        array._reject(valueOrReason);
    }
}

// Shared entry point for reduce/each; rejects early on a non-function.
function reduce(promises, fn, initialValue, _each) {
    if (typeof fn !== "function") {
        return apiRejection(FUNCTION_ERROR + util.classString(fn));
    }
    var array = new ReductionPromiseArray(promises, fn, initialValue, _each);
    return array.promise();
}

// Receives the accumulator for a step, then resolves the step's input
// value before invoking the user callback. `this` is the per-step ctx.
function gotAccum(accum) {
    this.accum = accum;
    this.array._gotAccum(accum);
    var value = tryConvertToPromise(this.value, this.array._promise);
    if (value instanceof Promise) {
        this.array._currentCancellable = value;
        return value._then(gotValue, undefined, undefined, this, undefined);
    } else {
        return gotValue.call(this, value);
    }
}

// Invokes the user callback: (value, index, length) in each-mode, or
// (accum, value, index, length) in reduce-mode. `this` is the ctx.
function gotValue(value) {
    var array = this.array;
    var promise = array._promise;
    var fn = tryCatch(array._fn);
    promise._pushContext();
    var ret;
    if (array._eachValues !== undefined) {
        ret = fn.call(promise._boundValue(), value, this.index, this.length);
    } else {
        ret = fn.call(promise._boundValue(),
                      this.accum, value, this.index, this.length);
    }
    if (ret instanceof Promise) {
        array._currentCancellable = ret;
    }
    var promiseCreated = promise._popContext();
    debug.checkForgottenReturns(
        ret,
        promiseCreated,
        array._eachValues !== undefined ? "Promise.each" : "Promise.reduce",
        promise
    );
    return ret;
}
};
{ "content_hash": "5353e725c39b44fc9cb153db47a41405", "timestamp": "", "source": "github", "line_count": 191, "max_line_length": 80, "avg_line_length": 29.26178010471204, "alnum_prop": 0.6002862766147791, "repo_name": "petkaantonov/bluebird", "id": "51f86aab9d781e4e511b015a32757b5d5c75d2ba", "size": "5589", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/reduce.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "4242" }, { "name": "HTML", "bytes": "657" }, { "name": "JavaScript", "bytes": "1353043" }, { "name": "Shell", "bytes": "1149" } ], "symlink_target": "" }
package org.wildfly.camel.test.file;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Paths;

import org.apache.camel.CamelContext;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.DefaultCamelContext;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;

/**
 * Arquillian test that routes a message from a direct endpoint to a file
 * endpoint and verifies the file content on disk.
 */
@RunWith(Arquillian.class)
public class FileIntegrationTest {

    /** Minimal deployment archive; the test relies on container-provided Camel classes. */
    @Deployment
    public static JavaArchive createdeployment() throws IOException {
        final JavaArchive archive = ShrinkWrap.create(JavaArchive.class, "camel-file-tests");
        return archive;
    }

    /**
     * Sends "Kermit" to direct:start, expects the route to transform it to
     * "Hello Kermit" and write it to camel-file.txt in the server data dir.
     */
    @Test
    public void testFileEndpoint() throws Exception {
        final String datadir = System.getProperty("jboss.server.data.dir");
        // Fixed typo in the assertion message ("not nul" -> "not null").
        Assert.assertNotNull("Directory name not null", datadir);

        CamelContext camelctx = new DefaultCamelContext();
        camelctx.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").transform(body().prepend("Hello ")).
                to("file:" + datadir + "?fileName=camel-file.txt");
            }
        });

        camelctx.start();
        try {
            ProducerTemplate producer = camelctx.createProducerTemplate();
            String result = producer.requestBody("direct:start", "Kermit", String.class);
            Assert.assertEquals("Hello Kermit", result);
        } finally {
            camelctx.stop();
        }

        // try-with-resources: the reader was previously leaked if the
        // assertion below threw before the explicit close() call.
        try (BufferedReader br = new BufferedReader(
                new FileReader(Paths.get(datadir, "camel-file.txt").toFile()))) {
            Assert.assertEquals("Hello Kermit", br.readLine());
        }
    }
}
{ "content_hash": "6075ee853779aae7b11e8cbb7225f44e", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 110, "avg_line_length": 33.89655172413793, "alnum_prop": 0.6846388606307223, "repo_name": "myfear/wildfly-camel", "id": "b927c25e745fb31ecfd8ed625d56f55c62a96669", "size": "2620", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "itests/standalone/basic/src/main/java/org/wildfly/camel/test/file/FileIntegrationTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "9383" }, { "name": "Java", "bytes": "790601" }, { "name": "JavaScript", "bytes": "59" }, { "name": "Protocol Buffer", "bytes": "538" }, { "name": "Python", "bytes": "60" }, { "name": "Ruby", "bytes": "61" }, { "name": "Shell", "bytes": "7574" }, { "name": "XSLT", "bytes": "762" } ], "symlink_target": "" }
package edu.umn.amicus.aligners;

import org.apache.commons.lang3.StringUtils;
import edu.umn.amicus.util.AlignedTuple;
import edu.umn.amicus.AmicusException;
import edu.umn.amicus.util.AnalysisPiece;
import edu.umn.amicus.util.ANA;

import java.util.Iterator;
import java.util.List;

/**
 * Align tuples from all systems to get an iterator over aligned tuples (a tuple may have null elements).
 *
 * Created by gpfinley on 10/21/16.
 */
public interface Aligner extends AnalysisPiece {

    /**
     * Aligns annotations across inputs and iterates over the aligned result.
     *
     * @param allAnnotations A list of lists of annotations, one list per input.
     * @return An iterator over AlignedTuple objects, which contain Amicus-Native Annotation objects.
     *         Tuples may contain null elements where an input had no matching annotation.
     * @throws AmicusException if alignment fails
     *         (NOTE(review): failure conditions inferred from the signature — confirm with implementations)
     */
    Iterator<AlignedTuple> alignAndIterate(List<List<ANA>> allAnnotations) throws AmicusException;
}
{ "content_hash": "d752d9960f11fe6765ab606ee00394d4", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 105, "avg_line_length": 31, "alnum_prop": 0.7535483870967742, "repo_name": "gpfinley/ensembles", "id": "43a1e158e741ceadba0afd195eced7540876c783", "size": "775", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/java/edu/umn/amicus/aligners/Aligner.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "197786" }, { "name": "Python", "bytes": "2823" }, { "name": "Shell", "bytes": "1323" } ], "symlink_target": "" }
package io.noties.markwon.core.suite; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import io.noties.markwon.test.TestSpan.Document; import static io.noties.markwon.test.TestSpan.args; import static io.noties.markwon.test.TestSpan.document; import static io.noties.markwon.test.TestSpan.span; import static io.noties.markwon.test.TestSpan.text; @RunWith(RobolectricTestRunner.class) @Config(manifest = Config.NONE) public class LinkTest extends BaseSuiteTest { @Test public void single() { final Document document = document( span(LINK, args("href", "#href"), text("link")) ); match("[link](#href)", document); } }
{ "content_hash": "129f140fdf8ec45eeebe4b23610b12bc", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 55, "avg_line_length": 26.571428571428573, "alnum_prop": 0.7580645161290323, "repo_name": "noties/Markwon", "id": "a3e6f2af9c0beaed81e454c8ed954badab2e00c6", "size": "744", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "markwon-core/src/test/java/io/noties/markwon/core/suite/LinkTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1113630" } ], "symlink_target": "" }
package org.netbeans.modules.plantumlnb.lexer;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.netbeans.modules.plantumlnb.pumllexer.PUMLParserConstants;
import org.netbeans.spi.lexer.LanguageHierarchy;
import org.netbeans.spi.lexer.Lexer;
import org.netbeans.spi.lexer.LexerRestartInfo;

/**
 * NetBeans lexer language hierarchy for PlantUML ("text/x-puml").
 * <p>
 * Maps every JavaCC-generated {@link PUMLParserConstants} token id to a
 * {@link PUMLTokenId} carrying a syntax-highlighting category (e.g.
 * "keyword", "operator", "type"). The token table and its id lookup map
 * are built lazily and guarded by synchronization.
 *
 * @author venkat
 */
public class PUMLLanguageHierarchy extends LanguageHierarchy<PUMLTokenId> {

    // Lazily-built full token list; null until init() runs.
    private static List<PUMLTokenId> tokens;
    // Lazily-built lookup from PUMLParserConstants ordinal to token.
    private static Map<Integer, PUMLTokenId> idToToken;

    // Builds the token table and the ordinal->token index. Callers must
    // hold the class lock (all call sites are synchronized).
    private static void init() {
        tokens = Arrays.<PUMLTokenId>asList(new PUMLTokenId[]{
            // Whitespace / comments / plain message text
            new PUMLTokenId("EOF", "whitespace", PUMLParserConstants.EOF),
            new PUMLTokenId("WHITESPACE", "whitespace", PUMLParserConstants.WHITESPACE),
            new PUMLTokenId("SINGLE_LINE_COMMENT", "comment", PUMLParserConstants.SINGLE_LINE_COMMENT),
            new PUMLTokenId("MESSAGE", "message", PUMLParserConstants.MESSAGE),
            // PlantUML keywords
            new PUMLTokenId("START_UML", "keyword", PUMLParserConstants.START_UML),
            new PUMLTokenId("END_UML", "keyword", PUMLParserConstants.END_UML),
            new PUMLTokenId("AS", "keyword", PUMLParserConstants.AS),
            new PUMLTokenId("ALSO", "keyword", PUMLParserConstants.ALSO),
            new PUMLTokenId("AUTO_NUMBER", "keyword", PUMLParserConstants.AUTO_NUMBER),
            new PUMLTokenId("TITLE", "keyword", PUMLParserConstants.TITLE),
            new PUMLTokenId("NEW_PAGE", "keyword", PUMLParserConstants.NEW_PAGE),
            new PUMLTokenId("BOX", "keyword", PUMLParserConstants.BOX),
            new PUMLTokenId("ALT", "keyword", PUMLParserConstants.ALT),
            new PUMLTokenId("ELSE", "keyword", PUMLParserConstants.ELSE),
            new PUMLTokenId("OPT", "keyword", PUMLParserConstants.OPT),
            new PUMLTokenId("LOOP", "keyword", PUMLParserConstants.LOOP),
            new PUMLTokenId("PAR", "keyword", PUMLParserConstants.PAR),
            new PUMLTokenId("BREAK", "keyword", PUMLParserConstants.BREAK),
            new PUMLTokenId("CRITICAL", "keyword", PUMLParserConstants.CRITICAL),
            new PUMLTokenId("NOTE_LEFT", "keyword", PUMLParserConstants.NOTE_LEFT),
            new PUMLTokenId("NOTE_LEFT_ON_LINK", "keyword", PUMLParserConstants.NOTE_LEFT_ON_LINK),
            new PUMLTokenId("NOTE_RIGHT", "keyword", PUMLParserConstants.NOTE_RIGHT),
            new PUMLTokenId("NOTE_RIGHT_ON_LINK", "keyword", PUMLParserConstants.NOTE_RIGHT_ON_LINK),
            new PUMLTokenId("NOTE_TOP", "keyword", PUMLParserConstants.NOTE_TOP),
            new PUMLTokenId("NOTE_TOP_ON_LINK", "keyword", PUMLParserConstants.NOTE_TOP_ON_LINK),
            new PUMLTokenId("NOTE_BOTTOM", "keyword", PUMLParserConstants.NOTE_BOTTOM),
            new PUMLTokenId("NOTE_BOTTOM_ON_LINK", "keyword", PUMLParserConstants.NOTE_BOTTOM_ON_LINK),
            new PUMLTokenId("NOTE_OVER", "keyword", PUMLParserConstants.NOTE_OVER),
            new PUMLTokenId("NOTE_ON_LINK", "keyword", PUMLParserConstants.NOTE_ON_LINK),
            new PUMLTokenId("END_NOTE", "keyword", PUMLParserConstants.END_NOTE),
            new PUMLTokenId("REF_OVER", "keyword", PUMLParserConstants.REF_OVER),
            new PUMLTokenId("DELAY", "keyword", PUMLParserConstants.DELAY),
            new PUMLTokenId("GROUP", "keyword", PUMLParserConstants.GROUP),
            new PUMLTokenId("LEFT", "keyword", PUMLParserConstants.LEFT),
            new PUMLTokenId("RIGHT", "keyword", PUMLParserConstants.RIGHT),
            new PUMLTokenId("OF", "keyword", PUMLParserConstants.OF),
            new PUMLTokenId("ON", "keyword", PUMLParserConstants.ON),
            new PUMLTokenId("LINK", "keyword", PUMLParserConstants.LINK),
            new PUMLTokenId("OVER", "keyword", PUMLParserConstants.OVER),
            new PUMLTokenId("END", "keyword", PUMLParserConstants.END),
            new PUMLTokenId("ACTIVATE", "keyword", PUMLParserConstants.ACTIVATE),
            new PUMLTokenId("DEACTIVATE", "keyword", PUMLParserConstants.DEACTIVATE),
            new PUMLTokenId("DESTROY", "keyword", PUMLParserConstants.DESTROY),
            new PUMLTokenId("CREATE", "keyword", PUMLParserConstants.CREATE),
            new PUMLTokenId("FOOTBOX", "keyword", PUMLParserConstants.FOOTBOX),
            new PUMLTokenId("HIDE", "keyword", PUMLParserConstants.HIDE),
            new PUMLTokenId("SHOW", "keyword", PUMLParserConstants.SHOW),
            new PUMLTokenId("SKIN_PARAM", "keyword", PUMLParserConstants.SKIN_PARAM),
            new PUMLTokenId("SKIN", "keyword", PUMLParserConstants.SKIN),
            new PUMLTokenId("TOP", "keyword", PUMLParserConstants.TOP),
            new PUMLTokenId("BOTTOM", "keyword", PUMLParserConstants.BOTTOM),
            new PUMLTokenId("TOP_TO_BOTTOM_DIRECTION", "keyword", PUMLParserConstants.TOP_TO_BOTTOM_DIRECTION),
            new PUMLTokenId("PACKAGE", "keyword", PUMLParserConstants.PACKAGE),
            new PUMLTokenId("NAMESPACE", "keyword", PUMLParserConstants.NAMESPACE),
            new PUMLTokenId("PAGE", "keyword", PUMLParserConstants.PAGE),
            new PUMLTokenId("UP", "keyword", PUMLParserConstants.UP),
            new PUMLTokenId("DOWN", "keyword", PUMLParserConstants.DOWN),
            new PUMLTokenId("IF", "keyword", PUMLParserConstants.IF),
            new PUMLTokenId("ENDIF", "keyword", PUMLParserConstants.ENDIF),
            new PUMLTokenId("PARTITION", "keyword", PUMLParserConstants.PARTITION),
            new PUMLTokenId("FOOTER", "keyword", PUMLParserConstants.FOOTER),
            new PUMLTokenId("HEADER", "keyword", PUMLParserConstants.HEADER),
            new PUMLTokenId("CENTER", "keyword", PUMLParserConstants.CENTER),
            new PUMLTokenId("ROTATE", "keyword", PUMLParserConstants.ROTATE),
            new PUMLTokenId("REF", "keyword", PUMLParserConstants.REF),
            new PUMLTokenId("RETURN", "keyword", PUMLParserConstants.RETURN),
            new PUMLTokenId("IS", "keyword", PUMLParserConstants.IS),
            new PUMLTokenId("REPEAT", "keyword", PUMLParserConstants.REPEAT),
            new PUMLTokenId("START", "keyword", PUMLParserConstants.START),
            new PUMLTokenId("STOP", "keyword", PUMLParserConstants.STOP),
            new PUMLTokenId("WHILE", "keyword", PUMLParserConstants.WHILE),
            new PUMLTokenId("ENDWHILE", "keyword", PUMLParserConstants.ENDWHILE),
            new PUMLTokenId("FORK", "keyword", PUMLParserConstants.FORK),
            new PUMLTokenId("AGAIN", "keyword", PUMLParserConstants.AGAIN),
            // Arrow / relationship operators
            new PUMLTokenId("FORWARD_CALL_MESSAGE", "operator", PUMLParserConstants.FORWARD_CALL_MESSAGE),
            new PUMLTokenId("FORWARD_CALL_DOTTED_MESSAGE", "operator", PUMLParserConstants.FORWARD_CALL_DOTTED_MESSAGE),
            new PUMLTokenId("REVERSE_CALL_MESSAGE", "operator", PUMLParserConstants.REVERSE_CALL_MESSAGE),
            new PUMLTokenId("REVERSE_CALL_DOTTED_MESSAGE", "operator", PUMLParserConstants.REVERSE_CALL_DOTTED_MESSAGE),
            new PUMLTokenId("FORWARD_ASYNCHRONOUS_CALL_MESSAGE", "operator", PUMLParserConstants.FORWARD_ASYNCHRONOUS_CALL_MESSAGE),
            new PUMLTokenId("REVERSE_ASYNCHRONOUS_CALL_MESSAGE", "operator", PUMLParserConstants.REVERSE_ASYNCHRONOUS_CALL_MESSAGE),
            new PUMLTokenId("DOTS_DASHES", "operator", PUMLParserConstants.DOTS_DASHES),
            new PUMLTokenId("LEFT_AGGREGATION", "operator", PUMLParserConstants.LEFT_AGGREGATION),
            new PUMLTokenId("LEFT_EXTENSION", "operator", PUMLParserConstants.LEFT_EXTENSION),
            new PUMLTokenId("LEFT_COMPOSITION", "operator", PUMLParserConstants.LEFT_COMPOSITION),
            new PUMLTokenId("RIGHT_AGGREGATION", "operator", PUMLParserConstants.RIGHT_AGGREGATION),
            new PUMLTokenId("RIGHT_EXTENSION", "operator", PUMLParserConstants.RIGHT_EXTENSION),
            new PUMLTokenId("RIGHT_COMPOSITION", "operator", PUMLParserConstants.RIGHT_COMPOSITION),
            // Participant / classifier types
            new PUMLTokenId("ACTOR", "type", PUMLParserConstants.ACTOR),
            new PUMLTokenId("PARTICIPANT", "type", PUMLParserConstants.PARTICIPANT),
            new PUMLTokenId("USECASE", "type", PUMLParserConstants.USECASE),
            new PUMLTokenId("CLASS", "type", PUMLParserConstants.CLASS),
            new PUMLTokenId("INTERFACE", "type", PUMLParserConstants.INTERFACE),
            new PUMLTokenId("ABSTRACT", "type", PUMLParserConstants.ABSTRACT),
            new PUMLTokenId("ANNOTATION", "type", PUMLParserConstants.ANNOTATION),
            new PUMLTokenId("ENUM", "type", PUMLParserConstants.ENUM),
            new PUMLTokenId("COMPONENT", "type", PUMLParserConstants.COMPONENT),
            new PUMLTokenId("STATE", "type", PUMLParserConstants.STATE),
            new PUMLTokenId("OBJECT", "type", PUMLParserConstants.OBJECT),
            // Deployment-diagram parts
            new PUMLTokenId("ARTIFACT", "part", PUMLParserConstants.ARTIFACT),
            new PUMLTokenId("FOLDER", "part", PUMLParserConstants.FOLDER),
            new PUMLTokenId("RECT", "part", PUMLParserConstants.RECT),
            new PUMLTokenId("NODE", "part", PUMLParserConstants.NODE),
            new PUMLTokenId("FRAME", "part", PUMLParserConstants.FRAME),
            new PUMLTokenId("CLOUD", "part", PUMLParserConstants.CLOUD),
            new PUMLTokenId("DATABASE", "part", PUMLParserConstants.DATABASE),
            new PUMLTokenId("STORAGE", "part", PUMLParserConstants.STORAGE),
            new PUMLTokenId("AGENT", "part", PUMLParserConstants.AGENT),
            new PUMLTokenId("BOUNDARY", "part", PUMLParserConstants.BOUNDARY),
            new PUMLTokenId("CONTROL", "part", PUMLParserConstants.CONTROL),
            new PUMLTokenId("ENTITY", "part", PUMLParserConstants.ENTITY),
            // Identifiers and literal fragments
            new PUMLTokenId("IDENTIFIER", "identifier", PUMLParserConstants.IDENTIFIER),
            new PUMLTokenId("LETTER", "literal", PUMLParserConstants.LETTER),
            new PUMLTokenId("PART_LETTER", "literal", PUMLParserConstants.PART_LETTER),
        });
        idToToken = new HashMap<Integer, PUMLTokenId>();
        for (PUMLTokenId token : tokens) {
            idToToken.put(token.ordinal(), token);
        }
    }

    /**
     * Looks up the token for a JavaCC token id, initializing the table on
     * first use.
     */
    static synchronized PUMLTokenId getToken(int id) {
        if (idToToken == null) {
            init();
        }
        return idToToken.get(id);
    }

    /** Returns all token ids of the PlantUML language, built lazily. */
    @Override
    protected synchronized Collection<PUMLTokenId> createTokenIds() {
        if (tokens == null) {
            init();
        }
        return tokens;
    }

    @Override
    protected synchronized Lexer<PUMLTokenId> createLexer(LexerRestartInfo<PUMLTokenId> info) {
        return new PUMLLexer(info);
    }

    @Override
    protected String mimeType() {
        return "text/x-puml";
    }
}
{ "content_hash": "5fd910000ecaa0dc86d15720c86b2f80", "timestamp": "", "source": "github", "line_count": 181, "max_line_length": 152, "avg_line_length": 62.03867403314917, "alnum_prop": 0.6423546175082376, "repo_name": "esamson/plantumlnb", "id": "67ff297ed8bf4086c8e41663106fc093686e3662", "size": "12372", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/main/java/org/netbeans/modules/plantumlnb/lexer/PUMLLanguageHierarchy.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "438124" }, { "name": "Shell", "bytes": "234" } ], "symlink_target": "" }
<DWPBody xsi:schemaLocation="http://www.govtalk.gov.uk/dwp/carers-allowance file:/future/0.24/schema/ca/CarersAllowance_Schema.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ds="http://www.w3.org/2000/09/xmldsig#" xmlns="http://www.govtalk.gov.uk/dwp/carers-allowance"> <Version>0.24</Version> <ClaimVersion>3.8</ClaimVersion> <Origin>GB</Origin> <DWPCATransaction id="1408TEST432"> <TransactionId>1408TEST432</TransactionId> <DateTimeGenerated>05-03-2015 12:02</DateTimeGenerated> <LanguageUsed>English</LanguageUsed> <DWPCAChangeOfCircumstances> <ClaimantDetails> <FullName><QuestionLabel>Full name</QuestionLabel><Answer>Qff1IOpCpz6d32nE6m9BrCjNFyxzLfHsgXwsu2tS6fuXCqZbWb0oBRYlfpXTabYMGNFJ6J/GmrwD0QVNlTViGw==</Answer></FullName> <DateOfBirth><QuestionLabel>Date of birth</QuestionLabel><Answer>03-04-1950</Answer></DateOfBirth> <NationalInsuranceNumber><QuestionLabel>National Insurance number</QuestionLabel><Answer>5hl0EtaVS6K6CxsQKX311EYwEU0tBKJ9lc+tBlwhj7txcevRdJOQJSH9x+EBJkUF</Answer></NationalInsuranceNumber> <ContactPreference><QuestionLabel>Contact phone or mobile number</QuestionLabel><Answer>By Post</Answer></ContactPreference> </ClaimantDetails> <CareeDetails> <FullName><QuestionLabel>Their full name</QuestionLabel><Answer>Pan0K8RRCz8zkc2ezhQNx5jBJxff15YE7vHZJQahq+FLgI+rX0yrizB5SXFI3fb3</Answer></FullName> <RelationToClaimant><QuestionLabel>Their relationship to you</QuestionLabel><Answer>Wife</Answer></RelationToClaimant> </CareeDetails> <EmploymentChange> <StillCaring><QuestionLabel>Are you still caring 35 hours or more each week?</QuestionLabel><Answer>No</Answer></StillCaring> <DateStoppedCaring><QuestionLabel>What date did you stop caring 35 hours a week?</QuestionLabel><Answer>19-12-2013</Answer></DateStoppedCaring> <HasWorkStartedYet><QuestionLabel>Has this work started yet?</QuestionLabel><Answer>Yes</Answer></HasWorkStartedYet> <DateWorkedStarted><QuestionLabel>When did you start this 
work?</QuestionLabel><Answer>01-01-2014</Answer></DateWorkedStarted><HasWorkFinishedYet><QuestionLabel>Has this work finished?</QuestionLabel><Answer>No</Answer></HasWorkFinishedYet> <TypeOfWork><QuestionLabel>What type of work is this?</QuestionLabel><Answer>employed</Answer></TypeOfWork> <StartedEmploymentAndOngoing> <EmployerName><QuestionLabel>Employer's name</QuestionLabel><Answer>DWP</Answer></EmployerName> <Address> <QuestionLabel>Employer's name and address</QuestionLabel> <Answer> <Line>1 Plod Avenue</Line><Line>Preston</Line><PostCode>PN1 3DJ</PostCode> </Answer> </Address> <EmployerContactNumber><QuestionLabel>Employer's contact number</QuestionLabel><Answer>0111 123 4567</Answer></EmployerContactNumber> <EmployerPayroll><QuestionLabel>Payroll or employee number</QuestionLabel><Answer>XYZ123890</Answer></EmployerPayroll> <BeenPaidYet><QuestionLabel>Have you been paid yet?</QuestionLabel><Answer>Yes</Answer></BeenPaidYet> <HowMuchPaid><QuestionLabel>How much were you last paid?</QuestionLabel><Answer>12345678.12</Answer></HowMuchPaid><PaymentDate><QuestionLabel>What date did you receive your last pay?</QuestionLabel><Answer>20-01-2014</Answer></PaymentDate> <PayFrequency> <Frequency><QuestionLabel>How often are you paid?</QuestionLabel><Answer>Weekly</Answer></Frequency> </PayFrequency> <UsuallyPaidSameAmount><QuestionLabel>Do you usually get the same amount each week?</QuestionLabel><Answer>Yes</Answer></UsuallyPaidSameAmount> <PayIntoPension><QuestionLabel>Do you pay into a pension?</QuestionLabel><Answer>No</Answer></PayIntoPension> <PaidForThingsToDoJob><QuestionLabel>Do you pay for things you need to do your job?</QuestionLabel><Answer>No</Answer></PaidForThingsToDoJob> <CareCostsForThisWork><QuestionLabel>Do you have any care costs because of this work?</QuestionLabel><Answer>No</Answer></CareCostsForThisWork> </StartedEmploymentAndOngoing> </EmploymentChange> <Declaration> <DeclarationStatement> <Title>The declarations below set out your legal 
responsibilities in respect of your claim.</Title> <Content>I declare that I understand the Carer's Allowance Claim Notes and that the information provided on this claim form is correct and complete.</Content> <Content>I understand that I must report all changes in my circumstances or that of the person that I am caring for which may affect my entitlement promptly and by failing to do so I may be liable to prosecution or face a financial penalty.</Content> <Content>I will use the online service or phone 0345 608 4321 or write to Carer's Allowance Unit, Palatine House, Lancaster Road, Preston, PR1 1HB to report a change in my circumstances or that of the person that I am caring for.</Content> <Content>If I give false or incomplete information or fail to report changes in my circumstances or that of the person that I am caring for promptly, I understand that my Carer's Allowance may be stopped or reduced and any overpayment of Carer's Allowance may be recovered. In addition I may be prosecuted or face a financial penalty.</Content> </DeclarationStatement> <DeclarationQuestion><QuestionLabel>Please tick this box if this change of circumstances form has been filled in by someone else. 
Please ensure that you understand the declarations above as another person cannot make the declarations on your behalf.</QuestionLabel><Answer>Yes</Answer></DeclarationQuestion> <DeclarationQuestion><QuestionLabel>Please tick this box to confirm that you understand and make the declarations above.</QuestionLabel><Answer>Yes</Answer></DeclarationQuestion> <DeclarationNameOrg><QuestionLabel>Your name or organisation</QuestionLabel><Answer>Mr Smith</Answer></DeclarationNameOrg> </Declaration> <EvidenceList> <Evidence> <Title>What you need to do now.</Title> <Content>Send us the documents below including your Name and National Insurance (NI) number.</Content> <Content>Any payslips you have</Content> <Content>Send the above documents to:</Content> <Content>CA Freepost</Content> <Content>Palatine House</Content> <Content>Preston</Content> <Content>PR1 1HB</Content> </Evidence> </EvidenceList> <Consents> <Consent><QuestionLabel>Do you agree to the Carer's Allowance Unit contacting anyone mentioned in this form?</QuestionLabel><Answer>No</Answer><Why><QuestionLabel>List anyone you don't want to be contacted and say why.</QuestionLabel><Answer>I don't want to</Answer></Why></Consent> </Consents> </DWPCAChangeOfCircumstances> </DWPCATransaction> <ds:Signature> <ds:SignedInfo> <ds:CanonicalizationMethod Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315"/> <ds:SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"/> <ds:Reference URI="#1408TEST432"> <ds:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"/> <ds:DigestValue>D/Ugv4uLF+dIRHDFiCRApnx2Y9c=</ds:DigestValue> </ds:Reference> </ds:SignedInfo> <ds:SignatureValue> Z++nuaBKoYHlsNvG3RsnyG0q8yE1HxaIk1E978KZckOqjicXI2o/1byNx1qsw0K2TwYihwLjK3He 1ljOmVSc2tL7mkj3ck8sScDIv9Mw2GiGOP59IElNcGaedNe0rp0x3YYje/wnOABi3BY0McCG/Cvq 6ZohAiFORTQTlJbYHBZxfe6+jGSanyDyWiA9wyQ1iPDsErcmgO+9HIO6xh08ch9rm8Al/ZKLb3mf LANnPomxY9o1rGpFqOCk85dXB2iSDztiSBJUa4YCeblaRyqtYirnOsfKU5oc67+uES3rnIwYQEkY 
dSVsrirjLzC814iegB9C5xdqyioOkG84J7aUGQ== </ds:SignatureValue> </ds:Signature></DWPBody>
{ "content_hash": "bcac138385f32135457848cbfa3fdef1", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 364, "avg_line_length": 73.47863247863248, "alnum_prop": 0.6829126439455624, "repo_name": "Department-for-Work-and-Pensions/RenderingService", "id": "16b5c7e236978a87b83c6e3e72cafb53945a113f", "size": "8597", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/resources/0.24/circs/c3_functional33_circs.xml", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "719" }, { "name": "Scala", "bytes": "2056061" } ], "symlink_target": "" }
package org.drools.core.fluent.impl; import org.drools.core.command.NewKieSessionCommand; import org.kie.api.runtime.builder.ExecutableBuilder; import org.kie.api.runtime.builder.KieContainerFluent; import org.kie.api.runtime.builder.KieSessionFluent; public class KieContainerFluentImpl extends BaseBatchFluent<ExecutableBuilder, ExecutableBuilder> implements KieContainerFluent { private ExecutableImpl ctx; public KieContainerFluentImpl(ExecutableImpl ctx) { super(ctx); this.ctx = ctx; } @Override public KieSessionFluent newSession() { return newSession(null); } @Override public KieSessionFluent newSession(String sessionId) { NewKieSessionCommand cmd = new NewKieSessionCommand(sessionId); ctx.addCommand(cmd); return new KieSessionFluentImpl(ctx); } }
{ "content_hash": "d0eabf35ce05de8fbbf0e91b23ec1b52", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 129, "avg_line_length": 27.516129032258064, "alnum_prop": 0.7456037514654161, "repo_name": "sutaakar/drools", "id": "06335592922f53b1f1af66276caa9e5da35fb1cc", "size": "1474", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "drools-core/src/main/java/org/drools/core/fluent/impl/KieContainerFluentImpl.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "14216" }, { "name": "Batchfile", "bytes": "2554" }, { "name": "CSS", "bytes": "1412" }, { "name": "GAP", "bytes": "197080" }, { "name": "HTML", "bytes": "9298" }, { "name": "Java", "bytes": "28490800" }, { "name": "Protocol Buffer", "bytes": "13855" }, { "name": "Python", "bytes": "4555" }, { "name": "Ruby", "bytes": "491" }, { "name": "Shell", "bytes": "1120" }, { "name": "Standard ML", "bytes": "82260" }, { "name": "XSLT", "bytes": "24302" } ], "symlink_target": "" }
import argparse import re # Taken from http://genomewiki.ucsc.edu/index.php/Hg19_Genome_size_statistics HG19_NON_N_GENOME_SIZE = 2897310462 def scrape_bamstat(statsfile, threshold=7): proper_exp = re.compile(r'Proper pairs\s*(\d+)') insert_exp = re.compile(r'Actual FR median insert size:\s*(\d+)') dev_exp = re.compile(r'Actual FR median absolute deviation:\s*(\d+)') for line in statsfile: if proper_exp.match(line): proper = int(proper_exp.match(line).group(1)) if insert_exp.match(line): insert = int(insert_exp.match(line).group(1)) if dev_exp.match(line): dev = int(dev_exp.match(line).group(1)) coverage = proper * insert / HG19_NON_N_GENOME_SIZE del_size = insert + threshold * dev return coverage, proper, insert, dev, del_size def main(): parser = argparse.ArgumentParser( description="Script that scrapes sample directories in Samples/ndd " "for bamstat stats files and writes a file of average library " "coverage, number of proper pairs in the library, median insert of " "the library, and the median absolute deviation from this median.") parser.add_argument('samples', type=argparse.FileType('r'), help="Tab separated file containing sample names " "and their subdirectories under " "/data/talkowski/Samples/ndd") parser.add_argument('outfile', type=argparse.FileType('w'), help="Output file to write to. File will be tab " "separated and be in the format: Sample Coverage " "Proper_Pair_Count Median_Insert Deviation") args = parser.parse_args() args.outfile.write('sample\tcoverage\tproper\tinsert\tdev\tdel_size\n') for line in args.samples: sample, bstat_dir = line.rstrip().split()[0:2] statsfile = open('%s/stats.file' % bstat_dir) coverage, proper, insert, dev, del_size = scrape_bamstat(statsfile) args.outfile.write("%s\t%d\t%d\t%d\t%d\t%d\n" % (sample, coverage, proper, insert, dev, del_size)) if __name__ == '__main__': main()
{ "content_hash": "1ba2426dc979bc6a8b5e3349965a31c2", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 77, "avg_line_length": 40.32727272727273, "alnum_prop": 0.6172227231740307, "repo_name": "talkowski-lab/Holmes", "id": "66fc230f5ca571dc561970ea569b6b9185cbec8d", "size": "2242", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "pycluster/scrape_bamstat.py", "mode": "33261", "license": "mit", "language": [ { "name": "C++", "bytes": "14905" }, { "name": "Makefile", "bytes": "627" }, { "name": "Python", "bytes": "184975" }, { "name": "R", "bytes": "49110" }, { "name": "Shell", "bytes": "349854" } ], "symlink_target": "" }
package org.xdi.oxauth.model.uma; import org.codehaus.jackson.annotate.JsonProperty; import org.codehaus.jackson.annotate.JsonPropertyOrder; import org.jboss.resteasy.annotations.providers.jaxb.IgnoreMediaTypes; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import java.util.Date; import java.util.List; /** * Token status response according to RPT introspection profile: * http://docs.kantarainitiative.org/uma/draft-uma-core.html#uma-bearer-token-profile * * @author Yuriy Zabrovarnyy * @author Yuriy Movchan * Date: 10/24/2012 */ // ignore jettison as it's recommended here: http://docs.jboss.org/resteasy/docs/2.3.4.Final/userguide/html/json.html @IgnoreMediaTypes("application/*+json") @JsonPropertyOrder({"active", "exp", "iat", "nbf", "permissions", "client_id", "sub", "aud", "iss", "jti"}) @XmlRootElement public class RptIntrospectionResponse { private boolean active; // according spec, must be "active" http://tools.ietf.org/html/draft-richer-oauth-introspection-03#section-2.2 private Date expiresAt; private Date issuedAt; private Date nbf; private String clientId; private String sub; private String aud; private String iss; private String jti; private List<RegisterPermissionRequest> permissions; public RptIntrospectionResponse() { } public RptIntrospectionResponse(boolean status) { this.active = status; } @JsonProperty(value = "aud") @XmlElement(name = "aud") public String getAud() { return aud; } public void setAud(String aud) { this.aud = aud; } @JsonProperty(value = "iss") @XmlElement(name = "iss") public String getIss() { return iss; } public void setIss(String iss) { this.iss = iss; } @JsonProperty(value = "jti") @XmlElement(name = "jti") public String getJti() { return jti; } public void setJti(String jti) { this.jti = jti; } @JsonProperty(value = "sub") @XmlElement(name = "sub") public String getSub() { return sub; } public void setSub(String sub) { this.sub = sub; } @JsonProperty(value = "client_id") 
@XmlElement(name = "client_id") public String getClientId() { return clientId; } public void setClientId(String clientId) { this.clientId = clientId; } @JsonProperty(value = "active") @XmlElement(name = "active") public boolean getActive() { return active; } public void setActive(boolean status) { this.active = status; } @JsonProperty(value = "nbf") @XmlElement(name = "nbf") public Date getNbf() { return nbf; } public void setNbf(Date nbf) { this.nbf = nbf; } @JsonProperty(value = "exp") @XmlElement(name = "exp") public Date getExpiresAt() { return expiresAt != null ? new Date(expiresAt.getTime()) : null; } public void setExpiresAt(Date expirationDate) { this.expiresAt = expirationDate != null ? new Date(expirationDate.getTime()) : null; } @JsonProperty(value = "iat") @XmlElement(name = "iat") public Date getIssuedAt() { return issuedAt != null ? new Date(issuedAt.getTime()) : null; } public void setIssuedAt(Date p_issuedAt) { issuedAt = p_issuedAt != null ? new Date(p_issuedAt.getTime()) : null; } @JsonProperty(value = "permissions") @XmlElement(name = "permissions") public List<RegisterPermissionRequest> getPermissions() { return permissions; } public void setPermissions(List<RegisterPermissionRequest> p_permissions) { permissions = p_permissions; } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("RptStatusResponse"); sb.append("{active=").append(active); sb.append(", expiresAt=").append(expiresAt); sb.append(", issuedAt=").append(issuedAt); sb.append(", nbf=").append(nbf); sb.append(", clientId=").append(clientId); sb.append(", sub=").append(sub); sb.append(", aud=").append(aud); sb.append(", iss=").append(iss); sb.append(", jti=").append(jti); sb.append(", permissions=").append(permissions); sb.append('}'); return sb.toString(); } }
{ "content_hash": "f1d943cc463085e2632887a71fd54be4", "timestamp": "", "source": "github", "line_count": 164, "max_line_length": 140, "avg_line_length": 27.04268292682927, "alnum_prop": 0.6324689966178129, "repo_name": "nixu-corp/oxAuth", "id": "022f4ae192b4dffdc3d9aace0d705c94452717c5", "size": "4579", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Model/src/main/java/org/xdi/oxauth/model/uma/RptIntrospectionResponse.java", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "49" }, { "name": "C", "bytes": "102966" }, { "name": "CSS", "bytes": "43980" }, { "name": "HTML", "bytes": "156734" }, { "name": "Java", "bytes": "4559838" }, { "name": "JavaScript", "bytes": "9618" }, { "name": "PHP", "bytes": "19271" }, { "name": "Python", "bytes": "267184" }, { "name": "XSLT", "bytes": "35486" } ], "symlink_target": "" }
{% extends "shop/base.html" %} {% load staticfiles mezzanine_tags shop_tags rating_tags i18n %} {% block meta_title %}{{ product.meta_title }}{% endblock %} {% block body_id %}category{% endblock %} {% block meta_keywords %}{% metablock %} {% for keyword in product.keywords.all %} {% if not forloop.first %}, {% endif %} {{ keyword }} {% endfor %} {% endmetablock %}{% endblock %} {% block meta_description %}{% metablock %} {{ product.description }} {% endmetablock %}{% endblock %} {% block extra_css %} {{ block.super }} <link rel="stylesheet" href="{% static "mezzanine/css/magnific-popup.css" %}"> {% endblock %} {% block extra_js %} {{ block.super }} <script src="{% static "mezzanine/js/magnific-popup.js" %}"></script> <script> $(document).ready(function() { $('#product-images-large').magnificPopup({ delegate: 'a', type: 'image', gallery: { enabled: true, } }); }); </script> {% endblock %} {% block extra_head %} {{ block.super }} <script>var variations = {{ variations_json|safe }};</script> <script src="{% static "cartridge/js/product_variations.js" %}"></script> {% endblock %} {% block breadcrumb_menu %} {{ block.super }} <li>{{ product.title }}</li> {% endblock %} {% block title %} {% editable product.title %}{{ product.title }}{% endeditable %} {% endblock %} {% block main %} {% if images %} {% spaceless %} <ul id="product-images-large" class="list-unstyled list-inline"> {% for image in images %} <li id="image-{{ image.id }}-large"{% if not forloop.first %}style="display:none;"{% endif %}> <a class="product-image-large" href="{{ MEDIA_URL }}{{ image.file }}"> <img alt="{{ image.description }}" src="{{ MEDIA_URL }}{% thumbnail image.file 0 300 %}" class="img-thumbnail img-responsive col-xs-12"> </a> </li> {% endfor %} </ul> {% if images|length != 1 %} <ul id="product-images-thumb" class="list-unstyled list-inline"> {% for image in images %} <li> <a class="thumbnail" id="image-{{ image.id }}" href="{{ MEDIA_URL }}{{ image.file }}"> <img alt="{{ image.description 
}}" src="{{ MEDIA_URL }}{% thumbnail image.file 75 75 %}"> </a> </li> {% endfor %} </ul> {% endif %} {% endspaceless %} {% endif %} {% editable product.content %} {{ product.content|richtext_filters|safe }} {% endeditable %} {% if product.available and has_available_variations %} <ul id="variations" class="list-unstyled"> {% for variation in variations %} <li id="variation-{{ variation.sku }}" {% if not variation.default %}style="display:none;"{% endif %}> {% if variation.has_price %} {% if variation.on_sale %} <span class="old-price">{{ variation.unit_price|currency }}</span> {% trans "On sale:" %} {% endif %} <span class="price">{{ variation.price|currency }}</span> {% else %} {% if has_available_variations %} <span class="error-msg"> {% trans "The selected options are currently unavailable." %} </span> {% endif %} {% endif %} </li> {% endfor %} </ul> {% errors_for add_product_form %} <form method="post" id="add-cart" class="shop-form"> {% fields_for add_product_form %} <div class="form-actions"> <input type="submit" class="btn btn-primary btn-lg pull-right" name="add_cart" value="{% trans "Buy" %}"> {% if settings.SHOP_USE_WISHLIST %} <input type="submit" class="btn btn-default btn-lg pull-left" name="add_wishlist" value="{% trans "Save for later" %}"> {% endif %} </div> </form> {% else %} <p class="error-msg">{% trans "This product is currently unavailable." 
%}</p> {% endif %} {% if settings.SHOP_USE_RATINGS %} <div class="panel panel-default rating"> <div class="panel-body">{% rating_for product %}</div> </div> {% endif %} {% if settings.SHOP_USE_RELATED_PRODUCTS and related_products %} <h2>{% trans "Related Products" %}</h2> <div class="row related-products"> {% for product in related_products %} <div class="col-xs-6 col-sm-4 col-md-3 product-thumb"> <a class="thumbnail" href="{{ product.get_absolute_url }}"> {% if product.image %} <img src="{{ MEDIA_URL }}{% thumbnail product.image 90 90 %}"> {% endif %} <div class="caption"> <h6>{{ product }}</h6> <div class="price-info"> {% if product.has_price %} {% if product.on_sale %} <span class="old-price">{{ product.unit_price|currency }}</span> {% trans "On sale:" %} {% endif %} <span class="price">{{ product.price|currency }}</span> {% else %} <span class="coming-soon">{% trans "Coming soon" %}</span> {% endif %} </div> </div> </a> </div> {% endfor %} </div> {% endif %} {% endblock %}
{ "content_hash": "32de31423002c70fcf251be0379843c7", "timestamp": "", "source": "github", "line_count": 159, "max_line_length": 148, "avg_line_length": 31.61006289308176, "alnum_prop": 0.5543175487465181, "repo_name": "Parisson/cartridge", "id": "4ef715d0f3439a31714861f69efc9bc8f30582a5", "size": "5026", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "cartridge/shop/templates/shop/product.html", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "CSS", "bytes": "5988" }, { "name": "HTML", "bytes": "32570" }, { "name": "JavaScript", "bytes": "3467" }, { "name": "Python", "bytes": "226403" } ], "symlink_target": "" }
package com.wavesplatform.it.sync import com.typesafe.config.{Config, ConfigFactory} import com.wavesplatform.it.BaseFunSuite import com.wavesplatform.it.api.State import com.wavesplatform.it.api.SyncHttpApi._ import com.wavesplatform.test._ import scala.concurrent.duration._ class MinerStateTestSuite extends BaseFunSuite { import MinerStateTestSuite._ override protected def nodeConfigs: Seq[Config] = Configs private val transferAmount = 1000.waves private def last = nodes.last test("node w/o balance can forge blocks after effective balance increase") { val newKeyPair = last.createKeyPair() val newAddress = newKeyPair.toAddress.toString val (balance1, eff1) = miner.accountBalances(miner.address) val minerFullBalanceDetails = miner.balanceDetails(miner.address) assert(balance1 == minerFullBalanceDetails.available) assert(eff1 == minerFullBalanceDetails.effective) val (balance2, eff2) = last.accountBalances(newAddress) val newAccBalanceDetails = last.balanceDetails(newAddress) assert(balance2 == newAccBalanceDetails.available) assert(eff2 == newAccBalanceDetails.effective) val minerInfoBefore = last.debugMinerInfo() all(minerInfoBefore) shouldNot matchPattern { case State(`newAddress`, _, ts) if ts > 0 => } miner.waitForPeers(1) val txId = miner.transfer(miner.keyPair, newAddress, transferAmount, minFee).id nodes.waitForHeightAriseAndTxPresent(txId) val heightAfterTransfer = miner.height last.assertBalances(newAddress, balance2 + transferAmount, eff2 + transferAmount) last.waitForHeight(heightAfterTransfer + 51, 6.minutes) // if you know how to reduce waiting time, please ping @monroid assert(last.balanceDetails(newAddress).generating == balance2 + transferAmount) val minerInfoAfter = last.debugMinerInfo() atMost(1, minerInfoAfter) should matchPattern { case State(`newAddress`, _, ts) if ts > 0 => } last.waitForPeers(1) val leaseBack = last.lease(newKeyPair, miner.address, (transferAmount - minFee), minFee).id nodes.waitForHeightAriseAndTxPresent(leaseBack) 
assert(last.balanceDetails(newAddress).generating == balance2) all(miner.debugMinerInfo()) shouldNot matchPattern { case State(`newAddress`, _, ts) if ts > 0 => } all(last.debugMinerInfo()) shouldNot matchPattern { case State(`newAddress`, _, ts) if ts > 0 => } } } object MinerStateTestSuite { import com.wavesplatform.it.NodeConfigs._ private val minerConfig = ConfigFactory.parseString(s""" |waves { | synchronization.synchronization-timeout = 10s | blockchain.custom.functionality { | pre-activated-features.1 = 0 | generation-balance-depth-from-50-to-1000-after-height = 100 | } | blockchain.custom.genesis { | average-block-delay = 5s | } | miner.quorum = 1 |}""".stripMargin) val Configs: Seq[Config] = Seq( minerConfig.withFallback(Default.head), minerConfig.withFallback(Default(1)) ) }
{ "content_hash": "10befa7f3140b30470d824069008e034", "timestamp": "", "source": "github", "line_count": 85, "max_line_length": 123, "avg_line_length": 35.49411764705882, "alnum_prop": 0.731852833941001, "repo_name": "wavesplatform/Waves", "id": "0aedbddacb2aa3b55f55ce1e76ce7681bd62a65f", "size": "3017", "binary": false, "copies": "1", "ref": "refs/heads/version-1.4.x", "path": "node-it/src/test/scala/com/wavesplatform/it/sync/MinerStateTestSuite.scala", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "2248" }, { "name": "HTML", "bytes": "4665" }, { "name": "Java", "bytes": "360" }, { "name": "Scala", "bytes": "7393151" }, { "name": "Shell", "bytes": "1365" } ], "symlink_target": "" }
class AISEntry : public QWidget { Q_OBJECT public: AISEntry(QWidget *parent = 0); ~AISEntry(); signals: void buttonClickedEvent(QMouseEvent *event); public slots: void showServerInfo(); void incomeMessage(const QString &from, const QString &message); void newConnection(const QString &partner); void speak(); void about(); protected: void mousePressEvent(QMouseEvent *event); void mouseReleaseEvent(QMouseEvent *event); void mouseMoveEvent(QMouseEvent *event); private: void setWindowShape(); QString getClipboard(); private: QPoint dragPosition; ulong mousePressTimestamp; HWND concernedWnd; // for record the active window to get selection Transceiver myTransceiver; }; #endif // AISENTRY_H
{ "content_hash": "5dd9390a0623cc5216510456f766d60d", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 71, "avg_line_length": 21.47222222222222, "alnum_prop": 0.7102199223803364, "repo_name": "jink2005/AISEntry", "id": "802560b088e6f7211d10ae9601c81ba2bfccc694", "size": "857", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Desktop/AISEntry/aisentry.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "38677" }, { "name": "IDL", "bytes": "550" }, { "name": "Java", "bytes": "48667" } ], "symlink_target": "" }
<!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/> <link rel="SHORTCUT ICON" href="../../../../../img/clover.ico" /> <link rel="stylesheet" href="../../../../../aui/css/aui.min.css" media="all"/> <link rel="stylesheet" href="../../../../../aui/css/aui-experimental.min.css" media="all"/> <!--[if IE 9]><link rel="stylesheet" href="../../../../../aui/css/aui-ie9.min.css" media="all"/><![endif]--> <style type="text/css" media="all"> @import url('../../../../../style.css'); @import url('../../../../../tree.css'); </style> <script src="../../../../../jquery-1.8.3.min.js" type="text/javascript"></script> <script src="../../../../../aui/js/aui.min.js" type="text/javascript"></script> <script src="../../../../../aui/js/aui-experimental.min.js" type="text/javascript"></script> <script src="../../../../../aui/js/aui-soy.min.js" type="text/javascript"></script> <script src="../../../../../package-nodes-tree.js" type="text/javascript"></script> <script src="../../../../../clover-tree.js" type="text/javascript"></script> <script src="../../../../../clover.js" type="text/javascript"></script> <script src="../../../../../clover-descriptions.js" type="text/javascript"></script> <script src="../../../../../cloud.js" type="text/javascript"></script> <title>ABA Route Transit Number Validator 1.0.1-SNAPSHOT</title> </head> <body> <div id="page"> <header id="header" role="banner"> <nav class="aui-header aui-dropdown2-trigger-group" role="navigation"> <div class="aui-header-inner"> <div class="aui-header-primary"> <h1 id="logo" class="aui-header-logo aui-header-logo-clover"> <a href="http://openclover.org" title="Visit OpenClover home page"><span class="aui-header-logo-device">OpenClover</span></a> </h1> </div> <div class="aui-header-secondary"> <ul class="aui-nav"> <li id="system-help-menu"> <a class="aui-nav-link" title="Open online documentation" target="_blank" 
href="http://openclover.org/documentation"> <span class="aui-icon aui-icon-small aui-iconfont-help">&#160;Help</span> </a> </li> </ul> </div> </div> </nav> </header> <div class="aui-page-panel"> <div class="aui-page-panel-inner"> <div class="aui-page-panel-nav aui-page-panel-nav-clover"> <div class="aui-page-header-inner" style="margin-bottom: 20px;"> <div class="aui-page-header-image"> <a href="http://cardatechnologies.com" target="_top"> <div class="aui-avatar aui-avatar-large aui-avatar-project"> <div class="aui-avatar-inner"> <img src="../../../../../img/clover_logo_large.png" alt="Clover icon"/> </div> </div> </a> </div> <div class="aui-page-header-main" > <h1> <a href="http://cardatechnologies.com" target="_top"> ABA Route Transit Number Validator 1.0.1-SNAPSHOT </a> </h1> </div> </div> <nav class="aui-navgroup aui-navgroup-vertical"> <div class="aui-navgroup-inner"> <ul class="aui-nav"> <li class=""> <a href="../../../../../dashboard.html">Project overview</a> </li> </ul> <div class="aui-nav-heading packages-nav-heading"> <strong>Packages</strong> </div> <div class="aui-nav project-packages"> <form method="get" action="#" class="aui package-filter-container"> <input type="text" autocomplete="off" class="package-filter text" placeholder="Type to filter packages..." name="package-filter" id="package-filter" title="Start typing package name (or part of the name) to search through the tree. 
Use arrow keys and the Enter key to navigate."/> </form> <p class="package-filter-no-results-message hidden"> <small>No results found.</small> </p> <div class="packages-tree-wrapper" data-root-relative="../../../../../" data-package-name="com.cardatechnologies.utils.validators.abaroutevalidator"> <div class="packages-tree-container"></div> <div class="clover-packages-lozenges"></div> </div> </div> </div> </nav> </div> <section class="aui-page-panel-content"> <div class="aui-page-panel-content-clover"> <div class="aui-page-header-main"><ol class="aui-nav aui-nav-breadcrumbs"> <li><a href="../../../../../dashboard.html"> Project Clover database Sat Aug 7 2021 12:29:33 MDT</a></li> <li><a href="test-pkg-summary.html">Package com.cardatechnologies.utils.validators.abaroutevalidator</a></li> <li><a href="test-Test_AbaRouteValidator_10.html">Class Test_AbaRouteValidator_10</a></li> </ol></div> <h1 class="aui-h2-clover"> Test testAbaNumberCheck_21211_good </h1> <table class="aui"> <thead> <tr> <th>Test</th> <th><label title="The test result. 
Either a Pass, Fail or Error.">Status</label></th> <th><label title="When the test execution was started">Start time</label></th> <th><label title="The total time in seconds taken to run this test.">Time (seconds)</label></th> <th><label title="A failure or error message if the test is not successful.">Message</label></th> </tr> </thead> <tbody> <tr> <td> <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_10.html?line=29129#src-29129" >testAbaNumberCheck_21211_good</a> </td> <td> <span class="sortValue">1</span><span class="aui-lozenge aui-lozenge-success">PASS</span> </td> <td> 7 Aug 12:39:57 </td> <td> 0.0 </td> <td> <div></div> <div class="errorMessage"></div> </td> </tr> </tbody> </table> <div>&#160;</div> <table class="aui aui-table-sortable"> <thead> <tr> <th style="white-space:nowrap;"><label title="A class that was directly hit by this test.">Target Class</label></th> <th colspan="4"><label title="The percentage of coverage contributed by each single test.">Coverage contributed by</label> testAbaNumberCheck_21211_good</th> </tr> </thead> <tbody> <tr> <td> <span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</span> &#160;&#160;<a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/AbaRouteValidator.html?id=23179#AbaRouteValidator" title="AbaRouteValidator" name="sl-47">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</a> </td> <td> <span class="sortValue">0.7352941</span>73.5% </td> <td class="align-middle" style="width: 100%" colspan="3"> <div> <div title="73.5% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:73.5%"></div></div></div> </td> </tr> </tbody> </table> </div> <!-- class="aui-page-panel-content-clover" --> <footer id="footer" role="contentinfo"> <section class="footer-body"> <ul> 
<li> Report generated by <a target="_new" href="http://openclover.org">OpenClover</a> v 4.4.1 on Sat Aug 7 2021 12:49:26 MDT using coverage data from Sat Aug 7 2021 12:47:23 MDT. </li> </ul> <ul> <li>OpenClover is free and open-source software. </li> </ul> </section> </footer> </section> <!-- class="aui-page-panel-content" --> </div> <!-- class="aui-page-panel-inner" --> </div> <!-- class="aui-page-panel" --> </div> <!-- id="page" --> </body> </html>
{ "content_hash": "1a6378739d9144deeaeaba974a091a72", "timestamp": "", "source": "github", "line_count": 209, "max_line_length": 297, "avg_line_length": 43.92822966507177, "alnum_prop": 0.5097483934211959, "repo_name": "dcarda/aba.route.validator", "id": "0c75da8c565c4c75d1ecb69e820b5715754367ea", "size": "9181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "target13/site/clover/com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_10_testAbaNumberCheck_21211_good_hvv.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "18715254" } ], "symlink_target": "" }
#ifndef USB_DEVICE_VCP_HPP #define USB_DEVICE_VCP_HPP #include "types.h" #include "usb_device.hpp" #include "iokit_types.hpp" #include <CoreFoundation/CoreFoundation.h> #include <IOKit/IOKitLib.h> ////////////////////////////////////////////////////////////////////////////////// // Public Definitions ////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////// // Public Class Prototypes ////////////////////////////////////////////////////////////////////////////////// class USBDeviceSI : public USBDevice { public: USBDeviceSI(const io_service_t& hService_); //hService_ should be of IOUSBDevice class USBDeviceSI(UCHAR* pucBsdName_); USBDeviceSI(const USBDeviceSI& clDevice_); USBDeviceSI& operator=(const USBDeviceSI& clDevice_); BOOL USBReset() const; const UCHAR* GetBsdName() const { return szBsdName; } //std::auto_ptr<USBDevice> MakeCopy() const { return auto_ptr<USBDevice>(new USBDeviceSI(*this)); } //!! 
//Implementation of Device Interface USHORT GetVid() const { return usVid; } USHORT GetPid() const { return usPid; } ULONG GetSerialNumber() const { return ulSerialNumber; } ULONG GetLocation() const { return ulLocation; } BOOL GetProductDescription(UCHAR* pucProductDescription_, USHORT usBufferSize_) const; //guaranteed to be null-terminated BOOL GetSerialString(UCHAR* pucSerialString_, USHORT usBufferSize_) const; usb_device_t** GetDeviceInterface() const { return ppstDeviceInterface; } DeviceType::Enum GetDeviceType() const { return DeviceType::SI_LABS; } private: static ULONG GetDeviceNumber(const io_service_t& hService_, CFStringRef hProperty_); static BOOL GetDeviceString(const io_service_t& hService_, CFStringRef hProperty_, UCHAR* pucBsdName_, ULONG ulSize_, BOOL bSearchChildren_ = FALSE); static ULONG GetSerialNumber(const io_service_t& hService_, UCHAR* pucSerialString_, ULONG ulSize_); static usb_device_t** CreateDeviceInterface(const io_service_t& hService_); USHORT usVid; USHORT usPid; ULONG ulSerialNumber; UCHAR szBsdName[255]; UCHAR szProductDescription[USB_MAX_STRLEN]; UCHAR szSerialString[USB_MAX_STRLEN]; ULONG ulLocation; //GUID for USB device usb_device_t** ppstDeviceInterface; }; #endif // !defined(USB_DEVICE_VCP_HPP)
{ "content_hash": "0b43d0ac64ed5da5f89be6a9ce83655f", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 152, "avg_line_length": 30.670886075949365, "alnum_prop": 0.6269087907552621, "repo_name": "corbamico/ANT-Library", "id": "b6fd0441815e5060be41033a2b149900ff7e28b1", "size": "2684", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/ANT_LIB/software/USB/devices/usb_device_vcp.hpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "104077" }, { "name": "C++", "bytes": "1222636" } ], "symlink_target": "" }
""" pyinotify @author: Sebastien Martini @license: MIT License @contact: seb@dbzteam.org """ class PyinotifyError(Exception): """Indicates exceptions raised by a Pyinotify class.""" pass class UnsupportedPythonVersionError(PyinotifyError): """ Raised on unsupported Python versions. """ def __init__(self, version): """ @param version: Current Python version @type version: string """ PyinotifyError.__init__(self, ('Python %s is unsupported, requires ' 'at least Python 3.0') % version) class UnsupportedLibcVersionError(PyinotifyError): """ Raised when libc couldn't be loaded or when inotify functions werent provided. """ def __init__(self): err = 'libc does not provide required inotify support' PyinotifyError.__init__(self, err) # Check Python version import sys if sys.version < '3.0': raise UnsupportedPythonVersionError(sys.version) # Import directives import threading import os import select import struct import fcntl import errno import termios import array import logging import atexit from collections import deque from datetime import datetime, timedelta import time import fnmatch import re import ctypes import ctypes.util import asyncore import glob try: from functools import reduce except ImportError: pass # Will fail on Python 2.4 which has reduce() builtin anyway. __author__ = "seb@dbzteam.org (Sebastien Martini)" __version__ = "0.9.0" # Compatibity mode: set to True to improve compatibility with # Pyinotify 0.7.1. Do not set this variable yourself, call the # function compatibility_mode() instead. COMPATIBILITY_MODE = False # Load libc LIBC = None def strerrno(): code = ctypes.get_errno() return '%s (%s)' % (os.strerror(code), errno.errorcode[code]) def load_libc(): global LIBC libc = None try: libc = ctypes.util.find_library('c') except OSError as err: pass # Will attemp to load it with None anyway. except IOError as err: pass LIBC = ctypes.CDLL(libc, use_errno=True) # Check that libc has needed functions inside. 
if (not hasattr(LIBC, 'inotify_init') or not hasattr(LIBC, 'inotify_add_watch') or not hasattr(LIBC, 'inotify_rm_watch')): raise UnsupportedLibcVersionError() load_libc() class PyinotifyLogger(logging.Logger): """ Pyinotify logger used for logging unicode strings. """ def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None): rv = UnicodeLogRecord(name, level, fn, lno, msg, args, exc_info, func) if extra is not None: for key in extra: if (key in ["message", "asctime"]) or (key in rv.__dict__): raise KeyError("Attempt to overwrite %r in LogRecord" % key) rv.__dict__[key] = extra[key] return rv # Logging def logger_init(): """Initialize logger instance.""" log = logging.getLogger("pyinotify") console_handler = logging.StreamHandler() console_handler.setFormatter( logging.Formatter("[%(asctime)s %(name)s %(levelname)s] %(message)s")) log.addHandler(console_handler) log.setLevel(20) return log log = logger_init() # inotify's variables class SysCtlINotify: """ Access (read, write) inotify's variables through sysctl. Usually it requires administrator rights to update them. Examples: - Read max_queued_events attribute: myvar = max_queued_events.value - Update max_queued_events attribute: max_queued_events.value = 42 """ inotify_attrs = {'max_user_instances': 1, 'max_user_watches': 2, 'max_queued_events': 3} def __init__(self, attrname): sino = ctypes.c_int * 3 self._attrname = attrname self._attr = sino(5, 20, SysCtlINotify.inotify_attrs[attrname]) def get_val(self): """ Gets attribute's value. @return: stored value. @rtype: int """ oldv = ctypes.c_int(0) size = ctypes.c_int(ctypes.sizeof(oldv)) LIBC.sysctl(self._attr, 3, ctypes.c_voidp(ctypes.addressof(oldv)), ctypes.addressof(size), None, 0) return oldv.value def set_val(self, nval): """ Sets new attribute's value. @param nval: replaces current value by nval. 
@type nval: int """ oldv = ctypes.c_int(0) sizeo = ctypes.c_int(ctypes.sizeof(oldv)) newv = ctypes.c_int(nval) sizen = ctypes.c_int(ctypes.sizeof(newv)) LIBC.sysctl(self._attr, 3, ctypes.c_voidp(ctypes.addressof(oldv)), ctypes.addressof(sizeo), ctypes.c_voidp(ctypes.addressof(newv)), ctypes.addressof(sizen)) value = property(get_val, set_val) def __repr__(self): return '<%s=%d>' % (self._attrname, self.get_val()) # Singleton instances # # read: myvar = max_queued_events.value # update: max_queued_events.value = 42 # for attrname in ('max_queued_events', 'max_user_instances', 'max_user_watches'): globals()[attrname] = SysCtlINotify(attrname) class EventsCodes: """ Set of codes corresponding to each kind of events. Some of these flags are used to communicate with inotify, whereas the others are sent to userspace by inotify notifying some events. @cvar IN_ACCESS: File was accessed. @type IN_ACCESS: int @cvar IN_MODIFY: File was modified. @type IN_MODIFY: int @cvar IN_ATTRIB: Metadata changed. @type IN_ATTRIB: int @cvar IN_CLOSE_WRITE: Writtable file was closed. @type IN_CLOSE_WRITE: int @cvar IN_CLOSE_NOWRITE: Unwrittable file closed. @type IN_CLOSE_NOWRITE: int @cvar IN_OPEN: File was opened. @type IN_OPEN: int @cvar IN_MOVED_FROM: File was moved from X. @type IN_MOVED_FROM: int @cvar IN_MOVED_TO: File was moved to Y. @type IN_MOVED_TO: int @cvar IN_CREATE: Subfile was created. @type IN_CREATE: int @cvar IN_DELETE: Subfile was deleted. @type IN_DELETE: int @cvar IN_DELETE_SELF: Self (watched item itself) was deleted. @type IN_DELETE_SELF: int @cvar IN_MOVE_SELF: Self (watched item itself) was moved. @type IN_MOVE_SELF: int @cvar IN_UNMOUNT: Backing fs was unmounted. @type IN_UNMOUNT: int @cvar IN_Q_OVERFLOW: Event queued overflowed. @type IN_Q_OVERFLOW: int @cvar IN_IGNORED: File was ignored. @type IN_IGNORED: int @cvar IN_ONLYDIR: only watch the path if it is a directory (new in kernel 2.6.15). 
@type IN_ONLYDIR: int @cvar IN_DONT_FOLLOW: don't follow a symlink (new in kernel 2.6.15). IN_ONLYDIR we can make sure that we don't watch the target of symlinks. @type IN_DONT_FOLLOW: int @cvar IN_MASK_ADD: add to the mask of an already existing watch (new in kernel 2.6.14). @type IN_MASK_ADD: int @cvar IN_ISDIR: Event occurred against dir. @type IN_ISDIR: int @cvar IN_ONESHOT: Only send event once. @type IN_ONESHOT: int @cvar ALL_EVENTS: Alias for considering all of the events. @type ALL_EVENTS: int """ # The idea here is 'configuration-as-code' - this way, we get our nice class # constants, but we also get nice human-friendly text mappings to do lookups # against as well, for free: FLAG_COLLECTIONS = {'OP_FLAGS': { 'IN_ACCESS' : 0x00000001, # File was accessed 'IN_MODIFY' : 0x00000002, # File was modified 'IN_ATTRIB' : 0x00000004, # Metadata changed 'IN_CLOSE_WRITE' : 0x00000008, # Writable file was closed 'IN_CLOSE_NOWRITE' : 0x00000010, # Unwritable file closed 'IN_OPEN' : 0x00000020, # File was opened 'IN_MOVED_FROM' : 0x00000040, # File was moved from X 'IN_MOVED_TO' : 0x00000080, # File was moved to Y 'IN_CREATE' : 0x00000100, # Subfile was created 'IN_DELETE' : 0x00000200, # Subfile was deleted 'IN_DELETE_SELF' : 0x00000400, # Self (watched item itself) # was deleted 'IN_MOVE_SELF' : 0x00000800, # Self (watched item itself) was moved }, 'EVENT_FLAGS': { 'IN_UNMOUNT' : 0x00002000, # Backing fs was unmounted 'IN_Q_OVERFLOW' : 0x00004000, # Event queued overflowed 'IN_IGNORED' : 0x00008000, # File was ignored }, 'SPECIAL_FLAGS': { 'IN_ONLYDIR' : 0x01000000, # only watch the path if it is a # directory 'IN_DONT_FOLLOW' : 0x02000000, # don't follow a symlink 'IN_MASK_ADD' : 0x20000000, # add to the mask of an already # existing watch 'IN_ISDIR' : 0x40000000, # event occurred against dir 'IN_ONESHOT' : 0x80000000, # only send event once }, } def maskname(mask): """ Returns the event name associated to mask. IN_ISDIR is appended to the result when appropriate. 
Note: only one event is returned, because only one event can be raised at a given time. @param mask: mask. @type mask: int @return: event name. @rtype: str """ ms = mask name = '%s' if mask & IN_ISDIR: ms = mask - IN_ISDIR name = '%s|IN_ISDIR' return name % EventsCodes.ALL_VALUES[ms] maskname = staticmethod(maskname) # So let's now turn the configuration into code EventsCodes.ALL_FLAGS = {} EventsCodes.ALL_VALUES = {} for flagc, valc in EventsCodes.FLAG_COLLECTIONS.items(): # Make the collections' members directly accessible through the # class dictionary setattr(EventsCodes, flagc, valc) # Collect all the flags under a common umbrella EventsCodes.ALL_FLAGS.update(valc) # Make the individual masks accessible as 'constants' at globals() scope # and masknames accessible by values. for name, val in valc.items(): globals()[name] = val EventsCodes.ALL_VALUES[val] = name # all 'normal' events ALL_EVENTS = reduce(lambda x, y: x | y, EventsCodes.OP_FLAGS.values()) EventsCodes.ALL_FLAGS['ALL_EVENTS'] = ALL_EVENTS EventsCodes.ALL_VALUES[ALL_EVENTS] = 'ALL_EVENTS' class _Event: """ Event structure, represent events raised by the system. This is the base class and should be subclassed. """ def __init__(self, dict_): """ Attach attributes (contained in dict_) to self. @param dict_: Set of attributes. @type dict_: dictionary """ for tpl in dict_.items(): setattr(self, *tpl) def __repr__(self): """ @return: Generic event string representation. 
@rtype: str """ s = '' for attr, value in sorted(self.__dict__.items(), key=lambda x: x[0]): if attr.startswith('_'): continue if attr == 'mask': value = hex(getattr(self, attr)) elif isinstance(value, str) and not value: value = "''" s += ' %s%s%s' % (output_format.field_name(attr), output_format.punctuation('='), output_format.field_value(value)) s = '%s%s%s %s' % (output_format.punctuation('<'), output_format.class_name(self.__class__.__name__), s, output_format.punctuation('>')) return s def __str__(self): return repr(self) class _RawEvent(_Event): """ Raw event, it contains only the informations provided by the system. It doesn't infer anything. """ def __init__(self, wd, mask, cookie, name): """ @param wd: Watch Descriptor. @type wd: int @param mask: Bitmask of events. @type mask: int @param cookie: Cookie. @type cookie: int @param name: Basename of the file or directory against which the event was raised in case where the watched directory is the parent directory. None if the event was raised on the watched item itself. @type name: string or None """ # Use this variable to cache the result of str(self), this object # is immutable. self._str = None # name: remove trailing '\0' d = {'wd': wd, 'mask': mask, 'cookie': cookie, 'name': name.rstrip('\0')} _Event.__init__(self, d) log.debug(str(self)) def __str__(self): if self._str is None: self._str = _Event.__str__(self) return self._str class Event(_Event): """ This class contains all the useful informations about the observed event. However, the presence of each field is not guaranteed and depends on the type of event. In effect, some fields are irrelevant for some kind of event (for example 'cookie' is meaningless for IN_CREATE whereas it is mandatory for IN_MOVE_TO). The possible fields are: - wd (int): Watch Descriptor. - mask (int): Mask. - maskname (str): Readable event name. - path (str): path of the file or directory being watched. 
- name (str): Basename of the file or directory against which the event was raised in case where the watched directory is the parent directory. None if the event was raised on the watched item itself. This field is always provided even if the string is ''. - pathname (str): Concatenation of 'path' and 'name'. - src_pathname (str): Only present for IN_MOVED_TO events and only in the case where IN_MOVED_FROM events are watched too. Holds the source pathname from where pathname was moved from. - cookie (int): Cookie. - dir (bool): True if the event was raised against a directory. """ def __init__(self, raw): """ Concretely, this is the raw event plus inferred infos. """ _Event.__init__(self, raw) self.maskname = EventsCodes.maskname(self.mask) if COMPATIBILITY_MODE: self.event_name = self.maskname try: if self.name: self.pathname = os.path.abspath(os.path.join(self.path, self.name)) else: self.pathname = os.path.abspath(self.path) except AttributeError as err: # Usually it is not an error some events are perfectly valids # despite the lack of these attributes. log.debug(err) class ProcessEventError(PyinotifyError): """ ProcessEventError Exception. Raised on ProcessEvent error. """ def __init__(self, err): """ @param err: Exception error description. @type err: string """ PyinotifyError.__init__(self, err) class _ProcessEvent: """ Abstract processing event class. """ def __call__(self, event): """ To behave like a functor the object must be callable. This method is a dispatch method. Its lookup order is: 1. process_MASKNAME method 2. process_FAMILY_NAME method 3. otherwise calls process_default @param event: Event to be processed. @type event: Event object @return: By convention when used from the ProcessEvent class: - Returning False or None (default value) means keep on executing next chained functors (see chain.py example). - Returning True instead means do not execute next processing functions. 
        @rtype: bool
        @raise ProcessEventError: Event object undispatchable,
                                  unknown event.
        """
        # IN_ISDIR is only a qualifier piggybacked onto another flag; strip
        # it so the remaining value maps to exactly one event name.
        stripped_mask = event.mask - (event.mask & IN_ISDIR)
        maskname = EventsCodes.ALL_VALUES.get(stripped_mask)
        if maskname is None:
            raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask)

        # 1- look for process_MASKNAME
        meth = getattr(self, 'process_' + maskname, None)
        if meth is not None:
            return meth(event)
        # 2- look for process_FAMILY_NAME, e.g. 'IN_CLOSE_WRITE' falls back
        #    to 'process_IN_CLOSE' (the token right after the 'IN_' prefix).
        meth = getattr(self, 'process_IN_' + maskname.split('_')[1], None)
        if meth is not None:
            return meth(event)
        # 3- default call method process_default
        return self.process_default(event)

    def __repr__(self):
        return '<%s>' % self.__class__.__name__


class _SysProcessEvent(_ProcessEvent):
    """
    There are three kinds of processing according to each event:

      1. special handling (deletion from internal container, bug, ...).
      2. default treatment: which is applied to the majority of events.
      3. IN_ISDIR is never sent alone, it is piggybacked with a standard
         event, it is not processed as the other events, instead, its
         value is captured and appropriately aggregated to dst event.
    """
    def __init__(self, wm, notifier):
        """
        @param wm: Watch Manager.
        @type wm: WatchManager instance
        @param notifier: Notifier.
        @type notifier: Notifier instance
        """
        self._watch_manager = wm  # watch manager
        self._notifier = notifier  # notifier
        # Both mappings are time-stamped so cleanup() can prune entries
        # older than one minute.
        self._mv_cookie = {}  # {cookie(int): (src_path(str), date), ...}
        self._mv = {}  # {src_path(str): (dst_path(str), date), ...}

    def cleanup(self):
        """
        Cleanup (delete) old (>1mn) records contained in self._mv_cookie
        and self._mv.
""" date_cur_ = datetime.now() for seq in (self._mv_cookie, self._mv): for k in list(seq.keys()): if (date_cur_ - seq[k][1]) > timedelta(minutes=1): log.debug('Cleanup: deleting entry %s', seq[k][0]) del seq[k] def process_IN_CREATE(self, raw_event): """ If the event affects a directory and the auto_add flag of the targetted watch is set to True, a new watch is added on this new directory, with the same attribute values than those of this watch. """ if raw_event.mask & IN_ISDIR: watch_ = self._watch_manager.get_watch(raw_event.wd) created_dir = os.path.join(watch_.path, raw_event.name) if watch_.auto_add and not watch_.exclude_filter(created_dir): addw = self._watch_manager.add_watch # The newly monitored directory inherits attributes from its # parent directory. addw_ret = addw(created_dir, watch_.mask, proc_fun=watch_.proc_fun, rec=False, auto_add=watch_.auto_add, exclude_filter=watch_.exclude_filter) # Trick to handle mkdir -p /t1/t2/t3 where t1 is watched and # t2 and t3 are created. # Since the directory is new, then everything inside it # must also be new. created_dir_wd = addw_ret.get(created_dir) if (created_dir_wd is not None) and created_dir_wd > 0: for name in os.listdir(created_dir): inner = os.path.join(created_dir, name) if (os.path.isdir(inner) and self._watch_manager.get_wd(inner) is None): # Generate (simulate) creation event for sub # directories. rawevent = _RawEvent(created_dir_wd, IN_CREATE | IN_ISDIR, 0, name) self._notifier.append_event(rawevent) return self.process_default(raw_event) def process_IN_MOVED_FROM(self, raw_event): """ Map the cookie with the source path (+ date for cleaning). 
""" watch_ = self._watch_manager.get_watch(raw_event.wd) path_ = watch_.path src_path = os.path.normpath(os.path.join(path_, raw_event.name)) self._mv_cookie[raw_event.cookie] = (src_path, datetime.now()) return self.process_default(raw_event, {'cookie': raw_event.cookie}) def process_IN_MOVED_TO(self, raw_event): """ Map the source path with the destination path (+ date for cleaning). """ watch_ = self._watch_manager.get_watch(raw_event.wd) path_ = watch_.path dst_path = os.path.normpath(os.path.join(path_, raw_event.name)) mv_ = self._mv_cookie.get(raw_event.cookie) to_append = {'cookie': raw_event.cookie} if mv_ is not None: self._mv[mv_[0]] = (dst_path, datetime.now()) # Let's assume that IN_MOVED_FROM event is always queued before # that its associated (they share a common cookie) IN_MOVED_TO # event is queued itself. It is then possible in that scenario # to provide as additional information to the IN_MOVED_TO event # the original pathname of the moved file/directory. to_append['src_pathname'] = mv_[0] elif (raw_event.mask & IN_ISDIR and watch_.auto_add and not watch_.exclude_filter(dst_path)): # We got a diretory that's "moved in" from an unknown source and # auto_add is enabled. Manually add watches to the inner subtrees. # The newly monitored directory inherits attributes from its # parent directory. self._watch_manager.add_watch(dst_path, watch_.mask, proc_fun=watch_.proc_fun, rec=True, auto_add=True, exclude_filter=watch_.exclude_filter) return self.process_default(raw_event, to_append) def process_IN_MOVE_SELF(self, raw_event): """ STATUS: the following bug has been fixed in recent kernels (FIXME: which version ?). Now it raises IN_DELETE_SELF instead. Old kernels were bugged, this event raised when the watched item were moved, so we had to update its path, but under some circumstances it was impossible: if its parent directory and its destination directory wasn't watched. 
The kernel (see include/linux/fsnotify.h) doesn't bring us enough informations like the destination path of moved items. """ watch_ = self._watch_manager.get_watch(raw_event.wd) src_path = watch_.path mv_ = self._mv.get(src_path) if mv_: dest_path = mv_[0] watch_.path = dest_path # add the separator to the source path to avoid overlapping # path issue when testing with startswith() src_path += os.path.sep src_path_len = len(src_path) # The next loop renames all watches with src_path as base path. # It seems that IN_MOVE_SELF does not provide IN_ISDIR information # therefore the next loop is iterated even if raw_event is a file. for w in self._watch_manager.watches.values(): if w.path.startswith(src_path): # Note that dest_path is a normalized path. w.path = os.path.join(dest_path, w.path[src_path_len:]) else: log.error("The pathname '%s' of this watch %s has probably changed " "and couldn't be updated, so it cannot be trusted " "anymore. To fix this error move directories/files only " "between watched parents directories, in this case e.g. " "put a watch on '%s'.", watch_.path, watch_, os.path.normpath(os.path.join(watch_.path, os.path.pardir))) if not watch_.path.endswith('-unknown-path'): watch_.path += '-unknown-path' return self.process_default(raw_event) def process_IN_Q_OVERFLOW(self, raw_event): """ Only signal an overflow, most of the common flags are irrelevant for this event (path, wd, name). """ return Event({'mask': raw_event.mask}) def process_IN_IGNORED(self, raw_event): """ The watch descriptor raised by this event is now ignored (forever), it can be safely deleted from the watch manager dictionary. After this event we can be sure that neither the event queue nor the system will raise an event associated to this wd again. 
""" event_ = self.process_default(raw_event) self._watch_manager.del_watch(raw_event.wd) return event_ def process_default(self, raw_event, to_append=None): """ Commons handling for the followings events: IN_ACCESS, IN_MODIFY, IN_ATTRIB, IN_CLOSE_WRITE, IN_CLOSE_NOWRITE, IN_OPEN, IN_DELETE, IN_DELETE_SELF, IN_UNMOUNT. """ watch_ = self._watch_manager.get_watch(raw_event.wd) if raw_event.mask & (IN_DELETE_SELF | IN_MOVE_SELF): # Unfornulately this information is not provided by the kernel dir_ = watch_.dir else: dir_ = bool(raw_event.mask & IN_ISDIR) dict_ = {'wd': raw_event.wd, 'mask': raw_event.mask, 'path': watch_.path, 'name': raw_event.name, 'dir': dir_} if COMPATIBILITY_MODE: dict_['is_dir'] = dir_ if to_append is not None: dict_.update(to_append) return Event(dict_) class ProcessEvent(_ProcessEvent): """ Process events objects, can be specialized via subclassing, thus its behavior can be overriden: Note: you should not override __init__ in your subclass instead define a my_init() method, this method will be called automatically from the constructor of this class with its optionals parameters. 1. Provide specialized individual methods, e.g. process_IN_DELETE for processing a precise type of event (e.g. IN_DELETE in this case). 2. Or/and provide methods for processing events by 'family', e.g. process_IN_CLOSE method will process both IN_CLOSE_WRITE and IN_CLOSE_NOWRITE events (if process_IN_CLOSE_WRITE and process_IN_CLOSE_NOWRITE aren't defined though). 3. Or/and override process_default for catching and processing all the remaining types of events. """ pevent = None def __init__(self, pevent=None, **kargs): """ Enable chaining of ProcessEvent instances. @param pevent: Optional callable object, will be called on event processing (before self). @type pevent: callable @param kargs: This constructor is implemented as a template method delegating its optionals keyworded arguments to the method my_init(). 
@type kargs: dict """ self.pevent = pevent self.my_init(**kargs) def my_init(self, **kargs): """ This method is called from ProcessEvent.__init__(). This method is empty here and must be redefined to be useful. In effect, if you need to specifically initialize your subclass' instance then you just have to override this method in your subclass. Then all the keyworded arguments passed to ProcessEvent.__init__() will be transmitted as parameters to this method. Beware you MUST pass keyword arguments though. @param kargs: optional delegated arguments from __init__(). @type kargs: dict """ pass def __call__(self, event): stop_chaining = False if self.pevent is not None: # By default methods return None so we set as guideline # that methods asking for stop chaining must explicitely # return non None or non False values, otherwise the default # behavior will be to accept chain call to the corresponding # local method. stop_chaining = self.pevent(event) if not stop_chaining: return _ProcessEvent.__call__(self, event) def nested_pevent(self): return self.pevent def process_IN_Q_OVERFLOW(self, event): """ By default this method only reports warning messages, you can overredide it by subclassing ProcessEvent and implement your own process_IN_Q_OVERFLOW method. The actions you can take on receiving this event is either to update the variable max_queued_events in order to handle more simultaneous events or to modify your code in order to accomplish a better filtering diminishing the number of raised events. Because this method is defined, IN_Q_OVERFLOW will never get transmitted as arguments to process_default calls. @param event: IN_Q_OVERFLOW event. @type event: dict """ log.warning('Event queue overflowed.') def process_default(self, event): """ Default processing event method. By default does nothing. Subclass ProcessEvent and redefine this method in order to modify its behavior. @param event: Event to be processed. 
Can be of any type of events but IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW). @type event: Event instance """ pass class PrintAllEvents(ProcessEvent): """ Dummy class used to print events strings representations. For instance this class is used from command line to print all received events to stdout. """ def my_init(self, out=None): """ @param out: Where events will be written. @type out: Object providing a valid file object interface. """ if out is None: out = sys.stdout self._out = out def process_default(self, event): """ Writes event string representation to file object provided to my_init(). @param event: Event to be processed. Can be of any type of events but IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW). @type event: Event instance """ self._out.write(str(event)) self._out.write('\n') self._out.flush() class ChainIfTrue(ProcessEvent): """ Makes conditional chaining depending on the result of the nested processing instance. """ def my_init(self, func): """ Method automatically called from base class constructor. """ self._func = func def process_default(self, event): return not self._func(event) class Stats(ProcessEvent): """ Compute and display trivial statistics about processed events. """ def my_init(self): """ Method automatically called from base class constructor. """ self._start_time = time.time() self._stats = {} self._stats_lock = threading.Lock() def process_default(self, event): """ Processes |event|. 
""" self._stats_lock.acquire() try: events = event.maskname.split('|') for event_name in events: count = self._stats.get(event_name, 0) self._stats[event_name] = count + 1 finally: self._stats_lock.release() def _stats_copy(self): self._stats_lock.acquire() try: return self._stats.copy() finally: self._stats_lock.release() def __repr__(self): stats = self._stats_copy() elapsed = int(time.time() - self._start_time) elapsed_str = '' if elapsed < 60: elapsed_str = str(elapsed) + 'sec' elif 60 <= elapsed < 3600: elapsed_str = '%dmn%dsec' % (elapsed / 60, elapsed % 60) elif 3600 <= elapsed < 86400: elapsed_str = '%dh%dmn' % (elapsed / 3600, (elapsed % 3600) / 60) elif elapsed >= 86400: elapsed_str = '%dd%dh' % (elapsed / 86400, (elapsed % 86400) / 3600) stats['ElapsedTime'] = elapsed_str l = [] for ev, value in sorted(stats.items(), key=lambda x: x[0]): l.append(' %s=%s' % (output_format.field_name(ev), output_format.field_value(value))) s = '<%s%s >' % (output_format.class_name(self.__class__.__name__), ''.join(l)) return s def dump(self, filename): """ Dumps statistics to file |filename|. @param filename: pathname. @type filename: string """ with open(filename, 'w') as file_obj: file_obj.write(str(self)) def __str__(self, scale=45): stats = self._stats_copy() if not stats: return '' m = max(stats.values()) unity = scale / m fmt = '%%-26s%%-%ds%%s' % (len(output_format.field_value('@' * scale)) + 1) def func(x): return fmt % (output_format.field_name(x[0]), output_format.field_value('@' * int(x[1] * unity)), output_format.simple('%d' % x[1], 'yellow')) s = '\n'.join(map(func, sorted(stats.items(), key=lambda x: x[0]))) return s class NotifierError(PyinotifyError): """ Notifier Exception. Raised on Notifier error. """ def __init__(self, err): """ @param err: Exception string's description. @type err: string """ PyinotifyError.__init__(self, err) class Notifier: """ Read notifications, process events. 
""" def __init__(self, watch_manager, default_proc_fun=None, read_freq=0, threshold=0, timeout=None): """ Initialization. read_freq, threshold and timeout parameters are used when looping. @param watch_manager: Watch Manager. @type watch_manager: WatchManager instance @param default_proc_fun: Default processing method. If None, a new instance of PrintAllEvents will be assigned. @type default_proc_fun: instance of ProcessEvent @param read_freq: if read_freq == 0, events are read asap, if read_freq is > 0, this thread sleeps max(0, read_freq - timeout) seconds. But if timeout is None it may be different because poll is blocking waiting for something to read. @type read_freq: int @param threshold: File descriptor will be read only if the accumulated size to read becomes >= threshold. If != 0, you likely want to use it in combination with an appropriate value for read_freq because without that you would keep looping without really reading anything and that until the amount of events to read is >= threshold. At least with read_freq set you might sleep. 
@type threshold: int @param timeout: http://docs.python.org/lib/poll-objects.html#poll-objects @type timeout: int """ # Watch Manager instance self._watch_manager = watch_manager # File descriptor self._fd = self._watch_manager.get_fd() # Poll object and registration self._pollobj = select.poll() self._pollobj.register(self._fd, select.POLLIN) # This pipe is correctely initialized and used by ThreadedNotifier self._pipe = (-1, -1) # Event queue self._eventq = deque() # System processing functor, common to all events self._sys_proc_fun = _SysProcessEvent(self._watch_manager, self) # Default processing method self._default_proc_fun = default_proc_fun if default_proc_fun is None: self._default_proc_fun = PrintAllEvents() # Loop parameters self._read_freq = read_freq self._threshold = threshold self._timeout = timeout # Coalesce events option self._coalesce = False # set of str(raw_event), only used when coalesce option is True self._eventset = set() def append_event(self, event): """ Append a raw event to the event queue. @param event: An event. @type event: _RawEvent instance. """ self._eventq.append(event) def proc_fun(self): return self._default_proc_fun def coalesce_events(self, coalesce=True): """ Coalescing events. Events are usually processed by batchs, their size depend on various factors. Thus, before processing them, events received from inotify are aggregated in a fifo queue. If this coalescing option is enabled events are filtered based on their unicity, only unique events are enqueued, doublons are discarded. An event is unique when the combination of its fields (wd, mask, cookie, name) is unique among events of a same batch. After a batch of events is processed any events is accepted again. By default this option is disabled, you have to explictly call this function to turn it on. @param coalesce: Optional new coalescing value. True by default. 
@type coalesce: Bool """ self._coalesce = coalesce if not coalesce: self._eventset.clear() def check_events(self, timeout=None): """ Check for new events available to read, blocks up to timeout milliseconds. @param timeout: If specified it overrides the corresponding instance attribute _timeout. @type timeout: int @return: New events to read. @rtype: bool """ while True: try: # blocks up to 'timeout' milliseconds if timeout is None: timeout = self._timeout ret = self._pollobj.poll(timeout) except select.error as err: if err.errno == errno.EINTR: continue # interrupted, retry else: raise else: break if not ret or (self._pipe[0] == ret[0][0]): return False # only one fd is polled return ret[0][1] & select.POLLIN def read_events(self): """ Read events from device, build _RawEvents, and enqueue them. """ buf_ = array.array('i', [0]) # get event queue size if fcntl.ioctl(self._fd, termios.FIONREAD, buf_, 1) == -1: return queue_size = buf_[0] if queue_size < self._threshold: log.debug('(fd: %d) %d bytes available to read but threshold is ' 'fixed to %d bytes', self._fd, queue_size, self._threshold) return try: # Read content from file r = os.read(self._fd, queue_size) except Exception as msg: raise NotifierError(msg) log.debug('Event queue size: %d', queue_size) rsum = 0 # counter while rsum < queue_size: s_size = 16 # Retrieve wd, mask, cookie and fname_len wd, mask, cookie, fname_len = struct.unpack('iIII', r[rsum:rsum+s_size]) # Retrieve name bname, = struct.unpack('%ds' % fname_len, r[rsum + s_size:rsum + s_size + fname_len]) # FIXME: should we explictly call sys.getdefaultencoding() here ?? uname = bname.decode() rawevent = _RawEvent(wd, mask, cookie, uname) if self._coalesce: # Only enqueue new (unique) events. 
raweventstr = str(rawevent) if raweventstr not in self._eventset: self._eventset.add(raweventstr) self._eventq.append(rawevent) else: self._eventq.append(rawevent) rsum += s_size + fname_len def process_events(self): """ Routine for processing events from queue by calling their associated proccessing method (an instance of ProcessEvent). It also does internal processings, to keep the system updated. """ while self._eventq: raw_event = self._eventq.popleft() # pop next event watch_ = self._watch_manager.get_watch(raw_event.wd) if watch_ is None: # Not really sure how we ended up here, nor how we should # handle these types of events and if it is appropriate to # completly skip them (like we are doing here). log.warning("Unable to retrieve Watch object associated to %s", repr(raw_event)) continue revent = self._sys_proc_fun(raw_event) # system processings if watch_ and watch_.proc_fun: watch_.proc_fun(revent) # user processings else: self._default_proc_fun(revent) self._sys_proc_fun.cleanup() # remove olds MOVED_* events records if self._coalesce: self._eventset.clear() def __daemonize(self, pid_file=None, force_kill=False, stdin=os.devnull, stdout=os.devnull, stderr=os.devnull): """ pid_file: file to which the pid will be written. force_kill: if True kill the process associated to pid_file. stdin, stdout, stderr: files associated to common streams. """ if pid_file is None: dirname = '/var/run/' basename = os.path.basename(sys.argv[0]) or 'pyinotify' pid_file = os.path.join(dirname, basename + '.pid') if os.path.exists(pid_file): with open(pid_file, 'r') as fo: try: pid = int(fo.read()) except ValueError: pid = None if pid is not None: try: os.kill(pid, 0) except OSError as err: if err.errno == errno.ESRCH: log.debug(err) else: log.error(err) else: if not force_kill: s = 'There is already a pid file %s with pid %d' raise NotifierError(s % (pid_file, pid)) else: os.kill(pid, 9) def fork_daemon(): # Adapted from Chad J. 
Schroeder's recipe
            # @see http://code.activestate.com/recipes/278731/
            # Classic double-fork daemonization: the first child calls
            # setsid() to detach from the controlling terminal, then forks
            # again so the daemon can never reacquire one; both
            # intermediate processes _exit() immediately.
            pid = os.fork()
            if (pid == 0):
                # First child: become session leader, then fork the daemon.
                os.setsid()
                pid = os.fork()
                if (pid == 0):
                    # Second child: the daemon itself.
                    os.chdir('/')
                    os.umask(0)
                else:
                    # First child exits, orphaning the daemon.
                    os._exit(0)
            else:
                # Original parent returns control to the caller.
                os._exit(0)

        # Rebind the standard streams (fds 0, 1, 2) to the given targets.
        fd_inp = open(stdin, 'r')
        os.dup2(fd_inp.fileno(), 0)
        fd_out = open(stdout, 'w')
        os.dup2(fd_out.fileno(), 1)
        fd_err = open(stderr, 'w')
        os.dup2(fd_err.fileno(), 2)

        # Detach task
        fork_daemon()

        # Write pid of the daemonized process.
        with open(pid_file, 'w') as file_obj:
            file_obj.write(str(os.getpid()) + '\n')

        # Best-effort removal of the pid file at interpreter exit.
        atexit.register(lambda : os.unlink(pid_file))

    def _sleep(self, ref_time):
        # Only consider sleeping if read_freq is > 0: sleep just long
        # enough, measured from ref_time, to honor the requested polling
        # frequency.
        if self._read_freq > 0:
            cur_time = time.time()
            sleep_amount = self._read_freq - (cur_time - ref_time)
            if sleep_amount > 0:
                log.debug('Now sleeping %d seconds', sleep_amount)
                time.sleep(sleep_amount)

    def loop(self, callback=None, daemonize=False, **args):
        """
        Events are read only one time every min(read_freq, timeout)
        seconds at best and only if the size to read is >= threshold.
        After this method returns it must not be called again for the same
        instance.

        @param callback: Functor called after each event processing iteration.
                         Expects to receive the notifier object (self) as
                         first parameter. If this function returns True the
                         loop is immediately terminated otherwise the loop
                         method keeps looping.
        @type callback: callable object or function
        @param daemonize: This thread is daemonized if set to True.
        @type daemonize: boolean
        @param args: Optional and relevant only if daemonize is True. Remaining
                     keyworded arguments are directly passed to daemonize see
                     __daemonize() method.
@type args: various """ if daemonize: self.__daemonize(**args) # Read and process events forever while 1: try: self.process_events() if (callback is not None) and (callback(self) is True): break ref_time = time.time() # check_events is blocking if self.check_events(): self._sleep(ref_time) self.read_events() except KeyboardInterrupt: # Stop monitoring if sigint is caught (Control-C). log.debug('Pyinotify stops monitoring.') break # Close internals self.stop() def stop(self): """ Close inotify's instance (close its file descriptor). It destroys all existing watches, pending events,... This method is automatically called at the end of loop(). """ self._pollobj.unregister(self._fd) os.close(self._fd) class ThreadedNotifier(threading.Thread, Notifier): """ This notifier inherits from threading.Thread for instanciating a separate thread, and also inherits from Notifier, because it is a threaded notifier. Note that every functionality provided by this class is also provided through Notifier class. Moreover Notifier should be considered first because it is not threaded and could be easily daemonized. """ def __init__(self, watch_manager, default_proc_fun=None, read_freq=0, threshold=0, timeout=None): """ Initialization, initialize base classes. read_freq, threshold and timeout parameters are used when looping. @param watch_manager: Watch Manager. @type watch_manager: WatchManager instance @param default_proc_fun: Default processing method. See base class. @type default_proc_fun: instance of ProcessEvent @param read_freq: if read_freq == 0, events are read asap, if read_freq is > 0, this thread sleeps max(0, read_freq - timeout) seconds. @type read_freq: int @param threshold: File descriptor will be read only if the accumulated size to read becomes >= threshold. 
If != 0, you likely want to use it in combination with an appropriate value set for read_freq because without that you would keep looping without really reading anything and that until the amount of events to read is >= threshold. At least with read_freq you might sleep. @type threshold: int @param timeout: see http://docs.python.org/lib/poll-objects.html#poll-objects @type timeout: int """ # Init threading base class threading.Thread.__init__(self) # Stop condition self._stop_event = threading.Event() # Init Notifier base class Notifier.__init__(self, watch_manager, default_proc_fun, read_freq, threshold, timeout) # Create a new pipe used for thread termination self._pipe = os.pipe() self._pollobj.register(self._pipe[0], select.POLLIN) def stop(self): """ Stop notifier's loop. Stop notification. Join the thread. """ self._stop_event.set() os.write(self._pipe[1], b'stop') threading.Thread.join(self) Notifier.stop(self) self._pollobj.unregister(self._pipe[0]) os.close(self._pipe[0]) os.close(self._pipe[1]) def loop(self): """ Thread's main loop. Don't meant to be called by user directly. Call inherited start() method instead. Events are read only once time every min(read_freq, timeout) seconds at best and only if the size of events to read is >= threshold. """ # When the loop must be terminated .stop() is called, 'stop' # is written to pipe fd so poll() returns and .check_events() # returns False which make evaluate the While's stop condition # ._stop_event.isSet() wich put an end to the thread's execution. while not self._stop_event.isSet(): self.process_events() ref_time = time.time() if self.check_events(): self._sleep(ref_time) self.read_events() def run(self): """ Start thread's loop: read and process events until the method stop() is called. Never call this method directly, instead call the start() method inherited from threading.Thread, which then will call run() in its turn. 
""" self.loop() class AsyncNotifier(asyncore.file_dispatcher, Notifier): """ This notifier inherits from asyncore.file_dispatcher in order to be able to use pyinotify along with the asyncore framework. """ def __init__(self, watch_manager, default_proc_fun=None, read_freq=0, threshold=0, timeout=None, channel_map=None): """ Initializes the async notifier. The only additional parameter is 'channel_map' which is the optional asyncore private map. See Notifier class for the meaning of the others parameters. """ Notifier.__init__(self, watch_manager, default_proc_fun, read_freq, threshold, timeout) asyncore.file_dispatcher.__init__(self, self._fd, channel_map) def handle_read(self): """ When asyncore tells us we can read from the fd, we proceed processing events. This method can be overridden for handling a notification differently. """ self.read_events() self.process_events() class Watch: """ Represent a watch, i.e. a file or directory being watched. """ def __init__(self, wd, path, mask, proc_fun, auto_add, exclude_filter): """ Initializations. @param wd: Watch descriptor. @type wd: int @param path: Path of the file or directory being watched. @type path: str @param mask: Mask. @type mask: int @param proc_fun: Processing callable object. @type proc_fun: @param auto_add: Automatically add watches on new directories. @type auto_add: bool @param exclude_filter: Boolean function, used to exclude new directories from being automatically watched. See WatchManager.__init__ @type exclude_filter: callable object """ self.wd = wd self.path = path self.mask = mask self.proc_fun = proc_fun self.auto_add = auto_add self.exclude_filter = exclude_filter self.dir = os.path.isdir(self.path) def __repr__(self): """ @return: String representation. 
@rtype: str """ s = ' '.join(['%s%s%s' % (output_format.field_name(attr), output_format.punctuation('='), output_format.field_value(getattr(self, attr))) \ for attr in self.__dict__ if not attr.startswith('_')]) s = '%s%s %s %s' % (output_format.punctuation('<'), output_format.class_name(self.__class__.__name__), s, output_format.punctuation('>')) return s class ExcludeFilter: """ ExcludeFilter is an exclusion filter. """ def __init__(self, arg_lst): """ Examples: ef1 = ExcludeFilter(["^/etc/rc.*", "^/etc/hostname"]) ef2 = ExcludeFilter("/my/path/exclude.lst") Where exclude.lst contains: ^/etc/rc.* ^/etc/hostname @param arg_lst: is either a list of patterns or a filename from which patterns will be loaded. @type arg_lst: list of str or str """ if isinstance(arg_lst, str): lst = self._load_patterns_from_file(arg_lst) elif isinstance(arg_lst, list): lst = arg_lst else: raise TypeError self._lregex = [] for regex in lst: self._lregex.append(re.compile(regex, re.UNICODE)) def _load_patterns_from_file(self, filename): lst = [] with open(filename, 'r') as file_obj: for line in file_obj.readlines(): # Trim leading an trailing whitespaces pattern = line.strip() if not pattern or pattern.startswith('#'): continue lst.append(pattern) return lst def _match(self, regex, path): return regex.match(path) is not None def __call__(self, path): """ @param path: Path to match against provided regexps. @type path: str @return: Return True if path has been matched and should be excluded, False otherwise. @rtype: bool """ for regex in self._lregex: if self._match(regex, path): return True return False class WatchManagerError(Exception): """ WatchManager Exception. Raised on error encountered on watches operations. """ def __init__(self, msg, wmd): """ @param msg: Exception string's description. @type msg: string @param wmd: This dictionary contains the wd assigned to paths of the same call for which watches were successfully added. 
@type wmd: dict """ self.wmd = wmd Exception.__init__(self, msg) class WatchManager: """ Provide operations for watching files and directories. Its internal dictionary is used to reference watched items. When used inside threaded code, one must instanciate as many WatchManager instances as there are ThreadedNotifier instances. """ def __init__(self, exclude_filter=lambda path: False): """ Initialization: init inotify, init watch manager dictionary. Raise OSError if initialization fails. @param exclude_filter: boolean function, returns True if current path must be excluded from being watched. Convenient for providing a common exclusion filter for every call to add_watch. @type exclude_filter: callable object """ self._exclude_filter = exclude_filter self._wmd = {} # watch dict key: watch descriptor, value: watch self._fd = LIBC.inotify_init() # inotify's init, file descriptor if self._fd < 0: err = 'Cannot initialize new instance of inotify Errno=%s' raise OSError(err % strerrno()) def get_fd(self): """ Return assigned inotify's file descriptor. @return: File descriptor. @rtype: int """ return self._fd def get_watch(self, wd): """ Get watch from provided watch descriptor wd. @param wd: Watch descriptor. @type wd: int """ return self._wmd.get(wd) def del_watch(self, wd): """ Remove watch entry associated to watch descriptor wd. @param wd: Watch descriptor. @type wd: int """ try: del self._wmd[wd] except KeyError as err: log.error(str(err)) @property def watches(self): """ Get a reference on the internal watch manager dictionary. @return: Internal watch manager dictionary. @rtype: dict """ return self._wmd def __format_path(self, path): """ Format path to its internal (stored in watch manager) representation. """ # path must be a unicode string (str) and is just normalized. return os.path.normpath(path) def __add_watch(self, path, mask, proc_fun, auto_add, exclude_filter): """ Add a watch on path, build a Watch object and insert it in the watch manager dictionary. 
Return the wd value. """ path = self.__format_path(path) # path to a bytes string. This conversion seems to be required because # ctypes.create_string_buffer seems to manipulate bytes # strings representations internally. # Moreover it seems that LIBC.inotify_add_watch does not work very # well when it receives an ctypes.create_unicode_buffer instance as # argument. However wd are _always_ indexed with their original # unicode paths in wmd. byte_path = path.encode(sys.getfilesystemencoding()) wd_ = LIBC.inotify_add_watch(self._fd, ctypes.create_string_buffer(byte_path), mask) if wd_ < 0: return wd_ watch_ = Watch(wd=wd_, path=path, mask=mask, proc_fun=proc_fun, auto_add=auto_add, exclude_filter=exclude_filter) self._wmd[wd_] = watch_ log.debug('New %s', watch_) return wd_ def __glob(self, path, do_glob): if do_glob: return glob.iglob(path) else: return [path] def add_watch(self, path, mask, proc_fun=None, rec=False, auto_add=False, do_glob=False, quiet=True, exclude_filter=None): """ Add watch(s) on the provided |path|(s) with associated |mask| flag value and optionally with a processing |proc_fun| function and recursive flag |rec| set to True. All |path| components _must_ be str (i.e. unicode) objects. If |path| is already watched it is ignored, but if it is called with option rec=True a watch is put on each one of its not-watched subdirectory. @param path: Path to watch, the path can either be a file or a directory. Also accepts a sequence (list) of paths. @type path: string or list of strings @param mask: Bitmask of events. @type mask: int @param proc_fun: Processing object. @type proc_fun: function or ProcessEvent instance or instance of one of its subclasses or callable object. @param rec: Recursively add watches from path on all its subdirectories, set to False by default (doesn't follows symlinks in any case). @type rec: bool @param auto_add: Automatically add watches on newly created directories in watched parent |path| directory. 
@type auto_add: bool @param do_glob: Do globbing on pathname (see standard globbing module for more informations). @type do_glob: bool @param quiet: if False raises a WatchManagerError exception on error. See example not_quiet.py. @type quiet: bool @param exclude_filter: predicate (boolean function), which returns True if the current path must be excluded from being watched. This argument has precedence over exclude_filter passed to the class' constructor. @type exclude_filter: callable object @return: dict of paths associated to watch descriptors. A wd value is positive if the watch was added sucessfully, otherwise the value is negative. If the path was invalid or was already watched it is not included into this returned dictionary. @rtype: dict of {str: int} """ ret_ = {} # return {path: wd, ...} if exclude_filter is None: exclude_filter = self._exclude_filter # normalize args as list elements for npath in self.__format_param(path): # Require that path be a unicode string if not isinstance(npath, str): ret_[path] = -3 continue # unix pathname pattern expansion for apath in self.__glob(npath, do_glob): # recursively list subdirs according to rec param for rpath in self.__walk_rec(apath, rec): if self.get_wd(rpath) is not None: # We decide to ignore paths already inserted into # the watch manager. Need to be removed with rm_watch() # first. Or simply call update_watch() to update it. continue if not exclude_filter(rpath): wd = ret_[rpath] = self.__add_watch(rpath, mask, proc_fun, auto_add, exclude_filter) if wd < 0: err = 'add_watch: cannot watch %s WD=%d Errno=%s' err = err % (rpath, wd, strerrno()) if quiet: log.error(err) else: raise WatchManagerError(err, ret_) else: # Let's say -2 means 'explicitely excluded # from watching'. ret_[rpath] = -2 return ret_ def __get_sub_rec(self, lpath): """ Get every wd from self._wmd if its path is under the path of one (at least) of those in lpath. Doesn't follow symlinks. 
@param lpath: list of watch descriptor @type lpath: list of int @return: list of watch descriptor @rtype: list of int """ for d in lpath: root = self.get_path(d) if root is not None: # always keep root yield d else: # if invalid continue # nothing else to expect if not os.path.isdir(root): continue # normalization root = os.path.normpath(root) # recursion lend = len(root) for iwd in self._wmd.items(): cur = iwd[1].path pref = os.path.commonprefix([root, cur]) if root == os.sep or (len(pref) == lend and \ len(cur) > lend and \ cur[lend] == os.sep): yield iwd[1].wd def update_watch(self, wd, mask=None, proc_fun=None, rec=False, auto_add=False, quiet=True): """ Update existing watch descriptors |wd|. The |mask| value, the processing object |proc_fun|, the recursive param |rec| and the |auto_add| and |quiet| flags can all be updated. @param wd: Watch Descriptor to update. Also accepts a list of watch descriptors. @type wd: int or list of int @param mask: Optional new bitmask of events. @type mask: int @param proc_fun: Optional new processing function. @type proc_fun: function or ProcessEvent instance or instance of one of its subclasses or callable object. @param rec: Optionally adds watches recursively on all subdirectories contained into |wd| directory. @type rec: bool @param auto_add: Automatically adds watches on newly created directories in the watch's path corresponding to |wd|. @type auto_add: bool @param quiet: If False raises a WatchManagerError exception on error. See example not_quiet.py @type quiet: bool @return: dict of watch descriptors associated to booleans values. True if the corresponding wd has been successfully updated, False otherwise. 
@rtype: dict of {int: bool} """ lwd = self.__format_param(wd) if rec: lwd = self.__get_sub_rec(lwd) ret_ = {} # return {wd: bool, ...} for awd in lwd: apath = self.get_path(awd) if not apath or awd < 0: err = 'update_watch: invalid WD=%d' % awd if quiet: log.error(err) continue raise WatchManagerError(err, ret_) if mask: addw = LIBC.inotify_add_watch # apath is always stored as unicode string so encode it to # bytes. byte_path = apath.encode(sys.getfilesystemencoding()) wd_ = addw(self._fd, ctypes.create_string_buffer(byte_path), mask) if wd_ < 0: ret_[awd] = False err = 'update_watch: cannot update %s WD=%d Errno=%s' err = err % (apath, wd_, strerrno()) if quiet: log.error(err) continue raise WatchManagerError(err, ret_) assert(awd == wd_) if proc_fun or auto_add: watch_ = self._wmd[awd] if proc_fun: watch_.proc_fun = proc_fun if auto_add: watch_.auto_add = auto_add ret_[awd] = True log.debug('Updated watch - %s', self._wmd[awd]) return ret_ def __format_param(self, param): """ @param param: Parameter. @type param: string or int @return: wrap param. @rtype: list of type(param) """ if isinstance(param, list): for p_ in param: yield p_ else: yield param def get_wd(self, path): """ Returns the watch descriptor associated to path. This method presents a prohibitive cost, always prefer to keep the WD returned by add_watch(). If the path is unknown it returns None. @param path: Path. @type path: str @return: WD or None. @rtype: int or None """ path = self.__format_path(path) for iwd in self._wmd.items(): if iwd[1].path == path: return iwd[0] def get_path(self, wd): """ Returns the path associated to WD, if WD is unknown it returns None. @param wd: Watch descriptor. @type wd: int @return: Path or None. @rtype: string or None """ watch_ = self._wmd.get(wd) if watch_ is not None: return watch_.path def __walk_rec(self, top, rec): """ Yields each subdirectories of top, doesn't follow symlinks. If rec is false, only yield top. @param top: root directory. 
@type top: string @param rec: recursive flag. @type rec: bool @return: path of one subdirectory. @rtype: string """ if not rec or os.path.islink(top) or not os.path.isdir(top): yield top else: for root, dirs, files in os.walk(top): yield root def rm_watch(self, wd, rec=False, quiet=True): """ Removes watch(s). @param wd: Watch Descriptor of the file or directory to unwatch. Also accepts a list of WDs. @type wd: int or list of int. @param rec: Recursively removes watches on every already watched subdirectories and subfiles. @type rec: bool @param quiet: If False raises a WatchManagerError exception on error. See example not_quiet.py @type quiet: bool @return: dict of watch descriptors associated to booleans values. True if the corresponding wd has been successfully removed, False otherwise. @rtype: dict of {int: bool} """ lwd = self.__format_param(wd) if rec: lwd = self.__get_sub_rec(lwd) ret_ = {} # return {wd: bool, ...} for awd in lwd: # remove watch wd_ = LIBC.inotify_rm_watch(self._fd, awd) if wd_ < 0: ret_[awd] = False err = 'rm_watch: cannot remove WD=%d Errno=%s' % (awd, strerrno()) if quiet: log.error(err) continue raise WatchManagerError(err, ret_) ret_[awd] = True log.debug('Watch WD=%d (%s) removed', awd, self.get_path(awd)) return ret_ def watch_transient_file(self, filename, mask, proc_class): """ Watch a transient file, which will be created and deleted frequently over time (e.g. pid file). @attention: Currently under the call to this function it is not possible to correctly watch the events triggered into the same base directory than the directory where is located this watched transient file. For instance it would be wrong to make these two successive calls: wm.watch_transient_file('/var/run/foo.pid', ...) and wm.add_watch('/var/run/', ...) @param filename: Filename. @type filename: string @param mask: Bitmask of events, should contain IN_CREATE and IN_DELETE. 
@type mask: int @param proc_class: ProcessEvent (or of one of its subclass), beware of accepting a ProcessEvent's instance as argument into __init__, see transient_file.py example for more details. @type proc_class: ProcessEvent's instance or of one of its subclasses. @return: Same as add_watch(). @rtype: Same as add_watch(). """ dirname = os.path.dirname(filename) if dirname == '': return {} # Maintains coherence with add_watch() basename = os.path.basename(filename) # Assuming we are watching at least for IN_CREATE and IN_DELETE mask |= IN_CREATE | IN_DELETE def cmp_name(event): if getattr(event, 'name') is None: return False return basename == event.name return self.add_watch(dirname, mask, proc_fun=proc_class(ChainIfTrue(func=cmp_name)), rec=False, auto_add=False, do_glob=False, exclude_filter=lambda path: False) class RawOutputFormat: """ Format string representations. """ def __init__(self, format=None): self.format = format or {} def simple(self, s, attribute): if not isinstance(s, str): s = str(s) return (self.format.get(attribute, '') + s + self.format.get('normal', '')) def punctuation(self, s): """Punctuation color.""" return self.simple(s, 'normal') def field_value(self, s): """Field value color.""" return self.simple(s, 'purple') def field_name(self, s): """Field name color.""" return self.simple(s, 'blue') def class_name(self, s): """Class name color.""" return self.format.get('red', '') + self.simple(s, 'bold') output_format = RawOutputFormat() class ColoredOutputFormat(RawOutputFormat): """ Format colored string representations. """ def __init__(self): f = {'normal': '\033[0m', 'black': '\033[30m', 'red': '\033[31m', 'green': '\033[32m', 'yellow': '\033[33m', 'blue': '\033[34m', 'purple': '\033[35m', 'cyan': '\033[36m', 'bold': '\033[1m', 'uline': '\033[4m', 'blink': '\033[5m', 'invert': '\033[7m'} RawOutputFormat.__init__(self, f) def compatibility_mode(): """ Use this function to turn on the compatibility mode. 
The compatibility mode is used to improve compatibility with Pyinotify 0.7.1 (or older) programs. The compatibility mode provides additional variables 'is_dir', 'event_name', 'EventsCodes.IN_*' and 'EventsCodes.ALL_EVENTS' as Pyinotify 0.7.1 provided. Do not call this function from new programs!! Especially if there are developped for Pyinotify >= 0.8.x. """ setattr(EventsCodes, 'ALL_EVENTS', ALL_EVENTS) for evname in globals(): if evname.startswith('IN_'): setattr(EventsCodes, evname, globals()[evname]) global COMPATIBILITY_MODE COMPATIBILITY_MODE = True def command_line(): """ By default the watched path is '/tmp' and all types of events are monitored. Events monitoring serves forever, type c^c to stop it. """ from optparse import OptionParser usage = "usage: %prog [options] [path1] [path2] [pathn]" parser = OptionParser(usage=usage) parser.add_option("-v", "--verbose", action="store_true", dest="verbose", help="Verbose mode") parser.add_option("-r", "--recursive", action="store_true", dest="recursive", help="Add watches recursively on paths") parser.add_option("-a", "--auto_add", action="store_true", dest="auto_add", help="Automatically add watches on new directories") parser.add_option("-e", "--events-list", metavar="EVENT[,...]", dest="events_list", help=("A comma-separated list of events to watch for - " "see the documentation for valid options (defaults" " to everything)")) parser.add_option("-s", "--stats", action="store_true", dest="stats", help="Display dummy statistics") parser.add_option("-V", "--version", action="store_true", dest="version", help="Pyinotify version") parser.add_option("-f", "--raw-format", action="store_true", dest="raw_format", help="Disable enhanced output format.") (options, args) = parser.parse_args() if options.verbose: log.setLevel(10) if options.version: print(__version__) if not options.raw_format: global output_format output_format = ColoredOutputFormat() if len(args) < 1: path = '/tmp' # default watched path else: path = args 
# watch manager instance wm = WatchManager() # notifier instance and init if options.stats: notifier = Notifier(wm, default_proc_fun=Stats(), read_freq=5) else: notifier = Notifier(wm, default_proc_fun=PrintAllEvents()) # What mask to apply mask = 0 if options.events_list: events_list = options.events_list.split(',') for ev in events_list: evcode = EventsCodes.ALL_FLAGS.get(ev, 0) if evcode: mask |= evcode else: parser.error("The event '%s' specified with option -e" " is not valid" % ev) else: mask = ALL_EVENTS # stats cb_fun = None if options.stats: def cb(s): sys.stdout.write(repr(s.proc_fun())) sys.stdout.write('\n') sys.stdout.write(str(s.proc_fun())) sys.stdout.write('\n') sys.stdout.flush() cb_fun = cb log.debug('Start monitoring %s, (press c^c to halt pyinotify)' % path) wm.add_watch(path, mask, rec=options.recursive, auto_add=options.auto_add) # Loop forever (until sigint signal get caught) notifier.loop(callback=cb_fun) if __name__ == '__main__': command_line()
{ "content_hash": "7f75902de17ff3886c320b8903e4b434", "timestamp": "", "source": "github", "line_count": 2108, "max_line_length": 80, "avg_line_length": 37.28795066413662, "alnum_prop": 0.5553477602636032, "repo_name": "dunkfordyce/pyinotify", "id": "467a5217664794b280ea8b059c8243fe9295018e", "size": "79785", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python3/pyinotify.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "176314" }, { "name": "Shell", "bytes": "130" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_77) on Mon May 23 19:36:14 EDT 2016 --> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> <title>Uses of Class org.apache.lucene.util.packed.PackedInts (Lucene 6.0.1 API)</title> <meta name="date" content="2016-05-23"> <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.lucene.util.packed.PackedInts (Lucene 6.0.1 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../org/apache/lucene/util/packed/PackedInts.html" title="class in org.apache.lucene.util.packed">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?org/apache/lucene/util/packed/class-use/PackedInts.html" target="_top">Frames</a></li> <li><a href="PackedInts.html" target="_top">No&nbsp;Frames</a></li> </ul> 
<ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.apache.lucene.util.packed.PackedInts" class="title">Uses of Class<br>org.apache.lucene.util.packed.PackedInts</h2> </div> <div class="classUseContainer">No usage of org.apache.lucene.util.packed.PackedInts</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../org/apache/lucene/util/packed/PackedInts.html" title="class in org.apache.lucene.util.packed">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?org/apache/lucene/util/packed/class-use/PackedInts.html" target="_top">Frames</a></li> <li><a href="PackedInts.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script 
type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small> <i>Copyright &copy; 2000-2016 Apache Software Foundation. All Rights Reserved.</i> <script src='../../../../../../prettify.js' type='text/javascript'></script> <script type='text/javascript'> (function(){ var oldonload = window.onload; if (typeof oldonload != 'function') { window.onload = prettyPrint; } else { window.onload = function() { oldonload(); prettyPrint(); } } })(); </script> </small></p> </body> </html>
{ "content_hash": "d177ff1a0a9f57c7b371f5e0d44bfdb5", "timestamp": "", "source": "github", "line_count": 140, "max_line_length": 143, "avg_line_length": 36.24285714285714, "alnum_prop": 0.5896728419392984, "repo_name": "YorkUIRLab/irlab", "id": "dff41ca7d8c45b3b3d65050ad09d555fe7449af0", "size": "5074", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/lucene-6.0.1/docs/core/org/apache/lucene/util/packed/class-use/PackedInts.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "433499" }, { "name": "Gnuplot", "bytes": "2444" }, { "name": "HTML", "bytes": "95820812" }, { "name": "Java", "bytes": "303195" }, { "name": "JavaScript", "bytes": "33538" } ], "symlink_target": "" }
package com.taskadapter.redmineapi.bean; public class IssueRelation implements Identifiable { public enum TYPE { precedes } /* GET /relations/1819.xml Response: <?xml version="1.0" encoding="UTF-8"?> <relation> <id>1819</id> <issue_id>8470</issue_id> <issue_to_id>8469</issue_to_id> <relation_type>relates</relation_type> <delay/> </relation> */ private final PropertyStorage storage; /** * database numeric Id */ public final static Property<Integer> DATABASE_ID = new Property<>(Integer.class, "id"); public final static Property<Integer> ISSUE_ID = new Property<>(Integer.class, "issueId"); public final static Property<Integer> ISSUE_TO_ID = new Property<>(Integer.class, "issueToId"); public final static Property<String> RELATION_TYPE = new Property<>(String.class, "relationType"); public final static Property<Integer> DELAY = new Property<>(Integer.class, "delay"); IssueRelation(Integer id) { storage = new PropertyStorage(); storage.set(DATABASE_ID, id); } @Override public Integer getId() { return storage.get(DATABASE_ID); } public Integer getIssueId() { return storage.get(ISSUE_ID); } public void setIssueId(Integer issueId) { storage.set(ISSUE_ID, issueId); } public Integer getIssueToId() { return storage.get(ISSUE_TO_ID); } public void setIssueToId(Integer issueToId) { storage.set(ISSUE_TO_ID, issueToId); } public Integer getDelay() { return storage.get(DELAY); } public void setDelay(Integer delay) { storage.set(DELAY, delay); } public String getType() { return storage.get(RELATION_TYPE); } public void setType(String type) { storage.set(RELATION_TYPE, type); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; IssueRelation that = (IssueRelation) o; if (getId() != null ? !getId().equals(that.getId()) : that.getId() != null) return false; return true; } @Override public int hashCode() { return getId() != null ? 
getId().hashCode() : 0; } @Override public String toString() { return "IssueRelation [getId()=" + getId() + ", issueId=" + getIssueId() + ", issueToId=" + getIssueToId() + ", type=" + getType() + ", delay=" + getDelay() + "]"; } public PropertyStorage getStorage() { return storage; } }
{ "content_hash": "a1bb22cdf3c4bef84b0cf96e2f8eb790", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 102, "avg_line_length": 26.127450980392158, "alnum_prop": 0.5958724202626642, "repo_name": "redminenb/redmine-java-api", "id": "cf791d758c84b777244ba6a48c65cb4e6c61726b", "size": "2665", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/taskadapter/redmineapi/bean/IssueRelation.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "780" }, { "name": "Java", "bytes": "490106" } ], "symlink_target": "" }
using namespace BrainCloud;

// Test fixture for the brainCloud virtual-currency service.
// NOTE(review): no test cases are declared here; the class only inherits the
// common setup/teardown behaviour from TestFixtureBase.
class TestBCVirtualCurrency: public TestFixtureBase
{
};

#endif  // closes the include guard opened at the top of this header (outside this chunk)
{ "content_hash": "d727412aa522da7baecd8598a2adc5c9", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 51, "avg_line_length": 13.428571428571429, "alnum_prop": 0.8085106382978723, "repo_name": "getbraincloud/braincloud-cpp", "id": "bf15f15eb7d7ca266145549eb743f5895ca7c1fc", "size": "282", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/src/TestBCVirtualCurrency.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "12312" }, { "name": "C", "bytes": "2083120" }, { "name": "C#", "bytes": "6464" }, { "name": "C++", "bytes": "9284112" }, { "name": "CMake", "bytes": "69852" }, { "name": "CSS", "bytes": "1415" }, { "name": "HTML", "bytes": "8157" }, { "name": "Java", "bytes": "804" }, { "name": "JavaScript", "bytes": "6113" }, { "name": "M4", "bytes": "19093" }, { "name": "Makefile", "bytes": "142899" }, { "name": "Objective-C", "bytes": "29435" }, { "name": "Objective-C++", "bytes": "32445" }, { "name": "PowerShell", "bytes": "880" }, { "name": "Python", "bytes": "204738" }, { "name": "Ruby", "bytes": "3207" }, { "name": "Shell", "bytes": "353666" } ], "symlink_target": "" }
import * as types from 'frontend/actions/types' import type { Action } from 'frontend/actions/types' export type CurrentUserState = { id: ?string } const initialState = { id: null } export default function reducer( state: CurrentUserState = initialState, action: Action ) { switch (action.type) { case types.USER_LOGGED_IN: return { ...state, id: action.id } case types.USER_LOGGED_OUT: return { ...initialState } default: return state } }
{ "content_hash": "51b797f31131d7fe03a341907b7d4a09", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 52, "avg_line_length": 20.208333333333332, "alnum_prop": 0.6659793814432989, "repo_name": "jsonnull/aleamancer", "id": "448923dec06f72d9b5c76a2b44d16061bad79a4b", "size": "494", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/frontend/reducers/currentUser.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1623" }, { "name": "JavaScript", "bytes": "122542" } ], "symlink_target": "" }
// Component tests for <btrz-filter> and its row/column layout helpers.
// Each test injects a template via insertHTML(), mounts a Vue app on it, and
// asserts on the rendered DOM with jQuery.
// NOTE(review): `Vue` and `$` appear to be globals — presumably set up by
// ./setup; confirm there.
import "./setup";
import {expect} from "chai";
import {insertHTML} from "./utils";
import BtrzFilter from "../src/btrz-filter";
import BtrzFilterRow from "../src/btrz-filter-row";
import BtrzFilterCol from "../src/btrz-filter-col";
import BtrzField from "../src/btrz-field";

describe("BtrzFilter", () => {
  // The `title` prop should end up in the filter header's <h3>.
  it("Should render a given title.", () => {
    insertHTML(`<div id="app"> <btrz-filter title="testTitle"></btrz-filter> </div>`);

    const app = new Vue({
        el: "#app",
        components: {BtrzFilter}
      }),
      filterTitle = $(".filter-section .filters-header h3");

    expect(filterTitle.text()).to.equal("testTitle");
  });

  // Four <btrz-field> children (two rows of two) should render four form groups.
  it("Should render given fields.", () => {
    insertHTML(`<div id="app"> <btrz-filter title='testTitle'> <btrz-filter-row> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='1' label='This is a label' name='randomInput' content='BtrzInput' value='1' type='number'> </btrz-field> </btrz-filter-col> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='2' label='This is a label' name='randomInput' content='BtrzInput' value='2' type='number'> </btrz-field> </btrz-filter-col> </btrz-filter-row> <btrz-filter-row> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='3' label='This is a label' name='randomInput' content='BtrzInput' value='3' type='number'> </btrz-field> </btrz-filter-col> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='4' label='This is a label' name='randomInput' content='BtrzInput' value='4' type='number'> </btrz-field> </btrz-filter-col> </btrz-filter-row> </btrz-filter> </div>`);

    const app = new Vue({
        el: "#app",
        components: {BtrzFilter, BtrzField, BtrzFilterRow, BtrzFilterCol}
      }),
      filterFormGroups = $(".filter-section .form-group");

    expect(filterFormGroups.length).to.equal(4);
  });

  // Each <btrz-filter-row> should render as a Bootstrap .row element.
  it("Should render <btr-filter-row> as bootstrap rows.", () => {
    insertHTML(`<div id="app"> <btrz-filter title='testTitle'> <btrz-filter-row> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='1' label='This is a label' name='randomInput' content='BtrzInput' value='1' type='number'> </btrz-field> </btrz-filter-col> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='2' label='This is a label' name='randomInput' content='BtrzInput' value='2' type='number'> </btrz-field> </btrz-filter-col> </btrz-filter-row> <btrz-filter-row> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='3' label='This is a label' name='randomInput' content='BtrzInput' value='3' type='number'> </btrz-field> </btrz-filter-col> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='4' label='This is a label' name='randomInput' content='BtrzInput' value='4' type='number'> </btrz-field> </btrz-filter-col> </btrz-filter-row> </btrz-filter> </div>`);

    const app = new Vue({
        el: "#app",
        components: {BtrzFilter, BtrzField, BtrzFilterRow, BtrzFilterCol}
      }),
      filterRows = $(".filter-section #filter .row");

    expect(filterRows.length).to.equal(2);
  });

  // Each <btrz-filter-col> should render as a Bootstrap column element.
  it("Should render <btr-filter-col> as bootstrap cols.", () => {
    insertHTML(`<div id="app"> <btrz-filter title='testTitle'> <btrz-filter-row> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='1' label='This is a label' name='randomInput' content='BtrzInput' value='1' type='number'> </btrz-field> </btrz-filter-col> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='2' label='This is a label' name='randomInput' content='BtrzInput' value='2' type='number'> </btrz-field> </btrz-filter-col> </btrz-filter-row> <btrz-filter-row> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='3' label='This is a label' name='randomInput' content='BtrzInput' value='3' type='number'> </btrz-field> </btrz-filter-col> <btrz-filter-col class='col-xs-12 col-md-6'> <btrz-field id='4' label='This is a label' name='randomInput' content='BtrzInput' value='4' type='number'> </btrz-field> </btrz-filter-col> </btrz-filter-row> </btrz-filter> </div>`);

    const app = new Vue({
        el: "#app",
        components: {BtrzFilter, BtrzField, BtrzFilterRow, BtrzFilterCol}
      }),
      filterCols = $(".filter-section #filter .col-xs-12");

    expect(filterCols.length).to.equal(4);
  });
});
{ "content_hash": "e6a7a9af9d67e23d0d1c80eef1d90d07", "timestamp": "", "source": "github", "line_count": 126, "max_line_length": 130, "avg_line_length": 45.92063492063492, "alnum_prop": 0.4858278603525752, "repo_name": "Betterez/btrz-vue-components", "id": "0328a108407563db53276c2358851c1f8217bd10", "size": "5786", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/btrz-filter.spec.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "73983" }, { "name": "Vue", "bytes": "49155" } ], "symlink_target": "" }
package org.apereo.cas.ticket.registry; import com.mongodb.BasicDBObject; import com.mongodb.DBCollection; import com.mongodb.WriteResult; import org.apache.commons.lang3.StringUtils; import org.apereo.cas.ticket.BaseTicketSerializers; import org.apereo.cas.ticket.Ticket; import org.apereo.cas.ticket.TicketCatalog; import org.apereo.cas.ticket.TicketDefinition; import org.hjson.JsonValue; import org.hjson.Stringify; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; import java.util.Collection; import java.util.HashSet; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; /** * A Ticket Registry storage backend based on MongoDB. * * @author Misagh Moayyed * @since 5.1.0 */ public class MongoDbTicketRegistry extends AbstractTicketRegistry { private static final Logger LOGGER = LoggerFactory.getLogger(MongoDbTicketRegistry.class); private static final String FIELD_NAME_EXPIRE_AFTER_SECONDS = "expireAfterSeconds"; private final boolean dropCollection; private final TicketCatalog ticketCatalog; private final MongoOperations mongoTemplate; public MongoDbTicketRegistry(final TicketCatalog ticketCatalog, final MongoOperations mongoTemplate) { this(ticketCatalog, false, mongoTemplate); } public MongoDbTicketRegistry(final TicketCatalog ticketCatalog, final boolean dropCollection, final MongoOperations mongoTemplate) { this.ticketCatalog = ticketCatalog; this.dropCollection = dropCollection; this.mongoTemplate = mongoTemplate; createTicketCollections(); LOGGER.info("Configured MongoDb Ticket Registry instance with available collections: [{}]", mongoTemplate.getCollectionNames()); } private DBCollection createTicketCollection(final TicketDefinition ticket) { final String collectionName = 
ticket.getProperties().getStorageName(); LOGGER.debug("Setting up MongoDb Ticket Registry instance [{}]", collectionName); if (this.dropCollection) { LOGGER.debug("Dropping database collection: [{}]", collectionName); this.mongoTemplate.dropCollection(collectionName); } if (!this.mongoTemplate.collectionExists(collectionName)) { LOGGER.debug("Creating database collection: [{}]", collectionName); this.mongoTemplate.createCollection(collectionName); } LOGGER.debug("Creating indices on collection [{}] to auto-expire documents...", collectionName); final DBCollection collection = mongoTemplate.getCollection(collectionName); collection.createIndex(new BasicDBObject(TicketHolder.FIELD_NAME_EXPIRE_AT, 1), new BasicDBObject(FIELD_NAME_EXPIRE_AFTER_SECONDS, ticket.getProperties().getStorageTimeout())); return collection; } private void createTicketCollections() { final Collection<TicketDefinition> definitions = ticketCatalog.findAll(); definitions.forEach(t -> { final DBCollection c = createTicketCollection(t); LOGGER.debug("Created MongoDb collection configuration for [{}]", c.getFullName()); }); } @Override public Ticket updateTicket(final Ticket ticket) { LOGGER.debug("Updating ticket [{}]", ticket); try { final TicketHolder holder = buildTicketAsDocument(ticket); final TicketDefinition metadata = this.ticketCatalog.find(ticket); if (metadata == null) { LOGGER.error("Could not locate ticket definition in the catalog for ticket [{}]", ticket.getId()); return null; } LOGGER.debug("Located ticket definition [{}] in the ticket catalog", metadata); final String collectionName = getTicketCollectionInstanceByMetadata(metadata); if (StringUtils.isBlank(collectionName)) { LOGGER.error("Could not locate collection linked to ticket definition for ticket [{}]", ticket.getId()); return null; } final Query query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(holder.getTicketId())); final Update update = Update.update(TicketHolder.FIELD_NAME_JSON, holder.getJson()); 
this.mongoTemplate.updateFirst(query, update, collectionName); LOGGER.debug("Updated ticket [{}]", ticket); } catch (final Exception e) { LOGGER.error("Failed updating [{}]: [{}]", ticket, e); } return ticket; } @Override public void addTicket(final Ticket ticket) { try { LOGGER.debug("Adding ticket [{}]", ticket.getId()); final TicketHolder holder = buildTicketAsDocument(ticket); final TicketDefinition metadata = this.ticketCatalog.find(ticket); if (metadata == null) { LOGGER.error("Could not locate ticket definition in the catalog for ticket [{}]", ticket.getId()); return; } LOGGER.debug("Located ticket definition [{}] in the ticket catalog", metadata); final String collectionName = getTicketCollectionInstanceByMetadata(metadata); if (StringUtils.isBlank(collectionName)) { LOGGER.error("Could not locate collection linked to ticket definition for ticket [{}]", ticket.getId()); return; } LOGGER.debug("Found collection [{}] linked to ticket [{}]", collectionName, metadata); this.mongoTemplate.insert(holder, collectionName); LOGGER.debug("Added ticket [{}]", ticket.getId()); } catch (final Exception e) { LOGGER.error("Failed adding [{}]: [{}]", ticket, e); } } @Override public Ticket getTicket(final String ticketId) { try { LOGGER.debug("Locating ticket ticketId [{}]", ticketId); final String encTicketId = encodeTicketId(ticketId); if (encTicketId == null) { LOGGER.debug("Ticket ticketId [{}] could not be found", ticketId); return null; } final TicketDefinition metadata = this.ticketCatalog.find(ticketId); if (metadata == null) { LOGGER.debug("Ticket definition [{}] could not be found in the ticket catalog", ticketId); return null; } final String collectionName = getTicketCollectionInstanceByMetadata(metadata); final Query query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(encTicketId)); final TicketHolder d = this.mongoTemplate.findOne(query, TicketHolder.class, collectionName); if (d != null) { final Ticket result = 
deserializeTicketFromMongoDocument(d); return decodeTicket(result); } } catch (final Exception e) { LOGGER.error("Failed fetching [{}]: [{}]", ticketId, e); } return null; } @Override public Collection<Ticket> getTickets() { final Collection<Ticket> tickets = new HashSet<>(); try { final Collection<TicketDefinition> metadata = this.ticketCatalog.findAll(); metadata.forEach(t -> { final String map = getTicketCollectionInstanceByMetadata(t); final Collection<TicketHolder> ticketHolders = this.mongoTemplate.findAll(TicketHolder.class, map); final Collection<Ticket> colTickets = ticketHolders .stream() .map(ticket -> decodeTicket(deserializeTicketFromMongoDocument(ticket))) .collect(Collectors.toList()); tickets.addAll(colTickets); }); } catch (final Exception e) { LOGGER.warn(e.getMessage(), e); } return decodeTickets(tickets); } @Override public boolean deleteSingleTicket(final String ticketIdToDelete) { final String ticketId = encodeTicketId(ticketIdToDelete); LOGGER.debug("Deleting ticket [{}]", ticketId); try { final TicketDefinition metadata = this.ticketCatalog.find(ticketIdToDelete); final String collectionName = getTicketCollectionInstanceByMetadata(metadata); final Query query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(ticketId)); final WriteResult res = this.mongoTemplate.remove(query, collectionName); LOGGER.debug("Deleted ticket [{}] with result [{}]", ticketIdToDelete, res); return true; } catch (final Exception e) { LOGGER.error("Failed deleting [{}]: [{}]", ticketId, e); } return false; } @Override public long deleteAll() { final Collection<TicketDefinition> metadata = this.ticketCatalog.findAll(); final AtomicLong count = new AtomicLong(); metadata.forEach(r -> { final String collectionName = getTicketCollectionInstanceByMetadata(r); if (StringUtils.isNotBlank(collectionName)) { final Query query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).regex(".+")); final long countTickets = this.mongoTemplate.count(query, collectionName); 
count.addAndGet(countTickets); mongoTemplate.remove(query, collectionName); } }); return count.get(); } private static int getTimeToLive(final Ticket ticket) { return ticket.getExpirationPolicy().getTimeToLive().intValue(); } private static String serializeTicketForMongoDocument(final Ticket ticket) { try { return BaseTicketSerializers.serializeTicket(ticket); } catch (final Exception e) { LOGGER.error(e.getMessage(), e); } return null; } private static Ticket deserializeTicketFromMongoDocument(final TicketHolder holder) { return BaseTicketSerializers.deserializeTicket(holder.getJson(), holder.getType()); } private TicketHolder buildTicketAsDocument(final Ticket ticket) { final Ticket encTicket = encodeTicket(ticket); final String json = serializeTicketForMongoDocument(encTicket); if (StringUtils.isNotBlank(json)) { LOGGER.trace("Serialized ticket into a JSON document as \n [{}]", JsonValue.readJSON(json).toString(Stringify.FORMATTED)); final int timeToLive = getTimeToLive(ticket); return new TicketHolder(json, encTicket.getId(), encTicket.getClass().getName(), timeToLive); } throw new IllegalArgumentException("Ticket " + ticket.getId() + " cannot be serialized to JSON"); } private String getTicketCollectionInstanceByMetadata(final TicketDefinition metadata) { final String mapName = metadata.getProperties().getStorageName(); LOGGER.debug("Locating collection name [{}] for ticket definition [{}]", mapName, metadata); final DBCollection c = getTicketCollectionInstance(mapName); if (c != null) { return c.getName(); } throw new IllegalArgumentException("Could not locate MongoDb collection " + mapName); } private DBCollection getTicketCollectionInstance(final String mapName) { try { final DBCollection inst = this.mongoTemplate.getCollection(mapName); LOGGER.debug("Located MongoDb collection instance [{}]", mapName); return inst; } catch (final Exception e) { LOGGER.error(e.getMessage(), e); } return null; } }
{ "content_hash": "8f788016b8dc2db33906ad2fe5354770", "timestamp": "", "source": "github", "line_count": 259, "max_line_length": 136, "avg_line_length": 46.02316602316602, "alnum_prop": 0.6525167785234899, "repo_name": "pmarasse/cas", "id": "a59dcf86fe6e0e2014de2cc43153bde62594b772", "size": "11920", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "support/cas-server-support-mongo-ticket-registry/src/main/java/org/apereo/cas/ticket/registry/MongoDbTicketRegistry.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "275885" }, { "name": "Groovy", "bytes": "4583" }, { "name": "HTML", "bytes": "250640" }, { "name": "Java", "bytes": "6569782" }, { "name": "JavaScript", "bytes": "204340" }, { "name": "Shell", "bytes": "14980" } ], "symlink_target": "" }
// Table-view data source backed by cells dequeued under a fixed reuse
// identifier, with data populated via an explicit request.
@interface DataSource : NSObject <UITableViewDataSource>

// cellIdentifier is the reuse identifier used when dequeuing cells in the
// UITableViewDataSource callbacks.
- (instancetype)initWithCellIdentifier:(NSString *)cellIdentifier;

// Requests the backing data; `complete` is invoked once the data is available.
// NOTE(review): which queue the block is called on is not visible from this
// header — confirm in the implementation before touching UI from it.
- (void)requestData:(void(^)(void))complete;

@end
{ "content_hash": "93a830b70639edafff7143f74253cdf5", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 66, "avg_line_length": 29.333333333333332, "alnum_prop": 0.7840909090909091, "repo_name": "paulja/PullToRefresh", "id": "9f28e96cbde87446d27aa23b2e8fe7fa41710c2b", "size": "353", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/PullToRefresh/DataSource.h", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "6840" } ], "symlink_target": "" }
package test.web.controller;

/**
 * Minimal form-backing bean carrying a single string value.
 * <p>
 * The value is initialized to the empty string, so a freshly constructed bean
 * never yields {@code null} from {@link #getValue()}.
 */
public class FormBean {

    private String value = "";

    /** Returns the current form value. */
    public String getValue() {
        return value;
    }

    /** Replaces the current form value. */
    public void setValue(String value) {
        this.value = value;
    }
}
{ "content_hash": "e33a0c3f7991aa5c828b46cdc3ca6358", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 37, "avg_line_length": 13.2, "alnum_prop": 0.6818181818181818, "repo_name": "ufoscout/java-sample-projects", "id": "008b91cb16db564c7956f871eaf01d8601747bd0", "size": "198", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "infinispan-cluster-as-spring3.1-cache-provider/src/main/java/test/web/controller/FormBean.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "63455" }, { "name": "Groovy", "bytes": "2297" }, { "name": "Java", "bytes": "367999" }, { "name": "JavaScript", "bytes": "320925" }, { "name": "Perl", "bytes": "1240" } ], "symlink_target": "" }
package models // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "github.com/go-openapi/errors" "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // IPAMStatus Status of IP address management // // +k8s:deepcopy-gen=true // // swagger:model IPAMStatus type IPAMStatus struct { // allocations Allocations AllocationMap `json:"allocations,omitempty"` // ipv4 IPV4 []string `json:"ipv4"` // ipv6 IPV6 []string `json:"ipv6"` // status Status string `json:"status,omitempty"` } // Validate validates this IP a m status func (m *IPAMStatus) Validate(formats strfmt.Registry) error { var res []error if err := m.validateAllocations(formats); err != nil { res = append(res, err) } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil } func (m *IPAMStatus) validateAllocations(formats strfmt.Registry) error { if swag.IsZero(m.Allocations) { // not required return nil } if err := m.Allocations.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { return ve.ValidateName("allocations") } return err } return nil } // MarshalBinary interface implementation func (m *IPAMStatus) MarshalBinary() ([]byte, error) { if m == nil { return nil, nil } return swag.WriteJSON(m) } // UnmarshalBinary interface implementation func (m *IPAMStatus) UnmarshalBinary(b []byte) error { var res IPAMStatus if err := swag.ReadJSON(b, &res); err != nil { return err } *m = res return nil }
{ "content_hash": "080c9429008f2b612c0a9e5034387a58", "timestamp": "", "source": "github", "line_count": 78, "max_line_length": 84, "avg_line_length": 20.243589743589745, "alnum_prop": 0.6985433818872704, "repo_name": "tklauser/cilium", "id": "6473f317dc1da6f51d761916f3258f60e01335a6", "size": "1707", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "api/v1/models/ip_a_m_status.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "939774" }, { "name": "Dockerfile", "bytes": "27555" }, { "name": "Go", "bytes": "9502023" }, { "name": "HCL", "bytes": "1394" }, { "name": "Makefile", "bytes": "76523" }, { "name": "Mustache", "bytes": "1457" }, { "name": "Python", "bytes": "11097" }, { "name": "Ruby", "bytes": "394" }, { "name": "Shell", "bytes": "349188" }, { "name": "SmPL", "bytes": "6540" }, { "name": "Smarty", "bytes": "10430" }, { "name": "TeX", "bytes": "416" }, { "name": "sed", "bytes": "2642" } ], "symlink_target": "" }
package ro.pub.cs.aipi.lab07.entities;

import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.StaticMetamodel;

/**
 * JPA Criteria static metamodel for the {@code Writer} entity. The attribute
 * handles below mirror Writer's persistent fields and enable type-safe
 * Criteria API queries; the JPA provider populates them at runtime.
 */
@StaticMetamodel(Writer.class)
public class Writer_ {

    public static volatile SingularAttribute<Writer, Long> id;
    public static volatile SingularAttribute<Writer, String> firstName;
    public static volatile SingularAttribute<Writer, String> lastName;
    public static volatile SingularAttribute<Writer, String> biography;

}
{ "content_hash": "4673d80640d3357b62dfeb4e87da18d7", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 68, "avg_line_length": 39.083333333333336, "alnum_prop": 0.8315565031982942, "repo_name": "aipi2015/Laborator07", "id": "09edf4abcd0b3c1c782ce236d8fbbc7017841290", "size": "469", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "solutions/eclipse/07-BookStore-JAXWS-Server/src/ro/pub/cs/aipi/lab07/entities/Writer_.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "282" }, { "name": "HTML", "bytes": "1338" }, { "name": "Java", "bytes": "494209" }, { "name": "Shell", "bytes": "282" } ], "symlink_target": "" }
// Flux store tracking externally configured ACL directories (e.g. LDAP).
// Mirrors dispatched REQUEST_* action results into store events and delegates
// all mutations to ACLDirectoriesActions.
import BaseStore from "#SRC/js/stores/BaseStore";
import List from "#SRC/js/structs/List";

import {
  REQUEST_ACL_DIRECTORIES_SUCCESS,
  REQUEST_ACL_DIRECTORIES_ERROR,
  REQUEST_ACL_DIRECTORY_ADD_SUCCESS,
  REQUEST_ACL_DIRECTORY_ADD_ERROR,
  REQUEST_ACL_DIRECTORY_DELETE_SUCCESS,
  REQUEST_ACL_DIRECTORY_DELETE_ERROR,
  REQUEST_ACL_DIRECTORY_TEST_SUCCESS,
  REQUEST_ACL_DIRECTORY_TEST_ERROR,
} from "../constants/ActionTypes";
import {
  ACL_DIRECTORIES_CHANGED,
  ACL_DIRECTORIES_ERROR,
  ACL_DIRECTORY_ADD_SUCCESS,
  ACL_DIRECTORY_ADD_ERROR,
  ACL_DIRECTORY_DELETE_SUCCESS,
  ACL_DIRECTORY_DELETE_ERROR,
  ACL_DIRECTORY_TEST_SUCCESS,
  ACL_DIRECTORY_TEST_ERROR,
} from "../constants/EventTypes";
import ACLDirectoriesActions from "../actions/ACLDirectoriesActions";

const SDK = require("../../../SDK");

// Register this store's add/test error events with the global server-error
// modal so those failures surface to the user.
SDK.getSDK().Hooks.addFilter("serverErrorModalListeners", (listeners) => {
  listeners.push({ name: "aclDirectories", events: ["addError", "testError"] });

  return listeners;
});

class ACLDirectoriesStore extends BaseStore {
  constructor(...args) {
    super(...args);

    // Map store lifecycle event names to the concrete event types emitted
    // below; unmountWhen: () => false keeps the store mounted indefinitely.
    SDK.getSDK().addStoreConfig({
      store: this,
      storeID: "aclDirectories",
      events: {
        fetchSuccess: ACL_DIRECTORIES_CHANGED,
        fetchError: ACL_DIRECTORIES_ERROR,
        addSuccess: ACL_DIRECTORY_ADD_SUCCESS,
        addError: ACL_DIRECTORY_ADD_ERROR,
        deleteSuccess: ACL_DIRECTORY_DELETE_SUCCESS,
        deleteError: ACL_DIRECTORY_DELETE_ERROR,
        testSuccess: ACL_DIRECTORY_TEST_SUCCESS,
        testError: ACL_DIRECTORY_TEST_ERROR,
      },
      unmountWhen: () => false,
    });

    // Translate dispatched REQUEST_* payloads into store events.
    SDK.getSDK().onDispatch((action) => {
      const { data, type } = action;

      switch (type) {
        // Get a list of external directories
        case REQUEST_ACL_DIRECTORIES_SUCCESS:
          this.processDirectoriesSuccess(data);
          break;
        case REQUEST_ACL_DIRECTORIES_ERROR:
          this.emit(ACL_DIRECTORIES_ERROR, data);
          break;
        case REQUEST_ACL_DIRECTORY_ADD_SUCCESS:
          this.emit(ACL_DIRECTORY_ADD_SUCCESS);
          break;
        case REQUEST_ACL_DIRECTORY_ADD_ERROR:
          this.emit(ACL_DIRECTORY_ADD_ERROR, data);
          break;
        case REQUEST_ACL_DIRECTORY_DELETE_SUCCESS:
          // Clear the cached list before announcing the deletion so
          // listeners re-fetch rather than render stale entries.
          this.processDirectoriesSuccess([]);
          this.emit(ACL_DIRECTORY_DELETE_SUCCESS);
          break;
        case REQUEST_ACL_DIRECTORY_DELETE_ERROR:
          this.emit(ACL_DIRECTORY_DELETE_ERROR, data);
          break;
        case REQUEST_ACL_DIRECTORY_TEST_SUCCESS:
          this.emit(ACL_DIRECTORY_TEST_SUCCESS, data);
          break;
        case REQUEST_ACL_DIRECTORY_TEST_ERROR:
          this.emit(ACL_DIRECTORY_TEST_ERROR, data);
          break;
      }
    });
  }

  // The following methods are thin delegates to ACLDirectoriesActions; results
  // arrive asynchronously through the dispatch handler above.
  addDirectory(...args) {
    return ACLDirectoriesActions.addDirectory(...args);
  }

  deleteDirectory(...args) {
    return ACLDirectoriesActions.deleteDirectory(...args);
  }

  testDirectoryConnection(...args) {
    return ACLDirectoriesActions.testDirectoryConnection(...args);
  }

  fetchDirectories(...args) {
    return ACLDirectoriesActions.fetchDirectories(...args);
  }

  // Stores the directory list in the SDK-managed state, then notifies listeners.
  processDirectoriesSuccess(directories) {
    SDK.getSDK().dispatch({
      type: ACL_DIRECTORIES_CHANGED,
      directories,
    });
    this.emit(ACL_DIRECTORIES_CHANGED);
  }

  // Returns the cached directories wrapped in a List struct.
  getDirectories() {
    return new List({
      items: SDK.getSDK().Store.getOwnState().directories.list,
    });
  }
}

export default new ACLDirectoriesStore();
{ "content_hash": "d9de6ca98fe66c7dc8b5ad33cc343ee2", "timestamp": "", "source": "github", "line_count": 121, "max_line_length": 80, "avg_line_length": 28.785123966942148, "alnum_prop": 0.6617858168245765, "repo_name": "dcos/dcos-ui", "id": "586cc8750946e46a730f8de0f93b9842d4ec6eeb", "size": "3483", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "plugins/organization/submodules/directories/stores/ACLDirectoriesStore.ts", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "777" }, { "name": "HCL", "bytes": "1664" }, { "name": "HTML", "bytes": "1188" }, { "name": "JavaScript", "bytes": "575450" }, { "name": "Less", "bytes": "413720" }, { "name": "Python", "bytes": "822" }, { "name": "Shell", "bytes": "29286" }, { "name": "TypeScript", "bytes": "5604221" }, { "name": "Yacc", "bytes": "2575" } ], "symlink_target": "" }
<?php

namespace modules\cms\models;

use wanhunet\db\ActiveRecord;

/**
 * This is the model class for table "{{%cms_category}}".
 *
 * @property integer $id
 * @property string $title
 * @property integer $parent
 * @property integer $status
 * @property integer $created_at
 * @property integer $updated_at
 */
class Category extends ActiveRecord
{
    /**
     * @inheritdoc
     */
    public static function tableName()
    {
        return '{{%cms_category}}';
    }

    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['title', 'parent'], 'required'],
            [['parent', 'status', 'created_at', 'updated_at'], 'integer'],
            [['title'], 'string', 'max' => 255]
        ];
    }

    /**
     * Loads the parent category record by this row's `parent` id.
     *
     * @return \modules\cms\models\Category
     * @throws \yii\web\NotFoundHttpException
     */
    public function getParent()
    {
        return self::staticRead($this->parent);
    }

    /**
     * Relation to the posts belonging to this category.
     *
     * @return \yii\db\ActiveQuery
     */
    public function getPost()
    {
        return $this->hasMany(Post::className(), ['category_id' => 'id']);
    }

    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            'id' => 'ID',
            'title' => '标题',
            'parent' => '父类ID',
            'status' => '状态',
            'created_at' => '创建时间',
            'updated_at' => '更新时间',
        ];
    }

    /**
     * Builds the nested category tree rooted at $pid.
     * Each node is array('parent' => row, 'child' => subtree, 'level' => depth).
     *
     * @param int $pid parent category id to start from (0 = top level)
     * @param int $level depth of $pid's children (incremented per recursion)
     * @param bool $onlyActive restrict to rows with STATUS_ACTIVE
     *        (constant presumably declared on the base ActiveRecord — confirm)
     * @return array
     */
    public static function getCategoryTree($pid = 0, $level = 0, $onlyActive = true)
    {
        $where['parent'] = $pid;
        if ($onlyActive) {
            $where["status"] = Category::STATUS_ACTIVE;
        }
        $data = Category::find()->where($where)->asArray()->all();
        $tree = array();
        $level++;
        if (count($data) > 0) {
            foreach ($data as $v) {
                $child = self::getCategoryTree($v['id'], $level, $onlyActive);
                $tree[] = array('parent' => $v, 'child' => $child, 'level' => $level);
            }
        }
        return $tree;
    }

    /**
     * Flattens a nested category tree (depth-first, parent before children)
     * into rows suitable for indented drop-down lists.
     *
     * Bug fix: the previous implementation accumulated rows in a
     * `static $tree` variable, so a second call within the same request
     * returned duplicated entries. The accumulation now happens through a
     * private helper's return values instead.
     *
     * @param array|null $data tree as produced by getCategoryTree(); defaults
     *        to the full active tree rooted at 0
     * @return array array('data' => rows, 'count' => number of rows)
     */
    public static function eachCategoryTree($data = null)
    {
        if ($data === null) {
            $data = self::getCategoryTree(0); // category tree
        }
        $tree = self::flattenCategoryTree($data);
        return array('data' => $tree, 'count' => count($tree));
    }

    /**
     * Depth-first flattening helper: emits each parent row, then recurses into
     * its children, prefixing names with box-drawing characters per level.
     *
     * @param array $data nested tree nodes
     * @return array flat list of rows
     */
    private static function flattenCategoryTree($data)
    {
        $tree = array();
        if (!empty($data)) {
            foreach ($data as $v) {
                $parent = $v['parent'];
                $tree[] = array(
                    'category_id' => $parent['id'],
                    'parent' => $parent['parent'],
                    'status' => $parent['status'],
                    'name' => '├' . str_repeat('─', ($v['level'] - 1)).'┤' . $parent['title']
                );
                $tree = array_merge($tree, self::flattenCategoryTree($v['child']));
            }
        }
        return $tree;
    }
}
{ "content_hash": "89dbc82466c0cc9b9f3903e3fab41e01", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 93, "avg_line_length": 24.725, "alnum_prop": 0.46006066734074824, "repo_name": "wuwenhan/p2p", "id": "8032faca5d970f08f2768db756d35e14815345d3", "size": "3007", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "modules/cms/models/Category.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "434" }, { "name": "Batchfile", "bytes": "1562" }, { "name": "CSS", "bytes": "221930" }, { "name": "HTML", "bytes": "179244" }, { "name": "JavaScript", "bytes": "329240" }, { "name": "PHP", "bytes": "956861" }, { "name": "Shell", "bytes": "25" } ], "symlink_target": "" }
package com.amazonaws.services.wellarchitected.model.transform; import java.math.*; import javax.annotation.Generated; import com.amazonaws.services.wellarchitected.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import com.fasterxml.jackson.core.JsonToken; import static com.fasterxml.jackson.core.JsonToken.*; /** * ListWorkloadSharesResult JSON Unmarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListWorkloadSharesResultJsonUnmarshaller implements Unmarshaller<ListWorkloadSharesResult, JsonUnmarshallerContext> { public ListWorkloadSharesResult unmarshall(JsonUnmarshallerContext context) throws Exception { ListWorkloadSharesResult listWorkloadSharesResult = new ListWorkloadSharesResult(); int originalDepth = context.getCurrentDepth(); String currentParentElement = context.getCurrentParentElement(); int targetDepth = originalDepth + 1; JsonToken token = context.getCurrentToken(); if (token == null) token = context.nextToken(); if (token == VALUE_NULL) { return listWorkloadSharesResult; } while (true) { if (token == null) break; if (token == FIELD_NAME || token == START_OBJECT) { if (context.testExpression("WorkloadId", targetDepth)) { context.nextToken(); listWorkloadSharesResult.setWorkloadId(context.getUnmarshaller(String.class).unmarshall(context)); } if (context.testExpression("WorkloadShareSummaries", targetDepth)) { context.nextToken(); listWorkloadSharesResult.setWorkloadShareSummaries(new ListUnmarshaller<WorkloadShareSummary>(WorkloadShareSummaryJsonUnmarshaller .getInstance()) .unmarshall(context)); } if (context.testExpression("NextToken", targetDepth)) { context.nextToken(); listWorkloadSharesResult.setNextToken(context.getUnmarshaller(String.class).unmarshall(context)); } } else if (token == END_ARRAY || token == END_OBJECT) { if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) { if 
(context.getCurrentDepth() <= originalDepth) break; } } token = context.nextToken(); } return listWorkloadSharesResult; } private static ListWorkloadSharesResultJsonUnmarshaller instance; public static ListWorkloadSharesResultJsonUnmarshaller getInstance() { if (instance == null) instance = new ListWorkloadSharesResultJsonUnmarshaller(); return instance; } }
{ "content_hash": "81ee650d584231c92781c41e6d962620", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 150, "avg_line_length": 39.67567567567568, "alnum_prop": 0.6502043596730245, "repo_name": "aws/aws-sdk-java", "id": "cca92e65efb3f1c53872ea299b44f41d05bf617d", "size": "3516", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-java-sdk-wellarchitected/src/main/java/com/amazonaws/services/wellarchitected/model/transform/ListWorkloadSharesResultJsonUnmarshaller.java", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!-- Encoding Hint: 雀の往来 --> <project> <property name="jnlp.title" value="JST クリックした位置にジャンプ" /> <property name="jnlp.Name" value="JumpToClickedPositionSlider" /> <property name="jnlp.name" value="jumptoclickedpositionslider" /> <property name="jnlp.codebase" value="http://ateraimemo.com/swing/${jnlp.name}/" /> <property name="jnlp.homepage" value="http://ateraimemo.com/Swing/${jnlp.Name}.html" /> </project>
{ "content_hash": "6da1f35141d9ca60fef716830dcf81f8", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 88, "avg_line_length": 51.77777777777778, "alnum_prop": 0.6909871244635193, "repo_name": "aoguren/java-swing-tips", "id": "867c8f00e9c63f6faae537165789c38d0e4ee1e5", "size": "500", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "JumpToClickedPositionSlider/config/jnlp.xml", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "408221" }, { "name": "HTML", "bytes": "214885" }, { "name": "Java", "bytes": "3885129" }, { "name": "Shell", "bytes": "460428" } ], "symlink_target": "" }
package org.neo4j.kernel.impl.transaction.log.pruning; import org.junit.Test; import org.mockito.Mockito; import org.neo4j.io.fs.FileSystemAbstraction; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertThat; public class LogPruneStrategyFactoryTest { @Test public void testLogPruneThresholdsByType() throws Exception { assertThat( getPruneStrategy( "files", "25", "25 files" ), instanceOf( FileCountThreshold.class ) ); assertThat( getPruneStrategy( "size", "16G", "16G size" ), instanceOf( FileSizeThreshold.class ) ); assertThat( getPruneStrategy( "txs", "4G", "4G txs" ), instanceOf( TransactionCountThreshold.class ) ); assertThat( getPruneStrategy( "hours", "100", "100 hours" ), instanceOf( TransactionTimespanThreshold.class ) ); assertThat( getPruneStrategy( "days", "100k", "100k days" ), instanceOf( TransactionTimespanThreshold.class) ); } private Threshold getPruneStrategy(String type, String value, String configValue) { FileSystemAbstraction fileSystem = Mockito.mock( FileSystemAbstraction.class ); return LogPruneStrategyFactory.getThresholdByType( fileSystem, type, value, configValue ); } }
{ "content_hash": "196d4b14e07eb37f8608ec0d550b23d7", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 120, "avg_line_length": 40.516129032258064, "alnum_prop": 0.7197452229299363, "repo_name": "HuangLS/neo4j", "id": "77b05d9865153471317fb907fa00852dd331209a", "size": "2051", "binary": false, "copies": "1", "ref": "refs/heads/2.3", "path": "community/kernel/src/test/java/org/neo4j/kernel/impl/transaction/log/pruning/LogPruneStrategyFactoryTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "23732" }, { "name": "CSS", "bytes": "857344" }, { "name": "CoffeeScript", "bytes": "753075" }, { "name": "Cucumber", "bytes": "19027" }, { "name": "Elixir", "bytes": "2696" }, { "name": "Groff", "bytes": "74996" }, { "name": "HTML", "bytes": "486412" }, { "name": "Java", "bytes": "23514509" }, { "name": "JavaScript", "bytes": "1641699" }, { "name": "Makefile", "bytes": "8288" }, { "name": "PowerShell", "bytes": "160123" }, { "name": "Ruby", "bytes": "706" }, { "name": "Scala", "bytes": "5935401" }, { "name": "Shell", "bytes": "99251" } ], "symlink_target": "" }
/* This is the main program for the shared library test. */ #include <stdio.h> int mainvar = 1; int overriddenvar = 2; extern int shlibvar1; extern int shlib_mainvar (); extern int shlib_overriddenvar (); extern int shlib_shlibvar1 (); extern int shlib_shlibvar2 (); extern int shlib_shlibcall (); extern int shlib_maincall (); extern int shlib_checkfunptr1 (); extern int shlib_checkfunptr2 (); extern int (*shlib_getfunptr1 ()) (); extern int (*shlib_getfunptr2 ()) (); extern int shlib_check (); extern int shlib_shlibcall2 (); /* This function is called by the shared library. */ int main_called () { return 6; } /* This function overrides a function in the shared library. */ int shlib_overriddencall2 () { return 8; } int main () { printf ("mainvar == %d\n", mainvar); printf ("overriddenvar == %d\n", overriddenvar); printf ("shlibvar1 == %d\n", shlibvar1); #ifndef XCOFF_TEST printf ("shlib_mainvar () == %d\n", shlib_mainvar ()); printf ("shlib_overriddenvar () == %d\n", shlib_overriddenvar ()); #endif printf ("shlib_shlibvar1 () == %d\n", shlib_shlibvar1 ()); printf ("shlib_shlibvar2 () == %d\n", shlib_shlibvar2 ()); printf ("shlib_shlibcall () == %d\n", shlib_shlibcall ()); #ifndef XCOFF_TEST printf ("shlib_shlibcall2 () == %d\n", shlib_shlibcall2 ()); printf ("shlib_maincall () == %d\n", shlib_maincall ()); #endif printf ("main_called () == %d\n", main_called ()); #ifndef SYMBOLIC_TEST printf ("shlib_checkfunptr1 (shlib_shlibvar1) == %d\n", shlib_checkfunptr1 (shlib_shlibvar1)); #ifndef XCOFF_TEST printf ("shlib_checkfunptr2 (main_called) == %d\n", shlib_checkfunptr2 (main_called)); #endif { int (*p) (); p = shlib_getfunptr1 (); printf ("shlib_getfunptr1 () "); if (p == shlib_shlibvar1) printf ("=="); else printf ("!="); printf (" shlib_shlibvar1\n"); } #ifndef XCOFF_TEST { int (*p) (); p = shlib_getfunptr2 (); printf ("shlib_getfunptr2 () "); if (p == main_called) printf ("=="); else printf ("!="); printf (" main_called\n"); } #endif #endif printf ("shlib_check () 
== %d\n", shlib_check ()); return 0; }
{ "content_hash": "b3eebcd693dc11f3c180628d1ac95de6", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 68, "avg_line_length": 24.11111111111111, "alnum_prop": 0.619815668202765, "repo_name": "hoangt/goblin-core", "id": "0817d2730a80a29a3ee29d1d32284fed6c837abf", "size": "2170", "binary": false, "copies": "231", "ref": "refs/heads/master", "path": "binutils/binutils-2.23.2/ld/testsuite/ld-shared/main.c", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "AppleScript", "bytes": "1429" }, { "name": "Assembly", "bytes": "37219664" }, { "name": "Awk", "bytes": "1296" }, { "name": "Bison", "bytes": "769886" }, { "name": "C", "bytes": "121618095" }, { "name": "C#", "bytes": "12418" }, { "name": "C++", "bytes": "125510142" }, { "name": "CMake", "bytes": "708668" }, { "name": "CSS", "bytes": "43924" }, { "name": "Cuda", "bytes": "12393" }, { "name": "D", "bytes": "23091496" }, { "name": "DTrace", "bytes": "8533449" }, { "name": "E", "bytes": "3290" }, { "name": "Eiffel", "bytes": "2314" }, { "name": "Elixir", "bytes": "314" }, { "name": "Emacs Lisp", "bytes": "41146" }, { "name": "FORTRAN", "bytes": "377751" }, { "name": "Forth", "bytes": "4188" }, { "name": "GAP", "bytes": "21991" }, { "name": "GDScript", "bytes": "54941" }, { "name": "Gnuplot", "bytes": "446" }, { "name": "Groff", "bytes": "940592" }, { "name": "HTML", "bytes": "1118040" }, { "name": "JavaScript", "bytes": "24233" }, { "name": "LLVM", "bytes": "48362057" }, { "name": "M", "bytes": "2548" }, { "name": "Makefile", "bytes": "5469249" }, { "name": "Mathematica", "bytes": "5497" }, { "name": "Matlab", "bytes": "54444" }, { "name": "Mercury", "bytes": "1222" }, { "name": "Nemerle", "bytes": "141" }, { "name": "OCaml", "bytes": "748821" }, { "name": "Objective-C", "bytes": "4996482" }, { "name": "Objective-C++", "bytes": "1419213" }, { "name": "Perl", "bytes": "974117" }, { "name": "Perl6", "bytes": "80156" }, { "name": "Pure Data", "bytes": "22171" }, { "name": 
"Python", "bytes": "1375992" }, { "name": "R", "bytes": "627855" }, { "name": "Rebol", "bytes": "51929" }, { "name": "Scheme", "bytes": "4296232" }, { "name": "Shell", "bytes": "2237613" }, { "name": "Standard ML", "bytes": "5682" }, { "name": "SuperCollider", "bytes": "734239" }, { "name": "Tcl", "bytes": "2234" }, { "name": "TeX", "bytes": "601780" }, { "name": "VimL", "bytes": "26411" } ], "symlink_target": "" }
package org.zstack.core.db; import org.springframework.beans.factory.annotation.Autowire; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Configurable; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import org.zstack.utils.logging.CLogger; import org.zstack.utils.logging.CLoggerImpl; import javax.persistence.NoResultException; import javax.persistence.Query; import javax.persistence.Tuple; import javax.persistence.TypedQuery; import javax.persistence.criteria.*; import javax.persistence.metamodel.SingularAttribute; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.List; @Configurable(preConstruction=true,autowire=Autowire.BY_TYPE,dependencyCheck=true) public class SimpleQueryImpl<T> implements SimpleQuery<T> { private static final CLogger _logger = CLoggerImpl.getLogger(SimpleQueryImpl.class); private final Class<T> _entityClass; private Root<T> _root; private List<AttrInfo> _selects = new ArrayList<AttrInfo>(); private List<Condition> _conditions = new ArrayList<Condition>(); private List<OrderInfo> orderInfos = new ArrayList<OrderInfo>(); private SingularAttribute groupByInfo = null; private List<Path> _paths = new ArrayList<Path>(); private CriteriaQuery _query; private final CriteriaBuilder _builder; private Integer limit; private Integer start; @Autowired private DatabaseFacade _dbf; class Condition { private final SingularAttribute _attr; private final Op _op; private final Object[] _val; Condition (SingularAttribute attr, Op op, Object... 
val) { this._attr = attr; this._op = op; this._val = val; } } class OrderInfo { private final SingularAttribute attr; private final Od od; OrderInfo(SingularAttribute attr, Od od) { this.attr = attr; this.od = od; } } class AttrInfo { private final SingularAttribute _attr; private final Class<?> _javaType; AttrInfo(SingularAttribute attr, Class<?> type) { this._attr = attr; this._javaType = type; } } SimpleQueryImpl(Class<T> vo) { _entityClass = vo; _builder = _dbf.getCriteriaBuilder(); } @Override public SimpleQuery<T> select(SingularAttribute... attrs) { for (int i=0; i<attrs.length; i++) { _selects.add(new AttrInfo(attrs[i], attrs[i].getJavaType())); } return this; } @Override public SimpleQuery<T> add(SingularAttribute attr, Op op, Object... val) { _conditions.add(new Condition(attr, op, val)); return this; } private Order[] orderClause() { ArrayList<Order> orders = new ArrayList<Order>(orderInfos.size()); Order[] orderArr = (Order[]) Array.newInstance(Order.class, orderInfos.size()); for (OrderInfo info : orderInfos) { if (info.od == Od.ASC) { orders.add(_builder.asc(_root.get(info.attr))); } else if (info.od == Od.DESC) { orders.add(_builder.desc(_root.get(info.attr))); } } return orders.toArray(orderArr); } private CriteriaQuery groupByClause(CriteriaQuery q) { assert _root != null : "You just set root before call groupby clause"; if (groupByInfo != null) { q.groupBy(_root.get(groupByInfo)); } return q; } private Predicate[] whereClause() { List<Predicate> preds = new ArrayList<Predicate>(_conditions.size()); for (Condition con : _conditions) { Op op = con._op; Path p = _root.get(con._attr); Object[] vals = con._val; if (op == Op.EQ) { assert vals.length == 1 : String.format("Op.EQ needs one value, but %s given", vals.length); preds.add(_builder.equal(p, vals[0])); } else if (op == Op.NOT_EQ) { assert vals.length == 1 : String.format("Op.NOT_EQ needs one value, but %s given", vals.length); preds.add(_builder.notEqual(p, vals[0])); } else if (op == 
Op.NOT_NULL) { preds.add(_builder.isNotNull(p)); } else if (op == Op.IN) { //preds.add(_builder.in(p.in(vals))); assert vals.length !=0 : String.format("Op.IN needs more than on value, but %s given", vals.length); preds.add(p.in(vals)); } else if (op == Op.NOT_IN) { assert vals.length !=0 : String.format("Op.NOT_IN needs more than on value, but %s given", vals.length); preds.add(_builder.not(p.in(vals))); } else if (op == Op.NULL) { preds.add(_builder.isNull(p)); } else if (op == Op.LIKE) { assert vals.length == 1 : String.format("Op.LIKE needs one value, but %s given", vals.length); preds.add(_builder.like(p, (String)vals[0])); } else if (op == Op.NOT_LIKE) { assert vals.length == 1 : String.format("Op.NOTLIKE needs one value, but %s given", vals.length); preds.add(_builder.notLike(p, (String)vals[0])); } else if (op == Op.GT) { assert vals.length == 1 : String.format("Op.GT needs one value, but %s given", vals.length); preds.add(_builder.greaterThan(p, (Comparable)vals[0])); } else if (op == Op.LT) { assert vals.length == 1 : String.format("Op.LT needs one value, but %s given", vals.length); preds.add(_builder.lessThan(p, (Comparable)vals[0])); } else if (op == Op.GTE) { assert vals.length == 1 : String.format("Op.GT_EQ needs one value, but %s given", vals.length); preds.add(_builder.greaterThanOrEqualTo(p, (Comparable)vals[0])); } else if (op == Op.LTE) { assert vals.length == 1 : String.format("Op.LT_EQ needs one value, but %s given", vals.length); preds.add(_builder.lessThanOrEqualTo(p, (Comparable)vals[0])); } else { assert(false) : op.toString() + " has not been supported"; } } Predicate[] predArray = (Predicate[]) Array.newInstance(Predicate.class, preds.size()); return preds.toArray(predArray); } private void done() { if (_selects.size() == 0) { _query = _builder.createQuery(_entityClass); } else if (_selects.size() == 1) { Class<?> selectType = _selects.get(0)._javaType; _query = _builder.createQuery(selectType); } else { _query = 
_builder.createTupleQuery(); } _root = _query.from(_entityClass); if (_selects.size() == 0) { } else if (_selects.size() == 1) { Path p = _root.get(_selects.get(0)._attr); _query.select(p); } else { for (AttrInfo info : _selects) { _paths.add(_root.get(info._attr)); } _query.multiselect(_paths); } _query.where(whereClause()); _query.orderBy(orderClause()); groupByClause(_query); } @Override @Transactional(readOnly=true, propagation=Propagation.REQUIRES_NEW) public T find() { assert _selects.size() == 0 : "find() for entity doesn't need any parameter in Query.Select(), you have put some parameter in Query.select(..), either removing these parameters or using findValue() or findTuple()"; done(); T vo = null; try { Query q = _dbf.getEntityManager().createQuery(_query); if (limit != null) { q.setMaxResults(limit); } vo = (T)q.getSingleResult(); } catch (NoResultException e) { } catch (EmptyResultDataAccessException e) { } if (vo != null) { return vo; } else { return null; } } @Override @Transactional(readOnly=true, propagation=Propagation.REQUIRES_NEW) public <K> List<K> list() { assert _selects.size() == 0 : "list() for entities doesn't need any parameter in Query.Select(), you have put some parameter in Query.select(..), either removing these parameters or using listValue() or listTuple()"; done(); Query q = _dbf.getEntityManager().createQuery(_query); if (limit != null) { q.setMaxResults(limit); } if (start != null) { q.setFirstResult(start); } List<T> vos = q.getResultList(); List<K> ros = new ArrayList<K>(vos.size()); for (T vo : vos) { ros.add((K) vo); } return ros; } @Override @Transactional(readOnly=true, propagation=Propagation.REQUIRES_NEW) public <K> K findValue() { assert _selects.size() == 1 : String.format("findValue() only need one parameter in Query.Select(), you have put %s parameter in Query.select(..), either correcting the parameter or using find() or findTuple()", _selects.size()); done(); K value = null; try { Query q = 
_dbf.getEntityManager().createQuery(_query); if (limit != null) { q.setMaxResults(limit); } value = (K)q.getSingleResult(); } catch (NoResultException e) { } catch (EmptyResultDataAccessException e) { } return value; } @Override @Transactional(readOnly=true, propagation=Propagation.REQUIRES_NEW) public <K> List<K> listValue() { assert _selects.size() == 1 : String.format("listValue() only need one parameter in Query.Select(), you have put %s parameter in Query.select(..), either correcting the parameter or using list() or listTuple()", _selects.size()); done(); Query q = _dbf.getEntityManager().createQuery(_query); if (limit != null) { q.setMaxResults(limit); } if (start != null) { q.setFirstResult(start); } List<K> vals = q.getResultList(); return vals; } @Override @Transactional(readOnly=true, propagation=Propagation.REQUIRES_NEW) public Tuple findTuple() { assert _selects.size() > 1 : String.format("findTuple() needs more than one parameter in Query.Select(), you have put %s parameter in Query.select(..), either correcting the parameter or using find() or findValue()", _selects.size()); done(); Tuple ret = null; try { Query q = _dbf.getEntityManager().createQuery(_query); if (limit != null) { q.setMaxResults(limit); } ret = (Tuple)q.getSingleResult(); } catch (NoResultException e) { } catch (EmptyResultDataAccessException e) { } return ret; } @Override @Transactional(readOnly=true, propagation=Propagation.REQUIRES_NEW) public List<Tuple> listTuple() { assert _selects.size() > 1 : String.format("listTuple() needs more than one parameter in Query.Select(), you have put %s parameter in Query.select(..), either correcting the parameter or using list() or listValue()", _selects.size()); done(); Query q = _dbf.getEntityManager().createQuery(_query); if (limit != null) { q.setMaxResults(limit); } if (start != null) { q.setFirstResult(start); } List<Tuple> rets = q.getResultList(); return rets; } @Override @Transactional(readOnly=true, 
propagation=Propagation.REQUIRES_NEW) public Long count() { assert _selects.size() == 0 : "count() for entity doesn't need any parameter in Query.Select(), you have put some parameter in Query.select(..), either removing these parameters or using findValue() or findTuple()"; _query = _builder.createQuery(Long.class); _root = _query.from(_entityClass); _query.select(_builder.count(_root)); _query.where(whereClause()); return (Long) _dbf.getEntityManager().createQuery(_query).getSingleResult(); } @Override public SimpleQuery<T> orderBy(SingularAttribute attr, org.zstack.core.db.SimpleQuery.Od order) { orderInfos.add(new OrderInfo(attr, order)); return this; } @Override public SimpleQuery<T> groupBy(SingularAttribute attr) { this.groupByInfo = attr; return this; } @Override public SimpleQuery<T> isSoftDeleted(SingularAttribute attr) { return add(attr, Op.NULL); } @Override @Transactional(readOnly=true, propagation=Propagation.REQUIRES_NEW) public boolean isExists() { assert _selects.size() == 0 : "isExists() for entity doesn't need any parameter in Query.Select(), you have put some parameter in Query.select(..), either removing these parameters or using findValue() or findTuple()"; _query = _builder.createQuery(Long.class); _root = _query.from(_entityClass); _query.select(_builder.count(_root)); _query.where(whereClause()); TypedQuery<Long> tq = _dbf.getEntityManager().createQuery(_query); tq.setMaxResults(1); long count = tq.getSingleResult(); return count >= 1; } @Override public SimpleQuery<T> setLimit(int limit) { this.limit = limit; return this; } @Override public SimpleQuery<T> setStart(int start) { this.start = start; return this; } }
{ "content_hash": "b1829df6bb6edecdc600ed2527bd136a", "timestamp": "", "source": "github", "line_count": 358, "max_line_length": 242, "avg_line_length": 39.97206703910614, "alnum_prop": 0.5721174004192872, "repo_name": "newbiet/zstack", "id": "376df4d0e7af659d6e2469d49d06fc69826a863c", "size": "14310", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "core/src/main/java/org/zstack/core/db/SimpleQueryImpl.java", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "AspectJ", "bytes": "50928" }, { "name": "Batchfile", "bytes": "1132" }, { "name": "Groovy", "bytes": "16816" }, { "name": "Java", "bytes": "11930671" }, { "name": "Shell", "bytes": "150455" } ], "symlink_target": "" }
/** * @fileoverview Provides the UI elements to generate a new PGP key. */ goog.provide('e2e.ext.ui.panels.GenerateKey'); goog.require('e2e.ext.constants.CssClass'); goog.require('e2e.ext.constants.ElementId'); goog.require('e2e.ext.constants.Keyserver'); //@yahoo added 2 requires goog.require('e2e.ext.keyserver.Client'); goog.require('e2e.ext.ui.templates.panels.generatekey'); //@yahoo added 3 requires goog.require('e2e.ext.utils'); goog.require('e2e.ext.utils.action'); goog.require('e2e.ext.utils.text'); goog.require('goog.array'); goog.require('goog.dom'); goog.require('goog.dom.classlist'); goog.require('goog.events.EventType'); goog.require('goog.events.KeyCodes'); goog.require('goog.ui.Component'); goog.require('goog.ui.KeyboardShortcutHandler'); goog.require('soy'); goog.scope(function() { var constants = e2e.ext.constants; var panels = e2e.ext.ui.panels; var templates = e2e.ext.ui.templates.panels.generatekey; /** * Constructor for the UI component that provides the form to generate new PGP * keys. * @param {!function(...)} callback The callback to invoke when a new PGP key is * to be generated. * @param {boolean=} opt_hideTitle Optional. A flag to control the display of * the section title. If true, the section title will not be displayed. * Defaults to false. * @param {string=} opt_actionBtnTitle Optional. The title for the action * button. Uses extension defaults if not specified. * @constructor * @extends {goog.ui.Component} */ panels.GenerateKey = function(callback, opt_hideTitle, opt_actionBtnTitle) { goog.base(this); /** * The callback to invoke when a new PGP key is to be generated. * @type {!function(...)} * @private */ this.callback_ = callback; /** * //@yahoo * The keyserver client component associated with this panel. * @type {!e2e.ext.keyserver.Client} * @private */ this.keyserverClient_ = new e2e.ext.keyserver.Client( e2e.ext.constants.Keyserver.DEFAULT_LOCATION); /** * The title for the generate key section. 
If empty, it will not be displayed. * @type {string} * @private */ this.sectionTitle_ = Boolean(opt_hideTitle) ? '' : chrome.i18n.getMessage('genKeyTitle'); /** * The title for the action button. * @type {string} * @private */ this.actionButtonTitle_ = opt_actionBtnTitle || chrome.i18n.getMessage('genKeyGenerateButtonLabel'); }; goog.inherits(panels.GenerateKey, goog.ui.Component); /** @override */ panels.GenerateKey.prototype.createDom = function() { goog.base(this, 'createDom'); this.decorateInternal(this.getElement()); }; /** @override */ panels.GenerateKey.prototype.decorateInternal = function(elem) { goog.base(this, 'decorateInternal', elem); elem.id = constants.ElementId.GENERATE_KEY_FORM; soy.renderElement(elem, templates.generateKeyForm, { sectionTitle: this.sectionTitle_, emailLabel: chrome.i18n.getMessage('genKeyEmailLabel'), commentsLabel: chrome.i18n.getMessage('genKeyCommentsLabel'), actionButtonTitle: this.actionButtonTitle_, signupCancelButtonTitle: chrome.i18n.getMessage('actionCancelPgpAction') }); //@yahoo Prefill the input with the user's email if possible e2e.ext.utils.action.getUserYmailAddress(goog.bind(function(email) { var input = this.getElementByClass(constants.CssClass.EMAIL); if (input) { input.value = email || ''; } }, this)); }; /** @override */ panels.GenerateKey.prototype.enterDocument = function() { goog.base(this, 'enterDocument'); var keyboardHandler = new goog.ui.KeyboardShortcutHandler( this.getElementByClass(constants.CssClass.EMAIL)); keyboardHandler.registerShortcut('enter', goog.events.KeyCodes.ENTER); this.getHandler(). listen( this.getElementByClass(constants.CssClass.ACTION), goog.events.EventType.CLICK, this.generate_). listen( this.getElementByClass(constants.CssClass.CANCEL), goog.events.EventType.CLICK, this.hideSignupForm_). listen( keyboardHandler, goog.ui.KeyboardShortcutHandler.EventType.SHORTCUT_TRIGGERED, this.generate_); }; /** * Generates a new PGP key using the information provided by the user. 
* @private */ panels.GenerateKey.prototype.generate_ = function() { //@yahoo this.clearFailure_(); var name = ''; var email = this.getElementByClass(constants.CssClass.EMAIL).value; var comments = ''; // TODO(radi): Add a mechanism to allow the user to adjust this. var expDate = Math.floor(new Date('9999/12/31').getTime() / 1e3); this.callback_(this, name, email, comments, expDate); }; /** * Resets the key generation form. */ panels.GenerateKey.prototype.reset = function() { var inputs = this.getElement().querySelectorAll('input'); goog.array.forEach(inputs, function(input) { input.value = ''; }); }; /** * //@yahoo * Sends an OpenPGP public key(s) to the keyserver. * @param {!e2e.openpgp.Keys} keys * @param {function(string)} callback * @param {e2e.openpgp.ContextImpl} ctx */ panels.GenerateKey.prototype.sendKeys = function(keys, callback, ctx) { goog.array.forEach(keys, goog.bind(function(key) { if (!key.key.secret) { var email = e2e.ext.utils.text.extractValidYahooEmail(key.uids[0]); if (email) { try { this.keyserverClient_.sendKey(email, key.serialized, goog.bind( function(response) { // Key was successfully registered, and response is valid e2e.ext.utils.action.refreshYmail(); callback(response); window.alert(chrome.i18n.getMessage('sendKeySuccess')); }, this), goog.bind(function(err) { // The key wasn't sent to the server or the server signature // was invalid, so delete it for now. // TODO: Separate key generation and import to keyring. if (ctx !== null) { ctx.deleteKey(key.uids[0]); } this.displayFailure_(err); }, this)); } catch (e) { console.error('got key send failure in generate key', email); this.displayFailure_(e); } } } }, this)); }; /** * Hides the signup form. 
* @private */ panels.GenerateKey.prototype.hideSignupForm_ = function() { var signupForm = goog.dom.getElement( e2e.ext.constants.ElementId.GENERATE_KEY_FORM); var cancelButton = goog.dom.getElementByClass( e2e.ext.constants.CssClass.CANCEL, signupForm); var signupPrompt = goog.dom.getElement( e2e.ext.constants.ElementId.SIGNUP_PROMPT); var keyringOptions = goog.dom.getElement( e2e.ext.constants.ElementId.KEYRING_OPTIONS_DIV); goog.dom.classlist.add(signupForm, e2e.ext.constants.CssClass.HIDDEN); goog.dom.classlist.add(cancelButton, e2e.ext.constants.CssClass.HIDDEN); goog.dom.classlist.remove(signupPrompt, e2e.ext.constants.CssClass.HIDDEN); goog.dom.classlist.remove(keyringOptions, e2e.ext.constants.CssClass.HIDDEN); }; /** * //@yahoo * Displays error message. * @param {Error} error The error to display. * @private */ panels.GenerateKey.prototype.displayFailure_ = function(error) { var errorDiv = goog.dom.getElementByClass(constants.CssClass.ERROR); if (error) { var errorMsg = goog.isDef(error.messageId) ? chrome.i18n.getMessage(error.messageId) : error.message; e2e.ext.utils.errorHandler(error); if (errorDiv) { errorDiv.textContent = errorMsg; } else { // The errorDiv might be destroyed by the time displayFailure_ fires window.alert('Error: ' + errorMsg); } } else if (errorDiv) { errorDiv.textContent = ''; } }; /** * //@yahoo * Clears error messages. * @private */ panels.GenerateKey.prototype.clearFailure_ = function() { this.displayFailure_(null); }; }); // goog.scope
{ "content_hash": "82f16ea009e3bd19fcc84366c4c38810", "timestamp": "", "source": "github", "line_count": 262, "max_line_length": 80, "avg_line_length": 30.35496183206107, "alnum_prop": 0.6727021249842826, "repo_name": "yahoo/end-to-end", "id": "5d5cd07d58265bca439933252b5c5864ec3576e3", "size": "8576", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/javascript/crypto/e2e/extension/ui/panels/generatekey/generatekey.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "25395" }, { "name": "HTML", "bytes": "916773" }, { "name": "JavaScript", "bytes": "4333731" }, { "name": "Python", "bytes": "3136" }, { "name": "Shell", "bytes": "15808" } ], "symlink_target": "" }
namespace llvm { class MachineModuleInfo; class Mangler; class MCAsmInfo; class MCExpr; class MCSection; class MCSectionMachO; class MCSymbol; class MCContext; class GlobalValue; class TargetMachine; class TargetLoweringObjectFileELF : public TargetLoweringObjectFile { bool UseInitArray; mutable unsigned NextUniqueID = 0; public: TargetLoweringObjectFileELF() : UseInitArray(false) {} ~TargetLoweringObjectFileELF() override {} void emitPersonalityValue(MCStreamer &Streamer, const DataLayout &TM, const MCSymbol *Sym) const override; /// Given a constant with the SectionKind, return a section that it should be /// placed in. MCSection *getSectionForConstant(const DataLayout &DL, SectionKind Kind, const Constant *C) const override; MCSection *getExplicitSectionGlobal(const GlobalValue *GV, SectionKind Kind, Mangler &Mang, const TargetMachine &TM) const override; MCSection *SelectSectionForGlobal(const GlobalValue *GV, SectionKind Kind, Mangler &Mang, const TargetMachine &TM) const override; MCSection *getSectionForJumpTable(const Function &F, Mangler &Mang, const TargetMachine &TM) const override; bool shouldPutJumpTableInFunctionSection(bool UsesLabelDifference, const Function &F) const override; /// Return an MCExpr to use for a reference to the specified type info global /// variable from exception handling information. const MCExpr * getTTypeGlobalReference(const GlobalValue *GV, unsigned Encoding, Mangler &Mang, const TargetMachine &TM, MachineModuleInfo *MMI, MCStreamer &Streamer) const override; // The symbol that gets passed to .cfi_personality. 
MCSymbol *getCFIPersonalitySymbol(const GlobalValue *GV, Mangler &Mang, const TargetMachine &TM, MachineModuleInfo *MMI) const override; void InitializeELF(bool UseInitArray_); MCSection *getStaticCtorSection(unsigned Priority, const MCSymbol *KeySym) const override; MCSection *getStaticDtorSection(unsigned Priority, const MCSymbol *KeySym) const override; }; class TargetLoweringObjectFileMachO : public TargetLoweringObjectFile { public: ~TargetLoweringObjectFileMachO() override {} TargetLoweringObjectFileMachO(); /// Emit the module flags that specify the garbage collection information. void emitModuleFlags(MCStreamer &Streamer, ArrayRef<Module::ModuleFlagEntry> ModuleFlags, Mangler &Mang, const TargetMachine &TM) const override; MCSection *SelectSectionForGlobal(const GlobalValue *GV, SectionKind Kind, Mangler &Mang, const TargetMachine &TM) const override; MCSection *getExplicitSectionGlobal(const GlobalValue *GV, SectionKind Kind, Mangler &Mang, const TargetMachine &TM) const override; MCSection *getSectionForConstant(const DataLayout &DL, SectionKind Kind, const Constant *C) const override; /// The mach-o version of this method defaults to returning a stub reference. const MCExpr * getTTypeGlobalReference(const GlobalValue *GV, unsigned Encoding, Mangler &Mang, const TargetMachine &TM, MachineModuleInfo *MMI, MCStreamer &Streamer) const override; // The symbol that gets passed to .cfi_personality. 
MCSymbol *getCFIPersonalitySymbol(const GlobalValue *GV, Mangler &Mang, const TargetMachine &TM, MachineModuleInfo *MMI) const override; /// Get MachO PC relative GOT entry relocation const MCExpr *getIndirectSymViaGOTPCRel(const MCSymbol *Sym, const MCValue &MV, int64_t Offset, MachineModuleInfo *MMI, MCStreamer &Streamer) const override; void getNameWithPrefix(SmallVectorImpl<char> &OutName, const GlobalValue *GV, Mangler &Mang, const TargetMachine &TM) const override; }; class TargetLoweringObjectFileCOFF : public TargetLoweringObjectFile { public: ~TargetLoweringObjectFileCOFF() override {} MCSection *getExplicitSectionGlobal(const GlobalValue *GV, SectionKind Kind, Mangler &Mang, const TargetMachine &TM) const override; MCSection *SelectSectionForGlobal(const GlobalValue *GV, SectionKind Kind, Mangler &Mang, const TargetMachine &TM) const override; void getNameWithPrefix(SmallVectorImpl<char> &OutName, const GlobalValue *GV, Mangler &Mang, const TargetMachine &TM) const override; MCSection *getSectionForJumpTable(const Function &F, Mangler &Mang, const TargetMachine &TM) const override; /// Emit Obj-C garbage collection and linker options. Only linker option /// emission is implemented for COFF. void emitModuleFlags(MCStreamer &Streamer, ArrayRef<Module::ModuleFlagEntry> ModuleFlags, Mangler &Mang, const TargetMachine &TM) const override; MCSection *getStaticCtorSection(unsigned Priority, const MCSymbol *KeySym) const override; MCSection *getStaticDtorSection(unsigned Priority, const MCSymbol *KeySym) const override; void emitLinkerFlagsForGlobal(raw_ostream &OS, const GlobalValue *GV, const Mangler &Mang) const override; }; } // end namespace llvm #endif
{ "content_hash": "88ee2ddf50d33e6db772e5129602d48a", "timestamp": "", "source": "github", "line_count": 147, "max_line_length": 80, "avg_line_length": 42.40816326530612, "alnum_prop": 0.6153352582611485, "repo_name": "mirams/opencor", "id": "2f1379131cbdc51440f32f4cb9bd45509aab7f39", "size": "6984", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "src/plugins/thirdParty/LLVM/include/llvm/CodeGen/TargetLoweringObjectFileImpl.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2211" }, { "name": "C", "bytes": "7562304" }, { "name": "C++", "bytes": "114016189" }, { "name": "CMake", "bytes": "216057" }, { "name": "HTML", "bytes": "9433" }, { "name": "NSIS", "bytes": "4599" }, { "name": "Objective-C", "bytes": "282234" }, { "name": "PAWN", "bytes": "136" }, { "name": "PHP", "bytes": "154028" }, { "name": "POV-Ray SDL", "bytes": "32617292" }, { "name": "Shell", "bytes": "2656" }, { "name": "SourcePawn", "bytes": "1544" }, { "name": "Visual Basic", "bytes": "332" }, { "name": "XSLT", "bytes": "59631" } ], "symlink_target": "" }
from __future__ import division # Local RGZ modules import rgz import consensus from load_contours import get_contours,make_pathdict # Default packages import json import cStringIO import urllib import time import random import os from ast import literal_eval from collections import Counter # Other packages import pandas as pd import numpy as np from astropy.io import ascii,fits from astropy import wcs from PIL import Image from matplotlib import pyplot as plt from matplotlib.pyplot import cm from matplotlib.path import Path import matplotlib.patches as patches from scipy.interpolate import griddata from scipy.ndimage.filters import maximum_filter from scipy.ndimage.morphology import generate_binary_structure, binary_erosion from astroML.plotting import hist as histML # Local paths and files rgz_dir = '/Users/willettk/Astronomy/Research/GalaxyZoo/rgz-analysis' rgz_consensus_file = '%s/csv/consensus_rgz_first.csv' % rgz_dir # Various image parameters IMG_HEIGHT_OLD = 424.0 # number of pixels in the original JPG image along the y axis IMG_WIDTH_OLD = 424.0 # number of pixels in the original JPG image along the x axis IMG_HEIGHT_NEW = 500.0 # number of pixels in the downloaded JPG image along the y axis IMG_WIDTH_NEW = 500.0 # number of pixels in the downloaded JPG image along the x axis FITS_HEIGHT = 301.0 # number of pixels in the FITS image (?) along the y axis FITS_WIDTH = 301.0 # number of pixels in the FITS image (?) along the x axis FIRST_FITS_HEIGHT = 132.0 # number of pixels in the FITS image along the y axis FIRST_FITS_WIDTH = 132.0 # number of pixels in the FITS image along the y axis first_ir_scale_x = FIRST_FITS_WIDTH / IMG_WIDTH_NEW first_ir_scale_y = FIRST_FITS_HEIGHT / IMG_HEIGHT_NEW PIXEL_SIZE = 0.00016667#/3600.0 # the number of arcseconds per pixel in the FITS image xmin = 1. xmax = IMG_HEIGHT_NEW ymin = 1. 
ymax = IMG_WIDTH_NEW subjects,classifications = rgz.load_rgz_data() def get_doubles(consensus_level=0.50): # Find examples of RGZ subjects with exactly two radio components rgzconsensus = ascii.read(rgz_consensus_file,format='csv') dblidx = (rgzconsensus['n_radio'] == 2) & (rgzconsensus['consensus_level'] >= consensus_level) doubles = rgzconsensus[dblidx] return doubles def get_triples(consensus_level=0.50): # Find examples of RGZ subjects with exactly three radio components rgzconsensus = ascii.read(rgz_consensus_file,format='csv') trpidx = (rgzconsensus['n_radio'] == 3) & (rgzconsensus['consensus_level'] >= consensus_level) triples = rgzconsensus[trpidx] return triples def all_doubles_pixradio(doubles,pathdict): # Compute the coordinates of the optical ID and radio component centroids, bending angle, and position angle # for all consensus RGZ subjects with exactly two radio components with open('%s/bending_angles/angles_double_pixradio.csv' % rgz_dir,'w') as f: print >> f,'zooniverse_id,bending_angle,position_angle' for double in doubles: #irx,iry,radio_components = pix_convert(double,pathdict) xc,yc = literal_eval(double['ir_peak']) if xc is not None: subject = subjects.find_one({'zooniverse_id':double['zooniverse_id']}) contours = get_contours(subject,pathdict) radio_components = contours['contours'] radio_centroids = pix_radio(radio_components) alpha = bending_angle(xc,yc,radio_centroids[0][0],radio_centroids[0][1],radio_centroids[1][0],radio_centroids[1][1]) alpha_deg = alpha * 180./np.pi phi = position_angle(xc,yc,radio_centroids[0][0],radio_centroids[0][1],radio_centroids[1][0],radio_centroids[1][1]) phi_deg = phi * 180./np.pi if alpha is not None: print >> f,'%s,%.3f,%.3f' % (double['zooniverse_id'],alpha_deg,phi_deg) return None def dblid(doubles,zooniverse_id): # Retrieve subject for a single two-component radio source in the doubles list dbl = doubles[doubles['zooniverse_id'] == zooniverse_id][0] return dbl def pix_convert(galaxy,pathdict,local=False): 
# Convert IR coordinates from RA/dec into pixel subject = subjects.find_one({'zooniverse_id':galaxy['zooniverse_id']}) contours = get_contours(subject,pathdict) radio_components = contours['contours'] try: assert len(radio_components) > 1, \ 'Radio data only has %i component for %s' % (len(radio_components),galaxy['zooniverse_id']) except AssertionError: return None,None,None # Keep everything in pixel coordinates. Reverse what's done in consensus.py; # transform an RA/dec pair into the pixel x/y pair. # Convert the pixel coordinates into RA,dec using the WCS object from the header hdulist = fits.open(pathdict[galaxy['first_id']]) w = wcs.WCS(hdulist[0].header) worldcrd = np.array([[galaxy['ra'],galaxy['dec']]],np.float_) pix = w.wcs_world2pix(worldcrd,0) irx,iry = pix[0] irx_first,iry_first = np.round(pix[0][0] / first_ir_scale_x), np.round(IMG_HEIGHT_NEW - pix[0][1] / first_ir_scale_y) return irx,iry,radio_components def bending_angle(xc,yc,x1,y1,x2,y2): # Compute the bending angle (in radians) between three points in pixel space ''' Points are: - xc,yc: x,y center of IR counterpart - x1,y1: x,y center of 1st radio lobe - x2,y2: x,y center of 2nd radio lobe ''' r1 = np.array([x1,y1]) r2 = np.array([x2,y2]) center = np.array([xc,yc]) r1diff = r1-center r2diff = r2-center r1len = np.hypot(r1diff[0],r1diff[1]) r2len = np.hypot(r2diff[0],r2diff[1]) alpha = np.arccos(np.dot(r1diff,r2diff) / (r1len*r2len)) return alpha def bending_angle_sdss(zid,x1,y1,x2,y2): # Compute the bending angle (in radians) between three points in pixel space ''' - zid - x1,y1: x,y center of 1st radio lobe - x2,y2: x,y center of 2nd radio lobe ''' # I'd love to do this purely in RA/dec, converting all positions in Astropy, but functionality doesn't seem to be there. 
# Convert SDSS optical position into radio-frame pixel coordinates hdulist = fits.open(pathdict[galaxy['first_id']]) w = wcs.WCS(hdulist[0].header) worldcrd = np.array([[galaxy['ra'],galaxy['dec']]],np.float_) pix = w.wcs_world2pix(worldcrd,0) xc,yc = np.round(pix[0][0] / first_ir_scale_x), np.round(IMG_HEIGHT_NEW - pix[0][1] / first_ir_scale_y) r1 = np.array([x1,y1]) r2 = np.array([x2,y2]) center = np.array([xc,yc]) r1diff = r1-center r2diff = r2-center r1len = np.hypot(r1diff[0],r1diff[1]) r2len = np.hypot(r2diff[0],r2diff[1]) alpha = np.arccos(np.dot(r1diff,r2diff) / (r1len*r2len)) return alpha def position_angle(xc,yc,x1,y1,x2,y2): # Compute the position angle (in radians, with respect to north) between three points in pixel space ''' Points are: - xc,yc: x,y center of bending angle - x1,y1: x,y center of 1st component - x2,y2: x,y center of 2nd component ''' r1 = np.array([x1,y1]) r2 = np.array([x2,y2]) center = np.array([xc,yc]) r12sum = (r1-center) + (r2-center) r12len = np.hypot(r12sum[0],r12sum[1]) north = np.array([0,1]) northlen = np.hypot(north[0],north[1]) alpha = np.arccos(np.dot(r12sum,north) / (r12len*northlen)) # Measure CCW from north if r12sum[0] > 0.: alpha = 2*np.pi - alpha return alpha def pix_radio(radio_components): # From list of bounding boxes in radio pixel coordinates, # return the centroids of the boxes in IR pixel coordinates radio_centroids = [] for comp in radio_components: bbox = comp[0]['bbox'] cxu = np.median((bbox[0],bbox[2])) cyu = np.median((bbox[1],bbox[3])) cx,cy = cxu/first_ir_scale_x,cyu/first_ir_scale_y radio_centroids.append((cx,cy)) return radio_centroids def bbox_radio_to_ir(bbox): # Convert the bbox in RGZ subject from radio to infrared pixel scale bbox_ir = [bbox[0]/first_ir_scale_x,bbox[1]/first_ir_scale_x,bbox[2]/first_ir_scale_x,bbox[3]/first_ir_scale_x] return bbox_ir def load_angles(filename): # Load the CSV file of the computed bending angles for multi-peaked or multi-lobed sources with 
open('%s/bending_angles/%s.csv' % (rgz_dir,filename),'r') as f: angstr = f.readlines() ba = [float(x.split(',')[1]) for x in angstr[1:]] #pa = [float(x.split(',')[2]) for x in angstr[1:]] return ba#,pa def plothist(savefig=False): # Plot distribution of the bending angles for RGZ sources ''' angles_double_pixradio = load_angles('angles_double_pixradio') angles_triple_pixradio = load_angles('angles_triple_pixradio') angles_double_mps = load_angles('angles_multipeaked_singles') angles_triple_mps = load_angles('angles_multipeaked_singles_no_optical') ''' data = ascii.read('{:}/csv/static_catalog3.csv'.format(rgz_dir),delimiter=' ') angles_radiodouble = data[data['angle_type'] == 'double_pixradio']['bending_angle'] angles_mps = data[data['angle_type'] == 'multipeaked_singles']['bending_angle'] # Set up figure fig = plt.figure(2,(15,8)) c1 = '#377eb8' c2 = '#e41a1c' c3 = '#4daf4a' c4 = '#984ea3' # Panel 1 - histogram ax1 = fig.add_subplot(121) histML(angles_radiodouble, bins=15, ax=ax1, histtype='step', lw=3, alpha=1.0, color=c1, range=(0,90),label='double lobed, multi-contour') histML(angles_mps, bins=15, ax=ax1, histtype='step', lw=3, alpha=1.0, color=c2, range=(0,90),label='double-peaked, single-contour') ax1.set_xlim(0,90) ax1.vlines(x=np.median(angles_radiodouble),ymin=ax1.get_ylim()[0],ymax = ax1.get_ylim()[1],color=c1,linestyle='--') ax1.vlines(x=np.median(angles_mps),ymin=ax1.get_ylim()[0],ymax = ax1.get_ylim()[1],color=c2,linestyle='--') ax1.set_xlabel(r'bending angle [deg]',fontsize=24) ax1.set_ylabel('count',fontsize=20) plt.tick_params(axis='both', which='major', labelsize=20) # Panel 2 - cumulative ax2 = fig.add_subplot(122) histML(angles_radiodouble, bins=15, ax=ax2, histtype='step', lw=3, alpha=1.0, color=c1, range=(0,90),label='double lobed, multi-contour',cumulative=True) histML(angles_mps, bins=15, ax=ax2, histtype='step', lw=3, alpha=1.0, color=c2, range=(0,90),label='double-peaked, single-contour',cumulative=True) ax2.set_xlim(0,90) 
ax2.vlines(x=np.median(angles_radiodouble),ymin=ax2.get_ylim()[0],ymax = ax2.get_ylim()[1],color=c1,linestyle='--') ax2.vlines(x=np.median(angles_mps),ymin=ax2.get_ylim()[0],ymax = ax2.get_ylim()[1],color=c2,linestyle='--') ax2.set_xlabel(r'bending angle [deg]',fontsize=24) ax2.set_ylabel('count',fontsize=20) ax2.legend(loc='upper left') plt.tick_params(axis='both', which='major', labelsize=20) # Finish adjusting plot parameters fig.tight_layout() if savefig: fig.savefig('%s/bending_angles/plots/bending_angles_hist.pdf' % rgz_dir) else: plt.show() return None def plot_one_double(zooniverse_id,pathdict,figno=1,savefig=False,anglepath='',dbltype='radio'): # Make a four-panel plot of the consensus identification with marked bending angle and position angle for a double source cons = consensus.checksum(zooniverse_id) subject = subjects.find_one({'zooniverse_id':zooniverse_id}) contours = get_contours(subject,pathdict) radio_components = contours['contours'] # Plot image answer = cons['answer'] # Download contour data sf_x = 500./contours['width'] sf_y = 500./contours['height'] verts_all = [] codes_all = [] components = contours['contours'] for comp in components: # Order of bounding box components is (xmax,ymax,xmin,ymin) comp_xmax,comp_ymax,comp_xmin,comp_ymin = comp[0]['bbox'] # Only plot radio components identified by the users as the consensus; # check on the xmax value to make sure for v in answer.itervalues(): if comp_xmax in v['xmax']: for idx,level in enumerate(comp): verts = [((p['x'])*sf_x,(p['y']-1)*sf_y) for p in level['arr']] codes = np.ones(len(verts),int) * Path.LINETO codes[0] = Path.MOVETO verts_all.extend(verts) codes_all.extend(codes) try: path = Path(verts_all, codes_all) patch_black = patches.PathPatch(path, facecolor = 'none', edgecolor='black', lw=1) except AssertionError: print 'Users found no components for consensus match of %s' % zooniverse_id # Plot the infrared results fig = plt.figure(figno,(15,4)) fig.clf() ax3 = fig.add_subplot(143) ax4 
= fig.add_subplot(144) colormaparr = [cm.hot_r,cm.Blues,cm.RdPu,cm.Greens,cm.PuBu,cm.YlGn,cm.Greys][::-1] colorarr = ['r','b','m','g','c','y','k'][::-1] if len(answer) > 0: # At least one galaxy was identified for idx,ans in enumerate(answer.itervalues()): if ans.has_key('peak_data'): # Plot the KDE map colormap = colormaparr.pop() ax3.imshow(np.rot90(ans['peak_data']['Z']), cmap=colormap,extent=[xmin, xmax, ymin, ymax]) # Plot individual sources color = colorarr.pop() ''' x_plot = [xt * 500./424 for xt in ans['ir_x'] if xt != -99.0] y_plot = [yt * 500./424 for yt in ans['ir_y'] if yt != -99.0] ''' x_plot,y_plot = ans['ir_x'],ans['ir_y'] ax3.scatter(x_plot, y_plot, c=color, marker='o', s=10, alpha=1./len(x_plot)) ax4.plot([ans['ir_peak'][0]],[ans['ir_peak'][1]],color=color,marker='*',markersize=12) elif ans.has_key('ir'): color = colorarr.pop() x_plot,y_plot = ans['ir'] ax3.plot([x_plot],[y_plot],color=color,marker='o',markersize=2) ax4.plot([x_plot],[y_plot],color=color,marker='*',markersize=12) else: ax4.text(550,idx*25,'#%i - no IR host' % idx,fontsize=11) ax3.set_xlim([0, 500]) ax3.set_ylim([500, 0]) ax3.set_title(zooniverse_id) ax3.set_aspect('equal') ax4.set_xlim([0, 500]) ax4.set_ylim([500, 0]) ax4.set_title('Consensus (%i/%i users)' % (cons['n_users'],cons['n_total'])) ax4.set_aspect('equal') # Display IR and radio images url_standard = subject['location']['standard'] im_standard = Image.open(cStringIO.StringIO(urllib.urlopen(url_standard).read())) ax1 = fig.add_subplot(141) ax1.imshow(im_standard,origin='upper') ax1.set_title('WISE') url_radio = subject['location']['radio'] im_radio = Image.open(cStringIO.StringIO(urllib.urlopen(url_radio).read())) ax2 = fig.add_subplot(142) ax2.imshow(im_radio,origin='upper') ax2.set_title(subject['metadata']['source']) ax2.get_yaxis().set_ticklabels([]) ax3.get_yaxis().set_ticklabels([]) # Plot contours identified as the consensus if len(answer) > 0: ax4.add_patch(patch_black) radio_centers = [] for component in 
components: bbox = component[0]['bbox'] # Draw centers of bounding boxes xradiocen = np.median((bbox[0],bbox[2])) / first_ir_scale_x yradiocen = np.median((bbox[1],bbox[3])) / first_ir_scale_y radio_centers.append((xradiocen,yradiocen)) ax4.scatter(xradiocen,yradiocen, c='g', marker='s', s=15, alpha=1) # Draw edge of bounding box xradiomin = bbox[2] / first_ir_scale_x xradiomax = bbox[0] / first_ir_scale_x yradiomin = bbox[3] / first_ir_scale_y yradiomax = bbox[1] / first_ir_scale_y ax4.plot([xradiomin,xradiomin,xradiomax,xradiomax,xradiomin],[yradiomin,yradiomax,yradiomax,yradiomin,yradiomin],color='g') for ans in answer: if answer[ans].has_key('ir_peak'): # Optical counterpart position xc,yc = answer[ans]['ir_peak'] # Position of radio sources for multi-peaked, single-component subjects if dbltype == "mps": local_maxima = mps_cc(subject,pathdict,plot=False,verbose=False) suffix = '' if len(local_maxima) == 1 else 's' assert len(local_maxima) >= 2, \ "%i peak%s in first radio component of %s; must have exactly 2 peaks to plot bending angle using mps method." 
% (len(local_maxima),suffix,zooniverse_id) x1 = local_maxima[0][1][0] / first_ir_scale_x y1 = local_maxima[0][1][1] / first_ir_scale_y x2 = local_maxima[1][1][0] / first_ir_scale_x y2 = local_maxima[1][1][1] / first_ir_scale_y ax4.scatter(x1,y1, color='darkorange', marker='s', s=15, alpha=1) ax4.scatter(x2,y2, color='darkorange', marker='s', s=15, alpha=1) # Position of radio sources for double-lobed, two-component subjects elif len(radio_centers) == 2: x1,y1 = radio_centers[0] x2,y2 = radio_centers[1] else: raise ValueError("Centers of radio boxes not defined.") m1 = (y1 - yc) / (x1 - xc) b1 = yc - m1*xc m2 = (y2 - yc) / (x2 - xc) b2 = yc - m2*xc xedge1 = 0 if x1 < xc else 500 yedge1 = y1 - (x1-xedge1)*(yc-y1)/(xc-x1) xedge2 = 0 if x2 < xc else 500 yedge2 = y2 - (x2-xedge2)*(yc-y2)/(xc-x2) # Draw and annotate the the bending angle ax4.plot([xedge1,xc],[yedge1,yc],color='orange',linestyle='--') ax4.plot([xedge2,xc],[yedge2,yc],color='orange',linestyle='--') alpha_deg = bending_angle(xc,yc,x1,y1,x2,y2) * 180/np.pi ax4.text(550,0,r'$\alpha$ = %.1f deg' % alpha_deg,fontsize=11) # Draw vector pointing north # Draw the bisector vector ''' yd = y_bisect(xc,yc,xedge1,yedge1,xedge2,yedge2) ax4.arrow(xc,yc,-xc,yd-yc,head_width=20, head_length=40, fc='blue', ec='blue') ''' # Compute the position angle with respect to north phi_deg = position_angle(xc,500-yc,x1,500-y1,x2,500-y2) * 180/np.pi ax4.text(550,50,r'$\phi$ = %.1f deg' % phi_deg,fontsize=11) ax4.arrow(xc,yc,0,-yc,head_width=20, head_length=40, fc='grey', ec='grey',ls='dotted') else: print "No peak for %s" % zooniverse_id ax4.yaxis.tick_right() ax1.get_xaxis().set_ticks([0,100,200,300,400]) ax2.get_xaxis().set_ticks([0,100,200,300,400]) ax3.get_xaxis().set_ticks([0,100,200,300,400]) ax4.get_xaxis().set_ticks([0,100,200,300,400,500]) plt.subplots_adjust(wspace=0.02) # Save hard copy of the figure if savefig: fig.savefig('%s/bending_angles/plots/individual/%sba_%s.pdf' % (rgz_dir,anglepath,zooniverse_id)) plt.close() 
else: plt.show() # Close figure after it's done; otherwise mpl complains about having thousands of stuff open return None def plot_one_triple(zooniverse_id,pathdict,figno=1,savefig=False,anglepath=''): # Make a four-panel plot of the consensus identification with marked bending angle and position angle for a triple source cons = consensus.checksum(zooniverse_id) subject = subjects.find_one({'zooniverse_id':zooniverse_id}) contours = get_contours(subject,pathdict) radio_components = contours['contours'] # Plot image answer = cons['answer'] # Download contour data sf_x = 500./contours['width'] sf_y = 500./contours['height'] verts_all = [] codes_all = [] components = contours['contours'] for comp in components: # Order of bounding box components is (xmax,ymax,xmin,ymin) comp_xmax,comp_ymax,comp_xmin,comp_ymin = comp[0]['bbox'] # Only plot radio components identified by the users as the consensus; # check on the xmax value to make sure for v in answer.itervalues(): if comp_xmax in v['xmax']: for idx,level in enumerate(comp): verts = [((p['x'])*sf_x,(p['y']-1)*sf_y) for p in level['arr']] codes = np.ones(len(verts),int) * Path.LINETO codes[0] = Path.MOVETO verts_all.extend(verts) codes_all.extend(codes) try: path = Path(verts_all, codes_all) patch_black = patches.PathPatch(path, facecolor = 'none', edgecolor='black', lw=1) except AssertionError: print 'Users found no components for consensus match of %s' % zooniverse_id # Plot the infrared results fig = plt.figure(figno,(15,4)) fig.clf() ax3 = fig.add_subplot(143) ax4 = fig.add_subplot(144) colormaparr = [cm.hot_r,cm.Blues,cm.RdPu,cm.Greens,cm.PuBu,cm.YlGn,cm.Greys][::-1] colorarr = ['r','b','m','g','c','y','k'][::-1] if len(answer) > 0: # At least one galaxy was identified for idx,ans in enumerate(answer.itervalues()): if ans.has_key('peak_data'): # Plot the KDE map colormap = colormaparr.pop() ax3.imshow(np.rot90(ans['peak_data']['Z']), cmap=colormap,extent=[xmin, xmax, ymin, ymax]) # Plot individual sources color = 
colorarr.pop() x_plot,y_plot = ans['ir_x'],ans['ir_y'] ax3.scatter(x_plot, y_plot, c=color, marker='o', s=10, alpha=1./len(x_plot)) ax4.plot([ans['ir_peak'][0]],[ans['ir_peak'][1]],color=color,marker='*',markersize=12) elif ans.has_key('ir'): color = colorarr.pop() x_plot,y_plot = ans['ir'] ax3.plot([x_plot],[y_plot],color=color,marker='o',markersize=2) ax4.plot([x_plot],[y_plot],color=color,marker='*',markersize=12) else: ax4.text(550,idx*25,'#%i - no IR host' % idx,fontsize=11) ax3.set_xlim([0, 500]) ax3.set_ylim([500, 0]) ax3.set_title(zooniverse_id) ax3.set_aspect('equal') ax4.set_xlim([0, 500]) ax4.set_ylim([500, 0]) ax4.set_title('Consensus (%i/%i users)' % (cons['n_users'],cons['n_total'])) ax4.set_aspect('equal') # Display IR and radio images url_standard = subject['location']['standard'] im_standard = Image.open(cStringIO.StringIO(urllib.urlopen(url_standard).read())) ax1 = fig.add_subplot(141) ax1.imshow(im_standard,origin='upper') ax1.set_title('WISE') url_radio = subject['location']['radio'] im_radio = Image.open(cStringIO.StringIO(urllib.urlopen(url_radio).read())) ax2 = fig.add_subplot(142) ax2.imshow(im_radio,origin='upper') ax2.set_title(subject['metadata']['source']) ax2.get_yaxis().set_ticklabels([]) ax3.get_yaxis().set_ticklabels([]) # Plot contours identified as the consensus if len(answer) > 0: ax4.add_patch(patch_black) # Add centers of bounding boxes for comp in components: bbox_radio = comp[0]['bbox'] bbox_ir = bbox_radio_to_ir(bbox_radio) xrad = np.median((bbox_ir[0],bbox_ir[2])) yrad = np.median((bbox_ir[1],bbox_ir[3])) ax4.scatter(xrad,yrad, c='g', marker='s', s=15, alpha=1) dbx = [bbox_ir[i] for i in (2,2,0,0,2)] dby = [bbox_ir[i] for i in (3,1,1,3,3)] ax4.plot(dbx,dby,color='g') radiobeamsize = 5. # arcsec imagesize = 3. # arcmin imagescale = IMG_HEIGHT_NEW/imagesize / 60. 
# pixel / arcsec radio_tol = radiobeamsize * imagescale for ans in answer: if answer[ans].has_key('ir_peak'): # Optical counterpart position xc,yc = answer[ans]['ir_peak'] # Measure all positions in radio pixel coordinates radio_centroids = pix_radio(components) maxdist = 0 for centroid in radio_centroids: d = pix_dist(xc,yc,centroid[0],centroid[1]) maxdist = d if d > maxdist else maxdist if d <= radio_tol: middle_radio = centroid radio_centroids.remove(middle_radio) x1 = radio_centroids[0][0] y1 = radio_centroids[0][1] x2 = radio_centroids[1][0] y2 = radio_centroids[1][1] if len(radio_centroids) == 2: m1 = (y1 - yc) / (x1 - xc) b1 = yc - m1*xc m2 = (y2 - yc) / (x2 - xc) b2 = yc - m2*xc xedge1 = 0 if x1 < xc else 500 yedge1 = y1 - (x1-xedge1)*(yc-y1)/(xc-x1) xedge2 = 0 if x2 < xc else 500 yedge2 = y2 - (x2-xedge2)*(yc-y2)/(xc-x2) # Draw and annotate the the bending angle ax4.plot([xedge1,xc],[yedge1,yc],color='orange',linestyle='--') ax4.plot([xedge2,xc],[yedge2,yc],color='orange',linestyle='--') alpha_deg = bending_angle(xc,yc,x1,y1,x2,y2) * 180/np.pi ax4.text(550,0,r'$\alpha$ = %.1f deg' % alpha_deg,fontsize=11) else: print "\tDidn't find match to optical ID for triple radio source %s" % zooniverse_id else: print "\tNo IR peak for %s" % zooniverse_id ax4.yaxis.tick_right() ax1.get_xaxis().set_ticks([0,100,200,300,400]) ax2.get_xaxis().set_ticks([0,100,200,300,400]) ax3.get_xaxis().set_ticks([0,100,200,300,400]) ax4.get_xaxis().set_ticks([0,100,200,300,400,500]) plt.subplots_adjust(wspace=0.02) # Save hard copy of the figure if savefig: fig.savefig('{0:}/bending_angles/plots/individual/triples/{1:}ba_{2:}.pdf'.format(rgz_dir,anglepath,zooniverse_id)) plt.close() else: plt.show() # Close figure after it's done; otherwise mpl complains about having thousands of stuff open return None def y_bisect(xc,yc,xt,yt,xb,yb): # Finds the point yd such that the vector (xc,yc) -> (0,yd) # bisects the angle formed by the vectors (xb,yb) -> (xc,yc) # and (xt,yt) -> (xc,yc) 
bc_length = np.hypot(xb - xc,yb - yc) tc_length = np.hypot(xt - xc,yt - yc) numerator = ((xb - xc)*xc + (yb - yc)*yc)/bc_length - (xc*(xt - xc) + yc*(yt - yc))/tc_length denominator = (yb - yc)/bc_length - (yt - yc)/tc_length return numerator/denominator def plot_some(n,random_selection=False): # Plot a random selection of double and triple sources pathdict = make_pathdict() # Doubles doubles = get_doubles() somedoubles = random.sample(doubles,n) if random_selection else doubles[:n] for dbl in somedoubles: plot_one_double(dbl['zooniverse_id'],pathdict,savefig=True) # Triples sometriples = get_triples() for triple in sometriples: plot_one_triple(triple['zooniverse_id'],pathdict,savefig=True) return None def pix_dist(x1,y1,x2,y2): # Find the distance between two sets of Cartesian points via the Pythagorean theorem dist = np.sqrt((x2-x1)**2 + (y2-y1)**2) return dist def all_triples_pixradio(triples,pathdict): # Compute the bending angle for RGZ subjects with three radio components and an optical ID within 1 beam size of the center component radiobeamsize = 5. # arcsec imagesize = 3. # arcmin imagescale = IMG_HEIGHT_NEW/imagesize / 60. 
    # pixel / arcsec
    radio_tol = radiobeamsize * imagescale

    with open('%s/bending_angles/angles_triple_pixradio.csv' % rgz_dir,'w') as f:
        print >> f,'zooniverse_id,bending_angle,position_angle'
        for triple in triples:
            irx,iry = literal_eval(triple['ir_peak'])
            subject = subjects.find_one({'zooniverse_id':triple['zooniverse_id']})
            contours = get_contours(subject,pathdict)
            radio_components = contours['contours']

            # Measure all positions in radio pixel coordinates
            radio_centroids = pix_radio(radio_components)

            maxdist = 0
            for centroid in radio_centroids:
                d = pix_dist(irx,iry,centroid[0],centroid[1])
                maxdist = d if d > maxdist else maxdist
                if d <= radio_tol:
                    # First centroid within one beam size of the IR peak is taken
                    # as the vertex of the bend; the other two are the arms.
                    middle_radio = centroid
                    radio_centroids.remove(middle_radio)

                    alpha = bending_angle(middle_radio[0],middle_radio[1],radio_centroids[0][0],radio_centroids[0][1],radio_centroids[1][0],radio_centroids[1][1])
                    alpha_deg = alpha * 180./np.pi
                    phi = position_angle(middle_radio[0],middle_radio[1],radio_centroids[0][0],radio_centroids[0][1],radio_centroids[1][0],radio_centroids[1][1])
                    phi_deg = phi * 180./np.pi

                    print >> f,'%s,%.3f,%.3f' % (triple['zooniverse_id'],alpha_deg,phi_deg)
                    break
            else:
                # NOTE(review): this bare string is a no-op — a `print` statement
                # appears to be missing, so unmatched subjects are silently dropped.
                "Couldn't match the optical ID within 1 beam size of center for %s" % triple['zooniverse_id']

    return None

def find_multipeaked_singles(subject,plot=False,verbose=True):
    """Find multi-peaked single-component sources via binary kernels.

    Deprecated in favor of mps_cc.

    Interpolates the contour levels onto a regular grid and runs a maximum
    filter to locate local peaks; returns the (x, y) grid coordinates of the
    detected peaks.
    """

    # Deprecated in favor of mps_cc

    # Find multi-peaked single component sources via binary kernels.

    # Download contour data
    contours = get_contours(subject,pathdict)
    lobe = contours['contours'][0]
    # Order of bounding box components is (xmax,ymax,xmin,ymin)
    xmax,ymax,xmin,ymin = lobe[0]['bbox']

    # Resample the contour data onto a regular 0.1-pixel grid
    xsize,ysize = 0.1,0.1
    X,Y = np.mgrid[xmin:xmax:xsize,ymin:ymax:ysize]

    parr = []
    valarr = []
    for cl in lobe:
        parr.extend([(p['x'],p['y']) for p in cl['arr']])
        valarr.extend(np.ones(len(cl['arr']))+cl['level'])

    points = np.array([(px,py) for px,py in parr])
    values = np.array(valarr)

    # Interpolate with three different methods (only the cubic grid is used below)
    grid_z0 = griddata(points,values,(X,Y),method='nearest')
    grid_z1 = griddata(points,values,(X,Y),method='linear')
    grid_z2 = griddata(points,values,(X,Y),method='cubic')

    # Find the number of peaks
    # http://stackoverflow.com/questions/3684484/peak-detection-in-a-2d-array
    #neighborhood = generate_binary_structure(2,2)
    kernelsize = 50
    neighborhood = np.ones((kernelsize,kernelsize))

    '''
    Z = np.copy(grid_z2)
    Z[np.isnan(grid_z2)] = 1.
    '''
    Z = grid_z2

    # A point is a local max if it equals the max of its kernel-sized neighborhood
    local_max = maximum_filter(Z, footprint=neighborhood)==Z
    background = np.isnan(Z)
    eroded_background = binary_erosion(background, structure=neighborhood, border_value=1)
    all_peaks = local_max - eroded_background

    # Check if peak is in the background
    detected_peaks = np.isfinite(Z) & all_peaks

    npeaks = detected_peaks.sum()

    xdp = X[detected_peaks]
    ydp = Y[detected_peaks]

    if verbose:
        print '%i peaks detected' % npeaks
        print xdp,ydp

    if plot:
        # Diagnostic six-panel plot of the interpolations and detected peaks
        plt.subplot(231,aspect='equal')
        plt.plot(points[:,0], points[:,1], 'k.', ms=1)
        plt.title(subject['zooniverse_id'])

        plt.subplot(232)
        plt.imshow(grid_z0.T, extent=(xmin,xmax,ymin,ymax), cmap = cm.cubehelix, origin='lower',interpolation='none')
        plt.title('Nearest')

        plt.subplot(233)
        plt.imshow(grid_z1.T, extent=(xmin,xmax,ymin,ymax), cmap = cm.cubehelix, origin='lower',interpolation='none')
        plt.title('Linear')

        plt.subplot(234)
        plt.imshow(grid_z2.T, extent=(xmin,xmax,ymin,ymax), cmap = cm.cubehelix, origin='lower',interpolation='none')
        plt.title('Cubic')

        plt.subplot(235)
        plt.imshow(Z.T, extent=(xmin,xmax,ymin,ymax), cmap = cm.cubehelix, origin='lower')#,vmin=0.999,vmax=1.012)
        plt.title('Z')

        '''
        plt.subplot(235)
        plt.imshow(background.T, extent=(xmin,xmax,ymin,ymax), cmap = cm.cubehelix, origin='lower',interpolation='none')
        plt.title('Background')

        plt.subplot(235)
        plt.imshow(eroded_background.T, extent=(xmin,xmax,ymin,ymax), cmap = cm.cubehelix, origin='lower',interpolation='none')
        plt.title('Eroded background')
        '''
        plt.subplot(236,aspect='equal')
        plt.plot(points[:,0], points[:,1], 'k.', ms=1)
        plt.plot(xdp,ydp,'ro')
        plt.title('Detected peaks')

        plt.gcf().set_size_inches(18, 12)
        plt.show()

    return xdp,ydp

def centroid(arr):
    """Return the mean (x, y) of a list of {'x':..,'y':..} vertex dicts."""
    # Find the centroid of a polygon defined by a list of (x,y) points
    x = [l['x'] for l in arr]
    y = [l['y'] for l in arr]
    xmean = np.mean(x)
    ymean = np.mean(y)
    return xmean,ymean

def point_in_poly(x,y,poly):
    """Ray-casting test: return True if (x, y) lies inside polygon ``poly``.

    ``poly`` is a sequence of (x, y) vertex pairs.
    """
    # Determine whether a given point (x,y) is within a convex polygon defined by an array of points
    n = len(poly)
    inside = False
    p1x,p1y = poly[0]
    for i in range(n+1):
        p2x,p2y = poly[i % n]
        if y > min(p1y,p2y):
            if y <= max(p1y,p2y):
                if x <= max(p1x,p2x):
                    if p1y != p2y:
                        xints = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
                    # NOTE(review): when p1y == p2y, xints retains its value from a
                    # previous iteration (or is undefined on the first pass) — verify.
                    if p1x == p2x or x <= xints:
                        inside = not inside
        p1x,p1y = p2x,p2y
    return inside

def make_polygon(arr):
    """Convert a list of {'x':..,'y':..} dicts into a list of (x, y) tuples."""
    # Create a list of x,y pairs out of an array to draw a polygon
    x = [l['x'] for l in arr]
    y = [l['y'] for l in arr]
    polygon = [(xx,yy) for xx,yy in zip(x,y)]
    return polygon

def mps_cc(subject,pathdict,plot=True,verbose=True):
    """Locate peaks inside a single-component radio source by contour counting.

    Returns a list of (level, (x, y)) tuples, one per local maximum found.
    """
    # Find location of peaks within a single-component radio source via contour counting
    contours = get_contours(subject,pathdict)
    lobe = contours['contours'][0]
    xmax,ymax,xmin,ymin = lobe[0]['bbox']

    parr = []
    valarr = []
    for cl in lobe:
        parr.extend([(p['x'],p['y']) for p in cl['arr']])
        valarr.extend(np.ones(len(cl['arr']))+cl['level'])

    points = np.array([(px,py) for px,py in parr])
    values = np.array(valarr)

    # Find levels with multiple contours

    # For each of those levels, check if next level up has geometric center within that contour

    # If no, then that level's geometric center is a local maximum

    # If yes, then move up one level and repeat

    k = [l['k'] for l in lobe]
    ck = Counter(k)
    mlarr = []
    for x,y in ck.iteritems():
        if y > 1:
            mlarr.append(x)

    if max(k) not in mlarr:
        mlarr.append(max(k))
    mlarr.sort()

    local_maxima = []
    for m in mlarr:
        levels = [l for l in lobe if l['k'] == m]

        # Is there a higher level?
        if m < max(k):
            upper_levels = [l for l in lobe if l['k'] == m+1]
            for level in levels:
                within = False
                for ul in upper_levels:
                    gc = centroid(ul['arr'])
                    polygon = make_polygon(level['arr'])
                    result = point_in_poly(gc[0],gc[1],polygon)
                    within += result
                if not within:
                    gc = centroid(level['arr'])
                    local_maxima.append((m,gc))
                    if verbose:
                        print 'Point in poly, m=%i, center=(%.1f,%.1f)' % (m,gc[0],gc[1])
        # If no higher level, centroids = local max
        else:
            for level in levels:
                gc = centroid(level['arr'])
                local_maxima.append((m,gc))
                if verbose:
                    print 'No higher levels, m=%i, center=(%.1f,%.1f)' % (m,gc[0],gc[1])

    # Plot locations of peaks
    npeaks = len(local_maxima)
    if plot:
        xc = [x[1][0] for x in local_maxima]
        yc = [x[1][1] for x in local_maxima]

        fig = plt.figure()
        ax = fig.add_subplot(111)

        verts_all = []
        codes_all = []
        components = contours['contours']
        for comp in components:
            # Order of bounding box components is (xmax,ymax,xmin,ymin)
            comp_xmax,comp_ymax,comp_xmin,comp_ymin = comp[0]['bbox']

            # Only plot radio components identified by the users as the consensus;
            # check on the xmax value to make sure
            for idx,level in enumerate(comp):
                verts = [(p['x'],p['y']) for p in level['arr']]
                codes = np.ones(len(verts),int) * Path.LINETO
                codes[0] = Path.MOVETO
                verts_all.extend(verts)
                codes_all.extend(codes)

        try:
            path = Path(verts_all, codes_all)
            patch_black = patches.PathPatch(path, facecolor = 'none', edgecolor='black', lw=1)
        except AssertionError:
            # NOTE(review): `zooniverse_id` is not defined in this scope — this
            # handler would raise a NameError if ever triggered; verify.
            print 'Users found no components for consensus match of %s' % zooniverse_id

        # Plot contours identified as the consensus
        ax.add_patch(patch_black)
        ax.plot(xc,yc,'r*',ms=10)
        ax.set_xlim(0,FIRST_FITS_WIDTH)
        ax.set_ylim(FIRST_FITS_HEIGHT,0)
        ax.set_aspect('equal')
        #ax.title(subject['zooniverse_id'])

        plt.show()

    return local_maxima

def batch_mps_cc():
    """Run mps_cc over all completed single-contour FIRST subjects and write
    the detected peak positions to multipeaked_singles_cc.csv."""
    # Find location of peaks within all single-component radio sources via contour counting
    '''
    Time estimate:

    Contour data retrieved over network:
    5013.01 seconds (~83 minutes) for 38,750 images
    7.73 images per second

    Contour data stored locally:
    1559.67 seconds (~26 minutes) for 46,068 images
    29.54 images per second
    '''
    # Note - only enabled for FIRST now.
    tstart = time.time()
    mps = subjects.find({'state':'complete','metadata.contour_count':1,'metadata.survey':'first'},timeout=False)
    n = mps.count()
    with open('%s/bending_angles/multipeaked_singles_cc.csv' % rgz_dir,'w') as f:
        print >> f,"zooniverse_id,nlobe,ntotal,xc,yc"
        idx_s = 0
        for subject in mps:
            try:
                local_maxima = mps_cc(subject,pathdict,plot=False,verbose=False)
                if len(local_maxima) > 1:
                    for idx,lm in enumerate(local_maxima):
                        print >> f,"{0:},{1:d},{2:d},{3:.4f},{4:.4f}".format(subject['zooniverse_id'],idx+1,len(local_maxima),lm[1][0],lm[1][1])
            except ValueError:
                print "Error retrieving JSON object for {0:}".format(subject['zooniverse_id'])
            idx_s += 1
            # NOTE(review): `~idx_s % 100` is bitwise NOT, not a modulo test —
            # this prints on almost every iteration; likely meant `idx_s % 100 == 0`.
            if ~idx_s % 100:
                print "%i completed" % idx_s
    mps.close()
    tend = time.time()
    print '%.2f minutes for %i images' % ((tend - tstart)/60.,n)
    print '%.2f images per second' % (n/(tend - tstart))

    return None

def hist_mps_cc():
    """Plot the distribution of peak counts for single-contour RGZ subjects
    and save it to plots/mps_cc.pdf."""
    # Plot the distribution of the number of peaks in single-component radio subjects
    data = ascii.read('%s/bending_angles/multipeaked_singles_cc.csv' % rgz_dir,delimiter=' ',data_start=1,header_start=0)
    c = Counter(data['zooniverse_id'])

    # Doesn't include npeaks = 1, so calculate that separately
    ntotal = subjects.find({'state':'complete','metadata.contour_count':1,'metadata.survey':'first'}).count()
    runningsum = 0
    for v in c.itervalues():
        runningsum += v
    c[1] = ntotal - runningsum

    fig = plt.figure(1,(12,6))
    ax1 = fig.add_subplot(121)
    histML(c.values(), bins=range(10), ax=ax1, histtype='step', lw=2, alpha=1.0, color='#377eb8',log=True)
    ax1.set_xlim(1,10)
    ax1.set_xlabel(r'$N_{peaks}$',fontsize=18)
    ax1.set_ylabel('Count')
    ax1.set_title('RGZ 1-contour sources')

    ax2 = fig.add_subplot(122)
    histML(c.values(), bins=range(10), ax=ax2, histtype='step', lw=2, alpha=1.0, color='#e41a1c',cumulative=True,normed=True)
    ax2.set_xlabel(r'$N_{peaks}$',fontsize=18)
    # NOTE(review): sets the title on ax1 a second time — probably meant ax2.
    ax1.set_title('RGZ 1-contour sources')
    ax2.set_ylabel('Cumulative fraction')

    fig.savefig('%s/bending_angles/plots/mps_cc.pdf' % rgz_dir)
    plt.show()

    return None

def batch_mps_kernel():
    """Run find_multipeaked_singles over all completed single-contour subjects
    and write the peak positions to multipeaked_singles.csv.

    Deprecated in favor of batch_mps_cc.
    """
    # Deprecated in favor of batch_mps_cc

    # Find location of peaks within all single-component radio sources via binary kernels
    tstart = time.time()
    mps = subjects.find({'state':'complete','metadata.contour_count':1})
    n = mps.count()
    with open('%s/bending_angles/multipeaked_singles.csv' % rgz_dir,'w') as f:
        for subject in mps:
            xdp,ydp = find_multipeaked_singles(subject,plot=False,verbose=False)
            if len(xdp) > 0 and len(ydp) > 0:
                for idx,(xsubpeak,ysubpeak) in enumerate(zip(xdp,ydp)):
                    print >> f,subject['zooniverse_id'],idx+1,len(xdp),xsubpeak,ysubpeak
    tend = time.time()
    print '%.2f seconds for %i images' % (tend - tstart,n)
    print '%.2f images per second' % (n/(tend - tstart))

    return None

def mps_bending_angle(consensus_level = 0.50):
    """Compute bending and position angles for double-peaked, single-contour
    sources that have an optical (IR) counterpart at or above
    ``consensus_level`` agreement, writing the results to
    angles_multipeaked_singles.csv.
    """
    # Compute the bending and position angles for double-peaked, single-contour sources with optical counterparts

    tstart = time.time()

    # Load data
    df = pd.read_csv('%s/bending_angles/multipeaked_singles_cc.csv' % rgz_dir,delimiter=',')

    # Keep only subjects with exactly two peaks
    df2 = df[(df['ntotal'] == 2)]

    with open('%s/bending_angles/angles_multipeaked_singles.csv' % rgz_dir,'w') as f:
        print >> f,'zooniverse_id,bending_angle,position_angle'

        # get optical counterpart for zooniverse_id (loop over eventually)
        # Rows come in consecutive pairs (one row per peak) — split into the
        # first and second peak of each subject.
        df2_pair1 = df2[::2]
        df2_pair2 = df2[1::2]
        _zid = np.array(df2_pair1['zooniverse_id'])
        _x1 = np.array(df2_pair1['xc'])
        _y1 = np.array(df2_pair1['yc'])
        _x2 = np.array(df2_pair2['xc'])
        _y2 = np.array(df2_pair2['yc'])

        for zooniverse_id,x1,y1,x2,y2 in zip(_zid,_x1,_y1,_x2,_y2):
            c = consensus.checksum(zooniverse_id)
            try:
                if (len(c['answer']) == 1) and (c['n_users']/float(c['n_total']) >= consensus_level):
                    if c['answer'][c['answer'].keys()[0]].has_key('ir_peak'):
                        peak_x,peak_y = c['answer'][c['answer'].keys()[0]]['ir_peak']

                        # Scale the IR click position into FIRST pixel coordinates
                        ir_x = peak_x * first_ir_scale_x
                        ir_y = peak_y * first_ir_scale_y
                        alpha = bending_angle(ir_x,ir_y,x1,y1,x2,y2)
                        alpha_deg = alpha * 180./np.pi
                        phi = position_angle(ir_x,ir_y,x1,y1,x2,y2)
                        phi_deg = phi * 180./np.pi

                        print >> f,'{0:s},{1:.4f},{2:.4f}'.format(zooniverse_id,alpha_deg,phi_deg)
                else:
                    print "Had more than 1 IR sources and/or less than {0:.2f} percent consensus for {1:s}".format(consensus_level,zooniverse_id)
            except TypeError:
                print "No 'answer' key for %s" % zooniverse_id

    # Timing the process
    tend = time.time()
    n = len(df2)/2
    print '%.2f minutes for %i subjects' % ((tend - tstart)/60.,n)
    print '%.2f subjects per second' % (n/(tend - tstart))

    return None

if __name__ == '__main__':
    # If run from command line, computes bending angles for all double and multi-peaked single component RGZ sources

    pathdict = make_pathdict()

    doubles = get_doubles()
    all_doubles_pixradio(doubles,pathdict)

    batch_mps_cc()
    mps_bending_angle()

    '''
    triples = get_triples()
    all_triples_pixradio(triples,pathdict)
    '''
{ "content_hash": "09fca722b8fc4017b1d631bbef9edc06", "timestamp": "", "source": "github", "line_count": 1288, "max_line_length": 176, "avg_line_length": 34.34549689440994, "alnum_prop": 0.5834030336596062, "repo_name": "willettk/rgz-analysis", "id": "3ea925c2f64b73425edc6066e63356d8e728dd80", "size": "44326", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "python/bending_angles.py", "mode": "33188", "license": "mit", "language": [ { "name": "Jupyter Notebook", "bytes": "147317" }, { "name": "Python", "bytes": "691021" }, { "name": "Ruby", "bytes": "3598" }, { "name": "Shell", "bytes": "6723" }, { "name": "TeX", "bytes": "40897" } ], "symlink_target": "" }
package test.service.impl.segmenteffortservice; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; import org.junit.Test; import javastrava.auth.model.Token; import javastrava.service.SegmentEffortService; import javastrava.service.exception.InvalidTokenException; import javastrava.service.impl.SegmentEffortServiceImpl; import test.service.standardtests.data.SegmentEffortDataUtils; import test.service.standardtests.spec.ServiceInstanceTests; import test.utils.RateLimitedTestRunner; import test.utils.TestUtils; /** * Implementation tests for SegmentEffortService * * @author Dan Shannon * */ public class ImplementationTest implements ServiceInstanceTests { private static Token getRevokedToken() { return TestUtils.getRevokedToken(); } private static SegmentEffortService getService() { return SegmentEffortServiceImpl.instance(TestUtils.getValidToken()); } private static SegmentEffortService getServiceWithoutWriteAccess() { return SegmentEffortServiceImpl.instance(TestUtils.getValidTokenWithWriteAccess()); } /** * <p> * Test that when we ask for a {@link SegmentEffortServiceImpl service implementation} for a second, valid, different token, we get a DIFFERENT implementation * </p> * * @throws Exception * if the test fails in an unexpected way */ @Override @Test public void testImplementation_differentImplementationIsNotCached() throws Exception { RateLimitedTestRunner.run(() -> { final SegmentEffortService service = getService(); final SegmentEffortService service2 = getServiceWithoutWriteAccess(); assertFalse(service == service2); }); } /** * <p> * Test that when we ask for a {@link SegmentEffortServiceImpl service implementation} for a second time, we get the SAME ONE as the first time (i.e. 
the caching strategy is working) * </p> * * @throws Exception * if the test fails in an unexpected way */ @Override @Test public void testImplementation_implementationIsCached() throws Exception { RateLimitedTestRunner.run(() -> { final SegmentEffortService service = SegmentEffortServiceImpl.instance(TestUtils.getValidToken()); final SegmentEffortService service2 = SegmentEffortServiceImpl.instance(TestUtils.getValidToken()); assertEquals("Retrieved multiple service instances for the same token - should only be one", service, service2); //$NON-NLS-1$ }); } /** * <p> * Test that we don't get a {@link SegmentEffortServiceImpl service implementation} if the token isn't valid * </p> * * @throws Exception * if the test fails in an unexpected way */ @Override @Test public void testImplementation_invalidToken() throws Exception { RateLimitedTestRunner.run(() -> { SegmentEffortService service = null; service = SegmentEffortServiceImpl.instance(TestUtils.INVALID_TOKEN); try { service.getSegmentEffort(SegmentEffortDataUtils.SEGMENT_EFFORT_VALID_ID); } catch (final InvalidTokenException e) { // expected return; } fail("Used an invalid token, but still got access!"); //$NON-NLS-1$ }); } /** * <p> * Test that we don't get a {@link SegmentEffortServiceImpl service implementation} if the token has been revoked by the user * </p> * * @throws Exception * if the test fails in an unexpected way */ @Override @Test public void testImplementation_revokedToken() throws Exception { RateLimitedTestRunner.run(() -> { final SegmentEffortService service = SegmentEffortServiceImpl.instance(getRevokedToken()); try { service.getSegmentEffort(SegmentEffortDataUtils.SEGMENT_EFFORT_VALID_ID); } catch (final InvalidTokenException e) { // Expected return; } fail("Used an invalid token, still got access to Strava data!"); //$NON-NLS-1$ }); } /** * <p> * Test we get a {@link SegmentEffortServiceImpl service implementation} successfully with a valid token * </p> * * @throws Exception * if the test fails 
in an unexpected way */ @Override @Test public void testImplementation_validToken() throws Exception { RateLimitedTestRunner.run(() -> { final SegmentEffortService service = SegmentEffortServiceImpl.instance(TestUtils.getValidToken()); assertNotNull("Got a NULL service for a valid token", service); //$NON-NLS-1$ }); } }
{ "content_hash": "8487fbb95756e31ff0ca5da9aad4aa3c", "timestamp": "", "source": "github", "line_count": 138, "max_line_length": 183, "avg_line_length": 32.210144927536234, "alnum_prop": 0.7372328458942632, "repo_name": "danshannon/javastrava-test", "id": "1501fb4cb8462f60f7977f11bc59db7de552fe2c", "size": "4445", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/test/service/impl/segmenteffortservice/ImplementationTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1092887" } ], "symlink_target": "" }
/**
 * Created by gturnquist on 11/11/14.
 *
 * Smoke-test entry module: emits a console message so a developer can confirm
 * in the browser console that this script was actually fetched and executed
 * by the Rave module loader (see the repo name; TODO confirm loader setup).
 */
console.log('If you can see this, then the app was properly loaded by Rave');
{ "content_hash": "68611afce79208de46e3f1712856caee", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 77, "avg_line_length": 24.8, "alnum_prop": 0.6854838709677419, "repo_name": "gregturn/rave-relative-url-bug", "id": "923c41dce3cd9f8c665dafc3e2dcaccb0b9f2d4f", "size": "124", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/resources/static/app/main.js", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "1287" }, { "name": "JavaScript", "bytes": "331" } ], "symlink_target": "" }
package org.motechproject.scheduletracking.domain.search; import org.junit.Test; import org.motechproject.scheduletracking.domain.Enrollment; import org.motechproject.scheduletracking.domain.EnrollmentBuilder; import org.motechproject.scheduletracking.domain.Schedule; import org.motechproject.scheduletracking.repository.AllEnrollments; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import static ch.lambdaj.Lambda.extract; import static ch.lambdaj.Lambda.on; import static java.util.Arrays.asList; import static junit.framework.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class MetadataCriterionTest { @Test public void shouldFetchFromDbUsingCriteria() { AllEnrollments allEnrollments = mock(AllEnrollments.class); List<Enrollment> result = mock(List.class); when(allEnrollments.findByMetadataProperty("foo", "bar")).thenReturn(result); assertEquals(result, new MetadataCriterion("foo", "bar").fetch(allEnrollments)); } @Test public void shouldFilterByMetadata() { Schedule schedule = new Schedule("my_schedule"); List<Enrollment> enrollments = new ArrayList<Enrollment>(); HashMap<String,String> metadata1 = new HashMap<String, String>(),metadata2 = new HashMap<String, String>(), metadata3 = new HashMap<String, String>(), metadata4 = new HashMap<String, String>(); metadata1.put("foo","bar"); metadata1.put("fuu", "bar"); enrollments.add(new EnrollmentBuilder().withExternalId("entity1").withSchedule(schedule).withCurrentMilestoneName(null).withStartOfSchedule(null).withEnrolledOn(null).withPreferredAlertTime(null).withStatus(null).withMetadata(metadata1).toEnrollment()); metadata2.put("foo", "baz"); metadata2.put("fuu", "biz"); enrollments.add(new EnrollmentBuilder().withExternalId("entity2").withSchedule(schedule).withCurrentMilestoneName(null).withStartOfSchedule(null).withEnrolledOn(null).withPreferredAlertTime(null).withStatus(null).withMetadata(metadata2).toEnrollment()); 
metadata3.put("foo","bar"); enrollments.add(new EnrollmentBuilder().withExternalId("entity3").withSchedule(schedule).withCurrentMilestoneName(null).withStartOfSchedule(null).withEnrolledOn(null).withPreferredAlertTime(null).withStatus(null).withMetadata(metadata3).toEnrollment()); metadata4.put("foo", "boz"); metadata4.put("fuu", "ber"); enrollments.add(new EnrollmentBuilder().withExternalId("entity4").withSchedule(schedule).withCurrentMilestoneName(null).withStartOfSchedule(null).withEnrolledOn(null).withPreferredAlertTime(null).withStatus(null).withMetadata(metadata4).toEnrollment()); enrollments.add(new EnrollmentBuilder().withExternalId("entity5").withSchedule(schedule).withCurrentMilestoneName(null).withStartOfSchedule(null).withEnrolledOn(null).withPreferredAlertTime(null).withStatus(null).withMetadata(null).toEnrollment()); List<Enrollment> filtered = new MetadataCriterion("foo", "bar").filter(enrollments); assertEquals(asList(new String[]{ "entity1", "entity3" }), extract(filtered, on(Enrollment.class).getExternalId())); } }
{ "content_hash": "53adb8b66fc1e68d2c795eda19c391ac", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 261, "avg_line_length": 55.91379310344828, "alnum_prop": 0.761640456367561, "repo_name": "justin-hayes/modules", "id": "e48670b1ea1a9a8e519dec10e9fbd148757a7850", "size": "3243", "binary": false, "copies": "19", "ref": "refs/heads/master", "path": "schedule-tracking/src/test/java/org/motechproject/scheduletracking/domain/search/MetadataCriterionTest.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "8999" }, { "name": "Groovy", "bytes": "564" }, { "name": "HTML", "bytes": "145919" }, { "name": "Java", "bytes": "4000720" }, { "name": "JavaScript", "bytes": "163695" }, { "name": "Shell", "bytes": "1401" } ], "symlink_target": "" }
using NUnit.Framework;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Moq;

namespace Cassandra.Tests
{
    /// <summary>
    /// Unit tests for RowSet / Row: cell access, iteration, automatic and
    /// explicit paging, thread-safety of page fetching, and mockability.
    /// </summary>
    [TestFixture]
    public class RowSetUnitTests
    {
        /// <summary>
        /// A Row enumerates its cell values in column order.
        /// </summary>
        [Test]
        public void RowIteratesThroughValues()
        {
            var rs = CreateStringsRowset(4, 1);
            var row = rs.First();
            //Use Linq's IEnumerable ToList: it iterates and maps to a list
            var cellValues = row.ToList();
            Assert.AreEqual("row_0_col_0", cellValues[0]);
            Assert.AreEqual("row_0_col_1", cellValues[1]);
            Assert.AreEqual("row_0_col_2", cellValues[2]);
            Assert.AreEqual("row_0_col_3", cellValues[3]);
        }

        /// <summary>
        /// Test that all possible ways to get the value from the row gets the same value
        /// </summary>
        [Test]
        public void RowGetTheSameValues()
        {
            var row = CreateStringsRowset(3, 1).First();

            // Indexer, generic getter and Type-based getter by column index...
            var value00 = row[0];
            var value01 = row.GetValue<object>(0);
            var value02 = row.GetValue(typeof(object), 0);
            Assert.True(value00.Equals(value01) && value01.Equals(value02), "Row values do not match");

            var value10 = (string)row[1];
            var value11 = row.GetValue<string>(1);
            var value12 = (string)row.GetValue(typeof(string), 1);
            Assert.True(value10.Equals(value11) && value11.Equals(value12), "Row values do not match");

            // ...and the same three access styles by column name.
            var value20 = (string)row["col_2"];
            var value21 = row.GetValue<string>("col_2");
            var value22 = (string)row.GetValue(typeof(string), "col_2");
            Assert.True(value20.Equals(value21) && value21.Equals(value22), "Row values do not match");
        }

        /// <summary>
        /// A RowSet enumerates its rows in insertion order.
        /// </summary>
        [Test]
        public void RowSetIteratesTest()
        {
            var rs = CreateStringsRowset(2, 3);

            //Use Linq's IEnumerable ToList to iterate and map it to a list
            var rowList = rs.ToList();
            Assert.AreEqual(3, rowList.Count);
            Assert.AreEqual("row_0_col_0", rowList[0].GetValue<string>("col_0"));
            Assert.AreEqual("row_1_col_1", rowList[1].GetValue<string>("col_1"));
            Assert.AreEqual("row_2_col_0", rowList[2].GetValue<string>("col_0"));
        }

        /// <summary>
        /// When AutoPage is on and a paging state is present, iterating past the
        /// current page invokes FetchNextPage and continues with the next page.
        /// </summary>
        [Test]
        public void RowSetCallsFetchNextTest()
        {
            //Create a rowset with 1 row
            var rs = CreateStringsRowset(1, 1, "a_");
            Assert.True(rs.AutoPage);
            //It has paging state, stating that there are more pages
            rs.PagingState = new byte[] { 0 };
            //Add a handler to fetch next
            rs.FetchNextPage = (pagingState) =>
            {
                return CreateStringsRowset(1, 1, "b_");
            };

            //use linq to iterate and map it to a list
            var rowList = rs.ToList();
            Assert.AreEqual(2, rowList.Count);
            Assert.AreEqual("a_row_0_col_0", rowList[0].GetValue<string>("col_0"));
            Assert.AreEqual("b_row_0_col_0", rowList[1].GetValue<string>("col_0"));
        }

        /// <summary>
        /// With AutoPage disabled, FetchNextPage must never be invoked even if a
        /// paging state is present.
        /// </summary>
        [Test]
        public void RowSetDoesNotCallFetchNextWhenAutoPageFalseTest()
        {
            //Create a rowset with 1 row
            var rs = CreateStringsRowset(1, 1, "a_");
            //Set to not to automatically page
            rs.AutoPage = false;
            //It has paging state, stating that there are more pages
            rs.PagingState = new byte[] { 0 };
            //Add a handler to fetch next
            var called = false;
            rs.FetchNextPage = (pagingState) =>
            {
                called = true;
                return CreateStringsRowset(1, 1, "b_");
            };

            //use linq to iterate and map it to a list
            var rowList = rs.ToList();
            Assert.False(called);
            Assert.AreEqual(1, rowList.Count);
        }

        /// <summary>
        /// Ensures that in case there is an exception while retrieving the next page, it propagates.
        /// </summary>
        [Test]
        public void RowSetFetchNextPropagatesExceptionTest()
        {
            var rs = CreateStringsRowset(1, 1);
            //It has paging state, stating that there are more pages.
            rs.PagingState = new byte[] { 0 };
            //Throw a test exception when fetching the next page.
            rs.FetchNextPage = (pagingState) =>
            {
                throw new TestException();
            };

            //use linq to iterate and map it to a list
            //The row set should throw an exception when getting the next page.
            Assert.Throws<TestException>(() =>
            {
                rs.ToList();
            });
        }

        /// <summary>
        /// Tests that once iterated, it can not be iterated any more.
        /// </summary>
        [Test]
        public void RowSetMustDequeue()
        {
            var rowLength = 10;
            var rs = CreateStringsRowset(2, rowLength);
            rs.FetchNextPage = (pagingState) =>
            {
                Assert.Fail("Event to get next page must not be called as there is no paging state.");
                return null;
            };
            //Use Linq to iterate
            var rowsFirstIteration = rs.ToList();
            Assert.AreEqual(rowLength, rowsFirstIteration.Count);

            //Following iterations must yield 0 rows
            var rowsSecondIteration = rs.ToList();
            var rowsThridIteration = rs.ToList();
            Assert.AreEqual(0, rowsSecondIteration.Count);
            Assert.AreEqual(0, rowsThridIteration.Count);

            Assert.IsTrue(rs.IsExhausted());
            Assert.IsTrue(rs.IsFullyFetched);
        }

        /// <summary>
        /// Tests that when multi threading, all enumerators of the same rowset wait for the fetching.
        /// </summary>
        [Test]
        public void RowSetFetchNextAllEnumeratorsWait()
        {
            var pageSize = 10;
            var rs = CreateStringsRowset(10, pageSize);
            rs.PagingState = new byte[0];
            var fetchCounter = 0;
            rs.FetchNextPage = (pagingState) =>
            {
                fetchCounter++;
                //fake a fetch
                Thread.Sleep(1000);
                return CreateStringsRowset(10, pageSize);
            };
            var counterList = new ConcurrentBag<int>();
            Action iteration = () =>
            {
                var counter = 0;
                foreach (var row in rs)
                {
                    counter++;
                    //Try to synchronize, all the threads will try to fetch at the almost same time.
                    Thread.Sleep(300);
                }
                counterList.Add(counter);
            };
            //Invoke it in parallel more than 10 times
            Parallel.Invoke(iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration, iteration);

            //Assert that the fetch was called just 1 time
            Assert.AreEqual(1, fetchCounter);

            //Sum all rows dequeued from the different threads
            var totalRows = counterList.Sum();
            //Check that the total amount of rows dequeued are the same as pageSize * number of pages.
            Assert.AreEqual(pageSize * 2, totalRows);
        }

        /// <summary>
        /// Automatic paging follows the paging state across 3 fetches (4 pages
        /// total) and yields pages in order.
        /// </summary>
        [Test]
        public void RowSetFetchNext3Pages()
        {
            var rowLength = 10;
            var rs = CreateStringsRowset(10, rowLength, "page_0_");
            rs.PagingState = new byte[0];
            var fetchCounter = 0;
            rs.FetchNextPage = (pagingState) =>
            {
                fetchCounter++;
                var pageRowSet = CreateStringsRowset(10, rowLength, "page_" + fetchCounter + "_");
                if (fetchCounter < 3)
                {
                    //when retrieving the pages, state that there are more results
                    pageRowSet.PagingState = new byte[0];
                }
                else
                {
                    //On the 3rd page, state that there aren't any more pages.
                    pageRowSet.PagingState = null;
                }
                return pageRowSet;
            };

            //Use Linq to iterate
            var rows = rs.ToList();

            Assert.AreEqual(3, fetchCounter, "Fetch must have been called 3 times");
            // NOTE(review): expected/actual arguments look swapped here relative to
            // NUnit's Assert.AreEqual(expected, actual) convention - verify.
            Assert.AreEqual(rows.Count, rowLength * 4, "RowSet must contain 4 pages in total");

            //Check the values are in the correct order
            Assert.AreEqual(rows[0].GetValue<string>(0), "page_0_row_0_col_0");
            Assert.AreEqual(rows[rowLength].GetValue<string>(0), "page_1_row_0_col_0");
            Assert.AreEqual(rows[rowLength * 2].GetValue<string>(0), "page_2_row_0_col_0");
            Assert.AreEqual(rows[rowLength * 3].GetValue<string>(0), "page_3_row_0_col_0");
        }

        /// <summary>
        /// FetchMoreResults() explicitly appends one page per call to the inner
        /// queue; a later full iteration yields all pages and must not trigger a
        /// 4th fetch.
        /// </summary>
        [Test]
        public void RowSetFetchNext3PagesExplicitFetch()
        {
            var rowLength = 10;
            var rs = CreateStringsRowset(10, rowLength, "page_0_");
            rs.PagingState = new byte[0];
            var fetchCounter = 0;
            rs.FetchNextPage = (pagingState) =>
            {
                fetchCounter++;
                var pageRowSet = CreateStringsRowset(10, rowLength, "page_" + fetchCounter + "_");
                if (fetchCounter < 3)
                {
                    //when retrieving the pages, state that there are more results
                    pageRowSet.PagingState = new byte[0];
                }
                else if (fetchCounter == 3)
                {
                    //On the 3rd page, state that there aren't any more pages.
                    pageRowSet.PagingState = null;
                }
                else
                {
                    throw new Exception("It should not be called more than 3 times.");
                }
                return pageRowSet;
            };
            Assert.AreEqual(rowLength * 1, rs.InnerQueueCount);
            rs.FetchMoreResults();
            Assert.AreEqual(rowLength * 2, rs.InnerQueueCount);
            rs.FetchMoreResults();
            Assert.AreEqual(rowLength * 3, rs.InnerQueueCount);
            rs.FetchMoreResults();
            Assert.AreEqual(rowLength * 4, rs.InnerQueueCount);

            //Use Linq to iterate:
            var rows = rs.ToList();
            // NOTE(review): expected/actual arguments look swapped here relative to
            // NUnit's Assert.AreEqual(expected, actual) convention - verify.
            Assert.AreEqual(rows.Count, rowLength * 4, "RowSet must contain 4 pages in total");

            //Check the values are in the correct order
            Assert.AreEqual(rows[0].GetValue<string>(0), "page_0_row_0_col_0");
            Assert.AreEqual(rows[rowLength].GetValue<string>(0), "page_1_row_0_col_0");
            Assert.AreEqual(rows[rowLength * 2].GetValue<string>(0), "page_2_row_0_col_0");
            Assert.AreEqual(rows[rowLength * 3].GetValue<string>(0), "page_3_row_0_col_0");
        }

        /// <summary>
        /// Accessing a column name that does not exist throws an ArgumentException
        /// with a descriptive message.
        /// </summary>
        [Test]
        public void NotExistentColumnThrows()
        {
            var row = CreateSampleRowSet().First();
            var ex = Assert.Throws<ArgumentException>(() => row.GetValue<string>("not_existent_col"));
            StringAssert.Contains("Column", ex.Message);
            StringAssert.Contains("not found", ex.Message);
        }

        /// <summary>
        /// A NULL cell maps to null for reference/nullable types; reading it as a
        /// non-nullable value type throws.
        /// </summary>
        [Test]
        public void NullValuesWithStructTypeColumnThrows()
        {
            //Row with all null values
            var row = CreateSampleRowSet().Last();
            Assert.IsNull(row.GetValue<string>("text_sample"));
            Assert.Throws<NullReferenceException>(() => row.GetValue<int>("int_sample"));
            Assert.DoesNotThrow(() => row.GetValue<int?>("int_sample"));
        }

        /// <summary>
        /// RowSet and Row can be mocked with Moq (virtual members), so consumers
        /// can unit-test code that reads query results.
        /// </summary>
        [Test]
        public void RowsetIsMockable()
        {
            var rowMock = new Mock<Row>();
            rowMock.Setup(r => r.GetValue<int>(It.Is<string>(n => n == "int_value"))).Returns(100);
            var rows = new Row[] { rowMock.Object };
            var mock = new Mock<RowSet>();
            mock
                .Setup(r => r.GetEnumerator()).Returns(() => ((IEnumerable<Row>)rows).GetEnumerator());

            var rs = mock.Object;
            var rowArray = rs.ToArray();
            Assert.AreEqual(rowArray.Length, 1);
            Assert.AreEqual(rowArray[0].GetValue<int>("int_value"), 100);
        }

        /// <summary>
        /// Creates a rowset.
        /// The columns are named: col_0, ..., col_n
        /// The rows values are: row_0_col_0, ..., row_m_col_n
        /// </summary>
        private static RowSet CreateStringsRowset(int columnLength, int rowLength, string valueModifier = null)
        {
            var columns = new List<CqlColumn>();
            var columnIndexes = new Dictionary<string, int>();
            for (var i = 0; i < columnLength; i++)
            {
                var c = new CqlColumn()
                {
                    Index = i,
                    Name = "col_" + i,
                    TypeCode = ColumnTypeCode.Text,
                    Type = typeof(string)
                };
                columns.Add(c);
                columnIndexes.Add(c.Name, c.Index);
            }
            var rs = new RowSet();
            for (var j = 0; j < rowLength; j++)
            {
                var rowValues = new List<byte[]>();
                foreach (var c in columns)
                {
                    // Cell payloads are UTF-8 encoded strings
                    var value = valueModifier + "row_" + j + "_col_" + c.Index;
                    rowValues.Add(Encoding.UTF8.GetBytes(value));
                }
                rs.AddRow(new Row(1, rowValues.ToArray(), columns.ToArray(), columnIndexes));
            }
            return rs;
        }

        /// <summary>
        /// Creates a RowSet with few rows with int, text columns (null values in the last row)
        /// </summary>
        private static RowSet CreateSampleRowSet()
        {
            var columns = new List<CqlColumn>
            {
                new CqlColumn()
                {
                    Index = 0,
                    Name = "text_sample",
                    TypeCode = ColumnTypeCode.Text,
                    Type = typeof (string)
                },
                new CqlColumn()
                {
                    Index = 1,
                    Name = "int_sample",
                    TypeCode = ColumnTypeCode.Int,
                    Type = typeof(int)
                }
            };
            var columnIndexes = columns.ToDictionary(c => c.Name, c => c.Index);
            var rs = new RowSet();
            var rowValues = new[]
            {
                Encoding.UTF8.GetBytes("text value"),
                TypeCodec.EncodeInt(2, null, 100)
            };
            rs.AddRow(new Row(2, rowValues, columns.ToArray(), columnIndexes));
            // Second row: every cell NULL
            rowValues = new byte[][]
            {
                null,
                null
            };
            rs.AddRow(new Row(2, rowValues, columns.ToArray(), columnIndexes));
            return rs;
        }

        // Marker exception type used to verify that paging errors propagate.
        private class TestException : Exception { }
    }
}
{ "content_hash": "c2fe464e45bb779d71d368c3bdddf691", "timestamp": "", "source": "github", "line_count": 398, "max_line_length": 193, "avg_line_length": 38.47487437185929, "alnum_prop": 0.5176647293149611, "repo_name": "oguimbal/csharp-driver", "id": "19b591cbc4d6f8c7339cd3b2527986c3be32f438", "size": "15941", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/Cassandra.Tests/RowSetUnitTests.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "299" }, { "name": "C#", "bytes": "2951381" } ], "symlink_target": "" }
XlsxWriter ========== **XlsxWriter** is a Python module for writing files in the Excel 2007+ XLSX file format. XlsxWriter can be used to write text, numbers, formulas and hyperlinks to multiple worksheets and it supports features such as formatting and many more, including: * 100% compatible Excel XLSX files. * Full formatting. * Merged cells. * Defined names. * Charts. * Autofilters. * Data validation and drop down lists. * Conditional formatting. * Worksheet PNG/JPEG images. * Rich multi-format strings. * Cell comments. * Integration with Pandas. * Textboxes. * Memory optimisation mode for writing large files. It supports Python 2.5, 2.6, 2.7, 3.1, 3.2, 3.3, 3.4, Jython and PyPy and uses standard libraries only. Here is a simple example: .. code-block:: python import xlsxwriter # Create an new Excel file and add a worksheet. workbook = xlsxwriter.Workbook('demo.xlsx') worksheet = workbook.add_worksheet() # Widen the first column to make the text clearer. worksheet.set_column('A:A', 20) # Add a bold format to use to highlight cells. bold = workbook.add_format({'bold': True}) # Write some simple text. worksheet.write('A1', 'Hello') # Text with formatting. worksheet.write('A2', 'World', bold) # Write some numbers, with row/column notation. worksheet.write(2, 0, 123) worksheet.write(3, 0, 123.456) # Insert an image. worksheet.insert_image('B5', 'logo.png') workbook.close() .. image:: https://raw.github.com/jmcnamara/XlsxWriter/master/dev/docs/source/_images/demo.png See the full documentation at: http://xlsxwriter.readthedocs.org Release notes: http://xlsxwriter.readthedocs.org/changes.html
{ "content_hash": "0d154043bb1095a21085e125dcaf8ab1", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 94, "avg_line_length": 25.606060606060606, "alnum_prop": 0.7171597633136094, "repo_name": "jvrsantacruz/XlsxWriter", "id": "bd8d8ea28942c2d7dd0063ea95fdddbe431fef5d", "size": "1690", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "README.rst", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "5113" }, { "name": "CSS", "bytes": "16544" }, { "name": "HTML", "bytes": "13100" }, { "name": "Makefile", "bytes": "7453" }, { "name": "Perl", "bytes": "3504" }, { "name": "Python", "bytes": "2343848" }, { "name": "Shell", "bytes": "6064" } ], "symlink_target": "" }
TEST_DIR = File.expand_path(File.dirname(__FILE__)) TOP_SRC_DIR = File.join(TEST_DIR, '..') require File.join(TOP_SRC_DIR, 'lib', 'tracelines19.rb') def dump_file(file, opts) puts file begin fp = File.open(file, 'r') rescue Errno::ENOENT puts "File #{file} is not readable." return end lines = fp.read if opts[:print_source] puts '=' * 80 puts lines end if opts[:print_parse] puts '=' * 80 cmd = "#{File.join(TEST_DIR, 'parse-show.rb')} #{file}" system(cmd) end if opts[:print_trace] require 'tracer' puts '=' * 80 tracer = Tracer.new tracer.add_filter lambda {|event, f, line, id, binding, klass| __FILE__ != f && event == 'line' } tracer.on{load(file)} end expected_lnums = nil if opts[:expect_line] fp.rewind first_line = fp.readline.chomp expected_str = first_line[1..-1] begin expected_lnums = eval(expected_str, binding, __FILE__, __LINE__) rescue SyntaxError puts '=' * 80 puts "Failed reading expected values from #{file}" end end fp.close() got_lnums = TraceLineNumbers.lnums_for_str(lines) if expected_lnums puts "expecting: #{expected_lnums.inspect}" puts '-' * 80 if expected_lnums if got_lnums != expected_lnums puts "mismatch: #{got_lnums.inspect}" else puts 'Got what was expected.' end else puts got_lnums.inspect end else puts got_lnums.inspect end end require 'getoptlong' program = File.basename($0) opts = { :print_source => true, # Print source file? :print_trace => true, # Run Tracer over file? :expect_line => true, # Source file has expected (correct) list of lines? :print_parse => true, # Show ParseTree output? 
} getopts = GetoptLong.new( [ '--expect', '-e', GetoptLong::NO_ARGUMENT ], [ '--no-expect', '-E', GetoptLong::NO_ARGUMENT ], [ '--help', '-h', GetoptLong::NO_ARGUMENT ], [ '--parse', '-p', GetoptLong::NO_ARGUMENT ], [ '--no-parse', '-P', GetoptLong::NO_ARGUMENT ], [ '--source', '-s', GetoptLong::NO_ARGUMENT ], [ '--no-source', '-S', GetoptLong::NO_ARGUMENT ], [ '--trace', '-t', GetoptLong::NO_ARGUMENT ], [ '--no-trace', '-T', GetoptLong::NO_ARGUMENT ]) getopts.each do |opt, arg| case opt when '--help' puts "usage Usage: #{$program} [options] file1 file2 ... Diagnostic program to make see what TraceLineNumbers does and compare against other output. options: -e --expect Read source file expected comment (default) -E --no-expect Don't look for source file expected comment -p --parse Show ParseTree Output (default) -P --no-parse Don't show ParseTree output -s --source Show source file (default) -S --no-source Don't print source -t --trace Show Tracer output (default) -T --no-trace Don't show Tracer output " when '--expect' opts[:expect_line] = true when '--no-expect' opts[:expect_line] = false when '--parse' opts[:print_parse] = true when '--no-parse' opts[:print_parse] = false when '--source' opts[:print_source] = true when '--no-source' opts[:print_source] = false when '--trace' opts[:print_trace] = true when '--no-trace' opts[:print_trace] = false else puts "Unknown and ignored option #{opt}" end end ARGV.each do |file| dump_file(file, opts) end
{ "content_hash": "6dd6263b6b1bddc6adb695e50754b2a0", "timestamp": "", "source": "github", "line_count": 125, "max_line_length": 77, "avg_line_length": 29.056, "alnum_prop": 0.5696585903083701, "repo_name": "khrtz/rails_vote_ranking", "id": "2c63d2a273c94f2b7a9089d141a29297d201992e", "size": "3653", "binary": false, "copies": "27", "ref": "refs/heads/master", "path": "vendor/bundle/ruby/2.1.0/gems/debugger-linecache-1.2.0/test/lnum-diag.rb", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "9402" }, { "name": "HTML", "bytes": "31081" }, { "name": "JavaScript", "bytes": "2362" }, { "name": "Ruby", "bytes": "72380" } ], "symlink_target": "" }
TODO: - add input for organization - add input for date - add autosort
{ "content_hash": "34686b01ca48f200dc9d9a2df84f7d7d", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 28, "avg_line_length": 17.5, "alnum_prop": 0.7571428571428571, "repo_name": "bricejlin/deleterboard", "id": "c1a4a01fd33c2adb9c3005e27c2f4d7a56c9a047", "size": "70", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1573" } ], "symlink_target": "" }
"""Tests for classes and methods relating to user rights.""" from core.domain import collection_services from core.domain import exp_domain from core.domain import exp_services from core.domain import rights_manager from core.domain import user_services from core.tests import test_utils class ExplorationRightsTests(test_utils.GenericTestBase): """Test that rights for actions on explorations work as expected.""" EXP_ID = 'exp_id' def setUp(self): super(ExplorationRightsTests, self).setUp() self.signup('a@example.com', 'A') self.signup('b@example.com', 'B') self.signup('c@example.com', 'C') self.signup('d@example.com', 'D') self.signup('e@example.com', 'E') self.signup('f@example.com', 'F') self.signup(self.ADMIN_EMAIL, username=self.ADMIN_USERNAME) self.signup(self.MODERATOR_EMAIL, username=self.MODERATOR_USERNAME) self.user_id_a = self.get_user_id_from_email('a@example.com') self.user_id_b = self.get_user_id_from_email('b@example.com') self.user_id_c = self.get_user_id_from_email('c@example.com') self.user_id_d = self.get_user_id_from_email('d@example.com') self.user_id_e = self.get_user_id_from_email('e@example.com') self.user_id_f = self.get_user_id_from_email('f@example.com') self.user_id_admin = self.get_user_id_from_email(self.ADMIN_EMAIL) self.user_id_moderator = self.get_user_id_from_email( self.MODERATOR_EMAIL) self.set_admins([self.ADMIN_USERNAME]) self.set_moderators([self.MODERATOR_USERNAME]) self.user_a = user_services.UserActionsInfo(self.user_id_a) self.user_b = user_services.UserActionsInfo(self.user_id_b) self.user_c = user_services.UserActionsInfo(self.user_id_c) self.user_d = user_services.UserActionsInfo(self.user_id_d) self.user_e = user_services.UserActionsInfo(self.user_id_e) self.user_f = user_services.UserActionsInfo(self.user_id_f) self.user_admin = user_services.UserActionsInfo(self.user_id_admin) self.user_moderator = user_services.UserActionsInfo( self.user_id_moderator) self.system_user = user_services.get_system_user() def 
test_get_exploration_rights_for_nonexistent_exploration(self): non_exp_id = 'this_exp_does_not_exist_id' with self.assertRaisesRegexp( Exception, 'Entity for class ExplorationRightsModel with id ' 'this_exp_does_not_exist_id not found' ): rights_manager.get_exploration_rights(non_exp_id) self.assertIsNone( rights_manager.get_exploration_rights(non_exp_id, strict=False)) def test_demo_exploration(self): exp_services.load_demo('1') rights_manager.release_ownership_of_exploration( self.system_user, '1') exp_rights = rights_manager.get_exploration_rights('1') self.assertTrue(rights_manager.check_can_access_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_a, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_delete_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_moderator, exp_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_moderator, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_moderator, exp_rights)) self.assertTrue(rights_manager.check_can_delete_activity( self.user_moderator, exp_rights)) def test_non_splash_page_demo_exploration(self): # Note: there is no difference between permissions for demo # explorations, whether or not they are on the splash page. 
exp_services.load_demo('3') rights_manager.release_ownership_of_exploration( self.system_user, '3') exp_rights = rights_manager.get_exploration_rights('3') self.assertTrue(rights_manager.check_can_access_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_a, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_delete_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_moderator, exp_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_moderator, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_moderator, exp_rights)) self.assertTrue(rights_manager.check_can_delete_activity( self.user_moderator, exp_rights)) def test_ownership_of_exploration(self): exp = exp_domain.Exploration.create_default_exploration(self.EXP_ID) exp_services.save_new_exploration(self.user_id_a, exp) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_b, rights_manager.ROLE_EDITOR) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(exp_rights.is_owner(self.user_id_a)) self.assertFalse(exp_rights.is_owner(self.user_id_b)) self.assertFalse(exp_rights.is_owner(self.user_id_admin)) def test_newly_created_exploration(self): exp = exp_domain.Exploration.create_default_exploration(self.EXP_ID) exp_services.save_new_exploration(self.user_id_a, exp) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) 
self.assertTrue(rights_manager.check_can_access_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_delete_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_delete_activity( self.user_admin, exp_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_moderator, exp_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_moderator, exp_rights)) self.assertFalse(rights_manager.check_can_translate_activity( self.user_moderator, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_moderator, exp_rights)) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_translate_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_b, exp_rights)) def test_inviting_collaborator_to_exploration(self): exp = exp_domain.Exploration.create_default_exploration(self.EXP_ID) exp_services.save_new_exploration(self.user_id_a, exp) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_translate_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_b, exp_rights)) 
rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_b, rights_manager.ROLE_EDITOR) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_b, exp_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_b, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_b, exp_rights)) def test_inviting_translator_to_exploration(self): exp = exp_domain.Exploration.create_default_exploration(self.EXP_ID) exp_services.save_new_exploration(self.user_id_a, exp) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_translate_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_b, exp_rights)) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_b, rights_manager.ROLE_TRANSLATOR) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, exp_rights)) self.assertTrue(rights_manager.check_can_translate_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_b, exp_rights)) def test_inviting_playtester_to_exploration(self): exp = exp_domain.Exploration.create_default_exploration(self.EXP_ID) exp_services.save_new_exploration(self.user_id_a, exp) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_edit_activity( 
self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_translate_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_b, exp_rights)) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_b, rights_manager.ROLE_VIEWER) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_translate_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_b, exp_rights)) def test_setting_rights_of_exploration(self): exp = exp_domain.Exploration.create_default_exploration(self.EXP_ID) exp_services.save_new_exploration(self.user_id_a, exp) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_b, rights_manager.ROLE_VIEWER) with self.assertRaisesRegexp(Exception, 'Could not assign new role.'): rights_manager.assign_role_for_exploration( self.user_b, self.EXP_ID, self.user_id_c, rights_manager.ROLE_VIEWER) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_b, rights_manager.ROLE_TRANSLATOR) with self.assertRaisesRegexp(Exception, 'Could not assign new role.'): rights_manager.assign_role_for_exploration( self.user_b, self.EXP_ID, self.user_id_c, rights_manager.ROLE_VIEWER) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_b, rights_manager.ROLE_EDITOR) with self.assertRaisesRegexp(Exception, 'Could not assign new role.'): rights_manager.assign_role_for_exploration( self.user_b, self.EXP_ID, self.user_id_c, rights_manager.ROLE_VIEWER) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_b, rights_manager.ROLE_OWNER) rights_manager.assign_role_for_exploration( self.user_b, self.EXP_ID, self.user_id_c, 
rights_manager.ROLE_OWNER) rights_manager.assign_role_for_exploration( self.user_b, self.EXP_ID, self.user_id_d, rights_manager.ROLE_EDITOR) rights_manager.assign_role_for_exploration( self.user_b, self.EXP_ID, self.user_id_e, rights_manager.ROLE_TRANSLATOR) rights_manager.assign_role_for_exploration( self.user_b, self.EXP_ID, self.user_id_f, rights_manager.ROLE_VIEWER) def test_publishing_and_unpublishing_exploration(self): exp = exp_domain.Exploration.create_default_exploration( self.EXP_ID, title='A title', category='A category') exp_services.save_new_exploration(self.user_id_a, exp) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_rights)) rights_manager.publish_exploration(self.user_a, self.EXP_ID) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_b, exp_rights)) self.assertFalse(rights_manager.check_can_unpublish_activity( self.user_a, exp_rights)) rights_manager.unpublish_exploration(self.user_admin, self.EXP_ID) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_a, exp_rights)) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_rights)) def test_can_only_delete_unpublished_explorations(self): exp = exp_domain.Exploration.create_default_exploration( self.EXP_ID, title='A title', category='A category') exp_services.save_new_exploration(self.user_id_a, exp) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(rights_manager.check_can_delete_activity( self.user_a, exp_rights)) rights_manager.publish_exploration(self.user_a, self.EXP_ID) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertFalse(rights_manager.check_can_delete_activity( self.user_a, exp_rights)) rights_manager.unpublish_exploration(self.user_admin, self.EXP_ID) exp_rights = 
rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(rights_manager.check_can_delete_activity( self.user_a, exp_rights)) def test_changing_viewability_of_exploration(self): exp = exp_domain.Exploration.create_default_exploration( self.EXP_ID, title='A title', category='A category') exp_services.save_new_exploration(self.user_id_a, exp) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_rights)) with self.assertRaisesRegexp(Exception, 'already the current value'): rights_manager.set_private_viewability_of_exploration( self.user_a, self.EXP_ID, False) with self.assertRaisesRegexp(Exception, 'cannot be changed'): rights_manager.set_private_viewability_of_exploration( self.user_b, self.EXP_ID, True) rights_manager.set_private_viewability_of_exploration( self.user_a, self.EXP_ID, True) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_a, exp_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_b, exp_rights)) rights_manager.set_private_viewability_of_exploration( self.user_a, self.EXP_ID, False) exp_rights = rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_a, exp_rights)) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_rights)) def test_check_exploration_rights(self): exp = exp_domain.Exploration.create_default_exploration(self.EXP_ID) exp_services.save_new_exploration(self.user_id_a, exp) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_b, rights_manager.ROLE_VIEWER) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_c, rights_manager.ROLE_EDITOR) rights_manager.assign_role_for_exploration( self.user_a, self.EXP_ID, self.user_id_d, rights_manager.ROLE_TRANSLATOR) exp_rights = 
rights_manager.get_exploration_rights(self.EXP_ID) self.assertTrue(exp_rights.is_owner(self.user_id_a)) self.assertTrue(exp_rights.is_editor(self.user_id_c)) self.assertTrue(exp_rights.is_viewer(self.user_id_b)) self.assertFalse(exp_rights.is_viewer(self.user_id_a)) self.assertFalse(exp_rights.is_owner(self.user_id_b)) self.assertFalse(exp_rights.is_editor(self.user_id_b)) self.assertTrue(exp_rights.is_translator(self.user_id_d)) self.assertFalse(exp_rights.is_translator(self.user_id_b)) def test_get_multiple_exploration_rights(self): exp_ids = ['exp1', 'exp2', 'exp3', 'exp4'] # saving only first 3 explorations to check that None is returned for # non-existing exploration. for exp_id in exp_ids[:3]: self.save_new_valid_exploration(exp_id, self.user_id_admin) exp_rights = rights_manager.get_multiple_exploration_rights_by_ids( exp_ids) self.assertEqual(len(exp_rights), 4) for rights_object in exp_rights[:3]: self.assertIsNotNone(rights_object) self.assertIsNone(exp_rights[3]) class CollectionRightsTests(test_utils.GenericTestBase): """Test that rights for actions on collections work as expected.""" COLLECTION_ID = 'collection_id' EXP_ID_FOR_COLLECTION = 'exp_id_for_collection' def setUp(self): super(CollectionRightsTests, self).setUp() self.signup('a@example.com', 'A') self.signup('b@example.com', 'B') self.signup('c@example.com', 'C') self.signup('d@example.com', 'D') self.signup('e@example.com', 'E') self.signup(self.ADMIN_EMAIL, username=self.ADMIN_USERNAME) self.signup(self.MODERATOR_EMAIL, username=self.MODERATOR_USERNAME) self.user_id_a = self.get_user_id_from_email('a@example.com') self.user_id_b = self.get_user_id_from_email('b@example.com') self.user_id_c = self.get_user_id_from_email('c@example.com') self.user_id_d = self.get_user_id_from_email('d@example.com') self.user_id_e = self.get_user_id_from_email('e@example.com') self.user_id_admin = self.get_user_id_from_email(self.ADMIN_EMAIL) self.user_id_moderator = self.get_user_id_from_email( 
self.MODERATOR_EMAIL) self.set_admins([self.ADMIN_USERNAME]) self.set_moderators([self.MODERATOR_USERNAME]) self.user_a = user_services.UserActionsInfo(self.user_id_a) self.user_b = user_services.UserActionsInfo(self.user_id_b) self.user_c = user_services.UserActionsInfo(self.user_id_c) self.user_d = user_services.UserActionsInfo(self.user_id_d) self.user_e = user_services.UserActionsInfo(self.user_id_e) self.user_admin = user_services.UserActionsInfo(self.user_id_admin) self.user_moderator = user_services.UserActionsInfo( self.user_id_moderator) self.system_user = user_services.get_system_user() def test_get_collection_rights_for_nonexistent_collection(self): non_col_id = 'this_collection_does_not_exist_id' with self.assertRaisesRegexp( Exception, 'Entity for class CollectionRightsModel with id ' 'this_collection_does_not_exist_id not found' ): rights_manager.get_collection_rights(non_col_id) self.assertIsNone( rights_manager.get_collection_rights(non_col_id, strict=False)) def test_demo_collection(self): collection_services.load_demo('0') rights_manager.release_ownership_of_collection( self.system_user, '0') collection_rights = rights_manager.get_collection_rights('0') self.assertTrue(rights_manager.check_can_access_activity( self.user_a, collection_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_a, collection_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_a, collection_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_admin, collection_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_admin, collection_rights)) self.assertTrue(rights_manager.check_can_delete_activity( self.user_admin, collection_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_moderator, collection_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_moderator, collection_rights)) self.assertTrue(rights_manager.check_can_delete_activity( 
self.user_moderator, collection_rights)) def test_ownership_of_collection(self): self.save_new_default_collection(self.COLLECTION_ID, self.user_id_a) rights_manager.assign_role_for_collection( self.user_a, self.COLLECTION_ID, self.user_id_b, rights_manager.ROLE_EDITOR) self.assertListEqual( ['A'], rights_manager.get_collection_owner_names( self.COLLECTION_ID)) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) self.assertTrue(collection_rights.is_owner(self.user_id_a)) self.assertFalse(collection_rights.is_owner(self.user_id_b)) self.assertFalse(collection_rights.is_owner(self.user_id_admin)) def test_newly_created_collection(self): self.save_new_default_collection(self.COLLECTION_ID, self.user_id_a) self.assertListEqual( ['A'], rights_manager.get_collection_owner_names( self.COLLECTION_ID)) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_a, collection_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_a, collection_rights)) self.assertTrue(rights_manager.check_can_delete_activity( self.user_a, collection_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_admin, collection_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_admin, collection_rights)) self.assertTrue(rights_manager.check_can_delete_activity( self.user_admin, collection_rights)) self.assertTrue(rights_manager.check_can_access_activity( self.user_moderator, collection_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_moderator, collection_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_moderator, collection_rights)) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, collection_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, collection_rights)) self.assertFalse(rights_manager.check_can_delete_activity( 
self.user_b, collection_rights)) def test_inviting_collaborator_to_collection(self): self.save_new_valid_collection( self.COLLECTION_ID, self.user_id_a, exploration_id=self.EXP_ID_FOR_COLLECTION) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) # Verify initial editor permissions for the collection. self.assertTrue(rights_manager.check_can_access_activity( self.user_a, collection_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_a, collection_rights)) # Verify initial editor permissions for the exploration within the # collection. self.assertFalse(rights_manager.check_can_access_activity( self.user_b, collection_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, collection_rights)) # User A adds user B to the collection as an editor. rights_manager.assign_role_for_collection( self.user_a, self.COLLECTION_ID, self.user_id_b, rights_manager.ROLE_EDITOR) # Ensure User A is the only user in the owner names list. self.assertListEqual( ['A'], rights_manager.get_collection_owner_names( self.COLLECTION_ID)) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) # Ensure User B is now an editor of the collection. self.assertTrue(rights_manager.check_can_access_activity( self.user_b, collection_rights)) self.assertTrue(rights_manager.check_can_edit_activity( self.user_b, collection_rights)) self.assertFalse(rights_manager.check_can_delete_activity( self.user_b, collection_rights)) exp_for_collection_rights = rights_manager.get_exploration_rights( self.EXP_ID_FOR_COLLECTION) # Ensure User B is not an editor of the exploration within the # collection. 
self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_for_collection_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, exp_for_collection_rights)) def test_inviting_playtester_to_collection(self): self.save_new_valid_collection( self.COLLECTION_ID, self.user_id_a, exploration_id=self.EXP_ID_FOR_COLLECTION) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) exp_for_collection_rights = rights_manager.get_exploration_rights( self.EXP_ID_FOR_COLLECTION) # Verify initial viewer permissions for the collection. self.assertFalse(rights_manager.check_can_access_activity( self.user_b, collection_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, collection_rights)) # Verify initial viewer permissions for the exploration within the # collection. self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_for_collection_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, exp_for_collection_rights)) # User A adds user B to the collection as a viewer. rights_manager.assign_role_for_collection( self.user_a, self.COLLECTION_ID, self.user_id_b, rights_manager.ROLE_VIEWER) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) exp_for_collection_rights = rights_manager.get_exploration_rights( self.EXP_ID_FOR_COLLECTION) # Ensure User B is now a viewer of the collection. self.assertTrue(rights_manager.check_can_access_activity( self.user_b, collection_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, collection_rights)) # Ensure User B cannot view the exploration just because he/she has # access to the collection containing it. 
self.assertFalse(rights_manager.check_can_access_activity( self.user_b, exp_for_collection_rights)) self.assertFalse(rights_manager.check_can_edit_activity( self.user_b, exp_for_collection_rights)) def test_setting_rights_of_collection(self): self.save_new_default_collection(self.COLLECTION_ID, self.user_id_a) rights_manager.assign_role_for_collection( self.user_a, self.COLLECTION_ID, self.user_id_b, rights_manager.ROLE_VIEWER) with self.assertRaisesRegexp(Exception, 'Could not assign new role.'): rights_manager.assign_role_for_collection( self.user_b, self.COLLECTION_ID, self.user_id_c, rights_manager.ROLE_VIEWER) rights_manager.assign_role_for_collection( self.user_a, self.COLLECTION_ID, self.user_id_b, rights_manager.ROLE_EDITOR) with self.assertRaisesRegexp(Exception, 'Could not assign new role.'): rights_manager.assign_role_for_collection( self.user_b, self.COLLECTION_ID, self.user_id_c, rights_manager.ROLE_VIEWER) rights_manager.assign_role_for_collection( self.user_a, self.COLLECTION_ID, self.user_id_b, rights_manager.ROLE_OWNER) rights_manager.assign_role_for_collection( self.user_b, self.COLLECTION_ID, self.user_id_c, rights_manager.ROLE_OWNER) rights_manager.assign_role_for_collection( self.user_b, self.COLLECTION_ID, self.user_id_d, rights_manager.ROLE_EDITOR) rights_manager.assign_role_for_collection( self.user_b, self.COLLECTION_ID, self.user_id_e, rights_manager.ROLE_VIEWER) def test_publishing_and_unpublishing_collection(self): self.save_new_default_collection(self.COLLECTION_ID, self.user_id_a) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, collection_rights)) rights_manager.publish_collection(self.user_a, self.COLLECTION_ID) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_b, collection_rights)) 
self.assertFalse(rights_manager.check_can_unpublish_activity( self.user_a, collection_rights)) rights_manager.unpublish_collection( self.user_admin, self.COLLECTION_ID) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) self.assertTrue(rights_manager.check_can_access_activity( self.user_a, collection_rights)) self.assertFalse(rights_manager.check_can_access_activity( self.user_b, collection_rights)) def test_can_only_delete_unpublished_collections(self): self.save_new_default_collection(self.COLLECTION_ID, self.user_id_a) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) self.assertTrue(rights_manager.check_can_delete_activity( self.user_a, collection_rights)) rights_manager.publish_collection(self.user_a, self.COLLECTION_ID) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) self.assertFalse(rights_manager.check_can_delete_activity( self.user_a, collection_rights)) rights_manager.unpublish_collection( self.user_admin, self.COLLECTION_ID) collection_rights = rights_manager.get_collection_rights( self.COLLECTION_ID) self.assertTrue(rights_manager.check_can_delete_activity( self.user_a, collection_rights)) class CheckCanReleaseOwnershipTest(test_utils.GenericTestBase): """Tests for check_can_release_ownership function.""" published_exp_id = 'exp_id_1' private_exp_id = 'exp_id_2' def setUp(self): super(CheckCanReleaseOwnershipTest, self).setUp() self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME) self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME) self.set_admins([self.ADMIN_USERNAME]) self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL) self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL) self.admin = user_services.UserActionsInfo(self.admin_id) self.owner = user_services.UserActionsInfo(self.owner_id) self.save_new_valid_exploration( self.published_exp_id, self.owner_id) self.save_new_valid_exploration( self.private_exp_id, self.owner_id) 
rights_manager.publish_exploration(self.owner, self.published_exp_id) def test_admin_can_release_ownership_of_published_exploration(self): self.assertTrue(rights_manager.check_can_release_ownership( self.admin, rights_manager.get_exploration_rights(self.published_exp_id))) def test_owner_can_release_ownership_of_published_exploration(self): self.assertTrue(rights_manager.check_can_release_ownership( self.owner, rights_manager.get_exploration_rights(self.published_exp_id))) def test_admin_cannot_release_ownership_of_private_exploration(self): self.assertFalse(rights_manager.check_can_release_ownership( self.admin, rights_manager.get_exploration_rights(self.private_exp_id))) def test_owner_cannot_release_ownership_of_private_exploration(self): self.assertFalse(rights_manager.check_can_release_ownership( self.owner, rights_manager.get_exploration_rights(self.private_exp_id))) class CheckCanUnpublishActivityTest(test_utils.GenericTestBase): """Tests for check_can_unpublish_activity function.""" published_exp_id = 'exp_id_1' private_exp_id = 'exp_id_2' private_col_id = 'col_id_1' published_col_id = 'col_id_2' def setUp(self): super(CheckCanUnpublishActivityTest, self).setUp() self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME) self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME) self.signup(self.MODERATOR_EMAIL, self.MODERATOR_USERNAME) self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL) self.moderator_id = self.get_user_id_from_email(self.MODERATOR_EMAIL) self.set_admins([self.ADMIN_USERNAME]) self.set_moderators([self.MODERATOR_USERNAME]) self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL) self.admin = user_services.UserActionsInfo(self.admin_id) self.owner = user_services.UserActionsInfo(self.owner_id) self.moderator = user_services.UserActionsInfo(self.moderator_id) self.save_new_valid_exploration( self.published_exp_id, self.owner_id) self.save_new_valid_exploration( self.private_exp_id, self.owner_id) self.save_new_valid_collection( 
self.published_col_id, self.owner_id, exploration_id=self.published_col_id) self.save_new_valid_collection( self.private_col_id, self.owner_id, exploration_id=self.private_col_id) rights_manager.publish_exploration(self.owner, self.published_exp_id) rights_manager.publish_collection(self.owner, self.published_col_id) def test_admin_can_unpublish_published_collection(self): self.assertTrue(rights_manager.check_can_unpublish_activity( self.admin, rights_manager.get_collection_rights(self.published_col_id))) def test_owner_cannot_unpublish_published_collection(self): self.assertFalse(rights_manager.check_can_unpublish_activity( self.owner, rights_manager.get_collection_rights(self.published_col_id))) def test_admin_cannot_unpublish_private_collection(self): self.assertFalse(rights_manager.check_can_unpublish_activity( self.admin, rights_manager.get_collection_rights(self.private_col_id))) def test_admin_can_unpublish_published_exploration(self): self.assertTrue(rights_manager.check_can_unpublish_activity( self.admin, rights_manager.get_exploration_rights(self.published_exp_id))) def test_owner_cannot_unpublish_published_exploration(self): self.assertFalse(rights_manager.check_can_unpublish_activity( self.owner, rights_manager.get_exploration_rights(self.published_exp_id))) def test_admin_cannot_unpublish_private_exploration(self): self.assertFalse(rights_manager.check_can_unpublish_activity( self.admin, rights_manager.get_exploration_rights(self.private_exp_id))) def test_moderator_can_unpublish_published_exploration(self): self.assertTrue(rights_manager.check_can_unpublish_activity( self.moderator, rights_manager.get_exploration_rights(self.published_exp_id)))
{ "content_hash": "30b2cbc4319a99a3e82049d4822e3b63", "timestamp": "", "source": "github", "line_count": 860, "max_line_length": 78, "avg_line_length": 45.78139534883721, "alnum_prop": 0.6604947678553287, "repo_name": "AllanYangZhou/oppia", "id": "6983dcd2540be7c55340e31dd64c73ddf32ef6a8", "size": "39977", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "core/domain/rights_manager_test.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "82690" }, { "name": "HTML", "bytes": "1128088" }, { "name": "JavaScript", "bytes": "3945933" }, { "name": "Python", "bytes": "4888439" }, { "name": "Shell", "bytes": "50051" } ], "symlink_target": "" }
Ein einfacher Einstieg in das JavaScript Web-Framework sammy.js

*Author:* Julian Metzger
*Version:* 1.0
*Last update:* 09/2012

## Libraries

jQuery
Sammy.js
mustache.js
Twitter Bootstrap
{ "content_hash": "8f26cd72ce0bd8c264a2db121472a845", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 66, "avg_line_length": 17.25, "alnum_prop": 0.7053140096618358, "repo_name": "mrjm/sammy-js-tutorial", "id": "9633714fce796257aa169194b96081c998b84cad", "size": "228", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "694" }, { "name": "JavaScript", "bytes": "20189" }, { "name": "MAXScript", "bytes": "97" } ], "symlink_target": "" }
__author__ = "Fabio Tea <ft2011@gmail.com>"

import feedparser
from urllib.request import urlopen
from sopel.tools import SopelMemory, SopelMemoryWithDefault
from sopel.module import commands, example, NOLIMIT, require_privilege, OP, require_admin
from sopel.config.types import (StaticSection, ValidatedAttribute, ListAttribute)


class RSSSection(StaticSection):
    """Config section ``[rss]``: the feed URL list and the polling interval."""
    # RSS feed URLs to watch.
    feeds = ListAttribute("feeds", default=[])
    # Polling interval in seconds.
    update_interval = ValidatedAttribute("update_interval", int, default=10)


def setup(bot):
    """Initialize bot memory for this module from the ``[rss]`` config section.

    Defaults (empty feed list, 10 second interval) are installed first and then
    overridden by any configured values.
    """
    bot.config.define_section("rss", RSSSection)
    bot.memory["rss"] = SopelMemory()
    bot.memory["rss"]["feeds"] = []
    bot.memory["rss"]["update_interval"] = 10

    if bot.config.rss.feeds:
        bot.memory["rss"]["feeds"] = bot.config.rss.feeds
    if bot.config.rss.update_interval:
        bot.memory["rss"]["update_interval"] = bot.config.rss.update_interval


def configure(config):
    """Interactive configuration hook for the ``[rss]`` section."""
    config.define_section("rss", RSSSection)
    config.rss.configure_setting("feeds", "Feed URLs")
    # Prompt typo fixed: "secounds" -> "seconds".
    config.rss.configure_setting("update_interval", "How often to check? (seconds)")


def shutdown(bot):
    """Persist the in-memory feed list and interval back to the config file."""
    print("shutting down...")
    bot.debug("RSS", "shutting down...", "always")
    bot.config.rss.feeds = bot.memory["rss"]["feeds"]
    bot.config.rss.update_interval = bot.memory["rss"]["update_interval"]
    bot.config.save()
    print(bot.config.rss.feeds)
    bot.debug("RSS", bot.config.rss.feeds, "always")
    bot.debug("RSS", bot.config.rss.update_interval, "always")


@require_admin
@commands("rssget")
def rssget(bot, trigger):
    """Fetch and display feed entries (not implemented yet)."""
    if not trigger.group(2) is None:
        bot.say("coming soon")
        return NOLIMIT
    # rss = "http://lorem-rss.herokuapp.com/feed"
    # feed = feedparser.parse(rss)
    # for key in feed["entries"]:
    #     bot.say(unidecode.unidecode(key["title"]))


@require_admin
@commands("rsslist")
@example(".rsslist")
def rsslist(bot, trigger):
    """List all configured feed URLs with their 1-based index."""
    if not trigger.group(2) is None:
        bot.say("expecting no parameter for this command...")
        return NOLIMIT

    feeds = bot.memory["rss"]["feeds"]
    bot.say("RSS Feed URLs (#{}): ".format(len(feeds)))
    # enumerate() instead of feeds.index(feed): the latter was O(n^2) and
    # reported the wrong position for duplicate URLs.
    for position, feed in enumerate(feeds, start=1):
        bot.say("{}: {}".format(position, feed))
    return NOLIMIT


@require_admin
@commands("rssadd")
@example(".rssadd http://google.com")
def rssadd(bot, trigger):
    """Add a feed URL after verifying that it is reachable (HTTP 200)."""
    url = trigger.group(2)
    if url is None:
        bot.say("expecting one parameter for this command...")
        return NOLIMIT

    try:
        with urlopen(url) as f:
            if f.status == 200:
                bot.memory["rss"]["feeds"].append(url)
                bot.say("RSS feed '{}' added successfully".format(url))
    except Exception:
        # Unreachable host, malformed URL, non-HTTP scheme, ...
        bot.say("Unable to add feed '{}' - Invalid URL!".format(url))
    return NOLIMIT


@require_admin
@commands("rssdel")
@example(".rssdel 2")
def rssdel(bot, trigger):
    """Delete the feed at the given 1-based index (as shown by ``.rsslist``).

    Bug fix: the old code indexed the feed list with the raw string argument
    (always a ``TypeError``) and then removed by value instead of by index.
    """
    arg = trigger.group(2)
    if arg is None:
        bot.say("expecting one parameter for this command...")
        return NOLIMIT

    feeds = bot.memory["rss"]["feeds"]
    try:
        index = int(arg) - 1  # .rsslist displays 1-based positions
        if index < 0:
            raise IndexError(arg)
        del feeds[index]
        bot.say("RSS feed '{}' deleted successfully".format(arg))
    except (ValueError, IndexError):
        # Non-numeric argument or position out of range.
        bot.say("Unable to delete feed '{}' - No such index!".format(arg))
    return NOLIMIT


@require_admin
@commands("rssclear")
@example(".rssclear")
def rssclear(bot, trigger):
    """Remove every configured feed URL."""
    if not trigger.group(2) is None:
        bot.say("expecting no parameter for this command...")
        return NOLIMIT

    bot.memory["rss"]["feeds"].clear()
    bot.say("All RSS feeds deleted successfully")
    return NOLIMIT
{ "content_hash": "549494610b8cd6496aed2fac4c4d895f", "timestamp": "", "source": "github", "line_count": 126, "max_line_length": 89, "avg_line_length": 30.42063492063492, "alnum_prop": 0.6146621445343073, "repo_name": "f4bio/sopel-rss", "id": "5623a88f64b146f7f85e15a1de23ef1c4490ffdc", "size": "3833", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rss.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "3833" } ], "symlink_target": "" }
package com.palantir.atlasdb.spi;

import com.palantir.atlasdb.keyvalue.api.KeyValueService;
import com.palantir.timestamp.TimestampService;

/**
 * Service-provider interface for constructing the storage-layer services AtlasDB
 * runs on top of: a raw {@link KeyValueService} and a {@link TimestampService}.
 */
public interface AtlasDbFactory {
    /**
     * Returns the type name of this factory.
     *
     * <p>NOTE(review): presumably this string is matched against the configured
     * key-value service type to pick a factory implementation — confirm against
     * the factory discovery/lookup code.
     */
    String getType();

    /**
     * Creates the backing key-value store from the supplied configuration.
     *
     * @param config backend-specific configuration for the key-value service
     */
    KeyValueService createRawKeyValueService(KeyValueServiceConfig config);

    /**
     * Creates a timestamp service.
     *
     * @param rawKvs the raw key-value service; presumably the one produced by
     *        {@link #createRawKeyValueService} — confirm with callers
     */
    TimestampService createTimestampService(KeyValueService rawKvs);
}
{ "content_hash": "2d3dde0c903c632339b029160d30fc72", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 75, "avg_line_length": 24.928571428571427, "alnum_prop": 0.8223495702005731, "repo_name": "andy2palantir/atlasdb", "id": "06748663765bb314ae4da2d9ea8928472bd03bc7", "size": "937", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "atlasdb-spi/src/main/java/com/palantir/atlasdb/spi/AtlasDbFactory.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Java", "bytes": "4196898" }, { "name": "Protocol Buffer", "bytes": "6159" }, { "name": "Shell", "bytes": "2130" } ], "symlink_target": "" }
> **NOTE:** > For any released features, please see notes in [release notes](RELEASENOTE.md). You may have noticed that the team is working on DocFX v3 right now. Yes, v3 is the long-term plan for DocFX to be the end-to-end "docs" tool to support > **NOTE:** > Roadmap for v3 is tracked [here](https://github.com/dotnet/docfx/tree/v3/docs/roadmap.md). - build of markdown and structured conceptuals (such as landing page, tutorial, etc.) - end-to-end generation of reference language documentation from either source codes or released packages (such as .NET, REST, Java, etc.) - able to run everywhere (i.e. cross-platform) and publish to anywhere possible (static site, or a built-in rendering stack with functionalities similar to <https://docs.microsoft.com>) - look and feel defaulted to <https://docs.microsoft.com>, but still customizable The reason why we choose to have a redesign and reimplementation of v3 instead of continuous improvement in v2 is mainly due to following considerations: - The v2 architecture, especially the plugin framework, is too flexible, making it difficult to do changes in DocFX without impacting the plugins. The flexibility also hinders community users to dig deep into the code and contribute. - It is not easy to locally debug and test due to the existence of the plugin framework. - The performance is also not ideal: two major issues are with AppDomain and Git related operations. - Technical stacks are not consistent throughout the pipeline, causing additional overhead in development and troubleshooting. - Community users are expressing their desire to have a documentation experience similar to <https://docs.microsoft.com> (i.e. [feature requests](README.md#collecting-feedbacks-and-proposals-for-docfx) on versioning, PDF link, REST definition pages, etc.), but there is no easy way to approach these requirements in v2. 
## v2 strategy Though we're focusing our resource on DocFX v3, please be assured that v2 is and will still be actively **supported and maintained** for a long time. We don't expect sudden deprecation of v2 before the full-fledge of v3 plus a reasonable time for migration. However, due to limited resource on v2, we are going to: - address large-impact requests only, e.g. features or bugs blocking adoption of DocFX v2, `metadata` gap to support any new version of .NET and .NET core - postpone other features to be reconsidered and planned in v3 - continuously improve usability, e.g. documentation, error messages - still provide active support to all channels (GitHub issue, etc.), but may have some latency in response - still provide full support to your contribution by PRs ## features in backlog (most likely to address in v3) Below are the features we put in backlog to be reconsidered and planned in v3 in future. ### Schema-driven document processor **Status** In progress. As [spec](Documentation/spec/docfx_document_schema.md) indicates, schema-driven processor is to handle the multi-language support issues. With SDP, it is much easier than today to onboard new languages such as TypeScript, SQL, GO, etc. A new language on-boarding will include the following steps: 1. Generate the YAML file from the language 2. Create the schema for the language YAML 3. Create the template for the language based on the schema ### Docker investigation to setup environment to generate YAML file from multiple languages Take TypeScript as a start point. ### Razor page support **Status** In design phase. [Razor page](https://docs.microsoft.com/en-us/aspnet/core/mvc/razor-pages/) is a new feature of ASP.NET Core. A Razor page contains a template file `A.cshtml` and a 'code-behind' file `A.cshtml.cs`. The design is pretty similar to DocFX's templating system which is a template file `A.tmpl` or `A.liquid` and a 'preprocessor' file `A.tmpl.js` or `A.liquid.js`. 
Razor page is quite familiar to ASP.NET developers. Supporting it in DocFX sounds friendly to new comers. ### Single file build and DocFX watch According to [Feature Proposals](http://feathub.com/docascode/docfx-feature-proposals), `docfx watch` wins far ahead. Watch => Changed file list => Build => File Accessor Layer File changes include: 1. Source Code file change => Out of scope. (Hard to implement) 2. `.md` and `.yml` file change => In scope. 3. Template file change 1. Dependent style files change => In scope. 2. Template file change => In scope. (Could be slow) ### Authoring experience * VSCode extension * Preview * TOC * Schema based YAML files * Intellisense and validation * Markdig syntax: uid autocomplete, syntax detect * docfx.json * toc.yml * schema based YAML documents ### Online API service for resolving cross reference With this API service, there is no need to download `msdn.zip` package or `xrefmap.yml` file anymore. ### Engineering work 1. Integrate DocFX with CI, e.g. Travis, Appveyor 2. Easier installation, e.g. one script for copy ### Cross platform support * Dotnet-core migration * Docker ### Other features * Highlighted clickable method declaration, e.g. *[String]() ToString([int]() a)* * Localization and versioning support * More attractive themes * Sandcastle advanced features * Support more programming languages, e.g. Python, JavaScript, Golang, etc.
{ "content_hash": "a75458df1d216617bc302bcd709ebb91", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 365, "avg_line_length": 53.53, "alnum_prop": 0.7573323370072856, "repo_name": "dotnet/docfx", "id": "8feb94a867c87d65413096489e4855df6e425556", "size": "5365", "binary": false, "copies": "2", "ref": "refs/heads/dev", "path": "Roadmap.md", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "236" }, { "name": "C#", "bytes": "5468456" }, { "name": "CSS", "bytes": "49441" }, { "name": "F#", "bytes": "133425" }, { "name": "JavaScript", "bytes": "97975" }, { "name": "Liquid", "bytes": "5617" }, { "name": "PowerShell", "bytes": "41840" }, { "name": "Roff", "bytes": "980" }, { "name": "Shell", "bytes": "297" }, { "name": "XSLT", "bytes": "6912" } ], "symlink_target": "" }
.. _2016_02_29_fdioandhoneycomb: .. toctree: =================== FD.io and Honeycomb =================== Event ----- This presentation was held during the OpenDaylight Design Forum on March 1st, 2016. Speakers -------- Ed Warnicke Slideshow --------- `Presentation Powerpoint <https://wiki.fd.io/images/8/84/Honeycomb_and_Fdio.pptx>`_ Video ----- `Ed Warnicke's Video Presentation <https://www.youtube.com/watch?v=ZqH9nwh83DI>`_
{ "content_hash": "6d97591ffecb4dc6598460f801a68ba0", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 83, "avg_line_length": 15.714285714285714, "alnum_prop": 0.6363636363636364, "repo_name": "vpp-dev/vpp", "id": "f589b7a4349fd24971de0aeb7c63f3f0cd092f24", "size": "440", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/events/Summits/OpenDaylight/2016_02_29_fdioandhoneycomb.rst", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "16871" }, { "name": "C", "bytes": "21411560" }, { "name": "C++", "bytes": "2210928" }, { "name": "CMake", "bytes": "179409" }, { "name": "CSS", "bytes": "847" }, { "name": "Emacs Lisp", "bytes": "111146" }, { "name": "Go", "bytes": "13884" }, { "name": "HTML", "bytes": "612" }, { "name": "Lua", "bytes": "79974" }, { "name": "M4", "bytes": "257" }, { "name": "Makefile", "bytes": "120923" }, { "name": "Objective-C", "bytes": "50546" }, { "name": "Python", "bytes": "3767934" }, { "name": "Ruby", "bytes": "8015" }, { "name": "Shell", "bytes": "106805" } ], "symlink_target": "" }
.class final Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState; .super Landroid/graphics/drawable/DrawableWrapper$DrawableWrapperState; .source "AnimatedRotateDrawable.java" # annotations .annotation system Ldalvik/annotation/EnclosingClass; value = Landroid/graphics/drawable/AnimatedRotateDrawable; .end annotation .annotation system Ldalvik/annotation/InnerClass; accessFlags = 0x18 name = "AnimatedRotateState" .end annotation # instance fields .field mFrameDuration:I .field mFramesCount:I .field mPivotX:F .field mPivotXRel:Z .field mPivotY:F .field mPivotYRel:Z # direct methods .method public constructor <init>(Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;)V .registers 4 .param p1, "orig" # Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState; .prologue const/4 v1, 0x0 const/4 v0, 0x0 .line 223 invoke-direct {p0, p1}, Landroid/graphics/drawable/DrawableWrapper$DrawableWrapperState;-><init>(Landroid/graphics/drawable/DrawableWrapper$DrawableWrapperState;)V .line 215 iput-boolean v1, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotXRel:Z .line 216 iput v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotX:F .line 217 iput-boolean v1, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotYRel:Z .line 218 iput v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotY:F .line 219 const/16 v0, 0x96 iput v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mFrameDuration:I .line 220 const/16 v0, 0xc iput v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mFramesCount:I .line 225 if-eqz p1, :cond_2f .line 226 iget-boolean v0, p1, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotXRel:Z iput-boolean v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotXRel:Z .line 227 iget 
v0, p1, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotX:F iput v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotX:F .line 228 iget-boolean v0, p1, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotYRel:Z iput-boolean v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotYRel:Z .line 229 iget v0, p1, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotY:F iput v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mPivotY:F .line 230 iget v0, p1, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mFramesCount:I iput v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mFramesCount:I .line 231 iget v0, p1, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mFrameDuration:I iput v0, p0, Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;->mFrameDuration:I .line 222 :cond_2f return-void .end method # virtual methods .method public newDrawable(Landroid/content/res/Resources;)Landroid/graphics/drawable/Drawable; .registers 4 .param p1, "res" # Landroid/content/res/Resources; .prologue .line 237 new-instance v0, Landroid/graphics/drawable/AnimatedRotateDrawable; const/4 v1, 0x0 invoke-direct {v0, p0, p1, v1}, Landroid/graphics/drawable/AnimatedRotateDrawable;-><init>(Landroid/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState;Landroid/content/res/Resources;Landroid/graphics/drawable/AnimatedRotateDrawable;)V return-object v0 .end method
{ "content_hash": "68ab675227b66e13efb35a29c2acf505", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 248, "avg_line_length": 32.7563025210084, "alnum_prop": 0.779887121600821, "repo_name": "ddki/my_study_project", "id": "04587dd0e6d264fc98d06a2953ca62e2386a9d6e", "size": "3898", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "client/android/demos/ODEX/test/framework/android/graphics/drawable/AnimatedRotateDrawable$AnimatedRotateState.smali", "mode": "33188", "license": "mit", "language": [ { "name": "AspectJ", "bytes": "3520" }, { "name": "Batchfile", "bytes": "10160" }, { "name": "C", "bytes": "43156" }, { "name": "C++", "bytes": "16295" }, { "name": "CSS", "bytes": "54735" }, { "name": "Dockerfile", "bytes": "538" }, { "name": "Elixir", "bytes": "7538" }, { "name": "GAP", "bytes": "133251" }, { "name": "Go", "bytes": "23393" }, { "name": "HTML", "bytes": "183017" }, { "name": "Haskell", "bytes": "18024" }, { "name": "Java", "bytes": "3890824" }, { "name": "JavaScript", "bytes": "1150724" }, { "name": "Lex", "bytes": "22606" }, { "name": "Makefile", "bytes": "1772" }, { "name": "Objective-C", "bytes": "9224" }, { "name": "PHP", "bytes": "31" }, { "name": "Python", "bytes": "2968992" }, { "name": "Ragel", "bytes": "16770" }, { "name": "Ruby", "bytes": "95237" }, { "name": "Scala", "bytes": "23930" }, { "name": "Shell", "bytes": "47348" }, { "name": "Smali", "bytes": "71959175" }, { "name": "Swift", "bytes": "211292" }, { "name": "XSLT", "bytes": "104965" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" /> <meta name="viewport" content="width=device-width,initial-scale=1"> <meta http-equiv="x-ua-compatible" content="ie=edge"> <meta name="lang:clipboard.copy" content="Copy to clipboard"> <meta name="lang:clipboard.copied" content="Copied to clipboard"> <meta name="lang:search.language" content="en"> <meta name="lang:search.pipeline.stopwords" content="True"> <meta name="lang:search.pipeline.trimmer" content="True"> <meta name="lang:search.result.none" content="No matching documents"> <meta name="lang:search.result.one" content="1 matching document"> <meta name="lang:search.result.other" content="# matching documents"> <meta name="lang:search.tokenizer" content="[\s\-]+"> <link href="https://fonts.gstatic.com/" rel="preconnect" crossorigin> <link href="https://fonts.googleapis.com/css?family=Roboto+Mono:400,500,700|Roboto:300,400,400i,700&display=fallback" rel="stylesheet"> <style> body, input { font-family: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif } code, kbd, pre { font-family: "Roboto Mono", "Courier New", Courier, monospace } </style> <link rel="stylesheet" href="../_static/stylesheets/application.css"/> <link rel="stylesheet" href="../_static/stylesheets/application-palette.css"/> <link rel="stylesheet" href="../_static/stylesheets/application-fixes.css"/> <link rel="stylesheet" href="../_static/fonts/material-icons.css"/> <meta name="theme-color" content="#3f51b5"> <script src="../_static/javascripts/modernizr.js"></script> <title>statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_gamma &#8212; statsmodels</title> <link rel="icon" type="image/png" sizes="32x32" href="../_static/icons/favicon-32x32.png"> <link rel="icon" type="image/png" sizes="16x16" href="../_static/icons/favicon-16x16.png"> <link rel="manifest" 
href="../_static/icons/site.webmanifest"> <link rel="mask-icon" href="../_static/icons/safari-pinned-tab.svg" color="#919191"> <meta name="msapplication-TileColor" content="#2b5797"> <meta name="msapplication-config" content="../_static/icons/browserconfig.xml"> <link rel="stylesheet" href="../_static/stylesheets/examples.css"> <link rel="stylesheet" href="../_static/stylesheets/deprecation.css"> <link rel="stylesheet" type="text/css" href="../_static/pygments.css" /> <link rel="stylesheet" type="text/css" href="../_static/material.css" /> <link rel="stylesheet" type="text/css" href="../_static/graphviz.css" /> <link rel="stylesheet" type="text/css" href="../_static/plot_directive.css" /> <script data-url_root="../" id="documentation_options" src="../_static/documentation_options.js"></script> <script src="../_static/jquery.js"></script> <script src="../_static/underscore.js"></script> <script src="../_static/doctools.js"></script> <script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script> <link rel="shortcut icon" href="../_static/favicon.ico"/> <link rel="author" title="About these documents" href="../about.html" /> <link rel="index" title="Index" href="../genindex.html" /> <link rel="search" title="Search" href="../search.html" /> <link rel="next" title="statsmodels.tsa.vector_ar.vecm.VECMResults.resid" href="statsmodels.tsa.vector_ar.vecm.VECMResults.resid.html" /> <link rel="prev" title="statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_det_coef_coint" href="statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_det_coef_coint.html" /> </head> <body dir=ltr data-md-color-primary=indigo data-md-color-accent=blue> <svg class="md-svg"> <defs data-children-count="0"> <svg xmlns="http://www.w3.org/2000/svg" width="416" height="448" viewBox="0 0 416 448" id="__github"><path fill="currentColor" d="M160 304q0 10-3.125 20.5t-10.75 19T128 
352t-18.125-8.5-10.75-19T96 304t3.125-20.5 10.75-19T128 256t18.125 8.5 10.75 19T160 304zm160 0q0 10-3.125 20.5t-10.75 19T288 352t-18.125-8.5-10.75-19T256 304t3.125-20.5 10.75-19T288 256t18.125 8.5 10.75 19T320 304zm40 0q0-30-17.25-51T296 232q-10.25 0-48.75 5.25Q229.5 240 208 240t-39.25-2.75Q130.75 232 120 232q-29.5 0-46.75 21T56 304q0 22 8 38.375t20.25 25.75 30.5 15 35 7.375 37.25 1.75h42q20.5 0 37.25-1.75t35-7.375 30.5-15 20.25-25.75T360 304zm56-44q0 51.75-15.25 82.75-9.5 19.25-26.375 33.25t-35.25 21.5-42.5 11.875-42.875 5.5T212 416q-19.5 0-35.5-.75t-36.875-3.125-38.125-7.5-34.25-12.875T37 371.5t-21.5-28.75Q0 312 0 260q0-59.25 34-99-6.75-20.5-6.75-42.5 0-29 12.75-54.5 27 0 47.5 9.875t47.25 30.875Q171.5 96 212 96q37 0 70 8 26.25-20.5 46.75-30.25T376 64q12.75 25.5 12.75 54.5 0 21.75-6.75 42 34 40 34 99.5z"/></svg> </defs> </svg> <input class="md-toggle" data-md-toggle="drawer" type="checkbox" id="__drawer"> <input class="md-toggle" data-md-toggle="search" type="checkbox" id="__search"> <label class="md-overlay" data-md-component="overlay" for="__drawer"></label> <a href="#generated/statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_gamma" tabindex="1" class="md-skip"> Skip to content </a> <header class="md-header" data-md-component="header"> <nav class="md-header-nav md-grid"> <div class="md-flex navheader"> <div class="md-flex__cell md-flex__cell--shrink"> <a href="../index.html" title="statsmodels" class="md-header-nav__button md-logo"> <img src="../_static/statsmodels-logo-v2-bw.svg" height="26" alt="statsmodels logo"> </a> </div> <div class="md-flex__cell md-flex__cell--shrink"> <label class="md-icon md-icon--menu md-header-nav__button" for="__drawer"></label> </div> <div class="md-flex__cell md-flex__cell--stretch"> <div class="md-flex__ellipsis md-header-nav__title" data-md-component="title"> <span class="md-header-nav__topic">statsmodels v0.13.2</span> <span class="md-header-nav__topic"> statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_gamma </span> </div> 
</div> <div class="md-flex__cell md-flex__cell--shrink"> <label class="md-icon md-icon--search md-header-nav__button" for="__search"></label> <div class="md-search" data-md-component="search" role="dialog"> <label class="md-search__overlay" for="__search"></label> <div class="md-search__inner" role="search"> <form class="md-search__form" action="../search.html" method="get" name="search"> <input type="text" class="md-search__input" name="q" placeholder="Search" autocapitalize="off" autocomplete="off" spellcheck="false" data-md-component="query" data-md-state="active"> <label class="md-icon md-search__icon" for="__search"></label> <button type="reset" class="md-icon md-search__icon" data-md-component="reset" tabindex="-1"> &#xE5CD; </button> </form> <div class="md-search__output"> <div class="md-search__scrollwrap" data-md-scrollfix> <div class="md-search-result" data-md-component="result"> <div class="md-search-result__meta"> Type to start searching </div> <ol class="md-search-result__list"></ol> </div> </div> </div> </div> </div> </div> <div class="md-flex__cell md-flex__cell--shrink"> <div class="md-header-nav__source"> <a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github"> <div class="md-source__icon"> <svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28"> <use xlink:href="#__github" width="24" height="24"></use> </svg> </div> <div class="md-source__repository"> statsmodels </div> </a> </div> </div> <script src="../_static/javascripts/version_dropdown.js"></script> <script> var json_loc = "../../versions-v2.json", target_loc = "../../", text = "Versions"; $( document ).ready( add_version_dropdown(json_loc, target_loc, text)); </script> </div> </nav> </header> <div class="md-container"> <nav class="md-tabs" data-md-component="tabs"> <div class="md-tabs__inner md-grid"> <ul class="md-tabs__list"> <li class="md-tabs__item"><a 
href="../user-guide.html" class="md-tabs__link">User Guide</a></li> <li class="md-tabs__item"><a href="../vector_ar.html" class="md-tabs__link">Vector Autoregressions <code class="xref py py-mod docutils literal notranslate"><span class="pre">tsa.vector_ar</span></code></a></li> <li class="md-tabs__item"><a href="statsmodels.tsa.vector_ar.vecm.VECMResults.html" class="md-tabs__link">statsmodels.tsa.vector_ar.vecm.VECMResults</a></li> </ul> </div> </nav> <main class="md-main"> <div class="md-main__inner md-grid" data-md-component="container"> <div class="md-sidebar md-sidebar--primary" data-md-component="navigation"> <div class="md-sidebar__scrollwrap"> <div class="md-sidebar__inner"> <nav class="md-nav md-nav--primary" data-md-level="0"> <label class="md-nav__title md-nav__title--site" for="__drawer"> <a href="../index.html" title="statsmodels" class="md-nav__button md-logo"> <img src="../_static/statsmodels-logo-v2-bw.svg" alt=" logo" width="48" height="48"> </a> <a href="../index.html" title="statsmodels">statsmodels v0.13.2</a> </label> <div class="md-nav__source"> <a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github"> <div class="md-source__icon"> <svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28"> <use xlink:href="#__github" width="24" height="24"></use> </svg> </div> <div class="md-source__repository"> statsmodels </div> </a> </div> <ul class="md-nav__list"> <li class="md-nav__item"> <a href="../install.html" class="md-nav__link">Installing statsmodels</a> </li> <li class="md-nav__item"> <a href="../gettingstarted.html" class="md-nav__link">Getting started</a> </li> <li class="md-nav__item"> <a href="../user-guide.html" class="md-nav__link">User Guide</a> <ul class="md-nav__list"> <li class="md-nav__item"> <a href="../user-guide.html#background" class="md-nav__link">Background</a> </li> <li class="md-nav__item"> <a 
href="../user-guide.html#regression-and-linear-models" class="md-nav__link">Regression and Linear Models</a> </li> <li class="md-nav__item"> <a href="../user-guide.html#time-series-analysis" class="md-nav__link">Time Series Analysis</a> <ul class="md-nav__list"> <li class="md-nav__item"> <a href="../tsa.html" class="md-nav__link">Time Series analysis <code class="xref py py-mod docutils literal notranslate"><span class="pre">tsa</span></code></a> </li> <li class="md-nav__item"> <a href="../statespace.html" class="md-nav__link">Time Series Analysis by State Space Methods <code class="xref py py-mod docutils literal notranslate"><span class="pre">statespace</span></code></a> </li> <li class="md-nav__item"> <a href="../vector_ar.html" class="md-nav__link">Vector Autoregressions <code class="xref py py-mod docutils literal notranslate"><span class="pre">tsa.vector_ar</span></code></a> </li></ul> </li> <li class="md-nav__item"> <a href="../user-guide.html#other-models" class="md-nav__link">Other Models</a> </li> <li class="md-nav__item"> <a href="../user-guide.html#statistics-and-tools" class="md-nav__link">Statistics and Tools</a> </li> <li class="md-nav__item"> <a href="../user-guide.html#data-sets" class="md-nav__link">Data Sets</a> </li> <li class="md-nav__item"> <a href="../user-guide.html#sandbox" class="md-nav__link">Sandbox</a> </li></ul> </li> <li class="md-nav__item"> <a href="../examples/index.html" class="md-nav__link">Examples</a> </li> <li class="md-nav__item"> <a href="../api.html" class="md-nav__link">API Reference</a> </li> <li class="md-nav__item"> <a href="../about.html" class="md-nav__link">About statsmodels</a> </li> <li class="md-nav__item"> <a href="../dev/index.html" class="md-nav__link">Developer Page</a> </li> <li class="md-nav__item"> <a href="../release/index.html" class="md-nav__link">Release Notes</a> </li> </ul> </nav> </div> </div> </div> <div class="md-sidebar md-sidebar--secondary" data-md-component="toc"> <div 
class="md-sidebar__scrollwrap"> <div class="md-sidebar__inner"> <nav class="md-nav md-nav--secondary"> <ul class="md-nav__list" data-md-scrollfix=""> <li class="md-nav__item"><a class="md-nav__extra_link" href="../_sources/generated/statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_gamma.rst.txt">Show Source</a> </li> <li id="searchbox" class="md-nav__item"></li> </ul> </nav> </div> </div> </div> <div class="md-content"> <article class="md-content__inner md-typeset" role="main"> <section id="statsmodels-tsa-vector-ar-vecm-vecmresults-pvalues-gamma"> <h1 id="generated-statsmodels-tsa-vector-ar-vecm-vecmresults-pvalues-gamma--page-root">statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_gamma<a class="headerlink" href="#generated-statsmodels-tsa-vector-ar-vecm-vecmresults-pvalues-gamma--page-root" title="Permalink to this headline">¶</a></h1> <dl class="py attribute"> <dt class="sig sig-object py" id="statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_gamma"> <span class="sig-prename descclassname"><span class="pre">VECMResults.</span></span><span class="sig-name descname"><span class="pre">pvalues_gamma</span></span><a class="headerlink" href="#statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_gamma" title="Permalink to this definition">¶</a></dt> <dd></dd></dl> </section> </article> </div> </div> </main> </div> <footer class="md-footer"> <div class="md-footer-nav"> <nav class="md-footer-nav__inner md-grid"> <a href="statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_det_coef_coint.html" title="statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_det_coef_coint" class="md-flex md-footer-nav__link md-footer-nav__link--prev" rel="prev"> <div class="md-flex__cell md-flex__cell--shrink"> <i class="md-icon md-icon--arrow-back md-footer-nav__button"></i> </div> <div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title"> <span class="md-flex__ellipsis"> <span class="md-footer-nav__direction"> Previous </span> 
statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_det_coef_coint </span> </div> </a> <a href="statsmodels.tsa.vector_ar.vecm.VECMResults.resid.html" title="statsmodels.tsa.vector_ar.vecm.VECMResults.resid" class="md-flex md-footer-nav__link md-footer-nav__link--next" rel="next"> <div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title"><span class="md-flex__ellipsis"> <span class="md-footer-nav__direction"> Next </span> statsmodels.tsa.vector_ar.vecm.VECMResults.resid </span> </div> <div class="md-flex__cell md-flex__cell--shrink"><i class="md-icon md-icon--arrow-forward md-footer-nav__button"></i> </div> </a> </nav> </div> <div class="md-footer-meta md-typeset"> <div class="md-footer-meta__inner md-grid"> <div class="md-footer-copyright"> <div class="md-footer-copyright__highlight"> &#169; Copyright 2009-2019, Josef Perktold, Skipper Seabold, Jonathan Taylor, statsmodels-developers. </div> Last updated on Feb 08, 2022. <br/> Created using <a href="http://www.sphinx-doc.org/">Sphinx</a> 4.4.0. and <a href="https://github.com/bashtage/sphinx-material/">Material for Sphinx</a> </div> </div> </div> </footer> <script src="../_static/javascripts/application.js"></script> <script>app.initialize({version: "1.0.4", url: {base: ".."}})</script> </body> </html>
{ "content_hash": "658f6b06c623208c7aa59f70c97f212a", "timestamp": "", "source": "github", "line_count": 447, "max_line_length": 999, "avg_line_length": 39.55257270693512, "alnum_prop": 0.6015271493212669, "repo_name": "statsmodels/statsmodels.github.io", "id": "d34c20c7aa1e2ad44574d6d03852fbaf26674b64", "size": "17684", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "v0.13.2/generated/statsmodels.tsa.vector_ar.vecm.VECMResults.pvalues_gamma.html", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
import errno
import logging
import os

from ceph_deploy import hosts, exc
from ceph_deploy.lib import remoto

LOG = logging.getLogger(__name__)


def distro_is_supported(distro_name):
    """Return True when ``distro_name`` is a distro Calamari can run on.

    This list is deliberately independent of what ceph-deploy itself
    supports, so it may be narrower than the deploy tool's own matrix.
    """
    return distro_name in ('centos', 'redhat', 'ubuntu', 'debian')


def connect(args):
    """Configure every host in ``args.hosts`` as a Calamari (salt) minion.

    For each host: verify the platform is supported, write the salt minion
    config pointing at ``args.master``, install the salt-minion and diamond
    packages, and (on redhat/centos) enable and start the minion service.
    """
    for hostname in args.hosts:
        remote = hosts.get(hostname, username=args.username)

        # Refuse early rather than failing halfway through package installs.
        if not distro_is_supported(remote.normalized_name):
            raise exc.UnsupportedPlatform(
                remote.distro_name,
                remote.codename,
                remote.release,
            )

        LOG.info(
            'Distro info: %s %s %s',
            remote.name,
            remote.release,
            remote.codename,
        )
        LOG.info('assuming that a repository with Calamari packages is already configured.')
        LOG.info('Refer to the docs for examples (http://ceph.com/ceph-deploy/docs/conf.html)')

        host_logger = logging.getLogger(hostname)

        # Emplace minion config prior to installation so that it is present
        # when the minion first starts.
        config_dir = os.path.join('/etc/salt/', 'minion.d')
        config_file = os.path.join(config_dir, 'calamari.conf')

        host_logger.debug('creating config dir: %s' % config_dir)
        remote.conn.remote_module.makedir(config_dir, [errno.EEXIST])

        host_logger.debug(
            'creating the calamari salt config: %s' % config_file
        )
        remote.conn.remote_module.write_file(
            config_file,
            ('master: %s\n' % args.master).encode('utf-8'),
        )

        remote.packager.install('salt-minion')
        remote.packager.install('diamond')

        # redhat/centos need to get the service started
        if remote.normalized_name in ('redhat', 'centos'):
            remoto.process.run(
                remote.conn,
                ['chkconfig', 'salt-minion', 'on'],
            )
            remoto.process.run(
                remote.conn,
                ['service', 'salt-minion', 'start'],
            )

        remote.conn.exit()


def calamari(args):
    """Dispatch to the handler matching the parsed subcommand."""
    handler = {'connect': connect}.get(args.subcommand)
    if handler is not None:
        handler(args)


def make(parser):
    """
    Install and configure Calamari nodes. Assumes that a repository with
    Calamari packages is already configured. Refer to the docs for examples
    (http://ceph.com/ceph-deploy/docs/conf.html)
    """
    sub_parsers = parser.add_subparsers(dest='subcommand')
    sub_parsers.required = True

    connect_parser = sub_parsers.add_parser(
        'connect',
        help='Configure host(s) to connect to Calamari master'
    )
    connect_parser.add_argument(
        '--master',
        nargs='?',
        metavar='MASTER SERVER',
        help="The domain for the Calamari master server"
    )
    connect_parser.add_argument(
        'hosts',
        nargs='+',
    )

    parser.set_defaults(
        func=calamari,
    )
{ "content_hash": "8228d6ec6d5dfc6301c4b636cf4a267f", "timestamp": "", "source": "github", "line_count": 108, "max_line_length": 95, "avg_line_length": 29.055555555555557, "alnum_prop": 0.5984703632887189, "repo_name": "codenrhoden/ceph-deploy", "id": "9bbea65ce29d1a6edec9c41d5141d7a83b58cdea", "size": "3138", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "ceph_deploy/calamari.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "392696" }, { "name": "Shell", "bytes": "8614" } ], "symlink_target": "" }
'use strict'; var data = require('sdk/self').data; var disable = { firefox: {{json disable.firefox}} }; {{#if extend_ff_index}} {{> extend_ff_index }} {{/if}} {{#unless disable.firefox.page_action}} {{#if page_action}} require('sdk/ui/button/action').ActionButton({ {{#if page_action.id}}id: "{{page_action.id}}",{{/if}} label: "{{page_action.default_title}}", icon: "./{{page_action.default_icon}}", onClick: {{#if page_action.callback}}{{page_action.callback}}{{else}}function(state) { tabs.open(data.url("{{page_action.default_popup}}")); }{{/if}} }); {{/if}} {{/unless}} {{#unless disable.firefox.content_scripts}} {{#if content_scripts}} require("sdk/page-mod").PageMod({ {{#if content_scripts.js}} contentScriptFile: [ {{#each content_scripts.js}} data.url("{{this}}"), {{/each}} ], {{/if}} {{#if content_scripts.css}} contentStyleFile: [ {{#each content_scripts.css}} data.url("{{this}}"), {{/each}} ], {{/if}} include: '{{host}}', contentScriptWhen: 'ready' }); {{/if}} {{/unless}}
{ "content_hash": "9d1739c0448eb4456ea06dda5269a5f4", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 88, "avg_line_length": 23.340425531914892, "alnum_prop": 0.5724703737465816, "repo_name": "Tuguusl/grunt-browser-extension", "id": "f65203cfe19265f39e0ed90dc8b40071d5722c34", "size": "1097", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/firefox/lib/index.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "690" }, { "name": "JavaScript", "bytes": "24836" }, { "name": "NSIS", "bytes": "10142" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <site> <feature url="features/io.hummer.eclipse.jarSearch.feature_1.0.1.jar" id="io.hummer.eclipse.jarSearch.feature" version="1.0.1"> <category name="jarSearch"/> </feature> <category-def name="jarSearch" label="Eclipse JAR Search"/> </site>
{ "content_hash": "c12002f15e3d42e4c1cc2c386f74dfdd", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 130, "avg_line_length": 42.42857142857143, "alnum_prop": 0.6835016835016835, "repo_name": "whummer/eclipseJarSearch", "id": "ea7fd8875b624806a2b4630defefcc4e38e17fdd", "size": "297", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "build/site.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "15073" } ], "symlink_target": "" }
package org.jivesoftware.smackx.jingleold.listeners; import org.jivesoftware.smack.SmackException.NotConnectedException; import org.jivesoftware.smackx.jingleold.media.PayloadType; /** * Interface for listening to jmf events. * @author Thiago Camargo */ public interface JingleMediaListener extends JingleListener { /** * Notification that the jmf has been negotiated and established. * * @param pt The payload type agreed. * @throws NotConnectedException */ public void mediaEstablished(PayloadType pt) throws NotConnectedException; /** * Notification that a payload type must be cancelled * * @param cand The payload type that must be closed */ public void mediaClosed(PayloadType cand); }
{ "content_hash": "b599f5f7925716a22a6d82891a278cbe", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 78, "avg_line_length": 28.22222222222222, "alnum_prop": 0.7296587926509186, "repo_name": "TTalkIM/Smack", "id": "49ec6d64ec67cff5bac892c0ddbc9ce3b2f0abcd", "size": "1368", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "smack-jingle-old/src/main/java/org/jivesoftware/smackx/jingleold/listeners/JingleMediaListener.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "602" }, { "name": "HTML", "bytes": "72604" }, { "name": "Java", "bytes": "4821414" }, { "name": "Shell", "bytes": "739" } ], "symlink_target": "" }
// Offline cache support: persists report/route submissions on-device when the
// network is unavailable and re-submits them once connectivity returns.
// Also manages the base-map tile cache and the app icon badge count.
import { useNetInfo } from '@react-native-community/netinfo';
import {
  cacheDirectory,
  deleteAsync,
  documentDirectory,
  getInfoAsync,
  makeDirectoryAsync,
  moveAsync,
  readAsStringAsync,
  readDirectoryAsync,
  writeAsStringAsync,
} from 'expo-file-system';
import * as Notifications from 'expo-notifications';
import lodash from 'lodash';
import prettyBytes from 'pretty-bytes';
import propTypes from 'prop-types';
import React from 'react';
import { Alert, Platform } from 'react-native';
import { useMutation, useQueryClient } from 'react-query';
import * as Sentry from 'sentry-expo';
import { useAPI } from './api';
import config from './config';
import { isPickupReport } from './utilities';

// tiles are put into the cache directory to allow the OS to clean them up if the device gets low on space
export const tileCacheDirectory = cacheDirectory + 'tiles';
// Offline submissions go under documentDirectory so the OS will NOT purge them.
const offlineDataStorageDirectory = documentDirectory + 'offlineData';
// User-facing alert copy.
// NOTE(review): "You report" / "the your device" are typos in a user-facing
// string; left untouched here (runtime text) -- fix the copy separately.
const offlineMessage = 'No connection to the internet was detected.';
const errorMessage = 'An error occurred while trying to upload your report:';
const commonMessage = 'You report has been saved to the your device for later submission.';
// Each cached submission is a folder named by its numeric id containing this file.
const dataFileName = 'data.json';

// Create the directory if it does not already exist (no-op otherwise).
function ensureDirectory(path) {
  return getInfoAsync(path, { size: true }).then((info) => {
    if (!info.exists) {
      makeDirectoryAsync(path);
    }
  });
}
// Module-level side effect: make sure both storage roots exist at import time.
ensureDirectory(tileCacheDirectory);
ensureDirectory(offlineDataStorageDirectory);

// Return the tile cache size as a human-readable string (e.g. "1.2 MB").
export async function getBaseMapCacheSize() {
  const { size } = await getInfoAsync(tileCacheDirectory);
  return prettyBytes(size || 0);
}

// Delete and recreate the tile cache directory.
export async function clearBaseMapCache() {
  await deleteAsync(tileCacheDirectory);
  await ensureDirectory(tileCacheDirectory);
}

const OfflineCacheContext = React.createContext();

// Read a cached submission (or one of a cached route's pickups) from disk.
// Returns null on any read/parse failure, after reporting to Sentry.
// NOTE(review): `if (pickupIndex)` is falsy for index 0, so pickupIndex === 0
// returns the whole submission instead of pickups[0] -- confirm callers never
// pass 0, or change the check to `pickupIndex != null`.
export async function getOfflineSubmission(id, pickupIndex) {
  try {
    const json = await readAsStringAsync(`${offlineDataStorageDirectory}/${id}/${dataFileName}`);
    if (pickupIndex) {
      return JSON.parse(json).pickups[pickupIndex];
    }
    return JSON.parse(json);
  } catch (error) {
    console.error(
      `Error attempting to read offline submission with id: ${id} (pickupIndex: ${pickupIndex}): \n\n ${error}`
    );
    Sentry.Native.captureException(error);
    return null;
  }
}

// Best-effort removal of a cached submission folder; failures are logged, not thrown.
async function deleteOfflineSubmission(id) {
  try {
    await deleteAsync(`${offlineDataStorageDirectory}/${id}`);
  } catch (error) {
    console.error(`Error attempting to delete offline submission with id: ${id}: \n\n ${error}`);
    Sentry.Native.captureException(error);
  }
}

// Provider that tracks cached submission ids, auto-submits them when the
// device comes back online, and exposes cacheReport/cacheRoute to consumers.
export function OfflineCacheContextProvider({ children }) {
  const { isInternetReachable } = useNetInfo();
  const [cachedSubmissionIds, setCachedSubmissionIds] = React.useState([]);
  const { postReport, postRoute } = useAPI();

  // On mount: seed state with the submission folders already on disk.
  React.useEffect(() => {
    const giddyUp = async () => {
      const folderNames = await readDirectoryAsync(offlineDataStorageDirectory);
      // filter out any weird stuff like .DS_Store
      setCachedSubmissionIds(folderNames.filter((folderName) => folderName.match(/^\d+$/)));
    };
    giddyUp();
  }, []);

  // Upload every cached submission; ids that fail stay cached for next time.
  // References `mutation`, declared below -- safe because this closure only
  // runs after the component body has finished initializing.
  const submit = async function () {
    if (mutation.isLoading) {
      console.warn('already submitting, skipping');
      return;
    }
    console.log('submitting offline submissions...');
    const failedSubmissionIds = [];
    let lastError;
    for (let i = 0; i < cachedSubmissionIds.length; i++) {
      try {
        console.log(`submitting: ${cachedSubmissionIds[i]}`);
        const submission = await getOfflineSubmission(cachedSubmissionIds[i]);
        delete submission.offlineStorageId;
        if (submission.animal_location) {
          // report
          if (isPickupReport(submission)) {
            await postReport(submission, config.REPORT_TYPES.pickup);
          } else {
            await postReport(submission, config.REPORT_TYPES.report);
          }
        } else {
          // route
          // Post the route first to obtain route_id, then post each pickup
          // linked to that id.
          const routeResponse = await postRoute(lodash.omit(submission, ['pickups']));
          for (let j = 0; j < submission.pickups.length; j++) {
            const pickup = submission.pickups[j];
            delete pickup.offlineStorageId;
            pickup.route_id = routeResponse.route_id;
            await postReport(pickup, config.REPORT_TYPES.pickup);
          }
        }
        await deleteOfflineSubmission(cachedSubmissionIds[i]);
      } catch (error) {
        failedSubmissionIds.push(cachedSubmissionIds[i]);
        Sentry.Native.captureException(error);
        console.error(error);
        lastError = error.message;
      }
    }
    // Only the most recent error message is surfaced when anything failed.
    if (failedSubmissionIds.length === 0) {
      Alert.alert('Offline Submission', 'Your offline data was successfully submitted.');
    } else {
      Alert.alert('Offline Submission Error', lastError);
    }
    setCachedSubmissionIds(failedSubmissionIds);
    return lastError;
  };

  const queryClient = useQueryClient();
  const mutation = useMutation(submit, {
    onSuccess: () => {
      queryClient.invalidateQueries(config.QUERY_KEYS.submissions);
      queryClient.invalidateQueries(config.QUERY_KEYS.profile);
    },
  });

  // Auto-submit when connectivity returns and there is cached work.
  // NOTE(review): cachedSubmissionIds/mutation are read but intentionally(?)
  // absent from the dependency array -- the effect only fires on connectivity
  // changes; confirm this is the desired trigger.
  React.useEffect(() => {
    if (isInternetReachable && cachedSubmissionIds.length > 0 && !mutation.isLoading) {
      mutation.mutate();
    }
  }, [isInternetReachable]);

  // Mirror the number of pending submissions onto the app icon badge,
  // requesting iOS badge permission on first use if undetermined.
  React.useEffect(() => {
    const updateBadgeCount = async () => {
      let badgeNumber = 0;
      if (cachedSubmissionIds && cachedSubmissionIds.length > 0) {
        badgeNumber = cachedSubmissionIds.length;
      }
      let allowed = true;
      if (Platform.OS === 'ios') {
        let status = await Notifications.getPermissionsAsync();
        if (status.ios.status === Notifications.IosAuthorizationStatus.NOT_DETERMINED) {
          status = await Notifications.requestPermissionsAsync({ ios: { allowBadge: true } });
        }
        allowed = status.ios.status !== Notifications.IosAuthorizationStatus.DENIED;
      }
      if (allowed) {
        await Notifications.setBadgeCountAsync(badgeNumber);
      }
    };
    updateBadgeCount();
  }, [cachedSubmissionIds]);

  // Move a photo into the submission's directory and return a NEW photo
  // object pointing at the new uri (original object left untouched).
  const movePhoto = async function (photo, directory) {
    // copy photo to documentDirectory so that it doesn't get auto-deleted by the OS
    // ImagePicker puts it in the cacheDirectory
    if (photo) {
      const fileName = photo.uri.split('/').pop();
      const newUri = `${directory}/${fileName}`;
      await moveAsync({
        from: photo.uri,
        to: newUri,
      });
      // being very careful to create a new object
      // built apps were having troubles with the photo uri update
      const newPhoto = { ...photo, uri: newUri };
      return newPhoto;
    }
    return photo;
  };

  // Show the "saved for later" alert and resolve only when the user taps OK.
  const showAlert = async function (error) {
    await new Promise((resolve) => {
      Alert.alert(
        'Offline Report',
        (error ? `${errorMessage} \n\n ${error.message}` : offlineMessage) + '\n\n' + commonMessage,
        [{ text: 'OK', onPress: resolve }]
      );
    });
  };

  // Persist a single report (with its photo) to disk for later submission.
  // `error` is the upload failure that triggered caching, or undefined when
  // caching happened because the device was offline.
  const cacheReport = async function (submitValues, error) {
    const id = new Date().getTime();
    const reportDirectory = `${offlineDataStorageDirectory}/${id}`;
    await makeDirectoryAsync(reportDirectory);
    submitValues.offlineStorageId = id;
    submitValues.photo = await movePhoto(submitValues.photo, reportDirectory);
    await writeAsStringAsync(`${reportDirectory}/${dataFileName}`, JSON.stringify(submitValues));
    setCachedSubmissionIds((existing) => [...existing, id]);
    await showAlert(error);
  };

  // Persist a route and all of its pickups (moving each pickup's photo) to
  // disk as a single submission folder.
  const cacheRoute = async function (submitValues, pickups, error) {
    const id = new Date().getTime();
    const routeDirectory = `${offlineDataStorageDirectory}/${id}`;
    await makeDirectoryAsync(routeDirectory);
    submitValues.offlineStorageId = id;
    // being very careful to create new objects and array
    // built apps were having troubles with the photo uri update
    const newPickups = [];
    for (let i = 0; i < pickups.length; i++) {
      const photo = await movePhoto(pickups[i].photo, routeDirectory);
      newPickups.push({ ...pickups[i], photo });
    }
    await writeAsStringAsync(
      `${routeDirectory}/${dataFileName}`,
      JSON.stringify({ ...submitValues, pickups: newPickups })
    );
    setCachedSubmissionIds((existing) => [...existing, id]);
    await showAlert(error);
  };

  return (
    <OfflineCacheContext.Provider
      value={{
        isConnected: isInternetReachable,
        cacheReport,
        cacheRoute,
        cachedSubmissionIds,
        submitOfflineSubmissions: mutation.mutate.bind(mutation),
        isSubmitting: mutation.isLoading,
      }}
    >
      {children}
    </OfflineCacheContext.Provider>
  );
}

OfflineCacheContextProvider.propTypes = {
  children: propTypes.object,
};

// Hook accessor for the context; throws when used outside the provider.
export function useOfflineCache() {
  const context = React.useContext(OfflineCacheContext);
  if (!context) {
    throw new Error('useOfflineCache must be used within a OfflineCacheProvider');
  }
  return context;
}
{ "content_hash": "0e66a488423acc119387f323544f4992", "timestamp": "", "source": "github", "line_count": 291, "max_line_length": 111, "avg_line_length": 31.030927835051546, "alnum_prop": 0.6681063122923588, "repo_name": "agrc/roadkill-mobile", "id": "30f0edb9f7e3204709d494c4d253266b8b375d6a", "size": "9030", "binary": false, "copies": "1", "ref": "refs/heads/production", "path": "src/front/services/offline.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "73" }, { "name": "Dockerfile", "bytes": "733" }, { "name": "JavaScript", "bytes": "280840" }, { "name": "Shell", "bytes": "6957" } ], "symlink_target": "" }
<!DOCTYPE html> <html> <head> <title>TDI.Tools - QUnit Test suite</title> <link rel="stylesheet" href="http://code.jquery.com/qunit/qunit-1.19.0.css"> </head> <body> <div id="qunit"></div> <div id="qunit-fixture"></div> <script src="http://code.jquery.com/qunit/qunit-1.19.0.js"></script> <script src="http://code.jquery.com/jquery-1.10.2.min.js"></script> <script src="../build/tdi-bundle.min.js"></script> <script src="tdi-tools-tests.js"></script> </body> </html>
{ "content_hash": "40223f6cc1ae1021903648841601137d", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 77, "avg_line_length": 28.058823529411764, "alnum_prop": 0.6582809224318659, "repo_name": "twinstone/tdi", "id": "00938289d64fa4cd88770e01ef0a0375e31147ad", "size": "477", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/tdi-tools.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "40" }, { "name": "HTML", "bytes": "21733" }, { "name": "JavaScript", "bytes": "140024" } ], "symlink_target": "" }
#include <boost/filesystem.hpp>

#include <application/Logger.h>

#include "ArchiveFileSystem.h"

// Registers an already-opened archive with the file system, rejecting a
// second archive with the same path.
bool ros::ArchiveFileSystem::addArchive(ArchiveFilePtr archive) {
    const fs::path& candidatePath = archive->getPath();
    for (ArchiveFilesList::const_iterator it = archives.begin(); it != archives.end(); ++it) {
        if ((*it)->getPath() == candidatePath) {
            ROS_WARNING(boost::format("Archive %s has already been added into file system - ignoring it") % candidatePath);
            return false;
        }
    }
    archives.push_back(archive);
    ROS_TRACE(boost::format("Archive %s added into file system") % candidatePath);
    return true;
}

// Creates and opens the archive at the given path, then hands it to addArchive().
bool ros::ArchiveFileSystem::openArchive(const fs::path& path) {
    ArchiveFilePtr newArchive = ArchiveFile::create(path.string());
    if (!newArchive) {
        ROS_ERROR(boost::format("Failed to create archive %s for file system") % path);
        return false;
    }
    const bool opened = newArchive->open(path);
    if (!opened) {
        ROS_ERROR(boost::format("Failed to open archive %s for file system") % path);
        return false;
    }
    return addArchive(newArchive);
}

// Resets the file system to the given directory and opens every entry inside
// it as an archive. Returns true when at least one archive was opened.
// NOTE(review): the "root" member is cleared but never assigned the new value
// here -- confirm whether any caller relies on it being updated.
bool ros::ArchiveFileSystem::setRoot(const fs::path& newRoot) {
    root.clear();
    archives.clear();

    sys::error_code errorCode;
    const bool isDirectory = fs::is_directory(newRoot, errorCode);
    if (errorCode) {
        ROS_ERROR(boost::format("Failed to check type for root %s - system error occured %s") % newRoot % errorCode.message());
        return false;
    }
    if (!isDirectory) {
        ROS_ERROR(boost::format("Provided root %s does not point to a directory") % newRoot);
        return false;
    }

    fs::directory_iterator entryIter(newRoot, errorCode);
    if (errorCode) {
        ROS_ERROR(boost::format("Failed to open root %s - system error occured %s") % newRoot % errorCode.message());
        return false;
    }

    // Archives that fail to open are logged by openArchive() and skipped.
    for (const fs::directory_iterator endIter; entryIter != endIter; ++entryIter) {
        openArchive(entryIter->path());
    }

    ROS_DEBUG(boost::format("Root %s set with %d opened archives") % newRoot % archives.size());
    return !archives.empty();
}

// Returns the decompressed contents of the first archive entry matching the
// name, or an empty buffer pointer when no archive contains it.
ros::RawBufferPtr ros::ArchiveFileSystem::readFile(const std::string& name) const {
    for (ArchiveFilesList::const_iterator it = archives.begin(); it != archives.end(); ++it) {
        ArchiveEntryPtr entry = (*it)->findEntry(name);
        if (entry) {
            ROS_TRACE(boost::format("Found entry for file %s in file system") % name);
            return entry->decompress();
        }
    }
    ROS_WARNING(boost::format("Failed to find entry for file %s in file system") % name);
    return RawBufferPtr();
}

// True when any registered archive contains an entry with the given name.
bool ros::ArchiveFileSystem::hasFile(const std::string& name) const {
    for (ArchiveFilesList::const_iterator it = archives.begin(); it != archives.end(); ++it) {
        if ((*it)->hasEntry(name)) {
            return true;
        }
    }
    return false;
}
{ "content_hash": "44ee20d5b8175c642d2249419626b320", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 120, "avg_line_length": 35.566265060240966, "alnum_prop": 0.6239837398373984, "repo_name": "lechkulina/RealmsOfSteel", "id": "f2b60f22ec230359dc58788566182271e3473a74", "size": "3126", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "engine/source/resources/ArchiveFileSystem.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2471" }, { "name": "C++", "bytes": "268874" }, { "name": "CMake", "bytes": "7171" } ], "symlink_target": "" }
package com.netflix.kayenta.config; import static com.netflix.kayenta.graphite.E2EIntegrationTest.CANARY_WINDOW_IN_MINUTES; import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; import java.net.Socket; import java.net.UnknownHostException; import java.time.Instant; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import lombok.extern.slf4j.Slf4j; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; @TestConfiguration @Slf4j public class GraphiteIntegrationTestConfig { public static final String CONTROL_SCOPE_NAME = "control"; public static final String EXPERIMENT_SCOPE_HEALTHY = "test-healthy"; public static final String EXPERIMENT_SCOPE_UNHEALTHY = "test-unhealthy"; private static final String LOCAL_GRAPHITE_HOST = "localhost"; private static final String TEST_METRIC = "test.server.request.400"; private static final int MOCK_SERVICE_REPORTING_INTERVAL_IN_MILLISECONDS = 1000; public static final int[] HEALTHY_SERVER_METRICS = {0, 10}; public static final int[] UNHEALTHY_SERVER_METRICS = {10, 20}; private final ExecutorService executorService; private Instant metricsReportingStartTime; public GraphiteIntegrationTestConfig() { this.executorService = Executors.newFixedThreadPool(2); } @Bean public Instant metricsReportingStartTime() { return metricsReportingStartTime; } @PostConstruct public void start() { metricsReportingStartTime = Instant.now(); executorService.submit( createMetricReportingMockService( getGraphiteMetricProvider( CONTROL_SCOPE_NAME, HEALTHY_SERVER_METRICS[0], HEALTHY_SERVER_METRICS[1]))); executorService.submit( createMetricReportingMockService( getGraphiteMetricProvider( EXPERIMENT_SCOPE_HEALTHY, HEALTHY_SERVER_METRICS[0], HEALTHY_SERVER_METRICS[1]))); executorService.submit( createMetricReportingMockService( 
getGraphiteMetricProvider( EXPERIMENT_SCOPE_UNHEALTHY, UNHEALTHY_SERVER_METRICS[0], UNHEALTHY_SERVER_METRICS[1]))); metricsReportingStartTime = Instant.now(); try { long pause = TimeUnit.MINUTES.toMillis(CANARY_WINDOW_IN_MINUTES) + TimeUnit.SECONDS.toMillis(10); log.info( "Waiting for {} milliseconds for mock data to flow through graphite, before letting the " + "integration" + " tests run", pause); Thread.sleep(pause); } catch (InterruptedException e) { log.error("Failed to wait to send metrics", e); throw new RuntimeException(e); } } @PreDestroy public void stop() { executorService.shutdownNow(); } private Runnable createMetricReportingMockService(GraphiteMetricProvider graphiteMetricProvider) { int graphiteFeedPort = Integer.parseInt(System.getProperty("graphite.feedPort")); return () -> { while (!Thread.currentThread().isInterrupted()) { try (Socket socket = new Socket(LOCAL_GRAPHITE_HOST, graphiteFeedPort)) { OutputStream outputStream = socket.getOutputStream(); PrintWriter out = new PrintWriter(outputStream); out.println(graphiteMetricProvider.getRandomMetricWithinRange()); out.flush(); out.close(); Thread.sleep(MOCK_SERVICE_REPORTING_INTERVAL_IN_MILLISECONDS); } catch (UnknownHostException e) { log.error("UNABLE TO FIND HOST", e); } catch (IOException e) { log.error("CONNECTION ERROR", e); } catch (InterruptedException e) { log.debug("Thread interrupted", e); } } }; } private GraphiteMetricProvider getGraphiteMetricProvider(String scope, int min, int max) { String metricName = TEST_METRIC + "." + scope; return new GraphiteMetricProvider(min, max, metricName); } }
{ "content_hash": "6c9e598c6c3e8aee999600717a6a6ee4", "timestamp": "", "source": "github", "line_count": 112, "max_line_length": 100, "avg_line_length": 36.544642857142854, "alnum_prop": 0.7068165160029318, "repo_name": "spinnaker/kayenta", "id": "a43b7cc01930a1cb03b709d64bd067f79da12fc4", "size": "4682", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "kayenta-graphite/src/integration-test/java/com/netflix/kayenta/config/GraphiteIntegrationTestConfig.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "874" }, { "name": "Groovy", "bytes": "103973" }, { "name": "Java", "bytes": "1206273" }, { "name": "Scala", "bytes": "243375" }, { "name": "Shell", "bytes": "396" }, { "name": "Slim", "bytes": "380" } ], "symlink_target": "" }
export namespace crypto { class BN { } namespace ECDSA { function sign(message: Buffer, key: PrivateKey): Signature; function verify(hashbuf: Buffer, sig: Signature, pubkey: PublicKey, endian?: 'little'): boolean; } namespace Hash { function sha1(buffer: Buffer): Buffer; function sha256(buffer: Buffer): Buffer; function sha256sha256(buffer: Buffer): Buffer; function sha256ripemd160(buffer: Buffer): Buffer; function sha512(buffer: Buffer): Buffer; function ripemd160(buffer: Buffer): Buffer; function sha256hmac(data: Buffer, key: Buffer): Buffer; function sha512hmac(data: Buffer, key: Buffer): Buffer; } namespace Random { function getRandomBuffer(size: number): Buffer; } namespace Point {} class Signature { static fromDER(sig: Buffer): Signature; static fromString(data: string): Signature; SIGHASH_ALL: number; toString(): string; } } export namespace Transaction { class UnspentOutput { static fromObject(o: object): UnspentOutput; readonly address: Address; readonly txId: string; readonly outputIndex: number; readonly script: Script; readonly satoshis: number; constructor(data: object); inspect(): string; toObject(): this; toString(): string; } class Output { readonly script: Script; readonly satoshis: number; constructor(data: object); setScript(script: Script | string | Buffer): this; inspect(): string; toObject(): object; } class Input { readonly prevTxId: Buffer; readonly outputIndex: number; readonly sequenceNumber: number; readonly script: Script; readonly output?: Output | undefined; } } export class Transaction { inputs: Transaction.Input[]; outputs: Transaction.Output[]; readonly id: string; readonly hash: string; nid: string; constructor(serialized?: any); from(utxos: Transaction.UnspentOutput[]): this; to(address: Address[] | Address | string, amount: number): this; change(address: Address | string): this; fee(amount: number): this; feePerKb(amount: number): this; sign(privateKey: PrivateKey | string): this; applySignature(sig: crypto.Signature): this; 
addInput(input: Transaction.Input): this; addOutput(output: Transaction.Output): this; addData(value: Buffer): this; lockUntilDate(time: Date | number): this; lockUntilBlockHeight(height: number): this; hasWitnesses(): boolean; getFee(): number; getChangeOutput(): Transaction.Output | null; getLockTime(): Date | number; verify(): string | boolean; isCoinbase(): boolean; enableRBF(): this; isRBF(): boolean; inspect(): string; serialize(): string; } export class Block { hash: string; height: number; transactions: Transaction[]; header: { time: number; prevHash: string; }; constructor(data: Buffer | object); } export class PrivateKey { readonly publicKey: PublicKey; readonly network: Networks.Network; toAddress(): Address; toPublicKey(): PublicKey; toString(): string; toObject(): object; toJSON(): object; toWIF(): string; constructor(key?: string, network?: Networks.Network); } export class PublicKey { constructor(source: string); static fromPrivateKey(privateKey: PrivateKey): PublicKey; toBuffer(): Buffer; toDER(): Buffer; } export class HDPrivateKey { readonly hdPublicKey: HDPublicKey; constructor(data?: string | Buffer | object); derive(arg: string | number, hardened?: boolean): HDPrivateKey; deriveChild(arg: string | number, hardened?: boolean): HDPrivateKey; deriveNonCompliantChild(arg: string | number, hardened?: boolean): HDPrivateKey; toString(): string; toObject(): object; toJSON(): object; } export class HDPublicKey { readonly xpubkey: Buffer; readonly network: Networks.Network; readonly depth: number; readonly publicKey: PublicKey; readonly fingerPrint: Buffer; constructor(arg: string | Buffer | object); derive(arg: string | number, hardened?: boolean): HDPublicKey; deriveChild(arg: string | number, hardened?: boolean): HDPublicKey; toString(): string; } export namespace Script { const types: { DATA_OUT: string; }; function buildMultisigOut(publicKeys: PublicKey[], threshold: number, opts: object): Script; function buildWitnessMultisigOutFromScript(script: 
Script): Script; function buildMultisigIn(pubkeys: PublicKey[], threshold: number, signatures: Buffer[], opts: object): Script; function buildP2SHMultisigIn(pubkeys: PublicKey[], threshold: number, signatures: Buffer[], opts: object): Script; function buildPublicKeyHashOut(address: Address): Script; function buildPublicKeyOut(pubkey: PublicKey): Script; function buildDataOut(data: string | Buffer, encoding?: string): Script; function buildScriptHashOut(script: Script): Script; function buildPublicKeyIn(signature: crypto.Signature | Buffer, sigtype: number): Script; function buildPublicKeyHashIn(publicKey: PublicKey, signature: crypto.Signature | Buffer, sigtype: number): Script; function fromAddress(address: string | Address): Script; function empty(): Script; } export class Script { constructor(data: string | object); set(obj: object): this; toBuffer(): Buffer; toASM(): string; toString(): string; toHex(): string; isPublicKeyHashOut(): boolean; isPublicKeyHashIn(): boolean; getPublicKey(): Buffer; getPublicKeyHash(): Buffer; isPublicKeyOut(): boolean; isPublicKeyIn(): boolean; isScriptHashOut(): boolean; isWitnessScriptHashOut(): boolean; isWitnessPublicKeyHashOut(): boolean; isWitnessProgram(): boolean; isScriptHashIn(): boolean; isMultisigOut(): boolean; isMultisigIn(): boolean; isDataOut(): boolean; getData(): Buffer; isPushOnly(): boolean; classify(): string; classifyInput(): string; classifyOutput(): string; isStandard(): boolean; prepend(obj: any): this; add(obj: any): this; hasCodeseparators(): boolean; removeCodeseparators(): this; equals(script: Script): boolean; getAddressInfo(): Address | boolean; findAndDelete(script: Script): this; checkMinimalPush(i: number): boolean; getSignatureOperationsCount(accurate: boolean): number; toAddress(): Address; } export class Message { constructor(message: string); magicHash(): Buffer; sign(privateKey: PrivateKey): string; verify(bitcoinAddress: Address | string, signatureString: string): boolean; fromString(str: 
string): Message; fromJSON(json: string): Message; toObject(): { message: string }; toJSON(): string; toString(): string; inspect(): string; } export interface Util { readonly buffer: { reverse(a: any): any; }; } export namespace Networks { interface Network { readonly name: string; readonly alias: string; } const livenet: Network; const mainnet: Network; const testnet: Network; function add(data: any): Network; function remove(network: Network): void; function get(args: string | number | Network, keys: string | string[]): Network; } export class Address { readonly hashBuffer: Buffer; readonly network: Networks.Network; readonly type: string; constructor(data: Buffer | Uint8Array | string | object, network?: Networks.Network, type?: string); } export class Unit { static fromBTC(amount: number): Unit; static fromMilis(amount: number): Unit; static fromBits(amount: number): Unit; static fromSatoshis(amount: number): Unit; constructor(amount: number, unitPreference: string); toBTC(): number; toMilis(): number; toBits(): number; toSatoshis(): number; }
{ "content_hash": "0ac0e7ec9504713994447f45c722cc1c", "timestamp": "", "source": "github", "line_count": 301, "max_line_length": 119, "avg_line_length": 26.93687707641196, "alnum_prop": 0.6647755303404045, "repo_name": "markogresak/DefinitelyTyped", "id": "ec49f3043d32596e68e713e10b10687717f72828", "size": "8396", "binary": false, "copies": "24", "ref": "refs/heads/master", "path": "types/bitcore-lib/index.d.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CoffeeScript", "bytes": "15" }, { "name": "Protocol Buffer", "bytes": "678" }, { "name": "TypeScript", "bytes": "17426898" } ], "symlink_target": "" }
class Story; class Character; namespace Ui { class characterDialog; } class characterDialog : public QDialog { Q_OBJECT public: explicit characterDialog(QWidget *parent = 0); characterDialog(Character* new_char, QWidget* parent = 0); ~characterDialog(); signals: void newCharacter(); private slots: void on_buttonBox_accepted(); private: Ui::characterDialog *ui; Story* story; Character* character; void populate_views(); }; #endif // CHARACTERDIALOG_H
{ "content_hash": "385a077dc52dd3e162ce91438d662a87", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 60, "avg_line_length": 15.516129032258064, "alnum_prop": 0.7234927234927235, "repo_name": "ROLL-E/roll-e", "id": "e22c3aaca0aa4586051f641775b7a0f7a6453fcd", "size": "555", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Server/characterdialog.h", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "136697" }, { "name": "IDL", "bytes": "4852" }, { "name": "JavaScript", "bytes": "1831" } ], "symlink_target": "" }
'use strict'; var ono = require('ono'); var assign = require('../../util/assign'); var __internal = require('../../util/internal'); var validatePlugins = require('./validatePlugins'); var callSyncPlugin = require('./callSyncPlugin'); var callAsyncPlugin = require('./callAsyncPlugin'); module.exports = PluginHelper; /** * Helper methods for working with plugins. * * @param {object[]|null} plugins - The plugins to use * @param {Schema} schema - The {@link Schema} to apply the plugins to * * @class * @extends Array */ function PluginHelper (plugins, schema) { validatePlugins(plugins); plugins = plugins || []; // Clone the array of plugins, and sort by priority var pluginHelper = plugins.slice().sort(sortByPriority); /** * Internal stuff. Use at your own risk! * * @private */ pluginHelper[__internal] = { /** * A reference to the {@link Schema} object */ schema: schema, }; // Return an array that "inherits" from PluginHelper return assign(pluginHelper, PluginHelper.prototype); } /** * Resolves a URL, relative to a base URL. * * @param {?string} args.from - The base URL to resolve against, if any * @param {string} args.to - The URL to resolve. This may be absolute or relative. * @returns {string} - Returns an absolute URL */ PluginHelper.prototype.resolveURL = function resolveURL (args) { try { var handled = callSyncPlugin(this, 'resolveURL', args); var url = handled.result; var plugin = handled.plugin || { name: '' }; if (url === undefined || url === null) { throw ono('Error in %s.resolveURL: No value was returned', plugin.name); } else if (typeof url !== 'string') { throw ono('Error in %s.resolveURL: The return value was not a string (%s)', plugin.name, typeof url); } else { return url; } } catch (err) { throw ono(err, 'Unable to resolve %s', args.to); } }; /** * Synchronously reads the given file from its source (e.g. web server, filesystem, etc.) 
* * @param {File} args.file - The {@link File} to read */ PluginHelper.prototype.readFileSync = function readFileSync (args) { try { var handled = callSyncPlugin(this, 'readFileSync', args); if (!handled.plugin) { throw ono('Error in readFileSync: No plugin was able to read the file'); } else { // The file was read successfully, so set the file's data args.file.data = handled.result; } } catch (err) { throw ono(err, 'Unable to read %s', args.file.url); } }; /** * Asynchronously reads the given file from its source (e.g. web server, filesystem, etc.) * * @param {File} args.file * The {@link File} to read. Its {@link File#data} property will be set to the file's contents. * In addition, {@link File#mimeType} and {@link File#encoding} may be set, if determinable. * * @param {function} callback * The callback function to call after the file has been read */ PluginHelper.prototype.readFileAsync = function readFileAsync (args, callback) { callAsyncPlugin(this, 'readFileAsync', args, function (err, handled) { if (!err && !handled.plugin) { err = ono('Error in readFileAsync: No plugin was able to read the file'); } if (err) { err = ono(err, 'Unable to read %s', args.file.url); callback(err); } else { if (handled.plugin) { // The file was read successfully, so set the file's data args.file.data = handled.result; } callback(null); } }); }; /** * Decodes the given file's data, in place. * * @param {File} args.file - The {@link File} to decode. */ PluginHelper.prototype.decodeFile = function decodeFile (args) { try { var handled = callSyncPlugin(this, 'decodeFile', args); // NOTE: It's ok if no plugin handles this method. // The file data will just remain in its "raw" format. if (handled.plugin) { // The file was decoded successfully, so update the file's data args.file.data = handled.result; } } catch (err) { throw ono(err, 'Unable to parse %s', args.file.url); } }; /** * Parses the given file's data, in place. * * @param {File} args.file - The {@link File} to parse. 
*/ PluginHelper.prototype.parseFile = function parseFile (args) { try { var handled = callSyncPlugin(this, 'parseFile', args); // NOTE: It's ok if no plugin handles this method. // The file data will just remain in its "raw" format. if (handled.plugin) { // The file was parsed successfully, so update the file's data args.file.data = handled.result; } } catch (err) { throw ono(err, 'Unable to parse %s', args.file.url); } }; /** * Performs final cleanup steps on the schema after all files have been read successfully. */ PluginHelper.prototype.finished = function finished () { try { // NOTE: It's ok if no plugin handles this method. // It's just an opportunity for plugins to perform cleanup tasks if necessary. callSyncPlugin(this, 'finished', {}); } catch (err) { throw ono(err, 'Error finalizing schema'); } }; /** * Used to sort plugins by priority, so that plugins with higher piority come first * in the __plugins array. * * @param {object} pluginA * @param {object} pluginB * @returns {number} */ function sortByPriority (pluginA, pluginB) { return (pluginB.priority || 0) - (pluginA.priority || 0); }
{ "content_hash": "072378d689a10201f576502a4e9e257d", "timestamp": "", "source": "github", "line_count": 191, "max_line_length": 107, "avg_line_length": 28.287958115183248, "alnum_prop": 0.6453821950768092, "repo_name": "BigstickCarpet/json-schema-lib", "id": "e74f81593901841d44f66f1902832f51d7c44965", "size": "5403", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/api/PluginHelper/PluginHelper.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1944" }, { "name": "JavaScript", "bytes": "107768" } ], "symlink_target": "" }
import { NgModule } from '@angular/core'; import { BrowserModule } from '@angular/platform-browser'; import { FormsModule } from '@angular/forms'; import { AppComponent } from './app.component'; import { AngularMultiSelectModule } from 'angular2-multiselect-dropdown/angular2-multiselect-dropdown'; @NgModule({ imports: [ BrowserModule, FormsModule, AngularMultiSelectModule ], declarations: [ AppComponent ], bootstrap: [ AppComponent ] }) export class AppModule { }
{ "content_hash": "fef66b11fe5e1c1f6f7e87002d602eca", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 103, "avg_line_length": 25.7, "alnum_prop": 0.6945525291828794, "repo_name": "CuppaLabs/cuppalabs.github.io", "id": "fe6ede416ac4276fcf6dc61c6c0e49f7e0ca1bec", "size": "514", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "examples/app/app.module.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "160174" }, { "name": "HTML", "bytes": "1699503" }, { "name": "JavaScript", "bytes": "25183" } ], "symlink_target": "" }
module Cognizant module Util module DSLProxyMethodsHandler attr_accessor :attributes def initialize(entity, &dsl_block) @attributes = Hash.new end def method_missing(name, *args, &block) if args.size == 1 and name.to_s =~ /^(.*)=$/ @attributes[$1.to_sym] = args.first elsif args.size == 1 @attributes[name.to_sym] = args.first elsif args.size == 0 and name.to_s =~ /^(.*)!$/ @attributes[$1.to_sym] = true elsif args.empty? and @attributes.key?(name.to_sym) @attributes[name.to_sym] else super end end end end end
{ "content_hash": "b997f8ac567405b477363b7d9738bd42", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 59, "avg_line_length": 26.52, "alnum_prop": 0.55052790346908, "repo_name": "Gurpartap/cognizant", "id": "4a385cf3bf885e8733e68568ce3289d842a5d3a2", "size": "663", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/cognizant/util/dsl_proxy_methods_handler.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "95694" } ], "symlink_target": "" }
package eu.henkelmann.actuarius import org.scalatest.FlatSpec import org.scalatest.Matchers import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner /** * Tests Inline Parsing, i.e. emphasis , strong text, links, escapes etc. */ @RunWith(classOf[JUnitRunner]) class InlineParsersTest extends FlatSpec with Matchers with InlineParsers{ /////////////////////////////////////////////////////////////// // Inline parsing Tests // /////////////////////////////////////////////////////////////// def runSucceedingParsingTests(p:Parser[String], l:List[(String, String)]) { for ((a, b) <- l) { try { apply(p, a) should equal (b) } catch { case e:Throwable => println("Input causing the failure was: '" + a + "'."); throw e; } } } def runExceptionParsingTests(p:Parser[String], l:List[String]) { for (s <- l) an[IllegalArgumentException] should be thrownBy {apply(p, s)} } val italicTests:List[(String, String)] = List( ("*italic*", "<em>italic</em>"), ("*italic * italic*", "<em>italic * italic</em>"), ("_italic_", "<em>italic</em>")) val boldTests = List( ("**bold**", "<strong>bold</strong>"), ("**bold * bold**", "<strong>bold * bold</strong>"), ("__bold__", "<strong>bold</strong>")) val codeTests = List( ("`code`", "<code>code</code>"), ("``code``", "<code>code</code>"), ("` *italic* `", "<code> *italic* </code>"), ("`code\ncode`", "<code>code\ncode</code>"), ("``code ` code``", "<code>code ` code</code>") ) val linkTests = List( ("""[link text](http://example.com "link title")""", """<a href="http://example.com" title="link title">link text</a>"""), ("""[link text](http://example.com )""", """<a href="http://example.com">link text</a>"""), ("""[link text]( http://example.com "link title" )""", """<a href="http://example.com" title="link title">link text</a>"""), ("""[link text]( http://example.com "li)nk" title" )""", """<a href="http://example.com" title="li)nk&quot; title">link text</a>""") ) val fastLinkTests = List( ("""<http://www.example.com?foo=a&bar=b\*>""", 
"""<a href="http://www.example.com?foo=a&amp;bar=b*">http://www.example.com?foo=a&amp;bar=b*</a>""") ) val imageTests = List( ("""![alt text](/src/img.png "img title")""", """<img src="/src/img.png" alt="alt text" title="img title" />"""), ("""![alt text](/src/img.png )""", """<img src="/src/img.png" alt="alt text" />"""), ("""![alt text]( /src/img.png "img title" )""", """<img src="/src/img.png" alt="alt text" title="img title" />"""), ("""![alt text]( /src/img.png "i)mg" title" )""", """<img src="/src/img.png" alt="alt text" title="i)mg&quot; title" />""") ) val brTests = List( (" \n", "<br />\n") ) val xmlNameTests = List( ("foo", "foo"), ("foo_bar", "foo_bar"), ("a", "a") ) val xmlNameExTests = List( "", "foo/bar", "foo<bar", "foo>bar", "foo\"bar", "foo\\bar", "foo bar" ) val xmlStartTagTests = List( ("<foo>", "<foo>"), ("""<foo attr="bar">""", """<foo attr="bar">"""), ("""<foo attr="bar" attr2="baz">""", """<foo attr="bar" attr2="baz">"""), ("""<a href="http://www.example.com?p1=a&p2=b">""", """<a href="http://www.example.com?p1=a&amp;p2=b">"""), ("""<foo attr='bar'>""", """<foo attr='bar'>"""), ("""<foo attr="bar" attr2='baz'>""", """<foo attr="bar" attr2='baz'>"""), ("""<a href='http://www.example.com?p1=a&p2=b'>""", """<a href='http://www.example.com?p1=a&amp;p2=b'>""") ) val xmlEndTagTests = List( ("</foo>", "</foo>"), ("</a>", "</a>") ) val xmlInlineTests = List( ("""hallo <foo attr="&'<>">*italic*</foo> ballo""", """hallo <foo attr="&amp;&apos;&lt;&gt;"><em>italic</em></foo> ballo"""), ("""hallo <foo attr="&'<>"/>*italic*<foo/> ballo""", """hallo <foo attr="&amp;&apos;&lt;&gt;"/><em>italic</em><foo/> ballo"""), ("""hallo <foo attr='&"<>'>*italic*</foo> ballo""", """hallo <foo attr='&amp;&quot;&lt;&gt;'><em>italic</em></foo> ballo"""), ("""hallo <foo attr='&"<>'/>*italic*<foo/> ballo""", """hallo <foo attr='&amp;&quot;&lt;&gt;'/><em>italic</em><foo/> ballo""") ) val mixedTests = List( ("*italic* **bold** *italic*", "<em>italic</em> <strong>bold</strong> 
<em>italic</em>"), ("*italic***bold***italic*", "<em>italic<strong>*bold</strong></em>italic*"), ("***foo***", "<strong><em>foo</em></strong>") ) /** * These should pass the inline replacement unchanged and can be used to be put between "real tests" to simualate * intermediate text. */ val dummyTests = List( ("lorem ipsum ", "lorem ipsum "), (" lorem ipsum", " lorem ipsum"), (" lorem \n ipsum ", " lorem \n ipsum ") ) val allInlineTests = italicTests ++ boldTests ++ codeTests ++ linkTests ++ fastLinkTests ++ imageTests ++ brTests ++ xmlStartTagTests ++ xmlEndTagTests ++ xmlInlineTests ++ dummyTests it should "create italic text" in { runSucceedingParsingTests(emAsterisk(new InlineContext())|emUnderscore(new InlineContext()) , italicTests) } it should "create bold text" in { runSucceedingParsingTests(strongAsterisk(new InlineContext())|strongUnderscore(new InlineContext()), boldTests) } it should "create inline code" in { runSucceedingParsingTests(code, codeTests) } it should "create links" in { runSucceedingParsingTests(link(new InlineContext()), linkTests) } it should "create fast links" in { runSucceedingParsingTests(fastLink(new InlineContext()), fastLinkTests) val p = fastLink(new InlineContext()) an[IllegalArgumentException] should be thrownBy (apply(p, "<this is not a fast link<span>")) } it should "create images" in { runSucceedingParsingTests((elem('!')~>directImg), imageTests) } it should "create line breaks" in { runSucceedingParsingTests(br, brTests) } it should "parse simplified xml identifiers" in { runSucceedingParsingTests(xmlName, xmlNameTests) runExceptionParsingTests(xmlName, xmlNameExTests) } it should "parse opening xml tags and escape their attribute vals" in { runSucceedingParsingTests(xmlStartOrEmptyTag, xmlStartTagTests) } it should "parse closing xml tags" in { runSucceedingParsingTests(xmlEndTag, xmlEndTagTests) } it should "allow inline xml and escape its parameters" in { runSucceedingParsingTests(inline(Map()), xmlInlineTests) } it should 
"parse mixed inline cases" in { runSucceedingParsingTests(inline(Map()), mixedTests) } val ld1 = new LinkDefinition("id", "http://www.example.com", Some("Title")) val ld2 = new LinkDefinition("id 2", "http://other.example.com", Some("Title 2")) val ld3 = new LinkDefinition("id 3", "http://none.example.com", None) val map = Map(ld1.id -> ld1, ld2.id -> ld2, ld3.id -> ld3) val ctx = new InlineContext(map) it should "resolve references" in { val p = ref(ctx) apply(p, "[text][id]") should equal ((ld1, "text")) apply(p, "[text] [id]") should equal ((ld1, "text")) apply(p, "[id][]") should equal ((ld1, "id")) apply(p, "[id] []") should equal ((ld1, "id")) apply(p, "[id]") should equal ((ld1, "id")) apply(p, "[Id]") should equal ((ld1, "Id")) } it should "resolve reference links" in { val p = inline(map) apply(p, "[text][id]") should equal ("""<a href="http://www.example.com" title="Title">text</a>""") apply(p, "[text] [id]") should equal ("""<a href="http://www.example.com" title="Title">text</a>""") apply(p, "[id][]") should equal ("""<a href="http://www.example.com" title="Title">id</a>""") apply(p, "[id] []") should equal ("""<a href="http://www.example.com" title="Title">id</a>""") apply(p, "[id]") should equal ("""<a href="http://www.example.com" title="Title">id</a>""") apply(p, "[Id]") should equal ("""<a href="http://www.example.com" title="Title">Id</a>""") apply(p, "[id] [Id 2]") should equal ("""<a href="http://other.example.com" title="Title 2">id</a>""") apply(p, "[id 3]") should equal ("""<a href="http://none.example.com">id 3</a>""") apply(p, "[foo \"bar\"][id 3]") should equal ("""<a href="http://none.example.com">foo &quot;bar&quot;</a>""") } it should "resolve reference images" in { val p = inline(map) apply(p, "![text][id]") should equal ("""<img src="http://www.example.com" alt="text" title="Title" />""") apply(p, "![text] [id]") should equal ("""<img src="http://www.example.com" alt="text" title="Title" />""") apply(p, "![id][]") should equal 
("""<img src="http://www.example.com" alt="id" title="Title" />""") apply(p, "![id] []") should equal ("""<img src="http://www.example.com" alt="id" title="Title" />""") apply(p, "![id]") should equal ("""<img src="http://www.example.com" alt="id" title="Title" />""") apply(p, "![Id]") should equal ("""<img src="http://www.example.com" alt="Id" title="Title" />""") apply(p, "![id] [Id 2]") should equal ("""<img src="http://other.example.com" alt="id" title="Title 2" />""") apply(p, "![id 3]") should equal ("""<img src="http://none.example.com" alt="id 3" />""") apply(p, "![foo \"bar\"][id 3]") should equal ("""<img src="http://none.example.com" alt="foo &quot;bar&quot;" />""") } it should "handle all inline cases with the inline replacer" in { runSucceedingParsingTests(inline(Map()), allInlineTests) val concatTests = for ( (a1, a2) <- allInlineTests; (b1, b2) <- allInlineTests; (c1, c2) <- allInlineTests) yield (a1+ " " + b1 + " " + c1, a2 + " " + b2 + " " +c2); runSucceedingParsingTests(inline(Map()), concatTests) } }
{ "content_hash": "d69a0b7e282e1b3d8f5b89d54076bae6", "timestamp": "", "source": "github", "line_count": 249, "max_line_length": 130, "avg_line_length": 43.092369477911646, "alnum_prop": 0.5211556383970177, "repo_name": "KimStebel/actuarius", "id": "4e2fc4cdc26c4cc9050bc37e4148c418ba6dcf62", "size": "10730", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/scala/eu/henkelmann/actuarius/InlineParsersTest.scala", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Scala", "bytes": "105942" } ], "symlink_target": "" }
<?php namespace Quartet\BaseApi\Api; class Users extends AbstractApi { /** * @return \Quartet\BaseApi\Entity\User */ public function me() { $data = $this->client->request('get', '/1/users/me'); return $this->entityManager->getEntity('User', $data['user']); } }
{ "content_hash": "b4da156254f03484007ae331293cd91d", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 70, "avg_line_length": 20.333333333333332, "alnum_prop": 0.5868852459016394, "repo_name": "quartetcom/base-api-php-client", "id": "6295297dd2efec3d7535d724c7bcba42ca2a9028", "size": "305", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Api/Users.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "74761" } ], "symlink_target": "" }
<?php namespace SimpleMvc; /** * View Class */ class View { protected $settings; public function __construct($settings) { $this->settings = $settings; } public function rendering($file, $viewData, $header = null, $footer = null) { $eval_header = null; $eval_footer = null; //defaul settings to show html $this->setDefault_eval($viewData); //set data. $eval_forms = $this->file_get_contents_with_eval($file, $viewData); //set header and footer if it is needed if(!empty($header) && !empty($footer)) { //get header and footer $eval_header = $this->file_get_contents_with_eval($header, $viewData); $eval_footer = $this->file_get_contents_with_eval($footer, $viewData); //get whole page view $eval_forms = $eval_header . $eval_forms . $eval_footer; } //flush forms echo $eval_forms; //show debug info if($this->settings->debug_mode) { echo '<pre>' . var_export($viewData, true) . '</pre>'; } } private function file_get_contents_with_eval($file, $viewData) { $search = array(); $replace = array(); foreach ($viewData as $key => $value) { array_push($search, '{' . $key . '}'); array_push($replace, $value); } $forms = file_get_contents($file); $eval_contents = str_replace($search, $replace, $forms); return $eval_contents; } private function setDefault_eval(&$viewData) { $defaultData = [ //base url $this->settings->base_url_view_name => rtrim($this->settings->base_url,"/"), //app title $this->settings->app_title_view_name => rtrim($this->settings->app_title), //view url $this->settings->style_url_view_name => $this->getUrlPath(VIEWS) ]; //combine it to view data $viewData = array_merge($defaultData, $viewData); } private function getUrlPath($path) { $docRoot = dirname($_SERVER['SCRIPT_FILENAME']); $relPath = str_replace($docRoot, '', $path); $urlPath = rtrim($this->settings->base_url, "/") . $relPath . DS . $this->settings->theme; return $urlPath; } }
{ "content_hash": "3740a8591cfec2f98daab4dffdd41671", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 98, "avg_line_length": 27.919540229885058, "alnum_prop": 0.522848909016056, "repo_name": "Jamie-Kim/SimpleMvc", "id": "a3a55450691739357116bd2a57f8769c2d7d53c3", "size": "2648", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "simpleMvc/core/libs/View.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "125" }, { "name": "CSS", "bytes": "1547" }, { "name": "HTML", "bytes": "5760" }, { "name": "JavaScript", "bytes": "2771" }, { "name": "PHP", "bytes": "54115" } ], "symlink_target": "" }
// // SoundEngine.h // SoA // // Created by John Doe on 2/5/16. // Copyright © 2016 John Doe. All rights reserved. // #ifndef SoundEngine_h #define SoundEngine_h #import <AVFoundation/AVFoundation.h> @interface SoundEngine : NSObject @property (strong, nonatomic) AVAudioPlayer* player; - (id) init; - (void) play: (NSString*) fileName type: (NSString*) type; - (void) toggle; @end #endif /* SoundEngine_h */
{ "content_hash": "426c924491e0edfe18d81674c7d16cd4", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 59, "avg_line_length": 16.23076923076923, "alnum_prop": 0.6800947867298578, "repo_name": "nelfurion/IOS-Project", "id": "d9e16a6ee3c251607d1a4e820360c4f2a310a673", "size": "423", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "soa/SoA/SoA/Apis/Media/SoundEngine.h", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "57969" }, { "name": "Swift", "bytes": "1390" } ], "symlink_target": "" }
id: test-runners title: "Test Runners" --- With a few lines of code, you can hook up Playwright to your favorite Java test runner. Playwright and Browser instances can be reused between tests for better performance. We recommend running each test case in a new BrowserContext, this way browser state will be isolated between the tests. <!-- TOC --> ## JUnit In [JUnit](https://junit.org/junit5/) you can initialize [Playwright] and [Browser] in [@BeforeAll](https://junit.org/junit5/docs/current/api/org.junit.jupiter.api/org/junit/jupiter/api/BeforeAll.html) method and destroy them in [@AfterAll](https://junit.org/junit5/docs/current/api/org.junit.jupiter.api/org/junit/jupiter/api/AfterAll.html). In the example below all three test methods use the same [Browser]. Each test uses its own [BrowserContext] and [Page]. ```java package org.example; import com.microsoft.playwright.Browser; import com.microsoft.playwright.BrowserContext; import com.microsoft.playwright.Page; import com.microsoft.playwright.Playwright; import org.junit.jupiter.api.*; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; public class TestExample { // Shared between all tests in this class. static Playwright playwright; static Browser browser; // New instance for each test method. 
BrowserContext context; Page page; @BeforeAll static void launchBrowser() { playwright = Playwright.create(); browser = playwright.chromium().launch(); } @AfterAll static void closeBrowser() { playwright.close(); } @BeforeEach void createContextAndPage() { context = browser.newContext(); page = context.newPage(); } @AfterEach void closeContext() { context.close(); } @Test void shouldClickButton() { page.navigate("data:text/html,<script>var result;</script><button onclick='result=\"Clicked\"'>Go</button>"); page.locator("button").click(); assertEquals("Clicked", page.evaluate("result")); } @Test void shouldCheckTheBox() { page.setContent("<input id='checkbox' type='checkbox'></input>"); page.locator("input").check(); assertTrue((Boolean) page.evaluate("() => window['checkbox'].checked")); } @Test void shouldSearchWiki() { page.navigate("https://www.wikipedia.org/"); page.locator("input[name=\"search\"]").click(); page.locator("input[name=\"search\"]").fill("playwright"); page.locator("input[name=\"search\"]").press("Enter"); assertEquals("https://en.wikipedia.org/wiki/Playwright", page.url()); } } ``` ### Running Tests in Parallel By default JUnit will run all tests sequentially on a single thread. Since JUnit 5.3 you can change this behavior to run tests in parallel to speed up execution (see [this page](https://junit.org/junit5/docs/snapshot/user-guide/index.html#writing-tests-parallel-execution)). Since it is not safe to use same Playwright objects from multiple threads without extra synchronization we recommend you create Playwright instance per thread and use it on that thread exclusively. Here is an example how to run multiple test classes in parallel. 
Use [`@TestInstance(TestInstance.Lifecycle.PER_CLASS)`](https://junit.org/junit5/docs/current/api/org.junit.jupiter.api/org/junit/jupiter/api/TestInstance.html) annotation to make JUnit create one instance of a class for all test methods within that class (by default each JUnit will create a new instance of the class for each test method). Store [Playwright] and [Browser] objects in instance fields. They will be shared between tests. Each instance of the class will use its own copy of Playwright. ```java // Subclasses will inherit PER_CLASS behavior. @TestInstance(TestInstance.Lifecycle.PER_CLASS) class TestFixtures { // Shared between all tests in the class. Playwright playwright; Browser browser; @BeforeAll void launchBrowser() { playwright = Playwright.create(); browser = playwright.chromium().launch(); } @AfterAll void closeBrowser() { playwright.close(); } // New instance for each test method. BrowserContext context; Page page; @BeforeEach void createContextAndPage() { context = browser.newContext(); page = context.newPage(); } @AfterEach void closeContext() { context.close(); } } class Test1 extends TestFixtures { @Test void shouldClickButton() { page.navigate("data:text/html,<script>var result;</script><button onclick='result=\"Clicked\"'>Go</button>"); page.locator("button").click(); assertEquals("Clicked", page.evaluate("result")); } @Test void shouldCheckTheBox() { page.setContent("<input id='checkbox' type='checkbox'></input>"); page.locator("input").check(); assertTrue((Boolean) page.evaluate("() => window['checkbox'].checked")); } @Test void shouldSearchWiki() { page.navigate("https://www.wikipedia.org/"); page.locator("input[name=\"search\"]").click(); page.locator("input[name=\"search\"]").fill("playwright"); page.locator("input[name=\"search\"]").press("Enter"); assertEquals("https://en.wikipedia.org/wiki/Playwright", page.url()); } } class Test2 extends TestFixtures { @Test void shouldReturnInnerHTML() { page.setContent("<div>hello</div>"); 
assertEquals("hello", page.innerHTML("css=div")); } @Test void shouldClickButton() { Page popup = page.waitForPopup(() -> { page.evaluate("window.open('about:blank');"); }); assertEquals("about:blank", popup.url()); } } ``` Configure JUnit to run tests in each class sequentially and run multiple classes on parallel threads (with max number of thread equal to 1/2 of the number of CPU cores): ```bash junit.jupiter.execution.parallel.enabled = true junit.jupiter.execution.parallel.mode.default = same_thread junit.jupiter.execution.parallel.mode.classes.default = concurrent junit.jupiter.execution.parallel.config.strategy=dynamic junit.jupiter.execution.parallel.config.dynamic.factor=0.5 ```
{ "content_hash": "98b10bef7284ad12591109a4301dd228", "timestamp": "", "source": "github", "line_count": 187, "max_line_length": 212, "avg_line_length": 32.22994652406417, "alnum_prop": 0.7192633150821304, "repo_name": "microsoft/playwright", "id": "9daf09cf0fd1e70fd3d0aeda2c3afe66e91487c1", "size": "6031", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "docs/src/test-runners-java.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "291" }, { "name": "C", "bytes": "7890" }, { "name": "C#", "bytes": "2296" }, { "name": "C++", "bytes": "116810" }, { "name": "CMake", "bytes": "2704" }, { "name": "CSS", "bytes": "103622" }, { "name": "HTML", "bytes": "339177" }, { "name": "Java", "bytes": "16271" }, { "name": "JavaScript", "bytes": "75700" }, { "name": "Makefile", "bytes": "377" }, { "name": "Objective-C", "bytes": "63418" }, { "name": "PowerShell", "bytes": "4150" }, { "name": "Python", "bytes": "2965" }, { "name": "Shell", "bytes": "43661" }, { "name": "Svelte", "bytes": "3358" }, { "name": "TypeScript", "bytes": "5956124" }, { "name": "Vue", "bytes": "6225" } ], "symlink_target": "" }
""" Common functions Marco Lui, January 2013 """ from itertools import islice import marshal class Enumerator(object): """ Enumerator object. Returns a larger number each call. Can be used with defaultdict to enumerate a sequence of items. """ def __init__(self, start=0): self.n = start def __call__(self): retval = self.n self.n += 1 return retval def chunk(seq, chunksize): """ Break a sequence into chunks not exceeeding a predetermined size """ seq_iter = iter(seq) while True: chunk = tuple(islice(seq_iter, chunksize)) if not chunk: break yield chunk def unmarshal_iter(path): """ Open a given path and yield an iterator over items unmarshalled from it. """ with open(path, 'rb') as f: while True: try: yield marshal.load(f) except EOFError: break import os, errno def makedir(path): try: os.makedirs(path) except OSError as e: if e.errno != errno.EEXIST: raise import csv def write_weights(weights, path): w = dict(weights) with open(path, 'w') as f: writer = csv.writer(f) try: key_order = sorted(w, key=w.get, reverse=True) except ValueError: # Could not order keys by value, value is probably a vector. # Order keys alphabetically in this case. key_order = sorted(w) for k in key_order: row = [repr(k)] try: row.extend(w[k]) except TypeError: row.append(w[k]) writer.writerow(row) import numpy def read_weights(path): with open(path) as f: reader = csv.reader(f) retval = dict() for row in reader: key = eval(row[0]) #val = numpy.array( map(float,row[1:]) ) val = numpy.array( [float(v) if v != 'nan' else 0. for v in row[1:]] ) retval[key] = val return retval def read_features(path): """ Read a list of features in feature-per-line format, where each feature is a repr and needs to be evaled. @param path path to read from """ with open(path) as f: return map(eval, f) def write_features(features, path): """ Write a list of features to a file at `path`. The repr of each feature is written on a new line. 
@param features list of features to write @param path path to write to """ with open(path,'w') as f: for feat in features: print >>f, repr(feat) def index(seq): """ Build an index for a sequence of items. Assumes that the items in the sequence are unique. @param seq the sequence to index @returns a dictionary from item to position in the sequence """ return dict((k,v) for (v,k) in enumerate(seq)) from itertools import imap from contextlib import contextmanager, closing import multiprocessing as mp @contextmanager def MapPool(processes=None, initializer=None, initargs=None, maxtasksperchild=None, chunksize=1): """ Contextmanager to express the common pattern of not using multiprocessing if only 1 job is allocated (for example for debugging reasons) """ if processes is None: processes = mp.cpu_count() + 4 if processes > 1: with closing( mp.Pool(processes, initializer, initargs, maxtasksperchild)) as pool: f = lambda fn, chunks: pool.imap_unordered(fn, chunks, chunksize=chunksize) yield f else: if initializer is not None: initializer(*initargs) f = imap yield f if processes > 1: pool.join()
{ "content_hash": "cfca8098af0a19ebce260945e4d1f69b", "timestamp": "", "source": "github", "line_count": 140, "max_line_length": 97, "avg_line_length": 24.25, "alnum_prop": 0.655081001472754, "repo_name": "plamenbbn/XDATA", "id": "e7b2d18fb5b70865966d23fe7e62eba8fd88b6ce", "size": "3395", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "textstructure/utilities/langid/langid/train/common.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2911" }, { "name": "C", "bytes": "299272" }, { "name": "C++", "bytes": "40035" }, { "name": "HTML", "bytes": "13611" }, { "name": "Java", "bytes": "1096166" }, { "name": "Makefile", "bytes": "831" }, { "name": "Perl", "bytes": "59273" }, { "name": "Python", "bytes": "9060352" }, { "name": "Shell", "bytes": "3032" } ], "symlink_target": "" }
package TabServer2 // // R E S T s e r v e r - Server Component (TabServer2) // // Copyright (C) Philip Schlump, 2012-2017 -- All rights reserved. // // Do not remove the following lines - used in auto-update. // Version: 1.1.0 // BuildNo: 0391 // FileId: 0005 // File: TabServer2/crud.go // import ( "fmt" "net/http" "time" "github.com/pschlump/Go-FTL/server/goftlmux" "github.com/pschlump/Go-FTL/server/tr" "github.com/pschlump/MiscLib" "github.com/pschlump/godebug" "github.com/pschlump/json" // "encoding/json" ) func Sleep(res http.ResponseWriter, req *http.Request, cfgTag string, rv string, isError bool, cookieList map[string]string, ps *goftlmux.Params, trx *tr.Trx, hdlr *TabServer2Type) (rvOut string, pptFlag PrePostFlagType, exit bool, a_status int) { fmt.Printf("%sAT:%s at top rv = -->>%s<<-- %s\n", MiscLib.ColorBlue, MiscLib.ColorReset, rv, godebug.LF()) type RedirectToData struct { Status string `json:"status"` SleepN int `json:"$sleep$"` } var ed RedirectToData err := json.Unmarshal([]byte(rv), &ed) if err != nil { fmt.Printf("%sAT:%s *** Sleep Ignored - Failed to Parse s %s *** rv = -->>%s<<-- %s\n", MiscLib.ColorBlue, MiscLib.ColorReset, err, rv, godebug.LF()) return rv, PrePostContinue, false, 200 } if ed.SleepN > 0 { slowDown := time.Duration(int64(ed.SleepN)) * time.Second time.Sleep(slowDown) } return rv, PrePostContinue, false, 200 } /* vim: set noai ts=4 sw=4: */
{ "content_hash": "58709a373a96ca8ba6d077e547cddbb3", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 247, "avg_line_length": 28.313725490196077, "alnum_prop": 0.6772853185595568, "repo_name": "pschlump/Go-FTL", "id": "d4129d204f7e1d87619043b4f5bdf10cc3cd6cb3", "size": "1444", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/midlib/TabServer2/Sleep.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "2937165" }, { "name": "HTML", "bytes": "723339" }, { "name": "JavaScript", "bytes": "378751" }, { "name": "M4", "bytes": "719" }, { "name": "Makefile", "bytes": "45369" }, { "name": "PLpgSQL", "bytes": "442199" }, { "name": "Perl", "bytes": "5339" }, { "name": "Roff", "bytes": "128747" }, { "name": "Shell", "bytes": "13191" }, { "name": "TSQL", "bytes": "11034" } ], "symlink_target": "" }
package io.opensec.oval.model.unix; import io.opensec.oval.model.ComponentType; import io.opensec.oval.model.Family; import io.opensec.oval.model.common.CheckEnumeration; import io.opensec.oval.model.definitions.StateRefType; import io.opensec.oval.model.definitions.SystemObjectRefType; import io.opensec.oval.model.definitions.TestType; /** * The shadow test is used to check information from the /etc/shadow file for a specific user. * * @author Akihito Nakamura, AIST * @see <a href="http://oval.mitre.org/language/">OVAL Language</a> */ public class ShadowTest extends TestType { /** * Constructor. */ public ShadowTest() { this( null, 0 ); } public ShadowTest( final String id, final int version ) { this( id, version, null, null ); } public ShadowTest( final String id, final int version, final String comment, final CheckEnumeration check ) { this( id, version, comment, check, null, null ); } public ShadowTest( final String id, final int version, final String comment, final CheckEnumeration check, final SystemObjectRefType object, final StateRefType[] stateList ) { super( id, version, comment, check, object, stateList ); // _oval_platform_type = OvalPlatformType.unix; // _oval_component_type = OvalComponentType.shadow; _oval_family = Family.UNIX; _oval_component = ComponentType.SHADOW; } //************************************************************** // java.lang.Object //************************************************************** @Override public int hashCode() { return super.hashCode(); } @Override public boolean equals( final Object obj ) { if (!(obj instanceof ShadowTest)) { return false; } return super.equals( obj ); } @Override public String toString() { return "shadow_test[" + super.toString() + "]"; } } //ShadowTest
{ "content_hash": "8b90f0cc13a3634d63bf8187fb8c1280", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 94, "avg_line_length": 22.75, "alnum_prop": 0.5164835164835165, "repo_name": "nakamura5akihito/opensec-oval", "id": "0df88504cdb57d17d94b01da8f52218e8d9a57dc", "size": "3025", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/io/opensec/oval/model/unix/ShadowTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "159336" }, { "name": "Java", "bytes": "4301821" }, { "name": "XSLT", "bytes": "66206" } ], "symlink_target": "" }
""" Django settings for rbe project. Generated by 'django-admin startproject' using Django 1.8. For more information on this file, see https://docs.djangoproject.com/en/1.8/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.8/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'o+)#@ur4@hx78g&4(f=qw&o3l!l)l++yo+qfb+&97-hff!-nqy' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'query', 'upload', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) ROOT_URLCONF = 'rbe.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'rbe.wsgi.application' # Database # https://docs.djangoproject.com/en/1.8/ref/settings/#databases 
DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.8/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'America/Chicago' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.8/howto/static-files/ STATIC_URL = '/static/'
{ "content_hash": "899b7d40d5d651c7d8a7455433117b72", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 71, "avg_line_length": 25.634615384615383, "alnum_prop": 0.6864216054013503, "repo_name": "install-logos/RiceBE", "id": "2284cc3f337cba60db061c0f05b751838237b684", "size": "2666", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rbe/rbe/settings.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "12692" } ], "symlink_target": "" }
Experimenting with building and pushing to docker scratch golang containers.
{ "content_hash": "ea0686ea2901830685c40da03bad1bd0", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 76, "avg_line_length": 77, "alnum_prop": 0.8571428571428571, "repo_name": "zet4/go-travis-docker-test", "id": "5565240e65651da4ea150326409ed5f71cf9d0bf", "size": "101", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "1960" }, { "name": "Shell", "bytes": "330" } ], "symlink_target": "" }
package com.ssp.web; import org.avaje.agentloader.AgentLoader; import org.junit.BeforeClass; /** * @author shailesh.patel */ public abstract class AppTestSupport { /** * This is needed to be done once to enhance all the entity objects for Ebean ORM. */ @BeforeClass public static void runOnce() { AgentLoader.loadAgentFromClasspath("avaje-ebeanorm-agent", "debug=1"); } }
{ "content_hash": "4a05ffba324c6de7f286e75c279f09bb", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 86, "avg_line_length": 22.833333333333332, "alnum_prop": 0.6885644768856448, "repo_name": "shailesh17/spring-boot-ebean-starter", "id": "55f8d7a02121ea8085b7c1cb5890162b1b314d29", "size": "411", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/java/com/ssp/web/AppTestSupport.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "28878" } ], "symlink_target": "" }
'use strict'; (function () { var engine = require('./dom-wrapper.js'); require('./plugins/add.js')(engine); require('./plugins/attr.js')(engine); require('./plugins/class.js')(engine); require('./plugins/remove.js')(engine); require('./plugins/events.js')(engine); require('./plugins/style.js')(engine); require('./plugins/text.js')(engine); require('./plugins/utils.js')(engine); require('./tags/text.js')(engine); require('./tags/fragment.js')(engine); if(typeof module !== 'undefined' && module.exports){ module.exports = exports = engine; }else if(typeof window !== 'undefined'){ window.DomWrapper = engine; } })();
{ "content_hash": "5b23e0cd6f2b4da496d45b27f745ebb3", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 53, "avg_line_length": 27.91304347826087, "alnum_prop": 0.6542056074766355, "repo_name": "DavidBM/dom-wrapper", "id": "cfc24822d1928b5c8271e29e0ff3fbcffdbfb6f4", "size": "642", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "30887" } ], "symlink_target": "" }
function clear(n) % CLEAR Delete the C++ ReactorNet object. % clear(n) % :param n: % Instance of class :mat:func:`ReactorNet` % reactornetmethods(1, n.index);
{ "content_hash": "c0f6d3763d540eebbfcb7c8592669ece", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 46, "avg_line_length": 20.625, "alnum_prop": 0.6848484848484848, "repo_name": "Heathckliff/cantera", "id": "a043a977316fd269f2110c1826c28c7d17eb6e6a", "size": "165", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "interfaces/matlab/toolbox/@ReactorNet/clear.m", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "1935303" }, { "name": "C++", "bytes": "6751664" }, { "name": "CSS", "bytes": "2167" }, { "name": "FORTRAN", "bytes": "1175454" }, { "name": "Groff", "bytes": "2843" }, { "name": "HTML", "bytes": "17002" }, { "name": "M", "bytes": "980" }, { "name": "Matlab", "bytes": "284988" }, { "name": "Python", "bytes": "1055361" }, { "name": "Shell", "bytes": "2662" } ], "symlink_target": "" }
<?php namespace Blogger\ShopBundle\Tests\Controller; use Symfony\Bundle\FrameworkBundle\Test\WebTestCase; class DefaultControllerTest extends WebTestCase { public function testIndex() { $client = static::createClient(); $crawler = $client->request('GET', '/hello/Fabien'); $this->assertTrue($crawler->filter('html:contains("Hello Fabien")')->count() > 0); } }
{ "content_hash": "b484da0144cf5ff04a606f17fafc1fb2", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 90, "avg_line_length": 23.58823529411765, "alnum_prop": 0.6758104738154613, "repo_name": "Pappa/symblog", "id": "669913e604592be61296545f75739967e0d443c0", "size": "401", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Blogger/ShopBundle/Tests/Controller/DefaultControllerTest.php", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "82219" }, { "name": "PHP", "bytes": "142987" } ], "symlink_target": "" }
package org.n52.sir.IT; public class ServiceReferenceIT { // TODO add integration tests for adding and removing service references }
{ "content_hash": "01a092c9eee121691e8153e98f5fb1dd", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 76, "avg_line_length": 15.777777777777779, "alnum_prop": 0.7535211267605634, "repo_name": "52North/OpenSensorSearch", "id": "b7a1cef6e6289ab94c4f6d763594b17fd1398eca", "size": "794", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "sir-it/src/test/java/org/n52/sir/IT/ServiceReferenceIT.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "74492" }, { "name": "Eagle", "bytes": "32684" }, { "name": "HTML", "bytes": "41150" }, { "name": "Java", "bytes": "2154313" }, { "name": "JavaScript", "bytes": "2202088" }, { "name": "PLpgSQL", "bytes": "1664" }, { "name": "SQLPL", "bytes": "2543" }, { "name": "XSLT", "bytes": "177185" } ], "symlink_target": "" }
package com.haniokasai.mc.TinyMistress.playground; import com.haniokasai.mc.TinyMistress.srv.OutputLogger; import com.haniokasai.mc.TinyMistress.tools.TinyLogger; import com.haniokasai.mc.TinyMistress.tools.config; import java.io.*; import java.lang.reflect.Field; /** * Created by hani on 2017/03/16. */ class main { }
{ "content_hash": "bf10211976df50cbc44eabb0e88cf6c3", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 55, "avg_line_length": 19.294117647058822, "alnum_prop": 0.774390243902439, "repo_name": "haniokasai/TinyMistress", "id": "9b806eedcd0d81ae6fc6ce2868d85ac900799b48", "size": "328", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/haniokasai/mc/TinyMistress/playground/main.java", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "46" }, { "name": "CSS", "bytes": "127272" }, { "name": "HTML", "bytes": "32311" }, { "name": "Java", "bytes": "36292" }, { "name": "JavaScript", "bytes": "26662" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.5.0) on Thu Apr 28 12:34:54 CEST 2005 --> <TITLE> Dbase </TITLE> <META NAME="keywords" CONTENT="Dbase class"> <LINK REL ="stylesheet" TYPE="text/css" HREF="stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { parent.document.title="Dbase"; } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <body onload="windowTitle();" vlink="#551a8b" alink="#ff0000" link="#0000ee" text="#000000" bgcolor="#ffffff"> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <td bgcolor="#ffffff" class="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV CLASS&nbsp; &nbsp;<A HREF="DbaseAgent.html" 
title="class in &lt;Unnamed&gt;"><B>NEXT CLASS</B></A></FONT></TD> <td bgcolor="#ffffff" class="NavBarCell2"><FONT SIZE="-2"> <A HREF="index.html?Dbase.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="Dbase.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_summary">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_detail">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <!-- ======== START OF CLASS DATA ======== --> <H2> Class Dbase</H2> <PRE> java.lang.Object <IMG SRC="./resources/inherit.gif" ALT="extended by ">com.sri.oaa2.icl.IclDb <IMG SRC="./resources/inherit.gif" ALT="extended by "><B>Dbase</B> </PRE> <HR> <DL> <DT><PRE>public class <B>Dbase</B><DT>extends com.sri.oaa2.icl.IclDb</DL> </PRE> <P> Class that makes an IclDb from a text file. The entries should be one per line. 
<P> <P> <HR> <P> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <A NAME="constructor_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Constructor Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE><B><A HREF="Dbase.html#Dbase(java.lang.String)">Dbase</A></B>(java.lang.String&nbsp;dbaseName)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> </TABLE> &nbsp; <!-- ========== METHOD SUMMARY =========== --> <A NAME="method_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Method Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;com.sri.oaa2.icl.IclList</CODE></FONT></TD> <TD><CODE><B><A HREF="Dbase.html#findAnswers(com.sri.oaa2.icl.IclStruct)">findAnswers</A></B>(com.sri.oaa2.icl.IclStruct&nbsp;query)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Searches the database for answers to a query.</TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_com.sri.oaa2.icl.IclDb"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from class com.sri.oaa2.icl.IclDb</B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE>asserta, assertz, db_Assert, db_Contains, db_getRef, db_PrintDB, db_Replace, db_Retract, db_Retract, db_Solve, db_Solve, db_SolveDebug, erase, replace_all, retract_all, retract, retract, toString</CODE></TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" 
CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from class java.lang.Object</B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE>clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait</CODE></TD> </TR> </TABLE> &nbsp; <P> <!-- ========= CONSTRUCTOR DETAIL ======== --> <A NAME="constructor_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2"> <B>Constructor Detail</B></FONT></TH> </TR> </TABLE> <A NAME="Dbase(java.lang.String)"><!-- --></A><H3> Dbase</H3> <PRE> public <B>Dbase</B>(java.lang.String&nbsp;dbaseName)</PRE> <DL> </DL> <!-- ============ METHOD DETAIL ========== --> <A NAME="method_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2"> <B>Method Detail</B></FONT></TH> </TR> </TABLE> <A NAME="findAnswers(com.sri.oaa2.icl.IclStruct)"><!-- --></A><H3> findAnswers</H3> <PRE> public com.sri.oaa2.icl.IclList <B>findAnswers</B>(com.sri.oaa2.icl.IclStruct&nbsp;query)</PRE> <DL> <DD>Searches the database for answers to a query. 
<P> <DD><DL> <DT><B>Parameters:</B><DD><CODE>query</CODE> - the query to search the database with <DT><B>Returns:</B><DD>an IclList with the unified answer/-s to the query</DL> </DD> </DL> <!-- ========= END OF CLASS DATA ========= --> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <td bgcolor="#ffffff" class="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV CLASS&nbsp; &nbsp;<A HREF="DbaseAgent.html" title="class in &lt;Unnamed&gt;"><B>NEXT CLASS</B></A></FONT></TD> <td bgcolor="#ffffff" class="NavBarCell2"><FONT SIZE="-2"> <A HREF="index.html?Dbase.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="Dbase.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A 
HREF="allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_summary">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_detail">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> </BODY> </HTML>
{ "content_hash": "6554ac225bb0599ac597d8bf3cb02b90", "timestamp": "", "source": "github", "line_count": 260, "max_line_length": 217, "avg_line_length": 36.82692307692308, "alnum_prop": 0.6383289817232376, "repo_name": "TeamSPoon/logicmoo_workspace", "id": "85a6b7568b9c344473de56b8cabe74a4a60bf491", "size": "9575", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packs_sys/logicmoo_nlu/ext/SIRIDUS/dbaseAgent/docs/Dbase.html", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "342" }, { "name": "C", "bytes": "1" }, { "name": "C++", "bytes": "1" }, { "name": "CSS", "bytes": "126627" }, { "name": "HTML", "bytes": "839172" }, { "name": "Java", "bytes": "11116" }, { "name": "JavaScript", "bytes": "238700" }, { "name": "PHP", "bytes": "42253" }, { "name": "Perl 6", "bytes": "23" }, { "name": "Prolog", "bytes": "440882" }, { "name": "PureBasic", "bytes": "1334" }, { "name": "Rich Text Format", "bytes": "3436542" }, { "name": "Roff", "bytes": "42" }, { "name": "Shell", "bytes": "61603" }, { "name": "TeX", "bytes": "99504" } ], "symlink_target": "" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <meta name="GENERATOR" content="VSdocman - documentation generator; https://www.helixoft.com" /> <link rel="icon" href="favicon.ico"> <title>tlece_QuestionVideo.FileAMSUrl Property</title> <link rel="stylesheet" type="text/css" href="msdn2019/toc.css" /> <script src="msdn2019/toc.js"></script> <link rel="stylesheet" type="text/css" href="msdn2019/msdn2019.css"></link> <script src="msdn2019/msdn2019.js" type="text/javascript"></script> <script src="SyntaxHighlighter/scripts/shCore_helixoft.js" type="text/javascript"></script> <script src="SyntaxHighlighter/scripts/shBrushVb.js" type="text/javascript"></script> <script src="SyntaxHighlighter/scripts/shBrushCSharp.js" type="text/javascript"></script> <script src="SyntaxHighlighter/scripts/shBrushFSharp.js" type="text/javascript"></script> <script src="SyntaxHighlighter/scripts/shBrushCpp.js" type="text/javascript"></script> <script src="SyntaxHighlighter/scripts/shBrushJScript.js" type="text/javascript"></script> <link href="SyntaxHighlighter/styles/shCore.css" rel="stylesheet" type="text/css" /> <link href="SyntaxHighlighter/styles/shThemeMsdnLW.css" rel="stylesheet" type="text/css" /> <script type="text/javascript"> SyntaxHighlighter.all(); </script> <link rel="stylesheet" type="text/css" href="vsdocman_overrides.css"></link> </head> <body style="direction: ltr;"> <div id="topic"> <!--HEADER START--> <div id="header"> <div id="header-top-container"> <div id="header-top-parent-container1"> <div id="header-top-container1"> <div id="runningHeaderText1"><a id="headerLogo" href="#" onclick="window.location.href = getCssCustomProperty('--headerLogoLink'); return false;">logo</a></div> <div id="runningHeaderText1b"><script> 
document.write(getCssCustomProperty('--headerTopCustomLineHtml')); </script></div> </div> </div> <div id="header-top-container2"> <div id="runningHeaderText">SOLUTION-WIDE PROPERTIES Reference</div> <div id="search-bar-container"> <form id="search-bar" action="search--.html"> <input id="HeaderSearchInput" type="search" name="search" placeholder="Search" > <button id="btn-search" class="c-glyph" title="Search"> <span>Search</span> </button> </form> <button id="cancel-search" class="cancel-search" title="Cancel"> <span>Cancel</span> </button> </div> </div> </div> <hr /> <div id="header-breadcrumbs"></div> <div id="headerLinks"> </div> <hr /> </div> <!--HEADER END--> <div id="mainSection"> <div id="toc-area"> <div id="toc-container" class="stickthis full-height"> <div id="-1"></div> <div id="c-1"> <div id="ci-1" class="inner-for-height"></div> </div> </div> </div> <div id="mainBody"> <h1 class="title">tlece_QuestionVideo.FileAMSUrl Property</h1> <div class="metadata"> Namespace: <a href="topic_0000000000000265.html">Tlece.Recruitment.Entities</a> <br />Assembly: Tlece.Recruitment (in Tlece.Recruitment.dll) </div> <div class="section_container"> <div id="syntaxSection" class="section"> <div id="syntaxCodeBlocks"> <div class="codeSnippetContainer"> <div class="codeSnippetTabs"> <div class="codeSnippetTabLeftCornerActive"> </div> <div class="codeSnippetTab csFirstTab csActiveTab codeVB"> <a>VB</a> </div> <div class="codeSnippetTab csNaTab codeCsharp"> <a href="javascript: CodeSnippet_SetLanguage('Csharp');">C#</a> </div> <div class="codeSnippetTab csNaTab codeFsharp"> <a href="javascript: CodeSnippet_SetLanguage('Fsharp');">F#</a> </div> <div class="codeSnippetTab csNaTab codeCpp"> <a href="javascript: CodeSnippet_SetLanguage('Cpp');">C++</a> </div> <div class="codeSnippetTab csLastTab csNaTab codeJScript"> <a href="javascript: CodeSnippet_SetLanguage('JScript');">JScript</a> </div> <div class="codeSnippetTabRightCorner"> </div> <div style="clear:both;"> </div> </div> 
<div class="codeSnippetCodeCollection"> <div class="codeSnippetToolbar"> <a title="Copy to clipboard." href="javascript:void(0)" onclick="CopyCode(this);">Copy</a> </div> <div class="codeSnippetCode codeVB"> <pre xml:space="preserve" class="brush: vb">Public Property FileAMSUrl() As <a target="_top" href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a></pre> </div> <div class="codeSnippetCode codeNA"> <pre xml:space="preserve">This language is not supported or no code example is available.</pre> </div> </div> </div> <div class="codeSnippetContainer"> <div class="codeSnippetTabs"> <div class="codeSnippetTabLeftCorner"> </div> <div class="codeSnippetTab csFirstTab csNaTab codeVB"> <a>VB</a> </div> <div class="codeSnippetTab csActiveTab codeCsharp"> <a href="javascript: CodeSnippet_SetLanguage('Csharp');">C#</a> </div> <div class="codeSnippetTab csNaTab codeFsharp"> <a href="javascript: CodeSnippet_SetLanguage('Fsharp');">F#</a> </div> <div class="codeSnippetTab csNaTab codeCpp"> <a href="javascript: CodeSnippet_SetLanguage('Cpp');">C++</a> </div> <div class="codeSnippetTab csLastTab csNaTab codeJScript"> <a href="javascript: CodeSnippet_SetLanguage('JScript');">JScript</a> </div> <div class="codeSnippetTabRightCorner"> </div> <div style="clear:both;"> </div> </div> <div class="codeSnippetCodeCollection"> <div class="codeSnippetToolbar"> <a title="Copy to clipboard." 
href="javascript:void(0)" onclick="CopyCode(this);">Copy</a> </div> <div class="codeSnippetCode codeCsharp"> <pre xml:space="preserve" class="brush: csharp">public <a target="_top" href="https://docs.microsoft.com/en-us/dotnet/api/system.string">string</a> FileAMSUrl {get; set;}</pre> </div> <div class="codeSnippetCode codeNA"> <pre xml:space="preserve">This language is not supported or no code example is available.</pre> </div> </div> </div> <div class="codeSnippetContainer"> <div class="codeSnippetTabs"> <div class="codeSnippetTabLeftCorner"> </div> <div class="codeSnippetTab csFirstTab csNaTab codeVB"> <a>VB</a> </div> <div class="codeSnippetTab csNaTab codeCsharp"> <a href="javascript: CodeSnippet_SetLanguage('Csharp');">C#</a> </div> <div class="codeSnippetTab csNaTab codeFsharp"> <a href="javascript: CodeSnippet_SetLanguage('Fsharp');">F#</a> </div> <div class="codeSnippetTab csActiveTab codeCpp"> <a href="javascript: CodeSnippet_SetLanguage('Cpp');">C++</a> </div> <div class="codeSnippetTab csLastTab csNaTab codeJScript"> <a href="javascript: CodeSnippet_SetLanguage('JScript');">JScript</a> </div> <div class="codeSnippetTabRightCorner"> </div> <div style="clear:both;"> </div> </div> <div class="codeSnippetCodeCollection"> <div class="codeSnippetToolbar"> <a title="Copy to clipboard." 
href="javascript:void(0)" onclick="CopyCode(this);">Copy</a> </div> <div class="codeSnippetCode codeCpp"> <pre xml:space="preserve" class="brush: cpp">public: &nbsp;<br />property <a target="_top" href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a>^ FileAMSUrl {&nbsp;<br /> <a target="_top" href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a>^ get( );&nbsp;<br /> void set(&nbsp;<br /> <a target="_top" href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a>^ value&nbsp;<br /> );&nbsp;<br />}</pre> </div> <div class="codeSnippetCode codeNA"> <pre xml:space="preserve">This language is not supported or no code example is available.</pre> </div> </div> </div> <div class="codeSnippetContainer"> <div class="codeSnippetTabs"> <div class="codeSnippetTabLeftCorner"> </div> <div class="codeSnippetTab csFirstTab csNaTab codeVB"> <a>VB</a> </div> <div class="codeSnippetTab csNaTab codeCsharp"> <a href="javascript: CodeSnippet_SetLanguage('Csharp');">C#</a> </div> <div class="codeSnippetTab csNaTab codeFsharp"> <a href="javascript: CodeSnippet_SetLanguage('Fsharp');">F#</a> </div> <div class="codeSnippetTab csNaTab codeCpp"> <a href="javascript: CodeSnippet_SetLanguage('Cpp');">C++</a> </div> <div class="codeSnippetTab csActiveTab csLastTab codeJScript"> <a href="javascript: CodeSnippet_SetLanguage('JScript');">JScript</a> </div> <div class="codeSnippetTabRightCornerActive"> </div> <div style="clear:both;"> </div> </div> <div class="codeSnippetCodeCollection"> <div class="codeSnippetToolbar"> <a title="Copy to clipboard." 
href="javascript:void(0)" onclick="CopyCode(this);">Copy</a> </div> <div class="codeSnippetCode codeJScript"> <pre xml:space="preserve" class="brush: js">public function get FileAMSUrl() : <a target="_top" href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a>;&nbsp;<br />public function set FileAMSUrl(value : <a target="_top" href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a>);</pre> </div> <div class="codeSnippetCode codeNA"> <pre xml:space="preserve">This language is not supported or no code example is available.</pre> </div> </div> </div> </div> <h4 class="subHeading"> Property Value</h4> <a target="_top" href="https://docs.microsoft.com/en-us/dotnet/api/system.string">string</a> </div> </div> <div class="section_container"> <div class="section_heading"> <span><a href="javascript:void(0)" title="Collapse" onclick="toggleSection(this);">Applies to</a></span> <div>&nbsp;</div> </div> <div id="frameworksSection" class="section"> <h4 class="subHeading">.NET Framework</h4>Supported in: 4.5, 4.5.1, 4.5.2, 4.6, 4.6.1, 4.6.2, 4.7, 4.7.1<br /> </div> </div> <div class="section_container"> <div class="section_heading"> <span><a href="javascript:void(0)" title="Collapse" onclick="toggleSection(this);">See Also</a></span> <div>&nbsp;</div> </div> <div id="seeAlsoSection" class="section"> <div> <a href="topic_0000000000000436.html">tlece_QuestionVideo Class</a><br /> <a href="topic_0000000000000265.html">Tlece.Recruitment.Entities Namespace</a><br /> </div> </div> </div> </div> <div id="internal-toc-area"> <div id="internal-toc-container" class="stickthis"> <h3 id="internal-toc-heading">In this article</h3> <span id="internal-toc-definition-localized-text">Definition</span> </div> </div> </div> <div id="footer"> <div id="footer-container"> <p><span style="color: #FF0000;">Generated with unregistered version of <a target="_top" href="http://www.helixoft.com/vsdocman/overview.html">VSdocman</a></span>&nbsp;<br />Your own footer 
text will only be shown in registered version.</p> </div> </div> </div> </body> </html>
{ "content_hash": "263bfb2aed349f0e4dd51ddbdcb81679", "timestamp": "", "source": "github", "line_count": 391, "max_line_length": 472, "avg_line_length": 30.191815856777495, "alnum_prop": 0.6332909783989835, "repo_name": "asiboro/asiboro.github.io", "id": "7809be72728237322a9cc968d95b184496ec75b3", "size": "11805", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vsdoc/topic_000000000000043C.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "310000" }, { "name": "HTML", "bytes": "135195" }, { "name": "JavaScript", "bytes": "621923" } ], "symlink_target": "" }
/* -------------------------------------------------------------------------- */ /* Copyright 2002-2015, OpenNebula Project (OpenNebula.org), C12G Labs */ /* */ /* Licensed under the Apache License, Version 2.0 (the "License"); you may */ /* not use this file except in compliance with the License. You may obtain */ /* a copy of the License at */ /* */ /* http://www.apache.org/licenses/LICENSE-2.0 */ /* */ /* Unless required by applicable law or agreed to in writing, software */ /* distributed under the License is distributed on an "AS IS" BASIS, */ /* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ /* See the License for the specific language governing permissions and */ /* limitations under the License. */ /* -------------------------------------------------------------------------- */ #ifndef REQUEST_MANAGER_H_ #define REQUEST_MANAGER_H_ #include "ActionManager.h" #include "VirtualMachinePool.h" #include "HostPool.h" #include "UserPool.h" #include "VirtualNetworkPool.h" #include "ImagePool.h" #include "VMTemplatePool.h" #include "GroupPool.h" #include "AuthManager.h" #include <xmlrpc-c/base.hpp> #include <xmlrpc-c/registry.hpp> #include <xmlrpc-c/server_abyss.hpp> using namespace std; extern "C" void * rm_action_loop(void *arg); extern "C" void * rm_xml_server_loop(void *arg); class RequestManager : public ActionListener { public: RequestManager( int _port, int _max_conn, int _max_conn_backlog, int _keepalive_timeout, int _keepalive_max_conn, int _timeout, const string _xml_log_file, const string call_log_format); ~RequestManager(){}; /** * This functions starts the associated listener thread (XML server), and * creates a new thread for the Request Manager. This thread will wait in * an action loop till it receives ACTION_FINALIZE. * @return 0 on success. */ int start(); /** * Gets the thread identification. * @return pthread_t for the manager thread (that in the action loop). 
*/ pthread_t get_thread_id() const { return rm_thread; }; /** * */ void finalize() { am.trigger(ACTION_FINALIZE,0); }; private: //-------------------------------------------------------------------------- // Friends, thread functions require C-linkage //-------------------------------------------------------------------------- friend void * rm_xml_server_loop(void *arg); friend void * rm_action_loop(void *arg); /** * Thread id for the RequestManager */ pthread_t rm_thread; /** * Thread id for the XML Server */ pthread_t rm_xml_server_thread; /** * Port number where the connection will be open */ int port; /* * FD for the XML server socket */ int socket_fd; /** * Max connections */ int max_conn; /* * Max backlog connections */ int max_conn_backlog; /* * Keepalive timeout */ int keepalive_timeout; /* * Keepalive max conn */ int keepalive_max_conn; /* * Timeout */ int timeout; /** * Filename for the log of the xmlrpc server that listens */ string xml_log_file; /** * Action engine for the Manager */ ActionManager am; /** * To register XML-RPC methods */ xmlrpc_c::registry RequestManagerRegistry; /** * The XML-RPC server */ xmlrpc_c::serverAbyss * AbyssServer; /** * The action function executed when an action is triggered. * @param action the name of the action * @param arg arguments for the action function */ void do_action(const string & action, void * arg); /** * Register the XML-RPC API Calls */ void register_xml_methods(); int setup_socket(); }; /* -------------------------------------------------------------------------- */ /* -------------------------------------------------------------------------- */ /* -------------------------------------------------------------------------- */ #endif
{ "content_hash": "450e1c7dc06647983cbba8efe3a3af50", "timestamp": "", "source": "github", "line_count": 179, "max_line_length": 80, "avg_line_length": 26.050279329608937, "alnum_prop": 0.47716062620630495, "repo_name": "Terradue/one", "id": "68c4f863e60f1ff1df0a5f61a4d2fcfdd54b8385", "size": "4663", "binary": false, "copies": "1", "ref": "refs/heads/one-4.10", "path": "include/RequestManager.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "177645" }, { "name": "C++", "bytes": "2722794" }, { "name": "CSS", "bytes": "103072" }, { "name": "HTML", "bytes": "35316" }, { "name": "Java", "bytes": "359372" }, { "name": "JavaScript", "bytes": "1512980" }, { "name": "Lex", "bytes": "10520" }, { "name": "Python", "bytes": "123470" }, { "name": "Roff", "bytes": "101651" }, { "name": "Ruby", "bytes": "2022766" }, { "name": "Shell", "bytes": "625942" }, { "name": "TSQL", "bytes": "24079" }, { "name": "Yacc", "bytes": "34829" } ], "symlink_target": "" }
To use this extension, install it aside of Webmake: $ npm install webmake-sass If you use global installation of Webmake, then extension also needs to be installed globally: $ npm install -g webmake-sass When running Webmake, ask webmake to use it: $ webmake --ext=sass program.js bundle.js Same way when Webmake is used programmatically: webmake(inputPath, { ext: 'sass' }, cb); webmake-sass can be used with any other Webmake extension, e.g.: $ webmake --ext=sass --ext=otherext program.js bundle.js Programmatically: require('./path/to/sassfile'); // injects the resulting CSS automatically into the current page
{ "content_hash": "0483aba886cdab3606515c1a70b970b4", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 99, "avg_line_length": 28.304347826086957, "alnum_prop": 0.7296466973886329, "repo_name": "acdaniel/webmake-sass", "id": "53ce1795a8200fffc0c7be2f296099dea7a8320e", "size": "749", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33261", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "550" } ], "symlink_target": "" }
![XCGLogger][xcglogger-logo] [![badge-language]][swift.org] [![badge-platforms]][swift.org] [![badge-license]][license] [![badge-travis]][travis] [![badge-swiftpm]][swiftpm] [![badge-cocoapods]][cocoapods-xcglogger] [![badge-carthage]][carthage] [![badge-mastodon]][mastodon-davewoodx] [![badge-twitter]][twitter-davewoodx] [![badge-sponsors]][cerebral-gardens] [![badge-patreon]][patreon-davewoodx] ## tl;dr XCGLogger is the original debug log module for use in Swift projects. Swift does not include a C preprocessor so developers are unable to use the debug log `#define` macros they would use in Objective-C. This means our traditional way of generating nice debug logs no longer works. Resorting to just plain old `print` calls means you lose a lot of helpful information, or requires you to type a lot more code. XCGLogger allows you to log details to the console (and optionally a file, or other custom destinations), just like you would have with `NSLog()` or `print()`, but with additional information, such as the date, function name, filename and line number. Go from this: ```Simple message``` to this: ```2014-06-09 06:44:43.600 [Debug] [AppDelegate.swift:40] application(_:didFinishLaunchingWithOptions:): Simple message``` #### Example <img src="https://raw.githubusercontent.com/DaveWoodCom/XCGLogger/master/ReadMeImages/SampleLog.png" alt="Example" style="width: 690px;" /> ### Communication _(Hat Tip AlamoFire)_ * If you need help, use [Stack Overflow][stackoverflow] (Tag '[xcglogger][stackoverflow]'). * If you'd like to ask a general question, use [Stack Overflow][stackoverflow]. * If you've found a bug, open an issue. * If you have a feature request, open an issue. * If you want to contribute, submit a pull request. * If you use XCGLogger, please Star the project on [GitHub][github-xcglogger] ## Installation ### Git Submodule Execute: ```git submodule add https://github.com/DaveWoodCom/XCGLogger.git``` in your repository folder. 
### [Carthage][carthage] Add the following line to your `Cartfile`. ```github "DaveWoodCom/XCGLogger" ~> 7.0.0``` Then run `carthage update --no-use-binaries` or just `carthage update`. For details of the installation and usage of Carthage, visit [its project page][carthage]. Developers running 5.0 and above in Swift will need to add `$(SRCROOT)/Carthage/Build/iOS/ObjcExceptionBridging.framework` to their Input Files in the Copy Carthage Frameworks Build Phase. ### [CocoaPods][cocoapods] Add something similar to the following lines to your `Podfile`. You may need to adjust based on your platform, version/branch etc. ``` source 'https://github.com/CocoaPods/Specs.git' platform :ios, '8.0' use_frameworks! pod 'XCGLogger', '~> 7.0.0' ``` Specifying the pod `XCGLogger` on its own will include the core framework. We're starting to add subspecs to allow you to include optional components as well: `pod 'XCGLogger/UserInfoHelpers', '~> 7.0.0'`: Include some experimental code to help deal with using UserInfo dictionaries to tag log messages. Then run `pod install`. For details of the installation and usage of CocoaPods, visit [its official web site][cocoapods]. Note: Before CocoaPods 1.4.0 it was not possible to use multiple pods with a mixture of Swift versions. You may need to ensure each pod is configured for the correct Swift version (check the targets in the pod project of your workspace). If you manually adjust the Swift version for a project, it'll reset the next time you run `pod install`. You can add a `post_install` hook into your podfile to automate setting the correct Swift versions. This is largely untested, and I'm not sure it's a good solution, but it seems to work: ``` post_install do |installer| installer.pods_project.targets.each do |target| if ['SomeTarget-iOS', 'SomeTarget-watchOS'].include? 
"#{target}" print "Setting #{target}'s SWIFT_VERSION to 4.2\n" target.build_configurations.each do |config| config.build_settings['SWIFT_VERSION'] = '4.2' end else print "Setting #{target}'s SWIFT_VERSION to Undefined (Xcode will automatically resolve)\n" target.build_configurations.each do |config| config.build_settings.delete('SWIFT_VERSION') end end end print "Setting the default SWIFT_VERSION to 3.2\n" installer.pods_project.build_configurations.each do |config| config.build_settings['SWIFT_VERSION'] = '3.2' end end ``` You can adjust that to suit your needs of course. ### [Swift Package Manager][swiftpm] Add the following entry to your package's dependencies: ``` .Package(url: "https://github.com/DaveWoodCom/XCGLogger.git", majorVersion: 7) ``` ### Backwards Compatibility Use: * XCGLogger version [7.0.0][xcglogger-7.0.0] for Swift 5.0 * XCGLogger version [6.1.0][xcglogger-6.1.0] for Swift 4.2 * XCGLogger version [6.0.4][xcglogger-6.0.4] for Swift 4.1 * XCGLogger version [6.0.2][xcglogger-6.0.2] for Swift 4.0 * XCGLogger version [5.0.5][xcglogger-5.0.5] for Swift 3.0-3.2 * XCGLogger version [3.6.0][xcglogger-3.6.0] for Swift 2.3 * XCGLogger version [3.5.3][xcglogger-3.5.3] for Swift 2.2 * XCGLogger version [3.2][xcglogger-3.2] for Swift 2.0-2.1 * XCGLogger version [2.x][xcglogger-2.x] for Swift 1.2 * XCGLogger version [1.x][xcglogger-1.x] for Swift 1.1 and below. ## Basic Usage (Quick Start) _This quick start method is intended just to get you up and running with the logger. You should however use the [advanced usage below](#advanced-usage-recommended) to get the most out of this library._ Add the XCGLogger project as a subproject to your project, and add the appropriate library as a dependency of your target(s). Under the `General` tab of your target, add `XCGLogger.framework` and `ObjcExceptionBridging.framework` to the `Embedded Binaries` section. 
Then, in each source file: ```Swift import XCGLogger ``` In your AppDelegate (or other global file), declare a global constant to the default XCGLogger instance. ```Swift let log = XCGLogger.default ``` In the ```Swift application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]? = nil) // iOS, tvOS ``` or ```Swift applicationDidFinishLaunching(_ notification: Notification) // macOS ``` function, configure the options you need: ```Swift log.setup(level: .debug, showThreadName: true, showLevel: true, showFileNames: true, showLineNumbers: true, writeToFile: "path/to/file", fileLevel: .debug) ``` The value for `writeToFile:` can be a `String` or `URL`. If the file already exists, it will be cleared before we use it. Omit the parameter or set it to `nil` to log to the console only. You can optionally set a different log level for the file output using the `fileLevel:` parameter. Set it to `nil` or omit it to use the same log level as the console. Then, whenever you'd like to log something, use one of the convenience methods: ```Swift log.verbose("A verbose message, usually useful when working on a specific problem") log.debug("A debug message") log.info("An info message, probably useful to power users looking in console.app") log.notice("A notice message") log.warning("A warning message, may indicate a possible error") log.error("An error occurred, but it's recoverable, just info about what happened") log.severe("A severe error occurred, we are likely about to crash now") log.alert("An alert error occurred, a log destination could be made to email someone") log.emergency("An emergency error occurred, a log destination could be made to text someone") ``` The different methods set the log level of the message. XCGLogger will only print messages with a log level that is greater to or equal to its current log level setting. 
So a logger with a level of `.error` will only output log messages with a level of `.error`, `.severe`, `.alert`, or `.emergency`. ## Advanced Usage (Recommended) XCGLogger aims to be simple to use and get you up and running quickly with as few as 2 lines of code above. But it allows for much greater control and flexibility. A logger can be configured to deliver log messages to a variety of destinations. Using the basic setup above, the logger will output log messages to the standard Xcode debug console, and optionally a file if a path is provided. It's quite likely you'll want to send logs to more interesting places, such as the Apple System Console, a database, third party server, or another application such as [NSLogger][NSLogger]. This is accomplished by adding the destination to the logger. Here's an example of configuring the logger to output to the Apple System Log as well as a file. ```Swift // Create a logger object with no destinations let log = XCGLogger(identifier: "advancedLogger", includeDefaultDestinations: false) // Create a destination for the system console log (via NSLog) let systemDestination = AppleSystemLogDestination(identifier: "advancedLogger.systemDestination") // Optionally set some configuration options systemDestination.outputLevel = .debug systemDestination.showLogIdentifier = false systemDestination.showFunctionName = true systemDestination.showThreadName = true systemDestination.showLevel = true systemDestination.showFileName = true systemDestination.showLineNumber = true systemDestination.showDate = true // Add the destination to the logger log.add(destination: systemDestination) // Create a file log destination let fileDestination = FileDestination(writeToFile: "/path/to/file", identifier: "advancedLogger.fileDestination") // Optionally set some configuration options fileDestination.outputLevel = .debug fileDestination.showLogIdentifier = false fileDestination.showFunctionName = true fileDestination.showThreadName = true 
fileDestination.showLevel = true fileDestination.showFileName = true fileDestination.showLineNumber = true fileDestination.showDate = true // Process this destination in the background fileDestination.logQueue = XCGLogger.logQueue // Add the destination to the logger log.add(destination: fileDestination) // Add basic app info, version info etc, to the start of the logs log.logAppDetails() ``` You can configure each log destination with different options depending on your needs. Another common usage pattern is to have multiple loggers, perhaps one for UI issues, one for networking, and another for data issues. Each log destination can have its own log level. As a convenience, you can set the log level on the log object itself and it will pass that level to each destination. Then set the destinations that need to be different. **Note**: A destination object can only be added to one logger object, adding it to a second will remove it from the first. ### Initialization Using A Closure Alternatively you can use a closure to initialize your global variable, so that all initialization is done in one place ```Swift let log: XCGLogger = { let log = XCGLogger(identifier: "advancedLogger", includeDefaultDestinations: false) // Customize as needed return log }() ``` **Note**: This creates the log object lazily, which means it's not created until it's actually needed. This delays the initial output of the app information details. Because of this, I recommend forcing the log object to be created at app launch by adding the line `let _ = log` at the top of your `didFinishLaunching` method if you don't already log something on app launch. 
### Log Anything You can log strings: ```Swift log.debug("Hi there!") ``` or pretty much anything you want: ```Swift log.debug(true) log.debug(CGPoint(x: 1.1, y: 2.2)) log.debug(MyEnum.Option) log.debug((4, 2)) log.debug(["Device": "iPhone", "Version": 7]) ``` ### Filtering Log Messages New to XCGLogger 4, you can now create filters to apply to your logger (or to specific destinations). Create and configure your filters (examples below), and then add them to the logger or destination objects by setting the optional `filters` property to an array containing the filters. Filters are applied in the order they exist in the array. During processing, each filter is asked if the log message should be excluded from the log. If any filter excludes the log message, it's excluded. Filters have no way to reverse the exclusion of another filter. If a destination's `filters` property is `nil`, the log's `filters` property is used instead. To have one destination log everything, while having all other destinations filter something, add the filters to the log object and set the one destination's `filters` property to an empty array `[]`. **Note**: Unlike destinations, you can add the same filter object to multiple loggers and/or multiple destinations. #### Filter by Filename To exclude all log messages from a specific file, create an exclusion filter like so: ```Swift log.filters = [FileNameFilter(excludeFrom: ["AppDelegate.swift"], excludePathWhenMatching: true)] ``` `excludeFrom:` takes an `Array<String>` or `Set<String>` so you can specify multiple files at the same time. `excludePathWhenMatching:` defaults to `true` so you can omit it unless you want to match path's as well. To include log messages only for a specific set to files, create the filter using the `includeFrom:` initializer. It's also possible to just toggle the `inverse` property to flip the exclusion filter to an inclusion filter. 
#### Filter by Tag In order to filter log messages by tag, you must of course be able to set a tag on the log messages. Each log message can now have additional, user defined data attached to them, to be used by filters (and/or formatters etc). This is handled with a `userInfo: Dictionary<String, Any>` object. The dictionary key should be a namespaced string to avoid collisions with future additions. Official keys will begin with `com.cerebralgardens.xcglogger`. The tag key can be accessed by `XCGLogger.Constants.userInfoKeyTags`. You definitely don't want to be typing that, so feel free to create a global shortcut: `let tags = XCGLogger.Constants.userInfoKeyTags`. Now you can easily tag your logs: ```Swift let sensitiveTag = "Sensitive" log.debug("A tagged log message", userInfo: [tags: sensitiveTag]) ``` The value for tags can be an `Array<String>`, `Set<String>`, or just a `String`, depending on your needs. They'll all work the same way when filtered. Depending on your workflow and usage, you'll probably create faster methods to set up the `userInfo` dictionary. See [below](#mixing-and-matching) for other possible shortcuts. Now that you have your logs tagged, you can filter easily: ```Swift log.filters = [TagFilter(excludeFrom: [sensitiveTag])] ``` Just like the `FileNameFilter`, you can use `includeFrom:` or toggle `inverse` to include only log messages that have the specified tags. #### Filter by Developer Filtering by developer is exactly like filtering by tag, only using the `userInfo` key of `XCGLogger.Constants.userInfoKeyDevs`. In fact, both filters are subclasses of the `UserInfoFilter` class that you can use to create additional filters. See [Extending XCGLogger](#extending-xcglogger) below. #### Mixing and Matching In large projects with multiple developers, you'll probably want to start tagging log messages, as well as indicate the developer that added the message. While extremely flexible, the `userInfo` dictionary can be a little cumbersome to use. 
There are a few possible methods you can use to simplify things. I'm still testing these out myself so they're not officially part of the library yet (I'd love feedback or other suggestions).

I have created some experimental code to help create the UserInfo dictionaries. (Include the optional `UserInfoHelpers` subspec if using CocoaPods). Check the iOS Demo app to see it in use.

There are two structs that conform to the `UserInfoTaggingProtocol` protocol: `Tag` and `Dev`.

You can create an extension on each of these that suits your project. For example:

```Swift
extension Tag {
    static let sensitive = Tag("sensitive")
    static let ui = Tag("ui")
    static let data = Tag("data")
}

extension Dev {
    static let dave = Dev("dave")
    static let sabby = Dev("sabby")
}
```

Along with these types, there's an overloaded operator `|` that can be used to merge them together into a dictionary compatible with the `UserInfo:` parameter of the logging calls.

Then you can log messages like this:

```Swift
log.debug("A tagged log message", userInfo: Dev.dave | Tag.sensitive)
```

There are some current issues I see with these `UserInfoHelpers`, which is why I've made it optional/experimental for now. I'd love to hear comments/suggestions for improvements.

1. The overloaded operator `|` merges dictionaries so long as there are no `Set`s. If one of the dictionaries contains a `Set`, it'll use one of them, without merging them, preferring the left hand side if both sides have a set for the same key.
2. Since the `userInfo:` parameter needs a dictionary, you can't pass in a single Dev or Tag object. You need to use at least two with the `|` operator to have it automatically convert to a compatible dictionary. If you only want one Tag for example, you must access the `.dictionary` parameter manually: `userInfo: Tag("Blah").dictionary`.

### Selectively Executing Code

All log methods operate on closures. 
Using the same syntactic sugar as Swift's `assert()` function, this approach ensures we don't waste resources building log messages that won't be output anyway, while at the same time preserving a clean call site. For example, the following log statement won't waste resources if the debug log level is suppressed: ```Swift log.debug("The description of \(thisObject) is really expensive to create") ``` Similarly, let's say you have to iterate through a loop in order to do some calculation before logging the result. In Objective-C, you could put that code block between `#if` `#endif`, and prevent the code from running. But in Swift, previously you would need to still process that loop, wasting resources. With `XCGLogger` it's as simple as: ```Swift log.debug { var total = 0.0 for receipt in receipts { total += receipt.total } return "Total of all receipts: \(total)" } ``` In cases where you wish to selectively execute code without generating a log line, return `nil`, or use one of the methods: `verboseExec`, `debugExec`, `infoExec`, `warningExec`, `errorExec`, and `severeExec`. ### Custom Date Formats You can create your own `DateFormatter` object and assign it to the logger. ```Swift let dateFormatter = DateFormatter() dateFormatter.dateFormat = "MM/dd/yyyy hh:mma" dateFormatter.locale = Locale.current log.dateFormatter = dateFormatter ``` ### Enhancing Log Messages With Colour XCGLogger supports adding formatting codes to your log messages to enable colour in various places. The original option was to use the [XcodeColors plug-in][XcodeColors]. However, Xcode (as of version 8) no longer officially supports plug-ins. You can still view your logs in colour, just not in Xcode at the moment. You can use the ANSI colour support to add colour to your fileDestination objects and view your logs via a terminal window. This gives you some extra options such as adding Bold, Italics, or (please don't) Blinking! Once enabled, each log level can have its own colour. 
These colours can be customized as desired. If using multiple loggers, you could alternatively set each logger to its own colour. An example of setting up the ANSI formatter: ```Swift if let fileDestination: FileDestination = log.destination(withIdentifier: XCGLogger.Constants.fileDestinationIdentifier) as? FileDestination { let ansiColorLogFormatter: ANSIColorLogFormatter = ANSIColorLogFormatter() ansiColorLogFormatter.colorize(level: .verbose, with: .colorIndex(number: 244), options: [.faint]) ansiColorLogFormatter.colorize(level: .debug, with: .black) ansiColorLogFormatter.colorize(level: .info, with: .blue, options: [.underline]) ansiColorLogFormatter.colorize(level: .notice, with: .green, options: [.italic]) ansiColorLogFormatter.colorize(level: .warning, with: .red, options: [.faint]) ansiColorLogFormatter.colorize(level: .error, with: .red, options: [.bold]) ansiColorLogFormatter.colorize(level: .severe, with: .white, on: .red) ansiColorLogFormatter.colorize(level: .alert, with: .white, on: .red, options: [.bold]) ansiColorLogFormatter.colorize(level: .emergency, with: .white, on: .red, options: [.bold, .blink]) fileDestination.formatters = [ansiColorLogFormatter] } ``` As with filters, you can use the same formatter objects for multiple loggers and/or multiple destinations. If a destination's `formatters` property is `nil`, the logger's `formatters` property will be used instead. See [Extending XCGLogger](#extending-xcglogger) below for info on creating your own custom formatters. ### Alternate Configurations By using Swift build flags, different log levels can be used in debugging versus staging/production. Go to Build Settings -> Swift Compiler - Custom Flags -> Other Swift Flags and add `-DDEBUG` to the Debug entry. 
```Swift #if DEBUG log.setup(level: .debug, showThreadName: true, showLevel: true, showFileNames: true, showLineNumbers: true) #else log.setup(level: .severe, showThreadName: true, showLevel: true, showFileNames: true, showLineNumbers: true) #endif ``` You can set any number of options up in a similar fashion. See the updated iOSDemo app for an example of using different log destinations based on options, search for `USE_NSLOG`. ### Background Log Processing By default, the supplied log destinations will process the logs on the thread they're called on. This is to ensure the log message is displayed immediately when debugging an application. You can add a breakpoint immediately after a log call and see the results when the breakpoint hits. However, if you're not actively debugging the application, processing the logs on the current thread can introduce a performance hit. You can now specify a destination process its logs on a dispatch queue of your choice (or even use a default supplied one). ```Swift fileDestination.logQueue = XCGLogger.logQueue ``` or even ```Swift fileDestination.logQueue = DispatchQueue.global(qos: .background) ``` This works extremely well when combined with the [Alternate Configurations](#alternate-configurations) method above. ```Swift #if DEBUG log.setup(level: .debug, showThreadName: true, showLevel: true, showFileNames: true, showLineNumbers: true) #else log.setup(level: .severe, showThreadName: true, showLevel: true, showFileNames: true, showLineNumbers: true) if let consoleLog = log.logDestination(XCGLogger.Constants.baseConsoleDestinationIdentifier) as? ConsoleDestination { consoleLog.logQueue = XCGLogger.logQueue } #endif ``` ### Append To Existing Log File When using the advanced configuration of the logger (see [Advanced Usage above](#advanced-usage-recommended)), you can now specify that the logger append to an existing log file, instead of automatically overwriting it. 
Add the optional `shouldAppend:` parameter when initializing the `FileDestination` object. You can also add the `appendMarker:` parameter to add a marker to the log file indicating where a new instance of your app started appending. By default we'll add `-- ** ** ** --` if the parameter is omitted. Set it to `nil` to skip appending the marker.

```let fileDestination = FileDestination(writeToFile: "/path/to/file", identifier: "advancedLogger.fileDestination", shouldAppend: true, appendMarker: "-- Relaunched App --")```

### Automatic Log File Rotation

When logging to a file, you have the option to automatically rotate the log file to an archived destination, and have the logger automatically create a new log file in place of the old one.

Create a destination using the `AutoRotatingFileDestination` class and set the following properties:

`targetMaxFileSize`: Auto rotate once the file is larger than this

`targetMaxTimeInterval`: Auto rotate after this many seconds

`targetMaxLogFiles`: Number of archived log files to keep, older ones are automatically deleted

Those are all guidelines for the logger, not hard limits.

### Extending XCGLogger

You can create alternate log destinations (besides the built in ones). Your custom log destination must implement the `DestinationProtocol` protocol. Instantiate your object, configure it, and then add it to the `XCGLogger` object with `add(destination:)`.

There are two base destination classes (`BaseDestination` and `BaseQueuedDestination`) you can inherit from to handle most of the process for you, requiring you to only implement one additional method in your custom class. Take a look at `ConsoleDestination` and `FileDestination` for examples.

You can also create custom filters or formatters. Take a look at the provided versions as a starting point. Note that filters and formatters have the ability to alter the log messages as they're processed. 
This means you can create a filter that strips passwords, highlights specific words, encrypts messages, etc.

## Contributing

XCGLogger is the best logger available for Swift because of contributions from community members like you. There are many ways you can help continue to make it great.

1. Star the project on [GitHub][github-xcglogger].
2. Report issues/bugs you find.
3. Suggest features.
4. Submit pull requests.
5. Download and install one of my apps: [https://www.cerebralgardens.com/apps/][cerebral-gardens-apps] Try my newest app: [All the Rings][all-the-rings].
6. You can visit my [Patreon][patreon-davewoodx] and contribute financially.

**Note**: when submitting a pull request, please use lots of small commits versus one huge commit. It makes it much easier to merge in when there are several pull requests that need to be combined for a new version.

<!-- Removed these since plug-ins seem to be gone for good now

## Third Party Tools That Work With XCGLogger

**Note**: These plug-ins no longer 'officially' work in Xcode. File a [bug report](https://openradar.appspot.com/27447585) if you'd like to see plug-ins return to Xcode.

[**XcodeColors:**][XcodeColors] Enable colour in the Xcode console
<br />
[**KZLinkedConsole:**][KZLinkedConsole] Link from a log line directly to the code that produced it

**Note**: These may not yet work with the Swift 4 version of XCGLogger. 
[**XCGLoggerNSLoggerConnector:**][XCGLoggerNSLoggerConnector] Send your logs to [NSLogger][NSLogger] --> ## To Do - Add more examples of some advanced use cases - Add additional log destination types - Add Objective-C support - Add Linux support ## More If you find this library helpful, you'll definitely find this other tool helpful: Watchdog: https://watchdogforxcode.com/ Also, please check out some of my other projects: - All the Rings: [App Store](https://itunes.apple.com/app/all-the-rings/id1186956966?pt=17255&ct=github&mt=8&at=11lMGu) - Rudoku: [App Store](https://itunes.apple.com/app/rudoku/id965105321?pt=17255&ct=github&mt=8&at=11lMGu) - TV Tune Up: https://www.cerebralgardens.com/tvtuneup ### Change Log The change log is now in its own file: [CHANGELOG.md](CHANGELOG.md) [xcglogger-logo]: https://github.com/DaveWoodCom/XCGLogger/raw/master/ReadMeImages/XCGLoggerLogo_326x150.png [swift.org]: https://swift.org/ [license]: https://github.com/DaveWoodCom/XCGLogger/blob/master/LICENSE.txt [travis]: https://travis-ci.org/DaveWoodCom/XCGLogger [swiftpm]: https://swift.org/package-manager/ [cocoapods]: https://cocoapods.org/ [cocoapods-xcglogger]: https://cocoapods.org/pods/XCGLogger [carthage]: https://github.com/Carthage/Carthage [cerebral-gardens]: https://www.cerebralgardens.com/ [cerebral-gardens-apps]: https://www.cerebralgardens.com/apps/ [all-the-rings]: https://alltherings.fit/?s=GH3 [mastodon-davewoodx]: https://mastodon.social/@davewoodx [twitter-davewoodx]: https://twitter.com/davewoodx [github-xcglogger]: https://github.com/DaveWoodCom/XCGLogger [stackoverflow]: https://stackoverflow.com/questions/tagged/xcglogger [patreon-davewoodx]: https://www.patreon.com/DaveWoodX [badge-language]: https://img.shields.io/badge/Swift-1.x%20%7C%202.x%20%7C%203.x%20%7C%204.x%20%7C%205.x-orange.svg?style=flat [badge-platforms]: https://img.shields.io/badge/Platforms-macOS%20%7C%20iOS%20%7C%20tvOS%20%7C%20watchOS-lightgray.svg?style=flat [badge-license]: 
https://img.shields.io/badge/License-MIT-lightgrey.svg?style=flat [badge-travis]: https://img.shields.io/travis/DaveWoodCom/XCGLogger/master.svg?style=flat [badge-swiftpm]: https://img.shields.io/badge/Swift_Package_Manager-v7.0.0-64a6dd.svg?style=flat [badge-cocoapods]: https://img.shields.io/cocoapods/v/XCGLogger.svg?style=flat [badge-carthage]: https://img.shields.io/badge/Carthage-v7.0.0-64a6dd.svg?style=flat [badge-sponsors]: https://img.shields.io/badge/Sponsors-Cerebral%20Gardens-orange.svg?style=flat [badge-mastodon]: https://img.shields.io/badge/Mastodon-DaveWoodX-606A84.svg?style=flat [badge-twitter]: https://img.shields.io/twitter/follow/DaveWoodX.svg?style=social [badge-patreon]: https://img.shields.io/badge/Patreon-DaveWoodX-F96854.svg?style=flat [XcodeColors]: https://github.com/robbiehanson/XcodeColors [KZLinkedConsole]: https://github.com/krzysztofzablocki/KZLinkedConsole [NSLogger]: https://github.com/fpillet/NSLogger [XCGLoggerNSLoggerConnector]: https://github.com/markuswinkler/XCGLoggerNSLoggerConnector [Firelog]: http://jogabo.github.io/firelog/ [Firebase]: https://www.firebase.com/ [xcglogger-7.0.0]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/7.0.0 [xcglogger-6.1.0]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/6.1.0 [xcglogger-6.0.4]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/6.0.4 [xcglogger-6.0.2]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/6.0.2 [xcglogger-5.0.5]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/5.0.5 [xcglogger-3.6.0]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/3.6.0 [xcglogger-3.5.3]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/3.5.3 [xcglogger-3.2]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/3.2.0 [xcglogger-2.x]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/2.4.0 [xcglogger-1.x]: https://github.com/DaveWoodCom/XCGLogger/releases/tag/1.8.1
{ "content_hash": "ebf8a0fd67f4679743f1ffea24a65cbd", "timestamp": "", "source": "github", "line_count": 596, "max_line_length": 688, "avg_line_length": 51.00335570469799, "alnum_prop": 0.7634712810053293, "repo_name": "adonoho/XCGLogger", "id": "ef38e96ad5afb0d5ec4d0149a2560110e760cba5", "size": "30398", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "1956" }, { "name": "Ruby", "bytes": "3040" }, { "name": "Swift", "bytes": "249699" } ], "symlink_target": "" }
<html lang="en"> <head> <title>Fast enumeration protocol - Using the GNU Compiler Collection (GCC)</title> <meta http-equiv="Content-Type" content="text/html"> <meta name="description" content="Using the GNU Compiler Collection (GCC)"> <meta name="generator" content="makeinfo 4.13"> <link title="Top" rel="start" href="index.html#Top"> <link rel="up" href="Fast-enumeration.html#Fast-enumeration" title="Fast enumeration"> <link rel="prev" href="Fast-enumeration-details.html#Fast-enumeration-details" title="Fast enumeration details"> <link href="http://www.gnu.org/software/texinfo/" rel="generator-home" title="Texinfo Homepage"> <!-- Copyright (C) 1988-2013 Free Software Foundation, Inc. Permission is granted to copy, distribute and/or modify this document under the terms of the GNU Free Documentation License, Version 1.3 or any later version published by the Free Software Foundation; with the Invariant Sections being ``Funding Free Software'', the Front-Cover Texts being (a) (see below), and with the Back-Cover Texts being (b) (see below). A copy of the license is included in the section entitled ``GNU Free Documentation License''. (a) The FSF's Front-Cover Text is: A GNU Manual (b) The FSF's Back-Cover Text is: You have freedom to copy and modify this GNU Manual, like GNU software. 
Copies published by the Free Software Foundation raise funds for GNU development.--> <meta http-equiv="Content-Style-Type" content="text/css"> <style type="text/css"><!-- pre.display { font-family:inherit } pre.format { font-family:inherit } pre.smalldisplay { font-family:inherit; font-size:smaller } pre.smallformat { font-family:inherit; font-size:smaller } pre.smallexample { font-size:smaller } pre.smalllisp { font-size:smaller } span.sc { font-variant:small-caps } span.roman { font-family:serif; font-weight:normal; } span.sansserif { font-family:sans-serif; font-weight:normal; } --></style> <link rel="stylesheet" type="text/css" href="../cs.css"> </head> <body> <div class="node"> <a name="Fast-enumeration-protocol"></a> <p> Previous:&nbsp;<a rel="previous" accesskey="p" href="Fast-enumeration-details.html#Fast-enumeration-details">Fast enumeration details</a>, Up:&nbsp;<a rel="up" accesskey="u" href="Fast-enumeration.html#Fast-enumeration">Fast enumeration</a> <hr> </div> <h4 class="subsection">8.9.4 Fast enumeration protocol</h4> <p>If you want your own collection object to be usable with fast enumeration, you need to have it implement the method <pre class="smallexample"> - (unsigned long) countByEnumeratingWithState: (NSFastEnumerationState *)state objects: (id *)objects count: (unsigned long)len; </pre> <p>where <code>NSFastEnumerationState</code> must be defined in your code as follows: <pre class="smallexample"> typedef struct { unsigned long state; id *itemsPtr; unsigned long *mutationsPtr; unsigned long extra[5]; } NSFastEnumerationState; </pre> <p>If no <code>NSFastEnumerationState</code> is defined in your code, the compiler will automatically replace <code>NSFastEnumerationState *</code> with <code>struct __objcFastEnumerationState *</code>, where that type is silently defined by the compiler in an identical way. This can be confusing and we recommend that you define <code>NSFastEnumerationState</code> (as shown above) instead. 
<p>The method is called repeatedly during a fast enumeration to retrieve batches of objects. Each invocation of the method should retrieve the next batch of objects.

<p>The return value of the method is the number of objects in the current batch; this should not exceed <code>len</code>, which is the maximum size of a batch as requested by the caller. The batch itself is returned in the <code>itemsPtr</code> field of the <code>NSFastEnumerationState</code> struct.

<p>To help with returning the objects, the <code>objects</code> array is a C array preallocated by the caller (on the stack) of size <code>len</code>. In many cases you can put the objects you want to return in that <code>objects</code> array, then do <code>itemsPtr = objects</code>. But you don't have to; if your collection already has the objects to return in some form of C array, it could return them from there instead.

<p>The <code>state</code> and <code>extra</code> fields of the <code>NSFastEnumerationState</code> structure allow your collection object to keep track of the state of the enumeration. In a simple array implementation, <code>state</code> may keep track of the index of the last object that was returned, and <code>extra</code> may be unused.

<p>The <code>mutationsPtr</code> field of the <code>NSFastEnumerationState</code> is used to keep track of mutations. It should point to a number; before working on each object, the fast enumeration loop will check that this number has not changed. If it has, a mutation has happened and the fast enumeration will abort. So, <code>mutationsPtr</code> could be set to point to some sort of version number of your collection, which is increased by one every time there is a change (for example when an object is added or removed). Or, if you are content with less strict mutation checks, it could point to the number of objects in your collection or some other value that can be checked to perform an approximate check that the collection has not been mutated. 
<p>Finally, note how we declared the <code>len</code> argument and the return value to be of type <code>unsigned long</code>. They could also be declared to be of type <code>unsigned int</code> and everything would still work. <!-- ========================================================================= --> </body></html>
{ "content_hash": "ef22d45c1f506ca3855a3fc3ce8c692f", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 138, "avg_line_length": 48.6, "alnum_prop": 0.72599451303155, "repo_name": "teaguejt/xinu-rwb", "id": "a5c12151428b4ad93d7ac35bf0ac43a3663d244e", "size": "5832", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "cross_compiler/share/doc/arm-arm-none-eabi/html/gcc/Fast-enumeration-protocol.html", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "6173" }, { "name": "C", "bytes": "4149847" }, { "name": "C++", "bytes": "8386130" }, { "name": "CSS", "bytes": "4376" }, { "name": "Groff", "bytes": "2490635" }, { "name": "HTML", "bytes": "14436568" }, { "name": "Lex", "bytes": "1439" }, { "name": "Logos", "bytes": "9040" }, { "name": "Makefile", "bytes": "7220" }, { "name": "Objective-C", "bytes": "248283" }, { "name": "PHP", "bytes": "8650" }, { "name": "Perl", "bytes": "1492" }, { "name": "Python", "bytes": "152128" }, { "name": "Shell", "bytes": "37861" }, { "name": "XC", "bytes": "8725" }, { "name": "XS", "bytes": "8497" }, { "name": "Yacc", "bytes": "14595" } ], "symlink_target": "" }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Diagnostics; using System.Linq; using Microsoft.CodeAnalysis.CSharp.Symbols; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.PooledObjects; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.CSharp { /// <summary> /// This portion of the binder converts an <see cref="ExpressionSyntax"/> into a <see cref="BoundExpression"/>. /// </summary> internal partial class Binder { private BoundExpression BindMethodGroup(ExpressionSyntax node, bool invoked, bool indexed, DiagnosticBag diagnostics) { switch (node.Kind()) { case SyntaxKind.IdentifierName: case SyntaxKind.GenericName: return BindIdentifier((SimpleNameSyntax)node, invoked, indexed, diagnostics); case SyntaxKind.SimpleMemberAccessExpression: case SyntaxKind.PointerMemberAccessExpression: return BindMemberAccess((MemberAccessExpressionSyntax)node, invoked, indexed, diagnostics); case SyntaxKind.ParenthesizedExpression: return BindMethodGroup(((ParenthesizedExpressionSyntax)node).Expression, invoked: false, indexed: false, diagnostics: diagnostics); default: return BindExpression(node, diagnostics, invoked, indexed); } } private static ImmutableArray<MethodSymbol> GetOriginalMethods(OverloadResolutionResult<MethodSymbol> overloadResolutionResult) { // If overload resolution has failed then we want to stash away the original methods that we // considered so that the IDE can display tooltips or other information about them. // However, if a method group contained a generic method that was type inferred then // the IDE wants information about the *inferred* method, not the original unconstructed // generic method. 
if (overloadResolutionResult == null) { return ImmutableArray<MethodSymbol>.Empty; } var builder = ArrayBuilder<MethodSymbol>.GetInstance(); foreach (var result in overloadResolutionResult.Results) { builder.Add(result.Member); } return builder.ToImmutableAndFree(); } /// <summary> /// Helper method to create a synthesized method invocation expression. /// </summary> /// <param name="node">Syntax Node.</param> /// <param name="receiver">Receiver for the method call.</param> /// <param name="methodName">Method to be invoked on the receiver.</param> /// <param name="args">Arguments to the method call.</param> /// <param name="diagnostics">Diagnostics.</param> /// <param name="typeArgsSyntax">Optional type arguments syntax.</param> /// <param name="typeArgs">Optional type arguments.</param> /// <param name="queryClause">The syntax for the query clause generating this invocation expression, if any.</param> /// <param name="allowFieldsAndProperties">True to allow invocation of fields and properties of delegate type. 
Only methods are allowed otherwise.</param> /// <param name="allowUnexpandedForm">False to prevent selecting a params method in unexpanded form.</param> /// <returns>Synthesized method invocation expression.</returns> internal BoundExpression MakeInvocationExpression( SyntaxNode node, BoundExpression receiver, string methodName, ImmutableArray<BoundExpression> args, DiagnosticBag diagnostics, SeparatedSyntaxList<TypeSyntax> typeArgsSyntax = default(SeparatedSyntaxList<TypeSyntax>), ImmutableArray<TypeWithAnnotations> typeArgs = default(ImmutableArray<TypeWithAnnotations>), CSharpSyntaxNode queryClause = null, bool allowFieldsAndProperties = false, bool allowUnexpandedForm = true) { Debug.Assert(receiver != null); receiver = BindToNaturalType(receiver, diagnostics); var boundExpression = BindInstanceMemberAccess(node, node, receiver, methodName, typeArgs.NullToEmpty().Length, typeArgsSyntax, typeArgs, invoked: true, indexed: false, diagnostics); // The other consumers of this helper (await and collection initializers) require the target member to be a method. 
if (!allowFieldsAndProperties && (boundExpression.Kind == BoundKind.FieldAccess || boundExpression.Kind == BoundKind.PropertyAccess)) { Symbol symbol; MessageID msgId; if (boundExpression.Kind == BoundKind.FieldAccess) { msgId = MessageID.IDS_SK_FIELD; symbol = ((BoundFieldAccess)boundExpression).FieldSymbol; } else { msgId = MessageID.IDS_SK_PROPERTY; symbol = ((BoundPropertyAccess)boundExpression).PropertySymbol; } diagnostics.Add( ErrorCode.ERR_BadSKknown, node.Location, methodName, msgId.Localize(), MessageID.IDS_SK_METHOD.Localize()); return BadExpression(node, LookupResultKind.Empty, ImmutableArray.Create(symbol), args.Add(receiver)); } boundExpression = CheckValue(boundExpression, BindValueKind.RValueOrMethodGroup, diagnostics); boundExpression.WasCompilerGenerated = true; var analyzedArguments = AnalyzedArguments.GetInstance(); Debug.Assert(!args.Any(e => e.Kind == BoundKind.OutVariablePendingInference || e.Kind == BoundKind.OutDeconstructVarPendingInference || e.Kind == BoundKind.DiscardExpression && !e.HasExpressionType())); analyzedArguments.Arguments.AddRange(args); BoundExpression result = BindInvocationExpression( node, node, methodName, boundExpression, analyzedArguments, diagnostics, queryClause, allowUnexpandedForm: allowUnexpandedForm); // Query operator can't be called dynamically. if (queryClause != null && result.Kind == BoundKind.DynamicInvocation) { // the error has already been reported by BindInvocationExpression Debug.Assert(diagnostics.HasAnyErrors()); result = CreateBadCall(node, boundExpression, LookupResultKind.Viable, analyzedArguments); } result.WasCompilerGenerated = true; analyzedArguments.Free(); return result; } /// <summary> /// Bind an expression as a method invocation. 
/// </summary> private BoundExpression BindInvocationExpression( InvocationExpressionSyntax node, DiagnosticBag diagnostics) { BoundExpression result; if (TryBindNameofOperator(node, diagnostics, out result)) { return result; // all of the binding is done by BindNameofOperator } // M(__arglist()) is legal, but M(__arglist(__arglist()) is not! bool isArglist = node.Expression.Kind() == SyntaxKind.ArgListExpression; AnalyzedArguments analyzedArguments = AnalyzedArguments.GetInstance(); if (isArglist) { BindArgumentsAndNames(node.ArgumentList, diagnostics, analyzedArguments, allowArglist: false); result = BindArgListOperator(node, diagnostics, analyzedArguments); } else { BoundExpression boundExpression = BindMethodGroup(node.Expression, invoked: true, indexed: false, diagnostics: diagnostics); boundExpression = CheckValue(boundExpression, BindValueKind.RValueOrMethodGroup, diagnostics); string name = boundExpression.Kind == BoundKind.MethodGroup ? GetName(node.Expression) : null; BindArgumentsAndNames(node.ArgumentList, diagnostics, analyzedArguments, allowArglist: true); result = BindInvocationExpression(node, node.Expression, name, boundExpression, analyzedArguments, diagnostics); } analyzedArguments.Free(); return result; } private BoundExpression BindArgListOperator(InvocationExpressionSyntax node, DiagnosticBag diagnostics, AnalyzedArguments analyzedArguments) { bool hasErrors = analyzedArguments.HasErrors; // We allow names, oddly enough; M(__arglist(x : 123)) is legal. We just ignore them. 
TypeSymbol objType = GetSpecialType(SpecialType.System_Object, diagnostics, node); for (int i = 0; i < analyzedArguments.Arguments.Count; ++i) { BoundExpression argument = analyzedArguments.Arguments[i]; if (argument.Kind == BoundKind.OutVariablePendingInference) { analyzedArguments.Arguments[i] = ((OutVariablePendingInference)argument).FailInference(this, diagnostics); } else if ((object)argument.Type == null && !argument.HasAnyErrors) { // We are going to need every argument in here to have a type. If we don't have one, // try converting it to object. We'll either succeed (if it is a null literal) // or fail with a good error message. // // Note that the native compiler converts null literals to object, and for everything // else it either crashes, or produces nonsense code. Roslyn improves upon this considerably. analyzedArguments.Arguments[i] = GenerateConversionForAssignment(objType, argument, diagnostics); } else if (argument.Type.IsVoidType()) { Error(diagnostics, ErrorCode.ERR_CantUseVoidInArglist, argument.Syntax); hasErrors = true; } else if (analyzedArguments.RefKind(i) == RefKind.None) { analyzedArguments.Arguments[i] = BindToNaturalType(analyzedArguments.Arguments[i], diagnostics); } switch (analyzedArguments.RefKind(i)) { case RefKind.None: case RefKind.Ref: break; default: // Disallow "in" or "out" arguments Error(diagnostics, ErrorCode.ERR_CantUseInOrOutInArglist, argument.Syntax); hasErrors = true; break; } } ImmutableArray<BoundExpression> arguments = analyzedArguments.Arguments.ToImmutable(); ImmutableArray<RefKind> refKinds = analyzedArguments.RefKinds.ToImmutableOrNull(); return new BoundArgListOperator(node, arguments, refKinds, null, hasErrors); } /// <summary> /// Bind an expression as a method invocation. 
/// </summary>
/// <param name="node">Syntax of the whole invocation.</param>
/// <param name="expression">Syntax of the invoked expression (including any receiver).</param>
/// <param name="methodName">Name of the invoked method when the target is a method group, otherwise null.</param>
/// <param name="boundExpression">The already-bound invoked expression.</param>
/// <param name="analyzedArguments">The already-bound arguments.</param>
/// <param name="diagnostics">Bag receiving diagnostics.</param>
/// <param name="queryClause">Query clause that generated this invocation, if any.</param>
/// <param name="allowUnexpandedForm">Whether overload resolution may consider the normal (unexpanded) form of params methods.</param>
private BoundExpression BindInvocationExpression(
    SyntaxNode node,
    SyntaxNode expression,
    string methodName,
    BoundExpression boundExpression,
    AnalyzedArguments analyzedArguments,
    DiagnosticBag diagnostics,
    CSharpSyntaxNode queryClause = null,
    bool allowUnexpandedForm = true)
{
    BoundExpression result;
    NamedTypeSymbol delegateType;

    if ((object)boundExpression.Type != null && boundExpression.Type.IsDynamic())
    {
        // Either we have a dynamic method group invocation "dyn.M(...)" or
        // a dynamic delegate invocation "dyn(...)" -- either way, bind it as a dynamic
        // invocation and let the lowering pass sort it out.
        ReportSuppressionIfNeeded(boundExpression, diagnostics);
        result = BindDynamicInvocation(node, boundExpression, analyzedArguments, ImmutableArray<MethodSymbol>.Empty, diagnostics, queryClause);
    }
    else if (boundExpression.Kind == BoundKind.MethodGroup)
    {
        ReportSuppressionIfNeeded(boundExpression, diagnostics);
        result = BindMethodGroupInvocation(
            node, expression, methodName, (BoundMethodGroup)boundExpression, analyzedArguments,
            diagnostics, queryClause, allowUnexpandedForm: allowUnexpandedForm, anyApplicableCandidates: out _);
    }
    else if ((object)(delegateType = GetDelegateType(boundExpression)) != null)
    {
        // Invoking a value of delegate type.
        if (ReportDelegateInvokeUseSiteDiagnostic(diagnostics, delegateType, node: node))
        {
            return CreateBadCall(node, boundExpression, LookupResultKind.Viable, analyzedArguments);
        }

        result = BindDelegateInvocation(node, expression, methodName, boundExpression, analyzedArguments, diagnostics, queryClause, delegateType);
    }
    else
    {
        // Not dynamic, not a method group, not a delegate: the expression is not invocable.
        if (!boundExpression.HasAnyErrors)
        {
            diagnostics.Add(new CSDiagnosticInfo(ErrorCode.ERR_MethodNameExpected), expression.Location);
        }

        result = CreateBadCall(node, boundExpression, LookupResultKind.NotInvocable, analyzedArguments);
    }

    // Receivers of restricted types (e.g. TypedReference) must never be boxed; check the produced call.
    CheckRestrictedTypeReceiver(result, this.Compilation, diagnostics);

    return result;
}

/// <summary>
/// Binds an invocation that must be dispatched dynamically at runtime, producing a
/// BoundDynamicInvocation whose type is dynamic. Normalizes the receiver of a method
/// group target (base/this/type-or-value) into a form the runtime binder can consume.
/// </summary>
private BoundExpression BindDynamicInvocation(
    SyntaxNode node,
    BoundExpression expression,
    AnalyzedArguments arguments,
    ImmutableArray<MethodSymbol> applicableMethods,
    DiagnosticBag diagnostics,
    CSharpSyntaxNode queryClause)
{
    CheckNamedArgumentsForDynamicInvocation(arguments, diagnostics);

    bool hasErrors = false;
    if (expression.Kind == BoundKind.MethodGroup)
    {
        BoundMethodGroup methodGroup = (BoundMethodGroup)expression;
        BoundExpression receiver = methodGroup.ReceiverOpt;

        // receiver is null if we are calling a static method declared on an outer class via its simple name:
        if (receiver != null)
        {
            switch (receiver.Kind)
            {
                case BoundKind.BaseReference:
                    // base.M(...) cannot be dispatched dynamically (there is no "phantom method" support).
                    Error(diagnostics, ErrorCode.ERR_NoDynamicPhantomOnBase, node, methodGroup.Name);
                    hasErrors = true;
                    break;

                case BoundKind.ThisReference:
                    // Can't call the HasThis method due to EE doing odd things with containing member and its containing type.
                    if ((InConstructorInitializer || InFieldInitializer) && receiver.WasCompilerGenerated)
                    {
                        // Only a static method can be called in a constructor initializer. If we were not in a ctor initializer
                        // the runtime binder would ignore the receiver, but in a ctor initializer we can't read "this" before
                        // the base constructor is called. We need to handle this as a type qualified static method call.
                        // Also applicable to things like field initializers, which run before the ctor initializer.
                        expression = methodGroup.Update(
                            methodGroup.TypeArgumentsOpt,
                            methodGroup.Name,
                            methodGroup.Methods,
                            methodGroup.LookupSymbolOpt,
                            methodGroup.LookupError,
                            methodGroup.Flags & ~BoundMethodGroupFlags.HasImplicitReceiver,
                            receiverOpt: new BoundTypeExpression(node, null, this.ContainingType).MakeCompilerGenerated(),
                            resultKind: methodGroup.ResultKind);
                    }
                    break;

                case BoundKind.TypeOrValueExpression:
                    var typeOrValue = (BoundTypeOrValueExpression)receiver;

                    // Unfortunately, the runtime binder doesn't have APIs that would allow us to pass both "type or value".
                    // Ideally the runtime binder would choose between type and value based on the result of the overload resolution.
                    // We need to pick one or the other here. Dev11 compiler passes the type only if the value can't be accessed.
                    bool inStaticContext;
                    bool useType = IsInstance(typeOrValue.Data.ValueSymbol) && !HasThis(isExplicit: false, inStaticContext: out inStaticContext);

                    BoundExpression finalReceiver = ReplaceTypeOrValueReceiver(typeOrValue, useType, diagnostics);

                    expression = methodGroup.Update(
                        methodGroup.TypeArgumentsOpt,
                        methodGroup.Name,
                        methodGroup.Methods,
                        methodGroup.LookupSymbolOpt,
                        methodGroup.LookupError,
                        methodGroup.Flags,
                        finalReceiver,
                        methodGroup.ResultKind);
                    break;
            }
        }
    }
    else
    {
        // Non-method-group target (e.g. a delegate-typed dynamic expression) just needs its natural type.
        expression = BindToNaturalType(expression, diagnostics);
    }

    ImmutableArray<BoundExpression> argArray = BuildArgumentsForDynamicInvocation(arguments, diagnostics);
    var refKindsArray = arguments.RefKinds.ToImmutableOrNull();

    // NOTE(review): "&=" looks suspicious here — it can clear a previously-set hasErrors
    // (true & false == false) and can never set it. "|=" appears to be the intent. Confirm
    // before changing: the diagnostics themselves are reported either way, so only the
    // hasErrors flag on the resulting node is affected.
    hasErrors &= ReportBadDynamicArguments(node, argArray, refKindsArray, diagnostics, queryClause);

    return new BoundDynamicInvocation(
        node,
        arguments.GetNames(),
        refKindsArray,
        applicableMethods,
        expression,
        argArray,
        type: Compilation.DynamicType,
        hasErrors: hasErrors);
}

/// <summary>
/// Reports an error for a named argument that precedes a positional argument in a
/// dynamic invocation (the runtime binder cannot dispatch such calls). Reports at
/// most one error, on the first offending positional argument.
/// </summary>
private void CheckNamedArgumentsForDynamicInvocation(AnalyzedArguments arguments, DiagnosticBag diagnostics)
{
    if (arguments.Names.Count == 0)
    {
        // No named arguments at all; nothing to check.
        return;
    }

    if (!Compilation.LanguageVersion.AllowNonTrailingNamedArguments())
    {
        // NOTE(review): presumably non-trailing named arguments are already rejected by the
        // general argument checks for language versions that disallow them — confirm; this
        // early-out skips the dynamic-specific diagnostic in that case.
        return;
    }

    bool seenName = false;
    for (int i = 0; i < arguments.Names.Count; i++)
    {
        if (arguments.Names[i] != null)
        {
            seenName = true;
        }
        else if (seenName)
        {
            // A positional argument after a named one: not supported in dynamic dispatch.
            Error(diagnostics, ErrorCode.ERR_NamedArgumentSpecificationBeforeFixedArgumentInDynamicInvocation, arguments.Arguments[i].Syntax);
            return;
        }
    }
}

/// <summary>
/// Produces the final argument list for a dynamic invocation: out-variable declarations
/// and typeless discards fail inference (no parameter type to infer from), and all other
/// arguments are converted to their natural type.
/// </summary>
private ImmutableArray<BoundExpression> BuildArgumentsForDynamicInvocation(AnalyzedArguments arguments, DiagnosticBag diagnostics)
{
    var builder = ArrayBuilder<BoundExpression>.GetInstance(arguments.Arguments.Count);
    builder.AddRange(arguments.Arguments);
    for (int i = 0, n = builder.Count; i < n; i++)
    {
        builder[i] = builder[i] switch
        {
            OutVariablePendingInference outvar => outvar.FailInference(this, diagnostics),
            BoundDiscardExpression discard when !discard.HasExpressionType() => discard.FailInference(this, diagnostics),
            var arg => BindToNaturalType(arg, diagnostics)
        };
    }

    return builder.ToImmutableAndFree();
}

// Reports diagnostics for arguments that cannot participate in a dynamically dispatched
// operation ("in" arguments, lambdas, method groups, __arglist, and other illegal operands).
// Returns true if there were errors.
private static bool ReportBadDynamicArguments(
    SyntaxNode node,
    ImmutableArray<BoundExpression> arguments,
    ImmutableArray<RefKind> refKinds,
    DiagnosticBag diagnostics,
    CSharpSyntaxNode queryClause)
{
    bool hasErrors = false;
    bool reportedBadQuery = false;

    if (!refKinds.IsDefault)
    {
        for (int argIndex = 0; argIndex < refKinds.Length; argIndex++)
        {
            if (refKinds[argIndex] == RefKind.In)
            {
                // "in" arguments are not supported by the runtime binder.
                Error(diagnostics, ErrorCode.ERR_InDynamicMethodArg, arguments[argIndex].Syntax);
                hasErrors = true;
            }
        }
    }

    foreach (var arg in arguments)
    {
        if (!IsLegalDynamicOperand(arg))
        {
            if (queryClause != null && !reportedBadQuery)
            {
                // Inside a query, report one query-specific diagnostic rather than
                // one per bad argument.
                reportedBadQuery = true;
                Error(diagnostics, ErrorCode.ERR_BadDynamicQuery, node);
                hasErrors = true;
                continue;
            }

            if (arg.Kind == BoundKind.Lambda || arg.Kind == BoundKind.UnboundLambda)
            {
                // Cannot use a lambda expression as an argument to a dynamically dispatched operation without first casting it to a delegate or expression tree type.
                Error(diagnostics, ErrorCode.ERR_BadDynamicMethodArgLambda, arg.Syntax);
                hasErrors = true;
            }
            else if (arg.Kind == BoundKind.MethodGroup)
            {
                // Cannot use a method group as an argument to a dynamically dispatched operation. Did you intend to invoke the method?
                Error(diagnostics, ErrorCode.ERR_BadDynamicMethodArgMemgrp, arg.Syntax);
                hasErrors = true;
            }
            else if (arg.Kind == BoundKind.ArgListOperator)
            {
                // Not a great error message, since __arglist is not a type, but it'll do.

                // error CS1978: Cannot use an expression of type '__arglist' as an argument to a dynamically dispatched operation
                // NOTE(review): unlike the sibling branches, hasErrors is NOT set here —
                // confirm whether that is intentional before changing.
                Error(diagnostics, ErrorCode.ERR_BadDynamicMethodArg, arg.Syntax, "__arglist");
            }
            else
            {
                // Lambdas, anonymous methods and method groups are the typeless expressions that
                // are not usable as dynamic arguments; if we get here then the expression must have a type.
                Debug.Assert((object)arg.Type != null);
                // error CS1978: Cannot use an expression of type 'int*' as an argument to a dynamically dispatched operation
                Error(diagnostics, ErrorCode.ERR_BadDynamicMethodArg, arg.Syntax, arg.Type);
                hasErrors = true;
            }
        }
    }
    return hasErrors;
}

/// <summary>
/// Binds an invocation of a value of delegate type by running overload resolution
/// against the delegate's single Invoke method, falling back to dynamic dispatch
/// when an applicable candidate exists and an argument is dynamic.
/// </summary>
private BoundExpression BindDelegateInvocation(
    SyntaxNode node,
    SyntaxNode expression,
    string methodName,
    BoundExpression boundExpression,
    AnalyzedArguments analyzedArguments,
    DiagnosticBag diagnostics,
    CSharpSyntaxNode queryClause,
    NamedTypeSymbol delegateType)
{
    BoundExpression result;
    var methodGroup = MethodGroup.GetInstance();
    methodGroup.PopulateWithSingleMethod(boundExpression, delegateType.DelegateInvokeMethod);
    var overloadResolutionResult = OverloadResolutionResult<MethodSymbol>.GetInstance();
    HashSet<DiagnosticInfo> useSiteDiagnostics = null;
    OverloadResolution.MethodInvocationOverloadResolution(
        methods: methodGroup.Methods,
        typeArguments: methodGroup.TypeArguments,
        receiver: methodGroup.Receiver,
        arguments: analyzedArguments,
        result: overloadResolutionResult,
        useSiteDiagnostics: ref useSiteDiagnostics);
    diagnostics.Add(node, useSiteDiagnostics);

    // If overload resolution on the "Invoke" method found an applicable candidate, and one of the arguments
    // was dynamic then treat this as a dynamic call.
    if (analyzedArguments.HasDynamicArgument && overloadResolutionResult.HasAnyApplicableMember)
    {
        result = BindDynamicInvocation(node, boundExpression, analyzedArguments, overloadResolutionResult.GetAllApplicableMembers(), diagnostics, queryClause);
    }
    else
    {
        result = BindInvocationExpressionContinued(node, expression, methodName, overloadResolutionResult, analyzedArguments, methodGroup, delegateType, diagnostics, queryClause);
    }

    // Return pooled objects; 'result' no longer references them.
    overloadResolutionResult.Free();
    methodGroup.Free();
    return result;
}

/// <summary>
/// Returns true if any applicable candidate in the overload resolution result is a
/// [Conditional] method (used to warn about dynamic dispatch to conditional methods).
/// </summary>
private static bool HasApplicableConditionalMethod(OverloadResolutionResult<MethodSymbol> results)
{
    var r = results.Results;
    for (int i = 0; i < r.Length; ++i)
    {
        if (r[i].IsApplicable && r[i].Member.IsConditional)
        {
            return true;
        }
    }

    return false;
}

/// <summary>
/// Binds an invocation whose target is a method group: resolves the group against the
/// arguments, then produces a normal call, a dynamic invocation (when an applicable
/// candidate exists and an argument is dynamic), or an error call.
/// </summary>
/// <param name="anyApplicableCandidates">True when lookup was viable and overload resolution found at least one applicable member.</param>
private BoundExpression BindMethodGroupInvocation(
    SyntaxNode syntax,
    SyntaxNode expression,
    string methodName,
    BoundMethodGroup methodGroup,
    AnalyzedArguments analyzedArguments,
    DiagnosticBag diagnostics,
    CSharpSyntaxNode queryClause,
    bool allowUnexpandedForm,
    out bool anyApplicableCandidates)
{
    BoundExpression result;
    HashSet<DiagnosticInfo> useSiteDiagnostics = null;
    var resolution = this.ResolveMethodGroup(
        methodGroup, expression, methodName, analyzedArguments, isMethodGroupConversion: false,
        useSiteDiagnostics: ref useSiteDiagnostics, allowUnexpandedForm: allowUnexpandedForm);
    diagnostics.Add(expression, useSiteDiagnostics);
    anyApplicableCandidates = resolution.ResultKind == LookupResultKind.Viable && resolution.OverloadResolutionResult.HasAnyApplicableMember;

    if (!methodGroup.HasAnyErrors) diagnostics.AddRange(resolution.Diagnostics); // Suppress cascading.

    if (resolution.HasAnyErrors)
    {
        // Resolution itself failed: report a bad call built from whatever information we have.
        ImmutableArray<MethodSymbol> originalMethods;
        LookupResultKind resultKind;
        ImmutableArray<TypeWithAnnotations> typeArguments;
        if (resolution.OverloadResolutionResult != null)
        {
            originalMethods = GetOriginalMethods(resolution.OverloadResolutionResult);
            resultKind = resolution.MethodGroup.ResultKind;
            typeArguments = resolution.MethodGroup.TypeArguments.ToImmutable();
        }
        else
        {
            originalMethods = methodGroup.Methods;
            resultKind = methodGroup.ResultKind;
            typeArguments = methodGroup.TypeArgumentsOpt;
        }

        result = CreateBadCall(
            syntax,
            methodName,
            methodGroup.ReceiverOpt,
            originalMethods,
            resultKind,
            typeArguments,
            analyzedArguments,
            invokedAsExtensionMethod: resolution.IsExtensionMethodGroup,
            isDelegate: false);
    }
    else if (!resolution.IsEmpty)
    {
        // We're checking resolution.ResultKind, rather than methodGroup.HasErrors
        // to better handle the case where there's a problem with the receiver
        // (e.g. inaccessible), but the method group resolved correctly (e.g. because
        // it's actually an accessible static method on a base type).
        // CONSIDER: could check for error types amongst method group type arguments.
        if (resolution.ResultKind != LookupResultKind.Viable)
        {
            if (resolution.MethodGroup != null)
            {
                // we want to force any unbound lambda arguments to cache an appropriate conversion if possible; see 9448.
                // The bound result itself is discarded below (only the side effect on the lambdas matters).
                DiagnosticBag discarded = DiagnosticBag.GetInstance();
                result = BindInvocationExpressionContinued(
                    syntax, expression, methodName, resolution.OverloadResolutionResult, resolution.AnalyzedArguments,
                    resolution.MethodGroup, delegateTypeOpt: null, diagnostics: discarded, queryClause: queryClause);
                discarded.Free();
            }

            // Since the resolution is non-empty and has no diagnostics, the LookupResultKind in its MethodGroup is uninteresting.
            result = CreateBadCall(syntax, methodGroup, methodGroup.ResultKind, analyzedArguments);
        }
        else
        {
            // If overload resolution found one or more applicable methods and at least one argument
            // was dynamic then treat this as a dynamic call.
            if (resolution.AnalyzedArguments.HasDynamicArgument &&
                resolution.OverloadResolutionResult.HasAnyApplicableMember)
            {
                if (resolution.IsLocalFunctionInvocation)
                {
                    // Local functions can't be overloaded, so dynamic arguments get static dispatch.
                    result = BindLocalFunctionInvocationWithDynamicArgument(
                        syntax, expression, methodName, methodGroup,
                        diagnostics, queryClause, resolution);
                }
                else if (resolution.IsExtensionMethodGroup)
                {
                    // error CS1973: 'T' has no applicable method named 'M' but appears to have an
                    // extension method by that name. Extension methods cannot be dynamically dispatched. Consider
                    // casting the dynamic arguments or calling the extension method without the extension method
                    // syntax.

                    // We found an extension method, so the instance associated with the method group must have
                    // existed and had a type.
                    Debug.Assert(methodGroup.InstanceOpt != null && (object)methodGroup.InstanceOpt.Type != null);

                    Error(diagnostics, ErrorCode.ERR_BadArgTypeDynamicExtension, syntax, methodGroup.InstanceOpt.Type, methodGroup.Name);
                    result = CreateBadCall(syntax, methodGroup, methodGroup.ResultKind, analyzedArguments);
                }
                else
                {
                    if (HasApplicableConditionalMethod(resolution.OverloadResolutionResult))
                    {
                        // warning CS1974: The dynamically dispatched call to method 'Goo' may fail at runtime
                        // because one or more applicable overloads are conditional methods
                        Error(diagnostics, ErrorCode.WRN_DynamicDispatchToConditionalMethod, syntax, methodGroup.Name);
                    }

                    // Note that the runtime binder may consider candidates that haven't passed compile-time final validation
                    // and an ambiguity error may be reported. Also additional checks are performed in runtime final validation
                    // that are not performed at compile-time.
                    // Only if the set of final applicable candidates is empty we know for sure the call will fail at runtime.
                    var finalApplicableCandidates = GetCandidatesPassingFinalValidation(syntax, resolution.OverloadResolutionResult,
                                                                                       methodGroup.ReceiverOpt,
                                                                                       methodGroup.TypeArgumentsOpt,
                                                                                       diagnostics);
                    if (finalApplicableCandidates.Length > 0)
                    {
                        result = BindDynamicInvocation(syntax, methodGroup, resolution.AnalyzedArguments, finalApplicableCandidates, diagnostics, queryClause);
                    }
                    else
                    {
                        result = CreateBadCall(syntax, methodGroup, methodGroup.ResultKind, analyzedArguments);
                    }
                }
            }
            else
            {
                // Ordinary static dispatch.
                result = BindInvocationExpressionContinued(
                    syntax, expression, methodName, resolution.OverloadResolutionResult, resolution.AnalyzedArguments,
                    resolution.MethodGroup, delegateTypeOpt: null, diagnostics: diagnostics, queryClause: queryClause);
            }
        }
    }
    else
    {
        // Empty resolution: nothing to invoke.
        result = CreateBadCall(syntax, methodGroup, methodGroup.ResultKind, analyzedArguments);
    }
    resolution.Free();
    return result;
}

/// <summary>
/// Binds an invocation of a local function that has at least one dynamic argument.
/// Local functions cannot be overloaded, so such calls are emitted as ordinary calls
/// with dynamic implicit conversions — except for the "params" ambiguity and
/// inferred-generic cases documented inline, which are rejected.
/// </summary>
private BoundExpression BindLocalFunctionInvocationWithDynamicArgument(
    SyntaxNode syntax,
    SyntaxNode expression,
    string methodName,
    BoundMethodGroup boundMethodGroup,
    DiagnosticBag diagnostics,
    CSharpSyntaxNode queryClause,
    MethodGroupResolution resolution)
{
    // Invocations of local functions with dynamic arguments don't need
    // to be dispatched as dynamic invocations since they cannot be
    // overloaded. Instead, we'll just emit a standard call with
    // dynamic implicit conversions for any dynamic arguments. There
    // are two exceptions: "params", and unconstructed generics. While
    // implementing those cases with dynamic invocations is possible,
    // we have decided the implementation complexity is not worth it.
    // Refer to the comments below for the exact semantics.
    Debug.Assert(resolution.IsLocalFunctionInvocation);
    Debug.Assert(resolution.OverloadResolutionResult.Succeeded);
    Debug.Assert(queryClause == null);

    var validResult = resolution.OverloadResolutionResult.ValidResult;
    var args = resolution.AnalyzedArguments.Arguments.ToImmutable();
    var refKindsArray = resolution.AnalyzedArguments.RefKinds.ToImmutableOrNull();

    ReportBadDynamicArguments(syntax, args, refKindsArray, diagnostics, queryClause);

    var localFunction = validResult.Member;
    var methodResult = validResult.Result;

    // We're only in trouble if a dynamic argument is passed to the
    // params parameter and is ambiguous at compile time between normal
    // and expanded form i.e., there is exactly one dynamic argument to
    // a params parameter
    // See https://github.com/dotnet/roslyn/issues/10708
    if (OverloadResolution.IsValidParams(localFunction) &&
        methodResult.Kind == MemberResolutionKind.ApplicableInNormalForm)
    {
        var parameters = localFunction.Parameters;

        Debug.Assert(parameters.Last().IsParams);

        var lastParamIndex = parameters.Length - 1;

        for (int i = 0; i < args.Length; ++i)
        {
            var arg = args[i];
            if (arg.HasDynamicType() &&
                methodResult.ParameterFromArgument(i) == lastParamIndex)
            {
                Error(diagnostics,
                    ErrorCode.ERR_DynamicLocalFunctionParamsParameter,
                    syntax, parameters.Last().Name, localFunction.Name);
                return BindDynamicInvocation(
                    syntax,
                    boundMethodGroup,
                    resolution.AnalyzedArguments,
                    resolution.OverloadResolutionResult.GetAllApplicableMembers(),
                    diagnostics,
                    queryClause);
            }
        }
    }

    // If we call an unconstructed generic local function with a
    // dynamic argument in a place where it influences the type
    // parameters, we need to dynamically dispatch the call (as the
    // function must be constructed at runtime). We cannot do that, so
    // disallow that. However, doing a specific analysis of each
    // argument and its corresponding parameter to check if it's
    // generic (and allow dynamic in non-generic parameters) may break
    // overload resolution in the future, if we ever allow overloaded
    // local functions. So, just disallow any mixing of dynamic and
    // inferred generics. (Explicit generic arguments are fine)
    // See https://github.com/dotnet/roslyn/issues/21317
    if (boundMethodGroup.TypeArgumentsOpt.IsDefaultOrEmpty && localFunction.IsGenericMethod)
    {
        Error(diagnostics,
            ErrorCode.ERR_DynamicLocalFunctionTypeParameter,
            syntax, localFunction.Name);
        return BindDynamicInvocation(
            syntax,
            boundMethodGroup,
            resolution.AnalyzedArguments,
            resolution.OverloadResolutionResult.GetAllApplicableMembers(),
            diagnostics,
            queryClause);
    }

    return BindInvocationExpressionContinued(
        node: syntax,
        expression: expression,
        methodName: methodName,
        result: resolution.OverloadResolutionResult,
        analyzedArguments: resolution.AnalyzedArguments,
        methodGroup: resolution.MethodGroup,
        delegateTypeOpt: null,
        diagnostics: diagnostics,
        queryClause: queryClause);
}

/// <summary>
/// Filters the applicable overload-resolution candidates down to those that also pass
/// final validation (accessibility of the member group and type-parameter constraints).
/// If every candidate fails, the diagnostics of the first failing candidate are reported.
/// </summary>
private ImmutableArray<TMethodOrPropertySymbol> GetCandidatesPassingFinalValidation<TMethodOrPropertySymbol>(
    SyntaxNode syntax,
    OverloadResolutionResult<TMethodOrPropertySymbol> overloadResolutionResult,
    BoundExpression receiverOpt,
    ImmutableArray<TypeWithAnnotations> typeArgumentsOpt,
    DiagnosticBag diagnostics) where TMethodOrPropertySymbol : Symbol
{
    Debug.Assert(overloadResolutionResult.HasAnyApplicableMember);

    var finalCandidates = ArrayBuilder<TMethodOrPropertySymbol>.GetInstance();
    DiagnosticBag firstFailed = null;
    DiagnosticBag candidateDiagnostics = DiagnosticBag.GetInstance();

    for (int i = 0, n = overloadResolutionResult.ResultsBuilder.Count; i < n; i++)
    {
        var result = overloadResolutionResult.ResultsBuilder[i];
        if (result.Result.IsApplicable)
        {
            // For F to pass the check, all of the following must hold:
            // ...
            // * If the type parameters of F were substituted in the step above, their constraints are satisfied.
            // * If F is a static method, the method group must have resulted from a simple-name, a member-access through a type,
            //   or a member-access whose receiver can't be classified as a type or value until after overload resolution (see §7.6.4.1).
            // * If F is an instance method, the method group must have resulted from a simple-name, a member-access through a variable or value,
            //   or a member-access whose receiver can't be classified as a type or value until after overload resolution (see §7.6.4.1).
            if (!MemberGroupFinalValidationAccessibilityChecks(receiverOpt, result.Member, syntax, candidateDiagnostics, invokedAsExtensionMethod: false) &&
                (typeArgumentsOpt.IsDefault || ((MethodSymbol)(object)result.Member).CheckConstraints(this.Conversions, syntax, this.Compilation, candidateDiagnostics)))
            {
                finalCandidates.Add(result.Member);
                continue;
            }

            if (firstFailed == null)
            {
                // Keep the diagnostics of the first failing candidate; start a fresh bag for later ones.
                firstFailed = candidateDiagnostics;
                candidateDiagnostics = DiagnosticBag.GetInstance();
            }
            else
            {
                candidateDiagnostics.Clear();
            }
        }
    }

    if (firstFailed != null)
    {
        // Report diagnostics of the first candidate that failed the validation
        // unless we have at least one candidate that passes.
        if (finalCandidates.Count == 0)
        {
            diagnostics.AddRange(firstFailed);
        }

        firstFailed.Free();
    }

    candidateDiagnostics.Free();

    return finalCandidates.ToImmutableAndFree();
}

/// <summary>
/// Reports an error when a call would require boxing a receiver of restricted type
/// (e.g. TypedReference), which is never legal. Expects a BoundCall or BoundDynamicInvocation.
/// </summary>
private void CheckRestrictedTypeReceiver(BoundExpression expression, CSharpCompilation compilation, DiagnosticBag diagnostics)
{
    Debug.Assert(diagnostics != null);

    // It is never legal to box a restricted type, even if we are boxing it as the receiver
    // of a method call. When must the receiver be boxed? We skip boxing when the method in question
    // is defined on the restricted type or overridden by the restricted type.
    switch (expression.Kind)
    {
        case BoundKind.Call:
            {
                var call = (BoundCall)expression;
                if (!call.HasAnyErrors && call.ReceiverOpt != null && (object)call.ReceiverOpt.Type != null)
                {
                    // error CS0029: Cannot implicitly convert type 'A' to 'B'

                    // Case 1: receiver is a restricted type, and method called is defined on a parent type
                    if (call.ReceiverOpt.Type.IsRestrictedType() && !TypeSymbol.Equals(call.Method.ContainingType, call.ReceiverOpt.Type, TypeCompareKind.ConsiderEverything2))
                    {
                        SymbolDistinguisher distinguisher = new SymbolDistinguisher(compilation, call.ReceiverOpt.Type, call.Method.ContainingType);
                        Error(diagnostics, ErrorCode.ERR_NoImplicitConv, call.ReceiverOpt.Syntax, distinguisher.First, distinguisher.Second);
                    }
                    // Case 2: receiver is a base reference, and the child type is restricted
                    else if (call.ReceiverOpt.Kind == BoundKind.BaseReference && this.ContainingType.IsRestrictedType())
                    {
                        SymbolDistinguisher distinguisher = new SymbolDistinguisher(compilation, this.ContainingType, call.Method.ContainingType);
                        Error(diagnostics, ErrorCode.ERR_NoImplicitConv, call.ReceiverOpt.Syntax, distinguisher.First, distinguisher.Second);
                    }
                }
            }
            break;
        case BoundKind.DynamicInvocation:
            {
                var dynInvoke = (BoundDynamicInvocation)expression;
                if (!dynInvoke.HasAnyErrors &&
                    (object)dynInvoke.Expression.Type != null &&
                    dynInvoke.Expression.Type.IsRestrictedType())
                {
                    // eg: b = typedReference.Equals(dyn);
                    // error CS1978: Cannot use an expression of type 'TypedReference' as an argument to a dynamically dispatched operation
                    Error(diagnostics, ErrorCode.ERR_BadDynamicMethodArg, dynInvoke.Expression.Syntax, dynInvoke.Expression.Type);
                }
            }
            break;
        default:
            throw ExceptionUtilities.UnexpectedValue(expression.Kind);
    }
}

/// <summary>
/// Perform overload resolution on the method group or expression (BoundMethodGroup)
/// and arguments and return a BoundExpression representing the invocation.
/// </summary> /// <param name="node">Invocation syntax node.</param> /// <param name="expression">The syntax for the invoked method, including receiver.</param> /// <param name="methodName">Name of the invoked method.</param> /// <param name="result">Overload resolution result for method group executed by caller.</param> /// <param name="analyzedArguments">Arguments bound by the caller.</param> /// <param name="methodGroup">Method group if the invocation represents a potentially overloaded member.</param> /// <param name="delegateTypeOpt">Delegate type if method group represents a delegate.</param> /// <param name="diagnostics">Diagnostics.</param> /// <param name="queryClause">The syntax for the query clause generating this invocation expression, if any.</param> /// <returns>BoundCall or error expression representing the invocation.</returns> private BoundCall BindInvocationExpressionContinued( SyntaxNode node, SyntaxNode expression, string methodName, OverloadResolutionResult<MethodSymbol> result, AnalyzedArguments analyzedArguments, MethodGroup methodGroup, NamedTypeSymbol delegateTypeOpt, DiagnosticBag diagnostics, CSharpSyntaxNode queryClause = null) { Debug.Assert(node != null); Debug.Assert(methodGroup != null); Debug.Assert(methodGroup.Error == null); Debug.Assert(methodGroup.Methods.Count > 0); Debug.Assert(((object)delegateTypeOpt == null) || (methodGroup.Methods.Count == 1)); var invokedAsExtensionMethod = methodGroup.IsExtensionMethodGroup; // Delegate invocations should never be considered extension method // invocations (even though the delegate may refer to an extension method). Debug.Assert(!invokedAsExtensionMethod || ((object)delegateTypeOpt == null)); // We have already determined that we are not in a situation where we can successfully do // a dynamic binding. We might be in one of the following situations: // // * There were dynamic arguments but overload resolution still found zero applicable candidates. 
// * There were no dynamic arguments and overload resolution found zero applicable candidates. // * There were no dynamic arguments and overload resolution found multiple applicable candidates // without being able to find the best one. // // In those three situations we might give an additional error. if (!result.Succeeded) { if (analyzedArguments.HasErrors) { // Errors for arguments have already been reported, except for unbound lambdas and switch expressions. // We report those now. foreach (var argument in analyzedArguments.Arguments) { switch (argument) { case UnboundLambda unboundLambda: var boundWithErrors = unboundLambda.BindForErrorRecovery(); diagnostics.AddRange(boundWithErrors.Diagnostics); break; case BoundTupleLiteral _: // Tuple literals can contain unbound lambdas or switch expressions. _ = BindToNaturalType(argument, diagnostics); break; case BoundUnconvertedSwitchExpression { Type: { } naturalType } switchExpr: _ = ConvertSwitchExpression(switchExpr, naturalType ?? CreateErrorType(), conversionIfTargetTyped: null, diagnostics); break; } } } else { // Since there were no argument errors to report, we report an error on the invocation itself. string name = (object)delegateTypeOpt == null ? methodName : null; result.ReportDiagnostics( binder: this, location: GetLocationForOverloadResolutionDiagnostic(node, expression), nodeOpt: node, diagnostics: diagnostics, name: name, receiver: methodGroup.Receiver, invokedExpression: expression, arguments: analyzedArguments, memberGroup: methodGroup.Methods.ToImmutable(), typeContainingConstructor: null, delegateTypeBeingInvoked: delegateTypeOpt, queryClause: queryClause); } return CreateBadCall(node, methodGroup.Name, invokedAsExtensionMethod && analyzedArguments.Arguments.Count > 0 && (object)methodGroup.Receiver == (object)analyzedArguments.Arguments[0] ? 
null : methodGroup.Receiver, GetOriginalMethods(result), methodGroup.ResultKind, methodGroup.TypeArguments.ToImmutable(), analyzedArguments, invokedAsExtensionMethod: invokedAsExtensionMethod, isDelegate: ((object)delegateTypeOpt != null)); } // Otherwise, there were no dynamic arguments and overload resolution found a unique best candidate. // We still have to determine if it passes final validation. var methodResult = result.ValidResult; var returnType = methodResult.Member.ReturnType; this.CoerceArguments(methodResult, analyzedArguments.Arguments, diagnostics); var method = methodResult.Member; var expanded = methodResult.Result.Kind == MemberResolutionKind.ApplicableInExpandedForm; var argsToParams = methodResult.Result.ArgsToParamsOpt; // It is possible that overload resolution succeeded, but we have chosen an // instance method and we're in a static method. A careful reading of the // overload resolution spec shows that the "final validation" stage allows an // "implicit this" on any method call, not just method calls from inside // instance methods. Therefore we must detect this scenario here, rather than in // overload resolution. var receiver = ReplaceTypeOrValueReceiver(methodGroup.Receiver, !method.RequiresInstanceReceiver && !invokedAsExtensionMethod, diagnostics); // Note: we specifically want to do final validation (7.6.5.1) without checking delegate compatibility (15.2), // so we're calling MethodGroupFinalValidation directly, rather than via MethodGroupConversionHasErrors. // Note: final validation wants the receiver that corresponds to the source representation // (i.e. the first argument, if invokedAsExtensionMethod). 
var gotError = MemberGroupFinalValidation(receiver, method, expression, diagnostics, invokedAsExtensionMethod); CheckImplicitThisCopyInReadOnlyMember(receiver, method, diagnostics); if (invokedAsExtensionMethod) { BoundExpression receiverArgument = analyzedArguments.Argument(0); ParameterSymbol receiverParameter = method.Parameters.First(); // we will have a different receiver if ReplaceTypeOrValueReceiver has unwrapped TypeOrValue if ((object)receiver != receiverArgument) { // Because the receiver didn't pass through CoerceArguments, we need to apply an appropriate conversion here. Debug.Assert(argsToParams.IsDefault || argsToParams[0] == 0); receiverArgument = CreateConversion(receiver, methodResult.Result.ConversionForArg(0), receiverParameter.Type, diagnostics); } if (receiverParameter.RefKind == RefKind.Ref) { // If this was a ref extension method, receiverArgument must be checked for L-value constraints. // This helper method will also replace it with a BoundBadExpression if it was invalid. receiverArgument = CheckValue(receiverArgument, BindValueKind.RefOrOut, diagnostics); if (analyzedArguments.RefKinds.Count == 0) { analyzedArguments.RefKinds.Count = analyzedArguments.Arguments.Count; } // receiver of a `ref` extension method is a `ref` argument. (and we have checked above that it can be passed as a Ref) // we need to adjust the argument refkind as if we had a `ref` modifier in a call. analyzedArguments.RefKinds[0] = RefKind.Ref; CheckFeatureAvailability(receiverArgument.Syntax, MessageID.IDS_FeatureRefExtensionMethods, diagnostics); } else if (receiverParameter.RefKind == RefKind.In) { // NB: receiver of an `in` extension method is treated as a `byval` argument, so no changes from the default refkind is needed in that case. 
Debug.Assert(analyzedArguments.RefKind(0) == RefKind.None); CheckFeatureAvailability(receiverArgument.Syntax, MessageID.IDS_FeatureRefExtensionMethods, diagnostics); } analyzedArguments.Arguments[0] = receiverArgument; } // This will be the receiver of the BoundCall node that we create. // For extension methods, there is no receiver because the receiver in source was actually the first argument. // For instance methods, we may have synthesized an implicit this node. We'll keep it for the emitter. // For static methods, we may have synthesized a type expression. It serves no purpose, so we'll drop it. if (invokedAsExtensionMethod || (!method.RequiresInstanceReceiver && receiver != null && receiver.WasCompilerGenerated)) { receiver = null; } var argNames = analyzedArguments.GetNames(); var argRefKinds = analyzedArguments.RefKinds.ToImmutableOrNull(); var args = analyzedArguments.Arguments.ToImmutable(); if (!gotError && method.RequiresInstanceReceiver && receiver != null && receiver.Kind == BoundKind.ThisReference && receiver.WasCompilerGenerated) { gotError = IsRefOrOutThisParameterCaptured(node, diagnostics); } // What if some of the arguments are implicit? Dev10 reports unsafe errors // if the implied argument would have an unsafe type. We need to check // the parameters explicitly, since there won't be bound nodes for the implied // arguments until lowering. if (method.HasUnsafeParameter()) { // Don't worry about double reporting (i.e. for both the argument and the parameter) // because only one unsafe diagnostic is allowed per scope - the others are suppressed. gotError = ReportUnsafeIfNotAllowed(node, diagnostics) || gotError; } bool hasBaseReceiver = receiver != null && receiver.Kind == BoundKind.BaseReference; ReportDiagnosticsIfObsolete(diagnostics, method, node, hasBaseReceiver); // No use site errors, but there could be use site warnings. // If there are any use site warnings, they have already been reported by overload resolution. 
Debug.Assert(!method.HasUseSiteError, "Shouldn't have reached this point if there were use site errors."); if (method.IsRuntimeFinalizer()) { ErrorCode code = hasBaseReceiver ? ErrorCode.ERR_CallingBaseFinalizeDeprecated : ErrorCode.ERR_CallingFinalizeDeprecated; Error(diagnostics, code, node); gotError = true; } Debug.Assert(args.IsDefaultOrEmpty || (object)receiver != (object)args[0]); if (!gotError) { gotError = !CheckInvocationArgMixing( node, method, receiver, method.Parameters, args, argsToParams, this.LocalScopeDepth, diagnostics); } bool isDelegateCall = (object)delegateTypeOpt != null; if (!isDelegateCall) { if (method.RequiresInstanceReceiver) { WarnOnAccessOfOffDefault(node.Kind() == SyntaxKind.InvocationExpression ? ((InvocationExpressionSyntax)node).Expression : node, receiver, diagnostics); } } return new BoundCall(node, receiver, method, args, argNames, argRefKinds, isDelegateCall: isDelegateCall, expanded: expanded, invokedAsExtensionMethod: invokedAsExtensionMethod, argsToParamsOpt: argsToParams, resultKind: LookupResultKind.Viable, binderOpt: this, type: returnType, hasErrors: gotError); } /// <summary> /// Returns false if an implicit 'this' copy will occur due to an instance member invocation in a readonly member. /// </summary> internal bool CheckImplicitThisCopyInReadOnlyMember(BoundExpression receiver, MethodSymbol method, DiagnosticBag diagnostics) { // For now we are warning only in implicit copy scenarios that are only possible with readonly members. // Eventually we will warn on implicit value copies in more scenarios. See https://github.com/dotnet/roslyn/issues/33968. if (receiver is BoundThisReference && receiver.Type.IsValueType && ContainingMemberOrLambda is MethodSymbol containingMethod && containingMethod.IsEffectivelyReadOnly && // Ignore calls to base members. 
TypeSymbol.Equals(containingMethod.ContainingType, method.ContainingType, TypeCompareKind.ConsiderEverything) && !method.IsEffectivelyReadOnly && method.RequiresInstanceReceiver) { Error(diagnostics, ErrorCode.WRN_ImplicitCopyInReadOnlyMember, receiver.Syntax, method, ThisParameterSymbol.SymbolName); return false; } return true; } /// <param name="node">Invocation syntax node.</param> /// <param name="expression">The syntax for the invoked method, including receiver.</param> private static Location GetLocationForOverloadResolutionDiagnostic(SyntaxNode node, SyntaxNode expression) { if (node != expression) { switch (expression.Kind()) { case SyntaxKind.QualifiedName: return ((QualifiedNameSyntax)expression).Right.GetLocation(); case SyntaxKind.SimpleMemberAccessExpression: case SyntaxKind.PointerMemberAccessExpression: return ((MemberAccessExpressionSyntax)expression).Name.GetLocation(); } } return expression.GetLocation(); } /// <summary> /// Replace a BoundTypeOrValueExpression with a BoundExpression for either a type (if useType is true) /// or a value (if useType is false). Any other node is bound to its natural type. /// </summary> /// <remarks> /// Call this once overload resolution has succeeded on the method group of which the BoundTypeOrValueExpression /// is the receiver. Generally, useType will be true if the chosen method is static and false otherwise. 
/// </remarks> private BoundExpression ReplaceTypeOrValueReceiver(BoundExpression receiver, bool useType, DiagnosticBag diagnostics) { if ((object)receiver == null) { return null; } switch (receiver.Kind) { case BoundKind.TypeOrValueExpression: var typeOrValue = (BoundTypeOrValueExpression)receiver; if (useType) { diagnostics.AddRange(typeOrValue.Data.TypeDiagnostics); return typeOrValue.Data.TypeExpression; } else { diagnostics.AddRange(typeOrValue.Data.ValueDiagnostics); return CheckValue(typeOrValue.Data.ValueExpression, BindValueKind.RValue, diagnostics); } case BoundKind.QueryClause: // a query clause may wrap a TypeOrValueExpression. var q = (BoundQueryClause)receiver; var value = q.Value; var replaced = ReplaceTypeOrValueReceiver(value, useType, diagnostics); return (value == replaced) ? q : q.Update(replaced, q.DefinedSymbol, q.Operation, q.Cast, q.Binder, q.UnoptimizedForm, q.Type); default: return BindToNaturalType(receiver, diagnostics); } } /// <summary> /// Return the delegate type if this expression represents a delegate. /// </summary> private static NamedTypeSymbol GetDelegateType(BoundExpression expr) { if ((object)expr != null && expr.Kind != BoundKind.TypeExpression) { var type = expr.Type as NamedTypeSymbol; if (((object)type != null) && type.IsDelegateType()) { return type; } } return null; } private BoundCall CreateBadCall( SyntaxNode node, string name, BoundExpression receiver, ImmutableArray<MethodSymbol> methods, LookupResultKind resultKind, ImmutableArray<TypeWithAnnotations> typeArgumentsWithAnnotations, AnalyzedArguments analyzedArguments, bool invokedAsExtensionMethod, bool isDelegate) { MethodSymbol method; ImmutableArray<BoundExpression> args; if (!typeArgumentsWithAnnotations.IsDefaultOrEmpty) { var constructedMethods = ArrayBuilder<MethodSymbol>.GetInstance(); foreach (var m in methods) { constructedMethods.Add(m.ConstructedFrom == m && m.Arity == typeArgumentsWithAnnotations.Length ? 
m.Construct(typeArgumentsWithAnnotations) : m); } methods = constructedMethods.ToImmutableAndFree(); } if (methods.Length == 1 && !IsUnboundGeneric(methods[0])) { method = methods[0]; } else { var returnType = GetCommonTypeOrReturnType(methods) ?? new ExtendedErrorTypeSymbol(this.Compilation, string.Empty, arity: 0, errorInfo: null); var methodContainer = (object)receiver != null && (object)receiver.Type != null ? receiver.Type : this.ContainingType; method = new ErrorMethodSymbol(methodContainer, returnType, name); } args = BuildArgumentsForErrorRecovery(analyzedArguments, methods); var argNames = analyzedArguments.GetNames(); var argRefKinds = analyzedArguments.RefKinds.ToImmutableOrNull(); receiver = BindToTypeForErrorRecovery(receiver); return BoundCall.ErrorCall(node, receiver, method, args, argNames, argRefKinds, isDelegate, invokedAsExtensionMethod: invokedAsExtensionMethod, originalMethods: methods, resultKind: resultKind, binder: this); } private static bool IsUnboundGeneric(MethodSymbol method) { return method.IsGenericMethod && method.ConstructedFrom() == method; } // Arbitrary limit on the number of parameter lists from overload // resolution candidates considered when binding argument types. // Any additional parameter lists are ignored. 
internal const int MaxParameterListsForErrorRecovery = 10; private ImmutableArray<BoundExpression> BuildArgumentsForErrorRecovery(AnalyzedArguments analyzedArguments, ImmutableArray<MethodSymbol> methods) { var parameterListList = ArrayBuilder<ImmutableArray<ParameterSymbol>>.GetInstance(); foreach (var m in methods) { if (!IsUnboundGeneric(m) && m.ParameterCount > 0) { parameterListList.Add(m.Parameters); if (parameterListList.Count == MaxParameterListsForErrorRecovery) { break; } } } var result = BuildArgumentsForErrorRecovery(analyzedArguments, parameterListList); parameterListList.Free(); return result; } private ImmutableArray<BoundExpression> BuildArgumentsForErrorRecovery(AnalyzedArguments analyzedArguments, ImmutableArray<PropertySymbol> properties) { var parameterListList = ArrayBuilder<ImmutableArray<ParameterSymbol>>.GetInstance(); foreach (var p in properties) { if (p.ParameterCount > 0) { parameterListList.Add(p.Parameters); if (parameterListList.Count == MaxParameterListsForErrorRecovery) { break; } } } var result = BuildArgumentsForErrorRecovery(analyzedArguments, parameterListList); parameterListList.Free(); return result; } private ImmutableArray<BoundExpression> BuildArgumentsForErrorRecovery(AnalyzedArguments analyzedArguments, IEnumerable<ImmutableArray<ParameterSymbol>> parameterListList) { var discardedDiagnostics = DiagnosticBag.GetInstance(); int argumentCount = analyzedArguments.Arguments.Count; ArrayBuilder<BoundExpression> newArguments = ArrayBuilder<BoundExpression>.GetInstance(argumentCount); newArguments.AddRange(analyzedArguments.Arguments); for (int i = 0; i < argumentCount; i++) { var argument = newArguments[i]; switch (argument.Kind) { case BoundKind.UnboundLambda: { // bind the argument against each applicable parameter var unboundArgument = (UnboundLambda)argument; foreach (var parameterList in parameterListList) { var parameterType = GetCorrespondingParameterType(analyzedArguments, i, parameterList); if (parameterType?.Kind == 
SymbolKind.NamedType && (object)parameterType.GetDelegateType() != null) { var discarded = unboundArgument.Bind((NamedTypeSymbol)parameterType); } } // replace the unbound lambda with its best inferred bound version newArguments[i] = unboundArgument.BindForErrorRecovery(); break; } case BoundKind.OutVariablePendingInference: case BoundKind.DiscardExpression: { if (argument.HasExpressionType()) { break; } var candidateType = getCorrespondingParameterType(i); if (argument.Kind == BoundKind.OutVariablePendingInference) { if ((object)candidateType == null) { newArguments[i] = ((OutVariablePendingInference)argument).FailInference(this, null); } else { newArguments[i] = ((OutVariablePendingInference)argument).SetInferredTypeWithAnnotations(TypeWithAnnotations.Create(candidateType), null); } } else if (argument.Kind == BoundKind.DiscardExpression) { if ((object)candidateType == null) { newArguments[i] = ((BoundDiscardExpression)argument).FailInference(this, null); } else { newArguments[i] = ((BoundDiscardExpression)argument).SetInferredTypeWithAnnotations(TypeWithAnnotations.Create(candidateType)); } } break; } case BoundKind.OutDeconstructVarPendingInference: { newArguments[i] = ((OutDeconstructVarPendingInference)argument).FailInference(this); break; } case BoundKind.Parameter: case BoundKind.Local: { newArguments[i] = BindToTypeForErrorRecovery(argument); break; } default: { newArguments[i] = BindToTypeForErrorRecovery(argument, getCorrespondingParameterType(i)); break; } } } discardedDiagnostics.Free(); return newArguments.ToImmutableAndFree(); TypeSymbol getCorrespondingParameterType(int i) { // See if all applicable applicable parameters have the same type TypeSymbol candidateType = null; foreach (var parameterList in parameterListList) { var parameterType = GetCorrespondingParameterType(analyzedArguments, i, parameterList); if ((object)parameterType != null) { if ((object)candidateType == null) { candidateType = parameterType; } else if 
(!candidateType.Equals(parameterType, TypeCompareKind.IgnoreCustomModifiersAndArraySizesAndLowerBounds | TypeCompareKind.IgnoreNullableModifiersForReferenceTypes)) { // type mismatch candidateType = null; break; } } } return candidateType; } } /// <summary> /// Compute the type of the corresponding parameter, if any. This is used to improve error recovery, /// for bad invocations, not for semantic analysis of correct invocations, so it is a heuristic. /// If no parameter appears to correspond to the given argument, we return null. /// </summary> /// <param name="analyzedArguments">The analyzed argument list</param> /// <param name="i">The index of the argument</param> /// <param name="parameterList">The parameter list to match against</param> /// <returns>The type of the corresponding parameter.</returns> private static TypeSymbol GetCorrespondingParameterType(AnalyzedArguments analyzedArguments, int i, ImmutableArray<ParameterSymbol> parameterList) { string name = analyzedArguments.Name(i); if (name != null) { // look for a parameter by that name foreach (var parameter in parameterList) { if (parameter.Name == name) return parameter.Type; } return null; } return (i < parameterList.Length) ? parameterList[i].Type : null; // CONSIDER: should we handle variable argument lists? } /// <summary> /// Absent parameter types to bind the arguments, we simply use the arguments provided for error recovery. /// </summary> private ImmutableArray<BoundExpression> BuildArgumentsForErrorRecovery(AnalyzedArguments analyzedArguments) { return BuildArgumentsForErrorRecovery(analyzedArguments, Enumerable.Empty<ImmutableArray<ParameterSymbol>>()); } private BoundCall CreateBadCall( SyntaxNode node, BoundExpression expr, LookupResultKind resultKind, AnalyzedArguments analyzedArguments) { TypeSymbol returnType = new ExtendedErrorTypeSymbol(this.Compilation, string.Empty, arity: 0, errorInfo: null); var methodContainer = expr.Type ?? 
this.ContainingType; MethodSymbol method = new ErrorMethodSymbol(methodContainer, returnType, string.Empty); var args = BuildArgumentsForErrorRecovery(analyzedArguments); var argNames = analyzedArguments.GetNames(); var argRefKinds = analyzedArguments.RefKinds.ToImmutableOrNull(); var originalMethods = (expr.Kind == BoundKind.MethodGroup) ? ((BoundMethodGroup)expr).Methods : ImmutableArray<MethodSymbol>.Empty; return BoundCall.ErrorCall(node, expr, method, args, argNames, argRefKinds, isDelegateCall: false, invokedAsExtensionMethod: false, originalMethods: originalMethods, resultKind: resultKind, binder: this); } private static TypeSymbol GetCommonTypeOrReturnType<TMember>(ImmutableArray<TMember> members) where TMember : Symbol { TypeSymbol type = null; for (int i = 0, n = members.Length; i < n; i++) { TypeSymbol returnType = members[i].GetTypeOrReturnType().Type; if ((object)type == null) { type = returnType; } else if (!TypeSymbol.Equals(type, returnType, TypeCompareKind.ConsiderEverything2)) { return null; } } return type; } private bool TryBindNameofOperator(InvocationExpressionSyntax node, DiagnosticBag diagnostics, out BoundExpression result) { result = null; if (node.Expression.Kind() != SyntaxKind.IdentifierName || ((IdentifierNameSyntax)node.Expression).Identifier.ContextualKind() != SyntaxKind.NameOfKeyword || node.ArgumentList.Arguments.Count != 1) { return false; } ArgumentSyntax argument = node.ArgumentList.Arguments[0]; if (argument.NameColon != null || argument.RefOrOutKeyword != default(SyntaxToken) || InvocableNameofInScope()) { return false; } result = BindNameofOperatorInternal(node, diagnostics); return true; } private BoundExpression BindNameofOperatorInternal(InvocationExpressionSyntax node, DiagnosticBag diagnostics) { CheckFeatureAvailability(node, MessageID.IDS_FeatureNameof, diagnostics); var argument = node.ArgumentList.Arguments[0].Expression; string name = ""; // We relax the instance-vs-static requirement for top-level member access 
expressions by creating a NameofBinder binder. var nameofBinder = new NameofBinder(argument, this); var boundArgument = nameofBinder.BindExpression(argument, diagnostics); if (!boundArgument.HasAnyErrors && CheckSyntaxForNameofArgument(argument, out name, diagnostics) && boundArgument.Kind == BoundKind.MethodGroup) { var methodGroup = (BoundMethodGroup)boundArgument; if (!methodGroup.TypeArgumentsOpt.IsDefaultOrEmpty) { // method group with type parameters not allowed diagnostics.Add(ErrorCode.ERR_NameofMethodGroupWithTypeParameters, argument.Location); } else { nameofBinder.EnsureNameofExpressionSymbols(methodGroup, diagnostics); } } return new BoundNameOfOperator(node, boundArgument, ConstantValue.Create(name), Compilation.GetSpecialType(SpecialType.System_String)); } private void EnsureNameofExpressionSymbols(BoundMethodGroup methodGroup, DiagnosticBag diagnostics) { // Check that the method group contains something applicable. Otherwise error. HashSet<DiagnosticInfo> useSiteDiagnostics = null; var resolution = ResolveMethodGroup(methodGroup, analyzedArguments: null, isMethodGroupConversion: false, useSiteDiagnostics: ref useSiteDiagnostics); diagnostics.Add(methodGroup.Syntax, useSiteDiagnostics); diagnostics.AddRange(resolution.Diagnostics); if (resolution.IsExtensionMethodGroup) { diagnostics.Add(ErrorCode.ERR_NameofExtensionMethod, methodGroup.Syntax.Location); } } /// <summary> /// Returns true if syntax form is OK (so no errors were reported) /// </summary> private bool CheckSyntaxForNameofArgument(ExpressionSyntax argument, out string name, DiagnosticBag diagnostics, bool top = true) { switch (argument.Kind()) { case SyntaxKind.IdentifierName: { var syntax = (IdentifierNameSyntax)argument; name = syntax.Identifier.ValueText; return true; } case SyntaxKind.GenericName: { var syntax = (GenericNameSyntax)argument; name = syntax.Identifier.ValueText; return true; } case SyntaxKind.SimpleMemberAccessExpression: { var syntax = 
(MemberAccessExpressionSyntax)argument; bool ok = true; switch (syntax.Expression.Kind()) { case SyntaxKind.BaseExpression: case SyntaxKind.ThisExpression: break; default: ok = CheckSyntaxForNameofArgument(syntax.Expression, out name, diagnostics, false); break; } name = syntax.Name.Identifier.ValueText; return ok; } case SyntaxKind.AliasQualifiedName: { var syntax = (AliasQualifiedNameSyntax)argument; bool ok = true; if (top) { diagnostics.Add(ErrorCode.ERR_AliasQualifiedNameNotAnExpression, argument.Location); ok = false; } name = syntax.Name.Identifier.ValueText; return ok; } case SyntaxKind.ThisExpression: case SyntaxKind.BaseExpression: case SyntaxKind.PredefinedType: name = ""; if (top) goto default; return true; default: { var code = top ? ErrorCode.ERR_ExpressionHasNoName : ErrorCode.ERR_SubexpressionNotInNameof; diagnostics.Add(code, argument.Location); name = ""; return false; } } } /// <summary> /// Helper method that checks whether there is an invocable 'nameof' in scope. /// </summary> private bool InvocableNameofInScope() { var lookupResult = LookupResult.GetInstance(); const LookupOptions options = LookupOptions.AllMethodsOnArityZero | LookupOptions.MustBeInvocableIfMember; HashSet<DiagnosticInfo> useSiteDiagnostics = null; this.LookupSymbolsWithFallback(lookupResult, SyntaxFacts.GetText(SyntaxKind.NameOfKeyword), useSiteDiagnostics: ref useSiteDiagnostics, arity: 0, options: options); var result = lookupResult.IsMultiViable; lookupResult.Free(); return result; } } }
{ "content_hash": "4983b451130febba13b8bfbe2f09003d", "timestamp": "", "source": "github", "line_count": 1647, "max_line_length": 231, "avg_line_length": 51.387978142076506, "alnum_prop": 0.5700293019518881, "repo_name": "abock/roslyn", "id": "8c1fe47eb58b163e0d944fcb418b1b40f8ab9b84", "size": "84640", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/Compilers/CSharp/Portable/Binder/Binder_Invocation.cs", "mode": "33188", "license": "mit", "language": [ { "name": "1C Enterprise", "bytes": "289100" }, { "name": "Batchfile", "bytes": "9059" }, { "name": "C#", "bytes": "126276814" }, { "name": "C++", "bytes": "5602" }, { "name": "CMake", "bytes": "8276" }, { "name": "Dockerfile", "bytes": "2450" }, { "name": "F#", "bytes": "549" }, { "name": "PowerShell", "bytes": "236203" }, { "name": "Shell", "bytes": "94929" }, { "name": "Visual Basic .NET", "bytes": "70520200" } ], "symlink_target": "" }
<?php declare(strict_types=1); namespace Parsemd\Parsemd\Parsers\Parsemd\Abstractions\Inlines; use Parsemd\Parsemd\Elements\InlineElement; use Parsemd\Parsemd\Lines\Line; use Parsemd\Parsemd\Parsers\Inline; use Parsemd\Parsemd\InlineData; use Parsemd\Parsemd\Parsers\Core\Inlines\AbstractInline; use RuntimeException; abstract class Emphasis extends AbstractInline implements Inline { protected const STRICT_FAIL = false; protected const INTRAWORD_MARKER_BLACKLIST = []; public static function parse(Line $Line) : ?Inline { if ($data = static::parseText($Line)) { return new static( $data['width'], $data['textStart'], $data['start'], $data['text'] ); } return null; } public function interrupts(InlineData $Current, InlineData $Next) : bool { if ($Current->getInline() instanceof Emphasis) { /** * When there are two potential emphasis or strong emphasis spans * with the same closing delimiter, the shorter one (the one that * opens later) takes precedence. * * http://spec.commonmark.org/0.27/#emphasis-and-strong-emphasis */ if ( $Next->end() === $Current->end() and $Next->textEnd() === $Current->textEnd() and $Next->start() > $Current->textStart() ) { return true; } if ( $Next->start() >= $Current->textStart() and $Current->end() >= $Next->end() and $Next->textStart() - $Next->start() >= $Current->textStart() - $Current->start() ) { return true; } if ( $Next->end() >= $Current->textEnd() and $Next->textEnd() < $Current->textEnd() and $Next->start() > $Current->start() and $Next->end() - $Next->textEnd() > $Current->end() - $Current->textEnd() ) { return true; } } return parent::ignores($Current, $Next); } public function ignores(InlineData $Current, InlineData $Next) : bool { if ($Next->getInline() instanceof Emphasis) { /** * When two potential emphasis or strong emphasis spans overlap, so * that the second begins before the first ends and ends after the * first ends, the first takes precedence. 
* * http://spec.commonmark.org/0.27/#emphasis-and-strong-emphasis */ if ( $Next->start() < $Current->end() and $Next->end() > $Current->end() ) { return true; } } return parent::ignores($Current, $Next); } /** * The idea here is to parse the outer inline (sub-structures will be * parsed recursively). * * Unfortunately (for performance) the only way to parse two types of * emphasis that utilise idential marking characters, that may also be * arbitrarily nested, or may just be "literal" is to be aware of * substructures as we are parsing the outer one, so that we know the * correct place to end. * * @param Line $Line * * @return ?array */ protected static function parseText(Line $Line) : ?array { $marker = $Line[0]; $root = static::measureDelimiterRun($Line, $marker); $start = $Line->key(); # ensure there is a root delimiter and it is left flanking if ( ! $root or ! static::isLeftFlanking($Line, $root) or ( ! static::isRunLengthValid($root) and ! (static::canGetNearestValid($root)) ) ) { return null; } $Line = clone($Line); $trail = 0; for (; $Line->valid(); $Line->strcspnJump($marker)) { if ($Line->isEscaped()) { continue; } if ($length = static::measureDelimiterRun($Line, $marker)) { $isLf = static::isLeftFlanking($Line, $length); $isRf = static::isRightFlanking($Line, $length); if (static::canOpen($isLf, $isRf, $length, $trail)) { $trail = static::open($length, $trail); } elseif (static::canClose($isLf, $isRf, $length, $trail, $root)) { $close = [ 'length' => $length, 'trail' => $trail, ]; $trail = static::close($length, $trail); } elseif ($trail === 0) { return null; } $Line->jump($Line->key() + $length -1); } if ($trail === 0) { $Line->next(); break; } } if (isset($close)) { $lsft = 0; $rsft = 0; if ( ! static::isRunLengthValid($close['length'])) { if ( ! (static::canGetNearestValid($close['length']))) { return null; } } $realRoot = $root; if ( ! 
static::isRunLengthValid($root)) { $root = static::getNearestValid($root); } $len = min([$root, $close['length']]); # some cases when we do not close perfectly: # when the root run is too long if ($close['length'] < $realRoot and $trail) { $lsft = $realRoot - $close['length']; } # when the close run is too long if ($close['trail'] < $close['length']) { $sft = $close['length'] - $close['trail']; if (static::canGetNearestValid($close['length'] - $sft)) { $rsft += $sft; } } # when we can tighten both ends if ( $trail === 0 and $realRoot > $root and $close['length'] > $root ) { $sft = $realRoot - static::shortenToValidModulo($realRoot); $lsft += $sft; $rsft += $sft; } $start += $lsft; $end = $Line->key() - $rsft; return [ 'text' => $Line->substr($start + $len, $end - $len), 'textStart' => $len, 'start' => $lsft, 'width' => $end - $start ]; } return null; } /** * Measure a delimiter run as defined in * http://spec.commonmark.org/0.27/#delimiter-run * * Return null if the line pointer has not been placed at the beginning of * a valid delimiter run * * @param Line $Line * @param string $marker * * @return ?int */ protected static function measureDelimiterRun( Line $Line, string $marker ) : ?int { if ($length = strspn($Line->current(), $marker)) { if ( defined('static::MIN_RUN') and defined('static::MAX_RUN') and static::MIN_RUN !== static::MAX_RUN and $Line[-1] === $marker and ! $Line->isEscapedAt($Line->key() -1) ) { return null; } $before = $Line[-1] ?? ''; $after = $Line[$length] ?? ''; /** * http://spec.commonmark.org/0.27/#emphasis-and-strong-emphasis * * Many implementations have also restricted intraword emphasis to * the * forms, to avoid unwanted emphasis in words containing * internal underscores. 
*/ if ( in_array($marker, static::INTRAWORD_MARKER_BLACKLIST, true) and ( preg_match('/^[[:alnum:]]{2}/u', $before.$after) ) ) { return null; } return $length; } return null; } protected static function isRunLengthValid(int $length) : bool { if (defined('static::MAX_RUN') and $length > static::MAX_RUN) { return false; } elseif (defined('static::MIN_RUN') and $length < static::MIN_RUN) { return false; } return true; } protected static function shortenToValidModulo(int $length) : int { $mod = (defined('static::MAX_RUN') ? static::MAX_RUN : $length); if (static::STRICT_FAIL) { return $length; } while (0 !== ($length % $mod) and static::canGetNearestValid($length)) { $length--; } return $length; } protected static function canGetNearestValid(int $length) : bool { return static::getNearestValid($length) !== null; } protected static function getNearestValid(int $length) : ?int { $hasMin = defined('static::MIN_RUN'); $hasMax = defined('static::MAX_RUN'); if ( static::STRICT_FAIL and $hasMax and $length > static::MAX_RUN or $length <= 0 ) { return null; } if ( ! $hasMax and ! $hasMin) { return $length; } if ( $hasMax and static::MAX_RUN <= 0 or $hasMin and static::MIN_RUN <= 0 or $hasMin and $hasMax and static::MIN_RUN > static::MAX_RUN ) { throw new RuntimeException( 'Bad MAX_RUN/MIN_RUN defined.' 
); } # if it's too big (or just big enough), we can shrink it to the MAX_RUN # (or leave it as MAX_RUN) if ($hasMax and $length >= static::MAX_RUN) { return static::MAX_RUN; } # if it's too small, we cannot expand it elseif ($hasMin and $length < static::MIN_RUN) { return null; } # if it's not too small, and not too big, we can leave it as is else { return $length; } } /** * Given a Line with the pointer at the begining of an already valid * delimiter run, determine whether it is left flanking as defined in * http://spec.commonmark.org/0.27/#left-flanking-delimiter-run * * @param Line $Line * @param int $length * * @return bool */ protected static function isLeftFlanking(Line $Line, int $length) : bool { $before = $Line[-1] ?? ' '; $after = $Line[$length] ?? ' '; return ( ! ctype_space($after) and ( ! preg_match('/^\p{P}/', $after) or ctype_space($before) or preg_match('/^\p{P}/', $before) ) ); } /** * Given a Line with the pointer at the begining of an already valid * delimiter run, determine whether it is right flanking as defined in * http://spec.commonmark.org/0.27/#right-flanking-delimiter-run * * @param Line $Line * @param int $length * * @return bool */ protected static function isRightFlanking(Line $Line, int $length) : bool { $before = $Line[-1] ?? ' '; $after = $Line[$length] ?? ' '; return ( ! ctype_space($before) and ( ! preg_match('/^\p{P}/', $before) or ctype_space($after) or preg_match('/^\p{P}/', $after) ) ); } /** * Determine whether the given sequence may open * * @param bool $isLf * @param bool $isRf * @param int $length * @param int $trail * * @return bool */ protected static function canOpen( bool $isLf, bool $isRf, int $length, int $trail ) : bool { return ($isLf and ( ! 
$isRf or $length > $trail)); } /** * Determine whether the given sequence may close * * @param bool $isLf * @param bool $isRf * @param int $length * @param int $root * * @return bool */ protected static function canClose( bool $isLf, bool $isRf, int $length, int $trail, int $root ) : bool { return $isRf and ( ( ! $isLf or $length <= $trail and abs($trail - $length) !== 1 ) and ( ! static::STRICT_FAIL or $root === $length) ); } /** * Open emph with the run length $length. * * @param int $length * @param int $trial * * @return int */ protected static function open(int $length, int $trail) : int { return $trail + $length; } /** * Close emph with the run length $length. * * @param int $length * @param int $trail * * @param int */ protected static function close(int $length, int $trail) : int { $trail -= $length; if ($trail < 0) { $trail = 0; } return $trail; } protected function __construct( int $width, int $textStart, int $start, string $text ) { $this->start = $start; $this->width = $width; $this->textStart = $textStart; $this->Element = new InlineElement(static::TAG); $this->Element->appendContent($text); } }
{ "content_hash": "13e2e6277e88c6936dded7c9d7488c9e", "timestamp": "", "source": "github", "line_count": 519, "max_line_length": 100, "avg_line_length": 27.11175337186898, "alnum_prop": 0.46229834411200343, "repo_name": "Parsemd/Parsemd", "id": "0bd64b8293f4d8ed32ede8a176367eef2a6e76f6", "size": "14071", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Parsers/Parsemd/Abstractions/Inlines/Emphasis.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "116331" } ], "symlink_target": "" }
import os import shutil import snapcraft class WafPlugin(snapcraft.BasePlugin): @classmethod def schema(cls): schema = super().schema() schema['properties']['configflags'] = { 'type': 'array', 'minitems': 1, 'uniqueItems': True, 'items': { 'type': 'string', }, 'default': [], } # Inform Snapcraft of the properties associated with building. If these # change in the YAML Snapcraft will consider the build step dirty. schema['build-properties'].append('configflags') return schema def __init__(self, name, options, project): super().__init__(name, options, project) self.build_packages.extend(['make']) def build(self): super().build() # if os.path.exists(self.builddir): # shutil.rmtree(self.builddir) # os.mkdir(self.builddir) # source_subdir = getattr(self.options, 'source_subdir', None) # if source_subdir: # sourcedir = os.path.join(self.sourcedir, source_subdir) # else: # sourcedir = self.sourcedir env = self._build_environment() # Run bootstrap.py to download waf binary self.run(['./bootstrap.py'], env=env) # Run waf to configure print(env) self.run(['./waf', '-v', 'configure', '--prefix=/usr/local'], env=env) # Run waf to build the sources self.run(['./waf', '-v'], env=env) # Install self.run(['./waf', '-v', 'install', '--destdir=' + self.installdir], env=env) def _build_environment(self): env = os.environ.copy() env['QT_SELECT'] = '5' env['LFLAGS'] = '-L ' + ' -L'.join( ['{0}/lib', '{0}/usr/lib', '{0}/lib/{1}', '{0}/usr/lib/{1}']).format( self.project.stage_dir, self.project.arch_triplet) env['INCDIRS'] = ':'.join( ['{0}/include', '{0}/usr/include', '{0}/include/{1}', '{0}/usr/include/{1}']).format( self.project.stage_dir, self.project.arch_triplet) env['CPATH'] = ':'.join( ['{0}/include', '{0}/usr/include', '{0}/include/{1}', '{0}/usr/include/{1}']).format( self.project.stage_dir, self.project.arch_triplet) env['LIBRARY_PATH'] = '$LD_LIBRARY_PATH:' + ':'.join( ['{0}/lib', '{0}/usr/lib', '{0}/lib/{1}', '{0}/usr/lib/{1}']).format( self.project.stage_dir, 
self.project.arch_triplet) return env
{ "content_hash": "bc36d7f0d3673b74ec59236558b739b4", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 85, "avg_line_length": 34.13157894736842, "alnum_prop": 0.5169622205088666, "repo_name": "Zap123/snappy-playpen", "id": "01771c957103ca618daa4c8a5cb62f9b0e8d1078", "size": "2595", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "mpv/parts/plugins/x-waf.py", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "921" }, { "name": "Python", "bytes": "19139" }, { "name": "Shell", "bytes": "21752" } ], "symlink_target": "" }
layout: home title: Contribute sidebar: contribute lang: en --- <div class="page-header"> <h1>How to contribute ?</h1> </div> Thelia project is host on [GitHub](https://github.com/thelia/thelia). For contributing you just have to fork the project and submit [Pull Request](https://help.github.com/articles/using-pull-requests) or [Issues](https://github.com/thelia/thelia) ## Coding Standard Thelia follow [PSR-I](http://www.php-fig.org/psr/psr-1/) and [PSR-2](http://www.php-fig.org/psr/psr-2/) therefore you must follow this rules. Don't worry, you can use some tools for doing this like the [PHP Coding Standards Fixer](http://cs.sensiolabs.org/) ## Pull Request [Creating a Pull request](https://help.github.com/articles/creating-a-pull-request) is the better way for submitting a patch but there are some rules to follow. First of all, fork [Thelia](https://github.com/thelia/thelia) repo and create a new branch, never work on the master branch, use it only for syncing with [Thelia](https://github.com/thelia/thelia) repo. ``` $ git checkout -b new-branch master ``` After finishing your modification you have to rebase your branch and push it to your repo ``` $ git remote add upstream https://github.com/thelia/thelia.git $ git checkout master $ git pull --ff-only upstream master $ git checkout new-branch $ git rebase master $ git push origin new-branch ``` Next and last step, submit a Pull Request as indicated in the [GitHub documentation](https://help.github.com/articles/creating-a-pull-request). If you want to do more, read this usefull blog post : [http://williamdurand.fr/2013/11/20/on-creating-pull-requests/](http://williamdurand.fr/2013/11/20/on-creating-pull-requests/) ## SQL scripts modification From Thelia version 2.2, if you submit modifications that adds new data or change the structure of the database, please read the following documentation. SQL files should not be modified directly as they are generated by a Thelia command. 
Instead, you should edit the smarty templates. The first one is the file `setup/insert.sql.tpl` that is used to generate the `insert.sql` file. Others are located in `setup/update/tpl` and are used to generate all SQL update files. This templates only differ from sql for **i18n** tables. *But we could imagine other uses with Smarty*. A typical application : ```smarty ... INSERT INTO `module_i18n` (`id`, `locale`, `title`, `description`, `chapo`, `postscriptum`) VALUES {foreach $locales as $locale} (@max_id+1, '{$locale}', {intl l='Navigation block' locale=$locale}, NULL, NULL, NULL), (@max_id+2, '{$locale}', {intl l='Currency block' locale=$locale}, NULL, NULL, NULL), ... (@max_id+12, '{$locale}', {intl l='New Products block' locale=$locale}, NULL, NULL, NULL), (@max_id+13, '{$locale}', {intl l='Products offer block' locale=$locale}, NULL, NULL, NULL){if ! $locale@last},{/if} {/foreach} ; ... ``` - `{foreach $locales as $locale}` is used to iterate on the list of locales : en_US, fr_FR, ... - `{intl l='Navigation block' locale=$locale}` is used to display the translation corresponding to the `l` attribute. This `intl` function differs from the classic one used in Thelia. If the translation does not exist, no fallbacks will be used by default. The text will be escaped for SQL and quotes will be placed around the input string. If the string is empty then it will be replaced by a `NULL` value. The attribute `in_string="1"` is used to disable the placement of quotes around the string. The attribute `use_default="1"` allow you to use the `l` attribute as a fallback if the translation does not exist. - don't forget to use the `{if ! $locale@last},{/if}` before the `{/foreach}` otherwise your SQL will not be valid. Keep attention on brackets `{` or `}` that is used by smarty. You can use `{ldelim}`, `{rdelim}` or `{literal}...{/literal}` to escape non smarty code. To translate the new string, you can use the translation page in the back office. 
I you modify templates you have to regenerate all sql files, you can use this Thelia command : `php Thelia generate:sql` You can also limit to a specific list of locales if you use `locales` parameter : `php Thelia generate:sql --locales='en_US,fr_FR'` ## How to contribute new or update Thelia translations Translations are contributed by Thelia users worldwide. The translation work is coordinated at [Crowdin](http://crowdin.com). The Thelia project is located at <http://translate.thelia.net/>. Translations for **non english** languages should only be done on <http://translate.thelia.net/>, not in a Thelia development website and submitted to us with a pull request on GitHub. During the development stage, only english strings should be used inside Thelia and submitted with a pull request. Prior to any release, Thelia maintainers will make an announcement and we'll have a couple of weeks of string freeze in order to give people time to complete the translations. Once translations are done, Thelia maintainers will integrate all translations in Thelia. If you want to contribute to translation or want to discuss specific translations, go to the [Thelia project page](http://translate.thelia.net/). If you would like to help out with translating or adding a language that isn’t yet translated, here’s what to do: - Visit the [Thelia project page](http://translate.thelia.net/). - Sign up at [Crowdin](http://crowdin.com) or log in if you already have an account. - On the Thelia project page, click the **Join Translation Project** button. - Choose the language you want to work on, or – in case the language doesn’t exist yet – request a new language by clicking on the **Contact** link of one of the managers of the project. - Then Select a file in the list and start translating. 
if you encounter any problems, please consult [Crowdin Knowledge Base](https://support.crowdin.com/) or open a [new discussion on Thelia project page](http://translate.thelia.net/project/thelia/discussions).
{ "content_hash": "02738e8e1d2124ec1aaef9fe8752818b", "timestamp": "", "source": "github", "line_count": 107, "max_line_length": 316, "avg_line_length": 56.06542056074766, "alnum_prop": 0.743790631771962, "repo_name": "Mertiozys/thelia.github.io", "id": "dd2aeee864986ae20e1bf7307cfe5a34221269b9", "size": "6013", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "en/documentation/contribute.md", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "10174" }, { "name": "CSS", "bytes": "582171" }, { "name": "HTML", "bytes": "1409560970" }, { "name": "JavaScript", "bytes": "2891409" }, { "name": "PHP", "bytes": "2599" }, { "name": "Ruby", "bytes": "49" }, { "name": "Shell", "bytes": "82" } ], "symlink_target": "" }
export type FlatMatrix = number[] export type DeepMatrix = number[][] export type Matrix = FlatMatrix | DeepMatrix function isDeepMatrix(matrix: Matrix): matrix is DeepMatrix { return Array.isArray(matrix[0]) } export function ensureFlatMatrix(matrix: Matrix): FlatMatrix { if (!matrix.length) { throw new Error('Matrix must have length') } let flatMatrix: FlatMatrix = matrix as FlatMatrix if (isDeepMatrix(matrix)) { flatMatrix = matrix.reduce((acc: any, arr: any) => acc.concat(arr), []) } const matrixSize = Math.sqrt(flatMatrix.length) if (matrixSize !== Math.round(matrixSize)) { throw new Error('Matrix must be square') } return flatMatrix }
{ "content_hash": "b3a1e7046e092edf7f7595d2a5ed2f5d", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 75, "avg_line_length": 27.44, "alnum_prop": 0.7084548104956269, "repo_name": "ouranos-oss/js-image", "id": "4cb2995efe648045ceb30012b24a97d427d6a7df", "size": "686", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/image/lib/matrix.ts", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "53142" }, { "name": "TypeScript", "bytes": "59124" } ], "symlink_target": "" }
package org.redisson.client.codec; import java.io.IOException; import org.redisson.client.handler.State; import org.redisson.client.protocol.Decoder; import org.redisson.client.protocol.Encoder; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; /** * * @author Nikita Koksharov * */ public class ByteArrayCodec extends BaseCodec { public static final ByteArrayCodec INSTANCE = new ByteArrayCodec(); private final Encoder encoder = new Encoder() { @Override public ByteBuf encode(Object in) throws IOException { byte[] payload = (byte[])in; ByteBuf out = ByteBufAllocator.DEFAULT.buffer(payload.length); out.writeBytes(payload); return out; } }; private final Decoder<Object> decoder = new Decoder<Object>() { @Override public Object decode(ByteBuf buf, State state) { byte[] result = new byte[buf.readableBytes()]; buf.readBytes(result); return result; } }; @Override public Decoder<Object> getValueDecoder() { return decoder; } @Override public Encoder getValueEncoder() { return encoder; } }
{ "content_hash": "2f1aa090bbdece91f23652db33b83113", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 74, "avg_line_length": 24.019607843137255, "alnum_prop": 0.6440816326530612, "repo_name": "jackygurui/redisson", "id": "9c9c0c7746dd52400b821efbbf40c97ff7393489", "size": "1824", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "redisson/src/main/java/org/redisson/client/codec/ByteArrayCodec.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "5826881" } ], "symlink_target": "" }
var handlebars = require('handlebars'); var swag = require('swag'); var fs = require('fs'); //test firebase json var test = require('../firebase.json').apps["watch-coffee"]; swag.registerHelpers(handlebars); var persitantDataSize = 0; handlebars.registerHelper('cvarname', function(items, options){ return cvarname(items); }); handlebars.registerHelper('appKeysAccessor', function(items, options){ return appKeysAccessor(test, items); }); handlebars.registerHelper('addToPersistant', function(items, options){ console.log(items); return addToPersistant(items); }); function firebaseToHeader(input) { var headerTemplate = fs.readFileSync('./autoconfig.h'); var headerBars = handlebars.compile(cleanUpWhiteSpace(removeComments(headerTemplate.toString()))); return headerBars(input); } function cvarname(input) { if(!input) { console.log("input was undefined! input: " + input); return "problem" } else { return input.replace(/\s[^\w]+|\s|[^\w]\s+|[^\w]+/g, '_'); } } function addToPersistant(bytes) { persistantDataSize += bytes; if(bytes > 4000){ console.log("ERROR: The size of all persisted values cannot exceed 4KB but your parameters require "+persistantDataSize+"B"); } } function appKeysAccessor(input, selectorId) { return input.configuration.appKeys[selectorId]; } function removeComments(input) { //removes: /*jhdjhdf""*/ input = input.replace(/\/\*.+?\*\//g, ''); //console.log(input); return input; } function cleanUpWhiteSpace(input) { input = input.replace(/^ {1,2}/gm, ''); //console.log(input); return input; } console.log(firebaseToHeader(test.configuration)); module.exports = { cvarname: cvarname, firebaseToHeader: firebaseToHeader, appKeysAccessor: appKeysAccessor };
{ "content_hash": "f99e4a84a03a8f727014ec7a2604e435", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 129, "avg_line_length": 25.855072463768117, "alnum_prop": 0.6995515695067265, "repo_name": "RapidCollective/watch-settings-server", "id": "04a27520a546fa6499d0b1c233b7336a063a9703", "size": "1784", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/code-generator.js", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "4898" }, { "name": "CSS", "bytes": "6613" }, { "name": "Go", "bytes": "1375" }, { "name": "JavaScript", "bytes": "12843" }, { "name": "Shell", "bytes": "893" } ], "symlink_target": "" }
/* ========================================================================== jQuery plugin settings and other scripts ========================================================================== */ $(document).ready(function(){ // Sticky footer var bumpIt = function() { $('body').css('margin-bottom', $('.page__footer').outerHeight(true)); }, didResize = false; bumpIt(); $(window).resize(function() { didResize = true; }); setInterval(function() { if(didResize) { didResize = false; bumpIt(); } }, 250); // FitVids init $("#main").fitVids(); // Follow menu drop down $(".author__urls-wrapper button").on("click", function() { $(".author__urls").toggleClass("is--visible"); $(".author__urls-wrapper button").toggleClass("open"); }); // Search toggle $(".search__toggle").on("click", function() { $(".search-content").toggleClass("is--visible"); $(".initial-content").toggleClass("is--hidden"); // set focus on input setTimeout(function() { $("#search").focus(); }, 400); }); // init smooth scroll $("a").smoothScroll({offset: -20}); // add lightbox class to all image links $("a[href$='.jpg'],a[href$='.jpeg'],a[href$='.JPG'],a[href$='.png'],a[href$='.gif']").addClass("image-popup"); // Magnific-Popup options $(".image-popup").magnificPopup({ // disableOn: function() { // if( $(window).width() < 500 ) { // return false; // } // return true; // }, type: 'image', tLoading: 'Loading image #%curr%...', gallery: { enabled: true, navigateByImgClick: true, preload: [0,1] // Will preload 0 - before current, and 1 after the current image }, image: { tError: '<a href="%url%">Image #%curr%</a> could not be loaded.', }, removalDelay: 500, // Delay in milliseconds before popup is removed // Class that is added to body when popup is open. 
// make it unique to apply your CSS animations just to this exact popup mainClass: 'mfp-zoom-in', callbacks: { beforeOpen: function() { // just a hack that adds mfp-anim class to markup this.st.image.markup = this.st.image.markup.replace('mfp-figure', 'mfp-figure mfp-with-anim'); } }, closeOnContentClick: true, midClick: true // allow opening popup on middle mouse click. Always set it to true if you don't provide alternative source. }); });
{ "content_hash": "c6b1086ddca1e51f0de8039106ddca55", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 127, "avg_line_length": 30.024390243902438, "alnum_prop": 0.5491470349309504, "repo_name": "opening-pathways/website", "id": "d77b336a2dd295201d3f819d2ca99a4ed206b257", "size": "2462", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "assets/js/_main.js", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "99731" }, { "name": "HTML", "bytes": "112129" }, { "name": "JavaScript", "bytes": "89775" }, { "name": "Ruby", "bytes": "8911" } ], "symlink_target": "" }
using Cassette.IO; namespace Cassette { public class CompileContext { /// <summary> /// The application relative path of the file being compiled. /// </summary> public string SourceFilePath { get; set; } /// <summary> /// The root directory of the application. /// </summary> public IDirectory RootDirectory { get; set; } } }
{ "content_hash": "04c64fc147428101f5969409a1be5462", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 69, "avg_line_length": 26.0625, "alnum_prop": 0.5563549160671463, "repo_name": "mhoyer/cassette", "id": "19fffda900e6895f488d561e0eced9e3f3a3145d", "size": "417", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "src/Cassette/CompileContext.cs", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
'use strict'; const Sequelize = require('sequelize'); const db = require('APP/db'); const Order = db.define('order', { isResolved: Sequelize.BOOLEAN, deliveredAt: Sequelize.DATE, }, { getterMethods: { getFormatedDate: function (){ let formattedDate; return this.deliveredAt; } }, instanceMethods: { getproducts: function(){ return this.getProducts(); }, addproduct: function(id){ return this.addProduct(id) } } }); module.exports = Order;
{ "content_hash": "fa7e0dfaab9301df3f64325e5f9e8698", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 39, "avg_line_length": 19.423076923076923, "alnum_prop": 0.6316831683168317, "repo_name": "pdelac01/ComicShopper", "id": "8797307f06c95e523b5c8a9a9e08ec748f663798", "size": "505", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "db/models/order.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "4405" }, { "name": "HTML", "bytes": "1197922" }, { "name": "JavaScript", "bytes": "172401" }, { "name": "Shell", "bytes": "1794" } ], "symlink_target": "" }
namespace llvm { /// @brief Lazy-emitting IR layer. /// /// This layer accepts sets of LLVM IR Modules (via addModuleSet), but does /// not immediately emit them the layer below. Instead, emissing to the base /// layer is deferred until some symbol in the module set is requested via /// getSymbolAddress. template <typename BaseLayerT> class LazyEmittingLayer { public: typedef typename BaseLayerT::ModuleSetHandleT BaseLayerHandleT; private: class EmissionDeferredSet { public: EmissionDeferredSet() : EmitState(NotEmitted) {} virtual ~EmissionDeferredSet() {} uint64_t Search(StringRef Name, bool ExportedSymbolsOnly, BaseLayerT &B) { switch (EmitState) { case NotEmitted: if (Provides(Name, ExportedSymbolsOnly)) { EmitState = Emitting; Handle = Emit(B); EmitState = Emitted; } else return 0; break; case Emitting: // The module has been added to the base layer but we haven't gotten a // handle back yet so we can't use lookupSymbolAddressIn. Just return // '0' here - LazyEmittingLayer::getSymbolAddress will do a global // search in the base layer when it doesn't find the symbol here, so // we'll find it in the end. return 0; case Emitted: // Nothing to do. Go ahead and search the base layer. 
break; } return B.lookupSymbolAddressIn(Handle, Name, ExportedSymbolsOnly); } void RemoveModulesFromBaseLayer(BaseLayerT &BaseLayer) { if (EmitState != NotEmitted) BaseLayer.removeModuleSet(Handle); } template <typename ModuleSetT> static std::unique_ptr<EmissionDeferredSet> create(BaseLayerT &B, ModuleSetT Ms, std::unique_ptr<RTDyldMemoryManager> MM); protected: virtual bool Provides(StringRef Name, bool ExportedSymbolsOnly) const = 0; virtual BaseLayerHandleT Emit(BaseLayerT &BaseLayer) = 0; private: enum { NotEmitted, Emitting, Emitted } EmitState; BaseLayerHandleT Handle; }; template <typename ModuleSetT> class EmissionDeferredSetImpl : public EmissionDeferredSet { public: EmissionDeferredSetImpl(ModuleSetT Ms, std::unique_ptr<RTDyldMemoryManager> MM) : Ms(std::move(Ms)), MM(std::move(MM)) {} protected: BaseLayerHandleT Emit(BaseLayerT &BaseLayer) override { // We don't need the mangled names set any more: Once we've emitted this // to the base layer we'll just look for symbols there. MangledNames.reset(); return BaseLayer.addModuleSet(std::move(Ms), std::move(MM)); } bool Provides(StringRef Name, bool ExportedSymbolsOnly) const override { // FIXME: We could clean all this up if we had a way to reliably demangle // names: We could just demangle name and search, rather than // mangling everything else. // If we have already built the mangled name set then just search it. if (MangledNames) { auto VI = MangledNames->find(Name); if (VI == MangledNames->end()) return false; return !ExportedSymbolsOnly || VI->second; } // If we haven't built the mangled name set yet, try to build it. As an // optimization this will leave MangledNames set to nullptr if we find // Name in the process of building the set. 
buildMangledNames(Name, ExportedSymbolsOnly); if (!MangledNames) return true; return false; } private: // If the mangled name of the given GlobalValue matches the given search // name (and its visibility conforms to the ExportedSymbolsOnly flag) then // just return 'true'. Otherwise, add the mangled name to the Names map and // return 'false'. bool addGlobalValue(StringMap<bool> &Names, const GlobalValue &GV, const Mangler &Mang, StringRef SearchName, bool ExportedSymbolsOnly) const { // Modules don't "provide" decls or common symbols. if (GV.isDeclaration() || GV.hasCommonLinkage()) return false; // Mangle the GV name. std::string MangledName; { raw_string_ostream MangledNameStream(MangledName); Mang.getNameWithPrefix(MangledNameStream, &GV, false); } // Check whether this is the name we were searching for, and if it is then // bail out early. if (MangledName == SearchName) if (!ExportedSymbolsOnly || GV.hasDefaultVisibility()) return true; // Otherwise add this to the map for later. Names[MangledName] = GV.hasDefaultVisibility(); return false; } // Build the MangledNames map. Bails out early (with MangledNames left set // to nullptr) if the given SearchName is found while building the map. 
void buildMangledNames(StringRef SearchName, bool ExportedSymbolsOnly) const { assert(!MangledNames && "Mangled names map already exists?"); auto Names = llvm::make_unique<StringMap<bool>>(); for (const auto &M : Ms) { Mangler Mang(M->getDataLayout()); for (const auto &GV : M->globals()) if (addGlobalValue(*Names, GV, Mang, SearchName, ExportedSymbolsOnly)) return; for (const auto &F : *M) if (addGlobalValue(*Names, F, Mang, SearchName, ExportedSymbolsOnly)) return; } MangledNames = std::move(Names); } ModuleSetT Ms; std::unique_ptr<RTDyldMemoryManager> MM; mutable std::unique_ptr<StringMap<bool>> MangledNames; }; typedef std::list<std::unique_ptr<EmissionDeferredSet>> ModuleSetListT; BaseLayerT &BaseLayer; ModuleSetListT ModuleSetList; public: /// @brief Handle to a set of loaded modules. typedef typename ModuleSetListT::iterator ModuleSetHandleT; /// @brief Construct a lazy emitting layer. LazyEmittingLayer(BaseLayerT &BaseLayer) : BaseLayer(BaseLayer) {} /// @brief Add the given set of modules to the lazy emitting layer. /// /// This method stores the set of modules in a side table, rather than /// immediately emitting them to the next layer of the JIT. When the address /// of a symbol provided by this set is requested (via getSymbolAddress) it /// triggers the emission of this set to the layer below (along with the given /// memory manager instance), and returns the address of the requested symbol. template <typename ModuleSetT> ModuleSetHandleT addModuleSet(ModuleSetT Ms, std::unique_ptr<RTDyldMemoryManager> MM) { return ModuleSetList.insert( ModuleSetList.end(), EmissionDeferredSet::create(BaseLayer, std::move(Ms), std::move(MM))); } /// @brief Remove the module set represented by the given handle. /// /// This method will free the memory associated with the given module set, /// both in this layer, and the base layer. 
void removeModuleSet(ModuleSetHandleT H) { H->RemoveModulesFromBaseLayer(); ModuleSetList.erase(H); } /// @brief Get the address of a symbol provided by this layer, or some layer /// below this one. /// /// When called for a symbol that has been added to this layer (via /// addModuleSet) but not yet emitted, this will trigger the emission of the /// module set containing the definiton of the symbol. uint64_t getSymbolAddress(const std::string &Name, bool ExportedSymbolsOnly) { // Look up symbol among existing definitions. if (uint64_t Addr = BaseLayer.getSymbolAddress(Name, ExportedSymbolsOnly)) return Addr; // If not found then search the deferred sets. The call to 'Search' will // cause the set to be emitted to the next layer if it provides a definition // of 'Name'. for (auto &DeferredSet : ModuleSetList) if (uint64_t Addr = DeferredSet->Search(Name, ExportedSymbolsOnly, BaseLayer)) return Addr; // If no definition found anywhere return 0. return 0; } /// @brief Get the address of the given symbol in the context of the set of /// compiled modules represented by the handle H. This call is /// forwarded to the base layer's implementation. uint64_t lookupSymbolAddressIn(ModuleSetHandleT H, const std::string &Name, bool ExportedSymbolsOnly) { return (*H)->Search(Name, ExportedSymbolsOnly, BaseLayer); } }; template <typename BaseLayerT> template <typename ModuleSetT> std::unique_ptr<typename LazyEmittingLayer<BaseLayerT>::EmissionDeferredSet> LazyEmittingLayer<BaseLayerT>::EmissionDeferredSet::create( BaseLayerT &B, ModuleSetT Ms, std::unique_ptr<RTDyldMemoryManager> MM) { return llvm::make_unique<EmissionDeferredSetImpl<ModuleSetT>>(std::move(Ms), std::move(MM)); } } #endif // LLVM_EXECUTIONENGINE_ORC_LAZYEMITTINGLAYER_H
{ "content_hash": "fd88fa305393db2f694d7a5866b23af5", "timestamp": "", "source": "github", "line_count": 236, "max_line_length": 80, "avg_line_length": 38.00847457627118, "alnum_prop": 0.6638795986622074, "repo_name": "impedimentToProgress/Ratchet", "id": "c9463e4071af7292d050f101ae1b744792466c46", "size": "9649", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "llvm/include/llvm/ExecutionEngine/Orc/LazyEmittingLayer.h", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "7704512" }, { "name": "Batchfile", "bytes": "13322" }, { "name": "C", "bytes": "781703" }, { "name": "C++", "bytes": "43693178" }, { "name": "CMake", "bytes": "255669" }, { "name": "CSS", "bytes": "9266" }, { "name": "Emacs Lisp", "bytes": "11520" }, { "name": "Go", "bytes": "131716" }, { "name": "LLVM", "bytes": "30529312" }, { "name": "M4", "bytes": "97430" }, { "name": "Makefile", "bytes": "287076" }, { "name": "OCaml", "bytes": "401181" }, { "name": "Objective-C", "bytes": "392650" }, { "name": "Perl", "bytes": "27878" }, { "name": "Python", "bytes": "512356" }, { "name": "Roff", "bytes": "18799" }, { "name": "Shell", "bytes": "120669" }, { "name": "SourcePawn", "bytes": "2461" }, { "name": "Standard ML", "bytes": "2841" }, { "name": "Vim script", "bytes": "13485" } ], "symlink_target": "" }
from setuptools import setup, find_packages # Get the long description from the relevant file # with codecs_open('README.rst', encoding='utf-8') as f: # long_description = f.read() setup(name='mica', version='0.0.1', description=u"Matplotlib Improved Color Abbreviations", #long_description=long_description, classifiers=[], keywords='Matplotlib', author=u"Julian Irwin", author_email='julian.irwin@gmail.com', url='https://github.com/julianirwin/mica', license='MIT', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, setup_requires=['nose>=1.0'], extras_require={ 'test': ['nose'], }, test_suite = 'nose.collector', entry_points=""" [console_scripts] pyskel=pyskel.scripts.cli:cli """ )
{ "content_hash": "ab89848cec3f45b4f6c4efabc60f0fbb", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 72, "avg_line_length": 28.806451612903224, "alnum_prop": 0.6136618141097424, "repo_name": "julianirwin/mica", "id": "fe49d3b079b6661b4e72caa469d3545b551e756b", "size": "934", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "setup.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "1845" } ], "symlink_target": "" }
'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var LS_ACCESS_TOKEN_KEY = exports.LS_ACCESS_TOKEN_KEY = 'gitment-comments-token'; var LS_USER_KEY = exports.LS_USER_KEY = 'gitment-user-info'; var NOT_INITIALIZED_ERROR = exports.NOT_INITIALIZED_ERROR = new Error('Comments Not Initialized'); //# sourceMappingURL=constants.js.map
{ "content_hash": "093edca76b5767e54e228fc1d142a763", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 98, "avg_line_length": 36, "alnum_prop": 0.7388888888888889, "repo_name": "tanwubin/tanwubin.github.io", "id": "89c7517f1bc673592c98765240df4d680b285a5d", "size": "360", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "assets/gitment/node_modules/gitment/dist/constants.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "36397" }, { "name": "HTML", "bytes": "16917" }, { "name": "JavaScript", "bytes": "1274" }, { "name": "Less", "bytes": "78481" }, { "name": "Ruby", "bytes": "2015" }, { "name": "SCSS", "bytes": "79489" } ], "symlink_target": "" }