repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
apiman/apiman | test/common/src/main/java/io/apiman/test/common/resttest/IGatewayTestServer.java | 1286 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.test.common.resttest;
import com.fasterxml.jackson.databind.JsonNode;
/**
* Any gateway under test would need to implement this interface, along
* with standing up an actual gateway instance (with API). The gateway
* REST tests will then send http messages to the appropriate endpoints
* in order to affect the test being run.
*
* @author eric.wittmann@redhat.com
*/
public interface IGatewayTestServer {

    /** Applies the test-suite configuration to the server before it is started. */
    void configure(JsonNode config);

    /** @return the base URL of the gateway's configuration (management) API */
    String getApiEndpoint();

    /** @return the base URL of the gateway's runtime endpoint */
    String getGatewayEndpoint();

    /** @return the URL of the echo endpoint used by the tests */
    String getEchoTestEndpoint();

    /** Starts the gateway instance under test. */
    void start();

    /** Stops the gateway instance under test. */
    void stop();

    /**
     * Invoked between test steps with the next endpoint to be exercised.
     * NOTE(review): exact contract not visible here — confirm against implementations.
     */
    void next(String endpoint);

}
| apache-2.0 |
cloudbau/glance | glance/db/sqlalchemy/migrate_repo/versions/027_checksum_index.py | 1195 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table, Index
# Name of the index this migration creates (upgrade) and drops (downgrade).
INDEX_NAME = 'checksum_image_idx'
def upgrade(migrate_engine):
    """Create an index on images.checksum."""
    metadata = MetaData()
    metadata.bind = migrate_engine
    images_table = Table('images', metadata, autoload=True)
    checksum_index = Index(INDEX_NAME, images_table.c.checksum)
    checksum_index.create(migrate_engine)
def downgrade(migrate_engine):
    """Drop the index on images.checksum created by upgrade()."""
    metadata = MetaData()
    metadata.bind = migrate_engine
    images_table = Table('images', metadata, autoload=True)
    checksum_index = Index(INDEX_NAME, images_table.c.checksum)
    checksum_index.drop(migrate_engine)
| apache-2.0 |
nugget/home-assistant | homeassistant/components/sensor/geizhals.py | 3083 | """
Parse prices of a device from geizhals.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.geizhals/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
from homeassistant.helpers.entity import Entity
from homeassistant.const import CONF_NAME
REQUIREMENTS = ['geizhals==0.0.9']

_LOGGER = logging.getLogger(__name__)

# Keys used in the platform configuration entry.
CONF_DESCRIPTION = 'description'
CONF_PRODUCT_ID = 'product_id'
CONF_LOCALE = 'locale'

# Icon shown for this sensor in the frontend.
ICON = 'mdi:coin'

# Prices are fetched at most once every 120 seconds (see @Throttle on update()).
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=120)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_NAME): cv.string,
    vol.Required(CONF_PRODUCT_ID): cv.positive_int,
    vol.Optional(CONF_DESCRIPTION, default='Price'): cv.string,
    # Regional geizhals site to query; restricted to the supported locales.
    vol.Optional(CONF_LOCALE, default='DE'): vol.In(
        ['AT',
         'EU',
         'DE',
         'UK',
         'PL']),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Geizwatch sensor."""
    sensor = Geizwatch(
        config.get(CONF_NAME),
        config.get(CONF_DESCRIPTION),
        config.get(CONF_PRODUCT_ID),
        config.get(CONF_LOCALE),
    )
    # True -> fetch initial state before the entity is added.
    add_entities([sensor], True)
class Geizwatch(Entity):
    """Implementation of Geizwatch.

    Wraps the ``geizhals`` scraper for a single product and exposes the best
    price as the sensor state plus the top four prices as attributes.
    """

    def __init__(self, name, description, product_id, domain):
        """Initialize the sensor.

        :param name: entity name shown in the frontend
        :param description: free-text label exposed as an attribute
        :param product_id: geizhals product id to watch
        :param domain: geizhals locale (e.g. 'DE', 'AT')
        """
        from geizhals import Device, Geizhals

        # internal
        self._name = name
        self._geizhals = Geizhals(product_id, domain)
        self._device = Device()

        # external
        self.description = description
        self.product_id = product_id

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def icon(self):
        """Return the icon for the frontend."""
        return ICON

    @property
    def state(self):
        """Return the best price of the selected product, or None if unknown."""
        if not self._device.prices:
            return None
        return self._device.prices[0]

    @property
    def device_state_attributes(self):
        """Return the state attributes.

        Pads a *copy* of the price list to four entries. The previous
        implementation appended the string 'None' to self._device.prices
        in place, which leaked placeholder values into the state property
        (state would report the string 'None' instead of None).
        """
        prices = list(self._device.prices)
        while len(prices) < 4:
            prices.append('None')
        attrs = {'device_name': self._device.name,
                 'description': self.description,
                 'unit_of_measurement': self._device.price_currency,
                 'product_id': self.product_id,
                 'price1': prices[0],
                 'price2': prices[1],
                 'price3': prices[2],
                 'price4': prices[3]}
        return attrs

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest price from geizhals and updates the state."""
        self._device = self._geizhals.parse()
| apache-2.0 |
mdanielwork/intellij-community | plugins/IntelliLang/java-support/org/intellij/plugins/intelliLang/inject/java/ConcatenationInjector.java | 22416 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.plugins.intelliLang.inject.java;
import com.intellij.lang.Language;
import com.intellij.lang.LanguageParserDefinitions;
import com.intellij.lang.injection.ConcatenationAwareInjector;
import com.intellij.lang.injection.MultiHostRegistrar;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.Trinity;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.psi.injection.ReferenceInjector;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import org.intellij.plugins.intelliLang.Configuration;
import org.intellij.plugins.intelliLang.inject.InjectedLanguage;
import org.intellij.plugins.intelliLang.inject.InjectorUtils;
import org.intellij.plugins.intelliLang.inject.LanguageInjectionSupport;
import org.intellij.plugins.intelliLang.inject.TemporaryPlacesRegistry;
import org.intellij.plugins.intelliLang.inject.config.BaseInjection;
import org.intellij.plugins.intelliLang.util.AnnotationUtilEx;
import org.intellij.plugins.intelliLang.util.ContextComputationProcessor;
import org.intellij.plugins.intelliLang.util.PsiUtilEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * Injects languages into Java string concatenations (string/char literals joined
 * with {@code +}). Injection sources are, in order of precedence: temporary
 * injections registered by the user via {@link TemporaryPlacesRegistry},
 * language annotations on the relevant declaration, and XML-configured
 * injections from {@link Configuration}.
 *
 * @author cdr
 */
public class ConcatenationInjector implements ConcatenationAwareInjector {
  // Injection configuration: annotation names, XML injections, DFA options.
  private final Configuration myConfiguration;
  private final Project myProject;
  // Registry of "temporary" injections applied by the user through intentions.
  private final TemporaryPlacesRegistry myTemporaryPlacesRegistry;
  // The Java language-injection support, used for non-temporary injections.
  private final LanguageInjectionSupport mySupport;

  public ConcatenationInjector(Configuration configuration, Project project, TemporaryPlacesRegistry temporaryPlacesRegistry) {
    myConfiguration = configuration;
    myProject = project;
    myTemporaryPlacesRegistry = temporaryPlacesRegistry;
    mySupport = InjectorUtils.findNotNullInjectionSupport(JavaLanguageInjectionSupport.JAVA_SUPPORT_ID);
  }

  /**
   * Entry point called by the platform for each concatenation.
   * Bails out early unless at least one operand is a string/char literal;
   * a temporary injection found on any literal operand takes precedence.
   */
  @Override
  public void getLanguagesToInject(@NotNull MultiHostRegistrar registrar, @NotNull PsiElement... operands) {
    if (operands.length == 0) return;
    boolean hasLiteral = false;
    InjectedLanguage tempInjectedLanguage = null;
    PsiFile containingFile = null;
    for (PsiElement operand : operands) {
      if (PsiUtilEx.isStringOrCharacterLiteral(operand)) {
        hasLiteral = true;
        if (containingFile == null) {
          containingFile = operands[0].getContainingFile();
        }
        tempInjectedLanguage = myTemporaryPlacesRegistry.getLanguageFor((PsiLanguageInjectionHost)operand, containingFile);
        if (tempInjectedLanguage != null) break;
      }
    }
    if (!hasLiteral) return;
    processOperandsInjection(registrar, containingFile, tempInjectedLanguage, operands);
  }

  /**
   * Runs the {@link InjectionProcessor} over the operands. When a temporary
   * injected language is present, a synthetic {@link BaseInjection} for that
   * language is processed directly; otherwise the full annotation/XML search
   * is performed.
   */
  private void processOperandsInjection(@NotNull MultiHostRegistrar registrar,
                                        @NotNull PsiFile containingFile, @Nullable InjectedLanguage tempInjectedLanguage,
                                        @NotNull PsiElement[] operands) {
    Language tempLanguage = tempInjectedLanguage == null ? null : tempInjectedLanguage.getLanguage();
    LanguageInjectionSupport injectionSupport = tempLanguage == null
                                                ? mySupport
                                                : TemporaryPlacesRegistry.getInstance(myProject).getLanguageInjectionSupport();
    InjectionProcessor injectionProcessor = new InjectionProcessor(myConfiguration, injectionSupport, operands) {
      // Registers the computed injection with the platform and propagates the
      // temporary-language and "frankenstein" (partially unparsable) markers.
      @Override
      protected Pair<PsiLanguageInjectionHost, Language> processInjection(Language language,
                                                                          List<Trinity<PsiLanguageInjectionHost, InjectedLanguage, TextRange>> list,
                                                                          boolean settingsAvailable,
                                                                          boolean unparsable) {
        InjectorUtils.registerInjection(language, list, containingFile, registrar);
        InjectorUtils.registerSupport(getLanguageInjectionSupport(), settingsAvailable, list.get(0).getFirst(), language);
        PsiLanguageInjectionHost host = list.get(0).getFirst();
        if (tempLanguage != null) {
          InjectorUtils
            .putInjectedFileUserData(host, language, LanguageInjectionSupport.TEMPORARY_INJECTED_LANGUAGE, tempInjectedLanguage);
        }
        InjectorUtils
          .putInjectedFileUserData(host, language, InjectedLanguageUtil.FRANKENSTEIN_INJECTION, unparsable ? Boolean.TRUE : null);
        return Pair.create(host, language);
      }

      // Cheap name-based pre-filter backed by the project's injection caches;
      // avoids resolving methods that cannot possibly carry an injection.
      @Override
      protected boolean areThereInjectionsWithName(String methodName, boolean annoOnly) {
        if (methodName == null) return false;
        if (getAnnotatedElementsValue().contains(methodName)) {
          return true;
        }
        return !annoOnly && getXmlAnnotatedElementsValue().contains(methodName);
      }
    };
    if (tempLanguage != null) {
      // Temporary injection: build a one-off BaseInjection for the chosen language.
      BaseInjection baseInjection = new BaseInjection(JavaLanguageInjectionSupport.JAVA_SUPPORT_ID);
      baseInjection.setInjectedLanguageId(tempInjectedLanguage.getID());
      List<Pair<PsiLanguageInjectionHost, Language>> list = injectionProcessor.processInjectionWithContext(baseInjection, false);
      for (Pair<PsiLanguageInjectionHost, Language> pair : list) {
        PsiLanguageInjectionHost host = pair.getFirst();
        Language language = pair.getSecond();
        InjectorUtils.putInjectedFileUserData(host, language, LanguageInjectionSupport.TEMPORARY_INJECTED_LANGUAGE, tempInjectedLanguage);
      }
    }
    else {
      injectionProcessor.processInjections();
    }
  }

  /**
   * Walks from the concatenation's operands to the declarations that may carry
   * an injection (method parameters, return types, variables, annotation
   * attributes), optionally following variable usages (a simple data-flow
   * analysis controlled by {@link Configuration.DfaOption}).
   */
  public static class InjectionProcessor {
    private final Configuration myConfiguration;
    private final LanguageInjectionSupport mySupport;
    private final PsiElement[] myOperands;
    // Set when a comment/annotation injection terminates the search.
    private boolean myShouldStop;
    // Set when the concatenation's value cannot be fully computed statically.
    private boolean myUnparsable;

    InjectionProcessor(Configuration configuration, LanguageInjectionSupport support, PsiElement... operands) {
      myConfiguration = configuration;
      mySupport = support;
      myOperands = operands;
    }

    /**
     * Main search loop: seeds a work list with the first operand and visits
     * annotated elements reachable from each place until exhausted or stopped.
     */
    public void processInjections() {
      PsiElement firstOperand = myOperands[0];
      PsiElement topBlock = PsiUtil.getTopLevelEnclosingCodeBlock(firstOperand, null);
      // Limit variable-usage searches to the enclosing code block (or file).
      LocalSearchScope searchScope = new LocalSearchScope(new PsiElement[]{topBlock instanceof PsiCodeBlock
                                                                           ? topBlock : firstOperand.getContainingFile()}, "", true);
      List<PsiElement> places = new ArrayList<>(5);
      places.add(firstOperand);
      Set<PsiModifierListOwner> visitedVars = new THashSet<>();
      class MyAnnoVisitor implements AnnotationUtilEx.AnnotatedElementVisitor {
        // The literal is an argument: map it to the corresponding parameter of
        // the resolved method/constructor and process that parameter.
        @Override
        public boolean visitMethodParameter(PsiExpression expression, PsiCall psiCallExpression) {
          PsiExpressionList list = psiCallExpression.getArgumentList();
          assert list != null;
          int index = ArrayUtil.indexOf(list.getExpressions(), expression);
          String methodName;
          if (psiCallExpression instanceof PsiMethodCallExpression) {
            String referenceName = ((PsiMethodCallExpression)psiCallExpression).getMethodExpression().getReferenceName();
            if ("super".equals(referenceName) || "this".equals(referenceName)) { // constructor call
              PsiClass psiClass = PsiTreeUtil.getParentOfType(psiCallExpression, PsiClass.class, true);
              PsiClass psiTargetClass = "super".equals(referenceName)? psiClass == null ? null : psiClass.getSuperClass() : psiClass;
              methodName = psiTargetClass == null? null : psiTargetClass.getName();
            }
            else {
              methodName = referenceName;
            }
          }
          else if (psiCallExpression instanceof PsiNewExpression) {
            PsiJavaCodeReferenceElement classRef = ((PsiNewExpression)psiCallExpression).getClassOrAnonymousClassReference();
            methodName = classRef == null ? null : classRef.getReferenceName();
          }
          else if (psiCallExpression instanceof PsiEnumConstant) {
            PsiMethod method = psiCallExpression.resolveMethod();
            methodName = method != null ? method.getName() : null;
          }
          else {
            methodName = null;
          }
          // Resolve only if the name-based pre-filter says an injection may exist.
          if (methodName != null && index >= 0 && areThereInjectionsWithName(methodName, false)) {
            PsiMethod method = psiCallExpression.resolveMethod();
            if (method != null) {
              PsiParameter[] parameters = method.getParameterList().getParameters();
              if (index < parameters.length) {
                process(parameters[index], method, index);
              }
              else if (method.isVarArgs()) {
                // Argument falls into the varargs tail: use the last parameter.
                process(parameters[parameters.length - 1], method, parameters.length - 1);
              }
            }
          }
          return false;
        }

        // The literal is returned from a method: the injection may be declared
        // on the method itself.
        @Override
        public boolean visitMethodReturnStatement(PsiElement source, PsiMethod method) {
          if (areThereInjectionsWithName(method.getName(), false)) {
            process(method, method, -1);
          }
          return false;
        }

        // Follow usages of the variable (simple DFA) unless disabled in settings.
        private void visitVariableUsages(PsiVariable variable) {
          if (variable == null) return;
          if (myConfiguration.getAdvancedConfiguration().getDfaOption() != Configuration.DfaOption.OFF && visitedVars.add(variable)) {
            ReferencesSearch.search(variable, searchScope).forEach(psiReference -> {
              PsiElement element = psiReference.getElement();
              if (element instanceof PsiExpression) {
                PsiExpression refExpression = (PsiExpression)element;
                places.add(refExpression);
                if (!myUnparsable) {
                  myUnparsable = checkUnparsableReference(refExpression);
                }
              }
              return true;
            });
          }
        }

        @Override
        public boolean visitVariable(PsiVariable variable) {
          visitVariableUsages(variable);
          // Pick an anchor before the variable name so a preceding comment
          // injection (e.g. //language=...) can be detected.
          PsiElement anchor = !(variable.getFirstChild() instanceof PsiComment) ? variable :
                              variable.getModifierList() != null ? variable.getModifierList() :
                              variable.getTypeElement();
          if (anchor != null && !processCommentInjection(anchor)) {
            myShouldStop = true;
          }
          else {
            process(variable, null, -1);
          }
          return false;
        }

        // The literal is an annotation attribute value: the injection may be
        // declared on the attribute's method in the annotation type.
        @Override
        public boolean visitAnnotationParameter(PsiNameValuePair nameValuePair, PsiAnnotation psiAnnotation) {
          String paramName = nameValuePair.getName();
          String methodName = paramName != null ? paramName : PsiAnnotation.DEFAULT_REFERENCED_METHOD_NAME;
          if (areThereInjectionsWithName(methodName, false)) {
            PsiReference reference = nameValuePair.getReference();
            PsiElement element = reference == null ? null : reference.resolve();
            if (element instanceof PsiMethod) {
              process((PsiMethod)element, (PsiMethod)element, -1);
            }
          }
          return false;
        }

        @Override
        public boolean visitReference(PsiReferenceExpression expression) {
          if (myConfiguration.getAdvancedConfiguration().getDfaOption() == Configuration.DfaOption.OFF) return true;
          PsiElement e = expression.resolve();
          if (e instanceof PsiVariable) {
            if (e instanceof PsiParameter) {
              PsiParameter p = (PsiParameter)e;
              PsiElement declarationScope = p.getDeclarationScope();
              PsiMethod method = declarationScope instanceof PsiMethod ? (PsiMethod)declarationScope : null;
              PsiParameterList parameterList = method == null ? null : method.getParameterList();
              // don't check catchblock parameters & etc.
              if (!(parameterList == null || parameterList != e.getParent()) &&
                  areThereInjectionsWithName(method.getName(), false)) {
                int parameterIndex = parameterList.getParameterIndex((PsiParameter)e);
                process((PsiModifierListOwner)e, method, parameterIndex);
              }
            }
            visitVariable((PsiVariable)e);
          }
          return !myShouldStop;
        }

        // Returns false when a comment injection was found and processing
        // should stop for this branch.
        private boolean processCommentInjection(@NotNull PsiElement anchor) {
          Ref<PsiElement> causeRef = Ref.create();
          BaseInjection injection = mySupport.findCommentInjection(anchor, causeRef);
          if (injection != null) {
            PsiVariable variable = PsiTreeUtil.getParentOfType(anchor, PsiVariable.class);
            visitVariableUsages(variable);
            return processCommentInjectionInner(causeRef.get(), injection);
          }
          return true;
        }
      }
      MyAnnoVisitor visitor = new MyAnnoVisitor();
      if (!visitor.processCommentInjection(firstOperand)) {
        return;
      }
      while (!places.isEmpty() && !myShouldStop) {
        PsiElement curPlace = places.remove(0);
        AnnotationUtilEx.visitAnnotatedElements(curPlace, visitor);
      }
    }

    protected boolean processCommentInjectionInner(PsiElement comment, BaseInjection injection) {
      processInjectionWithContext(injection, false);
      return false;
    }

    // Processes annotation-based injections for the owner, then all matching
    // XML-configured injections; either may terminate the search.
    private void process(PsiModifierListOwner owner, PsiMethod method, int paramIndex) {
      if (!processAnnotationInjections(owner)) {
        myShouldStop = true;
      }
      for (BaseInjection injection : myConfiguration.getInjections(JavaLanguageInjectionSupport.JAVA_SUPPORT_ID)) {
        if (injection.acceptsPsiElement(owner)) {
          if (!processXmlInjections(injection, owner, method, paramIndex)) {
            myShouldStop = true;
            break;
          }
        }
      }
    }

    private boolean processAnnotationInjections(PsiModifierListOwner annoElement) {
      if (annoElement instanceof PsiParameter) {
        PsiElement scope = ((PsiParameter)annoElement).getDeclarationScope();
        // Skip parameters of methods whose name is not in the annotation cache.
        if (scope instanceof PsiMethod && !areThereInjectionsWithName(((PsiNamedElement)scope).getName(), true)) {
          return true;
        }
      }
      PsiAnnotation[] annotations =
        AnnotationUtilEx.getAnnotationFrom(annoElement, myConfiguration.getAdvancedConfiguration().getLanguageAnnotationPair(), true);
      if (annotations.length > 0) {
        return processAnnotationInjectionInner(annoElement, annotations);
      }
      return true;
    }

    // Builds a BaseInjection from the @Language-style annotation's
    // value/prefix/suffix attributes and processes it.
    protected boolean processAnnotationInjectionInner(PsiModifierListOwner owner, PsiAnnotation[] annotations) {
      String id = AnnotationUtilEx.calcAnnotationValue(annotations, "value");
      String prefix = AnnotationUtilEx.calcAnnotationValue(annotations, "prefix");
      String suffix = AnnotationUtilEx.calcAnnotationValue(annotations, "suffix");
      BaseInjection injection = new BaseInjection(JavaLanguageInjectionSupport.JAVA_SUPPORT_ID);
      if (prefix != null) injection.setPrefix(prefix);
      if (suffix != null) injection.setSuffix(suffix);
      if (id != null) injection.setInjectedLanguageId(id);
      processInjectionWithContext(injection, false);
      return false;
    }

    protected boolean processXmlInjections(BaseInjection injection, PsiModifierListOwner owner, PsiMethod method, int paramIndex) {
      processInjectionWithContext(injection, true);
      return !injection.isTerminal();
    }

    /**
     * Converts the concatenation operands into (host, language, range) triples.
     * String operands become prefixes/suffixes of the neighbouring injection
     * host; non-host operands mark the injection as partially unparsable.
     * Returns the (host, language) pairs actually registered.
     */
    @NotNull
    List<Pair<PsiLanguageInjectionHost, Language>> processInjectionWithContext(BaseInjection injection, boolean settingsAvailable) {
      Language language = InjectorUtils.getLanguage(injection);
      if (language == null) return Collections.emptyList();
      boolean separateFiles = !injection.isSingleFile() && StringUtil.isNotEmpty(injection.getValuePattern());
      Ref<Boolean> unparsableRef = Ref.create(myUnparsable);
      List<Object> objects = ContextComputationProcessor.collectOperands(injection.getPrefix(), injection.getSuffix(), unparsableRef, myOperands);
      if (objects.isEmpty()) return Collections.emptyList();
      List<Trinity<PsiLanguageInjectionHost, InjectedLanguage, TextRange>> result = new ArrayList<>();
      int len = objects.size();
      for (int i = 0; i < len; i++) {
        String curPrefix = null;
        Object o = objects.get(i);
        if (o instanceof String) {
          curPrefix = (String)o;
          if (i == len - 1) return Collections.emptyList(); // IDEADEV-26751
          o = objects.get(++i);
        }
        String curSuffix = null;
        PsiLanguageInjectionHost curHost = null;
        if (o instanceof PsiLanguageInjectionHost) {
          curHost = (PsiLanguageInjectionHost)o;
          if (i == len - 2) {
            Object next = objects.get(i + 1);
            if (next instanceof String) {
              i++;
              curSuffix = (String)next;
            }
          }
        }
        if (curHost == null) {
          unparsableRef.set(Boolean.TRUE);
        }
        else {
          if (curHost instanceof PsiLiteralExpression) {
            // A value pattern may carve several injected areas out of one literal;
            // the prefix attaches to the first and the suffix to the last area.
            List<TextRange> injectedArea = injection.getInjectedArea(curHost);
            for (int j = 0, injectedAreaSize = injectedArea.size(); j < injectedAreaSize; j++) {
              TextRange textRange = injectedArea.get(j);
              TextRange.assertProperRange(textRange, injection);
              result.add(Trinity.create(
                curHost, InjectedLanguage.create(injection.getInjectedLanguageId(),
                                                 separateFiles || j == 0 ? curPrefix : "",
                                                 separateFiles || j == injectedAreaSize - 1 ? curSuffix : "",
                                                 true), textRange));
            }
          }
          else {
            TextRange textRange = ElementManipulators.getManipulator(curHost).getRangeInElement(curHost);
            TextRange.assertProperRange(textRange, injection);
            result.add(Trinity.create(curHost, InjectedLanguage.create(injection.getInjectedLanguageId(), curPrefix, curSuffix, true),
                                      textRange));
          }
        }
      }
      if (result.isEmpty()) {
        return Collections.emptyList();
      }
      List<Pair<PsiLanguageInjectionHost, Language>> res = new ArrayList<>();
      if (separateFiles) {
        for (Trinity<PsiLanguageInjectionHost, InjectedLanguage, TextRange> trinity : result) {
          ContainerUtil.addIfNotNull(res, processInjection(language, Collections.singletonList(trinity), settingsAvailable, false));
        }
      }
      else {
        if (isReferenceInject(language)) {
          // OMG in case of reference inject they confused shreds (several places in the host file to form a single injection) with several injections
          for (Trinity<PsiLanguageInjectionHost, InjectedLanguage, TextRange> trinity : result) {
            ContainerUtil.addIfNotNull(res, processInjection(language, Collections.singletonList(trinity), settingsAvailable, unparsableRef.get()));
          }
        }
        else {
          ContainerUtil.addIfNotNull(res, processInjection(language, result, settingsAvailable, unparsableRef.get()));
        }
      }
      return res;
    }

    // A "reference inject" language has no parser definition but a registered
    // ReferenceInjector for its id.
    private static boolean isReferenceInject(Language language) {
      return LanguageParserDefinitions.INSTANCE.forLanguage(language) == null && ReferenceInjector.findById(language.getID()) != null;
    }

    // Overridden by callers to actually register the injection; the base
    // implementation is a no-op.
    protected Pair<PsiLanguageInjectionHost, Language> processInjection(Language language,
                                                                        List<Trinity<PsiLanguageInjectionHost, InjectedLanguage, TextRange>> list,
                                                                        boolean xmlInjection,
                                                                        boolean unparsable) {
      return null;
    }

    protected boolean areThereInjectionsWithName(String methodName, boolean annoOnly) {
      return true;
    }

    public LanguageInjectionSupport getLanguageInjectionSupport() {
      return mySupport;
    }
  }

  // True when the referenced variable's value cannot be computed statically
  // from this usage (e.g. it is the target of +=, or part of a larger
  // expression), so the injection must be marked partially unparsable.
  private static boolean checkUnparsableReference(PsiExpression refExpression) {
    PsiElement parent = refExpression.getParent();
    if (parent instanceof PsiAssignmentExpression) {
      PsiAssignmentExpression assignmentExpression = (PsiAssignmentExpression)parent;
      IElementType operation = assignmentExpression.getOperationTokenType();
      if (assignmentExpression.getLExpression() == refExpression && JavaTokenType.PLUSEQ.equals(operation)) {
        return true;
      }
    }
    else if (parent instanceof PsiPolyadicExpression ||
             parent instanceof PsiParenthesizedExpression ||
             parent instanceof PsiConditionalExpression) {
      return true;
    }
    return false;
  }

  private Collection<String> getAnnotatedElementsValue() {
    // note: external annotations not supported
    return InjectionCache.getInstance(myProject).getAnnoIndex();
  }

  private Collection<String> getXmlAnnotatedElementsValue() {
    return InjectionCache.getInstance(myProject).getXmlIndex();
  }
}
| apache-2.0 |
k82/kubernetes | cmd/kube-controller-manager/app/controllermanager.go | 25770 | /*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package app implements a server that runs a set of active
// components. This includes replication controllers, service endpoints and
// nodes.
//
package app
import (
"context"
"fmt"
"io/ioutil"
"math/rand"
"net/http"
"os"
"time"
"github.com/spf13/cobra"
v1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/runtime/schema"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/util/uuid"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/apiserver/pkg/server"
"k8s.io/apiserver/pkg/server/healthz"
"k8s.io/apiserver/pkg/server/mux"
utilfeature "k8s.io/apiserver/pkg/util/feature"
cacheddiscovery "k8s.io/client-go/discovery/cached"
"k8s.io/client-go/informers"
clientset "k8s.io/client-go/kubernetes"
"k8s.io/client-go/metadata"
"k8s.io/client-go/metadata/metadatainformer"
restclient "k8s.io/client-go/rest"
"k8s.io/client-go/restmapper"
"k8s.io/client-go/tools/leaderelection"
"k8s.io/client-go/tools/leaderelection/resourcelock"
certutil "k8s.io/client-go/util/cert"
"k8s.io/client-go/util/keyutil"
cloudprovider "k8s.io/cloud-provider"
cliflag "k8s.io/component-base/cli/flag"
"k8s.io/component-base/cli/globalflag"
"k8s.io/component-base/term"
"k8s.io/component-base/version"
"k8s.io/component-base/version/verflag"
"k8s.io/klog"
genericcontrollermanager "k8s.io/kubernetes/cmd/controller-manager/app"
"k8s.io/kubernetes/cmd/kube-controller-manager/app/config"
"k8s.io/kubernetes/cmd/kube-controller-manager/app/options"
"k8s.io/kubernetes/pkg/controller"
kubectrlmgrconfig "k8s.io/kubernetes/pkg/controller/apis/config"
serviceaccountcontroller "k8s.io/kubernetes/pkg/controller/serviceaccount"
"k8s.io/kubernetes/pkg/features"
"k8s.io/kubernetes/pkg/serviceaccount"
"k8s.io/kubernetes/pkg/util/configz"
utilflag "k8s.io/kubernetes/pkg/util/flag"
)
const (
	// ControllerStartJitter is the jitter factor applied when starting controller managers.
	ControllerStartJitter = 1.0
	// ConfigzName is the name used to register the kube-controller-manager /configz endpoint; same as GroupName.
	ConfigzName = "kubecontrollermanager.config.k8s.io"
)

// ControllerLoopMode is the kube-controller-manager's mode of running controller loops that are cloud provider dependent
type ControllerLoopMode int

const (
	// IncludeCloudLoops means the kube-controller-manager includes the controller loops that are cloud provider dependent
	IncludeCloudLoops ControllerLoopMode = iota
	// ExternalLoops means the kube-controller-manager excludes the controller loops that are cloud provider dependent
	ExternalLoops
)
// NewControllerManagerCommand creates a *cobra.Command object with default parameters
// NewControllerManagerCommand creates a *cobra.Command object with default parameters
func NewControllerManagerCommand() *cobra.Command {
	opts, err := options.NewKubeControllerManagerOptions()
	if err != nil {
		klog.Fatalf("unable to initialize command options: %v", err)
	}

	cmd := &cobra.Command{
		Use: "kube-controller-manager",
		Long: `The Kubernetes controller manager is a daemon that embeds
the core control loops shipped with Kubernetes. In applications of robotics and
automation, a control loop is a non-terminating loop that regulates the state of
the system. In Kubernetes, a controller is a control loop that watches the shared
state of the cluster through the apiserver and makes changes attempting to move the
current state towards the desired state. Examples of controllers that ship with
Kubernetes today are the replication controller, endpoints controller, namespace
controller, and serviceaccounts controller.`,
		Run: func(cmd *cobra.Command, args []string) {
			verflag.PrintAndExitIfRequested()
			utilflag.PrintFlags(cmd.Flags())

			// Build the completed config from options, then run forever.
			c, err := opts.Config(KnownControllers(), ControllersDisabledByDefault.List())
			if err != nil {
				fmt.Fprintf(os.Stderr, "%v\n", err)
				os.Exit(1)
			}
			if err := Run(c.Complete(), wait.NeverStop); err != nil {
				fmt.Fprintf(os.Stderr, "%v\n", err)
				os.Exit(1)
			}
		},
	}

	// Merge all named flag sets (including global and legacy flags) into the command.
	rootFlags := cmd.Flags()
	flagSets := opts.Flags(KnownControllers(), ControllersDisabledByDefault.List())
	verflag.AddFlags(flagSets.FlagSet("global"))
	globalflag.AddGlobalFlags(flagSets.FlagSet("global"), cmd.Name())
	registerLegacyGlobalFlags(flagSets)
	for _, set := range flagSets.FlagSets {
		rootFlags.AddFlagSet(set)
	}

	// Custom usage/help output that prints the flag sets grouped by section,
	// wrapped to the current terminal width.
	usageFmt := "Usage:\n %s\n"
	termCols, _, _ := term.TerminalSize(cmd.OutOrStdout())
	cmd.SetUsageFunc(func(cmd *cobra.Command) error {
		fmt.Fprintf(cmd.OutOrStderr(), usageFmt, cmd.UseLine())
		cliflag.PrintSections(cmd.OutOrStderr(), flagSets, termCols)
		return nil
	})
	cmd.SetHelpFunc(func(cmd *cobra.Command, args []string) {
		fmt.Fprintf(cmd.OutOrStdout(), "%s\n\n"+usageFmt, cmd.Long, cmd.UseLine())
		cliflag.PrintSections(cmd.OutOrStdout(), flagSets, termCols)
	})

	return cmd
}
// ResyncPeriod returns a function which generates a duration each time it is
// invoked; this is so that multiple controllers don't get into lock-step and all
// hammer the apiserver with list requests simultaneously.
// ResyncPeriod returns a function which generates a duration each time it is
// invoked; this is so that multiple controllers don't get into lock-step and all
// hammer the apiserver with list requests simultaneously.
func ResyncPeriod(c *config.CompletedConfig) func() time.Duration {
	return func() time.Duration {
		// Scale the configured minimum resync period by a random factor in [1, 2).
		jitter := 1 + rand.Float64()
		return time.Duration(float64(c.ComponentConfig.Generic.MinResyncPeriod.Nanoseconds()) * jitter)
	}
}
// Run runs the KubeControllerManagerOptions. This should never exit.
func Run(c *config.CompletedConfig, stopCh <-chan struct{}) error {
// To help debugging, immediately log version
klog.Infof("Version: %+v", version.Get())
if cfgz, err := configz.New(ConfigzName); err == nil {
cfgz.Set(c.ComponentConfig)
} else {
klog.Errorf("unable to register configz: %v", err)
}
// Setup any healthz checks we will want to use.
var checks []healthz.HealthChecker
var electionChecker *leaderelection.HealthzAdaptor
if c.ComponentConfig.Generic.LeaderElection.LeaderElect {
electionChecker = leaderelection.NewLeaderHealthzAdaptor(time.Second * 20)
checks = append(checks, electionChecker)
}
// Start the controller manager HTTP server
// unsecuredMux is the handler for these controller *after* authn/authz filters have been applied
var unsecuredMux *mux.PathRecorderMux
if c.SecureServing != nil {
unsecuredMux = genericcontrollermanager.NewBaseHandler(&c.ComponentConfig.Generic.Debugging, checks...)
handler := genericcontrollermanager.BuildHandlerChain(unsecuredMux, &c.Authorization, &c.Authentication)
// TODO: handle stoppedCh returned by c.SecureServing.Serve
if _, err := c.SecureServing.Serve(handler, 0, stopCh); err != nil {
return err
}
}
if c.InsecureServing != nil {
unsecuredMux = genericcontrollermanager.NewBaseHandler(&c.ComponentConfig.Generic.Debugging, checks...)
insecureSuperuserAuthn := server.AuthenticationInfo{Authenticator: &server.InsecureSuperuser{}}
handler := genericcontrollermanager.BuildHandlerChain(unsecuredMux, nil, &insecureSuperuserAuthn)
if err := c.InsecureServing.Serve(handler, 0, stopCh); err != nil {
return err
}
}
run := func(ctx context.Context) {
rootClientBuilder := controller.SimpleControllerClientBuilder{
ClientConfig: c.Kubeconfig,
}
var clientBuilder controller.ControllerClientBuilder
if c.ComponentConfig.KubeCloudShared.UseServiceAccountCredentials {
if len(c.ComponentConfig.SAController.ServiceAccountKeyFile) == 0 {
// It's possible another controller process is creating the tokens for us.
// If one isn't, we'll timeout and exit when our client builder is unable to create the tokens.
klog.Warningf("--use-service-account-credentials was specified without providing a --service-account-private-key-file")
}
if shouldTurnOnDynamicClient(c.Client) {
klog.V(1).Infof("using dynamic client builder")
//Dynamic builder will use TokenRequest feature and refresh service account token periodically
clientBuilder = controller.NewDynamicClientBuilder(
restclient.AnonymousClientConfig(c.Kubeconfig),
c.Client.CoreV1(),
"kube-system")
} else {
klog.V(1).Infof("using legacy client builder")
clientBuilder = controller.SAControllerClientBuilder{
ClientConfig: restclient.AnonymousClientConfig(c.Kubeconfig),
CoreClient: c.Client.CoreV1(),
AuthenticationClient: c.Client.AuthenticationV1(),
Namespace: "kube-system",
}
}
} else {
clientBuilder = rootClientBuilder
}
controllerContext, err := CreateControllerContext(c, rootClientBuilder, clientBuilder, ctx.Done())
if err != nil {
klog.Fatalf("error building controller context: %v", err)
}
saTokenControllerInitFunc := serviceAccountTokenControllerStarter{rootClientBuilder: rootClientBuilder}.startServiceAccountTokenController
if err := StartControllers(controllerContext, saTokenControllerInitFunc, NewControllerInitializers(controllerContext.LoopMode), unsecuredMux); err != nil {
klog.Fatalf("error starting controllers: %v", err)
}
controllerContext.InformerFactory.Start(controllerContext.Stop)
controllerContext.ObjectOrMetadataInformerFactory.Start(controllerContext.Stop)
close(controllerContext.InformersStarted)
select {}
}
if !c.ComponentConfig.Generic.LeaderElection.LeaderElect {
run(context.TODO())
panic("unreachable")
}
id, err := os.Hostname()
if err != nil {
return err
}
// add a uniquifier so that two processes on the same host don't accidentally both become active
id = id + "_" + string(uuid.NewUUID())
rl, err := resourcelock.New(c.ComponentConfig.Generic.LeaderElection.ResourceLock,
c.ComponentConfig.Generic.LeaderElection.ResourceNamespace,
c.ComponentConfig.Generic.LeaderElection.ResourceName,
c.LeaderElectionClient.CoreV1(),
c.LeaderElectionClient.CoordinationV1(),
resourcelock.ResourceLockConfig{
Identity: id,
EventRecorder: c.EventRecorder,
})
if err != nil {
klog.Fatalf("error creating lock: %v", err)
}
leaderelection.RunOrDie(context.TODO(), leaderelection.LeaderElectionConfig{
Lock: rl,
LeaseDuration: c.ComponentConfig.Generic.LeaderElection.LeaseDuration.Duration,
RenewDeadline: c.ComponentConfig.Generic.LeaderElection.RenewDeadline.Duration,
RetryPeriod: c.ComponentConfig.Generic.LeaderElection.RetryPeriod.Duration,
Callbacks: leaderelection.LeaderCallbacks{
OnStartedLeading: run,
OnStoppedLeading: func() {
klog.Fatalf("leaderelection lost")
},
},
WatchDog: electionChecker,
Name: "kube-controller-manager",
})
panic("unreachable")
}
// ControllerContext defines the context object for controller. It bundles the
// clients, informers, configuration, and lifecycle channels shared by every
// controller loop started by the controller manager.
type ControllerContext struct {
	// ClientBuilder will provide a client for this controller to use
	ClientBuilder controller.ControllerClientBuilder

	// InformerFactory gives access to informers for the controller.
	InformerFactory informers.SharedInformerFactory

	// ObjectOrMetadataInformerFactory gives access to informers for typed resources
	// and dynamic resources by their metadata. All generic controllers currently use
	// object metadata - if a future controller needs access to the full object this
	// would become GenericInformerFactory and take a dynamic client.
	ObjectOrMetadataInformerFactory controller.InformerFactory

	// ComponentConfig provides access to init options for a given controller
	ComponentConfig kubectrlmgrconfig.KubeControllerManagerConfiguration

	// RESTMapper is a DeferredDiscoveryRESTMapper: it defers initialization of
	// the RESTMapper until the first mapping is requested, and is reset
	// periodically from discovery (see CreateControllerContext).
	RESTMapper *restmapper.DeferredDiscoveryRESTMapper

	// AvailableResources is a map listing currently available resources,
	// as reported by apiserver discovery at startup.
	AvailableResources map[schema.GroupVersionResource]bool

	// Cloud is the cloud provider interface for the controllers to use.
	// It must be initialized and ready to use.
	Cloud cloudprovider.Interface

	// LoopMode controls which control loops are run:
	// IncludeCloudLoops is for a kube-controller-manager running all loops;
	// ExternalLoops is for a kube-controller-manager running alongside a
	// cloud-controller-manager.
	LoopMode ControllerLoopMode

	// Stop is the stop channel
	Stop <-chan struct{}

	// InformersStarted is closed after all of the controllers have been initialized and are running. After this point it is safe,
	// for an individual controller to start the shared informers. Before it is closed, they should not.
	InformersStarted chan struct{}

	// ResyncPeriod generates a duration each time it is invoked; this is so that
	// multiple controllers don't get into lock-step and all hammer the apiserver
	// with list requests simultaneously.
	ResyncPeriod func() time.Duration
}
// IsControllerEnabled reports whether the named controller should be run,
// taking into account the configured controller list and the set of
// controllers that are disabled by default.
func (c ControllerContext) IsControllerEnabled(name string) bool {
	configuredControllers := c.ComponentConfig.Generic.Controllers
	return genericcontrollermanager.IsControllerEnabled(name, ControllersDisabledByDefault, configuredControllers)
}
// InitFunc is used to launch a particular controller. It may run additional "should I activate checks".
// Any error returned will cause the controller process to `Fatal`.
// The bool indicates whether the controller was enabled; false with a nil
// error means the controller deliberately chose not to run.
// The returned http.Handler, if non-nil, is mounted under
// /debug/controllers/<name> on the unsecured debug mux (see StartControllers).
type InitFunc func(ctx ControllerContext) (debuggingHandler http.Handler, enabled bool, err error)
// KnownControllers returns the names of every controller this binary knows
// how to run, including the "special" ones that are not part of the normal
// initializer map.
func KnownControllers() []string {
	names := sets.StringKeySet(NewControllerInitializers(IncludeCloudLoops))

	// The SA token controller is started outside the normal initializer map:
	// it *must* run first so that SA tokens exist for the other controllers.
	// Think very carefully before extending this special-case list.
	names.Insert(saTokenControllerName)

	return names.List()
}
// ControllersDisabledByDefault is the set of controllers which is disabled by default.
// These run only when explicitly named in the configured controller list
// (see ControllerContext.IsControllerEnabled).
var ControllersDisabledByDefault = sets.NewString(
	"bootstrapsigner",
	"tokencleaner",
)

const (
	// saTokenControllerName is the name of the service-account token
	// controller, which is started specially before all other controllers
	// (see serviceAccountTokenControllerStarter).
	saTokenControllerName = "serviceaccount-token"
)
// NewControllerInitializers is a public map of named controller groups (you can start more than one in an init func)
// paired to their InitFunc. This allows for structured downstream composition and subdivision.
func NewControllerInitializers(loopMode ControllerLoopMode) map[string]InitFunc {
	register := map[string]InitFunc{
		"endpoint":                  startEndpointController,
		"endpointslice":             startEndpointSliceController,
		"replicationcontroller":     startReplicationController,
		"podgc":                     startPodGCController,
		"resourcequota":             startResourceQuotaController,
		"namespace":                 startNamespaceController,
		"serviceaccount":            startServiceAccountController,
		"garbagecollector":          startGarbageCollectorController,
		"daemonset":                 startDaemonSetController,
		"job":                       startJobController,
		"deployment":                startDeploymentController,
		"replicaset":                startReplicaSetController,
		"horizontalpodautoscaling":  startHPAController,
		"disruption":                startDisruptionController,
		"statefulset":               startStatefulSetController,
		"cronjob":                   startCronJobController,
		"csrsigning":                startCSRSigningController,
		"csrapproving":              startCSRApprovingController,
		"csrcleaner":                startCSRCleanerController,
		"ttl":                       startTTLController,
		"bootstrapsigner":           startBootstrapSignerController,
		"tokencleaner":              startTokenCleanerController,
		"nodeipam":                  startNodeIpamController,
		"nodelifecycle":             startNodeLifecycleController,
		"persistentvolume-binder":   startPersistentVolumeBinderController,
		"attachdetach":              startAttachDetachController,
		"persistentvolume-expander": startVolumeExpandController,
		"clusterrole-aggregation":   startClusterRoleAggregrationController,
		"pvc-protection":            startPVCProtectionController,
		"pv-protection":             startPVProtectionController,
		"ttl-after-finished":        startTTLAfterFinishedController,
		"root-ca-cert-publisher":    startRootCACertPublisher,
	}

	// The cloud-coupled loops only run when this binary owns the cloud
	// integration, i.e. there is no external cloud-controller-manager.
	if loopMode == IncludeCloudLoops {
		register["service"] = startServiceController
		register["route"] = startRouteController
		register["cloud-node-lifecycle"] = startCloudNodeLifecycleController
		// TODO: volume controller into the IncludeCloudLoops only set.
	}

	return register
}
// GetAvailableResources gets the map which contains all available resources of the apiserver.
// TODO: In general, any controller checking this needs to be dynamic so
// users don't have to restart their controller manager if they change the apiserver.
// Until we get there, the structure here needs to be exposed for the construction of a proper ControllerContext.
func GetAvailableResources(clientBuilder controller.ControllerClientBuilder) (map[schema.GroupVersionResource]bool, error) {
	discoveryClient := clientBuilder.ClientOrDie("controller-discovery").Discovery()

	// Discovery may partially fail (some aggregated groups unavailable);
	// log the error but keep going as long as we got something back.
	_, apiResourceLists, err := discoveryClient.ServerGroupsAndResources()
	if err != nil {
		utilruntime.HandleError(fmt.Errorf("unable to get all supported resources from server: %v", err))
	}
	if len(apiResourceLists) == 0 {
		return nil, fmt.Errorf("unable to get any supported resources from server")
	}

	available := map[schema.GroupVersionResource]bool{}
	for _, resourceList := range apiResourceLists {
		groupVersion, parseErr := schema.ParseGroupVersion(resourceList.GroupVersion)
		if parseErr != nil {
			return nil, parseErr
		}
		for _, resource := range resourceList.APIResources {
			available[groupVersion.WithResource(resource.Name)] = true
		}
	}
	return available, nil
}
// CreateControllerContext creates a context struct containing references to resources needed by the
// controllers such as the cloud provider and clientBuilder. rootClientBuilder is only used for
// the shared-informers client and token controller.
func CreateControllerContext(s *config.CompletedConfig, rootClientBuilder, clientBuilder controller.ControllerClientBuilder, stop <-chan struct{}) (ControllerContext, error) {
	client := rootClientBuilder.ClientOrDie("shared-informers")
	sharedInformers := informers.NewSharedInformerFactory(client, ResyncPeriod(s)())

	metadataClient := metadata.NewForConfigOrDie(rootClientBuilder.ConfigOrDie("metadata-informers"))
	metadataInformers := metadatainformer.NewSharedInformerFactory(metadataClient, ResyncPeriod(s)())

	// If apiserver is not running we should wait for some time and fail only then. This is particularly
	// important when we start apiserver and controller manager at the same time.
	if err := genericcontrollermanager.WaitForAPIServer(client, 10*time.Second); err != nil {
		return ControllerContext{}, fmt.Errorf("failed to wait for apiserver being healthy: %v", err)
	}

	// Use a discovery client capable of being refreshed, and reset its cache
	// periodically so new API resources become visible without a restart.
	discoveryClient := rootClientBuilder.ClientOrDie("controller-discovery")
	cachedClient := cacheddiscovery.NewMemCacheClient(discoveryClient.Discovery())
	restMapper := restmapper.NewDeferredDiscoveryRESTMapper(cachedClient)
	go wait.Until(restMapper.Reset, 30*time.Second, stop)

	availableResources, err := GetAvailableResources(rootClientBuilder)
	if err != nil {
		return ControllerContext{}, err
	}

	cloud, loopMode, err := createCloudProvider(s.ComponentConfig.KubeCloudShared.CloudProvider.Name, s.ComponentConfig.KubeCloudShared.ExternalCloudVolumePlugin,
		s.ComponentConfig.KubeCloudShared.CloudProvider.CloudConfigFile, s.ComponentConfig.KubeCloudShared.AllowUntaggedCloud, sharedInformers)
	if err != nil {
		return ControllerContext{}, err
	}

	return ControllerContext{
		ClientBuilder:                   clientBuilder,
		InformerFactory:                 sharedInformers,
		ObjectOrMetadataInformerFactory: controller.NewInformerFactory(sharedInformers, metadataInformers),
		ComponentConfig:                 s.ComponentConfig,
		RESTMapper:                      restMapper,
		AvailableResources:              availableResources,
		Cloud:                           cloud,
		LoopMode:                        loopMode,
		Stop:                            stop,
		InformersStarted:                make(chan struct{}),
		ResyncPeriod:                    ResyncPeriod(s),
	}, nil
}
// StartControllers starts a set of controllers with a specified ControllerContext.
// The SA token controller is started first (with full-power credentials); each
// remaining controller is then started in turn, with a small jitter between
// starts, and any debug handler it returns is mounted on unsecuredMux.
func StartControllers(ctx ControllerContext, startSATokenController InitFunc, controllers map[string]InitFunc, unsecuredMux *mux.PathRecorderMux) error {
	// Always start the SA token controller first using a full-power client, since it needs to mint tokens for the rest.
	// If this fails, just return here and fail since other controllers won't be able to get credentials.
	if _, _, err := startSATokenController(ctx); err != nil {
		return err
	}

	// Initialize the cloud provider with a reference to the clientBuilder only after the token
	// controller has started, in case the cloud provider uses the client builder.
	if ctx.Cloud != nil {
		ctx.Cloud.Initialize(ctx.ClientBuilder, ctx.Stop)
	}

	for name, start := range controllers {
		if !ctx.IsControllerEnabled(name) {
			klog.Warningf("%q is disabled", name)
			continue
		}

		// Spread controller startup over time so they don't all hit the
		// apiserver at once.
		time.Sleep(wait.Jitter(ctx.ComponentConfig.Generic.ControllerStartInterval.Duration, ControllerStartJitter))

		klog.V(1).Infof("Starting %q", name)
		debugHandler, started, err := start(ctx)
		if err != nil {
			klog.Errorf("Error starting %q", name)
			return err
		}
		if !started {
			klog.Warningf("Skipping %q", name)
			continue
		}
		if debugHandler != nil && unsecuredMux != nil {
			basePath := "/debug/controllers/" + name
			unsecuredMux.UnlistedHandle(basePath, http.StripPrefix(basePath, debugHandler))
			unsecuredMux.UnlistedHandlePrefix(basePath+"/", http.StripPrefix(basePath, debugHandler))
		}
		klog.Infof("Started %q", name)
	}

	return nil
}
// serviceAccountTokenControllerStarter is special because it must run first to set up permissions for other controllers.
// It cannot use the "normal" client builder, so it tracks its own. It must also avoid being included in the "normal"
// init map so that it can always run first.
type serviceAccountTokenControllerStarter struct {
	// rootClientBuilder provides the full-power credentials the token
	// controller needs to mint tokens for everything else.
	rootClientBuilder controller.ControllerClientBuilder
}
// startServiceAccountTokenController starts the controller that mints tokens
// for ServiceAccounts. It must run before every other controller so their
// service accounts have usable credentials, and it uses the root (full-power)
// client builder rather than the normal one.
//
// Per the InitFunc contract it returns (debug handler, enabled, error). It is
// skipped with enabled=false and a nil error when the controller is disabled
// by configuration or no service-account private key is configured.
func (c serviceAccountTokenControllerStarter) startServiceAccountTokenController(ctx ControllerContext) (http.Handler, bool, error) {
	if !ctx.IsControllerEnabled(saTokenControllerName) {
		klog.Warningf("%q is disabled", saTokenControllerName)
		return nil, false, nil
	}
	if len(ctx.ComponentConfig.SAController.ServiceAccountKeyFile) == 0 {
		klog.Warningf("%q is disabled because there is no private key", saTokenControllerName)
		return nil, false, nil
	}
	privateKey, err := keyutil.PrivateKeyFromFile(ctx.ComponentConfig.SAController.ServiceAccountKeyFile)
	if err != nil {
		return nil, true, fmt.Errorf("error reading key for service account token controller: %v", err)
	}

	var rootCA []byte
	if ctx.ComponentConfig.SAController.RootCAFile != "" {
		if rootCA, err = readCA(ctx.ComponentConfig.SAController.RootCAFile); err != nil {
			return nil, true, fmt.Errorf("error parsing root-ca-file at %s: %v", ctx.ComponentConfig.SAController.RootCAFile, err)
		}
	} else {
		// No explicit root CA configured: fall back to the CA the root
		// client is already using.
		rootCA = c.rootClientBuilder.ConfigOrDie("tokens-controller").CAData
	}

	tokenGenerator, err := serviceaccount.JWTTokenGenerator(serviceaccount.LegacyIssuer, privateKey)
	if err != nil {
		// Fix: report enabled=true, consistent with every other error path in
		// this function — the controller was enabled, it just failed to start.
		return nil, true, fmt.Errorf("failed to build token generator: %v", err)
	}
	controller, err := serviceaccountcontroller.NewTokensController(
		ctx.InformerFactory.Core().V1().ServiceAccounts(),
		ctx.InformerFactory.Core().V1().Secrets(),
		c.rootClientBuilder.ClientOrDie("tokens-controller"),
		serviceaccountcontroller.TokensControllerOptions{
			TokenGenerator: tokenGenerator,
			RootCA:         rootCA,
		},
	)
	if err != nil {
		return nil, true, fmt.Errorf("error creating Tokens controller: %v", err)
	}
	go controller.Run(int(ctx.ComponentConfig.SAController.ConcurrentSATokenSyncs), ctx.Stop)

	// Start the first set of informers now so that other controllers can start.
	ctx.InformerFactory.Start(ctx.Stop)

	return nil, true, nil
}
// readCA reads the PEM-encoded certificate bundle at file and validates that
// it parses as one or more certificates. It returns the raw file contents
// (not the parsed certificates) on success.
func readCA(file string) ([]byte, error) {
	rootCA, err := ioutil.ReadFile(file)
	if err != nil {
		return nil, err
	}
	// Validate only; callers distribute the raw PEM bytes.
	if _, err := certutil.ParseCertsPEM(rootCA); err != nil {
		return nil, err
	}
	// Fix: return an explicit nil error instead of the stale outer `err`,
	// which is only incidentally nil at this point.
	return rootCA, nil
}
// shouldTurnOnDynamicClient reports whether the TokenRequest-based dynamic
// client builder can be used: the TokenRequest feature gate must be enabled
// and the apiserver must expose a creatable serviceaccounts/token
// subresource in the authentication.k8s.io group.
func shouldTurnOnDynamicClient(client clientset.Interface) bool {
	if !utilfeature.DefaultFeatureGate.Enabled(features.TokenRequest) {
		return false
	}
	resources, err := client.Discovery().ServerResourcesForGroupVersion(v1.SchemeGroupVersion.String())
	if err != nil {
		klog.Warningf("fetch api resource lists failed, use legacy client builder: %v", err)
		return false
	}
	for _, res := range resources.APIResources {
		if res.Name != "serviceaccounts/token" ||
			res.Group != "authentication.k8s.io" {
			continue
		}
		if sets.NewString(res.Verbs...).Has("create") {
			return true
		}
	}
	return false
}
| apache-2.0 |
ndimiduk/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestServerCrashProcedureCarryingMetaStuck.java | 4073 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({ MasterTests.class, MediumTests.class })
public class TestServerCrashProcedureCarryingMetaStuck {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestServerCrashProcedureCarryingMetaStuck.class);

  private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();

  @BeforeClass
  public static void setUp() throws Exception {
    // Three region servers so there is somewhere for meta to move after a crash.
    UTIL.startMiniCluster(3);
    // Disable the balancer so region placement stays where the test puts it.
    UTIL.getAdmin().balancerSwitch(false, true);
  }

  @AfterClass
  public static void tearDown() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  @Test
  public void test() throws Exception {
    // Locate the region server thread currently carrying hbase:meta.
    RegionServerThread metaServerThread = null;
    for (RegionServerThread thread : UTIL.getMiniHBaseCluster().getRegionServerThreads()) {
      if (!thread.getRegionServer().getRegions(TableName.META_TABLE_NAME).isEmpty()) {
        metaServerThread = thread;
        break;
      }
    }
    HRegionServer metaServer = metaServerThread.getRegionServer();
    RegionInfo metaRegion = metaServer.getRegions(TableName.META_TABLE_NAME).get(0).getRegionInfo();
    HMaster master = UTIL.getMiniHBaseCluster().getMaster();
    ProcedureExecutor<MasterProcedureEnv> procExec = master.getMasterProcedureExecutor();
    // Park a dummy procedure on the meta region so region procedures queue behind it.
    DummyRegionProcedure blocker = new DummyRegionProcedure(procExec.getEnvironment(), metaRegion);
    long blockerProcId = master.getMasterProcedureExecutor().submitProcedure(blocker);
    blocker.waitUntilArrive();
    try (AsyncConnection conn =
      ConnectionFactory.createAsyncConnection(UTIL.getConfiguration()).get()) {
      AsyncAdmin admin = conn.getAdmin();
      // Kick off a move of meta, then crash the server carrying it.
      CompletableFuture<Void> moveFuture = admin.move(metaRegion.getRegionName());
      metaServer.abort("For testing!");
      // Wait until a TRSP for the meta region shows up (queued behind the blocker).
      UTIL.waitFor(30000,
        () -> procExec.getProcedures().stream()
          .filter(p -> p instanceof TransitRegionStateProcedure)
          .map(p -> (TransitRegionStateProcedure) p)
          .anyMatch(p -> Bytes.equals(metaRegion.getRegionName(), p.getRegion().getRegionName())));
      blocker.resume();
      UTIL.waitFor(30000, () -> procExec.isFinished(blockerProcId));
      // see whether the move region procedure can finish properly
      moveFuture.get(30, TimeUnit.SECONDS);
    }
  }
}
| apache-2.0 |
littlezhou/hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java | 14657 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SHARED_FILE_DESCRIPTOR_PATHS;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_SHARED_FILE_DESCRIPTOR_PATHS_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS_DEFAULT;
import java.io.Closeable;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.InvalidRequestException;
import org.apache.hadoop.hdfs.ExtendedBlockId;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.ShmId;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.Slot;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId;
import org.apache.hadoop.io.nativeio.SharedFileDescriptorFactory;
import org.apache.hadoop.net.unix.DomainSocket;
import org.apache.hadoop.net.unix.DomainSocketWatcher;
import org.apache.hadoop.hdfs.shortcircuit.DfsClientShmManager;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultimap;
/**
* Manages client short-circuit memory segments on the DataNode.
*
* DFSClients request shared memory segments from the DataNode. The
* ShortCircuitRegistry generates and manages these segments. Each segment
* has a randomly generated 128-bit ID which uniquely identifies it. The
* segments each contain several "slots."
*
* Before performing a short-circuit read, DFSClients must request a pair of
* file descriptors from the DataNode via the REQUEST_SHORT_CIRCUIT_FDS
* operation. As part of this operation, DFSClients pass the ID of the shared
* memory segment they would like to use to communicate information about this
* replica, as well as the slot number within that segment they would like to
* use. Slot allocation is always done by the client.
*
* Slots are used to track the state of the block on the both the client and
* datanode. When this DataNode mlocks a block, the corresponding slots for the
* replicas are marked as "anchorable". Anchorable blocks can be safely read
* without verifying the checksum. This means that BlockReaderLocal objects
* using these replicas can skip checksumming. It also means that we can do
* zero-copy reads on these replicas (the ZCR interface has no way of
* verifying checksums.)
*
* When a DN needs to munlock a block, it needs to first wait for the block to
* be unanchored by clients doing a no-checksum read or a zero-copy read. The
* DN also marks the block's slots as "unanchorable" to prevent additional
* clients from initiating these operations in the future.
*
* The counterpart of this class on the client is {@link DfsClientShmManager}.
*/
public class ShortCircuitRegistry {
public static final Logger LOG =
LoggerFactory.getLogger(ShortCircuitRegistry.class);
private static final int SHM_LENGTH = 8192;
public static class RegisteredShm extends ShortCircuitShm
implements DomainSocketWatcher.Handler {
private final String clientName;
private final ShortCircuitRegistry registry;
RegisteredShm(String clientName, ShmId shmId, FileInputStream stream,
ShortCircuitRegistry registry) throws IOException {
super(shmId, stream);
this.clientName = clientName;
this.registry = registry;
}
@Override
public boolean handle(DomainSocket sock) {
synchronized (registry) {
synchronized (this) {
registry.removeShm(this);
}
}
return true;
}
String getClientName() {
return clientName;
}
}
public synchronized void removeShm(ShortCircuitShm shm) {
if (LOG.isTraceEnabled()) {
LOG.trace("removing shm " + shm);
}
// Stop tracking the shmId.
RegisteredShm removedShm = segments.remove(shm.getShmId());
Preconditions.checkState(removedShm == shm,
"failed to remove " + shm.getShmId());
// Stop tracking the slots.
for (Iterator<Slot> iter = shm.slotIterator(); iter.hasNext(); ) {
Slot slot = iter.next();
boolean removed = slots.remove(slot.getBlockId(), slot);
Preconditions.checkState(removed);
slot.makeInvalid();
}
// De-allocate the memory map and close the shared file.
shm.free();
}
/**
* Whether or not the registry is enabled.
*/
private boolean enabled;
/**
* The factory which creates shared file descriptors.
*/
private final SharedFileDescriptorFactory shmFactory;
/**
* A watcher which sends out callbacks when the UNIX domain socket
* associated with a shared memory segment closes.
*/
private final DomainSocketWatcher watcher;
private final HashMap<ShmId, RegisteredShm> segments =
new HashMap<ShmId, RegisteredShm>(0);
private final HashMultimap<ExtendedBlockId, Slot> slots =
HashMultimap.create(0, 1);
public ShortCircuitRegistry(Configuration conf) throws IOException {
boolean enabled = false;
SharedFileDescriptorFactory shmFactory = null;
DomainSocketWatcher watcher = null;
try {
int interruptCheck = conf.getInt(
DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS,
DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS_DEFAULT);
if (interruptCheck <= 0) {
throw new IOException(
DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS +
" was set to " + interruptCheck);
}
String[] shmPaths =
conf.getTrimmedStrings(DFS_DATANODE_SHARED_FILE_DESCRIPTOR_PATHS);
if (shmPaths.length == 0) {
shmPaths =
DFS_DATANODE_SHARED_FILE_DESCRIPTOR_PATHS_DEFAULT.split(",");
}
shmFactory = SharedFileDescriptorFactory.
create("HadoopShortCircuitShm_", shmPaths);
String dswLoadingFailure = DomainSocketWatcher.getLoadingFailureReason();
if (dswLoadingFailure != null) {
throw new IOException(dswLoadingFailure);
}
watcher = new DomainSocketWatcher(interruptCheck, "datanode");
enabled = true;
if (LOG.isDebugEnabled()) {
LOG.debug("created new ShortCircuitRegistry with interruptCheck=" +
interruptCheck + ", shmPath=" + shmFactory.getPath());
}
} catch (IOException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Disabling ShortCircuitRegistry", e);
}
} finally {
this.enabled = enabled;
this.shmFactory = shmFactory;
this.watcher = watcher;
}
}
/**
* Process a block mlock event from the FsDatasetCache.
*
* @param blockId The block that was mlocked.
*/
public synchronized void processBlockMlockEvent(ExtendedBlockId blockId) {
if (!enabled) return;
Set<Slot> affectedSlots = slots.get(blockId);
for (Slot slot : affectedSlots) {
slot.makeAnchorable();
}
}
/**
* Mark any slots associated with this blockId as unanchorable.
*
* @param blockId The block ID.
* @return True if we should allow the munlock request.
*/
public synchronized boolean processBlockMunlockRequest(
ExtendedBlockId blockId) {
if (!enabled) return true;
boolean allowMunlock = true;
Set<Slot> affectedSlots = slots.get(blockId);
for (Slot slot : affectedSlots) {
slot.makeUnanchorable();
if (slot.isAnchored()) {
allowMunlock = false;
}
}
return allowMunlock;
}
/**
* Invalidate any slot associated with a blockId that we are invalidating
* (deleting) from this DataNode. When a slot is invalid, the DFSClient will
* not use the corresponding replica for new read or mmap operations (although
* existing, ongoing read or mmap operations will complete.)
*
* @param blockId The block ID.
*/
public synchronized void processBlockInvalidation(ExtendedBlockId blockId) {
if (!enabled) return;
final Set<Slot> affectedSlots = slots.get(blockId);
if (!affectedSlots.isEmpty()) {
final StringBuilder bld = new StringBuilder();
String prefix = "";
bld.append("Block ").append(blockId).append(" has been invalidated. ").
append("Marking short-circuit slots as invalid: ");
for (Slot slot : affectedSlots) {
slot.makeInvalid();
bld.append(prefix).append(slot.toString());
prefix = ", ";
}
LOG.info(bld.toString());
}
}
public synchronized String getClientNames(ExtendedBlockId blockId) {
if (!enabled) return "";
final HashSet<String> clientNames = new HashSet<String>();
final Set<Slot> affectedSlots = slots.get(blockId);
for (Slot slot : affectedSlots) {
clientNames.add(((RegisteredShm)slot.getShm()).getClientName());
}
return Joiner.on(",").join(clientNames);
}
public static class NewShmInfo implements Closeable {
private final ShmId shmId;
private final FileInputStream stream;
NewShmInfo(ShmId shmId, FileInputStream stream) {
this.shmId = shmId;
this.stream = stream;
}
public ShmId getShmId() {
return shmId;
}
public FileInputStream getFileStream() {
return stream;
}
@Override
public void close() throws IOException {
stream.close();
}
}
/**
* Handle a DFSClient request to create a new memory segment.
*
* @param clientName Client name as reported by the client.
* @param sock The DomainSocket to associate with this memory
* segment. When this socket is closed, or the
* other side writes anything to the socket, the
* segment will be closed. This can happen at any
* time, including right after this function returns.
* @return A NewShmInfo object. The caller must close the
* NewShmInfo object once they are done with it.
* @throws IOException If the new memory segment could not be created.
*/
public NewShmInfo createNewMemorySegment(String clientName,
DomainSocket sock) throws IOException {
NewShmInfo info = null;
RegisteredShm shm = null;
ShmId shmId = null;
synchronized (this) {
if (!enabled) {
if (LOG.isTraceEnabled()) {
LOG.trace("createNewMemorySegment: ShortCircuitRegistry is " +
"not enabled.");
}
throw new UnsupportedOperationException();
}
FileInputStream fis = null;
try {
do {
shmId = ShmId.createRandom();
} while (segments.containsKey(shmId));
fis = shmFactory.createDescriptor(clientName, SHM_LENGTH);
shm = new RegisteredShm(clientName, shmId, fis, this);
} finally {
if (shm == null) {
IOUtils.closeQuietly(fis);
}
}
info = new NewShmInfo(shmId, fis);
segments.put(shmId, shm);
}
// Drop the registry lock to prevent deadlock.
// After this point, RegisteredShm#handle may be called at any time.
watcher.add(sock, shm);
if (LOG.isTraceEnabled()) {
LOG.trace("createNewMemorySegment: created " + info.shmId);
}
return info;
}
/**
 * Register a slot of an existing shared memory segment as being in use
 * for the given block.
 *
 * @param blockId   The block the slot will refer to.
 * @param slotId    Identifies the shared memory segment and the slot
 *                    index within it.
 * @param isCached  Whether the block is cached; controls whether the
 *                    slot is made anchorable or unanchorable.
 * @throws InvalidRequestException  If no segment with the given shmId
 *                    is registered.
 * @throws UnsupportedOperationException  If the registry is not enabled.
 */
public synchronized void registerSlot(ExtendedBlockId blockId, SlotId slotId,
    boolean isCached) throws InvalidRequestException {
  if (!enabled) {
    if (LOG.isTraceEnabled()) {
      LOG.trace(this + " can't register a slot because the " +
          "ShortCircuitRegistry is not enabled.");
    }
    throw new UnsupportedOperationException();
  }
  ShmId shmId = slotId.getShmId();
  RegisteredShm shm = segments.get(shmId);
  if (shm == null) {
    throw new InvalidRequestException("there is no shared memory segment " +
        "registered with shmId " + shmId);
  }
  Slot slot = shm.registerSlot(slotId.getSlotIdx(), blockId);
  if (isCached) {
    slot.makeAnchorable();
  } else {
    slot.makeUnanchorable();
  }
  // Track the slot under its block so it can be found by block ID later.
  boolean added = slots.put(blockId, slot);
  Preconditions.checkState(added);
  if (LOG.isTraceEnabled()) {
    LOG.trace(this + ": registered " + blockId + " with slot " +
        slotId + " (isCached=" + isCached + ")");
  }
}
/**
 * Unregister a slot: mark it invalid, remove it from its owning shared
 * memory segment, and drop it from the block-to-slot map.
 *
 * @param slotId    Identifies the shared memory segment and the slot
 *                    index within it.
 * @throws InvalidRequestException  If no segment with the given shmId
 *                    is registered.
 * @throws UnsupportedOperationException  If the registry is not enabled.
 */
public synchronized void unregisterSlot(SlotId slotId)
    throws InvalidRequestException {
  if (!enabled) {
    if (LOG.isTraceEnabled()) {
      LOG.trace("unregisterSlot: ShortCircuitRegistry is " +
          "not enabled.");
    }
    throw new UnsupportedOperationException();
  }
  ShmId shmId = slotId.getShmId();
  RegisteredShm shm = segments.get(shmId);
  if (shm == null) {
    throw new InvalidRequestException("there is no shared memory segment " +
        "registered with shmId " + shmId);
  }
  Slot slot = shm.getSlot(slotId.getSlotIdx());
  // Invalidate before unregistering so readers see the slot as dead.
  slot.makeInvalid();
  shm.unregisterSlot(slotId.getSlotIdx());
  slots.remove(slot.getBlockId(), slot);
}
/**
 * Shut down the registry and close the domain socket watcher.
 * Safe to call multiple times; only the call that actually disables the
 * registry closes the watcher.
 */
public void shutdown() {
  boolean wasEnabled;
  synchronized (this) {
    wasEnabled = enabled;
    enabled = false;
  }
  if (!wasEnabled) {
    return;
  }
  IOUtils.closeQuietly(watcher);
}
/**
 * Callback interface used by {@link #visit(Visitor)} to expose the
 * registry's internal maps (for testing).
 */
public static interface Visitor {
  boolean accept(HashMap<ShmId, RegisteredShm> segments,
                 HashMultimap<ExtendedBlockId, Slot> slots);
}

/**
 * Run the given visitor against the registry's internal maps while
 * holding the registry lock.  Test-only hook.
 *
 * @return whatever the visitor's accept method returned
 */
@VisibleForTesting
public synchronized boolean visit(Visitor visitor) {
  return visitor.accept(segments, slots);
}
}
| apache-2.0 |
ThorbenLindhauer/activiti-engine-ppi | modules/activiti-engine/src/test/java/org/activiti/engine/test/api/runtime/DummySerializable.java | 847 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.test.api.runtime;
import java.io.Serializable;
/**
 * Dummy class that implements {@link Serializable}.
 *
 * @author Frederik Heremans
 */
public class DummySerializable implements Serializable {

  // Fixed UID so serialized instances remain compatible across revisions.
  private static final long serialVersionUID = 1L;
}
| apache-2.0 |
ftomassetti/java-symbol-solver | java-symbol-solver-testing/src/test/resources/javassist_symbols/main_jar/src/com/github/javaparser/javasymbolsolver/javassist_symbols/main_jar/EnumInterfaceUserIncludedJar.java | 269 | package com.github.javaparser.javasymbolsolver.javassist_symbols.main_jar;
import com.github.javaparser.javasymbolsolver.javassist_symbols.included_jar.InterfaceIncludedJar;
// Test-resource enum implementing an interface that lives in a separate
// ("included") jar, exercising symbol resolution across jar boundaries.
public enum EnumInterfaceUserIncludedJar implements InterfaceIncludedJar {

    // Single constant; the enum's shape, not its values, is what's under test.
    OWN_ENUM_VAL
}
arehart13/smile | core/src/test/java/smile/wavelet/SymmletWaveletTest.java | 1862 | /*******************************************************************************
* Copyright (c) 2010 Haifeng Li
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package smile.wavelet;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Haifeng Li
*/
public class SymmletWaveletTest {
public SymmletWaveletTest() {
}
@BeforeClass
public static void setUpClass() throws Exception {
}
@AfterClass
public static void tearDownClass() throws Exception {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
@Test
public void testFilter() {
System.out.println("filter");
for (int p = 8; p <= 20; p += 2) {
System.out.format("p = %d\n", p);
double[] a = {.2, -.4, -.6, -.5, -.8, -.4, -.9, 0, -.2, .1, -.1, .1, .7, .9, 0, .3};
double[] b = a.clone();
Wavelet instance = new SymmletWavelet(p);
instance.transform(a);
instance.inverse(a);
for (int i = 0; i < a.length; i++) {
assertEquals(b[i], a[i], 1E-7);
}
}
}
} | apache-2.0 |
archerjd/modularcombat_v3.0.0 | 3.0.0/src/vgui2/vgui_controls/DirectorySelectDialog.cpp | 18525 | //========= Copyright Valve Corporation, All rights reserved. ============//
//
// Purpose:
//
//=============================================================================//
#define PROTECTED_THINGS_DISABLE
#include <vgui_controls/Button.h>
#include <vgui_controls/ComboBox.h>
#include <vgui_controls/DirectorySelectDialog.h>
#include <vgui_controls/TreeView.h>
#include <vgui_controls/ImageList.h>
#include <vgui_controls/MessageBox.h>
#include <vgui/Cursor.h>
#include <KeyValues.h>
#include <vgui/IInput.h>
#include <vgui/ISurface.h>
#include <vgui/ISystem.h>
#include <filesystem.h>
#ifdef WIN32
#include <direct.h>
#include <stdio.h>
#include <io.h>
#endif
#include <sys/types.h>
#include <sys/stat.h>
// memdbgon must be the last include file in a .cpp file!!!
#include <tier0/memdbgon.h>
using namespace vgui;
// Tree view that defers child-node generation to its owning
// DirectorySelectDialog, so directory nodes are populated lazily on expand.
DirectoryTreeView::DirectoryTreeView(DirectorySelectDialog *parent, const char *name) : TreeView(parent, name)
{
	m_pParent = parent;
}
// Called by the base TreeView when a node needs its children; forwards to the
// dialog, which knows how to enumerate subdirectories for the node.
void DirectoryTreeView::GenerateChildrenOfNode(int itemIndex)
{
	m_pParent->GenerateChildrenOfDirectoryNode(itemIndex);
}
//-----------------------------------------------------------------------------
// Purpose: Used to prompt the user to create a directory
//-----------------------------------------------------------------------------
class CreateDirectoryDialog : public Frame
{
	DECLARE_CLASS_SIMPLE(CreateDirectoryDialog, Frame);

public:
	CreateDirectoryDialog(Panel *parent, const char *defaultCreateDirName) : BaseClass(parent, NULL)
	{
		SetSize(320, 100);
		SetSizeable(false);
		SetTitle("Choose directory name", false);
		MoveToCenterOfScreen();

		m_pOKButton = new Button(this, "OKButton", "#vgui_ok");
		// FIX: the cancel button was also created with the panel name
		// "OKButton" (copy-paste error); give it its own name so name-based
		// lookups can tell the two buttons apart.
		m_pCancelButton = new Button(this, "CancelButton", "#vgui_cancel");
		m_pNameEntry = new TextEntry(this, "NameEntry");
		m_pOKButton->SetCommand("OK");
		m_pCancelButton->SetCommand("Close");

		// Pre-fill the suggested name, focused and fully selected so the user
		// can simply type over it.
		m_pNameEntry->SetText(defaultCreateDirName);
		m_pNameEntry->RequestFocus();
		m_pNameEntry->SelectAllText(true);

		// If some other window was hogging the input focus, then we have to hog it or else we'll never get input.
		m_PrevAppFocusPanel = vgui::input()->GetAppModalSurface();
		if ( m_PrevAppFocusPanel )
			vgui::input()->SetAppModalSurface( GetVPanel() );
	}

	~CreateDirectoryDialog()
	{
		// Restore whichever panel owned app-modal focus before we grabbed it
		if ( m_PrevAppFocusPanel )
			vgui::input()->SetAppModalSurface( m_PrevAppFocusPanel );
	}

	virtual void PerformLayout()
	{
		BaseClass::PerformLayout();

		// fixed layout: entry across the top, OK/Cancel bottom-right
		m_pNameEntry->SetBounds(24, 32, GetWide() - 48, 24);
		m_pOKButton->SetBounds(GetWide() - 176, 64, 72, 24);
		m_pCancelButton->SetBounds(GetWide() - 94, 64, 72, 24);
	}

	virtual void OnCommand(const char *command)
	{
		if (!stricmp(command, "OK"))
		{
			// Tell listeners which directory name was chosen, then close
			PostActionSignal(new KeyValues("CreateDirectory", "dir", GetControlString("NameEntry")));
			Close();
		}
		else
		{
			BaseClass::OnCommand(command);
		}
	}

	virtual void OnClose()
	{
		// self-deleting dialog
		BaseClass::OnClose();
		MarkForDeletion();
	}

private:
	vgui::Button *m_pOKButton;
	vgui::Button *m_pCancelButton;
	vgui::TextEntry *m_pNameEntry;
	// panel that was app-modal before this dialog opened (0 if none)
	vgui::VPANEL m_PrevAppFocusPanel;
};
//-----------------------------------------------------------------------------
// Purpose: Constructor - creates the child controls; positioning is done
//			later in PerformLayout()
//-----------------------------------------------------------------------------
DirectorySelectDialog::DirectorySelectDialog(vgui::Panel *parent, const char *title) : Frame(parent, NULL)
{
	SetTitle(title, true);
	SetSize(320, 360);
	SetMinimumSize(300, 240);

	m_szCurrentDir[0] = 0;
	m_szDefaultCreateDirName[0] = 0;

	m_pDirTree = new DirectoryTreeView(this, "DirTree");
	m_pDriveCombo = new ComboBox(this, "DriveCombo", 6, false);
	m_pCancelButton = new Button(this, "CancelButton", "#VGui_Cancel");
	m_pSelectButton = new Button(this, "SelectButton", "#VGui_Select");
	m_pCreateButton = new Button(this, "CreateButton", "#VGui_CreateFolder");
	// these command strings are dispatched in OnCommand()
	m_pCancelButton->SetCommand("Cancel");
	m_pSelectButton->SetCommand("Select");
	m_pCreateButton->SetCommand("Create");
}
//-----------------------------------------------------------------------------
// Purpose: lays out controls - drive combo on top, tree stretching with the
//			frame, buttons pinned along the bottom
//-----------------------------------------------------------------------------
void DirectorySelectDialog::PerformLayout()
{
	BaseClass::PerformLayout();

	// lay out all the controls
	m_pDriveCombo->SetBounds(24, 30, GetWide() - 48, 24);
	m_pDirTree->SetBounds(24, 64, GetWide() - 48, GetTall() - 128);

	m_pCreateButton->SetBounds(24, GetTall() - 48, 104, 24);
	m_pSelectButton->SetBounds(GetWide() - 172, GetTall() - 48, 72, 24);
	m_pCancelButton->SetBounds(GetWide() - 96, GetTall() - 48, 72, 24);
}
//-----------------------------------------------------------------------------
// Purpose: applies scheme settings - loads the folder icons used by the
//			directory tree (the old banner said "lays out controls", which
//			was a copy-paste of the PerformLayout banner)
//-----------------------------------------------------------------------------
void DirectorySelectDialog::ApplySchemeSettings(IScheme *pScheme)
{
	// image index 1 = normal folder, index 2 = selected folder
	ImageList *imageList = new ImageList(false);
	imageList->AddImage(scheme()->GetImage("Resource/icon_folder", false));
	imageList->AddImage(scheme()->GetImage("Resource/icon_folder_selected", false));

	// 'true' hands ownership of the image list to the tree view
	m_pDirTree->SetImageList(imageList, true);

	BaseClass::ApplySchemeSettings(pScheme);
}
//-----------------------------------------------------------------------------
// Purpose: Advance past one path component.  Skips an optional leading
//			backslash, the component's name, and an optional trailing
//			backslash, returning a pointer to the character just past it.
// Input  : pStart - current position in the path string
//			nCount - (out, optional) number of characters advanced
//-----------------------------------------------------------------------------
inline const char *MoveToNextSubDir( const char *pStart, int *nCount )
{
	int nAdvanced = 0;

	// Skip a single pre-pended path separator, if present
	if ( pStart[nAdvanced] == '\\' )
	{
		++nAdvanced;
	}

	// Consume the directory name itself, up to the next separator or the
	// end of the string
	while ( pStart[nAdvanced] != '\0' && pStart[nAdvanced] != '\\' )
	{
		++nAdvanced;
	}

	// Skip the trailing separator, if present
	if ( pStart[nAdvanced] == '\\' )
	{
		++nAdvanced;
	}

	// Report how far we moved, if the caller wants to know
	if ( nCount != NULL )
	{
		*nCount = nAdvanced;
	}

	// The beginning of the next component, past the separator
	return pStart + nAdvanced;
}
//-----------------------------------------------------------------------------
// Purpose: Walk through our directory structure given a path as our guide, while expanding
// and populating the nodes of the tree view to match
// Input : *path - path (with drive letter) to show
//         bSelectFinalDirectory - if true, the deepest matched node is
//         selected in the tree when the walk completes
//-----------------------------------------------------------------------------
void DirectorySelectDialog::ExpandTreeToPath( const char *lpszPath, bool bSelectFinalDirectory /*= true*/ )
{
	// Make sure our slashes are correct!
	char workPath[MAX_PATH];
	Q_strncpy( workPath, lpszPath, sizeof(workPath) );
	Q_FixSlashes( workPath );

	// Set us to the work drive
	SetStartDirectory( workPath );

	// Check that the path is valid; fall back to C:\ if the drive has
	// nothing browsable under it
	if ( workPath[0] == '\0' || DoesDirectoryHaveSubdirectories( m_szCurrentDrive, "" ) == false )
	{
		// Failing, start in C:
		SetStartDirectory( "C:\\" );
	}

	// Start at the root of our tree
	int nItemIndex = m_pDirTree->GetRootItemIndex();

	// Move past the drive letter to the first subdir
	int nPathPos = 0;
	const char *lpszSubDirName = MoveToNextSubDir( workPath, &nPathPos );
	const char *lpszLastSubDirName = NULL;
	int nPathIncr = 0;
	char subDirName[MAX_PATH];

	// While there are subdirectory names present, expand and populate the tree with their subdirectories
	while ( lpszSubDirName[0] != '\0' )
	{
		// Move our string pointer forward while keeping where our last subdir started off
		lpszLastSubDirName = lpszSubDirName;
		lpszSubDirName = MoveToNextSubDir( lpszSubDirName, &nPathIncr );

		// Get the span between the last subdir and the new one
		Q_StrLeft( lpszLastSubDirName, nPathIncr, subDirName, sizeof(subDirName) );
		Q_StripTrailingSlash( subDirName );

		// Increment where we are in the string for use later
		nPathPos += nPathIncr;

		// Run through the list and expand to our currently selected directory
		for ( int i = 0; i < m_pDirTree->GetNumChildren( nItemIndex ); i++ )
		{
			// Get the child and data for it
			int nChild = m_pDirTree->GetChild( nItemIndex, i );
			KeyValues *pValues = m_pDirTree->GetItemData( nChild );

			// See if this matches (case-insensitive, like Windows paths)
			if ( Q_stricmp( pValues->GetString( "Text" ), subDirName ) == 0 )
			{
				// This is the new root item
				nItemIndex = nChild;

				// Get the full path (starting from the drive letter) up to our current subdir
				Q_strncpy( subDirName, workPath, nPathPos );
				Q_AppendSlash( subDirName, sizeof(subDirName) );

				// Expand the tree node and populate its subdirs for our next iteration
				ExpandTreeNode( subDirName, nItemIndex );
				break;
			}
		}
	}

	// Select our last directory if we've been asked to (and it's valid)
	if ( bSelectFinalDirectory && m_pDirTree->IsItemIDValid( nItemIndex ) )
	{
		// If we don't call this once before selecting an item, the tree will not be properly expanded
		// before it calculates how to show the selected item in the view
		PerformLayout();

		// Select that item
		m_pDirTree->AddSelectedItem( nItemIndex, true );
	}
}
//-----------------------------------------------------------------------------
// Purpose: sets where it should start searching, rebuilding the drive list
//			and the directory tree for the new location
//-----------------------------------------------------------------------------
void DirectorySelectDialog::SetStartDirectory(const char *path)
{
	// FIX: strncpy does not null-terminate on truncation; terminate
	// m_szCurrentDir explicitly (m_szCurrentDrive already was).
	strncpy(m_szCurrentDir, path, sizeof(m_szCurrentDir));
	m_szCurrentDir[sizeof(m_szCurrentDir) - 1] = 0;

	// extract just the drive portion ("X:\") by cutting after the first slash
	strncpy(m_szCurrentDrive, path, sizeof(m_szCurrentDrive));
	m_szCurrentDrive[sizeof(m_szCurrentDrive) - 1] = 0;
	char *firstSlash = strstr(m_szCurrentDrive, "\\");
	if (firstSlash)
	{
		firstSlash[1] = 0;
	}

	BuildDirTree();
	BuildDriveChoices();

	// update state of create directory button
	int selectedIndex = m_pDirTree->GetFirstSelectedItem();
	if (m_pDirTree->IsItemIDValid(selectedIndex))
	{
		m_pCreateButton->SetEnabled(true);
	}
	else
	{
		m_pCreateButton->SetEnabled(false);
	}
}
//-----------------------------------------------------------------------------
// Purpose: sets what name should show up by default in the create directory dialog
//-----------------------------------------------------------------------------
void DirectorySelectDialog::SetDefaultCreateDirectoryName(const char *defaultCreateDirName)
{
strncpy(m_szDefaultCreateDirName, defaultCreateDirName, sizeof(m_szDefaultCreateDirName));
m_szDefaultCreateDirName[sizeof(m_szDefaultCreateDirName) - 1] = 0;
}
//-----------------------------------------------------------------------------
// Purpose: opens the dialog, making it app-modal so input elsewhere is
//			blocked until it closes
//-----------------------------------------------------------------------------
void DirectorySelectDialog::DoModal()
{
	input()->SetAppModalSurface(GetVPanel());
	BaseClass::Activate();
	MoveToCenterOfScreen();
}
//-----------------------------------------------------------------------------
// Purpose: Builds drive choices for the combo box and activates the entry
//			matching the drive currently being browsed
//-----------------------------------------------------------------------------
void DirectorySelectDialog::BuildDriveChoices()
{
	m_pDriveCombo->DeleteAllItems();

	// GetAvailableDrives() apparently packs each drive as a fixed 4-byte
	// entry ("x:\" plus NUL) -- inferred from the len / 4 loop and the
	// pBuf += 4 stride below; TODO confirm against ISystem.
	char drives[256] = { 0 };
	int len = system()->GetAvailableDrives(drives, sizeof(drives));
	char *pBuf = drives;
	KeyValues *kv = new KeyValues("drive");
	for (int i = 0; i < len / 4; i++)
	{
		kv->SetString("drive", pBuf);
		int itemID = m_pDriveCombo->AddItem(pBuf, kv);
		// keep the combo selection in sync with the drive we're browsing
		if (!stricmp(pBuf, m_szCurrentDrive))
		{
			m_pDriveCombo->ActivateItem(itemID);
		}
		pBuf += 4;
	}
	// AddItem() copies the KeyValues, so release our local template
	kv->deleteThis();
}
//-----------------------------------------------------------------------------
// Purpose: Builds the base tree directory for the current drive
//-----------------------------------------------------------------------------
void DirectorySelectDialog::BuildDirTree()
{
	// clear current tree
	m_pDirTree->RemoveAll();

	// add in a root (the drive itself, e.g. "C:\")
	int rootIndex = m_pDirTree->AddItem(new KeyValues("root", "Text", m_szCurrentDrive), -1);

	// build first level of the tree
	ExpandTreeNode(m_szCurrentDrive, rootIndex);

	// start the root expanded
	m_pDirTree->ExpandItem(rootIndex, true);
}
//-----------------------------------------------------------------------------
// Purpose: expands a path
//-----------------------------------------------------------------------------
void DirectorySelectDialog::ExpandTreeNode(const char *path, int parentNodeIndex)
{
// set the small wait cursor
surface()->SetCursor(dc_waitarrow);
// get all the subfolders of the current drive
char searchString[512];
sprintf(searchString, "%s*.*", path);
FileFindHandle_t h;
const char *pFileName = g_pFullFileSystem->FindFirstEx( searchString, NULL, &h );
for ( ; pFileName; pFileName = g_pFullFileSystem->FindNext( h ) )
{
if ( !Q_stricmp( pFileName, ".." ) || !Q_stricmp( pFileName, "." ) )
continue;
KeyValues *kv = new KeyValues("item");
kv->SetString("Text", pFileName);
// set the folder image
kv->SetInt("Image", 1);
kv->SetInt("SelectedImage", 1);
kv->SetInt("Expand", DoesDirectoryHaveSubdirectories(path, pFileName));
m_pDirTree->AddItem(kv, parentNodeIndex);
}
g_pFullFileSystem->FindClose( h );
}
//-----------------------------------------------------------------------------
// Purpose:
//-----------------------------------------------------------------------------
bool DirectorySelectDialog::DoesDirectoryHaveSubdirectories(const char *path, const char *dir)
{
char searchString[512];
sprintf(searchString, "%s%s\\*.*", path, dir);
FileFindHandle_t h;
const char *pFileName = g_pFullFileSystem->FindFirstEx( searchString, NULL, &h );
for ( ; pFileName; pFileName = g_pFullFileSystem->FindNext( h ) )
{
char szFullPath[ MAX_PATH ];
Q_snprintf( szFullPath, sizeof(szFullPath), "%s\\%s", path, pFileName );
Q_FixSlashes( szFullPath );
if ( g_pFullFileSystem->IsDirectory( szFullPath ) )
{
g_pFullFileSystem->FindClose( h );
return true;
}
}
g_pFullFileSystem->FindClose( h );
return false;
}
//-----------------------------------------------------------------------------
// Purpose: Generates the children for the specified node (called lazily by
//			the tree view when the node is expanded)
//-----------------------------------------------------------------------------
void DirectorySelectDialog::GenerateChildrenOfDirectoryNode(int nodeIndex)
{
	// generate path by walking from the node up to the drive root
	char path[512];
	GenerateFullPathForNode(nodeIndex, path, sizeof(path));

	// expand out
	ExpandTreeNode(path, nodeIndex);
}
//-----------------------------------------------------------------------------
// Purpose: creates the full path for a node by concatenating the names of
//			the node and all of its ancestors, root first.  The result ends
//			in a trailing backslash (the drive root already carries one).
//-----------------------------------------------------------------------------
void DirectorySelectDialog::GenerateFullPathForNode(int nodeIndex, char *path, int pathBufferSize)
{
	// get all the nodes, root first
	CUtlLinkedList<int, int> nodes;
	nodes.AddToTail(nodeIndex);
	int parentIndex = nodeIndex;
	while (1)
	{
		parentIndex = m_pDirTree->GetItemParent(parentIndex);
		if (parentIndex == -1)
			break;
		nodes.AddToHead(parentIndex);
	}

	// walk the nodes, adding to the path.
	// FIX: honor pathBufferSize -- the parameter existed but the old code
	// used unbounded strcat(), so a deep tree could overflow the caller's
	// buffer.  Q_strncat truncates safely and keeps the string terminated.
	path[0] = 0;
	bool bFirst = true;
	FOR_EACH_LL( nodes, i )
	{
		KeyValues *kv = m_pDirTree->GetItemData( nodes[i] );
		Q_strncat(path, kv->GetString("Text"), pathBufferSize);
		// the drive root already ends in '\'; every deeper component gets one
		if (!bFirst)
		{
			Q_strncat(path, "\\", pathBufferSize);
		}
		bFirst = false;
	}
}
//-----------------------------------------------------------------------------
// Purpose: Handles combo box changes - switching drives resets the whole tree
//-----------------------------------------------------------------------------
void DirectorySelectDialog::OnTextChanged()
{
	KeyValues *kv = m_pDriveCombo->GetActiveItemUserData();
	if (!kv)
		return;
	const char *newDrive = kv->GetString("drive");
	if (stricmp(newDrive, m_szCurrentDrive))
	{
		// drive changed, reset
		SetStartDirectory(newDrive);
	}
}
//-----------------------------------------------------------------------------
// Purpose: creates a directory
//-----------------------------------------------------------------------------
void DirectorySelectDialog::OnCreateDirectory(const char *dir)
{
int selectedIndex = m_pDirTree->GetFirstSelectedItem();
if (m_pDirTree->IsItemIDValid(selectedIndex))
{
char fullPath[512];
GenerateFullPathForNode(selectedIndex, fullPath, sizeof(fullPath));
// create the new directory underneath
strcat(fullPath, dir);
if (_mkdir(fullPath) == 0)
{
// add new path to tree view
KeyValues *kv = new KeyValues("item");
kv->SetString("Text", dir);
// set the folder image
kv->SetInt("Image", 1);
kv->SetInt("SelectedImage", 1);
int itemID = m_pDirTree->AddItem(kv, selectedIndex);
// select the item
m_pDirTree->AddSelectedItem( itemID, true );
}
else
{
// print error message
MessageBox *box = new MessageBox("#vgui_CreateDirectoryFail_Title", "#vgui_CreateDirectoryFail_Info");
box->DoModal(this);
}
}
}
//-----------------------------------------------------------------------------
// Purpose: dialog closes - this dialog is self-deleting
//-----------------------------------------------------------------------------
void DirectorySelectDialog::OnClose()
{
	BaseClass::OnClose();
	MarkForDeletion();
}
//-----------------------------------------------------------------------------
// Purpose: handles button commands ("Cancel", "Select", "Create"); anything
//			else is passed to the base Frame
//-----------------------------------------------------------------------------
void DirectorySelectDialog::OnCommand(const char *command)
{
	if (!stricmp(command, "Cancel"))
	{
		Close();
	}
	else if (!stricmp(command, "Select"))
	{
		// path selected - report it to listeners and close
		int selectedIndex = m_pDirTree->GetFirstSelectedItem();
		if (m_pDirTree->IsItemIDValid(selectedIndex))
		{
			char fullPath[512];
			GenerateFullPathForNode(selectedIndex, fullPath, sizeof(fullPath));
			PostActionSignal(new KeyValues("DirectorySelected", "dir", fullPath));
			Close();
		}
	}
	else if (!stricmp(command, "Create"))
	{
		// pop the name-entry dialog; it posts "CreateDirectory" back to us
		int selectedIndex = m_pDirTree->GetFirstSelectedItem();
		if (m_pDirTree->IsItemIDValid(selectedIndex))
		{
			CreateDirectoryDialog *dlg = new CreateDirectoryDialog(this, m_szDefaultCreateDirName);
			dlg->AddActionSignalTarget(this);
			dlg->Activate();
		}
	}
	else
	{
		BaseClass::OnCommand(command);
	}
}
//-----------------------------------------------------------------------------
// Purpose: Update the text in the combo when the tree selection changes, and
//			enable/disable the create-folder button accordingly
//-----------------------------------------------------------------------------
void DirectorySelectDialog::OnTreeViewItemSelected()
{
	int selectedIndex = m_pDirTree->GetFirstSelectedItem();
	if (!m_pDirTree->IsItemIDValid(selectedIndex))
	{
		// nothing selected - there is nowhere to create a folder
		m_pCreateButton->SetEnabled(false);
		return;
	}
	m_pCreateButton->SetEnabled(true);

	// build the string
	char fullPath[512];
	GenerateFullPathForNode(selectedIndex, fullPath, sizeof(fullPath));

	// mirror the full selected path into the drive combo's text
	int itemID = m_pDriveCombo->GetActiveItem();
	m_pDriveCombo->UpdateItem(itemID, fullPath, NULL);
	m_pDriveCombo->SetText(fullPath);
}
TheTypoMaster/SPHERE-Framework | Library/MathJax/2.5.0/localization/ast/MathMenu.js | 5826 | /*
* /MathJax/localization/ast/MathMenu.js
*
* Copyright (c) 2009-2015 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Asturian (ast) translations for MathJax's "MathMenu" domain.
// NOTE: machine-generated localization data -- change translations upstream
// (MathJax localization sources), not in this file.
MathJax.Localization.addTranslation( "ast", "MathMenu", {
  version: "2.5.0",
  isLoaded: true,
  strings: {
    Show: "Ver les f\u00F3rmules como",
    MathMLcode: "C\u00F3digu MathML",
    OriginalMathML: "MathML orixinal",
    TeXCommands: "Comandos de TeX",
    AsciiMathInput: "Entrada AsciiMathML",
    Original: "Forma orixinal",
    ErrorMessage: "Mensaxe d'error",
    Annotation: "Anotaci\u00F3n",
    TeX: "TeX",
    StarMath: "StarMath",
    Maple: "Maple",
    ContentMathML: "Conten\u00EDu MathML",
    OpenMath: "OpenMath",
    texHints: "Amosar gabitos TeX en MathML",
    Settings: "Configuraci\u00F3n de matem\u00E1tiques",
    ZoomTrigger: "Disparador del zoom",
    Hover: "Al pasar el mur",
    Click: "Al facer clic",
    DoubleClick: "Con doble clic",
    NoZoom: "Ensin zoom",
    TriggerRequires: "El disparador requier:",
    Option: "Opci\u00F3n",
    Alt: "Alt",
    Command: "Comandu",
    Control: "Control",
    Shift: "May\u00FAs",
    ZoomFactor: "Factor d'ampliaci\u00F3n",
    Renderer: "Renderizador de matem\u00E1tiques",
    MPHandles: "Permitir que MathPlayer alministre:",
    MenuEvents: "Socesos de men\u00FA",
    MouseEvents: "Socesos del mur",
    MenuAndMouse: "Socesos del mur y del men\u00FA",
    FontPrefs: "Preferencies de tipograf\u00EDa",
    ForHTMLCSS: "Pa HTML-CSS:",
    Auto: "Auto",
    TeXLocal: "TeX (llocal)",
    TeXWeb: "TeX (web)",
    TeXImage: "TeX (imaxe)",
    STIXLocal: "STIX (llocal)",
    STIXWeb: "STIX (web)",
    AsanaMathWeb: "Asana Math (web)",
    GyrePagellaWeb: "Gyre Pagella (web)",
    GyreTermesWeb: "Gyre Termes (web)",
    LatinModernWeb: "Llat\u00EDn modernu (web)",
    NeoEulerWeb: "Neo Euler (web)",
    ContextMenu: "Men\u00FA contestual",
    Browser: "Navegador",
    Scale: "Escalar toles f\u00F3rmules...",
    Discoverable: "Illuminar al pasar el mur",
    Locale: "Llingua",
    LoadLocale: "Cargar d'una URL...",
    About: "Tocante a MathJax",
    Help: "Ayuda de MathJax",
    localTeXfonts: "usando tipograf\u00EDa TeX llocal",
    webTeXfonts: "usando tipograf\u00EDa TeX de la web",
    imagefonts: "usando tipograf\u00EDa d'imaxe",
    localSTIXfonts: "usando tipograf\u00EDa STIX llocal",
    webSVGfonts: "usando tipograf\u00EDa SVG de la web",
    genericfonts: "usando tipograf\u00EDa Unicode xen\u00E9rica",
    wofforotffonts: "tipograf\u00EDa WOFF u OTF",
    eotffonts: "tipograf\u00EDa EOT",
    svgfonts: "tipograf\u00EDa SVG",
    WebkitNativeMMLWarning: "Paez que'l so restolador nun tien sofitu pa MathML de mou nativu, de mou que cambiar la salida a MathML pue facer que les f\u00F3rmules de la p\u00E1xina nun puedan lleese",
    MSIENativeMMLWarning: "Internet Explorer requier el complementu MathPlayer pa procesar la salida de MathML",
    OperaNativeMMLWarning: "El sofitu d'Opera pa MathML ye parcial, de mou que cambiar la salida a MathML pue facer que delles espresiones nun se vean bien.",
    SafariNativeMMLWarning: "El MathML nativu del so navegador nun cuenta con toles carauter\u00EDstiques qu'usa MathJax, de mou que delles espresiones podr\u00EDen nun representase correutamente.",
    FirefoxNativeMMLWarning: "El MathML nativu del so navegador nun cuenta con toles carauter\u00EDstiques qu'usa MathJax, de mou que delles espresiones podr\u00EDen nun representase correutamente.",
    MSIESVGWarning: "SVG nun ta implement\u00E1u n'Internet Explorer anterior a IE9 o cuando ta emulando IE8 o anterior. Si cambia a la salida SVG, les f\u00F3rmules nun se ver\u00E1n correutamente.",
    LoadURL: "Cargar los datos de traducci\u00F3n d'esta URL:",
    BadURL: "La URL tendr\u00EDa de ser pa un ficheru JavaScript que define los datos de traducci\u00F3n de MathJax. Los nomes de los ficheros JavaScript tendr\u00EDen d'acabar en '.js'",
    BadData: "Nun pudieron cargase los datos de traducci\u00F3n de %1",
    SwitchAnyway: "\u00BFCambiar la representaci\u00F3n de toles maneres?\n\n(Calque Aceutar pa cambiar, Encaboxar pa siguir cola representaci\u00F3n actual)",
    ScaleMath: "Ampliar toles f\u00F3rmules (en comparancia col testu d'alredor) nun",
    NonZeroScale: "La escala nun tien de ser cero",
    PercentScale: "La escala tien de ser un porcentaxe (por exemplu 120%%)",
    IE8warning: "Esto desactivar\u00E1 les funciones de men\u00FA y zoom de MathJax, pero pue facer Alt-Clic nuna espresi\u00F3n pa ver el men\u00FA de MathJax.\n\n\u00BFRealmente quier cambiar la configuraci\u00F3n de MathPlayer?",
    IE9warning: "El men\u00FA contestual de MathJax tar\u00E1 desactiv\u00E1u, pero pue facer Alt-Clic nuna espresi\u00F3n pa ver el men\u00FA MathJax.",
    NoOriginalForm: "Nun ta disponible denguna forma orixinal",
    Close: "Zarrar",
    EqSource: "C\u00F3digu fonte d'ecuaci\u00F3n MathJax"
  }
} );
// Signal the MathJax loader that this localization file has finished loading.
MathJax.Ajax.loadComplete( "[MathJax]/localization/ast/MathMenu.js" );
| bsd-2-clause |
ericbn/homebrew-cask | Casks/qqlive.rb | 776 | cask "qqlive" do
version "2.18.2.52825"
sha256 "5d0b53ff0f95605c306326a531ffc5341842aa8231db8abc1491d3eadeb7b335"
url "https://dldir1.qq.com/qqtv/mac/TencentVideo_V#{version}.dmg"
appcast "https://v.qq.com/biu/download#Mac"
name "QQLive"
name "腾讯视频"
homepage "https://v.qq.com/download.html#mac"
auto_updates true
depends_on macos: ">= :yosemite"
app "QQLive.app"
zap trash: [
"~/Library/Caches/com.tencent.tenvideo",
"~/Library/Containers/com.tencent.tenvideo",
"~/Library/Cookies/com.tencent.tenvideo.binarycookies",
"~/Library/Logs/QQLive",
"~/Library/Preferences/com.tencent.tenvideo.plist",
"~/Library/Saved Application State/com.tencent.tenvideo.savedState",
"~/Library/WebKit/com.tencent.tenvideo",
]
end
| bsd-2-clause |
neilbu/osrm-backend | third_party/libosmium/include/osmium/area/detail/proto_ring.hpp | 8174 | #ifndef OSMIUM_AREA_DETAIL_PROTO_RING_HPP
#define OSMIUM_AREA_DETAIL_PROTO_RING_HPP
/*
This file is part of Osmium (http://osmcode.org/libosmium).
Copyright 2013-2017 Jochen Topf <jochen@topf.org> and others (see README).
Boost Software License - Version 1.0 - August 17th, 2003
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iostream>
#include <set>
#include <vector>
#include <osmium/osm/location.hpp>
#include <osmium/osm/node_ref.hpp>
#include <osmium/area/detail/node_ref_segment.hpp>
namespace osmium {
class Way;
namespace area {
namespace detail {
/**
* A ring in the process of being built by the Assembler object.
*/
class ProtoRing {
public:
using segments_type = std::vector<NodeRefSegment*>;
private:
// Segments in this ring.
segments_type m_segments;
// If this is an outer ring, these point to it's inner rings
// (if any).
std::vector<ProtoRing*> m_inner;
// The smallest segment. Will be kept current whenever a new
// segment is added to the ring.
NodeRefSegment* m_min_segment;
// If this is an inner ring, points to the outer ring.
ProtoRing* m_outer_ring;
#ifdef OSMIUM_DEBUG_RING_NO
static int64_t next_num() noexcept {
static int64_t counter = 0;
return ++counter;
}
int64_t m_num;
#endif
int64_t m_sum;
public:
/// Construct a ring seeded with its first segment.
explicit ProtoRing(NodeRefSegment* segment) noexcept :
    m_segments(),
    m_inner(),
    m_min_segment(segment),
    m_outer_ring(nullptr),
#ifdef OSMIUM_DEBUG_RING_NO
    m_num(next_num()),
#endif
    m_sum(0) {
    add_segment_back(segment);
}

/**
 * Append a segment at the end of the ring.  Keeps the cached minimum
 * segment and the running determinant sum up to date, and points the
 * segment back at this ring.
 */
void add_segment_back(NodeRefSegment* segment) {
    assert(segment);
    if (*segment < *m_min_segment) {
        m_min_segment = segment;
    }
    m_segments.push_back(segment);
    segment->set_ring(this);
    m_sum += segment->det();
}
NodeRefSegment* min_segment() const noexcept {
return m_min_segment;
}
ProtoRing* outer_ring() const noexcept {
return m_outer_ring;
}
void set_outer_ring(ProtoRing* outer_ring) noexcept {
assert(outer_ring);
assert(m_inner.empty());
m_outer_ring = outer_ring;
}
const std::vector<ProtoRing*>& inner_rings() const noexcept {
return m_inner;
}
void add_inner_ring(ProtoRing* ring) {
assert(ring);
assert(!m_outer_ring);
m_inner.push_back(ring);
}
bool is_outer() const noexcept {
return !m_outer_ring;
}
const segments_type& segments() const noexcept {
return m_segments;
}
const NodeRef& get_node_ref_start() const noexcept {
return m_segments.front()->start();
}
const NodeRef& get_node_ref_stop() const noexcept {
return m_segments.back()->stop();
}
bool closed() const noexcept {
return get_node_ref_start().location() == get_node_ref_stop().location();
}
void reverse() {
std::for_each(m_segments.begin(), m_segments.end(), [](NodeRefSegment* segment) {
segment->reverse();
});
std::reverse(m_segments.begin(), m_segments.end());
m_sum = -m_sum;
}
void mark_direction_done() {
std::for_each(m_segments.begin(), m_segments.end(), [](NodeRefSegment* segment) {
segment->mark_direction_done();
});
}
bool is_cw() const noexcept {
return m_sum <= 0;
}
int64_t sum() const noexcept {
return m_sum;
}
void fix_direction() noexcept {
if (is_cw() == is_outer()) {
reverse();
}
}
void reset() {
m_inner.clear();
m_outer_ring = nullptr;
std::for_each(m_segments.begin(), m_segments.end(), [](NodeRefSegment* segment) {
segment->mark_direction_not_done();
});
}
void get_ways(std::set<const osmium::Way*>& ways) const {
for (const auto& segment : m_segments) {
ways.insert(segment->way());
}
}
void join_forward(ProtoRing& other) {
for (NodeRefSegment* segment : other.m_segments) {
add_segment_back(segment);
}
}
void join_backward(ProtoRing& other) {
for (auto it = other.m_segments.rbegin(); it != other.m_segments.rend(); ++it) {
(*it)->reverse();
add_segment_back(*it);
}
}
void print(std::ostream& out) const {
#ifdef OSMIUM_DEBUG_RING_NO
out << "Ring #" << m_num << " [";
#else
out << "Ring [";
#endif
if (!m_segments.empty()) {
out << m_segments.front()->start().ref();
}
for (const auto& segment : m_segments) {
out << ',' << segment->stop().ref();
}
out << "]-" << (is_outer() ? "OUTER" : "INNER");
}
}; // class ProtoRing
// Stream output operator for ProtoRing; delegates to ProtoRing::print().
template <typename TChar, typename TTraits>
inline std::basic_ostream<TChar, TTraits>& operator<<(std::basic_ostream<TChar, TTraits>& out, const ProtoRing& ring) {
    ring.print(out);
    return out;
}
} // namespace detail
} // namespace area
} // namespace osmium
#endif // OSMIUM_AREA_DETAIL_PROTO_RING_HPP
| bsd-2-clause |
moyogo/robofab | Docs/Examples/objects/RGlyph_03.py | 645 | # robofab manual
# Glyph object
# method examples
# In FontLab the baseglyph of a component can't be changed easily.
# This assumes that there will only be
# one component that needs to be remapped.
def remapComponent(glyph, oldBaseGlyph, newBaseGlyph):
    """Point the first component referencing oldBaseGlyph at newBaseGlyph.

    The component's offset and scale are preserved. Only the first matching
    component is remapped; if no component references oldBaseGlyph the glyph
    is left untouched.
    """
    foundComponent = None
    for component in glyph.components:
        # Fixed: the original used '=' (assignment) here, which is a
        # syntax error; '==' is the intended comparison.
        if component.baseGlyph == oldBaseGlyph:
            foundComponent = component
            break
    if foundComponent is None:
        return
    offset = foundComponent.offset
    scale = foundComponent.scale
    # Remove the matched component explicitly (not the bare loop variable)
    # and re-add it with the new base glyph but the same transformation.
    glyph.removeComponent(foundComponent)
    glyph.appendComponent(newBaseGlyph, offset=offset, scale=scale)
| bsd-3-clause |
endlessm/chromium-browser | tools/idl_parser/idl_node.py | 5635 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
#
# IDL Node
#
# IDL Node defines the IDLAttribute and IDLNode objects which are constructed
# by the parser as it processes the various 'productions'. The IDLAttribute
# objects are assigned to the IDLNode's property dictionary instead of being
# applied as children of The IDLNodes, so they do not exist in the final tree.
# The AST of IDLNodes is the output from the parsing state and will be used
# as the source data by the various generators.
#
#
# CopyToList
#
# Takes an input item, list, or None, and returns a new list of that set.
def CopyToList(item):
  """Normalize |item| into a new, mutable list.

  None (or any falsy value) becomes an empty list, a non-list value becomes
  a single-element list, and a list is shallow-copied so the caller can
  modify the result without affecting the original.
  """
  # If the item is 'Empty' make it an empty list.
  if not item:
    item = []
  # Wrap non-list values. isinstance() is the idiomatic check; the original
  # compared type(item) against type([]).
  if not isinstance(item, list):
    item = [item]
  # Make a copy we can modify.
  return list(item)
# IDLSearch
#
# Base class for visitors passed to IDLNode.Traverse(). Subclasses override
# Enter/Exit; |depth| is maintained by the traversal and reflects how many
# ancestors the currently visited node has.
class IDLSearch(object):
  def __init__(self):
    self.depth = 0

  def Enter(self, node):
    """Called before a node's children are visited. No-op by default."""
    pass

  def Exit(self, node):
    """Called after a node's children are visited. No-op by default."""
    pass
# IDLAttribute
#
# A temporary object used by the parsing process to hold an Extended
# Attribute. When handed to IDLNode.AddChildren() it is folded into the
# node's property dictionary instead of becoming a child node.
class IDLAttribute(object):
  def __init__(self, name, value):
    self._cls = 'Property'
    self.name = name
    self.value = value

  def __str__(self):
    return '{}={}'.format(self.name, self.value)

  def GetClass(self):
    return self._cls
#
# IDLNode
#
# This class implements the AST tree, providing the associations between
# parents and children. It also contains a property dictionary to allow for
# look-ups. IDLAttribute children are folded into the property dictionary
# instead of being kept as child nodes.
#
class IDLNode(object):
  """A node in the IDL abstract syntax tree."""

  # Properties set on nearly every node; suppressed by default when dumping
  # the tree so the output stays readable.
  VERBOSE_PROPS = [
      'PROD', 'NAME', 'VALUE', 'TYPE',
      'ERRORS', 'WARNINGS', 'FILENAME', 'LINENO', 'POSITION', 'DATETIME',
  ]

  def __init__(self, cls, filename, lineno, pos, children=None):
    self._cls = cls
    self._properties = {
        'ERRORS': [],
        'WARNINGS': [],
        'FILENAME': filename,
        'LINENO': lineno,
        'POSITION': pos,
    }
    self._children = []
    self._parent = None
    self.AddChildren(children)

  #
  # Logging and errors
  #

  def __str__(self):
    """Return 'Class(name)' or 'Class(name) = "value"' for this node."""
    name = self.GetProperty('NAME', '')
    value = self.GetProperty('VALUE')
    # An empty-string VALUE is still a value and must be shown.
    if value or value == '':
      return '%s(%s) = "%s"' % (self._cls, name, value)
    return '%s(%s)' % (self._cls, name)

  def GetLogLine(self, msg):
    """Return |msg| prefixed with this node's file and line number."""
    filename, lineno = self.GetFileAndLine()
    return '%s(%d) : %s\n' % (filename, lineno, msg)

  def Error(self, msg):
    """Record an error on this node and echo it to stderr."""
    self.GetProperty('ERRORS').append(msg)
    sys.stderr.write(self.GetLogLine('error: ' + msg))

  def Warning(self, msg):
    """Record a warning on this node and echo it to stdout."""
    self.GetProperty('WARNINGS').append(msg)
    sys.stdout.write(self.GetLogLine('warning:' + msg))

  def GetFileAndLine(self):
    """Return (filename, lineno) for where this node was defined."""
    return self.GetProperty('FILENAME'), self.GetProperty('LINENO')

  def GetClass(self):
    return self._cls

  def GetName(self):
    return self.GetProperty('NAME')

  def GetParent(self):
    return self._parent

  def Traverse(self, search, filter_nodes):
    """Depth-first traversal calling search.Enter/Exit around children.

    Nodes whose class appears in |filter_nodes| are skipped entirely,
    including their subtrees.
    """
    if self._cls in filter_nodes:
      return ''
    search.Enter(self)
    search.depth += 1
    for child in self._children:
      child.Traverse(search, filter_nodes)
    search.depth -= 1
    search.Exit(self)

  def Tree(self, filter_nodes=None, suppress_props=VERBOSE_PROPS):
    """Return the subtree as a list of indented strings, for debugging."""
    class DumpTreeSearch(IDLSearch):
      def __init__(self, props):
        IDLSearch.__init__(self)
        self.out = []
        self.props = props or []

      def Enter(self, node):
        tab = ''.rjust(self.depth * 2)
        self.out.append(tab + str(node))
        proplist = []
        # items() instead of the Python 2-only iteritems(), so the dump
        # also works under Python 3.
        for key, value in node.GetProperties().items():
          if key not in self.props:
            proplist.append(tab + '  %s: %s' % (key, str(value)))
        if proplist:
          self.out.extend(proplist)

    if filter_nodes is None:
      filter_nodes = ['SpecialComment']
    search = DumpTreeSearch(suppress_props)
    self.Traverse(search, filter_nodes)
    return search.out

  #
  # Search related functions
  #

  def IsA(self, *typelist):
    """Return True if this node's class is one of |typelist|."""
    return self._cls in typelist

  def GetChildren(self):
    return self._children

  def GetListOf(self, *keys):
    """Return all direct children whose class is in |keys|."""
    return [child for child in self.GetChildren()
            if child.GetClass() in keys]

  def GetOneOf(self, *keys):
    """Return the first direct child whose class is in |keys|, or None."""
    out = self.GetListOf(*keys)
    return out[0] if out else None

  def AddChildren(self, children):
    """Attach children to this node.

    IDLAttribute items are converted to properties; IDLNode items become
    children with their parent pointer set; falsy items are skipped. Any
    other type raises RuntimeError.
    """
    for child in CopyToList(children):
      if not child:
        continue
      if isinstance(child, IDLAttribute):
        self.SetProperty(child.name, child.value)
        continue
      if isinstance(child, IDLNode):
        child._parent = self
        self._children.append(child)
        continue
      raise RuntimeError('Adding child of type %s.\n' % type(child).__name__)

  #
  # Property functions
  #

  def SetProperty(self, name, val):
    self._properties[name] = val

  def GetProperty(self, name, default=None):
    return self._properties.get(name, default)

  def GetProperties(self):
    return self._properties
| bsd-3-clause |
Bysmyyr/chromium-crosswalk | third_party/WebKit/Source/core/css/CSSQuadValue.cpp | 1453 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "config.h"
#include "core/css/CSSQuadValue.h"
#include "wtf/text/StringBuilder.h"
namespace blink {
// Serializes the quad back to CSS text. In rect mode all four sides are
// always emitted inside "rect(...)". Otherwise the shortest equivalent
// shorthand is produced (same collapsing rules as margin/padding): one
// value if all four are equal, two if top==bottom and left==right, three
// if only left==right, otherwise all four.
String CSSQuadValue::customCSSText() const
{
    String top = m_top->cssText();
    String right = m_right->cssText();
    String bottom = m_bottom->cssText();
    String left = m_left->cssText();

    if (m_serializationType == SerializationType::SerializeAsRect)
        return "rect(" + top + ' ' + right + ' ' + bottom + ' ' + left + ')';

    StringBuilder result;
    // reserve space for the four strings, plus three space separator characters.
    result.reserveCapacity(top.length() + right.length() + bottom.length() + left.length() + 3);
    result.append(top);
    if (right != top || bottom != top || left != top) {
        result.append(' ');
        result.append(right);
        if (bottom != top || right != left) {
            result.append(' ');
            result.append(bottom);
            if (left != right) {
                result.append(' ');
                result.append(left);
            }
        }
    }
    return result.toString();
}
// Reports the four side values to the visitor (for Blink heap tracing),
// then delegates to the CSSValue base implementation.
DEFINE_TRACE_AFTER_DISPATCH(CSSQuadValue)
{
    visitor->trace(m_top);
    visitor->trace(m_right);
    visitor->trace(m_bottom);
    visitor->trace(m_left);
    CSSValue::traceAfterDispatch(visitor);
}
}
| bsd-3-clause |
danakj/chromium | components/domain_reliability/header.cc | 8759 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/domain_reliability/header.h"
#include <stdint.h>
#include <string>
#include "base/memory/ptr_util.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_tokenizer.h"
#include "components/domain_reliability/config.h"
#include "content/public/common/origin_util.h"
namespace {
// Parses directives in the format ("foo; bar=value for bar; baz; quux=123")
// used by NEL.
//
// Usage: construct with the raw header value, then call GetNext() until it
// returns false; after each successful call the directive_*() accessors
// describe one parsed directive. stopped_with_error() reports whether
// parsing ended with a syntax error rather than at end of input.
class DirectiveHeaderValueParser {
 public:
  // Parser state while consuming a single directive.
  enum State {
    BEFORE_NAME,      // Expecting the directive name.
    AFTER_NAME,       // Saw the name; expecting '=', ';', or end of input.
    BEFORE_VALUE,     // Saw '='; expecting a value token or ';'.
    AFTER_DIRECTIVE,  // Directive complete.
    SYNTAX_ERROR      // Unrecoverable parse error.
  };

  DirectiveHeaderValueParser(base::StringPiece value)
      // Copy the piece's characters explicitly: a StringPiece is not
      // guaranteed to be NUL-terminated, so constructing the std::string
      // from value.data() alone could read past the end of the piece.
      : value_(value.data(), value.size()),
        tokenizer_(value_.begin(), value_.end(), ";= "),
        stopped_with_error_(false) {
    tokenizer_.set_options(base::StringTokenizer::RETURN_DELIMS);
    tokenizer_.set_quote_chars("\"'");
  }

  // Gets the next directive, if there is one. Returns whether there was one.
  bool GetNext() {
    if (stopped_with_error_)
      return false;

    // Reset per-directive output state.
    directive_name_ = base::StringPiece();
    directive_has_value_ = false;
    directive_values_.clear();

    State state = BEFORE_NAME;
    while (state != AFTER_DIRECTIVE && state != SYNTAX_ERROR
        && tokenizer_.GetNext()) {
      // Spaces only separate tokens; skip them in every state.
      if (*tokenizer_.token_begin() == ' ')
        continue;
      switch (state) {
        case BEFORE_NAME:
          state = DoBeforeName();
          break;
        case AFTER_NAME:
          state = DoAfterName();
          break;
        case BEFORE_VALUE:
          state = DoBeforeValue();
          break;
        case AFTER_DIRECTIVE:
        case SYNTAX_ERROR:
          NOTREACHED();
          break;
      }
    }

    switch (state) {
      // If the parser just read the last directive, it may be in one of these
      // states, so return true to yield that directive.
      case AFTER_NAME:
      case BEFORE_VALUE:
      case AFTER_DIRECTIVE:
        return true;
      // If the parser never found a name, return false, since it doesn't have
      // a new directive for the caller.
      case BEFORE_NAME:
        return false;
      case SYNTAX_ERROR:
        stopped_with_error_ = true;
        return false;
      default:
        NOTREACHED();
        return false;
    }
  }

  base::StringPiece directive_name() const { return directive_name_; }
  bool directive_has_value() const { return directive_has_value_; }
  const std::vector<base::StringPiece>& directive_values() const {
    return directive_values_;
  }
  bool stopped_with_error() const { return stopped_with_error_; }

 private:
  // Consumes the directive name token.
  State DoBeforeName() {
    if (tokenizer_.token_is_delim())
      return SYNTAX_ERROR;
    directive_name_ = tokenizer_.token_piece();
    return AFTER_NAME;
  }

  // Handles the token after the name: '=' starts a value list, ';' (or end
  // of input) ends the directive.
  State DoAfterName() {
    if (tokenizer_.token_is_delim()) {
      char token_begin = *tokenizer_.token_begin();
      // Name can be followed by =value, ;, or just EOF.
      if (token_begin == '=') {
        directive_has_value_ = true;
        return BEFORE_VALUE;
      }
      if (token_begin == ';')
        return AFTER_DIRECTIVE;
    }
    return SYNTAX_ERROR;
  }

  // Accumulates value tokens until ';' ends the directive.
  State DoBeforeValue() {
    if (tokenizer_.token_is_delim()) {
      char token_begin = *tokenizer_.token_begin();
      if (token_begin == ';')
        return AFTER_DIRECTIVE;
      return SYNTAX_ERROR;
    }
    directive_values_.push_back(tokenizer_.token_piece());
    return BEFORE_VALUE;
  }

  // Owned copy of the header value. The tokenizer and every StringPiece
  // handed out by this parser point into this buffer.
  std::string value_;
  base::StringTokenizer tokenizer_;

  base::StringPiece directive_name_;
  bool directive_has_value_;
  std::vector<base::StringPiece> directive_values_;
  bool stopped_with_error_;
};
// Strips one level of matched quoting from |in|, accepting either
// "double quotes" or <angle brackets> as the quoting characters.
// Returns false when the quoting is mismatched (a quote character on only
// one end) or when |in| is empty. On success writes the unquoted (or
// unchanged, if it was not quoted) string to |out|.
bool Unquote(const std::string& in, std::string* out) {
  // An empty token has no quotes to strip, and indexing in[in.length() - 1]
  // below would be out of range; treat it as a parse failure.
  if (in.empty())
    return false;
  char first = in[0];
  char last = in[in.length() - 1];
  // A quote character on exactly one end is a syntax error.
  if (((first == '"') ^ (last == '"')) || ((first == '<') ^ (last == '>')))
    return false;
  if ((first == '"') || (first == '<'))
    *out = in.substr(1, in.length() - 2);
  else
    *out = in;
  return true;
}
// Parses the value list of a "report-uri" directive into a list of GURLs.
// Requires at least one value. Every value must unquote cleanly and be a
// valid URL with a secure origin; returns false on the first invalid
// entry (|out| may then hold a partial result).
bool ParseReportUri(const std::vector<base::StringPiece> in,
                    ScopedVector<GURL>* out) {
  if (in.size() < 1u)
    return false;
  out->clear();
  for (const auto& in_token : in) {
    std::string unquoted;
    if (!Unquote(in_token.as_string(), &unquoted))
      return false;
    GURL url(unquoted);
    // Collector endpoints must be served over a secure origin.
    if (!url.is_valid() || !content::IsOriginSecure(url))
      return false;
    out->push_back(new GURL(url));
  }
  return true;
}
// Parses the value list of a "max-age" directive. Exactly one token is
// allowed, holding a non-negative number of seconds. Returns false on any
// parse failure; |out| is written only on success.
bool ParseMaxAge(const std::vector<base::StringPiece> in,
                 base::TimeDelta* out) {
  if (in.size() != 1u)
    return false;
  int64_t seconds = 0;
  const bool is_number = base::StringToInt64(in[0], &seconds);
  if (!is_number || seconds < 0)
    return false;
  *out = base::TimeDelta::FromSeconds(seconds);
  return true;
}
} // namespace
namespace domain_reliability {
// Nothing to do beyond default member destruction.
DomainReliabilityHeader::~DomainReliabilityHeader() {}
// static
// Parses a NEL-style header value. Possible outcomes:
//   PARSE_CLEAR_CONFIG - valid header with max-age=0 (clears stored config).
//   PARSE_SET_CONFIG   - valid header with report-uri and max-age > 0.
//   PARSE_ERROR        - anything else (syntax error, duplicate directive,
//                        missing max-age, directive with wrong value shape,
//                        or max-age > 0 without a report-uri).
// Unknown directives are ignored with a warning, for forward compatibility.
std::unique_ptr<DomainReliabilityHeader> DomainReliabilityHeader::Parse(
    base::StringPiece value) {
  ScopedVector<GURL> report_uri;
  base::TimeDelta max_age;
  bool include_subdomains = false;
  // Each directive may appear at most once.
  bool got_report_uri = false;
  bool got_max_age = false;
  bool got_include_subdomains = false;
  DirectiveHeaderValueParser parser(value);
  while (parser.GetNext()) {
    base::StringPiece name = parser.directive_name();
    if (name == "report-uri") {
      if (got_report_uri
          || !parser.directive_has_value()
          || !ParseReportUri(parser.directive_values(), &report_uri)) {
        return base::WrapUnique(new DomainReliabilityHeader(PARSE_ERROR));
      }
      got_report_uri = true;
    } else if (name == "max-age") {
      if (got_max_age
          || !parser.directive_has_value()
          || !ParseMaxAge(parser.directive_values(), &max_age)) {
        return base::WrapUnique(new DomainReliabilityHeader(PARSE_ERROR));
      }
      got_max_age = true;
    } else if (name == "includeSubdomains") {
      // includeSubdomains is a boolean directive and must not carry a value.
      if (got_include_subdomains ||
          parser.directive_has_value()) {
        return base::WrapUnique(new DomainReliabilityHeader(PARSE_ERROR));
      }
      include_subdomains = true;
      got_include_subdomains = true;
    } else {
      LOG(WARNING) << "Ignoring unknown NEL header directive " << name << ".";
    }
  }
  if (parser.stopped_with_error() || !got_max_age)
    return base::WrapUnique(new DomainReliabilityHeader(PARSE_ERROR));
  // max-age=0 means "forget any previously stored configuration".
  if (max_age == base::TimeDelta::FromMicroseconds(0))
    return base::WrapUnique(new DomainReliabilityHeader(PARSE_CLEAR_CONFIG));
  if (!got_report_uri)
    return base::WrapUnique(new DomainReliabilityHeader(PARSE_ERROR));
  // Build a config that reports all failures (and no successes) to the
  // given collectors.
  std::unique_ptr<DomainReliabilityConfig> config(
      new DomainReliabilityConfig());
  config->include_subdomains = include_subdomains;
  config->collectors.clear();
  config->collectors.swap(report_uri);
  config->success_sample_rate = 0.0;
  config->failure_sample_rate = 1.0;
  config->path_prefixes.clear();
  return base::WrapUnique(new DomainReliabilityHeader(
      PARSE_SET_CONFIG, std::move(config), max_age));
}
// Returns the parsed configuration. Only valid when the header parsed as
// PARSE_SET_CONFIG.
const DomainReliabilityConfig& DomainReliabilityHeader::config() const {
  DCHECK_EQ(PARSE_SET_CONFIG, status_);
  return *config_;
}
// Returns the parsed max-age. Only valid when the header parsed as
// PARSE_SET_CONFIG.
base::TimeDelta DomainReliabilityHeader::max_age() const {
  DCHECK_EQ(PARSE_SET_CONFIG, status_);
  return max_age_;
}
// Transfers ownership of the parsed config to the caller. The header is
// then marked PARSE_ERROR so the released config can no longer be reached
// through config() (whose DCHECK would fire).
std::unique_ptr<DomainReliabilityConfig>
DomainReliabilityHeader::ReleaseConfig() {
  DCHECK_EQ(PARSE_SET_CONFIG, status_);
  status_ = PARSE_ERROR;
  return std::move(config_);
}
// Serializes the parsed header back into NEL header-value syntax, e.g.
// "report-uri=<url> <url>; max-age=<seconds>; includeSubdomains".
std::string DomainReliabilityHeader::ToString() const {
  const int64_t max_age_s = max_age_.InSeconds();
  std::string result;
  if (config_->collectors.empty()) {
    // No collectors implies a clear-config header, which must be max-age=0.
    DCHECK_EQ(0, max_age_s);
  } else {
    result += "report-uri=";
    for (const auto* uri : config_->collectors)
      result += uri->spec() + " ";
    // Replace the trailing space with the directive separator.
    result.erase(result.length() - 1, 1);
    result += "; ";
  }
  result += "max-age=" + base::Int64ToString(max_age_s) + "; ";
  if (config_->include_subdomains)
    result += "includeSubdomains; ";
  // Drop the "; " left dangling after the final directive.
  result.erase(result.length() - 2, 2);
  return result;
}
// Constructor for the PARSE_ERROR and PARSE_CLEAR_CONFIG outcomes, which
// carry neither a config nor a max-age.
DomainReliabilityHeader::DomainReliabilityHeader(ParseStatus status)
    : status_(status) {
  DCHECK_NE(PARSE_SET_CONFIG, status_);
}
// Constructor for the PARSE_SET_CONFIG outcome; requires a non-null config
// and a non-zero max-age.
DomainReliabilityHeader::DomainReliabilityHeader(
    ParseStatus status,
    std::unique_ptr<DomainReliabilityConfig> config,
    base::TimeDelta max_age)
    : status_(status), config_(std::move(config)), max_age_(max_age) {
  DCHECK_EQ(PARSE_SET_CONFIG, status_);
  DCHECK(config_.get());
  DCHECK_NE(0, max_age_.InMicroseconds());
}
} // namespace domain_reliability
| bsd-3-clause |
endlessm/chromium-browser | third_party/llvm/parallel-libs/acxxel/tests/status_test.cpp | 1414 | //===--- status_test.cpp - Tests for the Status and Expected classes ------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "status.h"
#include "gtest/gtest.h"
#include <memory>
namespace {
// Move-only type that counts live instances in a global; used below to
// verify that Expected moves (rather than copies or leaks) the value it
// owns. Copying is deleted so any accidental copy fails to compile.
struct RefCounter {
  static int Count;
  RefCounter() { ++Count; }
  ~RefCounter() { --Count; }

  RefCounter(const RefCounter &) = delete;
  RefCounter &operator=(const RefCounter &) = delete;
};

// Storage for the instance counter.
int RefCounter::Count;
// Exercises move construction and move assignment of Expected with a
// move-only payload, checking at every step that exactly one RefCounter
// instance is alive (nothing was copied or leaked).
TEST(Expected, RefCounter) {
  RefCounter::Count = 0;
  using uptr = std::unique_ptr<RefCounter>;

  // E0 takes ownership of the single instance.
  acxxel::Expected<uptr> E0(uptr(new RefCounter));
  EXPECT_FALSE(E0.isError());
  EXPECT_EQ(1, RefCounter::Count);

  // Move construction transfers ownership without creating a new instance.
  acxxel::Expected<uptr> E1(std::move(E0));
  EXPECT_FALSE(E1.isError());
  EXPECT_EQ(1, RefCounter::Count);

  // An error-state Expected holds no value.
  acxxel::Expected<uptr> E2(acxxel::Status("nothing in here yet"));
  EXPECT_TRUE(E2.isError());
  EXPECT_EQ(1, RefCounter::Count);

  // Move assignment replaces the error with the moved-in value.
  E2 = std::move(E1);
  EXPECT_FALSE(E2.isError());
  EXPECT_EQ(1, RefCounter::Count);
  EXPECT_EQ(1, E2.getValue()->Count);
  EXPECT_FALSE(E2.isError());
  EXPECT_EQ(1, RefCounter::Count);

  // takeValue() moves the pointer out; the instance dies with the returned
  // temporary, so the count drops to zero.
  EXPECT_EQ(1, E2.takeValue()->Count);
  EXPECT_EQ(0, RefCounter::Count);
}
} // namespace
| bsd-3-clause |
chromium/chromium | chrome/browser/captive_portal/captive_portal_browsertest.cc | 136347 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <algorithm>
#include <atomic>
#include <iterator>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <tuple>
#include <utility>
#include <vector>
#include "base/base_switches.h"
#include "base/bind.h"
#include "base/command_line.h"
#include "base/compiler_specific.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/memory/raw_ptr.h"
#include "base/path_service.h"
#include "base/run_loop.h"
#include "base/sequence_checker.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "base/test/bind.h"
#include "base/test/scoped_feature_list.h"
#include "base/values.h"
#include "build/build_config.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/captive_portal/captive_portal_service_factory.h"
#include "chrome/browser/chrome_notification_types.h"
#include "chrome/browser/net/secure_dns_config.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_commands.h"
#include "chrome/browser/ui/browser_finder.h"
#include "chrome/browser/ui/browser_list.h"
#include "chrome/browser/ui/browser_navigator_params.h"
#include "chrome/browser/ui/browser_window.h"
#include "chrome/browser/ui/tab_contents/tab_contents_iterator.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/browser/ui/tabs/tab_strip_model_observer.h"
#include "chrome/common/chrome_paths.h"
#include "chrome/common/pref_names.h"
#include "chrome/test/base/in_process_browser_test.h"
#include "chrome/test/base/ui_test_utils.h"
#include "components/captive_portal/content/captive_portal_service.h"
#include "components/captive_portal/content/captive_portal_tab_helper.h"
#include "components/captive_portal/content/captive_portal_tab_reloader.h"
#include "components/captive_portal/core/captive_portal_types.h"
#include "components/embedder_support/pref_names.h"
#include "components/prefs/pref_service.h"
#include "components/security_interstitials/content/captive_portal_blocking_page.h"
#include "components/security_interstitials/content/security_interstitial_page.h"
#include "components/security_interstitials/content/security_interstitial_tab_helper.h"
#include "components/security_interstitials/content/ssl_blocking_page.h"
#include "components/security_interstitials/content/ssl_error_handler.h"
#include "content/public/browser/browser_task_traits.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/navigation_controller.h"
#include "content/public/browser/navigation_entry.h"
#include "content/public/browser/notification_observer.h"
#include "content/public/browser/notification_registrar.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/notification_types.h"
#include "content/public/browser/render_frame_host.h"
#include "content/public/browser/storage_partition.h"
#include "content/public/browser/web_contents.h"
#include "content/public/common/url_constants.h"
#include "content/public/test/browser_test.h"
#include "content/public/test/browser_test_utils.h"
#include "content/public/test/prerender_test_util.h"
#include "content/public/test/url_loader_interceptor.h"
#include "net/base/net_errors.h"
#include "net/cert/x509_certificate.h"
#include "net/dns/mock_host_resolver.h"
#include "net/http/http_util.h"
#include "net/http/transport_security_state.h"
#include "net/test/cert_test_util.h"
#include "net/test/embedded_test_server/embedded_test_server.h"
#include "net/test/test_data_directory.h"
#include "services/network/public/mojom/url_response_head.mojom.h"
#include "testing/gtest/include/gtest/gtest.h"
#if BUILDFLAG(IS_WIN)
#include "base/win/win_util.h"
#endif
using captive_portal::CaptivePortalResult;
using content::BrowserThread;
using content::WebContents;
namespace {
// Path of the fake login page, when using the TestServer.
const char* const kTestServerLoginPath = "/captive_portal/login.html";

// Path of a page with an iframe that has a mock SSL timeout, when using the
// TestServer.
const char* const kTestServerIframeTimeoutPath =
    "/captive_portal/iframe_timeout.html";

// Path of a page that redirects to kMockHttpsUrl.
const char* const kRedirectToMockHttpsPath =
    "/captive_portal/redirect_to_mock_https.html";

// Path of a page that serves a bad SSL certificate.
// The path doesn't matter because all we need is that it's served from a
// server that's configured to serve a bad cert.
const char* const kMockHttpsBadCertPath = "/bad_cert.html";

// The following URLs each have two different behaviors, depending on whether
// URLRequestMockCaptivePortalJobFactory is currently simulating the presence
// of a captive portal or not.  They use different domains so that HSTS can be
// applied to them independently.

// A mock URL for the captive_portal::CaptivePortalService's |test_url|.  When
// behind a captive portal, this URL returns a mock login page.  When connected
// to the Internet, it returns a 204 response.  Uses the name of the login file
// so that reloading it will not request a different URL.
const char* const kMockCaptivePortalTestUrl =
    "http://mock.captive.portal.test/login.html";

// Another mock URL for the captive_portal::CaptivePortalService's |test_url|.
// When behind a captive portal, this URL returns a 511 status code and an HTML
// page that redirect to the above URL.  When connected to the Internet, it
// returns a 204 response.
const char* const kMockCaptivePortal511Url =
    "http://mock.captive.portal.test/page511.html";

// When behind a captive portal, this URL hangs without committing until a call
// to FailJobs.  When that function is called, the request will time out.
//
// When connected to the Internet, this URL returns a non-error page.
const char* const kMockHttpsUrl =
    "https://mock.captive.portal.long.timeout/title2.html";

// Same as above, but different domain, so can be used to trigger cross-site
// navigations.
const char* const kMockHttpsUrl2 =
    "https://mock.captive.portal.long.timeout2/title2.html";

// Same as kMockHttpsUrl, except the timeout happens instantly.
const char* const kMockHttpsQuickTimeoutUrl =
    "https://mock.captive.portal.quick.timeout/title2.html";

// The intercepted URLs used to mock specific network errors (connection
// timeouts, unexpected failures, closed connections, secure DNS failures).
const char* const kMockHttpConnectionTimeoutErr =
    "http://mock.captive.portal.quick.error/timeout";
const char* const kMockHttpsConnectionTimeoutErr =
    "https://mock.captive.portal.quick.error/timeout";
const char* const kMockHttpsConnectionUnexpectedErr =
    "https://mock.captive.portal.quick.error/unexpected";
const char* const kMockHttpConnectionConnectionClosedErr =
    "http://mock.captive.portal.quick.error/connection_closed";
const char* const kMockHttpConnectionSecureDnsErr =
    "http://mock.captive.portal.quick.error/secure_dns";
const char* const kMockHttpsConnectionSecureDnsErr =
    "https://mock.captive.portal.quick.error/secure_dns";

// Expected title of a tab once an HTTPS load completes, when not behind a
// captive portal.
const char* const kInternetConnectedTitle = "Title Of Awesomeness";

// Expected title of a login page that was created in secure mode.
const char* const kLoginSecureDnsDisabledTitle =
    "Fake Login Page Secure Dns Disabled";
// Builds a TestServer path that issues a server-side redirect to |dest_url|.
std::string CreateServerRedirect(const std::string& dest_url) {
  return std::string("/server-redirect?") + dest_url;
}
// Returns the total number of tabs across all Browsers, for all Profiles,
// by walking the full AllTabContentses() iterator range.
int NumTabs() {
  return std::distance(AllTabContentses().begin(), AllTabContentses().end());
}
// Returns the total number of loading tabs across all Browsers, for all
// Profiles.
int NumLoadingTabs() {
return std::count_if(AllTabContentses().begin(), AllTabContentses().end(),
[](content::WebContents* web_contents) {
return web_contents->IsLoading();
});
}
// Returns true if |web_contents| has been designated the captive portal
// login tab by its CaptivePortalTabHelper.
bool IsLoginTab(WebContents* web_contents) {
  return captive_portal::CaptivePortalTabHelper::FromWebContents(web_contents)
      ->IsLoginTab();
}
// Tracks how many times each tab has been navigated since the Observer was
// created.  The standard TestNavigationObserver can only watch specific
// pre-existing tabs or loads in serial for all tabs.
class MultiNavigationObserver : public content::NotificationObserver {
 public:
  MultiNavigationObserver();

  MultiNavigationObserver(const MultiNavigationObserver&) = delete;
  MultiNavigationObserver& operator=(const MultiNavigationObserver&) = delete;

  ~MultiNavigationObserver() override;

  // Waits for exactly |num_navigations_to_wait_for| LOAD_STOP
  // notifications to have occurred since the construction of |this|.  More
  // navigations than expected occurring will trigger an expect failure.
  void WaitForNavigations(int num_navigations_to_wait_for);

  // Returns the number of LOAD_STOP events that have occurred for
  // |web_contents| since this was constructed.
  int NumNavigationsForTab(WebContents* web_contents) const;

  // The number of LOAD_STOP events since |this| was created.
  int num_navigations() const { return num_navigations_; }

 private:
  typedef std::map<const WebContents*, int> TabNavigationMap;

  // content::NotificationObserver:
  void Observe(int type,
               const content::NotificationSource& source,
               const content::NotificationDetails& details) override;

  // Total number of LOAD_STOP events seen so far.
  int num_navigations_;

  // Map of how many times each tab has navigated since |this| was created.
  TabNavigationMap tab_navigation_map_;

  // Total number of navigations to wait for.  Value only matters when
  // |waiting_for_navigation_| is true.
  int num_navigations_to_wait_for_;

  // True if WaitForNavigations has been called, until
  // |num_navigations_to_wait_for_| have been observed.
  bool waiting_for_navigation_;

  std::unique_ptr<base::RunLoop> run_loop_;

  content::NotificationRegistrar registrar_;
};
// Starts listening for LOAD_STOP notifications from all sources
// immediately, so navigations are counted from the moment of construction.
MultiNavigationObserver::MultiNavigationObserver()
    : num_navigations_(0),
      num_navigations_to_wait_for_(0),
      waiting_for_navigation_(false) {
  registrar_.Add(this, content::NOTIFICATION_LOAD_STOP,
                 content::NotificationService::AllSources());
}
// |registrar_| removes its registrations on destruction.
MultiNavigationObserver::~MultiNavigationObserver() {
}
void MultiNavigationObserver::WaitForNavigations(
    int num_navigations_to_wait_for) {
  // Shouldn't already be waiting for navigations.
  EXPECT_FALSE(waiting_for_navigation_);
  EXPECT_LT(0, num_navigations_to_wait_for);
  // Only spin the run loop if the expected navigations haven't already
  // happened; Observe() quits the loop once the count is reached.
  if (num_navigations_ < num_navigations_to_wait_for) {
    num_navigations_to_wait_for_ = num_navigations_to_wait_for;
    waiting_for_navigation_ = true;
    run_loop_ = std::make_unique<base::RunLoop>();
    run_loop_->Run();
    EXPECT_FALSE(waiting_for_navigation_);
  }
  // More navigations than expected are a test failure.
  EXPECT_EQ(num_navigations_, num_navigations_to_wait_for);
}
int MultiNavigationObserver::NumNavigationsForTab(
    WebContents* web_contents) const {
  // Tabs that have not navigated since construction have no map entry.
  const auto it = tab_navigation_map_.find(web_contents);
  return it == tab_navigation_map_.end() ? 0 : it->second;
}
void MultiNavigationObserver::Observe(
    int type,
    const content::NotificationSource& source,
    const content::NotificationDetails& details) {
  ASSERT_EQ(type, content::NOTIFICATION_LOAD_STOP);
  content::NavigationController* controller =
      content::Source<content::NavigationController>(source).ptr();
  // Count the navigation both globally and per tab.
  ++num_navigations_;
  ++tab_navigation_map_[controller->DeprecatedGetWebContents()];
  // Wake up WaitForNavigations() once the expected count is reached.
  if (waiting_for_navigation_ &&
      num_navigations_to_wait_for_ == num_navigations_) {
    waiting_for_navigation_ = false;
    if (run_loop_)
      run_loop_->Quit();
  }
}
// This observer creates a list of loading tabs, and then waits for them all
// to stop loading and have the kInternetConnectedTitle.
//
// This is for the specific purpose of observing tabs time out after logging in
// to a captive portal, which will then cause them to reload.
// MultiNavigationObserver is insufficient for this because there may or may not
// be a LOAD_STOP event between the timeout and the reload.
// See bug http://crbug.com/133227
class FailLoadsAfterLoginObserver : public content::NotificationObserver {
 public:
  FailLoadsAfterLoginObserver();

  FailLoadsAfterLoginObserver(const FailLoadsAfterLoginObserver&) = delete;
  FailLoadsAfterLoginObserver& operator=(const FailLoadsAfterLoginObserver&) =
      delete;

  ~FailLoadsAfterLoginObserver() override;

  // Blocks until every tab snapshotted at construction time has reached its
  // final destination (stopped with the expected title).
  void WaitForNavigations();

 private:
  typedef std::set<const WebContents*> TabSet;

  // content::NotificationObserver:
  void Observe(int type,
               const content::NotificationSource& source,
               const content::NotificationDetails& details) override;

  // The set of tabs that need to be navigated.  This is the set of loading
  // tabs when the observer is created.
  TabSet tabs_needing_navigation_;

  // Number of tabs that have stopped navigating with the expected title.  These
  // are expected not to be navigated again.
  TabSet tabs_navigated_to_final_destination_;

  // True if WaitForNavigations has been called, until
  // |tabs_navigated_to_final_destination_| equals |tabs_needing_navigation_|.
  bool waiting_for_navigation_;

  std::unique_ptr<base::RunLoop> run_loop_;

  content::NotificationRegistrar registrar_;
};
FailLoadsAfterLoginObserver::FailLoadsAfterLoginObserver()
    : waiting_for_navigation_(false) {
  registrar_.Add(this, content::NOTIFICATION_LOAD_STOP,
                 content::NotificationService::AllSources());
  // Snapshot the set of tabs that are loading right now; those are the
  // loads expected to fail (and be observed) after login.
  for (content::WebContents* web_contents : AllTabContentses()) {
    if (web_contents->IsLoading())
      tabs_needing_navigation_.insert(web_contents);
  }
}
FailLoadsAfterLoginObserver::~FailLoadsAfterLoginObserver() = default;
// Spins a RunLoop until all tracked tabs have reached their final state.
void FailLoadsAfterLoginObserver::WaitForNavigations() {
  // Nested waits are not supported.
  EXPECT_FALSE(waiting_for_navigation_);
  const bool already_done = tabs_needing_navigation_.size() ==
                            tabs_navigated_to_final_destination_.size();
  if (!already_done) {
    waiting_for_navigation_ = true;
    run_loop_ = std::make_unique<base::RunLoop>();
    run_loop_->Run();
    EXPECT_FALSE(waiting_for_navigation_);
  }
  EXPECT_EQ(tabs_needing_navigation_.size(),
            tabs_navigated_to_final_destination_.size());
}
// Records a tab that finished loading with the expected title, and quits the
// wait loop once every tracked tab has done so.
void FailLoadsAfterLoginObserver::Observe(
    int type,
    const content::NotificationSource& source,
    const content::NotificationDetails& details) {
  ASSERT_EQ(type, content::NOTIFICATION_LOAD_STOP);
  auto* controller =
      content::Source<content::NavigationController>(source).ptr();
  WebContents* contents = controller->DeprecatedGetWebContents();
  ASSERT_EQ(1u, tabs_needing_navigation_.count(contents));
  ASSERT_EQ(0u, tabs_navigated_to_final_destination_.count(contents));
  // A load that stops without the expected title may still be reloaded; only
  // count tabs that reached the final page.
  if (contents->GetTitle() != base::ASCIIToUTF16(kInternetConnectedTitle))
    return;
  tabs_navigated_to_final_destination_.insert(contents);
  const bool all_done = tabs_needing_navigation_.size() ==
                        tabs_navigated_to_final_destination_.size();
  if (waiting_for_navigation_ && all_done) {
    waiting_for_navigation_ = false;
    if (run_loop_)
      run_loop_->Quit();
  }
}
// An observer for watching the captive_portal::CaptivePortalService. It tracks
// the last received result and the total number of received results.
class CaptivePortalObserver {
 public:
  explicit CaptivePortalObserver(Profile* profile);
  CaptivePortalObserver(const CaptivePortalObserver&) = delete;
  CaptivePortalObserver& operator=(const CaptivePortalObserver&) = delete;
  // Runs the message loop until exactly |update_count| captive portal
  // results have been received, since the creation of |this|. Expects no
  // additional captive portal results.
  void WaitForResults(int num_results_to_wait_for);
  int num_results_received() const { return num_results_received_; }
  CaptivePortalResult captive_portal_result() const {
    return captive_portal_result_;
  }
 private:
  // Records results and exits the message loop, if needed.
  void Observe(const captive_portal::CaptivePortalService::Results& results);
  // Number of times OnPortalResult has been called since construction.
  int num_results_received_;
  // If WaitForResults was called, the total number of updates for which to
  // wait. Value doesn't matter when |waiting_for_result_| is false.
  int num_results_to_wait_for_;
  bool waiting_for_result_;
  std::unique_ptr<base::RunLoop> run_loop_;
  raw_ptr<captive_portal::CaptivePortalService> captive_portal_service_;
  // Keeps Observe() registered with |captive_portal_service_| for the
  // lifetime of this object.
  base::CallbackListSubscription subscription_;
  // Last result received.
  CaptivePortalResult captive_portal_result_;
};
CaptivePortalObserver::CaptivePortalObserver(Profile* profile)
    : num_results_received_(0),
      num_results_to_wait_for_(0),
      waiting_for_result_(false),
      captive_portal_service_(
          CaptivePortalServiceFactory::GetForProfile(profile)),
      // Seed the tracked result with the service's current value so the
      // first Observe() call can verify |results.previous_result|.
      captive_portal_result_(
          captive_portal_service_->last_detection_result()) {
  // Results are delivered to Observe() for as long as |subscription_| lives.
  subscription_ = captive_portal_service_->RegisterCallback(base::BindRepeating(
      &CaptivePortalObserver::Observe, base::Unretained(this)));
}
// Blocks until exactly |num_results_to_wait_for| portal results (counted
// since construction) have arrived.
void CaptivePortalObserver::WaitForResults(int num_results_to_wait_for) {
  EXPECT_LT(0, num_results_to_wait_for);
  // Nested waits are not supported.
  EXPECT_FALSE(waiting_for_result_);
  const bool already_done = num_results_received_ >= num_results_to_wait_for;
  if (!already_done) {
    num_results_to_wait_for_ = num_results_to_wait_for;
    waiting_for_result_ = true;
    run_loop_ = std::make_unique<base::RunLoop>();
    run_loop_->Run();
    EXPECT_FALSE(waiting_for_result_);
  }
  EXPECT_EQ(num_results_to_wait_for, num_results_received_);
}
// Records one portal result, sanity-checking that results arrive in order
// and agree with the service's own view, then wakes WaitForResults() when
// the expected count is reached.
void CaptivePortalObserver::Observe(
    const captive_portal::CaptivePortalService::Results& results) {
  EXPECT_EQ(captive_portal_result_, results.previous_result);
  EXPECT_EQ(captive_portal_service_->last_detection_result(), results.result);
  captive_portal_result_ = results.result;
  ++num_results_received_;
  if (!waiting_for_result_ ||
      num_results_received_ != num_results_to_wait_for_) {
    return;
  }
  waiting_for_result_ = false;
  if (run_loop_)
    run_loop_->Quit();
}
// This observer waits for the SSLErrorHandler to start an interstitial timer
// for the given web contents.
class SSLInterstitialTimerObserver {
 public:
  explicit SSLInterstitialTimerObserver(content::WebContents* web_contents);
  SSLInterstitialTimerObserver(const SSLInterstitialTimerObserver&) = delete;
  SSLInterstitialTimerObserver& operator=(const SSLInterstitialTimerObserver&) =
      delete;
  ~SSLInterstitialTimerObserver();
  // Waits until the interstitial delay timer in SSLErrorHandler is started.
  void WaitForTimerStarted();
 private:
  // Invoked by SSLErrorHandler whenever it starts an interstitial timer;
  // only timers for |web_contents_| end the wait.
  void OnTimerStarted(content::WebContents* web_contents);
  raw_ptr<const content::WebContents> web_contents_;
  SSLErrorHandler::TimerStartedCallback callback_;
  scoped_refptr<content::MessageLoopRunner> message_loop_runner_;
};
SSLInterstitialTimerObserver::SSLInterstitialTimerObserver(
    content::WebContents* web_contents)
    : web_contents_(web_contents),
      message_loop_runner_(new content::MessageLoopRunner) {
  // Register |callback_| so SSLErrorHandler reports every interstitial timer
  // start; OnTimerStarted() filters for |web_contents_|.
  callback_ = base::BindRepeating(&SSLInterstitialTimerObserver::OnTimerStarted,
                                  base::Unretained(this));
  SSLErrorHandler::SetInterstitialTimerStartedCallbackForTesting(&callback_);
}
SSLInterstitialTimerObserver::~SSLInterstitialTimerObserver() {
  // Unregister the testing callback so it cannot outlive this observer.
  SSLErrorHandler::SetInterstitialTimerStartedCallbackForTesting(nullptr);
}
// Blocks until OnTimerStarted() quits the runner for |web_contents_|.
void SSLInterstitialTimerObserver::WaitForTimerStarted() {
  message_loop_runner_->Run();
}
void SSLInterstitialTimerObserver::OnTimerStarted(
    content::WebContents* web_contents) {
  // Ignore timers started for other WebContents.
  if (web_contents != web_contents_)
    return;
  if (message_loop_runner_.get())
    message_loop_runner_->Quit();
}
// Helper for waiting for a change of the active tab.
// Users can wait for the change via WaitForActiveTabChange method.
// DCHECKs ensure that only one change happens during the lifetime of a
// TabActivationWaiter instance.
// Helper for waiting for a change of the active tab.
// Users wait for the change via WaitForActiveTabChange(). DCHECKs ensure
// that at most one change happens per wait during the lifetime of a
// TabActivationWaiter instance.
class TabActivationWaiter : public TabStripModelObserver {
 public:
  explicit TabActivationWaiter(TabStripModel* tab_strip_model)
      : number_of_unconsumed_active_tab_changes_(0) {
    tab_strip_model->AddObserver(this);
  }
  TabActivationWaiter(const TabActivationWaiter&) = delete;
  TabActivationWaiter& operator=(const TabActivationWaiter&) = delete;
  // Blocks until an active-tab change has been observed, then consumes it.
  void WaitForActiveTabChange() {
    if (!number_of_unconsumed_active_tab_changes_) {
      // Spin until OnTabStripModelChanged() reports an activation.
      message_loop_runner_ = new content::MessageLoopRunner;
      message_loop_runner_->Run();
    }
    // Consume exactly one tab activation event.
    DCHECK_EQ(1, number_of_unconsumed_active_tab_changes_);
    --number_of_unconsumed_active_tab_changes_;
  }
  // TabStripModelObserver overrides.
  void OnTabStripModelChanged(
      TabStripModel* tab_strip_model,
      const TabStripModelChange& change,
      const TabStripSelectionChange& selection) override {
    if (tab_strip_model->empty())
      return;
    if (!selection.active_tab_changed())
      return;
    ++number_of_unconsumed_active_tab_changes_;
    DCHECK_EQ(1, number_of_unconsumed_active_tab_changes_);
    if (message_loop_runner_)
      message_loop_runner_->Quit();
  }
 private:
  scoped_refptr<content::MessageLoopRunner> message_loop_runner_;
  int number_of_unconsumed_active_tab_changes_;
};
} // namespace
// Test fixture for captive portal detection browser tests. Installs a
// URLLoaderInterceptor that emulates a captive portal for the mock URLs, and
// provides helpers for driving navigations, controlling the in-flight mock
// requests, and asserting on captive portal / tab reloader state.
class CaptivePortalBrowserTest : public InProcessBrowserTest {
 public:
  CaptivePortalBrowserTest();
  CaptivePortalBrowserTest(const CaptivePortalBrowserTest&) = delete;
  CaptivePortalBrowserTest& operator=(const CaptivePortalBrowserTest&) = delete;
  ~CaptivePortalBrowserTest() override;
  // InProcessBrowserTest:
  void SetUpOnMainThread() override;
  void TearDownOnMainThread() override;
  // Called by |url_loader_interceptor_|.
  // It emulates captive portal behavior.
  // Initially, it emulates being behind a captive portal. When
  // SetBehindCaptivePortal(false) is called, it emulates behavior when not
  // behind a captive portal.
  bool OnIntercept(content::URLLoaderInterceptor::RequestParams* params);
  // Sets the captive portal checking preference.
  void EnableCaptivePortalDetection(Profile* profile, bool enabled);
  // Enables or disables actual captive portal probes. Should only be called
  // after captive portal service setup is done. When disabled, probe requests
  // are silently ignored, never receiving a response.
  void RespondToProbeRequests(bool enabled);
  // Sets up the captive portal service for the given profile so that
  // all checks go to |test_url|. Also disables all timers.
  void SetUpCaptivePortalService(Profile* profile, const GURL& test_url);
  // Returns true if |browser|'s profile is currently running a captive portal
  // check.
  bool CheckPending(Browser* browser);
  // Returns the type of the interstitial being shown.
  security_interstitials::SecurityInterstitialPage::TypeID GetInterstitialType(
      WebContents* contents) const;
  bool IsShowingInterstitial(WebContents* contents);
  // Asserts an interstitial is showing and waits for the render frame to be
  // ready.
  void WaitForInterstitial(content::WebContents* contents);
  // Returns the captive_portal::CaptivePortalTabReloader::State of
  // |web_contents|.
  captive_portal::CaptivePortalTabReloader::State GetStateOfTabReloader(
      WebContents* web_contents) const;
  // Returns the captive_portal::CaptivePortalTabReloader::State of the
  // indicated tab.
  captive_portal::CaptivePortalTabReloader::State GetStateOfTabReloaderAt(
      Browser* browser,
      int index) const;
  // Returns the number of tabs with the given state, across all profiles.
  int NumTabsWithState(
      captive_portal::CaptivePortalTabReloader::State state) const;
  // Returns the number of tabs broken by captive portals, across all profiles.
  int NumBrokenTabs() const;
  // Returns the number of tabs that need to be reloaded due to having logged
  // in to a captive portal, across all profiles.
  int NumNeedReloadTabs() const;
  // Navigates |browser|'s active tab to |url| and expects no captive portal
  // test to be triggered.
  void NavigateToPageExpectNoTest(Browser* browser, const GURL& url);
  // Navigates |browser|'s active tab to an SSL tab that takes a while to load,
  // triggering a captive portal check, which is expected to give the result
  // |expected_result|. The page finishes loading, with a timeout, after the
  // captive portal check.
  void SlowLoadNoCaptivePortal(Browser* browser,
                               CaptivePortalResult expected_result);
  // Navigates |browser|'s active tab to an SSL timeout, expecting a captive
  // portal check to be triggered and return a result which indicates
  // there's no detected captive portal.
  void FastTimeoutNoCaptivePortal(Browser* browser,
                                  CaptivePortalResult expected_result);
  // Navigates the active tab to a slow loading SSL page, which will then
  // trigger a captive portal test. The test is expected to find a captive
  // portal. The slow loading page will continue to load after the function
  // returns, until FailJobs() is called, at which point it will timeout.
  //
  // When |expect_open_login_tab| is false, no login tab is expected to be
  // opened, because one already exists, and the function returns once the
  // captive portal test is complete.
  //
  // If |expect_open_login_tab| is true, a login tab is then expected to be
  // opened. If |expect_new_login_browser| is true, this tab is expected to be
  // opened in a new captive portal popup window. It waits until both the
  // login tab has finished loading, and two captive portal tests complete.
  // The second test is triggered by the load of the captive portal tab
  // completing.
  //
  // This function must not be called when the active tab is currently loading.
  // Waits for the hanging request to be issued, so other functions can rely
  // on WaitForJobs having been called.
  //
  // If not null, `out_login_browser` will be set to the browser window used for
  // the login tab. If no login tab was opened, or if the window is
  // undetermined, will set to null.
  void SlowLoadBehindCaptivePortal(Browser* browser,
                                   bool expect_open_login_tab,
                                   bool expect_new_login_browser = false,
                                   Browser** out_login_browser = nullptr);
  // Same as above, but takes extra parameters.
  //
  // |hanging_url| should either be kMockHttpsUrl or redirect to kMockHttpsUrl.
  //
  // |expected_portal_checks| and |expected_login_tab_navigations| allow
  // client-side redirects to be tested. |expected_login_tab_navigations| is
  // ignored when |expect_open_login_tab| is false.
  //
  // If not null, `out_login_browser` will be set to the browser window used for
  // the login tab. If no login tab was opened, or if the window is
  // undetermined, will set to null.
  void SlowLoadBehindCaptivePortal(Browser* browser,
                                   bool expect_open_login_tab,
                                   bool expect_new_login_browser,
                                   const GURL& hanging_url,
                                   int expected_portal_checks,
                                   int expected_login_tab_navigations,
                                   Browser** out_login_browser = nullptr);
  // Just like SlowLoadBehindCaptivePortal, except the navigated tab has
  // a connection timeout rather than having its timer trigger, and the
  // function waits until that timeout occurs.
  void FastTimeoutBehindCaptivePortal(Browser* browser,
                                      bool expect_open_login_tab);
  // Much as above, but accepts a URL parameter and can be used for errors that
  // trigger captive portal checks other than timeouts. |error_url| should
  // result in an error rather than hanging.
  //
  // If not null, `out_login_browser` will be set to the browser window used for
  // the login tab. If no login tab was opened, or if the window is
  // undetermined, will set to null.
  void FastErrorBehindCaptivePortal(Browser* browser,
                                    bool expect_open_login_tab,
                                    bool expect_new_login_browser,
                                    const GURL& error_url,
                                    Browser** out_login_browser = nullptr);
  // Navigates the active tab to an SSL error page which triggers an
  // interstitial timer. Also disables captive portal checks indefinitely, so
  // the page appears to be hanging.
  void FastErrorWithInterstitialTimer(Browser* browser,
                                      const GURL& cert_error_url);
  // Navigates the login tab without logging in. The login tab must be the
  // specified browser's active tab. Expects no other tab to change state.
  // |num_loading_tabs| and |num_timed_out_tabs| are used as extra checks
  // that nothing has gone wrong prior to the function call.
  void NavigateLoginTab(Browser* browser,
                        int num_loading_tabs,
                        int num_timed_out_tabs);
  // Simulates a login by updating the URLRequestMockCaptivePortalJob's
  // behind captive portal state, and navigating the login tab. Waits for
  // all broken but not loading tabs to be reloaded.
  // |captive_portal_browser| is the browser containing the login page.
  // |num_loading_tabs| and |num_timed_out_tabs| are used as extra checks
  // that nothing has gone wrong prior to the function call.
  // |expected_portal_checks| allows client-side redirects to be tested.
  void Login(Browser* captive_portal_browser,
             int num_loading_tabs,
             int num_timed_out_tabs,
             int expected_portal_checks);
  // Simulates a login when the broken tab shows an SSL or captive portal
  // interstitial. Can't use Login() in those cases because the interstitial
  // tab looks like a cross between a hung tab (Load was never committed) and a
  // tab at an error page (The load was stopped).
  void LoginCertError(Browser* browser);
  // Makes the slow SSL loads of all active tabs time out at once, and waits for
  // them to finish both that load and the automatic reload it should trigger.
  // There should be no timed out tabs when this is called.
  void FailLoadsAfterLogin(Browser* browser, int num_loading_tabs);
  // Makes the slow SSL loads of all active tabs time out at once, and waits for
  // them to finish displaying their error pages. The login tab should be the
  // active tab. There should be no timed out tabs when this is called.
  //
  // If non-null, `captive_portal_browser` specifies a separate popup window
  // used for the captive portal login tab.
  void FailLoadsWithoutLogin(Browser* browser,
                             int num_loading_tabs,
                             Browser* captive_portal_browser = nullptr);
  // Navigates |browser|'s active tab to |starting_url| while not behind a
  // captive portal. Then navigates to |interrupted_url|, which should create
  // a URLRequestTimeoutOnDemandJob, which is then abandoned. The load should
  // trigger a captive portal check, which finds a captive portal and opens a
  // tab.
  //
  // Then the navigation is interrupted by a navigation to |timeout_url|, which
  // should trigger a captive portal check, and finally the test simulates
  // logging in.
  //
  // The purpose of this test is to make sure the TabHelper triggers a captive
  // portal check when a load is interrupted by another load, particularly in
  // the case of cross-process navigations.
  void RunNavigateLoadingTabToTimeoutTest(Browser* browser,
                                          const GURL& starting_url,
                                          const GURL& interrupted_url,
                                          const GURL& timeout_url);
  // Sets the timeout used by a captive_portal::CaptivePortalTabReloader on slow
  // SSL loads before a captive portal check.
  void SetSlowSSLLoadTime(
      captive_portal::CaptivePortalTabReloader* tab_reloader,
      base::TimeDelta slow_ssl_load_time);
  captive_portal::CaptivePortalTabReloader* GetTabReloader(
      WebContents* web_contents) const;
  // Sets whether or not there is a captive portal. Outstanding requests are
  // not affected.
  void SetBehindCaptivePortal(bool behind_captive_portal) {
    behind_captive_portal_ = behind_captive_portal;
  }
  // Waits for exactly |num_jobs| kMockHttps* requests.
  void WaitForJobs(int num_jobs) {
    if (BrowserThread::CurrentlyOn(BrowserThread::UI)) {
      SetNumJobsToWaitForOnInterceptorThread(num_jobs);
    } else {
      content::GetUIThreadTaskRunner({})->PostTask(
          FROM_HERE,
          base::BindOnce(
              &CaptivePortalBrowserTest::SetNumJobsToWaitForOnInterceptorThread,
              base::Unretained(this), num_jobs));
    }
    run_loop_ = std::make_unique<base::RunLoop>();
    // Will be exited via QuitRunLoop() when the interceptor has received
    // |num_jobs|.
    run_loop_->Run();
  }
  void SetNumJobsToWaitForOnInterceptorThread(int num_jobs) {
    DCHECK_CURRENTLY_ON(BrowserThread::UI);
    DCHECK(!num_jobs_to_wait_for_);
    int num_ongoing_jobs = static_cast<int>(ongoing_mock_requests_.size());
    // If the jobs have already arrived, quit immediately rather than waiting
    // for OnIntercept() to observe the count.
    if (num_ongoing_jobs == num_jobs) {
      content::GetUIThreadTaskRunner({})->PostTask(
          FROM_HERE, base::BindOnce(&CaptivePortalBrowserTest::QuitRunLoop,
                                    base::Unretained(this)));
      return;
    }
    EXPECT_LT(num_ongoing_jobs, num_jobs);
    num_jobs_to_wait_for_ = num_jobs;
  }
  // Fails all active kMockHttps* requests with error code |error| and
  // hostname resolution error info |resolve_error_info|.
  // There are expected to be exactly |expected_num_jobs| waiting for
  // failure. The only way to guarantee this is with an earlier call to
  // WaitForJobs, so makes sure there has been a matching WaitForJobs call.
  void FailJobs(int expected_num_jobs,
                int error,
                net::ResolveErrorInfo resolve_error_info) {
    if (!BrowserThread::CurrentlyOn(BrowserThread::UI)) {
      // Hop to the UI thread, where |ongoing_mock_requests_| is accessed.
      content::GetUIThreadTaskRunner({})->PostTask(
          FROM_HERE, base::BindOnce(&CaptivePortalBrowserTest::FailJobs,
                                    base::Unretained(this), expected_num_jobs,
                                    error, resolve_error_info));
      return;
    }
    EXPECT_EQ(expected_num_jobs,
              static_cast<int>(ongoing_mock_requests_.size()));
    network::URLLoaderCompletionStatus status;
    status.error_code = error;
    status.resolve_error_info = resolve_error_info;
    for (auto& job : ongoing_mock_requests_)
      job.client->OnComplete(status);
    ongoing_mock_requests_.clear();
  }
  // Fails all active kMockHttps* requests with SSL cert errors.
  // |expected_num_jobs| behaves just as in FailJobs.
  void FailJobsWithCertError(int expected_num_jobs,
                             const net::SSLInfo& ssl_info) {
    if (!BrowserThread::CurrentlyOn(BrowserThread::UI)) {
      content::GetUIThreadTaskRunner({})->PostTask(
          FROM_HERE,
          base::BindOnce(&CaptivePortalBrowserTest::FailJobsWithCertError,
                         base::Unretained(this), expected_num_jobs, ssl_info));
      return;
    }
    DCHECK(intercept_bad_cert_);
    // With the network service enabled, these will be requests to
    // kMockHttpsBadCertPath that is served by a misconfigured
    // EmbeddedTestServer. Once the request reaches the network service, it'll
    // notice the bad SSL cert.
    // Set |intercept_bad_cert_| so that when we use the network service'
    // URLLoaderFactory again it doesn't get intercepted and goes to the
    // network process. This has to be done on the UI thread as that's where we
    // currently have a public URLLoaderFactory for the profile.
    intercept_bad_cert_ = false;
    EXPECT_EQ(expected_num_jobs,
              static_cast<int>(ongoing_mock_requests_.size()));
    for (auto& job : ongoing_mock_requests_) {
      content::GetUIThreadTaskRunner({})->PostTask(
          FROM_HERE, base::BindOnce(&CaptivePortalBrowserTest::CreateLoader,
                                    base::Unretained(this), std::move(job)));
    }
    ongoing_mock_requests_.clear();
  }
  // Re-dispatches a previously-held request through the profile's real
  // URLLoaderFactory (used after |intercept_bad_cert_| has been cleared).
  void CreateLoader(content::URLLoaderInterceptor::RequestParams job) {
    CHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
    browser()
        ->profile()
        ->GetDefaultStoragePartition()
        ->GetURLLoaderFactoryForBrowserProcess()
        ->CreateLoaderAndStart(std::move(job.receiver), job.request_id,
                               job.options, std::move(job.url_request),
                               job.client.Unbind(), job.traffic_annotation);
    }
  // Abandon all active kMockHttps* requests. |expected_num_jobs|
  // behaves just as in FailJobs.
  void AbandonJobs(int expected_num_jobs) {
    if (!BrowserThread::CurrentlyOn(BrowserThread::UI)) {
      content::GetUIThreadTaskRunner({})->PostTask(
          FROM_HERE, base::BindOnce(&CaptivePortalBrowserTest::AbandonJobs,
                                    base::Unretained(this), expected_num_jobs));
      return;
    }
    EXPECT_EQ(expected_num_jobs,
              static_cast<int>(ongoing_mock_requests_.size()));
    // Deliberately leak the client pipes so the requests hang forever
    // instead of completing with a connection error.
    for (auto& job : ongoing_mock_requests_)
      std::ignore = job.client.Unbind().PassPipe().release();
    ongoing_mock_requests_.clear();
  }
  // Returns the contents of the given filename under chrome/test/data.
  static std::string GetContents(const std::string& path) {
    base::FilePath root_http;
    base::PathService::Get(chrome::DIR_TEST_DATA, &root_http);
    base::ScopedAllowBlockingForTesting allow_io;
    base::FilePath file_path = root_http.AppendASCII(path);
    std::string contents;
    CHECK(base::ReadFileToString(file_path, &contents));
    return contents;
  }
  void QuitRunLoop() {
    DCHECK_CURRENTLY_ON(BrowserThread::UI);
    if (run_loop_)
      run_loop_->Quit();
  }
 protected:
  base::test::ScopedFeatureList feature_list_;
  std::unique_ptr<content::URLLoaderInterceptor> url_loader_interceptor_;
  std::unique_ptr<base::RunLoop> run_loop_;
  // Only accessed on the UI thread.
  int num_jobs_to_wait_for_ = 0;
  std::vector<content::URLLoaderInterceptor::RequestParams>
      ongoing_mock_requests_;
  // Whether intercepted mock requests should behave as if behind a captive
  // portal. See OnIntercept().
  std::atomic<bool> behind_captive_portal_;
#if BUILDFLAG(IS_WIN)
  base::win::ScopedDomainStateForTesting scoped_domain_;
#endif
  raw_ptr<const BrowserList> browser_list_;
  // When true, requests to kMockHttpsBadCertPath are captured by
  // OnIntercept() instead of reaching the network service.
  bool intercept_bad_cert_ = true;
};
CaptivePortalBrowserTest::CaptivePortalBrowserTest()
    : behind_captive_portal_(true),
#if BUILDFLAG(IS_WIN)
      // Mark as not enterprise managed to prevent the secure DNS mode from
      // being downgraded to off.
      scoped_domain_(false),
#endif
      browser_list_(BrowserList::GetInstance()) {
  // The captive portal interstitial feature must be on for these tests.
  feature_list_.InitAndEnableFeature(kCaptivePortalInterstitial);
}
CaptivePortalBrowserTest::~CaptivePortalBrowserTest() = default;
void CaptivePortalBrowserTest::SetUpOnMainThread() {
  // Route all URL loads through OnIntercept() for the duration of the test.
  url_loader_interceptor_ =
      std::make_unique<content::URLLoaderInterceptor>(base::BindRepeating(
          &CaptivePortalBrowserTest::OnIntercept, base::Unretained(this)));
  // Double-check that the captive portal service isn't enabled by default for
  // browser tests.
  EXPECT_EQ(captive_portal::CaptivePortalService::DISABLED_FOR_TESTING,
            captive_portal::CaptivePortalService::get_state_for_testing());
  captive_portal::CaptivePortalService::set_state_for_testing(
      captive_portal::CaptivePortalService::NOT_TESTING);
  EnableCaptivePortalDetection(browser()->profile(), true);
  // Set the captive portal service to use URLRequestMockCaptivePortalJob's
  // mock URL, by default.
  SetUpCaptivePortalService(browser()->profile(),
                            GURL(kMockCaptivePortalTestUrl));
  // Set SSL interstitial delay long enough so that a captive portal result
  // is guaranteed to arrive during this window, and a captive portal
  // error page is displayed instead of an SSL interstitial.
  SSLErrorHandler::SetInterstitialDelayForTesting(base::Hours(1));
}
// Intercepts every URL load in the test. Returns true when the request was
// handled here (mock captive portal / mock HTTPS endpoints); returning false
// lets the request proceed normally.
bool CaptivePortalBrowserTest::OnIntercept(
    content::URLLoaderInterceptor::RequestParams* params) {
  if (params->url_request.url.path() == kMockHttpsBadCertPath &&
      intercept_bad_cert_) {
    CHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
    // Hold the request; FailJobsWithCertError() will re-dispatch it later.
    ongoing_mock_requests_.emplace_back(std::move(*params));
    return true;
  }
  auto url_string = params->url_request.url.spec();
  network::URLLoaderCompletionStatus status;
  status.error_code = net::OK;
  // Synthesize immediate network errors for the error-simulation URLs.
  if (url_string == kMockHttpConnectionTimeoutErr ||
      url_string == kMockHttpsConnectionTimeoutErr) {
    status.error_code = net::ERR_CONNECTION_TIMED_OUT;
  } else if (url_string == kMockHttpsConnectionUnexpectedErr) {
    status.error_code = net::ERR_UNEXPECTED;
  } else if (url_string == kMockHttpConnectionConnectionClosedErr) {
    status.error_code = net::ERR_CONNECTION_CLOSED;
  } else if (url_string == kMockHttpConnectionSecureDnsErr ||
             url_string == kMockHttpsConnectionSecureDnsErr) {
    // Emulates a resolution failure attributed to secure DNS (presumably a
    // cert error on the DoH server — see is_secure_network_error).
    status.error_code = net::ERR_NAME_NOT_RESOLVED;
    status.resolve_error_info = net::ResolveErrorInfo(
        net::ERR_CERT_COMMON_NAME_INVALID, true /* is_secure_network_error */);
  }
  if (status.error_code != net::OK) {
    params->client->OnComplete(status);
    return true;
  }
  if (url_string == kMockHttpsUrl || url_string == kMockHttpsUrl2 ||
      url_string == kMockHttpsQuickTimeoutUrl ||
      params->url_request.url.path() == kRedirectToMockHttpsPath) {
    CHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
    if (params->url_request.url.path() == kRedirectToMockHttpsPath) {
      // Serve a 301 to kMockHttpsUrl, then fall through to handle the
      // redirected request like a direct kMockHttpsUrl load.
      net::RedirectInfo redirect_info;
      redirect_info.new_url = GURL(kMockHttpsUrl);
      redirect_info.new_method = "GET";
      std::string headers;
      headers = base::StringPrintf(
          "HTTP/1.0 301 Moved permanently\n"
          "Location: %s\n"
          "Content-Type: text/html\n\n",
          kMockHttpsUrl);
      net::HttpResponseInfo info;
      info.headers = base::MakeRefCounted<net::HttpResponseHeaders>(
          net::HttpUtil::AssembleRawHeaders(headers));
      auto response = network::mojom::URLResponseHead::New();
      response->headers = info.headers;
      response->headers->GetMimeType(&response->mime_type);
      response->encoded_data_length = 0;
      params->client->OnReceiveRedirect(redirect_info, std::move(response));
    }
    if (behind_captive_portal_) {
      if (url_string == kMockHttpsQuickTimeoutUrl) {
        network::URLLoaderCompletionStatus completion_status;
        completion_status.error_code = net::ERR_CONNECTION_TIMED_OUT;
        params->client->OnComplete(completion_status);
      } else {
        // Leave the request hanging; tests complete it later via FailJobs()
        // and friends. Wake up a pending WaitForJobs() if this request
        // brings the count up to the awaited number.
        ongoing_mock_requests_.emplace_back(std::move(*params));
        if (num_jobs_to_wait_for_ ==
            static_cast<int>(ongoing_mock_requests_.size())) {
          num_jobs_to_wait_for_ = 0;
          content::GetUIThreadTaskRunner({})->PostTask(
              FROM_HERE, base::BindOnce(&CaptivePortalBrowserTest::QuitRunLoop,
                                        base::Unretained(this)));
        }
      }
    } else {
      // Once logged in to the portal, HTTPS requests return the page that was
      // actually requested.
      content::URLLoaderInterceptor::WriteResponse(
          "HTTP/1.1 200 OK\nContent-type: text/html\n\n",
          GetContents("title2.html"), params->client.get());
    }
    return true;
  }
  std::string headers;
  if (url_string == kMockCaptivePortalTestUrl ||
      url_string == kMockCaptivePortal511Url) {
    std::string contents;
    if (behind_captive_portal_) {
      // Prior to logging in to the portal, the HTTP test URLs are
      // intercepted by the captive portal.
      if (url_string == kMockCaptivePortal511Url) {
        contents = GetContents("captive_portal/page511.html");
        headers = "HTTP/1.1 511 Network Authentication Required\n";
      } else {
        // These URLs should only be requested for navigations, which will have
        // trusted_params.
        contents =
            params->url_request.trusted_params->disable_secure_dns
                ? GetContents("captive_portal/login_secure_dns_disabled.html")
                : GetContents("captive_portal/login.html");
        headers = "HTTP/1.0 200 Just Peachy\n";
      }
    } else {
      // After logging in to the portal, the test URLs return a 204
      // response.
      headers = "HTTP/1.0 204 No Content\nContent-Length: 0\n";
    }
    headers += "Content-Type: text/html\n\n";
    content::URLLoaderInterceptor::WriteResponse(headers, contents,
                                                 params->client.get());
    return true;
  }
  return false;
}
void CaptivePortalBrowserTest::TearDownOnMainThread() {
  // No test should have a captive portal check pending on quit.
  EXPECT_FALSE(CheckPending(browser()));
  // Stop intercepting URL loads before the fixture is torn down.
  url_loader_interceptor_.reset();
}
void CaptivePortalBrowserTest::EnableCaptivePortalDetection(
    Profile* profile, bool enabled) {
  // Captive portal checking is keyed off the alternate-error-pages pref.
  profile->GetPrefs()->SetBoolean(embedder_support::kAlternateErrorPagesEnabled,
                                  enabled);
}
void CaptivePortalBrowserTest::RespondToProbeRequests(bool enabled) {
if (enabled) {
EXPECT_EQ(captive_portal::CaptivePortalService::IGNORE_REQUESTS_FOR_TESTING,
captive_portal::CaptivePortalService::get_state_for_testing());
captive_portal::CaptivePortalService::set_state_for_testing(
captive_portal::CaptivePortalService::NOT_TESTING);
} else {
EXPECT_EQ(captive_portal::CaptivePortalService::NOT_TESTING,
captive_portal::CaptivePortalService::get_state_for_testing());
captive_portal::CaptivePortalService::set_state_for_testing(
captive_portal::CaptivePortalService::IGNORE_REQUESTS_FOR_TESTING);
}
}
// Points |profile|'s captive portal service at |test_url| and removes all
// recheck backoff so probes can run back to back.
void CaptivePortalBrowserTest::SetUpCaptivePortalService(Profile* profile,
    const GURL& test_url) {
  auto* service = CaptivePortalServiceFactory::GetForProfile(profile);
  service->set_test_url(test_url);
  // Don't use any non-zero timers. Timers are checked in unit tests.
  auto* recheck = &service->recheck_policy();
  recheck->initial_backoff_no_portal_ms = 0;
  recheck->initial_backoff_portal_ms = 0;
  recheck->backoff_policy.maximum_backoff_ms = 0;
}
// A check is "pending" if one is running now or scheduled to run later.
bool CaptivePortalBrowserTest::CheckPending(Browser* browser) {
  auto* service =
      CaptivePortalServiceFactory::GetForProfile(browser->profile());
  if (service->DetectionInProgress())
    return true;
  return service->TimerRunning();
}
// Returns the TypeID of the committed security interstitial in |contents|,
// or null when no interstitial is showing.
security_interstitials::SecurityInterstitialPage::TypeID
CaptivePortalBrowserTest::GetInterstitialType(WebContents* contents) const {
  auto* tab_helper =
      security_interstitials::SecurityInterstitialTabHelper::FromWebContents(
          contents);
  if (!tab_helper)
    return nullptr;
  auto* blocking_page =
      tab_helper->GetBlockingPageForCurrentlyCommittedNavigationForTesting();
  return blocking_page ? blocking_page->GetTypeForTesting() : nullptr;
}
// Returns true if |contents| currently shows any security interstitial.
bool CaptivePortalBrowserTest::IsShowingInterstitial(WebContents* contents) {
  return GetInterstitialType(contents) != nullptr;
}
void CaptivePortalBrowserTest::WaitForInterstitial(
    content::WebContents* contents) {
  // The interstitial must already be showing; this only waits for its
  // render frame to become ready.
  ASSERT_TRUE(IsShowingInterstitial(contents));
  ASSERT_TRUE(WaitForRenderFrameReady(contents->GetMainFrame()));
}
// NOTE(review): assumes GetTabReloader() is non-null for |web_contents| —
// confirm all callers pass tabs that have a CaptivePortalTabHelper.
captive_portal::CaptivePortalTabReloader::State
CaptivePortalBrowserTest::GetStateOfTabReloader(
    WebContents* web_contents) const {
  return GetTabReloader(web_contents)->state();
}
// Convenience wrapper: looks up the tab at |index| in |browser| and returns
// its reloader state.
captive_portal::CaptivePortalTabReloader::State
CaptivePortalBrowserTest::GetStateOfTabReloaderAt(Browser* browser,
                                                  int index) const {
  return GetStateOfTabReloader(
      browser->tab_strip_model()->GetWebContentsAt(index));
}
// Counts tabs whose reloader is in |state|, across every browser window and
// profile.
int CaptivePortalBrowserTest::NumTabsWithState(
    captive_portal::CaptivePortalTabReloader::State state) const {
  int matching_tabs = 0;
  for (content::WebContents* web_contents : AllTabContentses()) {
    if (GetStateOfTabReloader(web_contents) == state)
      ++matching_tabs;
  }
  return matching_tabs;
}
int CaptivePortalBrowserTest::NumBrokenTabs() const {
return NumTabsWithState(
captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL);
}
int CaptivePortalBrowserTest::NumNeedReloadTabs() const {
return NumTabsWithState(
captive_portal::CaptivePortalTabReloader::STATE_NEEDS_RELOAD);
}
// Navigates |browser|'s active tab to |url| and verifies the load completes
// without triggering (or leaving pending) any captive portal check, without
// opening new tabs, and with the tab's reloader remaining in STATE_NONE.
void CaptivePortalBrowserTest::NavigateToPageExpectNoTest(Browser* browser,
                                                          const GURL& url) {
  // Observers are created before the navigation so no event is missed.
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(browser->profile());
  ASSERT_TRUE(ui_test_utils::NavigateToURL(browser, url));
  // No captive portal checks should have occurred or be pending, and there
  // should be no new tabs.
  EXPECT_EQ(0, portal_observer.num_results_received());
  EXPECT_FALSE(CheckPending(browser));
  EXPECT_EQ(1, browser->tab_strip_model()->count());
  EXPECT_EQ(1, navigation_observer.num_navigations());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser, 0));
}
// Starts a hanging HTTPS load with the slow-SSL-load timer set to fire
// immediately, waits for exactly one captive portal check returning
// |expected_result|, then manually fails the request with a timeout and
// verifies the tab finishes loading without any further checks.
void CaptivePortalBrowserTest::SlowLoadNoCaptivePortal(
    Browser* browser,
    CaptivePortalResult expected_result) {
  captive_portal::CaptivePortalTabReloader* tab_reloader =
      GetTabReloader(browser->tab_strip_model()->GetActiveWebContents());
  ASSERT_TRUE(tab_reloader);
  // Zero delay: the slow-load timer fires as soon as the load starts.
  SetSlowSSLLoadTime(tab_reloader, base::TimeDelta());
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(browser->profile());
  ui_test_utils::NavigateToURLWithDisposition(
      browser, GURL(kMockHttpsUrl), WindowOpenDisposition::CURRENT_TAB,
      ui_test_utils::BROWSER_TEST_NONE);
  portal_observer.WaitForResults(1);
  ASSERT_EQ(1, browser->tab_strip_model()->count());
  EXPECT_EQ(expected_result, portal_observer.captive_portal_result());
  EXPECT_EQ(1, portal_observer.num_results_received());
  EXPECT_EQ(0, navigation_observer.num_navigations());
  EXPECT_FALSE(CheckPending(browser));
  // First tab should still be loading.
  EXPECT_EQ(1, NumLoadingTabs());
  // Wait for the request to be issued, then time it out.
  WaitForJobs(1);
  FailJobs(1, net::ERR_CONNECTION_TIMED_OUT, net::ResolveErrorInfo(net::OK));
  navigation_observer.WaitForNavigations(1);
  ASSERT_EQ(1, browser->tab_strip_model()->count());
  EXPECT_EQ(1, portal_observer.num_results_received());
  EXPECT_FALSE(CheckPending(browser));
  EXPECT_EQ(0, NumLoadingTabs());
  // Set a slow SSL load time to prevent the timer from triggering.
  SetSlowSSLLoadTime(tab_reloader, base::Days(1));
}
// Navigates to a URL that fails immediately with a connection timeout (so the
// slow-load timer never fires) and verifies exactly one captive portal check
// runs, returning |expected_result|, with no login tab opened. Must not be
// used with RESULT_BEHIND_CAPTIVE_PORTAL.
void CaptivePortalBrowserTest::FastTimeoutNoCaptivePortal(
    Browser* browser,
    CaptivePortalResult expected_result) {
  ASSERT_NE(expected_result, captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL);
  // Set the load time to be large, so the timer won't trigger. The value is
  // not restored at the end of the function.
  captive_portal::CaptivePortalTabReloader* tab_reloader =
      GetTabReloader(browser->tab_strip_model()->GetActiveWebContents());
  ASSERT_TRUE(tab_reloader);
  SetSlowSSLLoadTime(tab_reloader, base::Hours(1));
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(browser->profile());
  // Neither of these should be changed by the navigation.
  int active_index = browser->tab_strip_model()->active_index();
  int expected_tab_count = browser->tab_strip_model()->count();
  ASSERT_TRUE(ui_test_utils::NavigateToURL(
      browser, GURL(kMockHttpsConnectionTimeoutErr)));
  // An attempt to detect a captive portal should have started by now. If not,
  // abort early to prevent hanging.
  ASSERT_TRUE(portal_observer.num_results_received() > 0 ||
              CheckPending(browser));
  portal_observer.WaitForResults(1);
  navigation_observer.WaitForNavigations(1);
  // Check the result.
  EXPECT_EQ(1, portal_observer.num_results_received());
  EXPECT_EQ(expected_result, portal_observer.captive_portal_result());
  // Check that the right tab was navigated, and there were no extra
  // navigations.
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   browser->tab_strip_model()->GetWebContentsAt(active_index)));
  EXPECT_EQ(0, NumLoadingTabs());
  // Check the tab's state, and verify no captive portal check is pending.
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser, 0));
  EXPECT_FALSE(CheckPending(browser));
  // Make sure no login tab was opened.
  EXPECT_EQ(expected_tab_count, browser->tab_strip_model()->count());
}
void CaptivePortalBrowserTest::SlowLoadBehindCaptivePortal(
    Browser* browser,
    bool expect_open_login_tab,
    bool expect_new_login_browser,
    Browser** out_login_browser) {
  // Convenience overload: a single hanging load of kMockHttpsUrl that is
  // expected to trigger exactly one portal check and, if a login tab opens,
  // exactly one navigation of that tab.
  SlowLoadBehindCaptivePortal(browser, expect_open_login_tab,
                              expect_new_login_browser, GURL(kMockHttpsUrl),
                              1 /* expected_portal_checks */,
                              1 /* expected_login_tab_navigations */,
                              out_login_browser);
}
// Navigates |browser| to |hanging_url|, which stalls until manually failed,
// and waits for |expected_portal_checks| captive portal results (all expected
// to report BEHIND_CAPTIVE_PORTAL). If |expect_open_login_tab| is true, waits
// for |expected_login_tab_navigations| navigations of a login tab, opened
// either in |browser| itself or — when |expect_new_login_browser| is set — in
// a new popup browser. The browser hosting the login tab is returned through
// |out_login_browser| when non-null. On return, the hanging request has been
// issued and the navigated tab is in STATE_BROKEN_BY_PORTAL.
void CaptivePortalBrowserTest::SlowLoadBehindCaptivePortal(
    Browser* browser,
    bool expect_open_login_tab,
    bool expect_new_login_browser,
    const GURL& hanging_url,
    int expected_portal_checks,
    int expected_login_tab_navigations,
    Browser** out_login_browser) {
  ASSERT_GE(expected_portal_checks, 1);
  TabStripModel* tab_strip_model = browser->tab_strip_model();
  // Calling this on a tab that's waiting for a load to manually be timed out
  // will result in a hang.
  ASSERT_FALSE(tab_strip_model->GetActiveWebContents()->IsLoading());
  // Trigger a captive portal check quickly.
  captive_portal::CaptivePortalTabReloader* tab_reloader =
      GetTabReloader(tab_strip_model->GetActiveWebContents());
  ASSERT_TRUE(tab_reloader);
  SetSlowSSLLoadTime(tab_reloader, base::TimeDelta());
  // Number of tabs expected to be open after the captive portal checks
  // have completed.
  int initial_tab_count = tab_strip_model->count();
  int initial_active_index = tab_strip_model->active_index();
  int initial_loading_tabs = NumLoadingTabs();
  int expected_broken_tabs = NumBrokenTabs();
  size_t initial_browser_count = browser_list_->size();
  // The active tab becomes newly broken unless it was already broken.
  if (captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL !=
      GetStateOfTabReloader(tab_strip_model->GetActiveWebContents())) {
    ++expected_broken_tabs;
  }
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(browser->profile());
  ui_test_utils::NavigateToURLWithDisposition(
      browser, hanging_url, WindowOpenDisposition::CURRENT_TAB,
      ui_test_utils::BROWSER_TEST_NONE);
  portal_observer.WaitForResults(expected_portal_checks);
  Browser* login_browser = nullptr;
  if (expect_open_login_tab) {
    ASSERT_GE(expected_login_tab_navigations, 1);
    navigation_observer.WaitForNavigations(expected_login_tab_navigations);
    WebContents* login_tab;
    if (expect_new_login_browser) {
      ASSERT_EQ(initial_browser_count + 1, browser_list_->size());
      // Check the original browser
      ASSERT_EQ(initial_tab_count, tab_strip_model->count());
      EXPECT_EQ(initial_tab_count - 1, tab_strip_model->active_index());
      // Check the new popup browser
      login_browser = browser_list_->get(initial_browser_count);
      EXPECT_EQ(Browser::TYPE_POPUP, login_browser->type());
      login_tab = login_browser->tab_strip_model()->GetWebContentsAt(0);
      EXPECT_TRUE(
          captive_portal::CaptivePortalTabHelper::FromWebContents(login_tab)
              ->is_captive_portal_window());
      EXPECT_EQ(base::ASCIIToUTF16(kLoginSecureDnsDisabledTitle),
                login_tab->GetTitle());
    } else {
      // Login tab opened in the same browser, appended after existing tabs.
      ASSERT_EQ(initial_browser_count, browser_list_->size());
      ASSERT_EQ(initial_tab_count + 1, tab_strip_model->count());
      EXPECT_EQ(initial_tab_count, tab_strip_model->active_index());
      login_tab = tab_strip_model->GetWebContentsAt(initial_tab_count);
      login_browser = browser;
    }
    EXPECT_EQ(expected_login_tab_navigations,
              navigation_observer.NumNavigationsForTab(login_tab));
    EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
              GetStateOfTabReloader(login_tab));
    EXPECT_TRUE(IsLoginTab(login_tab));
  } else {
    ASSERT_EQ(initial_browser_count, browser_list_->size());
    EXPECT_EQ(0, navigation_observer.num_navigations());
    EXPECT_EQ(initial_active_index, tab_strip_model->active_index());
    ASSERT_EQ(initial_tab_count, tab_strip_model->count());
    EXPECT_EQ(initial_active_index, tab_strip_model->active_index());
  }
  // Wait for all the expect resource loads to actually start, so subsequent
  // functions can rely on them having started.
  WaitForJobs(initial_loading_tabs + 1);
  EXPECT_EQ(initial_loading_tabs + 1, NumLoadingTabs());
  EXPECT_EQ(expected_broken_tabs, NumBrokenTabs());
  EXPECT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer.captive_portal_result());
  EXPECT_EQ(expected_portal_checks, portal_observer.num_results_received());
  EXPECT_FALSE(CheckPending(browser));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL,
            GetStateOfTabReloaderAt(browser, initial_active_index));
  // Reset the load time to be large, so the timer won't trigger on a reload.
  SetSlowSSLLoadTime(tab_reloader, base::Hours(1));
  if (out_login_browser)
    *out_login_browser = login_browser;
}
void CaptivePortalBrowserTest::FastTimeoutBehindCaptivePortal(
    Browser* browser,
    bool expect_open_login_tab) {
  // Delegate to the generic fast-error path, using the quick-timeout URL and
  // never requesting a separate popup browser for the login tab.
  Browser* result_browser = nullptr;
  FastErrorBehindCaptivePortal(browser, expect_open_login_tab,
                               false /* expect_new_login_browser */,
                               GURL(kMockHttpsQuickTimeoutUrl),
                               &result_browser);
  // Since no popup was requested, any login tab must live in |browser|.
  DCHECK(!result_browser || result_browser == browser);
}
// Navigates |browser| to |error_url|, which fails quickly (no hanging load,
// so the slow-load timer is suppressed), then waits for a single captive
// portal check reporting BEHIND_CAPTIVE_PORTAL. Login-tab expectations mirror
// SlowLoadBehindCaptivePortal(); the browser hosting the login tab, if any,
// is returned through |out_login_browser| when non-null.
void CaptivePortalBrowserTest::FastErrorBehindCaptivePortal(
    Browser* browser,
    bool expect_open_login_tab,
    bool expect_new_login_browser,
    const GURL& error_url,
    Browser** out_login_browser) {
  TabStripModel* tab_strip_model = browser->tab_strip_model();
  // Calling this on a tab that's waiting for a load to manually be timed out
  // will result in a hang.
  ASSERT_FALSE(tab_strip_model->GetActiveWebContents()->IsLoading());
  // Set the load time to be large, so the timer won't trigger. The value is
  // not restored at the end of the function.
  captive_portal::CaptivePortalTabReloader* tab_reloader =
      GetTabReloader(tab_strip_model->GetActiveWebContents());
  ASSERT_TRUE(tab_reloader);
  SetSlowSSLLoadTime(tab_reloader, base::Hours(1));
  // Number of tabs expected to be open after the captive portal checks
  // have completed.
  int initial_tab_count = tab_strip_model->count();
  int initial_active_index = tab_strip_model->active_index();
  int initial_loading_tabs = NumLoadingTabs();
  int expected_broken_tabs = NumBrokenTabs();
  size_t initial_browser_count = browser_list_->size();
  // The active tab becomes newly broken unless it was already broken.
  if (captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL !=
      GetStateOfTabReloader(tab_strip_model->GetActiveWebContents())) {
    ++expected_broken_tabs;
  }
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(browser->profile());
  ui_test_utils::NavigateToURLWithDisposition(
      browser, error_url, WindowOpenDisposition::CURRENT_TAB,
      ui_test_utils::BROWSER_TEST_NONE);
  portal_observer.WaitForResults(1);
  Browser* login_browser = nullptr;
  if (expect_open_login_tab) {
    // Two navigations: the error tab committing its error page, plus the
    // login tab's initial load.
    navigation_observer.WaitForNavigations(2);
    WebContents* login_tab;
    if (expect_new_login_browser) {
      ASSERT_EQ(initial_browser_count + 1, browser_list_->size());
      // Check the original browser
      ASSERT_EQ(initial_tab_count, tab_strip_model->count());
      EXPECT_EQ(initial_tab_count - 1, tab_strip_model->active_index());
      // Check the new popup browser
      login_browser = browser_list_->get(initial_browser_count);
      EXPECT_EQ(Browser::TYPE_POPUP, login_browser->type());
      login_tab = login_browser->tab_strip_model()->GetWebContentsAt(0);
      EXPECT_TRUE(
          captive_portal::CaptivePortalTabHelper::FromWebContents(login_tab)
              ->is_captive_portal_window());
      EXPECT_EQ(base::ASCIIToUTF16(kLoginSecureDnsDisabledTitle),
                login_tab->GetTitle());
    } else {
      ASSERT_EQ(initial_browser_count, browser_list_->size());
      ASSERT_EQ(initial_tab_count + 1, tab_strip_model->count());
      EXPECT_EQ(initial_tab_count, tab_strip_model->active_index());
      login_tab = tab_strip_model->GetWebContentsAt(initial_tab_count);
      login_browser = browser;
    }
    EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                     tab_strip_model->GetWebContentsAt(initial_active_index)));
    EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(login_tab));
    EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
              GetStateOfTabReloader(login_tab));
    EXPECT_TRUE(IsLoginTab(login_tab));
  } else {
    navigation_observer.WaitForNavigations(1);
    ASSERT_EQ(initial_browser_count, browser_list_->size());
    EXPECT_EQ(initial_active_index, tab_strip_model->active_index());
    EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                     tab_strip_model->GetWebContentsAt(initial_active_index)));
    ASSERT_EQ(initial_tab_count, tab_strip_model->count());
    EXPECT_EQ(initial_active_index, tab_strip_model->active_index());
  }
  EXPECT_EQ(initial_loading_tabs, NumLoadingTabs());
  EXPECT_EQ(expected_broken_tabs, NumBrokenTabs());
  EXPECT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer.captive_portal_result());
  EXPECT_EQ(1, portal_observer.num_results_received());
  EXPECT_FALSE(CheckPending(browser));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL,
            GetStateOfTabReloaderAt(browser, initial_active_index));
  if (out_login_browser)
    *out_login_browser = login_browser;
}
// Starts a navigation to |cert_error_url| with captive portal probe responses
// disabled and waits only until the SSL interstitial timer starts. The tab is
// deliberately left loading indefinitely for the caller to resolve.
void CaptivePortalBrowserTest::FastErrorWithInterstitialTimer(
    Browser* browser,
    const GURL& cert_error_url) {
  TabStripModel* tab_strip_model = browser->tab_strip_model();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  // Disable captive portal checks indefinitely.
  RespondToProbeRequests(false);
  SSLInterstitialTimerObserver interstitial_timer_observer(broken_tab_contents);
  ui_test_utils::NavigateToURLWithDisposition(
      browser, cert_error_url, WindowOpenDisposition::CURRENT_TAB,
      ui_test_utils::BROWSER_TEST_NONE);
  interstitial_timer_observer.WaitForTimerStarted();
  // The tab should be in loading state, waiting for the interstitial timer to
  // expire or a captive portal result to arrive. Since captive portal checks
  // are disabled and timer set to expire after a very long time, the tab should
  // hang indefinitely.
  EXPECT_TRUE(broken_tab_contents->IsLoading());
  EXPECT_EQ(1, NumLoadingTabs());
}
// Submits the login form in the active (login) tab while still behind the
// portal, then verifies the resulting probe again reports a captive portal
// and that tab counts, loading tabs, and reloader states are unchanged.
void CaptivePortalBrowserTest::NavigateLoginTab(Browser* browser,
                                                int num_loading_tabs,
                                                int num_timed_out_tabs) {
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(browser->profile());
  TabStripModel* tab_strip_model = browser->tab_strip_model();
  int initial_tab_count = tab_strip_model->count();
  EXPECT_EQ(num_loading_tabs, NumLoadingTabs());
  EXPECT_EQ(num_timed_out_tabs, NumBrokenTabs() - NumLoadingTabs());
  // The login tab must be the active tab before the form is submitted.
  int login_tab_index = tab_strip_model->active_index();
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloader(tab_strip_model->GetActiveWebContents()));
  ASSERT_TRUE(IsLoginTab(browser->tab_strip_model()->GetActiveWebContents()));
  // Do the navigation.
  content::ExecuteScriptAsync(tab_strip_model->GetActiveWebContents(),
                              "submitForm()");
  portal_observer.WaitForResults(1);
  navigation_observer.WaitForNavigations(1);
  // Check the captive portal result.
  EXPECT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer.captive_portal_result());
  EXPECT_EQ(1, portal_observer.num_results_received());
  EXPECT_FALSE(CheckPending(browser));
  // Make sure not much has changed.
  EXPECT_EQ(initial_tab_count, tab_strip_model->count());
  EXPECT_EQ(num_loading_tabs, NumLoadingTabs());
  EXPECT_EQ(num_loading_tabs + num_timed_out_tabs, NumBrokenTabs());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser, login_tab_index));
  EXPECT_TRUE(IsLoginTab(tab_strip_model->GetWebContentsAt(login_tab_index)));
  // Make sure there were no unexpected navigations.
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   tab_strip_model->GetWebContentsAt(login_tab_index)));
}
// Simulates a successful login: flips the mock portal state to "online",
// submits the login form in the active tab of |captive_portal_browser|, and
// waits for the |num_timed_out_tabs| broken tabs to reload plus any portal
// checks those reloads trigger (|expected_portal_checks| total). Tabs that
// were still loading remain loading, now in STATE_NEEDS_RELOAD.
void CaptivePortalBrowserTest::Login(Browser* captive_portal_browser,
                                     int num_loading_tabs,
                                     int num_timed_out_tabs,
                                     int expected_portal_checks) {
  // Simulate logging in.
  SetBehindCaptivePortal(false);
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(captive_portal_browser->profile());
  TabStripModel* tab_strip_model = captive_portal_browser->tab_strip_model();
  size_t initial_browser_count = browser_list_->size();
  int initial_tab_count = NumTabs();
  ASSERT_EQ(num_loading_tabs, NumLoadingTabs());
  EXPECT_EQ(num_timed_out_tabs, NumBrokenTabs() - NumLoadingTabs());
  // Verify that the login page is on top.
  int login_tab_index = tab_strip_model->active_index();
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(captive_portal_browser, login_tab_index));
  ASSERT_TRUE(IsLoginTab(tab_strip_model->GetWebContentsAt(login_tab_index)));
  // Trigger a navigation.
  content::ExecuteScriptAsync(tab_strip_model->GetActiveWebContents(),
                              "submitForm()");
  portal_observer.WaitForResults(1);
  // Wait for all the timed out tabs to reload and any new portal checks
  // triggered by the reloads.
  navigation_observer.WaitForNavigations(1 + num_timed_out_tabs);
  portal_observer.WaitForResults(expected_portal_checks);
  EXPECT_EQ(expected_portal_checks, portal_observer.num_results_received());
  // The tabs that were loading before should still be loading, and now be in
  // STATE_NEEDS_RELOAD.
  EXPECT_EQ(0, NumBrokenTabs());
  EXPECT_EQ(num_loading_tabs, NumLoadingTabs());
  EXPECT_EQ(num_loading_tabs, NumNeedReloadTabs());
  // Make sure that the broken tabs have reloaded, and there's no more
  // captive portal tab.
  EXPECT_EQ(initial_browser_count, browser_list_->size());
  EXPECT_EQ(initial_tab_count, NumTabs());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(captive_portal_browser, login_tab_index));
  EXPECT_FALSE(IsLoginTab(tab_strip_model->GetWebContentsAt(login_tab_index)));
  // Make sure there were no unexpected navigations of the login tab.
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   tab_strip_model->GetWebContentsAt(login_tab_index)));
}
// Logs in from the login tab while the original tab is showing an SSL
// interstitial. The login navigation and the interstitial's reload each
// trigger a portal check; both complete with INTERNET_CONNECTED and all tabs
// end in STATE_NONE.
void CaptivePortalBrowserTest::LoginCertError(Browser* browser) {
  SetBehindCaptivePortal(false);
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(browser->profile());
  TabStripModel* tab_strip_model = browser->tab_strip_model();
  // Verify that the login page is on top.
  int login_tab_index = tab_strip_model->active_index();
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser, login_tab_index));
  ASSERT_TRUE(IsLoginTab(tab_strip_model->GetWebContentsAt(login_tab_index)));
  // Trigger a navigation.
  content::ExecuteScriptAsync(tab_strip_model->GetActiveWebContents(),
                              "submitForm()");
  // The captive portal tab navigation will trigger a captive portal check,
  // and reloading the original tab will bring up the interstitial page again,
  // triggering a second captive portal check.
  portal_observer.WaitForResults(2);
  // Wait for both tabs to finish loading.
  navigation_observer.WaitForNavigations(2);
  EXPECT_EQ(2, portal_observer.num_results_received());
  EXPECT_FALSE(CheckPending(browser));
  EXPECT_EQ(captive_portal::RESULT_INTERNET_CONNECTED,
            portal_observer.captive_portal_result());
  // Check state of tabs. While the first tab is still displaying an
  // interstitial page, since no portal was found, it should be in STATE_NONE,
  // as should the login tab.
  ASSERT_EQ(2, tab_strip_model->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser, 0));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser, login_tab_index));
  EXPECT_FALSE(IsLoginTab(tab_strip_model->GetWebContentsAt(login_tab_index)));
  // Make sure only one navigation was for the login tab.
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   tab_strip_model->GetWebContentsAt(login_tab_index)));
}
// After a successful login, times out the |num_loading_tabs| outstanding
// requests and waits for the automatic reloads to finish. No further portal
// checks may run and no tabs may be added or switched.
void CaptivePortalBrowserTest::FailLoadsAfterLogin(Browser* browser,
                                                   int num_loading_tabs) {
  ASSERT_EQ(num_loading_tabs, NumLoadingTabs());
  ASSERT_EQ(num_loading_tabs, NumNeedReloadTabs());
  EXPECT_EQ(0, NumBrokenTabs());
  TabStripModel* tab_strip_model = browser->tab_strip_model();
  int initial_num_tabs = tab_strip_model->count();
  int initial_active_tab = tab_strip_model->active_index();
  CaptivePortalObserver portal_observer(browser->profile());
  FailLoadsAfterLoginObserver fail_loads_observer;
  // Connection(s) finally time out. There should have already been a call
  // to wait for the requests to be issued before logging on.
  WaitForJobs(num_loading_tabs);
  FailJobs(num_loading_tabs, net::ERR_CONNECTION_TIMED_OUT,
           net::ResolveErrorInfo(net::OK));
  fail_loads_observer.WaitForNavigations();
  // No captive portal checks should have occurred or be pending, and there
  // should be no new tabs.
  EXPECT_EQ(0, portal_observer.num_results_received());
  EXPECT_FALSE(CheckPending(browser));
  EXPECT_EQ(initial_num_tabs, tab_strip_model->count());
  EXPECT_EQ(initial_active_tab, tab_strip_model->active_index());
  EXPECT_EQ(0, NumNeedReloadTabs());
  EXPECT_EQ(0, NumLoadingTabs());
}
// Times out the |num_loading_tabs| hanging requests while still behind the
// portal (no login yet). The tabs stay broken-by-portal and the login tab in
// |captive_portal_browser| (defaults to |browser|) remains active and
// untouched; no new portal checks may run.
void CaptivePortalBrowserTest::FailLoadsWithoutLogin(
    Browser* browser,
    int num_loading_tabs,
    Browser* captive_portal_browser) {
  if (!captive_portal_browser)
    captive_portal_browser = browser;
  ASSERT_EQ(num_loading_tabs, NumLoadingTabs());
  ASSERT_EQ(0, NumNeedReloadTabs());
  EXPECT_EQ(num_loading_tabs, NumBrokenTabs());
  TabStripModel* tab_strip_model = captive_portal_browser->tab_strip_model();
  int initial_num_tabs = NumTabs();
  int login_tab = tab_strip_model->active_index();
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloader(tab_strip_model->GetActiveWebContents()));
  ASSERT_TRUE(IsLoginTab(tab_strip_model->GetActiveWebContents()));
  CaptivePortalObserver portal_observer(browser->profile());
  MultiNavigationObserver navigation_observer;
  // Connection(s) finally time out. There should have already been a call
  // to wait for the requests to be issued.
  FailJobs(num_loading_tabs, net::ERR_CONNECTION_TIMED_OUT,
           net::ResolveErrorInfo(net::OK));
  navigation_observer.WaitForNavigations(num_loading_tabs);
  // No captive portal checks should have occurred or be pending, and there
  // should be no new tabs.
  EXPECT_EQ(0, portal_observer.num_results_received());
  EXPECT_FALSE(CheckPending(browser));
  EXPECT_EQ(initial_num_tabs, NumTabs());
  EXPECT_EQ(0, NumNeedReloadTabs());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_EQ(num_loading_tabs, NumBrokenTabs());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloader(tab_strip_model->GetActiveWebContents()));
  EXPECT_TRUE(IsLoginTab(tab_strip_model->GetActiveWebContents()));
  EXPECT_EQ(login_tab, tab_strip_model->active_index());
  EXPECT_EQ(0, navigation_observer.NumNavigationsForTab(
                   tab_strip_model->GetWebContentsAt(login_tab)));
}
// End-to-end helper: loads |starting_url| with the portal disabled, then —
// behind the portal — starts a hanging load of |hanging_url|, abandons that
// request, re-navigates the same tab to |timeout_url|, logs in, and finally
// lets the last load time out and reload.
void CaptivePortalBrowserTest::RunNavigateLoadingTabToTimeoutTest(
    Browser* browser,
    const GURL& starting_url,
    const GURL& hanging_url,
    const GURL& timeout_url) {
  // Temporarily disable the captive portal and navigate to the starting
  // URL, which may be a URL that will hang when behind a captive portal.
  SetBehindCaptivePortal(false);
  NavigateToPageExpectNoTest(browser, starting_url);
  SetBehindCaptivePortal(true);
  // Go to the first hanging url.
  SlowLoadBehindCaptivePortal(browser, true /* expect_open_login_tab */,
                              false /* expect_new_login_browser */,
                              hanging_url, 1, 1);
  // Abandon the request.
  WaitForJobs(1);
  AbandonJobs(1);
  TabStripModel* tab_strip_model = browser->tab_strip_model();
  captive_portal::CaptivePortalTabReloader* tab_reloader =
      GetTabReloader(tab_strip_model->GetWebContentsAt(0));
  ASSERT_TRUE(tab_reloader);
  // A non-zero delay makes it more likely that
  // captive_portal::CaptivePortalTabHelper will be confused by events relating
  // to canceling the old navigation.
  SetSlowSSLLoadTime(tab_reloader, base::Seconds(2));
  CaptivePortalObserver portal_observer(browser->profile());
  // Navigate the error tab to another slow loading page. Can't have
  // ui_test_utils do the navigation because it will wait for loading tabs to
  // stop loading before navigating.
  //
  // This may result in either 0 or 1 DidStopLoading events. If there is one,
  // it must happen before the captive_portal::CaptivePortalService sends out
  // its test request, so waiting for PortalObserver to see that request
  // prevents it from confusing the MultiNavigationObservers used later.
  tab_strip_model->ActivateTabAt(0, {TabStripModel::GestureType::kOther});
  browser->OpenURL(content::OpenURLParams(timeout_url, content::Referrer(),
                                          WindowOpenDisposition::CURRENT_TAB,
                                          ui::PAGE_TRANSITION_TYPED, false));
  portal_observer.WaitForResults(1);
  EXPECT_FALSE(CheckPending(browser));
  EXPECT_EQ(1, NumLoadingTabs());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL,
            GetStateOfTabReloaderAt(browser, 0));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser, 1));
  ASSERT_TRUE(IsLoginTab(tab_strip_model->GetWebContentsAt(1)));
  // Need to make sure the request has been issued before logging in.
  WaitForJobs(1);
  // Simulate logging in.
  tab_strip_model->ActivateTabAt(1, {TabStripModel::GestureType::kOther});
  SetSlowSSLLoadTime(tab_reloader, base::Days(1));
  Login(browser, 1 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  // Timeout occurs, and page is automatically reloaded.
  FailLoadsAfterLogin(browser, 1);
}
void CaptivePortalBrowserTest::SetSlowSSLLoadTime(
    captive_portal::CaptivePortalTabReloader* tab_reloader,
    base::TimeDelta slow_ssl_load_time) {
  // Forward the threshold to the reloader. Tests pass a zero delta to make
  // the slow-load timer fire immediately, and a huge one to suppress it.
  tab_reloader->set_slow_ssl_load_time(slow_ssl_load_time);
}
captive_portal::CaptivePortalTabReloader*
CaptivePortalBrowserTest::GetTabReloader(WebContents* web_contents) const {
  // The reloader is owned by the per-tab CaptivePortalTabHelper.
  auto* tab_helper =
      captive_portal::CaptivePortalTabHelper::FromWebContents(web_contents);
  return tab_helper->GetTabReloaderForTest();
}
// Make sure there's no test for a captive portal on HTTP timeouts.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, HttpTimeout) {
  const GURL timeout_url(kMockHttpConnectionTimeoutErr);
  NavigateToPageExpectNoTest(browser(), timeout_url);
}
// Make sure there's no check for a captive portal on HTTPS errors other than
// timeouts, when they preempt the slow load timer.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, HttpsNonTimeoutError) {
  const GURL unexpected_error_url(kMockHttpsConnectionUnexpectedErr);
  NavigateToPageExpectNoTest(browser(), unexpected_error_url);
}
// Make sure no captive portal test triggers on HTTPS timeouts of iframes.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, HttpsIframeTimeout) {
  // Use an HTTPS server for the top level page.
  net::EmbeddedTestServer ssl_server(net::EmbeddedTestServer::TYPE_HTTPS);
  ssl_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(ssl_server.Start());
  NavigateToPageExpectNoTest(browser(),
                             ssl_server.GetURL(kTestServerIframeTimeoutPath));
}
// Check the captive portal result when the test request reports a network
// error. The check is triggered by a slow loading page, and the page
// errors out only after getting a captive portal result.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, RequestFails) {
  // Point the probe at a URL whose connection closes with an error.
  const GURL closed_connection_url(kMockHttpConnectionConnectionClosedErr);
  SetUpCaptivePortalService(browser()->profile(), closed_connection_url);
  SlowLoadNoCaptivePortal(browser(), captive_portal::RESULT_NO_RESPONSE);
}
// Same as above, but for the rather unlikely case that the connection times
// out before the timer triggers. (The test name's "Timout" spelling is kept
// for continuity with existing test filters.)
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, RequestFailsFastTimout) {
  const GURL closed_connection_url(kMockHttpConnectionConnectionClosedErr);
  SetUpCaptivePortalService(browser()->profile(), closed_connection_url);
  FastTimeoutNoCaptivePortal(browser(), captive_portal::RESULT_NO_RESPONSE);
}
// Checks the case that captive portal detection is disabled.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, Disabled) {
  EnableCaptivePortalDetection(browser()->profile(), false);
  // With detection off, the slow load behaves as if the network is fine.
  SlowLoadNoCaptivePortal(browser(), captive_portal::RESULT_INTERNET_CONNECTED);
}
// Checks that we look for a captive portal on HTTPS timeouts and don't reload
// the error tab when the captive portal probe gets a 204 response, indicating
// there is no captive portal.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, InternetConnected) {
  // Can't just use SetBehindCaptivePortal(false), since then there wouldn't
  // be a timeout.
  ASSERT_TRUE(embedded_test_server()->Start());
  const GURL no_content_url = embedded_test_server()->GetURL("/nocontent");
  SetUpCaptivePortalService(browser()->profile(), no_content_url);
  SlowLoadNoCaptivePortal(browser(), captive_portal::RESULT_INTERNET_CONNECTED);
}
// Checks that no login page is opened when the HTTP test URL redirects to an
// SSL certificate error.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, RedirectSSLCertError) {
  // Need an HTTP TestServer to handle a dynamically created server redirect.
  ASSERT_TRUE(embedded_test_server()->Start());
  // Stand up an HTTPS server whose certificate won't match its host name.
  net::EmbeddedTestServer ssl_server(net::EmbeddedTestServer::TYPE_HTTPS);
  ssl_server.SetSSLConfig(net::EmbeddedTestServer::CERT_MISMATCHED_NAME);
  ssl_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(ssl_server.Start());
  const GURL ssl_login_url = ssl_server.GetURL(kTestServerLoginPath);
  captive_portal::CaptivePortalService* portal_service =
      CaptivePortalServiceFactory::GetForProfile(browser()->profile());
  ASSERT_TRUE(portal_service);
  // Point the probe at an HTTP URL redirecting to the bad-cert HTTPS page.
  const GURL redirect_url = embedded_test_server()->GetURL(
      CreateServerRedirect(ssl_login_url.spec()));
  SetUpCaptivePortalService(browser()->profile(), redirect_url);
  SlowLoadNoCaptivePortal(browser(), captive_portal::RESULT_NO_RESPONSE);
}
// A slow SSL load triggers a captive portal check. The user logs on before
// the SSL page times out. We wait for the timeout and subsequent reload.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, Login) {
  // Load starts, the portal is detected, and a login tab opens.
  SlowLoadBehindCaptivePortal(browser(), /*expect_open_login_tab=*/true);
  // Log in while the original load is still hanging.
  Login(browser(), 1 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  // The hanging load times out and is automatically reloaded.
  FailLoadsAfterLogin(browser(), 1);
}
// Same as above, except we make sure everything works with an incognito
// profile. Main issues it tests for are that the incognito has its own
// non-NULL captive portal service, and we open the tab in the correct
// window.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, LoginIncognito) {
  // This will watch tabs for both profiles, but only used to make sure no
  // navigations occur for the non-incognito profile.
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver non_incognito_portal_observer(browser()->profile());
  Browser* incognito_browser = CreateIncognitoBrowser();
  // Detection must be enabled separately for the incognito profile.
  EnableCaptivePortalDetection(incognito_browser->profile(), true);
  SetUpCaptivePortalService(incognito_browser->profile(),
                            GURL(kMockCaptivePortalTestUrl));
  SlowLoadBehindCaptivePortal(incognito_browser, true);
  // The non-incognito window must be untouched by the incognito login flow.
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  EXPECT_EQ(1, tab_strip_model->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
  Login(incognito_browser, 1 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  FailLoadsAfterLogin(incognito_browser, 1);
  EXPECT_EQ(1, tab_strip_model->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
  EXPECT_EQ(0, navigation_observer.NumNavigationsForTab(
                   tab_strip_model->GetWebContentsAt(0)));
  EXPECT_EQ(0, non_incognito_portal_observer.num_results_received());
}
// The captive portal page is opened before the SSL page times out,
// but the user logs in only after the page times out.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, LoginSlow) {
  // Portal detected while the SSL load is still hanging; login tab opens.
  SlowLoadBehindCaptivePortal(browser(), /*expect_open_login_tab=*/true);
  // Let the hanging load time out before the user logs in.
  FailLoadsWithoutLogin(browser(), 1);
  // Now log in; the timed-out tab is expected to reload.
  Login(browser(), 0 /* num_loading_tabs */, 1 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
}
// Checks the unlikely case that the tab times out before the timer triggers.
// This most likely won't happen, but should still work.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, LoginFastTimeout) {
  // The fast timeout opens a login tab immediately.
  FastTimeoutBehindCaptivePortal(browser(), /*expect_open_login_tab=*/true);
  // Log in; the one timed-out tab is expected to reload.
  Login(browser(), 0 /* num_loading_tabs */, 1 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
}
// A cert error triggers a captive portal check and results in opening a login
// tab.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       ShowCaptivePortalInterstitialOnCertError) {
  // HTTPS server configured with a mismatched-name certificate, so every
  // navigation to it produces a certificate error.
  net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS);
  https_server.SetSSLConfig(net::EmbeddedTestServer::CERT_MISMATCHED_NAME);
  https_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(https_server.Start());
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  // The path does not matter.
  GURL cert_error_url = https_server.GetURL(kTestServerLoginPath);
  int cert_error_tab_index = tab_strip_model->active_index();
  // The interstitial should trigger a captive portal check when it opens, just
  // like navigating to kMockHttpsQuickTimeoutUrl.
  FastErrorBehindCaptivePortal(browser(), true /* expect_open_login_tab */,
                               false /* expect_new_login_browser */,
                               cert_error_url);
  // Being behind a portal, the cert error is shown as a captive portal
  // interstitial rather than a plain SSL one.
  EXPECT_EQ(CaptivePortalBlockingPage::kTypeForTesting,
            GetInterstitialType(broken_tab_contents));
  // Switch to the interstitial and click the |Connect| button. Should switch
  // active tab to the captive portal landing page.
  int login_tab_index = tab_strip_model->active_index();
  tab_strip_model->ActivateTabAt(cert_error_tab_index);
  // Wait for the interstitial to load all the JavaScript code. Otherwise,
  // trying to click on a button will fail.
  content::RenderFrameHost* rfh;
  rfh = broken_tab_contents->GetMainFrame();
  EXPECT_TRUE(WaitForRenderFrameReady(rfh));
  const char kClickConnectButtonJS[] =
      "document.getElementById('primary-button').click();";
  {
    // Scoped so the waiter is destroyed once the activation is observed.
    TabActivationWaiter tab_activation_waiter(tab_strip_model);
    content::ExecuteScriptAsync(rfh, kClickConnectButtonJS);
    tab_activation_waiter.WaitForActiveTabChange();
  }
  EXPECT_EQ(login_tab_index, tab_strip_model->active_index());
  // For completeness, close the login tab and try clicking |Connect| again.
  // A new login tab should open.
  EXPECT_EQ(1, login_tab_index);
  content::WebContentsDestroyedWatcher destroyed_watcher(
      tab_strip_model->GetActiveWebContents());
  EXPECT_TRUE(
      tab_strip_model->CloseWebContentsAt(tab_strip_model->active_index(), 0));
  destroyed_watcher.Wait();
  MultiNavigationObserver navigation_observer;
  content::ExecuteScriptAsync(rfh, kClickConnectButtonJS);
  navigation_observer.WaitForNavigations(1);
  EXPECT_EQ(login_tab_index, tab_strip_model->active_index());
  LoginCertError(browser());
  // Once logged in, broken tab should reload and display the SSL interstitial.
  WaitForInterstitial(broken_tab_contents);
  tab_strip_model->ActivateTabAt(cert_error_tab_index);
  EXPECT_EQ(SSLBlockingPage::kTypeForTesting,
            GetInterstitialType(tab_strip_model->GetActiveWebContents()));
  // Trigger another captive portal check while the SSL interstitial is showing.
  // At this point the user is logged in to the captive portal, so the captive
  // portal interstitial shouldn't get recreated.
  CaptivePortalObserver portal_observer(browser()->profile());
  captive_portal::CaptivePortalService* captive_portal_service =
      CaptivePortalServiceFactory::GetForProfile(browser()->profile());
  captive_portal_service->DetectCaptivePortal();
  portal_observer.WaitForResults(1);
  EXPECT_EQ(SSLBlockingPage::kTypeForTesting,
            GetInterstitialType(broken_tab_contents));
  // A captive portal appears. Trigger a final captive portal check. The
  // captive portal interstitial should still not get recreated.
  SetBehindCaptivePortal(true);
  CaptivePortalObserver final_portal_observer(browser()->profile());
  captive_portal_service->DetectCaptivePortal();
  final_portal_observer.WaitForResults(1);
  EXPECT_EQ(SSLBlockingPage::kTypeForTesting,
            GetInterstitialType(broken_tab_contents));
}
// Tests this scenario:
// - Portal probe requests are ignored, so that no captive portal result can
//   arrive.
// - A cert error triggers an interstitial timer with a very long timeout.
// - No captive portal results arrive, causing the tab to appear as loading
//   indefinitely (because probe requests are ignored).
// - Stopping the page load shouldn't result in any interstitials.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       InterstitialTimerStopNavigationWhileLoading) {
  // Server with a mismatched-name cert, so the navigation hits a cert error.
  net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS);
  https_server.SetSSLConfig(net::EmbeddedTestServer::CERT_MISMATCHED_NAME);
  https_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(https_server.Start());
  // The path does not matter.
  GURL cert_error_url = https_server.GetURL(kTestServerLoginPath);
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  CaptivePortalObserver portal_observer1(browser()->profile());
  FastErrorWithInterstitialTimer(browser(), cert_error_url);
  // Page appears loading. Stop the navigation. There should be no interstitial.
  MultiNavigationObserver test_navigation_observer;
  broken_tab_contents->Stop();
  test_navigation_observer.WaitForNavigations(1);
  // Make sure that the |ssl_error_handler| is deleted if page load is stopped.
  EXPECT_TRUE(nullptr == SSLErrorHandler::FromWebContents(broken_tab_contents));
  EXPECT_FALSE(IsShowingInterstitial(broken_tab_contents));
  EXPECT_FALSE(broken_tab_contents->IsLoading());
  // No portal check should have run, and the tab reloader should be idle.
  EXPECT_EQ(0, portal_observer1.num_results_received());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(1, browser()->tab_strip_model()->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
  // Re-enable captive portal checks and fire one. The result should be ignored.
  RespondToProbeRequests(true);
  CaptivePortalObserver portal_observer2(browser()->profile());
  captive_portal::CaptivePortalService* captive_portal_service =
      CaptivePortalServiceFactory::GetForProfile(browser()->profile());
  captive_portal_service->DetectCaptivePortal();
  portal_observer2.WaitForResults(1);
  // Even though the probe reports BEHIND_CAPTIVE_PORTAL, nothing should
  // change: no interstitial, no loading tabs, no new login tab.
  EXPECT_FALSE(IsShowingInterstitial(broken_tab_contents));
  EXPECT_FALSE(broken_tab_contents->IsLoading());
  EXPECT_EQ(1, portal_observer2.num_results_received());
  EXPECT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer2.captive_portal_result());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(1, browser()->tab_strip_model()->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
}
// Same as above, but instead of stopping, the loading page is reloaded. The end
// result is the same. (i.e. page load stops, no interstitials shown)
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       InterstitialTimerReloadWhileLoading) {
  // Server with a mismatched-name cert, so the navigation hits a cert error.
  net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS);
  https_server.SetSSLConfig(net::EmbeddedTestServer::CERT_MISMATCHED_NAME);
  https_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(https_server.Start());
  // The path does not matter.
  GURL cert_error_url = https_server.GetURL(kTestServerLoginPath);
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  CaptivePortalObserver portal_observer(browser()->profile());
  FastErrorWithInterstitialTimer(browser(), cert_error_url);
  // Page appears loading. Reloading it cancels the page load. Since the load is
  // stopped, no cert error occurs and SSLErrorHandler isn't instantiated.
  MultiNavigationObserver test_navigation_observer;
  chrome::Reload(browser(), WindowOpenDisposition::CURRENT_TAB);
  test_navigation_observer.WaitForNavigations(1);
  // Make sure that the |ssl_error_handler| is deleted.
  EXPECT_TRUE(nullptr == SSLErrorHandler::FromWebContents(broken_tab_contents));
  EXPECT_FALSE(IsShowingInterstitial(broken_tab_contents));
  EXPECT_FALSE(broken_tab_contents->IsLoading());
  // No portal check should have run, and the tab reloader should be idle.
  EXPECT_EQ(0, portal_observer.num_results_received());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(1, browser()->tab_strip_model()->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
  // Re-enable captive portal checks and fire one. The result should be ignored.
  RespondToProbeRequests(true);
  CaptivePortalObserver portal_observer2(browser()->profile());
  captive_portal::CaptivePortalService* captive_portal_service =
      CaptivePortalServiceFactory::GetForProfile(browser()->profile());
  captive_portal_service->DetectCaptivePortal();
  portal_observer2.WaitForResults(1);
  // The BEHIND_CAPTIVE_PORTAL result must not resurrect any interstitial.
  EXPECT_FALSE(IsShowingInterstitial(broken_tab_contents));
  EXPECT_FALSE(broken_tab_contents->IsLoading());
  EXPECT_EQ(1, portal_observer2.num_results_received());
  EXPECT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer2.captive_portal_result());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(1, browser()->tab_strip_model()->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
}
// Same as |InterstitialTimerReloadWhileLoading_NoSSLError|, but instead of
// reloading, the page is navigated away. The new page should load, and no
// interstitials should be shown.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       InterstitialTimerNavigateAwayWhileLoading) {
  // HTTP server for the destination page; HTTPS server with a mismatched-name
  // cert for the navigation that triggers the interstitial timer.
  ASSERT_TRUE(embedded_test_server()->Start());
  net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS);
  https_server.SetSSLConfig(net::EmbeddedTestServer::CERT_MISMATCHED_NAME);
  https_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(https_server.Start());
  // The path does not matter.
  GURL cert_error_url = https_server.GetURL(kTestServerLoginPath);
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  CaptivePortalObserver portal_observer(browser()->profile());
  FastErrorWithInterstitialTimer(browser(), cert_error_url);
  // Page appears loading. Navigating away shouldn't result in any interstitial.
  // Can't use ui_test_utils::NavigateToURLWithDisposition because it waits for
  // a load stop notification before starting a new navigation.
  MultiNavigationObserver test_navigation_observer;
  browser()->OpenURL(content::OpenURLParams(
      embedded_test_server()->GetURL("/title2.html"), content::Referrer(),
      WindowOpenDisposition::CURRENT_TAB, ui::PAGE_TRANSITION_TYPED, false));
  test_navigation_observer.WaitForNavigations(1);
  // Make sure that the |ssl_error_handler| is deleted.
  EXPECT_TRUE(nullptr == SSLErrorHandler::FromWebContents(broken_tab_contents));
  EXPECT_FALSE(IsShowingInterstitial(broken_tab_contents));
  EXPECT_FALSE(broken_tab_contents->IsLoading());
  // No portal check should have run, and the tab reloader should be idle.
  EXPECT_EQ(0, portal_observer.num_results_received());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(1, browser()->tab_strip_model()->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
  // Re-enable captive portal checks and fire one. The result should be ignored.
  RespondToProbeRequests(true);
  CaptivePortalObserver portal_observer2(browser()->profile());
  captive_portal::CaptivePortalService* captive_portal_service =
      CaptivePortalServiceFactory::GetForProfile(browser()->profile());
  captive_portal_service->DetectCaptivePortal();
  portal_observer2.WaitForResults(1);
  // The BEHIND_CAPTIVE_PORTAL result must not resurrect any interstitial.
  EXPECT_FALSE(IsShowingInterstitial(broken_tab_contents));
  EXPECT_FALSE(broken_tab_contents->IsLoading());
  EXPECT_EQ(1, portal_observer2.num_results_received());
  EXPECT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer2.captive_portal_result());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(1, browser()->tab_strip_model()->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
}
// Same as above, but the hanging load is interrupted by a navigation to the
// same page, this time committing the navigation. This should end up with an
// SSL interstitial when not behind a captive portal. This ensures that a new
// |SSLErrorHandler| is created on a new navigation, even though the tab's
// WebContents doesn't change.
IN_PROC_BROWSER_TEST_F(
    CaptivePortalBrowserTest,
    InterstitialTimerNavigateWhileLoading_EndWithSSLInterstitial) {
  // Server with a mismatched-name cert, so the navigation hits a cert error.
  net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS);
  https_server.SetSSLConfig(net::EmbeddedTestServer::CERT_MISMATCHED_NAME);
  https_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(https_server.Start());
  // The path does not matter.
  GURL cert_error_url = https_server.GetURL(kTestServerLoginPath);
  // No captive portal in this variant, so probes report INTERNET_CONNECTED.
  SetBehindCaptivePortal(false);
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  FastErrorWithInterstitialTimer(browser(), cert_error_url);
  // Page appears loading. Turn on response to probe request again, and navigate
  // to the same page. This should result in a cert error which should
  // instantiate an |SSLErrorHandler| and end up showing an SSL interstitial.
  RespondToProbeRequests(true);
  // Can't have ui_test_utils do the navigation because it will wait for loading
  // tabs to stop loading before navigating.
  CaptivePortalObserver portal_observer(browser()->profile());
  MultiNavigationObserver test_navigation_observer;
  browser()->OpenURL(content::OpenURLParams(cert_error_url, content::Referrer(),
                                            WindowOpenDisposition::CURRENT_TAB,
                                            ui::PAGE_TRANSITION_TYPED, false));
  test_navigation_observer.WaitForNavigations(1);
  // Should end up with an SSL interstitial.
  WaitForInterstitial(broken_tab_contents);
  ASSERT_TRUE(IsShowingInterstitial(broken_tab_contents));
  EXPECT_EQ(SSLBlockingPage::kTypeForTesting,
            GetInterstitialType(broken_tab_contents));
  EXPECT_FALSE(broken_tab_contents->IsLoading());
  // Exactly one probe ran, and it found a working connection, so no login tab.
  EXPECT_EQ(1, portal_observer.num_results_received());
  EXPECT_EQ(captive_portal::RESULT_INTERNET_CONNECTED,
            portal_observer.captive_portal_result());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(1, browser()->tab_strip_model()->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
}
// Same as above, but this time behind a captive portal.
IN_PROC_BROWSER_TEST_F(
    CaptivePortalBrowserTest,
    InterstitialTimerNavigateWhileLoading_EndWithCaptivePortalInterstitial) {
  // Server with a mismatched-name cert, so the navigation hits a cert error.
  net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS);
  https_server.SetSSLConfig(net::EmbeddedTestServer::CERT_MISMATCHED_NAME);
  https_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(https_server.Start());
  // The path does not matter.
  GURL cert_error_url = https_server.GetURL(kTestServerLoginPath);
  SetBehindCaptivePortal(true);
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  int initial_tab_count = tab_strip_model->count();
  FastErrorWithInterstitialTimer(browser(), cert_error_url);
  // Page appears loading. Turn on response to probe request again, and navigate
  // to the same page. This should result in a cert error which should
  // instantiate an |SSLErrorHandler| and end up showing a captive portal
  // interstitial.
  RespondToProbeRequests(true);
  // Can't have ui_test_utils do the navigation because it will wait for loading
  // tabs to stop loading before navigating.
  CaptivePortalObserver portal_observer(browser()->profile());
  MultiNavigationObserver test_navigation_observer;
  browser()->OpenURL(content::OpenURLParams(cert_error_url, content::Referrer(),
                                            WindowOpenDisposition::CURRENT_TAB,
                                            ui::PAGE_TRANSITION_TYPED, false));
  // Expect two navigations:
  // 1- For completing the load of the above navigation.
  // 2- For completing the load of the login tab.
  test_navigation_observer.WaitForNavigations(2);
  // Should end up with a captive portal interstitial and a new login tab.
  WaitForInterstitial(broken_tab_contents);
  ASSERT_TRUE(IsShowingInterstitial(broken_tab_contents));
  EXPECT_EQ(CaptivePortalBlockingPage::kTypeForTesting,
            GetInterstitialType(broken_tab_contents));
  // The login tab is appended after the existing tabs and becomes active.
  ASSERT_EQ(initial_tab_count + 1, tab_strip_model->count());
  EXPECT_EQ(initial_tab_count, tab_strip_model->active_index());
  EXPECT_FALSE(broken_tab_contents->IsLoading());
  EXPECT_EQ(1, portal_observer.num_results_received());
  EXPECT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer.captive_portal_result());
  EXPECT_EQ(0, NumLoadingTabs());
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL,
            GetStateOfTabReloaderAt(browser(), 0));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 1));
  EXPECT_TRUE(IsLoginTab(tab_strip_model->GetWebContentsAt(1)));
}
// A cert error triggers a captive portal check and results in opening a login
// tab. The user then logs in and the page with the error is reloaded.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, SSLCertErrorLogin) {
  // Need an HTTP TestServer to handle a dynamically created server redirect.
  ASSERT_TRUE(embedded_test_server()->Start())
  net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS);
  https_server.SetSSLConfig(net::EmbeddedTestServer::CERT_MISMATCHED_NAME);
  https_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(https_server.Start());
  // Set SSL interstitial delay to zero so that a captive portal result can not
  // arrive during this window, so an SSL interstitial is displayed instead
  // of a captive portal error page.
  SSLErrorHandler::SetInterstitialDelayForTesting(base::TimeDelta());
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  // The path does not matter.
  GURL cert_error_url = https_server.GetURL(kTestServerLoginPath);
  // A captive portal check is triggered in FastErrorBehindCaptivePortal.
  FastErrorBehindCaptivePortal(browser(), true /* expect_open_login_tab */,
                               false /* expect_new_login_browser */,
                               cert_error_url);
  // With zero interstitial delay, the generic SSL interstitial (not the
  // captive portal one) is shown in the broken tab.
  EXPECT_EQ(SSLBlockingPage::kTypeForTesting,
            GetInterstitialType(broken_tab_contents));
  LoginCertError(browser());
}
// Tries navigating both the tab that encounters an SSL timeout and the
// login tab twice, only logging in the second time.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, LoginExtraNavigations) {
  FastTimeoutBehindCaptivePortal(browser(), true);
  // Activate the timed out tab and navigate it to a timeout again.
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  tab_strip_model->ActivateTabAt(0, {TabStripModel::GestureType::kOther});
  // Second arg is false: the login tab already exists, so none should open.
  FastTimeoutBehindCaptivePortal(browser(), false);
  // Activate and navigate the captive portal tab.  This should not trigger a
  // reload of the tab with the error.
  tab_strip_model->ActivateTabAt(1, {TabStripModel::GestureType::kOther});
  NavigateLoginTab(browser(), 0, 1);
  // Simulate logging in.
  Login(browser(), 0 /* num_loading_tabs */, 1 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
}
// After the first SSL timeout, closes the login tab and makes sure it's opened
// it again on a second timeout.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, CloseLoginTab) {
  // First load starts, opens a login tab, and then times out.
  SlowLoadBehindCaptivePortal(browser(), true);
  FailLoadsWithoutLogin(browser(), 1);
  // Close login tab.
  chrome::CloseTab(browser());
  // Go through the standard slow load login, and make sure it still works.
  // (true => a fresh login tab is expected to open again.)
  SlowLoadBehindCaptivePortal(browser(), true);
  Login(browser(), 1 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  FailLoadsAfterLogin(browser(), 1);
}
// Checks that two tabs with SSL timeouts in the same window work.  Both
// tabs only timeout after logging in.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, TwoBrokenTabs) {
  ASSERT_TRUE(embedded_test_server()->Start());
  SlowLoadBehindCaptivePortal(browser(), true);
  // Can't set the TabReloader HTTPS timeout on a new tab without doing some
  // acrobatics, so open a new tab at a normal page, and then navigate it to a
  // timeout.
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(browser()->profile());
  ui_test_utils::NavigateToURLWithDisposition(
      browser(), embedded_test_server()->GetURL("/title2.html"),
      WindowOpenDisposition::NEW_FOREGROUND_TAB,
      ui_test_utils::BROWSER_TEST_WAIT_FOR_LOAD_STOP);
  // Tab layout now: 0 = broken (hung) tab, 1 = login tab, 2 = new normal tab.
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  ASSERT_EQ(3, tab_strip_model->count());
  // Opening the normal page must not trigger another portal check.
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(0, portal_observer.num_results_received());
  EXPECT_EQ(1, NumLoadingTabs());
  EXPECT_EQ(1, navigation_observer.num_navigations());
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   tab_strip_model->GetWebContentsAt(2)));
  ASSERT_EQ(captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL,
            GetStateOfTabReloaderAt(browser(), 0));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 1));
  ASSERT_TRUE(IsLoginTab(tab_strip_model->GetWebContentsAt(1)));
  ASSERT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 2));
  ASSERT_EQ(2, tab_strip_model->active_index());
  // Break the new tab too (false: login tab already open, expect no new one).
  SlowLoadBehindCaptivePortal(browser(), false);
  tab_strip_model->ActivateTabAt(1, {TabStripModel::GestureType::kOther});
  Login(browser(), 2 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  FailLoadsAfterLogin(browser(), 2);
}
// Checks that aborting the hung load clears the broken-tab state, and that a
// later login then finds nothing to reload.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, AbortLoad) {
  SlowLoadBehindCaptivePortal(browser(), true);
  // Abandon the request.
  WaitForJobs(1);
  AbandonJobs(1);
  CaptivePortalObserver portal_observer(browser()->profile());
  MultiNavigationObserver navigation_observer;
  // Switch back to the hung tab from the login tab, and abort the navigation.
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  tab_strip_model->ActivateTabAt(0, {TabStripModel::GestureType::kOther});
  chrome::Stop(browser());
  navigation_observer.WaitForNavigations(1);
  // Aborting must leave no broken tabs and trigger no extra portal checks.
  EXPECT_EQ(0, NumBrokenTabs());
  EXPECT_EQ(0, portal_observer.num_results_received());
  EXPECT_FALSE(CheckPending(browser()));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
  tab_strip_model->ActivateTabAt(1, {TabStripModel::GestureType::kOther});
  Login(browser(), 0 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
}
// Checks the case where the timed out tab is successfully navigated before
// logging in.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, NavigateBrokenTab) {
  ASSERT_TRUE(embedded_test_server()->Start());
  // Go to the error page.
  SlowLoadBehindCaptivePortal(browser(), true);
  FailLoadsWithoutLogin(browser(), 1);
  // Navigate the error tab to a non-error page.
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  tab_strip_model->ActivateTabAt(0, {TabStripModel::GestureType::kOther});
  ASSERT_TRUE(ui_test_utils::NavigateToURL(
      browser(), embedded_test_server()->GetURL("/title2.html")));
  // The successful navigation clears the tab's broken-by-portal state.
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
  // Simulate logging in.
  tab_strip_model->ActivateTabAt(1, {TabStripModel::GestureType::kOther});
  Login(browser(), 0 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
}
// Checks that captive portal detection triggers correctly when a same-site
// navigation is cancelled by a navigation to the same site.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       NavigateLoadingTabToTimeoutSingleSite) {
  // All three URLs identical: initial page, interrupting navigation, and the
  // final hanging navigation all hit the same mock HTTPS host.
  RunNavigateLoadingTabToTimeoutTest(
      browser(),
      GURL(kMockHttpsUrl),
      GURL(kMockHttpsUrl),
      GURL(kMockHttpsUrl));
}
// Fails on Windows only, mostly on Win7. http://crbug.com/170033
#if BUILDFLAG(IS_WIN)
#define MAYBE_NavigateLoadingTabToTimeoutTwoSites \
    DISABLED_NavigateLoadingTabToTimeoutTwoSites
#else
#define MAYBE_NavigateLoadingTabToTimeoutTwoSites \
    NavigateLoadingTabToTimeoutTwoSites
#endif
// Checks that captive portal detection triggers correctly when a same-site
// navigation is cancelled by a navigation to another site.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       MAYBE_NavigateLoadingTabToTimeoutTwoSites) {
  // First two navigations hit one mock HTTPS host; the final hanging
  // navigation goes to a second host.
  RunNavigateLoadingTabToTimeoutTest(
      browser(),
      GURL(kMockHttpsUrl),
      GURL(kMockHttpsUrl),
      GURL(kMockHttpsUrl2));
}
// Checks that captive portal detection triggers correctly when a cross-site
// navigation is cancelled by a navigation to yet another site.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       NavigateLoadingTabToTimeoutThreeSites) {
  ASSERT_TRUE(embedded_test_server()->Start());
  // Three distinct sites: a working HTTP page, then two different mock HTTPS
  // hosts for the interrupted and the final hanging navigation.
  RunNavigateLoadingTabToTimeoutTest(
      browser(), embedded_test_server()->GetURL("/title1.html"),
      GURL(kMockHttpsUrl), GURL(kMockHttpsUrl2));
}
// Checks that navigating a timed out tab back clears its state.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, GoBack) {
  ASSERT_TRUE(embedded_test_server()->Start());
  // Navigate to a working page.
  ASSERT_TRUE(ui_test_utils::NavigateToURL(
      browser(), embedded_test_server()->GetURL("/title2.html")));
  // Go to the error page.
  SlowLoadBehindCaptivePortal(browser(), true);
  FailLoadsWithoutLogin(browser(), 1);
  CaptivePortalObserver portal_observer(browser()->profile());
  MultiNavigationObserver navigation_observer;
  // Activate the error page tab again and go back.
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  tab_strip_model->ActivateTabAt(0, {TabStripModel::GestureType::kOther});
  chrome::GoBack(browser(), WindowOpenDisposition::CURRENT_TAB);
  navigation_observer.WaitForNavigations(1);
  // Going back to the working page clears the reloader state and triggers no
  // new portal check.
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   tab_strip_model->GetWebContentsAt(0)));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
  EXPECT_EQ(0, portal_observer.num_results_received());
}
// Checks that navigating back to a timeout triggers captive portal detection.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, GoBackToTimeout) {
  ASSERT_TRUE(embedded_test_server()->Start());
  // Disable captive portal detection so the first navigation doesn't open a
  // login tab.
  EnableCaptivePortalDetection(browser()->profile(), false);
  SlowLoadNoCaptivePortal(browser(), captive_portal::RESULT_INTERNET_CONNECTED);
  // Navigate to a working page.
  ASSERT_TRUE(ui_test_utils::NavigateToURL(
      browser(), embedded_test_server()->GetURL("/title2.html")));
  ASSERT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 0));
  EnableCaptivePortalDetection(browser()->profile(), true);
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  captive_portal::CaptivePortalTabReloader* tab_reloader =
      GetTabReloader(tab_strip_model->GetActiveWebContents());
  ASSERT_TRUE(tab_reloader);
  // Zero slow-load time so the portal check fires as soon as the back
  // navigation starts hanging.
  SetSlowSSLLoadTime(tab_reloader, base::TimeDelta());
  // Go to the error page.
  MultiNavigationObserver navigation_observer;
  CaptivePortalObserver portal_observer(browser()->profile());
  chrome::GoBack(browser(), WindowOpenDisposition::CURRENT_TAB);
  // Wait for the check triggered by the broken tab and for the login tab to
  // stop loading.
  portal_observer.WaitForResults(1);
  navigation_observer.WaitForNavigations(1);
  // Make sure the request has been issued.
  WaitForJobs(1);
  EXPECT_EQ(1, portal_observer.num_results_received());
  ASSERT_FALSE(CheckPending(browser()));
  ASSERT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer.captive_portal_result());
  // Tab 0 is the broken tab; tab 1 is the freshly opened, active login tab.
  ASSERT_EQ(captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL,
            GetStateOfTabReloaderAt(browser(), 0));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 1));
  ASSERT_TRUE(IsLoginTab(browser()->tab_strip_model()->GetWebContentsAt(1)));
  ASSERT_EQ(2, tab_strip_model->count());
  EXPECT_EQ(1, tab_strip_model->active_index());
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   tab_strip_model->GetWebContentsAt(1)));
  EXPECT_EQ(1, NumLoadingTabs());
  // Restore a long timeout so the post-login reload doesn't re-trigger.
  SetSlowSSLLoadTime(tab_reloader, base::Days(1));
  Login(browser(), 1 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  FailLoadsAfterLogin(browser(), 1);
}
// Checks that reloading a timeout triggers captive portal detection.
// Much like the last test, though the captive portal is disabled before
// the initial navigation, rather than captive portal detection.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, ReloadTimeout) {
  SetBehindCaptivePortal(false);
  // Do the first navigation while not behind a captive portal.
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  CaptivePortalObserver portal_observer(browser()->profile());
  ASSERT_TRUE(ui_test_utils::NavigateToURL(browser(), GURL(kMockHttpsUrl)));
  ASSERT_EQ(0, portal_observer.num_results_received());
  ASSERT_EQ(1, tab_strip_model->count());
  // A captive portal spontaneously appears.
  SetBehindCaptivePortal(true);
  captive_portal::CaptivePortalTabReloader* tab_reloader =
      GetTabReloader(tab_strip_model->GetActiveWebContents());
  ASSERT_TRUE(tab_reloader);
  // Zero slow-load time so the portal check fires as soon as the reload hangs.
  SetSlowSSLLoadTime(tab_reloader, base::TimeDelta());
  MultiNavigationObserver navigation_observer;
  tab_strip_model->GetActiveWebContents()->GetController().Reload(
      content::ReloadType::NORMAL, true);
  // Wait for the check triggered by the broken tab and for the login tab to
  // stop loading.
  portal_observer.WaitForResults(1);
  navigation_observer.WaitForNavigations(1);
  // Make sure the request has been issued.
  WaitForJobs(1);
  ASSERT_EQ(1, portal_observer.num_results_received());
  ASSERT_FALSE(CheckPending(browser()));
  ASSERT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer.captive_portal_result());
  // Tab 0 is the broken tab; tab 1 is the freshly opened, active login tab.
  ASSERT_EQ(captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL,
            GetStateOfTabReloaderAt(browser(), 0));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), 1));
  ASSERT_TRUE(IsLoginTab(tab_strip_model->GetWebContentsAt(1)));
  ASSERT_EQ(2, tab_strip_model->count());
  EXPECT_EQ(1, tab_strip_model->active_index());
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   tab_strip_model->GetWebContentsAt(1)));
  EXPECT_EQ(1, NumLoadingTabs());
  // Restore a long timeout so the post-login reload doesn't re-trigger.
  SetSlowSSLLoadTime(tab_reloader, base::Days(1));
  Login(browser(), 1 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  FailLoadsAfterLogin(browser(), 1);
}
// Checks the case where there are two windows, and there's an SSL timeout in
// the background one.
// Disabled: http://crbug.com/134357
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, DISABLED_TwoWindows) {
  Browser* browser2 =
      Browser::Create(Browser::CreateParams(browser()->profile(), true));
  // Navigate the new browser window so it'll be shown and we can pick the
  // active window.
  ASSERT_TRUE(
      ui_test_utils::NavigateToURL(browser2, GURL(url::kAboutBlankURL)));
  // Generally, |browser2| will be the active window.  However, if the
  // original browser window lost focus before creating the new one, such as
  // when running multiple tests at once, the original browser window may
  // remain the profile's active window.
  Browser* active_browser =
      chrome::FindTabbedBrowser(browser()->profile(), true);
  Browser* inactive_browser;
  if (active_browser == browser2) {
    // When only one test is running at a time, the new browser will probably be
    // on top, but when multiple tests are running at once, this is not
    // guaranteed.
    inactive_browser = browser();
  } else {
    ASSERT_EQ(active_browser, browser());
    inactive_browser = browser2;
  }
  CaptivePortalObserver portal_observer(browser()->profile());
  MultiNavigationObserver navigation_observer;
  // Navigate the tab in the inactive browser to an SSL timeout.  Have to use
  // NavigateParams and NEW_BACKGROUND_TAB to avoid activating the window.
  NavigateParams params(inactive_browser, GURL(kMockHttpsQuickTimeoutUrl),
                        ui::PAGE_TRANSITION_TYPED);
  params.disposition = WindowOpenDisposition::NEW_BACKGROUND_TAB;
  params.window_action = NavigateParams::NO_ACTION;
  ui_test_utils::NavigateToURL(&params);
  navigation_observer.WaitForNavigations(2);
  // Make sure the active window hasn't changed, and its new tab is
  // active.
  ASSERT_EQ(active_browser,
            chrome::FindTabbedBrowser(browser()->profile(), true));
  ASSERT_EQ(1, active_browser->tab_strip_model()->active_index());
  // Check that the only two navigated tabs were the new error tab in the
  // background window, and the login tab in the active window.
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   inactive_browser->tab_strip_model()->GetWebContentsAt(1)));
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(
                   active_browser->tab_strip_model()->GetWebContentsAt(1)));
  EXPECT_EQ(0, NumLoadingTabs());
  // Check captive portal test results.
  portal_observer.WaitForResults(1);
  ASSERT_EQ(captive_portal::RESULT_BEHIND_CAPTIVE_PORTAL,
            portal_observer.captive_portal_result());
  EXPECT_EQ(1, portal_observer.num_results_received());
  // Check the inactive browser.
  EXPECT_EQ(2, inactive_browser->tab_strip_model()->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(inactive_browser, 0));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_BROKEN_BY_PORTAL,
            GetStateOfTabReloaderAt(inactive_browser, 1));
  // Check the active browser.
  ASSERT_EQ(2, active_browser->tab_strip_model()->count());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(active_browser, 0));
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(active_browser, 1));
  EXPECT_TRUE(
      IsLoginTab(active_browser->tab_strip_model()->GetWebContentsAt(1)));
  // Simulate logging in.
  Login(active_browser, 0 /* num_loading_tabs */, 1 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
}
// An HTTP page redirects to an HTTPS page that loads slowly before timing
// out. A captive portal is found, and then the user logs in before the
// original page times out.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, HttpToHttpsRedirectLogin) {
  ASSERT_TRUE(embedded_test_server()->Start());
  SlowLoadBehindCaptivePortal(
      browser(), true /* expect_open_login_tab */,
      false /* expect_new_login_browser */,
      embedded_test_server()->GetURL(kRedirectToMockHttpsPath), 1, 1);
  // Log in while the redirected HTTPS load is still pending.
  Login(browser(), 1 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  FailLoadsAfterLogin(browser(), 1);
}
// An HTTPS page redirects to an HTTP page. No captive portal check is
// expected for the resulting HTTP error.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, HttpsToHttpRedirect) {
  // Use an HTTPS server for the top level page.
  net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS);
  https_server.AddDefaultHandlers(GetChromeTestDataDir());
  ASSERT_TRUE(https_server.Start());
  // The redirect points to a non-existent host, instead of using a
  // http://mock.failed.request URL, because with the network service enabled if
  // the initial URL doesn't go through URLLoaderInterceptor (because it's
  // served by the EmbeddedTestServer), then URLLoaderInterceptor (which is what
  // handles mock.failed.request URLs) wouldn't see the redirect.
  GURL http_error_url("http://doesnt.exist/");
  NavigateToPageExpectNoTest(
      browser(),
      https_server.GetURL(CreateServerRedirect(http_error_url.spec())));
}
// Tests the 511 response code, along with an HTML redirect to a login page.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, Status511) {
  // Point the captive portal service at the mock probe URL that replies with
  // HTTP 511 (Network Authentication Required).
  SetUpCaptivePortalService(browser()->profile(),
                            GURL(kMockCaptivePortal511Url));
  SlowLoadBehindCaptivePortal(browser(), true /* expect_open_login_tab */,
                              false /* expect_new_login_browser */,
                              GURL(kMockHttpsUrl), 2, 2);
  Login(browser(), 1 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  FailLoadsAfterLogin(browser(), 1);
}
// A slow SSL load starts. The reloader triggers a captive portal check, finds a
// captive portal. The SSL commits with a cert error, triggering another captive
// portal check.
// The second check finds no captive portal. The reloader triggers a reload at
// the same time SSL error handler tries to show an interstitial. Should result
// in an SSL interstitial.
// TODO(crbug.com/1271739): Flaky on all platforms.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       DISABLED_InterstitialTimerCertErrorAfterSlowLoad) {
  net::EmbeddedTestServer https_server(net::EmbeddedTestServer::TYPE_HTTPS);
  GURL cert_error_url;
  // With the network service, the request must be handled in the network
  // process as that's what triggers the NetworkServiceClient methods that
  // call out to SSLManager.
  https_server.SetSSLConfig(net::EmbeddedTestServer::CERT_MISMATCHED_NAME);
  https_server.ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  ASSERT_TRUE(https_server.Start());
  cert_error_url = https_server.GetURL(kMockHttpsBadCertPath);
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  int broken_tab_index = tab_strip_model->active_index();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  SlowLoadBehindCaptivePortal(browser(), true /* expect_open_login_tab */,
                              false /* expect_new_login_browser */,
                              cert_error_url, 1, 1);
  // No longer behind a captive portal. Committing the SSL page should trigger
  // an SSL interstitial which triggers a new captive portal check. Since there
  // is no captive portal anymore, should end up with an SSL interstitial.
  SetBehindCaptivePortal(false);
  CaptivePortalObserver portal_observer(browser()->profile());
  MultiNavigationObserver navigation_observer;
  // Build a name-mismatch certificate error and fail the pending job with it.
  net::SSLInfo info;
  info.cert_status = net::CERT_STATUS_COMMON_NAME_INVALID;
  info.cert =
      net::ImportCertFromFile(net::GetTestCertsDirectory(), "ok_cert.pem");
  info.unverified_cert = info.cert;
  FailJobsWithCertError(1, info);
  navigation_observer.WaitForNavigations(1);
  // The SSL interstitial navigation will result in the captive portal check
  // firing (and returning no captive portal), so the state will get reset to
  // none.
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE,
            GetStateOfTabReloaderAt(browser(), broken_tab_index));
  WaitForInterstitial(broken_tab_contents);
  portal_observer.WaitForResults(2);
  // The displayed interstitial should be the SSL one, not a captive portal
  // interstitial.
  EXPECT_EQ(SSLBlockingPage::kTypeForTesting,
            GetInterstitialType(broken_tab_contents));
}
// Fails on Windows only, mostly on Win7. http://crbug.com/170033
#if BUILDFLAG(IS_WIN)
#define MAYBE_SecureDnsCaptivePortal DISABLED_SecureDnsCaptivePortal
#else
#define MAYBE_SecureDnsCaptivePortal SecureDnsCaptivePortal
#endif
// With secure DNS forced to always-secure mode, the captive portal login tab
// is expected to open in a separate popup browser rather than the tabbed
// browser (expect_new_login_browser is true below).
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest, MAYBE_SecureDnsCaptivePortal) {
  // Force secure DNS with a fixed DoH template.
  PrefService* local_state = g_browser_process->local_state();
  local_state->SetString(prefs::kDnsOverHttpsMode,
                         SecureDnsConfig::kModeSecure);
  local_state->SetString(prefs::kDnsOverHttpsTemplates,
                         "https://bar.test/dns-query{?dns}");
  Browser* login_browser = nullptr;
  SlowLoadBehindCaptivePortal(browser(), true /* expect_open_login_tab */,
                              true /* expect_new_login_browser */,
                              &login_browser);
  ASSERT_TRUE(login_browser);
  FailLoadsWithoutLogin(browser(), 1, login_browser);
  // A second navigation on the same tab should not cause a new captive portal
  // window to open.
  SlowLoadBehindCaptivePortal(browser(), false /* expect_open_login_tab */,
                              false /* expect_new_login_browser */);
  // A navigation in a new tab should not cause a new captive portal window to
  // open.
  Browser* second_user_browser = CreateBrowser(browser()->profile());
  // Check that new window is visible.
  EXPECT_TRUE(second_user_browser->window()->IsVisible());
  SlowLoadBehindCaptivePortal(second_user_browser,
                              false /* expect_open_login_tab */,
                              false /* expect_new_login_browser */);
  // Check that the existing captive portal popup is visible.
  EXPECT_TRUE(login_browser->window()->IsVisible());
  // Login to the captive portal.
  Login(login_browser, 2 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
}
// Fails on Windows only, mostly on Win7. http://crbug.com/170033
#if BUILDFLAG(IS_WIN)
#define MAYBE_SecureDnsErrorTriggersCheck DISABLED_SecureDnsErrorTriggersCheck
#else
#define MAYBE_SecureDnsErrorTriggersCheck SecureDnsErrorTriggersCheck
#endif
// An HTTP load results in a secure DNS error, which triggers a captive portal
// probe that fails. After logging in, the secure DNS error happens again,
// triggering a captive portal probe that now succeeds.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       MAYBE_SecureDnsErrorTriggersCheck) {
  // Force secure DNS with a fixed DoH template.
  PrefService* local_state = g_browser_process->local_state();
  local_state->SetString(prefs::kDnsOverHttpsTemplates,
                         "https://bar.test/dns-query{?dns}");
  local_state->SetString(prefs::kDnsOverHttpsMode,
                         SecureDnsConfig::kModeSecure);
  TabStripModel* tab_strip_model = browser()->tab_strip_model();
  WebContents* broken_tab_contents = tab_strip_model->GetActiveWebContents();
  Browser* login_browser = nullptr;
  FastErrorBehindCaptivePortal(browser(), true /* expect_open_login_tab */,
                               true /* expect_new_login_browser */,
                               GURL(kMockHttpConnectionSecureDnsErr),
                               &login_browser);
  ASSERT_TRUE(login_browser);
  // The navigated tab should be displaying an error page.
  EXPECT_TRUE(broken_tab_contents->GetController()
                  .GetLastCommittedEntry()
                  ->GetPageType() == content::PAGE_TYPE_ERROR);
  // Login to the captive portal. The captive portal tab navigation will trigger
  // a captive portal check, and reloading the original tab will produce the
  // same secure DNS error, triggering a second captive portal check.
  Login(login_browser, 0 /* num_loading_tabs */, 1 /* num_timed_out_tabs */,
        2 /* expected_portal_checks */);
  // The reload of the original page should have produced another DNS error
  // page.
  EXPECT_TRUE(broken_tab_contents->GetController()
                  .GetLastCommittedEntry()
                  ->GetPageType() == content::PAGE_TYPE_ERROR);
}
// Fails on Windows only, mostly on Win7. http://crbug.com/170033
#if BUILDFLAG(IS_WIN)
#define MAYBE_SlowLoadSecureDnsErrorWithCaptivePortal \
  DISABLED_SlowLoadSecureDnsErrorWithCaptivePortal
#else
#define MAYBE_SlowLoadSecureDnsErrorWithCaptivePortal \
  SlowLoadSecureDnsErrorWithCaptivePortal
#endif
// An HTTPS load happens slowly. The reloader triggers a captive portal check,
// which finds a captive portal. The HTTPS load finally completes with a secure
// DNS error, which does not trigger another captive portal check. Only one
// login tab should exist.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       MAYBE_SlowLoadSecureDnsErrorWithCaptivePortal) {
  // Force secure DNS with a fixed DoH template.
  PrefService* local_state = g_browser_process->local_state();
  local_state->SetString(prefs::kDnsOverHttpsTemplates,
                         "https://bar.test/dns-query{?dns}");
  local_state->SetString(prefs::kDnsOverHttpsMode,
                         SecureDnsConfig::kModeSecure);
  SlowLoadBehindCaptivePortal(browser(), true /* expect_open_login_tab */,
                              true /* expect_new_login_browser */);
  // Connection finally hits a secure DNS error. No new captive portal check is
  // triggered.
  MultiNavigationObserver navigation_observer;
  FailJobs(1, net::ERR_NAME_NOT_RESOLVED,
           net::ResolveErrorInfo(net::ERR_CERT_COMMON_NAME_INVALID, true));
  navigation_observer.WaitForNavigations(1);
  WebContents* tab = browser()->tab_strip_model()->GetWebContentsAt(0);
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(tab));
  EXPECT_TRUE(tab->GetController().GetLastCommittedEntry()->GetPageType() ==
              content::PAGE_TYPE_ERROR);
  // One tabbed browser plus one login popup, with two tabs in total.
  EXPECT_EQ(2u, browser_list_->size());
  EXPECT_EQ(2, NumTabs());
}
// Fails on Windows only, more frequently on Win7. https://crbug.com/1225823
#if BUILDFLAG(IS_WIN)
#define MAYBE_SlowLoadSecureDnsErrorAfterLogin \
  DISABLED_SlowLoadSecureDnsErrorAfterLogin
#else
#define MAYBE_SlowLoadSecureDnsErrorAfterLogin SlowLoadSecureDnsErrorAfterLogin
#endif
// An HTTPS load happens slowly. The reloader triggers a captive portal check,
// which finds a captive portal. After logging in, the HTTPS load finally
// completes with a secure DNS error, which triggers another captive portal
// check that should succeed.
IN_PROC_BROWSER_TEST_F(CaptivePortalBrowserTest,
                       MAYBE_SlowLoadSecureDnsErrorAfterLogin) {
  // Force secure DNS with a fixed DoH template.
  PrefService* local_state = g_browser_process->local_state();
  local_state->SetString(prefs::kDnsOverHttpsTemplates,
                         "https://bar.test/dns-query{?dns}");
  local_state->SetString(prefs::kDnsOverHttpsMode,
                         SecureDnsConfig::kModeSecure);
  Browser* login_browser = nullptr;
  SlowLoadBehindCaptivePortal(browser(), true /* expect_open_login_tab */,
                              true /* expect_new_login_browser */,
                              &login_browser);
  ASSERT_TRUE(login_browser);
  // Login to the captive portal.
  Login(login_browser, 1 /* num_loading_tabs */, 0 /* num_timed_out_tabs */,
        1 /* expected_portal_checks */);
  // Connection finally hits a secure DNS error. It should reload without
  // sending a new captive portal check.
  MultiNavigationObserver navigation_observer;
  FailJobs(1, net::ERR_NAME_NOT_RESOLVED,
           net::ResolveErrorInfo(net::ERR_CERT_COMMON_NAME_INVALID, true));
  navigation_observer.WaitForNavigations(1);
  WebContents* tab = browser()->tab_strip_model()->GetWebContentsAt(0);
  EXPECT_EQ(1, navigation_observer.NumNavigationsForTab(tab));
  // After login, the reload should commit a normal (non-error) page.
  EXPECT_TRUE(tab->GetController().GetLastCommittedEntry()->GetPageType() ==
              content::PAGE_TYPE_NORMAL);
  EXPECT_EQ(2u, browser_list_->size());
  EXPECT_EQ(2, NumTabs());
}
// Fixture for exercising captive portal handling while a page is being
// prerendered. Provides a PrerenderTestHelper bound to the active tab and a
// convenience setter for the active tab's CaptivePortalTabReloader state.
class CaptivePortalForPrerenderingTest : public CaptivePortalBrowserTest {
 public:
  CaptivePortalForPrerenderingTest()
      : prerender_helper_(base::BindRepeating(
            &CaptivePortalForPrerenderingTest::GetWebContents,
            base::Unretained(this))) {}
  ~CaptivePortalForPrerenderingTest() override = default;

  void SetUp() override {
    prerender_helper_.SetUp(embedded_test_server());
    CaptivePortalBrowserTest::SetUp();
  }

  void SetUpOnMainThread() override {
    CaptivePortalBrowserTest::SetUpOnMainThread();
    host_resolver()->AddRule("*", "127.0.0.1");
    ASSERT_TRUE(embedded_test_server()->Start());
  }

  void TearDownOnMainThread() override {
    CaptivePortalBrowserTest::TearDownOnMainThread();
  }

  content::test::PrerenderTestHelper& prerender_helper() {
    return prerender_helper_;
  }

  // Returns the active tab of the main browser window.
  content::WebContents* GetWebContents() {
    return browser()->tab_strip_model()->GetActiveWebContents();
  }

  // Sets the state of the active tab's CaptivePortalTabReloader.
  void SetState(captive_portal::CaptivePortalTabReloader::State state) {
    captive_portal::CaptivePortalTabReloader* tab_reloader =
        GetTabReloader(GetWebContents());
    ASSERT_TRUE(tab_reloader);
    tab_reloader->SetState(state);
  }

 private:
  content::test::PrerenderTestHelper prerender_helper_;
};
// Test that CaptivePortalTabHelper doesn't allow navigating on non-primary
// trees via Did[Start|Finish]Navigation.
IN_PROC_BROWSER_TEST_F(CaptivePortalForPrerenderingTest,
                       DontFireOnLoadStartDuringPrerendering) {
  GURL initial_url = embedded_test_server()->GetURL("/empty.html");
  GURL prerender_url = embedded_test_server()->GetURL("/title1.html");
  ASSERT_NE(ui_test_utils::NavigateToURL(browser(), initial_url), nullptr);
  // Set CaptivePortalTabReloader's state to STATE_TIMER_RUNNING in order to
  // check that the state is not changed during prerendering.
  SetState(captive_portal::CaptivePortalTabReloader::STATE_TIMER_RUNNING);
  prerender_helper().AddPrerender(prerender_url);
  // Check that the state did not change during prerendering. This state would
  // be set to STATE_NONE if CaptivePortalTabReloader::OnLoadStart() were
  // called on non-SSL pages. Since the prerendering page is a non-SSL page, if
  // OnLoadStart() had fired, this state would be STATE_NONE. The
  // non-STATE_NONE state proves that CaptivePortalTabHelper does not call
  // OnLoadStart() during prerendering.
  captive_portal::CaptivePortalTabReloader::State new_state =
      GetStateOfTabReloader(GetWebContents());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_TIMER_RUNNING,
            new_state);
}
// Test that CaptivePortalTabHelper does not support the redirect navigation on
// non-primary trees.
IN_PROC_BROWSER_TEST_F(CaptivePortalForPrerenderingTest,
                       DontFireOnRedirectDuringPrerendering) {
  GURL initial_url = embedded_test_server()->GetURL("/empty.html");
  GURL redirect_url =
      embedded_test_server()->GetURL(CreateServerRedirect(initial_url.spec()));
  ASSERT_NE(ui_test_utils::NavigateToURL(browser(), initial_url), nullptr);
  // The redirect navigation on prerendering should not generate an assert.
  prerender_helper().AddPrerender(redirect_url);
  // Set CaptivePortalTabReloader's state to STATE_TIMER_RUNNING to check that
  // the state is changed after activating the prerendered page. The state
  // should become STATE_NONE because CaptivePortalTabHelper will call
  // OnRedirect() after activating.
  SetState(captive_portal::CaptivePortalTabReloader::STATE_TIMER_RUNNING);
  // Activate the prerendered page.
  prerender_helper().NavigatePrimaryPage(redirect_url);
  // Only the primary main frame supports the redirect navigation, so no crash
  // should occur after navigating the primary page with the redirect URL.
  captive_portal::CaptivePortalTabReloader::State new_state =
      GetStateOfTabReloader(GetWebContents());
  EXPECT_EQ(captive_portal::CaptivePortalTabReloader::STATE_NONE, new_state);
}
| bsd-3-clause |
chromium/chromium | extensions/common/constants.cc | 10542 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "extensions/common/constants.h"
#include "base/cxx17_backports.h"
#include "base/strings/string_piece.h"
#include "build/build_config.h"
#include "build/chromecast_buildflags.h"
#include "build/chromeos_buildflags.h"
namespace extensions {
const char kExtensionScheme[] = "chrome-extension";
const base::FilePath::CharType kManifestFilename[] =
FILE_PATH_LITERAL("manifest.json");
const base::FilePath::CharType kDifferentialFingerprintFilename[] =
FILE_PATH_LITERAL("manifest.fingerprint");
const base::FilePath::CharType kLocaleFolder[] =
FILE_PATH_LITERAL("_locales");
const base::FilePath::CharType kMessagesFilename[] =
FILE_PATH_LITERAL("messages.json");
const base::FilePath::CharType kGzippedMessagesFilename[] =
FILE_PATH_LITERAL("messages.json.gz");
const base::FilePath::CharType kPlatformSpecificFolder[] =
FILE_PATH_LITERAL("_platform_specific");
const base::FilePath::CharType kMetadataFolder[] =
FILE_PATH_LITERAL("_metadata");
const base::FilePath::CharType kVerifiedContentsFilename[] =
FILE_PATH_LITERAL("verified_contents.json");
const base::FilePath::CharType kComputedHashesFilename[] =
FILE_PATH_LITERAL("computed_hashes.json");
const base::FilePath::CharType kIndexedRulesetDirectory[] =
FILE_PATH_LITERAL("generated_indexed_rulesets");
const char kInstallDirectoryName[] = "Extensions";
const char kTempExtensionName[] = "CRX_INSTALL";
const char kDecodedMessageCatalogsFilename[] = "DECODED_MESSAGE_CATALOGS";
const char kGeneratedBackgroundPageFilename[] =
"_generated_background_page.html";
const char kModulesDir[] = "_modules";
const base::FilePath::CharType kExtensionFileExtension[] =
FILE_PATH_LITERAL(".crx");
const base::FilePath::CharType kExtensionKeyFileExtension[] =
FILE_PATH_LITERAL(".pem");
// If auto-updates are turned on, default to running every 5 hours.
const int kDefaultUpdateFrequencySeconds = 60 * 60 * 5;
const base::FilePath::CharType kLocalAppSettingsDirectoryName[] =
FILE_PATH_LITERAL("Local App Settings");
const base::FilePath::CharType kLocalExtensionSettingsDirectoryName[] =
FILE_PATH_LITERAL("Local Extension Settings");
const base::FilePath::CharType kSyncAppSettingsDirectoryName[] =
FILE_PATH_LITERAL("Sync App Settings");
const base::FilePath::CharType kSyncExtensionSettingsDirectoryName[] =
FILE_PATH_LITERAL("Sync Extension Settings");
const base::FilePath::CharType kManagedSettingsDirectoryName[] =
FILE_PATH_LITERAL("Managed Extension Settings");
const base::FilePath::CharType kStateStoreName[] =
FILE_PATH_LITERAL("Extension State");
const base::FilePath::CharType kRulesStoreName[] =
FILE_PATH_LITERAL("Extension Rules");
const base::FilePath::CharType kScriptsStoreName[] =
FILE_PATH_LITERAL("Extension Scripts");
const char kWebStoreAppId[] = "ahfgeienlihckogmohjhadlkjgocpleb";
const char kSettingsDatabaseUMAClientName[] = "Settings";
const char kRulesDatabaseUMAClientName[] = "Rules";
const char kStateDatabaseUMAClientName[] = "State";
const char kScriptsDatabaseUMAClientName[] = "Scripts";
const uint8_t kWebstoreSignaturesPublicKey[] = {
0x30, 0x82, 0x01, 0x22, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86,
0xf7, 0x0d, 0x01, 0x01, 0x01, 0x05, 0x00, 0x03, 0x82, 0x01, 0x0f, 0x00,
0x30, 0x82, 0x01, 0x0a, 0x02, 0x82, 0x01, 0x01, 0x00, 0x8f, 0xfb, 0xbf,
0x5c, 0x37, 0x63, 0x94, 0x3c, 0xb0, 0xee, 0x01, 0xc4, 0xb5, 0xa6, 0x9a,
0xb1, 0x9f, 0x46, 0x74, 0x6f, 0x16, 0x38, 0xa0, 0x32, 0x27, 0x35, 0xdd,
0xf0, 0x71, 0x6b, 0x0e, 0xdc, 0xf6, 0x25, 0xcb, 0xb2, 0xed, 0xea, 0xfb,
0x32, 0xd5, 0xaf, 0x1e, 0x03, 0x43, 0x03, 0x46, 0xf0, 0xa7, 0x39, 0xdb,
0x23, 0x96, 0x1d, 0x65, 0xe5, 0x78, 0x51, 0xf0, 0x84, 0xb0, 0x0e, 0x12,
0xac, 0x0e, 0x5b, 0xdc, 0xc9, 0xd6, 0x4c, 0x7c, 0x00, 0xd5, 0xb8, 0x1b,
0x88, 0x33, 0x3e, 0x2f, 0xda, 0xeb, 0xaa, 0xf7, 0x1a, 0x75, 0xc2, 0xae,
0x3a, 0x54, 0xde, 0x37, 0x8f, 0x10, 0xd2, 0x28, 0xe6, 0x84, 0x79, 0x4d,
0x15, 0xb4, 0xf3, 0xbd, 0x3f, 0x56, 0xd3, 0x3c, 0x3f, 0x18, 0xab, 0xfc,
0x2e, 0x05, 0xc0, 0x1e, 0x08, 0x31, 0xb6, 0x61, 0xd0, 0xfd, 0x9f, 0x4f,
0x3f, 0x64, 0x0d, 0x17, 0x93, 0xbc, 0xad, 0x41, 0xc7, 0x48, 0xbe, 0x00,
0x27, 0xa8, 0x4d, 0x70, 0x42, 0x92, 0x05, 0x54, 0xa6, 0x6d, 0xb8, 0xde,
0x56, 0x6e, 0x20, 0x49, 0x70, 0xee, 0x10, 0x3e, 0x6b, 0xd2, 0x7c, 0x31,
0xbd, 0x1b, 0x6e, 0xa4, 0x3c, 0x46, 0x62, 0x9f, 0x08, 0x66, 0x93, 0xf9,
0x2a, 0x51, 0x31, 0xa8, 0xdb, 0xb5, 0x9d, 0xb9, 0x0f, 0x73, 0xe8, 0xa0,
0x09, 0x32, 0x01, 0xe9, 0x7b, 0x2a, 0x8a, 0x36, 0xa0, 0xcf, 0x17, 0xb0,
0x50, 0x70, 0x9d, 0xa2, 0xf9, 0xa4, 0x6f, 0x62, 0x4d, 0xb6, 0xc9, 0x31,
0xfc, 0xf3, 0x08, 0x12, 0xff, 0x93, 0xbd, 0x62, 0x31, 0xd8, 0x1c, 0xea,
0x1a, 0x9e, 0xf5, 0x81, 0x28, 0x7f, 0x75, 0x5e, 0xd2, 0x27, 0x7a, 0xc2,
0x96, 0xf5, 0x9d, 0xdb, 0x18, 0xfc, 0x76, 0xdc, 0x46, 0xf0, 0x57, 0xc0,
0x58, 0x34, 0xc8, 0x22, 0x2d, 0x2a, 0x65, 0x75, 0xa7, 0xd9, 0x08, 0x62,
0xcd, 0x02, 0x03, 0x01, 0x00, 0x01};
const size_t kWebstoreSignaturesPublicKeySize =
base::size(kWebstoreSignaturesPublicKey);
const char kUpdateURLData[] = "update_url_data";
const int kMainThreadId = 0;
const char kMimeTypeJpeg[] = "image/jpeg";
const char kMimeTypePng[] = "image/png";
} // namespace extensions
namespace extension_misc {
const int kUnknownTabId = -1;
const int kUnknownWindowId = -1;
const int kCurrentWindowId = -2;
#if BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_CHROMECAST)
// The extension id for the built-in component extension.
const char kChromeVoxExtensionId[] = "mndnfokpggljbaajbnioimlmbfngpief";
#else
// The extension id for the web store extension.
const char kChromeVoxExtensionId[] = "kgejglhpjiefppelpmljglcjbhoiplfn";
#endif
const char kFeedbackExtensionId[] = "gfdkimpbcpahaombhbimeihdjnejgicl";
const char kPdfExtensionId[] = "mhjfbmdgcfjbbpaeojofohoefgiehjai";
const char kQuickOfficeComponentExtensionId[] =
"bpmcpldpdmajfigpchkicefoigmkfalc";
const char kQuickOfficeInternalExtensionId[] =
"ehibbfinohgbchlgdbfpikodjaojhccn";
const char kQuickOfficeExtensionId[] = "gbkeegbaiigmenfmjfclcdgdpimamgkj";
const char kMimeHandlerPrivateTestExtensionId[] =
"oickdpebdnfbgkcaoklfcdhjniefkcji";
const char kFilesManagerAppId[] = "hhaomjibdihmijegdhdafkllkbggdgoj";
const char kCalculatorAppId[] = "joodangkbfjnajiiifokapkpmhfnpleo";
const char kCalendarDemoAppId[] = "fpgfohogebplgnamlafljlcidjedbdeb";
const char kGmailAppId[] = "pjkljhegncpnkpknbcohdijeoejaedia";
const char kGoogleDocsDemoAppId[] = "chdaoodbokekbiiphekbfjdmiodccljl";
const char kGoogleDocsPwaAppId[] = "cepkndkdlbllfhpfhledabdcdbidehkd";
const char kGoogleDriveAppId[] = "apdfllckaahabafndbhieahigkjlhalf";
const char kGoogleMeetPwaAppId[] = "dkainijpcknoofiakgccliajhbmlbhji";
const char kGoogleSheetsDemoAppId[] = "nifkmgcdokhkjghdlgflonppnefddien";
const char kGoogleSheetsPwaAppId[] = "hcgjdbbnhkmopplfiibmdgghhdhbiidh";
const char kGoogleSlidesDemoAppId[] = "hdmobeajeoanbanmdlabnbnlopepchip";
const char kGoogleKeepAppId[] = "hmjkmjkepdijhoojdojkdfohbdgmmhki";
const char kYoutubeAppId[] = "blpcfgokakmgnkcojhhkbfbldkacnbeo";
const char kYoutubePwaAppId[] = "agimnkijcaahngcdmfeangaknmldooml";
const char kSpotifyAppId[] = "pjibgclleladliembfgfagdaldikeohf";
const char kBeFunkyAppId[] = "fjoomcalbeohjbnlcneddljemclcekeg";
const char kClipchampAppId[] = "pfepfhbcedkbjdkanpimmmdjfgoddhkg";
const char kGeForceNowAppId[] = "egmafekfmcnknbdlbfbhafbllplmjlhn";
const char kZoomAppId[] = "jldpdkiafafcejhceeincjmlkmibemgj";
const char kGoogleDocsAppId[] = "aohghmighlieiainnegkcijnfilokake";
const char kGoogleSheetsAppId[] = "felcaaldnbdncclmgdcncolpebgiejap";
const char kGoogleSlidesAppId[] = "aapocclcgogkmnckokdopfmhonfmgoek";
#if BUILDFLAG(IS_CHROMEOS_ASH)
// TODO(michaelpg): Deprecate old app IDs before adding new ones to avoid bloat.
const char kHighlightsAppId[] = "lpmakjfjcconjeehbidjclhdlpjmfjjj";
const char kHighlightsAtlasAppId[] = "gjeelkjnolfmhphfhhjokaijbicopfln";
const char kScreensaverAppId[] = "mnoijifedipmbjaoekhadjcijipaijjc";
const char kScreensaverAtlasAppId[] = "bnabjkecnachpogjlfilfcnlpcmacglh";
const char kScreensaverKraneZdksAppId[] = "fafhbhdboeiciklpkminlncemohljlkj";
const char kSigninProfileTestExtensionId[] = "mecfefiddjlmabpeilblgegnbioikfmp";
const char kGuestModeTestExtensionId[] = "behllobkkfkfnphdnhnkndlbkcpglgmj";
// Returns true if |extension_id| is one of the component apps/extensions
// listed here as implementing parts of the system UI.
bool IsSystemUIApp(base::StringPiece extension_id) {
  // clang-format off
  static const char* const kSystemUIAppIds[] = {
      kChromeVoxExtensionId,
      kFeedbackExtensionId,
      kFilesManagerAppId,
      kHighlightsAtlasAppId,
      kHighlightsAppId,
      kScreensaverAtlasAppId,
      kScreensaverAppId,
  };
  // clang-format on
  for (size_t i = 0; i < base::size(kSystemUIAppIds); ++i) {
    if (extension_id == kSystemUIAppIds[i])
      return true;
  }
  return false;
}
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
// TODO(https://crbug.com/1257275): remove after default app migration is done.
bool IsPreinstalledAppId(const std::string& app_id) {
return app_id == kGmailAppId || app_id == kGoogleDocsAppId ||
app_id == kGoogleDriveAppId || app_id == kGoogleSheetsAppId ||
app_id == kGoogleSlidesAppId || app_id == kYoutubeAppId;
}
const char kProdHangoutsExtensionId[] = "nckgahadagoaajjgafhacjanaoiihapd";
const char* const kHangoutsExtensionIds[6] = {
kProdHangoutsExtensionId,
"ljclpkphhpbpinifbeabbhlfddcpfdde", // Debug.
"ppleadejekpmccmnpjdimmlfljlkdfej", // Alpha.
"eggnbpckecmjlblplehfpjjdhhidfdoj", // Beta.
"jfjjdfefebklmdbmenmlehlopoocnoeh", // Packaged App Debug.
"knipolnnllmklapflnccelgolnpehhpl" // Packaged App Prod.
// Keep in sync with _api_features.json and _manifest_features.json.
};
// Error returned when scripting of a page is denied due to enterprise policy.
const char kPolicyBlockedScripting[] =
"This page cannot be scripted due to an ExtensionsSettings policy.";
const int kContentVerificationDefaultBlockSize = 4096;
const char kCryptotokenExtensionId[] = "kmendfapggjehodndflmmgagdbamhnfd";
const char kCryptotokenDeprecationTrialName[] = "U2FSecurityKeyAPI";
// The following two IDs are duplicated in
// //components/app_constants/constants.h. Don't change these without changing
// the others.
const char kChromeAppId[] = "mgndgikekgjfcpckkfioiadnlibdjbkf";
const char kLacrosAppId[] = "jaimifaeiicidiikhmjedcgdimealfbh";
} // namespace extension_misc
| bsd-3-clause |
felixmulder/scala | test/files/run/reflection-modulemirror-inner-badpath.scala | 528 | import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.reflect.ClassTag
// Attempts to obtain a module mirror for the inner object R through a
// ClassTag taken from an instance-dependent path. This is the "bad path"
// case: cm.reflectModule is expected to fail, and the exception message
// becomes the test's printed output.
class Foo{
  object R { override def toString = "R" }
  def foo = {
    val classTag = implicitly[ClassTag[R.type]]
    val sym = cm.moduleSymbol(classTag.runtimeClass)
    try {
      val cls = cm.reflectModule(sym)
      cls.instance
      // Reaching this line means reflection unexpectedly succeeded.
      println("this indicates a failure")
    } catch {
      case ex: Throwable =>
        // Expected path: print the failure message as the test output.
        println(ex.getMessage)
    }
  }
}
object Test extends App{
val foo = new Foo
println(foo.foo)
} | bsd-3-clause |
google-code-export/pysal | pysal/spreg/tests/test_ols.py | 4988 | import unittest
import numpy as np
import pysal
import pysal.spreg as EC
PEGP = pysal.examples.get_path
class TestBaseOLS(unittest.TestCase):
    """Regression tests pinning the numeric results of spreg's OLS
    estimators on the bundled columbus example dataset."""

    def setUp(self):
        """Load columbus data: HOVAL as y, INC and CRIME as X, plus rook
        contiguity weights built from the columbus shapefile."""
        db = pysal.open(PEGP('columbus.dbf'),'r')
        y = np.array(db.by_col("HOVAL"))
        self.y = np.reshape(y, (49,1))
        X = []
        X.append(db.by_col("INC"))
        X.append(db.by_col("CRIME"))
        self.X = np.array(X).T
        self.w = pysal.weights.rook_from_shapefile(PEGP("columbus.shp"))

    def test_ols(self):
        """Check coefficients and covariance matrix of the low-level
        BaseOLS estimator (constant column added manually)."""
        self.X = np.hstack((np.ones(self.y.shape),self.X))
        ols = EC.ols.BaseOLS(self.y,self.X)
        np.testing.assert_array_almost_equal(ols.betas, np.array([[
            46.42818268], [ 0.62898397], [ -0.48488854]]))
        # Expected variance-covariance matrix of the estimates.
        vm = np.array([[ 1.74022453e+02, -6.52060364e+00, -2.15109867e+00],
                       [ -6.52060364e+00, 2.87200008e-01, 6.80956787e-02],
                       [ -2.15109867e+00, 6.80956787e-02, 3.33693910e-02]])
        np.testing.assert_array_almost_equal(ols.vm, vm,6)

    def test_OLS(self):
        """Check the user-facing OLS class: coefficients, fit statistics,
        and the full set of diagnostics (all values are regression
        baselines previously produced by this estimator)."""
        ols = EC.OLS(self.y, self.X, self.w, spat_diag=True, moran=True, \
                name_y='home value', name_x=['income','crime'], \
                name_ds='columbus')
        np.testing.assert_array_almost_equal(ols.aic, \
            408.73548964604873 ,7)
        np.testing.assert_array_almost_equal(ols.ar2, \
            0.32123239427957662 ,7)
        np.testing.assert_array_almost_equal(ols.betas, \
            np.array([[ 46.42818268], [ 0.62898397], \
            [ -0.48488854]]), 7)
        # Heteroskedasticity diagnostics.
        bp = np.array([2, 5.7667905131212587, 0.05594449410070558])
        ols_bp = np.array([ols.breusch_pagan['df'], ols.breusch_pagan['bp'], ols.breusch_pagan['pvalue']])
        np.testing.assert_array_almost_equal(bp, ols_bp, 7)
        np.testing.assert_array_almost_equal(ols.f_stat, \
            (12.358198885356581, 5.0636903313953024e-05), 7)
        # Normality diagnostic.
        jb = np.array([2, 39.706155069114878, 2.387360356860208e-09])
        ols_jb = np.array([ols.jarque_bera['df'], ols.jarque_bera['jb'], ols.jarque_bera['pvalue']])
        np.testing.assert_array_almost_equal(ols_jb,jb, 7)
        white = np.array([5, 2.90606708, 0.71446484])
        ols_white = np.array([ols.white['df'], ols.white['wh'], ols.white['pvalue']])
        np.testing.assert_array_almost_equal(ols_white,white, 7)
        np.testing.assert_equal(ols.k, 3)
        kb = {'df': 2, 'kb': 2.2700383871478675, 'pvalue': 0.32141595215434604}
        for key in kb:
            self.assertAlmostEqual(ols.koenker_bassett[key], kb[key], 7)
        # Spatial dependence diagnostics (LM tests and Moran's I).
        np.testing.assert_array_almost_equal(ols.lm_error, \
            (4.1508117035117893, 0.041614570655392716),7)
        np.testing.assert_array_almost_equal(ols.lm_lag, \
            (0.98279980617162233, 0.32150855529063727), 7)
        np.testing.assert_array_almost_equal(ols.lm_sarma, \
            (4.3222725729143736, 0.11519415308749938), 7)
        np.testing.assert_array_almost_equal(ols.logll, \
            -201.3677448230244 ,7)
        np.testing.assert_array_almost_equal(ols.mean_y, \
            38.436224469387746,7)
        np.testing.assert_array_almost_equal(ols.moran_res[0], \
            0.20373540938,7)
        np.testing.assert_array_almost_equal(ols.moran_res[1], \
            2.59180452208,7)
        np.testing.assert_array_almost_equal(ols.moran_res[2], \
            0.00954740031251,7)
        np.testing.assert_array_almost_equal(ols.mulColli, \
            12.537554873824675 ,7)
        np.testing.assert_equal(ols.n, 49)
        # Metadata attached to the regression object.
        np.testing.assert_equal(ols.name_ds, 'columbus')
        np.testing.assert_equal(ols.name_gwk, None)
        np.testing.assert_equal(ols.name_w, 'unknown')
        np.testing.assert_equal(ols.name_x, ['CONSTANT', 'income', 'crime'])
        np.testing.assert_equal(ols.name_y, 'home value')
        np.testing.assert_array_almost_equal(ols.predy[3], np.array([
            33.53969014]),7)
        np.testing.assert_array_almost_equal(ols.r2, \
            0.34951437785126105 ,7)
        np.testing.assert_array_almost_equal(ols.rlm_error, \
            (3.3394727667427513, 0.067636278225568919),7)
        np.testing.assert_array_almost_equal(ols.rlm_lag, \
            (0.17146086940258459, 0.67881673703455414), 7)
        np.testing.assert_equal(ols.robust, 'unadjusted')
        np.testing.assert_array_almost_equal(ols.schwarz, \
            414.41095054038061,7 )
        np.testing.assert_array_almost_equal(ols.sig2, \
            231.4568494392652,7 )
        np.testing.assert_array_almost_equal(ols.sig2ML, \
            217.28602192257551,7 )
        np.testing.assert_array_almost_equal(ols.sig2n, \
            217.28602192257551, 7)
        np.testing.assert_array_almost_equal(ols.t_stat[2][0], \
            -2.65440864272,7)
        np.testing.assert_array_almost_equal(ols.t_stat[2][1], \
            0.0108745049098,7)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
sevenbitbyte/mongodb_store | mongodb_store/scripts/config_manager.py | 10274 | #!/usr/bin/env python
import roslib; roslib.load_manifest('mongodb_store')
import rospy
import sys
import os
import collections
import json
import xmlrpclib
from bson.binary import Binary
import mongodb_store.util
from mongodb_store.srv import *
from std_srvs.srv import *
import rosparam
# Bail out early if pymongo is not installed; the node cannot function
# without a working MongoDB client library.
if not mongodb_store.util.check_for_pymongo():
    sys.exit(1)

import pymongo
import pymongo.son_manipulator
# The MongoClient import location differs across pymongo versions; the
# helper resolves the correct one.
MongoClient = mongodb_store.util.import_MongoClient()
class MongoTransformer(pymongo.son_manipulator.SONManipulator):
    """SON manipulator that makes xmlrpclib.Binary values storable.

    On the way into the database, xmlrpclib.Binary objects are replaced by
    a tagged dict wrapping a BSON Binary; on the way out, those tagged
    dicts are converted back into xmlrpclib.Binary objects.
    """

    def __init__(self):
        pass

    def transform_incoming(self, son, collection):
        """Recursively encode xmlrpclib.Binary values for storage."""
        if isinstance(son, list):
            return self.transform_incoming_list(son, collection)
        if isinstance(son, xmlrpclib.Binary):
            # Tag the value so transform_outgoing can recognise it later.
            return {'__xmlrpclib_object': 'xmlrpclib.Binary',
                    'data': Binary(son.data)}
        if isinstance(son, dict):
            # Encode dictionary values in place.
            for key in son.keys():
                son[key] = self.transform_incoming(son[key], collection)
        return son

    def transform_incoming_list(self, lst, collection):
        """Encode every element of a list."""
        return [self.transform_incoming(element, collection)
                for element in lst]

    def transform_outgoing(self, son, collection):
        """Recursively decode values stored by transform_incoming."""
        if isinstance(son, list):
            return self.transform_outgoing_list(son, collection)
        if not isinstance(son, dict):
            return son
        # Decode dictionary values in place first.
        for key in son.keys():
            son[key] = self.transform_outgoing(son[key], collection)
        if "__xmlrpclib_object" not in son:
            return son
        if son["__xmlrpclib_object"] == "xmlrpclib.Binary":
            return xmlrpclib.Binary(son['data'])
        raise Exception("Unhandled xmlrpclib type.")

    def transform_outgoing_list(self, lst, collection):
        """Decode every element of a list."""
        return [self.transform_outgoing(element, collection)
                for element in lst]
class ConfigManager(object):
    """
    ROS node that manages configuration parameters backed by MongoDB.

    Parameters live in two collections of the 'config' database:
      - 'defaults': values seeded from the yaml files under the configured
        defaults path.
      - 'local': site-specific overrides written at runtime.
    On startup the defaults directory is synchronised into the DB, both
    collections are pushed onto the ROS parameter server (local values win),
    and three services are advertised: get_param, set_param and save_param.
    """
    def __init__(self):
        rospy.init_node("config_manager")
        rospy.on_shutdown(self._on_node_shutdown)
        # The datastore must be reachable before anything else can happen.
        if not mongodb_store.util.wait_for_mongo():
            sys.exit(1)
        self._mongo_client = MongoClient(rospy.get_param("mongodb_host","localhost"),
                                         int(rospy.get_param("mongodb_port")))
        self._database=self._mongo_client.config
        # Transparently (de)serialise xmlrpclib.Binary values.
        self._database.add_son_manipulator(MongoTransformer())
        # Load the default settings from the defaults/ folder
        try:
            path = rospy.get_param("~defaults_path")
            if len(path)==0:
                raise
        except:
            rospy.loginfo("Default parameters path not supplied, assuming none.")
        else:
            # Paths of the form pkg://package/sub/dir are resolved relative to
            # the named ROS package.
            if path.startswith("pkg://"):
                parts = path.split("//")
                parts=parts[1].split("/",1)
                pkg=parts[0]
                pkg_dir=parts[1]
                try:
                    path = os.path.join(roslib.packages.get_pkg_dir(pkg), pkg_dir)
                except roslib.packages.InvalidROSPkgException, e:
                    rospy.logerr("Supplied defaults path '%s' cannot be found. \n"%path +
                                 "The ROS package '%s' could not be located."%pkg)
                    sys.exit(1)
            if not os.path.isdir(path):
                rospy.logwarn("Defaults path '%s' does not exist."%path)
                sys.exit(1)
            try:
                files = os.listdir(path)
            except OSError, e:
                rospy.logerr("Can't list defaults directory %s. Check permissions."%path)
                sys.exit(1)
            defaults=[] # a list of 3-tuples, (param, val, originating_filename)
            # Flatten a nested parameter dict into '/a/b/c'-style paths.
            def flatten(d, c="", f_name="" ):
                l=[]
                for k, v in d.iteritems():
                    if isinstance(v, collections.Mapping):
                        l.extend(flatten(v,c+"/"+k, f_name))
                    else:
                        l.append((c+"/"+k, v, f_name))
                return l
            for f in files:
                # Only yaml files are treated as parameter sources.
                if not f.endswith(".yaml"):
                    continue
                params = rosparam.load_file(os.path.join(path,f))
                rospy.loginfo("Found default parameter file %s" % f)
                for p, n in params:
                    defaults.extend(flatten(p,c="",f_name=f))
            # Copy the defaults into the DB if not there already
            defaults_collection = self._database.defaults
            for param,val,filename in defaults:
                existing = defaults_collection.find_one({"path":param}, manipulate=False)
                if existing is None:
                    rospy.loginfo("New default parameter for %s"%param)
                    defaults_collection.insert({"path":param,
                                                "value":val,
                                                "from_file":filename})
                elif existing["from_file"]!=filename:
                    # The same key is claimed by two files; refuse to choose.
                    rospy.logerr("Two defaults parameter files have the same key:\n%s and %s, key %s"%
                                 (existing["from_file"],filename,param))
                    # Delete the entry so that it can be fixed...
                    defaults_collection.remove(existing)
                    rospy.signal_shutdown("Default parameter set error")
                else: #if str(existing_value) != str(val):
                    existing_value = self._database._fix_outgoing(existing['value'], defaults_collection)
                    # Character-wise comparison of the stringified values; the
                    # for/else runs the else block only when no mismatch was
                    # found, in which case equal lengths mean "unchanged".
                    for i,j in zip(str(existing_value),str(val)):
                        if i !=j:
                            break
                    else:
                        if len(str(existing_value)) == len(str(val)):
                            continue
                    rospy.loginfo("Updating stored default for %s"%param)
                    new={}
                    new.update(existing)
                    new['value']=val
                    defaults_collection.update(existing, new, manipulate=True)
        # Load the settings onto the ros parameter server
        defaults_collection = self._database.defaults
        local_collection = self._database.local
        # Defaults are only applied where no local override exists; locals
        # are applied afterwards so they always win.
        for param in defaults_collection.find():
            name=param["path"]
            val=param["value"]
            if local_collection.find_one({"path":name}) is None:
                rospy.set_param(name,val)
        for param in local_collection.find():
            name=param["path"]
            val=param["value"]
            rospy.set_param(name,val)
        # Advertise ros services for parameter setting / getting
        self._getparam_srv = rospy.Service("/config_manager/get_param",
                                           GetParam,
                                           self._getparam_srv_cb)
        self._setparam_srv = rospy.Service("/config_manager/set_param",
                                           SetParam,
                                           self._setparam_srv_cb)
        self._saveparam_srv = rospy.Service("/config_manager/save_param",
                                            SetParam,
                                            self._saveparam_srv_cb)
        #self._list_params()
        # Start the main loop
        rospy.spin()
    """
    debug function, prints out all parameters known
    """
    def _list_params(self):
        print "#"*10
        print "Defaults:"
        print
        for param in self._database.defaults.find():
            name=param["path"]
            val=param["value"]
            filename=param["from_file"]
            print name, " "*(30-len(name)),val," "*(30-len(str(val))),filename
        print
    def _on_node_shutdown(self):
        # Release the MongoDB connection on node shutdown.
        self._mongo_client.disconnect()
    # Could just use the ros parameter server to get the params
    # but one day might not back onto the parameter server...
    def _getparam_srv_cb(self,req):
        """Service callback: look a parameter up, locals before defaults."""
        response = GetParamResponse()
        config_db = self._mongo_client.config
        value = config_db.local.find_one({"path":req.param_name})
        if value is None:
            value = config_db.defaults.find_one({"path":req.param_name})
            if value is None:
                # Unknown in both collections.
                response.success=False
                return response
        response.success=True
        response.param_value=str(value["value"])
        return response
    """
    Set the local site-specific parameter.
    """
    def _setparam_srv_cb(self,req):
        """Service callback: upsert a local parameter from a JSON spec."""
        print ("parse json")
        new = json.loads(req.param)
        # The spec must carry both the parameter path and its value.
        if not (new.has_key("path") and new.has_key("value")):
            rospy.logerr("Trying to set parameter but not giving full spec")
            return SetParamResponse(False)
        config_db_local = self._database.local
        value = config_db_local.find_one({"path":new["path"]}, manipulate=False)
        if value is None:
            # insert it
            config_db_local.insert(new)
        else:
            # update it
            new['_id']=value['_id']
            config_db_local.update(value,new, manipulate=True)
            pass
        return SetParamResponse(True)
    # This will take the current value from the rosparam server and save it into the DB
    def _saveparam_srv_cb(self,req):
        """Service callback: persist a rosparam server value into 'local'."""
        if not rospy.has_param(req.param):
            rospy.logerr("Trying to set a parameter from ros parameter server, but it is not on server.")
            return SetParamResponse(False)
        val=rospy.get_param(req.param)
        new={}
        new['path']=str(req.param)
        new['value']=val
        config_db_local = self._database.local
        value = config_db_local.find_one({"path":new["path"]}, manipulate=False)
        if value is None:
            # insert it
            config_db_local.insert(new)
        else:
            # update it
            new['_id']=value['_id']
            config_db_local.update(value, new, manipulate=True)
        return SetParamResponse(True)
if __name__ == '__main__':
    # Constructing the node blocks in rospy.spin() until shutdown.
    server = ConfigManager()
| bsd-3-clause |
lanselin/pysal | pysal/network/network.py | 42024 | from collections import defaultdict, OrderedDict
import math
import os
import cPickle
import copy
import numpy as np
import pysal as ps
from pysal.weights.util import get_ids
from analysis import NetworkG, NetworkK, NetworkF
import util
__all__ = ["Network", "PointPattern", "NetworkG", "NetworkK", "NetworkF"]
class Network:
"""
Spatially constrained network representation and analytical functionality.
Parameters
-----------
in_shp: str
The input shapefile. This must be in .shp format.
node_sig: int
Round the x and y coordinates of all nodes to node_sig significant
digits (combined significant digits on the left and right
of the decimal place)
-- Default is 11
-- Set to None for no rounding
unique_segs: bool
If True (default), keep only unique segments (i.e., prune out any
duplicated segments).
If False keep all segments.
Attributes
----------
in_shp: str
The input shapefile. This must be in .shp format.
adjacencylist: list
List of lists storing node adjacency.
nodes: dict
Keys are tuples of node coords and values are the node ID.
edge_lengths: dict
Keys are tuples of sorted node IDs representing an edge and values are
the length.
pointpatterns: dict
Keys are a string name of the pattern and values are point pattern
class instances.
node_coords: dict
Keys are the node ID and values are the (x,y) coordinates inverse
to nodes.
edges: list
List of edges, where each edge is a sorted tuple of node IDs.
node_list: list
List of node IDs.
alldistances: dict
Keys are the node IDs.
Values are tuples with two elements:
1. A list of the shortest path distances
2. A dict with the key being the id of the destination node and
the value being a list of the shortest path.
Examples
--------
Instantiate an instance of a network.
>>> ntw = ps.Network(ps.examples.get_path('streets.shp'))
Snap point observations to the network with attribute information.
>>> ntw.snapobservations(ps.examples.get_path('crimes.shp'), 'crimes', attribute=True)
And without attribute information.
>>> ntw.snapobservations(ps.examples.get_path('schools.shp'), 'schools', attribute=False)
"""
    def __init__(self, in_shp=None, node_sig=11, unique_segs=True):
        """
        Build the network representation from a polyline shapefile.

        When ``in_shp`` is None an empty shell is created instead; this is
        used internally (e.g. ``segment_edges`` builds a Network() and fills
        its attributes by hand).
        """
        if in_shp:
            self.in_shp = in_shp
            self.node_sig = node_sig
            self.unique_segs = unique_segs
            self.adjacencylist = defaultdict(list)
            self.nodes = {}
            self.edge_lengths = {}
            self.edges = []
            self.pointpatterns = {}
            self._extractnetwork()
            # Inverse of self.nodes: node id -> (x, y) coordinate.
            self.node_coords = dict((value, key) for key, value in self.nodes.iteritems())
            # This is a spatial representation of the network.
            self.edges = sorted(self.edges)
            # Extract the graph.
            self.extractgraph()
            self.node_list = sorted(self.nodes.values())
def _round_sig(self, v):
"""
Used internally to round the vertex to a set number of significant digits. If sig
is set to 4, then the following are some possible results for a coordinate:
0.0xxxx, 0.xxxx, x.xxx, xx.xx, xxx.x, xxxx.0, xxxx0.0
"""
sig = self.node_sig
if sig is None:
return v
out_v = [val if 0 \
else round(val, -int(math.floor(math.log10(math.fabs(val)))) +\
(sig-1)) \
for val in v]
return tuple(out_v)
def _extractnetwork(self):
"""
Used internally, to extract a network from a polyline shapefile.
"""
nodecount = 0
shps = ps.open(self.in_shp)
for shp in shps:
vertices = shp.vertices
for i, v in enumerate(vertices[:-1]):
v = self._round_sig(v)
try:
vid = self.nodes[v]
except:
self.nodes[v] = vid = nodecount
nodecount += 1
v2 = self._round_sig(vertices[i+1])
try:
nvid = self.nodes[v2]
except:
self.nodes[v2] = nvid = nodecount
nodecount += 1
self.adjacencylist[vid].append(nvid)
self.adjacencylist[nvid].append(vid)
# Sort the edges so that mono-directional keys can be stored.
edgenodes = sorted([vid, nvid])
edge = tuple(edgenodes)
self.edges.append(edge)
length = util.compute_length(v, vertices[i+1])
self.edge_lengths[edge] = length
if self.unique_segs == True:
# Remove duplicate edges and duplicate adjacent nodes.
self.edges = list(set(self.edges))
for k, v in self.adjacencylist.iteritems():
self.adjacencylist[k] = list(set(v))
    def extractgraph(self):
        """
        Using the existing network representation, create a graph based representation by
        removing all nodes with a neighbor incidence of two. That is, we assume these
        nodes are bridges between nodes with higher incidence.

        Populates ``self.graphedges``, ``self.graph_lengths`` and
        ``self.graph_to_edges`` (spatial edge -> collapsed graph edge).
        """
        self.graphedges = []
        # NOTE(review): edge_to_graph appears unused after initialisation;
        # graph_to_edges (below) is the mapping actually populated.
        self.edge_to_graph = {}
        self.graph_lengths = {}
        # Find all nodes with cardinality 2.
        segment_nodes = []
        for k, v in self.adjacencylist.iteritems():
            #len(v) == 1 #cul-de-sac
            #len(v) == 2 #bridge segment
            #len(v) > 2 #intersection
            if len(v) == 2:
                segment_nodes.append(k)
        # Start with a copy of the spatial representation and iteratively remove edges
        # deemed to be segments.
        self.graphedges = copy.deepcopy(self.edges)
        self.graph_lengths = copy.deepcopy(self.edge_lengths)
        # Mapping all the edges contained within a single graph represented edge.
        self.graph_to_edges = {}
        # Grow each degree-2 node into a maximal chain ("bridge") of
        # connected degree-2 nodes.
        bridges = []
        for s in segment_nodes:
            bridge = [s]
            neighbors = self._yieldneighbor(s, segment_nodes, bridge)
            while neighbors:
                cnode = neighbors.pop()
                segment_nodes.remove(cnode)
                bridge.append(cnode)
                newneighbors = self._yieldneighbor(cnode, segment_nodes, bridge)
                neighbors += newneighbors
            bridges.append(bridge)
        for bridge in bridges:
            if len(bridge) == 1:
                # Single bridge node: merge its two incident edges into one.
                n = self.adjacencylist[bridge[0]]
                newedge = tuple(sorted([n[0], n[1]]))
                # Identify the edges to be removed.
                e1 = tuple(sorted([bridge[0], n[0]]))
                e2 = tuple(sorted([bridge[0], n[1]]))
                # Remove them from the graph.
                self.graphedges.remove(e1)
                self.graphedges.remove(e2)
                # Remove from the edge lengths.
                length_e1 = self.edge_lengths[e1]
                length_e2 = self.edge_lengths[e2]
                self.graph_lengths.pop(e1, None)
                self.graph_lengths.pop(e2, None)
                self.graph_lengths[newedge] = length_e1 + length_e2
                # Update the pointers.
                self.graph_to_edges[e1] = newedge
                self.graph_to_edges[e2] = newedge
            else:
                # Multi-node bridge: the new graph edge runs between the two
                # non-bridge endpoints; every interior edge is redundant.
                cumulative_length = 0
                startend = {}
                redundant = set([])
                for b in bridge:
                    for n in self.adjacencylist[b]:
                        if n not in bridge:
                            startend[b] = n
                        else:
                            redundant.add(tuple(sorted([b,n])))
                newedge = tuple(sorted(startend.values()))
                for k, v in startend.iteritems():
                    redundant.add(tuple(sorted([k,v])))
                for r in redundant:
                    self.graphedges.remove(r)
                    cumulative_length += self.edge_lengths[r]
                    self.graph_lengths.pop(r, None)
                    self.graph_to_edges[r] = newedge
                self.graph_lengths[newedge] = cumulative_length
                self.graphedges.append(newedge)
        self.graphedges = sorted(self.graphedges)
def _yieldneighbor(self, node, segment_nodes, bridge):
"""
Used internally, this method traverses a bridge segement to find the source and
destination nodes.
"""
n = []
for i in self.adjacencylist[node]:
if i in segment_nodes and i not in bridge:
n.append(i)
return n
def contiguityweights(self, graph=True, weightings=None):
"""
Create a contiguity based W object
Parameters
----------
graph: bool
{True, False} controls whether the W is generated using the spatial
representation or the graph representation.
weightings: dict
Dict of lists of weightings for each edge.
Returns
-------
W: object
A PySAL W Object representing the binary adjacency of the network.
Examples
--------
>>> ntw = ps.Network(ps.examples.get_path('streets.shp'))
>>> w = ntw.contiguityweights(graph=False)
>>> ntw.snapobservations(ps.examples.get_path('crimes.shp'), 'crimes', attribute=True)
>>> counts = ntw.count_per_edge(ntw.pointpatterns['crimes'].obs_to_edge, graph=False)
Using the W object, access to ESDA functionality is provided. First,
a vector of attributes is created for all edges with observations.
>>> w = ntw.contiguityweights(graph=False)
>>> edges = w.neighbors.keys()
>>> y = np.zeros(len(edges))
>>> for i, e in enumerate(edges):
... if e in counts.keys():
... y[i] = counts[e]
Next, a standard call ot Moran is made and the result placed into `res`
>>> res = ps.esda.moran.Moran(y, w, permutations=99)
"""
neighbors = {}
neighbors = OrderedDict()
if graph:
edges = self.graphedges
else:
edges = self.edges
if weightings:
weights = {}
else:
weights = None
for key in edges:
neighbors[key] = []
if weightings:
weights[key] = []
for neigh in edges:
if key == neigh:
continue
if key[0] == neigh[0] or key[0] == neigh[1] or key[1] == neigh[0]\
or key[1] == neigh[1]:
neighbors[key].append(neigh)
if weightings:
weights[key].append(weightings[neigh])
# TODO: Add a break condition - everything is sorted, so we know when we
# have stepped beyond a possible neighbor.
#if key[1] > neigh[1]: #NOT THIS
#break
return ps.weights.W(neighbors, weights=weights)
def distancebandweights(self, threshold):
"""
Create distance based weights
Parameters
----------
threshold: float
Distance threshold value.
"""
try:
hasattr(self.alldistances)
except:
self.node_distance_matrix()
neighbor_query = np.where(self.distancematrix < threshold)
neighbors = defaultdict(list)
for i, n in enumerate(neighbor_query[0]):
neigh = neighbor_query[1][i]
if n != neigh:
neighbors[n].append(neighbor_query[1][i])
return ps.weights.W(neighbors)
def snapobservations(self, shapefile, name, idvariable=None, attribute=None):
"""
Snap a point pattern shapefile to this network object. The point pattern is
stored in the network.pointpattern['key'] attribute of the network object.
Parameters
----------
shapefile: str
The path to the shapefile.
name: str
Name to be assigned to the point dataset.
idvariable: str
Column name to be used as ID variable.
attribute: bool
Defines whether attributes should be extracted.
True for attribute extraction.
False for no attribute extraaction.
Returns
-------
"""
self.pointpatterns[name] = PointPattern(shapefile, idvariable=idvariable, attribute=attribute)
self._snap_to_edge(self.pointpatterns[name])
def compute_distance_to_nodes(self, x, y, edge):
"""
Given an observation on a network edge, return the distance to the two nodes that
bound that end.
Parameters
----------
x: float
x-coordinate of the snapped point.
y: float
y-coordiante of the snapped point.
edge: tuple
(node0, node1) representation of the network edge.
Returns
-------
d1: float
The distance to node0.
- always the node with the lesser id
d2: float
The distance to node1.
- always the node with the greater id
"""
d1 = util.compute_length((x,y), self.node_coords[edge[0]])
d2 = util.compute_length((x,y), self.node_coords[edge[1]])
return d1, d2
    def _snap_to_edge(self, pointpattern):
        """
        Used internally to snap point observations to network edges.

        Parameters
        -----------
        pointpattern: object
            PySAL Point Pattern Object

        Side effects
        ------------
        Sets ``pointpattern.snapped_coordinates`` (pt id -> (x, y)),
        ``pointpattern.obs_to_edge`` (edge -> {pt id: (x, y)}),
        ``pointpattern.dist_to_node`` (pt id -> {node id: distance}) and
        ``pointpattern.obs_to_node`` (node id -> list of pt ids).
        """
        obs_to_edge = {}
        dist_to_node = {}
        pointpattern.snapped_coordinates = {}
        # Build one Chain per edge plus a lookup from (head, tail) geometry
        # back to the edge's node-id tuple.
        segments = []
        s2e = {}
        for edge in self.edges:
            head = self.node_coords[edge[0]]
            tail = self.node_coords[edge[1]]
            segments.append(ps.cg.Chain([head,tail]))
            s2e[(head,tail)] = edge
        points = {}
        # NOTE(review): p2id appears to be unused.
        p2id = {}
        for pointIdx, point in pointpattern.points.iteritems():
            points[pointIdx] = point['coordinates']
        snapped = util.snapPointsOnSegments(points, segments)
        for pointIdx, snapInfo in snapped.iteritems():
            # snapInfo is (segment vertices, snapped coordinate array).
            x,y = snapInfo[1].tolist()
            edge = s2e[tuple(snapInfo[0])]
            if edge not in obs_to_edge:
                obs_to_edge[edge] = {}
            obs_to_edge[edge][pointIdx] = (x,y)
            pointpattern.snapped_coordinates[pointIdx] = (x,y)
            d1,d2 = self.compute_distance_to_nodes(x, y, edge)
            dist_to_node[pointIdx] = {edge[0]:d1, edge[1]:d2}
        # Every point on an edge is associated with both of its end nodes.
        obs_to_node = defaultdict(list)
        for k, v in obs_to_edge.iteritems():
            keys = v.keys()
            obs_to_node[k[0]] = keys
            obs_to_node[k[1]] = keys
        pointpattern.obs_to_edge = obs_to_edge
        pointpattern.dist_to_node = dist_to_node
        pointpattern.obs_to_node = obs_to_node
def count_per_edge(self, obs_on_network, graph=True):
"""
Compute the counts per edge.
Parameters
----------
obs_on_network: dict
Dict of observations on the network.
{(edge):{pt_id:(coords)}} or {edge:[(coord),(coord),(coord)]}
Returns
-------
counts: dict
{(edge):count}
Example
-------
Note that this passes the obs_to_edge attribute of a point pattern snapped to the
network.
>>> ntw = ps.Network(ps.examples.get_path('streets.shp'))
>>> ntw.snapobservations(ps.examples.get_path('crimes.shp'), 'crimes', attribute=True)
>>> counts = ntw.count_per_edge(ntw.pointpatterns['crimes'].obs_to_edge,graph=False)
>>> s = sum([v for v in counts.itervalues()])
>>> s
287
"""
counts = {}
if graph:
for key, observations in obs_on_network.iteritems():
cnt = len(observations)
if key in self.graph_to_edges.keys():
key = self.graph_to_edges[key]
try:
counts[key] += cnt
except:
counts[key] = cnt
else:
for key in obs_on_network.iterkeys():
counts[key] = len(obs_on_network[key])
return counts
def _newpoint_coords(self, edge, distance):
"""
Used internally to compute new point coordinates during snapping.
"""
x1 = self.node_coords[edge[0]][0]
y1 = self.node_coords[edge[0]][1]
x2 = self.node_coords[edge[1]][0]
y2 = self.node_coords[edge[1]][1]
if x1 == x2: # Vertical line case
x0 = x1
if y1 < y2:
y0 = y1 + distance
elif y1 > y2:
y0 = y2 + distance
else: # Zero length edge
y0 = y1
return x0, y0
m = (y2 - y1) / (x2 - x1)
if x1 > x2:
x0 = x1 - distance / math.sqrt(1 + m**2)
elif x1 < x2:
x0 = x1 + distance / math.sqrt(1 + m**2)
y0 = m * (x0 - x1) + y1
return x0, y0
def simulate_observations(self, count, distribution='uniform'):
"""
Generate a simulated point pattern on the network.
Parameters
----------
count: int
The number of points to create or mean of the distribution if not
'uniform'.
distribution: str
{'uniform', 'poisson'} distribution of random points.
Returns
-------
random_pts: dict
Keys are the edge tuple.
Value are a list of new point coordinates.
Example
-------
>>> ntw = ps.Network(ps.examples.get_path('streets.shp'))
>>> ntw.snapobservations(ps.examples.get_path('crimes.shp'), 'crimes', attribute=True)
>>> npts = ntw.pointpatterns['crimes'].npoints
>>> sim = ntw.simulate_observations(npts)
>>> isinstance(sim, ps.network.network.SimulatedPointPattern)
True
"""
simpts = SimulatedPointPattern()
# Cumulative Network Length.
edges = []
lengths = np.zeros(len(self.edge_lengths))
for i, key in enumerate(self.edge_lengths.iterkeys()):
edges.append(key)
lengths[i] = self.edge_lengths[key]
stops = np.cumsum(lengths)
totallength = stops[-1]
if distribution is 'uniform':
nrandompts = np.random.uniform(0, totallength, size=(count,))
elif distribution is 'poisson':
nrandompts = np.random.uniform(0, totallength, size=(np.random.poisson(count),))
for i, r in enumerate(nrandompts):
idx = np.where(r < stops)[0][0]
assignment_edge = edges[idx]
distance_from_start = stops[idx] - r
# Populate the coordinates dict.
x0, y0 = self._newpoint_coords(assignment_edge, distance_from_start)
simpts.snapped_coordinates[i] = (x0, y0)
simpts.obs_to_node[assignment_edge[0]].append(i)
simpts.obs_to_node[assignment_edge[1]].append(i)
# Populate the distance to node.
simpts.dist_to_node[i] = {assignment_edge[0] : distance_from_start,
assignment_edge[1] : self.edge_lengths[edges[idx]] - distance_from_start}
simpts.points = simpts.snapped_coordinates
simpts.npoints = len(simpts.points)
return simpts
def enum_links_node(self, v0):
"""
Returns the edges (links) around node
Parameters
-----------
v0: int
Node id
Returns
-------
links: list
List of tuple edges adjacent to the node.
"""
links = []
neighbornodes = self.adjacencylist[v0]
for n in neighbornodes:
links.append(tuple(sorted([n, v0])))
return links
    def node_distance_matrix(self):
        """
        Compute shortest-path distances between every pair of network nodes.

        Populates ``self.alldistances`` (node id -> (distance list, tree
        placeholder)) and ``self.distancematrix`` (nnodes x nnodes ndarray).
        """
        self.alldistances = {}
        nnodes = len(self.node_list)
        self.distancematrix = np.empty((nnodes, nnodes))
        for node in self.node_list:
            # n=inf makes Dijkstra return distances to every reachable node.
            distance, pred = util.dijkstra(self, self.edge_lengths, node, n=float('inf'))
            pred = np.array(pred)
            #tree = util.generatetree(pred) <---- something to look at in the future
            tree = None
            self.alldistances[node] = (distance, tree)
            self.distancematrix[node] = distance
def allneighbordistances(self, sourcepattern, destpattern=None, fill_diagonal=None):
"""
Compute either all distances between i and j in a single point pattern or all
distances between each i from a source pattern and all j from a destination pattern.
Parameters
----------
sourcepattern: str
The key of a point pattern snapped to the network.
destpattern: str
(Optional) The key of a point pattern snapped to the network.
fill_diagonal: float, int
(Optional) Fill the diagonal of the cost matrix.
Default in None and will populate the diagonal with numpy.nan
Do not declare a destpattern for a custom fill_diagonal.
Returns
-------
nearest: array (n,n)
An array of shape (n,n) storing distances between all points.
"""
if not hasattr(self,'alldistances'):
self.node_distance_matrix()
# Source setup
src_indices = sourcepattern.points.keys()
nsource_pts = len(src_indices)
src_dist_to_node = sourcepattern.dist_to_node
src_nodes = {}
for s in src_indices:
e1, e2 = src_dist_to_node[s].keys()
src_nodes[s] = (e1, e2)
# Destination setup
symmetric = False
if destpattern is None:
symmetric = True
destpattern = sourcepattern
dest_indices = destpattern.points.keys()
ndest_pts = len(dest_indices)
dest_dist_to_node = destpattern.dist_to_node
dest_searchpts = copy.deepcopy(dest_indices)
dest_nodes = {}
for s in dest_indices:
e1, e2 = dest_dist_to_node[s].keys()
dest_nodes[s] = (e1, e2)
# Output setup
nearest = np.empty((nsource_pts, ndest_pts))
nearest[:] = np.inf
for p1 in src_indices:
# Get the source nodes and dist to source nodes.
source1, source2 = src_nodes[p1]
set1 = set(src_nodes[p1])
# Distance from node1 to p, distance from node2 to p.
sdist1, sdist2 = src_dist_to_node[p1].values()
if symmetric:
# Only compute the upper triangle if symmetric.
dest_searchpts.remove(p1)
for p2 in dest_searchpts:
dest1, dest2 = dest_nodes[p2]
set2 = set(dest_nodes[p2])
if set1 == set2: # same edge
x1,y1 = sourcepattern.snapped_coordinates[p1]
x2,y2 = destpattern.snapped_coordinates[p2]
xd = x1-x2
yd = y1-y2
nearest[p1,p2] = np.sqrt(xd*xd + yd*yd)
else:
ddist1, ddist2 = dest_dist_to_node[p2].values()
d11 = self.alldistances[source1][0][dest1]
d21 = self.alldistances[source2][0][dest1]
d12 = self.alldistances[source1][0][dest2]
d22 = self.alldistances[source2][0][dest2]
# Find the shortest distance from the path passing through each of the
# two origin nodes to the first destination node.
sd_1 = d11 + sdist1
sd_21 = d21 + sdist2
if sd_1 > sd_21:
sd_1 = sd_21
# Now add the point to node one distance on the destination edge.
len_1 = sd_1 + ddist1
# Repeat the prior but now for the paths entering at the second node
# of the second edge.
sd_2 = d12 + sdist1
sd_22 = d22 + sdist2
b = 0
if sd_2 > sd_22:
sd_2 = sd_22
b = 1
len_2 = sd_2 + ddist2
# Now find the shortest distance path between point 1 on edge 1 and
# point 2 on edge 2, and assign.
sp_12 = len_1
if len_1 > len_2:
sp_12 = len_2
nearest[p1, p2] = sp_12
if symmetric:
# Mirror the upper and lower triangle when symmetric.
nearest[p2,p1] = nearest[p1,p2]
# Populate the main diagonal when symmetric.
if symmetric:
if fill_diagonal == None:
np.fill_diagonal(nearest, np.nan)
else:
np.fill_diagonal(nearest, fill_diagonal)
return nearest
def nearestneighbordistances(self, sourcepattern, destpattern=None):
"""
Compute the interpattern nearest neighbor distances or the intrapattern
nearest neighbor distances between a source pattern and a destination pattern.
Parameters
----------
sourcepattern: str
The key of a point pattern snapped to the network.
destpattern: str
(Optional) The key of a point pattern snapped to the network.
Returns
-------
nearest: ndarray (n,2)
With column[:,0] containing the id of the nearest neighbor and
column [:,1] containing the distance.
"""
if not sourcepattern in self.pointpatterns.keys():
raise KeyError("Available point patterns are {}".format(self.pointpatterns.keys()))
if not hasattr(self,'alldistances'):
self.node_distance_matrix()
pt_indices = self.pointpatterns[sourcepattern].points.keys()
dist_to_node = self.pointpatterns[sourcepattern].dist_to_node
nearest = np.zeros((len(pt_indices), 2), dtype=np.float32)
nearest[:,1] = np.inf
if destpattern == None:
destpattern = sourcepattern
searchpts = copy.deepcopy(pt_indices)
searchnodes = {}
for s in searchpts:
e1, e2 = dist_to_node[s].keys()
searchnodes[s] = (e1, e2)
for p1 in pt_indices:
# Get the source nodes and dist to source nodes.
source1, source2 = searchnodes[p1]
sdist1, sdist2 = dist_to_node[p1].values()
searchpts.remove(p1)
for p2 in searchpts:
dest1, dest2 = searchnodes[p2]
ddist1, ddist2 = dist_to_node[p2].values()
source1_to_dest1 = sdist1 + self.alldistances[source1][0][dest1] + ddist1
source1_to_dest2 = sdist1 + self.alldistances[source1][0][dest2] + ddist2
source2_to_dest1 = sdist2 + self.alldistances[source2][0][dest1] + ddist1
source2_to_dest2 = sdist2 + self.alldistances[source2][0][dest2] + ddist2
if source1_to_dest1 < nearest[p1, 1]:
nearest[p1, 0] = p2
nearest[p1, 1] = source1_to_dest1
if source1_to_dest1 < nearest[p2, 1]:
nearest[p2, 0] = p1
nearest[p2, 1] = source1_to_dest1
if source1_to_dest2 < nearest[p1, 1]:
nearest[p1, 0] = p2
nearest[p1, 1] = source1_to_dest2
if source1_to_dest1 < nearest[p2, 1]:
nearest[p2, 0] = p1
nearest[p2, 1] = source1_to_dest2
if source2_to_dest1 < nearest[p1, 1]:
nearest[p1, 0] = p2
nearest[p1, 1] = source2_to_dest1
if source2_to_dest1 < nearest[p2, 1]:
nearest[p2, 0] = p1
nearest[p2, 1] = source2_to_dest1
if source2_to_dest2 < nearest[p1, 1]:
nearest[p1, 0] = p2
nearest[p1, 1] = source2_to_dest2
if source2_to_dest2 < nearest[p2, 1]:
nearest[p2, 0] = p1
nearest[p2, 1] = source2_to_dest2
return nearest
    def NetworkF(self, pointpattern, nsteps=10, permutations=99,
                 threshold=0.2, distribution='uniform',
                 lowerbound=None, upperbound=None):
        """
        Computes a network constrained F-Function

        Parameters
        ----------
        pointpattern: object
            A PySAL point pattern object.
        nsteps: int
            The number of steps at which the count of the nearest neighbors
            is computed.
        permutations: int
            The number of permutations to perform (default 99).
        threshold: float
            The level at which significance is computed (default 0.2).
            -- 0.5 would be 97.5% and 2.5%
        distribution: str
            The distribution from which random points are sampled:
            -- uniform or poisson
        lowerbound: float
            The lower bound at which the F-function is computed. (Default 0)
        upperbound: float
            The upper bound at which the F-function is computed.
            Defaults to the maximum observed nearest neighbor distance.

        Returns
        -------
        NetworkF: object
            A network F class instance.
        """
        # Delegates to the NetworkF analysis class (see analysis module).
        return NetworkF(self, pointpattern, nsteps=nsteps,
                        permutations=permutations,threshold=threshold,
                        distribution=distribution,lowerbound=lowerbound,
                        upperbound=upperbound)
    def NetworkG(self, pointpattern, nsteps=10, permutations=99,
                 threshold=0.5, distribution='uniform',
                 lowerbound=None, upperbound=None):
        """
        Computes a network constrained G-Function

        Parameters
        ----------
        pointpattern: object
            A PySAL point pattern object.
        nsteps: int
            The number of steps at which the count of the nearest neighbors
            is computed.
        permutations: int
            The number of permutations to perform (default 99).
        threshold: float
            The level at which significance is computed (default 0.5).
            -- 0.5 would be 97.5% and 2.5%
        distribution: str
            The distribution from which random points are sampled:
            -- uniform or poisson
        lowerbound: float
            The lower bound at which the G-function is computed. (Default 0)
        upperbound: float
            The upper bound at which the G-function is computed.
            Defaults to the maximum observed nearest neighbor distance.

        Returns
        -------
        NetworkG: object
            A network G class instance.
        """
        # Delegates to the NetworkG analysis class (see analysis module).
        return NetworkG(self, pointpattern, nsteps=nsteps,
                        permutations=permutations,threshold=threshold,
                        distribution=distribution,lowerbound=lowerbound,
                        upperbound=upperbound)
    def NetworkK(self, pointpattern, nsteps=10, permutations=99,
                 threshold=0.5, distribution='uniform',
                 lowerbound=None, upperbound=None):
        """
        Computes a network constrained K-Function

        Parameters
        ----------
        pointpattern: object
            A PySAL point pattern object.
        nsteps: int
            The number of steps at which the count of the nearest neighbors
            is computed.
        permutations: int
            The number of permutations to perform (default 99).
        threshold: float
            The level at which significance is computed (default 0.5).
            -- 0.5 would be 97.5% and 2.5%
        distribution: str
            The distribution from which random points are sampled:
            -- uniform or poisson
        lowerbound: float
            The lower bound at which the K-function is computed. (Default 0)
        upperbound: float
            The upper bound at which the K-function is computed.
            Defaults to the maximum observed nearest neighbor distance.

        Returns
        -------
        NetworkK: object
            A network K class instance.
        """
        # Delegates to the NetworkK analysis class (see analysis module).
        return NetworkK(self, pointpattern, nsteps=nsteps,
                        permutations=permutations,threshold=threshold,
                        distribution=distribution,lowerbound=lowerbound,
                        upperbound=upperbound)
    def segment_edges(self, distance):
        """
        Segment all of the edges in the network at either a fixed distance or a fixed
        number of segments.

        Parameters
        -----------
        distance: float
            The distance at which edges are split.

        Returns
        -------
        sn: object
            PySAL Network Object.

        Example
        -------
        >>> ntw = ps.Network(ps.examples.get_path('streets.shp'))
        >>> n200 = ntw.segment_edges(200.0)
        >>> len(n200.edges)
        688
        """
        sn = Network()
        # Deep-copy all topology so the original network is left untouched.
        sn.adjacencylist = copy.deepcopy(self.adjacencylist)
        sn.edge_lengths = copy.deepcopy(self.edge_lengths)
        sn.edges = set(copy.deepcopy(self.edges))
        sn.node_coords = copy.deepcopy(self.node_coords)
        sn.node_list = copy.deepcopy(self.node_list)
        sn.nodes = copy.deepcopy(self.nodes)
        sn.pointpatterns = copy.deepcopy(self.pointpatterns)
        sn.in_shp = self.in_shp

        # New split-point nodes get ids continuing after the current maximum.
        current_node_id = max(self.nodes.values())

        newedges = set()
        removeedges = set()
        for e in sn.edges:
            length = sn.edge_lengths[e]
            interval = distance

            totallength = 0
            currentstart = startnode = e[0]
            endnode = e[1]

            # If the edge will be segmented remove the current edge from the adjacency list.
            if interval < length:
                sn.adjacencylist[e[0]].remove(e[1])
                sn.adjacencylist[e[1]].remove(e[0])
                sn.edge_lengths.pop(e, None)
                removeedges.add(e)
            else:
                # Edge is shorter than the split interval: leave it as-is.
                continue

            # Walk along the edge, emitting one sub-edge per interval.
            while totallength < length:
                currentstop = current_node_id
                if totallength + interval > length:
                    # Final (possibly short) segment ends at the original endnode,
                    # which already has coordinates.
                    currentstop = endnode
                    interval = length - totallength
                    totallength = length
                else:
                    current_node_id += 1
                    currentstop = current_node_id
                    totallength += interval

                    # Compute the new node coordinate.
                    newx, newy = self._newpoint_coords(e, totallength)

                    # Update node_list.
                    if currentstop not in sn.node_list:
                        sn.node_list.append(currentstop)

                    # Update nodes and node_coords.
                    sn.node_coords[currentstop] = newx, newy
                    sn.nodes[(newx, newy)] = currentstop

                # Update the adjacency list.
                sn.adjacencylist[currentstart].append(currentstop)
                sn.adjacencylist[currentstop].append(currentstart)

                # Add the new edge to the edge dict.
                # Iterating over this so we need to add after iterating.
                newedges.add(tuple(sorted([currentstart, currentstop])))

                # Modify edge_lengths.
                sn.edge_lengths[tuple(sorted([currentstart, currentstop]))] = interval

                # Increment the start to the stop.
                currentstart = currentstop

        sn.edges.update(newedges)
        sn.edges.difference_update(removeedges)
        sn.edges = list(sn.edges)
        # Re-snap every stored point pattern onto the segmented network.
        # NOTE(review): itervalues() is Python-2 only, consistent with the
        # rest of this module.
        for instance in sn.pointpatterns.itervalues():
            sn._snap_to_edge(instance)
        return sn
def savenetwork(self, filename):
"""
Save a network to disk as a binary file
Parameters
----------
filename: str
The filename where the network should be saved. This should be a full
path or the file is saved whereever this method is called from.
Example
--------
>>> ntw = ps.Network(ps.examples.get_path('streets.shp'))
>>> ntw.savenetwork('mynetwork.pkl')
"""
with open(filename, 'wb') as networkout:
cPickle.dump(self, networkout, protocol=2)
@staticmethod
def loadnetwork(filename):
with open(filename, 'rb') as networkin:
self = cPickle.load(networkin)
return self
class PointPattern():
    """
    A stub point pattern class used to store a point pattern. This class is monkey patched
    with network specific attributes when the points are snapped to a network.

    In the future this class may be replaced with a generic point
    pattern class.

    Parameters
    ----------
    shapefile: str
        The input shapefile.
    idvariable: str
        Field in the shapefile to use as an id variable.
    attribute: bool
        {False, True}
        A flag to indicate whether all attributes are tagged to this class.

    Attributes
    ----------
    points: dict
        Keys are the point ids.
        Values are dicts with 'coordinates' and 'properties' entries.
    npoints: int
        The number of points.
    """
    def __init__(self, shapefile, idvariable=None, attribute=False):
        self.points = {}
        self.npoints = 0

        # Optional id field: list of ids aligned with shapefile record order.
        if idvariable:
            ids = get_ids(shapefile, idvariable)
        else:
            ids = None

        pts = ps.open(shapefile)

        # Attach the DBF attribute table only when requested.
        # (`== True` comparison replaced with a plain truth test.)
        if attribute:
            dbname = os.path.splitext(shapefile)[0] + '.dbf'
            db = ps.open(dbname)
        else:
            db = None

        for i, pt in enumerate(pts):
            # Collapsed the previous four-way if/elif: key falls back to the
            # enumeration index, properties to None -- identical behavior.
            key = ids[i] if ids else i
            properties = db[i] if db else None
            self.points[key] = {'coordinates': pt, 'properties': properties}

        pts.close()
        if db:
            db.close()
        # len(dict) instead of len(dict.keys()) -- same value, no temp list.
        self.npoints = len(self.points)
class SimulatedPointPattern():
    """
    Struct-style container mirroring the PointPattern class, used for the
    randomly generated patterns built during permutation testing.

    If the PointPattern class grows methods, this could become a subclass.
    This class is not intended to be used by the external user.
    """
    def __init__(self):
        # Point count and per-observation bookkeeping, all empty initially.
        self.npoints = 0
        self.snapped_coordinates = {}
        self.dist_to_node = {}
        self.obs_to_edge = {}
        # Each node maps to the list of observations snapped to it.
        self.obs_to_node = defaultdict(list)
class SortedEdges(OrderedDict):
    # NOTE(review): this relies on the name-mangled internals (__map, __root)
    # of the pure-Python OrderedDict (CPython 2). The C implementation used in
    # Python >= 3.5 does not expose these attributes -- confirm target runtime.
    def next_key(self, key):
        """Return the key stored immediately after `key` in insertion order."""
        # __map[key] is the linked-list node [prev, next, key].
        next = self._OrderedDict__map[key][1]
        if next is self._OrderedDict__root:
            # The sentinel root node means `key` was the final entry.
            raise ValueError("{!r} is the last key.".format(key))
        return next[2]
    def first_key(self):
        """Return the oldest (first-inserted) key, or raise if empty."""
        # Iteration starts at the first-inserted key; return it immediately.
        for key in self: return key
        raise ValueError("No sorted edges remain.")
| bsd-3-clause |
dammeheli75/blx | vendor/telerik/kendoui/Kendo/Data/DataSourceTransportDestroy.php | 2397 | <?php
namespace Kendo\Data;
/**
 * Fluent configuration wrapper for the Kendo UI DataSource
 * `transport.destroy` options (serialized to the client-side widget).
 */
class DataSourceTransportDestroy extends \Kendo\SerializableObject {
//>> Properties

    /**
    * If set to false the request result will not be cached by the browser. Setting cache to false will only work correctly with HEAD and GET requests. It works by appending "_={timestamp}" to the GET parameters.
By default "jsonp" requests are not cached.Refer to the jQuery.ajax documentation for further info.
    * @param boolean $value
    * @return \Kendo\Data\DataSourceTransportDestroy
    */
    public function cache($value) {
        return $this->setProperty('cache', $value);
    }

    /**
    * The content-type HTTP header sent to the server. Default is "application/x-www-form-urlencoded". Use "application/json" if the content is JSON.
Refer to the jQuery.ajax documentation for further info.
    * @param string $value
    * @return \Kendo\Data\DataSourceTransportDestroy
    */
    public function contentType($value) {
        return $this->setProperty('contentType', $value);
    }

    /**
    * Additional parameters which are sent to the remote service.Refer to the jQuery.ajax documentation for further info.
    * @param array|\Kendo\JavaScriptFunction $value
    * @return \Kendo\Data\DataSourceTransportDestroy
    */
    public function data($value) {
        return $this->setProperty('data', $value);
    }

    /**
    * The type of result expected from the server. Commonly used values are "json" and "jsonp".Refer to the jQuery.ajax documentation for further info.
    * @param string $value
    * @return \Kendo\Data\DataSourceTransportDestroy
    */
    public function dataType($value) {
        return $this->setProperty('dataType', $value);
    }

    /**
    * The type of request to make ("POST", "GET", "PUT" or "DELETE"), default is "GET".Refer to the jQuery.ajax documentation for further info.
    * @param string $value
    * @return \Kendo\Data\DataSourceTransportDestroy
    */
    public function type($value) {
        return $this->setProperty('type', $value);
    }

    /**
    * The URL to which the request is sent.If set to function the data source will invoke it and use the result as the URL.
    * @param string|\Kendo\JavaScriptFunction $value
    * @return \Kendo\Data\DataSourceTransportDestroy
    */
    public function url($value) {
        return $this->setProperty('url', $value);
    }

//<< Properties
}
?>
| bsd-3-clause |
quantopian/odo | odo/into.py | 4965 | from __future__ import absolute_import, division, print_function
import functools
from toolz import merge
from multipledispatch import Dispatcher
from .convert import convert
from .append import append
from .resource import resource
from .utils import ignoring
import datashape
from datashape import discover
from datashape.dispatch import namespace
from datashape.predicates import isdimension
from .compatibility import unicode
from pandas import DataFrame, Series
from numpy import ndarray
not_appendable_types = DataFrame, Series, ndarray, tuple
__all__ = 'into',
if 'into' not in namespace:
namespace['into'] = Dispatcher('into')
into = namespace['into']
def validate(f):
    """
    Decorator that normalizes the ``dshape`` keyword argument.

    A string ``dshape`` is parsed into a ``datashape.DataShape``; any other
    non-None, non-DataShape value raises ``TypeError``. The normalized value
    is re-injected into ``kwargs`` before calling ``f``.
    """
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        dshape = kwargs.pop('dshape', None)
        # Accept both str and unicode for Python 2/3 compatibility.
        if isinstance(dshape, (str, unicode)):
            dshape = datashape.dshape(dshape)
        if dshape is not None and not isinstance(dshape, datashape.DataShape):
            raise TypeError('dshape argument is not an instance of DataShape')
        kwargs['dshape'] = dshape
        return f(*args, **kwargs)
    return wrapped
@into.register(type, object)
@validate
def into_type(a, b, dshape=None, **kwargs):
    """Convert ``b`` into a brand-new object of type ``a``."""
    with ignoring(NotImplementedError):
        # discover() may be unimplemented for this source; convert() can
        # still proceed with dshape=None in that case.
        if dshape is None:
            dshape = discover(b)
    return convert(a, b, dshape=dshape, **kwargs)
@into.register(object, object)
@validate
def into_object(target, source, dshape=None, **kwargs):
    """ Push one dataset into another

    Parameters
    ----------

    source: object or string
        The source of your data.  Either an object (e.g. DataFrame),

    target: object or string or type
        The target for where you want your data to go.
        Either an object, (e.g. []), a type, (e.g. list)
        or a string (e.g. 'postgresql://hostname::tablename'

    raise_on_errors: bool (optional, defaults to False)
        Raise exceptions rather than reroute around them

    **kwargs:
        keyword arguments to pass through to conversion functions.

    Examples
    --------

    >>> L = into(list, (1, 2, 3))  # Convert things into new things
    >>> L
    [1, 2, 3]

    >>> _ = into(L, (4, 5, 6))  # Append things onto existing things
    >>> L
    [1, 2, 3, 4, 5, 6]

    >>> into('myfile.csv', [('Alice', 1), ('Bob', 2)])  # doctest: +SKIP

    Explanation
    -----------

    We can specify data with a Python object like a ``list``, ``DataFrame``,
    ``sqlalchemy.Table``, ``h5py.Dataset``, etc..

    We can specify data with a string URI like ``'myfile.csv'``,
    ``'myfiles.*.json'`` or ``'sqlite:///data.db::tablename'``.  These are
    matched by regular expression.  See the ``resource`` function for more
    details on string URIs.

    We can optionally specify datatypes with the ``dshape=`` keyword, providing
    a datashape.  This allows us to be explicit about types when mismatches
    occur or when our data doesn't hold the whole picture.  See the
    ``discover`` function for more information on ``dshape``.

    >>> ds = 'var * {name: string, balance: float64}'
    >>> into('accounts.json', [('Alice', 100), ('Bob', 200)], dshape=ds)  # doctest: +SKIP

    We can optionally specify keyword arguments to pass down to relevant
    conversion functions.  For example, when converting a CSV file we might
    want to specify delimiter

    >>> into(list, 'accounts.csv', has_header=True, delimiter=';')  # doctest: +SKIP

    These keyword arguments trickle down to whatever function ``into`` uses
    convert this particular format, functions like ``pandas.read_csv``.

    See Also
    --------

    into.resource.resource  - Specify things with strings
    datashape.discover      - Get datashape of data
    into.convert.convert    - Convert things into new things
    into.append.append      - Add things onto existing things
    """
    # A string source is first resolved to a concrete resource object.
    if isinstance(source, (str, unicode)):
        source = resource(source, dshape=dshape, **kwargs)
    # DataFrames, Series, ndarrays, and tuples cannot grow in place, so an
    # append-style into() on them would silently be wrong -- reject early.
    if type(target) in not_appendable_types:
        raise TypeError('target of %s type does not support in-place append' % type(target))
    with ignoring(NotImplementedError):
        if dshape is None:
            dshape = discover(source)
    return append(target, source, dshape=dshape, **kwargs)
@into.register((str, unicode), object)
@validate
def into_string(uri, b, dshape=None, **kwargs):
    """Push ``b`` into the resource named by string URI ``uri``."""
    if dshape is None:
        dshape = discover(b)

    # For dimensioned data, create the target resource empty (0 * subshape)
    # and let append() fill it; scalar shapes are passed through unchanged.
    resource_ds = 0 * dshape.subshape[0] if isdimension(dshape[0]) else dshape

    a = resource(uri, dshape=resource_ds, expected_dshape=dshape, **kwargs)
    return into(a, b, dshape=dshape, **kwargs)
@into.register((type, (str, unicode)), (str, unicode))
@validate
def into_string_string(a, b, **kwargs):
    """Resolve the source URI ``b`` to a resource, then dispatch again."""
    return into(a, resource(b, **kwargs), **kwargs)
@into.register(object)
@validate
def into_curried(o, **kwargs1):
    """Curried form: ``into(target)`` returns a function of the source."""
    def curried_into(other, **kwargs2):
        # toolz.merge favors later mappings, so curry-time kwargs1 override
        # kwargs2 supplied at call time.
        return into(o, other, **merge(kwargs2, kwargs1))
    return curried_into
| bsd-3-clause |
lortnus/ezc | Webdav/tests/client_cadaver_lock_test.php | 734 | <?php
/**
* Client test for Cadaver (with locking).
*
* @package Webdav
* @subpackage Tests
* @version 1.1.4
* @copyright Copyright (C) 2005-2010 eZ Systems AS. All rights reserved.
* @license http://ez.no/licenses/new_bsd New BSD License
*/
require_once 'client_test_suite.php';
require_once 'client_test_continuous_lock_setup.php';
/**
* Client test for Cadaver (with locking).
*
* @package Webdav
* @subpackage Tests
*/
class ezcWebdavCadaverLockClientTest extends ezcTestCase
{
    /**
     * Builds the test suite that replays the captured Cadaver (lock)
     * client session against the WebDAV server.
     *
     * @return ezcWebdavClientTestSuite
     */
    public static function suite()
    {
        return new ezcWebdavClientTestSuite(
            'Cadaver (lock)',
            'clients/cadaver_lock.php',
            new ezcWebdavClientTestContinuousLockSetup()
        );
    }
}
?>
| bsd-3-clause |
timorieber/wagtail | wagtail/admin/rich_text/editors/hallo.py | 5475 | import json
from collections import OrderedDict
from django.forms import Media, widgets
from django.utils.functional import cached_property
from wagtail.admin.edit_handlers import RichTextFieldPanel
from wagtail.admin.rich_text.converters.editor_html import EditorHTMLConverter
from wagtail.admin.staticfiles import versioned_static
from wagtail.core.rich_text import features
class HalloPlugin:
    """
    Describes one hallo.js editor plugin: its registration name, its
    client-side options, the static assets it needs, and its toolbar order.
    """
    def __init__(self, **kwargs):
        self.name = kwargs.get('name')
        self.options = kwargs.get('options', {})
        self.js = kwargs.get('js', [])
        self.css = kwargs.get('css', {})
        self.order = kwargs.get('order', 100)

    def construct_plugins_list(self, plugins):
        """Register this plugin's options under its name, if it has one."""
        if self.name is not None:
            plugins[self.name] = self.options

    @property
    def media(self):
        """Form media (versioned JS/CSS assets) required by this plugin."""
        css = {
            media_type: [versioned_static(path) for path in paths]
            for media_type, paths in self.css.items()
        }
        return Media(js=[versioned_static(path) for path in self.js], css=css)
class HalloFormatPlugin(HalloPlugin):
    """One inline formatting toggle (bold/italic/...) on the shared
    'halloformat' plugin entry."""
    def __init__(self, **kwargs):
        kwargs.setdefault('name', 'halloformat')
        kwargs.setdefault('order', 10)
        # Which formatting key (e.g. 'bold') this instance switches on.
        self.format_name = kwargs['format_name']
        super().__init__(**kwargs)

    def construct_plugins_list(self, plugins):
        # All format plugins share one entry; each enables its own flag.
        plugins.setdefault(self.name, {'formattings': {
            'bold': False, 'italic': False, 'strikeThrough': False, 'underline': False
        }})
        plugins[self.name]['formattings'][self.format_name] = True
class HalloHeadingPlugin(HalloPlugin):
    """One block element (h2, p, ...) appended to the shared
    'halloheadings' formatBlocks list."""
    # Class-level so CORE_HALLO_PLUGINS (element='p') and feature plugins
    # share the same default toolbar position.
    default_order = 20

    def __init__(self, **kwargs):
        kwargs.setdefault('name', 'halloheadings')
        kwargs.setdefault('order', self.default_order)
        # Tag name this instance makes available, e.g. 'h2'.
        self.element = kwargs.pop('element')
        super().__init__(**kwargs)

    def construct_plugins_list(self, plugins):
        # Heading plugins share one entry; each contributes its element.
        plugins.setdefault(self.name, {'formatBlocks': []})
        plugins[self.name]['formatBlocks'].append(self.element)
class HalloListPlugin(HalloPlugin):
    """One list style (ordered/unordered) toggled on the shared
    'hallolists' plugin entry."""
    def __init__(self, **kwargs):
        kwargs.setdefault('name', 'hallolists')
        kwargs.setdefault('order', 40)
        # Either 'ordered' or 'unordered'.
        self.list_type = kwargs['list_type']
        super().__init__(**kwargs)

    def construct_plugins_list(self, plugins):
        # List plugins share one entry; each enables its own list type.
        plugins.setdefault(self.name, {'lists': {
            'ordered': False, 'unordered': False
        }})
        plugins[self.name]['lists'][self.list_type] = True
class HalloRequireParagraphsPlugin(HalloPlugin):
    """Plugin that forces editor content to be wrapped in paragraph
    elements; ships its own JS asset on top of the base plugin media."""
    @property
    def media(self):
        return Media(js=[
            versioned_static('wagtailadmin/js/hallo-plugins/hallo-requireparagraphs.js'),
        ]) + super().media
# Plugins which are always imported, and cannot be enabled/disabled via 'features'.
# These form the baseline toolbar: undo/redo, mandatory paragraph wrapping,
# and the plain-paragraph block format.
CORE_HALLO_PLUGINS = [
    HalloPlugin(name='halloreundo', order=50),
    HalloRequireParagraphsPlugin(name='hallorequireparagraphs'),
    HalloHeadingPlugin(element='p')
]
class HalloRichTextArea(widgets.Textarea):
    """Form widget rendering a hallo.js rich text editor, converting between
    the editor's HTML and Wagtail's database rich-text format."""
    template_name = 'wagtailadmin/widgets/hallo_rich_text_area.html'

    # this class's constructor accepts a 'features' kwarg
    accepts_features = True

    def get_panel(self):
        return RichTextFieldPanel

    def __init__(self, *args, **kwargs):
        # Optional raw hallo.js config dict; may carry an explicit 'plugins' key.
        self.options = kwargs.pop('options', None)

        self.features = kwargs.pop('features', None)
        if self.features is None:
            self.features = features.get_default_features()

        # Converter between editor HTML and the database representation,
        # limited to the enabled features.
        self.converter = EditorHTMLConverter(self.features)

        # construct a list of plugin objects, by querying the feature registry
        # and keeping the non-null responses from get_editor_plugin
        self.plugins = CORE_HALLO_PLUGINS + list(filter(None, [
            features.get_editor_plugin('hallo', feature_name)
            for feature_name in self.features
        ]))
        self.plugins.sort(key=lambda plugin: plugin.order)

        super().__init__(*args, **kwargs)

    def format_value(self, value):
        # Convert database rich text representation to the format required by
        # the input field
        value = super().format_value(value)

        if value is None:
            return None

        return self.converter.from_database_format(value)

    def get_context(self, name, value, attrs):
        context = super().get_context(name, value, attrs)

        if self.options is not None and 'plugins' in self.options:
            # explicit 'plugins' config passed in options, so use that
            plugin_data = self.options['plugins']
        else:
            # Build the plugin config by letting each plugin register itself.
            plugin_data = OrderedDict()
            for plugin in self.plugins:
                plugin.construct_plugins_list(plugin_data)
        context['widget']['plugins_json'] = json.dumps(plugin_data)

        return context

    def value_from_datadict(self, data, files, name):
        # Convert submitted editor HTML back to the database format.
        original_value = super().value_from_datadict(data, files, name)
        if original_value is None:
            return None
        return self.converter.to_database_format(original_value)

    @cached_property
    def media(self):
        # Base editor assets, plus whatever each enabled plugin contributes.
        media = Media(js=[
            versioned_static('wagtailadmin/js/vendor/hallo.js'),
            versioned_static('wagtailadmin/js/hallo-bootstrap.js'),
        ], css={
            'all': [versioned_static('wagtailadmin/css/panels/hallo.css')]
        })

        for plugin in self.plugins:
            media += plugin.media
        return media
| bsd-3-clause |
ghedsouza/django | django/db/models/query.py | 69428 | """
The main QuerySet implementation. This provides the public API for the ORM.
"""
import copy
import sys
import warnings
from collections import OrderedDict, deque
from django.conf import settings
from django.core import exceptions
from django.db import (
DJANGO_VERSION_PICKLE_KEY, IntegrityError, connections, router,
transaction,
)
from django.db.models import DateField, DateTimeField, sql
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import Collector
from django.db.models.expressions import F
from django.db.models.fields import AutoField
from django.db.models.functions import Trunc
from django.db.models.query_utils import InvalidQuery, Q
from django.db.models.sql.constants import CURSOR
from django.utils import timezone
from django.utils.functional import cached_property, partition
from django.utils.version import get_version
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
# Pull into this namespace for backwards compatibility.
EmptyResultSet = sql.EmptyResultSet
class BaseIterable:
    """Base class for iterables that materialize QuerySet results."""
    def __init__(self, queryset, chunked_fetch=False):
        # The QuerySet whose compiled SQL this iterable will execute.
        self.queryset = queryset
        # Whether to stream rows via a server-side cursor.
        self.chunked_fetch = chunked_fetch
class ModelIterable(BaseIterable):
    """Iterable that yields a model instance for each row."""

    def __iter__(self):
        queryset = self.queryset
        db = queryset.db
        compiler = queryset.query.get_compiler(using=db)
        # Execute the query. This will also fill compiler.select, klass_info,
        # and annotations.
        results = compiler.execute_sql(chunked_fetch=self.chunked_fetch)
        select, klass_info, annotation_col_map = (compiler.select, compiler.klass_info,
                                                  compiler.annotation_col_map)
        model_cls = klass_info['model']
        select_fields = klass_info['select_fields']
        # Columns [start, end) of each row hold this model's own fields.
        model_fields_start, model_fields_end = select_fields[0], select_fields[-1] + 1
        init_list = [f[0].target.attname
                     for f in select[model_fields_start:model_fields_end]]
        related_populators = get_related_populators(klass_info, select, db)
        for row in compiler.results_iter(results):
            obj = model_cls.from_db(db, init_list, row[model_fields_start:model_fields_end])
            if related_populators:
                # Fill in select_related() objects from the remaining columns.
                for rel_populator in related_populators:
                    rel_populator.populate(row, obj)
            if annotation_col_map:
                for attr_name, col_pos in annotation_col_map.items():
                    setattr(obj, attr_name, row[col_pos])

            # Add the known related objects to the model, if there are any
            if queryset._known_related_objects:
                for field, rel_objs in queryset._known_related_objects.items():
                    # Avoid overwriting objects loaded e.g. by select_related
                    if hasattr(obj, field.get_cache_name()):
                        continue
                    pk = getattr(obj, field.get_attname())
                    try:
                        rel_obj = rel_objs[pk]
                    except KeyError:
                        pass  # may happen in qs1 | qs2 scenarios
                    else:
                        setattr(obj, field.name, rel_obj)

            yield obj
class ValuesIterable(BaseIterable):
    """
    Iterable returned by QuerySet.values() that yields a dict for each row.
    """

    def __iter__(self):
        queryset = self.queryset
        query = queryset.query
        compiler = query.get_compiler(queryset.db)

        field_names = list(query.values_select)
        extra_names = list(query.extra_select)
        annotation_names = list(query.annotation_select)

        # extra(select=...) cols are always at the start of the row.
        names = extra_names + field_names + annotation_names

        for row in compiler.results_iter():
            # Row order matches `names`, so a positional zip is sufficient.
            yield dict(zip(names, row))
class ValuesListIterable(BaseIterable):
    """
    Iterable returned by QuerySet.values_list(flat=False) that yields a tuple
    for each row.
    """

    def __iter__(self):
        queryset = self.queryset
        query = queryset.query
        compiler = query.get_compiler(queryset.db)

        if not query.extra_select and not query.annotation_select:
            # Fast path: rows already arrive in the requested column order.
            for row in compiler.results_iter():
                yield tuple(row)
        else:
            field_names = list(query.values_select)
            extra_names = list(query.extra_select)
            annotation_names = list(query.annotation_select)

            # extra(select=...) cols are always at the start of the row.
            names = extra_names + field_names + annotation_names

            if queryset._fields:
                # Reorder according to fields.
                fields = list(queryset._fields) + [f for f in annotation_names if f not in queryset._fields]
            else:
                fields = names

            for row in compiler.results_iter():
                # Re-project each row into the caller's requested field order.
                data = dict(zip(names, row))
                yield tuple(data[f] for f in fields)
class FlatValuesListIterable(BaseIterable):
    """
    Iterable returned by QuerySet.values_list(flat=True) that yields single
    values.
    """

    def __iter__(self):
        qs = self.queryset
        compiler = qs.query.get_compiler(qs.db)
        # Each row contains exactly one selected column; unwrap it.
        for value, *_rest in compiler.results_iter():
            yield value
class QuerySet:
"""Represent a lazy database lookup for a set of objects."""
    def __init__(self, model=None, query=None, using=None, hints=None):
        self.model = model
        self._db = using
        self._hints = hints or {}
        self.query = query or sql.Query(self.model)
        # Populated lazily by _fetch_all(); None means "not evaluated yet".
        self._result_cache = None
        self._sticky_filter = False
        self._for_write = False
        self._prefetch_related_lookups = ()
        self._prefetch_done = False
        self._known_related_objects = {}  # {rel_field: {pk: rel_obj}}
        # Which BaseIterable subclass turns rows into Python objects.
        self._iterable_class = ModelIterable
        self._fields = None

    def as_manager(cls):
        """Return a Manager class built from this QuerySet subclass."""
        # Address the circular dependency between `Queryset` and `Manager`.
        from django.db.models.manager import Manager
        manager = Manager.from_queryset(cls)()
        manager._built_with_as_manager = True
        return manager
    # queryset_only marks this as unavailable on generated managers.
    as_manager.queryset_only = True
    as_manager = classmethod(as_manager)
########################
# PYTHON MAGIC METHODS #
########################
    def __deepcopy__(self, memo):
        """Don't populate the QuerySet's cache."""
        obj = self.__class__()
        for k, v in self.__dict__.items():
            if k == '_result_cache':
                # The copy starts unevaluated regardless of our own state.
                obj.__dict__[k] = None
            else:
                obj.__dict__[k] = copy.deepcopy(v, memo)
        return obj

    def __getstate__(self):
        """Pickle support: evaluate fully, then tag with the Django version."""
        # Force the cache to be fully populated.
        self._fetch_all()
        obj_dict = self.__dict__.copy()
        obj_dict[DJANGO_VERSION_PICKLE_KEY] = get_version()
        return obj_dict

    def __setstate__(self, state):
        """Unpickle support: warn when the pickling Django version differs."""
        msg = None
        pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
        if pickled_version:
            current_version = get_version()
            if current_version != pickled_version:
                msg = (
                    "Pickled queryset instance's Django version %s does not "
                    "match the current version %s." % (pickled_version, current_version)
                )
        else:
            msg = "Pickled queryset instance's Django version is not specified."

        if msg:
            warnings.warn(msg, RuntimeWarning, stacklevel=2)

        self.__dict__.update(state)

    def __repr__(self):
        # Fetch one element past the display limit to detect truncation.
        data = list(self[:REPR_OUTPUT_SIZE + 1])
        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."
        return '<%s %r>' % (self.__class__.__name__, data)
    def __len__(self):
        # len() forces full evaluation of the queryset.
        self._fetch_all()
        return len(self._result_cache)

    def __iter__(self):
        """
        The queryset iterator protocol uses three nested iterators in the
        default case:
            1. sql.compiler:execute_sql()
               - Returns 100 rows at time (constants.GET_ITERATOR_CHUNK_SIZE)
                 using cursor.fetchmany(). This part is responsible for
                 doing some column masking, and returning the rows in chunks.
            2. sql/compiler.results_iter()
               - Returns one row at time. At this point the rows are still just
                 tuples. In some cases the return values are converted to
                 Python values at this location.
            3. self.iterator()
               - Responsible for turning the rows into model objects.
        """
        self._fetch_all()
        return iter(self._result_cache)

    def __bool__(self):
        # Truth testing also forces full evaluation.
        self._fetch_all()
        return bool(self._result_cache)
    def __getitem__(self, k):
        """Retrieve an item or slice from the set of results."""
        if not isinstance(k, (int, slice)):
            raise TypeError
        assert ((not isinstance(k, slice) and (k >= 0)) or
                (isinstance(k, slice) and (k.start is None or k.start >= 0) and
                 (k.stop is None or k.stop >= 0))), \
            "Negative indexing is not supported."

        if self._result_cache is not None:
            # Already evaluated: slice/index the cached Python list directly.
            return self._result_cache[k]

        if isinstance(k, slice):
            # Unevaluated: translate the slice into SQL LIMIT/OFFSET.
            qs = self._clone()
            if k.start is not None:
                start = int(k.start)
            else:
                start = None
            if k.stop is not None:
                stop = int(k.stop)
            else:
                stop = None
            qs.query.set_limits(start, stop)
            # A step cannot be expressed in SQL, so evaluate and re-slice.
            return list(qs)[::k.step] if k.step else qs

        qs = self._clone()
        qs.query.set_limits(k, k + 1)
        return list(qs)[0]

    def __and__(self, other):
        """Intersection (SQL AND) of two querysets over the same model."""
        self._merge_sanity_check(other)
        if isinstance(other, EmptyQuerySet):
            return other
        if isinstance(self, EmptyQuerySet):
            return self
        combined = self._clone()
        combined._merge_known_related_objects(other)
        combined.query.combine(other.query, sql.AND)
        return combined

    def __or__(self, other):
        """Union (SQL OR) of two querysets over the same model."""
        self._merge_sanity_check(other)
        if isinstance(self, EmptyQuerySet):
            return other
        if isinstance(other, EmptyQuerySet):
            return self
        combined = self._clone()
        combined._merge_known_related_objects(other)
        combined.query.combine(other.query, sql.OR)
        return combined
####################################
# METHODS THAT DO DATABASE QUERIES #
####################################
    def iterator(self):
        """
        An iterator over the results from applying this QuerySet to the
        database.
        """
        # Server-side cursors stream rows instead of loading them all at once,
        # unless the connection explicitly disables them.
        use_chunked_fetch = not connections[self.db].settings_dict.get('DISABLE_SERVER_SIDE_CURSORS')
        return iter(self._iterable_class(self, chunked_fetch=use_chunked_fetch))

    def aggregate(self, *args, **kwargs):
        """
        Return a dictionary containing the calculations (aggregation)
        over the current queryset.

        If args is present the expression is passed as a kwarg using
        the Aggregate object's default alias.
        """
        if self.query.distinct_fields:
            raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
        for arg in args:
            # The default_alias property raises TypeError if default_alias
            # can't be set automatically or AttributeError if it isn't an
            # attribute.
            try:
                arg.default_alias
            except (AttributeError, TypeError):
                raise TypeError("Complex aggregates require an alias")
            kwargs[arg.default_alias] = arg

        query = self.query.clone()
        for (alias, aggregate_expr) in kwargs.items():
            query.add_annotation(aggregate_expr, alias, is_summary=True)
            if not query.annotations[alias].contains_aggregate:
                raise TypeError("%s is not an aggregate expression" % alias)
        return query.get_aggregation(self.db, kwargs.keys())

    def count(self):
        """
        Perform a SELECT COUNT() and return the number of records as an
        integer.

        If the QuerySet is already fully cached, return the length of the
        cached results set to avoid multiple SELECT COUNT(*) calls.
        """
        if self._result_cache is not None:
            return len(self._result_cache)

        return self.query.get_count(using=self.db)
    def get(self, *args, **kwargs):
        """
        Perform the query and return a single object matching the given
        keyword arguments.

        Raises DoesNotExist when no row matches and MultipleObjectsReturned
        when more than one does.
        """
        clone = self.filter(*args, **kwargs)
        if self.query.can_filter() and not self.query.distinct_fields:
            # Ordering is irrelevant for a single-object lookup; drop it.
            clone = clone.order_by()
        num = len(clone)
        if num == 1:
            return clone._result_cache[0]
        if not num:
            raise self.model.DoesNotExist(
                "%s matching query does not exist." %
                self.model._meta.object_name
            )
        raise self.model.MultipleObjectsReturned(
            "get() returned more than one %s -- it returned %s!" %
            (self.model._meta.object_name, num)
        )

    def create(self, **kwargs):
        """
        Create a new object with the given kwargs, saving it to the database
        and returning the created object.
        """
        obj = self.model(**kwargs)
        self._for_write = True
        # force_insert guarantees an INSERT (never an UPDATE).
        obj.save(force_insert=True, using=self.db)
        return obj

    def _populate_pk_values(self, objs):
        # Give each unsaved object a chance to generate its own pk (e.g.
        # UUID defaults) before a bulk insert.
        for obj in objs:
            if obj.pk is None:
                obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
    def bulk_create(self, objs, batch_size=None):
        """
        Insert each of the instances into the database. Do *not* call
        save() on each of the instances, do not send any pre/post_save
        signals, and do not set the primary key attribute if it is an
        autoincrement field (except if features.can_return_ids_from_bulk_insert=True).
        Multi-table models are not supported.
        """
        # When you bulk insert you don't get the primary keys back (if it's an
        # autoincrement, except if can_return_ids_from_bulk_insert=True), so
        # you can't insert into the child tables which references this. There
        # are two workarounds:
        # 1) This could be implemented if you didn't have an autoincrement pk
        # 2) You could do it by doing O(n) normal inserts into the parent
        #    tables to get the primary keys back and then doing a single bulk
        #    insert into the childmost table.
        # We currently set the primary keys on the objects when using
        # PostgreSQL via the RETURNING ID clause. It should be possible for
        # Oracle as well, but the semantics for extracting the primary keys is
        # trickier so it's not done yet.
        assert batch_size is None or batch_size > 0
        # Check that the parents share the same concrete model with the our
        # model to detect the inheritance pattern ConcreteGrandParent ->
        # MultiTableParent -> ProxyChild. Simply checking self.model._meta.proxy
        # would not identify that case as involving multiple tables.
        for parent in self.model._meta.get_parent_list():
            if parent._meta.concrete_model is not self.model._meta.concrete_model:
                raise ValueError("Can't bulk create a multi-table inherited model")
        if not objs:
            return objs
        self._for_write = True
        connection = connections[self.db]
        fields = self.model._meta.concrete_fields
        objs = list(objs)
        self._populate_pk_values(objs)
        with transaction.atomic(using=self.db, savepoint=False):
            # Objects that already have a pk can be inserted with it; the rest
            # rely on the database to assign one.
            objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
            if objs_with_pk:
                self._batched_insert(objs_with_pk, fields, batch_size)
            if objs_without_pk:
                fields = [f for f in fields if not isinstance(f, AutoField)]
                ids = self._batched_insert(objs_without_pk, fields, batch_size)
                if connection.features.can_return_ids_from_bulk_insert:
                    assert len(ids) == len(objs_without_pk)
                # Copy the returned ids back onto the instances and mark them
                # as saved on this database.
                for obj_without_pk, pk in zip(objs_without_pk, ids):
                    obj_without_pk.pk = pk
                    obj_without_pk._state.adding = False
                    obj_without_pk._state.db = self.db

        return objs
    def get_or_create(self, defaults=None, **kwargs):
        """
        Look up an object with the given kwargs, creating one if necessary.
        Return a tuple of (object, created), where created is a boolean
        specifying whether an object was created.
        """
        lookup, params = self._extract_model_params(defaults, **kwargs)
        # The get() needs to be targeted at the write database in order
        # to avoid potential transaction consistency problems.
        self._for_write = True
        try:
            return self.get(**lookup), False
        except self.model.DoesNotExist:
            return self._create_object_from_params(lookup, params)

    def update_or_create(self, defaults=None, **kwargs):
        """
        Look up an object with the given kwargs, updating one with defaults
        if it exists, otherwise create a new one.
        Return a tuple (object, created), where created is a boolean
        specifying whether an object was created.
        """
        defaults = defaults or {}
        lookup, params = self._extract_model_params(defaults, **kwargs)
        self._for_write = True
        with transaction.atomic(using=self.db):
            try:
                # Lock the row for the duration of the update.
                obj = self.select_for_update().get(**lookup)
            except self.model.DoesNotExist:
                obj, created = self._create_object_from_params(lookup, params)
                if created:
                    return obj, created
            for k, v in defaults.items():
                # Callable defaults are resolved at assignment time.
                setattr(obj, k, v() if callable(v) else v)
            obj.save(using=self.db)
        return obj, False
    def _create_object_from_params(self, lookup, params):
        """
        Try to create an object using passed params. Used by get_or_create()
        and update_or_create().
        """
        try:
            with transaction.atomic(using=self.db):
                # Resolve callable defaults just before the insert.
                params = {k: v() if callable(v) else v for k, v in params.items()}
                obj = self.create(**params)
            return obj, True
        except IntegrityError:
            # A concurrent writer may have inserted the same row between the
            # caller's failed get() and the create() above; retry the lookup
            # before surfacing the original IntegrityError.
            exc_info = sys.exc_info()
            try:
                return self.get(**lookup), False
            except self.model.DoesNotExist:
                pass
            # Re-raise the IntegrityError with its original traceback.
            raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
def _extract_model_params(self, defaults, **kwargs):
"""
Prepare `lookup` (kwargs that are valid model attributes), `params`
(for creating a model instance) based on given kwargs; for use by
get_or_create() and update_or_create().
"""
defaults = defaults or {}
lookup = kwargs.copy()
for f in self.model._meta.fields:
if f.attname in lookup:
lookup[f.name] = lookup.pop(f.attname)
params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k}
params.update(defaults)
invalid_params = []
for param in params:
try:
self.model._meta.get_field(param)
except exceptions.FieldDoesNotExist:
if param != 'pk': # It's okay to use a model's pk property.
invalid_params.append(param)
if invalid_params:
raise exceptions.FieldError(
"Invalid field name(s) for model %s: '%s'." % (
self.model._meta.object_name,
"', '".join(sorted(invalid_params)),
))
return lookup, params
def _earliest_or_latest(self, field_name=None, direction="-"):
"""
Return the latest object, according to the model's
'get_latest_by' option or optional given field_name.
"""
order_by = field_name or getattr(self.model._meta, 'get_latest_by')
assert bool(order_by), "earliest() and latest() require either a "\
"field_name parameter or 'get_latest_by' in the model"
assert self.query.can_filter(), \
"Cannot change a query once a slice has been taken."
obj = self._clone()
obj.query.set_limits(high=1)
obj.query.clear_ordering(force_empty=True)
obj.query.add_ordering('%s%s' % (direction, order_by))
return obj.get()
    def earliest(self, field_name=None):
        """Return the earliest object, ordered ascending (see _earliest_or_latest)."""
        return self._earliest_or_latest(field_name=field_name, direction="")
    def latest(self, field_name=None):
        """Return the latest object, ordered descending (see _earliest_or_latest)."""
        return self._earliest_or_latest(field_name=field_name, direction="-")
def first(self):
"""Return the first object of a query or None if no match is found."""
objects = list((self if self.ordered else self.order_by('pk'))[:1])
if objects:
return objects[0]
return None
def last(self):
"""Return the last object of a query or None if no match is found."""
objects = list((self.reverse() if self.ordered else self.order_by('-pk'))[:1])
if objects:
return objects[0]
return None
    def in_bulk(self, id_list=None):
        """
        Return a dictionary mapping each of the given IDs to the object with
        that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
        """
        assert self.query.can_filter(), \
            "Cannot use 'limit' or 'offset' with in_bulk"
        if id_list is not None:
            if not id_list:
                # No IDs requested: avoid issuing an empty IN () query.
                return {}
            batch_size = connections[self.db].features.max_query_params
            id_list = tuple(id_list)
            # If the database has a limit on the number of query parameters
            # (e.g. SQLite), retrieve objects in batches if necessary.
            if batch_size and batch_size < len(id_list):
                qs = ()
                for offset in range(0, len(id_list), batch_size):
                    batch = id_list[offset:offset + batch_size]
                    qs += tuple(self.filter(pk__in=batch).order_by())
            else:
                qs = self.filter(pk__in=id_list).order_by()
        else:
            qs = self._clone()
        return {obj._get_pk_val(): obj for obj in qs}
    def delete(self):
        """
        Delete the records in the current QuerySet.

        Return the (deleted, _rows_count) pair produced by the Collector.
        """
        assert self.query.can_filter(), \
            "Cannot use 'limit' or 'offset' with delete."
        if self._fields is not None:
            raise TypeError("Cannot call delete() after .values() or .values_list()")
        del_query = self._clone()
        # The delete is actually 2 queries - one to find related objects,
        # and one to delete. Make sure that the discovery of related
        # objects is performed on the same database as the deletion.
        del_query._for_write = True
        # Disable non-supported fields.
        del_query.query.select_for_update = False
        del_query.query.select_related = False
        del_query.query.clear_ordering(force_empty=True)
        collector = Collector(using=del_query.db)
        collector.collect(del_query)
        deleted, _rows_count = collector.delete()
        # Clear the result cache, in case this QuerySet gets reused.
        self._result_cache = None
        return deleted, _rows_count
    # Flags consumed elsewhere (e.g. to keep delete() off safe contexts
    # and off Manager) -- confirm against the Manager/template machinery.
    delete.alters_data = True
    delete.queryset_only = True
    def _raw_delete(self, using):
        """
        Delete objects found from the given queryset in single direct SQL
        query. No signals are sent and there is no protection for cascades.
        """
        return sql.DeleteQuery(self.model).delete_qs(self, using)
    _raw_delete.alters_data = True
    def update(self, **kwargs):
        """
        Update all elements in the current QuerySet, setting all the given
        fields to the appropriate values.

        Return the row count reported by the database.
        """
        assert self.query.can_filter(), \
            "Cannot update a query once a slice has been taken."
        self._for_write = True
        query = self.query.clone(sql.UpdateQuery)
        query.add_update_values(kwargs)
        # Clear any annotations so that they won't be present in subqueries.
        query._annotations = None
        with transaction.atomic(using=self.db, savepoint=False):
            rows = query.get_compiler(self.db).execute_sql(CURSOR)
        # Invalidate any previously evaluated results.
        self._result_cache = None
        return rows
    update.alters_data = True
    def _update(self, values):
        """
        A version of update() that accepts field objects instead of field names.
        Used primarily for model saving and not intended for use by general
        code (it requires too much poking around at model internals to be
        useful at that level).
        """
        assert self.query.can_filter(), \
            "Cannot update a query once a slice has been taken."
        query = self.query.clone(sql.UpdateQuery)
        query.add_update_fields(values)
        # Invalidate any previously evaluated results.
        self._result_cache = None
        return query.get_compiler(self.db).execute_sql(CURSOR)
    _update.alters_data = True
    _update.queryset_only = False
def exists(self):
if self._result_cache is None:
return self.query.has_results(using=self.db)
return bool(self._result_cache)
    def _prefetch_related_objects(self):
        """Run the configured prefetch lookups against the cached results."""
        # This method can only be called once the result cache has been filled.
        prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
        self._prefetch_done = True
##################################################
# PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
##################################################
def raw(self, raw_query, params=None, translations=None, using=None):
if using is None:
using = self.db
return RawQuerySet(raw_query, model=self.model, params=params, translations=translations, using=using)
def _values(self, *fields, **expressions):
clone = self._clone()
if expressions:
clone = clone.annotate(**expressions)
clone._fields = fields
clone.query.set_values(fields)
return clone
def values(self, *fields, **expressions):
fields += tuple(expressions)
clone = self._values(*fields, **expressions)
clone._iterable_class = ValuesIterable
return clone
def values_list(self, *fields, flat=False):
if flat and len(fields) > 1:
raise TypeError("'flat' is not valid when values_list is called with more than one field.")
_fields = []
expressions = {}
for field in fields:
if hasattr(field, 'resolve_expression'):
field_id = str(id(field))
expressions[field_id] = field
_fields.append(field_id)
else:
_fields.append(field)
clone = self._values(*_fields, **expressions)
clone._iterable_class = FlatValuesListIterable if flat else ValuesListIterable
return clone
    def dates(self, field_name, kind, order='ASC'):
        """
        Return a list of date objects representing all available dates for
        the given field_name, scoped to 'kind'.
        """
        assert kind in ("year", "month", "day"), \
            "'kind' must be one of 'year', 'month' or 'day'."
        assert order in ('ASC', 'DESC'), \
            "'order' must be either 'ASC' or 'DESC'."
        # Truncate the field to the requested granularity, and also select
        # the raw field so NULL dates can be filtered out of the result.
        return self.annotate(
            datefield=Trunc(field_name, kind, output_field=DateField()),
            plain_field=F(field_name)
        ).values_list(
            'datefield', flat=True
        ).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datefield')
    def datetimes(self, field_name, kind, order='ASC', tzinfo=None):
        """
        Return a list of datetime objects representing all available
        datetimes for the given field_name, scoped to 'kind'.
        """
        assert kind in ("year", "month", "day", "hour", "minute", "second"), \
            "'kind' must be one of 'year', 'month', 'day', 'hour', 'minute' or 'second'."
        assert order in ('ASC', 'DESC'), \
            "'order' must be either 'ASC' or 'DESC'."
        if settings.USE_TZ:
            if tzinfo is None:
                tzinfo = timezone.get_current_timezone()
        else:
            # Timezone support disabled: ignore any tzinfo argument.
            tzinfo = None
        # Truncate the field to the requested granularity, and also select
        # the raw field so NULL datetimes can be filtered out.
        return self.annotate(
            datetimefield=Trunc(field_name, kind, output_field=DateTimeField(), tzinfo=tzinfo),
            plain_field=F(field_name)
        ).values_list(
            'datetimefield', flat=True
        ).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datetimefield')
def none(self):
"""Return an empty QuerySet."""
clone = self._clone()
clone.query.set_empty()
return clone
##################################################################
# PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
##################################################################
    def all(self):
        """
        Return a new QuerySet that is a copy of the current one. This allows a
        QuerySet to proxy for a model manager in some cases.
        """
        # Plain clone: no filters added, no evaluation triggered.
        return self._clone()
    def filter(self, *args, **kwargs):
        """
        Return a new QuerySet instance with the args ANDed to the existing
        set.
        """
        # negate=False: keep rows matching the condition.
        return self._filter_or_exclude(False, *args, **kwargs)
    def exclude(self, *args, **kwargs):
        """
        Return a new QuerySet instance with NOT (args) ANDed to the existing
        set.
        """
        # negate=True: keep rows NOT matching the condition.
        return self._filter_or_exclude(True, *args, **kwargs)
def _filter_or_exclude(self, negate, *args, **kwargs):
if args or kwargs:
assert self.query.can_filter(), \
"Cannot filter a query once a slice has been taken."
clone = self._clone()
if negate:
clone.query.add_q(~Q(*args, **kwargs))
else:
clone.query.add_q(Q(*args, **kwargs))
return clone
def complex_filter(self, filter_obj):
"""
Return a new QuerySet instance with filter_obj added to the filters.
filter_obj can be a Q object (or anything with an add_to_query()
method) or a dictionary of keyword lookup arguments.
This exists to support framework features such as 'limit_choices_to',
and usually it will be more natural to use other methods.
"""
if isinstance(filter_obj, Q) or hasattr(filter_obj, 'add_to_query'):
clone = self._clone()
clone.query.add_q(filter_obj)
return clone
else:
return self._filter_or_exclude(None, **filter_obj)
    def _combinator_query(self, combinator, *other_qs, all=False):
        """Combine this queryset with others using the named SQL combinator."""
        # Clone the query to inherit the select list and everything
        clone = self._clone()
        # Clear limits and ordering so they can be reapplied
        clone.query.clear_ordering(True)
        clone.query.clear_limits()
        clone.query.combined_queries = (self.query,) + tuple(qs.query for qs in other_qs)
        clone.query.combinator = combinator
        # `all` maps to e.g. UNION ALL (keep duplicates).
        clone.query.combinator_all = all
        return clone
    def union(self, *other_qs, all=False):
        """Return the SQL UNION of this queryset with the given ones."""
        return self._combinator_query('union', *other_qs, all=all)
    def intersection(self, *other_qs):
        """Return the SQL INTERSECT of this queryset with the given ones."""
        return self._combinator_query('intersection', *other_qs)
    def difference(self, *other_qs):
        """Return the SQL EXCEPT of this queryset with the given ones."""
        return self._combinator_query('difference', *other_qs)
def select_for_update(self, nowait=False, skip_locked=False):
"""
Return a new QuerySet instance that will select objects with a
FOR UPDATE lock.
"""
if nowait and skip_locked:
raise ValueError('The nowait option cannot be used with skip_locked.')
obj = self._clone()
obj._for_write = True
obj.query.select_for_update = True
obj.query.select_for_update_nowait = nowait
obj.query.select_for_update_skip_locked = skip_locked
return obj
def select_related(self, *fields):
"""
Return a new QuerySet instance that will select related objects.
If fields are specified, they must be ForeignKey fields and only those
related objects are included in the selection.
If select_related(None) is called, clear the list.
"""
if self._fields is not None:
raise TypeError("Cannot call select_related() after .values() or .values_list()")
obj = self._clone()
if fields == (None,):
obj.query.select_related = False
elif fields:
obj.query.add_select_related(fields)
else:
obj.query.select_related = True
return obj
def prefetch_related(self, *lookups):
"""
Return a new QuerySet instance that will prefetch the specified
Many-To-One and Many-To-Many related objects when the QuerySet is
evaluated.
When prefetch_related() is called more than once, append to the list of
prefetch lookups. If prefetch_related(None) is called, clear the list.
"""
clone = self._clone()
if lookups == (None,):
clone._prefetch_related_lookups = ()
else:
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
    def annotate(self, *args, **kwargs):
        """
        Return a query set in which the returned objects have been annotated
        with extra data or aggregations.
        """
        annotations = OrderedDict()  # To preserve ordering of args
        for arg in args:
            # The default_alias property may raise a TypeError.
            try:
                if arg.default_alias in kwargs:
                    raise ValueError("The named annotation '%s' conflicts with the "
                                     "default name for another annotation."
                                     % arg.default_alias)
            except TypeError:
                raise TypeError("Complex annotations require an alias")
            annotations[arg.default_alias] = arg
        annotations.update(kwargs)
        clone = self._clone()
        names = self._fields
        if names is None:
            names = {f.name for f in self.model._meta.get_fields()}
        # Reject aliases that would shadow an existing model field.
        for alias, annotation in annotations.items():
            if alias in names:
                raise ValueError("The annotation '%s' conflicts with a field on "
                                 "the model." % alias)
            clone.query.add_annotation(annotation, alias, is_summary=False)
        # The first aggregate annotation forces grouping on the query.
        for alias, annotation in clone.query.annotations.items():
            if alias in annotations and annotation.contains_aggregate:
                if clone._fields is None:
                    clone.query.group_by = True
                else:
                    clone.query.set_group_by()
                break
        return clone
def order_by(self, *field_names):
"""Return a new QuerySet instance with the ordering changed."""
assert self.query.can_filter(), \
"Cannot reorder a query once a slice has been taken."
obj = self._clone()
obj.query.clear_ordering(force_empty=False)
obj.query.add_ordering(*field_names)
return obj
def distinct(self, *field_names):
"""
Return a new QuerySet instance that will select only distinct results.
"""
assert self.query.can_filter(), \
"Cannot create distinct fields once a slice has been taken."
obj = self._clone()
obj.query.add_distinct_fields(*field_names)
return obj
def extra(self, select=None, where=None, params=None, tables=None,
order_by=None, select_params=None):
"""Add extra SQL fragments to the query."""
assert self.query.can_filter(), \
"Cannot change a query once a slice has been taken"
clone = self._clone()
clone.query.add_extra(select, select_params, where, params, tables, order_by)
return clone
def reverse(self):
"""Reverse the ordering of the QuerySet."""
clone = self._clone()
clone.query.standard_ordering = not clone.query.standard_ordering
return clone
def defer(self, *fields):
"""
Defer the loading of data for certain fields until they are accessed.
Add the set of deferred fields to any existing set of deferred fields.
The only exception to this is if None is passed in as the only
parameter, in which case removal all deferrals.
"""
if self._fields is not None:
raise TypeError("Cannot call defer() after .values() or .values_list()")
clone = self._clone()
if fields == (None,):
clone.query.clear_deferred_loading()
else:
clone.query.add_deferred_loading(fields)
return clone
def only(self, *fields):
"""
Essentially, the opposite of defer(). Only the fields passed into this
method and that are not already specified as deferred are loaded
immediately when the queryset is evaluated.
"""
if self._fields is not None:
raise TypeError("Cannot call only() after .values() or .values_list()")
if fields == (None,):
# Can only pass None to defer(), not only(), as the rest option.
# That won't stop people trying to do this, so let's be explicit.
raise TypeError("Cannot pass None as an argument to only().")
clone = self._clone()
clone.query.add_immediate_loading(fields)
return clone
def using(self, alias):
"""Select which database this QuerySet should execute against."""
clone = self._clone()
clone._db = alias
return clone
###################################
# PUBLIC INTROSPECTION ATTRIBUTES #
###################################
@property
def ordered(self):
"""
Return True if the QuerySet is ordered -- i.e. has an order_by()
clause or a default ordering on the model.
"""
if self.query.extra_order_by or self.query.order_by:
return True
elif self.query.default_ordering and self.query.get_meta().ordering:
return True
else:
return False
@property
def db(self):
"""Return the database used if this query is executed now."""
if self._for_write:
return self._db or router.db_for_write(self.model, **self._hints)
return self._db or router.db_for_read(self.model, **self._hints)
###################
# PRIVATE METHODS #
###################
    def _insert(self, objs, fields, return_id=False, raw=False, using=None):
        """
        Insert a new record for the given model. This provides an interface to
        the InsertQuery class and is how Model.save() is implemented.
        """
        self._for_write = True
        if using is None:
            using = self.db
        query = sql.InsertQuery(self.model)
        query.insert_values(fields, objs, raw=raw)
        # With return_id=True the compiler hands back the inserted pk(s)
        # (see _batched_insert for how the result is consumed).
        return query.get_compiler(using=using).execute_sql(return_id)
    _insert.alters_data = True
    _insert.queryset_only = False
    def _batched_insert(self, objs, fields, batch_size):
        """
        A helper method for bulk_create() to insert the bulk one batch at a
        time. Iterate over ``objs`` in slices of ``batch_size`` (or the
        backend's limit) and insert each slice, collecting the returned ids
        where the backend supports it.
        """
        if not objs:
            return
        ops = connections[self.db].ops
        # Respect an explicit batch_size; otherwise use the backend's limit,
        # clamped to at least 1 so progress is always made.
        batch_size = (batch_size or max(ops.bulk_batch_size(fields, objs), 1))
        inserted_ids = []
        for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
            if connections[self.db].features.can_return_ids_from_bulk_insert:
                inserted_id = self._insert(item, fields=fields, using=self.db, return_id=True)
                # A single-row batch yields a scalar id, not a list.
                if isinstance(inserted_id, list):
                    inserted_ids.extend(inserted_id)
                else:
                    inserted_ids.append(inserted_id)
            else:
                self._insert(item, fields=fields, using=self.db)
        return inserted_ids
    def _clone(self, **kwargs):
        """
        Return an independent copy of this QuerySet, carrying over query
        state and bookkeeping; ``kwargs`` override attributes on the copy.
        """
        query = self.query.clone()
        if self._sticky_filter:
            query.filter_is_sticky = True
        clone = self.__class__(model=self.model, query=query, using=self._db, hints=self._hints)
        clone._for_write = self._for_write
        clone._prefetch_related_lookups = self._prefetch_related_lookups
        clone._known_related_objects = self._known_related_objects
        clone._iterable_class = self._iterable_class
        clone._fields = self._fields
        # Apply caller-supplied attribute overrides last.
        clone.__dict__.update(kwargs)
        return clone
    def _fetch_all(self):
        """Evaluate the queryset (once) and run any pending prefetches."""
        if self._result_cache is None:
            self._result_cache = list(self._iterable_class(self))
        if self._prefetch_related_lookups and not self._prefetch_done:
            self._prefetch_related_objects()
    def _next_is_sticky(self):
        """
        Indicate that the next filter call and the one following that should
        be treated as a single filter. This is only important when it comes to
        determining when to reuse tables for many-to-many filters. Required so
        that we can filter naturally on the results of related managers.
        This doesn't return a clone of the current QuerySet (it returns
        "self"). The method is only used internally and should be immediately
        followed by a filter() that does create a clone.
        """
        # NOTE(review): the flag is propagated to query.filter_is_sticky in
        # _clone(); presumably consumed by sql.Query when joining -- confirm.
        self._sticky_filter = True
        return self
def _merge_sanity_check(self, other):
"""Check that two QuerySet classes may be merged."""
if self._fields is not None and (
set(self.query.values_select) != set(other.query.values_select) or
set(self.query.extra_select) != set(other.query.extra_select) or
set(self.query.annotation_select) != set(other.query.annotation_select)):
raise TypeError(
"Merging '%s' classes must involve the same values in each case."
% self.__class__.__name__
)
def _merge_known_related_objects(self, other):
"""
Keep track of all known related objects from either QuerySet instance.
"""
for field, objects in other._known_related_objects.items():
self._known_related_objects.setdefault(field, {}).update(objects)
def resolve_expression(self, *args, **kwargs):
if self._fields and len(self._fields) > 1:
# values() queryset can only be used as nested queries
# if they are set up to select only a single field.
if len(self._fields) > 1:
raise TypeError('Cannot use multi-field values as a filter value.')
query = self.query.resolve_expression(*args, **kwargs)
query._db = self._db
return query
resolve_expression.queryset_only = True
    def _add_hints(self, **hints):
        """
        Update hinting information for use by routers. Add new key/values or
        overwrite existing key/values.
        """
        # Hints are forwarded to the database routers via the `db` property.
        self._hints.update(hints)
    def _has_filters(self):
        """
        Check if this QuerySet has any filtering going on. This isn't
        equivalent with checking if all objects are present in results, for
        example, qs[1:]._has_filters() -> False.
        """
        # Delegates entirely to the underlying sql.Query.
        return self.query.has_filters()
class InstanceCheckMeta(type):
    # Metaclass that makes isinstance() checks against classes using it
    # succeed for any QuerySet whose query has been emptied (e.g. .none()).
    def __instancecheck__(self, instance):
        return isinstance(instance, QuerySet) and instance.query.is_empty()
class EmptyQuerySet(metaclass=InstanceCheckMeta):
    """
    Marker class for checking whether a queryset is empty via .none():
    isinstance(qs.none(), EmptyQuerySet) -> True
    """
    def __init__(self, *args, **kwargs):
        # Never meant to be instantiated; exists purely for isinstance().
        raise TypeError("EmptyQuerySet can't be instantiated")
class RawQuerySet:
    """
    Provide an iterator which converts the results of raw SQL queries into
    annotated model instances.
    """
    def __init__(self, raw_query, model=None, query=None, params=None,
                 translations=None, using=None, hints=None):
        # raw_query: the SQL string; params: query parameters;
        # translations: mapping of result column names to model field names.
        self.raw_query = raw_query
        self.model = model
        self._db = using
        self._hints = hints or {}
        self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params)
        self.params = params or ()
        self.translations = translations or {}
    def resolve_model_init_order(self):
        """Resolve the init field names and value positions."""
        converter = connections[self.db].introspection.column_name_converter
        # Model fields that actually appear among the query's columns.
        model_init_fields = [f for f in self.model._meta.fields if converter(f.column) in self.columns]
        # Columns with no matching model field are set as plain attributes.
        annotation_fields = [(column, pos) for pos, column in enumerate(self.columns)
                             if column not in self.model_fields]
        model_init_order = [self.columns.index(converter(f.column)) for f in model_init_fields]
        model_init_names = [f.attname for f in model_init_fields]
        return model_init_names, model_init_order, annotation_fields
    def __iter__(self):
        # Cache some things for performance reasons outside the loop.
        db = self.db
        compiler = connections[db].ops.compiler('SQLCompiler')(
            self.query, connections[db], db
        )
        query = iter(self.query)
        try:
            model_init_names, model_init_pos, annotation_fields = self.resolve_model_init_order()
            # Find out which model's fields are not present in the query.
            skip = set()
            for field in self.model._meta.fields:
                if field.attname not in model_init_names:
                    skip.add(field.attname)
            if skip:
                # Deferred fields are acceptable, a missing pk is not.
                if self.model._meta.pk.attname in skip:
                    raise InvalidQuery('Raw query must include the primary key')
            model_cls = self.model
            fields = [self.model_fields.get(c) for c in self.columns]
            converters = compiler.get_converters([
                f.get_col(f.model._meta.db_table) if f else None for f in fields
            ])
            for values in query:
                if converters:
                    values = compiler.apply_converters(values, converters)
                # Associate fields to values
                model_init_values = [values[pos] for pos in model_init_pos]
                instance = model_cls.from_db(db, model_init_names, model_init_values)
                if annotation_fields:
                    # Extra selected columns become plain instance attributes.
                    for column, pos in annotation_fields:
                        setattr(instance, column, values[pos])
                yield instance
        finally:
            # Done iterating the Query. If it has its own cursor, close it.
            if hasattr(self.query, 'cursor') and self.query.cursor:
                self.query.cursor.close()
    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.query)
    def __getitem__(self, k):
        # Raw querysets don't support efficient slicing: materialize fully.
        return list(self)[k]
    @property
    def db(self):
        """Return the database used if this query is executed now."""
        return self._db or router.db_for_read(self.model, **self._hints)
    def using(self, alias):
        """Select the database this RawQuerySet should execute against."""
        return RawQuerySet(
            self.raw_query, model=self.model,
            query=self.query.clone(using=alias),
            params=self.params, translations=self.translations,
            using=alias,
        )
    @cached_property
    def columns(self):
        """
        A list of model field names in the order they'll appear in the
        query results.
        """
        columns = self.query.get_columns()
        # Adjust any column names which don't match field names
        for (query_name, model_name) in self.translations.items():
            try:
                index = columns.index(query_name)
                columns[index] = model_name
            except ValueError:
                # Ignore translations for nonexistent column names
                pass
        return columns
    @cached_property
    def model_fields(self):
        """A dict mapping column names to model field names."""
        converter = connections[self.db].introspection.table_name_converter
        model_fields = {}
        for field in self.model._meta.fields:
            name, column = field.get_attname_column()
            model_fields[converter(column)] = field
        return model_fields
class Prefetch:
    """
    Describe a prefetch_related() lookup, optionally carrying a custom
    queryset and/or a destination attribute (to_attr) for the results.
    """
    def __init__(self, lookup, queryset=None, to_attr=None):
        # `prefetch_through` is the path we traverse to perform the prefetch.
        self.prefetch_through = lookup
        # `prefetch_to` is the path to the attribute that stores the result.
        self.prefetch_to = lookup
        if queryset is not None and not issubclass(queryset._iterable_class, ModelIterable):
            raise ValueError('Prefetch querysets cannot use values().')
        if to_attr:
            # Replace the final path component with the custom attribute.
            self.prefetch_to = LOOKUP_SEP.join(lookup.split(LOOKUP_SEP)[:-1] + [to_attr])
        self.queryset = queryset
        self.to_attr = to_attr
    def __getstate__(self):
        # Pickle support: ship an unevaluated copy of the queryset.
        obj_dict = self.__dict__.copy()
        if self.queryset is not None:
            # Prevent the QuerySet from being evaluated
            obj_dict['queryset'] = self.queryset._clone(
                _result_cache=[],
                _prefetch_done=True,
            )
        return obj_dict
    def add_prefix(self, prefix):
        # Prepend `prefix` to both the traversal and the storage path.
        self.prefetch_through = LOOKUP_SEP.join([prefix, self.prefetch_through])
        self.prefetch_to = LOOKUP_SEP.join([prefix, self.prefetch_to])
    def get_current_prefetch_to(self, level):
        # The storage path truncated to `level` + 1 components.
        return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[:level + 1])
    def get_current_to_attr(self, level):
        # Return (attribute name at this level, truthy when it is a custom
        # to_attr destination, i.e. the final component with to_attr set).
        parts = self.prefetch_to.split(LOOKUP_SEP)
        to_attr = parts[level]
        as_attr = self.to_attr and level == len(parts) - 1
        return to_attr, as_attr
    def get_current_queryset(self, level):
        # The custom queryset only applies at the final level of the path.
        if self.get_current_prefetch_to(level) == self.prefetch_to:
            return self.queryset
        return None
    def __eq__(self, other):
        # Prefetches are considered equal when they store to the same path.
        if isinstance(other, Prefetch):
            return self.prefetch_to == other.prefetch_to
        return False
    def __hash__(self):
        return hash(self.__class__) ^ hash(self.prefetch_to)
def normalize_prefetch_lookups(lookups, prefix=None):
    """Normalize lookups into Prefetch objects."""
    normalized = []
    for lookup in lookups:
        # Wrap bare strings; leave existing Prefetch instances as-is.
        if not isinstance(lookup, Prefetch):
            lookup = Prefetch(lookup)
        if prefix:
            lookup.add_prefix(prefix)
        normalized.append(lookup)
    return normalized
def prefetch_related_objects(model_instances, *related_lookups):
    """
    Populate prefetched object caches for a list of model instances based on
    the lookups/Prefetch instances given.
    """
    if len(model_instances) == 0:
        return  # nothing to do
    related_lookups = normalize_prefetch_lookups(related_lookups)
    # We need to be able to dynamically add to the list of prefetch_related
    # lookups that we look up (see below).  So we need some book keeping to
    # ensure we don't do duplicate work.
    done_queries = {}    # dictionary of things like 'foo__bar': [results]
    auto_lookups = set()  # we add to this as we go through.
    followed_descriptors = set()  # recursion protection
    all_lookups = deque(related_lookups)
    while all_lookups:
        lookup = all_lookups.popleft()
        if lookup.prefetch_to in done_queries:
            if lookup.queryset:
                raise ValueError("'%s' lookup was already seen with a different queryset. "
                                 "You may need to adjust the ordering of your lookups." % lookup.prefetch_to)
            continue
        # Top level, the list of objects to decorate is the result cache
        # from the primary QuerySet. It won't be for deeper levels.
        obj_list = model_instances
        through_attrs = lookup.prefetch_through.split(LOOKUP_SEP)
        for level, through_attr in enumerate(through_attrs):
            # Prepare main instances
            if len(obj_list) == 0:
                break
            prefetch_to = lookup.get_current_prefetch_to(level)
            if prefetch_to in done_queries:
                # Skip any prefetching, and any object preparation
                obj_list = done_queries[prefetch_to]
                continue
            # Prepare objects:
            good_objects = True
            for obj in obj_list:
                # Since prefetching can re-use instances, it is possible to have
                # the same instance multiple times in obj_list, so obj might
                # already be prepared.
                if not hasattr(obj, '_prefetched_objects_cache'):
                    try:
                        obj._prefetched_objects_cache = {}
                    except (AttributeError, TypeError):
                        # Must be an immutable object from
                        # values_list(flat=True), for example (TypeError) or
                        # a QuerySet subclass that isn't returning Model
                        # instances (AttributeError), either in Django or a 3rd
                        # party. prefetch_related() doesn't make sense, so quit.
                        good_objects = False
                        break
            if not good_objects:
                break
            # Descend down tree
            # We assume that objects retrieved are homogeneous (which is the premise
            # of prefetch_related), so what applies to first object applies to all.
            first_obj = obj_list[0]
            to_attr = lookup.get_current_to_attr(level)[0]
            prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, through_attr, to_attr)
            if not attr_found:
                raise AttributeError("Cannot find '%s' on %s object, '%s' is an invalid "
                                     "parameter to prefetch_related()" %
                                     (through_attr, first_obj.__class__.__name__, lookup.prefetch_through))
            if level == len(through_attrs) - 1 and prefetcher is None:
                # Last one, this *must* resolve to something that supports
                # prefetching, otherwise there is no point adding it and the
                # developer asking for it has made a mistake.
                raise ValueError("'%s' does not resolve to an item that supports "
                                 "prefetching - this is an invalid parameter to "
                                 "prefetch_related()." % lookup.prefetch_through)
            if prefetcher is not None and not is_fetched:
                obj_list, additional_lookups = prefetch_one_level(obj_list, prefetcher, lookup, level)
                # We need to ensure we don't keep adding lookups from the
                # same relationships to stop infinite recursion. So, if we
                # are already on an automatically added lookup, don't add
                # the new lookups from relationships we've seen already.
                if not (lookup in auto_lookups and descriptor in followed_descriptors):
                    done_queries[prefetch_to] = obj_list
                    new_lookups = normalize_prefetch_lookups(additional_lookups, prefetch_to)
                    auto_lookups.update(new_lookups)
                    all_lookups.extendleft(new_lookups)
                followed_descriptors.add(descriptor)
            else:
                # Either a singly related object that has already been fetched
                # (e.g. via select_related), or hopefully some other property
                # that doesn't support prefetching but needs to be traversed.
                # We replace the current list of parent objects with the list
                # of related objects, filtering out empty or missing values so
                # that we can continue with nullable or reverse relations.
                new_obj_list = []
                for obj in obj_list:
                    if through_attr in getattr(obj, '_prefetched_objects_cache', ()):
                        # If related objects have been prefetched, use the
                        # cache rather than the object's through_attr.
                        new_obj = list(obj._prefetched_objects_cache.get(through_attr))
                    else:
                        try:
                            new_obj = getattr(obj, through_attr)
                        except exceptions.ObjectDoesNotExist:
                            continue
                    if new_obj is None:
                        continue
                    # We special-case `list` rather than something more generic
                    # like `Iterable` because we don't want to accidentally match
                    # user models that define __iter__.
                    if isinstance(new_obj, list):
                        new_obj_list.extend(new_obj)
                    else:
                        new_obj_list.append(new_obj)
                obj_list = new_obj_list
def get_prefetcher(instance, through_attr, to_attr):
    """
    For the attribute 'through_attr' on the given instance, find
    an object that has a get_prefetch_queryset().
    Return a 4 tuple containing:
    (the object with get_prefetch_queryset (or None),
    the descriptor object representing this relationship (or None),
    a boolean that is False if the attribute was not found at all,
    a boolean that is True if the attribute has already been fetched)
    """
    prefetcher = None
    is_fetched = False
    # For singly related objects, we have to avoid getting the attribute
    # from the object, as this will trigger the query. So we first try
    # on the class, in order to get the descriptor object.
    rel_obj_descriptor = getattr(instance.__class__, through_attr, None)
    if rel_obj_descriptor is None:
        # No descriptor on the class; fall back to checking the instance
        # itself (e.g. a plain attribute assigned on the object).
        attr_found = hasattr(instance, through_attr)
    else:
        attr_found = True
    if rel_obj_descriptor:
        # singly related object, descriptor object has the
        # get_prefetch_queryset() method.
        if hasattr(rel_obj_descriptor, 'get_prefetch_queryset'):
            prefetcher = rel_obj_descriptor
            if rel_obj_descriptor.is_cached(instance):
                is_fetched = True
        else:
            # descriptor doesn't support prefetching, so we go ahead and get
            # the attribute on the instance rather than the class to
            # support many related managers
            rel_obj = getattr(instance, through_attr)
            if hasattr(rel_obj, 'get_prefetch_queryset'):
                prefetcher = rel_obj
            if through_attr != to_attr:
                # Special case cached_property instances because hasattr
                # triggers attribute computation and assignment.
                if isinstance(getattr(instance.__class__, to_attr, None), cached_property):
                    # cached_property stores its computed value in the
                    # instance __dict__, so presence there means "fetched".
                    is_fetched = to_attr in instance.__dict__
                else:
                    is_fetched = hasattr(instance, to_attr)
            else:
                # Many-related managers record their results in
                # _prefetched_objects_cache under the traversal name.
                is_fetched = through_attr in instance._prefetched_objects_cache
    return prefetcher, rel_obj_descriptor, attr_found, is_fetched
def prefetch_one_level(instances, prefetcher, lookup, level):
    """
    Helper function for prefetch_related_objects().
    Run prefetches on all instances using the prefetcher object,
    assigning results to relevant caches in instance.
    Return the prefetched objects along with any additional prefetches that
    must be done due to prefetch_related lookups found from default managers.
    """
    # prefetcher must have a method get_prefetch_queryset() which takes a list
    # of instances, and returns a tuple:
    # (queryset of instances of self.model that are related to passed in instances,
    # callable that gets value to be matched for returned instances,
    # callable that gets value to be matched for passed in instances,
    # boolean that is True for singly related objects,
    # cache name to assign to).
    # The 'values to be matched' must be hashable as they will be used
    # in a dictionary.
    rel_qs, rel_obj_attr, instance_attr, single, cache_name = (
        prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level)))
    # We have to handle the possibility that the QuerySet we just got back
    # contains some prefetch_related lookups. We don't want to trigger the
    # prefetch_related functionality by evaluating the query. Rather, we need
    # to merge in the prefetch_related lookups.
    # Copy the lookups in case it is a Prefetch object which could be reused
    # later (happens in nested prefetch_related).
    additional_lookups = [
        copy.copy(additional_lookup) for additional_lookup
        in getattr(rel_qs, '_prefetch_related_lookups', ())
    ]
    if additional_lookups:
        # Don't need to clone because the manager should have given us a fresh
        # instance, so we access an internal instead of using public interface
        # for performance reasons.
        rel_qs._prefetch_related_lookups = ()
    all_related_objects = list(rel_qs)
    # Index the fetched related objects by the value they are matched on,
    # so the per-instance loop below is a dict lookup, not a scan.
    rel_obj_cache = {}
    for rel_obj in all_related_objects:
        rel_attr_val = rel_obj_attr(rel_obj)
        rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)
    to_attr, as_attr = lookup.get_current_to_attr(level)
    # Make sure `to_attr` does not conflict with a field.
    if as_attr and instances:
        # We assume that objects retrieved are homogeneous (which is the premise
        # of prefetch_related), so what applies to first object applies to all.
        model = instances[0].__class__
        try:
            model._meta.get_field(to_attr)
        except exceptions.FieldDoesNotExist:
            pass
        else:
            msg = 'to_attr={} conflicts with a field on the {} model.'
            raise ValueError(msg.format(to_attr, model.__name__))
    # Whether or not we're prefetching the last part of the lookup.
    leaf = len(lookup.prefetch_through.split(LOOKUP_SEP)) - 1 == level
    for obj in instances:
        instance_attr_val = instance_attr(obj)
        vals = rel_obj_cache.get(instance_attr_val, [])
        if single:
            val = vals[0] if vals else None
            # When no explicit to_attr was requested, store the result under
            # the field's cache name so normal attribute access finds it.
            to_attr = to_attr if as_attr else cache_name
            setattr(obj, to_attr, val)
        else:
            if as_attr:
                setattr(obj, to_attr, vals)
            else:
                manager = getattr(obj, to_attr)
                if leaf and lookup.queryset is not None:
                    qs = manager._apply_rel_filters(lookup.queryset)
                else:
                    qs = manager.get_queryset()
                qs._result_cache = vals
                # We don't want the individual qs doing prefetch_related now,
                # since we have merged this into the current work.
                qs._prefetch_done = True
                obj._prefetched_objects_cache[cache_name] = qs
    return all_related_objects, additional_lookups
class RelatedPopulator:
    """
    RelatedPopulator is used for select_related() object instantiation.
    The idea is that each select_related() model will be populated by a
    different RelatedPopulator instance. The RelatedPopulator instances get
    klass_info and select (computed in SQLCompiler) plus the used db as
    input for initialization. That data is used to compute which columns
    to use, how to instantiate the model, and how to populate the links
    between the objects.
    The actual creation of the objects is done in populate() method. This
    method gets row and from_obj as input and populates the select_related()
    model instance.
    """
    def __init__(self, klass_info, select, db):
        self.db = db
        # Pre-compute needed attributes. The attributes are:
        # - model_cls: the possibly deferred model class to instantiate
        # - either:
        #   - cols_start, cols_end: usually the columns in the row are
        #     in the same order model_cls.__init__ expects them, so we
        #     can instantiate by model_cls(*row[cols_start:cols_end])
        #   - reorder_for_init: When select_related descends to a child
        #     class, then we want to reuse the already selected parent
        #     data. However, in this case the parent data isn't necessarily
        #     in the same order that Model.__init__ expects it to be, so
        #     we have to reorder the parent data. The reorder_for_init
        #     attribute contains a function used to reorder the field data
        #     in the order __init__ expects it.
        # - pk_idx: the index of the primary key field in the reordered
        #   model data. Used to check if a related object exists at all.
        # - init_list: the field attnames fetched from the database. For
        #   deferred models this isn't the same as all attnames of the
        #   model's fields.
        # - related_populators: a list of RelatedPopulator instances if
        #   select_related() descends to related models from this model.
        # - cache_name, reverse_cache_name: the names to use for setattr
        #   when assigning the fetched object to the from_obj. If the
        #   reverse_cache_name is set, then we also set the reverse link.
        select_fields = klass_info['select_fields']
        from_parent = klass_info['from_parent']
        if not from_parent:
            # Columns arrive in __init__ order: a simple contiguous slice
            # of the row is enough.
            self.cols_start = select_fields[0]
            self.cols_end = select_fields[-1] + 1
            self.init_list = [
                f[0].target.attname for f in select[self.cols_start:self.cols_end]
            ]
            self.reorder_for_init = None
        else:
            # Inherited (child) model: map each selected column back to the
            # position Model.__init__ expects, then sort by that position.
            model_init_attnames = [
                f.attname for f in klass_info['model']._meta.concrete_fields
            ]
            reorder_map = []
            for idx in select_fields:
                field = select[idx][0].target
                init_pos = model_init_attnames.index(field.attname)
                reorder_map.append((init_pos, field.attname, idx))
            reorder_map.sort()
            self.init_list = [v[1] for v in reorder_map]
            pos_list = [row_pos for _, _, row_pos in reorder_map]
            def reorder_for_init(row):
                return [row[row_pos] for row_pos in pos_list]
            self.reorder_for_init = reorder_for_init
        self.model_cls = klass_info['model']
        self.pk_idx = self.init_list.index(self.model_cls._meta.pk.attname)
        self.related_populators = get_related_populators(klass_info, select, self.db)
        field = klass_info['field']
        reverse = klass_info['reverse']
        self.reverse_cache_name = None
        if reverse:
            self.cache_name = field.remote_field.get_cache_name()
            self.reverse_cache_name = field.get_cache_name()
        else:
            self.cache_name = field.get_cache_name()
            if field.unique:
                self.reverse_cache_name = field.remote_field.get_cache_name()
    def populate(self, row, from_obj):
        """Instantiate this populator's model from `row` (recursing into any
        nested populators) and link the result onto `from_obj`."""
        if self.reorder_for_init:
            obj_data = self.reorder_for_init(row)
        else:
            obj_data = row[self.cols_start:self.cols_end]
        # A NULL primary key means the outer join found no related row.
        if obj_data[self.pk_idx] is None:
            obj = None
        else:
            obj = self.model_cls.from_db(self.db, self.init_list, obj_data)
        if obj and self.related_populators:
            for rel_iter in self.related_populators:
                rel_iter.populate(row, obj)
        setattr(from_obj, self.cache_name, obj)
        if obj and self.reverse_cache_name:
            setattr(obj, self.reverse_cache_name, from_obj)
def get_related_populators(klass_info, select, db):
    """Build one RelatedPopulator per related klass_info entry.

    Returns an empty list when the klass_info carries no
    'related_klass_infos' key.
    """
    return [
        RelatedPopulator(rel_klass_info, select, db)
        for rel_klass_info in klass_info.get('related_klass_infos', [])
    ]
| bsd-3-clause |
phergie/phergie | Phergie/Plugin/Ping.php | 3865 | <?php
/**
* Phergie
*
* PHP version 5
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.
* It is also available through the world-wide-web at this URL:
* http://phergie.org/license
*
* @category Phergie
* @package Phergie_Plugin_Ping
* @author Phergie Development Team <team@phergie.org>
* @copyright 2008-2012 Phergie Development Team (http://phergie.org)
* @license http://phergie.org/license New BSD License
* @link http://pear.phergie.org/package/Phergie_Plugin_Ping
*/
/**
* Uses a self CTCP PING to ensure that the client connection has not been
* dropped.
*
* @category Phergie
* @package Phergie_Plugin_Ping
* @author Phergie Development Team <team@phergie.org>
* @license http://phergie.org/license New BSD License
* @link http://pear.phergie.org/package/Phergie_Plugin_Ping
*/
class Phergie_Plugin_Ping extends Phergie_Plugin_Abstract
{
    /**
     * Timestamp for the last instance in which an event was received
     *
     * @var int
     */
    protected $lastEvent;

    /**
     * Timestamp for the last instance in which a PING was sent
     *
     * @var int
     */
    protected $lastPing;

    /**
     * Initialize event timestamps upon connecting to the server.
     *
     * @return void
     */
    public function onConnect()
    {
        $this->lastEvent = time();
        $this->lastPing = null;
    }

    /**
     * Updates the timestamp since the last received event when a new event
     * arrives. Also, clears the ping time as well.
     *
     * @return void
     */
    public function preEvent()
    {
        $this->lastEvent = time();
        $this->lastPing = null;
    }

    /**
     * Performs a self ping if the event threshold has been exceeded or
     * issues a termination command if the ping threshold has been exceeded.
     *
     * @return void
     */
    public function onTick()
    {
        $time = time();
        if (!empty($this->lastPing)) {
            // A ping is outstanding; quit if it has gone unanswered too long.
            if ($time - $this->lastPing > $this->getConfig('ping.ping', 20)) {
                $this->doQuit();
            }
        } elseif (
            $time - $this->lastEvent > $this->getConfig('ping.event', 300)
        ) {
            // Connection has been quiet too long; ping ourselves to probe it.
            $this->lastPing = $time;
            $this->doPing($this->getConnection()->getNick(), $this->lastPing);
        }
    }

    /**
     * Gets the last ping time
     * lastPing needs exposing for things such as unit testing
     *
     * @return int timestamp of last ping
     */
    public function getLastPing()
    {
        return $this->lastPing;
    }

    /**
     * Set the last ping time
     * lastPing needs to be exposed for unit testing
     *
     * @param int|null $ping timestamp of last ping (null uses current time)
     *
     * @return self
     */
    public function setLastPing($ping = null)
    {
        if (null === $ping) {
            $ping = time();
        }
        if (!is_int($ping)) {
            throw new InvalidArgumentException('$ping must be an integer or null');
        }
        $this->lastPing = $ping;
        return $this;
    }

    /**
     * Gets the last event time
     * lastEvent needs exposing for things such as unit testing
     *
     * @return int timestamp of last event
     */
    public function getLastEvent()
    {
        return $this->lastEvent;
    }

    /**
     * Set the last event time
     * lastEvent needs to be exposed for unit testing
     *
     * @param int|null $event timestamp of last event (null uses current time)
     *
     * @return self
     */
    public function setLastEvent($event = null)
    {
        if (null === $event) {
            $event = time();
        }
        if (!is_int($event)) {
            // Bug fix: message previously referenced $ping (copy-paste error).
            throw new InvalidArgumentException('$event must be an integer or null');
        }
        $this->lastEvent = $event;
        return $this;
    }
}
| bsd-3-clause |
exocad/exotrac | trac/upgrades/db17.py | 1475 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2013 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
from trac.db import Table, Column, Index, DatabaseManager
def do_upgrade(env, ver, cursor):
    """Rename the columns `kind` and `change` in the `node_change` table for
    compatibity with MySQL.
    """
    # Stash the existing rows so the table can be rebuilt.
    cursor.execute("CREATE TEMPORARY TABLE nc_old AS SELECT * FROM node_change")
    cursor.execute("DROP TABLE node_change")

    # Same layout as before, with the two columns renamed.
    new_table = Table('node_change', key=('rev', 'path', 'change_type'))[
        Column('rev'),
        Column('path'),
        Column('node_type', size=1),
        Column('change_type', size=1),
        Column('base_path'),
        Column('base_rev'),
        Index(['rev'])
    ]
    connector, _ = DatabaseManager(env).get_connector()
    for statement in connector.to_sql(new_table):
        cursor.execute(statement)

    # Copy the data back, mapping kind -> node_type and change -> change_type.
    cursor.execute("INSERT INTO node_change (rev,path,node_type,change_type,"
                   "base_path,base_rev) SELECT rev,path,kind,change,"
                   "base_path,base_rev FROM nc_old")
    cursor.execute("DROP TABLE nc_old")
| bsd-3-clause |
Sampa/PP | vendor/yiisoft/yii2/db/sqlite/Schema.php | 8310 | <?php
/**
* @link http://www.yiiframework.com/
* @copyright Copyright (c) 2008 Yii Software LLC
* @license http://www.yiiframework.com/license/
*/
namespace yii\db\sqlite;
use yii\db\TableSchema;
use yii\db\ColumnSchema;
/**
* Schema is the class for retrieving metadata from a SQLite (2/3) database.
*
* @author Qiang Xue <qiang.xue@gmail.com>
* @since 2.0
*/
class Schema extends \yii\db\Schema
{
    /**
     * @var array mapping from physical column types (keys) to abstract column types (values)
     */
    public $typeMap = [
        'tinyint' => self::TYPE_SMALLINT,
        'bit' => self::TYPE_SMALLINT,
        'boolean' => self::TYPE_BOOLEAN,
        'bool' => self::TYPE_BOOLEAN,
        'smallint' => self::TYPE_SMALLINT,
        'mediumint' => self::TYPE_INTEGER,
        'int' => self::TYPE_INTEGER,
        'integer' => self::TYPE_INTEGER,
        'bigint' => self::TYPE_BIGINT,
        'float' => self::TYPE_FLOAT,
        'double' => self::TYPE_FLOAT,
        'real' => self::TYPE_FLOAT,
        'decimal' => self::TYPE_DECIMAL,
        'numeric' => self::TYPE_DECIMAL,
        'tinytext' => self::TYPE_TEXT,
        'mediumtext' => self::TYPE_TEXT,
        'longtext' => self::TYPE_TEXT,
        'text' => self::TYPE_TEXT,
        'varchar' => self::TYPE_STRING,
        'string' => self::TYPE_STRING,
        'char' => self::TYPE_STRING,
        'datetime' => self::TYPE_DATETIME,
        'year' => self::TYPE_DATE,
        'date' => self::TYPE_DATE,
        'time' => self::TYPE_TIME,
        'timestamp' => self::TYPE_TIMESTAMP,
        'enum' => self::TYPE_STRING,
    ];

    /**
     * Quotes a table name for use in a query.
     * A simple table name has no schema prefix.
     * Names already containing a backtick are returned unchanged.
     * @param string $name table name
     * @return string the properly quoted table name
     */
    public function quoteSimpleTableName($name)
    {
        return strpos($name, "`") !== false ? $name : "`" . $name . "`";
    }

    /**
     * Quotes a column name for use in a query.
     * A simple column name has no prefix.
     * Names already quoted and the '*' wildcard are returned unchanged.
     * @param string $name column name
     * @return string the properly quoted column name
     */
    public function quoteSimpleColumnName($name)
    {
        return strpos($name, '`') !== false || $name === '*' ? $name : '`' . $name . '`';
    }

    /**
     * Creates a query builder for the SQLite database.
     * This method may be overridden by child classes to create a DBMS-specific query builder.
     * @return QueryBuilder query builder instance
     */
    public function createQueryBuilder()
    {
        return new QueryBuilder($this->db);
    }

    /**
     * Returns all table names in the database.
     * The internal `sqlite_sequence` bookkeeping table is excluded.
     * @param string $schema the schema of the tables. Defaults to empty string, meaning the current or default schema.
     * @return array all table names in the database. The names have NO schema name prefix.
     */
    protected function findTableNames($schema = '')
    {
        $sql = "SELECT DISTINCT tbl_name FROM sqlite_master WHERE tbl_name<>'sqlite_sequence'";
        return $this->db->createCommand($sql)->queryColumn();
    }

    /**
     * Loads the metadata for the specified table.
     * @param string $name table name
     * @return TableSchema driver dependent table metadata. Null if the table does not exist.
     */
    protected function loadTableSchema($name)
    {
        $table = new TableSchema;
        $table->name = $name;
        $table->fullName = $name;
        // A table with no columns reported by PRAGMA table_info does not exist.
        if ($this->findColumns($table)) {
            $this->findConstraints($table);
            return $table;
        } else {
            return null;
        }
    }

    /**
     * Collects the table column metadata.
     * @param TableSchema $table the table metadata
     * @return boolean whether the table exists in the database
     */
    protected function findColumns($table)
    {
        $sql = "PRAGMA table_info(" . $this->quoteSimpleTableName($table->name) . ')';
        $columns = $this->db->createCommand($sql)->queryAll();
        if (empty($columns)) {
            return false;
        }
        foreach ($columns as $info) {
            $column = $this->loadColumnSchema($info);
            $table->columns[$column->name] = $column;
            if ($column->isPrimaryKey) {
                $table->primaryKey[] = $column->name;
            }
        }
        // A single integer primary key aliases SQLite's rowid and
        // therefore auto-increments.
        if (count($table->primaryKey) === 1 && !strncasecmp($table->columns[$table->primaryKey[0]]->dbType, 'int', 3)) {
            $table->sequenceName = '';
            $table->columns[$table->primaryKey[0]]->autoIncrement = true;
        }
        return true;
    }

    /**
     * Collects the foreign key column details for the given table.
     * @param TableSchema $table the table metadata
     */
    protected function findConstraints($table)
    {
        $sql = "PRAGMA foreign_key_list(" . $this->quoteSimpleTableName($table->name) . ')';
        $keys = $this->db->createCommand($sql)->queryAll();
        foreach ($keys as $key) {
            $id = (int) $key['id'];
            if (!isset($table->foreignKeys[$id])) {
                $table->foreignKeys[$id] = [$key['table'], $key['from'] => $key['to']];
            } else {
                // composite FK
                $table->foreignKeys[$id][$key['from']] = $key['to'];
            }
        }
    }

    /**
     * Returns all unique indexes for the given table.
     * Each array element is of the following structure:
     *
     * ~~~
     * [
     *     'IndexName1' => ['col1' [, ...]],
     *     'IndexName2' => ['col2' [, ...]],
     * ]
     * ~~~
     *
     * @param TableSchema $table the table metadata
     * @return array all unique indexes for the given table.
     */
    public function findUniqueIndexes($table)
    {
        $sql = "PRAGMA index_list(" . $this->quoteSimpleTableName($table->name) . ')';
        $indexes = $this->db->createCommand($sql)->queryAll();
        $uniqueIndexes = [];
        foreach ($indexes as $index) {
            $indexName = $index['name'];
            $indexInfo = $this->db->createCommand("PRAGMA index_info(" . $this->quoteValue($index['name']) . ")")->queryAll();
            if ($index['unique']) {
                $uniqueIndexes[$indexName] = [];
                foreach ($indexInfo as $row) {
                    $uniqueIndexes[$indexName][] = $row['name'];
                }
            }
        }
        return $uniqueIndexes;
    }

    /**
     * Loads the column information into a [[ColumnSchema]] object.
     * @param array $info column information (one row of PRAGMA table_info output)
     * @return ColumnSchema the column schema object
     */
    protected function loadColumnSchema($info)
    {
        $column = new ColumnSchema;
        $column->name = $info['name'];
        $column->allowNull = !$info['notnull'];
        $column->isPrimaryKey = $info['pk'] != 0;
        $column->dbType = $info['type'];
        $column->unsigned = strpos($column->dbType, 'unsigned') !== false;
        $column->type = self::TYPE_STRING;
        // Split the declared type into base name and optional size/scale,
        // e.g. "decimal(10,2)" => "decimal" and "10,2".
        if (preg_match('/^(\w+)(?:\(([^\)]+)\))?/', $column->dbType, $matches)) {
            $type = strtolower($matches[1]);
            if (isset($this->typeMap[$type])) {
                $column->type = $this->typeMap[$type];
            }
            if (!empty($matches[2])) {
                $values = explode(',', $matches[2]);
                $column->size = $column->precision = (int) $values[0];
                if (isset($values[1])) {
                    $column->scale = (int) $values[1];
                }
                // Width-based refinements: tinyint(1)/bit(1) act as boolean,
                // wider bit columns map to integer or bigint.
                if ($column->size === 1 && ($type === 'tinyint' || $type === 'bit')) {
                    $column->type = 'boolean';
                } elseif ($type === 'bit') {
                    if ($column->size > 32) {
                        $column->type = 'bigint';
                    } elseif ($column->size === 32) {
                        $column->type = 'integer';
                    }
                }
            }
        }
        $column->phpType = $this->getColumnPhpType($column);
        // Defaults come back quoted from SQLite; strip the quotes and treat
        // a literal NULL default as PHP null.
        $value = trim($info['dflt_value'], "'\"");
        if ($column->type === 'string') {
            $column->defaultValue = $value;
        } else {
            $column->defaultValue = $column->typecast(strcasecmp($value, 'null') ? $value : null);
        }
        return $column;
    }
}
| bsd-3-clause |
DasIch/django | django/contrib/admin/static/admin/js/change_form.js | 1756 | /*global showAddAnotherPopup, showRelatedObjectLookupPopup showRelatedObjectPopup updateRelatedObjectLinks*/
(function($) {
'use strict';
$(document).ready(function() {
var modelName = $('#django-admin-form-add-constants').data('modelName');
$('.add-another').click(function(e) {
e.preventDefault();
var event = $.Event('django:add-another-related');
$(this).trigger(event);
if (!event.isDefaultPrevented()) {
showAddAnotherPopup(this);
}
});
$('.related-lookup').click(function(e) {
e.preventDefault();
var event = $.Event('django:lookup-related');
$(this).trigger(event);
if (!event.isDefaultPrevented()) {
showRelatedObjectLookupPopup(this);
}
});
$('body').on('click', '.related-widget-wrapper-link', function(e) {
e.preventDefault();
if (this.href) {
var event = $.Event('django:show-related', {href: this.href});
$(this).trigger(event);
if (!event.isDefaultPrevented()) {
showRelatedObjectPopup(this);
}
}
});
$('body').on('change', '.related-widget-wrapper select', function(e) {
var event = $.Event('django:update-related');
$(this).trigger(event);
if (!event.isDefaultPrevented()) {
updateRelatedObjectLinks(this);
}
});
$('.related-widget-wrapper select').trigger('change');
if (modelName) {
$('form#' + modelName + '_form :input:visible:enabled:first').focus();
}
});
})(django.jQuery);
| bsd-3-clause |
michaelrice/yavijava | src/main/java/com/vmware/vim25/DrsResourceConfigureFailedEvent.java | 2050 | /*================================================================================
Copyright (c) 2013 Steve Jin. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of VMware, Inc. nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================================================*/
package com.vmware.vim25;
/**
 * Event type indicating that configuring DRS resources on a host failed;
 * carries the fault describing the failure.
 *
 * @author Steve Jin (http://www.doublecloud.org)
 * @version 5.1
 */
@SuppressWarnings("all")
public class DrsResourceConfigureFailedEvent extends HostEvent {
    /** Fault describing why the DRS resource configuration failed. */
    public LocalizedMethodFault reason;

    /**
     * @return the fault describing why the configuration failed
     */
    public LocalizedMethodFault getReason() {
        return this.reason;
    }

    /**
     * @param reason the fault describing why the configuration failed
     */
    public void setReason(LocalizedMethodFault reason) {
        this.reason = reason;
    }
}
qtmdev/u2f-ref-code | u2f-chrome-extension/messagetypes.js | 572 | // Copyright 2014 Google Inc. All rights reserved
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
/**
* @fileoverview U2F message types.
*/
'use strict';

/**
 * Message types for messages to/from the extension.
 * Each *_REQUEST type has a matching *_RESPONSE type sent back to the caller.
 * @const
 * @enum {string}
 */
var MessageTypes = {
  U2F_REGISTER_REQUEST: 'u2f_register_request',
  U2F_SIGN_REQUEST: 'u2f_sign_request',
  U2F_REGISTER_RESPONSE: 'u2f_register_response',
  U2F_SIGN_RESPONSE: 'u2f_sign_response'
};
fregaham/KiWi | src/action/kiwi/api/user/MailServiceLocal.java | 1913 | /*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2008-2009, The KiWi Project (http://www.kiwi-project.eu)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the KiWi Project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* Contributor(s):
*
*
*/
package kiwi.api.user;
import javax.ejb.Local;
/**
 * Local EJB business interface for the mail service. Declares no methods of
 * its own; it only re-exposes {@link MailService} through the container's
 * local view (see {@link javax.ejb.Local}).
 *
 * @author Stephanie Stroka
 *         (stephanie.stroka@salzburgresearch.at)
 *
 */
@Local
public interface MailServiceLocal extends MailService {

}
| bsd-3-clause |
devilcoders/solidus | core/app/models/spree/payment.rb | 10552 | module Spree
class Payment < Spree::Base
include Spree::Payment::Processing
# Characters allowed in generated payment identifiers; visually ambiguous
# characters (0, 1, I, O) are excluded.
IDENTIFIER_CHARS = (('A'..'Z').to_a + ('0'..'9').to_a - %w(0 1 I O)).freeze
NON_RISKY_AVS_CODES = ['B', 'D', 'H', 'J', 'M', 'Q', 'T', 'V', 'X', 'Y'].freeze
RISKY_AVS_CODES = ['A', 'C', 'E', 'F', 'G', 'I', 'K', 'L', 'N', 'O', 'P', 'R', 'S', 'U', 'W', 'Z'].freeze

# Associations
belongs_to :order, class_name: 'Spree::Order', touch: true, inverse_of: :payments
belongs_to :source, polymorphic: true
belongs_to :payment_method, class_name: 'Spree::PaymentMethod', inverse_of: :payments

has_many :offsets, -> { offset_payment }, class_name: "Spree::Payment", foreign_key: :source_id
has_many :log_entries, as: :source
has_many :state_changes, as: :stateful
has_many :capture_events, :class_name => 'Spree::PaymentCaptureEvent'
has_many :refunds, inverse_of: :payment

# Lifecycle callbacks
before_validation :validate_source, unless: :invalid?
before_create :set_unique_identifier
after_save :create_payment_profile, if: :profiles_supported?

# update the order totals, etc.
after_save :update_order

after_create :create_eligible_credit_event

# invalidate previously entered payments
after_create :invalidate_old_payments

attr_accessor :source_attributes, :request_env
after_initialize :build_source

validates :amount, numericality: true
validates :source, presence: true, if: :source_required?

default_scope -> { order(:created_at) }

scope :from_credit_card, -> { where(source_type: 'Spree::CreditCard') }
scope :with_state, ->(s) { where(state: s.to_s) }
# "offset" is reserved by activerecord
scope :offset_payment, -> { where("source_type = 'Spree::Payment' AND amount < 0 AND state = 'completed'") }
scope :checkout, -> { with_state('checkout') }
scope :completed, -> { with_state('completed') }
scope :pending, -> { with_state('pending') }
scope :processing, -> { with_state('processing') }
scope :failed, -> { with_state('failed') }
scope :risky, -> { where("avs_response IN (?) OR (cvv_response_code IS NOT NULL and cvv_response_code != 'M') OR state = 'failed'", RISKY_AVS_CODES) }
scope :valid, -> { where.not(state: %w(failed invalid)) }
scope :store_credits, -> { where(source_type: Spree::StoreCredit.to_s) }
scope :not_store_credits, -> { where(arel_table[:source_type].not_eq(Spree::StoreCredit.to_s).or(arel_table[:source_type].eq(nil))) }
# order state machine (see http://github.com/pluginaweek/state_machine/tree/master for details)
state_machine initial: :checkout do
  # With card payments, happens before purchase or authorization happens
  #
  # Setting it after creating a profile and authorizing a full amount will
  # prevent the payment from being authorized again once Order transitions
  # to complete
  event :started_processing do
    transition from: [:checkout, :pending, :completed, :processing], to: :processing
  end
  # When processing during checkout fails
  event :failure do
    transition from: [:pending, :processing], to: :failed
  end
  # With card payments this represents authorizing the payment
  event :pend do
    transition from: [:checkout, :processing], to: :pending
  end
  # With card payments this represents completing a purchase or capture transaction
  event :complete do
    transition from: [:processing, :pending, :checkout], to: :completed
  end
  event :void do
    transition from: [:pending, :processing, :completed, :checkout], to: :void
  end
  # when the card brand isnt supported
  event :invalidate do
    transition from: [:checkout], to: :invalid
  end
  # Record every state transition as an audit-trail row in state_changes.
  after_transition do |payment, transition|
    payment.state_changes.create!(
      previous_state: transition.from,
      next_state: transition.to,
      name: 'payment',
    )
  end
end
# @return [String] this payment's response code
def transaction_id
  response_code
end

# @return [String] this payment's currency
delegate :currency, to: :order

# @return [Spree::Money] this amount of this payment as money object
def money
  Spree::Money.new(amount, { currency: currency })
end
alias display_amount money

# Sets the amount, parsing it based on i18n settings if it is a string.
#
# @param amount [BigDecimal, String] the desired new amount
def amount=(amount)
  self[:amount] =
    case amount
    when String
      # Keep digits, sign, the locale's separator and dots; then normalize
      # the locale separator to a plain decimal point before converting.
      separator = I18n.t('number.currency.format.separator')
      number = amount.delete("^0-9-#{separator}\.").tr(separator, '.')
      number.to_d if number.present?
    end || amount
end

# The total amount of the offsets (for old-style refunds) for this payment.
#
# @return [BigDecimal] the total amount of this payment's offsets
def offsets_total
  offsets.pluck(:amount).sum
end

# The total amount this payment can be credited.
#
# @return [BigDecimal] the amount of this payment minus the offsets
# (old-style refunds) and refunds
def credit_allowed
  amount - (offsets_total.abs + refunds.sum(:amount))
end

# @return [Boolean] true when this payment can be credited
def can_credit?
  credit_allowed > 0
end
# When this is a new record without a source, builds a new source based on
# this payment's payment method and associates it correctly.
#
# @see https://github.com/spree/spree/issues/981
def build_source
return unless new_record?
if source_attributes.present? && source.blank? && payment_method.try(:payment_source_class)
self.source = payment_method.payment_source_class.new(source_attributes)
self.source.payment_method_id = payment_method.id
self.source.user_id = self.order.user_id if self.order
end
end
# @return [Array<String>] the actions available on this payment
def actions
sa = source_actions
sa |= ["failure"] if processing?
sa
end
# @return [Object] the source of this payment. When the source is itself
#   a Payment (e.g. a refunding payment), unwrap it one level; fall back
#   to the payment method when no concrete source exists.
def payment_source
  if source.is_a?(Payment)
    source.source || payment_method
  else
    source || payment_method
  end
end
# @return [Boolean] true when this payment is risky based on the address
#   verification (AVS) response: a response is present and not in the
#   known-safe code list
def is_avs_risky?
  avs_response.present? && !NON_RISKY_AVS_CODES.include?(avs_response)
end
# @return [Boolean] true when this payment is risky based on the CVV
#   check: risky unless the gateway matched ("M"), gave no code, or
#   supplied an explanatory message
def is_cvv_risky?
  return false if cvv_response_code == "M" || cvv_response_code.nil?
  cvv_response_message.blank?
end
# @return [BigDecimal] the total amount captured on this payment — the
#   sum of all recorded capture events
def captured_amount
  capture_events.sum(:amount)
end
# @return [BigDecimal] the total amount left uncaptured on this payment
#   (authorized amount minus the sum of capture events)
def uncaptured_amount
  amount - captured_amount
end
# @return [Boolean] true when the payment method exists and is a store
#   credit payment method. +try!+ yields nil (falsy) when payment_method
#   is absent.
def store_credit?
  payment_method.try!(:store_credit?)
end
private
# Actions advertised by the payment source itself, filtered down to the
# ones whose +can_<action>?+ predicate (when the source defines one)
# allows for this payment.
def source_actions
  return [] unless payment_source && payment_source.respond_to?(:actions)
  payment_source.actions.select do |action|
    !payment_source.respond_to?("can_#{action}?") ||
      payment_source.send("can_#{action}?", self)
  end
end
# Copies any validation errors from the associated source onto this
# payment so they all surface in one place.
#
# @return [Boolean] true when no errors are present
def validate_source
  if source && !source.valid?
    source.errors.each do |field, error|
      # Translate the source's attribute name into a human-readable label.
      field_name = I18n.t("activerecord.attributes.#{source.class.to_s.underscore}.#{field}")
      self.errors.add(Spree.t(source.class.to_s.demodulize.underscore), "#{field_name} #{error}")
    end
  end
  return !errors.present?
end
# @return [Boolean] whether the payment method is present and requires a
#   payment source (e.g. a credit card) to be attached
def source_required?
  payment_method.present? && payment_method.source_required?
end
# @return [Boolean] whether the payment method supports storing payment
#   profiles. Guarded with respond_to? so gateways that never define the
#   predicate are treated as unsupported.
def profiles_supported?
  payment_method.respond_to?(:payment_profiles_supported?) && payment_method.payment_profiles_supported?
end
# Asks the payment method to store a reusable payment profile for this
# payment's source (e.g. a tokenized card).
def create_payment_profile
  # Don't attempt to create on bad payments.
  return if %w(invalid failed).include?(state)
  # Payment profile cannot be created without source
  return unless source
  # Imported payments shouldn't create a payment profile.
  return if source.imported
  payment_method.create_profile(self)
rescue ActiveMerchant::ConnectionError => e
  # NOTE(review): connection failures are routed through +gateway_error+
  # instead of being re-raised directly — confirm this is the desired
  # error handling.
  gateway_error e
end
# Moves every other checkout-state payment on the same order and payment
# method into the invalid state, leaving this payment as the active one.
# Store credits and already invalid/failed payments are left alone.
def invalidate_old_payments
  return if store_credit?
  return if ['invalid', 'failed'].include?(state)
  stale = order.payments.checkout.where(payment_method: payment_method).where("id != ?", self.id)
  stale.each do |payment|
    payment.invalidate!
  end
end
# Pushes this payment's state change into the order: refreshes the
# payment total, the derived order states, and persists the totals.
def update_order
  order.updater.update_payment_total if completed? || void?

  if order.completed?
    order.updater.update_payment_state
    order.updater.update_shipments
    order.updater.update_shipment_state
  end

  order.persist_totals if completed? || order.completed?
end
# Assigns a collision-free random identifier to this payment.
#
# Some payment gateways refuse payments that reuse an ID. The order number
# is set once and never changes, so each payment needs its own unique
# per-payment handle instead. Related to #1998.
# See https://github.com/spree/spree/issues/1998#issuecomment-12869105
def set_unique_identifier
  loop do
    self.identifier = generate_identifier
    break unless self.class.exists?(identifier: identifier)
  end
end
# @return [String] a random 8-character identifier drawn from
#   IDENTIFIER_CHARS
def generate_identifier
  (1..8).map { IDENTIFIER_CHARS.sample }.join
end
# Records an "eligible" action (with amount and authorization code) on the
# backing store credit so its audit trail reflects this payment.
def create_eligible_credit_event
  # When cancelling an order, a payment with the negative amount
  # of the payment total is created to refund the customer. That
  # payment has a source of itself (Spree::Payment) no matter the
  # type of payment getting refunded, hence the additional check
  # if the source is a store credit.
  if store_credit? && source.is_a?(Spree::StoreCredit)
    source.update_attributes!({
      action: Spree::StoreCredit::ELIGIBLE_ACTION,
      action_amount: amount,
      action_authorization_code: response_code,
    })
  end
end
end
end
| bsd-3-clause |
quyen/site_announcements | app/models/site_announcement_category.rb | 88 | class SiteAnnouncementCategory < ActiveRecord::Base
# Announcements filed under this category.
has_many :site_announcements
end
| bsd-3-clause |
taydakov/cefgluesimpleajax | CefGlue/Classes.Handlers/CefFocusHandler.cs | 2277 | namespace Xilium.CefGlue
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Xilium.CefGlue.Interop;
/// <summary>
/// Implement this interface to handle events related to focus. The methods of
/// this class will be called on the UI thread.
/// </summary>
public abstract unsafe partial class CefFocusHandler
{
    // Native thunk: validates the handler pointer, wraps the raw browser
    // pointer in a managed CefBrowser and forwards to OnTakeFocus.
    private void on_take_focus(cef_focus_handler_t* self, cef_browser_t* browser, int next)
    {
        CheckSelf(self);

        var m_browser = CefBrowser.FromNative(browser);

        OnTakeFocus(m_browser, next != 0);
    }

    /// <summary>
    /// Called when the browser component is about to loose focus. For instance, if
    /// focus was on the last HTML element and the user pressed the TAB key. |next|
    /// will be true if the browser is giving focus to the next component and false
    /// if the browser is giving focus to the previous component.
    /// </summary>
    protected virtual void OnTakeFocus(CefBrowser browser, bool next)
    {
    }

    // Native thunk for OnSetFocus; converts the managed bool result back to
    // the 0/1 int the native layer expects.
    private int on_set_focus(cef_focus_handler_t* self, cef_browser_t* browser, CefFocusSource source)
    {
        CheckSelf(self);

        var m_browser = CefBrowser.FromNative(browser);

        return OnSetFocus(m_browser, source) ? 1 : 0;
    }

    /// <summary>
    /// Called when the browser component is requesting focus. |source| indicates
    /// where the focus request is originating from. Return false to allow the
    /// focus to be set or true to cancel setting the focus.
    /// </summary>
    protected virtual bool OnSetFocus(CefBrowser browser, CefFocusSource source)
    {
        return false;
    }

    // Native thunk: forwards the focus-gained notification to OnGotFocus.
    private void on_got_focus(cef_focus_handler_t* self, cef_browser_t* browser)
    {
        CheckSelf(self);

        var m_browser = CefBrowser.FromNative(browser);

        OnGotFocus(m_browser);
    }

    /// <summary>
    /// Called when the browser component has received focus.
    /// </summary>
    protected virtual void OnGotFocus(CefBrowser browser)
    {
    }
}
}
| mit |
v5developer/maven-framework-project | hibernate-tutorials/hibernate-inheritance-table-per-concrete-class-xml-example/src/main/java/org/hibernate/inheritance/table/per/concrete/xml/example/Owner.java | 671 | package org.hibernate.inheritance.table.per.concrete.xml.example;
/**
 * Example entity for the Hibernate "table per concrete class" inheritance
 * mapping: an owner is a {@link Person} who additionally holds stocks and a
 * partnership stake.
 */
public class Owner extends Person {

    /** Number of stocks held by this owner. */
    private Long stocks;

    /** This owner's stake in the partnership. */
    private Long partnershipStake;

    /** No-argument constructor, needed by persistence frameworks such as Hibernate. */
    public Owner() {
    }

    public Owner(String firstname, String lastname, Long stocks, Long partnershipStake) {
        super(firstname, lastname);
        this.stocks = stocks;
        this.partnershipStake = partnershipStake;
    }

    public Long getStocks() {
        return stocks;
    }

    public void setStocks(Long stocks) {
        this.stocks = stocks;
    }

    public Long getPartnershipStake() {
        return partnershipStake;
    }

    public void setPartnershipStake(Long partnershipStake) {
        this.partnershipStake = partnershipStake;
    }
}
| mit |
EstebanVerbel/XamarinCert | 5_ResourcesAndStyles/1_TipCalculator/TipCalculator/TipCalculator.Droid/MainActivity.cs | 609 | using System;
using Android.App;
using Android.Content.PM;
using Android.Runtime;
using Android.Views;
using Android.Widget;
using Android.OS;
namespace TipCalculator.Droid
{
[Activity(Label = "TipCalculator", Icon = "@drawable/icon", MainLauncher = true, ConfigurationChanges = ConfigChanges.ScreenSize | ConfigChanges.Orientation)]
public class MainActivity : global::Xamarin.Forms.Platform.Android.FormsApplicationActivity
{
    // Android entry-point activity: boots the Xamarin.Forms runtime and
    // hands control to the shared cross-platform App class.
    protected override void OnCreate(Bundle bundle)
    {
        base.OnCreate(bundle);

        // Forms.Init must run before any Forms page is created.
        global::Xamarin.Forms.Forms.Init(this, bundle);
        LoadApplication(new App());
    }
}
}
| mit |
enclose-io/compiler | lts/test/parallel/test-async-hooks-http-agent.js | 2919 | 'use strict';
// Flags: --expose-internals
const common = require('../common');
const assert = require('assert');
const { async_id_symbol } = require('internal/async_hooks').symbols;
const http = require('http');

// Regression test for https://github.com/nodejs/node/issues/13325
// Checks that an http.Agent properly asyncReset()s a reused socket handle, and
// re-assigns the fresh async id to the reused `net.Socket` instance.

// Make sure a single socket is transparently reused for 2 requests.
const agent = new http.Agent({
  keepAlive: true,
  keepAliveMsecs: Infinity,
  maxSockets: 1
});

// The server handler is expected to run exactly twice (once per request).
const server = http.createServer(common.mustCall((req, res) => {
  req.once('data', common.mustCallAtLeast(() => {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.write('foo');
  }));
  req.on('end', common.mustCall(() => {
    res.end('bar');
  }));
}, 2)).listen(0, common.mustCall(() => {
  const port = server.address().port;
  const payload = 'hello world';

  // First request. This is useless except for adding a socket to the
  // agent’s pool for reuse.
  const r1 = http.request({
    agent, port, method: 'POST'
  }, common.mustCall((res) => {
    // Remember which socket we used.
    const socket = res.socket;
    const asyncIdAtFirstRequest = socket[async_id_symbol];
    assert.ok(asyncIdAtFirstRequest > 0, `${asyncIdAtFirstRequest} > 0`);

    // Check that request and response share their socket.
    assert.strictEqual(r1.socket, socket);

    res.on('data', common.mustCallAtLeast(() => {}));
    res.on('end', common.mustCall(() => {
      // setImmediate() to give the agent time to register the freed socket.
      setImmediate(common.mustCall(() => {
        // The socket is free for reuse now.
        assert.strictEqual(socket[async_id_symbol], -1);

        // Second request. To re-create the exact conditions from the
        // referenced issue, we use a POST request without chunked encoding
        // (hence the Content-Length header) and call .end() after the
        // response header has already been received.
        const r2 = http.request({
          agent, port, method: 'POST', headers: {
            'Content-Length': payload.length
          }
        }, common.mustCall((res) => {
          // A fresh, positive async id proves the reused handle was
          // asyncReset() and re-assigned (the actual regression check).
          const asyncId = res.socket[async_id_symbol];
          assert.ok(asyncId > 0, `${asyncId} > 0`);
          assert.strictEqual(r2.socket, socket);
          // Empty payload, to hit the “right” code path.
          r2.end('');

          res.on('data', common.mustCallAtLeast(() => {}));
          res.on('end', common.mustCall(() => {
            // Clean up to let the event loop stop.
            server.close();
            agent.destroy();
          }));
        }));

        // Schedule a payload to be written immediately, but do not end the
        // request just yet.
        r2.write(payload);
      }));
    }));
  }));
  r1.end(payload);
}));
| mit |
slamperone/consolidador | assets/bower/perfect-scrollbar/src/js/lib/helper.js | 2163 | 'use strict';
var cls = require('./class')
, d = require('./dom');
exports.toInt = function (x) {
return parseInt(x, 10) || 0;
};
exports.clone = function (obj) {
if (obj === null) {
return null;
} else if (typeof obj === 'object') {
var result = {};
for (var key in obj) {
result[key] = this.clone(obj[key]);
}
return result;
} else {
return obj;
}
};
exports.extend = function (original, source) {
var result = this.clone(original);
for (var key in source) {
result[key] = this.clone(source[key]);
}
return result;
};
exports.isEditable = function (el) {
return d.matches(el, "input,[contenteditable]") ||
d.matches(el, "select,[contenteditable]") ||
d.matches(el, "textarea,[contenteditable]") ||
d.matches(el, "button,[contenteditable]");
};
exports.removePsClasses = function (element) {
var clsList = cls.list(element);
for (var i = 0; i < clsList.length; i++) {
var className = clsList[i];
if (className.indexOf('ps-') === 0) {
cls.remove(element, className);
}
}
};
exports.outerWidth = function (element) {
return this.toInt(d.css(element, 'width')) +
this.toInt(d.css(element, 'paddingLeft')) +
this.toInt(d.css(element, 'paddingRight')) +
this.toInt(d.css(element, 'borderLeftWidth')) +
this.toInt(d.css(element, 'borderRightWidth'));
};
exports.startScrolling = function (element, axis) {
cls.add(element, 'ps-in-scrolling');
if (typeof axis !== 'undefined') {
cls.add(element, 'ps-' + axis);
} else {
cls.add(element, 'ps-x');
cls.add(element, 'ps-y');
}
};
exports.stopScrolling = function (element, axis) {
cls.remove(element, 'ps-in-scrolling');
if (typeof axis !== 'undefined') {
cls.remove(element, 'ps-' + axis);
} else {
cls.remove(element, 'ps-x');
cls.remove(element, 'ps-y');
}
};
// Browser feature-detection flags, computed once at module load.
exports.env = {
  // WebKit-based browsers expose the WebkitAppearance style property.
  isWebKit: 'WebkitAppearance' in document.documentElement.style,
  // Touch events or the legacy DocumentTouch interface.
  supportsTouch: (('ontouchstart' in window) || window.DocumentTouch && document instanceof window.DocumentTouch),
  // Legacy IE pointer/touch support via msMaxTouchPoints.
  supportsIePointer: window.navigator.msMaxTouchPoints !== null
};
| mit |
hoka-plus/p-01-web | tmp/babel-output_path-MNyU5yIO.tmp/modules/lodash/internal/isLength.js | 641 | /**
* Used as the [maximum length](http://ecma-international.org/ecma-262/6.0/#sec-number.max_safe_integer)
* of an array-like value.
*/
var MAX_SAFE_INTEGER = 9007199254740991;

/**
 * Checks if `value` is a valid array-like length: a non-negative integer
 * no greater than `Number.MAX_SAFE_INTEGER`.
 *
 * **Note:** This function is based on
 * [`ToLength`](http://ecma-international.org/ecma-262/6.0/#sec-tolength).
 *
 * @private
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a valid length, else `false`.
 */
function isLength(value) {
  if (typeof value != 'number') {
    return false;
  }
  return value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER;
}
export default isLength; | mit |
samsonasik/Slim | tests/Http/ResponseTest.php | 8638 | <?php
/**
* Slim Framework (http://slimframework.com)
*
* @link https://github.com/slimphp/Slim
* @copyright Copyright (c) 2011-2015 Josh Lockhart
* @license https://github.com/slimphp/Slim/blob/master/LICENSE.md (MIT License)
*/
namespace Slim\Tests\Http;
use ReflectionProperty;
use Slim\Http\Body;
use Slim\Http\Headers;
use Slim\Http\Response;
/**
 * Unit tests for the Slim PSR-7 Response object: construction, status
 * handling, behavior predicates, and serialization helpers.
 */
class ResponseTest extends \PHPUnit_Framework_TestCase
{
    /*******************************************************************************
     * Create
     ******************************************************************************/

    // Fixed typo in the method name (was "testConstructoWithDefaultArgs");
    // PHPUnit discovers it either way via the "test" prefix.
    public function testConstructorWithDefaultArgs()
    {
        $response = new Response();

        $this->assertAttributeEquals(200, 'status', $response);
        $this->assertAttributeInstanceOf('\Slim\Http\Headers', 'headers', $response);
        $this->assertAttributeInstanceOf('\Psr\Http\Message\StreamInterface', 'body', $response);
    }

    public function testConstructorWithCustomArgs()
    {
        $headers = new Headers();
        $body = new Body(fopen('php://temp', 'r+'));
        $response = new Response(404, $headers, $body);

        $this->assertAttributeEquals(404, 'status', $response);
        $this->assertAttributeSame($headers, 'headers', $response);
        $this->assertAttributeSame($body, 'body', $response);
    }

    public function testDeepCopyClone()
    {
        $headers = new Headers();
        $body = new Body(fopen('php://temp', 'r+'));
        $response = new Response(404, $headers, $body);
        $clone = clone $response;

        $this->assertAttributeEquals('1.1', 'protocolVersion', $clone);
        $this->assertAttributeEquals(404, 'status', $clone);
        // Headers and body must be deep-copied, not shared with the original.
        $this->assertAttributeNotSame($headers, 'headers', $clone);
        $this->assertAttributeNotSame($body, 'body', $clone);
    }

    public function testDisableSetter()
    {
        $response = new Response();
        $response->foo = 'bar';

        $this->assertFalse(property_exists($response, 'foo'));
    }

    /*******************************************************************************
     * Status
     ******************************************************************************/

    public function testGetStatusCode()
    {
        $response = new Response();
        $responseStatus = new ReflectionProperty($response, 'status');
        $responseStatus->setAccessible(true);
        $responseStatus->setValue($response, '404');

        $this->assertEquals(404, $response->getStatusCode());
    }

    public function testWithStatus()
    {
        $response = new Response();
        $clone = $response->withStatus(302);

        $this->assertAttributeEquals(302, 'status', $clone);
    }

    /**
     * @expectedException \InvalidArgumentException
     */
    public function testWithStatusInvalidStatusCodeThrowsException()
    {
        $response = new Response();
        $response->withStatus(800);
    }

    /**
     * @expectedException \InvalidArgumentException
     */
    public function testWithStatusInvalidReasonPhraseThrowsException()
    {
        $response = new Response();
        $response->withStatus(200, null);
    }

    public function testGetReasonPhrase()
    {
        $response = new Response();
        $responseStatus = new ReflectionProperty($response, 'status');
        $responseStatus->setAccessible(true);
        $responseStatus->setValue($response, '404');

        $this->assertEquals('Not Found', $response->getReasonPhrase());
    }

    public function testGetCustomReasonPhrase()
    {
        $response = new Response();
        $clone = $response->withStatus(200, 'Custom Phrase');

        $this->assertEquals('Custom Phrase', $clone->getReasonPhrase());
    }

    /**
     * @covers Slim\Http\Response::withRedirect
     */
    public function testWithRedirect()
    {
        $response = new Response(200);
        $clone = $response->withRedirect('/foo', 301);

        // The original response is immutable and must be unchanged.
        $this->assertSame(200, $response->getStatusCode());
        $this->assertFalse($response->hasHeader('Location'));

        $this->assertSame(301, $clone->getStatusCode());
        $this->assertTrue($clone->hasHeader('Location'));
        $this->assertEquals('/foo', $clone->getHeaderLine('Location'));
    }

    /*******************************************************************************
     * Behaviors
     ******************************************************************************/

    public function testIsEmpty()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 204);

        $this->assertTrue($response->isEmpty());
    }

    public function testIsInformational()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 100);

        $this->assertTrue($response->isInformational());
    }

    public function testIsOk()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 200);

        $this->assertTrue($response->isOk());
    }

    public function testIsSuccessful()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 201);

        $this->assertTrue($response->isSuccessful());
    }

    public function testIsRedirect()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 302);

        $this->assertTrue($response->isRedirect());
    }

    public function testIsRedirection()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 308);

        $this->assertTrue($response->isRedirection());
    }

    public function testIsForbidden()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 403);

        $this->assertTrue($response->isForbidden());
    }

    public function testIsNotFound()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 404);

        $this->assertTrue($response->isNotFound());
    }

    public function testIsClientError()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 400);

        $this->assertTrue($response->isClientError());
    }

    public function testIsServerError()
    {
        $response = new Response();
        $prop = new ReflectionProperty($response, 'status');
        $prop->setAccessible(true);
        $prop->setValue($response, 503);

        $this->assertTrue($response->isServerError());
    }

    public function testToString()
    {
        $output = 'HTTP/1.1 404 Not Found' . PHP_EOL .
                  'X-Foo: Bar' . PHP_EOL . PHP_EOL .
                  'Where am I?';
        $this->expectOutputString($output);
        $response = new Response();
        $response = $response->withStatus(404)->withHeader('X-Foo', 'Bar')->write('Where am I?');

        echo $response;
    }

    public function testWithJson()
    {
        $data = ['foo' => 'bar1&bar2'];

        $response = new Response();
        $response = $response->withJson($data, 201);

        $this->assertEquals(201, $response->getStatusCode());
        $this->assertEquals('application/json;charset=utf-8', $response->getHeaderLine('Content-Type'));

        $body = $response->getBody();
        $body->rewind();
        $dataJson = $body->getContents(); //json_decode($body->getContents(), true);

        $this->assertEquals('{"foo":"bar1&bar2"}', $dataJson);
        $this->assertEquals($data['foo'], json_decode($dataJson, true)['foo']);

        // Test encoding option
        $response = $response->withJson($data, 200, JSON_HEX_AMP);

        $body = $response->getBody();
        $body->rewind();
        $dataJson = $body->getContents();

        $this->assertEquals('{"foo":"bar1\u0026bar2"}', $dataJson);
        $this->assertEquals($data['foo'], json_decode($dataJson, true)['foo']);
    }
}
| mit |
eslint/eslint | lib/rules/template-curly-spacing.js | 4767 | /**
* @fileoverview Rule to enforce spacing around embedded expressions of template strings
* @author Toru Nagashima
*/
"use strict";
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
const astUtils = require("./utils/ast-utils");
//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------
/** @type {import('../shared/types').Rule} */
module.exports = {
    meta: {
        type: "layout",

        docs: {
            description: "require or disallow spacing around embedded expressions of template strings",
            recommended: false,
            url: "https://eslint.org/docs/rules/template-curly-spacing"
        },

        fixable: "whitespace",

        schema: [
            { enum: ["always", "never"] }
        ],

        messages: {
            expectedBefore: "Expected space(s) before '}'.",
            expectedAfter: "Expected space(s) after '${'.",
            unexpectedBefore: "Unexpected space(s) before '}'.",
            unexpectedAfter: "Unexpected space(s) after '${'."
        }
    },

    create(context) {
        const sourceCode = context.getSourceCode();
        // "always" requires spaces just inside `${ … }`; the default
        // ("never") forbids them.
        const always = context.options[0] === "always";

        /**
         * Checks spacing before `}` of a given token.
         * @param {Token} token A token to check. This is a Template token.
         * @returns {void}
         */
        function checkSpacingBefore(token) {
            if (!token.value.startsWith("}")) {
                return; // starts with a backtick, this is the first template element in the template literal
            }

            const prevToken = sourceCode.getTokenBefore(token, { includeComments: true }),
                hasSpace = sourceCode.isSpaceBetween(prevToken, token);

            // Spacing across a line break is out of scope for this rule.
            if (!astUtils.isTokenOnSameLine(prevToken, token)) {
                return;
            }

            if (always && !hasSpace) {
                context.report({
                    // Report exactly on the closing `}` character.
                    loc: {
                        start: token.loc.start,
                        end: {
                            line: token.loc.start.line,
                            column: token.loc.start.column + 1
                        }
                    },
                    messageId: "expectedBefore",
                    fix: fixer => fixer.insertTextBefore(token, " ")
                });
            }

            if (!always && hasSpace) {
                context.report({
                    loc: {
                        start: prevToken.loc.end,
                        end: token.loc.start
                    },
                    messageId: "unexpectedBefore",
                    fix: fixer => fixer.removeRange([prevToken.range[1], token.range[0]])
                });
            }
        }

        /**
         * Checks spacing after `${` of a given token.
         * @param {Token} token A token to check. This is a Template token.
         * @returns {void}
         */
        function checkSpacingAfter(token) {
            if (!token.value.endsWith("${")) {
                return; // ends with a backtick, this is the last template element in the template literal
            }

            const nextToken = sourceCode.getTokenAfter(token, { includeComments: true }),
                hasSpace = sourceCode.isSpaceBetween(token, nextToken);

            // Spacing across a line break is out of scope for this rule.
            if (!astUtils.isTokenOnSameLine(token, nextToken)) {
                return;
            }

            if (always && !hasSpace) {
                context.report({
                    // Report exactly on the `${` opener (the last two
                    // characters of this template token).
                    loc: {
                        start: {
                            line: token.loc.end.line,
                            column: token.loc.end.column - 2
                        },
                        end: token.loc.end
                    },
                    messageId: "expectedAfter",
                    fix: fixer => fixer.insertTextAfter(token, " ")
                });
            }

            if (!always && hasSpace) {
                context.report({
                    loc: {
                        start: token.loc.end,
                        end: nextToken.loc.start
                    },
                    messageId: "unexpectedAfter",
                    fix: fixer => fixer.removeRange([token.range[1], nextToken.range[0]])
                });
            }
        }

        return {
            TemplateElement(node) {
                const token = sourceCode.getFirstToken(node);

                checkSpacingBefore(token);
                checkSpacingAfter(token);
            }
        };
    }
};
| mit |
praecipula/mongoid | spec/mongoid/relations/referenced/many_to_many_spec.rb | 83444 | require "spec_helper"
describe Mongoid::Relations::Referenced::ManyToMany do
before(:all) do
Mongoid.raise_not_found_error = true
Person.autosave(Person.relations["preferences"].merge!(autosave: true))
Person.synced(Person.relations["preferences"])
end
after(:all) do
Person.reset_callbacks(:save)
Person.reset_callbacks(:destroy)
end
[ :<<, :push ].each do |method|
describe "##{method}" do
context "when the inverse_of is nil" do
let!(:article) do
Article.create
end
context "when the child document is new" do
let(:preference) do
Preference.new
end
before do
article.preferences.send(method, preference)
end
it "persists the child document" do
expect(preference).to be_persisted
end
end
context "when the child document is not new" do
let(:preference) do
Preference.create
end
it "does not persist the child document" do
preference.should_receive(:save).never
article.preferences.send(method, preference)
end
end
end
context "when the parent is a new record" do
let(:person) do
Person.new
end
let!(:preference) do
Preference.new
end
let(:result) do
person.preferences.send(method, preference)
end
it "returns an array of loaded documents" do
expect(result).to eq([ preference ])
end
end
context "when the parent is not a new record" do
let(:person) do
Person.create
end
let!(:preference) do
Preference.new
end
let(:result) do
person.preferences.send(method, preference)
end
it "returns an array of loaded documents" do
expect(result).to eq([ preference ])
end
end
context "when the relations are not polymorphic" do
context "when the inverse relation is not defined" do
let(:person) do
Person.new
end
let(:house) do
House.new
end
before do
person.houses << house
end
it "appends the document to the relation" do
expect(person.houses).to eq([ house ])
end
it "sets the foreign key on the relation" do
expect(person.house_ids).to eq([ house.id ])
end
end
context "when appending in a parent create block" do
let!(:preference) do
Preference.create(name: "testing")
end
let!(:person) do
Person.create do |doc|
doc.preferences << preference
end
end
it "adds the documents to the relation" do
expect(person.preferences).to eq([ preference ])
end
it "sets the foreign key on the relation" do
expect(person.preference_ids).to eq([ preference.id ])
end
it "sets the foreign key on the inverse relation" do
expect(preference.person_ids).to eq([ person.id ])
end
it "saves the target" do
expect(preference).to be_persisted
end
it "adds the correct number of documents" do
expect(person.preferences.size).to eq(1)
end
it "persists the link" do
expect(person.reload.preferences).to eq([ preference ])
end
end
context "when the parent is a new record" do
let(:person) do
Person.new
end
context "when the child is new" do
let(:preference) do
Preference.new
end
before do
person.preferences.send(method, preference)
end
it "adds the documents to the relation" do
expect(person.preferences).to eq([ preference ])
end
it "sets the foreign key on the relation" do
expect(person.preference_ids).to eq([ preference.id ])
end
it "sets the foreign key on the inverse relation" do
expect(preference.person_ids).to eq([ person.id ])
end
it "does not save the target" do
expect(preference).to be_new_record
end
it "adds the correct number of documents" do
expect(person.preferences.size).to eq(1)
end
context "when appending a second time" do
before do
person.preferences.send(method, preference)
end
it "does not allow the document to be added again" do
expect(person.preferences).to eq([ preference ])
end
it "does not allow duplicate ids" do
expect(person.preference_ids).to eq([ preference.id ])
end
end
end
context "when the child is already persisted" do
let!(:persisted) do
Preference.create(name: "testy")
end
let(:preference) do
Preference.first
end
before do
person.preferences.send(method, preference)
person.save
end
it "adds the documents to the relation" do
expect(person.preferences).to eq([ preference ])
end
it "sets the foreign key on the relation" do
expect(person.preference_ids).to eq([ preference.id ])
end
it "sets the foreign key on the inverse relation" do
expect(preference.person_ids).to eq([ person.id ])
end
it "saves the target" do
expect(preference).to be_persisted
end
it "adds the correct number of documents" do
expect(person.preferences.size).to eq(1)
end
it "persists the link" do
expect(person.reload.preferences).to eq([ preference ])
end
end
context "when setting via the associated ids" do
let!(:persisted) do
Preference.create(name: "testy")
end
let(:preference) do
Preference.first
end
let(:person) do
Person.new(preference_ids: [ preference.id ])
end
before do
person.save
end
it "adds the documents to the relation" do
expect(person.preferences).to eq([ preference ])
end
it "sets the foreign key on the relation" do
expect(person.preference_ids).to eq([ preference.id ])
end
it "sets the foreign key on the inverse relation" do
expect(preference.reload.person_ids).to eq([ person.id ])
end
it "adds the correct number of documents" do
expect(person.preferences.size).to eq(1)
end
it "persists the link" do
expect(person.reload.preferences).to eq([ preference ])
end
end
end
context "when the parent is not a new record" do
let(:person) do
Person.create
end
let(:preference) do
Preference.new
end
before do
person.preferences.send(method, preference)
end
it "adds the documents to the relation" do
expect(person.preferences).to eq([ preference ])
end
it "sets the foreign key on the relation" do
expect(person.preference_ids).to eq([ preference.id ])
end
it "sets the foreign key on the inverse relation" do
expect(preference.person_ids).to eq([ person.id ])
end
it "sets the base on the inverse relation" do
expect(preference.people).to eq([ person ])
end
it "sets the same instance on the inverse relation" do
expect(preference.people.first).to eql(person)
end
it "saves the target" do
expect(preference).to_not be_new_record
end
it "adds the document to the target" do
expect(person.preferences.count).to eq(1)
end
context "when documents already exist on the relation" do
let(:preference_two) do
Preference.new
end
before do
person.preferences.send(method, preference_two)
end
it "adds the documents to the relation" do
expect(person.preferences).to eq([ preference, preference_two ])
end
it "sets the foreign key on the relation" do
expect(person.preference_ids).to eq([ preference.id, preference_two.id ])
end
it "sets the foreign key on the inverse relation" do
expect(preference_two.person_ids).to eq([ person.id ])
end
it "sets the base on the inverse relation" do
expect(preference_two.people).to eq([ person ])
end
it "sets the same instance on the inverse relation" do
expect(preference_two.people.first).to eql(person)
end
it "saves the target" do
expect(preference).to_not be_new_record
end
it "adds the document to the target" do
expect(person.preferences.count).to eq(2)
end
end
end
context "when both sides have been persisted" do
let(:person) do
Person.create
end
let(:event) do
Event.create
end
before do
person.administrated_events << event
end
it "sets the front side of the relation" do
expect(person.administrated_events).to eq([ event ])
end
it "sets the inverse side of the relation" do
expect(event.administrators(true)).to eq([ person ])
end
context "when reloading" do
it "sets the front side of the relation" do
expect(person.reload.administrated_events).to eq([ event ])
end
it "sets the inverse side of the relation" do
expect(event.reload.administrators).to eq([ person ])
end
end
context "when performing a new database query" do
let(:loaded_person) do
Person.find(person.id)
end
let(:loaded_event) do
Event.find(event.id)
end
it "sets the front side of the relation" do
expect(loaded_person.administrated_events).to eq([ event ])
end
it "sets the inverse side of the relation" do
expect(loaded_event.administrators).to eq([ person ])
end
end
end
context "when the relation also includes a has_many relation" do
let(:artwork) do
Artwork.create
end
let(:exhibition) do
Exhibition.create
end
let(:exhibitor) do
Exhibitor.create(exhibition: exhibition)
end
before do
artwork.exhibitors << exhibitor
end
it "creates a single artwork object" do
expect(Artwork.count).to eq(1)
end
end
# A self-referencing many-to-many (Tag#related) must keep both the
# appended-to side and the inverse side in sync, in memory and in the db.
context "when the relation is self referencing" do

  let(:tag_one) do
    Tag.create(text: "one")
  end

  let(:tag_two) do
    Tag.create(text: "two")
  end

  before do
    tag_one.related << tag_two
  end

  it "sets the front side of the relation" do
    expect(tag_one.related).to eq([ tag_two ])
  end

  # related(true) forces a reload of the inverse proxy.
  it "sets the inverse side of the relation" do
    expect(tag_two.related(true)).to eq([ tag_one ])
  end

  context "when reloading" do

    it "sets the front side of the relation" do
      expect(tag_one.reload.related).to eq([ tag_two ])
    end

    it "sets the inverse side of the relation" do
      expect(tag_two.reload.related).to eq([ tag_one ])
    end
  end

  context "when performing a new database query" do

    let(:loaded_tag_one) do
      Tag.find(tag_one.id)
    end

    let(:loaded_tag_two) do
      Tag.find(tag_two.id)
    end

    it "sets the front side of the relation" do
      expect(loaded_tag_one.related).to eq([ tag_two ])
    end

    it "sets the inverse side of the relation" do
      expect(loaded_tag_two.related).to eq([ tag_one ])
    end
  end
end
end
# before_add / after_add callback semantics for appending: a raising
# before_add prevents the append, a raising after_add does not.
# NOTE(review): be_true / should_receive are legacy RSpec 2 syntax,
# consistent with the rest of this file.
context "when association has callbacks" do

  let(:post) do
    Post.new
  end

  let(:tag) do
    Tag.new
  end

  context "when the callback is a before_add" do

    it "executes the callback" do
      post.tags.send(method, tag)
      expect(post.before_add_called).to be_true
    end

    context "when errors are raised" do

      before do
        post.should_receive(:before_add_tag).and_raise
      end

      # before_add fires before the link is made, so a raise aborts it.
      it "does not add the document to the relation" do
        expect {
          post.tags.send(method, tag)
        }.to raise_error
        expect(post.tags).to be_empty
      end
    end
  end

  context "when the callback is an after_add" do

    it "executes the callback" do
      post.tags.send(method, tag)
      expect(post.after_add_called).to be_true
    end

    context "when errors are raised" do

      before do
        post.should_receive(:after_add_tag).and_raise
      end

      # after_add fires after the link is made, so the document stays.
      it "adds the document to the relation" do
        expect {
          post.tags.send(method, tag)
        }.to raise_error
        expect(post.tags).to eq([ tag ])
      end
    end
  end
end
end
end
describe "#=" do
# Assigning an array containing the same document twice must deduplicate
# both the proxy contents and the stored foreign keys, on both sides.
context "when trying to add duplicate entries" do

  let(:person) do
    Person.new
  end

  let(:preference) do
    Preference.create(name: "one")
  end

  before do
    person.preferences = [ preference, preference ]
  end

  context "when the document is new" do

    it "does not add the duplicates" do
      expect(person.preferences).to eq([ preference ])
    end

    it "does not create duplicate keys" do
      expect(person.preference_ids).to eq([ preference.id ])
    end
  end

  context "when the document is persisted" do

    before do
      person.save
    end

    it "does not add the duplicates" do
      expect(person.preferences).to eq([ preference ])
    end

    it "does not create duplicate keys" do
      expect(person.preference_ids).to eq([ preference.id ])
    end

    it "does not add duplicates on the inverse" do
      expect(preference.people).to eq([ person ])
    end

    it "does not add duplicate inverse keys" do
      expect(preference.person_ids).to eq([ person.id ])
    end

    context "when reloading document from db" do

      let(:from_db) { Preference.last }

      it "does not create duplicate keys" do
        person.preferences = [ from_db ]
        expect(from_db.person_ids).to eq([ person.id ])
      end
    end
  end
end
context "when the relation is not polymorphic" do
# Assignment between two unsaved documents links keys in memory but must
# not persist the target.
context "when the parent and relation are new records" do

  let(:person) do
    Person.new
  end

  let(:preference) do
    Preference.new
  end

  before do
    person.preferences = [ preference ]
  end

  it "sets the relation" do
    expect(person.preferences).to eq([ preference ])
  end

  it "sets the foreign key on the relation" do
    expect(person.preference_ids).to eq([ preference.id ])
  end

  it "sets the foreign key on the inverse relation" do
    expect(preference.person_ids).to eq([ person.id ])
  end

  it "does not save the target" do
    expect(preference).to be_new_record
  end
end
# Assigning an already-persisted target to an unsaved parent: keys are
# linked immediately and must survive the parent's later save.
context "when the parent is new but the relation exists" do

  let(:person) do
    Person.new
  end

  let!(:preference) do
    Preference.create
  end

  before do
    person.preferences = [ preference ]
  end

  it "sets the relation" do
    expect(person.preferences).to eq([ preference ])
  end

  it "sets the foreign key on the relation" do
    expect(person.preference_ids).to eq([ preference.id ])
  end

  it "sets the foreign key on the inverse relation" do
    expect(preference.person_ids).to eq([ person.id ])
  end

  context "and the parent is persisted" do

    before do
      person.save!
      preference.reload
    end

    it "maintains the relation" do
      expect(person.preferences).to eq([ preference ])
    end

    it "maintains the foreign key on the relation" do
      expect(person.preference_ids).to eq([ preference.id ])
    end

    it "maintains the foreign key on the inverse relation" do
      expect(preference.person_ids).to eq([ person.id ])
    end

    it "maintains the base on the inverse relation" do
      expect(preference.people.first).to eq(person)
    end
  end
end
# Assigning to a persisted parent saves the target and persists the link;
# overwriting the assignment must clear the previous target's keys.
context "when the parent is not a new record" do

  let(:person) do
    Person.create
  end

  let(:preference) do
    Preference.new
  end

  before do
    person.preferences = [ preference ]
  end

  it "sets the relation" do
    expect(person.preferences).to eq([ preference ])
  end

  it "sets the foreign key on the relation" do
    expect(person.preference_ids).to eq([ preference.id ])
  end

  it "sets the foreign key on the inverse relation" do
    expect(preference.person_ids).to eq([ person.id ])
  end

  it "sets the base on the inverse relation" do
    expect(preference.people.first).to eq(person)
  end

  it "saves the target" do
    expect(preference).to be_persisted
  end

  # FIX: this was a bare `==` comparison with no expectation, so the
  # example asserted nothing. Wrapped in expect(...).to eq(...).
  it "persists the relation" do
    expect(person.reload.preferences).to eq([ preference ])
  end

  context "when overwriting an existing relation" do

    let(:another_preference) do
      Preference.new
    end

    before do
      person.preferences = [ another_preference ]
    end

    it "sets the relation" do
      expect(person.preferences).to eq([ another_preference ])
    end

    it "saves the target" do
      expect(another_preference).to be_persisted
    end

    it "does not leave foreign keys of the previous relation" do
      expect(person.preference_ids).to eq([ another_preference.id ])
    end

    it "clears its own key on the foreign relation" do
      expect(preference.person_ids).to be_empty
    end

    context "and then overwriting it again with the same value" do

      before do
        person.preferences = [ another_preference ]
      end

      it "persists the relation between another_preference and person" do
        expect(another_preference.reload.people).to eq([ person ])
      end
    end

    context "and person reloaded instead of saved" do

      before do
        person.reload
        another_preference.reload
      end

      it "persists the relation between person and another_preference" do
        expect(person.preferences).to eq([ another_preference ])
      end

      it "persists the relation between another_preference and person" do
        expect(another_preference.people).to eq([ person ])
      end

      it "no longer has any relation between preference and person" do
        expect(preference.people).to be_empty
      end
    end

    context "and person is saved" do

      before do
        person.save
        person.reload
        another_preference.reload
      end

      it "persists the relation between person and another_preference" do
        expect(person.preferences).to eq([ another_preference ])
      end

      it "persists the relation between another_preference and person" do
        expect(another_preference.people).to eq([ person ])
      end

      it "no longer has any relation between preference and person" do
        expect(preference.people).to be_empty
      end
    end
  end
end
end
end
# Assigning nil or [] must clear the relation, its foreign keys, and the
# inverse side, without destroying the target documents.
[ nil, [] ].each do |value|

  describe "#= #{value}" do

    context "when the relation is not polymorphic" do

      context "when the inverse relation is not defined" do

        let(:person) do
          Person.new
        end

        let(:house) do
          House.new
        end

        before do
          person.houses << house
          person.houses = value
        end

        it "clears the relation" do
          expect(person.houses).to be_empty
        end

        it "clears the foreign keys" do
          expect(person.house_ids).to be_empty
        end
      end

      context "when the parent is a new record" do

        let(:person) do
          Person.new
        end

        let(:preference) do
          Preference.new
        end

        before do
          person.preferences = [ preference ]
          person.preferences = value
        end

        it "sets the relation to an empty array" do
          expect(person.preferences).to be_empty
        end

        it "removed the inverse relation" do
          expect(preference.people).to be_empty
        end

        it "removes the foreign key values" do
          expect(person.preference_ids).to be_empty
        end

        it "removes the inverse foreign key values" do
          expect(preference.person_ids).to be_empty
        end
      end

      context "when the parent is not a new record" do

        context "when the relation has been loaded" do

          let(:person) do
            Person.create
          end

          let(:preference) do
            Preference.new
          end

          before do
            person.preferences = [ preference ]
            person.preferences = value
          end

          it "sets the relation to an empty array" do
            expect(person.preferences).to be_empty
          end

          it "removed the inverse relation" do
            expect(preference.people).to be_empty
          end

          it "removes the foreign key values" do
            expect(person.preference_ids).to be_empty
          end

          it "removes the inverse foreign key values" do
            expect(preference.person_ids).to be_empty
          end

          # Clearing via assignment nullifies, it does not destroy.
          it "does not delete the target from the database" do
            expect(preference).to_not be_destroyed
          end
        end

        context "when the relation has not been loaded" do

          let(:preference) do
            Preference.new
          end

          let(:person) do
            Person.create.tap do |p|
              p.preferences = [ preference ]
            end
          end

          let!(:from_db) do
            Person.find(person.id)
          end

          before do
            from_db.preferences = value
          end

          it "sets the relation to an empty array" do
            expect(from_db.preferences).to be_empty
          end

          it "removes the foreign key values" do
            expect(from_db.preference_ids).to be_empty
          end
        end
      end
    end
  end
end
# #build and #new must instantiate a linked (but unsaved) target and keep
# keys on both sides; saving the target later persists the parent keys.
[ :build, :new ].each do |method|

  describe "##{method}" do

    context "when the relation is not polymorphic" do

      context "when the parent is a new record" do

        let(:person) do
          Person.new
        end

        let!(:preference) do
          person.preferences.send(method, name: "settings")
        end

        it "adds the document to the relation" do
          expect(person.preferences).to eq([ preference ])
        end

        it "sets the foreign key on the relation" do
          expect(person.preference_ids).to eq([ preference.id ])
        end

        it "sets the inverse foreign key on the relation" do
          expect(preference.person_ids).to eq([ person.id ])
        end

        it "sets the attributes" do
          expect(preference.name).to eq("settings")
        end

        it "does not save the target" do
          expect(preference).to be_new_record
        end

        it "adds the correct number of documents" do
          expect(person.preferences.size).to eq(1)
        end
      end

      context "when the parent is not a new record" do

        let(:person) do
          Person.create
        end

        let!(:preference) do
          person.preferences.send(method, name: "settings")
        end

        it "adds the document to the relation" do
          expect(person.preferences).to eq([ preference ])
        end

        it "sets the foreign key on the relation" do
          expect(person.preference_ids).to eq([ preference.id ])
        end

        it "sets the inverse foreign key on the relation" do
          expect(preference.person_ids).to eq([ person.id ])
        end

        it "sets the base on the inverse relation" do
          expect(preference.people).to eq([ person ])
        end

        it "sets the attributes" do
          expect(preference.name).to eq("settings")
        end

        it "does not save the target" do
          expect(preference).to be_new_record
        end

        it "adds the correct number of documents" do
          expect(person.preferences.size).to eq(1)
        end

        context "when saving the target" do

          before do
            preference.save
          end

          it "persists the parent keys" do
            expect(person.reload.preference_ids).to eq([ preference.id ])
          end
        end
      end
    end
  end
end
# The relation macro must hand back the ManyToMany builder for
# constructing targets from attributes.
describe ".builder" do

  let(:builder_klass) do
    Mongoid::Relations::Builders::Referenced::ManyToMany
  end

  let(:document) do
    double
  end

  let(:metadata) do
    double(extension?: false)
  end

  it "returns the embedded in builder" do
    expect(
      described_class.builder(nil, metadata, document)
    ).to be_a_kind_of(builder_klass)
  end
end
# #clear empties the relation and nullifies keys on both sides without
# destroying the targets, and honors before/after_remove callbacks.
describe "#clear" do

  context "when the relation is not polymorphic" do

    context "when the parent has been persisted" do

      let!(:person) do
        Person.create
      end

      context "when the children are persisted" do

        let!(:preference) do
          person.preferences.create(name: "settings")
        end

        let!(:relation) do
          person.preferences.clear
        end

        it "clears out the relation" do
          expect(person.preferences).to be_empty
        end

        it "removes the parent from the inverse relation" do
          expect(preference.people).to_not include(person)
        end

        it "removes the foreign keys" do
          expect(person.preference_ids).to be_empty
        end

        it "removes the parent key from the inverse" do
          expect(preference.person_ids).to_not include(person.id)
        end

        # Clear nullifies the link only; targets survive in the db.
        it "does not delete the documents" do
          expect(preference).to_not be_destroyed
        end

        it "persists the nullification" do
          expect(person.reload.preferences).to be_empty
        end

        it "returns the relation" do
          expect(relation).to be_empty
        end
      end

      context "when the children are not persisted" do

        let!(:preference) do
          person.preferences.build(name: "setting")
        end

        let!(:relation) do
          person.preferences.clear
        end

        it "clears out the relation" do
          expect(person.preferences).to be_empty
        end
      end
    end

    context "when the parent is not persisted" do

      let(:person) do
        Person.new
      end

      let!(:preference) do
        person.preferences.build(name: "setting")
      end

      let!(:relation) do
        person.preferences.clear
      end

      it "clears out the relation" do
        expect(person.preferences).to be_empty
      end
    end
  end

  context "when the association has callbacks" do

    let(:post) do
      Post.new
    end

    let(:tag) do
      Tag.new
    end

    before do
      post.tags << tag
    end

    context "when the callback is a before_remove" do

      context "when no errors are raised" do

        before do
          post.tags.clear
        end

        it "executes the callback" do
          expect(post.before_remove_called).to be_true
        end

        it "removes the document from the relation" do
          expect(post.tags).to be_empty
        end
      end

      context "when errors are raised" do

        before do
          post.should_receive(:before_remove_tag).and_raise
        end

        # A raising before_remove aborts the clear.
        it "does not remove the document from the relation" do
          expect {
            post.tags.clear
          }.to raise_error
          expect(post.tags).to eq([ tag ])
        end
      end
    end

    context "when the callback is an after_remove" do

      context "when no errors are raised" do

        before do
          post.tags.clear
        end

        it "executes the callback" do
          expect(post.after_remove_called).to be_true
        end

        it "removes the document from the relation" do
          expect(post.tags).to be_empty
        end
      end

      context "when errors are raised" do

        before do
          post.should_receive(:after_remove_tag).and_raise
        end

        # after_remove fires after removal, so the clear still happens.
        it "removes the document from the relation" do
          expect {
            post.tags.clear
          }.to raise_error
          expect(post.tags).to be_empty
        end
      end
    end
  end
end
# #concat mirrors << semantics: returns the loaded targets, links keys
# on both sides, dedupes, and persists only when the parent is persisted.
describe "#concat" do

  context "when the parent is a new record" do

    let(:person) do
      Person.new
    end

    let!(:preference) do
      Preference.new
    end

    let(:result) do
      person.preferences.concat([ preference ])
    end

    it "returns an array of loaded documents" do
      expect(result).to eq([ preference ])
    end
  end

  context "when the parent is not a new record" do

    let(:person) do
      Person.create
    end

    let!(:preference) do
      Preference.new
    end

    let(:result) do
      person.preferences.concat([ preference ])
    end

    it "returns an array of loaded documents" do
      expect(result).to eq([ preference ])
    end
  end

  context "when the relations are not polymorphic" do

    context "when the inverse relation is not defined" do

      let(:person) do
        Person.new
      end

      let(:house) do
        House.new
      end

      before do
        person.houses.concat([ house ])
      end

      it "appends the document to the relation" do
        expect(person.houses).to eq([ house ])
      end

      it "sets the foreign key on the relation" do
        expect(person.house_ids).to eq([ house.id ])
      end
    end

    # Concat inside Person.create's block must still persist the link.
    context "when appending in a parent create block" do

      let!(:preference) do
        Preference.create(name: "testing")
      end

      let!(:person) do
        Person.create do |doc|
          doc.preferences.concat([ preference ])
        end
      end

      it "adds the documents to the relation" do
        expect(person.preferences).to eq([ preference ])
      end

      it "sets the foreign key on the relation" do
        expect(person.preference_ids).to eq([ preference.id ])
      end

      it "sets the foreign key on the inverse relation" do
        expect(preference.person_ids).to eq([ person.id ])
      end

      it "saves the target" do
        expect(preference).to be_persisted
      end

      it "adds the correct number of documents" do
        expect(person.preferences.size).to eq(1)
      end

      it "persists the link" do
        expect(person.reload.preferences).to eq([ preference ])
      end
    end

    context "when the parent is a new record" do

      let(:person) do
        Person.new
      end

      context "when the child is new" do

        let(:preference) do
          Preference.new
        end

        before do
          person.preferences.concat([ preference ])
        end

        it "adds the documents to the relation" do
          expect(person.preferences).to eq([ preference ])
        end

        it "sets the foreign key on the relation" do
          expect(person.preference_ids).to eq([ preference.id ])
        end

        it "sets the foreign key on the inverse relation" do
          expect(preference.person_ids).to eq([ person.id ])
        end

        it "does not save the target" do
          expect(preference).to be_new_record
        end

        it "adds the correct number of documents" do
          expect(person.preferences.size).to eq(1)
        end

        # Concat of an already-linked document must be a no-op.
        context "when appending a second time" do

          before do
            person.preferences.concat([ preference ])
          end

          it "does not allow the document to be added again" do
            expect(person.preferences).to eq([ preference ])
          end

          it "does not allow duplicate ids" do
            expect(person.preference_ids).to eq([ preference.id ])
          end
        end
      end

      context "when the child is already persisted" do

        let!(:persisted) do
          Preference.create(name: "testy")
        end

        let(:preference) do
          Preference.first
        end

        before do
          person.preferences.concat([ preference ])
          person.save
        end

        it "adds the documents to the relation" do
          expect(person.preferences).to eq([ preference ])
        end

        it "sets the foreign key on the relation" do
          expect(person.preference_ids).to eq([ preference.id ])
        end

        it "sets the foreign key on the inverse relation" do
          expect(preference.person_ids).to eq([ person.id ])
        end

        it "saves the target" do
          expect(preference).to be_persisted
        end

        it "adds the correct number of documents" do
          expect(person.preferences.size).to eq(1)
        end

        it "persists the link" do
          expect(person.reload.preferences).to eq([ preference ])
        end
      end

      # Setting foreign keys directly must hydrate the relation on save.
      context "when setting via the associated ids" do

        let!(:persisted) do
          Preference.create(name: "testy")
        end

        let(:preference) do
          Preference.first
        end

        let(:person) do
          Person.new(preference_ids: [ preference.id ])
        end

        before do
          person.save
        end

        it "adds the documents to the relation" do
          expect(person.preferences).to eq([ preference ])
        end

        it "sets the foreign key on the relation" do
          expect(person.preference_ids).to eq([ preference.id ])
        end

        it "sets the foreign key on the inverse relation" do
          expect(preference.reload.person_ids).to eq([ person.id ])
        end

        it "adds the correct number of documents" do
          expect(person.preferences.size).to eq(1)
        end

        it "persists the link" do
          expect(person.reload.preferences).to eq([ preference ])
        end
      end
    end

    context "when the parent is not a new record" do

      let(:person) do
        Person.create
      end

      let(:preference) do
        Preference.new
      end

      before do
        person.preferences.concat([ preference ])
      end

      it "adds the documents to the relation" do
        expect(person.preferences).to eq([ preference ])
      end

      it "sets the foreign key on the relation" do
        expect(person.preference_ids).to eq([ preference.id ])
      end

      it "sets the foreign key on the inverse relation" do
        expect(preference.person_ids).to eq([ person.id ])
      end

      it "sets the base on the inverse relation" do
        expect(preference.people).to eq([ person ])
      end

      # eql => identical object, not merely an equal one.
      it "sets the same instance on the inverse relation" do
        expect(preference.people.first).to eql(person)
      end

      it "saves the target" do
        expect(preference).to_not be_new_record
      end

      it "adds the document to the target" do
        expect(person.preferences.count).to eq(1)
      end

      context "when documents already exist on the relation" do

        let(:preference_two) do
          Preference.new
        end

        before do
          person.preferences.concat([ preference_two ])
        end

        it "adds the documents to the relation" do
          expect(person.preferences).to eq([ preference, preference_two ])
        end

        it "sets the foreign key on the relation" do
          expect(person.preference_ids).to eq([ preference.id, preference_two.id ])
        end

        it "sets the foreign key on the inverse relation" do
          expect(preference_two.person_ids).to eq([ person.id ])
        end

        it "sets the base on the inverse relation" do
          expect(preference_two.people).to eq([ person ])
        end

        it "sets the same instance on the inverse relation" do
          expect(preference_two.people.first).to eql(person)
        end

        it "saves the target" do
          expect(preference).to_not be_new_record
        end

        it "adds the document to the target" do
          expect(person.preferences.count).to eq(2)
        end
      end
    end

    context "when both sides have been persisted" do

      let(:person) do
        Person.create
      end

      let(:event) do
        Event.create
      end

      before do
        person.administrated_events.concat([ event ])
      end

      it "sets the front side of the relation" do
        expect(person.administrated_events).to eq([ event ])
      end

      it "sets the inverse side of the relation" do
        expect(event.administrators(true)).to eq([ person ])
      end

      context "when reloading" do

        it "sets the front side of the relation" do
          expect(person.reload.administrated_events).to eq([ event ])
        end

        it "sets the inverse side of the relation" do
          expect(event.reload.administrators).to eq([ person ])
        end
      end

      context "when performing a new database query" do

        let(:loaded_person) do
          Person.find(person.id)
        end

        let(:loaded_event) do
          Event.find(event.id)
        end

        it "sets the front side of the relation" do
          expect(loaded_person.administrated_events).to eq([ event ])
        end

        it "sets the inverse side of the relation" do
          expect(loaded_event.administrators).to eq([ person ])
        end
      end
    end

    context "when the relation also includes a has_many relation" do

      let(:artwork) do
        Artwork.create
      end

      let(:exhibition) do
        Exhibition.create
      end

      let(:exhibitor) do
        Exhibitor.create(exhibition: exhibition)
      end

      before do
        artwork.exhibitors.concat([ exhibitor ])
      end

      it "creates a single artwork object" do
        expect(Artwork.count).to eq(1)
      end
    end

    context "when the relation is self referencing" do

      let(:tag_one) do
        Tag.create(text: "one")
      end

      let(:tag_two) do
        Tag.create(text: "two")
      end

      before do
        tag_one.related.concat([ tag_two ])
      end

      it "sets the front side of the relation" do
        expect(tag_one.related).to eq([ tag_two ])
      end

      it "sets the inverse side of the relation" do
        expect(tag_two.related(true)).to eq([ tag_one ])
      end

      context "when reloading" do

        it "sets the front side of the relation" do
          expect(tag_one.reload.related).to eq([ tag_two ])
        end

        it "sets the inverse side of the relation" do
          expect(tag_two.reload.related).to eq([ tag_one ])
        end
      end

      context "when performing a new database query" do

        let(:loaded_tag_one) do
          Tag.find(tag_one.id)
        end

        let(:loaded_tag_two) do
          Tag.find(tag_two.id)
        end

        it "sets the front side of the relation" do
          expect(loaded_tag_one.related).to eq([ tag_two ])
        end

        it "sets the inverse side of the relation" do
          expect(loaded_tag_two.related).to eq([ tag_one ])
        end
      end
    end
  end
end
describe "#count" do
# Shared parent for the #count examples below.
let(:person) do
  Person.create
end

context "when nothing exists on the relation" do

  context "when the document is destroyed" do

    before do
      Meat.create!
    end

    let!(:sandwich) do
      Sandwich.create!
    end

    it "returns zero" do
      sandwich.destroy
      expect(sandwich.meats.count).to eq(0)
    end
  end
end

context "when documents have been persisted" do

  let!(:preference) do
    person.preferences.create(name: "setting")
  end

  it "returns the number of persisted documents" do
    expect(person.preferences.count).to eq(1)
  end
end

context "when appending to a loaded relation" do

  let!(:preference) do
    person.preferences.create(name: "setting")
  end

  before do
    person.preferences.count
    person.preferences << Preference.create(name: "two")
  end

  it "returns the number of persisted documents" do
    expect(person.preferences.count).to eq(2)
  end
end

# #count hits the database, so built-but-unsaved docs don't count.
context "when documents have not been persisted" do

  let!(:preference) do
    person.preferences.build(name: "settings")
  end

  it "returns 0" do
    expect(person.preferences.count).to eq(0)
  end
end

context "when new documents exist in the database" do

  context "when the documents are part of the relation" do

    before do
      Preference.create(person_ids: [ person.id ])
    end

    it "returns the count from the db" do
      expect(person.reload.preferences.count).to eq(1)
    end
  end

  context "when the documents are not part of the relation" do

    before do
      Preference.create
    end

    it "returns the count from the db" do
      expect(person.preferences.count).to eq(0)
    end
  end
end
# #count behavior for a relation with no inverse (Person#houses).
context "when the inverse relation is not defined" do

  context "when documents have been persisted" do

    let!(:house) do
      person.houses.create(name: "Wayne Manor")
    end

    it "returns the number of persisted documents" do
      expect(person.houses.count).to eq(1)
    end
  end

  context "when documents have not been persisted" do

    let!(:house) do
      person.houses.build(name: "Ryugyong Hotel")
    end

    it "returns 0" do
      # FIX: previously asserted person.preferences.count — a copy/paste
      # error; the fixture builds a house, so assert on houses.
      expect(person.houses.count).to eq(0)
    end
  end
end
end
# #create / #create! persist the target immediately, so the parent must
# itself be persisted first; unsaved parents raise UnsavedDocument.
[ :create, :create! ].each do |method|

  describe "##{method}" do

    context "when the relation is not polymorphic" do

      # Agent accounts use slugged string keys, not ObjectIds.
      context "when using string keys" do

        let(:agent) do
          Agent.create(number: "007")
        end

        before do
          agent.accounts.create(name: "testing again")
        end

        it "does not convert the string key to an object id" do
          expect(agent.account_ids).to eq([ "testing-again" ])
        end
      end

      context "when the parent is a new record" do

        let(:person) do
          Person.new
        end

        it "raises an unsaved document error" do
          expect {
            person.preferences.send(method, name: "Testing")
          }.to raise_error(Mongoid::Errors::UnsavedDocument)
        end
      end

      context "when the parent is not a new record" do

        let(:person) do
          Person.send(method)
        end

        let!(:preference) do
          person.preferences.send(method, name: "Testing")
        end

        it "sets the foreign key on the relation" do
          expect(person.preference_ids).to eq([ preference.id ])
        end

        it "sets the foreign key on the inverse relation" do
          expect(preference.person_ids).to eq([ person.id ])
        end

        it "adds the document" do
          expect(person.preferences).to eq([ preference ])
        end

        it "sets the base on the inverse relation" do
          expect(preference.people).to eq([ person ])
        end

        it "sets the attributes" do
          expect(preference.name).to eq("Testing")
        end

        it "saves the target" do
          expect(preference).to be_persisted
        end

        it "adds the document to the target" do
          expect(person.preferences.count).to eq(1)
        end

        it "does not duplicate documents" do
          expect(person.reload.preferences.count).to eq(1)
        end

        it "does not duplicate ids" do
          expect(person.reload.preference_ids.count).to eq(1)
        end
      end
    end
  end
end
# #create! must raise (not silently return an invalid doc) when the
# target fails validation.
describe "#create!" do

  context "when validation fails" do

    let(:person) do
      Person.create
    end

    context "when the relation is not polymorphic" do

      it "raises an error" do
        expect {
          person.preferences.create!(name: "a")
        }.to raise_error(Mongoid::Errors::Validations)
      end
    end
  end
end
# #delete unlinks a document from the relation (nullifying keys on both
# sides) without destroying it, returns the document or nil, and honors
# before/after_remove callbacks.
describe "#delete" do

  let(:person) do
    Person.create
  end

  let(:preference_one) do
    Preference.create(name: "Testing")
  end

  let(:preference_two) do
    Preference.create(name: "Test")
  end

  before do
    person.preferences << [ preference_one, preference_two ]
  end

  context "when the document exists" do

    let!(:deleted) do
      person.preferences.delete(preference_one)
    end

    it "removes the document from the relation" do
      expect(person.preferences).to eq([ preference_two ])
    end

    it "returns the document" do
      expect(deleted).to eq(preference_one)
    end

    it "removes the document key from the foreign key" do
      expect(person.preference_ids).to eq([ preference_two.id ])
    end

    it "removes the inverse reference" do
      expect(deleted.reload.people).to be_empty
    end

    it "removes the base id from the inverse keys" do
      expect(deleted.reload.person_ids).to be_empty
    end

    context "and person and preferences are reloaded" do

      before do
        person.reload
        preference_one.reload
        preference_two.reload
      end

      it "nullifies the deleted preference" do
        expect(person.preferences).to eq([ preference_two ])
      end

      it "retains the ids for one preference" do
        expect(person.preference_ids).to eq([ preference_two.id ])
      end
    end
  end

  context "when the document does not exist" do

    let!(:deleted) do
      person.preferences.delete(Preference.new)
    end

    it "returns nil" do
      expect(deleted).to be_nil
    end

    it "does not modify the relation" do
      expect(person.preferences).to eq([ preference_one, preference_two ])
    end

    it "does not modify the keys" do
      expect(person.preference_ids).to eq([ preference_one.id, preference_two.id ])
    end
  end

  context "when :dependent => :nullify is set" do

    context "when :inverse_of is set" do

      let(:event) do
        Event.create
      end

      before do
        person.administrated_events << [ event ]
      end

      it "deletes the document" do
        expect(event.delete).to be_true
      end
    end
  end

  context "when the relationships are self referencing" do

    let(:tag_one) do
      Tag.create(text: "one")
    end

    let(:tag_two) do
      Tag.create(text: "two")
    end

    before do
      tag_one.related << tag_two
    end

    context "when deleting without reloading" do

      let!(:deleted) do
        tag_one.related.delete(tag_two)
      end

      it "deletes the document from the relation" do
        expect(tag_one.related).to be_empty
      end

      it "deletes the foreign key from the relation" do
        expect(tag_one.related_ids).to be_empty
      end

      it "removes the reference from the inverse" do
        expect(deleted.related).to be_empty
      end

      it "removes the foreign keys from the inverse" do
        expect(deleted.related_ids).to be_empty
      end
    end

    context "when deleting with reloading" do

      context "when deleting from the front side" do

        let(:reloaded) do
          tag_one.reload
        end

        let!(:deleted) do
          reloaded.related.delete(tag_two)
        end

        it "deletes the document from the relation" do
          expect(reloaded.related).to be_empty
        end

        it "deletes the foreign key from the relation" do
          expect(reloaded.related_ids).to be_empty
        end

        it "removes the reference from the inverse" do
          expect(deleted.related).to be_empty
        end

        it "removes the foreign keys from the inverse" do
          expect(deleted.related_ids).to be_empty
        end
      end

      context "when deleting from the inverse side" do

        let(:reloaded) do
          tag_two.reload
        end

        let!(:deleted) do
          reloaded.related.delete(tag_one)
        end

        it "deletes the document from the relation" do
          expect(reloaded.related).to be_empty
        end

        it "deletes the foreign key from the relation" do
          expect(reloaded.related_ids).to be_empty
        end

        it "removes the foreign keys from the inverse" do
          expect(deleted.related_ids).to be_empty
        end
      end
    end
  end

  context "when the association has callbacks" do

    let(:post) do
      Post.new
    end

    let(:tag) do
      Tag.new
    end

    before do
      post.tags << tag
    end

    context "when the callback is a before_remove" do

      context "when there are no errors" do

        before do
          post.tags.delete tag
        end

        it "executes the callback" do
          expect(post.before_remove_called).to be_true
        end

        it "removes the document from the relation" do
          expect(post.tags).to be_empty
        end
      end

      context "when errors are raised" do

        before do
          post.should_receive(:before_remove_tag).and_raise
        end

        # A raising before_remove aborts the delete.
        it "does not remove the document from the relation" do
          expect {
            post.tags.delete tag
          }.to raise_error
          expect(post.tags).to eq([ tag ])
        end
      end
    end

    context "when the callback is an after_remove" do

      context "when no errors are raised" do

        before do
          post.tags.delete(tag)
        end

        it "executes the callback" do
          expect(post.after_remove_called).to be_true
        end

        it "removes the document from the relation" do
          expect(post.tags).to be_empty
        end
      end

      context "when errors are raised" do

        before do
          post.should_receive(:after_remove_tag).and_raise
        end

        # after_remove fires after removal, so the delete still happens.
        it "removes the document from the relation" do
          expect {
            post.tags.delete(tag)
          }.to raise_error
          expect(post.tags).to be_empty
        end
      end
    end
  end
end
# #delete_all / #destroy_all remove matching targets from the database
# (unlike #delete/#clear which only unlink) and return the removed count.
[ :delete_all, :destroy_all ].each do |method|

  describe "##{method}" do

    context "when the relation is not polymorphic" do

      context "when conditions are provided" do

        let(:person) do
          Person.create
        end

        let!(:preference_one) do
          person.preferences.create(name: "Testing")
        end

        let!(:preference_two) do
          person.preferences.create(name: "Test")
        end

        let!(:deleted) do
          person.preferences.send(
            method,
            { name: "Testing" }
          )
        end

        it "removes the correct preferences" do
          expect(person.preferences.count).to eq(1)
        end

        it "deletes the documents from the database" do
          expect(Preference.where(name: "Testing").count).to eq(0)
        end

        it "returns the number of documents deleted" do
          expect(deleted).to eq(1)
        end

        it "removes the ids from the foreign key" do
          expect(person.preference_ids).to eq([ preference_two.id ])
        end
      end

      context "when conditions are not provided" do

        let(:person) do
          Person.create.tap do |person|
            person.preferences.create(name: "Testing")
            person.preferences.create(name: "Test")
          end
        end

        let!(:deleted) do
          person.preferences.send(method)
        end

        it "removes the correct preferences" do
          expect(person.preferences.count).to eq(0)
        end

        it "deletes the documents from the database" do
          expect(Preference.count).to eq(0)
        end

        it "returns the number of documents deleted" do
          expect(deleted).to eq(2)
        end
      end
    end
  end
end
# Referenced many-to-many is never an embedded relation.
describe ".embedded?" do

  it "returns false" do
    expect(described_class).to_not be_embedded
  end
end
# #exists? queries the database, so built-but-unsaved targets are false.
describe "#exists?" do

  let!(:person) do
    Person.create
  end

  context "when documents exist in the database" do

    before do
      person.preferences.create
    end

    it "returns true" do
      expect(person.preferences.exists?).to be_true
    end
  end

  context "when no documents exist in the database" do

    before do
      person.preferences.build
    end

    it "returns false" do
      expect(person.preferences.exists?).to be_false
    end
  end
end
describe "#find" do
context "when the relation is not polymorphic" do
let(:person) do
Person.create
end
let!(:preference_one) do
person.preferences.create(name: "Test")
end
let!(:preference_two) do
person.preferences.create(name: "OMG I has relations")
end
let!(:unrelated_pref) do
Preference.create(name: "orphan annie")
end
let!(:unrelated_pref_two) do
Preference.create(name: "orphan two")
end
context "when providing an id" do
context "when the id matches" do
let(:preference) do
person.preferences.find(preference_one.id)
end
it "returns the matching document" do
expect(preference).to eq(preference_one)
end
end
context "when the id matches an unreferenced document" do
let(:preference) do
person.preferences.find(unrelated_pref.id)
end
it "raises an error" do
expect {
preference
}.to raise_error(Mongoid::Errors::DocumentNotFound)
end
end
context "when the id does not match" do
context "when config set to raise error" do
before do
Mongoid.raise_not_found_error = true
end
it "raises an error" do
expect {
person.preferences.find(BSON::ObjectId.new)
}.to raise_error(Mongoid::Errors::DocumentNotFound)
end
end
context "when config set not to raise error" do
let(:preference) do
person.preferences.find(BSON::ObjectId.new)
end
before do
Mongoid.raise_not_found_error = false
end
after do
Mongoid.raise_not_found_error = true
end
it "returns nil" do
expect(preference).to be_nil
end
end
end
end
context "when providing an array of ids" do
context "when the ids match" do
let(:preferences) do
person.preferences.find([ preference_one.id, preference_two.id ])
end
it "returns the matching documents" do
expect(preferences).to eq([ preference_one, preference_two ])
end
end
context "when the ids matche unreferenced documents" do
let(:preferences) do
person.preferences.find(
[ unrelated_pref.id, unrelated_pref_two.id ]
)
end
it "raises an error" do
expect {
preferences
}.to raise_error(Mongoid::Errors::DocumentNotFound)
end
end
context "when the ids do not match" do
context "when config set to raise error" do
before do
Mongoid.raise_not_found_error = true
end
it "raises an error" do
expect {
person.preferences.find([ BSON::ObjectId.new ])
}.to raise_error(Mongoid::Errors::DocumentNotFound)
end
end
context "when config set not to raise error" do
let(:preferences) do
person.preferences.find([ BSON::ObjectId.new ])
end
before do
Mongoid.raise_not_found_error = false
end
after do
Mongoid.raise_not_found_error = true
end
it "returns an empty array" do
expect(preferences).to be_empty
end
end
end
end
end
end
describe "#find_or_create_by" do
context "when the relation is not polymorphic" do
let(:person) do
Person.create
end
let!(:preference) do
person.preferences.create(name: "Testing")
end
context "when the document exists" do
let(:found) do
person.preferences.find_or_create_by(name: "Testing")
end
it "returns the document" do
expect(found).to eq(preference)
end
end
context "when the document does not exist" do
let(:found) do
person.preferences.find_or_create_by(name: "Test")
end
it "sets the new document attributes" do
expect(found.name).to eq("Test")
end
it "returns a newly persisted document" do
expect(found).to be_persisted
end
end
end
end
describe "#find_or_initialize_by" do
context "when the relation is not polymorphic" do
let(:person) do
Person.create
end
let!(:preference) do
person.preferences.create(name: "Testing")
end
context "when the document exists" do
let(:found) do
person.preferences.find_or_initialize_by(name: "Testing")
end
it "returns the document" do
expect(found).to eq(preference)
end
end
context "when the document does not exist" do
let(:found) do
person.preferences.find_or_initialize_by(name: "Test")
end
it "sets the new document attributes" do
expect(found.name).to eq("Test")
end
it "returns a non persisted document" do
expect(found).to_not be_persisted
end
end
end
end
describe ".foreign_key_suffix" do
it "returns _ids" do
expect(described_class.foreign_key_suffix).to eq("_ids")
end
end
describe ".macro" do
it "returns has_and_belongs_to_many" do
expect(described_class.macro).to eq(:has_and_belongs_to_many)
end
end
describe "#max" do
let(:person) do
Person.create
end
let(:preference_one) do
Preference.create(ranking: 5)
end
let(:preference_two) do
Preference.create(ranking: 10)
end
before do
person.preferences.push(preference_one, preference_two)
end
let(:max) do
person.preferences.max do |a,b|
a.ranking <=> b.ranking
end
end
it "returns the document with the max value of the supplied field" do
expect(max).to eq(preference_two)
end
end
describe "#max_by" do
let(:person) do
Person.create
end
let(:preference_one) do
Preference.create(ranking: 5)
end
let(:preference_two) do
Preference.create(ranking: 10)
end
before do
person.preferences.push(preference_one, preference_two)
end
let(:max) do
person.preferences.max_by(&:ranking)
end
it "returns the document with the max value of the supplied field" do
expect(max).to eq(preference_two)
end
end
describe "#method_missing" do
let!(:person) do
Person.create
end
let!(:preference_one) do
person.preferences.create(name: "First", value: "Posting")
end
let!(:preference_two) do
person.preferences.create(name: "Second", value: "Testing")
end
let!(:unrelated) do
Preference.create(name: "Third")
end
context "when providing a single criteria" do
let(:preferences) do
person.preferences.where(name: "First")
end
it "applies the criteria to the documents" do
expect(preferences).to eq([ preference_one ])
end
end
context "when providing a criteria on id" do
let(:preferences) do
person.preferences.where(_id: unrelated.id)
end
it "does not return unrelated documents" do
expect(preferences).to be_empty
end
end
context "when providing a criteria class method" do
let(:preferences) do
person.preferences.posting
end
it "applies the criteria to the documents" do
expect(preferences).to eq([ preference_one ])
end
end
context "when chaining criteria" do
let(:preferences) do
person.preferences.posting.where(:name.in => [ "First" ])
end
it "applies the criteria to the documents" do
expect(preferences).to eq([ preference_one ])
end
end
context "when delegating methods" do
describe "#distinct" do
let(:values) do
person.preferences.distinct(:name)
end
it "returns the distinct values for the fields" do
expect(values).to include("First")
expect(values).to include("Second")
end
context "when the inverse relation is not defined" do
let!(:house) do
person.houses.create(name: "Wayne Manor")
end
it "returns the distinct values for the fields" do
expect(person.houses.distinct(:name)).to eq([ house.name ])
end
end
end
end
end
describe "#min" do
let(:person) do
Person.create
end
let(:preference_one) do
Preference.create(ranking: 5)
end
let(:preference_two) do
Preference.create(ranking: 10)
end
before do
person.preferences.push(preference_one, preference_two)
end
let(:min) do
person.preferences.min do |a, b|
a.ranking <=> b.ranking
end
end
it "returns the min value of the supplied field" do
expect(min).to eq(preference_one)
end
end
describe "#min_by" do
let(:person) do
Person.create
end
let(:preference_one) do
Preference.create(ranking: 5)
end
let(:preference_two) do
Preference.create(ranking: 10)
end
before do
person.preferences.push(preference_one, preference_two)
end
let(:min) do
person.preferences.min_by(&:ranking)
end
it "returns the min value of the supplied field" do
expect(min).to eq(preference_one)
end
end
describe "#nil?" do
it "returns false" do
expect(Person.new.preferences).to_not be_nil
end
end
describe "#nullify_all" do
let(:person) do
Person.create
end
let!(:preference_one) do
person.preferences.create(name: "One")
end
let!(:preference_two) do
person.preferences.create(name: "Two")
end
before do
person.preferences.nullify_all
end
it "removes the foreign key from the base document" do
[ preference_one, preference_two ].each do |preference|
expect(person.preference_ids).to_not include(preference.id)
end
end
it "removes the foreign key from the target documents" do
[ preference_one, preference_two ].each do |preference|
expect(preference.person_ids).to_not include(person.id)
end
end
it "removes the reference from the base document" do
[ preference_one, preference_two ].each do |preference|
expect(person.preferences).to_not include(preference)
end
end
it "removes the reference from the target documents" do
[ preference_one, preference_two ].each do |preference|
expect(preference.people).to_not include(person)
end
end
it "saves the documents" do
expect(preference_one.reload.people).to_not include(person)
end
end
describe "#respond_to?" do
let(:person) do
Person.new
end
let(:preferences) do
person.preferences
end
Array.public_instance_methods.each do |method|
context "when checking #{method}" do
it "returns true" do
expect(preferences.respond_to?(method)).to be_true
end
end
end
Mongoid::Relations::Referenced::Many.public_instance_methods.each do |method|
context "when checking #{method}" do
it "returns true" do
expect(preferences.respond_to?(method)).to be_true
end
end
end
Preference.scopes.keys.each do |method|
context "when checking #{method}" do
it "returns true" do
expect(preferences.respond_to?(method)).to be_true
end
end
end
end
describe ".stores_foreign_key?" do
it "returns true" do
expect(described_class.stores_foreign_key?).to be_true
end
end
describe "#scoped" do
let(:person) do
Person.new
end
let(:scoped) do
person.preferences.scoped
end
it "returns the relation criteria" do
expect(scoped).to be_a(Mongoid::Criteria)
end
it "returns with an empty selector" do
expect(scoped.selector).to eq({ "$and" => [{ "_id" => { "$in" => [] }}]})
end
end
[ :size, :length ].each do |method|
describe "##{method}" do
let(:person) do
Person.create
end
context "when documents have been persisted" do
let!(:preference) do
person.preferences.create(name: "Testing")
end
it "returns the number of documents" do
expect(person.preferences.send(method)).to eq(1)
end
end
context "when documents have not been persisted" do
before do
person.preferences.build(name: "Test")
person.preferences.create(name: "Test 2")
end
it "returns the total number of documents" do
expect(person.preferences.send(method)).to eq(2)
end
end
end
end
describe "#unscoped" do
context "when the relation has no default scope" do
let!(:person) do
Person.create
end
let!(:preference_one) do
person.preferences.create(name: "first")
end
let!(:preference_two) do
Preference.create(name: "second")
end
let(:unscoped) do
person.preferences.unscoped
end
it "returns only the associated documents" do
expect(unscoped).to eq([ preference_one ])
end
end
context "when the relation has a default scope" do
let!(:person) do
Person.create
end
let!(:house_one) do
person.houses.create(name: "first")
end
let!(:house_two) do
House.create(name: "second")
end
let(:unscoped) do
person.houses.unscoped
end
it "only returns associated documents" do
expect(unscoped).to eq([ house_one ])
end
it "removes the default scoping options" do
expect(unscoped.options).to eq({})
end
end
end
describe ".valid_options" do
it "returns the valid options" do
expect(described_class.valid_options).to eq(
[
:after_add,
:after_remove,
:autosave,
:before_add,
:before_remove,
:dependent,
:foreign_key,
:index,
:order,
:primary_key
]
)
end
end
describe ".validation_default" do
it "returns true" do
expect(described_class.validation_default).to be_true
end
end
context "when setting the ids directly after the documents" do
let!(:person) do
Person.create!(title: "The Boss")
end
let!(:girlfriend_house) do
House.create!(name: "Girlfriend")
end
let!(:wife_house) do
House.create!(name: "Wife")
end
let!(:exwife_house) do
House.create!(name: "Ex-Wife")
end
before do
person.update_attributes(
houses: [ wife_house, exwife_house, girlfriend_house ]
)
person.update_attributes(house_ids: [ girlfriend_house.id ])
end
context "when reloading" do
it "properly sets the references" do
expect(person.houses(true)).to eq([ girlfriend_house ])
end
end
end
context "when setting both sides in a single call" do
context "when the documents are new" do
let(:user) do
User.new(name: "testing")
end
let(:business) do
Business.new(name: "serious", owners: [ user ])
end
before do
user.businesses = [ business ]
end
it "sets the businesses" do
expect(user.businesses).to eq([ business ])
end
it "sets the inverse users" do
expect(user.businesses.first.owners.first).to eq(user)
end
it "sets the inverse businesses" do
expect(business.owners).to eq([ user ])
end
end
context "when one side is persisted" do
let!(:user) do
User.new(name: "testing")
end
let!(:business) do
Business.create(name: "serious", owners: [ user ])
end
before do
user.businesses = [ business ]
end
it "sets the businesses" do
expect(user.businesses).to eq([ business ])
end
it "sets the inverse users" do
expect(user.businesses.first.owners.first).to eq(user)
end
it "sets the inverse businesses" do
expect(business.owners).to eq([ user ])
end
context "when reloading" do
before do
user.reload
business.reload
end
it "persists the businesses" do
expect(user.businesses).to eq([ business ])
end
it "persists the inverse users" do
expect(user.businesses.first.owners.first).to eq(user)
end
it "persists the inverse businesses" do
expect(business.owners).to eq([ user ])
end
end
end
context "when the documents are persisted" do
let(:user) do
User.create(name: "tst")
end
let(:business) do
Business.create(name: "srs", owners: [ user ])
end
before do
user.businesses = [ business ]
end
it "sets the businesses" do
expect(user.businesses).to eq([ business ])
end
it "sets the inverse users" do
expect(user.businesses.first.owners.first).to eq(user)
end
it "sets the inverse businesses" do
expect(business.owners).to eq([ user ])
end
context "when reloading" do
before do
user.reload
business.reload
end
it "persists the businesses" do
expect(user.businesses).to eq([ business ])
end
it "persists the inverse users" do
expect(user.businesses.first.owners.first).to eq(user)
end
it "persists the inverse businesses" do
expect(business.owners).to eq([ user ])
end
end
end
end
context "when binding the relation multiple times" do
let(:person) do
Person.create
end
let(:preference) do
person.preferences.create(name: "testing")
end
before do
2.times do
person.preferences.each do |preference|
expect(preference.person_ids).to eq([ person.id ])
end
end
end
it "does not duplicate foreign keys" do
expect(person.preference_ids).to eq([ preference.id ])
end
end
context "when the association has order criteria" do
let(:person) do
Person.create
end
let(:preference_one) do
OrderedPreference.create(name: 'preference-1', value: 10)
end
let(:preference_two) do
OrderedPreference.create(name: 'preference-2', value: 20)
end
let(:preference_three) do
OrderedPreference.create(name: 'preference-3', value: 20)
end
before do
person.ordered_preferences.nullify_all
person.ordered_preferences.push(preference_one, preference_two, preference_three)
end
it "orders the documents" do
expect(person.ordered_preferences(true)).to eq(
[preference_two, preference_three, preference_one]
)
end
it "chains default criteria with additional" do
expect(person.ordered_preferences.order_by(:name.desc).to_a).to eq(
[preference_three, preference_two, preference_one]
)
end
end
context "when the parent is not a new record and freshly loaded" do
let(:person) do
Person.create
end
let(:preference) do
Preference.new
end
before do
person.preferences = [ preference ]
person.save
person.reload
person.preferences = nil
end
it "sets the relation to an empty array" do
expect(person.preferences).to be_empty
end
it "removes the foreign key values" do
expect(person.preference_ids).to be_empty
end
it "does not delete the target from the database" do
expect {
preference.reload
}.not_to raise_error
end
end
context "when reloading the relation" do
let!(:person) do
Person.create
end
let!(:preference_one) do
Preference.create(name: "one")
end
let!(:preference_two) do
Preference.create(name: "two")
end
before do
person.preferences << preference_one
end
context "when the relation references the same documents" do
before do
Preference.collection.find({ _id: preference_one.id }).
update({ "$set" => { name: "reloaded" }})
end
let(:reloaded) do
person.preferences(true)
end
it "reloads the document from the database" do
expect(reloaded.first.name).to eq("reloaded")
end
end
context "when the relation references different documents" do
before do
person.preferences << preference_two
end
let(:reloaded) do
person.preferences(true)
end
it "reloads the new document from the database" do
expect(reloaded).to eq([ preference_one, preference_two ])
end
end
end
context "when adding to a relation via a field setter" do
context "when the document is new" do
let!(:person) do
Person.create(preference_names: "one, two")
end
let(:preference_one) do
person.reload.preferences.first
end
let(:preference_two) do
person.reload.preferences.last
end
it "persists the first preference" do
expect(preference_one).to_not be_nil
end
it "sets the first inverse" do
expect(preference_one.people).to eq([ person ])
end
it "persists the second preference" do
expect(preference_two).to_not be_nil
end
it "sets the second inverse keys" do
expect(preference_two.people).to eq([ person ])
end
end
end
context "when changing the order of existing ids" do
let(:person) do
Person.new
end
let(:preference_one) do
Preference.create(name: "one")
end
let(:preference_two) do
Preference.create(name: "two")
end
before do
person.preference_ids = [ preference_one.id, preference_two.id ]
person.save
end
context "and the order is changed" do
before do
person.preference_ids = [ preference_two.id, preference_one.id ]
person.save
end
let(:reloaded) do
Person.find(person.id)
end
it "persists the change in id order" do
expect(reloaded.preference_ids).to eq(
[ preference_two.id, preference_one.id ]
)
end
end
context "and the order is changed and an element is added" do
let(:preference_three) do
Preference.create(name: "three")
end
before do
person.preference_ids =
[ preference_two.id, preference_one.id, preference_three.id ]
person.save
end
let(:reloaded) do
Person.find(person.id)
end
it "also persists the change in id order" do
expect(reloaded.preference_ids).to eq(
[ preference_two.id, preference_one.id, preference_three.id ]
)
end
end
context "and the order is changed and an element is removed" do
let(:preference_three) do
Preference.create(name: "three")
end
before do
person.preference_ids =
[ preference_one.id, preference_two.id, preference_three.id ]
person.save
person.preference_ids =
[ preference_three.id, preference_two.id ]
person.save
end
let(:reloaded) do
Person.find(person.id)
end
it "also persists the change in id order" do
expect(reloaded.preference_ids).to eq(
[ preference_three.id, preference_two.id ]
)
end
end
end
context "when adding a document" do
let(:person) do
Person.new
end
let(:preference_one) do
Preference.new
end
let(:first_add) do
person.preferences.push(preference_one)
end
context "when chaining a second add" do
let(:preference_two) do
Preference.new
end
let(:result) do
first_add.push(preference_two)
end
it "adds both documents" do
expect(result).to eq([ preference_one, preference_two ])
end
end
end
context "when setting the relation via the foreign key" do
context "when the relation exists" do
let!(:person) do
Person.create
end
let!(:pref_one) do
person.preferences.create
end
let!(:pref_two) do
Preference.create
end
before do
person.preference_ids = [ pref_two.id ]
end
it "sets the new documents on the relation" do
expect(person.preferences).to eq([ pref_two ])
end
end
end
context "when using a different primary key" do
let(:dog) do
Dog.create(name: 'Doggie')
end
let(:cat) do
Cat.create(name: 'Kitty')
end
let(:fire_hydrant) do
FireHydrant.create(location: '221B Baker Street')
end
context "when adding to a one-way many to many" do
before do
fire_hydrant.cats.push(cat)
end
it "adds the pk value to the fk set" do
expect(fire_hydrant.cat_ids).to eq([cat.name])
end
end
context "when adding to a two-way many to many" do
before do
fire_hydrant.dogs.push(dog)
end
it "adds the pk value to the fk set" do
expect(fire_hydrant.dog_ids).to eq([dog.name])
end
it "adds the base pk value to the inverse fk set" do
expect(dog.fire_hydrant_ids).to eq([fire_hydrant.location])
end
end
context "when deleting from a two-way many to many" do
before do
dog.fire_hydrants.push(fire_hydrant)
fire_hydrant.dogs.delete(dog)
end
it "removes the pk value from the fk set" do
expect(fire_hydrant.dog_ids).to eq([])
end
it "removes the base pk value from the inverse fk set" do
expect(dog.fire_hydrant_ids).to eq([])
end
end
end
end
| mit |
chapter-three/AppleNewsAPI | tests/Document/Components/BodyTest.php | 536 | <?php
/**
* @file
* Tests for ChapterThree\AppleNewsAPI\Document\Components\Body.
*/
use ChapterThree\AppleNewsAPI\Document\Components\Body;
/**
* Tests for the Body class.
*/
class BodyTest extends PHPUnit_Framework_TestCase {
/**
* Setting properties and outputting json.
*/
public function testSetters() {
$obj = new Body('some body text.');
// Optional properties.
$expected = '{"role":"body","text":"some body text."}';
$this->assertJsonStringEqualsJsonString($expected, $obj->json());
}
}
| mit |
mokelab/pkgname | Godeps/_workspace/src/github.com/google/go-github/github/github.go | 16317 | // Copyright 2013 The go-github AUTHORS. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package github
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"reflect"
"strconv"
"strings"
"time"
"github.com/google/go-querystring/query"
)
const (
libraryVersion = "0.1"
defaultBaseURL = "https://api.github.com/"
uploadBaseURL = "https://uploads.github.com/"
userAgent = "go-github/" + libraryVersion
headerRateLimit = "X-RateLimit-Limit"
headerRateRemaining = "X-RateLimit-Remaining"
headerRateReset = "X-RateLimit-Reset"
mediaTypeV3 = "application/vnd.github.v3+json"
defaultMediaType = "application/octet-stream"
)
// A Client manages communication with the GitHub API.
type Client struct {
// HTTP client used to communicate with the API.
client *http.Client
// Base URL for API requests. Defaults to the public GitHub API, but can be
// set to a domain endpoint to use with GitHub Enterprise. BaseURL should
// always be specified with a trailing slash.
BaseURL *url.URL
// Base URL for uploading files.
UploadURL *url.URL
// User agent used when communicating with the GitHub API.
UserAgent string
// Rate specifies the current rate limit for the client as determined by the
// most recent API call. If the client is used in a multi-user application,
// this rate may not always be up-to-date. Call RateLimit() to check the
// current rate.
Rate Rate
// Services used for talking to different parts of the GitHub API.
Activity *ActivityService
Gists *GistsService
Git *GitService
Gitignores *GitignoresService
Issues *IssuesService
Organizations *OrganizationsService
PullRequests *PullRequestsService
Repositories *RepositoriesService
Search *SearchService
Users *UsersService
}
// ListOptions specifies the optional parameters to various List methods that
// support pagination.
type ListOptions struct {
// For paginated result sets, page of results to retrieve.
Page int `url:"page,omitempty"`
// For paginated result sets, the number of results to include per page.
PerPage int `url:"per_page,omitempty"`
}
// UploadOptions specifies the parameters to methods that support uploads.
type UploadOptions struct {
Name string `url:"name,omitempty"`
}
// addOptions adds the parameters in opt as URL query parameters to s. opt
// must be a struct whose fields may contain "url" tags.
func addOptions(s string, opt interface{}) (string, error) {
v := reflect.ValueOf(opt)
if v.Kind() == reflect.Ptr && v.IsNil() {
return s, nil
}
u, err := url.Parse(s)
if err != nil {
return s, err
}
qs, err := query.Values(opt)
if err != nil {
return s, err
}
u.RawQuery = qs.Encode()
return u.String(), nil
}
// NewClient returns a new GitHub API client. If a nil httpClient is
// provided, http.DefaultClient will be used. To use API methods which require
// authentication, provide an http.Client that will perform the authentication
// for you (such as that provided by the goauth2 library).
func NewClient(httpClient *http.Client) *Client {
if httpClient == nil {
httpClient = http.DefaultClient
}
baseURL, _ := url.Parse(defaultBaseURL)
uploadURL, _ := url.Parse(uploadBaseURL)
c := &Client{client: httpClient, BaseURL: baseURL, UserAgent: userAgent, UploadURL: uploadURL}
c.Activity = &ActivityService{client: c}
c.Gists = &GistsService{client: c}
c.Git = &GitService{client: c}
c.Gitignores = &GitignoresService{client: c}
c.Issues = &IssuesService{client: c}
c.Organizations = &OrganizationsService{client: c}
c.PullRequests = &PullRequestsService{client: c}
c.Repositories = &RepositoriesService{client: c}
c.Search = &SearchService{client: c}
c.Users = &UsersService{client: c}
return c
}
// NewRequest creates an API request. A relative URL can be provided in urlStr,
// in which case it is resolved relative to the BaseURL of the Client.
// Relative URLs should always be specified without a preceding slash. If
// specified, the value pointed to by body is JSON encoded and included as the
// request body.
func (c *Client) NewRequest(method, urlStr string, body interface{}) (*http.Request, error) {
rel, err := url.Parse(urlStr)
if err != nil {
return nil, err
}
u := c.BaseURL.ResolveReference(rel)
buf := new(bytes.Buffer)
if body != nil {
err := json.NewEncoder(buf).Encode(body)
if err != nil {
return nil, err
}
}
req, err := http.NewRequest(method, u.String(), buf)
if err != nil {
return nil, err
}
req.Header.Add("Accept", mediaTypeV3)
req.Header.Add("User-Agent", c.UserAgent)
return req, nil
}
// NewUploadRequest creates an upload request. A relative URL can be provided in
// urlStr, in which case it is resolved relative to the UploadURL of the Client.
// Relative URLs should always be specified without a preceding slash.
func (c *Client) NewUploadRequest(urlStr string, reader io.Reader, size int64, mediaType string) (*http.Request, error) {
rel, err := url.Parse(urlStr)
if err != nil {
return nil, err
}
u := c.UploadURL.ResolveReference(rel)
req, err := http.NewRequest("POST", u.String(), reader)
if err != nil {
return nil, err
}
req.ContentLength = size
if len(mediaType) == 0 {
mediaType = defaultMediaType
}
req.Header.Add("Content-Type", mediaType)
req.Header.Add("Accept", mediaTypeV3)
req.Header.Add("User-Agent", c.UserAgent)
return req, nil
}
// Response is a GitHub API response. This wraps the standard http.Response
// returned from GitHub and provides convenient access to things like
// pagination links.
type Response struct {
*http.Response
// These fields provide the page values for paginating through a set of
// results. Any or all of these may be set to the zero value for
// responses that are not part of a paginated set, or for which there
// are no additional pages.
NextPage int
PrevPage int
FirstPage int
LastPage int
Rate
}
// newResponse creats a new Response for the provided http.Response.
func newResponse(r *http.Response) *Response {
response := &Response{Response: r}
response.populatePageValues()
response.populateRate()
return response
}
// populatePageValues parses the HTTP Link response headers and populates the
// various pagination link values in the Reponse.
func (r *Response) populatePageValues() {
if links, ok := r.Response.Header["Link"]; ok && len(links) > 0 {
for _, link := range strings.Split(links[0], ",") {
segments := strings.Split(strings.TrimSpace(link), ";")
// link must at least have href and rel
if len(segments) < 2 {
continue
}
// ensure href is properly formatted
if !strings.HasPrefix(segments[0], "<") || !strings.HasSuffix(segments[0], ">") {
continue
}
// try to pull out page parameter
url, err := url.Parse(segments[0][1 : len(segments[0])-1])
if err != nil {
continue
}
page := url.Query().Get("page")
if page == "" {
continue
}
for _, segment := range segments[1:] {
switch strings.TrimSpace(segment) {
case `rel="next"`:
r.NextPage, _ = strconv.Atoi(page)
case `rel="prev"`:
r.PrevPage, _ = strconv.Atoi(page)
case `rel="first"`:
r.FirstPage, _ = strconv.Atoi(page)
case `rel="last"`:
r.LastPage, _ = strconv.Atoi(page)
}
}
}
}
}
// populateRate parses the rate related headers and populates the response Rate.
func (r *Response) populateRate() {
if limit := r.Header.Get(headerRateLimit); limit != "" {
r.Rate.Limit, _ = strconv.Atoi(limit)
}
if remaining := r.Header.Get(headerRateRemaining); remaining != "" {
r.Rate.Remaining, _ = strconv.Atoi(remaining)
}
if reset := r.Header.Get(headerRateReset); reset != "" {
if v, _ := strconv.ParseInt(reset, 10, 64); v != 0 {
r.Rate.Reset = Timestamp{time.Unix(v, 0)}
}
}
}
// Do sends an API request and returns the API response. The API response is
// JSON decoded and stored in the value pointed to by v, or returned as an
// error if an API error has occurred. If v implements the io.Writer
// interface, the raw response body will be written to v, without attempting to
// first decode it.
func (c *Client) Do(req *http.Request, v interface{}) (*Response, error) {
resp, err := c.client.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
response := newResponse(resp)
c.Rate = response.Rate
err = CheckResponse(resp)
if err != nil {
// even though there was an error, we still return the response
// in case the caller wants to inspect it further
return response, err
}
if v != nil {
if w, ok := v.(io.Writer); ok {
io.Copy(w, resp.Body)
} else {
err = json.NewDecoder(resp.Body).Decode(v)
}
}
return response, err
}
/*
An ErrorResponse reports one or more errors caused by an API request.
GitHub API docs: http://developer.github.com/v3/#client-errors
*/
type ErrorResponse struct {
Response *http.Response // HTTP response that caused this error
Message string `json:"message"` // error message
Errors []Error `json:"errors"` // more detail on individual errors
}
func (r *ErrorResponse) Error() string {
return fmt.Sprintf("%v %v: %d %v %+v",
r.Response.Request.Method, r.Response.Request.URL,
r.Response.StatusCode, r.Message, r.Errors)
}
/*
An Error reports more details on an individual error in an ErrorResponse.
These are the possible validation error codes:
missing:
resource does not exist
missing_field:
a required field on a resource has not been set
invalid:
the formatting of a field is invalid
already_exists:
another resource has the same valid as this field
GitHub API docs: http://developer.github.com/v3/#client-errors
*/
type Error struct {
Resource string `json:"resource"` // resource on which the error occurred
Field string `json:"field"` // field on which the error occurred
Code string `json:"code"` // validation error code
}
func (e *Error) Error() string {
return fmt.Sprintf("%v error caused by %v field on %v resource",
e.Code, e.Field, e.Resource)
}
// CheckResponse checks the API response for errors, and returns them if
// present. A response is considered an error if it has a status code outside
// the 200 range. API error responses are expected to have either no response
// body, or a JSON response body that maps to ErrorResponse. Any other
// response body will be silently ignored.
func CheckResponse(r *http.Response) error {
if c := r.StatusCode; 200 <= c && c <= 299 {
return nil
}
errorResponse := &ErrorResponse{Response: r}
data, err := ioutil.ReadAll(r.Body)
if err == nil && data != nil {
json.Unmarshal(data, errorResponse)
}
return errorResponse
}
// parseBoolResponse determines the boolean result from a GitHub API response.
// Several GitHub API methods return boolean responses indicated by the HTTP
// status code in the response (true indicated by a 204, false indicated by a
// 404). This helper function will determine that result and hide the 404
// error if present. Any other error will be returned through as-is.
func parseBoolResponse(err error) (bool, error) {
if err == nil {
return true, nil
}
if err, ok := err.(*ErrorResponse); ok && err.Response.StatusCode == http.StatusNotFound {
// Simply false. In this one case, we do not pass the error through.
return false, nil
}
// some other real error occurred
return false, err
}
// Rate represents the rate limit for the current client.
type Rate struct {
	// The number of requests per hour the client is currently limited to.
	Limit int `json:"limit"`
	// The number of remaining requests the client can make this hour.
	Remaining int `json:"remaining"`
	// The time at which the current rate limit will reset.
	// (Reset uses this package's Timestamp type, declared elsewhere in the file.)
	Reset Timestamp `json:"reset"`
}
// String returns a human-readable form of the rate limit via the package's
// Stringify helper.
func (r Rate) String() string {
	return Stringify(r)
}
// RateLimits represents the rate limits for the current client.
type RateLimits struct {
	// The rate limit for non-search API requests. Unauthenticated
	// requests are limited to 60 per hour. Authenticated requests are
	// limited to 5,000 per hour.
	Core *Rate `json:"core"`
	// The rate limit for search API requests. Unauthenticated requests
	// are limited to 5 requests per minute. Authenticated requests are
	// limited to 20 per minute.
	//
	// GitHub API docs: https://developer.github.com/v3/search/#rate-limit
	Search *Rate `json:"search"`
}
// String returns a human-readable form of both rate limits via the package's
// Stringify helper.
func (r RateLimits) String() string {
	return Stringify(r)
}
// RateLimit returns the core rate limit for the current client.
//
// Deprecated: Use RateLimits instead, which also reports the search limit.
func (c *Client) RateLimit() (*Rate, *Response, error) {
	limits, resp, err := c.RateLimits()
	if limits == nil {
		return nil, nil, err
	}
	return limits.Core, resp, err
}
// RateLimits returns the rate limits for the current client.
func (c *Client) RateLimits() (*RateLimits, *Response, error) {
	req, err := c.NewRequest("GET", "rate_limit", nil)
	if err != nil {
		return nil, nil, err
	}
	// The API nests the limits under a "resources" key.
	var body struct {
		Resources *RateLimits `json:"resources"`
	}
	resp, err := c.Do(req, &body)
	if err != nil {
		return nil, nil, err
	}
	return body.Resources, resp, err
}
/*
UnauthenticatedRateLimitedTransport allows you to make unauthenticated calls
that need to use a higher rate limit associated with your OAuth application.
	t := &github.UnauthenticatedRateLimitedTransport{
		ClientID:     "your app's client ID",
		ClientSecret: "your app's client secret",
	}
	client := github.NewClient(t.Client())
This will append the querystring params client_id=xxx&client_secret=yyy to all
requests.
See http://developer.github.com/v3/#unauthenticated-rate-limited-requests for
more information.
*/
type UnauthenticatedRateLimitedTransport struct {
	// ClientID is the GitHub OAuth client ID of the current application, which
	// can be found by selecting its entry in the list at
	// https://github.com/settings/applications.
	ClientID string
	// ClientSecret is the GitHub OAuth client secret of the current
	// application. It is sent with every request, so treat it as sensitive.
	ClientSecret string
	// Transport is the underlying HTTP transport to use when making requests.
	// It will default to http.DefaultTransport if nil.
	Transport http.RoundTripper
}
// RoundTrip implements the RoundTripper interface, appending the client_id
// and client_secret query parameters to every outgoing request.
func (t *UnauthenticatedRateLimitedTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	switch {
	case t.ClientID == "":
		return nil, errors.New("t.ClientID is empty")
	case t.ClientSecret == "":
		return nil, errors.New("t.ClientSecret is empty")
	}
	// http.RoundTripper requires that the given request is never modified,
	// so work on a copy before adding the extra querystring params.
	clone := cloneRequest(req)
	query := clone.URL.Query()
	query.Set("client_id", t.ClientID)
	query.Set("client_secret", t.ClientSecret)
	clone.URL.RawQuery = query.Encode()
	// Make the HTTP request through the configured transport.
	return t.transport().RoundTrip(clone)
}
// Client returns an *http.Client that makes requests which are subject to the
// rate limit of your OAuth application. The client simply uses this transport
// as its RoundTripper.
func (t *UnauthenticatedRateLimitedTransport) Client() *http.Client {
	return &http.Client{Transport: t}
}
// transport returns the configured underlying transport, falling back to
// http.DefaultTransport when none was set.
func (t *UnauthenticatedRateLimitedTransport) transport() http.RoundTripper {
	if t.Transport == nil {
		return http.DefaultTransport
	}
	return t.Transport
}
// cloneRequest returns a clone of the provided *http.Request. The clone is a
// shallow copy of the struct and its Header map.
func cloneRequest(r *http.Request) *http.Request {
// shallow copy of the struct
r2 := new(http.Request)
*r2 = *r
// deep copy of the Header
r2.Header = make(http.Header)
for k, s := range r.Header {
r2.Header[k] = s
}
return r2
}
// Bool is a helper routine that allocates a new bool value
// to store v and returns a pointer to it.
func Bool(v bool) *bool {
	// v is a copy of the caller's value; taking its address is safe in Go.
	return &v
}
// Int is a helper routine that allocates a new int value
// to store v and returns a pointer to it.
func Int(v int) *int {
	// v is a copy of the caller's value; taking its address is safe in Go.
	return &v
}
// String is a helper routine that allocates a new string value
// to store v and returns a pointer to it.
func String(v string) *string {
	// v is a copy of the caller's value; taking its address is safe in Go.
	return &v
}
| mit |
tuminitumina/magentobasic | app/code/core/Mage/Adminhtml/controllers/Sales/Order/CreateController.php | 21852 | <?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magento.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magento.com for more information.
*
* @category Mage
* @package Mage_Adminhtml
* @copyright Copyright (c) 2006-2016 X.commerce, Inc. and affiliates (http://www.magento.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
* Adminhtml sales orders creation process controller
*
* @category Mage
* @package Mage_Adminhtml
* @author Magento Core Team <core@magentocommerce.com>
*/
class Mage_Adminhtml_Sales_Order_CreateController extends Mage_Adminhtml_Controller_Action
{
    /**
     * Additional initialization
     *
     */
    protected function _construct()
    {
        $this->setUsedModuleName('Mage_Sales');
        // During order creation in the backend admin has ability to add any products to order
        Mage::helper('catalog/product')->setSkipSaleableCheck(true);
    }
    /**
     * Retrieve session object
     *
     * @return Mage_Adminhtml_Model_Session_Quote
     */
    protected function _getSession()
    {
        return Mage::getSingleton('adminhtml/session_quote');
    }
    /**
     * Retrieve quote object
     *
     * @return Mage_Sales_Model_Quote
     */
    protected function _getQuote()
    {
        return $this->_getSession()->getQuote();
    }
    /**
     * Retrieve order create model
     *
     * @return Mage_Adminhtml_Model_Sales_Order_Create
     */
    protected function _getOrderCreateModel()
    {
        return Mage::getSingleton('adminhtml/sales_order_create');
    }
    /**
     * Retrieve gift message save model
     *
     * @return Mage_Adminhtml_Model_Giftmessage_Save
     */
    protected function _getGiftmessageSaveModel()
    {
        return Mage::getSingleton('adminhtml/giftmessage_save');
    }
    /**
     * Initialize order creation session data
     *
     * @return Mage_Adminhtml_Sales_Order_CreateController
     */
    protected function _initSession()
    {
        /**
         * Identify customer
         */
        if ($customerId = $this->getRequest()->getParam('customer_id')) {
            $this->_getSession()->setCustomerId((int) $customerId);
        }
        /**
         * Identify store
         */
        if ($storeId = $this->getRequest()->getParam('store_id')) {
            $this->_getSession()->setStoreId((int) $storeId);
        }
        /**
         * Identify currency
         */
        if ($currencyId = $this->getRequest()->getParam('currency_id')) {
            $this->_getSession()->setCurrencyId((string) $currencyId);
            $this->_getOrderCreateModel()->setRecollect(true);
        }
        //Notify other modules about the session quote
        Mage::dispatchEvent('create_order_session_quote_initialized',
                array('session_quote' => $this->_getSession()));
        return $this;
    }
    /**
     * Processing request data
     *
     * @return Mage_Adminhtml_Sales_Order_CreateController
     */
    protected function _processData()
    {
        return $this->_processActionData();
    }
    /**
     * Process request data with additional logic for saving quote and creating order
     *
     * NOTE: the statement order below is significant — quote/session state is
     * mutated step by step and later steps read the results of earlier ones.
     *
     * @param string $action
     * @return Mage_Adminhtml_Sales_Order_CreateController
     */
    protected function _processActionData($action = null)
    {
        $eventData = array(
            'order_create_model' => $this->_getOrderCreateModel(),
            'request_model' => $this->getRequest(),
            'session' => $this->_getSession(),
        );
        Mage::dispatchEvent('adminhtml_sales_order_create_process_data_before', $eventData);
        /**
         * Saving order data
         */
        if ($data = $this->getRequest()->getPost('order')) {
            $this->_getOrderCreateModel()->importPostData($data);
        }
        /**
         * Initialize catalog rule data
         */
        $this->_getOrderCreateModel()->initRuleData();
        /**
         * init first billing address, need for virtual products
         */
        $this->_getOrderCreateModel()->getBillingAddress();
        /**
         * Flag for using billing address for shipping
         */
        if (!$this->_getOrderCreateModel()->getQuote()->isVirtual()) {
            $syncFlag = $this->getRequest()->getPost('shipping_as_billing');
            $shippingMethod = $this->_getOrderCreateModel()->getShippingAddress()->getShippingMethod();
            if (is_null($syncFlag)
                && $this->_getOrderCreateModel()->getShippingAddress()->getSameAsBilling()
                && empty($shippingMethod)
            ) {
                $this->_getOrderCreateModel()->setShippingAsBilling(1);
            } else {
                $this->_getOrderCreateModel()->setShippingAsBilling((int)$syncFlag);
            }
        }
        /**
         * Change shipping address flag
         */
        if (!$this->_getOrderCreateModel()->getQuote()->isVirtual() && $this->getRequest()->getPost('reset_shipping')) {
            $this->_getOrderCreateModel()->resetShippingMethod(true);
        }
        /**
         * Collecting shipping rates
         */
        if (!$this->_getOrderCreateModel()->getQuote()->isVirtual() &&
            $this->getRequest()->getPost('collect_shipping_rates')
        ) {
            $this->_getOrderCreateModel()->collectShippingRates();
        }
        /**
         * Apply mass changes from sidebar
         */
        if ($data = $this->getRequest()->getPost('sidebar')) {
            $this->_getOrderCreateModel()->applySidebarData($data);
        }
        /**
         * Adding product to quote from shopping cart, wishlist etc.
         */
        if ($productId = (int) $this->getRequest()->getPost('add_product')) {
            $this->_getOrderCreateModel()->addProduct($productId, $this->getRequest()->getPost());
        }
        /**
         * Adding products to quote from special grid
         */
        if ($this->getRequest()->has('item') && !$this->getRequest()->getPost('update_items') && !($action == 'save')) {
            $items = $this->getRequest()->getPost('item');
            $items = $this->_processFiles($items);
            $this->_getOrderCreateModel()->addProducts($items);
        }
        /**
         * Update quote items
         */
        if ($this->getRequest()->getPost('update_items')) {
            $items = $this->getRequest()->getPost('item', array());
            $items = $this->_processFiles($items);
            $this->_getOrderCreateModel()->updateQuoteItems($items);
        }
        /**
         * Remove quote item
         */
        $removeItemId = (int) $this->getRequest()->getPost('remove_item');
        $removeFrom = (string) $this->getRequest()->getPost('from');
        if ($removeItemId && $removeFrom) {
            $this->_getOrderCreateModel()->removeItem($removeItemId, $removeFrom);
        }
        /**
         * Move quote item
         */
        $moveItemId = (int) $this->getRequest()->getPost('move_item');
        $moveTo = (string) $this->getRequest()->getPost('to');
        if ($moveItemId && $moveTo) {
            $this->_getOrderCreateModel()->moveQuoteItem($moveItemId, $moveTo);
        }
        if ($paymentData = $this->getRequest()->getPost('payment')) {
            $this->_getOrderCreateModel()->getQuote()->getPayment()->addData($paymentData);
        }
        $eventData = array(
            'order_create_model' => $this->_getOrderCreateModel(),
            'request' => $this->getRequest()->getPost(),
        );
        Mage::dispatchEvent('adminhtml_sales_order_create_process_data', $eventData);
        $this->_getOrderCreateModel()
            ->saveQuote();
        // Payment data is re-applied because saveQuote() may have changed the quote.
        if ($paymentData = $this->getRequest()->getPost('payment')) {
            $this->_getOrderCreateModel()->getQuote()->getPayment()->addData($paymentData);
        }
        /**
         * Saving of giftmessages
         */
        $giftmessages = $this->getRequest()->getPost('giftmessage');
        if ($giftmessages) {
            $this->_getGiftmessageSaveModel()->setGiftmessages($giftmessages)
                ->saveAllInQuote();
        }
        /**
         * Importing gift message allow items from specific product grid
         */
        if ($data = $this->getRequest()->getPost('add_products')) {
            $this->_getGiftmessageSaveModel()
                ->importAllowQuoteItemsFromProducts(Mage::helper('core')->jsonDecode($data));
        }
        /**
         * Importing gift message allow items on update quote items
         */
        if ($this->getRequest()->getPost('update_items')) {
            $items = $this->getRequest()->getPost('item', array());
            $this->_getGiftmessageSaveModel()->importAllowQuoteItemsFromItems($items);
        }
        $data = $this->getRequest()->getPost('order');
        $couponCode = '';
        if (isset($data) && isset($data['coupon']['code'])) {
            $couponCode = trim($data['coupon']['code']);
        }
        if (!empty($couponCode)) {
            // NOTE(review): this assumes saveQuote() above drops a rejected
            // coupon from the quote, so a mismatch signals an invalid code —
            // confirm against Mage_Adminhtml_Model_Sales_Order_Create.
            if ($this->_getQuote()->getCouponCode() !== $couponCode) {
                $this->_getSession()->addError(
                    $this->__('"%s" coupon code is not valid.', $this->_getHelper()->escapeHtml($couponCode)));
            } else {
                $this->_getSession()->addSuccess($this->__('The coupon code has been accepted.'));
            }
        }
        return $this;
    }
    /**
     * Process buyRequest file options of items
     *
     * @param array $items
     * @return array
     */
    protected function _processFiles($items)
    {
        /* @var $productHelper Mage_Catalog_Helper_Product */
        $productHelper = Mage::helper('catalog/product');
        foreach ($items as $id => $item) {
            $buyRequest = new Varien_Object($item);
            // Uploaded files for item options are posted with an item_<id>_ prefix.
            $params = array('files_prefix' => 'item_' . $id . '_');
            $buyRequest = $productHelper->addParamsToBuyRequest($buyRequest, $params);
            if ($buyRequest->hasData()) {
                $items[$id] = $buyRequest->toArray();
            }
        }
        return $items;
    }
    /**
     * Index page
     */
    public function indexAction()
    {
        $this->_title($this->__('Sales'))->_title($this->__('Orders'))->_title($this->__('New Order'));
        $this->_initSession();
        $this->loadLayout();
        $this->_setActiveMenu('sales/order')
            ->renderLayout();
    }
    /**
     * Start a new order based on an existing one (reorder)
     */
    public function reorderAction()
    {
        $this->_getSession()->clear();
        $orderId = $this->getRequest()->getParam('order_id');
        $order = Mage::getModel('sales/order')->load($orderId);
        if (!Mage::helper('sales/reorder')->canReorder($order)) {
            return $this->_forward('noRoute');
        }
        if ($order->getId()) {
            $order->setReordered(true);
            $this->_getSession()->setUseOldShippingMethod(true);
            $this->_getOrderCreateModel()->initFromOrder($order);
            $this->_redirect('*/*');
        }
        else {
            $this->_redirect('*/sales_order/');
        }
    }
    /**
     * Reload the session quote from storage, discarding unsaved changes
     *
     * @return Mage_Adminhtml_Sales_Order_CreateController
     */
    protected function _reloadQuote()
    {
        $id = $this->_getQuote()->getId();
        $this->_getQuote()->load($id);
        return $this;
    }
    /**
     * Loading page block
     */
    public function loadBlockAction()
    {
        $request = $this->getRequest();
        try {
            $this->_initSession()
                ->_processData();
        }
        catch (Mage_Core_Exception $e){
            $this->_reloadQuote();
            $this->_getSession()->addError($e->getMessage());
        }
        catch (Exception $e){
            $this->_reloadQuote();
            $this->_getSession()->addException($e, $e->getMessage());
        }
        $asJson= $request->getParam('json');
        $block = $request->getParam('block');
        $update = $this->getLayout()->getUpdate();
        if ($asJson) {
            $update->addHandle('adminhtml_sales_order_create_load_block_json');
        } else {
            $update->addHandle('adminhtml_sales_order_create_load_block_plain');
        }
        if ($block) {
            $blocks = explode(',', $block);
            // JSON responses always include the 'message' block so any errors
            // collected above reach the client.
            if ($asJson && !in_array('message', $blocks)) {
                $blocks[] = 'message';
            }
            foreach ($blocks as $block) {
                $update->addHandle('adminhtml_sales_order_create_load_block_' . $block);
            }
        }
        $this->loadLayoutUpdates()->generateLayoutXml()->generateLayoutBlocks();
        $result = $this->getLayout()->getBlock('content')->toHtml();
        if ($request->getParam('as_js_varname')) {
            Mage::getSingleton('adminhtml/session')->setUpdateResult($result);
            $this->_redirect('*/*/showUpdateResult');
        } else {
            $this->getResponse()->setBody($result);
        }
    }
    /**
     * Adds configured product to quote
     */
    public function addConfiguredAction()
    {
        $errorMessage = null;
        try {
            $this->_initSession()
                ->_processData();
        }
        catch (Exception $e){
            $this->_reloadQuote();
            $errorMessage = $e->getMessage();
        }
        // Form result for client javascript
        $updateResult = new Varien_Object();
        if ($errorMessage) {
            $updateResult->setError(true);
            $updateResult->setMessage($errorMessage);
        } else {
            $updateResult->setOk(true);
        }
        $updateResult->setJsVarName($this->getRequest()->getParam('as_js_varname'));
        Mage::getSingleton('adminhtml/session')->setCompositeProductResult($updateResult);
        $this->_redirect('*/catalog_product/showUpdateResult');
    }
    /**
     * Start order create action
     */
    public function startAction()
    {
        $this->_getSession()->clear();
        $this->_redirect('*/*', array('customer_id' => $this->getRequest()->getParam('customer_id')));
    }
    /**
     * Cancel order create
     */
    public function cancelAction()
    {
        if ($orderId = $this->_getSession()->getReordered()) {
            $this->_getSession()->clear();
            $this->_redirect('*/sales_order/view', array(
                'order_id'=>$orderId
            ));
        } else {
            $this->_getSession()->clear();
            $this->_redirect('*/*');
        }
    }
    /**
     * Saving quote and create order
     */
    public function saveAction()
    {
        try {
            $this->_processActionData('save');
            $paymentData = $this->getRequest()->getPost('payment');
            if ($paymentData) {
                // Bitmask of payment method validations to apply
                // (see Mage_Payment_Model_Method_Abstract::CHECK_* constants).
                $paymentData['checks'] = Mage_Payment_Model_Method_Abstract::CHECK_USE_INTERNAL
                    | Mage_Payment_Model_Method_Abstract::CHECK_USE_FOR_COUNTRY
                    | Mage_Payment_Model_Method_Abstract::CHECK_USE_FOR_CURRENCY
                    | Mage_Payment_Model_Method_Abstract::CHECK_ORDER_TOTAL_MIN_MAX
                    | Mage_Payment_Model_Method_Abstract::CHECK_ZERO_TOTAL;
                $this->_getOrderCreateModel()->setPaymentData($paymentData);
                $this->_getOrderCreateModel()->getQuote()->getPayment()->addData($paymentData);
            }
            $order = $this->_getOrderCreateModel()
                ->setIsValidate(true)
                ->importPostData($this->getRequest()->getPost('order'))
                ->createOrder();
            $this->_getSession()->clear();
            Mage::getSingleton('adminhtml/session')->addSuccess($this->__('The order has been created.'));
            if (Mage::getSingleton('admin/session')->isAllowed('sales/order/actions/view')) {
                $this->_redirect('*/sales_order/view', array('order_id' => $order->getId()));
            } else {
                $this->_redirect('*/sales_order/index');
            }
        } catch (Mage_Payment_Model_Info_Exception $e) {
            // Payment info errors keep the quote so the admin can correct it.
            $this->_getOrderCreateModel()->saveQuote();
            $message = $e->getMessage();
            if( !empty($message) ) {
                $this->_getSession()->addError($message);
            }
            $this->_redirect('*/*/');
        } catch (Mage_Core_Exception $e){
            $message = $e->getMessage();
            if( !empty($message) ) {
                $this->_getSession()->addError($message);
            }
            $this->_redirect('*/*/');
        }
        catch (Exception $e){
            $this->_getSession()->addException($e, $this->__('Order saving error: %s', $e->getMessage()));
            $this->_redirect('*/*/');
        }
    }
    /**
     * Acl check for admin
     *
     * @return bool
     */
    protected function _isAllowed()
    {
        return Mage::getSingleton('admin/session')->isAllowed($this->_getAclResourse());
    }
    /**
     * Get acl resource
     *
     * @return string
     */
    protected function _getAclResourse()
    {
        $action = strtolower($this->getRequest()->getActionName());
        // index/save within a reorder session are governed by the reorder ACL.
        if (in_array($action, array('index', 'save')) && $this->_getSession()->getReordered()) {
            $action = 'reorder';
        }
        switch ($action) {
            case 'index':
            case 'save':
                $aclResource = 'sales/order/actions/create';
                break;
            case 'reorder':
                $aclResource = 'sales/order/actions/reorder';
                break;
            case 'cancel':
                $aclResource = 'sales/order/actions/cancel';
                break;
            default:
                $aclResource = 'sales/order/actions';
                break;
        }
        return $aclResource;
    }
    /**
     * Ajax handler to response configuration fieldset of composite product in order
     *
     * @return Mage_Adminhtml_Sales_Order_CreateController
     */
    public function configureProductToAddAction()
    {
        // Prepare data
        $productId = (int) $this->getRequest()->getParam('id');
        $configureResult = new Varien_Object();
        $configureResult->setOk(true);
        $configureResult->setProductId($productId);
        $sessionQuote = Mage::getSingleton('adminhtml/session_quote');
        $configureResult->setCurrentStoreId($sessionQuote->getStore()->getId());
        $configureResult->setCurrentCustomerId($sessionQuote->getCustomerId());
        // Render page
        /* @var $helper Mage_Adminhtml_Helper_Catalog_Product_Composite */
        $helper = Mage::helper('adminhtml/catalog_product_composite');
        $helper->renderConfigureResult($this, $configureResult);
        return $this;
    }
    /**
     * Ajax handler to response configuration fieldset of composite product in quote items
     *
     * @return Mage_Adminhtml_Sales_Order_CreateController
     */
    public function configureQuoteItemsAction()
    {
        // Prepare data
        $configureResult = new Varien_Object();
        try {
            $quoteItemId = (int) $this->getRequest()->getParam('id');
            if (!$quoteItemId) {
                Mage::throwException($this->__('Quote item id is not received.'));
            }
            $quoteItem = Mage::getModel('sales/quote_item')->load($quoteItemId);
            if (!$quoteItem->getId()) {
                Mage::throwException($this->__('Quote item is not loaded.'));
            }
            $configureResult->setOk(true);
            $optionCollection = Mage::getModel('sales/quote_item_option')->getCollection()
                ->addItemFilter(array($quoteItemId));
            $quoteItem->setOptions($optionCollection->getOptionsByItem($quoteItem));
            $configureResult->setBuyRequest($quoteItem->getBuyRequest());
            $configureResult->setCurrentStoreId($quoteItem->getStoreId());
            $configureResult->setProductId($quoteItem->getProductId());
            $sessionQuote = Mage::getSingleton('adminhtml/session_quote');
            $configureResult->setCurrentCustomerId($sessionQuote->getCustomerId());
        } catch (Exception $e) {
            $configureResult->setError(true);
            $configureResult->setMessage($e->getMessage());
        }
        // Render page
        /* @var $helper Mage_Adminhtml_Helper_Catalog_Product_Composite */
        $helper = Mage::helper('adminhtml/catalog_product_composite');
        $helper->renderConfigureResult($this, $configureResult);
        return $this;
    }
    /**
     * Show item update result from loadBlockAction
     * to prevent popup alert with resend data question
     *
     */
    public function showUpdateResultAction()
    {
        $session = Mage::getSingleton('adminhtml/session');
        if ($session->hasUpdateResult() && is_scalar($session->getUpdateResult())){
            $this->getResponse()->setBody($session->getUpdateResult());
            $session->unsUpdateResult();
        } else {
            $session->unsUpdateResult();
            return false;
        }
    }
    /**
     * Process data and display index page
     */
    public function processDataAction()
    {
        $this->_initSession();
        $this->_processData();
        $this->_forward('index');
    }
}
| mit |
erikzhouxin/CSharpSolution | OSS/Alipay/F2FPayDll/Projects/alipay-sdk-NET20161213174056/AopResponse.cs | 1702 | using System;
using System.Xml.Serialization;
namespace Aop.Api
{
    /// <summary>
    /// Base class for API responses: carries the gateway result code,
    /// message, optional sub-code/sub-message, and the raw response body.
    /// </summary>
    [Serializable]
    public abstract class AopResponse
    {
        private string code;
        private string msg;
        private string subCode;
        private string subMsg;
        private string body;
        /// <summary>
        /// Error code
        /// (corresponds to ErrCode)
        /// </summary>
        [XmlElement("code")]
        public string Code
        {
            get { return code; }
            set { code = value; }
        }
        /// <summary>
        /// Error message
        /// (corresponds to ErrMsg)
        /// </summary>
        [XmlElement("msg")]
        public string Msg
        {
            get { return msg; }
            set { msg = value; }
        }
        /// <summary>
        /// Sub error code
        /// (corresponds to SubErrCode)
        /// </summary>
        [XmlElement("sub_code")]
        public string SubCode
        {
            get { return subCode; }
            set { subCode = value; }
        }
        /// <summary>
        /// Sub error message
        /// (corresponds to SubErrMsg)
        /// </summary>
        [XmlElement("sub_msg")]
        public string SubMsg
        {
            get { return subMsg; }
            set { subMsg = value; }
        }
        /// <summary>
        /// Raw response content
        /// </summary>
        public string Body
        {
            get { return body; }
            set { body = value; }
        }
        /// <summary>
        /// Whether the response indicates an error
        /// (true when a sub error code is present)
        /// </summary>
        public bool IsError
        {
            get
            {
                return !string.IsNullOrEmpty(this.SubCode);
            }
        }
    }
}
| mit |
NestorSegura/operationcode_frontend | stories/components/iconCard/index.js | 1134 | import React from 'react';
import { storiesOf } from '@storybook/react';
import IconCard from 'shared/components/iconCard/iconCard';
storiesOf('shared/components/iconCard', module)
.add('Default', () => (
<IconCard
title="Title"
fontAwesomeIcon="FaRoad"
/>
))
.add('With subText', () => (
<IconCard
title="Title"
subText="Sub-text showing additional information"
fontAwesomeIcon="FaGroup"
/>
))
.add('Linked', () => (
<IconCard
title="Title"
subText="http://www.slack.com"
url="http://www.slack.com"
fontAwesomeIcon="FaSlack"
/>
))
.add('Sized icon', () => (
<IconCard
title="Title"
fontAwesomeIcon="FaStar"
iconSize="200"
/>
))
.add('Icon above heading', () => (
<IconCard
title="Title"
fontAwesomeIcon="FaThumbsUp"
iconAboveHeading
/>
))
.add('Preformatted subtext (using HTML)', () => (
<IconCard
title="Title"
fontAwesomeIcon="FaHtml5"
subText="<strong>Test</strong><br/>
<em>Test</em><br/>
<strike>Test<strike>"
usingHtml
/>
));
| mit |
azri/Insoshi-Rails-3 | db/migrate/20081015002258_migrate_photo_data.rb | 968 | class MigratePhotoData < ActiveRecord::Migration
def self.up
# For each person, create a gallery and put all their photos there.
Person.find(:all).each do |person|
gallery = person.galleries.new
gallery.title = "Primary"
primary_photo = person.photos.detect(&:primary?)
gallery.primary_photo_id = primary_photo.id unless primary_photo.nil?
Gallery.skip_callback(:log_activity) do
gallery.save!
end
person.photos.each do |photo|
photo.gallery_id = gallery.reload.id
photo.avatar = photo.primary
Photo.skip_callback(:log_activity) do
photo.save!
end
end
end
end
def self.down
Photo.find(:all).each do |photo|
Photo.skip_callback(:log_activity) do
photo.gallery_id = nil
photo.primary = photo.avatar
photo.save(false)
end
end
Gallery.find(:all).each do |gallery|
gallery.destroy
end
end
end
| mit |
cultofmetatron/mephisto | lib/authenticated_system.rb | 3046 | module AuthenticatedSystem
protected
def logged_in?
current_user != :false
end
# Accesses the current user from the session.
def current_user
@current_user ||= (session[:user] && User.find_by_site(site, session[:user])) || :false
end
# Store the given user in the session.
def current_user=(new_user)
session[:user] = (new_user.nil? || new_user.is_a?(Symbol)) ? nil : new_user.id
@current_user = new_user
end
def authorized?
true
end
def login_required
username, passwd = get_auth_data
self.current_user ||= User.authenticate_for(site, username, passwd) || :false if username && passwd
logged_in? && authorized? ? true : access_denied
end
def access_denied
respond_to do |accepts|
accepts.html do
store_location
redirect_to :controller => "/account", :action => "login"
end
accepts.xml { access_denied_with_basic_auth }
end
false
end
# store current uri in the session.
# we can return to this location by calling return_location
# Options:
# * :overwrite - (default = true) Overwrite existing stored location
# * :uri - Return to the specified URI (defaults to request.request_uri)
def store_location(uri = nil)
session[:return_to] = uri || request.request_uri
end
def location_stored?
!session[:return_to].nil?
end
# move to the last store_location call or to the passed default one
def redirect_back_or_default(default)
redirect_to(location_stored? ? session[:return_to] : default)
session[:return_to] = nil
end
def basic_auth_required
User.authenticate_for(*get_auth_data.unshift(site)) || access_denied_with_basic_auth
end
# adds ActionView helper methods
def self.included(base)
base.send :helper_method, :current_user, :logged_in?
end
# When called with before_filter :login_from_cookie will check for an :auth_token
# cookie and log the user back in if apropriate
def login_from_cookie
return unless cookies[:token] && !logged_in?
self.current_user = site.user_by_token(cookies[:token])
cookies[:token] = { :value => self.current_user.reset_token! , :expires => self.current_user.token_expires_at } if logged_in?
true
end
private
def access_denied_with_basic_auth
headers["Status"] = "Unauthorized"
headers["WWW-Authenticate"] = %(Basic realm="Web Password")
render :text => "Could't authenticate you", :status => '401 Unauthorized'
end
@@http_auth_headers = %w(X-HTTP_AUTHORIZATION HTTP_AUTHORIZATION Authorization)
# gets BASIC auth info
def get_auth_data
auth_key = @@http_auth_headers.detect { |h| request.env.has_key?(h) }
auth_data = request.env[auth_key].to_s.split unless auth_key.blank?
return auth_data && auth_data[0] == 'Basic' ? Base64.decode64(auth_data[1]).split(':')[0..1] : [nil, nil]
end
end
| mit |
wieslawsoltes/Perspex | src/Avalonia.Animation/Animators/UInt64Animator.cs | 631 | using System;
namespace Avalonia.Animation.Animators
{
    /// <summary>
    /// Animator that handles <see cref="UInt64"/> properties.
    /// </summary>
    public class UInt64Animator : Animator<UInt64>
    {
        // Full UInt64 range used to normalize values into [0, 1] doubles.
        const double maxVal = (double)UInt64.MaxValue;
        /// <inheritdocs/>
        public override UInt64 Interpolate(double progress, UInt64 oldValue, UInt64 newValue)
        {
            // Normalize both endpoints, lerp in double space, scale back up.
            // NOTE(review): double carries only 53 mantissa bits, so values
            // above 2^53 lose precision on this round-trip — confirm that is
            // acceptable for animated UInt64 properties.
            var normOV = oldValue / maxVal;
            var normNV = newValue / maxVal;
            var deltaV = normNV - normOV;
            return (UInt64)Math.Round(maxVal * ((deltaV * progress) + normOV));
        }
    }
}
| mit |
magicaner/n98-magerun | tests/N98/Magento/Command/Developer/Translate/InlineShopCommandTest.php | 1163 | <?php
namespace N98\Magento\Command\Developer\Translate;
use Symfony\Component\Console\Tester\CommandTester;
use N98\Magento\Command\PHPUnit\TestCase;
class InlineShopCommandTest extends TestCase
{
    /**
     * Runs dev:translate:shop twice (--on then --off) against the admin store
     * and verifies the command reports each state change.
     */
    public function testExecute()
    {
        $application = $this->getApplication();
        // Fix: register the shop command under test. The original added
        // InlineAdminCommand — an apparent copy/paste slip from the admin
        // variant of this test.
        $application->add(new InlineShopCommand());
        $application->setAutoExit(false);
        $command = $this->getApplication()->find('dev:translate:shop');
        $commandTester = new CommandTester($command);
        $commandTester->execute(
            array(
                'command' => $command->getName(),
                'store'   => 'admin',
                '--on'    => true,
            )
        );
        $this->assertContains('Inline Translation enabled', $commandTester->getDisplay());
        $commandTester = new CommandTester($command);
        $commandTester->execute(
            array(
                'command' => $command->getName(),
                'store'   => 'admin',
                '--off'   => true,
            )
        );
        $this->assertContains('Inline Translation disabled', $commandTester->getDisplay());
    }
}
rbalda/neural_ocr | env/lib/python2.7/site-packages/mpl_toolkits/axes_grid1/colorbar.py | 27919 | '''
Colorbar toolkit with two classes and a function:
:class:`ColorbarBase`
the base class with full colorbar drawing functionality.
It can be used as-is to make a colorbar for a given colormap;
a mappable object (e.g., image) is not needed.
:class:`Colorbar`
the derived class for use with images or contour plots.
:func:`make_axes`
a function for resizing an axes and adding a second axes
suitable for a colorbar
The :meth:`~matplotlib.figure.Figure.colorbar` method uses :func:`make_axes`
and :class:`Colorbar`; the :func:`~matplotlib.pyplot.colorbar` function
is a thin wrapper over :meth:`~matplotlib.figure.Figure.colorbar`.
'''
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from matplotlib.externals import six
from matplotlib.externals.six.moves import xrange, zip
import numpy as np
import matplotlib as mpl
import matplotlib.colors as colors
import matplotlib.cm as cm
from matplotlib import docstring
import matplotlib.ticker as ticker
import matplotlib.cbook as cbook
import matplotlib.collections as collections
import matplotlib.contour as contour
from matplotlib.path import Path
from matplotlib.patches import PathPatch
from matplotlib.transforms import Bbox
make_axes_kw_doc = '''
============= ====================================================
Property Description
============= ====================================================
*orientation* vertical or horizontal
*fraction* 0.15; fraction of original axes to use for colorbar
*pad* 0.05 if vertical, 0.15 if horizontal; fraction
of original axes between colorbar and new image axes
*shrink* 1.0; fraction by which to shrink the colorbar
*aspect* 20; ratio of long to short dimensions
============= ====================================================
'''
colormap_kw_doc = '''
=========== ====================================================
Property Description
=========== ====================================================
*extend* [ 'neither' | 'both' | 'min' | 'max' ]
If not 'neither', make pointed end(s) for out-of-
range values. These are set for a given colormap
using the colormap set_under and set_over methods.
*spacing* [ 'uniform' | 'proportional' ]
Uniform spacing gives each discrete color the same
space; proportional makes the space proportional to
the data interval.
*ticks* [ None | list of ticks | Locator object ]
If None, ticks are determined automatically from the
input.
*format* [ None | format string | Formatter object ]
If None, the
:class:`~matplotlib.ticker.ScalarFormatter` is used.
If a format string is given, e.g., '%.3f', that is
used. An alternative
:class:`~matplotlib.ticker.Formatter` object may be
given instead.
*drawedges* [ False | True ] If true, draw lines at color
boundaries.
=========== ====================================================
The following will probably be useful only in the context of
indexed colors (that is, when the mappable has norm=NoNorm()),
or other unusual circumstances.
============ ===================================================
Property Description
============ ===================================================
*boundaries* None or a sequence
*values* None or a sequence which must be of length 1 less
than the sequence of *boundaries*. For each region
delimited by adjacent entries in *boundaries*, the
color mapped to the corresponding value in values
will be used.
============ ===================================================
'''
colorbar_doc = '''
Add a colorbar to a plot.
Function signatures for the :mod:`~matplotlib.pyplot` interface; all
but the first are also method signatures for the
:meth:`~matplotlib.figure.Figure.colorbar` method::
colorbar(**kwargs)
colorbar(mappable, **kwargs)
colorbar(mappable, cax=cax, **kwargs)
colorbar(mappable, ax=ax, **kwargs)
arguments:
*mappable*
the :class:`~matplotlib.image.Image`,
:class:`~matplotlib.contour.ContourSet`, etc. to
which the colorbar applies; this argument is mandatory for the
:meth:`~matplotlib.figure.Figure.colorbar` method but optional for the
:func:`~matplotlib.pyplot.colorbar` function, which sets the
default to the current image.
keyword arguments:
*cax*
None | axes object into which the colorbar will be drawn
*ax*
None | parent axes object from which space for a new
colorbar axes will be stolen
Additional keyword arguments are of two kinds:
axes properties:
%s
colorbar properties:
%s
If *mappable* is a :class:`~matplotlib.contours.ContourSet`, its *extend*
kwarg is included automatically.
Note that the *shrink* kwarg provides a simple way to keep a vertical
colorbar, for example, from being taller than the axes of the mappable
to which the colorbar is attached; but it is a manual method requiring
some trial and error. If the colorbar is too tall (or a horizontal
colorbar is too wide) use a smaller value of *shrink*.
For more precise control, you can manually specify the positions of
the axes objects in which the mappable and the colorbar are drawn. In
this case, do not use any of the axes properties kwargs.
It is known that some vector graphics viewer (svg and pdf) renders white gaps
between segments of the colorbar. This is due to bugs in the viewers not
matplotlib. As a workaround the colorbar can be rendered with overlapping
segments::
cbar = colorbar()
cbar.solids.set_edgecolor("face")
draw()
However this has negative consequences in other circumstances. Particularly with
semi transparent images (alpha < 1) and colorbar extensions and is not enabled
by default see (issue #1188).
returns:
:class:`~matplotlib.colorbar.Colorbar` instance; see also its base class,
:class:`~matplotlib.colorbar.ColorbarBase`. Call the
:meth:`~matplotlib.colorbar.ColorbarBase.set_label` method
to label the colorbar.
The transData of the *cax* is adjusted so that the limits in the
longest axis actually corresponds to the limits in colorbar range. On
the other hand, the shortest axis has a data limits of [1,2], whose
unconventional value is to prevent underflow when log scale is used.
''' % (make_axes_kw_doc, colormap_kw_doc)
docstring.interpd.update(colorbar_doc=colorbar_doc)
class CbarAxesLocator(object):
    """
    Axes locator for colorbar axes.

    It shifts/shrinks the colorbar axes so that there is room for the
    pointed extension triangles, which are drawn outside the axes area.
    """

    def __init__(self, locator=None, extend="neither", orientation="vertical"):
        """
        *locator* : the bbox returned from the locator is used as the
            initial axes location.  If None, axes.bbox is used.

        *extend* : same as in ColorbarBase

        *orientation* : same as in ColorbarBase
        """
        self._locator = locator
        # NOTE: the attribute name keeps the historical misspelling
        # ("extesion"); external code may rely on it.
        self.extesion_fraction = 0.05
        self.extend = extend
        self.orientation = orientation

    def get_original_position(self, axes, renderer):
        """Return the position of the axes before any adjustment."""
        if self._locator is not None:
            return self._locator(axes, renderer)
        return axes.get_position(original=True)

    def get_end_vertices(self):
        """
        Return a tuple ``(bottom, top)`` of vertex lists for the
        colorbar's extended ends: first the minimum end, then the
        maximum end (in axes coordinates).
        """
        # Concatenating the two vertex lists yields the frame outline.
        frac = self.extesion_fraction
        corx = frac * 2.
        cory = 1. / (1. - corx)
        x1, y1, w, h = 0, 0, 1, 1
        x2, y2 = x1 + w, y1 + h
        dw, dh = w * frac, h * frac * cory

        if self.extend in ["min", "both"]:
            bottom = [(x1, y1), (x1 + w / 2., y1 - dh), (x2, y1)]
        else:
            bottom = [(x1, y1), (x2, y1)]

        if self.extend in ["max", "both"]:
            top = [(x2, y2), (x1 + w / 2., y2 + dh), (x1, y2)]
        else:
            top = [(x2, y2), (x1, y2)]

        if self.orientation == "horizontal":
            # Swap coordinates so the long direction runs along x.
            bottom = [(yy, xx) for (xx, yy) in bottom]
            top = [(yy, xx) for (xx, yy) in top]

        return bottom, top

    def get_path_patch(self):
        """Return the Path for the axes patch (frame incl. extensions)."""
        end1, end2 = self.get_end_vertices()
        return Path(end1 + end2 + end1[:1])

    def get_path_ends(self):
        """Return the Paths of the two extension triangles."""
        end1, end2 = self.get_end_vertices()
        return Path(end1), Path(end2)

    def __call__(self, axes, renderer):
        """Return the adjusted position of the axes."""
        x1, y1, w, h = self.get_original_position(axes, renderer).bounds
        frac = self.extesion_fraction
        dw, dh = w * frac, h * frac

        horizontal = self.orientation == "horizontal"
        if self.extend in ["min", "both"]:
            if horizontal:
                x1 += dw
            else:
                y1 += dh
        if self.extend in ["max", "both"]:
            if horizontal:
                w -= 2 * dw
            else:
                h -= 2 * dh

        return Bbox.from_bounds(x1, y1, w, h)
class ColorbarBase(cm.ScalarMappable):
    '''
    Draw a colorbar in an existing axes.

    This is a base class for the :class:`Colorbar` class, which is the
    basis for the :func:`~matplotlib.pyplot.colorbar` method and pylab
    function.

    It is also useful by itself for showing a colormap.  If the *cmap*
    kwarg is given but *boundaries* and *values* are left as None,
    then the colormap will be displayed on a 0-1 scale. To show the
    under- and over-value colors, specify the *norm* as::

        colors.Normalize(clip=False)

    To show the colors versus index instead of on the 0-1 scale,
    use::

        norm=colors.NoNorm.

    Useful attributes:

    :attr:`ax`
        the Axes instance in which the colorbar is drawn

    :attr:`lines`
        a LineCollection if lines were drawn, otherwise None

    :attr:`dividers`
        a LineCollection if *drawedges* is True, otherwise None

    Useful public methods are :meth:`set_label` and :meth:`add_lines`.
    '''

    def __init__(self, ax, cmap=None,
                 norm=None,
                 alpha=1.0,
                 values=None,
                 boundaries=None,
                 orientation='vertical',
                 extend='neither',
                 spacing='uniform',  # uniform or proportional
                 ticks=None,
                 format=None,
                 drawedges=False,
                 filled=True,
                 ):
        self.ax = ax

        if cmap is None:
            cmap = cm.get_cmap()
        if norm is None:
            norm = colors.Normalize()
        self.alpha = alpha
        cm.ScalarMappable.__init__(self, cmap=cmap, norm=norm)
        self.values = values
        self.boundaries = boundaries
        self.extend = extend
        self.spacing = spacing
        self.orientation = orientation
        self.drawedges = drawedges
        self.filled = filled

        # Artists managed by this colorbar; created lazily below.
        self.solids = None
        self.lines = None
        self.dividers = None
        self.extension_patch1 = None
        self.extension_patch2 = None

        # The axis along which the colorbar values increase.
        if orientation == "vertical":
            self.cbar_axis = self.ax.yaxis
        else:
            self.cbar_axis = self.ax.xaxis

        if format is None:
            if isinstance(self.norm, colors.LogNorm):
                # change both axes for proper aspect
                self.ax.set_xscale("log")
                self.ax.set_yscale("log")
                self.cbar_axis.set_minor_locator(ticker.NullLocator())
                formatter = ticker.LogFormatter()
            else:
                formatter = None
        elif cbook.is_string_like(format):
            formatter = ticker.FormatStrFormatter(format)
        else:
            formatter = format  # Assume it is a Formatter

        if formatter is None:
            formatter = self.cbar_axis.get_major_formatter()
        else:
            self.cbar_axis.set_major_formatter(formatter)

        if cbook.iterable(ticks):
            self.cbar_axis.set_ticks(ticks)
        elif ticks is not None:
            self.cbar_axis.set_major_locator(ticks)
        else:
            self._select_locator(formatter)

        self._config_axes()
        self.update_artists()
        self.set_label_text('')

    def _get_colorbar_limits(self):
        """
        Return the initial (vmin, vmax) for the colorbar range; used to
        create the colorbar solids etc.  When boundaries are given, the
        boundaries consumed by the extension triangles are excluded.
        """
        if self.boundaries is not None:
            C = self.boundaries
            if self.extend in ["min", "both"]:
                C = C[1:]
            if self.extend in ["max", "both"]:
                C = C[:-1]
            return min(C), max(C)
        else:
            return self.get_clim()

    def _config_axes(self):
        '''
        Adjust the properties of the axes to be adequate for colorbar display.
        '''
        ax = self.ax

        # Use a custom locator so the extension triangles fit outside
        # the axes area.
        axes_locator = CbarAxesLocator(ax.get_axes_locator(),
                                       extend=self.extend,
                                       orientation=self.orientation)
        ax.set_axes_locator(axes_locator)

        # Override get_data_ratio so that aspect handling works.
        def _f():
            return 1.
        ax.get_data_ratio = _f
        ax.get_data_ratio_log = _f

        ax.set_frame_on(True)
        ax.set_navigate(False)

        self.ax.set_autoscalex_on(False)
        self.ax.set_autoscaley_on(False)

        if self.orientation == 'horizontal':
            ax.xaxis.set_label_position('bottom')
            ax.set_yticks([])
        else:
            ax.set_xticks([])
            ax.yaxis.set_label_position('right')
            ax.yaxis.set_ticks_position('right')

    def update_artists(self):
        """
        Update the colorbar associated artists, *filled* and
        *ends*. Note that *lines* are not updated.  This needs to be
        called whenever the clim of the associated image changes.
        """
        self._process_values()
        self._add_ends()

        X, Y = self._mesh()
        if self.filled:
            C = self._values[:, np.newaxis]
            self._add_solids(X, Y, C)

        ax = self.ax
        vmin, vmax = self._get_colorbar_limits()
        # The short axis spans [1, 2] to avoid underflow on log scale.
        if self.orientation == 'horizontal':
            ax.set_ylim(1, 2)
            ax.set_xlim(vmin, vmax)
        else:
            ax.set_xlim(1, 2)
            ax.set_ylim(vmin, vmax)

    def _add_ends(self):
        """
        Create patches from the extended ends and add them to the axes.
        """
        del self.extension_patch1
        del self.extension_patch2

        path1, path2 = self.ax.get_axes_locator().get_path_ends()
        fc = mpl.rcParams['axes.facecolor']
        ec = mpl.rcParams['axes.edgecolor']
        linewidths = 0.5 * mpl.rcParams['axes.linewidth']
        self.extension_patch1 = PathPatch(path1,
                                          fc=fc, ec=ec, lw=linewidths,
                                          zorder=2.,
                                          transform=self.ax.transAxes,
                                          clip_on=False)
        self.extension_patch2 = PathPatch(path2,
                                          fc=fc, ec=ec, lw=linewidths,
                                          zorder=2.,
                                          transform=self.ax.transAxes,
                                          clip_on=False)
        self.ax.add_artist(self.extension_patch1)
        self.ax.add_artist(self.extension_patch2)

    def _set_label_text(self):
        """
        Apply the stored label text and kwargs to the colorbar axis.
        """
        self.cbar_axis.set_label_text(self._label, **self._labelkw)

    def set_label_text(self, label, **kw):
        '''
        Label the long axis of the colorbar.
        '''
        self._label = label
        self._labelkw = kw
        self._set_label_text()

    def _edges(self, X, Y):
        '''
        Return the separator line segments; helper for _add_solids.
        '''
        N = X.shape[0]
        # Using the non-array form of these line segments is much
        # simpler than making them into arrays.
        if self.orientation == 'vertical':
            return [list(zip(X[i], Y[i])) for i in xrange(1, N - 1)]
        else:
            return [list(zip(Y[i], X[i])) for i in xrange(1, N - 1)]

    def _add_solids(self, X, Y, C):
        '''
        Draw the colors using :meth:`~matplotlib.axes.Axes.pcolormesh`;
        optionally add separators.
        '''
        ## Change to pcolorfast after fixing bugs in some backends...

        # Color the extension triangles with the under/over colors and
        # drop the corresponding cells from the mesh.
        if self.extend in ["min", "both"]:
            cc = self.to_rgba([C[0][0]])
            self.extension_patch1.set_fc(cc[0])
            X, Y, C = X[1:], Y[1:], C[1:]

        if self.extend in ["max", "both"]:
            cc = self.to_rgba([C[-1][0]])
            self.extension_patch2.set_fc(cc[0])
            X, Y, C = X[:-1], Y[:-1], C[:-1]

        if self.orientation == 'vertical':
            args = (X, Y, C)
        else:
            args = (np.transpose(Y), np.transpose(X), np.transpose(C))
        kw = {'cmap': self.cmap, 'norm': self.norm,
              'shading': 'flat', 'alpha': self.alpha,
              }

        del self.solids
        del self.dividers

        col = self.ax.pcolormesh(*args, **kw)

        self.solids = col
        if self.drawedges:
            self.dividers = collections.LineCollection(
                self._edges(X, Y),
                colors=(mpl.rcParams['axes.edgecolor'],),
                linewidths=(0.5 * mpl.rcParams['axes.linewidth'],),
                )
            self.ax.add_collection(self.dividers)
        else:
            self.dividers = None

    def add_lines(self, levels, colors, linewidths):
        '''
        Draw lines on the colorbar.  It deletes preexisting lines.
        '''
        del self.lines

        N = len(levels)
        x = np.array([1.0, 2.0])
        X, Y = np.meshgrid(x, levels)
        if self.orientation == 'vertical':
            xy = [list(zip(X[i], Y[i])) for i in xrange(N)]
        else:
            xy = [list(zip(Y[i], X[i])) for i in xrange(N)]
        col = collections.LineCollection(xy, linewidths=linewidths,
                                         )
        self.lines = col
        col.set_color(colors)
        self.ax.add_collection(col)

    def _select_locator(self, formatter):
        '''
        Select a suitable tick locator for the colorbar axis.
        '''
        if self.boundaries is None:
            if isinstance(self.norm, colors.NoNorm):
                nv = len(self._values)
                base = 1 + int(nv / 10)
                locator = ticker.IndexLocator(base=base, offset=0)
            elif isinstance(self.norm, colors.BoundaryNorm):
                b = self.norm.boundaries
                locator = ticker.FixedLocator(b, nbins=10)
            elif isinstance(self.norm, colors.LogNorm):
                locator = ticker.LogLocator()
            else:
                locator = ticker.MaxNLocator(nbins=5)
        else:
            b = self._boundaries[self._inside]
            locator = ticker.FixedLocator(b)  # , nbins=10)

        self.cbar_axis.set_major_locator(locator)

    def _process_values(self, b=None):
        '''
        Set the :attr:`_boundaries` and :attr:`_values` attributes
        based on the input boundaries and values.  Input boundaries
        can be *self.boundaries* or the argument *b*.
        '''
        if b is None:
            b = self.boundaries
        if b is not None:
            self._boundaries = np.asarray(b, dtype=float)
            if self.values is None:
                # Values default to the midpoints of adjacent boundaries.
                self._values = 0.5 * (self._boundaries[:-1]
                                      + self._boundaries[1:])
                if isinstance(self.norm, colors.NoNorm):
                    self._values = (self._values + 0.00001).astype(np.int16)
                return
            self._values = np.array(self.values)
            return
        if self.values is not None:
            self._values = np.array(self.values)
            if self.boundaries is None:
                # Derive boundaries as midpoints between adjacent values,
                # extrapolating linearly at the two ends.
                b = np.zeros(len(self.values) + 1, 'd')
                # BUG FIX: a midpoint is half the *sum* of the adjacent
                # values, not half their difference (previously '-',
                # which produced nonsensical, typically negative
                # boundaries; upstream matplotlib.colorbar uses '+').
                b[1:-1] = 0.5 * (self._values[:-1] + self._values[1:])
                b[0] = 2.0 * b[1] - b[2]
                b[-1] = 2.0 * b[-2] - b[-3]
                self._boundaries = b
                return
            self._boundaries = np.array(self.boundaries)
            return

        # Neither boundaries nor values are specified;
        # make reasonable ones based on cmap and norm.
        if isinstance(self.norm, colors.NoNorm):
            b = self._uniform_y(self.cmap.N + 1) * self.cmap.N - 0.5
            v = np.arange(self.cmap.N, dtype=np.int16)
            self._boundaries = b
            self._values = v
            return
        elif isinstance(self.norm, colors.BoundaryNorm):
            b = np.array(self.norm.boundaries)
            bi = self.norm.boundaries
            v = 0.5 * (bi[:-1] + bi[1:])
            self._boundaries = b
            self._values = v
            return
        else:
            b = self._uniform_y(self.cmap.N + 1)
            self._process_values(b)

    def _uniform_y(self, N):
        '''
        Return colorbar data coordinates for *N* uniformly
        spaced boundaries (log-spaced for a LogNorm).
        '''
        vmin, vmax = self._get_colorbar_limits()
        if isinstance(self.norm, colors.LogNorm):
            y = np.logspace(np.log10(vmin), np.log10(vmax), N)
        else:
            y = np.linspace(vmin, vmax, N)
        return y

    def _mesh(self):
        '''
        Return X,Y, the coordinate arrays for the colorbar pcolormesh.
        These are suitable for a vertical colorbar; swapping and
        transposition for a horizontal colorbar are done outside
        this function.
        '''
        x = np.array([1.0, 2.0])
        if self.spacing == 'uniform':
            y = self._uniform_y(len(self._boundaries))
        else:
            y = self._boundaries
        self._y = y

        X, Y = np.meshgrid(x, y)
        return X, Y

    def set_alpha(self, alpha):
        """
        Set the transparency used for the solids (applied on the next
        call to :meth:`update_artists`).
        """
        self.alpha = alpha
class Colorbar(ColorbarBase):
    """
    A colorbar attached to a specific mappable (image, contour set, ...).

    For a :class:`~matplotlib.contour.ContourSet` mappable, boundaries,
    values and *extend* are taken from the contour levels so the colorbar
    matches the plot exactly.
    """

    def __init__(self, ax, mappable, **kw):
        # *ax*: the axes to draw the colorbar into; *mappable*: the
        # artist whose cmap/norm the colorbar reflects; extra kwargs are
        # forwarded to ColorbarBase.
        mappable.autoscale_None()  # Ensure mappable.norm.vmin, vmax
                                   # are set when colorbar is called,
                                   # even if mappable.draw has not yet
                                   # been called. This will not change
                                   # vmin, vmax if they are already set.
        self.mappable = mappable
        kw['cmap'] = mappable.cmap
        kw['norm'] = mappable.norm
        kw['alpha'] = mappable.get_alpha()

        if isinstance(mappable, contour.ContourSet):
            CS = mappable
            # Use the contour levels (incl. extension levels) directly.
            kw['boundaries'] = CS._levels
            kw['values'] = CS.cvalues
            kw['extend'] = CS.extend
            #kw['ticks'] = CS._levels
            kw.setdefault('ticks', ticker.FixedLocator(CS.levels, nbins=10))
            kw['filled'] = CS.filled
            ColorbarBase.__init__(self, ax, **kw)
            if not CS.filled:
                # Line contours: mirror them as lines on the colorbar.
                self.add_lines(CS)
        else:
            ColorbarBase.__init__(self, ax, **kw)

    def add_lines(self, CS):
        '''
        Add the lines from a non-filled
        :class:`~matplotlib.contour.ContourSet` to the colorbar.
        '''
        if not isinstance(CS, contour.ContourSet) or CS.filled:
            raise ValueError('add_lines is only for a ContourSet of lines')
        # One color/linewidth per contour level.
        tcolors = [c[0] for c in CS.tcolors]
        tlinewidths = [t[0] for t in CS.tlinewidths]
        # The following was an attempt to get the colorbar lines
        # to follow subsequent changes in the contour lines,
        # but more work is needed: specifically, a careful
        # look at event sequences, and at how
        # to make one object track another automatically.
        #tcolors = [col.get_colors()[0] for col in CS.collections]
        #tlinewidths = [col.get_linewidth()[0] for lw in CS.collections]
        #print 'tlinewidths:', tlinewidths
        ColorbarBase.add_lines(self, CS.levels, tcolors, tlinewidths)

    def update_bruteforce(self, mappable):
        """
        Update the colorbar artists to reflect the change of the
        associated mappable.
        """
        self.update_artists()

        if isinstance(mappable, contour.ContourSet):
            if not mappable.filled:
                self.add_lines(mappable)
@docstring.Substitution(make_axes_kw_doc)
def make_axes(parent, **kw):
    '''
    Resize and reposition a parent axes, and return a child
    axes suitable for a colorbar::

        cax, kw = make_axes(parent, **kw)

    Keyword arguments may include the following (with defaults):

        *orientation*
            'vertical' or 'horizontal'

    %s

    All but the first of these are stripped from the input kw set.

    Returns (cax, kw), the child axes and the reduced kw dictionary.
    '''
    # 'orientation' is deliberately left in kw (setdefault, not pop) so
    # the colorbar constructor receives it too.
    orientation = kw.setdefault('orientation', 'vertical')
    fraction = kw.pop('fraction', 0.15)
    shrink = kw.pop('shrink', 1.0)
    aspect = kw.pop('aspect', 20)

    vertical = orientation == 'vertical'
    # The default padding depends on the orientation.
    pad = kw.pop('pad', 0.05 if vertical else 0.15)

    # Carve the colorbar's slice out of the parent's original position.
    pb = parent.get_position(original=True).frozen()
    if vertical:
        split_at = 1.0 - fraction
        pb1, pbx, pbcb = pb.splitx(split_at - pad, split_at)
        pbcb = pbcb.shrunk(1.0, shrink).anchored('C', pbcb)
        anchor, panchor = (0.0, 0.5), (1.0, 0.5)
    else:
        pbcb, pbx, pb1 = pb.splity(fraction, fraction + pad)
        pbcb = pbcb.shrunk(shrink, 1.0).anchored('C', pbcb)
        aspect = 1.0 / aspect
        anchor, panchor = (0.5, 1.0), (0.5, 0.0)

    parent.set_position(pb1)
    parent.set_anchor(panchor)

    cax = parent.get_figure().add_axes(pbcb)
    cax.set_aspect(aspect, anchor=anchor, adjustable='box')
    return cax, kw
def colorbar(mappable, cax=None, ax=None, **kw):
    """
    Create a colorbar for a ScalarMappable instance.

    Documentation for the pylab thin wrapper:

    %(colorbar_doc)s
    """
    import matplotlib.pyplot as plt

    # Fall back to the current axes, and steal space from it for the
    # colorbar axes unless one was given explicitly.
    if ax is None:
        ax = plt.gca()
    if cax is None:
        cax, kw = make_axes(ax, **kw)
    cax.hold(True)

    cb = Colorbar(cax, mappable, **kw)

    def _on_mappable_changed(changed):
        # Keep the colorbar in sync with the mappable's cmap/clim.
        cb.set_cmap(changed.get_cmap())
        cb.set_clim(changed.get_clim())
        cb.update_bruteforce(changed)

    cbid = mappable.callbacksSM.connect('changed', _on_mappable_changed)
    mappable.colorbar = cb

    # Restore the parent axes as the current axes.
    ax.figure.sca(ax)
    return cb
| mit |
insiders/KunstmaanBundlesCMS | src/Kunstmaan/NodeBundle/Tests/Entity/NodeVersionTest.php | 2884 | <?php
namespace Kunstmaan\NodeBundle\Tests\Entity;
use Doctrine\ORM\EntityManager;
use Doctrine\ORM\EntityRepository;
use Kunstmaan\NodeBundle\Entity\HasNodeInterface;
use Kunstmaan\NodeBundle\Entity\NodeTranslation;
use Kunstmaan\NodeBundle\Entity\NodeVersion;
use PHPUnit\Framework\TestCase;
/**
 * Unit tests for the accessors and helper methods of the NodeVersion entity.
 */
class NodeVersionTest extends TestCase
{
    /**
     * @var NodeVersion  fresh instance under test, rebuilt for every test
     */
    protected $object;

    /** Create a fresh NodeVersion before each test. */
    protected function setUp(): void
    {
        $this->object = new NodeVersion();
    }

    /** The node translation accessor round-trips its value. */
    public function testSetGetNodeTranslation()
    {
        $nodeTrans = new NodeTranslation();
        $this->object->setNodeTranslation($nodeTrans);
        $this->assertEquals($nodeTrans, $this->object->getNodeTranslation());
    }

    /** The type accessor round-trips a version-type constant. */
    public function testSetGetType()
    {
        $this->object->setType(NodeVersion::DRAFT_VERSION);
        $this->assertEquals(NodeVersion::DRAFT_VERSION, $this->object->getType());
    }

    /** The owner accessor round-trips its value. */
    public function testSetGetOwner()
    {
        $this->object->setOwner('owner');
        $this->assertEquals('owner', $this->object->getOwner());
    }

    /** The created-timestamp accessor round-trips its value. */
    public function testSetGetCreated()
    {
        $created = new \DateTime();
        $this->object->setCreated($created);
        $this->assertEquals($created, $this->object->getCreated());
    }

    /** The updated-timestamp accessor round-trips its value. */
    public function testSetGetUpdated()
    {
        $updated = new \DateTime();
        $this->object->setUpdated($updated);
        $this->assertEquals($updated, $this->object->getUpdated());
    }

    /**
     * setRef() stores the referenced entity's id and class name, and
     * getRef() loads it back through the entity manager's repository.
     * The entity manager and repository are mocked so no database is hit.
     */
    public function testGetSetRef()
    {
        $entity = $this->createMock(HasNodeInterface::class);
        $entity->method('getId')->willReturn(1);

        $em = $this->getMockBuilder(EntityManager::class)
            ->disableOriginalConstructor()
            ->getMock();

        $repo = $this->getMockBuilder(EntityRepository::class)
            ->disableOriginalConstructor()
            ->getMock();

        $repo->expects($this->any())
            ->method('find')
            ->willReturn($entity);

        $em->expects($this->any())
            ->method('getRepository')
            ->willReturn($repo);

        $this->object->setRef($entity);

        $this->assertEquals(1, $this->object->getRefId());
        $this->assertEquals(\get_class($entity), $this->object->getRefEntityName());
        $this->assertInstanceOf(\get_class($entity), $this->object->getRef($em));
    }

    /** A fresh NodeVersion has no default admin type. */
    public function testGetDefaultAdminType()
    {
        $this->assertNull($this->object->getDefaultAdminType());
    }

    /** The origin accessor round-trips another NodeVersion. */
    public function testGetSetOrigin()
    {
        $entity = new NodeVersion();
        $this->object->setOrigin($entity);
        $this->assertInstanceOf(NodeVersion::class, $this->object->getOrigin());
    }

    /** A version of PUBLIC_VERSION type reports itself as public. */
    public function testIsPublic()
    {
        $this->object->setType(NodeVersion::PUBLIC_VERSION);
        $this->assertTrue($this->object->isPublic());
    }
}
| mit |
mizoki/shirasagi | spec/features/cms/node/nodes_spec.rb | 1564 | require 'spec_helper'
# Feature spec covering the CRUD pages of child nodes nested under a
# parent CMS node.  Button labels are the Japanese UI strings
# ("保存" = Save, "削除" = Delete).
describe "cms_node_nodes", type: :feature, dbscope: :example do
  # Shared fixtures: a parent node and a child item nested beneath it.
  let(:site) { cms_site }
  let(:node) { create :cms_node }
  let(:item) { create :cms_node, filename: "#{node.filename}/name" }
  # CRUD paths for the node listing under the parent node.
  let(:index_path) { node_nodes_path site.id, node }
  let(:new_path) { "#{index_path}/new" }
  let(:show_path) { "#{index_path}/#{item.id}" }
  let(:edit_path) { "#{index_path}/#{item.id}/edit" }
  let(:delete_path) { "#{index_path}/#{item.id}/delete" }

  context "with auth" do
    before { login_cms_user }

    # Authenticated users are not redirected to the login page.
    it "#index" do
      visit index_path
      expect(current_path).not_to eq sns_login_path
    end

    # Creating a node succeeds and leaves the form.
    it "#new" do
      visit new_path
      within "form#item-form" do
        fill_in "item[name]", with: "sample"
        fill_in "item[basename]", with: "sample"
        click_button "保存"
      end
      expect(status_code).to eq 200
      expect(current_path).not_to eq new_path
      expect(page).to have_no_css("form#item-form")
    end

    it "#show" do
      visit show_path
      expect(status_code).to eq 200
      expect(current_path).not_to eq sns_login_path
    end

    # Editing a node saves and leaves the form.
    it "#edit" do
      visit edit_path
      within "form#item-form" do
        fill_in "item[name]", with: "modify"
        click_button "保存"
      end
      expect(current_path).not_to eq sns_login_path
      expect(page).to have_no_css("form#item-form")
    end

    # Deleting a node returns to the index page.
    it "#delete" do
      visit delete_path
      within "form" do
        click_button "削除"
      end
      expect(current_path).to eq index_path
    end
  end
end
| mit |
# Cucumber step definitions for topic ("policy area") features.
# Instance variables (@topic, @offsite_link) are shared between steps
# within a scenario.  Helpers such as create_topic, visit_topic and
# start_creating_topic are defined in the feature support code.

# --- Givens: data setup -----------------------------------------------------

Given /^a topic called "([^"]*)" exists$/ do |name|
  @topic = create(:topic, name: name)
end

Given /^a topic called "([^"]*)" with description "([^"]*)"$/ do |name, description|
  create(:topic, name: name, description: description)
end

Given(/^the publication "(.*?)" is associated with the topic "(.*?)"$/) do |publication_name, topic_name|
  publication = Publication.find_by!(title: publication_name)
  topic = Topic.find_by!(name: topic_name)
  publication.topics << topic
end

Given /^the topic "([^"]*)" is associated with organisation "([^"]*)"$/ do |topic_name, organisation_name|
  # find-or-create so steps can be combined in any order
  topic = Topic.find_by(name: topic_name) || create(:topic, name: topic_name)
  organisation = Organisation.find_by(name: organisation_name) || create(:ministerial_department, name: organisation_name)
  organisation.topics << topic
end

Given /^the topic "([^"]*)" has "([^"]*)" as a lead organisation$/ do |topic_name, organisation_name|
  topic = Topic.find_by(name: topic_name) || create(:topic, name: topic_name)
  organisation = Organisation.find_by(name: organisation_name) || create(:ministerial_department, name: organisation_name)
  OrganisationClassification.create(topic: topic, organisation: organisation, lead: true)
end

Given /^two topics "([^"]*)" and "([^"]*)" exist$/ do |first_topic, second_topic|
  create(:topic, name: first_topic)
  create(:topic, name: second_topic)
end

Given /^the topic "([^"]*)" is related to the topic "([^"]*)"$/ do |name, related_name|
  related_topic = create(:topic, name: related_name)
  topic = Topic.find_by(name: name)
  topic.update_attributes!(related_classifications: [related_topic])
end

Given(/^a (topic|topical event) called "(.*?)" exists with featured documents$/) do |type, name|
  classification = if type == 'topic'
                     create(:topic, name: name)
                   else
                     create(:topical_event, name: name)
                   end
  create(:classification_featuring, classification: classification)
end

Given(/^I have an offsite link "(.*?)" for the topic "(.*?)"$/) do |title, topic_name|
  topic = Topic.find_by(name: topic_name)
  @offsite_link = create :offsite_link, title: title, parent: topic
end

# --- Whens: admin actions ---------------------------------------------------

When /^I create a new topic "([^"]*)" with description "([^"]*)"$/ do |name, description|
  create_topic(name: name, description: description)
end

When /^I create a new topic "([^"]*)" related to topic "([^"]*)"$/ do |name, related_name|
  create_topic(name: related_name)
  create_topic(name: name, related_classifications: [related_name])
end

When /^I edit the topic "([^"]*)" to have description "([^"]*)"$/ do |name, description|
  visit admin_root_path
  click_link "Policy Areas"
  click_link name
  click_on "Edit"
  fill_in "Description", with: description
  click_button "Save"
end

When /^I visit the list of topics$/ do
  visit topics_path
end

When /^I visit the "([^"]*)" (topic|topical event)$/ do |name, type|
  classification = if type == 'topic'
                     Topic.find_by!(name: name)
                   else
                     TopicalEvent.find_by!(name: name)
                   end
  visit polymorphic_path(classification)
end

When /^I set the order of the policies in the "([^"]*)" topic to:$/ do |name, table|
  topic = Topic.find_by!(name: name)
  visit edit_admin_topic_path(topic)
  # Ordering fields are numeric inputs labelled with the policy name.
  table.rows.each_with_index do |(policy_name), index|
    fill_in policy_name, with: index
  end
  click_button "Save"
end

When /^I set the order of the lead organisations in the "([^"]*)" topic to:$/ do |topic_name, table|
  topic = Topic.find_by!(name: topic_name)
  visit edit_admin_topic_path(topic)
  lead_organisations = table.rows.map { |(organisation_name)| organisation_name }
  # Mark the listed organisations as leads, in table order...
  lead_organisations.each_with_index do |organisation_name, index|
    fill_in organisation_name, with: index
    fill_in organisation_name + ' is lead?', with: '1'
  end
  # ...and clear the lead flag on all remaining associated organisations.
  other_organisations = topic.organisations.map(&:name) - lead_organisations
  other_organisations.each do |organisation_name|
    fill_in organisation_name, with: ''
    fill_in organisation_name + ' is lead?', with: '0'
  end
  click_button "Save"
end

# --- Thens: admin assertions ------------------------------------------------

Then /^I should see in the admin the "([^"]*)" topic description is "([^"]*)"$/ do |name, description|
  visit admin_topics_path
  assert page.has_css?(".name", text: name)
  assert page.has_css?(".description", text: description)
end

Then /^I should see in the admin the "([^"]*)" topic is related to topic "([^"]*)"$/ do |name, related_name|
  visit admin_topics_path
  topic = Topic.find_by(name: name)
  related_topic = Topic.find_by(name: related_name)
  assert page.has_css?("#{record_css_selector(topic)} .related #{record_css_selector(related_topic)}")
end

Then /^I should be able to delete the topic "([^"]*)"$/ do |name|
  visit admin_topics_path
  click_link name
  click_on 'Edit'
  click_button 'Delete'
end

Then /^I should see the order of the policies in the "([^"]*)" topic is:$/ do |name, expected_table|
  topic = Topic.find_by!(name: name)
  visit topic_path(topic)
  rows = find("#policies").all('h2')
  table = rows.map { |r| r.all('a').map { |c| c.text.strip } }
  expected_table.diff!(table)
end

Then /^I should see the order of the lead organisations in the "([^"]*)" topic is:$/ do |topic_name, expected_table|
  topic = Topic.find_by!(name: topic_name)
  visit edit_admin_topic_path(topic)
  rows = find("#lead_organisation_order").all(:xpath, './/label[./a]')
  table = rows.map { |r| r.all('a').map { |c| c.text.strip } }
  expected_table.diff!(table)
end

Then /^I should see the following organisations for the "([^"]*)" topic:$/ do |topic_name, expected_table|
  topic = Topic.find_by!(name: topic_name)
  visit edit_admin_topic_path(topic)
  rows = find("#organisations").all(:xpath, './/label[./a]')
  table = rows.map { |r| r.all('a').map { |c| c.text.strip } }
  expected_table.diff!(table)
end

# --- Public-site assertions -------------------------------------------------

Then /^I should see the topics "([^"]*)" and "([^"]*)"$/ do |first_topic_name, second_topic_name|
  first_topic = Topic.find_by!(name: first_topic_name)
  second_topic = Topic.find_by!(name: second_topic_name)
  assert page.has_css?(record_css_selector(first_topic), text: first_topic_name)
  assert page.has_css?(record_css_selector(second_topic), text: second_topic_name)
end

Then /^I should see a link to the related topic "([^"]*)"$/ do |related_name|
  related_topic = Topic.find_by(name: related_name)
  assert page.has_css?(".related-topics a[href='#{topic_path(related_topic)}']", text: related_name)
end

# --- Featuring documents and offsite links ----------------------------------

When(/^I feature the publication "([^"]*)" on the topic "([^"]*)"$/) do |publication_name, topic_name|
  publication = Publication.find_by!(title: publication_name)
  topic = Topic.find_by!(name: topic_name)
  visit admin_topic_path(topic)
  click_on 'Features'
  within record_css_selector(publication) do
    click_link "Feature"
  end
  # jpg_image is a fixture helper from the feature support code
  attach_file "Select a 960px wide and 640px tall image to be shown when featuring", jpg_image
  fill_in :classification_featuring_alt_text, with: "An accessible description of the image"
  click_button "Save"
end

When(/^I add the offsite link "(.*?)" of type "(.*?)" to the topic "(.*?)"$/) do |title, type, topic_name|
  topic = Topic.find_by!(name: topic_name)
  visit admin_topic_classification_featurings_path(topic)
  click_link "Create an offsite link"
  fill_in :offsite_link_title, with: title
  select type, from: 'offsite_link_link_type'
  fill_in :offsite_link_summary, with: "summary"
  fill_in :offsite_link_url, with: "http://gov.uk"
  click_button "Save"
end

When(/^I feature the offsite link "(.*?)" for topic "(.*?)" with image "(.*?)"$/) do |offsite_link_title, topic_name, image_filename|
  topic = Topic.find_by!(name: topic_name)
  visit admin_topic_classification_featurings_path(topic)
  @offsite_link = OffsiteLink.find_by(title: offsite_link_title)
  within record_css_selector(@offsite_link) do
    click_link "Feature"
  end
  attach_file "Select a 960px wide and 640px tall image to be shown when featuring", Rails.root.join("test/fixtures/#{image_filename}")
  fill_in :classification_featuring_alt_text, with: "An accessible description of the image"
  click_button "Save"
end

Then(/^I should see the publication "([^"]*)" featured on the public topic page for "([^"]*)"$/) do |publication_name, topic_name|
  publication = Publication.find_by!(title: publication_name)
  topic = Topic.find_by!(name: topic_name)
  visit topic_path(topic)
  within('section.featured-news') do
    assert page.has_content?(publication.title)
  end
end

Then(/^I should see the offsite link featured on the public topic page$/) do
  # relies on @topic and @offsite_link set by earlier steps in the scenario
  visit topic_path(@topic)
  within('section.featured-news') do
    assert page.has_content?(@offsite_link.title)
  end
end

# --- Featured links ---------------------------------------------------------

When /^I add some featured links to the topic "([^"]*)" via the admin$/ do |topic_name|
  topic = Topic.find_by!(name: topic_name)
  visit admin_topic_path(topic)
  click_link "Edit"
  within ".featured-links" do
    fill_in "URL", with: "https://www.gov.uk/mainstream/tool-alpha"
    fill_in "Title", with: "Tool Alpha"
  end
  click_button "Save"
end

Then /^the featured links for the topic "([^"]*)" should be visible on the public site$/ do |topic_name|
  visit_topic topic_name
  within ".featured-links" do
    assert page.has_css?("a[href='https://www.gov.uk/mainstream/tool-alpha']", "Tool Alpha")
  end
end

Then(/^I should see the edit offsite link "(.*?)" on the "(.*?)" topic page$/) do |title, topic_name|
  topic = Topic.find_by!(name: topic_name)
  offsite_link = OffsiteLink.find_by!(title: title)
  visit admin_topic_path(topic)
  page.has_link?(title, href: edit_admin_topic_offsite_link_path(topic.id, offsite_link.id))
end

When(/^I start creating a topic$/) do
  start_creating_topic
end
| mit |
quyixia/BeeFramework_Android | src/com/example/adapter/TrendAdapter.java | 2668 | package com.example.adapter;
import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.BeeFramework.example.R;
import com.BeeFramework.adapter.BeeBaseAdapter;
import java.util.ArrayList;
/*
* ______ ______ ______
* /\ __ \ /\ ___\ /\ ___\
* \ \ __< \ \ __\_ \ \ __\_
* \ \_____\ \ \_____\ \ \_____\
* \/_____/ \/_____/ \/_____/
*
*
* Copyright (c) 2013-2014, {Bee} open source community
* http://www.bee-framework.com
*
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
/**
 * List adapter rendering a flat list of trend strings with the
 * {@code trend_topic} row layout. Only the content text view is populated
 * by {@link #bindData}; the time view is looked up and cached so it is
 * available to callers via the holder.
 */
public class TrendAdapter extends BeeBaseAdapter {

    public TrendAdapter(Context c, ArrayList dataList) {
        super(c, dataList);
    }

    /** View holder caching the two text views of a trend row. */
    public class TrendHolder extends BeeCellHolder {
        public TextView trendTimeTextView;
        public TextView trendContentTextView;
    }

    @Override
    protected BeeCellHolder createCellHolder(View cellView) {
        // Resolve child views once per inflated row; the holder is reused
        // by the base adapter on subsequent binds.
        TrendHolder cellHolder = new TrendHolder();
        cellHolder.trendTimeTextView = (TextView) cellView.findViewById(R.id.topic_time);
        cellHolder.trendContentTextView = (TextView) cellView.findViewById(R.id.topic_content);
        return cellHolder;
    }

    @Override
    protected View bindData(int position, View cellView, ViewGroup parent, BeeCellHolder h) {
        // Each data item is the trend's display text.
        String trendText = (String) dataList.get(position);
        ((TrendHolder) h).trendContentTextView.setText(trendText);
        return cellView;
    }

    @Override
    public View createCellView() {
        return mInflater.inflate(R.layout.trend_topic, null);
    }
}
| mit |
ddaspit/libpalaso | SIL.Windows.Forms.WritingSystems/WSIdentifiers/VoiceIdentifierView.Designer.cs | 1963 | using SIL.Windows.Forms.Widgets;
namespace SIL.Windows.Forms.WritingSystems.WSIdentifiers
{
	/// <summary>
	/// Designer half of the voice identifier view: hosts a single read-only,
	/// multiline label describing the effect of choosing a voice input system.
	/// </summary>
	partial class VoiceIdentifierView
	{
		/// <summary>
		/// Required designer variable.
		/// </summary>
		private System.ComponentModel.IContainer components = null;

		/// <summary>
		/// Clean up any resources being used.
		/// </summary>
		/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
		protected override void Dispose(bool disposing)
		{
			if (disposing && (components != null))
			{
				components.Dispose();
			}
			base.Dispose(disposing);
		}

		#region Component Designer generated code

		/// <summary>
		/// Required method for Designer support - do not modify
		/// the contents of this method with the code editor.
		/// </summary>
		private void InitializeComponent()
		{
			this.betterLabel1 = new BetterLabel();
			this.SuspendLayout();
			//
			// betterLabel1
			//
			this.betterLabel1.Dock = System.Windows.Forms.DockStyle.Fill;
			this.betterLabel1.BorderStyle = System.Windows.Forms.BorderStyle.None;
			this.betterLabel1.Location = new System.Drawing.Point(0, 11);
			this.betterLabel1.Multiline = true;
			this.betterLabel1.Name = "betterLabel1";
			this.betterLabel1.ReadOnly = true;
			this.betterLabel1.Size = new System.Drawing.Size(221, 65);
			this.betterLabel1.TabIndex = 0;
			this.betterLabel1.TabStop = false;
			this.betterLabel1.Text = "In applications which support this option, fields with this input system will b" +
				"e able to play and record voice.";
			//
			// VoiceIdentifierView
			//
			this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
			this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
			this.Controls.Add(this.betterLabel1);
			this.Name = "VoiceIdentifierView";
			this.Size = new System.Drawing.Size(221, 76);
			this.ResumeLayout(false);
			this.PerformLayout();
		}

		#endregion

		// Read-only explanatory label; created and configured in InitializeComponent.
		private BetterLabel betterLabel1;
	}
}
| mit |
ortroyaner/GithubFinder | node_modules/ionic-angular/umd/components/tabs/tabs.js | 25756 | var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
(function (factory) {
if (typeof module === "object" && typeof module.exports === "object") {
var v = factory(require, exports);
if (v !== undefined) module.exports = v;
}
else if (typeof define === "function" && define.amd) {
define(["require", "exports", "@angular/core", "../app/app", "../../config/config", "../../navigation/deep-linker", "../ion", "../../util/util", "../../navigation/nav-controller", "../../navigation/nav-util", "../split-pane/split-pane", "../../platform/platform", "./tab-highlight", "../../navigation/view-controller"], factory);
}
})(function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var core_1 = require("@angular/core");
var app_1 = require("../app/app");
var config_1 = require("../../config/config");
var deep_linker_1 = require("../../navigation/deep-linker");
var ion_1 = require("../ion");
var util_1 = require("../../util/util");
var nav_controller_1 = require("../../navigation/nav-controller");
var nav_util_1 = require("../../navigation/nav-util");
var split_pane_1 = require("../split-pane/split-pane");
var platform_1 = require("../../platform/platform");
var tab_highlight_1 = require("./tab-highlight");
var view_controller_1 = require("../../navigation/view-controller");
/**
* \@name Tabs
* \@description
* Tabs make it easy to navigate between different pages or functional
* aspects of an app. The Tabs component, written as `<ion-tabs>`, is
* a container of individual [Tab](../Tab/) components. Each individual `ion-tab`
* is a declarative component for a [NavController](../../../navigation/NavController/)
*
* For more information on using nav controllers like Tab or [Nav](../../nav/Nav/),
* take a look at the [NavController API Docs](../../../navigation/NavController/).
*
* ### Placement
*
* The position of the tabs relative to the content varies based on
* the mode. The tabs are placed at the bottom of the screen
* for iOS and Android, and at the top for Windows by default. The position can
* be configured using the `tabsPlacement` attribute on the `<ion-tabs>` component,
* or in an app's [config](../../config/Config/).
* See the [Input Properties](#input-properties) below for the available
* values of `tabsPlacement`.
*
* ### Layout
*
* The layout for all of the tabs can be defined using the `tabsLayout`
* property. If the individual tab has a title and icon, the icons will
* show on top of the title by default. All tabs can be changed by setting
* the value of `tabsLayout` on the `<ion-tabs>` element, or in your
* app's [config](../../config/Config/). For example, this is useful if
* you want to show tabs with a title only on Android, but show icons
* and a title for iOS. See the [Input Properties](#input-properties)
* below for the available values of `tabsLayout`.
*
* ### Selecting a Tab
*
* There are different ways you can select a specific tab from the tabs
* component. You can use the `selectedIndex` property to set the index
* on the `<ion-tabs>` element, or you can call `select()` from the `Tabs`
* instance after creation. See [usage](#usage) below for more information.
*
* \@usage
*
* You can add a basic tabs template to a `\@Component` using the following
* template:
*
* ```html
* <ion-tabs>
* <ion-tab [root]="tab1Root"></ion-tab>
* <ion-tab [root]="tab2Root"></ion-tab>
* <ion-tab [root]="tab3Root"></ion-tab>
* </ion-tabs>
* ```
*
* Where `tab1Root`, `tab2Root`, and `tab3Root` are each a page:
*
* ```ts
* \@Component({
* templateUrl: 'build/pages/tabs/tabs.html'
* })
* export class TabsPage {
* // this tells the tabs component which Pages
* // should be each tab's root Page
* tab1Root = Page1;
* tab2Root = Page2;
* tab3Root = Page3;
*
* constructor() {
*
* }
* }
* ```
*
* By default, the first tab will be selected upon navigation to the
* Tabs page. We can change the selected tab by using `selectedIndex`
* on the `<ion-tabs>` element:
*
* ```html
* <ion-tabs selectedIndex="2">
* <ion-tab [root]="tab1Root"></ion-tab>
* <ion-tab [root]="tab2Root"></ion-tab>
* <ion-tab [root]="tab3Root"></ion-tab>
* </ion-tabs>
* ```
*
* Since the index starts at `0`, this will select the 3rd tab which has
* root set to `tab3Root`. If you wanted to change it dynamically from
* your class, you could use [property binding](https://angular.io/docs/ts/latest/guide/template-syntax.html#!#property-binding).
*
* Alternatively, you can grab the `Tabs` instance and call the `select()`
* method. This requires the `<ion-tabs>` element to have an `id`. For
* example, set the value of `id` to `myTabs`:
*
* ```html
* <ion-tabs #myTabs>
* <ion-tab [root]="tab1Root"></ion-tab>
* <ion-tab [root]="tab2Root"></ion-tab>
* <ion-tab [root]="tab3Root"></ion-tab>
* </ion-tabs>
* ```
*
* Then in your class you can grab the `Tabs` instance and call `select()`,
* passing the index of the tab as the argument. Here we're grabbing the tabs
* by using ViewChild.
*
* ```ts
* export class TabsPage {
*
* \@ViewChild('myTabs') tabRef: Tabs;
*
* ionViewDidEnter() {
* this.tabRef.select(2);
* }
*
* }
* ```
*
* You can also switch tabs from a child component by calling `select()` on the
* parent view using the `NavController` instance. For example, assuming you have
* a `TabsPage` component, you could call the following from any of the child
* components to switch to `TabsRoot3`:
*
* ```ts
* switchTabs() {
* this.navCtrl.parent.select(2);
* }
* ```
* \@demo /docs/demos/src/tabs/
*
* @see {\@link /docs/components#tabs Tabs Component Docs}
* @see {\@link ../Tab Tab API Docs}
* @see {\@link ../../config/Config Config API Docs}
*
*/
var Tabs = (function (_super) {
    __extends(Tabs, _super);
    /**
     * @param {?} parent the containing NavController, if any (may be null)
     * @param {?} viewCtrl the ViewController when Tabs itself was navigated to as a page (may be null)
     * @param {?} _app
     * @param {?} config
     * @param {?} elementRef
     * @param {?} _plt
     * @param {?} renderer
     * @param {?} _linker
     */
    function Tabs(parent, viewCtrl, _app, config, elementRef, _plt, renderer, _linker) {
        var _this = _super.call(this, config, elementRef, renderer, 'tabs') || this;
        _this.viewCtrl = viewCtrl;
        _this._app = _app;
        _this._plt = _plt;
        _this._linker = _linker;
        /**
         * \@internal
         */
        _this._ids = -1;
        /**
         * \@internal
         */
        _this._tabs = [];
        /**
         * \@internal
         */
        _this._selectHistory = [];
        /**
         * \@output {any} Emitted when the tab changes.
         */
        _this.ionChange = new core_1.EventEmitter();
        _this.parent = parent;
        // Unique id per Tabs instance, minted from the module-level counter.
        _this.id = 't' + (++tabIds);
        _this._sbPadding = config.getBoolean('statusbarPadding');
        _this.tabsHighlight = config.getBoolean('tabsHighlight');
        if (_this.parent) {
            // this Tabs has a parent Nav
            _this.parent.registerChildNav(_this);
        }
        else if (viewCtrl && viewCtrl.getNav()) {
            // this Nav was opened from a modal
            _this.parent = viewCtrl.getNav();
            _this.parent.registerChildNav(_this);
        }
        else if (_this._app) {
            // this is the root navcontroller for the entire app
            _this._app._setRootNav(_this);
        }
        // Tabs may also be an actual ViewController which was navigated to
        // if Tabs is static and not navigated to within a NavController
        // then skip this and don't treat it as it's own ViewController
        if (viewCtrl) {
            viewCtrl._setContent(_this);
            viewCtrl._setContentRef(elementRef);
        }
        return _this;
    }
/**
* @return {?}
*/
Tabs.prototype.ngOnDestroy = function () {
this._resizeObs && this._resizeObs.unsubscribe();
this.parent.unregisterChildNav(this);
};
/**
* \@internal
* @return {?}
*/
Tabs.prototype.ngAfterViewInit = function () {
var _this = this;
this._setConfig('tabsPlacement', 'bottom');
this._setConfig('tabsLayout', 'icon-top');
this._setConfig('tabsHighlight', this.tabsHighlight);
if (this.tabsHighlight) {
this._resizeObs = this._plt.resize.subscribe(function () {
_this._highlight.select(_this.getSelected());
});
}
this.initTabs();
};
/**
* \@internal
* @return {?}
*/
Tabs.prototype.initTabs = function () {
// get the selected index from the input
// otherwise default it to use the first index
var /** @type {?} */ selectedIndex = (util_1.isBlank(this.selectedIndex) ? 0 : parseInt(/** @type {?} */ (this.selectedIndex), 10));
// now see if the deep linker can find a tab index
var /** @type {?} */ tabsSegment = this._linker.initNav(this);
if (tabsSegment && util_1.isBlank(tabsSegment.component)) {
// we found a segment which probably represents which tab to select
selectedIndex = this._linker.getSelectedTabIndex(this, tabsSegment.name, selectedIndex);
}
// get the selectedIndex and ensure it isn't hidden or disabled
var /** @type {?} */ selectedTab = this._tabs.find(function (t, i) { return i === selectedIndex && t.enabled && t.show; });
if (!selectedTab) {
// wasn't able to select the tab they wanted
// try to find the first tab that's available
selectedTab = this._tabs.find(function (t) { return t.enabled && t.show; });
}
if (selectedTab) {
// we found a tab to select
// get the segment the deep linker says this tab should load with
var /** @type {?} */ pageId = null;
if (tabsSegment) {
var /** @type {?} */ selectedTabSegment = this._linker.initNav(selectedTab);
if (selectedTabSegment && selectedTabSegment.component) {
selectedTab.root = selectedTabSegment.component;
selectedTab.rootParams = selectedTabSegment.data;
pageId = selectedTabSegment.id;
}
}
this.select(selectedTab, {
id: pageId
});
}
// set the initial href attribute values for each tab
this._tabs.forEach(function (t) {
t.updateHref(t.root, t.rootParams);
});
};
/**
* \@internal
* @param {?} attrKey
* @param {?} fallback
* @return {?}
*/
Tabs.prototype._setConfig = function (attrKey, fallback) {
var /** @type {?} */ val = ((this))[attrKey];
if (util_1.isBlank(val)) {
val = this._config.get(attrKey, fallback);
}
this.setElementAttribute(attrKey, val);
};
/**
* @hidden
* @param {?} tab
* @return {?}
*/
Tabs.prototype.add = function (tab) {
this._tabs.push(tab);
return this.id + '-' + (++this._ids);
};
/**
* @param {?} tabOrIndex
* @param {?=} opts
* @return {?}
*/
Tabs.prototype.select = function (tabOrIndex, opts) {
var _this = this;
if (opts === void 0) { opts = {}; }
var /** @type {?} */ selectedTab = (typeof tabOrIndex === 'number' ? this.getByIndex(tabOrIndex) : tabOrIndex);
if (util_1.isBlank(selectedTab)) {
return;
}
// If the selected tab is the current selected tab, we do not switch
var /** @type {?} */ currentTab = this.getSelected();
if (selectedTab === currentTab) {
return this._touchActive(selectedTab);
}
// If the selected tab does not have a root, we do not switch (#9392)
// it's possible the tab is only for opening modal's or signing out
// and doesn't actually have content. In the case there's no content
// for a tab then do nothing and leave the current view as is
if (selectedTab.root) {
// At this point we are going to perform a page switch
// Let's fire willLeave in the current tab page
var /** @type {?} */ currentPage;
if (currentTab) {
currentPage = currentTab.getActive();
currentPage && currentPage._willLeave(false);
}
// Fire willEnter in the new selected tab
var /** @type {?} */ selectedPage_1 = selectedTab.getActive();
selectedPage_1 && selectedPage_1._willEnter();
// Let's start the transition
opts.animate = false;
selectedTab.load(opts, function () {
_this._tabSwitchEnd(selectedTab, selectedPage_1, currentPage);
if (opts.updateUrl !== false) {
_this._linker.navChange(nav_util_1.DIRECTION_SWITCH);
}
(void 0) /* assert */;
_this._fireChangeEvent(selectedTab);
});
}
else {
this._fireChangeEvent(selectedTab);
}
};
/**
* @param {?} selectedTab
* @return {?}
*/
Tabs.prototype._fireChangeEvent = function (selectedTab) {
selectedTab.ionSelect.emit(selectedTab);
this.ionChange.emit(selectedTab);
};
/**
* @param {?} selectedTab
* @param {?} selectedPage
* @param {?} currentPage
* @return {?}
*/
Tabs.prototype._tabSwitchEnd = function (selectedTab, selectedPage, currentPage) {
(void 0) /* assert */;
(void 0) /* assert */;
// Update tabs selection state
var /** @type {?} */ tabs = this._tabs;
var /** @type {?} */ tab;
for (var /** @type {?} */ i = 0; i < tabs.length; i++) {
tab = tabs[i];
tab.setSelected(tab === selectedTab);
}
if (this.tabsHighlight) {
this._highlight.select(selectedTab);
}
// Fire didEnter/didLeave lifecycle events
selectedPage && selectedPage._didEnter();
currentPage && currentPage._didLeave();
// track the order of which tabs have been selected, by their index
// do not track if the tab index is the same as the previous
if (this._selectHistory[this._selectHistory.length - 1] !== selectedTab.id) {
this._selectHistory.push(selectedTab.id);
}
};
/**
* Get the previously selected Tab which is currently not disabled or hidden.
* @param {?=} trimHistory
* @return {?}
*/
Tabs.prototype.previousTab = function (trimHistory) {
var _this = this;
if (trimHistory === void 0) { trimHistory = true; }
// walk backwards through the tab selection history
// and find the first previous tab that is enabled and shown
(void 0) /* console.debug */;
for (var /** @type {?} */ i = this._selectHistory.length - 2; i >= 0; i--) {
var /** @type {?} */ tab = this._tabs.find(function (t) { return t.id === _this._selectHistory[i]; });
if (tab && tab.enabled && tab.show) {
if (trimHistory) {
this._selectHistory.splice(i + 1);
}
return tab;
}
}
return null;
};
/**
* @param {?} index
* @return {?}
*/
Tabs.prototype.getByIndex = function (index) {
return this._tabs[index];
};
/**
* @return {?}
*/
Tabs.prototype.getSelected = function () {
var /** @type {?} */ tabs = this._tabs;
for (var /** @type {?} */ i = 0; i < tabs.length; i++) {
if (tabs[i].isSelected) {
return tabs[i];
}
}
return null;
};
/**
* \@internal
* @return {?}
*/
Tabs.prototype.getActiveChildNav = function () {
return this.getSelected();
};
/**
* \@internal
* @param {?} tab
* @return {?}
*/
Tabs.prototype.getIndex = function (tab) {
return this._tabs.indexOf(tab);
};
/**
* \@internal
* @return {?}
*/
Tabs.prototype.length = function () {
return this._tabs.length;
};
/**
* "Touch" the active tab, going back to the root view of the tab
* or optionally letting the tab handle the event
* @param {?} tab
* @return {?}
*/
Tabs.prototype._touchActive = function (tab) {
var /** @type {?} */ active = tab.getActive();
if (active) {
if (active._cmp && active._cmp.instance.ionSelected) {
// if they have a custom tab selected handler, call it
active._cmp.instance.ionSelected();
}
else if (tab.length() > 1) {
// if we're a few pages deep, pop to root
tab.popToRoot();
}
else {
nav_util_1.getComponent(this._linker, tab.root).then(function (viewController) {
if (viewController.component !== active.component) {
// Otherwise, if the page we're on is not our real root
// reset it to our default root type
return tab.setRoot(tab.root);
}
}).catch(function () {
(void 0) /* console.debug */;
});
}
}
};
/**
* \@internal
* DOM WRITE
* @param {?} top
* @param {?} bottom
* @return {?}
*/
Tabs.prototype.setTabbarPosition = function (top, bottom) {
if (this._top !== top || this._bottom !== bottom) {
var /** @type {?} */ tabbarEle = (this._tabbar.nativeElement);
tabbarEle.style.top = (top > -1 ? top + 'px' : '');
tabbarEle.style.bottom = (bottom > -1 ? bottom + 'px' : '');
tabbarEle.classList.add('show-tabbar');
this._top = top;
this._bottom = bottom;
}
};
/**
* \@internal
* @return {?}
*/
Tabs.prototype.resize = function () {
var /** @type {?} */ tab = this.getSelected();
tab && tab.resize();
};
/**
* \@internal
* @return {?}
*/
Tabs.prototype.initPane = function () {
var /** @type {?} */ isMain = this._elementRef.nativeElement.hasAttribute('main');
return isMain;
};
/**
* \@internal
* @param {?} isPane
* @return {?}
*/
Tabs.prototype.paneChanged = function (isPane) {
if (isPane) {
this.resize();
}
};
return Tabs;
}(ion_1.Ion));
// Angular component metadata — the compiled form of the @Component decorator.
Tabs.decorators = [
    { type: core_1.Component, args: [{
                selector: 'ion-tabs',
                template: '<div class="tabbar" role="tablist" #tabbar>' +
                    '<a *ngFor="let t of _tabs" [tab]="t" class="tab-button" role="tab" href="#" (ionSelect)="select(t)"></a>' +
                    '<div class="tab-highlight"></div>' +
                    '</div>' +
                    '<ng-content></ng-content>' +
                    '<div #portal tab-portal></div>',
                encapsulation: core_1.ViewEncapsulation.None,
                providers: [{ provide: split_pane_1.RootNode, useExisting: core_1.forwardRef(function () { return Tabs; }) }]
            },] },
];
/**
 * @nocollapse
 */
// Constructor parameter metadata used by Angular's dependency injector.
Tabs.ctorParameters = function () { return [
    { type: nav_controller_1.NavController, decorators: [{ type: core_1.Optional },] },
    { type: view_controller_1.ViewController, decorators: [{ type: core_1.Optional },] },
    { type: app_1.App, },
    { type: config_1.Config, },
    { type: core_1.ElementRef, },
    { type: platform_1.Platform, },
    { type: core_1.Renderer, },
    { type: deep_linker_1.DeepLinker, },
]; };
// Compiled @Input/@Output/@ViewChild property metadata.
Tabs.propDecorators = {
    'selectedIndex': [{ type: core_1.Input },],
    'tabsLayout': [{ type: core_1.Input },],
    'tabsPlacement': [{ type: core_1.Input },],
    'tabsHighlight': [{ type: core_1.Input },],
    'ionChange': [{ type: core_1.Output },],
    '_highlight': [{ type: core_1.ViewChild, args: [tab_highlight_1.TabHighlight,] },],
    '_tabbar': [{ type: core_1.ViewChild, args: ['tabbar',] },],
    'portal': [{ type: core_1.ViewChild, args: ['portal', { read: core_1.ViewContainerRef },] },],
};
exports.Tabs = Tabs;
// Closure-compiler type declarations emitted by tsickle. This function is
// never invoked at runtime; it only exists so the property declarations
// below survive compilation for type checking.
function Tabs_tsickle_Closure_declarations() {
    /** @type {?} */
    Tabs.decorators;
    /**
     * @nocollapse
     * @type {?}
     */
    Tabs.ctorParameters;
    /** @type {?} */
    Tabs.propDecorators;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype._ids;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype._tabs;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype._sbPadding;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype._top;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype._bottom;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype.id;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype._selectHistory;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype._resizeObs;
    /**
     * \@input {number} The default selected tab index when first loaded. If a selected index isn't provided then it will use `0`, the first tab.
     * @type {?}
     */
    Tabs.prototype.selectedIndex;
    /**
     * \@input {string} Set the tabbar layout: `icon-top`, `icon-left`, `icon-right`, `icon-bottom`, `icon-hide`, `title-hide`.
     * @type {?}
     */
    Tabs.prototype.tabsLayout;
    /**
     * \@input {string} Set position of the tabbar: `top`, `bottom`.
     * @type {?}
     */
    Tabs.prototype.tabsPlacement;
    /**
     * \@input {boolean} If true, show the tab highlight bar under the selected tab.
     * @type {?}
     */
    Tabs.prototype.tabsHighlight;
    /**
     * \@output {any} Emitted when the tab changes.
     * @type {?}
     */
    Tabs.prototype.ionChange;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype._highlight;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype._tabbar;
    /**
     * \@internal
     * @type {?}
     */
    Tabs.prototype.portal;
    /**
     * @hidden
     * @type {?}
     */
    Tabs.prototype.parent;
    /** @type {?} */
    Tabs.prototype.viewCtrl;
    /** @type {?} */
    Tabs.prototype._app;
    /** @type {?} */
    Tabs.prototype._plt;
    /** @type {?} */
    Tabs.prototype._linker;
}
// Module-level counter used by the constructor to mint unique per-instance
// ids ('t1', 't2', ...).
var /** @type {?} */ tabIds = -1;
});
//# sourceMappingURL=tabs.js.map | mit |
rokn/Count_Words_2015 | testing/openjdk2/nashorn/src/jdk/nashorn/internal/runtime/linker/NashornLinker.java | 14387 | /*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.nashorn.internal.runtime.linker;
import static jdk.nashorn.internal.lookup.Lookup.MH;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodHandles.Lookup;
import java.lang.reflect.Modifier;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Deque;
import java.util.List;
import java.util.Map;
import javax.script.Bindings;
import jdk.internal.dynalink.CallSiteDescriptor;
import jdk.internal.dynalink.linker.ConversionComparator;
import jdk.internal.dynalink.linker.GuardedInvocation;
import jdk.internal.dynalink.linker.GuardedTypeConversion;
import jdk.internal.dynalink.linker.GuardingTypeConverterFactory;
import jdk.internal.dynalink.linker.LinkRequest;
import jdk.internal.dynalink.linker.LinkerServices;
import jdk.internal.dynalink.linker.TypeBasedGuardingDynamicLinker;
import jdk.internal.dynalink.support.Guards;
import jdk.internal.dynalink.support.LinkerServicesImpl;
import jdk.nashorn.api.scripting.JSObject;
import jdk.nashorn.api.scripting.ScriptObjectMirror;
import jdk.nashorn.api.scripting.ScriptUtils;
import jdk.nashorn.internal.objects.NativeArray;
import jdk.nashorn.internal.runtime.JSType;
import jdk.nashorn.internal.runtime.ScriptFunction;
import jdk.nashorn.internal.runtime.ScriptObject;
import jdk.nashorn.internal.runtime.Undefined;
/**
* This is the main dynamic linker for Nashorn. It is used for linking all {@link ScriptObject} and its subclasses (this
* includes {@link ScriptFunction} and its subclasses) as well as {@link Undefined}.
*/
final class NashornLinker implements TypeBasedGuardingDynamicLinker, GuardingTypeConverterFactory, ConversionComparator {
    // Per-target-class cache of NativeArray-to-Java-array converter method
    // handles; ClassValue ensures each converter is computed at most once
    // per array type.
    private static final ClassValue<MethodHandle> ARRAY_CONVERTERS = new ClassValue<MethodHandle>() {
        @Override
        protected MethodHandle computeValue(Class<?> type) {
            return createArrayConverter(type);
        }
    };
    /**
     * Returns true if {@code ScriptObject} is assignable from {@code type}, or it is {@code Undefined}.
     */
    @Override
    public boolean canLinkType(final Class<?> type) {
        // Delegates to the static variant so other linkers can reuse the check.
        return canLinkTypeStatic(type);
    }
static boolean canLinkTypeStatic(final Class<?> type) {
return ScriptObject.class.isAssignableFrom(type) || Undefined.class == type;
}
    /**
     * Links "dyn:*" call sites whose receiver is a {@code ScriptObject} or
     * {@code Undefined}, delegating the lookup to the receiver itself and
     * adapting the result to the call site's type.
     */
    @Override
    public GuardedInvocation getGuardedInvocation(final LinkRequest request, final LinkerServices linkerServices) throws Exception {
        final LinkRequest requestWithoutContext = request.withoutRuntimeContext(); // Nashorn has no runtime context
        final Object self = requestWithoutContext.getReceiver();
        final CallSiteDescriptor desc = requestWithoutContext.getCallSiteDescriptor();
        if (desc.getNameTokenCount() < 2 || !"dyn".equals(desc.getNameToken(CallSiteDescriptor.SCHEME))) {
            // We only support standard "dyn:*[:*]" operations
            return null;
        }
        final GuardedInvocation inv;
        if (self instanceof ScriptObject) {
            inv = ((ScriptObject)self).lookup(desc, request);
        } else if (self instanceof Undefined) {
            inv = Undefined.lookup(desc);
        } else {
            // canLinkType() guarantees self is one of the two types above.
            throw new AssertionError(); // Should never reach here.
        }
        return Bootstrap.asType(inv, linkerServices, desc);
    }
    /**
     * Produces a conversion from {@code sourceType} to {@code targetType}:
     * built-in conversions (primitives, strings, arrays, mirrors) are tried
     * first, then a SAM-type conversion from a script function.
     * NOTE(review): the boolean passed to GuardedTypeConversion differs
     * between the two paths (true for built-ins, false for SAM) — presumably
     * a cacheability flag; confirm against GuardedTypeConversion's javadoc.
     */
    @Override
    public GuardedTypeConversion convertToType(final Class<?> sourceType, final Class<?> targetType) throws Exception {
        GuardedInvocation gi = convertToTypeNoCast(sourceType, targetType);
        if(gi != null) {
            return new GuardedTypeConversion(gi.asType(MH.type(targetType, sourceType)), true);
        }
        gi = getSamTypeConverter(sourceType, targetType);
        if(gi != null) {
            return new GuardedTypeConversion(gi.asType(MH.type(targetType, sourceType)), false);
        }
        return null;
    }
    /**
     * Main part of the implementation of {@link GuardingTypeConverterFactory#convertToType(Class, Class)} that doesn't
     * care about adapting the method signature; that's done by the invoking method. Returns either a built-in
     * conversion to primitive (or primitive wrapper) Java types or to String, or a just-in-time generated converter to
     * a SAM type (if the target type is a SAM type).
     * @param sourceType the source type
     * @param targetType the target type
     * @return a guarded invocation that converts from the source type to the target type.
     * @throws Exception if something goes wrong
     */
    private static GuardedInvocation convertToTypeNoCast(final Class<?> sourceType, final Class<?> targetType) throws Exception {
        // 1. Built-in converters for primitives, their wrappers, and String.
        final MethodHandle mh = JavaArgumentConverters.getConverter(targetType);
        if (mh != null) {
            return new GuardedInvocation(mh, canLinkTypeStatic(sourceType) ? null : IS_NASHORN_OR_UNDEFINED_TYPE);
        }
        // 2. NativeArray to Java array/List/Deque.
        final GuardedInvocation arrayConverter = getArrayConverter(sourceType, targetType);
        if(arrayConverter != null) {
            return arrayConverter;
        }
        // 3. ScriptObject to one of the supported mirror interfaces.
        return getMirrorConverter(sourceType, targetType);
    }
    /**
     * Returns a guarded invocation that converts from a source type that is ScriptFunction, or a subclass or a
     * superclass of it) to a SAM type.
     * @param sourceType the source type (presumably ScriptFunction or a subclass or a superclass of it)
     * @param targetType the target type (presumably a SAM type)
     * @return a guarded invocation that converts from the source type to the target SAM type. null is returned if
     * either the source type is neither ScriptFunction, nor a subclass, nor a superclass of it, or if the target type
     * is not a SAM type.
     * @throws Exception if something goes wrong; generally, if there's an issue with creation of the SAM proxy type
     * constructor.
     */
    private static GuardedInvocation getSamTypeConverter(final Class<?> sourceType, final Class<?> targetType) throws Exception {
        // If source type is more generic than ScriptFunction class, we'll need to use a guard
        final boolean isSourceTypeGeneric = sourceType.isAssignableFrom(ScriptFunction.class);
        if ((isSourceTypeGeneric || ScriptFunction.class.isAssignableFrom(sourceType)) && isAutoConvertibleFromFunction(targetType)) {
            // Generate (or fetch) an adapter class whose constructor wraps the function.
            final MethodHandle ctor = JavaAdapterFactory.getConstructor(ScriptFunction.class, targetType, getCurrentLookup());
            assert ctor != null; // if isAutoConvertibleFromFunction() returned true, then ctor must exist.
            return new GuardedInvocation(ctor, isSourceTypeGeneric ? IS_SCRIPT_FUNCTION : null);
        }
        return null;
    }
    /**
     * Returns the lookup of the call site currently being linked, or the
     * public lookup when no link request is in progress. Retrieval runs in a
     * doPrivileged block since reading the current link request is a
     * privileged operation.
     */
    private static Lookup getCurrentLookup() {
        final LinkRequest currentRequest = AccessController.doPrivileged(new PrivilegedAction<LinkRequest>() {
            @Override
            public LinkRequest run() {
                return LinkerServicesImpl.getCurrentLinkRequest();
            }
        });
        return currentRequest == null ? MethodHandles.publicLookup() : currentRequest.getCallSiteDescriptor().getLookup();
    }
    /**
     * Returns a guarded invocation that converts from a source type that is NativeArray to a Java array or List or
     * Deque type.
     * @param sourceType the source type (presumably NativeArray a superclass of it)
     * @param targetType the target type (presumably an array type, or List or Deque)
     * @return a guarded invocation that converts from the source type to the target type. null is returned if
     * either the source type is neither NativeArray, nor a superclass of it, or if the target type is not an array
     * type, List, or Deque.
     */
    private static GuardedInvocation getArrayConverter(final Class<?> sourceType, final Class<?> targetType) {
        final boolean isSourceTypeNativeArray = sourceType == NativeArray.class;
        // If source type is more generic than NativeArray, we'll need a runtime guard
        final boolean isSourceTypeGeneric = !isSourceTypeNativeArray && sourceType.isAssignableFrom(NativeArray.class);
        if (isSourceTypeNativeArray || isSourceTypeGeneric) {
            final MethodHandle guard = isSourceTypeGeneric ? IS_NATIVE_ARRAY : null;
            if(targetType.isArray()) {
                return new GuardedInvocation(ARRAY_CONVERTERS.get(targetType), guard);
            }
            if(targetType == List.class) {
                return new GuardedInvocation(JSType.TO_JAVA_LIST.methodHandle(), guard);
            }
            if(targetType == Deque.class) {
                return new GuardedInvocation(JSType.TO_JAVA_DEQUE.methodHandle(), guard);
            }
        }
        return null;
    }
    /** Builds a method handle converting a JS value to the given Java array type via JSType.TO_JAVA_ARRAY. */
    private static MethodHandle createArrayConverter(final Class<?> type) {
        assert type.isArray();
        final MethodHandle converter = MH.insertArguments(JSType.TO_JAVA_ARRAY.methodHandle(), 1, type.getComponentType());
        // Narrow the generic Object return type to the concrete array type expected at the call site.
        return MH.asType(converter, converter.type().changeReturnType(type));
    }
private static GuardedInvocation getMirrorConverter(Class<?> sourceType, Class<?> targetType) {
// Could've also used (targetType.isAssignableFrom(ScriptObjectMirror.class) && targetType != Object.class) but
// it's probably better to explicitly spell out the supported target types
if (targetType == Map.class || targetType == Bindings.class || targetType == JSObject.class || targetType == ScriptObjectMirror.class) {
if(ScriptObject.class.isAssignableFrom(sourceType)) {
return new GuardedInvocation(CREATE_MIRROR, null);
}
return new GuardedInvocation(CREATE_MIRROR, IS_SCRIPT_OBJECT);
}
return null;
}
    /**
     * Returns true if the target class is abstract (class or interface), is not a ScriptObject subclass,
     * and JavaAdapterFactory can automatically implement it from a script function.
     */
    private static boolean isAutoConvertibleFromFunction(final Class<?> clazz) {
        return isAbstractClass(clazz) && !ScriptObject.class.isAssignableFrom(clazz) &&
                JavaAdapterFactory.isAutoConvertibleFromFunction(clazz);
    }
    /**
     * Utility method used by a few other places in the code. Tests if the class has the abstract modifier and is not
     * an array class. For some reason, array classes have the abstract modifier set in HotSpot JVM, and we don't want
     * to treat array classes as abstract.
     * @param clazz the inspected class
     * @return true if the class is abstract and is not an array type.
     */
    static boolean isAbstractClass(final Class<?> clazz) {
        return Modifier.isAbstract(clazz.getModifiers()) && !clazz.isArray();
    }
    /**
     * Ranks candidate conversions when several target types apply: for NativeArray sources prefer
     * List/Deque (cheapest), then arrays; for ScriptObject sources prefer interface targets.
     */
    @Override
    public Comparison compareConversion(final Class<?> sourceType, final Class<?> targetType1, final Class<?> targetType2) {
        if(sourceType == NativeArray.class) {
            // Prefer lists, as they're less costly to create than arrays.
            if(isList(targetType1)) {
                if(!isList(targetType2)) {
                    return Comparison.TYPE_1_BETTER;
                }
            } else if(isList(targetType2)) {
                return Comparison.TYPE_2_BETTER;
            }
            // Then prefer arrays
            if(targetType1.isArray()) {
                if(!targetType2.isArray()) {
                    return Comparison.TYPE_1_BETTER;
                }
            } else if(targetType2.isArray()) {
                return Comparison.TYPE_2_BETTER;
            }
        }
        if(ScriptObject.class.isAssignableFrom(sourceType)) {
            // Prefer interfaces
            if(targetType1.isInterface()) {
                if(!targetType2.isInterface()) {
                    return Comparison.TYPE_1_BETTER;
                }
            } else if(targetType2.isInterface()) {
                return Comparison.TYPE_2_BETTER;
            }
        }
        return Comparison.INDETERMINATE;
    }
private static boolean isList(Class<?> clazz) {
return clazz == List.class || clazz == Deque.class;
}
    /** Guard: is the argument a ScriptObject instance? */
    private static final MethodHandle IS_SCRIPT_OBJECT = Guards.isInstance(ScriptObject.class, MH.type(Boolean.TYPE, Object.class));
    /** Guard: is the argument a ScriptFunction instance? */
    private static final MethodHandle IS_SCRIPT_FUNCTION = Guards.isInstance(ScriptFunction.class, MH.type(Boolean.TYPE, Object.class));
    /** Guard: is the argument exactly of class NativeArray? */
    private static final MethodHandle IS_NATIVE_ARRAY = Guards.isOfClass(NativeArray.class, MH.type(Boolean.TYPE, Object.class));
    /** Guard bound to {@link #isNashornTypeOrUndefined(Object)}. */
    private static final MethodHandle IS_NASHORN_OR_UNDEFINED_TYPE = findOwnMH("isNashornTypeOrUndefined", Boolean.TYPE, Object.class);
    /** Converter bound to {@link #createMirror(Object)}; used by the mirror conversions. */
    private static final MethodHandle CREATE_MIRROR = findOwnMH("createMirror", Object.class, Object.class);
    /** Guard method: true when the argument is a Nashorn ScriptObject or Undefined. */
    @SuppressWarnings("unused") // invoked reflectively through IS_NASHORN_OR_UNDEFINED_TYPE
    private static boolean isNashornTypeOrUndefined(final Object obj) {
        return obj instanceof ScriptObject || obj instanceof Undefined;
    }
    /** Wraps a Nashorn object via ScriptUtils.wrap; bound as the CREATE_MIRROR method handle. */
    @SuppressWarnings("unused") // invoked reflectively through CREATE_MIRROR
    private static Object createMirror(final Object obj) {
        return ScriptUtils.wrap(obj);
    }
    /** Looks up a static method of this class as a method handle with the given return/parameter types. */
    private static MethodHandle findOwnMH(final String name, final Class<?> rtype, final Class<?>... types) {
        return MH.findStatic(MethodHandles.lookup(), NashornLinker.class, name, MH.type(rtype, types));
    }
}
| mit |
ralfonso/spree | vendor/go.uber.org/zap/zaptest/observer/observer.go | 4455 | // Copyright (c) 2016 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package observer
import (
"sync"
"time"
"go.uber.org/zap/zapcore"
)
// A LoggedEntry is an encoding-agnostic representation of a log message.
// Field availability is context dependant.
type LoggedEntry struct {
	zapcore.Entry
	// Context holds the structured fields attached to the entry at write time.
	Context []zapcore.Field
}
// ObservedLogs is a concurrency-safe, ordered collection of observed logs.
type ObservedLogs struct {
	mu   sync.RWMutex // guards logs
	logs []LoggedEntry
}
// Len returns the number of items in the collection.
func (o *ObservedLogs) Len() int {
	o.mu.RLock()
	defer o.mu.RUnlock()
	return len(o.logs)
}
// All returns a copy of all the observed logs.
// The copy is independent of the internal slice, so callers may mutate it.
func (o *ObservedLogs) All() []LoggedEntry {
	o.mu.RLock()
	defer o.mu.RUnlock()
	ret := make([]LoggedEntry, len(o.logs))
	// Use the builtin copy instead of a hand-rolled element loop.
	copy(ret, o.logs)
	return ret
}
// TakeAll returns a copy of all the observed logs, and truncates the observed
// slice.
func (o *ObservedLogs) TakeAll() []LoggedEntry {
	o.mu.Lock()
	defer o.mu.Unlock()
	taken := o.logs
	o.logs = nil
	return taken
}
// AllUntimed returns a copy of all the observed logs, but overwrites the
// observed timestamps with time.Time's zero value. This is useful when making
// assertions in tests.
func (o *ObservedLogs) AllUntimed() []LoggedEntry {
	ret := o.All()
	// Zeroing the copies' timestamps leaves the stored entries untouched.
	for i := range ret {
		ret[i].Time = time.Time{}
	}
	return ret
}
// FilterMessage filters entries to those that have the specified message.
// The result is a new collection; the receiver is unchanged.
func (o *ObservedLogs) FilterMessage(msg string) *ObservedLogs {
	return o.filter(func(e LoggedEntry) bool {
		return e.Message == msg
	})
}
// FilterField filters entries to those that have the specified field.
// Fields are compared with Go's == operator.
func (o *ObservedLogs) FilterField(field zapcore.Field) *ObservedLogs {
	return o.filter(func(e LoggedEntry) bool {
		for _, ctxField := range e.Context {
			if ctxField == field {
				return true
			}
		}
		return false
	})
}
// filter returns a new ObservedLogs holding the entries for which match
// returns true; the receiver is left unmodified.
func (o *ObservedLogs) filter(match func(LoggedEntry) bool) *ObservedLogs {
	o.mu.RLock()
	defer o.mu.RUnlock()
	var filtered []LoggedEntry
	for _, entry := range o.logs {
		if match(entry) {
			filtered = append(filtered, entry)
		}
	}
	return &ObservedLogs{logs: filtered}
}
// add appends a single observed entry under the write lock.
func (o *ObservedLogs) add(log LoggedEntry) {
	o.mu.Lock()
	defer o.mu.Unlock()
	o.logs = append(o.logs, log)
}
// New creates a new Core that buffers logs in memory (without any encoding).
// It's particularly useful in tests. The returned ObservedLogs collects every
// entry the Core writes.
func New(enab zapcore.LevelEnabler) (zapcore.Core, *ObservedLogs) {
	ol := &ObservedLogs{}
	return &contextObserver{
		LevelEnabler: enab,
		logs:         ol,
	}, ol
}
// contextObserver is a zapcore.Core that forwards every written entry, plus
// the fields accumulated via With, to an ObservedLogs collection.
type contextObserver struct {
	zapcore.LevelEnabler
	logs    *ObservedLogs
	context []zapcore.Field
}
// Check implements zapcore.Core: it registers this core with the checked
// entry when the entry's level is enabled.
func (co *contextObserver) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry {
	if co.Enabled(ent.Level) {
		return ce.AddCore(ent, co)
	}
	return ce
}
// With returns a child core carrying the additional fields.
func (co *contextObserver) With(fields []zapcore.Field) zapcore.Core {
	return &contextObserver{
		LevelEnabler: co.LevelEnabler,
		logs:         co.logs,
		// The three-index slice caps the slice at its current length so the
		// append below always copies, preventing siblings from sharing (and
		// clobbering) the same backing array.
		context: append(co.context[:len(co.context):len(co.context)], fields...),
	}
}
// Write records the entry together with the accumulated context fields
// followed by the per-call fields.
func (co *contextObserver) Write(ent zapcore.Entry, fields []zapcore.Field) error {
	all := make([]zapcore.Field, 0, len(fields)+len(co.context))
	all = append(all, co.context...)
	all = append(all, fields...)
	co.logs.add(LoggedEntry{ent, all})
	return nil
}
// Sync implements zapcore.Core; an in-memory observer has nothing to flush.
func (co *contextObserver) Sync() error {
	return nil
}
| mit |
mclark--/mclark--.github.io | _posts/2020-03-23-covid/covid_files/highcharts-7.0.1/modules/variable-pie.js | 3104 | /*
Highcharts JS v7.0.1 (2018-12-19)
Variable Pie module for Highcharts
(c) 2010-2018 Grzegorz Blachliski
License: www.highcharts.com/license
*/
(function(c){"object"===typeof module&&module.exports?module.exports=c:"function"===typeof define&&define.amd?define(function(){return c}):c("undefined"!==typeof Highcharts?Highcharts:void 0)})(function(c){(function(c){var q=c.pick,r=c.arrayMin,t=c.arrayMax,w=c.seriesType,x=c.seriesTypes.pie.prototype;w("variablepie","pie",{minPointSize:"10%",maxPointSize:"100%",zMin:void 0,zMax:void 0,sizeBy:"area",tooltip:{pointFormat:'\x3cspan style\x3d"color:{point.color}"\x3e\u25cf\x3c/span\x3e {series.name}\x3cbr/\x3eValue: {point.y}\x3cbr/\x3eSize: {point.z}\x3cbr/\x3e'}},
{pointArrayMap:["y","z"],parallelArrays:["x","y","z"],redraw:function(){this.center=null;x.redraw.call(this,arguments)},zValEval:function(a){return"number"!==typeof a||isNaN(a)?null:!0},calculateExtremes:function(){var a=this.chart,c=this.options,d;d=this.zData;var l=Math.min(a.plotWidth,a.plotHeight)-2*(c.slicedOffset||0),g={},a=this.center||this.getCenter();["minPointSize","maxPointSize"].forEach(function(a){var b=c[a],d=/%$/.test(b),b=parseInt(b,10);g[a]=d?l*b/100:2*b});this.minPxSize=a[3]+g.minPointSize;
this.maxPxSize=Math.max(Math.min(a[2],g.maxPointSize),a[3]+g.minPointSize);d.length&&(a=q(c.zMin,r(d.filter(this.zValEval))),d=q(c.zMax,t(d.filter(this.zValEval))),this.getRadii(a,d,this.minPxSize,this.maxPxSize))},getRadii:function(a,c,d,l){var g=0,e,b=this.zData,k=b.length,m=[],p="radius"!==this.options.sizeBy,h=c-a;for(g;g<k;g++)e=this.zValEval(b[g])?b[g]:a,e<=a?e=d/2:e>=c?e=l/2:(e=0<h?(e-a)/h:.5,p&&(e=Math.sqrt(e)),e=Math.ceil(d+e*(l-d))/2),m.push(e);this.radii=m},translate:function(a){this.generatePoints();
var c=0,d=this.options,l=d.slicedOffset,g=l+(d.borderWidth||0),e,b,k,m=d.startAngle||0,p=Math.PI/180*(m-90),h=Math.PI/180*(q(d.endAngle,m+360)-90),m=h-p,u=this.points,v,r=d.dataLabels.distance,d=d.ignoreHiddenPoint,t=u.length,f,n;this.startAngleRad=p;this.endAngleRad=h;this.calculateExtremes();a||(this.center=a=this.getCenter());for(h=0;h<t;h++){f=u[h];n=this.radii[h];f.labelDistance=q(f.options.dataLabels&&f.options.dataLabels.distance,r);this.maxLabelDistance=Math.max(this.maxLabelDistance||0,f.labelDistance);
b=p+c*m;if(!d||f.visible)c+=f.percentage/100;k=p+c*m;f.shapeType="arc";f.shapeArgs={x:a[0],y:a[1],r:n,innerR:a[3]/2,start:Math.round(1E3*b)/1E3,end:Math.round(1E3*k)/1E3};b=(k+b)/2;b>1.5*Math.PI?b-=2*Math.PI:b<-Math.PI/2&&(b+=2*Math.PI);f.slicedTranslation={translateX:Math.round(Math.cos(b)*l),translateY:Math.round(Math.sin(b)*l)};e=Math.cos(b)*a[2]/2;v=Math.sin(b)*a[2]/2;k=Math.cos(b)*n;n*=Math.sin(b);f.tooltipPos=[a[0]+.7*e,a[1]+.7*v];f.half=b<-Math.PI/2||b>Math.PI/2?1:0;f.angle=b;e=Math.min(g,
f.labelDistance/5);f.labelPosition={natural:{x:a[0]+k+Math.cos(b)*f.labelDistance,y:a[1]+n+Math.sin(b)*f.labelDistance},"final":{},alignment:f.half?"right":"left",connectorPosition:{breakAt:{x:a[0]+k+Math.cos(b)*e,y:a[1]+n+Math.sin(b)*e},touchingSliceAt:{x:a[0]+k,y:a[1]+n}}}}}})})(c)});
//# sourceMappingURL=variable-pie.js.map
| mit |
tiaozi0912/apn_on_rails | spec/active_record/setup_ar.rb | 489 | require 'rubygems'
require 'activerecord'
logger = Logger.new(STDOUT)
logger.level = Logger::INFO
ActiveRecord::Base.logger = logger
db_file = File.join(File.dirname(__FILE__), 'test.db')
FileUtils.rm(db_file) if File.exists?(db_file)
# File.open(db_file, 'w')
ActiveRecord::Base.establish_connection({
:adapter => 'sqlite3',
:database => db_file
})
ActiveRecord::Migrator.up(File.join(File.dirname(__FILE__), '..', '..', 'generators', 'templates', 'apn_migrations'))
# raise hell | mit |
chenfengzi/do_WebView | src/designer/do_WebView/do_WebView/Properties/AssemblyInfo.cs | 1308 | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// 有关程序集的常规信息通过以下
// 特性集控制。更改这些特性值可修改
// 与程序集关联的信息。
[assembly: AssemblyTitle("UIViewBase")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("UIViewBase")]
[assembly: AssemblyCopyright("Copyright © 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// 将 ComVisible 设置为 false 使此程序集中的类型
// 对 COM 组件不可见。 如果需要从 COM 访问此程序集中的类型,
// 则将该类型上的 ComVisible 特性设置为 true。
[assembly: ComVisible(false)]
// 如果此项目向 COM 公开,则下列 GUID 用于类型库的 ID
[assembly: Guid("05f2507f-02fb-45cf-9273-2ff2aea99464")]
// 程序集的版本信息由下面四个值组成:
//
// 主版本
// 次版本
// 生成号
// 修订号
//
// 可以指定所有这些值,也可以使用“生成号”和“修订号”的默认值,
// 方法是按如下所示使用“*”:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| mit |
r22016/azure-sdk-for-net | src/ResourceManagement/Compute/Compute.Tests/ScenarioTests/VMImageTests.cs | 9569 | //
// Copyright (c) Microsoft. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using Microsoft.Azure.Management.Compute;
using Microsoft.Azure.Management.Compute.Models;
using Microsoft.Rest.ClientRuntime.Azure.TestFramework;
using System;
using System.Linq;
using System.Net;
using Xunit;
namespace Compute.Tests
{
    /// <summary>
    /// Scenario tests for the VirtualMachineImages operations: Get, List (with
    /// OData filters), ListPublishers, ListOffers, and ListSkus.
    /// </summary>
    public class VMImagesTests
    {
        /// <summary>
        /// Gets a concrete Windows Server image version and verifies its name,
        /// location and OS disk image properties.
        /// </summary>
        [Fact]
        public void TestVMImageGet()
        {
            using (MockContext context = MockContext.Start(this.GetType().FullName))
            {
                ComputeManagementClient _pirClient = ComputeManagementTestUtilities.GetComputeManagementClient(context,
                    new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                string[] availableWindowsServerImageVersions = _pirClient.VirtualMachineImages.List(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter").Select(t => t.Name).ToArray();

                var vmimage = _pirClient.VirtualMachineImages.Get(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter",
                    availableWindowsServerImageVersions[0]);

                Assert.Equal(availableWindowsServerImageVersions[0], vmimage.Name);
                Assert.Equal(ComputeManagementTestUtilities.DefaultLocation, vmimage.Location, StringComparer.OrdinalIgnoreCase);

                // FIXME: This doesn't work with a real Windows Server images, which is what's in the query parameters.
                // Bug 4196378
                /*
                Assert.True(vmimage.VirtualMachineImage.PurchasePlan.Name == "name");
                Assert.True(vmimage.VirtualMachineImage.PurchasePlan.Publisher == "publisher");
                Assert.True(vmimage.VirtualMachineImage.PurchasePlan.Product == "product");
                */

                Assert.Equal(OperatingSystemTypes.Windows, vmimage.OsDiskImage.OperatingSystem);

                //Assert.True(vmimage.VirtualMachineImage.DataDiskImages.Count(ddi => ddi.Lun == 123456789) != 0);
            }
        }

        /// <summary>
        /// Lists image versions without any OData filter and expects a
        /// non-empty result.
        /// </summary>
        [Fact]
        public void TestVMImageListNoFilter()
        {
            using (MockContext context = MockContext.Start(this.GetType().FullName))
            {
                ComputeManagementClient _pirClient = ComputeManagementTestUtilities.GetComputeManagementClient(context,
                    new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                var vmimages = _pirClient.VirtualMachineImages.List(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter");

                Assert.True(vmimages.Count > 0);
                //Assert.True(vmimages.Count(vmi => vmi.Name == AvailableWindowsServerImageVersions[0]) != 0);
                //Assert.True(vmimages.Count(vmi => vmi.Name == AvailableWindowsServerImageVersions[1]) != 0);
            }
        }

        /// <summary>
        /// Exercises the $top and $orderby OData options on the image-version
        /// list operation, checking the returned counts.
        /// </summary>
        [Fact]
        public void TestVMImageListFilters()
        {
            using (MockContext context = MockContext.Start(this.GetType().FullName))
            {
                ComputeManagementClient _pirClient = ComputeManagementTestUtilities.GetComputeManagementClient(context,
                    new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });
                var query = new Microsoft.Rest.Azure.OData.ODataQuery<VirtualMachineImageResource>();

                // Filter: top - Negative Test
                query.Top = 0;
                var vmimages = _pirClient.VirtualMachineImages.List(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter",
                    query);
                Assert.True(vmimages.Count == 0);

                // Filter: top - Positive Test
                query.Top = 1;
                vmimages = _pirClient.VirtualMachineImages.List(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter",
                    query);
                Assert.True(vmimages.Count == 1);

                // Filter: top - Positive Test
                query.Top = 2;
                vmimages = _pirClient.VirtualMachineImages.List(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter",
                    query);
                Assert.True(vmimages.Count == 2);

                // Filter: orderby - Positive Test
                // NOTE(review): the two orderby-only calls below carry no assertions;
                // they only verify that the request itself succeeds.
                query.Top = null;
                query.OrderBy = "name desc";
                vmimages = _pirClient.VirtualMachineImages.List(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter",
                    query);

                // Filter: orderby - Positive Test
                query.Top = 2;
                query.OrderBy = "name asc";
                vmimages = _pirClient.VirtualMachineImages.List(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter",
                    query);
                Assert.True(vmimages.Count == 2);

                // Filter: top orderby - Positive Test
                query.Top = 1;
                query.OrderBy = "name desc";
                vmimages = _pirClient.VirtualMachineImages.List(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter",
                    query);
                Assert.True(vmimages.Count == 1);

                // Filter: top orderby - Positive Test
                query.Top = 1;
                query.OrderBy = "name asc";
                vmimages = _pirClient.VirtualMachineImages.List(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer",
                    "2012-R2-Datacenter",
                    query);
                Assert.True(vmimages.Count == 1);
            }
        }

        /// <summary>
        /// Lists image publishers for the default location and expects the
        /// Windows Server publisher to be present.
        /// </summary>
        [Fact]
        public void TestVMImageListPublishers()
        {
            using (MockContext context = MockContext.Start(this.GetType().FullName))
            {
                ComputeManagementClient _pirClient = ComputeManagementTestUtilities.GetComputeManagementClient(context,
                    new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                var publishers = _pirClient.VirtualMachineImages.ListPublishers(
                    ComputeManagementTestUtilities.DefaultLocation);

                Assert.True(publishers.Count > 0);
                Assert.True(publishers.Count(pub => pub.Name == "MicrosoftWindowsServer") != 0);
            }
        }

        /// <summary>
        /// Lists offers for the Windows Server publisher and expects the
        /// "WindowsServer" offer to be present.
        /// </summary>
        [Fact]
        public void TestVMImageListOffers()
        {
            using (MockContext context = MockContext.Start(this.GetType().FullName))
            {
                ComputeManagementClient _pirClient = ComputeManagementTestUtilities.GetComputeManagementClient(context,
                    new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                var offers = _pirClient.VirtualMachineImages.ListOffers(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer");

                Assert.True(offers.Count > 0);
                Assert.True(offers.Count(offer => offer.Name == "WindowsServer") != 0);
            }
        }

        /// <summary>
        /// Lists SKUs for the Windows Server offer and expects the
        /// "2012-R2-Datacenter" SKU to be present.
        /// </summary>
        [Fact]
        public void TestVMImageListSkus()
        {
            using (MockContext context = MockContext.Start(this.GetType().FullName))
            {
                ComputeManagementClient _pirClient = ComputeManagementTestUtilities.GetComputeManagementClient(context,
                    new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                var skus = _pirClient.VirtualMachineImages.ListSkus(
                    ComputeManagementTestUtilities.DefaultLocation,
                    "MicrosoftWindowsServer",
                    "WindowsServer");

                Assert.True(skus.Count > 0);
                Assert.True(skus.Count(sku => sku.Name == "2012-R2-Datacenter") != 0);
            }
        }
    }
}
| mit |
guxuede/gm | src/main/java/com/badlogic/gdx/tests/VibratorTest.java | 1414 | /*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.tests;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.tests.utils.GdxTest;
/** Demonstrates Gdx.input.vibrate: renders a prompt and vibrates for 100 ms on each new touch. */
public class VibratorTest extends GdxTest {
	SpriteBatch batch;
	BitmapFont font;

	@Override
	public void create () {
		batch = new SpriteBatch();
		font = new BitmapFont();
	}

	@Override
	public void render () {
		Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
		batch.begin();
		font.draw(batch, "Touch screen to vibrate", 100, 100);
		batch.end();
		// justTouched() fires once per touch, so this vibrates once per tap.
		if (Gdx.input.justTouched()) Gdx.input.vibrate(100);
	}
}
| mit |
gruberro/Sylius | src/Sylius/Component/Core/Repository/ProductRepositoryInterface.php | 1831 | <?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace Sylius\Component\Core\Repository;
use Doctrine\ORM\QueryBuilder;
use Sylius\Component\Core\Model\ChannelInterface;
use Sylius\Component\Core\Model\ProductInterface;
use Sylius\Component\Core\Model\TaxonInterface;
use Sylius\Component\Product\Repository\ProductRepositoryInterface as BaseProductRepositoryInterface;
/**
 * Product repository extended with shop-specific (channel/taxon/locale aware) finders.
 *
 * @author Łukasz Chruściel <lukasz.chrusciel@lakion.com>
 */
interface ProductRepositoryInterface extends BaseProductRepositoryInterface
{
    /**
     * Creates a query builder listing products in the given locale,
     * optionally restricted to a single taxon.
     *
     * @param string $locale
     * @param mixed|null $taxonId
     *
     * @return QueryBuilder
     */
    public function createListQueryBuilder($locale, $taxonId = null);

    /**
     * Creates a query builder listing a taxon's products for the given
     * channel and locale, ordered according to $sorting.
     *
     * @param ChannelInterface $channel
     * @param TaxonInterface $taxon
     * @param string $locale
     * @param array $sorting
     *
     * @return QueryBuilder
     */
    public function createShopListQueryBuilder(ChannelInterface $channel, TaxonInterface $taxon, $locale, array $sorting = []);

    /**
     * Returns up to $count of the latest products of the channel
     * (presumably ordered by creation date — confirm in the implementation).
     *
     * @param ChannelInterface $channel
     * @param string $locale
     * @param int $count
     *
     * @return ProductInterface[]
     */
    public function findLatestByChannel(ChannelInterface $channel, $locale, $count);

    /**
     * Finds a single product by its translated slug within a channel.
     *
     * @param ChannelInterface $channel
     * @param string $locale
     * @param string $slug
     *
     * @return ProductInterface|null
     */
    public function findOneByChannelAndSlug(ChannelInterface $channel, $locale, $slug);

    /**
     * Finds a single product by its unique code.
     *
     * @param string $code
     *
     * @return ProductInterface|null
     */
    public function findOneByCode($code);
}
| mit |
AbraaoAlves/ionic | src/components/infinite-scroll/infinite-scroll-content.ts | 1180 | import { Component, Input, ViewEncapsulation } from '@angular/core';
import { Config } from '../../config/config';
import { InfiniteScroll } from './infinite-scroll';
/**
 * @private
 * Default content rendered inside ion-infinite-scroll: shows a spinner
 * and/or text while the parent InfiniteScroll is loading. The host's
 * `state` attribute mirrors the infinite scroll's state for styling.
 */
@Component({
  selector: 'ion-infinite-scroll-content',
  template:
    '<div class="infinite-loading">' +
      '<div class="infinite-loading-spinner" *ngIf="loadingSpinner">' +
        '<ion-spinner [name]="loadingSpinner"></ion-spinner>' +
      '</div>' +
      '<div class="infinite-loading-text" [innerHTML]="loadingText" *ngIf="loadingText"></div>' +
    '</div>',
  host: {
    '[attr.state]': 'inf.state'
  },
  encapsulation: ViewEncapsulation.None,
})
export class InfiniteScrollContent {

  /**
   * @input {string} An animated SVG spinner that shows while loading.
   */
  @Input() loadingSpinner: string;

  /**
   * @input {string} Optional text to display while loading.
   */
  @Input() loadingText: string;

  constructor(public inf: InfiniteScroll, private _config: Config) {}

  /**
   * @private
   * Falls back to the configured spinner ('infiniteLoadingSpinner', then
   * 'spinner', then 'ios') when no loadingSpinner input was provided.
   */
  ngOnInit() {
    if (!this.loadingSpinner) {
      this.loadingSpinner = this._config.get('infiniteLoadingSpinner', this._config.get('spinner', 'ios'));
    }
  }

}
| mit |
MrMaidx/godot | scene/2d/line_builder.cpp | 17241 | /*************************************************************************/
/* line_builder.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "line_builder.h"
//----------------------------------------------------------------------------
// Util
//----------------------------------------------------------------------------
// Classifies the relationship between two 2D segments (see segment_intersection).
enum SegmentIntersectionResult {
	SEGMENT_PARALLEL = 0,
	SEGMENT_NO_INTERSECT = 1,
	SEGMENT_INTERSECT = 2
};
// Intersects the infinite lines through segments [a,b] and [c,d].
// When the lines are not (nearly) parallel, writes the intersection point to
// *out_intersection — even when it lies outside the segments — and reports
// whether it falls within both segments. When parallel, *out_intersection is
// left untouched.
static SegmentIntersectionResult segment_intersection(
		Vector2 a, Vector2 b, Vector2 c, Vector2 d,
		Vector2 *out_intersection) {
	// http://paulbourke.net/geometry/pointlineplane/ <-- Good stuff
	Vector2 cd = d - c;
	Vector2 ab = b - a;
	float div = cd.y * ab.x - cd.x * ab.y;
	// A near-zero denominator means the directions are (almost) parallel.
	if (Math::abs(div) > 0.001f) {
		// ua/ub are the normalized positions of the intersection along ab/cd.
		float ua = (cd.x * (a.y - c.y) - cd.y * (a.x - c.x)) / div;
		float ub = (ab.x * (a.y - c.y) - ab.y * (a.x - c.x)) / div;
		*out_intersection = a + ua * ab;
		if (ua >= 0.f && ua <= 1.f &&
				ub >= 0.f && ub <= 1.f)
			return SEGMENT_INTERSECT;
		return SEGMENT_NO_INTERSECT;
	}
	return SEGMENT_PARALLEL;
}
// Generic by-value swap of two elements.
template <typename T>
static inline void swap(T &a, T &b) {
	T tmp = a;
	a = b;
	b = tmp;
}
// Returns the length of the polyline: the sum of distances between consecutive points.
static float calculate_total_distance(const Vector<Vector2> &points) {
	float d = 0.f;
	for (int i = 1; i < points.size(); ++i) {
		d += points[i].distance_to(points[i - 1]);
	}
	return d;
}
// Returns a vector perpendicular to v (quarter-turn rotation).
static inline Vector2 rotate90(const Vector2 &v) {
	// Note: the 2D referential is X-right, Y-down
	return Vector2(v.y, -v.x);
}
// Maps normalized coordinates v (0..1 on each axis) into rectangle r.
static inline Vector2 interpolate(const Rect2 &r, const Vector2 &v) {
	return Vector2(
			Math::lerp(r.get_pos().x, r.get_pos().x + r.get_size().x, v.x),
			Math::lerp(r.get_pos().y, r.get_pos().y + r.get_size().y, v.y));
}
//----------------------------------------------------------------------------
// LineBuilder
//----------------------------------------------------------------------------
// Default configuration: sharp joints, flat (no) caps, solid light-blue line.
LineBuilder::LineBuilder() {
	joint_mode = LINE_JOINT_SHARP;
	width = 10;
	default_color = Color(0.4, 0.5, 1);
	gradient = NULL;
	sharp_limit = 2.f; // Miter length limit relative to half-width before falling back to bevel (see build()).
	round_precision = 8; // Presumably the segment count for round joints/caps — confirm against new_arc().
	begin_cap_mode = LINE_CAP_NONE;
	end_cap_mode = LINE_CAP_NONE;
	_interpolate_color = false;
	_last_index[0] = 0;
	_last_index[1] = 0;
}
// Drops all previously generated geometry (vertices, colors and indices).
void LineBuilder::clear_output() {
	vertices.clear();
	colors.clear();
	indices.clear();
}
void LineBuilder::build() {
// Need at least 2 points to draw a line
if (points.size() < 2) {
clear_output();
return;
}
const float hw = width / 2.f;
const float hw_sq = hw * hw;
const float sharp_limit_sq = sharp_limit * sharp_limit;
const int len = points.size();
// Initial values
Vector2 pos0 = points[0];
Vector2 pos1 = points[1];
Vector2 f0 = (pos1 - pos0).normalized();
Vector2 u0 = rotate90(f0);
Vector2 pos_up0 = pos0 + u0 * hw;
Vector2 pos_down0 = pos0 - u0 * hw;
Color color0;
Color color1;
float current_distance0 = 0.f;
float current_distance1 = 0.f;
float total_distance;
_interpolate_color = gradient != NULL;
bool distance_required = _interpolate_color || texture_mode == LINE_TEXTURE_TILE;
if (distance_required)
total_distance = calculate_total_distance(points);
if (_interpolate_color)
color0 = gradient->get_color(0);
else
colors.push_back(default_color);
float uvx0 = 0.f;
float uvx1 = 0.f;
// Begin cap
if (begin_cap_mode == LINE_CAP_BOX) {
// Push back first vertices a little bit
pos_up0 -= f0 * hw;
pos_down0 -= f0 * hw;
// The line's outer length will be a little higher due to begin and end caps
total_distance += width;
current_distance0 += hw;
current_distance1 = current_distance0;
} else if (begin_cap_mode == LINE_CAP_ROUND) {
if (texture_mode == LINE_TEXTURE_TILE) {
uvx0 = 0.5f;
}
new_arc(pos0, pos_up0 - pos0, -Math_PI, color0, Rect2(0.f, 0.f, 1.f, 1.f));
total_distance += width;
current_distance0 += hw;
current_distance1 = current_distance0;
}
strip_begin(pos_up0, pos_down0, color0, uvx0);
// pos_up0 ------------- pos_up1 --------------------
// | |
// pos0 - - - - - - - - - pos1 - - - - - - - - - pos2
// | |
// pos_down0 ------------ pos_down1 ------------------
//
// i-1 i i+1
// http://labs.hyperandroid.com/tag/opengl-lines
// (not the same implementation but visuals help a lot)
// For each additional segment
for (int i = 1; i < len - 1; ++i) {
pos1 = points[i];
Vector2 pos2 = points[i + 1];
Vector2 f1 = (pos2 - pos1).normalized();
Vector2 u1 = rotate90(f1);
// Determine joint orientation
const float dp = u0.dot(f1);
const Orientation orientation = (dp > 0.f ? UP : DOWN);
Vector2 inner_normal0, inner_normal1;
if (orientation == UP) {
inner_normal0 = u0 * hw;
inner_normal1 = u1 * hw;
} else {
inner_normal0 = -u0 * hw;
inner_normal1 = -u1 * hw;
}
// ---------------------------
// /
// 0 / 1
// / /
// --------------------x------ /
// / / (here shown with orientation == DOWN)
// / /
// / /
// / /
// 2 /
// /
// Find inner intersection at the joint
Vector2 corner_pos_in, corner_pos_out;
SegmentIntersectionResult intersection_result = segment_intersection(
pos0 + inner_normal0, pos1 + inner_normal0,
pos1 + inner_normal1, pos2 + inner_normal1,
&corner_pos_in);
if (intersection_result == SEGMENT_INTERSECT)
// Inner parts of the segments intersect
corner_pos_out = 2.f * pos1 - corner_pos_in;
else {
// No intersection, segments are either parallel or too sharp
corner_pos_in = pos1 + inner_normal0;
corner_pos_out = pos1 - inner_normal0;
}
Vector2 corner_pos_up, corner_pos_down;
if (orientation == UP) {
corner_pos_up = corner_pos_in;
corner_pos_down = corner_pos_out;
} else {
corner_pos_up = corner_pos_out;
corner_pos_down = corner_pos_in;
}
LineJointMode current_joint_mode = joint_mode;
Vector2 pos_up1, pos_down1;
if (intersection_result == SEGMENT_INTERSECT) {
// Fallback on bevel if sharp angle is too high (because it would produce very long miters)
if (current_joint_mode == LINE_JOINT_SHARP && corner_pos_out.distance_squared_to(pos1) / hw_sq > sharp_limit_sq) {
current_joint_mode = LINE_JOINT_BEVEL;
}
if (current_joint_mode == LINE_JOINT_SHARP) {
// In this case, we won't create joint geometry,
// The previous and next line quads will directly share an edge.
pos_up1 = corner_pos_up;
pos_down1 = corner_pos_down;
} else {
// Bevel or round
if (orientation == UP) {
pos_up1 = corner_pos_up;
pos_down1 = pos1 - u0 * hw;
} else {
pos_up1 = pos1 + u0 * hw;
pos_down1 = corner_pos_down;
}
}
} else {
// No intersection: fallback
pos_up1 = corner_pos_up;
pos_down1 = corner_pos_down;
}
// Add current line body quad
// Triangles are clockwise
if (distance_required) {
current_distance1 += pos0.distance_to(pos1);
}
if (_interpolate_color) {
color1 = gradient->get_color_at_offset(current_distance1 / total_distance);
}
if (texture_mode == LINE_TEXTURE_TILE) {
uvx0 = current_distance0 / width;
uvx1 = current_distance1 / width;
}
strip_add_quad(pos_up1, pos_down1, color1, uvx1);
// Swap vars for use in the next line
color0 = color1;
u0 = u1;
f0 = f1;
pos0 = pos1;
current_distance0 = current_distance1;
if (intersection_result == SEGMENT_INTERSECT) {
if (current_joint_mode == LINE_JOINT_SHARP) {
pos_up0 = pos_up1;
pos_down0 = pos_down1;
} else {
if (orientation == UP) {
pos_up0 = corner_pos_up;
pos_down0 = pos1 - u1 * hw;
} else {
pos_up0 = pos1 + u1 * hw;
pos_down0 = corner_pos_down;
}
}
} else {
pos_up0 = pos1 + u1 * hw;
pos_down0 = pos1 - u1 * hw;
}
// From this point, bu0 and bd0 concern the next segment
// Add joint geometry
if (current_joint_mode != LINE_JOINT_SHARP) {
// ________________ cbegin
// / \
// / \
// ____________/_ _ _\ cend
// | |
// | |
// | |
Vector2 cbegin, cend;
if (orientation == UP) {
cbegin = pos_down1;
cend = pos_down0;
} else {
cbegin = pos_up1;
cend = pos_up0;
}
if (current_joint_mode == LINE_JOINT_BEVEL) {
strip_add_tri(cend, orientation);
} else if (current_joint_mode == LINE_JOINT_ROUND) {
Vector2 vbegin = cbegin - pos1;
Vector2 vend = cend - pos1;
strip_add_arc(pos1, vend.angle_to(vbegin), orientation);
}
if (intersection_result != SEGMENT_INTERSECT)
// In this case the joint geometry is too degenerate to be re-used,
// so restart the strip with the fallback points
strip_begin(pos_up0, pos_down0, color1, uvx1);
}
}
// Last (or only) segment
pos1 = points[points.size() - 1];
Vector2 pos_up1 = pos1 + u0 * hw;
Vector2 pos_down1 = pos1 - u0 * hw;
// End cap (box)
if (end_cap_mode == LINE_CAP_BOX) {
pos_up1 += f0 * hw;
pos_down1 += f0 * hw;
}
if (distance_required) {
current_distance1 += pos0.distance_to(pos1);
}
if (_interpolate_color) {
color1 = gradient->get_color(gradient->get_points_count() - 1);
}
if (texture_mode == LINE_TEXTURE_TILE) {
uvx1 = current_distance1 / width;
}
strip_add_quad(pos_up1, pos_down1, color1, uvx1);
// End cap (round)
if (end_cap_mode == LINE_CAP_ROUND) {
// Note: color is not used in case we don't interpolate...
Color color = _interpolate_color ? gradient->get_color(gradient->get_points_count() - 1) : Color(0, 0, 0);
new_arc(pos1, pos_up1 - pos1, Math_PI, color, Rect2(uvx1 - 0.5f, 0.f, 1.f, 1.f));
}
}
// Starts a new triangle strip from a fresh up/down vertex pair.
// No indices are emitted yet; the next quad attaches to this edge.
void LineBuilder::strip_begin(Vector2 up, Vector2 down, Color color, float uvx) {
	const int first = vertices.size();

	vertices.push_back(up);
	vertices.push_back(down);

	if (_interpolate_color) {
		// Both vertices of the starting edge share the same color.
		colors.push_back(color);
		colors.push_back(color);
	}

	if (texture_mode != LINE_TEXTURE_NONE) {
		// UV x advances along the line, y spans the texture height.
		uvs.push_back(Vector2(uvx, 0.f));
		uvs.push_back(Vector2(uvx, 1.f));
	}

	// Remember this edge so the following quads can connect to it.
	_last_index[UP] = first;
	_last_index[DOWN] = first + 1;
}
// Restarts the strip without a visual gap: duplicates the two vertices of the
// last emitted edge (so the restarted strip can carry its own color/UVs) and
// connects them to a new up/down pair with two clockwise triangles.
void LineBuilder::strip_new_quad(Vector2 up, Vector2 down, Color color, float uvx) {
	int vi = vertices.size();

	vertices.push_back(vertices[_last_index[UP]]);
	vertices.push_back(vertices[_last_index[DOWN]]);
	vertices.push_back(up);
	vertices.push_back(down);

	if (_interpolate_color) {
		colors.push_back(color);
		colors.push_back(color);
		colors.push_back(color);
		colors.push_back(color);
	}

	if (texture_mode != LINE_TEXTURE_NONE) {
		uvs.push_back(uvs[_last_index[UP]]);
		uvs.push_back(uvs[_last_index[DOWN]]);
		// Use explicit texture coordinates rather than relying on the numeric
		// values of the UP/DOWN enumerators (previously `Vector2(uvx, UP)` /
		// `Vector2(uvx, DOWN)`), matching the 0.f / 1.f convention used by
		// strip_begin() and strip_add_quad().
		uvs.push_back(Vector2(uvx, 0.f));
		uvs.push_back(Vector2(uvx, 1.f));
	}

	// Two clockwise triangles spanning the duplicated edge and the new edge.
	indices.push_back(vi);
	indices.push_back(vi + 3);
	indices.push_back(vi + 1);
	indices.push_back(vi);
	indices.push_back(vi + 2);
	indices.push_back(vi + 3);

	_last_index[UP] = vi + 2;
	_last_index[DOWN] = vi + 3;
}
// Extends the current strip with one quad, connecting the previously emitted
// edge (tracked in _last_index) to a new up/down vertex pair.
void LineBuilder::strip_add_quad(Vector2 up, Vector2 down, Color color, float uvx) {
	const int new_up = vertices.size();
	const int new_down = new_up + 1;

	vertices.push_back(up);
	vertices.push_back(down);

	if (_interpolate_color) {
		colors.push_back(color);
		colors.push_back(color);
	}

	if (texture_mode != LINE_TEXTURE_NONE) {
		uvs.push_back(Vector2(uvx, 0.f));
		uvs.push_back(Vector2(uvx, 1.f));
	}

	// Two clockwise triangles: (last_up, new_down, last_down)
	// and (last_up, new_up, new_down).
	indices.push_back(_last_index[UP]);
	indices.push_back(new_down);
	indices.push_back(_last_index[DOWN]);
	indices.push_back(_last_index[UP]);
	indices.push_back(new_up);
	indices.push_back(new_down);

	_last_index[UP] = new_up;
	_last_index[DOWN] = new_down;
}
// Extends the strip with a single triangle on the joint side given by
// `orientation`: the new vertex `up` is connected to the last emitted edge,
// and only the index on the opposite side advances (the pivot is shared).
void LineBuilder::strip_add_tri(Vector2 up, Orientation orientation) {
	int vi = vertices.size();

	vertices.push_back(up);

	if (_interpolate_color) {
		// Reuse the last pushed color so the joint blends with the segment.
		colors.push_back(colors[colors.size() - 1]);
	}

	Orientation opposite_orientation = orientation == UP ? DOWN : UP;

	if (texture_mode != LINE_TEXTURE_NONE) {
		// UVs are just one slice of the texture all along
		// (otherwise we can't share the bottom vertice)
		uvs.push_back(uvs[_last_index[opposite_orientation]]);
	}

	// Clockwise triangle: opposite-side vertex, new vertex, pivot vertex.
	indices.push_back(_last_index[opposite_orientation]);
	indices.push_back(vi);
	indices.push_back(_last_index[orientation]);

	_last_index[opposite_orientation] = vi;
}
// Extends the strip with a fan of triangles approximating an arc around
// `center`, sweeping `angle_delta` radians starting from the last emitted
// vertex. All fan triangles share the pivot vertex on the `orientation` side.
void LineBuilder::strip_add_arc(Vector2 center, float angle_delta, Orientation orientation) {
	// Take the two last vertices and extrude an arc made of triangles
	// that all share one of the initial vertices

	Orientation opposite_orientation = orientation == UP ? DOWN : UP;
	Vector2 vbegin = vertices[_last_index[opposite_orientation]] - center;
	float radius = vbegin.length();
	// round_precision is the number of segments for a half turn.
	float angle_step = Math_PI / static_cast<float>(round_precision);
	float steps = Math::abs(angle_delta) / angle_step;

	if (angle_delta < 0.f)
		angle_step = -angle_step;

	float t = vbegin.angle_to(Vector2(1, 0));
	float end_angle = t + angle_delta;
	Vector2 rpos(0, 0);

	// Arc vertices
	// `steps` is fractional: the loop emits floor(steps) intermediate
	// triangles, and the closing vertex below lands exactly on end_angle.
	for (int ti = 0; ti < steps; ++ti, t += angle_step) {
		rpos = center + Vector2(Math::cos(t), Math::sin(t)) * radius;
		strip_add_tri(rpos, orientation);
	}

	// Last arc vertice
	rpos = center + Vector2(Math::cos(end_angle), Math::sin(end_angle)) * radius;
	strip_add_tri(rpos, orientation);
}
// Emits a standalone triangle fan approximating an arc around `center`,
// using freshly allocated vertices (no sharing with the current strip) so
// that the UVs can be sampled, undistorted, from the square section uv_rect.
void LineBuilder::new_arc(Vector2 center, Vector2 vbegin, float angle_delta, Color color, Rect2 uv_rect) {
	// Make a standalone arc that doesn't use existing vertices,
	// with undistorted UVs from within a square section

	float radius = vbegin.length();
	float angle_step = Math_PI / static_cast<float>(round_precision);
	float steps = Math::abs(angle_delta) / angle_step;

	if (angle_delta < 0.f)
		angle_step = -angle_step;

	float t = vbegin.angle_to(Vector2(1, 0));
	float end_angle = t + angle_delta;
	Vector2 rpos(0, 0);
	// Angle used to sample UVs on the unit circle inside uv_rect.
	float tt_begin = -Math_PI / 2.f;
	float tt = tt_begin;

	// Center vertice
	int vi = vertices.size();
	vertices.push_back(center);
	if (_interpolate_color)
		colors.push_back(color);
	if (texture_mode != LINE_TEXTURE_NONE)
		uvs.push_back(interpolate(uv_rect, Vector2(0.5f, 0.5f)));

	// Arc vertices
	for (int ti = 0; ti < steps; ++ti, t += angle_step) {
		Vector2 sc = Vector2(Math::cos(t), Math::sin(t));
		rpos = center + sc * radius;

		vertices.push_back(rpos);
		if (_interpolate_color)
			colors.push_back(color);
		if (texture_mode != LINE_TEXTURE_NONE) {
			// Map the unit-circle point into uv_rect ([0,1] on each axis).
			Vector2 tsc = Vector2(Math::cos(tt), Math::sin(tt));
			uvs.push_back(interpolate(uv_rect, 0.5f * (tsc + Vector2(1.f, 1.f))));
			tt += angle_step;
		}
	}

	// Last arc vertice
	Vector2 sc = Vector2(Math::cos(end_angle), Math::sin(end_angle));
	rpos = center + sc * radius;
	vertices.push_back(rpos);
	if (_interpolate_color)
		colors.push_back(color);
	if (texture_mode != LINE_TEXTURE_NONE) {
		tt = tt_begin + angle_delta;
		Vector2 tsc = Vector2(Math::cos(tt), Math::sin(tt));
		uvs.push_back(interpolate(uv_rect, 0.5f * (tsc + Vector2(1.f, 1.f))));
	}

	// Make up triangles
	// Fan: one triangle (center, i, i+1) per consecutive pair of arc vertices.
	int vi0 = vi;
	for (int ti = 0; ti < steps; ++ti) {
		indices.push_back(vi0);
		indices.push_back(++vi);
		indices.push_back(vi + 1);
	}
}
| mit |
janhenke/corefx | src/System.Diagnostics.Debug/src/System/Diagnostics/Debug.Public.cs | 440 | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Diagnostics
{
    // Intentionally empty class, which is only used to define the public visibility
    public static partial class Debug
    {
        // Debug members are coming from the common Debug.cs files
    }
}
| mit |
kidney/guido | examples/template-helper/webpack.config.js | 329 | 'use strict';
module.exports = {
mode: 'development',
entry: {
index: './src/js/index.js',
},
externals: [
{
'handlebars/runtime': {
root: 'window.handlebars',
var: 'window.handlebars',
commonjs2: 'handlebars',
commonjs: 'handlebars',
amd: 'handlebars',
umd: 'handlebars',
},
},
],
};
| mit |
wieslawsoltes/Perspex | src/Skia/Avalonia.Skia/Helpers/DrawingContextHelper.cs | 1157 | using Avalonia.Platform;
using Avalonia.Rendering;
using SkiaSharp;
namespace Avalonia.Skia.Helpers
{
/// <summary>
/// Helper methods for creating Avalonia drawing contexts over externally owned Skia canvases.
/// </summary>
public class DrawingContextHelper
{
    /// <summary>
    /// Wrap Skia canvas in drawing context so we can use Avalonia api to render to external skia canvas
    /// this is useful in scenarios where canvas is not controlled by application, but received from another non avalonia api
    /// like: SKCanvas canvas = SKDocument.BeginPage(...);
    /// </summary>
    /// <param name="canvas">The externally owned Skia canvas to draw into.</param>
    /// <param name="dpi">The dots-per-inch scale forwarded to the drawing context.</param>
    /// <param name="visualBrushRenderer">Optional renderer used to rasterize visual brushes; may be null.</param>
    /// <returns>DrawingContext</returns>
    public static IDrawingContextImpl WrapSkiaCanvas(SKCanvas canvas, Vector dpi, IVisualBrushRenderer visualBrushRenderer = null)
    {
        var createInfo = new DrawingContextImpl.CreateInfo
        {
            Canvas = canvas,
            Dpi = dpi,
            VisualBrushRenderer = visualBrushRenderer,
            // Text LCD rendering is disabled because the external canvas's
            // target surface properties are not known here.
            DisableTextLcdRendering = true,
        };

        return new DrawingContextImpl(createInfo);
    }
}
}
| mit |
sevoku/oxyplot | Source/OxyPlot/Rendering/OxyColorExtensions.cs | 6281 | // --------------------------------------------------------------------------------------------------------------------
// <copyright file="OxyColorExtensions.cs" company="OxyPlot">
// Copyright (c) 2014 OxyPlot contributors
// </copyright>
// <summary>
// Provides extension methods for <see cref="OxyColor" />.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace OxyPlot
{
using System;
using System.Globalization;
using System.Linq;
using System.Reflection;
/// <summary>
/// Provides extension methods for <see cref="OxyColor" />.
/// </summary>
/// <remarks>These are pure methods. They could also be placed in the <see cref="OxyColor" /> type with a <see cref="System.Diagnostics.Contracts.PureAttribute" />.</remarks>
public static class OxyColorExtensions
{
    /// <summary>
    /// Changes the intensity.
    /// </summary>
    /// <param name="color">The color.</param>
    /// <param name="factor">The factor.</param>
    /// <returns>A color with the new intensity.</returns>
    public static OxyColor ChangeIntensity(this OxyColor color, double factor)
    {
        var hsv = color.ToHsv();
        hsv[2] *= factor;
        if (hsv[2] > 1.0)
        {
            hsv[2] = 1.0;
        }

        return OxyColor.FromHsv(hsv);
    }

    /// <summary>
    /// Changes the saturation.
    /// </summary>
    /// <param name="color">The color.</param>
    /// <param name="factor">The factor.</param>
    /// <returns>A color with the new saturation.</returns>
    public static OxyColor ChangeSaturation(this OxyColor color, double factor)
    {
        var hsv = color.ToHsv();
        hsv[1] *= factor;
        if (hsv[1] > 1.0)
        {
            hsv[1] = 1.0;
        }

        return OxyColor.FromHsv(hsv);
    }

    /// <summary>
    /// Calculates the complementary color.
    /// </summary>
    /// <param name="color">The color to convert.</param>
    /// <returns>The complementary color.</returns>
    public static OxyColor Complementary(this OxyColor color)
    {
        // http://en.wikipedia.org/wiki/Complementary_Color
        var hsv = color.ToHsv();
        double newHue = hsv[0] - 0.5;

        // clamp to [0,1]
        if (newHue < 0)
        {
            newHue += 1.0;
        }

        return OxyColor.FromHsv(newHue, hsv[1], hsv[2]);
    }

    /// <summary>
    /// Converts from a <see cref="OxyColor" /> to HSV values (double)
    /// </summary>
    /// <param name="color">The color.</param>
    /// <returns>Array of [Hue,Saturation,Value] in the range [0,1]</returns>
    public static double[] ToHsv(this OxyColor color)
    {
        byte r = color.R;
        byte g = color.G;
        byte b = color.B;

        byte min = Math.Min(Math.Min(r, g), b);
        byte v = Math.Max(Math.Max(r, g), b);
        double delta = v - min;

        double s = v.Equals(0) ? 0 : delta / v;
        double h = 0;

        if (s.Equals(0))
        {
            h = 0.0;
        }
        else
        {
            // Hue depends on which channel holds the maximum value.
            if (r == v)
            {
                h = (g - b) / delta;
            }
            else if (g == v)
            {
                h = 2 + ((b - r) / delta);
            }
            else if (b == v)
            {
                h = 4 + ((r - g) / delta);
            }

            h *= 60;
            if (h < 0.0)
            {
                h += 360;
            }
        }

        var hsv = new double[3];
        hsv[0] = h / 360.0;
        hsv[1] = s;
        hsv[2] = v / 255.0;
        return hsv;
    }

    /// <summary>
    /// Converts to an unsigned integer.
    /// </summary>
    /// <param name="color">The color.</param>
    /// <returns>The color as an unsigned integer.</returns>
    [CLSCompliant(false)]
    public static uint ToUint(this OxyColor color)
    {
        // Pack as 0xAARRGGBB.
        var u = (uint)color.A << 24;
        u += (uint)color.R << 16;
        u += (uint)color.G << 8;
        u += color.B;
        return u;
    }

    /// <summary>
    /// Converts an <see cref="OxyColor" /> to a string containing the ARGB byte values.
    /// </summary>
    /// <param name="color">The color.</param>
    /// <returns>A string that contains byte values of the alpha, red, green and blue components separated by comma.</returns>
    public static string ToByteString(this OxyColor color)
    {
        return string.Format(CultureInfo.InvariantCulture, "{0},{1},{2},{3}", color.A, color.R, color.G, color.B);
    }

    /// <summary>
    /// Returns C# code that generates this instance.
    /// </summary>
    /// <param name="color">The color.</param>
    /// <returns>The code.</returns>
    public static string ToCode(this OxyColor color)
    {
        var name = color.GetColorName();
        if (name != null)
        {
            return string.Format("OxyColors.{0}", name);
        }

        return string.Format("OxyColor.FromArgb({0}, {1}, {2}, {3})", color.A, color.R, color.G, color.B);
    }

    /// <summary>
    /// Gets the name of the color if it is defined in the <see cref="OxyColors" /> class.
    /// </summary>
    /// <param name="color">The color.</param>
    /// <returns>The color name or <c>null</c> if the color is not found.</returns>
    public static string GetColorName(this OxyColor color)
    {
        var t = typeof(OxyColors);
#if UNIVERSAL
        var colors = t.GetFields();
#else
        var colors = t.GetFields(BindingFlags.Public | BindingFlags.Static);
#endif
        var colorField = colors.FirstOrDefault(field => color.Equals(field.GetValue(null)));
        return colorField != null ? colorField.Name : null;
    }
}
} | mit |
awakener1986/myPetrolApp | node_modules/ionic-angular/es2015/platform/query-params.d.ts | 358 | import { OpaqueToken } from '@angular/core';
/**
* @private
*/
export declare class QueryParams {
data: {
[key: string]: any;
};
constructor(url: string);
get(key: string): any;
}
/**
* @private
*/
export declare const UrlToken: OpaqueToken;
/**
* @private
*/
export declare function setupQueryParams(url: string): QueryParams;
| mit |
colinskow/material | src/components/checkbox/checkbox.js | 6894 | /**
* @ngdoc module
* @name material.components.checkbox
* @description Checkbox module!
*/
// Register the checkbox component module and its directive.
angular
    .module('material.components.checkbox', ['material.core'])
    .directive('mdCheckbox', MdCheckboxDirective);
/**
* @ngdoc directive
* @name mdCheckbox
* @module material.components.checkbox
* @restrict E
*
* @description
* The checkbox directive is used like the normal [angular checkbox](https://docs.angularjs.org/api/ng/input/input%5Bcheckbox%5D).
*
* As per the [material design spec](http://www.google.com/design/spec/style/color.html#color-ui-color-application)
* the checkbox is in the accent color by default. The primary color palette may be used with
* the `md-primary` class.
*
* @param {string} ng-model Assignable angular expression to data-bind to.
* @param {string=} name Property name of the form under which the control is published.
* @param {expression=} ng-true-value The value to which the expression should be set when selected.
* @param {expression=} ng-false-value The value to which the expression should be set when not selected.
* @param {string=} ng-change Angular expression to be executed when input changes due to user interaction with the input element.
* @param {boolean=} md-no-ink Use of attribute indicates use of ripple ink effects
* @param {string=} aria-label Adds label to checkbox for accessibility.
* Defaults to checkbox's text. If no default text is found, a warning will be logged.
* @param {expression=} md-indeterminate This determines when the checkbox should be rendered as 'indeterminate'.
* If a truthy expression or no value is passed in the checkbox renders in the md-indeterminate state.
* If falsy expression is passed in it just looks like a normal unchecked checkbox.
* The indeterminate, checked, and unchecked states are mutually exclusive. A box cannot be in any two states at the same time.
* When a checkbox is indeterminate that overrides any checked/unchecked rendering logic.
*
* @usage
* <hljs lang="html">
* <md-checkbox ng-model="isChecked" aria-label="Finished?">
* Finished ?
* </md-checkbox>
*
* <md-checkbox md-no-ink ng-model="hasInk" aria-label="No Ink Effects">
* No Ink Effects
* </md-checkbox>
*
* <md-checkbox ng-disabled="true" ng-model="isDisabled" aria-label="Disabled">
* Disabled
* </md-checkbox>
*
* </hljs>
*
*/
function MdCheckboxDirective(inputDirective, $mdAria, $mdConstant, $mdTheming, $mdUtil, $timeout) {
inputDirective = inputDirective[0];
var CHECKED_CSS = 'md-checked';
return {
restrict: 'E',
transclude: true,
require: '?ngModel',
priority: 210, // Run before ngAria
template:
'<div class="_md-container" md-ink-ripple md-ink-ripple-checkbox>' +
'<div class="_md-icon"></div>' +
'</div>' +
'<div ng-transclude class="_md-label"></div>',
compile: compile
};
// **********************************************************
// Private Methods
// **********************************************************
function compile (tElement, tAttrs) {
var container = tElement.children();
var mdIndeterminateStateEnabled = tAttrs.hasOwnProperty('mdIndeterminate');
tAttrs.type = 'checkbox';
tAttrs.tabindex = tAttrs.tabindex || '0';
tElement.attr('role', tAttrs.type);
// Attach a click handler in compile in order to immediately stop propagation
// (especially for ng-click) when the checkbox is disabled.
tElement.on('click', function(event) {
if (this.hasAttribute('disabled')) {
event.stopImmediatePropagation();
}
});
// Redirect focus events to the root element, because IE11 is always focusing the container element instead
// of the md-checkbox element. This causes issues when using ngModelOptions: `updateOnBlur`
container.on('focus', function() {
tElement.focus();
});
return function postLink(scope, element, attr, ngModelCtrl) {
var isIndeterminate;
ngModelCtrl = ngModelCtrl || $mdUtil.fakeNgModel();
$mdTheming(element);
if (mdIndeterminateStateEnabled) {
setIndeterminateState();
scope.$watch(attr.mdIndeterminate, setIndeterminateState);
}
if (attr.ngChecked) {
scope.$watch(
scope.$eval.bind(scope, attr.ngChecked),
ngModelCtrl.$setViewValue.bind(ngModelCtrl)
);
}
$$watchExpr('ngDisabled', 'tabindex', {
true: '-1',
false: attr.tabindex
});
$mdAria.expectWithText(element, 'aria-label');
// Reuse the original input[type=checkbox] directive from Angular core.
// This is a bit hacky as we need our own event listener and own render
// function.
inputDirective.link.pre(scope, {
on: angular.noop,
0: {}
}, attr, [ngModelCtrl]);
scope.mouseActive = false;
element.on('click', listener)
.on('keypress', keypressHandler)
.on('mousedown', function() {
scope.mouseActive = true;
$timeout(function() {
scope.mouseActive = false;
}, 100);
})
.on('focus', function() {
if (scope.mouseActive === false) {
element.addClass('md-focused');
}
})
.on('blur', function() {
element.removeClass('md-focused');
});
ngModelCtrl.$render = render;
function $$watchExpr(expr, htmlAttr, valueOpts) {
if (attr[expr]) {
scope.$watch(attr[expr], function(val) {
if (valueOpts[val]) {
element.attr(htmlAttr, valueOpts[val]);
}
});
}
}
function keypressHandler(ev) {
var keyCode = ev.which || ev.keyCode;
if (keyCode === $mdConstant.KEY_CODE.SPACE || keyCode === $mdConstant.KEY_CODE.ENTER) {
ev.preventDefault();
if (!element.hasClass('md-focused')) {
element.addClass('md-focused');
}
listener(ev);
}
}
function listener(ev) {
if (element[0].hasAttribute('disabled')) {
return;
}
scope.$apply(function() {
// Toggle the checkbox value...
var viewValue = attr.ngChecked ? attr.checked : !ngModelCtrl.$viewValue;
ngModelCtrl.$setViewValue( viewValue, ev && ev.type);
ngModelCtrl.$render();
});
}
function render() {
if(ngModelCtrl.$viewValue && !isIndeterminate) {
element.addClass(CHECKED_CSS);
} else {
element.removeClass(CHECKED_CSS);
}
}
function setIndeterminateState(newValue) {
isIndeterminate = newValue !== false;
if (isIndeterminate) {
element.attr('aria-checked', 'mixed');
}
element.toggleClass('md-indeterminate', isIndeterminate);
}
};
}
}
| mit |
seblm/design-pattern-reloaded | src/main/java/monad/monad3.java | 1823 | package monad;
import java.util.Objects;
import java.util.function.Function;
import java.util.function.Predicate;
/**
 * Example of a validation "monad": a value is threaded through a chain of
 * validations and the outcome is only materialized by {@link Validator#get()},
 * which either returns the value or throws the recorded error.
 */
public interface monad3 {

    /**
     * Holds a value of type T together with an optional validation error.
     * Instances are immutable; each validation step returns a Validator.
     */
    public class Validator<T> {
        private final T t;
        private final IllegalStateException error;

        private Validator(T t, IllegalStateException error) {
            this.t = t;
            this.error = error;
        }

        /**
         * Returns the validated value, or throws the first recorded
         * validation error.
         */
        public T get() throws IllegalStateException {
            if (error == null) {
                return t;
            }
            throw error;
        }

        /**
         * Tests the value against {@code validation}; on failure, records an
         * error with {@code message}. Once a validation has failed, later
         * validations are skipped.
         */
        public Validator<T> validate(Predicate<? super T> validation, String message) {
            if (error != null) {
                // Short-circuit: keep the first error and do not run later
                // predicates, which may assume earlier checks passed (e.g.
                // calling isEmpty() on a name already found to be null would
                // throw a NullPointerException instead of reporting the error).
                return this;
            }
            if (!validation.test(t)) {
                return new Validator<>(t, new IllegalStateException(message));
            }
            return this;
        }

        /**
         * Validates a projection of the value, e.g. a single bean property.
         */
        public <U> Validator<T> validate(Function<? super T, ? extends U> projection, Predicate<? super U> validation, String message) {
            //return validate(t -> validation.test(projection.apply(t)), message);
            return validate(projection.andThen(validation::test)::apply, message);
        }

        /** Wraps a non-null value into a fresh, error-free Validator. */
        public static <T> Validator<T> of(T t) {
            Objects.requireNonNull(t);
            return new Validator<>(t, null);
        }
    }

    /** Simple immutable bean used to demonstrate the validator. */
    public class User {
        private final String name;
        private final int age;

        public User(String name, int age) {
            this.name = name;
            this.age = age;
        }

        public String getName() {
            return name;
        }

        public int getAge() {
            return age;
        }
    }

    public static void main(String[] args) {
        User user = new User("bob", 12);
        //User user = new User("", -12);
        User validatedUser = Validator.of(user)
                .validate(User::getName, Objects::nonNull, "name is null")
                .validate(User::getName, name -> !name.isEmpty(), "name is empty")
                .validate(User::getAge, age -> age > 0 && age < 150, "age is between 0 and 150")
                .get();
    }
}
| mit |
skygr/angular.js | src/ng/directive/ngEventDirs.js | 12027 | 'use strict';
/**
* @ngdoc directive
* @name ngClick
*
* @description
* The ngClick directive allows you to specify custom behavior when
* an element is clicked.
*
* @element ANY
* @priority 0
* @param {expression} ngClick {@link guide/expression Expression} to evaluate upon
* click. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<button ng-click="count = count + 1" ng-init="count=0">
Increment
</button>
count: {{count}}
</file>
<file name="protractor.js" type="protractor">
it('should check ng-click', function() {
expect(element(by.binding('count')).getText()).toMatch('0');
element(by.css('button')).click();
expect(element(by.binding('count')).getText()).toMatch('1');
});
</file>
</example>
*/
/*
* A directive that allows creation of custom onclick handlers that are defined as angular
* expressions and are compiled and executed within the current scope.
*
* Events that are handled via these handler are always configured not to propagate further.
*/
var ngEventDirectives = {};

// For each supported DOM event, generate an `ngEventName` directive that
// evaluates the bound expression inside $apply when the event fires.
forEach(
  'click dblclick mousedown mouseup mouseover mouseout mousemove mouseenter mouseleave keydown keyup keypress submit focus blur copy cut paste'.split(' '),
  function(name) {
    var directiveName = directiveNormalize('ng-' + name);
    ngEventDirectives[directiveName] = ['$parse', function($parse) {
      return {
        compile: function($element, attr) {
          // Parse the expression once at compile time; reuse it per element.
          var fn = $parse(attr[directiveName]);
          return function ngEventHandler(scope, element) {
            element.on(lowercase(name), function(event) {
              scope.$apply(function() {
                // The DOM event is exposed to the expression as $event.
                fn(scope, {$event:event});
              });
            });
          };
        }
      };
    }];
  }
);
/**
* @ngdoc directive
* @name ngDblclick
*
* @description
* The `ngDblclick` directive allows you to specify custom behavior on a dblclick event.
*
* @element ANY
* @priority 0
* @param {expression} ngDblclick {@link guide/expression Expression} to evaluate upon
* a dblclick. (The Event object is available as `$event`)
*
* @example
<example>
<file name="index.html">
<button ng-dblclick="count = count + 1" ng-init="count=0">
Increment (on double click)
</button>
count: {{count}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngMousedown
*
* @description
* The ngMousedown directive allows you to specify custom behavior on mousedown event.
*
* @element ANY
* @priority 0
* @param {expression} ngMousedown {@link guide/expression Expression} to evaluate upon
* mousedown. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<button ng-mousedown="count = count + 1" ng-init="count=0">
Increment (on mouse down)
</button>
count: {{count}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngMouseup
*
* @description
* Specify custom behavior on mouseup event.
*
* @element ANY
* @priority 0
* @param {expression} ngMouseup {@link guide/expression Expression} to evaluate upon
* mouseup. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<button ng-mouseup="count = count + 1" ng-init="count=0">
Increment (on mouse up)
</button>
count: {{count}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngMouseover
*
* @description
* Specify custom behavior on mouseover event.
*
* @element ANY
* @priority 0
* @param {expression} ngMouseover {@link guide/expression Expression} to evaluate upon
* mouseover. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<button ng-mouseover="count = count + 1" ng-init="count=0">
Increment (when mouse is over)
</button>
count: {{count}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngMouseenter
*
* @description
* Specify custom behavior on mouseenter event.
*
* @element ANY
* @priority 0
* @param {expression} ngMouseenter {@link guide/expression Expression} to evaluate upon
* mouseenter. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<button ng-mouseenter="count = count + 1" ng-init="count=0">
Increment (when mouse enters)
</button>
count: {{count}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngMouseleave
*
* @description
* Specify custom behavior on mouseleave event.
*
* @element ANY
* @priority 0
* @param {expression} ngMouseleave {@link guide/expression Expression} to evaluate upon
* mouseleave. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<button ng-mouseleave="count = count + 1" ng-init="count=0">
Increment (when mouse leaves)
</button>
count: {{count}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngMousemove
*
* @description
* Specify custom behavior on mousemove event.
*
* @element ANY
* @priority 0
* @param {expression} ngMousemove {@link guide/expression Expression} to evaluate upon
* mousemove. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<button ng-mousemove="count = count + 1" ng-init="count=0">
Increment (when mouse moves)
</button>
count: {{count}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngKeydown
*
* @description
* Specify custom behavior on keydown event.
*
* @element ANY
* @priority 0
* @param {expression} ngKeydown {@link guide/expression Expression} to evaluate upon
* keydown. (Event object is available as `$event` and can be interrogated for keyCode, altKey, etc.)
*
* @example
<example>
<file name="index.html">
<input ng-keydown="count = count + 1" ng-init="count=0">
key down count: {{count}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngKeyup
*
* @description
* Specify custom behavior on keyup event.
*
* @element ANY
* @priority 0
* @param {expression} ngKeyup {@link guide/expression Expression} to evaluate upon
* keyup. (Event object is available as `$event` and can be interrogated for keyCode, altKey, etc.)
*
* @example
<example>
<file name="index.html">
<p>Typing in the input box below updates the key count</p>
<input ng-keyup="count = count + 1" ng-init="count=0"> key up count: {{count}}
<p>Typing in the input box below updates the keycode</p>
<input ng-keyup="event=$event">
<p>event keyCode: {{ event.keyCode }}</p>
<p>event altKey: {{ event.altKey }}</p>
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngKeypress
*
* @description
* Specify custom behavior on keypress event.
*
* @element ANY
* @param {expression} ngKeypress {@link guide/expression Expression} to evaluate upon
* keypress. ({@link guide/expression#-event- Event object is available as `$event`}
* and can be interrogated for keyCode, altKey, etc.)
*
* @example
<example>
<file name="index.html">
<input ng-keypress="count = count + 1" ng-init="count=0">
key press count: {{count}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngSubmit
*
* @description
* Enables binding angular expressions to onsubmit events.
*
* Additionally it prevents the default action (which for form means sending the request to the
* server and reloading the current page), but only if the form does not contain `action`,
* `data-action`, or `x-action` attributes.
*
* @element form
* @priority 0
* @param {expression} ngSubmit {@link guide/expression Expression} to eval.
* ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example module="submitExample">
<file name="index.html">
<script>
angular.module('submitExample', [])
.controller('ExampleController', ['$scope', function($scope) {
$scope.list = [];
$scope.text = 'hello';
$scope.submit = function() {
if ($scope.text) {
$scope.list.push(this.text);
$scope.text = '';
}
};
}]);
</script>
<form ng-submit="submit()" ng-controller="ExampleController">
Enter text and hit enter:
<input type="text" ng-model="text" name="text" />
<input type="submit" id="submit" value="Submit" />
<pre>list={{list}}</pre>
</form>
</file>
<file name="protractor.js" type="protractor">
it('should check ng-submit', function() {
expect(element(by.binding('list')).getText()).toBe('list=[]');
element(by.css('#submit')).click();
expect(element(by.binding('list')).getText()).toContain('hello');
expect(element(by.model('text')).getAttribute('value')).toBe('');
});
it('should ignore empty strings', function() {
expect(element(by.binding('list')).getText()).toBe('list=[]');
element(by.css('#submit')).click();
element(by.css('#submit')).click();
expect(element(by.binding('list')).getText()).toContain('hello');
});
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngFocus
*
* @description
* Specify custom behavior on focus event.
*
* @element window, input, select, textarea, a
* @priority 0
* @param {expression} ngFocus {@link guide/expression Expression} to evaluate upon
* focus. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
* See {@link ng.directive:ngClick ngClick}
*/
/**
* @ngdoc directive
* @name ngBlur
*
* @description
* Specify custom behavior on blur event.
*
* @element window, input, select, textarea, a
* @priority 0
* @param {expression} ngBlur {@link guide/expression Expression} to evaluate upon
* blur. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
* See {@link ng.directive:ngClick ngClick}
*/
/**
* @ngdoc directive
* @name ngCopy
*
* @description
* Specify custom behavior on copy event.
*
* @element window, input, select, textarea, a
* @priority 0
* @param {expression} ngCopy {@link guide/expression Expression} to evaluate upon
* copy. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<input ng-copy="copied=true" ng-init="copied=false; value='copy me'" ng-model="value">
copied: {{copied}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngCut
*
* @description
* Specify custom behavior on cut event.
*
* @element window, input, select, textarea, a
* @priority 0
* @param {expression} ngCut {@link guide/expression Expression} to evaluate upon
* cut. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<input ng-cut="cut=true" ng-init="cut=false; value='cut me'" ng-model="value">
cut: {{cut}}
</file>
</example>
*/
/**
* @ngdoc directive
* @name ngPaste
*
* @description
* Specify custom behavior on paste event.
*
* @element window, input, select, textarea, a
* @priority 0
* @param {expression} ngPaste {@link guide/expression Expression} to evaluate upon
* paste. ({@link guide/expression#-event- Event object is available as `$event`})
*
* @example
<example>
<file name="index.html">
<input ng-paste="paste=true" ng-init="paste=false" placeholder='paste here'>
pasted: {{paste}}
</file>
</example>
*/
| mit |
laijingtao/landlab | docs/model_grid_guide/diffusion_with_radial_model_grid.py | 2826 | #! /usr/env/python
"""
2D numerical model of diffusion, implemented using ModelGrid.
Provides example of a radial grid.
Last updated GT May 2014
"""
from landlab import RadialModelGrid
import pylab
def main():
"""
In this simple tutorial example, the main function does all the work:
it sets the parameter values, creates and initializes a grid, sets up
the state variables, runs the main loop, and cleans up.
"""
import time
start_time = time.time()
# INITIALIZE
# User-defined parameter values
num_shells=10 # number of radial "shells" in the grid
#numcols = 30 # not needed for a radial model grid
dr = 10.0 # grid cell spacing
kd = 0.01 # diffusivity coefficient, in m2/yr
uplift_rate = 0.0001 # baselevel/uplift rate, in m/yr
num_time_steps = 1000 # number of time steps in run
# Derived parameters
dt = 0.1*dr**2 / kd # time-step size set by CFL condition
# Create and initialize a radial model grid
mg = RadialModelGrid(num_shells, dr)
# Set up scalar values: elevation and time rate of change of elevation.
# Note use of CSDMS standard names for these variables.
z = mg.add_zeros('node', 'Land_surface__elevation')
dzdt = mg.add_zeros('node', 'Land_surface__time_derivative_of_elevation')
# Get a list of the core nodes
core_nodes = mg.core_nodes
# Display a message
print( 'Running diffusion_with_radial_model_grid.py' )
print( 'Time-step size has been set to ' + str( dt ) + ' years.' )
# RUN
# Main loop
for i in range(0, num_time_steps):
# Calculate the gradients and sediment fluxes
g = mg.calculate_gradients_at_active_links(z)
qs = -kd*g
# Calculate the net deposition/erosion rate at each node
dqsds = mg.calculate_flux_divergence_at_nodes(qs)
# Calculate the total rate of elevation change
dzdt = uplift_rate - dqsds
# Update the elevations
z[core_nodes] = z[core_nodes] + dzdt[core_nodes] * dt
# FINALIZE
# Plot the points, colored by elevation
import numpy
maxelev = numpy.amax(z)
for i in range(mg.number_of_nodes):
mycolor = str(z[i]/maxelev)
pylab.plot(mg.node_x[i], mg.node_y[i], 'o', color=mycolor, ms=10)
mg.display_grid()
# Plot the points from the side, with analytical solution
pylab.figure(3)
L = num_shells*dr
xa = numpy.arange(-L, L+dr, dr)
z_analytical = (uplift_rate/(4*kd))*(L*L-xa*xa)
pylab.plot(mg.node_x, z, 'o')
pylab.plot(xa, z_analytical, 'r-')
pylab.xlabel('Distance from center (m)')
pylab.ylabel('Height (m)')
pylab.show()
end_time = time.time()
print 'Elapsed time',end_time-start_time
if __name__ == "__main__":
main()
| mit |
brendankowitz/azure-webjobs-sdk | src/Microsoft.Azure.WebJobs.ServiceBus/Listeners/ServiceBusTriggerExecutor.cs | 1102 | // Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.WebJobs.Host.Executors;
using Microsoft.ServiceBus.Messaging;
namespace Microsoft.Azure.WebJobs.ServiceBus.Listeners
{
internal class ServiceBusTriggerExecutor
{
private readonly ITriggeredFunctionExecutor _innerExecutor;
public ServiceBusTriggerExecutor(ITriggeredFunctionExecutor innerExecutor)
{
_innerExecutor = innerExecutor;
}
public async Task<FunctionResult> ExecuteAsync(BrokeredMessage value, CancellationToken cancellationToken)
{
Guid? parentId = ServiceBusCausalityHelper.GetOwner(value);
TriggeredFunctionData input = new TriggeredFunctionData
{
ParentId = parentId,
TriggerValue = value
};
return await _innerExecutor.TryExecuteAsync(input, cancellationToken);
}
}
}
| mit |
sunyardtc/git | node_modules/strong-agent/lib/node-measured/test/unit/util/test-Stopwatch.js | 1132 | var common = require('../../common');
var test = require('utest');
var assert = require('assert');
var Stopwatch = common.measured.Stopwatch;
var sinon = require('sinon');
var watch;
var clock;
test('Stopwatch', {
before: function() {
clock = sinon.useFakeTimers();
watch = new Stopwatch();
},
after: function() {
clock.restore();
},
'returns time on end': function() {
clock.tick(10);
var watch = new Stopwatch();
clock.tick(100);
var elapsed = watch.end();
assert.equal(elapsed, 100);
},
'emits time on end': function() {
var watch = new Stopwatch();
clock.tick(20);
var time;
watch.on('end', function(_time) {
time = _time;
});
watch.end();
assert.equal(time, 20);
},
'becomes useless after being ended once': function() {
var watch = new Stopwatch();
clock.tick(20);
var time;
watch.on('end', function(_time) {
time = _time;
});
assert.equal(watch.end(), 20);
assert.equal(time, 20);
time = null;
assert.equal(watch.end(), undefined);
assert.equal(time, null);
},
});
| mit |
Yasu31/TK_1720 | app/loverduck_app/vendor/bundle/ruby/2.4.0/gems/activesupport-5.1.4/lib/active_support/callbacks.rb | 28923 | require "active_support/concern"
require "active_support/descendants_tracker"
require "active_support/core_ext/array/extract_options"
require "active_support/core_ext/class/attribute"
require "active_support/core_ext/kernel/reporting"
require "active_support/core_ext/kernel/singleton_class"
require "active_support/core_ext/string/filters"
require "active_support/deprecation"
require "thread"
module ActiveSupport
# Callbacks are code hooks that are run at key points in an object's life cycle.
# The typical use case is to have a base class define a set of callbacks
# relevant to the other functionality it supplies, so that subclasses can
# install callbacks that enhance or modify the base functionality without
# needing to override or redefine methods of the base class.
#
# Mixing in this module allows you to define the events in the object's
# life cycle that will support callbacks (via +ClassMethods.define_callbacks+),
# set the instance methods, procs, or callback objects to be called (via
# +ClassMethods.set_callback+), and run the installed callbacks at the
# appropriate times (via +run_callbacks+).
#
# Three kinds of callbacks are supported: before callbacks, run before a
# certain event; after callbacks, run after the event; and around callbacks,
# blocks that surround the event, triggering it when they yield. Callback code
# can be contained in instance methods, procs or lambdas, or callback objects
# that respond to certain predetermined methods. See +ClassMethods.set_callback+
# for details.
#
# class Record
# include ActiveSupport::Callbacks
# define_callbacks :save
#
# def save
# run_callbacks :save do
# puts "- save"
# end
# end
# end
#
# class PersonRecord < Record
# set_callback :save, :before, :saving_message
# def saving_message
# puts "saving..."
# end
#
# set_callback :save, :after do |object|
# puts "saved"
# end
# end
#
# person = PersonRecord.new
# person.save
#
# Output:
# saving...
# - save
# saved
module Callbacks
extend Concern
included do
extend ActiveSupport::DescendantsTracker
class_attribute :__callbacks, instance_writer: false
self.__callbacks ||= {}
end
CALLBACK_FILTER_TYPES = [:before, :after, :around]
# Runs the callbacks for the given event.
#
# Calls the before and around callbacks in the order they were set, yields
# the block (if given one), and then runs the after callbacks in reverse
# order.
#
# If the callback chain was halted, returns +false+. Otherwise returns the
# result of the block, +nil+ if no callbacks have been set, or +true+
# if callbacks have been set but no block is given.
#
# run_callbacks :save do
# save
# end
#
#--
#
# As this method is used in many places, and often wraps large portions of
# user code, it has an additional design goal of minimizing its impact on
# the visible call stack. An exception from inside a :before or :after
# callback can be as noisy as it likes -- but when control has passed
# smoothly through and into the supplied block, we want as little evidence
# as possible that we were here.
def run_callbacks(kind)
callbacks = __callbacks[kind.to_sym]
if callbacks.empty?
yield if block_given?
else
env = Filters::Environment.new(self, false, nil)
next_sequence = callbacks.compile
invoke_sequence = Proc.new do
skipped = nil
while true
current = next_sequence
current.invoke_before(env)
if current.final?
env.value = !env.halted && (!block_given? || yield)
elsif current.skip?(env)
(skipped ||= []) << current
next_sequence = next_sequence.nested
next
else
next_sequence = next_sequence.nested
begin
target, block, method, *arguments = current.expand_call_template(env, invoke_sequence)
target.send(method, *arguments, &block)
ensure
next_sequence = current
end
end
current.invoke_after(env)
skipped.pop.invoke_after(env) while skipped && skipped.first
break env.value
end
end
# Common case: no 'around' callbacks defined
if next_sequence.final?
next_sequence.invoke_before(env)
env.value = !env.halted && (!block_given? || yield)
next_sequence.invoke_after(env)
env.value
else
invoke_sequence.call
end
end
end
private
# A hook invoked every time a before callback is halted.
# This can be overridden in ActiveSupport::Callbacks implementors in order
# to provide better debugging/logging.
def halted_callback_hook(filter)
end
module Conditionals # :nodoc:
class Value
def initialize(&block)
@block = block
end
def call(target, value); @block.call(value); end
end
end
module Filters
Environment = Struct.new(:target, :halted, :value)
class Before
def self.build(callback_sequence, user_callback, user_conditions, chain_config, filter)
halted_lambda = chain_config[:terminator]
if user_conditions.any?
halting_and_conditional(callback_sequence, user_callback, user_conditions, halted_lambda, filter)
else
halting(callback_sequence, user_callback, halted_lambda, filter)
end
end
def self.halting_and_conditional(callback_sequence, user_callback, user_conditions, halted_lambda, filter)
callback_sequence.before do |env|
target = env.target
value = env.value
halted = env.halted
if !halted && user_conditions.all? { |c| c.call(target, value) }
result_lambda = -> { user_callback.call target, value }
env.halted = halted_lambda.call(target, result_lambda)
if env.halted
target.send :halted_callback_hook, filter
end
end
env
end
end
private_class_method :halting_and_conditional
def self.halting(callback_sequence, user_callback, halted_lambda, filter)
callback_sequence.before do |env|
target = env.target
value = env.value
halted = env.halted
unless halted
result_lambda = -> { user_callback.call target, value }
env.halted = halted_lambda.call(target, result_lambda)
if env.halted
target.send :halted_callback_hook, filter
end
end
env
end
end
private_class_method :halting
end
class After
def self.build(callback_sequence, user_callback, user_conditions, chain_config)
if chain_config[:skip_after_callbacks_if_terminated]
if user_conditions.any?
halting_and_conditional(callback_sequence, user_callback, user_conditions)
else
halting(callback_sequence, user_callback)
end
else
if user_conditions.any?
conditional callback_sequence, user_callback, user_conditions
else
simple callback_sequence, user_callback
end
end
end
def self.halting_and_conditional(callback_sequence, user_callback, user_conditions)
callback_sequence.after do |env|
target = env.target
value = env.value
halted = env.halted
if !halted && user_conditions.all? { |c| c.call(target, value) }
user_callback.call target, value
end
env
end
end
private_class_method :halting_and_conditional
def self.halting(callback_sequence, user_callback)
callback_sequence.after do |env|
unless env.halted
user_callback.call env.target, env.value
end
env
end
end
private_class_method :halting
def self.conditional(callback_sequence, user_callback, user_conditions)
callback_sequence.after do |env|
target = env.target
value = env.value
if user_conditions.all? { |c| c.call(target, value) }
user_callback.call target, value
end
env
end
end
private_class_method :conditional
def self.simple(callback_sequence, user_callback)
callback_sequence.after do |env|
user_callback.call env.target, env.value
env
end
end
private_class_method :simple
end
end
class Callback #:nodoc:#
def self.build(chain, filter, kind, options)
if filter.is_a?(String)
raise ArgumentError, <<-MSG.squish
Passing string to define a callback is not supported. See the `.set_callback`
documentation to see supported values.
MSG
end
new chain.name, filter, kind, options, chain.config
end
attr_accessor :kind, :name
attr_reader :chain_config
def initialize(name, filter, kind, options, chain_config)
@chain_config = chain_config
@name = name
@kind = kind
@filter = filter
@key = compute_identifier filter
@if = Array(options[:if])
@unless = Array(options[:unless])
end
def filter; @key; end
def raw_filter; @filter; end
def merge_conditional_options(chain, if_option:, unless_option:)
options = {
if: @if.dup,
unless: @unless.dup
}
options[:if].concat Array(unless_option)
options[:unless].concat Array(if_option)
self.class.build chain, @filter, @kind, options
end
def matches?(_kind, _filter)
@kind == _kind && filter == _filter
end
def duplicates?(other)
case @filter
when Symbol, String
matches?(other.kind, other.filter)
else
false
end
end
# Wraps code with filter
def apply(callback_sequence)
user_conditions = conditions_lambdas
user_callback = CallTemplate.build(@filter, self)
case kind
when :before
Filters::Before.build(callback_sequence, user_callback.make_lambda, user_conditions, chain_config, @filter)
when :after
Filters::After.build(callback_sequence, user_callback.make_lambda, user_conditions, chain_config)
when :around
callback_sequence.around(user_callback, user_conditions)
end
end
def current_scopes
Array(chain_config[:scope]).map { |s| public_send(s) }
end
private
def compute_identifier(filter)
case filter
when String, ::Proc
filter.object_id
else
filter
end
end
def conditions_lambdas
@if.map { |c| CallTemplate.build(c, self).make_lambda } +
@unless.map { |c| CallTemplate.build(c, self).inverted_lambda }
end
end
# A future invocation of user-supplied code (either as a callback,
# or a condition filter).
class CallTemplate # :nodoc:
def initialize(target, method, arguments, block)
@override_target = target
@method_name = method
@arguments = arguments
@override_block = block
end
# Return the parts needed to make this call, with the given
# input values.
#
# Returns an array of the form:
#
# [target, block, method, *arguments]
#
# This array can be used as such:
#
# target.send(method, *arguments, &block)
#
# The actual invocation is left up to the caller to minimize
# call stack pollution.
def expand(target, value, block)
result = @arguments.map { |arg|
case arg
when :value; value
when :target; target
when :block; block || raise(ArgumentError)
end
}
result.unshift @method_name
result.unshift @override_block || block
result.unshift @override_target || target
# target, block, method, *arguments = result
# target.send(method, *arguments, &block)
result
end
# Return a lambda that will make this call when given the input
# values.
def make_lambda
lambda do |target, value, &block|
target, block, method, *arguments = expand(target, value, block)
target.send(method, *arguments, &block)
end
end
# Return a lambda that will make this call when given the input
# values, but then return the boolean inverse of that result.
def inverted_lambda
lambda do |target, value, &block|
target, block, method, *arguments = expand(target, value, block)
! target.send(method, *arguments, &block)
end
end
# Filters support:
#
# Symbols:: A method to call.
# Strings:: Some content to evaluate.
# Procs:: A proc to call with the object.
# Objects:: An object with a <tt>before_foo</tt> method on it to call.
#
# All of these objects are converted into a CallTemplate and handled
# the same after this point.
def self.build(filter, callback)
case filter
when Symbol
new(nil, filter, [], nil)
when String
new(nil, :instance_exec, [:value], compile_lambda(filter))
when Conditionals::Value
new(filter, :call, [:target, :value], nil)
when ::Proc
if filter.arity > 1
new(nil, :instance_exec, [:target, :block], filter)
elsif filter.arity > 0
new(nil, :instance_exec, [:target], filter)
else
new(nil, :instance_exec, [], filter)
end
else
method_to_call = callback.current_scopes.join("_")
new(filter, method_to_call, [:target], nil)
end
end
def self.compile_lambda(filter)
eval("lambda { |value| #{filter} }")
end
end
# Execute before and after filters in a sequence instead of
# chaining them with nested lambda calls, see:
# https://github.com/rails/rails/issues/18011
class CallbackSequence # :nodoc:
def initialize(nested = nil, call_template = nil, user_conditions = nil)
@nested = nested
@call_template = call_template
@user_conditions = user_conditions
@before = []
@after = []
end
def before(&before)
@before.unshift(before)
self
end
def after(&after)
@after.push(after)
self
end
def around(call_template, user_conditions)
CallbackSequence.new(self, call_template, user_conditions)
end
def skip?(arg)
arg.halted || !@user_conditions.all? { |c| c.call(arg.target, arg.value) }
end
def nested
@nested
end
def final?
!@call_template
end
def expand_call_template(arg, block)
@call_template.expand(arg.target, arg.value, block)
end
def invoke_before(arg)
@before.each { |b| b.call(arg) }
end
def invoke_after(arg)
@after.each { |a| a.call(arg) }
end
end
# An Array with a compile method.
class CallbackChain #:nodoc:#
include Enumerable
attr_reader :name, :config
def initialize(name, config)
@name = name
@config = {
scope: [:kind],
terminator: default_terminator
}.merge!(config)
@chain = []
@callbacks = nil
@mutex = Mutex.new
end
def each(&block); @chain.each(&block); end
def index(o); @chain.index(o); end
def empty?; @chain.empty?; end
def insert(index, o)
@callbacks = nil
@chain.insert(index, o)
end
def delete(o)
@callbacks = nil
@chain.delete(o)
end
def clear
@callbacks = nil
@chain.clear
self
end
def initialize_copy(other)
@callbacks = nil
@chain = other.chain.dup
@mutex = Mutex.new
end
def compile
@callbacks || @mutex.synchronize do
final_sequence = CallbackSequence.new
@callbacks ||= @chain.reverse.inject(final_sequence) do |callback_sequence, callback|
callback.apply callback_sequence
end
end
end
def append(*callbacks)
callbacks.each { |c| append_one(c) }
end
def prepend(*callbacks)
callbacks.each { |c| prepend_one(c) }
end
protected
def chain; @chain; end
private
def append_one(callback)
@callbacks = nil
remove_duplicates(callback)
@chain.push(callback)
end
def prepend_one(callback)
@callbacks = nil
remove_duplicates(callback)
@chain.unshift(callback)
end
def remove_duplicates(callback)
@callbacks = nil
@chain.delete_if { |c| callback.duplicates?(c) }
end
def default_terminator
Proc.new do |target, result_lambda|
terminate = true
catch(:abort) do
result_lambda.call if result_lambda.is_a?(Proc)
terminate = false
end
terminate
end
end
end
module ClassMethods
def normalize_callback_params(filters, block) # :nodoc:
type = CALLBACK_FILTER_TYPES.include?(filters.first) ? filters.shift : :before
options = filters.extract_options!
filters.unshift(block) if block
[type, filters, options.dup]
end
# This is used internally to append, prepend and skip callbacks to the
# CallbackChain.
def __update_callbacks(name) #:nodoc:
([self] + ActiveSupport::DescendantsTracker.descendants(self)).reverse_each do |target|
chain = target.get_callbacks name
yield target, chain.dup
end
end
# Install a callback for the given event.
#
# set_callback :save, :before, :before_method
# set_callback :save, :after, :after_method, if: :condition
# set_callback :save, :around, ->(r, block) { stuff; result = block.call; stuff }
#
# The second argument indicates whether the callback is to be run +:before+,
# +:after+, or +:around+ the event. If omitted, +:before+ is assumed. This
# means the first example above can also be written as:
#
# set_callback :save, :before_method
#
# The callback can be specified as a symbol naming an instance method; as a
# proc, lambda, or block; or as an object that responds to a certain method
# determined by the <tt>:scope</tt> argument to +define_callbacks+.
#
# If a proc, lambda, or block is given, its body is evaluated in the context
# of the current object. It can also optionally accept the current object as
# an argument.
#
# Before and around callbacks are called in the order that they are set;
# after callbacks are called in the reverse order.
#
# Around callbacks can access the return value from the event, if it
# wasn't halted, from the +yield+ call.
#
# ===== Options
#
# * <tt>:if</tt> - A symbol, a string (deprecated) or an array of symbols,
# each naming an instance method or a proc; the callback will be called
# only when they all return a true value.
# * <tt>:unless</tt> - A symbol, a string (deprecated) or an array of symbols,
# each naming an instance method or a proc; the callback will be called
# only when they all return a false value.
# * <tt>:prepend</tt> - If +true+, the callback will be prepended to the
# existing chain rather than appended.
def set_callback(name, *filter_list, &block)
type, filters, options = normalize_callback_params(filter_list, block)
if options[:if].is_a?(String) || options[:unless].is_a?(String)
ActiveSupport::Deprecation.warn(<<-MSG.squish)
Passing string to be evaluated in :if and :unless conditional
options is deprecated and will be removed in Rails 5.2 without
replacement. Pass a symbol for an instance method, or a lambda,
proc or block, instead.
MSG
end
self_chain = get_callbacks name
mapped = filters.map do |filter|
Callback.build(self_chain, filter, type, options)
end
__update_callbacks(name) do |target, chain|
options[:prepend] ? chain.prepend(*mapped) : chain.append(*mapped)
target.set_callbacks name, chain
end
end
# Skip a previously set callback. Like +set_callback+, <tt>:if</tt> or
# <tt>:unless</tt> options may be passed in order to control when the
# callback is skipped.
#
# class Writer < Person
# skip_callback :validate, :before, :check_membership, if: -> { age > 18 }
# end
#
# An <tt>ArgumentError</tt> will be raised if the callback has not
# already been set (unless the <tt>:raise</tt> option is set to <tt>false</tt>).
def skip_callback(name, *filter_list, &block)
type, filters, options = normalize_callback_params(filter_list, block)
if options[:if].is_a?(String) || options[:unless].is_a?(String)
ActiveSupport::Deprecation.warn(<<-MSG.squish)
Passing string to :if and :unless conditional options is deprecated
and will be removed in Rails 5.2 without replacement.
MSG
end
options[:raise] = true unless options.key?(:raise)
__update_callbacks(name) do |target, chain|
filters.each do |filter|
callback = chain.find { |c| c.matches?(type, filter) }
if !callback && options[:raise]
raise ArgumentError, "#{type.to_s.capitalize} #{name} callback #{filter.inspect} has not been defined"
end
if callback && (options.key?(:if) || options.key?(:unless))
new_callback = callback.merge_conditional_options(chain, if_option: options[:if], unless_option: options[:unless])
chain.insert(chain.index(callback), new_callback)
end
chain.delete(callback)
end
target.set_callbacks name, chain
end
end
# Remove all set callbacks for the given event.
def reset_callbacks(name)
callbacks = get_callbacks name
ActiveSupport::DescendantsTracker.descendants(self).each do |target|
chain = target.get_callbacks(name).dup
callbacks.each { |c| chain.delete(c) }
target.set_callbacks name, chain
end
set_callbacks(name, callbacks.dup.clear)
end
# Define sets of events in the object life cycle that support callbacks.
#
# define_callbacks :validate
# define_callbacks :initialize, :save, :destroy
#
# ===== Options
#
# * <tt>:terminator</tt> - Determines when a before filter will halt the
# callback chain, preventing following before and around callbacks from
# being called and the event from being triggered.
# This should be a lambda to be executed.
# The current object and the result lambda of the callback will be provided
# to the terminator lambda.
#
# define_callbacks :validate, terminator: ->(target, result_lambda) { result_lambda.call == false }
#
# In this example, if any before validate callbacks returns +false+,
# any successive before and around callback is not executed.
#
# The default terminator halts the chain when a callback throws +:abort+.
#
# * <tt>:skip_after_callbacks_if_terminated</tt> - Determines if after
# callbacks should be terminated by the <tt>:terminator</tt> option. By
# default after callbacks are executed no matter if callback chain was
# terminated or not. This option makes sense only when <tt>:terminator</tt>
# option is specified.
#
# * <tt>:scope</tt> - Indicates which methods should be executed when an
# object is used as a callback.
#
# class Audit
# def before(caller)
# puts 'Audit: before is called'
# end
#
# def before_save(caller)
# puts 'Audit: before_save is called'
# end
# end
#
# class Account
# include ActiveSupport::Callbacks
#
# define_callbacks :save
# set_callback :save, :before, Audit.new
#
# def save
# run_callbacks :save do
# puts 'save in main'
# end
# end
# end
#
# In the above case whenever you save an account the method
# <tt>Audit#before</tt> will be called. On the other hand
#
# define_callbacks :save, scope: [:kind, :name]
#
# would trigger <tt>Audit#before_save</tt> instead. That's constructed
# by calling <tt>#{kind}_#{name}</tt> on the given instance. In this
# case "kind" is "before" and "name" is "save". In this context +:kind+
# and +:name+ have special meanings: +:kind+ refers to the kind of
# callback (before/after/around) and +:name+ refers to the method on
# which callbacks are being defined.
#
# A declaration like
#
# define_callbacks :save, scope: [:name]
#
# would call <tt>Audit#save</tt>.
#
# ===== Notes
#
# +names+ passed to +define_callbacks+ must not end with
# <tt>!</tt>, <tt>?</tt> or <tt>=</tt>.
#
# Calling +define_callbacks+ multiple times with the same +names+ will
# overwrite previous callbacks registered with +set_callback+.
def define_callbacks(*names)
options = names.extract_options!
names.each do |name|
name = name.to_sym
set_callbacks name, CallbackChain.new(name, options)
module_eval <<-RUBY, __FILE__, __LINE__ + 1
def _run_#{name}_callbacks(&block)
run_callbacks #{name.inspect}, &block
end
def self._#{name}_callbacks
get_callbacks(#{name.inspect})
end
def self._#{name}_callbacks=(value)
set_callbacks(#{name.inspect}, value)
end
def _#{name}_callbacks
__callbacks[#{name.inspect}]
end
RUBY
end
end
protected
def get_callbacks(name) # :nodoc:
__callbacks[name.to_sym]
end
def set_callbacks(name, callbacks) # :nodoc:
self.__callbacks = __callbacks.merge(name.to_sym => callbacks)
end
end
end
end
| mit |
bmac/ember-cli | blueprints/adapter/index.js | 1159 | var fs = require('fs');
var path = require('path');
var stringUtil = require('../../lib/utilities/string');
var SilentError = require('../../lib/errors/silent');
module.exports = {
description: 'Generates an ember-data adapter.',
availableOptions: [
{ name: 'base-class', type: String }
],
locals: function(options) {
var adapterName = options.entity.name;
var baseClass = 'DS.RESTAdapter';
var importStatement = 'import DS from \'ember-data\';';
if (!options.baseClass && adapterName !== 'application') {
options.baseClass = 'application';
}
if (options.baseClass === adapterName) {
throw new SilentError('Adapters cannot extend from themself. To resolve this, remove the `--base-class` option or change to a different base-class.')
}
if (options.baseClass) {
baseClass = stringUtil.classify(options.baseClass.replace('\/', '-'));
baseClass = baseClass + 'Adapter';
importStatement = 'import ' + baseClass + ' from \'./' + options.baseClass + '\';'
}
return {
importStatement: importStatement,
baseClass: baseClass
};
}
};
| mit |
viral810/ngSimpleCMS | vendor/jackalope/jackalope/tests/Jackalope/Observation/EventFilterTestCase.php | 981 | <?php
namespace Jackalope\Observation;
use Jackalope\TestCase;
use Jackalope\Observation\EventFilter;
/**
* Unit tests for the EventFilter
*/
abstract class EventFilterTestCase extends TestCase
{
/**
* @var EventFilter
*/
protected $eventFilter;
/**
* @var \Jackalope\FactoryInterface
*/
protected $factory;
/**
* @var \PHPCR\SessionInterface
*/
protected $session;
public function setUp()
{
$this->factory = $this->getMock('Jackalope\\FactoryInterface');
$this->session = $this->getSessionMock();
$this->session
->expects($this->any())
->method('getNodes')
->will($this->returnValue(array())
);
$this->session
->expects($this->any())
->method('getNodesByIdentifier')
->will($this->returnValue(array())
);
$this->eventFilter = new EventFilter($this->factory, $this->session);
}
}
| mit |
cdnjs/cdnjs | ajax/libs/flot/3.2.5/jquery.flot.hover.js | 11242 | /* global jQuery */
/**
## jquery.flot.hover.js
This plugin is used for mouse hover and tap on a point of plot series.
It supports the following options:
```js
grid: {
hoverable: false, //to trigger plothover event on mouse hover or tap on a point
clickable: false //to trigger plotclick event on mouse hover
}
```
It listens to native mouse move event or click, as well as artificial generated
tap and touchevent.
When the mouse is over a point or a tap on a point is performed, that point or
the correscponding bar will be highlighted and a "plothover" event will be generated.
Custom "touchevent" is triggered when any touch interaction is made. Hover plugin
handles this events by unhighlighting all of the previously highlighted points and generates
"plothovercleanup" event to notify any part that is handling plothover (for exemple to cleanup
the tooltip from webcharts).
*/
(function($) {
'use strict';
var options = {
grid: {
hoverable: false,
clickable: false
}
};
var browser = $.plot.browser;
var highlights = [];
var eventType = {
click: 'click',
hover: 'hover'
}
var lastMouseMoveEvent;
var plot;
function bindEvents(plot, eventHolder) {
var o = plot.getOptions();
if (o.grid.hoverable || o.grid.clickable) {
eventHolder[0].addEventListener('touchevent', triggerCleanupEvent, false);
eventHolder[0].addEventListener('tap', tap.generatePlothoverEvent, false);
}
if (o.grid.clickable) {
eventHolder.bind("click", onClick);
}
if (o.grid.hoverable) {
eventHolder.bind("mousemove", onMouseMove);
// Use bind, rather than .mouseleave, because we officially
// still support jQuery 1.2.6, which doesn't define a shortcut
// for mouseenter or mouseleave. This was a bug/oversight that
// was fixed somewhere around 1.3.x. We can return to using
// .mouseleave when we drop support for 1.2.6.
eventHolder.bind("mouseleave", onMouseLeave);
}
}
function shutdown(plot, eventHolder) {
eventHolder[0].removeEventListener('tap', tap.generatePlothoverEvent);
eventHolder[0].removeEventListener('touchevent', triggerCleanupEvent);
eventHolder.unbind("mousemove", onMouseMove);
eventHolder.unbind("mouseleave", onMouseLeave);
eventHolder.unbind("click", onClick);
highlights = [];
}
var tap = {
generatePlothoverEvent: function (e) {
var o = plot.getOptions(),
newEvent = new CustomEvent('mouseevent');
//transform from touch event to mouse event format
newEvent.pageX = e.detail.changedTouches[0].pageX;
newEvent.pageY = e.detail.changedTouches[0].pageY;
newEvent.clientX = e.detail.changedTouches[0].clientX;
newEvent.clientY = e.detail.changedTouches[0].clientY;
if (o.grid.hoverable) {
doTriggerClickHoverEvent(newEvent, eventType.hover, 30);
}
return false;
}
};
function doTriggerClickHoverEvent(event, eventType, searchDistance) {
var series = plot.getData();
if (event !== undefined
&& series.length > 0
&& series[0].xaxis.c2p !== undefined
&& series[0].yaxis.c2p !== undefined) {
var eventToTrigger = "plot" + eventType;
var seriesFlag = eventType + "able";
triggerClickHoverEvent(eventToTrigger, event,
function(i) {
return series[i][seriesFlag] !== false;
}, searchDistance);
}
}
function onMouseMove(e) {
lastMouseMoveEvent = e;
plot.getPlaceholder()[0].lastMouseMoveEvent = e;
doTriggerClickHoverEvent(e, eventType.hover);
}
function onMouseLeave(e) {
lastMouseMoveEvent = undefined;
plot.getPlaceholder()[0].lastMouseMoveEvent = undefined;
triggerClickHoverEvent("plothover", e,
function(i) {
return false;
});
}
function onClick(e) {
doTriggerClickHoverEvent(e, eventType.click);
}
function triggerCleanupEvent() {
plot.unhighlight();
plot.getPlaceholder().trigger('plothovercleanup');
}
// trigger click or hover event (they send the same parameters
// so we share their code)
function triggerClickHoverEvent(eventname, event, seriesFilter, searchDistance) {
var options = plot.getOptions(),
offset = plot.offset(),
page = browser.getPageXY(event),
canvasX = page.X - offset.left,
canvasY = page.Y - offset.top,
pos = plot.c2p({
left: canvasX,
top: canvasY
}),
distance = searchDistance !== undefined ? searchDistance : options.grid.mouseActiveRadius;
pos.pageX = page.X;
pos.pageY = page.Y;
var item = plot.findNearbyItem(canvasX, canvasY, seriesFilter, distance);
if (item) {
// fill in mouse pos for any listeners out there
item.pageX = parseInt(item.series.xaxis.p2c(item.datapoint[0]) + offset.left, 10);
item.pageY = parseInt(item.series.yaxis.p2c(item.datapoint[1]) + offset.top, 10);
}
if (options.grid.autoHighlight) {
// clear auto-highlights
for (var i = 0; i < highlights.length; ++i) {
var h = highlights[i];
if ((h.auto === eventname &&
!(item && h.series === item.series &&
h.point[0] === item.datapoint[0] &&
h.point[1] === item.datapoint[1])) || !item) {
unhighlight(h.series, h.point);
}
}
if (item) {
highlight(item.series, item.datapoint, eventname);
}
}
plot.getPlaceholder().trigger(eventname, [pos, item]);
}
function highlight(s, point, auto) {
if (typeof s === "number") {
s = plot.getData()[s];
}
if (typeof point === "number") {
var ps = s.datapoints.pointsize;
point = s.datapoints.points.slice(ps * point, ps * (point + 1));
}
var i = indexOfHighlight(s, point);
if (i === -1) {
highlights.push({
series: s,
point: point,
auto: auto
});
plot.triggerRedrawOverlay();
} else if (!auto) {
highlights[i].auto = false;
}
}
function unhighlight(s, point) {
if (s == null && point == null) {
highlights = [];
plot.triggerRedrawOverlay();
return;
}
if (typeof s === "number") {
s = plot.getData()[s];
}
if (typeof point === "number") {
var ps = s.datapoints.pointsize;
point = s.datapoints.points.slice(ps * point, ps * (point + 1));
}
var i = indexOfHighlight(s, point);
if (i !== -1) {
highlights.splice(i, 1);
plot.triggerRedrawOverlay();
}
}
function indexOfHighlight(s, p) {
for (var i = 0; i < highlights.length; ++i) {
var h = highlights[i];
if (h.series === s &&
h.point[0] === p[0] &&
h.point[1] === p[1]) {
return i;
}
}
return -1;
}
function processRawData() {
triggerCleanupEvent();
doTriggerClickHoverEvent(lastMouseMoveEvent, eventType.hover);
}
function drawOverlay(plot, octx, overlay) {
var plotOffset = plot.getPlotOffset(),
i, hi;
octx.save();
octx.translate(plotOffset.left, plotOffset.top);
for (i = 0; i < highlights.length; ++i) {
hi = highlights[i];
if (hi.series.bars.show) drawBarHighlight(hi.series, hi.point, octx);
else drawPointHighlight(hi.series, hi.point, octx, plot);
}
octx.restore();
}
function drawPointHighlight(series, point, octx, plot) {
var x = point[0],
y = point[1],
axisx = series.xaxis,
axisy = series.yaxis,
highlightColor = (typeof series.highlightColor === "string") ? series.highlightColor : $.color.parse(series.color).scale('a', 0.5).toString();
if (x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max) {
return;
}
var pointRadius = series.points.radius + series.points.lineWidth / 2;
octx.lineWidth = pointRadius;
octx.strokeStyle = highlightColor;
var radius = 1.5 * pointRadius;
x = axisx.p2c(x);
y = axisy.p2c(y);
octx.beginPath();
var symbol = series.points.symbol;
if (symbol === 'circle') {
octx.arc(x, y, radius, 0, 2 * Math.PI, false);
} else if (typeof symbol === 'string' && plot.drawSymbol && plot.drawSymbol[symbol]) {
plot.drawSymbol[symbol](octx, x, y, radius, false);
}
octx.closePath();
octx.stroke();
}
function drawBarHighlight(series, point, octx) {
var highlightColor = (typeof series.highlightColor === "string") ? series.highlightColor : $.color.parse(series.color).scale('a', 0.5).toString(),
fillStyle = highlightColor,
barLeft;
var barWidth = series.bars.barWidth[0] || series.bars.barWidth;
switch (series.bars.align) {
case "left":
barLeft = 0;
break;
case "right":
barLeft = -barWidth;
break;
default:
barLeft = -barWidth / 2;
}
octx.lineWidth = series.bars.lineWidth;
octx.strokeStyle = highlightColor;
var fillTowards = series.bars.fillTowards || 0,
bottom = fillTowards > series.yaxis.min ? Math.min(series.yaxis.max, fillTowards) : series.yaxis.min;
$.plot.drawSeries.drawBar(point[0], point[1], point[2] || bottom, barLeft, barLeft + barWidth,
function() {
return fillStyle;
}, series.xaxis, series.yaxis, octx, series.bars.horizontal, series.bars.lineWidth);
}
function initHover(plot, options) {
plot.highlight = highlight;
plot.unhighlight = unhighlight;
if (options.grid.hoverable || options.grid.clickable) {
plot.hooks.drawOverlay.push(drawOverlay);
plot.hooks.processRawData.push(processRawData);
}
lastMouseMoveEvent = plot.getPlaceholder()[0].lastMouseMoveEvent;
}
function init(plt) {
plot = plt;
plot.hooks.bindEvents.push(bindEvents);
plot.hooks.shutdown.push(shutdown);
plot.hooks.processOptions.push(initHover);
}
$.plot.plugins.push({
init: init,
options: options,
name: 'hover',
version: '0.1'
});
})(jQuery);
| mit |
dukenguyen/Rapture | Libs/RaptureField/src/test/java/rapture/field/StructureLoaderTest.java | 591 | package rapture.field;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import rapture.common.RaptureStructure;
public class StructureLoaderTest {
@Test
public void testLoadStructure() {
ResourceLoader loader = new ResourceLoader();
RaptureStructure s = loader.getStructure("/test/structure1");
assertTrue(s.getName().equals("//test/structure1"));
assertTrue(s.getDescription().equals("A test structure"));
assertTrue(s.getFields().size() == 1);
assertTrue(s.getFields().get(0).getKey().equals("field1"));
}
}
| mit |
gkeane/wp-auth0 | lib/WP_Auth0_Ip_Check.php | 1222 | <?php
class WP_Auth0_Ip_Check {
public static function init(){
if( !WP_Auth0_Options::get( 'ip_range_check' ) || is_admin() )
return;
new WP_Auth0_Ip_Check();
}
private function __construct() {
add_filter( 'wp_auth0_get_option', array($this, 'check_activate' ), 10, 2 );
}
public function check_activate( $val, $key ){
if($key != "active")
return $val;
$is_active = $this->validate_ip() ? 1 : 0;
return $is_active;
}
private function validate_ip(){
$ranges = $this->get_ranges();
$ip = $_SERVER['REMOTE_ADDR'];
foreach($ranges as $range){
$in_range = $this->in_range($ip, $range);
if($in_range)
return true;
}
return false;
}
private function in_range($ip, $range){
$from = ip2long($range['from']);
$to = ip2long($range['to']);
$ip = ip2long($ip);
return $ip >= $from && $ip <= $to;
}
private function get_ranges(){
$data = WP_Auth0_Options::get( 'ip_ranges' );
$data = str_replace("\r\n", "\n", $data);
$raw = explode("\n", $data);
$ranges = array();
foreach($raw as $r){
$d = explode('-', $r);
if(count($d) < 2)
continue;
$ranges[] = array(
'from' => trim($d[0]),
'to' => trim($d[1])
);
}
return $ranges;
}
} | mit |
2ndkauboy/platform | src/Oro/Bundle/TagBundle/Tests/Selenium/TagsAcl.php | 8724 | <?php
namespace Oro\Bundle\TagBundle\Tests\Selenium;
use Oro\Bundle\TagBundle\Tests\Selenium\Pages\Tags;
use Oro\Bundle\TestFrameworkBundle\Test\Selenium2TestCase;
use Oro\Bundle\UserBundle\Tests\Selenium\Pages\Roles;
use Oro\Bundle\UserBundle\Tests\Selenium\Pages\Users;
class TagsAcl extends Selenium2TestCase
{
public function testCreateRole()
{
$randomPrefix = mt_rand();
$login = $this->login();
/** @var Roles $login*/
$login->openRoles('Oro\Bundle\UserBundle')
->add()
->setLabel('Label_' . $randomPrefix)
->setEntity('Tag', array('Create', 'Edit', 'Delete', 'View'), 'Organization')
->setEntity('User', array('Create', 'Edit', 'Delete', 'View', 'Assign'), 'Organization')
->setEntity('Group', array('Create', 'Edit', 'Delete', 'View', 'Assign'), 'Organization')
->setEntity('Role', array('Create', 'Edit', 'Delete', 'View', 'Assign'), 'Organization')
->setCapability(
array(
'Tag assign/unassign',
'Unassign all tags from entities'),
'System'
)
->save()
->assertMessage('Role saved')
->close();
return ($randomPrefix);
}
/**
* @depends testCreateRole
* @param $role
* @return string
*/
public function testCreateUser($role)
{
$userName = 'User_'.mt_rand();
$login = $this->login();
/** @var Users $login*/
$login->openUsers('Oro\Bundle\UserBundle')
->add()
->assertTitle('Create User - Users - User Management - System')
->setUsername($userName)
->setOwner('Main')
->enable()
->setFirstPassword('123123q')
->setSecondPassword('123123q')
->setFirstName('First_'.$userName)
->setLastName('Last_'.$userName)
->setEmail($userName.'@mail.com')
->setRoles(array('Label_' . $role))
->setOrganization('OroCRM')
->uncheckInviteUser()
->save()
->assertMessage('User saved')
->toGrid()
->close()
->assertTitle('All - Users - User Management - System');
return $userName;
}
/**
* @depends testCreateUser
* @return string
*/
public function testCreateTag()
{
$tagName = 'Tag_'.mt_rand();
$login = $this->login();
/** @var Tags $login*/
$login->openTags('Oro\Bundle\TagBundle')
->add()
->assertTitle('Create Tag - Tags - System')
->setTagName($tagName)
->setOwner('admin')
->save()
->assertMessage('Tag saved')
->assertTitle('All - Tags - System')
->close();
return $tagName;
}
/**
* @depends testCreateUser
* @depends testCreateRole
* @depends testCreateTag
* @param $username
* @param $role
* @param $tagName
* @param string $aclCase
* @dataProvider columnTitle
*/
public function testTagAcl($aclCase, $username, $role, $tagName)
{
$roleName = 'Label_' . $role;
$login = $this->login();
switch ($aclCase) {
case 'delete':
$this->deleteAcl($login, $roleName, $username, $tagName);
break;
case 'update':
$this->updateAcl($login, $roleName, $username, $tagName);
break;
case 'create':
$this->createAcl($login, $roleName, $username);
break;
case 'view list':
$this->viewListAcl($login, $roleName, $username);
break;
case 'unassign global':
$this->unassignGlobalAcl($login, $roleName, $tagName);
break;
case 'assign unassign':
$this->assignAcl($login, $roleName, $username);
break;
}
}
public function deleteAcl($login, $role, $username, $tagName)
{
/** @var Roles $login*/
$login->openRoles('Oro\Bundle\UserBundle')
->filterBy('Label', $role)
->open(array($role))
->setEntity('Tag', array('Delete'), 'None')
->save()
->logout()
->setUsername($username)
->setPassword('123123q')
->submit()
->openTags('Oro\Bundle\TagBundle')
->checkContextMenu($tagName, 'Delete');
}
public function updateAcl($login, $role, $username, $tagName)
{
/** @var Roles $login*/
$login->openRoles('Oro\Bundle\UserBundle')
->filterBy('Label', $role)
->open(array($role))
->setEntity('Tag', array('Edit'), 'None')
->save()
->logout()
->setUsername($username)
->setPassword('123123q')
->submit()
->openTags('Oro\Bundle\TagBundle')
->checkContextMenu($tagName, 'Update');
}
public function createAcl($login, $role, $username)
{
/** @var Roles $login*/
$login->openRoles('Oro\Bundle\UserBundle')
->filterBy('Label', $role)
->open(array($role))
->setEntity('Tag', array('Create'), 'None')
->save()
->logout()
->setUsername($username)
->setPassword('123123q')
->submit()
->openTags('Oro\Bundle\TagBundle')
->assertElementNotPresent("//div[@class = 'container-fluid']//a[contains(., 'Create Tag')]");
}
public function viewListAcl($login, $role, $username)
{
/** @var Roles $login*/
$login->openRoles('Oro\Bundle\UserBundle')
->filterBy('Label', $role)
->open(array($role))
->setEntity('Tag', array('View'), 'None')
->save()
->logout()
->setUsername($username)
->setPassword('123123q')
->submit()
->openTags('Oro\Bundle\TagBundle')
->assertTitle('403 - Forbidden');
}
public function unassignGlobalAcl($login, $roleName, $tagName)
{
$username = 'user' . mt_rand();
/** @var Roles $login*/
$login = $login->openRoles('Oro\Bundle\UserBundle')
->filterBy('Label', $roleName)
->open(array($roleName))
->setCapability(array('Unassign all tags from entities'), 'None')
->save();
/** @var Users $login*/
$login = $login->openUsers('Oro\Bundle\UserBundle')
->add()
->setUsername($username)
->enable()
->setOwner('Main')
->setFirstpassword('123123q')
->setSecondpassword('123123q')
->setFirstName('First_'.$username)
->setLastName('Last_'.$username)
->setEmail($username.'@mail.com')
->setRoles(array($roleName))
->setTag($tagName)
->save()
->logout()
->setUsername($username)
->setPassword('123123q')
->submit();
$login->openUsers('Oro\Bundle\UserBundle')
->filterBy('Username', $username)
->open(array($username))
->edit()
->assertElementNotPresent(
"//div[starts-with(@id,'s2id_oro_user_user_form_tags')]//li[contains(., '{$tagName}')]" .
"/a[@class='select2-search-choice-close']"
);
}
public function assignAcl($login, $role, $username)
{
/** @var Roles $login*/
$login->openRoles('Oro\Bundle\UserBundle')
->filterBy('Label', $role)
->open(array($role))
->setCapability(array('Tag assign/unassign'), 'None')
->save()
->logout()
->setUsername($username)
->setPassword('123123q')
->submit()
->openUsers('Oro\Bundle\UserBundle')
->add()
->assertElementNotPresent(
"//div[@class='select2-container select2-container-multi select2-container-disabled']"
);
}
/**
* Data provider for Tags ACL test
*
* @return array
*/
public function columnTitle()
{
return array(
'unassign global' => array('unassign global'),
'assign unassign' => array('assign unassign'),
'delete' => array('delete'),
'update' => array('update'),
'create' => array('create'),
'view list' => array('view list'),
);
}
}
| mit |
rhalff/storybook | addons/options/src/shared/index.js | 176 | // addons, panels and events get unique names using a prefix
export const ADDON_ID = 'storybooks/storybook-addon-options';
export const EVENT_ID = `${ADDON_ID}/options-event`;
| mit |
ALTELMA/asset_manager | application/libraries/PHPExcel/branches/v1.7.3b/Tests/01simple.php | 2848 | <?php
/**
* PHPExcel
*
* Copyright (C) 2006 - 2010 PHPExcel
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* @category PHPExcel
* @package PHPExcel
* @copyright Copyright (c) 2006 - 2010 PHPExcel (http://www.codeplex.com/PHPExcel)
* @license http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt LGPL
* @version ##VERSION##, ##DATE##
*/
/** Error reporting */
error_reporting(E_ALL);
/** PHPExcel */
require_once '../Classes/PHPExcel.php';
// Create new PHPExcel object
echo date('H:i:s') . " Create new PHPExcel object\n";
$objPHPExcel = new PHPExcel();
// Set properties
echo date('H:i:s') . " Set properties\n";
$objPHPExcel->getProperties()->setCreator("Maarten Balliauw")
->setLastModifiedBy("Maarten Balliauw")
->setTitle("Office 2007 XLSX Test Document")
->setSubject("Office 2007 XLSX Test Document")
->setDescription("Test document for Office 2007 XLSX, generated using PHP classes.")
->setKeywords("office 2007 openxml php")
->setCategory("Test result file");
// Add some data
echo date('H:i:s') . " Add some data\n";
$objPHPExcel->setActiveSheetIndex(0)
->setCellValue('A1', 'Hello')
->setCellValue('B2', 'world!')
->setCellValue('C1', 'Hello')
->setCellValue('D2', 'world!');
// Miscellaneous glyphs, UTF-8
$objPHPExcel->setActiveSheetIndex(0)
->setCellValue('A4', 'Miscellaneous glyphs')
->setCellValue('A5', 'éàèùâêîôûëïüÿäöüç');
// Rename sheet
echo date('H:i:s') . " Rename sheet\n";
$objPHPExcel->getActiveSheet()->setTitle('Simple');
// Set active sheet index to the first sheet, so Excel opens this as the first sheet
$objPHPExcel->setActiveSheetIndex(0);
// Save Excel 2007 file
echo date('H:i:s') . " Write to Excel2007 format\n";
$objWriter = PHPExcel_IOFactory::createWriter($objPHPExcel, 'Excel2007');
$objWriter->save(str_replace('.php', '.xlsx', __FILE__));
// Echo memory peak usage
echo date('H:i:s') . " Peak memory usage: " . (memory_get_peak_usage(true) / 1024 / 1024) . " MB\r\n";
// Echo done
echo date('H:i:s') . " Done writing file.\r\n";
| mit |
tajddin/voiceplay | node_modules/airplay2/node_modules/mdns/src/mdns_utils.hpp | 1349 | #ifndef NODE_MDNS_UTILS_INCLUDED
#define NODE_MDNS_UTILS_INCLUDED
#include <sstream>
#include <errno.h>
#include <fcntl.h>
namespace node_mdns {
const char * errorString(DNSServiceErrorType error);
v8::Local<v8::Value> buildException(DNSServiceErrorType error_code);
inline
v8::Handle<v8::Value>
throwError(const char * message) {
NanThrowError( NanError(message) );
return NanUndefined();
}
inline
v8::Handle<v8::Value>
throwTypeError(const char * message) {
NanThrowTypeError( message );
return NanUndefined();
}
inline
v8::Handle<v8::Value>
throwMdnsError(DNSServiceErrorType error_code) {
NanThrowError( buildException(error_code) );
return NanUndefined();
}
inline
bool
argumentCountMismatch(_NAN_METHOD_ARGS, int expectedCount) {
return args.Length() != expectedCount;
}
inline
v8::Handle<v8::Value>
throwArgumentCountMismatchException(_NAN_METHOD_ARGS, size_t expectedCount) {
std::ostringstream msg;
msg << "argument count mismatch: expected " << expectedCount
<< ", but got " << args.Length() << " arguments.";
return throwError(msg.str().c_str());
}
inline
v8::Local<v8::Value>
stringOrUndefined(const char * str) {
if (str) {
return NanNew(str);
} else {
return NanUndefined();
}
}
} // end of namespace node_mdns
#endif // NODE_MDNS_UTILS_INCLUDED
| mit |
cdnjs/cdnjs | ajax/libs/simple-icons/4.15.0/sahibinden.min.js | 839 | module.exports={title:"Sahibinden",slug:"sahibinden",svg:'<svg role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><title>Sahibinden icon</title><path d="M0 0v24h24V0zm11.517 4.723c.563-.007 1.13-.004 1.69.063 2.412.054 4.853 2.18 4.879 4.508h-3.319c.009-.694-.603-1.555-1.279-1.732-1.105-.269-2.46-.355-3.43.294-.738.445-1.065 1.672-.095 2.056 2.288 1.083 5.158.846 7.224 2.372 1.698 1.21 1.598 3.666.274 5.086-1.718 1.84-4.636 2.132-7.099 1.782-2.448-.117-4.755-2.245-4.819-4.562h3.311c-.056.832.638 1.557 1.46 1.822 1.27.275 2.726.358 3.93-.19.96-.323 1.024-1.544.284-2.103-1.595-.897-3.565-.924-5.297-1.518-2.012-.39-3.643-2.278-3.26-4.197.424-2.342 3.127-3.727 5.546-3.681z"/></svg>',get path(){return this.svg.match(/<path\s+d="([^"]*)/)[1]},source:"https://www.sahibinden.com/favicon.ico",hex:"FFE800",license:void 0}; | mit |
RallySoftware/eclipselink.runtime | jpa/eclipselink.jpa.wdf.test/src/org/eclipse/persistence/testing/models/wdf/jpa1/types/UserDefinedEnum.java | 789 | /*******************************************************************************
* Copyright (c) 2005, 2015 SAP. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* SAP - initial API and implementation
******************************************************************************/
package org.eclipse.persistence.testing.models.wdf.jpa1.types;
public enum UserDefinedEnum {
HUGO, EMIL
}
| epl-1.0 |
menghanli/ice | org.eclipse.ice.reactor.plant/src/org/eclipse/ice/reactor/plant/Subchannel.java | 6601 | /*******************************************************************************
* Copyright (c) 2014 UT-Battelle, LLC.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Initial API and implementation and/or initial documentation - Jay Jay Billings,
* Jordan H. Deyton, Dasha Gorin, Alexander J. McCaskey, Taylor Patterson,
* Claire Saunders, Matthew Wang, Anna Wojtowicz
*******************************************************************************/
package org.eclipse.ice.reactor.plant;
import java.security.InvalidParameterException;
/**
* <p>
* Represents a lattice of rods for housing fuel rods of a reactor. For the
* purpose of drawing, this is just a specialized pipe.
* </p>
*
* @author Anna Wojtowicz
*/
public class Subchannel extends Pipe {
/**
* <p>
* Number of fuel rods contained within.
* </p>
*
*/
private int numRods;
/**
* <p>
* Diameter of the subchannel fuel rods (this assumes uniform rod sizes).
* </p>
*
*/
private double rodDiameter;
/**
* <p>
* Pitch of the fuel rod bundle (distance between adjacent rod centers).
* </p>
*
*/
private double pitch;
/**
* <p>
* Nullary constructor.
* </p>
*
*/
public Subchannel() {
// Set the name, description and ID.
setName("Subchannel 1");
setDescription("A subchannel plant component for reactors");
setId(1);
// Set the default number of rods, rod diameter and pitch.
setNumRods(1);
setRodDiameter(1.0);
setPitch(1.5);
// Note: Pitch must always be set after diameter, as setPitch method
// checks that pitch >= rodDiameter.
return;
}
/**
* <p>
* Parameterized constructor.
* </p>
*
* @param numRods
* <p>
* Number of rods contained.
* </p>
* @param rodDiameter
* <p>
* Diameter of the (uniformly-sized) fuel rods.
* </p>
* @param pitch
* <p>
* Pitch of the fuel rods.
* </p>
*/
public Subchannel(int numRods, double rodDiameter, double pitch) {
// Set the name, description and ID.
setName("Subchannel 1");
setDescription("A subchannel plant component for reactors");
setId(1);
// Set the default number of rods, rod diameter and pitch.
setNumRods(numRods);
setRodDiameter(rodDiameter);
setPitch(pitch);
return;
}
/**
* @return the numRods
*/
public int getNumRods() {
return numRods;
}
/**
* @param numRods
* the numRods to set
*/
public void setNumRods(int numRods) {
// Check the input is valid.
if (numRods >= 1) {
this.numRods = numRods;
} else {
throw new InvalidParameterException("Subchannel error: The number "
+ "of rods must greater than or equal to 1.");
}
return;
}
/**
* @return the rodDiameter
*/
public double getRodDiameter() {
return rodDiameter;
}
/**
* @param rodDiameter
* the rodDiameter to set
*/
public void setRodDiameter(double rodDiameter) {
// Check the input is valid.
if (rodDiameter > 0) {
this.rodDiameter = rodDiameter;
} else {
throw new InvalidParameterException("Subchannel error: The rod "
+ "diameter must be non-negative.");
}
return;
}
/**
* @return the pitch
*/
public double getPitch() {
return pitch;
}
/**
* @param pitch
* the pitch to set
*/
public void setPitch(double pitch) {
// Check that the input is valid.
if (pitch >= rodDiameter) {
this.pitch = pitch;
} else {
throw new InvalidParameterException("Subchannel error: The rod "
+ "pitch cannot be smaller than the rod diameter.");
}
}
/**
* <p>
* Performs an equality check between two Objects.
* </p>
*
* @param otherObject
* <p>
* The other Object to compare against.
* </p>
* @return <p>
* Returns true if the two objects are equal, otherwise false.
* </p>
*/
@Override
public boolean equals(Object otherObject) {
// By default, the objects are not equivalent.
boolean equals = false;
// Check the reference.
if (this == otherObject) {
equals = true;
}
// Check the information stored in the other object.
else if (otherObject != null && otherObject instanceof Subchannel) {
// Cast the other object.
Subchannel component = (Subchannel) otherObject;
// Compare all the variables. (Save the biggest for last; Java
// should
// short-circuit the logical operators as soon as a mismatch is
// found)
equals = (super.equals(component) && numRods == component.numRods
&& rodDiameter == component.rodDiameter && pitch == component.pitch);
}
return equals;
}
/**
* <p>
* Performs a deep copy and returns a newly instantiated Object.
* </p>
*
* @return <p>
* The newly instantiated Object.
* </p>
*/
@Override
public Object clone() {
// Initialize a new object.
Subchannel object = new Subchannel();
// Copy the contents from this one.
object.copy(this);
// Return the newly instantiated object.
return object;
}
/**
* <p>
* Deep copies the contents of otherObject.
* </p>
*
* @param otherObject
* <p>
* The other object to copy the contents from.
* </p>
*/
public void copy(Subchannel otherObject) {
// Check the otherObject is valid.
if (otherObject == null) {
return;
}
// Copy the other object.
super.copy(otherObject);
numRods = otherObject.numRods;
rodDiameter = otherObject.rodDiameter;
pitch = otherObject.pitch;
return;
}
/**
* <p>
* Returns the hashCode of the object.
* </p>
*
* @return <p>
* The hashCode of the Object.
* </p>
*/
@Override
public int hashCode() {
// Call the super's hashCode.
int hash = super.hashCode();
// Add local hashes.
hash = 31 * hash + numRods;
hash = 31 * hash + new Double(rodDiameter).hashCode();
hash = 31 * hash + new Double(pitch).hashCode();
return hash;
}
/**
* <p>
* Accepts PlantComponentVisitors to reveal the type of a PlantComponent.
* </p>
*
* @param visitor
* <p>
* The PlantComponent's visitor.
* </p>
*/
@Override
public void accept(IPlantComponentVisitor visitor) {
// Only accept valid visitors.
if (visitor != null) {
visitor.visit(this);
}
return;
}
} | epl-1.0 |
NeilBryant/check_mk | web/plugins/wato/userdb.py | 2026 | #!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# ails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
def sync_pre_activate_changes(_unused):
# In some rare cases for still unknown reasons at this time the
# variable config sometimes has the value None. This could or could
# not be a mod_python problem. But it makes the activation of changes
# in a D-WATO setup break. So better handle this case here.
try:
do_sync = 'wato_pre_activate_changes' in config.userdb_automatic_sync
except:
do_sync = False
if do_sync:
userdb.hook_sync()
register_hook('pre-activate-changes', sync_pre_activate_changes)
| gpl-2.0 |
Gurgel100/gcc | gcc/testsuite/g++.dg/parse/pragma-recovery.C | 512 | /* { dg-additional-options -fopenmp } */
/* { dg-require-effective-target fopenmp } */
// Make sure error recovery doesn't get confused by tokens inside a
// deferred pragma.
// OpenMP is a convenient deferred pragma insertion mechanism.
void foo ()
{
1 * "" // { dg-error "invalid" }
#pragma omp atomic {
;
}
void bar ()
{
1 * "" // { dg-error "invalid" }
#pragma omp atomic }
;
}
void baz ()
{
1 * "" // { dg-error "invalid" }
#pragma omp atomic ;
0;
}
| gpl-2.0 |