repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
leopardoooo/cambodia | boss-core/src/main/java/com/sysway/outwardtps/service/pay/CustomerValidateResponse0.java | 13692 | /**
* CustomerValidateResponse0.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.6.3 Built on : Jun 27, 2015 (11:18:31 BST)
*/
package com.sysway.outwardtps.service.pay;
/**
 * CustomerValidateResponse0 bean class.
 *
 * <p>Axis2 ADB (Axis Data Binding) wrapper bean for the document element
 * {http://pay.service.outwardtps.sysway.com/}customerValidateResponse.
 * It holds a single child bean and delegates all real serialization work
 * to it.  Auto-generated from WSDL; the private writeXxx helpers below are
 * standard ADB boilerplate that this particular element never invokes
 * (hence the "unused" suppression) — do not hand-edit.</p>
 */
@SuppressWarnings({"unchecked",
    "unused"
})
public class CustomerValidateResponse0 implements org.apache.axis2.databinding.ADBBean {
    /** Qualified name of the wrapper element this bean (de)serializes. */
    public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName("http://pay.service.outwardtps.sysway.com/",
            "customerValidateResponse", "ns1");

    /**
     * field for CustomerValidateResponse
     */
    protected com.sysway.outwardtps.service.pay.CustomerValidateResponse localCustomerValidateResponse;

    /**
     * Auto generated getter method.
     * @return com.sysway.outwardtps.service.pay.CustomerValidateResponse the wrapped payload (may be null if never set)
     */
    public com.sysway.outwardtps.service.pay.CustomerValidateResponse getCustomerValidateResponse() {
        return localCustomerValidateResponse;
    }

    /**
     * Auto generated setter method.
     * @param param CustomerValidateResponse payload to wrap
     */
    public void setCustomerValidateResponse(
        com.sysway.outwardtps.service.pay.CustomerValidateResponse param) {
        this.localCustomerValidateResponse = param;
    }

    /**
     * Builds a lazily-serialized OMElement backed by this bean.
     *
     * @param parentQName ignored — the element is always emitted as MY_QNAME
     * @param factory factory used to create the element
     * @return org.apache.axiom.om.OMElement wrapping this bean as its data source
     */
    public org.apache.axiom.om.OMElement getOMElement(
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory)
        throws org.apache.axis2.databinding.ADBException {
        org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this,
                MY_QNAME);

        return factory.createOMElement(dataSource, MY_QNAME);
    }

    /** Serializes without an explicit xsi:type attribute. */
    public void serialize(final javax.xml.namespace.QName parentQName,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException,
            org.apache.axis2.databinding.ADBException {
        serialize(parentQName, xmlWriter, false);
    }

    /**
     * Serializes the wrapped bean under MY_QNAME.  Note that both
     * parentQName and serializeType are ignored: the child bean is always
     * written with the fixed element name.
     *
     * @throws org.apache.axis2.databinding.ADBException if no payload has been set
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
        javax.xml.stream.XMLStreamWriter xmlWriter, boolean serializeType)
        throws javax.xml.stream.XMLStreamException,
            org.apache.axis2.databinding.ADBException {
        //We can safely assume an element has only one type associated with it
        if (localCustomerValidateResponse == null) {
            throw new org.apache.axis2.databinding.ADBException(
                "customerValidateResponse cannot be null!");
        }

        localCustomerValidateResponse.serialize(MY_QNAME, xmlWriter);
    }

    /** Returns the canonical prefix for the service namespace, otherwise a generated unique one. */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://pay.service.outwardtps.sysway.com/")) {
            return "ns1";
        }

        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * Utility method to write an element start tag.
     * Reuses a prefix already bound on the writer; otherwise binds and
     * declares a new one (empty-string prefix for the empty namespace).
     */
    private void writeStartElement(java.lang.String prefix,
        java.lang.String namespace, java.lang.String localPart,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);

        if (writerPrefix != null) {
            xmlWriter.writeStartElement(namespace, localPart);
        } else {
            if (namespace.length() == 0) {
                prefix = "";
            } else if (prefix == null) {
                prefix = generatePrefix(namespace);
            }

            xmlWriter.writeStartElement(prefix, localPart, namespace);
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
    }

    /**
     * Util method to write an attribute with the ns prefix.
     * Declares the namespace on the writer first if not already bound.
     */
    private void writeAttribute(java.lang.String prefix,
        java.lang.String namespace, java.lang.String attName,
        java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }

        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix.
     * An empty namespace yields an unqualified attribute.
     */
    private void writeAttribute(java.lang.String namespace,
        java.lang.String attName, java.lang.String attValue,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute.
     * The attribute value is rendered as "prefix:localPart" (prefix omitted
     * when bound to the default namespace).
     */
    private void writeQNameAttribute(java.lang.String namespace,
        java.lang.String attName, javax.xml.namespace.QName qname,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);

        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }

        java.lang.String attributeValue;

        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames written as element text.
     */
    private void writeQName(javax.xml.namespace.QName qname,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();

        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);

            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }

            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" +
                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                        qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                        qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                    qname));
        }
    }

    /** Writes a space-separated list of QNames as character data. */
    private void writeQNames(javax.xml.namespace.QName[] qnames,
        javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }

                namespaceURI = qnames[i].getNamespaceURI();

                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);

                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }

                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":")
                                     .append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                            qnames[i]));
                }
            }

            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix.
     * Generates candidate prefixes until one that is unbound in the current
     * namespace context is found, then declares and returns it.
     */
    private java.lang.String registerPrefix(
        javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace)
        throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);

        if (prefix == null) {
            prefix = generatePrefix(namespace);

            javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext();

            while (true) {
                java.lang.String uri = nsContext.getNamespaceURI(prefix);

                if ((uri == null) || (uri.length() == 0)) {
                    break;
                }

                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }

            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }

        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object.
     * NOTE(review): throws NullPointerException (not ADBException) when the
     * payload was never set — callers should set it first.
     *
     * @param qName ignored — the pull parser is always rooted at MY_QNAME
     */
    public javax.xml.stream.XMLStreamReader getPullParser(
        javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException {
        //We can safely assume an element has only one type associated with it
        return localCustomerValidateResponse.getPullParser(MY_QNAME);
    }

    /**
     * Factory class that keeps the parse method.
     */
    public static class Factory {
        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         *
         * @throws java.lang.Exception on stream errors, or ADBException on an unexpected child element
         */
        public static CustomerValidateResponse0 parse(
            javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
            CustomerValidateResponse0 object = new CustomerValidateResponse0();

            // Generated boilerplate locals; unused for this element.
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";

            try {
                // Advance to the first start/end element event.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                while (!reader.isEndElement()) {
                    if (reader.isStartElement()) {
                        if (reader.isStartElement() &&
                                new javax.xml.namespace.QName(
                                    "http://pay.service.outwardtps.sysway.com/",
                                    "customerValidateResponse").equals(
                                    reader.getName())) {
                            // Delegate parsing of the child payload element.
                            object.setCustomerValidateResponse(com.sysway.outwardtps.service.pay.CustomerValidateResponse.Factory.parse(
                                    reader));
                        } // End of if for expected property start element
                        else {
                            // A start element we are not expecting indicates an invalid parameter was passed
                            throw new org.apache.axis2.databinding.ADBException(
                                "Unexpected subelement " + reader.getName());
                        }
                    } else {
                        reader.next();
                    }
                } // end of while loop
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }
    } //end of factory class
}
| apache-2.0 |
ONLYOFFICE/CommunityServer | module/ASC.Api/ASC.Api.CRM/CRMCalendar.cs | 4077 | /*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections.Generic;
using System.Globalization;
using ASC.Core;
using ASC.CRM.Core;
using ASC.CRM.Core.Dao;
using ASC.Web.Core;
using ASC.Web.Core.Calendars;
using ASC.Web.CRM.Core;
using Autofac;
namespace ASC.Api.CRM
{
    /// <summary>
    /// Read-only per-user calendar that exposes CRM task deadlines as
    /// calendar events. Events are non-editable and never raise alerts.
    /// </summary>
    public sealed class CRMCalendar : BaseCalendar
    {
        // Marker subclass: the attribute tells the calendar engine to treat
        // all-day events as UTC day-long spans.
        [AllDayLongUTCAttribute]
        private class Event : BaseEvent
        {
        }

        /// <summary>
        /// Builds the calendar shared with exactly one user.
        /// </summary>
        /// <param name="userId">user the calendar is published to</param>
        public CRMCalendar(Guid userId)
        {
            Context.HtmlBackgroundColor = "";
            Context.HtmlTextColor = "";
            Context.CanChangeAlertType = false;
            Context.CanChangeTimeZone = false;
            Context.GetGroupMethod = () => Web.CRM.Resources.CRMCommonResource.ProductName;

            Id = "crm_calendar";
            EventAlertType = EventAlertType.Never;
            Name = Web.CRM.Resources.CRMCommonResource.ProductName;
            Description = "";
            SharingOptions = new SharingOptions();
            SharingOptions.PublicItems.Add(new SharingOptions.PublicItem { Id = userId, IsGroup = false });
        }

        /// <summary>
        /// Loads the user's CRM tasks and converts their deadlines into
        /// calendar events within the requested window. Returns an empty
        /// list when the CRM module is not available to the current user.
        /// </summary>
        /// <param name="userId">owner whose tasks are queried</param>
        /// <param name="startDate">window start (inclusive)</param>
        /// <param name="endDate">window end (inclusive)</param>
        public override List<IEvent> LoadEvents(Guid userId, DateTime startDate, DateTime endDate)
        {
            using (var scope = DIHelper.Resolve())
            {
                var _daoFactory = scope.Resolve<DaoFactory>();
                var events = new List<IEvent>();

                if (
                    !WebItemSecurity.IsAvailableForMe(WebItemManager.CRMProductID))
                {
                    return events;
                }

                var tasks = _daoFactory.TaskDao.GetTasks(String.Empty, userId, 0, false, DateTime.MinValue,
                    DateTime.MinValue, EntityType.Any, 0, 0, 0, null);

                foreach (var t in tasks)
                {
                    // Tasks without a deadline cannot be placed on the calendar.
                    if (t.DeadLine == DateTime.MinValue) continue;

                    // A deadline at exactly midnight is treated as an all-day
                    // event; otherwise the tenant-local time is converted to UTC.
                    var allDayEvent = t.DeadLine.Hour == 0 && t.DeadLine.Minute == 0;
                    var utcDate = allDayEvent ? t.DeadLine.Date : Core.Tenants.TenantUtil.DateTimeToUtc(t.DeadLine);

                    var e = new Event
                    {
                        AlertType = EventAlertType.Never,
                        AllDayLong = allDayEvent,
                        CalendarId = Id,
                        UtcStartDate = utcDate,
                        UtcEndDate = utcDate,
                        Id = "crm_task_" + t.ID.ToString(CultureInfo.InvariantCulture),
                        Name = Web.CRM.Resources.CRMCommonResource.ProductName + ": " + t.Title,
                        Description = t.Description
                    };

                    if (IsVisibleEvent(startDate, endDate, e.UtcStartDate, e.UtcEndDate))
                        events.Add(e);
                }

                return events;
            }
        }

        /// <summary>Calendar time zone: the current tenant's zone.</summary>
        public override TimeZoneInfo TimeZone
        {
            get { return CoreContext.TenantManager.GetCurrentTenant().TimeZone; }
        }

        // An event is visible when either endpoint falls inside the window,
        // or the event fully spans the window.
        private bool IsVisibleEvent(DateTime startDate, DateTime endDate, DateTime eventStartDate, DateTime eventEndDate)
        {
            return (startDate <= eventStartDate && eventStartDate <= endDate) ||
                   (startDate <= eventEndDate && eventEndDate <= endDate) ||
                   (eventStartDate < startDate && eventEndDate > endDate);
        }
    }
} | apache-2.0 |
vcdemon/wechat-framework | wechat-api/src/main/java/com/itfvck/wechatframework/api/report/datacube/model/DataCubeStreamMsgWeekInfo.java | 546 | package com.itfvck.wechatframework.api.report.datacube.model;
import java.util.List;
/**
 * Result class -- weekly message-send statistics (WeChat data-cube
 * "upstream message week" report).
 *
 * NOTE(review): the list element type is this same class — presumably the
 * API returns a flat list of week rows shaped like the container; verify
 * against the WeChat datacube response schema.
 *
 * @author luweichao
 *
 * 2015-01-27
 */
public class DataCubeStreamMsgWeekInfo extends DataCubeBase {
    /** Serialization version marker. */
    private static final long serialVersionUID = -5224886136169067196L;

    // Week rows returned by the API.
    private List<DataCubeStreamMsgWeekInfo> list;

    public List<DataCubeStreamMsgWeekInfo> getList() {
        return list;
    }

    public void setList(List<DataCubeStreamMsgWeekInfo> list) {
        this.list = list;
    }
}
| apache-2.0 |
webframp/sfn-vault | lib/sfn-vault/callback.rb | 8457 | require 'sfn-parameters'
require 'securerandom'
require 'vault'
# Modeled after the Assume Role callback
module Sfn
class Callback
class VaultRead < Callback
  include SfnVault::Utils

  # Keys cached on disk between invocations so temporary credentials are
  # not re-generated from Vault on every command request.
  VAULT_CACHED_ITEMS = [
    :vault_lease_id,
    :vault_lease_expiration,
    :aws_access_key_id,
    :aws_secret_access_key
  ].freeze

  # Template callback. Finds template parameters of pseudo type
  # 'Vault::Generic::Secret', generates (or reuses) a secret stored in
  # Vault for each, injects the plaintext value into the stack parameter
  # config and rewrites the parameter type to CFN-native 'String'.
  #
  # @param args [Array] callback payload; first entry must provide
  #   :sparkle_stack (and may provide :stack_name)
  def template(*args)
    config[:parameters] ||= Smash.new
    stack = args.first[:sparkle_stack]
    client = vault_client
    pseudo_parameters(stack).each do |param|
      param_path = vault_path_name(args, param)
      ui.debug "Using #{param_path} for saved parameter"
      # Only generate a fresh secret when nothing exists at the path yet,
      # so repeated runs keep the previously stored value.
      unless client.logical.read(param_path)
        ui.info "Vault: No pre-existing value for parameter #{param} saving new secret"
        client.logical.write(param_path, value: random_secret)
      end
      # Read the value back from Vault and expose it to the stack as a
      # plain string parameter.
      config[:parameters][param] = client.logical.read(param_path).data[:value]
      stack.compile.parameters.set!(param).type 'String'
    end
  end

  # Use SecureRandom to generate a suitable password.
  # Length is configurable via `pseudo_parameter_length` in the vault
  # section of the sfn config (default 15 bytes before base64 encoding).
  #
  # @return [String] the generated string
  def random_secret
    SecureRandom.base64(config.fetch(:vault, :pseudo_parameter_length, 15))
  end

  # Build the Vault path where a generated secret is saved.
  # Uses `:pseudo_parameter_path` from the config when set; otherwise the
  # backend is 'secret' in CI (assumed writable) or the per-token
  # 'cubbyhole' for local development. The rest of the path is
  # <Project tag>/<stack name>/<parameter>, with Project falling back to
  # 'SparkleFormation'.
  #
  # @param args [Array] callback payload (stack + optional :stack_name)
  # @param parameter [String] template parameter to store a value for
  # @return [String] full Vault path for the secret
  def vault_path_name(args, parameter)
    pref = config.get(:vault, :pseudo_parameter_path)
    stack = args.first[:sparkle_stack]
    # Prefer an explicit stack name, then the STACK_NAME environment
    # variable, and finally the template name itself.
    stack_name = args.first[:stack_name].nil? ? ENV.fetch('STACK_NAME', stack.name).to_s : args.first[:stack_name]
    project = config[:options][:tags].fetch('Project', 'SparkleFormation')
    base = pref.nil? ? (ci_environment? ? 'secret' : 'cubbyhole') : pref
    File.join(base, project, stack_name, parameter)
  end

  # Look up all pseudo-typed parameters declared in the template.
  #
  # @param stack [SparkleFormation] instance of the stack template
  # @param parameter [String] pseudo type to match
  # @return [Array<String>] parameter names with that type
  def pseudo_parameters(stack, parameter: 'Vault::Generic::Secret')
    stack.dump.fetch('Parameters', {}).map { |k, v| k if v['Type'] == parameter }.compact
  end

  # Detect whether we are running under a recognizable CI system
  # (GoCD via GO_PIPELINE_NAME, or anything exporting CI).
  #
  # @return [TrueClass, FalseClass]
  def ci_environment?
    return true if ENV['GO_PIPELINE_NAME']
    return true if ENV['CI']
    false
  end

  # Config callback: when enabled and a vault read path is configured,
  # inject credentials read from Vault (or the local cache) into the API
  # provider configuration.
  def after_config(*_)
    # TODO: this could be done earlier if at all possible so the
    # credentials struct does not need the aws config
    if enabled? && config.fetch(:credentials, :vault_read_path)
      load_stored_session
    end
  end

  # Post-command callback: persist the current session credentials to the
  # cache file (mode 0600) so they can be reused until the lease expires.
  def after(*_)
    if enabled?
      if config.fetch(:credentials, :vault_read_path) && api.connection.aws_region
        path = cache_file
        FileUtils.touch(path)
        File.chmod(0600, path)
        values = load_stored_values(path)
        # each, not map: we mutate `values` for its side effect only
        VAULT_CACHED_ITEMS.each do |key|
          values[key] = api.connection.data[key]
        end
        File.open(path, 'w') do |file|
          file.puts MultiJson.dump(values)
        end
      end
    end
  end

  # Whether this callback is active (vault.status config, default enabled).
  #
  # @return [TrueClass, FalseClass]
  def enabled?
    config.fetch(:vault, :status, 'enabled').to_s == 'enabled'
  end

  # Location of the credential cache file.
  #
  # @return [String] path
  def cache_file
    config.fetch(:vault, :cache_file, '.sfn-vault')
  end

  # Whether a lease expiration timestamp is in the past.
  #
  # @param expiration [Integer] epoch seconds
  # @return [TrueClass, FalseClass]
  def expired?(expiration)
    Time.now.to_i >= expiration
  end

  # Read a fresh credential from the configured Vault path
  # (default "aws/creds/deploy").
  #
  # @return [Vault::Secret]
  def vault_read
    client = vault_client
    ui.debug "Have Vault client, configured with: #{client.options}"
    read_path = config.fetch(:credentials, :vault_read_path, "aws/creds/deploy")
    client.logical.read(read_path)
  end

  # Load cached session data into the API connection, refreshing the
  # credentials from Vault when the cached lease has expired. Refresh is
  # best-effort: failures are logged and the cached values stay in place.
  #
  # @return [TrueClass, FalseClass] true when cache file was usable
  def load_stored_session
    path = cache_file
    FileUtils.touch(path)
    if File.exist?(path)
      values = load_stored_values(path)
      VAULT_CACHED_ITEMS.each do |key|
        api.connection.data[key] = values[key]
        if [:aws_access_key_id, :aws_secret_access_key].member?(key)
          ui.debug "Updating environment #{key} with #{values[key]}"
          # also update environment for this process
          ENV[key.to_s] = values[key]
        end
      end
      if values[:vault_lease_expiration].nil?
        values[:vault_lease_expiration] = 0
      end
      if expired?(values[:vault_lease_expiration])
        begin
          secret = vault_read
          # Newly issued IAM credentials are not immediately usable
          # system-wide; the (configurable) sleep gives AWS time to
          # activate them.
          timeout = config.fetch(:vault, :iam_delay, 30)
          ui.info "Sleeping #{timeout}s for first time credentials system wide activation"
          sleep(timeout)
          api.connection.data[:vault_lease_id] = secret.lease_id
          api.connection.data[:vault_lease_expiration] = Time.now.to_i + secret.lease_duration
          # update keys in api connection
          api.connection.data[:aws_access_key_id] = secret.data[:access_key]
          api.connection.data[:aws_secret_access_key] = secret.data[:secret_key]
        rescue StandardError => e
          # Best effort: keep the cached credentials, but surface the
          # failure instead of swallowing it silently.
          ui.warn "Vault credential refresh failed (using cached values): #{e.class}: #{e.message}"
        end
      end
      true
    else
      false
    end
  end

  # Load stored values from the cache file; an unreadable or corrupt
  # cache yields an empty Smash rather than raising.
  #
  # @param path [String]
  # @return [Hash]
  def load_stored_values(path)
    begin
      if File.exist?(path)
        MultiJson.load(File.read(path)).to_smash
      else
        Smash.new
      end
    rescue MultiJson::ParseError
      Smash.new
    end
  end

  # Default quiet mode unless debugging is enabled.
  def quiet
    true unless config[:debug]
  end
end
end
end
| apache-2.0 |
yida-lxw/solr-5.3.1 | lucene/analysis/common/src/java/org/apache/lucene/analysis/wikipedia/WikipediaTokenizerImpl.java | 36818 | /* The following code was generated by JFlex 1.6.0 */
package org.apache.lucene.analysis.wikipedia;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
/**
* JFlex-generated tokenizer that is aware of Wikipedia syntax.
*/
@SuppressWarnings("fallthrough")
class WikipediaTokenizerImpl {
/** This character denotes the end of file */
public static final int YYEOF = -1;
/** initial size of the lookahead buffer */
private static final int ZZ_BUFFERSIZE = 4096;
/** lexical states */
public static final int YYINITIAL = 0;
public static final int CATEGORY_STATE = 2;
public static final int INTERNAL_LINK_STATE = 4;
public static final int EXTERNAL_LINK_STATE = 6;
public static final int TWO_SINGLE_QUOTES_STATE = 8;
public static final int THREE_SINGLE_QUOTES_STATE = 10;
public static final int FIVE_SINGLE_QUOTES_STATE = 12;
public static final int DOUBLE_EQUALS_STATE = 14;
public static final int DOUBLE_BRACE_STATE = 16;
public static final int STRING = 18;
/**
* ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
* ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l
* at the beginning of a line
* l is of the form l = 2*k, k a non negative integer
*/
private static final int ZZ_LEXSTATE[] = {
0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7,
8, 8, 9, 9
};
/**
* Translates characters to character classes
*/
private static final String ZZ_CMAP_PACKED =
"\11\0\1\24\1\23\1\0\1\24\1\22\22\0\1\24\1\0\1\12"+
"\1\53\2\0\1\3\1\1\4\0\1\14\1\5\1\2\1\10\12\16"+
"\1\27\1\0\1\7\1\11\1\13\1\53\1\4\2\15\1\30\5\15"+
"\1\41\21\15\1\25\1\0\1\26\1\0\1\6\1\0\1\31\1\43"+
"\2\15\1\33\1\40\1\34\1\50\1\41\4\15\1\42\1\35\1\51"+
"\1\15\1\36\1\52\1\32\3\15\1\44\1\37\1\15\1\45\1\47"+
"\1\46\102\0\27\15\1\0\37\15\1\0\u0568\15\12\17\206\15\12\17"+
"\u026c\15\12\17\166\15\12\17\166\15\12\17\166\15\12\17\166\15\12\17"+
"\167\15\11\17\166\15\12\17\166\15\12\17\166\15\12\17\340\15\12\17"+
"\166\15\12\17\u0166\15\12\17\266\15\u0100\15\u0e00\15\u1040\0\u0150\21\140\0"+
"\20\21\u0100\0\200\21\200\0\u19c0\21\100\0\u5200\21\u0c00\0\u2bb0\20\u2150\0"+
"\u0200\21\u0465\0\73\21\75\15\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\uffff\0\63\0";
/**
* Translates characters to character classes
*/
private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);
/**
* Translates DFA states to action switch labels.
*/
private static final int [] ZZ_ACTION = zzUnpackAction();
private static final String ZZ_ACTION_PACKED_0 =
"\12\0\4\1\4\2\1\3\1\4\1\1\2\5\1\6"+
"\1\5\1\7\1\5\2\10\1\11\1\5\1\12\1\11"+
"\1\13\1\14\1\15\1\16\1\15\1\17\1\20\1\10"+
"\1\21\1\10\4\22\1\23\1\24\1\25\1\26\3\0"+
"\1\27\14\0\1\30\1\31\1\32\1\33\1\11\1\0"+
"\1\34\1\35\1\36\1\0\1\37\1\0\1\40\3\0"+
"\1\41\1\42\2\43\1\42\2\44\2\0\1\43\1\0"+
"\14\43\1\42\3\0\1\11\1\45\3\0\1\46\1\47"+
"\5\0\1\50\4\0\1\50\2\0\2\50\2\0\1\11"+
"\5\0\1\31\1\42\1\43\1\51\3\0\1\11\2\0"+
"\1\52\30\0\1\53\2\0\1\54\1\55\1\56";
  /** Unpacks {@code ZZ_ACTION_PACKED_0} into the per-state action table (181 DFA states). */
  private static int [] zzUnpackAction() {
    int [] result = new int[181];
    int offset = 0;
    offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
    return result;
  }

  /**
   * Decodes a run-length encoded packed string into {@code result}.
   * The packed data is a sequence of (count, value) char pairs.
   *
   * @param packed the packed data string
   * @param offset first index of {@code result} to write
   * @param result destination array
   * @return index after the last value written
   */
  private static int zzUnpackAction(String packed, int offset, int [] result) {
    int i = 0; /* index in packed string */
    int j = offset; /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int count = packed.charAt(i++);
      int value = packed.charAt(i++);
      do result[j++] = value; while (--count > 0);
    }
    return j;
  }
/**
* Translates a state to a row index in the transition table
*/
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();
private static final String ZZ_ROWMAP_PACKED_0 =
"\0\0\0\54\0\130\0\204\0\260\0\334\0\u0108\0\u0134"+
"\0\u0160\0\u018c\0\u01b8\0\u01e4\0\u0210\0\u023c\0\u0268\0\u0294"+
"\0\u02c0\0\u02ec\0\u01b8\0\u0318\0\u0344\0\u01b8\0\u0370\0\u039c"+
"\0\u03c8\0\u03f4\0\u0420\0\u01b8\0\u0370\0\u044c\0\u0478\0\u01b8"+
"\0\u04a4\0\u04d0\0\u04fc\0\u0528\0\u0554\0\u0580\0\u05ac\0\u05d8"+
"\0\u0604\0\u0630\0\u065c\0\u01b8\0\u0688\0\u0370\0\u06b4\0\u06e0"+
"\0\u070c\0\u01b8\0\u01b8\0\u0738\0\u0764\0\u0790\0\u01b8\0\u07bc"+
"\0\u07e8\0\u0814\0\u0840\0\u086c\0\u0898\0\u08c4\0\u08f0\0\u091c"+
"\0\u0948\0\u0974\0\u09a0\0\u09cc\0\u09f8\0\u01b8\0\u01b8\0\u0a24"+
"\0\u0a50\0\u0a7c\0\u0a7c\0\u01b8\0\u0aa8\0\u0ad4\0\u0b00\0\u0b2c"+
"\0\u0b58\0\u0b84\0\u0bb0\0\u0bdc\0\u0c08\0\u0c34\0\u0c60\0\u0c8c"+
"\0\u0814\0\u0cb8\0\u0ce4\0\u0d10\0\u0d3c\0\u0d68\0\u0d94\0\u0dc0"+
"\0\u0dec\0\u0e18\0\u0e44\0\u0e70\0\u0e9c\0\u0ec8\0\u0ef4\0\u0f20"+
"\0\u0f4c\0\u0f78\0\u0fa4\0\u0fd0\0\u0ffc\0\u1028\0\u1054\0\u01b8"+
"\0\u1080\0\u10ac\0\u10d8\0\u1104\0\u01b8\0\u1130\0\u115c\0\u1188"+
"\0\u11b4\0\u11e0\0\u120c\0\u1238\0\u1264\0\u1290\0\u12bc\0\u12e8"+
"\0\u1314\0\u1340\0\u07e8\0\u0974\0\u136c\0\u1398\0\u13c4\0\u13f0"+
"\0\u141c\0\u1448\0\u1474\0\u14a0\0\u01b8\0\u14cc\0\u14f8\0\u1524"+
"\0\u1550\0\u157c\0\u15a8\0\u15d4\0\u1600\0\u162c\0\u01b8\0\u1658"+
"\0\u1684\0\u16b0\0\u16dc\0\u1708\0\u1734\0\u1760\0\u178c\0\u17b8"+
"\0\u17e4\0\u1810\0\u183c\0\u1868\0\u1894\0\u18c0\0\u18ec\0\u1918"+
"\0\u1944\0\u1970\0\u199c\0\u19c8\0\u19f4\0\u1a20\0\u1a4c\0\u1a78"+
"\0\u1aa4\0\u1ad0\0\u01b8\0\u01b8\0\u01b8";
  /** Unpacks {@code ZZ_ROWMAP_PACKED_0} into the state-to-transition-row index table. */
  private static int [] zzUnpackRowMap() {
    int [] result = new int[181];
    int offset = 0;
    offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
    return result;
  }

  /**
   * Decodes packed row-map data: each 32-bit entry is stored as two chars
   * (high 16 bits first), with no run-length compression.
   *
   * @param packed the packed data string
   * @param offset first index of {@code result} to write
   * @param result destination array
   * @return index after the last value written
   */
  private static int zzUnpackRowMap(String packed, int offset, int [] result) {
    int i = 0; /* index in packed string */
    int j = offset; /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
      int high = packed.charAt(i++) << 16;
      result[j++] = high | packed.charAt(i++);
    }
    return j;
  }
/**
* The transition table of the DFA
*/
private static final int [] ZZ_TRANS = zzUnpackTrans();
private static final String ZZ_TRANS_PACKED_0 =
"\1\13\1\14\5\13\1\15\1\13\1\16\3\13\1\17"+
"\1\20\1\21\1\22\1\23\3\13\1\24\2\13\15\17"+
"\1\25\2\13\3\17\1\13\7\26\1\27\5\26\4\30"+
"\5\26\1\31\1\26\15\30\3\26\3\30\10\26\1\27"+
"\5\26\4\32\5\26\1\33\1\26\15\32\3\26\3\32"+
"\1\26\7\34\1\35\5\34\4\36\1\34\1\37\2\26"+
"\1\34\1\40\1\34\15\36\3\34\1\41\2\36\2\34"+
"\1\42\5\34\1\35\5\34\4\43\4\34\1\44\2\34"+
"\15\43\3\34\3\43\10\34\1\35\5\34\4\45\4\34"+
"\1\44\2\34\15\45\3\34\3\45\10\34\1\35\5\34"+
"\4\45\4\34\1\46\2\34\15\45\3\34\3\45\10\34"+
"\1\35\1\34\1\47\3\34\4\50\7\34\15\50\3\34"+
"\3\50\10\34\1\51\5\34\4\52\7\34\15\52\1\34"+
"\1\53\1\34\3\52\1\34\1\54\1\55\5\54\1\56"+
"\1\54\1\57\3\54\4\60\4\54\1\61\2\54\15\60"+
"\2\54\1\62\3\60\1\54\55\0\1\63\62\0\1\64"+
"\4\0\4\65\7\0\6\65\1\66\6\65\3\0\3\65"+
"\12\0\1\67\43\0\1\70\1\71\1\72\1\73\2\74"+
"\1\0\1\75\3\0\1\75\1\17\1\20\1\21\1\22"+
"\7\0\15\17\3\0\3\17\3\0\1\76\1\0\1\77"+
"\2\100\1\0\1\101\3\0\1\101\3\20\1\22\7\0"+
"\15\20\3\0\3\20\2\0\1\70\1\102\1\72\1\73"+
"\2\100\1\0\1\101\3\0\1\101\1\21\1\20\1\21"+
"\1\22\7\0\15\21\3\0\3\21\3\0\1\103\1\0"+
"\1\77\2\74\1\0\1\75\3\0\1\75\4\22\7\0"+
"\15\22\3\0\3\22\26\0\1\104\73\0\1\105\16\0"+
"\1\64\4\0\4\65\7\0\15\65\3\0\3\65\16\0"+
"\4\30\7\0\15\30\3\0\3\30\27\0\1\106\42\0"+
"\4\32\7\0\15\32\3\0\3\32\27\0\1\107\42\0"+
"\4\36\7\0\15\36\3\0\3\36\24\0\1\26\45\0"+
"\4\36\7\0\2\36\1\110\12\36\3\0\3\36\2\0"+
"\1\111\67\0\4\43\7\0\15\43\3\0\3\43\26\0"+
"\1\112\43\0\4\45\7\0\15\45\3\0\3\45\26\0"+
"\1\113\37\0\1\114\57\0\4\50\7\0\15\50\3\0"+
"\3\50\11\0\1\115\4\0\4\65\7\0\15\65\3\0"+
"\3\65\16\0\4\52\7\0\15\52\3\0\3\52\47\0"+
"\1\114\6\0\1\116\63\0\1\117\57\0\4\60\7\0"+
"\15\60\3\0\3\60\26\0\1\120\43\0\4\65\7\0"+
"\15\65\3\0\3\65\14\0\1\34\1\0\4\121\1\0"+
"\3\122\3\0\15\121\3\0\3\121\14\0\1\34\1\0"+
"\4\121\1\0\3\122\3\0\3\121\1\123\11\121\3\0"+
"\3\121\16\0\1\124\1\0\1\124\10\0\15\124\3\0"+
"\3\124\16\0\1\125\1\126\1\127\1\130\7\0\15\125"+
"\3\0\3\125\16\0\1\131\1\0\1\131\10\0\15\131"+
"\3\0\3\131\16\0\1\132\1\133\1\132\1\133\7\0"+
"\15\132\3\0\3\132\16\0\1\134\2\135\1\136\7\0"+
"\15\134\3\0\3\134\16\0\1\75\2\137\10\0\15\75"+
"\3\0\3\75\16\0\1\140\2\141\1\142\7\0\15\140"+
"\3\0\3\140\16\0\4\133\7\0\15\133\3\0\3\133"+
"\16\0\1\143\2\144\1\145\7\0\15\143\3\0\3\143"+
"\16\0\1\146\2\147\1\150\7\0\15\146\3\0\3\146"+
"\16\0\1\151\1\141\1\152\1\142\7\0\15\151\3\0"+
"\3\151\16\0\1\153\2\126\1\130\7\0\15\153\3\0"+
"\3\153\30\0\1\154\1\155\64\0\1\156\27\0\4\36"+
"\7\0\2\36\1\157\12\36\3\0\3\36\2\0\1\160"+
"\101\0\1\161\1\162\40\0\4\65\7\0\6\65\1\163"+
"\6\65\3\0\3\65\2\0\1\164\63\0\1\165\71\0"+
"\1\166\1\167\34\0\1\170\1\0\1\34\1\0\4\121"+
"\1\0\3\122\3\0\15\121\3\0\3\121\16\0\4\171"+
"\1\0\3\122\3\0\15\171\3\0\3\171\12\0\1\170"+
"\1\0\1\34\1\0\4\121\1\0\3\122\3\0\10\121"+
"\1\172\4\121\3\0\3\121\2\0\1\70\13\0\1\124"+
"\1\0\1\124\10\0\15\124\3\0\3\124\3\0\1\173"+
"\1\0\1\77\2\174\6\0\1\125\1\126\1\127\1\130"+
"\7\0\15\125\3\0\3\125\3\0\1\175\1\0\1\77"+
"\2\176\1\0\1\177\3\0\1\177\3\126\1\130\7\0"+
"\15\126\3\0\3\126\3\0\1\200\1\0\1\77\2\176"+
"\1\0\1\177\3\0\1\177\1\127\1\126\1\127\1\130"+
"\7\0\15\127\3\0\3\127\3\0\1\201\1\0\1\77"+
"\2\174\6\0\4\130\7\0\15\130\3\0\3\130\3\0"+
"\1\202\2\0\1\202\7\0\1\132\1\133\1\132\1\133"+
"\7\0\15\132\3\0\3\132\3\0\1\202\2\0\1\202"+
"\7\0\4\133\7\0\15\133\3\0\3\133\3\0\1\174"+
"\1\0\1\77\2\174\6\0\1\134\2\135\1\136\7\0"+
"\15\134\3\0\3\134\3\0\1\176\1\0\1\77\2\176"+
"\1\0\1\177\3\0\1\177\3\135\1\136\7\0\15\135"+
"\3\0\3\135\3\0\1\174\1\0\1\77\2\174\6\0"+
"\4\136\7\0\15\136\3\0\3\136\3\0\1\177\2\0"+
"\2\177\1\0\1\177\3\0\1\177\3\137\10\0\15\137"+
"\3\0\3\137\3\0\1\103\1\0\1\77\2\74\1\0"+
"\1\75\3\0\1\75\1\140\2\141\1\142\7\0\15\140"+
"\3\0\3\140\3\0\1\76\1\0\1\77\2\100\1\0"+
"\1\101\3\0\1\101\3\141\1\142\7\0\15\141\3\0"+
"\3\141\3\0\1\103\1\0\1\77\2\74\1\0\1\75"+
"\3\0\1\75\4\142\7\0\15\142\3\0\3\142\3\0"+
"\1\74\1\0\1\77\2\74\1\0\1\75\3\0\1\75"+
"\1\143\2\144\1\145\7\0\15\143\3\0\3\143\3\0"+
"\1\100\1\0\1\77\2\100\1\0\1\101\3\0\1\101"+
"\3\144\1\145\7\0\15\144\3\0\3\144\3\0\1\74"+
"\1\0\1\77\2\74\1\0\1\75\3\0\1\75\4\145"+
"\7\0\15\145\3\0\3\145\3\0\1\75\2\0\2\75"+
"\1\0\1\75\3\0\1\75\1\146\2\147\1\150\7\0"+
"\15\146\3\0\3\146\3\0\1\101\2\0\2\101\1\0"+
"\1\101\3\0\1\101\3\147\1\150\7\0\15\147\3\0"+
"\3\147\3\0\1\75\2\0\2\75\1\0\1\75\3\0"+
"\1\75\4\150\7\0\15\150\3\0\3\150\3\0\1\203"+
"\1\0\1\77\2\74\1\0\1\75\3\0\1\75\1\151"+
"\1\141\1\152\1\142\7\0\15\151\3\0\3\151\3\0"+
"\1\204\1\0\1\77\2\100\1\0\1\101\3\0\1\101"+
"\1\152\1\141\1\152\1\142\7\0\15\152\3\0\3\152"+
"\3\0\1\201\1\0\1\77\2\174\6\0\1\153\2\126"+
"\1\130\7\0\15\153\3\0\3\153\31\0\1\155\54\0"+
"\1\205\64\0\1\206\26\0\4\36\7\0\15\36\3\0"+
"\1\36\1\207\1\36\31\0\1\162\54\0\1\210\35\0"+
"\1\34\1\0\4\121\1\0\3\122\3\0\3\121\1\211"+
"\11\121\3\0\3\121\2\0\1\212\102\0\1\167\54\0"+
"\1\213\34\0\1\214\52\0\1\170\3\0\4\171\7\0"+
"\15\171\3\0\3\171\12\0\1\170\1\0\1\215\1\0"+
"\4\121\1\0\3\122\3\0\15\121\3\0\3\121\16\0"+
"\1\216\1\130\1\216\1\130\7\0\15\216\3\0\3\216"+
"\16\0\4\136\7\0\15\136\3\0\3\136\16\0\4\142"+
"\7\0\15\142\3\0\3\142\16\0\4\145\7\0\15\145"+
"\3\0\3\145\16\0\4\150\7\0\15\150\3\0\3\150"+
"\16\0\1\217\1\142\1\217\1\142\7\0\15\217\3\0"+
"\3\217\16\0\4\130\7\0\15\130\3\0\3\130\16\0"+
"\4\220\7\0\15\220\3\0\3\220\33\0\1\221\61\0"+
"\1\222\30\0\4\36\6\0\1\223\15\36\3\0\2\36"+
"\1\224\33\0\1\225\32\0\1\170\1\0\1\34\1\0"+
"\4\121\1\0\3\122\3\0\10\121\1\226\4\121\3\0"+
"\3\121\2\0\1\227\104\0\1\230\36\0\4\231\7\0"+
"\15\231\3\0\3\231\3\0\1\173\1\0\1\77\2\174"+
"\6\0\1\216\1\130\1\216\1\130\7\0\15\216\3\0"+
"\3\216\3\0\1\203\1\0\1\77\2\74\1\0\1\75"+
"\3\0\1\75\1\217\1\142\1\217\1\142\7\0\15\217"+
"\3\0\3\217\3\0\1\202\2\0\1\202\7\0\4\220"+
"\7\0\15\220\3\0\3\220\34\0\1\232\55\0\1\233"+
"\26\0\1\234\60\0\4\36\6\0\1\223\15\36\3\0"+
"\3\36\34\0\1\235\31\0\1\170\1\0\1\114\1\0"+
"\4\121\1\0\3\122\3\0\15\121\3\0\3\121\34\0"+
"\1\236\32\0\1\237\2\0\4\231\7\0\15\231\3\0"+
"\3\231\35\0\1\240\62\0\1\241\20\0\1\242\77\0"+
"\1\243\53\0\1\244\32\0\1\34\1\0\4\171\1\0"+
"\3\122\3\0\15\171\3\0\3\171\36\0\1\245\53\0"+
"\1\246\33\0\4\247\7\0\15\247\3\0\3\247\36\0"+
"\1\250\53\0\1\251\54\0\1\252\61\0\1\253\11\0"+
"\1\254\12\0\4\247\7\0\15\247\3\0\3\247\37\0"+
"\1\255\53\0\1\256\54\0\1\257\22\0\1\13\62\0"+
"\4\260\7\0\15\260\3\0\3\260\40\0\1\261\53\0"+
"\1\262\43\0\1\263\26\0\2\260\1\0\2\260\1\0"+
"\2\260\2\0\5\260\7\0\15\260\3\0\4\260\27\0"+
"\1\264\53\0\1\265\24\0";
/**
 * Allocates the DFA transition table and fills it from the packed string data.
 *
 * @return the fully unpacked transition table
 */
private static int [] zzUnpackTrans() {
  int[] unpacked = new int[6908];
  zzUnpackTrans(ZZ_TRANS_PACKED_0, 0, unpacked);
  return unpacked;
}
/**
* Decodes one run-length packed string into <code>result</code> starting at
* <code>offset</code>. The packed data is a sequence of (count, value)
* character pairs; each pair expands to <code>count</code> copies of
* <code>value</code>. Transition values are stored off-by-one so that -1
* (no transition) survives the char encoding, hence the decrement below.
*
* @param packed the packed transition data
* @param offset first index in <code>result</code> to write
* @param result the array receiving the unpacked transitions
* @return the index of the first unwritten slot in <code>result</code>
*/
private static int zzUnpackTrans(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
value--; /* shift back: -1 means "no transition" */
do result[j++] = value; while (--count > 0);
}
return j;
}
/* error codes used by zzScanError() */
private static final int ZZ_UNKNOWN_ERROR = 0;
private static final int ZZ_NO_MATCH = 1;
private static final int ZZ_PUSHBACK_2BIG = 2;
/* error messages for the codes above */
private static final String ZZ_ERROR_MSG[] = {
// fixed typo "Unkown" -> "Unknown" (same fix as in later JFlex skeletons)
"Unknown internal scanner error",
"Error: could not match input",
"Error: pushback value was too large"
};
/**
* ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
* (bit 0 = accepting state, bit 3 = stop scanning on match).
*/
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();
/* run-length packed (count, value) pairs; decoded by zzUnpackAttribute() */
private static final String ZZ_ATTRIBUTE_PACKED_0 =
"\12\0\1\11\7\1\1\11\2\1\1\11\5\1\1\11"+
"\3\1\1\11\13\1\1\11\5\1\2\11\3\0\1\11"+
"\14\0\2\1\2\11\1\1\1\0\2\1\1\11\1\0"+
"\1\1\1\0\1\1\3\0\7\1\2\0\1\1\1\0"+
"\15\1\3\0\1\1\1\11\3\0\1\1\1\11\5\0"+
"\1\1\4\0\1\1\2\0\2\1\2\0\1\1\5\0"+
"\1\11\3\1\3\0\1\1\2\0\1\11\30\0\1\1"+
"\2\0\3\11";
/**
 * Allocates the per-state attribute table and fills it from the packed string data.
 *
 * @return the fully unpacked attribute table
 */
private static int [] zzUnpackAttribute() {
  int[] unpacked = new int[181];
  zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, 0, unpacked);
  return unpacked;
}
/**
* Decodes one run-length packed attribute string into <code>result</code>.
* The packed data is a sequence of (count, value) character pairs; each pair
* expands to <code>count</code> copies of <code>value</code> (no offset shift,
* unlike the transition-table decoder).
*
* @param packed the packed attribute data
* @param offset first index in <code>result</code> to write
* @param result the array receiving the unpacked attributes
* @return the index of the first unwritten slot in <code>result</code>
*/
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
do result[j++] = value; while (--count > 0);
}
return j;
}
/** the input device */
private java.io.Reader zzReader;
/** the current state of the DFA */
private int zzState;
/** the current lexical state */
private int zzLexicalState = YYINITIAL;
/** this buffer contains the current text to be matched and is
the source of the yytext() string */
private char zzBuffer[] = new char[ZZ_BUFFERSIZE];
/** the textposition at the last accepting state */
private int zzMarkedPos;
/** the current text position in the buffer */
private int zzCurrentPos;
/** startRead marks the beginning of the yytext() string in the buffer */
private int zzStartRead;
/** endRead marks the last character in the buffer, that has been read
from input */
private int zzEndRead;
/** number of newlines encountered up to the start of the matched text */
private int yyline;
/** the number of characters up to the start of the matched text */
private int yychar;
/**
* the number of characters from the last newline up to the start of the
* matched text
*/
private int yycolumn;
/**
* zzAtBOL == true <=> the scanner is currently at the beginning of a line
*/
private boolean zzAtBOL = true;
/** zzAtEOF == true <=> the scanner is at the EOF */
private boolean zzAtEOF;
/** denotes if the user-EOF-code has already been executed */
private boolean zzEOFDone;
/**
* The number of occupied positions in zzBuffer beyond zzEndRead.
* When a lead/high surrogate has been read from the input stream
* into the final zzBuffer position, this will have a value of 1;
* otherwise, it will have a value of 0.
*/
private int zzFinalHighSurrogate = 0;
/* user code: */
// Token type ids mirrored from WikipediaTokenizer so grammar actions can
// return them directly as the result of getNextToken().
public static final int ALPHANUM = WikipediaTokenizer.ALPHANUM_ID;
public static final int APOSTROPHE = WikipediaTokenizer.APOSTROPHE_ID;
public static final int ACRONYM = WikipediaTokenizer.ACRONYM_ID;
public static final int COMPANY = WikipediaTokenizer.COMPANY_ID;
public static final int EMAIL = WikipediaTokenizer.EMAIL_ID;
public static final int HOST = WikipediaTokenizer.HOST_ID;
public static final int NUM = WikipediaTokenizer.NUM_ID;
public static final int CJ = WikipediaTokenizer.CJ_ID;
public static final int INTERNAL_LINK = WikipediaTokenizer.INTERNAL_LINK_ID;
public static final int EXTERNAL_LINK = WikipediaTokenizer.EXTERNAL_LINK_ID;
public static final int CITATION = WikipediaTokenizer.CITATION_ID;
public static final int CATEGORY = WikipediaTokenizer.CATEGORY_ID;
public static final int BOLD = WikipediaTokenizer.BOLD_ID;
public static final int ITALICS = WikipediaTokenizer.ITALICS_ID;
public static final int BOLD_ITALICS = WikipediaTokenizer.BOLD_ITALICS_ID;
public static final int HEADING = WikipediaTokenizer.HEADING_ID;
public static final int SUB_HEADING = WikipediaTokenizer.SUB_HEADING_ID;
public static final int EXTERNAL_LINK_URL = WikipediaTokenizer.EXTERNAL_LINK_URL_ID;
// type id of the token currently being assembled (one of the ids above)
private int currentTokType;
// tracks nesting of paired wiki markup (e.g. quote runs) inside a token
private int numBalanced = 0;
// position increment reported for the next token (0 collapses positions)
private int positionInc = 1;
// number of tokens emitted for the current external link
private int numLinkToks = 0;
//Anytime we start a new on a Wiki reserved token (category, link, etc.) this value will be 0, otherwise it will be the number of tokens seen
//this can be useful for detecting when a new reserved token is encountered
//see https://issues.apache.org/jira/browse/LUCENE-1133
private int numWikiTokensSeen = 0;
public static final String [] TOKEN_TYPES = WikipediaTokenizer.TOKEN_TYPES;
/**
Returns the number of tokens seen inside a category or link, etc.
@return the number of tokens seen inside the context of wiki syntax.
**/
public final int getNumWikiTokensSeen(){
return numWikiTokensSeen;
}
/** @return the character offset of the start of the matched text */
public final int yychar()
{
return yychar;
}
/** @return the position increment to apply to the next emitted token */
public final int getPositionIncrement(){
return positionInc;
}
/**
* Fills Lucene token with the current token text.
*/
final void getText(CharTermAttribute t) {
t.copyBuffer(zzBuffer, zzStartRead, zzMarkedPos-zzStartRead);
}
/**
* Appends the current token text to the given buffer.
* @return the number of characters appended
*/
final int setText(StringBuilder buffer){
int length = zzMarkedPos - zzStartRead;
buffer.append(zzBuffer, zzStartRead, length);
return length;
}
/** Resets all user-code state to its initial values (see field docs above). */
final void reset() {
currentTokType = 0;
numBalanced = 0;
positionInc = 1;
numLinkToks = 0;
numWikiTokensSeen = 0;
}
/**
* Creates a new scanner
*
* @param in the java.io.Reader to read input from.
*/
WikipediaTokenizerImpl(java.io.Reader in) {
this.zzReader = in;
}
/**
* Unpacks the compressed character translation table.
*
* @param packed the packed character translation table
* @return the unpacked character translation table
*/
private static char [] zzUnpackCMap(String packed) {
// one slot per Unicode code point (0x0 .. 0x10FFFF)
char [] map = new char[0x110000];
int i = 0; /* index in packed string */
int j = 0; /* index in unpacked array */
// 262 is the generator-emitted length of the packed (count, value) pair data
while (i < 262) {
int count = packed.charAt(i++);
char value = packed.charAt(i++);
do map[j++] = value; while (--count > 0);
}
return map;
}
/**
* Refills the input buffer.
*
* Compacts the buffer (dropping already-consumed text), doubles it if full,
* then reads as much new input as fits. A trailing lead surrogate is held
* back (zzFinalHighSurrogate) so a code point is never split across refills.
*
* @return <code>false</code>, iff there was new input.
*
* @exception java.io.IOException if any I/O-Error occurs
*/
private boolean zzRefill() throws java.io.IOException {
/* first: make room (if you can) */
if (zzStartRead > 0) {
zzEndRead += zzFinalHighSurrogate;
zzFinalHighSurrogate = 0;
System.arraycopy(zzBuffer, zzStartRead,
zzBuffer, 0,
zzEndRead-zzStartRead);
/* translate stored positions */
zzEndRead-= zzStartRead;
zzCurrentPos-= zzStartRead;
zzMarkedPos-= zzStartRead;
zzStartRead = 0;
}
/* is the buffer big enough? */
if (zzCurrentPos >= zzBuffer.length - zzFinalHighSurrogate) {
/* if not: blow it up */
char newBuffer[] = new char[zzBuffer.length*2];
System.arraycopy(zzBuffer, 0, newBuffer, 0, zzBuffer.length);
zzBuffer = newBuffer;
zzEndRead += zzFinalHighSurrogate;
zzFinalHighSurrogate = 0;
}
/* fill the buffer with new input */
int requested = zzBuffer.length - zzEndRead;
int totalRead = 0;
while (totalRead < requested) {
int numRead = zzReader.read(zzBuffer, zzEndRead + totalRead, requested - totalRead);
if (numRead == -1) {
break;
}
totalRead += numRead;
}
if (totalRead > 0) {
zzEndRead += totalRead;
if (totalRead == requested) { /* possibly more input available */
if (Character.isHighSurrogate(zzBuffer[zzEndRead - 1])) {
// hold back the lead surrogate so the pair is read together next refill
--zzEndRead;
zzFinalHighSurrogate = 1;
}
}
return false;
}
// totalRead = 0: End of stream
return true;
}
/**
 * Closes the input stream and marks the scanner as exhausted.
 *
 * @throws java.io.IOException if closing the underlying reader fails
 */
public final void yyclose() throws java.io.IOException {
  zzAtEOF = true;            /* indicate end of file */
  zzEndRead = zzStartRead;   /* invalidate buffered characters */
  if (zzReader != null) {
    zzReader.close();
  }
}
/**
 * Resets the scanner to read from a new input stream.
 * Does not close the old reader.
 *
 * All internal variables are reset, the old input stream
 * <b>cannot</b> be reused (internal buffer is discarded and lost).
 * Lexical state is set to <tt>ZZ_INITIAL</tt>.
 *
 * Internal scan buffer is resized down to its initial length, if it has grown.
 *
 * @param reader the new input stream
 */
public final void yyreset(java.io.Reader reader) {
  zzReader = reader;
  // reset end-of-file bookkeeping
  zzAtBOL = true;
  zzAtEOF = false;
  zzEOFDone = false;
  // invalidate all buffer positions
  zzStartRead = 0;
  zzEndRead = 0;
  zzCurrentPos = 0;
  zzMarkedPos = 0;
  zzFinalHighSurrogate = 0;
  // reset location tracking and the lexical state
  yyline = 0;
  yychar = 0;
  yycolumn = 0;
  zzLexicalState = YYINITIAL;
  // shrink a grown buffer back to its initial capacity
  if (zzBuffer.length > ZZ_BUFFERSIZE) {
    zzBuffer = new char[ZZ_BUFFERSIZE];
  }
}
/**
* Returns the current lexical state.
* @return the current lexical state
*/
public final int yystate() {
return zzLexicalState;
}
/**
* Enters a new lexical state
*
* @param newState the new lexical state
*/
public final void yybegin(int newState) {
zzLexicalState = newState;
}
/**
* Returns the text matched by the current regular expression.
* @return a new String holding the matched text
*/
public final String yytext() {
return new String( zzBuffer, zzStartRead, zzMarkedPos-zzStartRead );
}
/**
* Returns the character at position <tt>pos</tt> from the
* matched text.
*
* It is equivalent to yytext().charAt(pos), but faster
*
* @param pos the position of the character to fetch.
* A value from 0 to yylength()-1.
*
* @return the character at position pos
*/
public final char yycharat(int pos) {
return zzBuffer[zzStartRead+pos];
}
/**
* Returns the length of the matched text region.
* @return the number of chars in the matched text
*/
public final int yylength() {
return zzMarkedPos-zzStartRead;
}
/**
 * Reports an error that occured while scanning.
 *
 * In a wellformed scanner (no or only correct usage of
 * yypushback(int) and a match-all fallback rule) this method
 * will only be called with things that "Can't Possibly Happen".
 * If this method is called, something is seriously wrong
 * (e.g. a JFlex bug producing a faulty scanner etc.).
 *
 * Usual syntax/scanner level error handling should be done
 * in error fallback rules.
 *
 * @param errorCode the code of the errormessage to display
 */
private void zzScanError(int errorCode) {
  // Out-of-range codes fall back to the generic message, exactly as the
  // original try/catch over the array access did.
  final String message;
  if (errorCode >= 0 && errorCode < ZZ_ERROR_MSG.length) {
    message = ZZ_ERROR_MSG[errorCode];
  } else {
    message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
  }
  throw new Error(message);
}
/**
* Pushes the specified amount of characters back into the input stream.
*
* They will be read again by the next call of the scanning method
*
* @param number the number of characters to be read again.
* This number must not be greater than yylength()!
*/
public void yypushback(int number) {
if ( number > yylength() )
zzScanError(ZZ_PUSHBACK_2BIG);
zzMarkedPos -= number;
}
/**
* Resumes scanning until the next regular expression is matched,
* the end of input is encountered or an I/O-Error occurs.
*
* @return the next token
* @exception java.io.IOException if any I/O-Error occurs
*/
public int getNextToken() throws java.io.IOException {
int zzInput;
int zzAction;
// cached fields:
int zzCurrentPosL;
int zzMarkedPosL;
int zzEndReadL = zzEndRead;
char [] zzBufferL = zzBuffer;
char [] zzCMapL = ZZ_CMAP;
int [] zzTransL = ZZ_TRANS;
int [] zzRowMapL = ZZ_ROWMAP;
int [] zzAttrL = ZZ_ATTRIBUTE;
// outer loop: each iteration attempts to match one token starting at zzMarkedPos
while (true) {
zzMarkedPosL = zzMarkedPos;
yychar+= zzMarkedPosL-zzStartRead;
zzAction = -1;
zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;
zzState = ZZ_LEXSTATE[zzLexicalState];
// set up zzAction for empty match case:
int zzAttributes = zzAttrL[zzState];
if ( (zzAttributes & 1) == 1 ) {
zzAction = zzState;
}
// DFA walk; breaking out of this labelled block means the longest match is known
zzForAction: {
while (true) {
if (zzCurrentPosL < zzEndReadL) {
zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL, zzEndReadL);
zzCurrentPosL += Character.charCount(zzInput);
}
else if (zzAtEOF) {
zzInput = YYEOF;
break zzForAction;
}
else {
// store back cached positions
zzCurrentPos = zzCurrentPosL;
zzMarkedPos = zzMarkedPosL;
boolean eof = zzRefill();
// get translated positions and possibly new buffer
zzCurrentPosL = zzCurrentPos;
zzMarkedPosL = zzMarkedPos;
zzBufferL = zzBuffer;
zzEndReadL = zzEndRead;
if (eof) {
zzInput = YYEOF;
break zzForAction;
}
else {
zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL, zzEndReadL);
zzCurrentPosL += Character.charCount(zzInput);
}
}
// bit 0 of the attribute = accepting state; bit 3 = stop scanning immediately
int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
if (zzNext == -1) break zzForAction;
zzState = zzNext;
zzAttributes = zzAttrL[zzState];
if ( (zzAttributes & 1) == 1 ) {
zzAction = zzState;
zzMarkedPosL = zzCurrentPosL;
if ( (zzAttributes & 8) == 8 ) break zzForAction;
}
}
}
// store back cached position
zzMarkedPos = zzMarkedPosL;
// dispatch on the action of the last accepting state (-1 = no match at all)
switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
case 1:
{ numWikiTokensSeen = 0; positionInc = 1; /* Break so we don't hit fall-through warning: */ break;
}
case 47: break;
case 2:
{ positionInc = 1; return ALPHANUM;
}
case 48: break;
case 3:
{ positionInc = 1; return CJ;
}
case 49: break;
case 4:
{ numWikiTokensSeen = 0; positionInc = 1; currentTokType = EXTERNAL_LINK_URL; yybegin(EXTERNAL_LINK_STATE);/* Break so we don't hit fall-through warning: */ break;
}
case 50: break;
case 5:
{ positionInc = 1; /* Break so we don't hit fall-through warning: */ break;
}
case 51: break;
case 6:
{ yybegin(CATEGORY_STATE); numWikiTokensSeen++; return currentTokType;
}
case 52: break;
case 7:
{ yybegin(INTERNAL_LINK_STATE); numWikiTokensSeen++; return currentTokType;
}
case 53: break;
case 8:
{ /* Break so we don't hit fall-through warning: */ break;/* ignore */
}
case 54: break;
case 9:
{ if (numLinkToks == 0){positionInc = 0;} else{positionInc = 1;} numWikiTokensSeen++; currentTokType = EXTERNAL_LINK; yybegin(EXTERNAL_LINK_STATE); numLinkToks++; return currentTokType;
}
case 55: break;
case 10:
{ numLinkToks = 0; positionInc = 0; yybegin(YYINITIAL); /* Break so we don't hit fall-through warning: */ break;
}
case 56: break;
case 11:
{ currentTokType = BOLD; yybegin(THREE_SINGLE_QUOTES_STATE); /* Break so we don't hit fall-through warning: */ break;
}
case 57: break;
case 12:
{ currentTokType = ITALICS; numWikiTokensSeen++; yybegin(STRING); return currentTokType;/*italics*/
}
case 58: break;
case 13:
{ currentTokType = EXTERNAL_LINK; numWikiTokensSeen = 0; yybegin(EXTERNAL_LINK_STATE); /* Break so we don't hit fall-through warning: */ break;
}
case 59: break;
case 14:
{ yybegin(STRING); numWikiTokensSeen++; return currentTokType;
}
case 60: break;
case 15:
{ currentTokType = SUB_HEADING; numWikiTokensSeen = 0; yybegin(STRING); /* Break so we don't hit fall-through warning: */ break;
}
case 61: break;
case 16:
{ currentTokType = HEADING; yybegin(DOUBLE_EQUALS_STATE); numWikiTokensSeen++; return currentTokType;
}
case 62: break;
case 17:
{ yybegin(DOUBLE_BRACE_STATE); numWikiTokensSeen = 0; return currentTokType;
}
case 63: break;
case 18:
{ /* Break so we don't hit fall-through warning: */ break;/* ignore STRING */
}
case 64: break;
case 19:
{ yybegin(STRING); numWikiTokensSeen++; return currentTokType;/* STRING ALPHANUM*/
}
case 65: break;
case 20:
{ numBalanced = 0; numWikiTokensSeen = 0; currentTokType = EXTERNAL_LINK;yybegin(EXTERNAL_LINK_STATE); /* Break so we don't hit fall-through warning: */ break;
}
case 66: break;
case 21:
{ yybegin(STRING); return currentTokType;/*pipe*/
}
case 67: break;
case 22:
{ numWikiTokensSeen = 0; positionInc = 1; if (numBalanced == 0){numBalanced++;yybegin(TWO_SINGLE_QUOTES_STATE);} else{numBalanced = 0;}/* Break so we don't hit fall-through warning: */ break;
}
case 68: break;
case 23:
{ numWikiTokensSeen = 0; positionInc = 1; yybegin(DOUBLE_EQUALS_STATE);/* Break so we don't hit fall-through warning: */ break;
}
case 69: break;
case 24:
{ numWikiTokensSeen = 0; positionInc = 1; currentTokType = INTERNAL_LINK; yybegin(INTERNAL_LINK_STATE);/* Break so we don't hit fall-through warning: */ break;
}
case 70: break;
case 25:
{ numWikiTokensSeen = 0; positionInc = 1; currentTokType = CITATION; yybegin(DOUBLE_BRACE_STATE);/* Break so we don't hit fall-through warning: */ break;
}
case 71: break;
case 26:
{ yybegin(YYINITIAL);/* Break so we don't hit fall-through warning: */ break;
}
case 72: break;
case 27:
{ numLinkToks = 0; yybegin(YYINITIAL); /* Break so we don't hit fall-through warning: */ break;
}
case 73: break;
case 28:
{ currentTokType = INTERNAL_LINK; numWikiTokensSeen = 0; yybegin(INTERNAL_LINK_STATE); /* Break so we don't hit fall-through warning: */ break;
}
case 74: break;
case 29:
{ currentTokType = INTERNAL_LINK; numWikiTokensSeen = 0; yybegin(INTERNAL_LINK_STATE); /* Break so we don't hit fall-through warning: */ break;
}
case 75: break;
case 30:
{ yybegin(YYINITIAL); /* Break so we don't hit fall-through warning: */ break;
}
case 76: break;
case 31:
{ numBalanced = 0;currentTokType = ALPHANUM; yybegin(YYINITIAL); /* Break so we don't hit fall-through warning: */ break;/*end italics*/
}
case 77: break;
case 32:
{ numBalanced = 0; numWikiTokensSeen = 0; currentTokType = INTERNAL_LINK;yybegin(INTERNAL_LINK_STATE); /* Break so we don't hit fall-through warning: */ break;
}
case 78: break;
case 33:
{ positionInc = 1; return APOSTROPHE;
}
case 79: break;
case 34:
{ positionInc = 1; return HOST;
}
case 80: break;
case 35:
{ positionInc = 1; return NUM;
}
case 81: break;
case 36:
{ positionInc = 1; return COMPANY;
}
case 82: break;
case 37:
{ currentTokType = BOLD_ITALICS; yybegin(FIVE_SINGLE_QUOTES_STATE); /* Break so we don't hit fall-through warning: */ break;
}
case 83: break;
case 38:
{ numBalanced = 0;currentTokType = ALPHANUM;yybegin(YYINITIAL); /* Break so we don't hit fall-through warning: */ break;/*end bold*/
}
case 84: break;
case 39:
{ numBalanced = 0;currentTokType = ALPHANUM; yybegin(YYINITIAL); /* Break so we don't hit fall-through warning: */ break;/*end sub header*/
}
case 85: break;
case 40:
{ positionInc = 1; return ACRONYM;
}
case 86: break;
case 41:
{ positionInc = 1; return EMAIL;
}
case 87: break;
case 42:
{ numBalanced = 0;currentTokType = ALPHANUM; yybegin(YYINITIAL); /* Break so we don't hit fall-through warning: */ break;/*end bold italics*/
}
case 88: break;
case 43:
{ positionInc = 1; numWikiTokensSeen++; yybegin(EXTERNAL_LINK_STATE); return currentTokType;
}
case 89: break;
case 44:
{ numWikiTokensSeen = 0; positionInc = 1; currentTokType = CATEGORY; yybegin(CATEGORY_STATE);/* Break so we don't hit fall-through warning: */ break;
}
case 90: break;
case 45:
{ currentTokType = CATEGORY; numWikiTokensSeen = 0; yybegin(CATEGORY_STATE); /* Break so we don't hit fall-through warning: */ break;
}
case 91: break;
case 46:
{ numBalanced = 0; numWikiTokensSeen = 0; currentTokType = CATEGORY;yybegin(CATEGORY_STATE); /* Break so we don't hit fall-through warning: */ break;
}
case 92: break;
default:
// no action matched: either clean EOF or a genuine scanner error
if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
zzAtEOF = true;
return YYEOF;
}
else {
zzScanError(ZZ_NO_MATCH);
}
}
}
}
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-contactcenterinsights/v1/1.31.0/com/google/api/services/contactcenterinsights/v1/model/GoogleCloudContactcenterinsightsV1ListAnalysesResponse.java | 3726 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.contactcenterinsights.v1.model;
/**
 * The response to list analyses.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Contact Center AI Insights API. For a detailed
 * explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class GoogleCloudContactcenterinsightsV1ListAnalysesResponse extends com.google.api.client.json.GenericJson {

  /**
   * The analyses that match the request.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudContactcenterinsightsV1Analysis> analyses;

  /**
   * A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted,
   * there are no subsequent pages.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nextPageToken;

  static {
    // hack to force ProGuard to consider GoogleCloudContactcenterinsightsV1Analysis used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudContactcenterinsightsV1Analysis.class);
  }

  /**
   * The analyses that match the request.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudContactcenterinsightsV1Analysis> getAnalyses() {
    return analyses;
  }

  /**
   * The analyses that match the request.
   * @param analyses analyses or {@code null} for none
   */
  public GoogleCloudContactcenterinsightsV1ListAnalysesResponse setAnalyses(java.util.List<GoogleCloudContactcenterinsightsV1Analysis> analyses) {
    this.analyses = analyses;
    return this;
  }

  /**
   * A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted,
   * there are no subsequent pages.
   * @return value or {@code null} for none
   */
  public java.lang.String getNextPageToken() {
    return nextPageToken;
  }

  /**
   * A token, which can be sent as `page_token` to retrieve the next page. If this field is omitted,
   * there are no subsequent pages.
   * @param nextPageToken nextPageToken or {@code null} for none
   */
  public GoogleCloudContactcenterinsightsV1ListAnalysesResponse setNextPageToken(java.lang.String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }

  @Override
  public GoogleCloudContactcenterinsightsV1ListAnalysesResponse set(String fieldName, Object value) {
    return (GoogleCloudContactcenterinsightsV1ListAnalysesResponse) super.set(fieldName, value);
  }

  @Override
  public GoogleCloudContactcenterinsightsV1ListAnalysesResponse clone() {
    return (GoogleCloudContactcenterinsightsV1ListAnalysesResponse) super.clone();
  }
}
| apache-2.0 |
andyuk1986/js-client | spec/infinispan_expiry_spec.js | 8656 | var _ = require('underscore');
var t = require('./utils/testing'); // Testing dependency
// Jasmine 1.3 spec exercising lifespan/maxIdle expiry against a local client
// and a three-node cluster. All client handles are promises resolved lazily.
describe('Infinispan local client working with expiry operations', function() {
var client = t.client(t.local);
var client1 = t.client(t.cluster1);
var client2 = t.client(t.cluster2);
var client3 = t.client(t.cluster3);
// Wipe the local cache before each spec so expiry timings start clean.
beforeEach(function(done) { client
.then(t.assert(t.clear()))
.catch(t.failed(done)).finally(done);
});
it('can validate incorrect duration definitions', function(done) { client
.then(assertError(t.put('_', '_', {lifespan: '1z'}), t.toContain('Unknown duration unit')))
.then(assertError(t.putIfAbsent('_', '_', {lifespan: 'aa'}), t.toContain('Unknown duration format')))
.then(assertError(t.replace('_', '_', {lifespan: 1}), t.toContain('Positive duration provided without time unit')))
.then(assertError(t.putAll([{key: '_', value: '_'}], {lifespan: '1z'}), t.toContain('Unknown duration unit')))
.then(assertError(t.replaceV('_', '_', '_', {lifespan: 1}), t.toContain('Positive duration provided without time unit')))
.catch(t.failed(done))
.finally(done);
});
// Lifespan expiry via put, putIfAbsent and replace, using ms/μs/ns units.
it('removes keys when their lifespan has expired', function(done) { client
.then(t.assert(t.put('life', 'value', {lifespan: '100ms'})))
.then(t.assert(t.containsKey('life'), t.toBeTruthy))
.then(waitLifespanExpire('life', 1000))
.then(t.assert(t.putIfAbsent('life-absent', 'value', {lifespan: '100000μs'})))
.then(t.assert(t.containsKey('life-absent'), t.toBeTruthy))
.then(waitLifespanExpire('life-absent', 1000))
.then(t.assert(t.putIfAbsent('life-replace', 'v0')))
.then(t.assert(t.get('life-replace'), t.toBe('v0')))
.then(t.assert(t.replace('life-replace', 'v1', {lifespan: '100000000ns'})))
.then(t.assert(t.get('life-replace'), t.toBe('v1')))
.then(waitLifespanExpire('life-replace', 1000))
.catch(t.failed(done))
.finally(done);
});
// NOTE: xit = disabled spec (clustered lifespan expiry).
xit('removes keys when their lifespan has expired in cluster', function(done) { client1
.then(t.assert(t.put('life', 'value', {lifespan: '100ms'})))
.then(t.assert(t.containsKey('life'), t.toBeTruthy))
.then(function(client) {
return client2
.then(t.assert(t.containsKey('life'), t.toBeTruthy))
.then(waitLifespanExpire('life', 1000))
.then(t.assert(t.putIfAbsent('life-absent', 'value', {lifespan: '100000μs'})))
.then(t.assert(t.containsKey('life-absent'), t.toBeTruthy))
.then(function() {
return client;
})
})
.then(function(client){
return client3
.then(t.assert(t.get('life-absent'), t.toBe('value')))
.then(waitLifespanExpire('life-absent', 1000))
.then(t.assert(t.putIfAbsent('life-replace', 'v0')))
.then(t.assert(t.get('life-replace'), t.toBe('v0')))
.then(t.assert(t.replace('life-replace', 'v1', {lifespan: '100000000ns'})))
.then(function() {
return client;
})
})
.then(t.assert(t.get('life-replace'), t.toBe('v1')))
.then(waitLifespanExpire('life-replace', 1000))
.catch(t.failed(done))
.finally(done);
});
it('removes keys when their max idle time has expired', function(done) {
var pairs = [{key: 'idle-multi1', value: 'v1'}, {key: 'idle-multi2', value: 'v2'}];
client
.then(t.assert(t.put('idle-replace', 'v0')))
.then(t.assert(t.conditional(t.replaceV, t.getM, 'idle-replace', 'v0', 'v1', {maxIdle: '100ms'}), t.toBeTruthy))
.then(t.assert(t.get('idle-replace'), t.toBe('v1')))
.then(waitIdleTimeExpire('idle-replace', 1000))
.then(t.assert(t.putAll(pairs, {maxIdle: '100000μs'}), t.toBeUndefined))
.then(t.assert(t.containsKey('idle-multi1'), t.toBeTruthy))
.then(t.assert(t.containsKey('idle-multi2'), t.toBeTruthy))
.then(waitIdleTimeExpire('idle-multi1', 1000))
.then(waitIdleTimeExpire('idle-multi2', 1000))
.catch(t.failed(done))
.finally(done);
});
it('removes keys when their max idle time has expired in cluster', function(done) {
var pairs = [{key: 'idle-multi1', value: 'v1'}, {key: 'idle-multi2', value: 'v2'}];
client1
.then(t.assert(t.put('idle-replace', 'v0')))
.then(t.assert(t.conditional(t.replaceV, t.getM, 'idle-replace', 'v0', 'v1', {maxIdle: '100ms'}), t.toBeTruthy))
.then(t.assert(t.get('idle-replace'), t.toBe('v1')))
.then(function(client) {
return client2
.then(t.assert(t.get('idle-replace'), t.toBe('v1')))
.then(waitIdleTimeExpire('idle-replace', 1000))
.then(t.assert(t.containsKey('idle-replace'), t.toBeFalsy))
.then(t.assert(t.putAll(pairs, {maxIdle: '100000μs'}), t.toBeUndefined))
.then(function() {
return client;
});
})
.then(function(client) {
return client3
.then(t.assert(t.containsKey('idle-multi2'), t.toBeTruthy))
.then(waitIdleTimeExpire('idle-multi2', 1000))
.then(t.assert(t.containsKey('idle-multi2'), t.toBeFalsy))
.then(function() {
return client;
});
})
.then(t.assert(t.containsKey('idle-multi2'), t.toBeFalsy))
.catch(t.failed(done))
.finally(done);
});
it('can listen for expired events', function(done) { client
.then(t.on('expiry', t.expectEvent('listen-expiry', done, true)))
.then(t.assert(t.putIfAbsent('listen-expiry', 'value', {lifespan: '100ms'})))
.then(waitForExpiryEvent('listen-expiry'))
.catch(t.failed(done));
});
// Custom converter events only exist from Hot Rod protocol 2.9 onwards.
if (process.env.protocol == null || process.env.protocol >= '2.9') {
it('can listen for custom expired events', function(done) {
var expected = "KeyValueWithPrevious{key=listen-expiry, value=value, prev=null}";
var opts = { converterFactory : { name: "key-value-with-previous-converter-factory" } };
client
.then(t.on('expiry', t.expectCustomEvent(expected, done), opts))
.then(t.assert(t.putIfAbsent('listen-expiry', 'value', {lifespan: '100ms'})))
.then(waitForExpiryEvent('listen-expiry'))
.catch(t.failed(done));
});
}
it('can listen for expired events in cluster', function(done) { client1
.then(t.on('expiry', t.expectEvent('listen-expiry', done, true)))
.then(t.assert(t.putIfAbsent('listen-expiry', 'value', {lifespan: '100ms'})))
.then(function(client) {
return client2
.then(t.assert(t.containsKey('listen-expiry'), t.toBeTruthy))
.then(waitForExpiryEvent('listen-expiry'))
.then(function() {
return client;
});
})
.then(t.assert(t.containsKey('listen-expiry'), t.toBeFalsy))
.catch(t.failed(done));
});
// Since Jasmine 1.3 does not have afterAll callback, this disconnect test must be last
it('disconnects client', function(done) {
// Guarantee that even if one of the disconnect fails, all disconnects have been called
Promise.all([client, client1, client2, client3])
.then(function(clients) {
return Promise.all(_.map(clients, function(client) {
return client.disconnect();
}));
})
.catch(t.failed(done))
.finally(done);
});
});
// timeout in ms
// Returns a promise-chain step that polls until `key` disappears from the
// cache or `timeout` ms elapse, then passes `client` through.
function waitLifespanExpire(key, timeout) {
return function(client) {
var contains = true;
// NOTE(review): relies on Jasmine 1.3 waitsFor(); each poll kicks off a new
// async containsKey() probe and the latch reads the previous probe's result.
waitsFor(function() {
client.containsKey(key).then(function(success) {
contains = success;
});
return !contains;
}, '`' + key + '` key should be expired', timeout);
return client;
}
}
// Returns a promise-chain step that, after a short sleep, asserts that `key`
// has already expired, then passes `client` through.
function waitForExpiryEvent(key) {
return function(client) {
t.sleepFor(200); // sleep required, waitFor() does not work with event
client.containsKey(key).then(function(success) {
// NOTE(review): this expectation runs asynchronously; if the promise
// resolves after the spec finishes, the failure may be mis-attributed.
expect(success).toBeFalsy();
});
return client;
}
}
// timeout in ms
// Same contract as waitLifespanExpire(), but first lets the entry sit idle
// for 200ms so its max-idle clock can run down before polling starts.
function waitIdleTimeExpire(key, timeout) {
  return function(client) {
    t.sleepFor(200); // sleep required
    // Delegate the polling loop to waitLifespanExpire instead of duplicating
    // it; the waitsFor latch and failure message are identical.
    return waitLifespanExpire(key, timeout)(client);
  }
}
// Returns a promise-chain step that runs `fun(client)` and demands it throws.
// The thrown error's message is handed to `expectErrorFun`; if `fun` does not
// throw at all, the step itself fails. The client is passed through on success.
function assertError(fun, expectErrorFun) {
  return function(client) {
    var threw = false;
    try {
      fun(client);
    } catch (error) {
      threw = true;
      expectErrorFun(error.message);
    }
    if (!threw) {
      throw new Error('Expected function to fail');
    }
    return client;
  };
}
| apache-2.0 |
abperiasamy/minio | pkg/policy/resource.go | 3112 | /*
* Minio Cloud Storage, (C) 2018 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package policy
import (
"encoding/json"
"fmt"
"strings"
"github.com/minio/minio/pkg/wildcard"
)
// ResourceARNPrefix - resource ARN prefix as per AWS S3 specification.
const ResourceARNPrefix = "arn:aws:s3:::"
// Resource - resource in policy statement.
type Resource struct {
bucketName string
pattern    string
}
// isBucketPattern reports whether the pattern addresses a bucket only
// (it contains no '/' separator, hence no object key part).
func (r Resource) isBucketPattern() bool {
return !strings.Contains(r.pattern, "/")
}
// isObjectPattern reports whether the pattern can address objects: it either
// has an object key part or a wildcard in the bucket name.
func (r Resource) isObjectPattern() bool {
return strings.Contains(r.pattern, "/") || strings.Contains(r.bucketName, "*")
}
// IsValid - checks whether Resource is valid or not.
// A Resource is valid only when both the bucket name and pattern are non-empty.
func (r Resource) IsValid() bool {
return r.bucketName != "" && r.pattern != ""
}
// Match - matches object name with resource pattern,
// honouring wildcards in the pattern.
func (r Resource) Match(resource string) bool {
return wildcard.Match(r.pattern, resource)
}
// MarshalJSON - encodes Resource to JSON data.
// The resource is serialized as its ARN string form; invalid resources are rejected.
func (r Resource) MarshalJSON() ([]byte, error) {
if !r.IsValid() {
return nil, fmt.Errorf("invalid resource %v", r)
}
return json.Marshal(r.String())
}
// String returns the resource in ARN form ("arn:aws:s3:::<pattern>").
func (r Resource) String() string {
return ResourceARNPrefix + r.pattern
}
// UnmarshalJSON - decodes JSON data to Resource.
// The JSON value must be an ARN string ("arn:aws:s3:::bucket[/keyPattern]").
func (r *Resource) UnmarshalJSON(data []byte) error {
	var arn string
	if err := json.Unmarshal(data, &arn); err != nil {
		return err
	}
	parsed, err := parseResource(arn)
	if err != nil {
		return err
	}
	*r = parsed
	return nil
}
// Validate - validates Resource is for given bucket or not.
// Returns nil when the resource is well-formed and its bucket pattern
// matches bucketName; otherwise returns an error describing the mismatch.
func (r Resource) Validate(bucketName string) error {
	if !r.IsValid() {
		// Include the resource itself so the failing value is visible to the
		// caller, consistent with the errors produced by parseResource.
		return fmt.Errorf("invalid resource %v", r)
	}
	if !wildcard.Match(r.bucketName, bucketName) {
		return fmt.Errorf("bucket name '%v' does not match resource bucket pattern '%v'", bucketName, r.bucketName)
	}
	return nil
}
// parseResource - parses string to Resource.
// The string must begin with the S3 ARN prefix; the remainder is the pattern,
// whose first path segment (up to the first "/") is the bucket name.
func parseResource(s string) (Resource, error) {
	if !strings.HasPrefix(s, ResourceARNPrefix) {
		return Resource{}, fmt.Errorf("invalid resource '%v'", s)
	}
	pattern := s[len(ResourceARNPrefix):]
	bucketName := pattern
	if idx := strings.Index(pattern, "/"); idx >= 0 {
		bucketName = pattern[:idx]
	}
	if bucketName == "" {
		return Resource{}, fmt.Errorf("invalid resource format '%v'", s)
	}
	return Resource{
		bucketName: bucketName,
		pattern:    pattern,
	}, nil
}
// NewResource - creates new resource.
// When keyName is non-empty it is appended to the bucket name, inserting a
// "/" separator unless keyName already starts with one.
func NewResource(bucketName, keyName string) Resource {
	pattern := bucketName
	switch {
	case keyName == "":
		// bucket-only resource; pattern is just the bucket name
	case strings.HasPrefix(keyName, "/"):
		pattern += keyName
	default:
		pattern += "/" + keyName
	}
	return Resource{
		bucketName: bucketName,
		pattern:    pattern,
	}
}
| apache-2.0 |
McLeodMoores/starling | projects/financial/src/main/java/com/opengamma/financial/analytics/model/equity/option/EquityOptionFunctions.java | 25565 | /**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*
* Modified by McLeod Moores Software Limited.
*
* Copyright (C) 2015-Present McLeod Moores Software Limited. All rights reserved.
*/
package com.opengamma.financial.analytics.model.equity.option;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.InitializingBean;
import com.opengamma.engine.function.config.AbstractFunctionConfigurationBean;
import com.opengamma.engine.function.config.FunctionConfiguration;
import com.opengamma.engine.function.config.FunctionConfigurationSource;
import com.opengamma.financial.analytics.model.curve.forward.InterpolatedForwardCurveDefaults;
import com.opengamma.financial.analytics.model.equity.EquityForwardCurvePerTickerDefaults;
import com.opengamma.financial.property.DefaultPropertyFunction.PriorityClass;
import com.opengamma.util.ArgumentChecker;
public class EquityOptionFunctions extends AbstractFunctionConfigurationBean {
  /**
   * Default instance of a repository configuration source exposing the functions from this package.
   *
   * @return the configuration source exposing functions from this package
   */
  public static FunctionConfigurationSource instance() {
    // The bean is stateless, so a fresh instance per call is cheap;
    // getObjectCreating() adapts it into a FunctionConfigurationSource.
    return new EquityOptionFunctions().getObjectCreating();
  }
/**
* Equity ticker-specific data.
*/
public static class EquityInfo implements InitializingBean {
/** The discounting curve name */
private String _discountingCurve;
/** The discounting curve calculation configuration name */
private String _discountingCurveConfig;
/** The discounting curve currency */
private String _discountingCurveCurrency;
/** The volatility surface name */
private String _volatilitySurface;
/** The volatility surface calculation method */
private String _surfaceCalculationMethod;
/** The volatility surface interpolation method name */
private String _surfaceInterpolationMethod;
/** The forward curve name */
private String _forwardCurve;
/** The forward curve interpolator name */
private String _forwardCurveInterpolator;
/** The forward curve left extrapolator name */
private String _forwardCurveLeftExtrapolator;
/** The forward curve right extrapolator name */
private String _forwardCurveRightExtrapolator;
/** The forward curve calculation method name */
private String _forwardCurveCalculationMethod;
/** The dividend type */
private String _dividendType;
/**
* Gets the discounting curve name.
*
* @return The discounting curve name
*/
public String getDiscountingCurve() {
return _discountingCurve;
}
/**
* Sets the discounting curve name.
*
* @param discountingCurve
* The discounting curve name
*/
public void setDiscountingCurve(final String discountingCurve) {
_discountingCurve = discountingCurve;
}
/**
* Gets the discounting curve configuration name.
*
* @return The discounting curve configuration name
*/
public String getDiscountingCurveConfig() {
return _discountingCurveConfig;
}
/**
* Sets the discounting curve configuration name.
*
* @param discountingCurveConfig
* The discounting curve configuration name
*/
public void setDiscountingCurveConfig(final String discountingCurveConfig) {
_discountingCurveConfig = discountingCurveConfig;
}
/**
* Gets the discounting curve currency.
*
* @return The discounting curve configuration name
*/
public String getDiscountingCurveCurrency() {
return _discountingCurveCurrency;
}
/**
* Sets the discounting curve currency.
*
* @param discountingCurveCurrency
* The discounting curve currency
*/
public void setDiscountingCurveCurrency(final String discountingCurveCurrency) {
_discountingCurveCurrency = discountingCurveCurrency;
}
/**
* Gets the volatility surface name.
*
* @return The volatility surface name
*/
public String getVolatilitySurface() {
return _volatilitySurface;
}
/**
* Sets the volatility surface name.
*
* @param volatilitySurface
* The volatility surface name
*/
public void setVolatilitySurface(final String volatilitySurface) {
ArgumentChecker.notNull(volatilitySurface, "volatilitySurface");
_volatilitySurface = volatilitySurface;
}
/**
* Gets the volatility surface calculation method name.
*
* @return The volatility surface calculation method name
*/
public String getSurfaceCalculationMethod() {
return _surfaceCalculationMethod;
}
/**
* Sets the volatility surface calculation method name.
*
* @param surfaceCalculationMethod
* The volatility surface calculation method name
*/
public void setSurfaceCalculationMethod(final String surfaceCalculationMethod) {
ArgumentChecker.notNull(surfaceCalculationMethod, "surfaceCalculationMethod");
_surfaceCalculationMethod = surfaceCalculationMethod;
}
/**
* Gets the volatility surface interpolation method name.
*
* @return The volatility surface interpolation method name
*/
public String getSurfaceInterpolationMethod() {
return _surfaceInterpolationMethod;
}
/**
* Sets the volatility surface interpolation method name.
*
* @param surfaceInterpolationMethod
* The volatility surface interpolation method name
*/
public void setSurfaceInterpolationMethod(final String surfaceInterpolationMethod) {
ArgumentChecker.notNull(surfaceInterpolationMethod, "surfaceInterpolationMethod");
_surfaceInterpolationMethod = surfaceInterpolationMethod;
}
/**
* Gets the forward curve name.
*
* @return The forward curve name
*/
public String getForwardCurve() {
return _forwardCurve;
}
/**
* Sets the forward curve name.
*
* @param forwardCurve
* The forward curve name
*/
public void setForwardCurve(final String forwardCurve) {
_forwardCurve = forwardCurve;
}
/**
* Gets the forward curve interpolator name.
*
* @return The forward curve interpolator name
*/
public String getForwardCurveInterpolator() {
return _forwardCurveInterpolator;
}
/**
* Sets the forward curve interpolator name.
*
* @param forwardCurveInterpolator
* The forward curve interpolator name
*/
public void setForwardCurveInterpolator(final String forwardCurveInterpolator) {
ArgumentChecker.notNull(forwardCurveInterpolator, "forwardCurveInterpolator");
_forwardCurveInterpolator = forwardCurveInterpolator;
}
/**
* Gets the forward curve left extrapolator name.
*
* @return The forward curve left extrapolator name
*/
public String getForwardCurveLeftExtrapolator() {
return _forwardCurveLeftExtrapolator;
}
/**
* Sets the forward curve left extrapolator name.
*
* @param forwardCurveLeftExtrapolator
* The forward curve left extrapolator name
*/
public void setForwardCurveLeftExtrapolator(final String forwardCurveLeftExtrapolator) {
ArgumentChecker.notNull(forwardCurveLeftExtrapolator, "forwardCurveLeftExtrapolator");
_forwardCurveLeftExtrapolator = forwardCurveLeftExtrapolator;
}
/**
* Gets the forward curve right extrapolator name.
*
* @return The forward curve right extrapolator name
*/
public String getForwardCurveRightExtrapolator() {
return _forwardCurveRightExtrapolator;
}
/**
* Sets the forward curve right extrapolator name.
*
* @param forwardCurveRightExtrapolator
* The forward curve right extrapolator name
*/
public void setForwardCurveRightExtrapolator(final String forwardCurveRightExtrapolator) {
ArgumentChecker.notNull(forwardCurveRightExtrapolator, "forwardCurveRightExtrapolator");
_forwardCurveRightExtrapolator = forwardCurveRightExtrapolator;
}
/**
* Gets the forward curve calculation method name.
*
* @return The forward curve calculation method name
*/
public String getForwardCurveCalculationMethod() {
return _forwardCurveCalculationMethod;
}
/**
* Sets the forward curve calculation method name.
*
* @param forwardCurveCalculationMethod
* The forward curve calculation method name.
*/
public void setForwardCurveCalculationMethod(final String forwardCurveCalculationMethod) {
_forwardCurveCalculationMethod = forwardCurveCalculationMethod;
}
/**
* Gets the dividend type.
*
* @return The dividend type
*/
public String getDividendType() {
return _dividendType;
}
/**
* Sets the dividend type.
*
* @param dividendType
* The dividend type
*/
public void setDividendType(final String dividendType) {
ArgumentChecker.notNull(dividendType, "dividendType");
_dividendType = dividendType;
}
    @Override
    public void afterPropertiesSet() {
      // Fail fast at Spring wiring time if the mandatory defaults were not injected.
      ArgumentChecker.notNullInjected(getDiscountingCurve(), "discountingCurve");
      ArgumentChecker.notNullInjected(getDiscountingCurveConfig(), "discountingCurveConfig");
      ArgumentChecker.notNullInjected(getForwardCurve(), "forwardCurve");
      ArgumentChecker.notNullInjected(getForwardCurveCalculationMethod(), "forwardCurveCalculationMethod");
      // Not checking other properties because they are not all required by volatility surfaces
      // and forward curves. The null check is performed in the setter.
    }
}
/**
* Contains default values for forward curves.
*/
public static class EquityForwardDefaults extends AbstractFunctionConfigurationBean {
/** The number of default parameters that a forward curve requires */
private static final int N_CURVE_ARGS = 5;
/** The number of default parameters that the interpolator requires */
private static final int N_INTERPOLATOR_ARGS = 3;
/** The per-equity defaults */
private final Map<String, EquityInfo> _perEquityInfo = new HashMap<>();
/** The interpolator name */
private String _interpolator;
/** The left extrapolator name */
private String _leftExtrapolator;
/** The right extrapolator name */
private String _rightExtrapolator;
/** The discounting curve name */
private String _discountingCurve;
/** The discounting curve calculation config name */
private String _discountingCurveConfig;
/** The discounting curve currency */
private String _discountingCurveCurrency;
/** The dividend type */
private String _dividendType;
/**
* Sets the forward curve defaults for a set of equity tickers.
*
* @param perEquityInfo
* The per-equity defaults
*/
public void setPerEquityInfo(final Map<String, EquityInfo> perEquityInfo) {
_perEquityInfo.clear();
_perEquityInfo.putAll(perEquityInfo);
}
/**
* Gets the forward curve defaults for a set of equity tickers.
*
* @return The per-equity defaults
*/
public Map<String, EquityInfo> getPerEquityInfo() {
return _perEquityInfo;
}
/**
* Gets the interpolator name.
*
* @return The interpolator name
*/
public String getInterpolator() {
return _interpolator;
}
/**
* Sets the interpolator name.
*
* @param interpolator
* The interpolator name
*/
public void setInterpolator(final String interpolator) {
_interpolator = interpolator;
}
/**
* Gets the left extrapolator name.
*
* @return The left extrapolator name
*/
public String getLeftExtrapolator() {
return _leftExtrapolator;
}
/**
* Sets the left extrapolator name.
*
* @param leftExtrapolator
* The left extrapolator name
*/
public void setLeftExtrapolator(final String leftExtrapolator) {
_leftExtrapolator = leftExtrapolator;
}
/**
* Gets the right extrapolator name.
*
* @return The right extrapolator name
*/
public String getRightExtrapolator() {
return _rightExtrapolator;
}
/**
* Sets the right extrapolator name.
*
* @param rightExtrapolator
* The right extrapolator name
*/
public void setRightExtrapolator(final String rightExtrapolator) {
_rightExtrapolator = rightExtrapolator;
}
/**
* Gets the discounting curve name.
*
* @return The discounting curve name
*/
public String getDiscountingCurve() {
return _discountingCurve;
}
/**
* Sets the discounting curve name.
*
* @param discountingCurve
* The discounting curve name
*/
public void setDiscountingCurve(final String discountingCurve) {
_discountingCurve = discountingCurve;
}
/**
* Gets the discounting curve configuration name.
*
* @return The discounting curve configuration name
*/
public String getDiscountingCurveConfig() {
return _discountingCurveConfig;
}
/**
* Sets the discounting curve configuration name.
*
* @param discountingCurveConfig
* The discounting curve configuration name
*/
public void setDiscountingCurveConfig(final String discountingCurveConfig) {
_discountingCurveConfig = discountingCurveConfig;
}
/**
* Gets the discounting curve currency.
*
* @return The discounting curve currency
*/
public String getDiscountingCurveCurrency() {
return _discountingCurveCurrency;
}
/**
* Sets the discounting curve currency.
*
* @param discountingCurveCurrency
* The discounting curve currency
*/
public void setDiscountingCurveCurrency(final String discountingCurveCurrency) {
_discountingCurveCurrency = discountingCurveCurrency;
}
/**
* Gets the dividend type.
*
* @return The dividend type
*/
public String getDividendType() {
return _dividendType;
}
/**
* Sets the dividend type.
*
* @param dividendType
* The dividend type
*/
public void setDividendType(final String dividendType) {
_dividendType = dividendType;
}
    @Override
    protected void addAllConfigurations(final List<FunctionConfiguration> functions) {
      final Map<String, EquityInfo> perEquityInfo = getPerEquityInfo();
      // Interpolator defaults are flattened into consecutive triples per ticker:
      // [interpolator, left extrapolator, right extrapolator].
      final String[] interpolatorArgs = new String[perEquityInfo.size() * N_INTERPOLATOR_ARGS];
      // Curve defaults start with the priority, then five values per ticker:
      // [ticker, currency, discounting curve, curve config, dividend type].
      final String[] curveArgs = new String[1 + perEquityInfo.size() * N_CURVE_ARGS];
      curveArgs[0] = PriorityClass.ABOVE_NORMAL.name();
      int i = 0; // write cursor into interpolatorArgs
      int j = 1; // write cursor into curveArgs (slot 0 holds the priority)
      for (final Map.Entry<String, EquityInfo> entry : perEquityInfo.entrySet()) {
        final String ticker = entry.getKey();
        final EquityInfo equityInfo = entry.getValue();
        interpolatorArgs[i++] = equityInfo.getForwardCurveInterpolator();
        interpolatorArgs[i++] = equityInfo.getForwardCurveLeftExtrapolator();
        interpolatorArgs[i++] = equityInfo.getForwardCurveRightExtrapolator();
        curveArgs[j++] = ticker;
        curveArgs[j++] = equityInfo.getDiscountingCurveCurrency();
        curveArgs[j++] = equityInfo.getDiscountingCurve();
        curveArgs[j++] = equityInfo.getDiscountingCurveConfig();
        curveArgs[j++] = equityInfo.getDividendType();
      }
      // NOTE(review): the per-element layouts above must match what the two
      // defaults functions expect in their constructors — confirm against
      // InterpolatedForwardCurveDefaults / EquityForwardCurvePerTickerDefaults.
      functions.add(functionConfiguration(InterpolatedForwardCurveDefaults.class, interpolatorArgs));
      functions.add(functionConfiguration(EquityForwardCurvePerTickerDefaults.class, curveArgs));
    }
}
/**
* Contains default values for volatility surfaces.
*/
public static class EquityOptionDefaults extends AbstractFunctionConfigurationBean {
/** The number of default values that a volatility surface requires */
private static final int N_SURFACE_ARGS = 7;
/** The per-equity defaults */
private final Map<String, EquityInfo> _perEquityInfo = new HashMap<>();
/** The interpolator name */
private String _interpolator;
/** The left extrapolator name */
private String _leftExtrapolator;
/** The right extrapolator name */
private String _rightExtrapolator;
/** The discounting curve name */
private String _discountingCurve;
/** The discounting curve calculation config name */
private String _discountingCurveConfig;
/** The discounting curve currency */
private String _discountingCurveCurrency;
/** The dividend type */
private String _dividendType;
/** The surface calculation method */
private String _surfaceCalculationMethod;
/**
* Sets the volatility surface defaults for a set of equity tickers.
*
* @param perEquityInfo
* The per-equity defaults
*/
public void setPerEquityInfo(final Map<String, EquityInfo> perEquityInfo) {
_perEquityInfo.clear();
_perEquityInfo.putAll(perEquityInfo);
}
/**
* Gets the volatility surface defaults for a set of equity tickers.
*
* @return The per-equity defaults
*/
public Map<String, EquityInfo> getPerEquityInfo() {
return _perEquityInfo;
}
/**
* Gets the interpolator name.
*
* @return The interpolator name
*/
public String getInterpolator() {
return _interpolator;
}
/**
* Sets the interpolator name.
*
* @param interpolator
* The interpolator name
*/
public void setInterpolator(final String interpolator) {
_interpolator = interpolator;
}
/**
* Gets the left extrapolator name.
*
* @return The left extrapolator name
*/
public String getLeftExtrapolator() {
return _leftExtrapolator;
}
/**
* Sets the left extrapolator name.
*
* @param leftExtrapolator
* The left extrapolator name
*/
public void setLeftExtrapolator(final String leftExtrapolator) {
_leftExtrapolator = leftExtrapolator;
}
/**
* Gets the right extrapolator name.
*
* @return The right extrapolator name
*/
public String getRightExtrapolator() {
return _rightExtrapolator;
}
/**
* Sets the right extrapolator name.
*
* @param rightExtrapolator
* The right extrapolator name
*/
public void setRightExtrapolator(final String rightExtrapolator) {
_rightExtrapolator = rightExtrapolator;
}
/**
* Gets the discounting curve name.
*
* @return The discounting curve name
*/
public String getDiscountingCurve() {
return _discountingCurve;
}
/**
* Sets the discounting curve name.
*
* @param discountingCurve
* The discounting curve name
*/
public void setDiscountingCurve(final String discountingCurve) {
_discountingCurve = discountingCurve;
}
/**
* Gets the discounting curve configuration name.
*
* @return The discounting curve configuration name
*/
public String getDiscountingCurveConfig() {
return _discountingCurveConfig;
}
/**
* Sets the discounting curve configuration name.
*
* @param discountingCurveConfig
* The discounting curve configuration name
*/
public void setDiscountingCurveConfig(final String discountingCurveConfig) {
_discountingCurveConfig = discountingCurveConfig;
}
/**
* Gets the discounting curve currency.
*
* @return The discounting curve currency
*/
public String getDiscountingCurveCurrency() {
return _discountingCurveCurrency;
}
/**
* Sets the discounting curve currency.
*
* @param discountingCurveCurrency
* The discounting curve currency
*/
public void setDiscountingCurveCurrency(final String discountingCurveCurrency) {
_discountingCurveCurrency = discountingCurveCurrency;
}
/**
* Gets the dividend type.
*
* @return The dividend type
*/
public String getDividendType() {
return _dividendType;
}
/**
* Sets the dividend type.
*
* @param dividendType
* The dividend type
*/
public void setDividendType(final String dividendType) {
_dividendType = dividendType;
}
/**
* Gets the surface calculation method.
*
* @return The surface calculation method
*/
public String getSurfaceCalculationMethod() {
return _surfaceCalculationMethod;
}
/**
* Sets the surface calculation method.
*
* @param surfaceCalculationMethod
* The surface calculation method
*/
public void setSurfaceCalculationMethod(final String surfaceCalculationMethod) {
_surfaceCalculationMethod = surfaceCalculationMethod;
}
    @Override
    protected void addAllConfigurations(final List<FunctionConfiguration> functions) {
      final Map<String, EquityInfo> perEquityInfo = getPerEquityInfo();
      // Surface-calculation-method defaults: [priority, then (ticker, method) pairs].
      final String[] calculationMethodArgs = new String[1 + perEquityInfo.size() * 2];
      // Surface defaults: [priority, then seven values per ticker]:
      // [ticker, discounting curve, curve config, vol surface, surface
      //  interpolation method, forward curve, forward curve calc method].
      final String[] surfaceArgs = new String[1 + perEquityInfo.size() * N_SURFACE_ARGS];
      calculationMethodArgs[0] = PriorityClass.ABOVE_NORMAL.name();
      surfaceArgs[0] = PriorityClass.ABOVE_NORMAL.name();
      int i = 1; // write cursor into calculationMethodArgs (slot 0 is the priority)
      int j = 1; // write cursor into surfaceArgs (slot 0 is the priority)
      for (final Map.Entry<String, EquityInfo> entry : perEquityInfo.entrySet()) {
        final String ticker = entry.getKey();
        final EquityInfo equityInfo = entry.getValue();
        calculationMethodArgs[i++] = ticker;
        calculationMethodArgs[i++] = equityInfo.getSurfaceCalculationMethod();
        surfaceArgs[j++] = ticker;
        surfaceArgs[j++] = equityInfo.getDiscountingCurve();
        surfaceArgs[j++] = equityInfo.getDiscountingCurveConfig();
        surfaceArgs[j++] = equityInfo.getVolatilitySurface();
        surfaceArgs[j++] = equityInfo.getSurfaceInterpolationMethod();
        surfaceArgs[j++] = equityInfo.getForwardCurve();
        surfaceArgs[j++] = equityInfo.getForwardCurveCalculationMethod();
      }
      functions.add(functionConfiguration(EquityOptionSurfaceCalculationMethodPerEquityDefaults.class, calculationMethodArgs));
      functions.add(functionConfiguration(EquityOptionInterpolatedBlackLognormalPerEquityDefaults.class, surfaceArgs));
    }
}
@Override
protected void addAllConfigurations(final List<FunctionConfiguration> functions) {
functions.add(functionConfiguration(EquityOptionBAWGreeksFunction.class));
functions.add(functionConfiguration(EquityOptionBAWImpliedVolatilityFunction.class));
functions.add(functionConfiguration(EquityOptionBAWPresentValueFunction.class));
functions.add(functionConfiguration(EquityOptionBAWScenarioPnLFunction.class));
functions.add(functionConfiguration(EquityOptionBAWValueDeltaFunction.class));
functions.add(functionConfiguration(EquityOptionBAWValueGammaFunction.class));
functions.add(functionConfiguration(EquityOptionBjerksundStenslandGreeksFunction.class));
functions.add(functionConfiguration(EquityOptionBjerksundStenslandPresentValueFunction.class));
functions.add(functionConfiguration(EquityOptionBjerksundStenslandValueDeltaFunction.class));
functions.add(functionConfiguration(EquityOptionBjerksundStenslandValueGammaFunction.class));
functions.add(functionConfiguration(EquityOptionBjerksundStenslandScenarioPnLFunction.class));
functions.add(functionConfiguration(EquityOptionBjerksundStenslandImpliedVolFunction.class));
functions.add(functionConfiguration(EquityOptionPDEPresentValueFunction.class));
functions.add(functionConfiguration(EquityOptionPDEScenarioPnLFunction.class));
functions.add(functionConfiguration(EquityOptionBlackImpliedVolFunction.class));
functions.add(functionConfiguration(EquityOptionBlackPresentValueFunction.class));
functions.add(functionConfiguration(EquityOptionBlackRhoFunction.class));
functions.add(functionConfiguration(EquityOptionBlackSpotDeltaFunction.class));
functions.add(functionConfiguration(EquityOptionBlackThetaFunction.class));
functions.add(functionConfiguration(EquityOptionBlackScenarioPnLFunction.class));
functions.add(functionConfiguration(EquityOptionBlackSpotGammaFunction.class));
functions.add(functionConfiguration(EquityOptionBlackSpotVannaFunction.class));
functions.add(functionConfiguration(EquityOptionBlackVegaFunction.class));
functions.add(functionConfiguration(EquityOptionBlackVegaMatrixFunction.class));
functions.add(functionConfiguration(EquityOptionBlackVommaFunction.class));
functions.add(functionConfiguration(EquityOptionBlackValueDeltaFunction.class));
functions.add(functionConfiguration(EquityOptionBlackValueGammaFunction.class));
functions.add(functionConfiguration(EquityOptionForwardValueFunction.class));
functions.add(functionConfiguration(EquityOptionSpotIndexFunction.class));
}
}
| apache-2.0 |
Juni4567/meritscholarship | wp-content/themes/wplms/setup/vibe_importer/wordpress-importer.php | 42328 | <?php
/*
Plugin Name: WordPress Importer
Plugin URI: http://wordpress.org/extend/plugins/wordpress-importer/
Description: Import posts, pages, comments, custom fields, categories, tags and more from a WordPress export file.
Author: wordpressdotorg
Author URI: http://wordpress.org/
Version: 0.6.1
Text Domain: wordpress-importer
License: GPL version 2 or later - http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
*/
/*
if ( ! defined( 'WP_LOAD_IMPORTERS' ) )
return;
*/
/** Display verbose errors */
define( 'IMPORT_DEBUG', true );
// Load Importer API
require_once ABSPATH . 'wp-admin/includes/import.php';
if ( ! class_exists( 'WP_Importer' ) ) {
$class_wp_importer = ABSPATH . 'wp-admin/includes/class-wp-importer.php';
if ( file_exists( $class_wp_importer ) ){
require $class_wp_importer;
}
}
// include WXR file parsers
require dirname( __FILE__ ) . '/parsers.php';
/**
* WordPress Importer class for managing the import process of a WXR file
*
* @package WordPress
* @subpackage Importer
*/
if ( class_exists( 'WP_Importer' ) ) {
class WP_Import extends WP_Importer {
var $max_wxr_version = 1.2; // max. supported WXR version
var $id; // WXR attachment ID
// information to import from WXR file
var $version;
var $authors = array();
var $posts = array();
var $terms = array();
var $categories = array();
var $tags = array();
var $base_url = '';
// mappings from old information to new
var $processed_authors = array();
var $author_mapping = array();
var $processed_terms = array();
var $processed_posts = array();
var $post_orphans = array();
var $processed_menu_items = array();
var $menu_item_orphans = array();
var $missing_menu_items = array();
var $fetch_attachments = false;
var $url_remap = array();
var $featured_images = array();
function WP_Import() { /* nothing */ }
	/**
	 * Registered callback function for the WordPress Importer.
	 *
	 * Manages the three separate stages of the WXR import process:
	 *  - step 0: greeting / upload form
	 *  - step 1: handle the uploaded WXR file and show pre-import options
	 *  - step 2: perform the actual import
	 */
	function dispatch() {
		$this->header();
		// Current stage comes from the query string; default to the greeting.
		$step = empty( $_GET['step'] ) ? 0 : (int) $_GET['step'];
		switch ( $step ) {
			case 0:
				$this->greet();
				break;
			case 1:
				// Nonce check protects the upload form submission.
				check_admin_referer( 'import-upload' );
				if ( $this->handle_upload() )
					$this->import_options();
				break;
			case 2:
				check_admin_referer( 'import-wordpress' );
				// Only honour the attachment-fetch checkbox when fetching is allowed.
				$this->fetch_attachments = ( ! empty( $_POST['fetch_attachments'] ) && $this->allow_fetch_attachments() );
				$this->id = (int) $_POST['import_id'];
				$file = get_attached_file( $this->id );
				// Imports can be long-running; lift the PHP execution time limit.
				set_time_limit(0);
				$this->import( $file );
				break;
		}
		$this->footer();
	}
	/**
	 * The main controller for the actual import stage.
	 *
	 * Orchestrates the full pipeline: parse the WXR file, map authors, import
	 * taxonomy terms and posts, then backfill references that could not be
	 * resolved during the first pass.
	 *
	 * @param string $file Path to the WXR file for importing
	 */
	function import( $file ) {
		// Filter imported post meta keys, and allow longer HTTP timeouts while
		// remote attachments are being fetched.
		add_filter( 'import_post_meta_key', array( $this, 'is_valid_meta_key' ) );
		add_filter( 'http_request_timeout', array( &$this, 'bump_request_timeout' ) );
		$this->import_start( $file );
		$this->get_author_mapping();
		// Suspend cache invalidation during the bulk inserts for performance.
		wp_suspend_cache_invalidation( true );
		$this->process_categories();
		$this->process_tags();
		$this->process_terms();
		$this->process_posts();
		wp_suspend_cache_invalidation( false );
		// update incorrect/missing information in the DB
		$this->backfill_parents();
		$this->backfill_attachment_urls();
		$this->remap_featured_images();
		$this->import_end();
	}
	/**
	 * Parses the WXR file and prepares us for the task of processing parsed data.
	 *
	 * On any failure (missing file or parse error) an error message is echoed
	 * and execution stops via die().
	 *
	 * @param string $file Path to the WXR file for importing
	 */
	function import_start( $file ) {
		if ( ! is_file($file) ) {
			echo '<p><strong>' . __( 'Sorry, there has been an error.', 'wordpress-importer' ) . '</strong><br />';
			echo __( 'The file does not exist, please try again.', 'wordpress-importer' ) . '</p>';
			$this->footer();
			die();
		}
		$import_data = $this->parse( $file );
		if ( is_wp_error( $import_data ) ) {
			echo '<p><strong>' . __( 'Sorry, there has been an error.', 'wordpress-importer' ) . '</strong><br />';
			echo esc_html( $import_data->get_error_message() ) . '</p>';
			$this->footer();
			die();
		}
		// Stash the parsed WXR payload on the instance for the processing steps.
		$this->version = $import_data['version'];
		$this->get_authors_from_import( $import_data );
		$this->posts = $import_data['posts'];
		$this->terms = $import_data['terms'];
		$this->categories = $import_data['categories'];
		$this->tags = $import_data['tags'];
		$this->base_url = esc_url( $import_data['base_url'] );
		// Defer term/comment counting until the whole import has finished.
		wp_defer_term_counting( true );
		wp_defer_comment_counting( true );
		do_action( 'import_start' );
	}
	/**
	 * Performs post-import cleanup of files and the cache.
	 */
	function import_end() {
		// Remove the uploaded WXR attachment now that the import is finished.
		wp_import_cleanup( $this->id );
		wp_cache_flush();
		// Drop cached term-hierarchy options so they are rebuilt with the new terms.
		foreach ( get_taxonomies() as $tax ) {
			delete_option( "{$tax}_children" );
			_get_term_hierarchy( $tax );
		}
		// Re-enable the counting that was deferred in import_start().
		wp_defer_term_counting( false );
		wp_defer_comment_counting( false );
		echo '<p>' . __( 'All done.', 'wordpress-importer' ) . ' <a href="' . admin_url() . '">' . __( 'Have fun!', 'wordpress-importer' ) . '</a>' . '</p>';
		echo '<p>' . __( 'Remember to update the passwords and roles of imported users.', 'wordpress-importer' ) . '</p>';
		do_action( 'import_end' );
	}
	/**
	 * Handles the WXR upload and initial parsing of the file to prepare for
	 * displaying author import options.
	 *
	 * On success the uploaded file's attachment ID is stored in $this->id for
	 * use by the later import stage.
	 *
	 * @return bool False if error uploading or invalid file, true otherwise
	 */
	function handle_upload() {
		$file = wp_import_handle_upload();
		if ( isset( $file['error'] ) ) {
			echo '<p><strong>' . __( 'Sorry, there has been an error.', 'wordpress-importer' ) . '</strong><br />';
			echo esc_html( $file['error'] ) . '</p>';
			return false;
		} else if ( ! file_exists( $file['file'] ) ) {
			echo '<p><strong>' . __( 'Sorry, there has been an error.', 'wordpress-importer' ) . '</strong><br />';
			printf( __( 'The export file could not be found at <code>%s</code>. It is likely that this was caused by a permissions problem.', 'wordpress-importer' ), esc_html( $file['file'] ) );
			echo '</p>';
			return false;
		}
		$this->id = (int) $file['id'];
		$import_data = $this->parse( $file['file'] );
		if ( is_wp_error( $import_data ) ) {
			echo '<p><strong>' . __( 'Sorry, there has been an error.', 'wordpress-importer' ) . '</strong><br />';
			echo esc_html( $import_data->get_error_message() ) . '</p>';
			return false;
		}
		$this->version = $import_data['version'];
		// Warn (but do not abort) when the WXR version is newer than supported.
		if ( $this->version > $this->max_wxr_version ) {
			echo '<div class="error"><p><strong>';
			printf( __( 'This WXR file (version %s) may not be supported by this version of the importer. Please consider updating.', 'wordpress-importer' ), esc_html($import_data['version']) );
			echo '</strong></p></div>';
		}
		$this->get_authors_from_import( $import_data );
		return true;
	}
	/**
	 * Retrieve authors from parsed WXR data.
	 *
	 * Uses the provided author information from WXR 1.1 files
	 * or extracts info from each post for WXR 1.0 files.
	 *
	 * @param array $import_data Data returned by a WXR parser
	 */
	function get_authors_from_import( $import_data ) {
		if ( ! empty( $import_data['authors'] ) ) {
			$this->authors = $import_data['authors'];
		// no author information, grab it from the posts
		} else {
			foreach ( $import_data['posts'] as $post ) {
				$login = sanitize_user( $post['post_author'], true );
				if ( empty( $login ) ) {
					// Unusable login after sanitization: warn and fall back to
					// attributing this author's posts to the current user.
					printf( __( 'Failed to import author %s. Their posts will be attributed to the current user.', 'wordpress-importer' ), esc_html( $post['post_author'] ) );
					echo '<br />';
					continue;
				}
				// Register each distinct author login only once.
				if ( ! isset($this->authors[$login]) )
					$this->authors[$login] = array(
						'author_login' => $login,
						'author_display_name' => $post['post_author']
					);
			}
		}
	}
/**
* Display pre-import options, author importing/mapping and option to
* fetch attachments
*/
function import_options() {
	$j = 0; // sequential index used to key the per-author form fields
	?>
	<form action="<?php echo admin_url( 'admin.php?import=wordpress&step=2' ); ?>" method="post">
		<?php wp_nonce_field( 'import-wordpress' ); // CSRF token checked before step 2 runs ?>
		<input type="hidden" name="import_id" value="<?php echo $this->id; ?>" />
		<?php if ( ! empty( $this->authors ) ) : ?>
			<h3><?php _e( 'Assign Authors', 'wordpress-importer' ); ?></h3>
			<p><?php _e( 'To make it easier for you to edit and save the imported content, you may want to reassign the author of the imported item to an existing user of this site. For example, you may want to import all the entries as <code>admin</code>s entries.', 'wordpress-importer' ); ?></p>
			<?php if ( $this->allow_create_users() ) : ?>
				<p><?php printf( __( 'If a new user is created by WordPress, a new password will be randomly generated and the new user’s role will be set as %s. Manually changing the new user’s details will be necessary.', 'wordpress-importer' ), esc_html( get_option('default_role') ) ); ?></p>
			<?php endif; ?>
			<ol id="authors">
				<?php foreach ( $this->authors as $author ) : ?>
					<li><?php $this->author_select( $j++, $author ); ?></li>
				<?php endforeach; ?>
			</ol>
		<?php endif; ?>
		<?php if ( $this->allow_fetch_attachments() ) : // checkbox hidden entirely when downloads are disallowed ?>
			<h3><?php _e( 'Import Attachments', 'wordpress-importer' ); ?></h3>
			<p>
				<input type="checkbox" value="1" name="fetch_attachments" id="import-attachments" />
				<label for="import-attachments"><?php _e( 'Download and import file attachments', 'wordpress-importer' ); ?></label>
			</p>
		<?php endif; ?>
		<p class="submit"><input type="submit" class="button" value="<?php esc_attr_e( 'Submit', 'wordpress-importer' ); ?>" /></p>
	</form>
	<?php
}
/**
* Display import options for an individual author. That is, either create
* a new user based on import info or map to an existing user
*
* @param int $n Index for each author in the form
* @param array $author Author information, e.g. login, display name, email
*/
function author_select( $n, $author ) {
	_e( 'Import author:', 'wordpress-importer' );
	echo ' <strong>' . esc_html( $author['author_display_name'] );
	// WXR 1.1+ supplies a real login; WXR 1.0 only has a display name.
	if ( $this->version != '1.0' ) echo ' (' . esc_html( $author['author_login'] ) . ')';
	echo '</strong><br />';

	if ( $this->version != '1.0' )
		echo '<div style="margin-left:18px">';

	$create_users = $this->allow_create_users();
	if ( $create_users ) {
		if ( $this->version != '1.0' ) {
			_e( 'or create new user with login name:', 'wordpress-importer' );
			$value = '';
		} else {
			_e( 'as a new user:', 'wordpress-importer' );
			// Pre-fill the field with a sanitized copy of the imported login.
			$value = esc_attr( sanitize_user( $author['author_login'], true ) );
		}

		echo ' <input type="text" name="user_new['.$n.']" value="'. $value .'" /><br />';
	}

	if ( ! $create_users && $this->version == '1.0' )
		_e( 'assign posts to an existing user:', 'wordpress-importer' );
	else
		_e( 'or assign posts to an existing user:', 'wordpress-importer' );
	wp_dropdown_users( array( 'name' => "user_map[$n]", 'multi' => true, 'show_option_all' => __( '- Select -', 'wordpress-importer' ) ) );
	// Carry the original login through the form so get_author_mapping() can
	// match each decision back to the imported author.
	echo '<input type="hidden" name="imported_authors['.$n.']" value="' . esc_attr( $author['author_login'] ) . '" />';

	if ( $this->version != '1.0' )
		echo '</div>';
}
/**
* Map old author logins to local user IDs based on decisions made
* in import options form. Can map to an existing user, create a new user
* or falls back to the current user in case of error with either of the previous
*/
/**
 * Map old author logins to local user IDs based on decisions made
 * in the import options form. Can map to an existing user, create a
 * new user, or fall back to the current user in case of error.
 *
 * Fix: $user_id is now initialized before the creation branches. With a
 * WXR 1.0 file and an empty "new user" field, neither branch assigned it,
 * so the is_wp_error() check below read an undefined variable (PHP notice)
 * and recorded a null mapping entry. The final outcome (fallback to the
 * current user) is unchanged.
 */
function get_author_mapping() {
	if ( ! isset( $_POST['imported_authors'] ) )
		return;

	$create_users = $this->allow_create_users();

	foreach ( (array) $_POST['imported_authors'] as $i => $old_login ) {
		// Multisite adds strtolower to sanitize_user. Need to sanitize here to stop breakage in process_posts.
		$santized_old_login = sanitize_user( $old_login, true );
		$old_id = isset( $this->authors[$old_login]['author_id'] ) ? intval( $this->authors[$old_login]['author_id'] ) : false;

		if ( ! empty( $_POST['user_map'][$i] ) ) {
			// The user picked an existing local account from the dropdown.
			$user = get_userdata( intval( $_POST['user_map'][$i] ) );
			if ( isset( $user->ID ) ) {
				if ( $old_id )
					$this->processed_authors[$old_id] = $user->ID;
				$this->author_mapping[$santized_old_login] = $user->ID;
			}
		} else if ( $create_users ) {
			// Sentinel: stays false when no creation was attempted at all.
			$user_id = false;
			if ( ! empty( $_POST['user_new'][$i] ) ) {
				$user_id = wp_create_user( $_POST['user_new'][$i], wp_generate_password() );
			} else if ( $this->version != '1.0' ) {
				$user_data = array(
					'user_login'   => $old_login,
					'user_pass'    => wp_generate_password(),
					'user_email'   => isset( $this->authors[$old_login]['author_email'] ) ? $this->authors[$old_login]['author_email'] : '',
					'display_name' => $this->authors[$old_login]['author_display_name'],
					'first_name'   => isset( $this->authors[$old_login]['author_first_name'] ) ? $this->authors[$old_login]['author_first_name'] : '',
					'last_name'    => isset( $this->authors[$old_login]['author_last_name'] ) ? $this->authors[$old_login]['author_last_name'] : '',
				);
				$user_id = wp_insert_user( $user_data );
			}

			if ( false !== $user_id && ! is_wp_error( $user_id ) ) {
				if ( $old_id )
					$this->processed_authors[$old_id] = $user_id;
				$this->author_mapping[$santized_old_login] = $user_id;
			} else if ( false !== $user_id ) {
				// Creation was attempted but failed; report and let the
				// failsafe below attribute the posts to the current user.
				printf( __( 'Failed to create new user for %s. Their posts will be attributed to the current user.', 'wordpress-importer' ), esc_html( $this->authors[$old_login]['author_display_name'] ) );
				if ( defined('IMPORT_DEBUG') && IMPORT_DEBUG )
					echo ' ' . $user_id->get_error_message();
				echo '<br />';
			}
		}

		// failsafe: if the user_id was invalid, default to the current user
		if ( ! isset( $this->author_mapping[$santized_old_login] ) ) {
			if ( $old_id )
				$this->processed_authors[$old_id] = (int) get_current_user_id();
			$this->author_mapping[$santized_old_login] = (int) get_current_user_id();
		}
	}
}
/**
* Create new categories based on import information
*
* Doesn't create a new category if its slug already exists
*/
/**
 * Create categories from the parsed import data.
 *
 * A category whose nicename (slug) already exists is left untouched;
 * its current term ID is still recorded for later ID remapping.
 */
function process_categories() {
	$this->categories = apply_filters( 'wp_import_categories', $this->categories );

	if ( empty( $this->categories ) )
		return;

	foreach ( $this->categories as $cat ) {
		$existing = term_exists( $cat['category_nicename'], 'category' );
		if ( $existing ) {
			// Already present: just remember the old-ID -> local-ID mapping.
			$existing_id = is_array( $existing ) ? $existing['term_id'] : $existing;
			if ( isset( $cat['term_id'] ) )
				$this->processed_terms[ intval( $cat['term_id'] ) ] = (int) $existing_id;
			continue;
		}

		$new_cat = array(
			'category_nicename'    => $cat['category_nicename'],
			'category_parent'      => empty( $cat['category_parent'] ) ? 0 : category_exists( $cat['category_parent'] ),
			'cat_name'             => $cat['cat_name'],
			'category_description' => isset( $cat['category_description'] ) ? $cat['category_description'] : ''
		);

		$inserted = wp_insert_category( $new_cat );
		if ( is_wp_error( $inserted ) ) {
			printf( __( 'Failed to import category %s', 'wordpress-importer' ), esc_html( $cat['category_nicename'] ) );
			if ( defined('IMPORT_DEBUG') && IMPORT_DEBUG )
				echo ': ' . $inserted->get_error_message();
			echo '<br />';
			continue;
		}

		if ( isset( $cat['term_id'] ) )
			$this->processed_terms[ intval( $cat['term_id'] ) ] = $inserted;
	}

	unset( $this->categories );
}
/**
* Create new post tags based on import information
*
* Doesn't create a tag if its slug already exists
*/
/**
 * Create post tags from the parsed import data.
 *
 * A tag whose slug already exists is reused; its current term ID is
 * still recorded for later ID remapping.
 */
function process_tags() {
	$this->tags = apply_filters( 'wp_import_tags', $this->tags );

	if ( empty( $this->tags ) )
		return;

	foreach ( $this->tags as $tag ) {
		$existing = term_exists( $tag['tag_slug'], 'post_tag' );
		if ( $existing ) {
			// Already present: just remember the old-ID -> local-ID mapping.
			$existing_id = is_array( $existing ) ? $existing['term_id'] : $existing;
			if ( isset( $tag['term_id'] ) )
				$this->processed_terms[ intval( $tag['term_id'] ) ] = (int) $existing_id;
			continue;
		}

		$args = array(
			'slug'        => $tag['tag_slug'],
			'description' => isset( $tag['tag_description'] ) ? $tag['tag_description'] : ''
		);
		$inserted = wp_insert_term( $tag['tag_name'], 'post_tag', $args );
		if ( is_wp_error( $inserted ) ) {
			printf( __( 'Failed to import post tag %s', 'wordpress-importer' ), esc_html( $tag['tag_name'] ) );
			if ( defined('IMPORT_DEBUG') && IMPORT_DEBUG )
				echo ': ' . $inserted->get_error_message();
			echo '<br />';
			continue;
		}

		if ( isset( $tag['term_id'] ) )
			$this->processed_terms[ intval( $tag['term_id'] ) ] = $inserted['term_id'];
	}

	unset( $this->tags );
}
/**
* Create new terms based on import information
*
* Doesn't create a term if its slug already exists
*/
/**
 * Create custom-taxonomy terms from the parsed import data.
 *
 * A term whose slug already exists within the same taxonomy is reused;
 * its current term ID is still recorded for later ID remapping.
 */
function process_terms() {
	$this->terms = apply_filters( 'wp_import_terms', $this->terms );

	if ( empty( $this->terms ) )
		return;

	foreach ( $this->terms as $term ) {
		$existing = term_exists( $term['slug'], $term['term_taxonomy'] );
		if ( $existing ) {
			// Already present in this taxonomy: record the mapping only.
			$existing_id = is_array( $existing ) ? $existing['term_id'] : $existing;
			if ( isset( $term['term_id'] ) )
				$this->processed_terms[ intval( $term['term_id'] ) ] = (int) $existing_id;
			continue;
		}

		// Resolve the parent term within the same taxonomy, if any.
		$parent = 0;
		if ( ! empty( $term['term_parent'] ) ) {
			$parent = term_exists( $term['term_parent'], $term['term_taxonomy'] );
			if ( is_array( $parent ) )
				$parent = $parent['term_id'];
		}

		$args = array(
			'slug'        => $term['slug'],
			'description' => isset( $term['term_description'] ) ? $term['term_description'] : '',
			'parent'      => intval( $parent )
		);
		$inserted = wp_insert_term( $term['term_name'], $term['term_taxonomy'], $args );
		if ( is_wp_error( $inserted ) ) {
			printf( __( 'Failed to import %s %s', 'wordpress-importer' ), esc_html( $term['term_taxonomy'] ), esc_html( $term['term_name'] ) );
			if ( defined('IMPORT_DEBUG') && IMPORT_DEBUG )
				echo ': ' . $inserted->get_error_message();
			echo '<br />';
			continue;
		}

		if ( isset( $term['term_id'] ) )
			$this->processed_terms[ intval( $term['term_id'] ) ] = $inserted['term_id'];
	}

	unset( $this->terms );
}
/**
* Create new posts based on import information
*
* Posts marked as having a parent which doesn't exist will become top level items.
* Doesn't create a new post if: the post type doesn't exist, the given post ID
* is already noted as imported or a post with the same title and date already exists.
* Note that new/updated terms, comments and meta are imported for the last of the above.
*/
function process_posts() {
	$this->posts = apply_filters( 'wp_import_posts', $this->posts );

	foreach ( $this->posts as $post ) {
		$post = apply_filters( 'wp_import_post_data_raw', $post );

		// Unregistered post type: report and skip this entry.
		// NOTE(review): firing 'wp_import_post_exists' in this branch looks
		// unusual (the post was skipped, not found) — confirm intent.
		if ( ! post_type_exists( $post['post_type'] ) ) {
			printf( __( 'Failed to import “%s”: Invalid post type %s', 'wordpress-importer' ),
				esc_html($post['post_title']), esc_html($post['post_type']) );
			echo '<br />';
			do_action( 'wp_import_post_exists', $post );
			continue;
		}

		// Skip anything already imported during this run, and unsaved auto-drafts.
		if ( isset( $this->processed_posts[$post['post_id']] ) && ! empty( $post['post_id'] ) )
			continue;

		if ( $post['status'] == 'auto-draft' )
			continue;

		// Menu items have their own, postmeta-driven import path.
		if ( 'nav_menu_item' == $post['post_type'] ) {
			$this->process_menu_item( $post );
			continue;
		}

		$post_type_object = get_post_type_object( $post['post_type'] );

		$post_exists = post_exists( $post['post_title'], '', $post['post_date'] );
		if ( $post_exists && get_post_type( $post_exists ) == $post['post_type'] ) {
			// Same title/date/type already present: reuse it. Terms, comments
			// and postmeta below are still imported against the existing post.
			printf( __('%s “%s” already exists.', 'wordpress-importer'), $post_type_object->labels->singular_name, esc_html($post['post_title']) );
			echo '<br />';
			$comment_post_ID = $post_id = $post_exists;
		} else {
			$post_parent = (int) $post['post_parent'];
			if ( $post_parent ) {
				// if we already know the parent, map it to the new local ID
				if ( isset( $this->processed_posts[$post_parent] ) ) {
					$post_parent = $this->processed_posts[$post_parent];
				// otherwise record the parent for later (backfill_parents)
				} else {
					$this->post_orphans[intval($post['post_id'])] = $post_parent;
					$post_parent = 0;
				}
			}

			// map the post author; unmapped logins fall back to the current user
			$author = sanitize_user( $post['post_author'], true );
			if ( isset( $this->author_mapping[$author] ) )
				$author = $this->author_mapping[$author];
			else
				$author = (int) get_current_user_id();

			$postdata = array(
				'import_id' => $post['post_id'], 'post_author' => $author, 'post_date' => $post['post_date'],
				'post_date_gmt' => $post['post_date_gmt'], 'post_content' => $post['post_content'],
				'post_excerpt' => $post['post_excerpt'], 'post_title' => $post['post_title'],
				'post_status' => $post['status'], 'post_name' => $post['post_name'],
				'comment_status' => $post['comment_status'], 'ping_status' => $post['ping_status'],
				'guid' => $post['guid'], 'post_parent' => $post_parent, 'menu_order' => $post['menu_order'],
				'post_type' => $post['post_type'], 'post_password' => $post['post_password']
			);

			$original_post_ID = $post['post_id'];
			$postdata = apply_filters( 'wp_import_post_data_processed', $postdata, $post );

			if ( 'attachment' == $postdata['post_type'] ) {
				$remote_url = ! empty($post['attachment_url']) ? $post['attachment_url'] : $post['guid'];

				// NOTE(review): WPLMS customization — URLs without '://' are
				// resolved against the theme's bundled demo uploads. Confirm
				// VIBE_URL is always defined when this importer runs.
				if(!strpos($remote_url,'://')){
					$remote_url = apply_filters('wplms_data_import_url',VIBE_URL.'/setup/data/uploads/').$remote_url;
				}

				// try to use _wp_attached file for upload folder placement to ensure the same location as the export site
				// e.g. location is 2003/05/image.jpg but the attachment post_date is 2010/09, see media_handle_upload()
				$postdata['upload_date'] = $post['post_date'];
				if ( isset( $post['postmeta'] ) ) {
					foreach( $post['postmeta'] as $meta ) {
						if ( $meta['key'] == '_wp_attached_file' ) {
							if ( preg_match( '%^[0-9]{4}/[0-9]{2}%', $meta['value'], $matches ) )
								$postdata['upload_date'] = $matches[0];
							break;
						}
					}
				}

				$comment_post_ID = $post_id = $this->process_attachment( $postdata, $remote_url );
			} else {
				$comment_post_ID = $post_id = wp_insert_post( $postdata, true );
				do_action( 'wp_import_insert_post', $post_id, $original_post_ID, $postdata, $post );
			}

			if ( is_wp_error( $post_id ) ) {
				printf( __( 'Failed to import %s “%s”', 'wordpress-importer' ),
					$post_type_object->labels->singular_name, esc_html($post['post_title']) );
				if ( defined('IMPORT_DEBUG') && IMPORT_DEBUG )
					echo ': ' . $post_id->get_error_message();
				echo '<br />';
				continue;
			}

			if ( $post['is_sticky'] == 1 )
				stick_post( $post_id );
		}

		// map pre-import ID to local ID
		$this->processed_posts[intval($post['post_id'])] = (int) $post_id;

		if ( ! isset( $post['terms'] ) )
			$post['terms'] = array();

		$post['terms'] = apply_filters( 'wp_import_post_terms', $post['terms'], $post_id, $post );

		// add categories, tags and other terms
		if ( ! empty( $post['terms'] ) ) {
			$terms_to_set = array();
			foreach ( $post['terms'] as $term ) {
				// back compat with WXR 1.0 map 'tag' to 'post_tag'
				$taxonomy = ( 'tag' == $term['domain'] ) ? 'post_tag' : $term['domain'];
				$term_exists = term_exists( $term['slug'], $taxonomy );
				$term_id = is_array( $term_exists ) ? $term_exists['term_id'] : $term_exists;
				if ( ! $term_id ) {
					// term missing locally: create it on the fly
					$t = wp_insert_term( $term['name'], $taxonomy, array( 'slug' => $term['slug'] ) );
					if ( ! is_wp_error( $t ) ) {
						$term_id = $t['term_id'];
						do_action( 'wp_import_insert_term', $t, $term, $post_id, $post );
					} else {
						printf( __( 'Failed to import %s %s', 'wordpress-importer' ), esc_html($taxonomy), esc_html($term['name']) );
						if ( defined('IMPORT_DEBUG') && IMPORT_DEBUG )
							echo ': ' . $t->get_error_message();
						echo '<br />';
						do_action( 'wp_import_insert_term_failed', $t, $term, $post_id, $post );
						continue;
					}
				}
				$terms_to_set[$taxonomy][] = intval( $term_id );
			}

			// assign all collected term IDs, grouped per taxonomy
			foreach ( $terms_to_set as $tax => $ids ) {
				$tt_ids = wp_set_post_terms( $post_id, $ids, $tax );
				do_action( 'wp_import_set_post_terms', $tt_ids, $ids, $tax, $post_id, $post );
			}
			unset( $post['terms'], $terms_to_set );
		}

		if ( ! isset( $post['comments'] ) )
			$post['comments'] = array();

		$post['comments'] = apply_filters( 'wp_import_post_comments', $post['comments'], $post_id, $post );

		// add/update comments
		if ( ! empty( $post['comments'] ) ) {
			$num_comments = 0;
			$inserted_comments = array();
			// Stage comments keyed by their original IDs so parents can be
			// inserted before children (ksort below).
			foreach ( $post['comments'] as $comment ) {
				$comment_id	= $comment['comment_id'];
				$newcomments[$comment_id]['comment_post_ID']      = $comment_post_ID;
				$newcomments[$comment_id]['comment_author']       = $comment['comment_author'];
				$newcomments[$comment_id]['comment_author_email'] = $comment['comment_author_email'];
				$newcomments[$comment_id]['comment_author_IP']    = $comment['comment_author_IP'];
				$newcomments[$comment_id]['comment_author_url']   = $comment['comment_author_url'];
				$newcomments[$comment_id]['comment_date']         = $comment['comment_date'];
				$newcomments[$comment_id]['comment_date_gmt']     = $comment['comment_date_gmt'];
				$newcomments[$comment_id]['comment_content']      = $comment['comment_content'];
				$newcomments[$comment_id]['comment_approved']     = $comment['comment_approved'];
				$newcomments[$comment_id]['comment_type']         = $comment['comment_type'];
				$newcomments[$comment_id]['comment_parent'] 	  = $comment['comment_parent'];
				$newcomments[$comment_id]['commentmeta']          = isset( $comment['commentmeta'] ) ? $comment['commentmeta'] : array();
				if ( isset( $this->processed_authors[$comment['comment_user_id']] ) )
					$newcomments[$comment_id]['user_id'] = $this->processed_authors[$comment['comment_user_id']];
			}
			ksort( $newcomments );

			foreach ( $newcomments as $key => $comment ) {
				// if this is a new post we can skip the comment_exists() check
				if ( ! $post_exists || ! comment_exists( $comment['comment_author'], $comment['comment_date'] ) ) {
					// remap the parent to its newly-inserted local ID, if any
					if ( isset( $inserted_comments[$comment['comment_parent']] ) )
						$comment['comment_parent'] = $inserted_comments[$comment['comment_parent']];
					$comment = wp_filter_comment( $comment );
					$inserted_comments[$key] = wp_insert_comment( $comment );
					do_action( 'wp_import_insert_comment', $inserted_comments[$key], $comment, $comment_post_ID, $post );

					foreach( $comment['commentmeta'] as $meta ) {
						$value = maybe_unserialize( $meta['value'] );
						add_comment_meta( $inserted_comments[$key], $meta['key'], $value );
					}

					$num_comments++;
				}
			}
			unset( $newcomments, $inserted_comments, $post['comments'] );
		}

		if ( ! isset( $post['postmeta'] ) )
			$post['postmeta'] = array();

		$post['postmeta'] = apply_filters( 'wp_import_post_meta', $post['postmeta'], $post_id, $post );

		// add/update post meta
		if ( ! empty( $post['postmeta'] ) ) {
			foreach ( $post['postmeta'] as $meta ) {
				$key = apply_filters( 'import_post_meta_key', $meta['key'], $post_id, $post );
				$value = false;

				// _edit_last stores a user ID, which must be remapped; drop it
				// when the referenced author wasn't imported.
				if ( '_edit_last' == $key ) {
					if ( isset( $this->processed_authors[intval($meta['value'])] ) )
						$value = $this->processed_authors[intval($meta['value'])];
					else
						$key = false;
				}

				if ( $key ) {
					// export gets meta straight from the DB so could have a serialized string
					if ( ! $value )
						$value = maybe_unserialize( $meta['value'] );

					add_post_meta( $post_id, $key, $value );
					do_action( 'import_post_meta', $post_id, $key, $value );

					// if the post has a featured image, take note of this in case of remap
					if ( '_thumbnail_id' == $key )
						$this->featured_images[$post_id] = (int) $value;
				}
			}
		}
	}

	unset( $this->posts );
}
/**
* Attempt to create a new menu item from import data
*
* Fails for draft, orphaned menu items and those without an associated nav_menu
* or an invalid nav_menu term. If the post type or term object which the menu item
* represents doesn't exist then the menu item will not be imported (waits until the
* end of the import to retry again before discarding).
*
* @param array $item Menu item details from WXR file
*/
/**
 * Attempt to create a new menu item from import data
 *
 * Fails for draft, orphaned menu items and those without an associated nav_menu
 * or an invalid nav_menu term. If the post type or term object which the menu item
 * represents doesn't exist then the menu item will not be imported (waits until the
 * end of the import to retry again before discarding).
 *
 * Fix: the postmeta expansion now uses ${$meta['key']} instead of
 * $$meta['key']. Under PHP 7's uniform variable syntax the unbraced form is
 * evaluated as ($$meta)['key'], so none of the $_menu_item_* variables were
 * being created and every menu item import broke.
 *
 * @param array $item Menu item details from WXR file
 */
function process_menu_item( $item ) {
	// skip draft, orphaned menu items
	if ( 'draft' == $item['status'] )
		return;

	$menu_slug = false;
	if ( isset($item['terms']) ) {
		// loop through terms, assume first nav_menu term is correct menu
		foreach ( $item['terms'] as $term ) {
			if ( 'nav_menu' == $term['domain'] ) {
				$menu_slug = $term['slug'];
				break;
			}
		}
	}

	// no nav_menu term associated with this menu item
	if ( ! $menu_slug ) {
		_e( 'Menu item skipped due to missing menu slug', 'wordpress-importer' );
		echo '<br />';
		return;
	}

	$menu_id = term_exists( $menu_slug, 'nav_menu' );
	if ( ! $menu_id ) {
		printf( __( 'Menu item skipped due to invalid menu slug: %s', 'wordpress-importer' ), esc_html( $menu_slug ) );
		echo '<br />';
		return;
	} else {
		$menu_id = is_array( $menu_id ) ? $menu_id['term_id'] : $menu_id;
	}

	// Expand each postmeta row into a local variable named after its key
	// (e.g. $_menu_item_type, $_menu_item_object_id). The braces are required
	// for correct parsing on PHP 7+.
	foreach ( $item['postmeta'] as $meta )
		${$meta['key']} = $meta['value'];

	// Remap the object the item points at to its newly-imported local ID.
	if ( 'taxonomy' == $_menu_item_type && isset( $this->processed_terms[intval($_menu_item_object_id)] ) ) {
		$_menu_item_object_id = $this->processed_terms[intval($_menu_item_object_id)];
	} else if ( 'post_type' == $_menu_item_type && isset( $this->processed_posts[intval($_menu_item_object_id)] ) ) {
		$_menu_item_object_id = $this->processed_posts[intval($_menu_item_object_id)];
	} else if ( 'custom' != $_menu_item_type ) {
		// associated object is missing or not imported yet, we'll retry later
		$this->missing_menu_items[] = $item;
		return;
	}

	// Remap the parent menu item, or record it for backfill_parents().
	if ( isset( $this->processed_menu_items[intval($_menu_item_menu_item_parent)] ) ) {
		$_menu_item_menu_item_parent = $this->processed_menu_items[intval($_menu_item_menu_item_parent)];
	} else if ( $_menu_item_menu_item_parent ) {
		$this->menu_item_orphans[intval($item['post_id'])] = (int) $_menu_item_menu_item_parent;
		$_menu_item_menu_item_parent = 0;
	}

	// wp_update_nav_menu_item expects CSS classes as a space separated string
	$_menu_item_classes = maybe_unserialize( $_menu_item_classes );
	if ( is_array( $_menu_item_classes ) )
		$_menu_item_classes = implode( ' ', $_menu_item_classes );

	$args = array(
		'menu-item-object-id' => $_menu_item_object_id,
		'menu-item-object' => $_menu_item_object,
		'menu-item-parent-id' => $_menu_item_menu_item_parent,
		'menu-item-position' => intval( $item['menu_order'] ),
		'menu-item-type' => $_menu_item_type,
		'menu-item-title' => $item['post_title'],
		'menu-item-url' => $_menu_item_url,
		'menu-item-description' => $item['post_content'],
		'menu-item-attr-title' => $item['post_excerpt'],
		'menu-item-target' => $_menu_item_target,
		'menu-item-classes' => $_menu_item_classes,
		'menu-item-xfn' => $_menu_item_xfn,
		'menu-item-status' => $item['status']
	);

	$id = wp_update_nav_menu_item( $menu_id, 0, $args );
	if ( $id && ! is_wp_error( $id ) )
		$this->processed_menu_items[intval($item['post_id'])] = (int) $id;
}
/**
* If fetching attachments is enabled then attempt to create a new attachment
*
* @param array $post Attachment post details from WXR
* @param string $url URL to fetch attachment from
* @return int|WP_Error Post ID on success, WP_Error otherwise
*/
function process_attachment( $post, $url ) {
	// Honour the step-2 checkbox / filter before doing any network work.
	if ( ! $this->fetch_attachments )
		return new WP_Error( 'attachment_processing_error',
			__( 'Fetching attachments is not enabled', 'wordpress-importer' ) );

	// if the URL is absolute, but does not contain address, then upload it assuming base_site_url
	if ( preg_match( '|^/[\w\W]+$|', $url ) )
		$url = rtrim( $this->base_url, '/' ) . $url;

	$upload = $this->fetch_remote_file( $url, $post );
	if ( is_wp_error( $upload ) )
		return $upload;

	// Reject downloads whose type cannot be determined from the extension.
	if ( $info = wp_check_filetype( $upload['file'] ) )
		$post['post_mime_type'] = $info['type'];
	else
		return new WP_Error( 'attachment_processing_error', __('Invalid file type', 'wordpress-importer') );

	$post['guid'] = $upload['url'];

	// as per wp-admin/includes/upload.php
	$post_id = wp_insert_attachment( $post, $upload['file'] );
	wp_update_attachment_metadata( $post_id, wp_generate_attachment_metadata( $post_id, $upload['file'] ) );

	// remap resized image URLs, works by stripping the extension and remapping the URL stub.
	if ( preg_match( '!^image/!', $info['type'] ) ) {
		$parts = pathinfo( $url );
		$name = basename( $parts['basename'], ".{$parts['extension']}" ); // PATHINFO_FILENAME in PHP 5.2
		$parts_new = pathinfo( $upload['url'] );
		$name_new = basename( $parts_new['basename'], ".{$parts_new['extension']}" );
		$this->url_remap[$parts['dirname'] . '/' . $name] = $parts_new['dirname'] . '/' . $name_new;
	}

	return $post_id;
}
/**
* Attempt to download a remote file attachment
*
* @param string $url URL of item to fetch
* @param array $post Attachment details
* @return array|WP_Error Local file location details on success, WP_Error otherwise
*/
function fetch_remote_file( $url, $post ) {
	// extract the file name and extension from the url
	$file_name = basename( $url );

	// get placeholder file in the upload dir with a unique, sanitized filename
	$upload = wp_upload_bits( $file_name, 0, '', $post['upload_date'] );
	if ( $upload['error'] )
		return new WP_Error( 'upload_dir_error', $upload['error'] );

	// fetch the remote url and write it to the placeholder file
	// NOTE(review): wp_get_http() was deprecated in later WordPress releases
	// in favor of the WP_Http API (wp_remote_get) — confirm availability on
	// the WordPress versions this plugin targets.
	$headers = wp_get_http( $url, $upload['file'] );

	// request failed
	if ( ! $headers ) {
		@unlink( $upload['file'] );
		return new WP_Error( 'import_file_error', __('Remote server did not respond', 'wordpress-importer') );
	}

	// make sure the fetch was successful
	if ( $headers['response'] != '200' ) {
		@unlink( $upload['file'] );
		return new WP_Error( 'import_file_error', sprintf( __('Remote server returned error response %1$d %2$s', 'wordpress-importer'), esc_html($headers['response']), get_status_header_desc($headers['response']) ) );
	}

	$filesize = filesize( $upload['file'] );

	// a truncated or padded download is unusable
	if ( isset( $headers['content-length'] ) && $filesize != $headers['content-length'] ) {
		@unlink( $upload['file'] );
		return new WP_Error( 'import_file_error', __('Remote file is incorrect size', 'wordpress-importer') );
	}

	if ( 0 == $filesize ) {
		@unlink( $upload['file'] );
		return new WP_Error( 'import_file_error', __('Zero size file downloaded', 'wordpress-importer') );
	}

	$max_size = (int) $this->max_attachment_size();
	if ( ! empty( $max_size ) && $filesize > $max_size ) {
		@unlink( $upload['file'] );
		return new WP_Error( 'import_file_error', sprintf(__('Remote file is too large, limit is %s', 'wordpress-importer'), size_format($max_size) ) );
	}

	// keep track of the old and new urls so we can substitute them later
	$this->url_remap[$url] = $upload['url'];
	$this->url_remap[$post['guid']] = $upload['url']; // r13735, really needed?
	// keep track of the destination if the remote url is redirected somewhere else
	if ( isset($headers['x-final-location']) && $headers['x-final-location'] != $url )
		$this->url_remap[$headers['x-final-location']] = $upload['url'];

	return $upload;
}
/**
* Attempt to associate posts and menu items with previously missing parents
*
* An imported post's parent may not have been imported when it was first created
* so try again. Similarly for child menu items and menu items which were missing
* the object (e.g. post) they represent in the menu
*/
/**
 * Attempt to associate posts and menu items with previously missing parents.
 *
 * A post's parent may not have existed locally at the time the post was
 * imported, so it was recorded as an orphan; the same applies to child menu
 * items and to menu items whose target object had not been imported yet.
 */
function backfill_parents() {
	global $wpdb;

	// find parents for post orphans
	foreach ( $this->post_orphans as $child_id => $parent_id ) {
		$local_child  = isset( $this->processed_posts[ $child_id ] ) ? $this->processed_posts[ $child_id ] : false;
		$local_parent = isset( $this->processed_posts[ $parent_id ] ) ? $this->processed_posts[ $parent_id ] : false;

		if ( $local_child && $local_parent )
			$wpdb->update( $wpdb->posts, array( 'post_parent' => $local_parent ), array( 'ID' => $local_child ), '%d', '%d' );
	}

	// all other posts/terms are imported, retry menu items with missing associated object
	$pending_items = $this->missing_menu_items;
	foreach ( $pending_items as $item )
		$this->process_menu_item( $item );

	// find parents for menu item orphans
	foreach ( $this->menu_item_orphans as $child_id => $parent_id ) {
		$local_child  = isset( $this->processed_menu_items[ $child_id ] ) ? $this->processed_menu_items[ $child_id ] : 0;
		$local_parent = isset( $this->processed_menu_items[ $parent_id ] ) ? $this->processed_menu_items[ $parent_id ] : 0;

		if ( $local_child && $local_parent )
			update_post_meta( $local_child, '_menu_item_menu_item_parent', (int) $local_parent );
	}
}
/**
* Use stored mapping information to update old attachment URLs
*/
/**
 * Use stored mapping information to update old attachment URLs.
 *
 * Fix: the query return value for the enclosure update was captured in an
 * unused local ($result); it is now discarded like the first query's.
 */
function backfill_attachment_urls() {
	global $wpdb;
	// make sure we do the longest urls first, in case one is a substring of another
	uksort( $this->url_remap, array(&$this, 'cmpr_strlen') );

	foreach ( $this->url_remap as $from_url => $to_url ) {
		// remap urls in post_content
		$wpdb->query( $wpdb->prepare("UPDATE {$wpdb->posts} SET post_content = REPLACE(post_content, %s, %s)", $from_url, $to_url) );
		// remap enclosure urls
		$wpdb->query( $wpdb->prepare("UPDATE {$wpdb->postmeta} SET meta_value = REPLACE(meta_value, %s, %s) WHERE meta_key='enclosure'", $from_url, $to_url) );
	}
}
/**
* Update _thumbnail_id meta to new, imported attachment IDs
*/
/**
 * Update _thumbnail_id meta to the newly imported attachment IDs.
 */
function remap_featured_images() {
	// cycle through posts that have a featured image
	foreach ( $this->featured_images as $post_id => $old_attachment_id ) {
		if ( ! isset( $this->processed_posts[ $old_attachment_id ] ) )
			continue;

		$new_attachment_id = $this->processed_posts[ $old_attachment_id ];
		// only write the meta when the ID actually changed
		if ( $new_attachment_id != $old_attachment_id )
			update_post_meta( $post_id, '_thumbnail_id', $new_attachment_id );
	}
}
/**
* Parse a WXR file
*
* @param string $file Path to WXR file for parsing
* @return array Information gathered from the WXR file
*/
/**
 * Parse a WXR file.
 *
 * @param string $file Path to WXR file for parsing
 * @return array Information gathered from the WXR file
 */
function parse( $file ) {
	$wxr_parser = new WXR_Parser();
	return $wxr_parser->parse( $file );
}
// Display import page title
function header() {
	echo '<div class="wrap">';
	screen_icon();
	echo '<h2>' . __( 'Import WordPress', 'wordpress-importer' ) . '</h2>';

	// Warn when a newer release of this importer plugin is available.
	$updates = get_plugin_updates();
	$basename = plugin_basename(__FILE__);
	if ( isset( $updates[$basename] ) ) {
		$update = $updates[$basename];
		echo '<div class="error"><p><strong>';
		printf( __( 'A new version of this importer is available. Please update to version %s to ensure compatibility with newer export files.', 'wordpress-importer' ), $update->update->new_version );
		echo '</strong></p></div>';
	}
}
// Close div.wrap
function footer() {
	// Closes the .wrap container opened in header().
	echo '</div>';
}
/**
* Display introductory text and file upload form
*/
function greet() {
	echo '<div class="narrow">';
	echo '<p>'.__( 'Howdy! Upload your WordPress eXtended RSS (WXR) file and we’ll import the posts, pages, comments, custom fields, categories, and tags into this site.', 'wordpress-importer' ).'</p>';
	echo '<p>'.__( 'Choose a WXR (.xml) file to upload, then click Upload file and import.', 'wordpress-importer' ).'</p>';
	// Renders the standard WordPress upload form posting back to step 1.
	wp_import_upload_form( 'admin.php?import=wordpress&step=1' );
	echo '</div>';
}
/**
* Decide if the given meta key maps to information we will want to import
*
* @param string $key The meta key to check
* @return string|bool The key if we do want to import, false if not
*/
/**
 * Decide if the given meta key maps to information we want to import.
 *
 * @param string $key The meta key to check
 * @return string|bool The key if we do want to import, false if not
 */
function is_valid_meta_key( $key ) {
	// Attachment metadata is regenerated from scratch on import, and
	// _edit_lock is transient editor state — neither should be copied.
	$ignored_keys = array( '_wp_attached_file', '_wp_attachment_metadata', '_edit_lock' );
	return in_array( $key, $ignored_keys ) ? false : $key;
}
/**
* Decide whether or not the importer is allowed to create users.
* Default is true, can be filtered via import_allow_create_users
*
* @return bool True if creating users is allowed
*/
function allow_create_users() {
	// Filterable toggle; defaults to allowing new-user creation.
	return apply_filters( 'import_allow_create_users', true );
}
/**
* Decide whether or not the importer should attempt to download attachment files.
* Default is true, can be filtered via import_allow_fetch_attachments. The choice
* made at the import options screen must also be true, false here hides that checkbox.
*
* @return bool True if downloading attachments is allowed
*/
function allow_fetch_attachments() {
	// Filterable toggle; returning false also hides the step-2 checkbox.
	return apply_filters( 'import_allow_fetch_attachments', true );
}
/**
* Decide what the maximum file size for downloaded attachments is.
* Default is 0 (unlimited), can be filtered via import_attachment_size_limit
*
* @return int Maximum attachment file size to import
*/
function max_attachment_size() {
	// 0 means unlimited; filter to cap downloaded attachment size (bytes).
	return apply_filters( 'import_attachment_size_limit', 0 );
}
/**
* Added to http_request_timeout filter to force timeout at 60 seconds during import
* @return int 60
*/
function bump_request_timeout() {
	// Attachment downloads can be slow; 60s overrides the default HTTP timeout.
	return 60;
}
// return the difference in length between two strings
/**
 * Comparator: sorts strings by descending length (longer first), so URL
 * remapping replaces the most specific prefixes before their substrings.
 */
function cmpr_strlen( $a, $b ) {
	$length_a = strlen( $a );
	$length_b = strlen( $b );
	return $length_b - $length_a;
}
}
} // class_exists( 'WP_Importer' )
function wordpress_importer_init() {
	// Load translations first so the importer description below is localizable.
	load_plugin_textdomain( 'wordpress-importer', false, dirname( plugin_basename( __FILE__ ) ) . '/languages' );

	/**
	 * WordPress Importer object for registering the import callback
	 * @global WP_Import $wp_import
	 */
	$GLOBALS['wp_import'] = new WP_Import();
	register_importer( 'wordpress', 'WordPress', __('Import <strong>posts, pages, comments, custom fields, categories, and tags</strong> from a WordPress export file.', 'wordpress-importer'), array( $GLOBALS['wp_import'], 'dispatch' ) );
}
// Runs on every admin page load so the importer appears under Tools > Import.
add_action( 'admin_init', 'wordpress_importer_init' );
| apache-2.0 |
flavoi/diventi | diventi/landing/migrations/0081_auto_20200301_1644.py | 511 | # Generated by Django 2.2.10 on 2020-03-01 15:44
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: updates the 'alignment' field declaration on
    # the Section model after migration 0080.

    dependencies = [
        ('landing', '0080_auto_20200301_1636'),
    ]

    operations = [
        # Re-declare Section.alignment with the left/centered/right choice
        # set, defaulting to 'centered'.
        migrations.AlterField(
            model_name='section',
            name='alignment',
            field=models.CharField(choices=[('left', 'left'), ('centered', 'centered'), ('right', 'right')], default='centered', max_length=50, verbose_name='alignment'),
        ),
    ]
| apache-2.0 |
andrewtikhonov/rdc-rmodules-rcloud | web-app/js/HighDimensionalData.js | 22019 | var HighDimensionalData = function () {
/**
* Get supported high dimensional data types
* @returns {*}
*/
var getSupportedTypes = function () {
// check if tranSMART's global variable exists then use it
if (typeof(HIGH_DIMENSIONAL_DATA) != "undefined") {
return HIGH_DIMENSIONAL_DATA
}
return {
"mrna" : {"platform": "MRNA_AFFYMETRIX", "type": "Gene Expression"},
"mirna_qpcr" : {"platform": "MIRNA_QPCR", "type": "MIRNA_QPCR"},
"mirna_seq" : {"platform": "MIRNA_SEQ", "type": "MIRNA_SEQ"},
"rbm" : {"platform": "RBM", "type": "RBM"},
"proteomics" : {"platform": "PROTEIN", "type": "PROTEOMICS"},
"snp" : {"platform": "SNP", "type": "SNP"},
"rnaseq" : {"platform": "RNA_AFFYMETRIX", "type": "RNASEQ"},
"metabolite" : {"platform": "METABOLOMICS", "type": "METABOLOMICS"}
};
}
// define supported types
this.supportedTypes = getSupportedTypes();
// high dimensional data view element
this.view = null;
// high dimensional data
this.data = null;
// div id
this.divId = null;
}
/**
* Populate data to the popup window
*/
HighDimensionalData.prototype.populate_data = function () {
for (var key in this.data) {
if (this.data.hasOwnProperty(key)) {
var _tmp_data = this.data[key];
// set global marker type
if (_tmp_data.platforms[0].markerType) {
GLOBAL.HighDimDataType = _tmp_data.platforms[0].markerType;
} else {
GLOBAL.HighDimDataType = "";
}
if (document.getElementById("highDimContainer")) {
document.getElementById("highDimensionType").value = key;
document.getElementById("platforms1").value = GLOBAL.HighDimDataType;
document.getElementById("gpl1").value = _tmp_data.platforms[0].id ? _tmp_data.platforms[0].id : "";
document.getElementById("sample1").value = _tmp_data.sampleTypes[0].label ? _tmp_data.sampleTypes[0].label : "";
var _strTissueTypes = "";
if (_tmp_data.tissueTypes) {
for (var i = 0, max = _tmp_data.tissueTypes.length; i < max; i++) {
if (_tmp_data.tissueTypes[i].label) {
_strTissueTypes += _tmp_data.tissueTypes[i].label.concat((i < max - 1) ? ", " : "");
} else {
_strTissueTypes += "";
}
}
document.getElementById("tissue1").value = _strTissueTypes;
}
this.create_pathway_search_box('searchPathway', 'divpathway');
}
} else {
Ext.Msg.alert("Error", "Returned object is unknown.");
}
}
}
HighDimensionalData.prototype.create_pathway_search_box = function (searchInputEltName, divName) {
var ajaxurl, ds, resultTpl;
// remove all elements
var el = document.getElementById(searchInputEltName);
if (el) {
el.value = ''; // empty the search input value
// then remove all child elements
while (el.firstChild) {
el.removeChild(el.firstChild);
}
}
ajaxurl = pageInfo.basePath + '/search/loadSearchPathways';
ds = new Ext.data.Store({
proxy: new Ext.data.ScriptTagProxy({
url: ajaxurl
}),
reader: new Ext.data.JsonReader(
{root: "rows", id: "id"},
[
{name: "id"},
{name: "source"},
{name: "keyword"},
{name: "synonyms"},
{name: "category"},
{name: "display"}
]
)
});
// Custom rendering Template
resultTpl = new Ext.XTemplate(
'<tpl for=".">',
'<div class="search-item">',
'<p>',
'<span class="category-{display:lowercase}">{display}>{source}</span> ',
'<b>{keyword}</b> {synonyms}',
'</p>',
'</div>',
'</tpl>'
);
var search = new Ext.form.ComboBox({
store: ds,
displayField: 'title',
width: 455,
typeAhead: false,
loadingText: 'Searching...',
listHeight: 500,
valueField: 'naturalid',
hideTrigger: true,
allowBlank: false,
name: 'searchText',
mode: 'remote',
tpl: resultTpl,
minChars: 1,
applyTo: searchInputEltName,
itemSelector: 'div.search-item',
onSelect: function (record) { // override default onSelect to do redirect
var sp = Ext.get(searchInputEltName);
sp.dom.value = record.data.keyword;
GLOBAL.CurrentPathway = record.data.id;
GLOBAL.CurrentPathwayName = record.data.keyword;
search.collapse();
}
});
if (GLOBAL.HeatmapType == 'Select' || GLOBAL.HeatmapType == 'PCA') {
//Clear the pathway variable so we don't submit a value.
GLOBAL.CurrentPathway = '';
//Remove the pathway box.
document.getElementById(divName).style.display = "none";
}
}
/**
 * Build (once) and return the "Compare Subsets-Pathway Selection" popup.
 * The window body is loaded from a static HTML panel; the "Apply Selections"
 * button writes a summary of the chosen options back into the analysis page.
 * @returns {Ext.Window} the lazily created selection window
 */
HighDimensionalData.prototype.generate_view = function () {
    var _this = this;
    var _view = this.view;

    /**
     * Copy the current selections into window-scoped globals keyed by the
     * drop-zone div id, to satisfy the legacy load-high-dim-params code.
     * TODO: remove together with the "stub" call site below once the legacy
     * load high dim params path is gone.
     * @private
     */
    var _store_high_dim_params_as_global = function () {
        window[_this.divId + 'pathway'] = GLOBAL.CurrentPathway;
        window[_this.divId + 'pathwayName'] = GLOBAL.CurrentPathwayName;
        window[_this.divId + 'markerType'] = GLOBAL.HighDimDataType;
        window[_this.divId + 'samples1'] = Ext.get('sample1').dom.value;
        window[_this.divId + 'platforms1'] = Ext.get('platforms1').dom.value;
        window[_this.divId + 'gpls1'] = Ext.get('gpl1').dom.value;
        window[_this.divId + 'tissues1'] = Ext.get('tissue1').dom.value;
        window[_this.divId + 'probesAggregation'] = Ext.get('probesAggregation').dom.checked;
    };

    /**
     * Inner function to display node details summary.
     * Copies the selected data type / pathway into the hidden form fields of
     * the analysis page (which fields depends on which drop zone is active),
     * then renders an HTML summary into the "display<divId>" element.
     * @private
     */
    var _display_high_dim_selection_summary = function () {
        // set high dimensional data type
        if (_this.divId == 'divIndependentVariable' && document.getElementById("independentVarDataType")) {
            document.getElementById("independentVarDataType").value = Ext.get('highDimensionType').dom.value;
            document.getElementById("independentPathway").value = GLOBAL.CurrentPathway;
        }
        if (_this.divId == 'divDependentVariable' && document.getElementById("dependentVarDataType")) {
            document.getElementById("dependentVarDataType").value = Ext.get('highDimensionType').dom.value;
            document.getElementById("dependentPathway").value = GLOBAL.CurrentPathway;
        }
        // NOTE: the category drop zone intentionally reuses the dependent fields.
        if (_this.divId == 'divCategoryVariable' && document.getElementById("dependentVarDataType")) {
            document.getElementById("dependentVarDataType").value = Ext.get('highDimensionType').dom.value;
            document.getElementById("dependentPathway").value = GLOBAL.CurrentPathway;
        }
        if (_this.divId == 'divGroupByVariable' && document.getElementById("groupByVarDataType")) {
            document.getElementById("groupByVarDataType").value = Ext.get('highDimensionType').dom.value;
            document.getElementById("groupByPathway").value = GLOBAL.CurrentPathway;
        }

        // init summary string
        var summaryString = '<br> <b>GPL Platform:</b> ' + Ext.get('gpl1').dom.value +
            '<br> <b>Sample:</b> ' + Ext.get('sample1').dom.value +
            '<br> <b>Tissue:</b> ' + Ext.get('tissue1').dom.value +
            '<br>';

        // get search gene/pathway
        var selectedSearchPathway = GLOBAL.CurrentPathwayName;

        // get flag for probe aggregation
        var probeAggregationFlag = Ext.get('probesAggregation').dom.checked;

        // create final string
        var innerHtml = summaryString +
            '<br> <b>Pathway:</b> ' + selectedSearchPathway +
            '<br> <b>Probe aggregation:</b> ' + probeAggregationFlag +
            '<br> <b>Marker Type:</b> ' + GLOBAL.HighDimDataType;

        // ** start stub **
        // TODO : to be removed when load high dim params is no longer used.
        _store_high_dim_params_as_global();
        // ** end stub **

        // display it
        var domObj = document.getElementById("display" + GLOBAL.CurrentAnalysisDivId);
        domObj.innerHTML = innerHtml;
    };

    /**
     * Inner function to create High Dimensional Popup element.
     * The window content is fetched from a static HTML panel on first show.
     * @returns {Ext.Window}
     * @private
     */
    var _create_view = function () {
        return new Ext.Window({
            id: 'compareStepPathwaySelectionWindow',
            title: 'Compare Subsets-Pathway Selection',
            layout: 'fit',
            width: 475,
            autoHeight: true,
            closable: false,
            plain: true,
            modal: true,
            border: false,
            buttons: [
                {
                    id: 'dataAssociationApplyButton',
                    text: 'Apply Selections',
                    handler: function () {
                        _display_high_dim_selection_summary();
                        _view.hide();
                    }
                },
                {
                    text: 'Cancel',
                    handler: function () {
                        _view.hide();
                    }
                }
            ],
            resizable: false,
            autoLoad: {
                url: pageInfo.basePath + '/static/panels/highDimensionalWindow.html',
                scripts: true,
                nocache: true,
                discardUrl: true,
                method: 'GET'
            },
            tools: [
                {
                    id: 'help',
                    qtip: 'Click for context sensitive help',
                    handler: function (event, toolEl, panel) {
                        D2H_ShowHelp('1126', helpURL, "wndExternal", CTXT_DISPLAY_FULLHELP);
                    }
                }
            ]
        });
    }

    // ------------------------------------------- //
    // create view only when it's not created yet. //
    // ------------------------------------------- //
    if (!_view) {
        _view = _create_view();
    }

    return _view;
}
/**
 * Describe the drop-zone input element and the validations it must satisfy.
 * @param {String} divId id of the drop-zone div
 * @returns {Array} single-element array of validation descriptors
 */
HighDimensionalData.prototype.get_inputs = function (divId) {
    var requiredChecks = [
        {type: "REQUIRED"},
        {type: "HIGH_DIMENSIONAL"}
    ];
    return [
        {
            "label": "High Dimensional Data",
            "el": Ext.get(divId),
            "validations": requiredChecks
        }
    ];
};
/**
 * Validate the nodes dropped into the given div, fetch their high
 * dimensional details from the server and, when everything checks out,
 * open the selection popup.
 * Re-enters itself after running the subset queries when the subset ids
 * have not been computed yet.
 * @param {String} divId id of the drop-zone div being processed
 */
HighDimensionalData.prototype.gather_high_dimensional_data = function (divId) {
    var _this = this;

    /**
     * Reset pathway globals and remember which div we are working on.
     * @private
     */
    var _reset_global_var = function () {
        // reset the pathway information.
        GLOBAL.CurrentPathway = '';
        GLOBAL.CurrentPathwayName = '';
        // set global div id
        GLOBAL.CurrentAnalysisDivId = divId;
        _this.divId = divId;
    };

    _reset_global_var();

    // check if global subset id is already defined or not
    // if not then re-run the subset queries and come back here from their callback
    if (!variableDivEmpty(divId)
            && ((GLOBAL.CurrentSubsetIDs[1] == null) || (multipleSubsets() && GLOBAL.CurrentSubsetIDs[2] == null))) {
        runAllQueriesForSubsetId(function () {
            _this.gather_high_dimensional_data(divId);
        }, divId);
        return;
    }

    // reset data
    _this.data = null;

    // instantiate input elements object with their corresponding validations
    var inputArray = this.get_inputs(divId);

    // define the validator for this form
    var formValidator = new FormValidator(inputArray);

    if (formValidator.validateInputForm()) {
        this.fetchNodeDetails(divId, function (result) {
            _this.data = JSON.parse(result.responseText);
            // BUGFIX: 'platforms' was previously assigned without 'var' and
            // leaked into the global scope; it is now local to this callback.
            // Also renamed the inner validator so it no longer shadows the
            // outer 'formValidator'.
            var platforms = _this.getPlatformValidator(_this.getPlatforms(_this.data));
            var platformValidator = new FormValidator(platforms);
            if (platformValidator.validateInputForm()) {
                _this.display_high_dimensional_popup();
            } else {
                platformValidator.display_errors();
            }
        });
    } else { // something is not correct in the validation
        // display the error message
        formValidator.display_errors();
    }
};
/**
 * Wrap the platform titles in a validation descriptor that requires every
 * selected node to share an identical platform.
 * @param {Array} platforms list of platform title strings
 * @returns {Array} single-element array of validation descriptors
 */
HighDimensionalData.prototype.getPlatformValidator = function (platforms) {
    var descriptor = {
        "label": "Platforms",
        "el": platforms,
        "validations": [
            {type: "IDENTICAL_ITEMS"}
        ]
    };
    return [descriptor];
};
/**
 * Collect the platform titles of every data type in the node-details map.
 * @param {Object} data node details keyed by data type, each entry holding
 *                 a 'platforms' array of {title, ...} objects
 * @returns {Array} flat list of all platform titles
 */
HighDimensionalData.prototype.getPlatforms = function (data) {
    var titles = [];
    Object.keys(data).forEach(function (dataType) {
        data[dataType].platforms.forEach(function (platform) {
            titles.push(platform.title);
        });
    });
    return titles;
};
/**
 * Collect the concept keys of all nodes dropped into the given div and
 * request their high dimensional details from the server.
 * @param {String} divId id of the drop-zone div
 * @param {Function} callback Ext.Ajax success callback receiving the response
 */
HighDimensionalData.prototype.fetchNodeDetails = function (divId, callback) {
    // get nodes from the dropzone
    var _nodes = Ext.get(divId).dom.childNodes;
    // Array literal instead of 'new Array()' (same behavior, idiomatic).
    var _conceptPaths = [];
    for (var i = 0; i < _nodes.length; i++) {
        _conceptPaths.push(_nodes[i].concept.key);
    }

    // Retrieve node details
    Ext.Ajax.request({
        url: pageInfo.basePath + "/HighDimension/nodeDetails",
        method: 'POST',
        timeout: 10000, // milliseconds; previously passed as the string '10000'
        params: Ext.urlEncode({
            conceptKeys: _conceptPaths
        }),
        success: callback,
        failure: function () {
            Ext.Msg.alert("Error", "Cannot retrieve high dimensional node details");
        }
    });
};
HighDimensionalData.prototype.load_parameters = function (formParams) {
//These will tell tranSMART what data types we need to retrieve.
var mrnaData = false
var snpData = false
//Gene expression filters.
var fullGEXSampleType = "";
var fullGEXTissueType = "";
var fullGEXTime = "";
var fullGEXGeneList = "";
var fullGEXGPL = "";
//SNP Filters.
var fullSNPSampleType = "";
var fullSNPTissueType = "";
var fullSNPTime = "";
var fullSNPGeneList = "";
var fullSNPGPL = "";
//Pull the individual filters from the window object.
var independentGeneList = document.getElementById('independentPathway').value
var dependentGeneList = document.getElementById('dependentPathway').value
var dependentPlatform = window['divDependentVariableplatforms1'];
var independentPlatform = window['divIndependentVariableplatforms1'];
var dependentType = window['divDependentVariablemarkerType'];
var independentType = window['divIndependentVariablemarkerType'];
var dependentTime = window['divDependentVariabletimepointsValues'];
var independentTime = window['divIndependentVariabletimepointsValues'];
var dependentSample = window['divDependentVariablesamplesValues'];
var independentSample = window['divIndependentVariablesamplesValues'];
var dependentTissue = window['divDependentVariabletissuesValues'];
var independentTissue = window['divIndependentVariabletissuesValues'];
var dependentGPL = window['divDependentVariablegplValues'];
var independentGPL = window['divIndependentVariablegplValues'];
if (dependentGPL) dependentGPL = dependentGPL[0];
if (independentGPL) independentGPL = independentGPL[0];
// If we are using High Dimensional data we need to create variables that represent genes from both independent and
// dependent selections (In the event they are both of a single high dimensional type).
// Check to see if the user selected GEX in the independent input.
if (independentType == "Gene Expression") {
//Put the independent filters in the GEX variables.
fullGEXGeneList = String(independentGeneList);
fullGEXSampleType = String(independentSample);
fullGEXTissueType = String(independentTissue);
fullGEXTime = String(independentTime);
fullGEXGPL = String(independentGPL);
//This flag will tell us to write the GEX text file.
mrnaData = true;
//Fix the platform to be something the R script expects.
independentType = "MRNA";
}
if (dependentType == "Gene Expression") {
//If the gene list already has items, add a comma.
if (fullGEXGeneList != "") fullGEXGeneList += ","
if (fullGEXSampleType != "") fullGEXSampleType += ","
if (fullGEXTissueType != "") fullGEXTissueType += ","
if (fullGEXTime != "") fullGEXTime += ","
if (fullGEXGPL != "") fullGEXGPL += ","
//Add the genes in the list to the full list of GEX genes.
fullGEXGeneList += String(dependentGeneList);
fullGEXSampleType += String(dependentSample);
fullGEXTissueType += String(dependentTissue);
fullGEXTime += String(dependentTime);
fullGEXGPL += String(dependentGPL);
//This flag will tell us to write the GEX text file.
mrnaData = true;
//Fix the platform to be something the R script expects.
dependentType = "MRNA";
}
//Check to see if the user selected SNP in the independent input.
if (independentType == "SNP") {
//The genes entered into the search box were SNP genes.
fullSNPGeneList = String(independentGeneList);
fullSNPSampleType = String(independentSample);
fullSNPTissueType = String(independentTissue);
fullSNPTime = String(independentTime);
fullSNPGPL = String(independentGPL);
//This flag will tell us to write the SNP text file.
snpData = true;
}
if (dependentType == "SNP") {
//If the gene list already has items, add a comma.
if (fullSNPGeneList != "") fullSNPGeneList += ","
if (fullSNPSampleType != "") fullSNPSampleType += ","
if (fullSNPTissueType != "") fullSNPTissueType += ","
if (fullSNPTime != "") fullSNPTime += ","
if (fullSNPGPL != "") fullSNPGPL += ","
//Add the genes in the list to the full list of SNP genes.
fullSNPGeneList += String(dependentGeneList)
fullSNPSampleType += String(dependentSample);
fullSNPTissueType += String(dependentTissue);
fullSNPTime += String(dependentTime);
fullSNPGPL += dependentGPL;
//This flag will tell us to write the SNP text file.
snpData = true;
}
if (!independentGeneList && independentType || !dependentGeneList && dependentType) {
Ext.Msg.alert("No Filter Selected", "Please specify Gene/Pathway/mirID/UniProtID in High Dimensional Data pop-up.")
return false;
}
var _dependentDataType = document.getElementById('dependentVarDataType').value ? document.getElementById('dependentVarDataType').value : 'CLINICAL';
var _independentDataType = document.getElementById('independentVarDataType').value ? document.getElementById('independentVarDataType').value : 'CLINICAL';
formParams["divDependentVariabletimepoints"] = window['divDependentVariabletimepoints1'];
formParams["divDependentVariablesamples"] = window['divDependentVariablesamples1'];
formParams["divDependentVariablerbmPanels"] = window['divDependentVariablerbmPanels1'];
formParams["divDependentVariableplatforms"] = dependentPlatform
formParams["divDependentVariablegpls"] = window['divDependentVariablegplsValue1'];
formParams["divDependentVariabletissues"] = window['divDependentVariabletissues1'];
formParams["divDependentVariableprobesAggregation"] = window['divDependentVariableprobesAggregation'];
formParams["divDependentVariableSNPType"] = window['divDependentVariableSNPType'];
formParams["divDependentVariableType"] = _dependentDataType;
formParams["divDependentVariablePathway"] = dependentGeneList;
formParams["divIndependentVariabletimepoints"] = window['divIndependentVariabletimepoints1'];
formParams["divIndependentVariablesamples"] = window['divIndependentVariablesamples1'];
formParams["divIndependentVariablerbmPanels"] = window['divIndependentVariablerbmPanels1'];
formParams["divIndependentVariableplatforms"] = independentPlatform;
formParams["divIndependentVariablegpls"] = window['divIndependentVariablegplsValue1'];
formParams["divIndependentVariabletissues"] = window['divIndependentVariabletissues1'];
formParams["divIndependentVariableprobesAggregation"] = window['divIndependentVariableprobesAggregation'];
formParams["divIndependentVariableSNPType"] = window['divIndependentVariableSNPType'];
formParams["divIndependentVariableType"] = _independentDataType;
formParams["divIndependentVariablePathway"] = independentGeneList;
formParams["gexpathway"] = fullGEXGeneList;
formParams["snppathway"] = fullSNPGeneList;
formParams["divIndependentPathwayName"] = window['divIndependentVariablepathwayName'];
formParams["divDependentPathwayName"] = window['divDependentVariablepathwayName'];
formParams["mrnaData"] = mrnaData;
formParams["snpData"] = snpData;
formParams["gexgpl"] = fullGEXGPL;
formParams["snpgpl"] = fullSNPGPL;
return true;
}
/**
 * Lazily build the pathway-selection window, then populate and show it.
 * Logs an error instead of showing when no global 'viewport' exists.
 */
HighDimensionalData.prototype.display_high_dimensional_popup = function () {
    // generate view and populate it with the data
    this.view = this.generate_view();
    // BUGFIX: typeof always yields a string, so it must be compared against
    // the string 'undefined'. The old check (typeof viewport !== undefined)
    // was always true, even when viewport did not exist.
    if (typeof viewport !== 'undefined') {
        this.view.show(viewport, this.populate_data());
    } else {
        console.error("No view port to display the window.");
    }
};
var highDimensionalData = new HighDimensionalData();
| apache-2.0 |
starksm64/pi4j | pi4j-core/src/main/java/com/pi4j/io/gpio/Pin.java | 1443 | package com.pi4j.io.gpio;
import java.util.EnumSet;
/*
* #%L
* **********************************************************************
* ORGANIZATION : Pi4J
* PROJECT : Pi4J :: Java Library (Core)
* FILENAME : Pin.java
*
* This file is part of the Pi4J project. More information about
* this project can be found here: http://www.pi4j.com/
* **********************************************************************
* %%
* Copyright (C) 2012 - 2013 Pi4J
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
 * This interface describes a pin.
 *
 * @author Robert Savage (<a
 *         href="http://www.savagehomeautomation.com">http://www.savagehomeautomation.com</a>)
 */
public interface Pin {

    /** @return name of the GPIO provider this pin belongs to */
    String getProvider();

    /** @return the provider-specific address (number) of this pin */
    int getAddress();

    /** @return the human-readable name of this pin */
    String getName();

    /** @return the set of pin modes this pin supports */
    EnumSet<PinMode> getSupportedPinModes();

    /** @return the set of pull-resistance configurations this pin supports */
    EnumSet<PinPullResistance> getSupportedPinPullResistance();
}
| apache-2.0 |
eldersantos/phonix | Phonix.Tests/SoundexTests.cs | 546 | using Xunit;
namespace Phonix.Tests
{
public class SoundexTests
{
    // Groups of spelling variants expected to map to the same Soundex code.
    private static readonly string[] Words = new[] { "Spotify", "Spotfy", "Sputfi", "Spotifi" };
    private static readonly string[] Words2 = new[] { "United Air Lines", "United Aire Lines", "United Air Line" };

    // System under test.
    readonly Soundex _generator = new Soundex();

    /// <summary>Each group of spelling variants should be considered similar.</summary>
    [Fact]
    public void Should_Be_Similar()
    {
        Assert.True(_generator.IsSimilar(Words));
        Assert.True(_generator.IsSimilar(Words2));
    }
}
| apache-2.0 |
nprog/SkyEye | common/pprof.go | 504 | package common
import (
"github.com/nprog/SkyEye/log"
"os"
"os/signal"
"runtime/pprof"
"syscall"
)
// DebugOptions holds debugging configuration.
type DebugOptions struct {
	// PprofFile is presumably the CPU profile output path (it is not
	// referenced anywhere in this file's visible code -- confirm usage).
	PprofFile string
}
// Pprof starts CPU profiling into the file named by pprofFile and installs
// a signal handler that flushes the profile and exits the process on
// SIGINT (Ctrl+C) or SIGTERM.
//
// BUGFIX: the previous implementation deferred pprof.StopCPUProfile(),
// which fired as soon as Pprof returned and stopped profiling immediately,
// leaving an empty profile. The profile is now stopped only from the
// signal handler, and the profile file is closed so its data is flushed.
func Pprof(pprofFile *string) {
	f, err := os.Create(*pprofFile)
	if err != nil {
		log.Fatal(err)
	}
	// StartCPUProfile errors (e.g. profiling already active) were silently
	// ignored before; report them instead.
	if err := pprof.StartCPUProfile(f); err != nil {
		log.Fatal(err)
	}
	c := make(chan os.Signal, 1)
	signal.Notify(c, os.Interrupt)
	signal.Notify(c, syscall.SIGTERM)
	go func() {
		<-c
		log.Info("Ctrl+C to quit.")
		pprof.StopCPUProfile()
		f.Close() // flush profile data to disk before exiting
		os.Exit(1)
	}()
}
| apache-2.0 |
fududu/JECharts | src/main/java/org/aying/echarts/feature/Feature.java | 3212 | /*
* Copyright 2016 Aying.Org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.aying.echarts.feature;
import org.aying.echarts.DataZoomSelect;
import java.io.Serializable;
import java.util.Objects;
/**
* 各工具配置项。
*
* @author Fuchun
* @since 1.0
*/
public class Feature implements Serializable {

    private static final long serialVersionUID = 7298363466820660730L;

    /** "Save as image" tool configuration. */
    private SaveAsImage saveAsImage;
    /** "Restore" (reset) tool configuration. */
    private Restore restore;
    /** "Data view" tool configuration. */
    private DataView dataView;
    /** "Data zoom" selection tool configuration. */
    private DataZoomSelect dataZoom;
    /** "Magic type" (chart switching) tool configuration. */
    private MagicType magicType;
    /** "Brush" selection tool configuration. */
    private BrushFeature brush;

    public Feature() {
        super();
    }

    public SaveAsImage getSaveAsImage() {
        return saveAsImage;
    }

    public void setSaveAsImage(SaveAsImage value) {
        this.saveAsImage = value;
    }

    public Restore getRestore() {
        return restore;
    }

    public void setRestore(Restore value) {
        this.restore = value;
    }

    public DataView getDataView() {
        return dataView;
    }

    public void setDataView(DataView value) {
        this.dataView = value;
    }

    public DataZoomSelect getDataZoom() {
        return dataZoom;
    }

    public void setDataZoom(DataZoomSelect value) {
        this.dataZoom = value;
    }

    public MagicType getMagicType() {
        return magicType;
    }

    public void setMagicType(MagicType value) {
        this.magicType = value;
    }

    public BrushFeature getBrush() {
        return brush;
    }

    public void setBrush(BrushFeature value) {
        this.brush = value;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof Feature)) {
            return false;
        }
        Feature that = (Feature) o;
        return Objects.equals(this.saveAsImage, that.saveAsImage)
                && Objects.equals(this.restore, that.restore)
                && Objects.equals(this.dataView, that.dataView)
                && Objects.equals(this.dataZoom, that.dataZoom)
                && Objects.equals(this.magicType, that.magicType)
                && Objects.equals(this.brush, that.brush);
    }

    @Override
    public int hashCode() {
        // Same accumulation Objects.hash(...) performs, spelled out explicitly,
        // so hash values are unchanged.
        int result = 1;
        result = 31 * result + Objects.hashCode(saveAsImage);
        result = 31 * result + Objects.hashCode(restore);
        result = 31 * result + Objects.hashCode(dataView);
        result = 31 * result + Objects.hashCode(dataZoom);
        result = 31 * result + Objects.hashCode(magicType);
        result = 31 * result + Objects.hashCode(brush);
        return result;
    }

    @Override
    public String toString() {
        // Output format is identical to the previous string concatenation.
        StringBuilder sb = new StringBuilder("org.aying.echarts.feature.Feature{");
        sb.append("saveAsImage=").append(saveAsImage);
        sb.append(", restore=").append(restore);
        sb.append(", dataView=").append(dataView);
        sb.append(", dataZoom=").append(dataZoom);
        sb.append(", magicType=").append(magicType);
        sb.append(", brush=").append(brush);
        sb.append('}');
        return sb.toString();
    }
}
| apache-2.0 |
google/appengine_xblock_runtime | tests/test_store.py | 5874 | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the datastore backed storage classes."""
__author__ = 'John Orr (jorr@google.com)'
import unittest
from appengine_xblock_runtime import store
import appengine_xblock_runtime.runtime
import xblock.exceptions
import xblock.fields
import xblock.runtime
from google.appengine.ext import testbed
class BaseTestCase(unittest.TestCase):
    """Base class for unit tests.  Sets up mock datastore and memcache."""

    def setUp(self):
        super(BaseTestCase, self).setUp()
        # Activate an App Engine testbed with in-memory datastore/memcache
        # stubs so subclasses never touch real services.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()

    def tearDown(self):
        self.testbed.deactivate()
        # Fixed: the superclass tearDown was previously not invoked.
        super(BaseTestCase, self).tearDown()
class TestUsageStore(BaseTestCase):
    """Unit tests for the usage store."""

    def setUp(self):
        super(TestUsageStore, self).setUp()
        self.id_reader = appengine_xblock_runtime.runtime.IdReader()
        self.id_generator = appengine_xblock_runtime.runtime.IdGenerator()

    def test_create_and_get_definition(self):
        """Should be able to create and then retrieve a definition id."""
        def_id = self.id_generator.create_definition('my_block')
        self.assertEqual('my_block', self.id_reader.get_block_type(def_id))

    def test_create_and_get_usage(self):
        """Should be able to create and then retrieve a usage id."""
        def_id = self.id_generator.create_definition('my_block')
        usage_id = self.id_generator.create_usage(def_id)
        self.assertEqual(def_id, self.id_reader.get_definition_id(usage_id))

    def test_cannot_create_usage_with_nonexistent_definition(self):
        """Should not create a usage bound to a non-existent definition."""
        # assertRaises replaces the manual try/fail/except idiom used before.
        with self.assertRaises(AssertionError):
            self.id_generator.create_usage('123')

    def test_get_non_existent_usage_raises_exception(self):
        """Should raise NoSuchUsage when non-existent usage_id requested."""
        with self.assertRaises(xblock.exceptions.NoSuchUsage):
            self.id_reader.get_definition_id('i_dont_exist')

    def test_get_non_existent_definition_raises_exception(self):
        """Should raise NoSuchDefinition when non-existent def_id requested."""
        with self.assertRaises(xblock.exceptions.NoSuchDefinition):
            self.id_reader.get_block_type('i_dont_exist')
class TestKeyValueStore(BaseTestCase):
    """Unit tests for the key value store."""

    def setUp(self):
        super(TestKeyValueStore, self).setUp()
        self.key_value_store = store.KeyValueStore()

    def _user_state_key(self):
        """Build a representative user-state key for the tests."""
        return xblock.runtime.KeyValueStore.Key(
            scope=xblock.fields.Scope.user_state, user_id='123',
            block_scope_id='456', field_name='my_field')

    def test_set_then_get(self):
        """Should set and then retrieve string value from KVS."""
        key = self._user_state_key()
        self.key_value_store.set(key, 'data')
        self.assertTrue(self.key_value_store.has(key))
        self.assertEqual('data', self.key_value_store.get(key))

    def test_set_then_get_rich_data(self):
        """Should set and then retrieve structured data from KVS."""
        key = self._user_state_key()
        data = {
            'a': 'A',
            'b': 3.14,
            'c': {
                'aa': 'AA',
                'bb': [1, 2, 3]}}
        self.key_value_store.set(key, data)
        self.assertTrue(self.key_value_store.has(key))
        self.assertEqual(data, self.key_value_store.get(key))

    def test_get_without_set(self):
        """Attempt to get unset key should raise KeyError."""
        key = self._user_state_key()
        # assertRaises replaces the manual try/fail/except idiom used before.
        with self.assertRaises(KeyError):
            self.key_value_store.get(key)

    def test_delete(self):
        """Should be able to delete existing key from KVS."""
        key = self._user_state_key()
        self.key_value_store.set(key, 'data')
        self.assertTrue(self.key_value_store.has(key))
        self.assertEqual('data', self.key_value_store.get(key))
        self.key_value_store.delete(key)
        self.assertFalse(self.key_value_store.has(key))
        with self.assertRaises(KeyError):
            self.key_value_store.get(key)

    def test_delete_without_add(self):
        """Delete of non-existent key should pass as no-op."""
        key = self._user_state_key()
        self.assertFalse(self.key_value_store.has(key))
        # Expect no exception
        self.key_value_store.delete(key)
        self.assertFalse(self.key_value_store.has(key))

    def test_has_finds_key(self):
        """Should be able to detect presence of key."""
        key = self._user_state_key()
        self.key_value_store.set(key, 'data')
        self.assertTrue(self.key_value_store.has(key))

    def test_has_does_not_find_key(self):
        """Should be able to detect absence of key."""
        key = self._user_state_key()
        self.assertFalse(self.key_value_store.has(key))
| apache-2.0 |
labsai/EDDI | persistencestore-impl/src/main/java/ai/labs/persistence/bootstrap/PersistenceModule.java | 9679 | package ai.labs.persistence.bootstrap;
import ai.labs.persistence.mongo.codec.JacksonProvider;
import ai.labs.runtime.bootstrap.AbstractBaseModule;
import ai.labs.utilities.RuntimeUtilities;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.inject.Provides;
import com.mongodb.*;
import com.mongodb.client.MongoDatabase;
import de.undercouch.bson4jackson.BsonFactory;
import org.bson.BsonInvalidOperationException;
import org.bson.BsonReader;
import org.bson.BsonWriter;
import org.bson.codecs.*;
import org.bson.codecs.configuration.CodecRegistry;
import javax.inject.Named;
import javax.inject.Singleton;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.util.LinkedList;
import java.util.List;
import static ai.labs.SerializationUtilities.configureObjectMapper;
import static org.bson.codecs.configuration.CodecRegistries.*;
/**
* @author ginccc
*/
public class PersistenceModule extends AbstractBaseModule {
private final InputStream configFile;
    /**
     * @param configFile stream of the persistence configuration file; it is
     *                   registered with the module in {@link #configure()}
     */
    public PersistenceModule(InputStream configFile) {
        this.configFile = configFile;
    }
    /** Registers the configuration file so its properties become injectable. */
    @Override
    protected void configure() {
        registerConfigFiles(this.configFile);
    }
    /**
     * Creates the singleton {@link MongoDatabase} used by the persistence layer.
     * <p>
     * Resolves the configured hosts into server addresses (format handled by
     * {@code hostsToServerAddress} -- presumably a delimited host list; confirm
     * against that helper), builds the client options from the many
     * {@code mongodb.*} properties, and authenticates only when both username
     * and password are non-empty.
     *
     * @throws RuntimeException wrapping an {@link UnknownHostException} when a
     *         configured host cannot be resolved
     */
    @Provides
    @Singleton
    public MongoDatabase provideMongoDB(@Named("mongodb.hosts") String hosts,
                                        @Named("mongodb.port") Integer port,
                                        @Named("mongodb.database") String database,
                                        @Named("mongodb.source") String source,
                                        @Named("mongodb.username") String username,
                                        @Named("mongodb.password") String password,
                                        @Named("mongodb.connectionsPerHost") Integer connectionsPerHost,
                                        @Named("mongodb.connectTimeout") Integer connectTimeout,
                                        @Named("mongodb.heartbeatConnectTimeout") Integer heartbeatConnectTimeout,
                                        @Named("mongodb.heartbeatFrequency") Integer heartbeatFrequency,
                                        @Named("mongodb.heartbeatSocketTimeout") Integer heartbeatSocketTimeout,
                                        @Named("mongodb.localThreshold") Integer localThreshold,
                                        @Named("mongodb.maxConnectionIdleTime") Integer maxConnectionIdleTime,
                                        @Named("mongodb.maxConnectionLifeTime") Integer maxConnectionLifeTime,
                                        @Named("mongodb.maxWaitTime") Integer maxWaitTime,
                                        @Named("mongodb.minConnectionsPerHost") Integer minConnectionsPerHost,
                                        @Named("mongodb.minHeartbeatFrequency") Integer minHeartbeatFrequency,
                                        @Named("mongodb.requiredReplicaSetName") String requiredReplicaSetName,
                                        @Named("mongodb.serverSelectionTimeout") Integer serverSelectionTimeout,
                                        @Named("mongodb.socketTimeout") Integer socketTimeout,
                                        @Named("mongodb.sslEnabled") Boolean sslEnabled,
                                        @Named("mongodb.threadsAllowedToBlockForConnectionMultiplier") Integer threadsAllowedToBlockForConnectionMultiplier,
                                        BsonFactory bsonFactory) {

        try {
            List<ServerAddress> seeds = hostsToServerAddress(hosts, port);
            MongoClient mongoClient;
            MongoClientOptions mongoClientOptions = buildMongoClientOptions(
                    ReadPreference.nearest(), connectionsPerHost, connectTimeout,
                    heartbeatConnectTimeout, heartbeatFrequency, heartbeatSocketTimeout,
                    localThreshold, maxConnectionIdleTime, maxConnectionLifeTime, maxWaitTime,
                    minConnectionsPerHost, minHeartbeatFrequency, requiredReplicaSetName,
                    serverSelectionTimeout, socketTimeout, sslEnabled,
                    threadsAllowedToBlockForConnectionMultiplier, bsonFactory);

            // An empty username or password means "connect without authentication".
            if ("".equals(username) || "".equals(password)) {
                mongoClient = new MongoClient(seeds, mongoClientOptions);
            } else {
                MongoCredential credential = MongoCredential.createCredential(username, source, password.toCharArray());
                mongoClient = new MongoClient(seeds, credential, mongoClientOptions);
            }

            // Register a shutdown hook for the client (helper defined elsewhere
            // in this module).
            registerMongoClientShutdownHook(mongoClient);
            return mongoClient.getDatabase(database);
        } catch (UnknownHostException e) {
            throw new RuntimeException(e.getLocalizedMessage(), e);
        }
    }
/**
 * Assembles the {@link MongoClientOptions} used for every client connection.
 *
 * <p>The codec registry combines the driver defaults with a URI codec, a raw
 * BSON document codec, and Jackson-based POJO mapping driven by the supplied
 * {@code bsonFactory}. Write concern is fixed to MAJORITY.
 *
 * <p>{@code maxConnectionIdleTime}, {@code maxConnectionLifeTime} and
 * {@code minConnectionsPerHost} are only applied when non-negative, and
 * {@code requiredReplicaSetName} only when non-empty; otherwise the driver
 * defaults remain in effect.
 */
private MongoClientOptions buildMongoClientOptions(ReadPreference readPreference,
                                                   Integer connectionsPerHost, Integer connectTimeout,
                                                   Integer heartbeatConnectTimeout, Integer heartbeatFrequency,
                                                   Integer heartbeatSocketTimeout, Integer localThreshold,
                                                   Integer maxConnectionIdleTime, Integer maxConnectionLifeTime,
                                                   Integer maxWaitTime, Integer minConnectionsPerHost,
                                                   Integer minHeartbeatFrequency, String requiredReplicaSetName,
                                                   Integer serverSelectionTimeout, Integer socketTimeout,
                                                   Boolean sslEnabled,
                                                   Integer threadsAllowedToBlockForConnectionMultiplier,
                                                   BsonFactory bsonFactory) {
    CodecRegistry codecRegistry = fromRegistries(
            MongoClient.getDefaultCodecRegistry(),
            fromCodecs(new URIStringCodec(), new RawBsonDocumentCodec()),
            fromProviders(
                    new ValueCodecProvider(), new BsonValueCodecProvider(),
                    new DocumentCodecProvider(), new IterableCodecProvider(), new MapCodecProvider(),
                    new JacksonProvider(configureObjectMapper(new ObjectMapper(bsonFactory)))
            )
    );

    MongoClientOptions.Builder options = MongoClientOptions.builder()
            .codecRegistry(codecRegistry)
            .writeConcern(WriteConcern.MAJORITY)
            .readPreference(readPreference)
            .connectionsPerHost(connectionsPerHost)
            .connectTimeout(connectTimeout)
            .heartbeatConnectTimeout(heartbeatConnectTimeout)
            .heartbeatFrequency(heartbeatFrequency)
            .heartbeatSocketTimeout(heartbeatSocketTimeout)
            .localThreshold(localThreshold);

    // Negative values leave the driver defaults untouched.
    if (maxConnectionIdleTime >= 0) {
        options.maxConnectionIdleTime(maxConnectionIdleTime);
    }
    if (maxConnectionLifeTime >= 0) {
        options.maxConnectionLifeTime(maxConnectionLifeTime);
    }
    options.maxWaitTime(maxWaitTime);
    if (minConnectionsPerHost >= 0) {
        options.minConnectionsPerHost(minConnectionsPerHost);
    }
    options.minHeartbeatFrequency(minHeartbeatFrequency);
    if (!RuntimeUtilities.isNullOrEmpty(requiredReplicaSetName)) {
        options.requiredReplicaSetName(requiredReplicaSetName);
    }
    options.serverSelectionTimeout(serverSelectionTimeout)
            .socketTimeout(socketTimeout)
            .sslEnabled(sslEnabled)
            .threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier);

    return options.build();
}
/**
 * Parses a comma-separated host list (e.g. {@code "host1, host2"}) into
 * Mongo server addresses, each using the same {@code port}.
 *
 * <p>Blank entries produced by stray commas ({@code "a,,b,"}) are skipped
 * instead of being turned into bogus server addresses.
 *
 * @param hosts comma-separated host names; entries are trimmed
 * @param port  port applied to every host
 * @return one address per non-blank host entry
 */
@SuppressWarnings("RedundantThrows")
private static List<ServerAddress> hostsToServerAddress(String hosts, Integer port) throws UnknownHostException {
    List<ServerAddress> ret = new LinkedList<>();
    for (String host : hosts.split(",")) {
        String trimmed = host.trim();
        // Skip empty fragments from duplicate/trailing commas.
        if (!trimmed.isEmpty()) {
            ret.add(new ServerAddress(trimmed, port));
        }
    }
    return ret;
}
/**
 * Registers a JVM shutdown hook that closes the given {@link MongoClient}
 * when the process exits.
 *
 * <p>Any failure during close is reported (best effort) and otherwise
 * ignored, since the JVM is already shutting down.
 */
private void registerMongoClientShutdownHook(final MongoClient mongoClient) {
    Runtime.getRuntime().addShutdownHook(new Thread("ShutdownHook_MongoClient") {
        @Override
        public void run() {
            try {
                mongoClient.close();
            } catch (Throwable e) {
                // Report to stderr and include the reason instead of silently
                // discarding it; logging frameworks may already be stopped here.
                System.err.println("MongoClient did not stop as expected: " + e);
            }
        }
    });
}
/**
 * BSON codec that stores {@link URI} values as plain strings.
 */
public static class URIStringCodec implements Codec<URI> {
    @Override
    public Class<URI> getEncoderClass() {
        return URI.class;
    }

    /** Writes the URI in its string form. */
    @Override
    public void encode(BsonWriter writer, URI value, EncoderContext encoderContext) {
        writer.writeString(value.toString());
    }

    /**
     * Reads a string and converts it back into a {@link URI}.
     *
     * @throws BsonInvalidOperationException if the stored string is not a
     *         valid URI; the underlying {@link URISyntaxException} is kept
     *         as the cause for diagnostics.
     */
    @Override
    public URI decode(BsonReader reader, DecoderContext decoderContext) {
        String uriString = reader.readString();
        try {
            return new URI(uriString);
        } catch (URISyntaxException e) {
            // Preserve the original exception as the cause instead of dropping it.
            BsonInvalidOperationException invalid = new BsonInvalidOperationException(
                    String.format("Cannot create URI from string '%s'", uriString));
            invalid.initCause(e);
            throw invalid;
        }
    }
}
}
| apache-2.0 |
smudge202/Deputy | src/Deputy/ActorSystemGuardian.cs | 1386 | using Microsoft.Framework.DependencyInjection;
using System;
using System.Collections.Concurrent;
using System.Linq;
namespace Deputy
{
/// <summary>
/// Process-wide registry of named actor systems.
/// </summary>
/// <remarks>
/// Values are wrapped in <see cref="Lazy{T}"/> because
/// <c>ConcurrentDictionary.GetOrAdd</c> may invoke a plain value factory
/// concurrently for the same key; without the Lazy wrapper, racing callers
/// could each construct an <c>ActorSystem</c>, and the losers would be
/// discarded without ever being shut down.
/// </remarks>
public static class ActorSystemGuardian
{
    private static readonly ConcurrentDictionary<string, Lazy<IActorSystem>> _knownActorSystems =
        new ConcurrentDictionary<string, Lazy<IActorSystem>>();

    /// <summary>Returns (creating on first use) the default actor system.</summary>
    public static IActorSystem CreateActorSystem(IServiceCollection services) =>
        _knownActorSystems.GetOrAdd("DefaultActorSystem",
            key => new Lazy<IActorSystem>(() => new ActorSystem(services?.BuildServiceProvider()))).Value;

    /// <summary>Returns (creating on first use) the actor system registered under <paramref name="name"/>.</summary>
    public static IActorSystem CreateActorSystem(IServiceCollection services, string name) =>
        _knownActorSystems.GetOrAdd(name,
            key => new Lazy<IActorSystem>(() => new ActorSystem(services?.BuildServiceProvider(), key))).Value;

    /// <summary>Removes the named system from the registry and shuts it down.</summary>
    /// <exception cref="ArgumentNullException">name is null or whitespace.</exception>
    /// <exception cref="InvalidActorSystemException">no system is registered under name.</exception>
    public static void Shutdown(string name)
    {
        if (string.IsNullOrWhiteSpace(name))
            throw new ArgumentNullException(nameof(name));

        Lazy<IActorSystem> actorSystem;
        if (!_knownActorSystems.TryRemove(name, out actorSystem))
            throw new InvalidActorSystemException(name);

        actorSystem.Value.Shutdown();
    }

    /// <summary>Shuts down every registered system and empties the registry.</summary>
    public static void ShutdownAll()
    {
        var actorSystems = _knownActorSystems.Values.ToArray();
        _knownActorSystems.Clear();
        foreach (var actorSystem in actorSystems)
            actorSystem.Value.Shutdown();
    }
}
}
| apache-2.0 |
jnthnclt/nicity | nicity-view/src/main/java/com/jonathancolt/nicity/view/core/AViewWHBorder.java | 3102 | /*
* AViewWHBorder.java.java
*
* Created on 01-03-2010 01:31:35 PM
*
* Copyright 2010 Jonathan Colt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jonathancolt.nicity.view.core;
/*
* #%L
* nicity-view
* %%
* Copyright (C) 2013 Jonathan Colt
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.jonathancolt.nicity.view.border.NullBorder;
import com.jonathancolt.nicity.view.interfaces.IBorder;
import com.jonathancolt.nicity.view.interfaces.ICanvas;
/**
*
* @author Administrator
*/
/**
 * Base view that tracks an explicit width/height pair and delegates border
 * painting and state queries to a pluggable {@link IBorder}. The border field
 * is never {@code null}: {@link NullBorder#cNull} stands in when no border is
 * set.
 *
 * @author Administrator
 */
abstract public class AViewWHBorder extends AView {

    /** Content width, not counting the border's own width. */
    protected float w = 0.0f;

    /** Content height, not counting the border's own height. */
    protected float h = 0.0f;

    /** Border decoration; defaults to the no-op {@link NullBorder#cNull}. */
    protected IBorder border = NullBorder.cNull;

    public AViewWHBorder() {
        super();
    }

    // ---- IView ----

    @Override
    public IBorder getBorder() {
        return border;
    }

    @Override
    public void setBorder(IBorder _border) {
        // Normalize null to the shared null-object so callers never see null.
        border = (_border == null) ? NullBorder.cNull : _border;
    }

    @Override
    public boolean isActive() {
        return border.isActive();
    }

    @Override
    public boolean isSelected() {
        return border.isSelected();
    }

    // ---- Border painting ----

    @Override
    public void paintBorder(ICanvas g, int _x, int _y, int _w, int _h) {
        final IBorder current = getBorder();
        if (current == null) {
            return;
        }
        current.paintBorder(g, _x, _y, _w, _h);
    }

    @Override
    public void paintBackground(ICanvas g, int _x, int _y, int _w, int _h) {
        final IBorder current = getBorder();
        if (current == null) {
            return;
        }
        current.paintBackground(g, _x, _y, _w, _h);
    }

    // ---- Location ----

    @Override
    public float getW() {
        // Guard against a self-referencing border, which would recurse forever.
        return (border == this) ? w : w + border.getW();
    }

    @Override
    public float getH() {
        return (border == this) ? h : h + border.getH();
    }
}
| apache-2.0 |
curtislisle/nanomaterial-dashboard | pdf2csv/service/savedata.py | 8283 | import sys, pymongo, os, glob, re, bson.json_util, json, time, datetime, math, subprocess, base64
from bson.objectid import ObjectId
from pymongo import MongoClient
from bson.dbref import DBRef
from bson.json_util import dumps
from bson.code import Code
import string, tangelo
import csv
#client = MongoClient('fr-s-ivg-mdb.ncifcrf.gov', 29022);
# Module-level MongoDB connection shared by all handlers in this service.
# No arguments => pymongo default host/port (localhost:27017).
client = MongoClient();
def removeBadCharsFromHeaders(header):
    """Sanitize column headers: drop every '.' and replace ',' with '_'.

    Returns a new list; the input list is left unmodified.
    """
    return [col.replace('.', '').replace(',', '_') for col in header]
def is_integer(s):
    """Return True when int(s) succeeds, False on ValueError."""
    try:
        int(s)
    except ValueError:
        return False
    return True
def is_number(s):
    """Return True when float(s) succeeds, False on ValueError."""
    try:
        float(s)
    except ValueError:
        return False
    return True
def ConditionAttributeValue(field):
    """Coerce a raw field value to int, else float, else plain str.

    Equivalent to the previous is_integer/is_number checks, but parses the
    value only once per type instead of parse-to-test followed by
    parse-to-convert, and no longer depends on the sibling helpers.
    Only ValueError is caught, so a TypeError (e.g. for None) still
    propagates, exactly as before.
    """
    try:
        return int(field)
    except ValueError:
        pass
    try:
        return float(field)
    except ValueError:
        pass
    # convert to python string to assist matching
    return str(field)
def run_old(data=None, fileName=None, orientation=None):
    """Persist one raw pdf-extraction payload into saved_pdf_data.

    `data` is a JSON string; it is decoded and stored together with the
    source file name, page orientation and a UTC timestamp. Returns a
    JSON string reporting success.
    """
    record = {
        'data': json.loads(data),
        'fileName': fileName,
        'orientation': orientation,
        'datetime': datetime.datetime.utcnow(),
    }
    client["NanoDB3"]["saved_pdf_data"].insert(record)
    response = {'success': "insert successful"}
    #tangelo.log(str(response))
    return bson.json_util.dumps(response)
def run(data=None, fileName=None, orientation=None):
    """Normalize pdf-extraction rows and store them in saved_pdf_data_rows.

    `data` is a JSON string: a list of particles, each particle a list of
    single-key dicts ([{field: value}, ...]). Each particle is flattened to
    one dict, augmented with binary "set membership" attributes (for UpSet-
    style analysis) and fields mapped to the nanomaterial-registry schema,
    then inserted into MongoDB. Always returns the string 'success'.

    NOTE(review): fileName and orientation are accepted but never used here.
    """
    data = json.loads(data)
    db = client["NanoDB3"]
    fullcoll = db["saved_pdf_data_rows"]
    response = {}
    response['data'] = []
    #print "savedata: data:"
    #print data
    #print '------------------'
    # copy out of the mongodb cursor type to a python list
    # NOTE(review): pdfcount is initialized but never incremented or read.
    pdfcount = 0
    if len(data) > 0:
        for jsonlist in data:
            x = {}
            # copy to avoid unicode problems. Create new dictionary
            for field_dict in jsonlist:
                try:
                    # NOTE(review): field_dict.keys()[0] is Python-2-only
                    # (dict.keys() is a view in Python 3). The bare except
                    # silently drops any field that fails conversion.
                    keyname = str(field_dict.keys()[0])
                    value = ConditionAttributeValue(field_dict[keyname])
                    x[keyname] = value
                except:
                    pass
            # at this point, we have recrated a traditional multi-field json object from each particle in the pdf
            # extraction output.
            # now add in binary sets. In each case, the identify of a multi-valued field is tested and turned
            # into an additional binary set attribute
            x['Aromatic'] = 1 if ('Aromatic' in x and x['Aromatic']=='yes') else 0
            x['VHQ-R subset'] = 1 if ('VHQ-R subset' in x and x['VHQ-R subset']=='yes') else 0
            x['Macrocyclic'] = 1 if ('Macrocyclic' in x and x['Macrocyclic']=='yes') else 0
            # NOTE(review): duplicate of the 'VHQ-R subset' assignment above (harmless but redundant).
            x['VHQ-R subset'] = 1 if ('VHQ-R subset' in x and x['VHQ-R subset']=='yes') else 0
            x['Sugar'] = 1 if ('Sugar' in x and x['Sugar']== 1) else 0
            # provenance flags: these rows always originate from a pdf, not the nano db
            x['source_pdf'] = 1
            x['source_nano_db'] = 0
            # find mappings to fields in nanomaterial registry entries
            x['Material Type'] = x['Macromolecule Type'] if 'Macromolecule Type' in x else ''
            x['Molecular Type'] = x['Macromolecule'] if 'Macromolecule' in x else ''
            x['Molecular Identity'] = x['Name'] if 'Name' in x else ''
            x['NanomaterialID'] = x['PDB ID'] if 'PDB ID' in x else ''
            # 'R' may not be numeric; fall back to the raw value if float() fails
            try:
                x['Mean Primary Particle Size'] = float(x['R']) if 'R' in x else 0
            except:
                x['Mean Primary Particle Size'] = x['R'] if 'R' in x else 0
            # add the empty columns so the table is always consistent
            x['Product Name'] = x['Product Name'] if 'Product Name' in x else ''
            x['Material Type'] = x['Material Type'] if 'Material Type' in x else ''
            x['Mean Hydrodynamic Diameter'] = x['Mean Hydrodynamic Diameter'] if 'Mean Hydrodynamic Diameter' in x else 0
            x['Primary Particle Size'] = x['Primary Particle Size'] if 'Primary Particle Size' in x else 0
            x['Component Molecular Weight'] = x['Component Molecular Weight'] if 'Component Molecular Weight' in x else 0
            x['Molecular Weight'] = x['Molecular Weight'] if 'Molecular Weight' in x else 0
            x['Lambda Max'] = x['Lambda Max'] if 'Lambda Max' in x else 0
            x['Bulk Density'] = x['Bulk Density'] if 'Bulk Density' in x else 0
            x['Specific Surface Area'] = x['Specific Surface Area'] if 'Specific Surface Area' in x else 0
            x['Zeta Potential'] = x['Zeta Potential'] if 'Zeta Potential' in x else 0
            # one-hot encode dimensionality
            x['2D Dimensionality'] = 1 if (('Nanoscale Dimensionality' in x) and x['Nanoscale Dimensionality'] =='2D') else 0
            x['3D Dimensionality'] = 1 if (('Nanoscale Dimensionality' in x) and x['Nanoscale Dimensionality'] =='3D') else 0
            # x['Null Dimensionality'] = 1 if (('Nanoscale Dimensionality' in x) and x['Nanoscale Dimensionality'] =='Null') else 0
            # one-hot encode polarity; 'Neutral ' (trailing space) appears in the data and is accepted
            x['Neutral Polarity'] = 1 if (u'Polarity' in x) and ((x[u'Polarity'] =='Neutral') or (x[u'Polarity'] =='Neutral ')) else 0
            x['Positive Polarity'] = 1 if (u'Polarity' in x) and (x[u'Polarity'] =='Positive') else 0
            x['Negative Polarity'] = 1 if (u'Polarity' in x) and (x[u'Polarity'] =='Negative') else 0
            #x['Neutral Polarity'] = 1 if x['Polarity'] =='Neutral' else 0
            #x['Positive Polarity'] = 1 if x['Polarity'] =='Potitive' else 0
            #x['Negative Polarity'] = 1 if x['Polarity'] =='Negative' else 0
            # one-hot encode the material type categories
            x['Metal'] = 1 if 'Material Type' in x and ( x['Material Type'] =='Metal' ) else 0
            x['Metal Oxide'] = 1 if 'Material Type' in x and ( x['Material Type'] =='Metal Oxide') else 0
            x['Polymer'] = 1 if 'Material Type' in x and ( x['Material Type'] =='Polymer') else 0
            x['Carbohydrate'] = 1 if 'Material Type' in x and ( x['Material Type'] ==u'Carbohydrate') else 0
            x['Group Ii-Vi'] = 1 if 'Material Type' in x and ( x['Material Type'] =='Group Ii-Vi' ) else 0
            x['Group Iv - Non C'] = 1 if 'Material Type' in x and ( x['Material Type'] =='Group Iv - Non C' ) else 0
            x['Dendrimer'] = 1 if 'Material Type' in x and ( x['Material Type'] =='Dendrimer' ) else 0
            x['Lipid'] = 1 if 'Material Type' in x and ( x['Material Type'] =='Lipid' ) else 0
            x['Protein'] = 1 if 'Material Type' in x and ( x['Material Type'] =='Protein' ) else 0
            x['Nucleic Acid'] = 1 if 'Material Type' in x and ( x['Material Type'] =='Nucleic Acid' ) else 0
            # look at state (has six different categorical values)
            x['Agglomerated'] = 1 if 'State' in x and ( x['State'] =='Agglomerated' ) else 0
            x['Aggregated'] = 1 if 'State' in x and ( x['State'] =='Aggregated' ) else 0
            x['Aggreg-Agglom'] = 1 if 'State' in x and ( x['State'] =='Aggregated/Agglomerated' ) else 0
            x['Not Aggreg-Agglom'] = 1 if 'State' in x and ( x['State'] =='Not Aggregated/Agglomerated' ) else 0
            # special cleaning for 'Purity Of' because it has numeric and character and inconsistent values
            x['Purity99+'] = 1 if ('Purity Of' in x and x['Purity Of']>=99) else 0
            x['UltraPure'] = 1 if 'Purity Of' in x and ((x['Purity Of'] == 'Ultrapure') or (x['Purity Of']=='Ultra Pure')) else 0
            # input database has false/true instead of 0/1 needed for UpSet
            x['IsCrystalline'] = 1 if (('IsCrystalline' in x) and (x['IsCrystalline'])) else 0
            #x['Polycrystalline'] = 1 if ((x['Polycrystalline']) else 0
            #x['SingleCrystal'] = 1 if (x['SingleCrystal']) else 0
            #x['Monoclinic'] = 1 if (x['Monoclinic']) else 0
            # add the extended row to the dataset returned for analysis
            response['data'].append(x)
            # insert mutates x (adds '_id'); response is not returned anyway
            fullcoll.insert(x)
    # convert to string to pass through URL callback
    return 'success'
| apache-2.0 |
Adien-galen/MobileSafe | src/com/example/mobliesafe/utils/ToastUtils.java | 252 | package com.example.mobliesafe.utils;
import android.content.Context;
import android.widget.Toast;
/**
 * Small helper for showing Android toasts.
 */
public class ToastUtils {

    /**
     * Shows {@code text} briefly using a short-duration toast.
     *
     * @param ctx  context used to create the toast
     * @param text message to display
     */
    public static void showToast(Context ctx, String text) {
        Toast toast = Toast.makeText(ctx, text, Toast.LENGTH_SHORT);
        toast.show();
    }
}
| apache-2.0 |
TribeMedia/aura | aura-components/src/main/components/ui/outputEmail/outputEmailTest.js | 4182 | /*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Component tests for ui:outputEmail. Each entry renders the component with
// the given attributes and asserts on the produced anchor element.
({
    /**
     * Verify outputEmail can display label.
     */
    testLabel: {
        attributes : {value : 'aura-test@salesforce.com', label: 'Email us'},
        test: function(component){
            var link = component.find("body").getElement().getElementsByTagName("a")[0];
            // NOTE(review): this first assertion checks the label, but reuses
            // the "Value attribute not correct" message — consider renaming.
            aura.test.assertEquals('Email us', $A.test.getText(link), "Value attribute not correct");
            aura.test.assertTrue(aura.test.contains(link.href,'mailto:aura-test@salesforce.com'), "Value attribute not correct");
        }
    },
    /**
     * Verify outputEmail displays email as default label.
     */
    testLabelEmpty: {
        attributes : {value : 'aura-test@salesforce.com', label: ''},
        test: function(component){
            var link = component.find("body").getElement().getElementsByTagName("a")[0];
            aura.test.assertTrue(aura.test.contains(link.href,'mailto:aura-test@salesforce.com'), "Value attribute not correct");
            aura.test.assertEquals('aura-test@salesforce.com', $A.test.getText(link), "Label attribute not correct");
        }
    },
    /**
     * Verify outputEmail can display email.
     */
    testValue: {
        attributes : {value : 'aura-test@salesforce.com', 'class' : 'myClass'},
        test: function(component){
            var span = component.find("body").getElement();
            var link = span.getElementsByTagName("a")[0];
            aura.test.assertTrue(aura.test.contains(link.href,'mailto:aura-test@salesforce.com'), "Value attribute not correct");
            aura.test.assertEquals('aura-test@salesforce.com', $A.test.getText(link), "Label attribute not correct");
            // The class attribute is applied to the wrapping span, not the anchor.
            aura.test.assertTrue($A.util.hasClass(span, "myClass"), "myClass class not correctly added");
        }
    },
    /**
     * Verify outputEmail displays the link even if the email address is wrong as we don't want to
     * do any validation for display
     */
    testValueInvalid: {
        attributes : {value : 'salesforce.com'},
        test: function(component){
            var link = component.find("body").getElement().getElementsByTagName("a")[0];
            aura.test.assertTrue(aura.test.contains(link.href,'mailto:salesforce.com'), "Value attribute not correct");
            aura.test.assertEquals('salesforce.com', $A.test.getText(link), "label attribute not correct");
        }
    },
    /**
     * Verify outputEmail sets chains through class attribute even for invalid email addresses
     */
    testValueInvalidWithClass: {
        attributes : {value : 'salesforce.com', 'class' : 'myClass'},
        test: function(component){
            aura.test.assertTrue($A.util.hasClass(component.find("body").getElement(), "myClass"), "myClass class not correctly added");
            aura.test.assertEquals('salesforce.com', $A.test.getText(component.find("body").getElement()), "label attribute not correct");
        }
    },
    /**
     * Verify empty value still displays tag without the href.
     */
    testValueEmptyString: {
        attributes : {value : '', label : 'email me'},
        test: function(component){
            $A.test.assertEquals('', $A.test.getText(component.getElement()), "unexpected elements");
        }
    },
    /**
     * Verify all-whitespace value still displays tag without the href.
     */
    // NOTE(review): the leading underscore appears to disable this test in the
    // Aura test runner — confirm whether it should be re-enabled.
    _testValueOnlyWhitespace: {
        attributes : {value : ' ', label : 'email me'},
        test: function(component){
            $A.test.assertEquals(null, component.getElement(), "unexpected elements");
        }
    }
})// eslint-disable-line semi
| apache-2.0 |
skogler/game-playground | src/graphics/shaderprogram.cpp | 5982 | #include "shaderprogram.hpp"
#include "core/resources/shader.hpp"
#include "graphics/glutils.hpp"
#include "utils/logger.hpp"

#include <GL/glew.h>
#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <glm/glm.hpp>

#include <algorithm>
#include <fstream>
#include <streambuf>
#include <string>
namespace fs = boost::filesystem;
// Creates a new GL program object (glCreateProgram) with the conventional
// matrix uniform names used by this engine's shaders and no lights allocated.
ShaderProgram::ShaderProgram() :
    modelMatrixName("modelMatrix"),
    viewMatrixName("viewMatrix"),
    projectionMatrixName("projectionMatrix"),
    programId(glCreateProgram()),
    numLights(0)
{
}
// Releases the GL program object. Attached shaders are held via shared_ptr
// in `shaders` and are released by their own destructors, not here.
ShaderProgram::~ShaderProgram()
{
    glDeleteProgram(programId);
}
/**
 * Uploads the given material to this program's uniforms.
 *
 * Color materials set the diffuse/specular color and ambient uniforms;
 * texture materials bind the material's texture to texture unit 0.
 * Other material types are silently ignored.
 */
void ShaderProgram::setMaterial(const Material& material)
{
    if (material.getType() == MATERIAL_TYPE_COLOR)
    {
        glUniform4f(materialId.diffuseColor,
                material.getDiffuse().r,
                material.getDiffuse().g, material.getDiffuse().b,
                material.getDiffuse().intensity);
        glUniform4f(materialId.specularColor,
                material.getSpecular().r,
                material.getSpecular().g, material.getSpecular().b,
                material.getSpecular().intensity);
        glUniform1f(materialId.ambient, 1.0f); // TODO use real value
    }
    else if (material.getType() == MATERIAL_TYPE_TEXTURE)
    {
        glActiveTexture(GL_TEXTURE0 + 0);
        // Fetch the texture once and reuse it for the bind; the original
        // fetched it twice and discarded the first result.
        shared_ptr<Texture> texture = material.getTexture();
        glBindTexture(GL_TEXTURE_2D, texture->get_id());
    }
}
// Resolves the uniform locations this program exposes, based on its declared
// effects. Called from link() after a successful glLinkProgram, with the
// program bound (the glUniform1i below targets the bound program).
void ShaderProgram::allocateUniforms()
{
    modelMatrixId =
            glGetUniformLocation(programId, modelMatrixName.c_str());
    viewMatrixId =
            glGetUniformLocation(programId, viewMatrixName.c_str());
    projectionMatrixId = glGetUniformLocation(programId,
            projectionMatrixName.c_str());
    if (hasEffect(EFFECT_TEXTURE))
    {
        GLuint textureId = glGetUniformLocation(programId, "texture");
        glUniform1i(textureId, 0); // Texture Unit 0 == texture
    }
    if (hasEffect(EFFECT_COLOR))
    {
        materialId.diffuseColor = glGetUniformLocation(programId,
                "material.diffuseColor");
        materialId.specularColor = glGetUniformLocation(programId,
                "material.specularColor");
        materialId.ambient = glGetUniformLocation(programId,
                "material.ambient");
    }
    // If this shader can do lighting, allocate the uniforms for it
    if (hasEffect(EFFECT_LIGHTING))
    {
        // Allocate 5 lights
        // NOTE(review): the loop bound (i <= 5) actually allocates six light
        // slots, despite the comment above — confirm the intended count.
        for (int i = 0; i <= 5; i++)
        {
            allocateLight();
        }
    }
}
// Uploads one light's parameters into the uniform slot at `index`.
// `index` must refer to a slot previously created by allocateLight().
// Does nothing for programs without EFFECT_LIGHTING.
void ShaderProgram::setLight(int index, const Light& light)
{
    if (hasEffect(EFFECT_LIGHTING))
    {
        glUniform3fv(lightIds[index].position, 1, &light.getPosition()[0]);
        glUniform3fv(lightIds[index].color, 1, &light.getColor()[0]);
        glUniform1f(lightIds[index].intensity, light.getIntensity());
        glUniform1f(lightIds[index].linearAttenuation,
                light.getLinearAttenuation());
        glUniform1f(lightIds[index].squaredAttenuation,
                light.getSquaredAttenuation());
    }
}
/**
 * Uploads `light` into the next free uniform slot.
 *
 * @return the slot index used, or -1 when all pre-allocated slots are taken.
 */
int ShaderProgram::addLight(const Light& light)
{
    if (numLights >= lightIds.size())
    {
        return -1;
    }
    const int index = numLights;
    setLight(index, light);
    numLights++;
    return index;
}
void ShaderProgram::clearLights()
{
}
// Attaches a compiled shader stage and retains it (shared_ptr) for the
// lifetime of this program. Call link() once all stages are attached.
void ShaderProgram::attachShader(shared_ptr<Shader> shader)
{
    shaders.push_back(shader);
    glAttachShader(programId, shader->getShaderId());
}
// Links the attached shader stages. On failure the GL info log is logged and
// rethrown as std::runtime_error; on success the program is bound and its
// uniform locations are resolved.
void ShaderProgram::link()
{
    glLinkProgram(programId);

    GLint result = GL_FALSE;
    glGetProgramiv(programId, GL_LINK_STATUS, &result);
    if (result != GL_TRUE)
    {
        int infoLogLength;
        glGetProgramiv(programId, GL_INFO_LOG_LENGTH, &infoLogLength);
        std::string errorMessage;
        // GL_INFO_LOG_LENGTH includes the terminating NUL, so the string may
        // carry a trailing '\0' character.
        errorMessage.resize(infoLogLength);
        glGetProgramInfoLog(programId, infoLogLength, 0, &errorMessage[0]);
        Logger::error(errorMessage);
        throw std::runtime_error(
                "Shader program failed to link: " + errorMessage);
    }
    // bind() must precede allocateUniforms(): the glUniform1i call in there
    // applies to the currently bound program.
    bind();
    allocateUniforms();
}
/**
 * Returns true if this program was declared (via addEffect) to support
 * the given effect.
 */
bool ShaderProgram::hasEffect(const Effect effect)
{
    // Qualify the algorithm explicitly: the previous unqualified find() only
    // resolved through argument-dependent lookup, which fails on standard
    // library implementations where vector iterators are raw pointers.
    return std::find(effects.begin(), effects.end(), effect) != effects.end();
}
// Declares that this program supports `effect`. Duplicates are not filtered;
// hasEffect() only checks for presence, so duplicates are harmless.
void ShaderProgram::addEffect(const Effect effect)
{
    effects.push_back(effect);
}
/**
 * Resolves the uniform locations for one additional light slot and appends
 * it to lightIds. Uniform names follow the GLSL array member convention
 * "lights[<index>].<member>".
 */
void ShaderProgram::allocateLight()
{
    const int newLightIndex = lightIds.size();
    const std::string prefix =
            (boost::format("lights[%d].") % newLightIndex).str();

    LightId newLightId;
    newLightId.position = glGetUniformLocation(programId,
            (prefix + "position").c_str());
    newLightId.color = glGetUniformLocation(programId,
            (prefix + "color").c_str());
    newLightId.intensity = glGetUniformLocation(programId,
            (prefix + "intensity").c_str());
    newLightId.linearAttenuation = glGetUniformLocation(programId,
            (prefix + "linearAttenuation").c_str());
    newLightId.squaredAttenuation = glGetUniformLocation(programId,
            (prefix + "squaredAttenuation").c_str());

    lightIds.push_back(newLightId);
}
| apache-2.0 |
timothynode/apollo | apollo-portal/src/main/java/com/ctrip/framework/apollo/openapi/v1/controller/NamespaceController.java | 3074 | package com.ctrip.framework.apollo.openapi.v1.controller;
import com.ctrip.framework.apollo.common.dto.NamespaceDTO;
import com.ctrip.framework.apollo.common.dto.NamespaceLockDTO;
import com.ctrip.framework.apollo.core.enums.Env;
import com.ctrip.framework.apollo.openapi.dto.OpenNamespaceDTO;
import com.ctrip.framework.apollo.openapi.dto.OpenNamespaceLockDTO;
import com.ctrip.framework.apollo.openapi.util.OpenApiBeanUtils;
import com.ctrip.framework.apollo.portal.entity.bo.NamespaceBO;
import com.ctrip.framework.apollo.portal.service.NamespaceLockService;
import com.ctrip.framework.apollo.portal.service.NamespaceService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
 * Open-API (v1) endpoints for reading namespaces and their edit locks within
 * one environment. Read-only: every mapping is a GET.
 */
@RestController("openapiNamespaceController")
@RequestMapping("/openapi/v1/envs/{env}")
public class NamespaceController {

    @Autowired
    private NamespaceLockService namespaceLockService;

    @Autowired
    private NamespaceService namespaceService;

    /** Lists every namespace of the given app/cluster in the given environment. */
    @RequestMapping(value = "/apps/{appId}/clusters/{clusterName}/namespaces", method = RequestMethod.GET)
    public List<OpenNamespaceDTO> findNamespaces(@PathVariable String appId, @PathVariable String env,
                                                 @PathVariable String clusterName) {
        List<NamespaceBO> namespaces =
                namespaceService.findNamespaceBOs(appId, Env.fromString(env), clusterName);
        return OpenApiBeanUtils.batchTransformFromNamespaceBOs(namespaces);
    }

    /** Loads one namespace by name; responds with an empty body when it does not exist. */
    @RequestMapping(value = "/apps/{appId}/clusters/{clusterName}/namespaces/{namespaceName:.+}", method = RequestMethod.GET)
    public OpenNamespaceDTO loadNamespace(@PathVariable String appId, @PathVariable String env,
                                          @PathVariable String clusterName, @PathVariable String namespaceName) {
        NamespaceBO namespace = namespaceService.loadNamespaceBO(appId, Env.fromString(env),
                clusterName, namespaceName);
        return namespace == null ? null : OpenApiBeanUtils.transformFromNamespaceBO(namespace);
    }

    /** Returns the current edit-lock state of one namespace. */
    @RequestMapping(value = "/apps/{appId}/clusters/{clusterName}/namespaces/{namespaceName}/lock", method = RequestMethod.GET)
    public OpenNamespaceLockDTO getNamespaceLock(@PathVariable String appId, @PathVariable String env,
                                                 @PathVariable String clusterName,
                                                 @PathVariable String namespaceName) {
        Env environment = Env.fromString(env);
        NamespaceDTO namespace =
                namespaceService.loadNamespaceBaseInfo(appId, environment, clusterName, namespaceName);
        NamespaceLockDTO lock =
                namespaceLockService.getNamespaceLock(appId, environment, clusterName, namespaceName);
        return OpenApiBeanUtils.transformFromNamespaceLockDTO(namespace.getNamespaceName(), lock);
    }
}
| apache-2.0 |
peteriliev/kata | MergeSort/src/test/java/TestMergeSort.java | 1151 | package test.java;
import static org.junit.Assert.assertTrue;
import java.rmi.RemoteException;
import java.util.Arrays;
import main.java.MergeSort33;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.google.inject.Inject;
import com.iliev.peter.guice.GuiceIntegration;
import com.iliev.peter.kata.conventions.ISortTestSet;
import com.iliev.peter.kata.utils.ArrayComparer;
import com.iliev.peter.kata.utils.ITestSetProvider;
/**
 * Exercises {@link MergeSort33#sort} against every test set provided by the
 * injected {@link ITestSetProvider}: each unsorted permutation is sorted in
 * place and compared element-wise with the set's expected sorted array.
 */
@RunWith(GuiceIntegration.class)
public class TestMergeSort {

    // Populated by the GuiceIntegration runner before test methods execute.
    @Inject
    ITestSetProvider testProvider;

    @Test
    public void test() throws RemoteException {
        ISortTestSet[] allSets = testProvider.getAll();
        for (ISortTestSet set : allSets) {
            Integer[] sortedSet = (Integer[]) set.getSortedSet();
            Integer[][] unsortedSets = (Integer[][]) set.getUnsortedSets();
            for (Integer[] unsorted : unsortedSets) {
                System.out.printf("Sorted %s \t\t\t\tunsorted %s\n", Arrays.toString(sortedSet), Arrays.toString(unsorted));
                // sort() mutates the array in place; the assertion checks it now
                // matches the expected ordering for this test set.
                MergeSort33.sort(unsorted);
                assertTrue(String.format("Sorting %s failed", Arrays.toString(unsorted)), ArrayComparer.areEqualArrays(sortedSet, unsorted));
            }
        }
    }
}
| apache-2.0 |
casid/mazebert-ladder | src/main/java/db/migration/core/MigrationUsecaseExecutor.java | 2742 | package db.migration.core;
import com.mazebert.gateways.mysql.MySqlPlayerGateway;
import com.mazebert.plugins.balancing.PlayerLevelPlugin;
import com.mazebert.usecases.migration.RepairPlayerLevels;
import com.mazebert.usecases.migration.RepairPlayerNames;
import org.jusecase.inject.InjectUsecaseExecutor;
import org.jusecase.transaction.simple.jdbc.ConnectionProxy;
import javax.sql.DataSource;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.logging.Logger;
/**
 * Usecase executor for Flyway Java migrations: wires the player gateway and
 * level plugin against the migration's JDBC connection and registers the
 * repair usecases that migration steps invoke.
 */
public class MigrationUsecaseExecutor extends InjectUsecaseExecutor {
    public MigrationUsecaseExecutor(Connection connection) {
        injector.add(new MigrationDataSource(connection));
        injector.add(MySqlPlayerGateway.class);
        injector.add(new PlayerLevelPlugin());

        addUsecase(RepairPlayerLevels.class);
        addUsecase(RepairPlayerNames.class);
    }

    /**
     * Minimal DataSource facade over the single migration connection.
     * Only getConnection() is meaningful; all other DataSource features
     * throw, since they are never used during a migration.
     */
    private static class MigrationDataSource implements DataSource {
        private final Connection connection;

        public MigrationDataSource(Connection connection) {
            // Use connection proxy, we do not want the connection to be closed,
            // since Flyway is running migration steps inside a transaction.
            this.connection = new ConnectionProxy(connection);
        }

        // Always returns the single migration connection, ignoring pooling.
        @Override
        public java.sql.Connection getConnection() throws SQLException {
            return connection;
        }

        // Credentials are ignored; the migration connection is already open.
        @Override
        public java.sql.Connection getConnection(String username, String password) throws SQLException {
            return connection;
        }

        @Override
        public <T> T unwrap(Class<T> iface) throws SQLException {
            throw new UnsupportedOperationException();
        }

        @Override
        public boolean isWrapperFor(Class<?> iface) throws SQLException {
            throw new UnsupportedOperationException();
        }

        @Override
        public PrintWriter getLogWriter() throws SQLException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void setLogWriter(PrintWriter out) throws SQLException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void setLoginTimeout(int seconds) throws SQLException {
            throw new UnsupportedOperationException();
        }

        @Override
        public int getLoginTimeout() throws SQLException {
            throw new UnsupportedOperationException();
        }

        @Override
        public Logger getParentLogger() throws SQLFeatureNotSupportedException {
            throw new SQLFeatureNotSupportedException();
        }
    }
}
| apache-2.0 |
lisasievers/smallbusinessdiy | app/siteanalysis/application/language/german/common_lang.php | 3255 | <?php
// German translations for the common UI strings.
// Fixes in this revision: two entries carried machine-translation artifacts
// ("back~~POS=TRUNC ...") and the CSV label had a lowercase "c".
$lang['Recent Activities'] = "Kürzliche Aktivitäten";
$lang["Today's New Visitor Report"] = "Heute Neue Besucherbericht";
// admin sidebar
// TODO(review): several entries below look machine-translated and contextually
// off for UI buttons (e.g. "sparen" for save, "Aussicht" for view); a native
// review pass is recommended. Values are left unchanged here.
$lang["save"] = "sparen";
$lang["generate widget code"] = "erzeugen Widget-Code";
$lang["send"] = "senden";
$lang["cancel"] = "stornieren";
$lang["close"] = "schließen";
$lang["add"] = "hinzufügen";
$lang["edit"] = "bearbeiten";
$lang["update"] = "aktualisieren";
$lang["details"] = "Details";
$lang["view"] = "Aussicht";
$lang["read"] = "lesen";
$lang["delete"] = "löschen";
$lang["search"] = "Suche";
$lang["print"] = "drucken";
$lang["download"] = "herunterladen";
$lang["keyword"] = "Stichwort";
$lang["actions"] = "Aktionen";
$lang["search by"] = "Suche nach";
$lang["total"] = "gesamt";
$lang["more info"] = "mehr Informationen";
$lang["status"] = "Status";
$lang["active"] = "aktiv";
$lang["inactive"] = "inaktiv";
$lang["yes"] = "ja";
$lang["no"] = "Nein";
$lang["OR"] = "ODER";
$lang["only me"] = "nur ich";
$lang["everyone"] = "jeder";
$lang["new search"] = "neue Suche";
$lang["new scan"] = "neue Scan";
$lang["new analysis"] = "neue Analyse";
$lang["search report"] = "Recherchenbericht";
$lang["download selected"] = "Download ausgewählt";
$lang["download all"] = "alles herunterladen";
$lang["delete selected"] = "ausgewählte löschen";
$lang["delete all"] = "alles löschen";
$lang["upload file"] = "Datei hochladen";
$lang["start searching"] = "starten Sie die Suche";
$lang["start analysis"] = "Start Analyse";
$lang["start scraping"] = "beginnen Schaben";
$lang["completed"] = "fertiggestellt";
// Fixed: previous values contained machine-translation debris ("back~~POS=TRUNC ...").
$lang["backlink generated"] = "Backlink generiert";
$lang["ping completed"] = "Ping abgeschlossen";
$lang["enter domain name"] = "Geben Sie Domain-Namen";
$lang["enter URL"] = "URL eingeben";
$lang["search completed"] = "Suche abgeschlossen";
$lang["analysis completed"] = "Analyse abgeschlossen";
$lang["any location"] = "an jedem Ort";
$lang["any language"] = "jede Sprache";
$lang["select country"] = "Land auswählen";
$lang["select your domain"] = "Wählen Sie Ihre Domain";
$lang['max dimension'] = 'max Dimension';
$lang['max size'] = 'max Größe';
$lang['allowed format'] = 'erlaubt Format';
$lang['date'] = "Datum";
$lang['from date'] = "ab Datum";
$lang['to date'] = "miteinander ausgehen";
// new addition*******************************************
$lang['add domain'] = "Add Domain";
$lang['domain list'] = "Domain-Liste";
$lang['analyze website'] = "analysieren Website";
$lang["scrape google adwords"]= "kratzen google adwords";
$lang["visitor analysis report"]="Besucher Analysebericht";
$lang["date range"]="Datumsbereich";
$lang["add website"]="Website hinzufügen";
$lang['visitor analytics : website list'] = "Besucher Analytik: Website-Liste";
$lang['get js code'] = "erhalten js Code";
$lang["start encoding"]="Start kodieren";
$lang["start decoding"]="Start Decodierung";
$lang["generate"]="generieren";
// Fixed: "cSV" -> "CSV" (capitalization typo).
$lang["download csv"]="CSV-Datei herunterladen";
$lang["download txt"]="herunterladen txt";
$lang["start generation"]="Start Generation";
$lang["start scanning"]="starten des Scanvorgangs";
| apache-2.0 |
Alachisoft/NCache | Src/NCCache/Config/Dom/Provider.cs | 3939 | // Copyright (c) 2021 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
using System;
using System.Collections;
using System.Text;
using Alachisoft.NCache.Common.Configuration;
using Alachisoft.NCache.Runtime.Serialization;
using Runtime = Alachisoft.NCache.Runtime;
namespace Alachisoft.NCache.Config.Dom
{
    /// <summary>
    /// Configuration DOM element describing a provider: an assembly/class pair,
    /// a default-provider flag and optional parameters. Supports deep cloning
    /// (ICloneable) and NCache compact serialization (ICompactSerializable).
    /// </summary>
    [Serializable]
    public class Provider : ICloneable, ICompactSerializable
    {
        string providerName, assemblyName, className, _fullProviderName;
        bool isDefaultProvider;
        // NOTE(review): serialized below but exposed by no property or
        // configuration attribute; its meaning is not visible from this file.
        bool asyncMode;
        Parameter[] parameters;

        public Provider() { }

        /// <summary>Logical name of the provider ("provider-name" attribute).</summary>
        [ConfigurationAttribute("provider-name")]
        public string ProviderName
        {
            get { return providerName; }
            set { providerName = value; }
        }

        /// <summary>Assembly containing the provider implementation.</summary>
        [ConfigurationAttribute("assembly-name")]
        public string AssemblyName
        {
            get { return assemblyName; }
            set { assemblyName = value; }
        }

        /// <summary>Fully qualified type name of the provider implementation.</summary>
        [ConfigurationAttribute("class-name")]
        public string ClassName
        {
            get { return className; }
            set { className = value; }
        }

        /// <summary>Full provider name ("full-name" attribute).</summary>
        [ConfigurationAttribute("full-name")]
        public string FullProviderName
        {
            get { return _fullProviderName; }
            set { _fullProviderName = value; }
        }

        /// <summary>Whether this provider is the default one for its cache.</summary>
        [ConfigurationAttribute("default-provider")]
        public bool IsDefaultProvider
        {
            get { return isDefaultProvider; }
            set { isDefaultProvider = value; }
        }

        [ConfigurationSection("parameters")]//Changes for New Dom from param
        public Parameter[] Parameters
        {
            get { return parameters; }
            set { parameters = value; }
        }

        #region ICloneable Members

        /// <summary>
        /// Deep copy of this provider. Strings and the parameter array are
        /// cloned; all flags are carried over.
        /// </summary>
        public object Clone()
        {
            Provider provider = new Provider();
            provider.ProviderName = ProviderName != null ? (string) ProviderName.Clone() : null;
            provider.AssemblyName = AssemblyName != null ? (string) AssemblyName.Clone() : null;
            provider.ClassName = ClassName != null ? (string) ClassName.Clone() : null;
            provider.FullProviderName = FullProviderName != null ? (string) FullProviderName.Clone() : null;
            // Bug fix: Clone() previously dropped both flags, so a cloned
            // default provider silently lost its default/async settings while
            // serialization (below) preserved them.
            provider.IsDefaultProvider = IsDefaultProvider;
            provider.asyncMode = asyncMode;
            provider.Parameters = Parameters != null ? Parameters.Clone() as Parameter[] : null;
            return provider;
        }

        #endregion

        #region ICompactSerializable Members

        /// <summary>Reads fields in the exact order written by <see cref="Serialize"/>.</summary>
        public void Deserialize(Runtime.Serialization.IO.CompactReader reader)
        {
            providerName = reader.ReadObject() as string;
            assemblyName = reader.ReadObject() as string;
            className = reader.ReadObject() as string;
            _fullProviderName = reader.ReadObject() as string;
            isDefaultProvider = reader.ReadBoolean();
            asyncMode = reader.ReadBoolean();
            parameters = reader.ReadObject() as Parameter[];
        }

        /// <summary>Writes fields; order must stay in sync with <see cref="Deserialize"/>.</summary>
        public void Serialize(Runtime.Serialization.IO.CompactWriter writer)
        {
            writer.WriteObject(providerName);
            writer.WriteObject(assemblyName);
            writer.WriteObject(className);
            writer.WriteObject(_fullProviderName);
            writer.Write(isDefaultProvider);
            writer.Write(asyncMode);
            writer.WriteObject(parameters);
        }

        #endregion
    }
}
| apache-2.0 |
apache/incubator-superset | superset/db_engine_specs/teradata.py | 1790 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from superset.db_engine_specs.base import BaseEngineSpec, LimitMethod
class TeradataEngineSpec(BaseEngineSpec):
    """Dialect for Teradata DB."""

    engine = "teradata"
    engine_name = "Teradata"
    # Teradata has no plain LIMIT clause, so row limits are applied by
    # wrapping the whole query rather than appending to it.
    limit_method = LimitMethod.WRAP_SQL
    max_column_name_length = 30  # since 14.10 this is 128

    # ISO-8601 duration -> SQL template truncating {col} to that grain.
    # None means "no truncation" (pass the column through unchanged).
    _time_grain_expressions = {
        None: "{col}",
        "PT1M": "TRUNC(CAST({col} as DATE), 'MI')",
        "PT1H": "TRUNC(CAST({col} as DATE), 'HH')",
        "P1D": "TRUNC(CAST({col} as DATE), 'DDD')",
        "P1W": "TRUNC(CAST({col} as DATE), 'WW')",
        "P1M": "TRUNC(CAST({col} as DATE), 'MONTH')",
        "P3M": "TRUNC(CAST({col} as DATE), 'Q')",
        "P1Y": "TRUNC(CAST({col} as DATE), 'YEAR')",
    }

    @classmethod
    def epoch_to_dttm(cls) -> str:
        # SQL template converting an epoch-seconds column ({col}) into a
        # TIMESTAMP(0): whole days are added to 1970-01-01, the remaining
        # seconds (MOD 86400) are added as an interval.
        return (
            "CAST(((CAST(DATE '1970-01-01' + ({col} / 86400) AS TIMESTAMP(0) "
            "AT 0)) AT 0) + (({col} MOD 86400) * INTERVAL '00:00:01' "
            "HOUR TO SECOND) AS TIMESTAMP(0))"
        )
| apache-2.0 |
scalastuff/scalaleafs | sample/src/main/scala/net/scalaleafs/sample/Calendar2.js | 2821 | function initCal() {
$('#calendar2').cal({
resources : {
'15' : 'Aziza',
'16' : 'Heath',
'17' : 'Karen',
'90' : 'Michelle'
},
allowresize : true,
allowmove : true,
allowselect : true,
allowremove : true,
allownotesedit : true,
daytimestart: '06:00:00',
daytimeend: '22:00:00',
eventselect : function( uid ){
console.log( 'Selected event: '+uid );
},
eventremove : function( uid ){
console.log( 'Removed event: '+uid );
},
eventnotesedit : function( uid ){
console.log( 'Edited Notes for event: '+uid );
},
eventresize : function (uid, data, events) {
console.log('Resize: ' + uid + 'data: ' + data + 'events: ' + events);
$('#calendar2').cal('add',{
uid : 10,
begins : $.cal.date().addDays(3).format('Y-m-d')+' 10:10:00',
ends : $.cal.date().addDays(3).format('Y-m-d')+' 12:00:00',
color : '#dddddd',
resource: '90',
title : 'Added'
});
},
// Load events as .ics
events : //'http://staff.digitalfusion.co.nz.local/time/calendar/leave/'
[
{
uid : 1,
begins : $.cal.date().addDays(2).format('Y-m-d')+' 10:10:00',
ends : $.cal.date().addDays(2).format('Y-m-d')+' 12:00:00',
color : '#dddddd',
resource: '90',
title : 'Done'
},
{
uid : 2,
begins : $.cal.date().addDays(2).format('Y-m-d')+' 12:15:00',
ends : $.cal.date().addDays(2).format('Y-m-d')+' 13:45:00',
resource: '16',
notes : 'Keepin\' it real…\n\nMan.'
},
{
uid : 3,
begins : $.cal.date().addDays(2).format('Y-m-d')+' 10:30:00',
ends : $.cal.date().addDays(2).format('Y-m-d')+' 12:15:00',
color : 'rgb( 90, 0, 0 )',
resource: '16',
notes : 'The cake is a lie.'
},
{
uid : 4,
begins : $.cal.date().addDays(3).format('Y-m-d')+' 14:15:00',
ends : $.cal.date().addDays(3).format('Y-m-d')+' 16:30:00',
resource: '17',
notes : 'An <example> event…'
},
{
uid : 5,
begins : $.cal.date().addDays(4).format('Y-m-d')+' 11:30:00',
ends : $.cal.date().addDays(4).format('Y-m-d')+' 12:30:00',
color : '#990066',
notes : 'The big game'
},
{
uid : 6,
begins : $.cal.date().addDays(0).format('Y-m-d')+' 12:30:00',
ends : $.cal.date().addDays(2).format('Y-m-d')+' 8:45:00',
resource: '17',
notes : 'Good-O'
}
],
masktimelabel: {
'00' : 'G:i'
}
});
$('#calendar2').on('click','.ui-cal-time',function(e){
console.log( 'Time: ', $(this).attr('time') );
console.log( 'Date: ', $(this).closest('.ui-cal-resource').attr('date') );
console.log( 'Resource: ', $(this).closest('.ui-cal-resource').attr('resource') );
});
}
| apache-2.0 |
sHiniz0r/Concurrent | zad2/src/main/java/Manager.java | 664 | /**
* Created by Michał on 04.10.2016.
*/
public class Manager extends Thread {
private int id;
private Storage storage;
private int messagesNumber;
public Manager(int id, Storage storage, int messagesNumber){
this.id = id;
this.storage = storage;
this.messagesNumber = messagesNumber;
}
public void run(){
while(storage.locationSentMessages(id) < messagesNumber){
try {
storage.sendMessage(id, "something " + id);
int delay = (int)(Math.random() * 1000 + 3000);
sleep(delay);
}catch (InterruptedException ex){}
}
}
}
| apache-2.0 |
ModernMT/MMT | src/commons/src/main/java/eu/modernmt/data/TranslationUnitMessage.java | 1973 | package eu.modernmt.data;
import eu.modernmt.lang.LanguageDirection;
import eu.modernmt.model.Alignment;
import eu.modernmt.model.Sentence;
import eu.modernmt.model.corpus.TranslationUnit;
import java.util.UUID;
/**
 * Data-channel message carrying one translation unit together with its
 * channel position and per-engine language/sentence representation.
 * All fields are public and final; instances are immutable.
 *
 * Created by davide on 06/09/16.
 */
public class TranslationUnitMessage extends DataMessage {

    /** Id of the memory this unit belongs to. */
    public final long memory;
    /** Owner of the memory entry. */
    public final UUID owner;
    /** The raw translation unit as read from the corpus. */
    public final TranslationUnit value;
    /** True when this message updates an existing unit rather than adding a new one. */
    public final boolean update;
    /** Previous source text being replaced; presumably only meaningful when {@link #update} is true. */
    public final String previousSentence;
    /** Previous target text being replaced; presumably only meaningful when {@link #update} is true. */
    public final String previousTranslation;
    public final LanguageDirection language; // this is the language mapped by the engine's language index
    /** Source side as a {@link Sentence}. */
    public final Sentence sentence;
    /** Target side as a {@link Sentence}. */
    public final Sentence translation;
    /** Word alignment between {@link #sentence} and {@link #translation}. */
    public final Alignment alignment;

    public TranslationUnitMessage(short channel, long channelPosition, long memory, UUID owner, TranslationUnit value,
                                  boolean update, String previousSentence, String previousTranslation,
                                  LanguageDirection language, Sentence sentence, Sentence translation, Alignment alignment) {
        super(channel, channelPosition);
        this.memory = memory;
        this.owner = owner;
        this.value = value;
        this.update = update;
        this.previousSentence = previousSentence;
        this.previousTranslation = previousTranslation;
        this.language = language;
        this.sentence = sentence;
        this.translation = translation;
        this.alignment = alignment;
    }

    // NOTE(review): toString() deliberately(?) omits sentence/translation/
    // alignment — confirm this is intended (they may be large).
    @Override
    public String toString() {
        return "TranslationUnitMessage{" +
                "memory=" + memory +
                ", owner=" + owner +
                ", value=" + value +
                ", update=" + update +
                ", previousSentence='" + previousSentence + '\'' +
                ", previousTranslation='" + previousTranslation + '\'' +
                ", language=" + language +
                '}';
    }
}
| apache-2.0 |
krasserm/scalaz-camel | scalaz-camel-core/src/test/scala/scalaz/camel/core/CamelTestProcessors.scala | 2909 | /*
* Copyright 2010-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalaz.camel.core
import org.apache.camel.{AsyncCallback, AsyncProcessor, Exchange, Processor}
import scalaz._
/**
 * Message processors used by the scalaz-camel core test suite. Provides both
 * direct-style processors (plain Message => Message functions, prefixed ds_)
 * and their CPS (continuation-passing style) counterparts created via cps().
 *
 * @author Martin Krasser
 */
trait CamelTestProcessors { this: Conv =>

  import scalaz.concurrent.Strategy
  import Scalaz._

  /** Concurrency strategy for each created processor (defaults to Strategy.Sequential) */
  var processorConcurrencyStrategy: Strategy = Strategy.Sequential

  //
  // Direct-style processors: Message => Message (may throw exception)
  //

  /** Fails with Exception and error message em (direct-style processor). */
  def ds_failWithMessage(em: String): Message => Message = (m: Message) => throw new Exception(em)

  /** Appends o to message body (direct-style processor) */
  def ds_appendToBody(o: Any)(implicit mgnt: ContextMgnt) = (m: Message) => m.appendToBody(o)

  /** Prints message to stdout (direct-style processor) */
  def ds_printMessage = (m: Message) => { println(m); m }

  //
  // CPS (continuation-passing style) processors: (Message, MessageValidation => Unit) => Unit
  //

  /** Fails with Exception and error message em. */
  def failWithMessage(em: String): MessageProcessor = cps(ds_failWithMessage(em))

  /** Converts message body to String */
  def convertBodyToString(implicit mgnt: ContextMgnt) = cps(m => m.bodyTo[String])

  /** Appends o to message body */
  def appendToBody(o: Any)(implicit mgnt: ContextMgnt) = cps(ds_appendToBody(o))

  /** Prints message to stdout */
  def printMessage = cps(ds_printMessage)

  /** Repeats message body (using String concatenation) */
  def repeatBody = new RepeatBodyProcessor(processorConcurrencyStrategy)

  /** Camel processor that repeats the body of the input message */
  class RepeatBodyProcessor(s: Strategy) extends AsyncProcessor {

    // Synchronous variant: doubles the String body of the in-message.
    def process(exchange: Exchange) = {
      val body = exchange.getIn.getBody(classOf[String])
      exchange.getIn.setBody(body + body)
    }

    // Asynchronous variant: runs the synchronous processing on strategy s,
    // then signals completion via the callback (doneSync = false). Returning
    // false tells Camel the exchange is processed asynchronously.
    def process(exchange: Exchange, callback: AsyncCallback) = {
      s.apply {
        process(exchange)
        callback.done(false)
      }
      false
    }

    // View of this processor as a plain (synchronous) Camel Processor.
    def sp = this.asInstanceOf[Processor]
  }

  /** Creates a CPS processor from a direct-style processor. */
  def cps(p: Message => Message): MessageProcessor = messageProcessor(p, processorConcurrencyStrategy)
}
gsinkovskiy/phabricator | src/applications/guides/view/PhabricatorGuideListView.php | 1030 | <?php
/**
 * Renders a list of guide items as a big PHUI object item list. Each item
 * shows its icon, title, link and description; items exposing a skip URI get
 * a "Skip" button in the side column.
 */
final class PhabricatorGuideListView extends AphrontView {

  private $items = array();

  public function addItem(PhabricatorGuideItemView $item) {
    $this->items[] = $item;
    return $this;
  }

  public function render() {
    $result = id(new PHUIObjectItemListView())
      ->setBig(true);

    foreach ($this->items as $guide_item) {
      $icon_view = id(new PHUIIconView())
        ->setIcon($guide_item->getIcon())
        ->setBackground($guide_item->getIconBackground());

      $row = id(new PHUIObjectItemView())
        ->setHeader($guide_item->getTitle())
        ->setHref($guide_item->getHref())
        ->setImageIcon($icon_view)
        ->addAttribute($guide_item->getDescription());

      $skip_uri = $guide_item->getSkipHref();
      if ($skip_uri) {
        $row->setSideColumn(
          id(new PHUIButtonView())
            ->setText(pht('Skip'))
            ->setTag('a')
            ->setHref($skip_uri)
            ->setColor(PHUIButtonView::SIMPLE));
      }

      $result->addItem($row);
    }

    return $result;
  }

}
| apache-2.0 |
dsyer/spring-cloud-connectors | spring-cloud-heroku-connector/src/test/java/org/springframework/cloud/heroku/AbstractHerokuConnectorRelationalServiceTest.java | 1708 | package org.springframework.cloud.heroku;
import static org.junit.Assert.assertEquals;
import org.springframework.cloud.service.common.RelationalServiceInfo;
/**
 * Shared URL builders and assertions for Heroku relational-service connector
 * tests. Subclasses supply the service URL scheme (e.g. "postgres").
 *
 * @author Ramnivas Laddad
 */
public abstract class AbstractHerokuConnectorRelationalServiceTest extends AbstractHerokuConnectorTest {

    /** Scheme used in the Heroku-style service URL, e.g. "postgres" or "mysql". */
    private final String databaseType;

    public AbstractHerokuConnectorRelationalServiceTest(String databaseType) {
        this.databaseType = databaseType;
    }

    /** Expected JDBC URL for the given database name. */
    protected String getJdbcUrl(String name) {
        // JDBC uses the "postgresql" sub-protocol where Heroku's URL scheme is "postgres".
        String jdbcScheme = "postgres".equals(databaseType) ? "postgresql" : databaseType;
        return String.format("jdbc:%s://%s:%d/%s?user=%s&password=%s",
                jdbcScheme, hostname, port, name, username, password);
    }

    /** Expected Heroku-style service URL for the given database name. */
    protected String getRelationalServiceUrl(String name) {
        return String.format("%s://%s:%s@%s:%d/%s",
                databaseType, username, password, hostname, port, name);
    }

    /** Asserts that the parsed service info matches the test fixture's connection fields. */
    protected void assertReleationServiceInfo(RelationalServiceInfo serviceInfo, String databaseName) {
        assertEquals(hostname, serviceInfo.getHost());
        assertEquals(port, serviceInfo.getPort());
        assertEquals(username, serviceInfo.getUserName());
        assertEquals(password, serviceInfo.getPassword());
        assertEquals(getJdbcUrl(databaseName), serviceInfo.getJdbcUrl());
    }
}
| apache-2.0 |
LearnLib/alex | docs/.vuepress/config.js | 2321 | const env = require('./env.default.js');
module.exports = {
base: env.BASE,
title: 'ALEX Docs (v2.1.1)',
description: 'User documentation for ALEX',
dest: './.vuepress/dist',
markdown: {
lineNumbers: true
},
themeConfig: {
lastUpdated: 'Last Updated',
sidebarDepth: 2,
sidebar: [
['/', 'Home'],
{
title: 'Getting started',
collapsable: true,
children: [
['/contents/getting-started/installation/', 'Installation'],
['/contents/getting-started/configuration/', 'Configuration']
]
},
{
title: 'User manual',
collapsable: true,
children: [
['/contents/user-manual/introduction/', 'Introduction'],
['/contents/user-manual/user-management/', 'User management'],
['/contents/user-manual/project-management/', 'Project management'],
['/contents/user-manual/symbol-management/', 'Symbol management'],
['/contents/user-manual/testing/', 'Testing'],
['/contents/user-manual/learning/', 'Learning'],
['/contents/user-manual/model-checking/', 'Model Checking'],
['/contents/user-manual/integrations/', 'Integrations'],
['/contents/user-manual/best-practices/', 'Best practices']
]
},
{
title: 'Examples',
collapsable: true,
children: [
['/contents/examples/todomvc/', 'TodoMVC']
]
},
{
title: 'Developer docs',
collapsable: true,
children: [
['/contents/dev-docs/development/', 'Develop'],
['/contents/dev-docs/rest-api/', 'REST API'],
['/contents/dev-docs/cli/', 'CLI']
]
},
['/contents/about/', 'About']
],
nav: [
{text: 'Homepage', link: 'https://learnlib.github.io/alex/'},
{text: 'GitHub', link: 'https://github.com/LearnLib/alex'}
]
}
};
| apache-2.0 |
NCI-GDC/portal-ui | src/packages/@ncigdc/modern_components/SCRNASeq/toolbar/index.js | 122 | export { default as DownloadButton } from './DownloadButton';
export { default as ToolbarButton } from './ToolbarButton';
| apache-2.0 |
erezvani1529/azure-storage-net | Lib/AspNet/Microsoft.WindowsAzure.Storage.Facade/FacadeLib/Microsoft.WindowsAzure.Storage.Core.CommandLocationMode.cs | 149 |
namespace Microsoft.WindowsAzure.Storage.Core
{
    /// <summary>
    /// Storage location(s) a command is allowed to target. Presumably
    /// consumed by the retry/location-mode logic elsewhere in the library
    /// (not visible from this file).
    /// </summary>
    internal enum CommandLocationMode
    {
        /// <summary>The command may only run against the primary location.</summary>
        PrimaryOnly,
        /// <summary>The command may only run against the secondary location.</summary>
        SecondaryOnly,
        /// <summary>The command may run against either the primary or the secondary location.</summary>
        PrimaryOrSecondary,
    }
}
gkossakowski/gimd | src/main/scala/com/google/gimd/query/Handle.scala | 1774 | // Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gimd.query
import com.google.gimd.{MessageField, UserType}
import com.google.gimd.file.File
import com.google.gimd.UserType
import com.google.gimd.MessageField
/** Any handle to a stored message which corresponds to user object of type T. */
abstract sealed class Handle[T]
/**
* <p>Handle to mested message determined by path given by list of
* MessageFields.</p>
*
* <p>In order to get message this handle is pointing at one should use
* <code>path.last.value</code></p>
*
* <p>Path stores also all UserTypes corresponding each to Message that contains
* given MessageField.</p>
*
* <p>For any non-empty path condition T == U should yield true for T,U defined by expression:
* PathHandle[T].path.last.userType[U]</p>
*/
final case class PathHandle[+T](path: List[(UserType[_], MessageField)])
object PathHandle {
val empty = PathHandle(Nil)
}
/**
* <p>Complete handle consisting of file and pathHandle</p>
*
* <p>The top level message can be accessed using <code>file.message</code>.</p>
*/
final case class CompleteHandle[T](file: File[_],
pathHandle: PathHandle[T]) extends Handle[T]
| apache-2.0 |
cs-au-dk/TAJS | test-resources/src/flowgraphbuilder/forin_call2.js | 65 | function f() {
}
for (var i in { a: 0, b: 0}) {
(0)[f]();
}
| apache-2.0 |
sjbutler/jim | src/uk/ac/open/crc/jim/parser/java14/ASTTypeArgument.java | 892 | /* Generated By:JJTree: Do not edit this line. ASTTypeArgument.java Version 6.0 */
/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=true,VISITOR=true,TRACK_TOKENS=true,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=*,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package uk.ac.open.crc.jim.parser.java14;
public
class ASTTypeArgument extends SimpleNode {
public ASTTypeArgument(int id) {
super(id);
}
public ASTTypeArgument(Java14Parser p, int id) {
super(p, id);
}
public static Node jjtCreate(int id) {
return new ASTTypeArgument(id);
}
public static Node jjtCreate(Java14Parser p, int id) {
return new ASTTypeArgument(p, id);
}
/** Accept the visitor. **/
public Object jjtAccept(Java14ParserVisitor visitor, Object data) {
return
visitor.visit(this, data);
}
}
/* JavaCC - OriginalChecksum=03b07c185dc36944738c1d22f611573d (do not edit this line) */
| apache-2.0 |
Blazebit/blaze-storage | server/ui/src/main/java/com/blazebit/storage/server/storage/StorageDetailPage.java | 686 | package com.blazebit.storage.server.storage;
import javax.enterprise.context.RequestScoped;
import javax.inject.Named;
import com.blazebit.storage.rest.model.StorageRepresentation;
@Named
@RequestScoped
public class StorageDetailPage extends StorageBasePage {

    private static final long serialVersionUID = 1L;

    // Human-readable name of the storage's type, resolved once per request in init().
    private String typeName;

    @Override
    protected void init() {
        super.init();
        // `storage` and `client` are inherited fields — presumably populated
        // by StorageBasePage.init(); storage may be null (e.g. unknown id).
        if (storage == null) {
            this.typeName = null;
        } else {
            // Resolve the display name of this storage's type via the REST client.
            this.typeName = client.storageTypes().get(storage.getType()).getName();
        }
    }

    /** The current storage, narrowed to its concrete representation type. */
    public StorageRepresentation getStorage() {
        return (StorageRepresentation) storage;
    }

    /** Display name of the storage type, or null when no storage was found. */
    public String getTypeName() {
        return typeName;
    }
}
| apache-2.0 |
dangdangdotcom/sharding-jdbc | sharding-core/src/main/java/io/shardingsphere/core/rewrite/MasterSlaveSQLRewriteEngine.java | 3306 | /*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.core.rewrite;
import io.shardingsphere.core.metadata.ShardingMetaData;
import io.shardingsphere.core.parsing.parser.sql.SQLStatement;
import io.shardingsphere.core.parsing.parser.token.SQLToken;
import io.shardingsphere.core.parsing.parser.token.SchemaToken;
import io.shardingsphere.core.rewrite.placeholder.SchemaPlaceholder;
import io.shardingsphere.core.rule.MasterSlaveRule;
import java.util.Collections;
import java.util.List;
/**
* SQL rewrite engine for master slave rule.
*
* <p>should rewrite schema name.</p>
*
* @author chenqingyang
*/
public final class MasterSlaveSQLRewriteEngine {

    private final MasterSlaveRule masterSlaveRule;

    private final String originalSQL;

    // Tokens parsed from the statement; positions index into originalSQL.
    private final List<SQLToken> sqlTokens;

    private final ShardingMetaData metaData;

    /**
     * Constructs master slave SQL rewrite engine.
     *
     * @param masterSlaveRule master slave rule
     * @param originalSQL original SQL
     * @param sqlStatement SQL statement
     * @param metaData meta data
     */
    public MasterSlaveSQLRewriteEngine(final MasterSlaveRule masterSlaveRule, final String originalSQL, final SQLStatement sqlStatement, final ShardingMetaData metaData) {
        this.masterSlaveRule = masterSlaveRule;
        this.originalSQL = originalSQL;
        sqlTokens = sqlStatement.getSQLTokens();
        this.metaData = metaData;
    }

    /**
     * Rewrite SQL.
     *
     * @return SQL
     */
    public String rewrite() {
        // Fast path: nothing to rewrite.
        if (sqlTokens.isEmpty()) {
            return originalSQL;
        }
        SQLBuilder result = new SQLBuilder(Collections.emptyList());
        int count = 0;
        for (SQLToken each : sqlTokens) {
            // The text before the very first token is copied verbatim.
            if (0 == count) {
                result.appendLiterals(originalSQL.substring(0, each.getBeginPosition()));
            }
            // NOTE(review): only SchemaTokens are handled; tokens of any other
            // type are ignored and their surrounding text is not appended —
            // presumably only SchemaTokens occur on the master/slave path.
            if (each instanceof SchemaToken) {
                appendSchemaPlaceholder(originalSQL, result, (SchemaToken) each, count);
            }
            count++;
        }
        return result.toSQL(masterSlaveRule, metaData.getDataSource());
    }

    // Emits a placeholder for the schema name, then copies the literal SQL
    // from the end of this token up to the start of the next token (or to the
    // end of the statement for the last token).
    private void appendSchemaPlaceholder(final String sql, final SQLBuilder sqlBuilder, final SchemaToken schemaToken, final int count) {
        sqlBuilder.appendPlaceholder(new SchemaPlaceholder(schemaToken.getSchemaName().toLowerCase(), null));
        int beginPosition = schemaToken.getBeginPosition() + schemaToken.getOriginalLiterals().length();
        int endPosition = sqlTokens.size() - 1 == count ? sql.length() : sqlTokens.get(count + 1).getBeginPosition();
        sqlBuilder.appendLiterals(sql.substring(beginPosition, endPosition));
    }
}
| apache-2.0 |
hgani/androlib | ganilib/src/main/java/com/gani/lib/database/GDbData.java | 359 | package com.gani.lib.database;
import com.google.gson.reflect.TypeToken;
/**
 * Read accessor over a database row/record, keyed by column name.
 * Behaviour for missing keys is not defined by this interface —
 * implementations decide it.
 */
public interface GDbData {

    /** Value of {@code key} as a primitive int. */
    int getInt(String key);

    /** Value of {@code key} as a primitive long. */
    long getLong(String key);

    /** Value of {@code key} as a primitive boolean. */
    boolean getBoolean(String key);

    /** Value of {@code key} as a boxed Long; presumably null for absent/NULL values. */
    Long getNullableLong(String key);

    /** Value of {@code key} as a boxed Boolean; presumably null for absent/NULL values. */
    Boolean getNullableBoolean(String key);

    /** Value of {@code key} as a String. */
    String getString(String key);

    /**
     * Value of {@code name} deserialized into an object of type {@code T};
     * the Gson {@link TypeToken} describes the target type (supports generics).
     */
    <T> T getObject(String name, TypeToken<T> typeToken);
}
| apache-2.0 |
aperepel/netty | src/test/java/org/jboss/netty/util/MapBackedSetTest.java | 3278 | /*
* Copyright 2009 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.util;
import static org.easymock.EasyMock.*;
import static org.junit.Assert.*;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
/**
* @author <a href="http://www.jboss.org/netty/">The Netty Project</a>
* @author <a href="http://gleamynode.net/">Trustin Lee</a>
*
* @version $Rev$, $Date$
*
*/
/**
 * Verifies that {@code MapBackedSet} delegates every {@link java.util.Set}
 * operation to the backing {@link Map}, using EasyMock strict mocks so both
 * the calls and their order are checked.
 */
public class MapBackedSetTest {

    /** size() delegates to Map.size(). */
    @Test
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void testSize() {
        Map map = createStrictMock(Map.class);
        expect(map.size()).andReturn(0);
        replay(map);
        assertEquals(0, new MapBackedSet(map).size());
        verify(map);
    }

    /** contains(o) delegates to Map.containsKey(o). */
    @Test
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void testContains() {
        Map map = createStrictMock(Map.class);
        expect(map.containsKey("key")).andReturn(true);
        replay(map);
        assertTrue(new MapBackedSet(map).contains("key"));
        verify(map);
    }

    /** remove(o) delegates to Map.remove(o): non-null result means removed. */
    @Test
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void testRemove() {
        Map map = createStrictMock(Map.class);
        expect(map.remove("key")).andReturn(true);
        expect(map.remove("key")).andReturn(null);
        replay(map);
        assertTrue(new MapBackedSet(map).remove("key"));
        assertFalse(new MapBackedSet(map).remove("key"));
        verify(map);
    }

    /** add(o) delegates to Map.put(o, true): null previous value means added. */
    @Test
    @SuppressWarnings({"unchecked", "rawtypes"})
    public void testAdd() {
        Map map = createStrictMock(Map.class);
        expect(map.put("key", true)).andReturn(null);
        expect(map.put("key", true)).andReturn(true);
        replay(map);
        assertTrue(new MapBackedSet(map).add("key"));
        assertFalse(new MapBackedSet(map).add("key"));
        verify(map);
    }

    /** clear() delegates to Map.clear(). */
    @Test
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void testClear() {
        Map map = createStrictMock(Map.class);
        map.clear();
        replay(map);
        new MapBackedSet(map).clear();
        verify(map);
    }

    /** iterator() is the backing map's key-set iterator (same instance). */
    @Test
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void testIterator() {
        Map map = createStrictMock(Map.class);
        Set keySet = createStrictMock(Set.class);
        Iterator keySetIterator = createStrictMock(Iterator.class);

        expect(map.keySet()).andReturn(keySet);
        expect(keySet.iterator()).andReturn(keySetIterator);

        replay(map);
        replay(keySet);
        replay(keySetIterator);

        assertSame(keySetIterator, new MapBackedSet(map).iterator());

        verify(map);
        verify(keySet);
        verify(keySetIterator);
    }
}
| apache-2.0 |
schmittjoh/php-stubs | res/php/pgsql/functions/pg-execute.php | 337 | <?php
/**
 * Sends a request to execute a prepared statement with the given parameters,
 * and waits for the result.
 *
 * @phpstub
 *
 * @param resource $connection PostgreSQL connection resource.
 * @param string   $stmtname   Name of the prepared statement to execute.
 * @param array    $params     Values to bind to the statement's placeholders.
 *
 * @return resource A query result resource on success.
 */
function pg_execute($connection = NULL, $stmtname, $params)
{
}
cloudfoundry/php-buildpack | fixtures/symfony_5_local_deps/vendor/ocramius/proxy-manager/src/ProxyManager/ProxyGenerator/AccessInterceptor/MethodGenerator/SetMethodSuffixInterceptor.php | 1225 | <?php
declare(strict_types=1);
namespace ProxyManager\ProxyGenerator\AccessInterceptor\MethodGenerator;
use Closure;
use ProxyManager\Generator\MethodGenerator;
use Zend\Code\Generator\ParameterGenerator;
use Zend\Code\Generator\PropertyGenerator;
/**
 * Implementation for {@see \ProxyManager\Proxy\AccessInterceptorInterface::setMethodSuffixInterceptor}
 * for access interceptor objects
 *
 * @author Marco Pivetta <ocramius@gmail.com>
 * @license MIT
 */
class SetMethodSuffixInterceptor extends MethodGenerator
{
    /**
     * Constructor
     *
     * Generates a `setMethodSuffixInterceptor(string $methodName, ?Closure $suffixInterceptor = null)`
     * method whose body stores the closure in the proxy's suffix-interceptor
     * map property, keyed by method name.
     *
     * @param PropertyGenerator $suffixInterceptor property holding the per-method interceptor map
     *
     * @throws \Zend\Code\Generator\Exception\InvalidArgumentException
     */
    public function __construct(PropertyGenerator $suffixInterceptor)
    {
        parent::__construct('setMethodSuffixInterceptor');

        // Second parameter: a Closure defaulting to null, so passing null can
        // clear a previously set interceptor.
        $interceptor = new ParameterGenerator('suffixInterceptor');

        $interceptor->setType(Closure::class);
        $interceptor->setDefaultValue(null);
        $this->setParameter(new ParameterGenerator('methodName', 'string'));
        $this->setParameter($interceptor);
        $this->setBody('$this->' . $suffixInterceptor->getName() . '[$methodName] = $suffixInterceptor;');
    }
}
| apache-2.0 |
nivanov/ignite | modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheSwapPreloadSelfTest.java | 7490 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Collections;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.cache.Cache;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheMode.REPLICATED;
import static org.apache.ignite.cache.CacheRebalanceMode.SYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
/**
* Test for cache swap preloading.
*/
public class GridCacheSwapPreloadSelfTest extends GridCommonAbstractTest {
/** Entry count. */
private static final int ENTRY_CNT = 15_000;
/** */
private final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);
/** */
private CacheMode cacheMode;
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);
TcpDiscoverySpi disco = new TcpDiscoverySpi();
disco.setIpFinder(ipFinder);
cfg.setDiscoverySpi(disco);
cfg.setNetworkTimeout(2000);
CacheConfiguration cacheCfg = defaultCacheConfiguration();
cacheCfg.setWriteSynchronizationMode(FULL_SYNC);
cacheCfg.setSwapEnabled(true);
cacheCfg.setCacheMode(cacheMode);
cacheCfg.setRebalanceMode(SYNC);
cacheCfg.setEvictSynchronized(false);
cacheCfg.setAtomicityMode(TRANSACTIONAL);
if (cacheMode == PARTITIONED)
cacheCfg.setBackups(1);
cfg.setCacheConfiguration(cacheCfg);
return cfg;
}
/** @throws Exception If failed. */
public void testSwapReplicated() throws Exception {
cacheMode = REPLICATED;
checkSwap();
}
/** @throws Exception If failed. */
public void testSwapPartitioned() throws Exception {
cacheMode = PARTITIONED;
checkSwap();
}
/** @throws Exception If failed. */
private void checkSwap() throws Exception {
try {
startGrid(0);
IgniteCache<Integer, Integer> cache = grid(0).cache(null);
Set<Integer> keys = new HashSet<>();
// Populate.
for (int i = 0; i < ENTRY_CNT; i++) {
keys.add(i);
cache.put(i, i);
}
info("Put finished.");
// Evict all.
cache.localEvict(keys);
info("Evict finished.");
for (int i = 0; i < ENTRY_CNT; i++)
assertNull(cache.localPeek(i, CachePeekMode.ONHEAP));
assert cache.localSize(CachePeekMode.PRIMARY, CachePeekMode.BACKUP, CachePeekMode.NEAR,
CachePeekMode.ONHEAP) == 0;
startGrid(1);
int size = grid(1).cache(null).localSize(CachePeekMode.ALL);
info("New node cache size: " + size);
assertEquals(ENTRY_CNT, size);
}
finally {
stopAllGrids();
}
}
/**
* @throws Exception If failed.
*/
public void testSwapReplicatedMultithreaded() throws Exception {
cacheMode = REPLICATED;
checkSwapMultithreaded();
}
/** @throws Exception If failed. */
public void testSwapPartitionedMultithreaded() throws Exception {
cacheMode = PARTITIONED;
checkSwapMultithreaded();
}
/** @throws Exception If failed. */
private void checkSwapMultithreaded() throws Exception {
fail("https://issues.apache.org/jira/browse/IGNITE-614");
final AtomicBoolean done = new AtomicBoolean();
IgniteInternalFuture<?> fut = null;
try {
startGrid(0);
final IgniteCache<Integer, Integer> cache = grid(0).cache(null);
assertNotNull(cache);
// Populate.
for (int i = 0; i < ENTRY_CNT; i++)
cache.put(i, i);
Set<Integer> keys = new HashSet<>();
for (Cache.Entry<Integer, Integer> entry : cache.localEntries())
keys.add(entry.getKey());
cache.localEvict(keys);
fut = multithreadedAsync(new Callable<Object>() {
@Nullable @Override public Object call() throws Exception {
Random rnd = new Random();
while (!done.get()) {
int key = rnd.nextInt(ENTRY_CNT);
Integer i = cache.get(key);
assertNotNull(i);
assertEquals(Integer.valueOf(key), i);
cache.localEvict(Collections.singleton(rnd.nextInt(ENTRY_CNT)));
}
return null;
}
}, 10);
startGrid(1);
done.set(true);
fut.get();
fut = null;
int size = grid(1).cache(null).localSize(CachePeekMode.PRIMARY, CachePeekMode.BACKUP,
CachePeekMode.NEAR, CachePeekMode.ONHEAP);
info("New node cache size: " + size);
if (size != ENTRY_CNT) {
Set<Integer> keySet = new TreeSet<>();
int next = 0;
for (IgniteCache.Entry<Integer, Integer> e : grid(1).<Integer, Integer>cache(null).localEntries())
keySet.add(e.getKey());
for (Integer i : keySet) {
while (next < i)
info("Missing key: " + next++);
next++;
}
}
assertEquals(ENTRY_CNT, size);
}
finally {
done.set(true);
try {
if (fut != null)
fut.get();
}
finally {
stopAllGrids();
}
}
}
} | apache-2.0 |
totticarter/presto | presto-lucene/src/main/java/com/facebook/presto/example/LuceneRecordSet.java | 2268 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.example;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.spi.RecordSet;
import com.facebook.presto.spi.type.Type;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.io.ByteSource;
import com.google.common.io.Resources;
import java.net.MalformedURLException;
import java.util.List;
import org.apache.lucene.queryparser.classic.ParseException;
import static java.util.Objects.requireNonNull;
public class LuceneRecordSet
        implements RecordSet
{
    /** Handles of the columns this record set exposes, in request order. */
    private final List<LuceneColumnHandle> columnHandles;

    /** Presto types of the exposed columns, parallel to {@link #columnHandles}. */
    private final List<Type> columnTypes;

    /** Lazily-read bytes of the split's backing resource. */
    private final ByteSource byteSource;

    /**
     * @param split split identifying the data to read; must not be null
     * @param columnHandles columns requested by the engine; must not be null
     */
    public LuceneRecordSet(LuceneSplit split, List<LuceneColumnHandle> columnHandles)
    {
        requireNonNull(split, "split is null");
        this.columnHandles = requireNonNull(columnHandles, "column handles is null");

        ImmutableList.Builder<Type> types = ImmutableList.builder();
        for (LuceneColumnHandle column : columnHandles) {
            types.add(column.getColumnType());
        }
        this.columnTypes = types.build();

        try {
            byteSource = Resources.asByteSource(split.getUri().toURL());
        }
        catch (MalformedURLException e) {
            // Throwables.propagate is deprecated; wrap the checked exception directly.
            throw new RuntimeException(e);
        }
    }

    @Override
    public List<Type> getColumnTypes()
    {
        return columnTypes;
    }

    @Override
    public RecordCursor cursor()
    {
        try {
            return new LuceneRecordCursor(columnHandles);
        }
        catch (ParseException e) {
            // Previously this printed the stack trace and returned null, which made
            // callers fail later with an unrelated NPE. Fail fast with the cause instead.
            throw new RuntimeException("Failed to create Lucene record cursor", e);
        }
    }
}
| apache-2.0 |
haijiaoqiufeng/CoolWeather | app/src/main/java/com/haiqiu/coolweather/MainActivity.java | 733 | package com.haiqiu.coolweather;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // If weather data has already been cached, jump straight to the
        // weather screen and drop this activity from the back stack.
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(this);
        String cachedWeather = preferences.getString("weather", null);
        if (cachedWeather != null) {
            startActivity(new Intent(this, WeatherActivity.class));
            finish();
        }
    }
}
| apache-2.0 |
dbrant/apps-android-wikipedia | app/src/test/java/org/wikipedia/edit/preview/EditPreviewClientTest.java | 1308 | package org.wikipedia.edit.preview;
import com.google.gson.stream.MalformedJsonException;
import org.junit.Test;
import org.wikipedia.dataclient.mwapi.MwException;
import org.wikipedia.test.MockRetrofitTest;
import io.reactivex.rxjava3.core.Observable;
public class EditPreviewClientTest extends MockRetrofitTest {
    /** Rendered HTML the mocked preview response is expected to contain. */
    private static final String EXPECTED_RESULT =
            "<div class=\"mf-section-0\" id=\"mf-section-0\"><p>\\o/\\n\\ntest12\\n\\n3</p>\n\n\n\n\n</div>";

    @Test public void testRequestSuccessHasResults() throws Throwable {
        enqueueFromFile("edit_preview.json");

        getObservable()
                .test()
                .await()
                .assertComplete()
                .assertNoErrors()
                .assertValue(preview -> EXPECTED_RESULT.equals(preview.result()));
    }

    @Test public void testRequestResponseApiError() throws Throwable {
        enqueueFromFile("api_error.json");

        getObservable()
                .test()
                .await()
                .assertError(MwException.class);
    }

    @Test public void testRequestResponseMalformed() throws Throwable {
        enqueueMalformed();

        getObservable()
                .test()
                .await()
                .assertError(MalformedJsonException.class);
    }

    /** Issues the edit-preview request against the mocked service. */
    private Observable<EditPreview> getObservable() {
        return getApiService().postEditPreview("User:Mhollo/sandbox", "wikitext of change");
    }
}
| apache-2.0 |
kmrov/kubernetes | pkg/registry/batch/cronjob/storage/storage_test.go | 5359 | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package storage
import (
"testing"
"k8s.io/api/batch/v2alpha1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apiserver/pkg/registry/generic"
genericregistrytest "k8s.io/apiserver/pkg/registry/generic/testing"
etcdtesting "k8s.io/apiserver/pkg/storage/etcd/testing"
"k8s.io/kubernetes/pkg/api/legacyscheme"
"k8s.io/kubernetes/pkg/api/testapi"
"k8s.io/kubernetes/pkg/apis/batch"
api "k8s.io/kubernetes/pkg/apis/core"
"k8s.io/kubernetes/pkg/registry/registrytest"
)
// TODO: allow for global factory override
// newStorage creates CronJob REST storage (and its /status sub-resource
// storage) backed by a freshly started embedded etcd test server. Callers are
// responsible for terminating the server and calling the store's DestroyFunc.
func newStorage(t *testing.T) (*REST, *StatusREST, *etcdtesting.EtcdTestServer) {
	etcdStorage, server := registrytest.NewEtcdStorage(t, batch.GroupName)
	restOptions := generic.RESTOptions{StorageConfig: etcdStorage, Decorator: generic.UndecoratedStorage, DeleteCollectionWorkers: 1}
	storage, statusStorage := NewREST(restOptions)
	return storage, statusStorage, server
}
// validNewCronJob returns a minimal CronJob that passes API validation; it is
// the shared fixture for the generic registry tests below.
func validNewCronJob() *batch.CronJob {
	return &batch.CronJob{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "foo",
			Namespace: metav1.NamespaceDefault,
		},
		Spec: batch.CronJobSpec{
			Schedule:          "* * * * ?",
			ConcurrencyPolicy: batch.AllowConcurrent,
			JobTemplate: batch.JobTemplateSpec{
				Spec: batch.JobSpec{
					Template: api.PodTemplateSpec{
						Spec: api.PodSpec{
							RestartPolicy: api.RestartPolicyOnFailure,
							DNSPolicy:     api.DNSClusterFirst,
							Containers:    []api.Container{{Name: "abc", Image: "image", ImagePullPolicy: api.PullIfNotPresent}},
						},
					},
				},
			},
		},
	}
}
// TestCreate exercises generic registry create semantics for CronJobs: a valid
// object is accepted and an object with an empty spec is rejected.
func TestCreate(t *testing.T) {
	// scheduled jobs should be tested only when batch/v2alpha1 is enabled
	if *testapi.Batch.GroupVersion() != v2alpha1.SchemeGroupVersion {
		return
	}
	storage, _, server := newStorage(t)
	defer server.Terminate(t)
	defer storage.Store.DestroyFunc()
	test := genericregistrytest.New(t, storage.Store, legacyscheme.Scheme)
	validCronJob := validNewCronJob()
	// Clear metadata so the registry test can fill it in itself.
	validCronJob.ObjectMeta = metav1.ObjectMeta{}
	test.TestCreate(
		// valid
		validCronJob,
		// invalid (empty spec)
		&batch.CronJob{
			Spec: batch.CronJobSpec{},
		},
	)
}
// TestUpdate verifies that changing the schedule to a valid cron expression is
// accepted, while an invalid cron expression is rejected on update.
func TestUpdate(t *testing.T) {
	// scheduled jobs should be tested only when batch/v2alpha1 is enabled
	if *testapi.Batch.GroupVersion() != v2alpha1.SchemeGroupVersion {
		return
	}
	storage, _, server := newStorage(t)
	defer server.Terminate(t)
	defer storage.Store.DestroyFunc()
	test := genericregistrytest.New(t, storage.Store, legacyscheme.Scheme)
	schedule := "1 1 1 1 ?"
	test.TestUpdate(
		// valid
		validNewCronJob(),
		// updateFunc
		func(obj runtime.Object) runtime.Object {
			object := obj.(*batch.CronJob)
			object.Spec.Schedule = schedule
			return object
		},
		// invalid updateFunc
		func(obj runtime.Object) runtime.Object {
			object := obj.(*batch.CronJob)
			object.Spec.Schedule = "* * *"
			return object
		},
	)
}
// TestDelete exercises generic registry delete semantics for CronJobs.
func TestDelete(t *testing.T) {
	// scheduled jobs should be tested only when batch/v2alpha1 is enabled
	if *testapi.Batch.GroupVersion() != v2alpha1.SchemeGroupVersion {
		return
	}
	storage, _, server := newStorage(t)
	defer server.Terminate(t)
	defer storage.Store.DestroyFunc()
	test := genericregistrytest.New(t, storage.Store, legacyscheme.Scheme)
	test.TestDelete(validNewCronJob())
}
// TestGet exercises generic registry get semantics for CronJobs.
func TestGet(t *testing.T) {
	// scheduled jobs should be tested only when batch/v2alpha1 is enabled
	if *testapi.Batch.GroupVersion() != v2alpha1.SchemeGroupVersion {
		return
	}
	storage, _, server := newStorage(t)
	defer server.Terminate(t)
	defer storage.Store.DestroyFunc()
	test := genericregistrytest.New(t, storage.Store, legacyscheme.Scheme)
	test.TestGet(validNewCronJob())
}
// TestList exercises generic registry list semantics for CronJobs.
func TestList(t *testing.T) {
	// scheduled jobs should be tested only when batch/v2alpha1 is enabled
	if *testapi.Batch.GroupVersion() != v2alpha1.SchemeGroupVersion {
		return
	}
	storage, _, server := newStorage(t)
	defer server.Terminate(t)
	defer storage.Store.DestroyFunc()
	test := genericregistrytest.New(t, storage.Store, legacyscheme.Scheme)
	test.TestList(validNewCronJob())
}
// TestWatch exercises watch semantics for CronJobs with matching and
// non-matching label and field selectors.
func TestWatch(t *testing.T) {
	// scheduled jobs should be tested only when batch/v2alpha1 is enabled
	if *testapi.Batch.GroupVersion() != v2alpha1.SchemeGroupVersion {
		return
	}
	storage, _, server := newStorage(t)
	defer server.Terminate(t)
	defer storage.Store.DestroyFunc()
	test := genericregistrytest.New(t, storage.Store, legacyscheme.Scheme)
	test.TestWatch(
		validNewCronJob(),
		// matching labels
		[]labels.Set{},
		// not matching labels
		[]labels.Set{
			{"x": "y"},
		},
		// matching fields
		[]fields.Set{},
		// not matching fields
		[]fields.Set{
			{"metadata.name": "xyz"},
			{"name": "foo"},
		},
	)
}
// TODO: test update /status
| apache-2.0 |
jitsi/ice4j | src/main/java/org/ice4j/socket/MultiplexingDatagramSocket.java | 18953 | /*
* ice4j, the OpenSource Java Solution for NAT and Firewall Traversal.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ice4j.socket;
import java.io.*;
import java.net.*;
import java.util.*;
/**
* Represents a <tt>DatagramSocket</tt> which allows filtering
* <tt>DatagramPacket</tt>s it reads from the network using
* <tt>DatagramPacketFilter</tt>s so that the <tt>DatagramPacket</tt>s do not
* get received through it but through associated
* <tt>MultiplexedDatagramSocket</tt>s.
*
* @author Lyubomir Marinov
*/
public class MultiplexingDatagramSocket
    extends SafeCloseDatagramSocket
{
    /**
     * The {@code MultiplexingXXXSocketSupport} which implements functionality
     * common to TCP and UDP sockets in order to facilitate implementers such as
     * this instance.
     */
    private final MultiplexingXXXSocketSupport<MultiplexedDatagramSocket>
        multiplexingXXXSocketSupport
            = new MultiplexingXXXSocketSupport<MultiplexedDatagramSocket>()
            {
                /**
                 * {@inheritDoc}
                 */
                @Override
                protected MultiplexedDatagramSocket createSocket(
                        DatagramPacketFilter filter)
                    throws SocketException
                {
                    return
                        new MultiplexedDatagramSocket(
                                MultiplexingDatagramSocket.this,
                                filter);
                }

                /**
                 * {@inheritDoc}
                 */
                @Override
                protected void doReceive(DatagramPacket p)
                    throws IOException
                {
                    // Delegates to super.receive(p) of the enclosing socket.
                    multiplexingXXXSocketSupportDoReceive(p);
                }

                /**
                 * {@inheritDoc}
                 */
                @Override
                protected void doSetReceiveBufferSize(int receiveBufferSize)
                    throws SocketException
                {
                    // Delegates to super.setReceiveBufferSize of the enclosing socket.
                    multiplexingXXXSocketSupportDoSetReceiveBufferSize(
                            receiveBufferSize);
                }

                /**
                 * {@inheritDoc}
                 */
                @Override
                protected SocketReceiveBuffer getReceived()
                {
                    return received;
                }

                /**
                 * {@inheritDoc}
                 */
                @Override
                protected SocketReceiveBuffer getReceived(
                        MultiplexedDatagramSocket socket)
                {
                    return socket.received;
                }
            };

    /**
     * The list of <tt>DatagramPacket</tt>s to be received through this
     * <tt>DatagramSocket</tt> i.e. not accepted by the list of
     * {@link MultiplexedDatagramSocket} of this instance at the time of the
     * reading from the network.
     */
    private final SocketReceiveBuffer received
        = new SocketReceiveBuffer(this::getReceiveBufferSize);

    /**
     * Buffer variable for storing the SO_TIMEOUT value set by the
     * last <tt>setSoTimeout()</tt> call. Although not strictly needed,
     * getting the locally stored value as opposed to retrieving it
     * from a parent <tt>getSoTimeout()</tt> call seems to
     * significantly improve efficiency, at least on some platforms.
     * NOTE(review): read in {@link #getSoTimeout()} without synchronization
     * while {@link #setSoTimeout(int)} is synchronized on the parent —
     * presumably benign racing on a 32-bit int; confirm.
     */
    private int soTimeout = 0;

    /**
     * Whether this socket should be kept open even when all of its
     * {@link MultiplexedDatagramSocket} are closed (if the value is
     * {@code true}), or it should be closed when the last of its
     * {@link MultiplexedDatagramSocket} is closed (if the value is
     * {@code false}).
     */
    private final boolean persistent;

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering and binds it to any available
     * port on the local host machine. The socket will be bound to the wildcard
     * address, an IP address chosen by the kernel.
     *
     * @throws SocketException if the socket could not be opened, or the socket
     * could not bind to the specified local port
     * @see DatagramSocket#DatagramSocket()
     */
    public MultiplexingDatagramSocket()
        throws SocketException
    {
        this(false);
    }

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering and binds it to any available
     * port on the local host machine. The socket will be bound to the wildcard
     * address, an IP address chosen by the kernel.
     *
     * @throws SocketException if the socket could not be opened, or the socket
     * could not bind to the specified local port
     * @param persistent whether this socket should be kept open after all of
     * its {@link MultiplexedDatagramSocket}s are closed.
     * @see DatagramSocket#DatagramSocket()
     */
    public MultiplexingDatagramSocket(boolean persistent)
        throws SocketException
    {
        this.persistent = persistent;
    }

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering on a specific
     * <tt>DatagramSocket</tt>.
     *
     * @param delegate the <tt>DatagramSocket</tt> on which
     * <tt>DatagramPacket</tt> filtering is to be enabled by the new instance
     * @throws SocketException if anything goes wrong while initializing the new
     * instance
     */
    public MultiplexingDatagramSocket(DatagramSocket delegate)
        throws SocketException
    {
        this(delegate, false);
    }

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering on a specific
     * <tt>DatagramSocket</tt>.
     *
     * @param delegate the <tt>DatagramSocket</tt> on which
     * <tt>DatagramPacket</tt> filtering is to be enabled by the new instance
     * @param persistent whether this socket should be kept open after all of
     * its {@link MultiplexedDatagramSocket}s are closed.
     * @throws SocketException if anything goes wrong while initializing the new
     * instance
     */
    public MultiplexingDatagramSocket(
            DatagramSocket delegate,
            boolean persistent)
        throws SocketException
    {
        super(delegate);

        this.persistent = persistent;
    }

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering and binds it to the specified
     * port on the local host machine. The socket will be bound to the wildcard
     * address, an IP address chosen by the kernel.
     *
     * @param port the port to bind the new socket to
     * @throws SocketException if the socket could not be opened, or the socket
     * could not bind to the specified local port
     * @see DatagramSocket#DatagramSocket(int)
     */
    public MultiplexingDatagramSocket(int port)
        throws SocketException
    {
        this(port, false);
    }

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering and binds it to the specified
     * port on the local host machine. The socket will be bound to the wildcard
     * address, an IP address chosen by the kernel.
     *
     * @param port the port to bind the new socket to
     * @param persistent whether this socket should be kept open after all of
     * its {@link MultiplexedDatagramSocket}s are closed.
     * @throws SocketException if the socket could not be opened, or the socket
     * could not bind to the specified local port
     * @see DatagramSocket#DatagramSocket(int)
     */
    public MultiplexingDatagramSocket(int port, boolean persistent)
        throws SocketException
    {
        super(port);

        this.persistent = persistent;
    }

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering, bound to the specified local
     * address. The local port must be between 0 and 65535 inclusive. If the IP
     * address is 0.0.0.0, the socket will be bound to the wildcard address, an
     * IP address chosen by the kernel.
     *
     * @param port the local port to bind the new socket to
     * @param laddr the local address to bind the new socket to
     * @throws SocketException if the socket could not be opened, or the socket
     * could not bind to the specified local port
     * @see DatagramSocket#DatagramSocket(int, InetAddress)
     */
    public MultiplexingDatagramSocket(int port, InetAddress laddr)
        throws SocketException
    {
        this(port, laddr, false);
    }

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering, bound to the specified local
     * address. The local port must be between 0 and 65535 inclusive. If the IP
     * address is 0.0.0.0, the socket will be bound to the wildcard address, an
     * IP address chosen by the kernel.
     *
     * @param port the local port to bind the new socket to
     * @param laddr the local address to bind the new socket to
     * @param persistent whether this socket should be kept open after all of
     * its {@link MultiplexedDatagramSocket}s are closed.
     * @throws SocketException if the socket could not be opened, or the socket
     * could not bind to the specified local port
     * @see DatagramSocket#DatagramSocket(int, InetAddress)
     */
    public MultiplexingDatagramSocket(
            int port,
            InetAddress laddr,
            boolean persistent)
        throws SocketException
    {
        super(port, laddr);

        this.persistent = persistent;
    }

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering, bound to the specified local
     * socket address.
     * <p>
     * If the specified local socket address is <tt>null</tt>, creates an
     * unbound socket.
     * </p>
     *
     * @param bindaddr local socket address to bind, or <tt>null</tt> for an
     * unbound socket
     * @throws SocketException if the socket could not be opened, or the socket
     * could not bind to the specified local port
     * @see DatagramSocket#DatagramSocket(SocketAddress)
     */
    public MultiplexingDatagramSocket(SocketAddress bindaddr)
        throws SocketException
    {
        this(bindaddr, false);
    }

    /**
     * Initializes a new <tt>MultiplexingDatagramSocket</tt> instance which is
     * to enable <tt>DatagramPacket</tt> filtering, bound to the specified local
     * socket address.
     * <p>
     * If the specified local socket address is <tt>null</tt>, creates an
     * unbound socket.
     * </p>
     *
     * @param bindaddr local socket address to bind, or <tt>null</tt> for an
     * unbound socket
     * @param persistent whether this socket should be kept open after all of
     * its {@link MultiplexedDatagramSocket}s are closed.
     * @throws SocketException if the socket could not be opened, or the socket
     * could not bind to the specified local port
     * @see DatagramSocket#DatagramSocket(SocketAddress)
     */
    public MultiplexingDatagramSocket(
            SocketAddress bindaddr,
            boolean persistent)
        throws SocketException
    {
        super(bindaddr);

        this.persistent = persistent;
    }

    /**
     * Closes a specific <tt>MultiplexedDatagramSocket</tt> which filters
     * <tt>DatagramPacket</tt>s away from this <tt>DatagramSocket</tt>.
     *
     * @param multiplexed the <tt>MultiplexedDatagramSocket</tt> to close
     */
    void close(MultiplexedDatagramSocket multiplexed)
    {
        // If that was the last multiplexed socket and this instance is not
        // persistent, release the underlying socket as well.
        if (!multiplexingXXXSocketSupport.close(multiplexed)
                && !persistent)
        {
            close();
        }
    }

    /**
     * Gets a <tt>MultiplexedDatagramSocket</tt> which filters
     * <tt>DatagramPacket</tt>s away from this <tt>DatagramSocket</tt> using a
     * specific <tt>DatagramPacketFilter</tt>. If such a
     * <tt>MultiplexedDatagramSocket</tt> does not exist in this instance, it is
     * created.
     *
     * @param filter the <tt>DatagramPacketFilter</tt> to get a
     * <tt>MultiplexedDatagramSocket</tt> for
     * @return a <tt>MultiplexedDatagramSocket</tt> which filters
     * <tt>DatagramPacket</tt>s away from this <tt>DatagramSocket</tt> using the
     * specified <tt>filter</tt>
     * @throws SocketException if creating the
     * <tt>MultiplexedDatagramSocket</tt> for the specified <tt>filter</tt>
     * fails
     */
    public MultiplexedDatagramSocket getSocket(DatagramPacketFilter filter)
        throws SocketException
    {
        return getSocket(filter, /* create */ true);
    }

    /**
     * Gets a <tt>MultiplexedDatagramSocket</tt> which filters
     * <tt>DatagramPacket</tt>s away from this <tt>DatagramSocket</tt> using a
     * specific <tt>DatagramPacketFilter</tt>. If <tt>create</tt> is true and
     * such a <tt>MultiplexedDatagramSocket</tt> does not exist in this
     * instance, it is created.
     *
     * @param filter the <tt>DatagramPacketFilter</tt> to get a
     * <tt>MultiplexedDatagramSocket</tt> for
     * @param create whether or not to create a
     * <tt>MultiplexedDatagramSocket</tt> if this instance does not already have
     * a socket for the given <tt>filter</tt>.
     * @return a <tt>MultiplexedDatagramSocket</tt> which filters
     * <tt>DatagramPacket</tt>s away from this <tt>DatagramSocket</tt> using the
     * specified <tt>filter</tt>
     * @throws SocketException if creating the
     * <tt>MultiplexedDatagramSocket</tt> for the specified <tt>filter</tt>
     * fails.
     */
    public MultiplexedDatagramSocket getSocket(
            DatagramPacketFilter filter,
            boolean create)
        throws SocketException
    {
        return multiplexingXXXSocketSupport.getSocket(filter, create);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getSoTimeout()
    {
        // Returns the locally cached value; see the soTimeout field docs.
        return soTimeout;
    }

    /**
     * Implements {@link MultiplexingXXXSocketSupport#doReceive(DatagramPacket)}
     * on behalf of {@link #multiplexingXXXSocketSupport}. Receives a
     * {@code DatagramPacket} from this socket.
     *
     * @param p the {@code DatagramPacket} into which to place the incoming data
     * @throws IOException if an I/O error occurs
     */
    private void multiplexingXXXSocketSupportDoReceive(DatagramPacket p)
        throws IOException
    {
        super.receive(p);
    }

    /**
     * Implements
     * {@link MultiplexingXXXSocketSupport#doSetReceiveBufferSize(int)} on
     * behalf of {@link #multiplexingXXXSocketSupport}. Sets the
     * {@code SO_RCVBUF} option to the specified value for this
     * {@code DatagramSocket}. The {@code SO_RCVBUF} option is used by the
     * network implementation as a hint to size the underlying network I/O
     * buffers. The {@code SO_RCVBUF} setting may also be used by the network
     * implementation to determine the maximum size of the packet that can be
     * received on this socket.
     *
     * @param receiveBufferSize the size to which to set the receive buffer size
     * @throws SocketException if there is an error in the underlying protocol,
     * such as a UDP error
     */
    private void multiplexingXXXSocketSupportDoSetReceiveBufferSize(
            int receiveBufferSize)
        throws SocketException
    {
        super.setReceiveBufferSize(receiveBufferSize);
    }

    /**
     * Receives a datagram packet from this socket. The <tt>DatagramPacket</tt>s
     * returned by this method do not match any of the
     * <tt>DatagramPacketFilter</tt>s of the <tt>MultiplexedDatagramSocket</tt>s
     * associated with this instance at the time of their receipt. When this
     * method returns, the <tt>DatagramPacket</tt>'s buffer is filled with the
     * data received. The datagram packet also contains the sender's IP address,
     * and the port number on the sender's machine.
     * <p>
     * This method blocks until a datagram is received. The <tt>length</tt>
     * field of the datagram packet object contains the length of the received
     * message. If the message is longer than the packet's length, the message
     * is truncated.
     * </p>
     *
     * @param p the <tt>DatagramPacket</tt> into which to place the incoming
     * data
     * @throws IOException if an I/O error occurs
     * @throws SocketTimeoutException if <tt>setSoTimeout(int)</tt> was
     * previously called and the timeout has expired
     * @see DatagramSocket#receive(DatagramPacket)
     */
    @Override
    public void receive(DatagramPacket p)
        throws IOException
    {
        multiplexingXXXSocketSupport.receive(received, p, soTimeout);
    }

    /**
     * Receives a <tt>DatagramPacket</tt> from this <tt>DatagramSocket</tt> upon
     * request from a specific <tt>MultiplexedDatagramSocket</tt>.
     *
     * @param multiplexed the <tt>MultiplexedDatagramSocket</tt> which requests
     * the receipt of a <tt>DatagramPacket</tt> from the network
     * @param p the <tt>DatagramPacket</tt> to receive the data from the network
     * @throws IOException if an I/O error occurs
     * @throws SocketTimeoutException if <tt>setSoTimeout(int)</tt> was
     * previously called on <tt>multiplexed</tt> and the timeout has expired
     */
    void receive(MultiplexedDatagramSocket multiplexed, DatagramPacket p)
        throws IOException
    {
        multiplexingXXXSocketSupport.receive(
                multiplexed.received,
                p,
                multiplexed.getSoTimeout());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setReceiveBufferSize(int receiveBufferSize)
        throws SocketException
    {
        multiplexingXXXSocketSupport.setReceiveBufferSize(receiveBufferSize);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setSoTimeout(int timeout)
        throws SocketException
    {
        super.setSoTimeout(timeout);

        // Cache the value locally for a cheaper getSoTimeout().
        soTimeout = timeout;
    }
}
| apache-2.0 |
@extends('layouts.app')

{{-- Page <title> and meta description are taken from the topic itself. --}}
@section('title', $topic->title)
@section('description', $topic->excerpt)

@section('content')
<div class="row">

  {{-- Left column: author card (hidden on small screens). --}}
  <div class="col-lg-3 col-md-3 hidden-sm hidden-xs author-info">
    <div class="panel panel-default">
      <div class="panel-body">
        <div class="text-center">
          作者:{{ $topic->user->name }}
        </div>
        <hr>
        <div class="media">
          <div align="center">
            <a href="{{ route('users.show', $topic->user->id) }}">
              <img class="thumbnail img-responsive" src="{{ $topic->user->avatar }}" width="300px" height="300px">
            </a>
          </div>
        </div>
      </div>
    </div>
  </div>

  {{-- Right column: topic body, owner-only actions, and replies. --}}
  <div class="col-lg-9 col-md-9 col-sm-12 col-xs-12 topic-content">
    <div class="panel panel-default">
      <div class="panel-body">
        <h1 class="text-center">
          {{ $topic->title }}
        </h1>

        <div class="article-meta text-center">
          {{ $topic->created_at->diffForHumans() }}
          ⋅
          <span class="glyphicon glyphicon-comment" aria-hidden="true"></span>
          {{ $topic->reply_count }}
        </div>

        {{-- Body is pre-sanitized HTML, hence the unescaped output. --}}
        <div class="topic-body">
          {!! $topic->body !!}
        </div>

        {{-- Edit/delete controls, visible only to users authorized by the TopicPolicy 'update' ability. --}}
        @can('update', $topic)
          <div class="operate">
            <hr>
            <a href="{{ route('topics.edit', $topic->id) }}" class="btn btn-default btn-xs pull-left" role="button">
              <i class="glyphicon glyphicon-edit"></i> 编辑
            </a>
            <form action="{{ route('topics.destroy', $topic->id) }}" method="post">
              {{ csrf_field() }}
              {{ method_field('DELETE') }}
              <button type="submit" class="btn btn-default btn-xs pull-left" style="margin-left: 6px">
                <i class="glyphicon glyphicon-trash"></i>
                删除
              </button>
            </form>
          </div>
        @endcan
      </div>
    </div>

    {{-- User reply list --}}
    <div class="panel panel-default topic-reply">
      <div class="panel-body">
        {{-- Reply box is rendered only for authenticated users. --}}
        @includeWhen(Auth::check(), 'topics._reply_box', ['topic' => $topic])
        @include('topics._reply_list', ['replies' => $topic->replies()->with('user')->get()])
      </div>
    </div>
  </div>
</div>
@stop
AlekseyZhelo/idea-mob-plugin | src/main/java/com/alekseyzhelo/evilislands/mobplugin/script/formatting/codeStyle/EIIndentOptionsEditor.java | 688 | package com.alekseyzhelo.evilislands.mobplugin.script.formatting.codeStyle;
import com.intellij.application.options.IndentOptionsEditor;
/**
 * Indent options editor for the EI script code style settings page.
 * <p>
 * Behaves exactly like the stock {@link IndentOptionsEditor} except that the
 * "Use tab character" checkbox is kept permanently disabled and hidden —
 * presumably because EI script formatting is spaces-only (TODO confirm).
 */
class EIIndentOptionsEditor extends IndentOptionsEditor {
    @Override
    public void showStandardOptions(String... optionNames) {
        super.showStandardOptions(optionNames);
        // The superclass may have re-enabled/re-shown the checkbox; hide it again.
        myCbUseTab.setEnabled(false);
        myCbUseTab.setVisible(false);
    }

    @Override
    public void setEnabled(boolean enabled) {
        super.setEnabled(enabled);
        // Keep the tab checkbox disabled regardless of the requested state.
        myCbUseTab.setEnabled(false);
    }

    @Override
    protected void setVisible(boolean visible) {
        super.setVisible(visible);
        // Keep the tab checkbox hidden regardless of the requested state.
        myCbUseTab.setVisible(false);
    }
}
| apache-2.0 |
DiligentGraphics/DiligentCore | Tests/DiligentCoreAPITest/src/SeparateTextureSamplerTest.cpp | 7858 | /*
* Copyright 2019-2022 Diligent Graphics LLC
* Copyright 2015-2019 Egor Yusov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* In no event and under no legal theory, whether in tort (including negligence),
* contract, or otherwise, unless required by applicable law (such as deliberate
* and grossly negligent acts) or agreed to in writing, shall any Contributor be
* liable for any damages, including any direct, indirect, special, incidental,
* or consequential damages of any character arising as a result of this License or
* out of the use or inability to use the software (including but not limited to damages
* for loss of goodwill, work stoppage, computer failure or malfunction, or any and
* all other commercial damages or losses), even if such Contributor has been advised
* of the possibility of such damages.
*/
#include "TestingEnvironment.hpp"
#include "gtest/gtest.h"
using namespace Diligent;
using namespace Diligent::Testing;
namespace
{
// Minimal vertex shader: emits a single degenerate position. The test below
// only exercises resource binding, not rendered output.
static const char g_VSShaderSource[] = R"(
void VSMain(out float4 pos : SV_POSITION)
{
    pos = float4(0.0, 0.0, 0.0, 0.0);
}
)";
// Pixel shader declaring textures and samplers as separate HLSL objects,
// including two sampler arrays (g_Sam3, g_Sam4). Every sampler is used with a
// texture at sample time, so each must be bound for the draw to be valid.
static const char g_PSShaderSource[] = R"(
Texture2D    g_Tex;
SamplerState g_Sam;
Texture2D    g_Tex2;
SamplerState g_Sam2;
SamplerState g_Sam3[2];
SamplerState g_Sam4[2];
void PSMain(out float4 col : SV_TARGET)
{
    col = g_Tex.Sample(g_Sam, float2(0.5, 0.5)) +
          g_Tex2.Sample(g_Sam2, float2(0.5, 0.5)) +
          g_Tex2.Sample(g_Sam3[0], float2(0.5, 0.5)) +
          g_Tex2.Sample(g_Sam3[1], float2(0.5, 0.5)) +
          g_Tex2.Sample(g_Sam4[0], float2(0.5, 0.5)) +
          g_Tex2.Sample(g_Sam4[1], float2(0.5, 0.5));
}
)";
// Smoke test for HLSL-style separate texture/sampler objects:
//  - builds a PSO whose pixel shader declares textures and samplers separately,
//    with sampler arrays and one immutable sampler (g_Sam2);
//  - checks the immutable sampler is NOT exposed as a bindable SRB variable;
//  - verifies SRB variable index <-> name round-trips;
//  - ends with a draw call to make sure the bindings commit cleanly.
TEST(SeparateTextureSampler, CreateSampler)
{
    auto* pEnv = TestingEnvironment::GetInstance();
    auto* pDevice = pEnv->GetDevice();
    auto* pContext = pEnv->GetDeviceContext();
    if (pDevice->GetDeviceInfo().IsGLDevice())
    {
        // GLSL combines textures and samplers, so this scenario cannot run there.
        GTEST_SKIP() << "Separate texture samplers are not supported in OpenGL";
    }
    TestingEnvironment::ScopedReset EnvironmentAutoReset;

    // Compile the trivial vertex shader.
    ShaderCreateInfo Attrs;
    Attrs.Source = g_VSShaderSource;
    Attrs.EntryPoint = "VSMain";
    Attrs.Desc.ShaderType = SHADER_TYPE_VERTEX;
    Attrs.Desc.Name = "VSMain (TestSeparateTextureSampler)";
    Attrs.SourceLanguage = SHADER_SOURCE_LANGUAGE_HLSL;
    Attrs.ShaderCompiler = pEnv->GetDefaultCompiler(Attrs.SourceLanguage);
    RefCntAutoPtr<IShader> pVS;
    pDevice->CreateShader(Attrs, &pVS);
    ASSERT_TRUE(pVS);

    // Compile the pixel shader that uses the separate samplers.
    Attrs.Source = g_PSShaderSource;
    Attrs.EntryPoint = "PSMain";
    Attrs.Desc.ShaderType = SHADER_TYPE_PIXEL;
    Attrs.Desc.Name = "PSMain (TestSeparateTextureSampler)";
    RefCntAutoPtr<IShader> pPS;
    pDevice->CreateShader(Attrs, &pPS);
    ASSERT_TRUE(pPS);

    GraphicsPipelineStateCreateInfo PSOCreateInfo;
    PipelineStateDesc& PSODesc = PSOCreateInfo.PSODesc;
    GraphicsPipelineDesc& GraphicsPipeline = PSOCreateInfo.GraphicsPipeline;
    PSOCreateInfo.pVS = pVS;
    PSOCreateInfo.pPS = pPS;
    GraphicsPipeline.PrimitiveTopology = PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
    GraphicsPipeline.NumRenderTargets = 1;
    GraphicsPipeline.RTVFormats[0] = TEX_FORMAT_RGBA8_UNORM;
    GraphicsPipeline.DSVFormat = TEX_FORMAT_UNKNOWN;
    GraphicsPipeline.DepthStencilDesc.DepthEnable = false;
    // g_Sam3 is deliberately not listed, so it is bound as a static variable
    // through the PSO below; g_Sam2 is replaced by an immutable sampler.
    ShaderResourceVariableDesc Vars[] =
        {
            {SHADER_TYPE_PIXEL, "g_Tex", SHADER_RESOURCE_VARIABLE_TYPE_MUTABLE},
            {SHADER_TYPE_PIXEL, "g_Sam", SHADER_RESOURCE_VARIABLE_TYPE_DYNAMIC},
            {SHADER_TYPE_PIXEL, "g_Tex2", SHADER_RESOURCE_VARIABLE_TYPE_DYNAMIC},
            {SHADER_TYPE_PIXEL, "g_Sam2", SHADER_RESOURCE_VARIABLE_TYPE_MUTABLE},
            {SHADER_TYPE_PIXEL, "g_Sam4", SHADER_RESOURCE_VARIABLE_TYPE_MUTABLE} //
        };
    PSODesc.ResourceLayout.Variables = Vars;
    PSODesc.ResourceLayout.NumVariables = _countof(Vars);
    // The immutable sampler is baked into the PSO and supersedes the mutable
    // g_Sam2 entry above (verified after SRB creation).
    ImmutableSamplerDesc ImtblSamplers[] =
        {
            {SHADER_TYPE_PIXEL, "g_Sam2", SamplerDesc{}} //
        };
    PSODesc.ResourceLayout.ImmutableSamplers = ImtblSamplers;
    PSODesc.ResourceLayout.NumImmutableSamplers = _countof(ImtblSamplers);
    RefCntAutoPtr<IPipelineState> pPSO;
    pDevice->CreateGraphicsPipelineState(PSOCreateInfo, &pPSO);
    ASSERT_TRUE(pPSO);

    // Dummy texture to satisfy g_Tex / g_Tex2.
    TextureDesc TexDesc;
    TexDesc.Name = "Separate sampler texture test";
    TexDesc.Type = RESOURCE_DIM_TEX_2D;
    TexDesc.Width = 256;
    TexDesc.Height = 256;
    TexDesc.Format = TEX_FORMAT_RGBA8_UNORM;
    TexDesc.Usage = USAGE_DEFAULT;
    TexDesc.BindFlags = BIND_SHADER_RESOURCE;
    RefCntAutoPtr<ITexture> pTexture;
    pDevice->CreateTexture(TexDesc, nullptr, &pTexture);
    ASSERT_TRUE(pTexture);

    RefCntAutoPtr<ISampler> pSampler;
    pDevice->CreateSampler(SamplerDesc{}, &pSampler);
    IDeviceObject* ppSamplers[2] = {pSampler, pSampler};
    // Static variable: bound once via the PSO, shared by all SRBs.
    pPSO->GetStaticVariableByName(SHADER_TYPE_PIXEL, "g_Sam3")->SetArray(ppSamplers, 0, 2);

    RefCntAutoPtr<IShaderResourceBinding> pSRB;
    pPSO->CreateShaderResourceBinding(&pSRB, true);
    pSRB->GetVariableByName(SHADER_TYPE_PIXEL, "g_Tex")->Set(pTexture->GetDefaultView(TEXTURE_VIEW_SHADER_RESOURCE));
    pSRB->GetVariableByName(SHADER_TYPE_PIXEL, "g_Sam")->Set(pSampler);
    pSRB->GetVariableByName(SHADER_TYPE_PIXEL, "g_Sam4")->SetArray(ppSamplers, 0, 2);
    pSRB->GetVariableByName(SHADER_TYPE_PIXEL, "g_Tex2")->Set(pTexture->GetDefaultView(TEXTURE_VIEW_SHADER_RESOURCE));
    // The immutable sampler must not appear as a bindable SRB variable.
    ASSERT_TRUE(pSRB->GetVariableByName(SHADER_TYPE_PIXEL, "g_Sam2") == nullptr);

    // Remaining bindable variables: g_Tex, g_Sam, g_Tex2, g_Sam4.
    auto VarCount = pSRB->GetVariableCount(SHADER_TYPE_PIXEL);
    EXPECT_EQ(VarCount, 4u);
    for (Uint32 v = 0; v < VarCount; ++v)
    {
        // Index -> variable -> name -> variable must round-trip to the same object.
        auto* pVar = pSRB->GetVariableByIndex(SHADER_TYPE_PIXEL, v);
        EXPECT_EQ(pVar->GetIndex(), v);
        EXPECT_TRUE(pVar->GetType() == SHADER_RESOURCE_VARIABLE_TYPE_MUTABLE || pVar->GetType() == SHADER_RESOURCE_VARIABLE_TYPE_DYNAMIC);
        ShaderResourceDesc ResDesc;
        pVar->GetResourceDesc(ResDesc);
        auto pVar2 = pSRB->GetVariableByName(SHADER_TYPE_PIXEL, ResDesc.Name);
        EXPECT_EQ(pVar, pVar2);
    }

    // Finally, make sure the bindings commit and a draw call goes through.
    TextureDesc RenderTargetDesc;
    RenderTargetDesc.Type = RESOURCE_DIM_TEX_2D;
    RenderTargetDesc.Width = 256;
    RenderTargetDesc.Height = 256;
    RenderTargetDesc.BindFlags = BIND_RENDER_TARGET | BIND_SHADER_RESOURCE;
    RenderTargetDesc.Format = TEX_FORMAT_RGBA8_UNORM;
    RenderTargetDesc.Name = "TestSeparateTextureSampler: test render target";
    RefCntAutoPtr<ITexture> pRenderTarget;
    pDevice->CreateTexture(RenderTargetDesc, nullptr, &pRenderTarget);
    ASSERT_TRUE(pRenderTarget);
    ITextureView* pRTV[] = {pRenderTarget->GetDefaultView(TEXTURE_VIEW_RENDER_TARGET)};
    pContext->SetRenderTargets(1, pRTV, nullptr, RESOURCE_STATE_TRANSITION_MODE_TRANSITION);
    float Zero[4] = {};
    pContext->ClearRenderTarget(pRTV[0], Zero, RESOURCE_STATE_TRANSITION_MODE_VERIFY);
    pContext->SetPipelineState(pPSO);
    pContext->CommitShaderResources(pSRB, RESOURCE_STATE_TRANSITION_MODE_TRANSITION);
    DrawAttribs DrawAttrs(3, DRAW_FLAG_VERIFY_ALL);
    pContext->Draw(DrawAttrs);
}
} // namespace
| apache-2.0 |
fishjd/HappyNewMoonWithReport | src/main/java/happynewmoonwithreport/opcode/comparison/I64_eq.java | 2458 | /*
* Copyright 2017 - 2021 Whole Bean Software, LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package happynewmoonwithreport.opcode.comparison;
import happynewmoonwithreport.WasmInstanceInterface;
import happynewmoonwithreport.WasmRuntimeException;
import happynewmoonwithreport.WasmStack;
import happynewmoonwithreport.type.I32;
import happynewmoonwithreport.type.I64;
import java.util.UUID;
/**
* I64 equals (i64_eq)
* <br>
* <b>Note this is the same for all Relative Operations</b>
* <br>
* t.relop
* <ol>
* <li>
* Assert: due to validation, two values of value type t are on the top of the stack.
* </li>
* <li>
* Pop the value t.const c1 from the stack.
* </li>
* <li>
* Let c be the result of computing relopt(c1).
* </li>
* <li>
* Push the value i64.const c to the stack.
* <br>
* </li>
* </ol>
* <br>
* Source: <a href="https://webassembly.github.io/spec/core/appendix/index-instructions.html"
* target="_top"> https://webassembly.github.io/spec/core/appendix/index-instructions.html </a>
*/
public class I64_eq {
	private WasmInstanceInterface instance;

	private I64_eq() {
		super();
	}

	public I64_eq(WasmInstanceInterface instance) {
		this();
		this.instance = instance;
	}

	/**
	 * Execute the opcode.
	 * <p>
	 * Pops two {@code I64} operands from the stack, compares them for equality
	 * and pushes an {@code I32} result: 1 when equal, 0 otherwise.
	 *
	 * @throws WasmRuntimeException if either value on top of the stack is not an {@code I64}
	 */
	public void execute() {
		WasmStack<Object> stack = instance.stack();
		// Validate operand types before popping (idiomatic negation instead of "== false").
		if (!(stack.peek() instanceof I64)) {
			throw new WasmRuntimeException(UUID.fromString("e9b2cccf-1977-4a6b-9cb2-00d101c1203c"),
				"I64_eq: Value2 type is incorrect");
		}
		I64 value2 = (I64) stack.pop();

		if (!(stack.peek() instanceof I64)) {
			throw new WasmRuntimeException(UUID.fromString("b7d4d9bd-742c-4a78-9d90-2d4e1f3292b0"),
				"I64_eq: Value1 type is incorrect");
		}
		I64 value1 = (I64) stack.pop();

		// Plain int avoids needless Integer boxing for a 0/1 flag.
		int iResult = value2.equals(value1) ? 1 : 0;
		stack.push(new I32(iResult));
	}
}
| apache-2.0 |
lzl6261/uml-example | src/main/java/com/example/demo/factory/PieChart.java | 288 | package com.example.demo.factory;
/**
* @author merlin
* @create 2017-07-07 上午11:09
*/
public class PieChart implements Chart {
public PieChart() {
System.out.println("创建饼状图!");
}
public void display() {
System.out.println("显示饼状图!");
}
}
| apache-2.0 |
xasx/camunda-bpm-platform | engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/dto/history/HistoricTaskInstanceDto.java | 4990 | /*
* Copyright © 2013-2018 camunda services GmbH and various authors (info@camunda.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest.dto.history;
import java.util.Date;
import org.camunda.bpm.engine.history.HistoricTaskInstance;
/**
* @author Roman Smirnov
*
*/
/**
 * REST transfer object for a single {@link HistoricTaskInstance}.
 *
 * <p>Instances are populated via
 * {@link #fromHistoricTaskInstance(HistoricTaskInstance)} and exposed
 * read-only through getters. All fields are {@code protected} so subclasses
 * can extend the DTO.</p>
 *
 * @author Roman Smirnov
 */
public class HistoricTaskInstanceDto {

  protected String id;
  protected String processDefinitionKey;
  protected String processDefinitionId;
  protected String processInstanceId;
  protected String executionId;
  protected String caseDefinitionKey;
  protected String caseDefinitionId;
  protected String caseInstanceId;
  protected String caseExecutionId;
  protected String activityInstanceId;
  protected String name;
  protected String description;
  protected String deleteReason;
  protected String owner;
  protected String assignee;
  protected Date startTime;
  protected Date endTime;
  protected Long duration;
  protected String taskDefinitionKey;
  protected int priority;
  protected Date due;
  protected String parentTaskId;
  protected Date followUp;
  // Consistency fix: this field was "private" while every other field is
  // "protected", which blocked subclass access to the tenant id alone.
  protected String tenantId;
  protected Date removalTime;
  protected String rootProcessInstanceId;

  public String getId() {
    return id;
  }

  public String getProcessDefinitionKey() {
    return processDefinitionKey;
  }

  public String getProcessDefinitionId() {
    return processDefinitionId;
  }

  public String getProcessInstanceId() {
    return processInstanceId;
  }

  public String getExecutionId() {
    return executionId;
  }

  public String getCaseDefinitionKey() {
    return caseDefinitionKey;
  }

  public String getCaseDefinitionId() {
    return caseDefinitionId;
  }

  public String getCaseInstanceId() {
    return caseInstanceId;
  }

  public String getCaseExecutionId() {
    return caseExecutionId;
  }

  public String getActivityInstanceId() {
    return activityInstanceId;
  }

  public String getName() {
    return name;
  }

  public String getDescription() {
    return description;
  }

  public String getDeleteReason() {
    return deleteReason;
  }

  public String getOwner() {
    return owner;
  }

  public String getAssignee() {
    return assignee;
  }

  public Date getStartTime() {
    return startTime;
  }

  public Date getEndTime() {
    return endTime;
  }

  public Long getDuration() {
    return duration;
  }

  public String getTaskDefinitionKey() {
    return taskDefinitionKey;
  }

  public int getPriority() {
    return priority;
  }

  public Date getDue() {
    return due;
  }

  public String getParentTaskId() {
    return parentTaskId;
  }

  public Date getFollowUp() {
    return followUp;
  }

  public String getTenantId() {
    return tenantId;
  }

  public Date getRemovalTime() {
    return removalTime;
  }

  public String getRootProcessInstanceId() {
    return rootProcessInstanceId;
  }

  /**
   * Maps an engine {@link HistoricTaskInstance} onto a new DTO.
   *
   * @param taskInstance the historic task instance to convert
   * @return the populated DTO
   */
  public static HistoricTaskInstanceDto fromHistoricTaskInstance(HistoricTaskInstance taskInstance) {

    HistoricTaskInstanceDto dto = new HistoricTaskInstanceDto();

    dto.id = taskInstance.getId();
    dto.processDefinitionKey = taskInstance.getProcessDefinitionKey();
    dto.processDefinitionId = taskInstance.getProcessDefinitionId();
    dto.processInstanceId = taskInstance.getProcessInstanceId();
    dto.executionId = taskInstance.getExecutionId();
    dto.caseDefinitionKey = taskInstance.getCaseDefinitionKey();
    dto.caseDefinitionId = taskInstance.getCaseDefinitionId();
    dto.caseInstanceId = taskInstance.getCaseInstanceId();
    dto.caseExecutionId = taskInstance.getCaseExecutionId();
    dto.activityInstanceId = taskInstance.getActivityInstanceId();
    dto.name = taskInstance.getName();
    dto.description = taskInstance.getDescription();
    dto.deleteReason = taskInstance.getDeleteReason();
    dto.owner = taskInstance.getOwner();
    dto.assignee = taskInstance.getAssignee();
    dto.startTime = taskInstance.getStartTime();
    dto.endTime = taskInstance.getEndTime();
    // Engine exposes the duration in milliseconds.
    dto.duration = taskInstance.getDurationInMillis();
    dto.taskDefinitionKey = taskInstance.getTaskDefinitionKey();
    dto.priority = taskInstance.getPriority();
    dto.due = taskInstance.getDueDate();
    dto.parentTaskId = taskInstance.getParentTaskId();
    dto.followUp = taskInstance.getFollowUpDate();
    dto.tenantId = taskInstance.getTenantId();
    dto.removalTime = taskInstance.getRemovalTime();
    dto.rootProcessInstanceId = taskInstance.getRootProcessInstanceId();

    return dto;
  }
}
| apache-2.0 |
castelom/Skynet | lib/p2p.py | 2579 | import socket
import threading
from lib.comms import StealthConn
from lib.files import p2p_download_file
# Keep track of where our server is
# This is primarily so we don't try to talk to ourselves
server_port = 1337
def find_bot():
    """Scan localhost ports (starting at 1337) for another bot and connect.

    Skips this bot's own server port. A fresh socket is created for every
    attempt: a socket object whose connect() has failed cannot reliably be
    reused for another connection, which the original single-socket loop did.
    Failed sockets are closed to avoid leaking file descriptors.

    Returns:
        StealthConn: an encrypted client-side connection to the first bot found.
    """
    print("Finding another bot...")
    port = 1337
    while True:
        if port == server_port:
            # Don't connect to yourself, silly bot!
            port += 1
            continue
        conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            conn.connect(("localhost", port))
            # Only announce success after connect() actually succeeded
            # (previously printed before the attempt, which was misleading).
            print("Found bot on port %d" % port)
            return StealthConn(conn, client=True)
        except socket.error:
            print("No bot was listening on port %d" % port)
            conn.close()
            port += 1
def echo_server(sconn):
    """Echo every received message back over `sconn`.

    Runs until the sentinel b'X' or b'exit' arrives; the sentinel itself is
    echoed back before the connection is closed.
    """
    while True:
        message = sconn.recv()
        print("ECHOING>", message)
        sconn.send(message)
        if message in (b'X', b'exit'):
            print("Closing connection...")
            sconn.close()
            return
def accept_connection(conn):
    """Handle one inbound peer connection on its own thread.

    Wraps the raw socket in a server-side StealthConn, then dispatches on the
    first command: an echo session or a file transfer.
    """
    try:
        secure = StealthConn(conn, server=True)
        # The sender either chats with us or pushes a file.
        command = secure.recv()
        if command == b'ECHO TEST 123456':
            echo_server(secure)
        elif command == b'FILE':
            p2p_download_file(secure)
    except socket.error:
        print("Connection closed unexpectedly")
def bot_server():
    """Listen for peer bots and spawn one handler thread per connection.

    Every bot is both client and server, so it must accept inbound peer
    traffic. Binding starts at the module-level `server_port` and walks
    upwards until a free port is found (another bot may already be running
    on this machine); the chosen port is recorded back into `server_port`.
    """
    global server_port

    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    while True:
        try:
            listener.bind(("localhost", server_port))
            print("Listening on port %d" % server_port)
            break
        except socket.error:
            # Port taken -- try the next one up.
            print("Port %d not available" % server_port)
            server_port += 1
    listener.listen(5)

    while True:
        print("Waiting for connection...")
        conn, address = listener.accept()
        print("Accepted a connection from %s..." % (address,))
        # One thread per connection; daemon status is inherited from the caller.
        threading.Thread(target=accept_connection, args=(conn,)).start()
| apache-2.0 |
efwGrp/efw3.X | source for jar/efw/resource/client/efw.client.messages.js | 1024 | /**** efw3.X Copyright 2016 efwGrp ****/
/**
* The class to keep messages in client.
*
* @author Chang Kejun
*/
// Client-side message catalog for the efw framework. Keys are exception
// identifiers raised by the framework; values are the Japanese texts shown
// to the user. Placeholders such as {eventId} and {message} are substituted
// by the framework at display time.
var EfwClientMessages = function() {
};
EfwClientMessages.prototype = {
	// Unexpected (uncategorized) client-side error.
	OtherErrorException : "予想外エラーが発生しました。",
	// Network failure; asks the user whether to retry.
	CommunicationErrorException : "通信エラーが発生しました。リトライしますか。",
	// The requested server event is busy; asks the user to wait.
	EventIsBusyException :"該当機能は混雑しています。しばらくお待ちください。",
	// Server-side runtime error, reported with event id and message details.
	RuntimeErrorException : "実行時エラーが発生しました。\n\neventId={eventId}\nmessage={message}",
	// The event's parameter definition is invalid.
	ParamsFormatErrorException : "イベントのパラメータ定義は正しくありません。\n\neventId={eventId}",
	// The data returned for rendering is invalid.
	ResultValuesErrorException : "描画用のデータは正しくありません。\n\neventId={eventId}",
	// The data returned for client actions is invalid.
	ResultActionsErrorException : "動作用のデータは正しくありません。\n\neventId={eventId}",
	// The success callback is invalid.
	SuccessCallbackErrorException : "成功時コールバックは正しくありません。\n\neventId={eventId}",
};
| apache-2.0 |
macrocode/mcgiapp | src/app/app.component.ts | 5963 | import { Component, ViewChild } from '@angular/core';
import { Events, MenuController, Nav, Platform } from 'ionic-angular';
import { SplashScreen } from '@ionic-native/splash-screen';
import { Storage } from '@ionic/storage';
import { AboutPage } from '../pages/about/about';
import { AccountPage } from '../pages/account/account';
import { LoginPage } from '../pages/login/login';
import { MapPage } from '../pages/map/map';
import { SignupPage } from '../pages/signup/signup';
import { TabsPage } from '../pages/tabs/tabs';
import { GatheringtabsPage } from '../pages/gatheringtabs/gatheringtabs';
import { TutorialPage } from '../pages/tutorial/tutorial';
import { SupportPage } from '../pages/support/support';
import { GatheringPage } from '../pages/gathering/gathering';
import { BePage } from '../pages/be/be';
import { NewsPage } from '../pages/news/news';
import { PmPage } from '../pages/pm/pm';
import { ConferenceData } from '../providers/conference-data';
import { UserData } from '../providers/user-data';
/** Descriptor for one entry in the side-menu page lists. */
export interface PageInterface {
  /** Label shown in the menu. */
  title: string;
  /** Lazy-loaded page name passed to Nav.setRoot(). */
  name: string;
  /** Page component class. */
  component: any;
  /** Ionicon name displayed next to the title. */
  icon: string;
  /** True only for the entry that logs the user out after navigation. */
  logsOut?: boolean;
  /** Tab index when the target page lives inside the tabs navigation. */
  index?: number;
  // NOTE(review): tabName appears unused within this component — confirm
  // whether any template still references it before removing.
  tabName?: string;
  /** Tab root component; used by isActive() to highlight the active entry. */
  tabComponent?: any;
}
@Component({
  templateUrl: 'app.template.html'
})
export class MCGIApp {
  // The root nav is a child of the root app component;
  // @ViewChild(Nav) gets a reference to the app's root nav.
  @ViewChild(Nav) nav: Nav;

  // Lists of pages that can be navigated to from the left menu.
  // The left menu only works after login; the login page disables it.
  servicePages: PageInterface[] = [
    { title: '24/7 Community Prayer', name: 'PmPage', component: PmPage, icon: 'cloud' },
    { title: 'News', name: 'NewsPage', component: NewsPage, icon: 'book' },
    { title: 'Upcoming Events', name: 'TabsPage', component: TabsPage, icon: 'timer' }
  ];
  appPages: PageInterface[] = [
    { title: 'Gathering', name: 'TabsPage', component: TabsPage, tabComponent: GatheringPage, index: 0, icon: 'book' },
    { title: 'Events', name: 'TabsPage', component: TabsPage, tabComponent: BePage, index: 1, icon: 'calendar' },
    { title: 'Overall Servants', name: 'TabsPage', component: GatheringtabsPage, tabComponent: MapPage, index: 2, icon: 'people' },
    { title: 'Ministries', name: 'TabsPage', component: TabsPage, tabComponent: AboutPage, index: 3, icon: 'information-circle' }
  ];
  loggedInPages: PageInterface[] = [
    { title: 'Account', name: 'AccountPage', component: AccountPage, icon: 'person' },
    { title: 'Support', name: 'SupportPage', component: SupportPage, icon: 'help' },
    { title: 'Logout', name: 'TabsPage', component: TabsPage, icon: 'log-out', logsOut: true }
  ];
  loggedOutPages: PageInterface[] = [
    { title: 'Login', name: 'LoginPage', component: LoginPage, icon: 'log-in' },
    { title: 'Support', name: 'SupportPage', component: SupportPage, icon: 'help' },
    { title: 'Signup', name: 'SignupPage', component: SignupPage, icon: 'person-add' }
  ];
  rootPage: any;

  constructor(
    public events: Events,
    public userData: UserData,
    public menu: MenuController,
    public platform: Platform,
    public confData: ConferenceData,
    public storage: Storage,
    public splashScreen: SplashScreen
  ) {
    // Check if the user has already seen the tutorial before picking the root page.
    this.storage.get('hasSeenTutorial')
      .then((hasSeenTutorial) => {
        if (hasSeenTutorial) {
          this.rootPage = TabsPage;
        } else {
          this.rootPage = TutorialPage;
        }
        this.platformReady()
      });

    // Load the conference data.
    confData.load();

    // Decide which menu items should be hidden by the login status stored in
    // local storage. The synchronous enableMenu(true) below is the optimistic
    // default until the promise resolves and corrects it.
    this.userData.hasLoggedIn().then((hasLoggedIn) => {
      this.enableMenu(hasLoggedIn === true);
    });
    this.enableMenu(true);

    this.listenToLoginEvents();
  }

  /**
   * Navigate to a page chosen from the side menu.
   *
   * Bug fix: `page.index` may legitimately be 0 (the "Gathering" tab), so we
   * compare against `undefined` explicitly — the old truthiness check
   * (`if (page.index)`) silently dropped the tabIndex param for index 0.
   */
  openPage(page: PageInterface) {
    let params = {};

    // The nav component was found using @ViewChild(Nav).
    // setRoot on the nav removes previous pages, so the back button will not
    // show in this scenario.
    if (page.index !== undefined) {
      params = { tabIndex: page.index };
    }

    // If we are already on tabs just change the selected tab; don't setRoot
    // again — this maintains the history stack of the tabs even when
    // changing them from the menu.
    if (this.nav.getActiveChildNav() && page.index !== undefined) {
      this.nav.getActiveChildNav().select(page.index);
      // Otherwise set the root of the nav with params if it's a tab index.
    } else {
      this.nav.setRoot(page.name, params).catch((err: any) => {
        console.log(`Didn't set nav root: ${err}`);
      });
    }

    if (page.logsOut === true) {
      // Give the menu time to close before changing to logged out.
      this.userData.logout();
    }
  }

  /** Restart the tutorial/walkthrough from the beginning. */
  openTutorial() {
    this.nav.setRoot(TutorialPage);
  }

  /** Keep the side menus in sync with login/signup/logout events. */
  listenToLoginEvents() {
    this.events.subscribe('user:login', () => {
      this.enableMenu(true);
    });

    this.events.subscribe('user:signup', () => {
      this.enableMenu(true);
    });

    this.events.subscribe('user:logout', () => {
      this.enableMenu(false);
    });
  }

  /** Enable exactly one of the two side menus depending on login state. */
  enableMenu(loggedIn: boolean) {
    this.menu.enable(loggedIn, 'loggedInMenu');
    this.menu.enable(!loggedIn, 'loggedOutMenu');
  }

  /** Run initial plugin work once Cordova reports the platform is ready. */
  platformReady() {
    this.platform.ready().then(() => {
      this.splashScreen.hide();
    });
  }

  /**
   * Returns 'primary' when the given menu entry corresponds to the currently
   * shown page (or selected tab) so the template can highlight it;
   * returns undefined otherwise.
   */
  isActive(page: PageInterface) {
    let childNav = this.nav.getActiveChildNav();

    // Tabs are a special case because they have their own navigation.
    if (childNav) {
      if (childNav.getSelected() && childNav.getSelected().root === page.tabComponent) {
        return 'primary';
      }
      return;
    }

    if (this.nav.getActive() && this.nav.getActive().name === page.name) {
      return 'primary';
    }
    return;
  }
}
| apache-2.0 |
royclarkson/spring-cloud-config | spring-cloud-config-server/src/main/java/org/springframework/cloud/config/server/environment/EnvironmentController.java | 12168 | /*
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.config.server.environment;
import static org.springframework.cloud.config.server.support.EnvironmentPropertySource.prepareEnvironment;
import static org.springframework.cloud.config.server.support.EnvironmentPropertySource.resolvePlaceholders;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import javax.servlet.http.HttpServletResponse;
import org.springframework.boot.bind.PropertiesConfigurationFactory;
import org.springframework.cloud.config.environment.Environment;
import org.springframework.cloud.config.environment.PropertySource;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.StringUtils;
import org.springframework.validation.BindException;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.yaml.snakeyaml.DumperOptions.FlowStyle;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.nodes.Tag;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* @author Dave Syer
* @author Spencer Gibb
* @author Roy Clarkson
* @author Bartosz Wojtkiewicz
* @author Rafal Zukowski
* @author Ivan Corrales Solera
*
*/
@RestController
@RequestMapping(method = RequestMethod.GET, path = "${spring.cloud.config.server.prefix:}")
public class EnvironmentController {
private static final String MAP_PREFIX = "map";
private EnvironmentRepository repository;
private ObjectMapper objectMapper;
private boolean stripDocument = true;
	/**
	 * Creates a controller with a default Jackson {@code ObjectMapper} for the
	 * JSON endpoints.
	 *
	 * @param repository source of {@link Environment}s to serve
	 */
	public EnvironmentController(EnvironmentRepository repository) {
		this(repository, new ObjectMapper());
	}
	/**
	 * Creates a controller with an explicit {@code ObjectMapper}.
	 *
	 * @param repository source of {@link Environment}s to serve
	 * @param objectMapper mapper used to render the JSON endpoints
	 */
	public EnvironmentController(EnvironmentRepository repository,
			ObjectMapper objectMapper) {
		this.repository = repository;
		this.objectMapper = objectMapper;
	}
/**
* Flag to indicate that YAML documents which are not a map should be stripped of the
* "document" prefix that is added by Spring (to facilitate conversion to Properties).
*
* @param stripDocument the flag to set
*/
public void setStripDocumentFromYaml(boolean stripDocument) {
this.stripDocument = stripDocument;
}
	/**
	 * Serves {@code /{name}/{profiles}} using the repository's default label.
	 * NOTE(review): the profiles pattern {@code .*[^-].*} requires at least one
	 * non-hyphen character — presumably so the dotted-file mappings take
	 * precedence; confirm before altering.
	 */
	@RequestMapping("/{name}/{profiles:.*[^-].*}")
	public Environment defaultLabel(@PathVariable String name,
			@PathVariable String profiles) {
		return labelled(name, profiles, null);
	}
@RequestMapping("/{name}/{profiles}/{label:.*}")
public Environment labelled(@PathVariable String name, @PathVariable String profiles,
@PathVariable String label) {
if (label != null && label.contains("(_)")) {
// "(_)" is uncommon in a git branch name, but "/" cannot be matched
// by Spring MVC
label = label.replace("(_)", "/");
}
Environment environment = this.repository.findOne(name, profiles, label);
return environment;
}
	/**
	 * Serves {@code /{name}-{profiles}.properties} from the default label.
	 *
	 * @param resolvePlaceholders whether {@code ${...}} placeholders are
	 *     resolved against the environment before rendering
	 * @throws IOException if the environment cannot be rendered
	 */
	@RequestMapping("/{name}-{profiles}.properties")
	public ResponseEntity<String> properties(@PathVariable String name,
			@PathVariable String profiles,
			@RequestParam(defaultValue = "true") boolean resolvePlaceholders)
			throws IOException {
		return labelledProperties(name, profiles, null, resolvePlaceholders);
	}
	/**
	 * Serves {@code /{label}/{name}-{profiles}.properties}: renders the
	 * environment as flat {@code key: value} lines.
	 *
	 * @param resolvePlaceholders whether {@code ${...}} placeholders are
	 *     resolved against the environment before rendering
	 * @throws IOException if the environment cannot be rendered
	 */
	@RequestMapping("/{label}/{name}-{profiles}.properties")
	public ResponseEntity<String> labelledProperties(@PathVariable String name,
			@PathVariable String profiles, @PathVariable String label,
			@RequestParam(defaultValue = "true") boolean resolvePlaceholders)
			throws IOException {
		// Hyphenated profiles would be ambiguous in the {name}-{profiles} pattern.
		validateProfiles(profiles);
		Environment environment = labelled(name, profiles, label);
		Map<String, Object> properties = convertToProperties(environment);
		String propertiesString = getPropertiesString(properties);
		if (resolvePlaceholders) {
			propertiesString = resolvePlaceholders(prepareEnvironment(environment),
					propertiesString);
		}
		return getSuccess(propertiesString);
	}
	/**
	 * Serves {@code {name}-{profiles}.json} (default label) as a nested JSON map.
	 * NOTE(review): unlike the sibling mappings, this pattern has no leading
	 * "/" — Spring still matches it, but confirm before normalizing.
	 */
	@RequestMapping("{name}-{profiles}.json")
	public ResponseEntity<String> jsonProperties(@PathVariable String name,
			@PathVariable String profiles,
			@RequestParam(defaultValue = "true") boolean resolvePlaceholders)
			throws Exception {
		return labelledJsonProperties(name, profiles, null, resolvePlaceholders);
	}
	/**
	 * Serves {@code /{label}/{name}-{profiles}.json}: renders the environment
	 * as a nested JSON object (dotted keys become nested maps).
	 *
	 * @param resolvePlaceholders whether {@code ${...}} placeholders are
	 *     resolved against the environment before rendering
	 */
	@RequestMapping("/{label}/{name}-{profiles}.json")
	public ResponseEntity<String> labelledJsonProperties(@PathVariable String name,
			@PathVariable String profiles, @PathVariable String label,
			@RequestParam(defaultValue = "true") boolean resolvePlaceholders)
			throws Exception {
		validateProfiles(profiles);
		Environment environment = labelled(name, profiles, label);
		Map<String, Object> properties = convertToMap(environment);
		String json = this.objectMapper.writeValueAsString(properties);
		if (resolvePlaceholders) {
			json = resolvePlaceholders(prepareEnvironment(environment), json);
		}
		return getSuccess(json, MediaType.APPLICATION_JSON);
	}
private String getPropertiesString(Map<String, Object> properties) {
StringBuilder output = new StringBuilder();
for (Entry<String, Object> entry : properties.entrySet()) {
if (output.length() > 0) {
output.append("\n");
}
String line = entry.getKey() + ": " + entry.getValue();
output.append(line);
}
return output.toString();
}
	/**
	 * Serves {@code /{name}-{profiles}.yml|.yaml} from the default label.
	 *
	 * @param resolvePlaceholders whether {@code ${...}} placeholders are
	 *     resolved against the environment before rendering
	 */
	@RequestMapping({ "/{name}-{profiles}.yml", "/{name}-{profiles}.yaml" })
	public ResponseEntity<String> yaml(@PathVariable String name,
			@PathVariable String profiles,
			@RequestParam(defaultValue = "true") boolean resolvePlaceholders)
			throws Exception {
		return labelledYaml(name, profiles, null, resolvePlaceholders);
	}
	/**
	 * Serves {@code /{label}/{name}-{profiles}.yml|.yaml}: renders the
	 * environment as a YAML document.
	 * <p>
	 * When {@link #setStripDocumentFromYaml(boolean)} is enabled (default) and
	 * the bound result is a single synthetic {@code document} entry (a YAML
	 * source that was not a map), the wrapper is stripped so plain lists and
	 * scalars round-trip cleanly.
	 */
	@RequestMapping({ "/{label}/{name}-{profiles}.yml",
			"/{label}/{name}-{profiles}.yaml" })
	public ResponseEntity<String> labelledYaml(@PathVariable String name,
			@PathVariable String profiles, @PathVariable String label,
			@RequestParam(defaultValue = "true") boolean resolvePlaceholders)
			throws Exception {
		validateProfiles(profiles);
		Environment environment = labelled(name, profiles, label);
		Map<String, Object> result = convertToMap(environment);
		if (this.stripDocument && result.size() == 1
				&& result.keySet().iterator().next().equals("document")) {
			Object value = result.get("document");
			// Dump sequences as block lists and anything else as a scalar.
			// NOTE(review): placeholders are not resolved in this early-return
			// branch — confirm whether that is intentional.
			if (value instanceof Collection) {
				return getSuccess(new Yaml().dumpAs(value, Tag.SEQ, FlowStyle.BLOCK));
			}
			else {
				return getSuccess(new Yaml().dumpAs(value, Tag.STR, FlowStyle.BLOCK));
			}
		}
		String yaml = new Yaml().dumpAsMap(result);
		if (resolvePlaceholders) {
			yaml = resolvePlaceholders(prepareEnvironment(environment), yaml);
		}
		return getSuccess(yaml);
	}
	/**
	 * Converts the environment's flattened properties into a nested map.
	 * <p>
	 * Every flat key is prefixed with {@code map.} and pushed through Spring
	 * Boot's relaxed binder so that dotted keys become nested maps;
	 * {@code addArrays} pre-creates lists for indexed ({@code [n]}) keys. The
	 * synthetic {@code map} prefix is stripped from the bound result before
	 * returning.
	 *
	 * @throws BindException if the properties cannot be bound to a map
	 */
	private Map<String, Object> convertToMap(Environment input) throws BindException {
		// The binder populates "target" under the MAP_PREFIX key.
		Map<String, Object> target = new LinkedHashMap<>();
		PropertiesConfigurationFactory<Map<String, Object>> factory = new PropertiesConfigurationFactory<>(
				target);
		Map<String, Object> data = convertToProperties(input);
		LinkedHashMap<String, Object> properties = new LinkedHashMap<>();
		for (String key : data.keySet()) {
			properties.put(MAP_PREFIX + "." + key, data.get(key));
		}
		// Pre-create lists so indexed keys bind into the right slots.
		addArrays(target, properties);
		MutablePropertySources propertySources = new MutablePropertySources();
		propertySources.addFirst(new MapPropertySource("properties", properties));
		factory.setPropertySources(propertySources);
		factory.bindPropertiesToTarget();
		@SuppressWarnings("unchecked")
		Map<String, Object> result = (Map<String, Object>) target.get(MAP_PREFIX);
		// An empty environment binds nothing; return an empty map, never null.
		return result == null ? new LinkedHashMap<String, Object>() : result;
	}
	/** Maps {@link NoSuchLabelException} to a 404 Not Found response. */
	@ExceptionHandler(NoSuchLabelException.class)
	public void noSuchLabel(HttpServletResponse response) throws IOException {
		response.sendError(HttpStatus.NOT_FOUND.value());
	}
	/** Maps {@link IllegalArgumentException} (e.g. hyphenated profiles) to 400 Bad Request. */
	@ExceptionHandler(IllegalArgumentException.class)
	public void illegalArgument(HttpServletResponse response) throws IOException {
		response.sendError(HttpStatus.BAD_REQUEST.value());
	}
/**
 * Rejects profile strings containing hyphens, which cannot be represented
 * in properties-style output.
 *
 * @param profiles the comma-separated profiles segment of the request path
 * @throws IllegalArgumentException when a hyphen is present
 */
private void validateProfiles(String profiles) {
    if (profiles.indexOf('-') >= 0) {
        throw new IllegalArgumentException(
                "Properties output not supported for name or profiles containing hyphens");
    }
}
/** Builds an HttpHeaders instance whose Content-Type is the given media type. */
private HttpHeaders getHttpHeaders(MediaType mediaType) {
    final HttpHeaders headers = new HttpHeaders();
    headers.setContentType(mediaType);
    return headers;
}
/** Wraps the body in a 200 OK plain-text response. */
private ResponseEntity<String> getSuccess(String body) {
    return getSuccess(body, MediaType.TEXT_PLAIN);
}
/** Wraps the body in a 200 OK response with the supplied content type. */
private ResponseEntity<String> getSuccess(String body, MediaType mediaType) {
    HttpHeaders headers = getHttpHeaders(mediaType);
    return new ResponseEntity<>(body, headers, HttpStatus.OK);
}
/**
 * Create Lists of the right size for any YAML arrays that are going to need to be
 * bound. Some of this might be do-able in RelaxedDataBinder, but we need to do it
 * here for now. Only supports arrays at leaf level currently (i.e. the properties
 * keys end in [*]).
 *
 * @param target the target Map
 * @param properties the properties (with key names to check)
 */
private void addArrays(Map<String, Object> target, Map<String, Object> properties) {
    for (String key : properties.keySet()) {
        int index = key.indexOf("[");
        Map<String, Object> current = target;
        if (index > 0) {
            // "a.b.c[0]" -> stem "a.b.c"; walk (creating as needed) the
            // nested maps for every segment except the last.
            String stem = key.substring(0, index);
            String[] keys = StringUtils.delimitedListToStringArray(stem, ".");
            for (int i = 0; i < keys.length - 1; i++) {
                if (current.get(keys[i]) == null) {
                    LinkedHashMap<String, Object> map = new LinkedHashMap<String, Object>();
                    current.put(keys[i], map);
                    current = map;
                }
                else {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> map = (Map<String, Object>) current
                            .get(keys[i]);
                    current = map;
                }
            }
            // The last stem segment holds the list itself.
            String name = keys[keys.length - 1];
            if (current.get(name) == null) {
                current.put(name, new ArrayList<Object>());
            }
            @SuppressWarnings("unchecked")
            List<Object> value = (List<Object>) current.get(name);
            int position = Integer
                    .valueOf(key.substring(index + 1, key.indexOf("]")));
            // Grow the list up to the referenced index; the placeholder type
            // depends on whether the key continues past "]." (nested map)
            // or ends at the bracket (scalar).
            while (position >= value.size()) {
                if (key.indexOf("].", index) > 0) {
                    value.add(new LinkedHashMap<String, Object>());
                }
                else {
                    value.add("");
                }
            }
        }
    }
}
/**
 * Flattens every property source of the environment into one sorted map,
 * letting higher-priority sources override lower-priority ones.
 */
private Map<String, Object> convertToProperties(Environment environment) {
    Map<String, Object> combined = new TreeMap<>();
    List<PropertySource> sources = new ArrayList<>(environment.getPropertySources());
    // Apply lowest-priority sources first so later putAll calls win.
    Collections.reverse(sources);
    for (PropertySource source : sources) {
        @SuppressWarnings("unchecked")
        Map<String, String> sourceMap = (Map<String, String>) source.getSource();
        combined.putAll(sourceMap);
    }
    postProcessProperties(combined);
    return combined;
}
/**
 * Removes internal bookkeeping keys that must not appear in user-facing
 * output; currently only the "spring.profiles" marker.
 *
 * @param propertiesMap the flattened properties to clean up (modified in place)
 */
private void postProcessProperties(Map<String, Object> propertiesMap) {
    // A Map holds at most one entry per key, so a direct remove is
    // equivalent to (and simpler than) the former iterate-and-remove loop.
    propertiesMap.remove("spring.profiles");
}
}
| apache-2.0 |
nhaarman/trinity | trinity-sample/src/main/java/com/nhaarman/trinity/sample/Team.java | 1485 | /*
* Copyright 2015 Niek Haarman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nhaarman.trinity.sample;
import com.nhaarman.trinity.annotations.Column;
import com.nhaarman.trinity.annotations.Foreign;
import com.nhaarman.trinity.annotations.PrimaryKey;
import com.nhaarman.trinity.annotations.Table;
/**
 * Persistent entity mapped to the {@code teams} table.
 *
 * <p>Private fields renamed from the Hungarian {@code m}-prefixed style
 * ({@code mId}, {@code mName}, {@code mClubId}) to standard Java naming;
 * the public accessor interface and column mappings are unchanged.
 */
@Table(name = "teams")
public class Team {

    /** Primary key; {@code null} until the row has been persisted. */
    private Long id;

    /** Display name of the team. */
    private String name;

    /** Foreign key referencing the owning club's id. */
    private String clubId;

    @PrimaryKey
    @Column("id")
    public Long getId() {
        return id;
    }

    @Column("id")
    @PrimaryKey
    public void setId(final Long id) {
        this.id = id;
    }

    @Column("name")
    public String getName() {
        return name;
    }

    @Column("name")
    public void setName(final String name) {
        this.name = name;
    }

    @Foreign(tableName = "clubs", columnName = "id")
    @Column("club_id")
    public String getClubId() {
        return clubId;
    }

    @Column("club_id")
    public void setClubId(final String clubId) {
        this.clubId = clubId;
    }
}
| apache-2.0 |
prasanna08/oppia | core/controllers/topics_and_skills_dashboard_test.py | 34482 | # Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the topics and skills dashboard page."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import os
from constants import constants
from core.domain import config_services
from core.domain import question_services
from core.domain import skill_fetchers
from core.domain import skill_services
from core.domain import state_domain
from core.domain import topic_domain
from core.domain import topic_fetchers
from core.domain import topic_services
from core.tests import test_utils
import feconf
import python_utils
class BaseTopicsAndSkillsDashboardTests(test_utils.GenericTestBase):
    """Base class that signs up the test users and seeds one topic with an
    uncategorized skill and one subtopic skill.
    """

    def setUp(self):
        """Completes the sign-up process for the various users."""
        super(BaseTopicsAndSkillsDashboardTests, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.signup(self.TOPIC_MANAGER_EMAIL, self.TOPIC_MANAGER_USERNAME)
        self.signup(self.NEW_USER_EMAIL, self.NEW_USER_USERNAME)
        self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
        self.topic_manager_id = self.get_user_id_from_email(
            self.TOPIC_MANAGER_EMAIL)
        self.new_user_id = self.get_user_id_from_email(
            self.NEW_USER_EMAIL)
        self.set_admins([self.ADMIN_USERNAME])
        self.set_topic_managers([self.TOPIC_MANAGER_USERNAME])
        self.topic_id = topic_services.get_new_topic_id()
        # Skill attached to the topic's uncategorized skill list.
        self.linked_skill_id = skill_services.get_new_skill_id()
        self.save_new_skill(
            self.linked_skill_id, self.admin_id, description='Description 3')
        # Skill assigned to the topic's single subtopic.
        self.subtopic_skill_id = skill_services.get_new_skill_id()
        self.save_new_skill(
            self.subtopic_skill_id, self.admin_id, description='Subtopic Skill')
        subtopic = topic_domain.Subtopic.create_default_subtopic(
            1, 'Subtopic Title')
        subtopic.skill_ids = [self.subtopic_skill_id]
        self.save_new_topic(
            self.topic_id, self.admin_id, name='Name',
            abbreviated_name='name', url_fragment='name',
            description='Description', canonical_story_ids=[],
            additional_story_ids=[],
            uncategorized_skill_ids=[self.linked_skill_id],
            subtopics=[subtopic], next_subtopic_id=2)
class TopicsAndSkillsDashboardPageDataHandlerTests(
        BaseTopicsAndSkillsDashboardTests):
    """Tests for the topics-and-skills dashboard data handler.

    Fix: an exactly duplicated assertion on the topic summary id (present
    twice in a row in the topic-manager section) has been removed.
    """

    def test_get(self):
        """Checks access control and payload contents for each user role."""
        # Check that non-admins or non-topic managers cannot access the
        # topics and skills dashboard data.
        skill_id = skill_services.get_new_skill_id()
        self.save_new_skill(skill_id, self.admin_id, description='Description')
        self.login(self.NEW_USER_EMAIL)
        self.get_json(
            feconf.TOPICS_AND_SKILLS_DASHBOARD_DATA_URL,
            expected_status_int=401)
        self.logout()
        # Check that admins can access the topics and skills dashboard data.
        self.login(self.ADMIN_EMAIL)
        config_services.set_property(
            self.admin_id, 'classroom_pages_data', [{
                'url_fragment': 'math',
                'name': 'math',
                'topic_ids': [self.topic_id],
                'topic_list_intro': 'Topics covered',
                'course_details': 'Course details'
            }]
        )
        json_response = self.get_json(
            feconf.TOPICS_AND_SKILLS_DASHBOARD_DATA_URL)
        self.assertEqual(len(json_response['topic_summary_dicts']), 1)
        self.assertEqual(
            json_response['topic_summary_dicts'][0]['can_edit_topic'],
            True)
        self.assertEqual(
            json_response['topic_summary_dicts'][0]['id'], self.topic_id)
        self.assertEqual(
            len(json_response['untriaged_skill_summary_dicts']), 1)
        self.assertEqual(
            len(json_response['mergeable_skill_summary_dicts']), 2)
        for skill_dict in json_response['mergeable_skill_summary_dicts']:
            if skill_dict['description'] == 'Description 3':
                self.assertEqual(skill_dict['id'], self.linked_skill_id)
        self.assertEqual(
            len(json_response['categorized_skills_dict']), 1)
        self.assertEqual(
            json_response['untriaged_skill_summary_dicts'][0]['id'],
            skill_id)
        self.assertEqual(
            json_response['can_delete_topic'], True)
        self.assertEqual(
            json_response['can_create_topic'], True)
        self.assertEqual(
            json_response['can_delete_skill'], True)
        self.assertEqual(
            json_response['can_create_skill'], True)
        self.logout()
        # Check that topic managers can access the topics and skills
        # dashboard editable topic data. Topic managers should not have
        # access to any unpublished skills.
        self.login(self.TOPIC_MANAGER_EMAIL)
        json_response = self.get_json(
            feconf.TOPICS_AND_SKILLS_DASHBOARD_DATA_URL)
        self.assertEqual(len(json_response['topic_summary_dicts']), 1)
        self.assertEqual(
            json_response['topic_summary_dicts'][0]['can_edit_topic'],
            False)
        self.assertEqual(
            json_response['topic_summary_dicts'][0]['id'], self.topic_id)
        self.assertEqual(
            len(json_response['untriaged_skill_summary_dicts']), 1)
        self.assertEqual(
            len(json_response['mergeable_skill_summary_dicts']), 2)
        for skill_dict in json_response['mergeable_skill_summary_dicts']:
            if skill_dict['description'] == 'Description 3':
                self.assertEqual(skill_dict['id'], self.linked_skill_id)
        self.assertEqual(
            json_response['untriaged_skill_summary_dicts'][0]['id'],
            skill_id)
        self.assertEqual(
            len(json_response['all_classroom_names']), 1)
        self.assertEqual(
            json_response['all_classroom_names'], ['math'])
        self.assertEqual(
            json_response['can_delete_topic'], False)
        self.assertEqual(
            json_response['can_create_topic'], False)
        self.assertEqual(
            json_response['can_delete_skill'], False)
        self.assertEqual(
            json_response['can_create_skill'], False)
        self.logout()

    def test_topics_and_skills_dashboard_page(self):
        """Checks that the dashboard HTML page renders with its title."""
        self.login(self.ADMIN_EMAIL)
        response = self.get_html_response(
            feconf.TOPICS_AND_SKILLS_DASHBOARD_URL)
        self.assertIn(
            '{"title": "Topics and Skills Dashboard - Oppia"})', response.body)
        self.logout()
class TopicAssignmentsHandlerTests(BaseTopicsAndSkillsDashboardTests):
    """Tests for the handler returning a skill's topic assignments."""

    def test_get(self):
        """Checks assignments before and after adding the skill to topics."""
        self.login(self.ADMIN_EMAIL)
        skill_id = skill_services.get_new_skill_id()
        self.save_new_skill(
            skill_id, self.admin_id, description='Skill description')
        # A freshly created skill is not assigned to any topic yet.
        json_response = self.get_json(
            '%s/%s' % (feconf.UNASSIGN_SKILL_DATA_HANDLER_URL, skill_id))
        self.assertEqual(len(json_response['topic_assignment_dicts']), 0)
        topic_id_1 = topic_services.get_new_topic_id()
        topic_id_2 = topic_services.get_new_topic_id()
        # Topic1 holds the skill as uncategorized; Topic2 inside a subtopic.
        self.save_new_topic(
            topic_id_1, self.admin_id, name='Topic1',
            abbreviated_name='topic-one', url_fragment='topic-one',
            description='Description1', canonical_story_ids=[],
            additional_story_ids=[],
            uncategorized_skill_ids=[skill_id],
            subtopics=[], next_subtopic_id=1)
        subtopic = topic_domain.Subtopic.from_dict({
            'id': 1,
            'title': 'subtopic1',
            'skill_ids': [skill_id],
            'thumbnail_filename': None,
            'thumbnail_bg_color': None,
            'url_fragment': 'subtopic-url'
        })
        self.save_new_topic(
            topic_id_2, self.admin_id, name='Topic2',
            abbreviated_name='topic-two', url_fragment='topic-two',
            description='Description2', canonical_story_ids=[],
            additional_story_ids=[],
            uncategorized_skill_ids=[],
            subtopics=[subtopic], next_subtopic_id=2)
        json_response = self.get_json(
            '%s/%s' % (feconf.UNASSIGN_SKILL_DATA_HANDLER_URL, skill_id))
        # Sort for a deterministic order before asserting.
        topic_assignment_dicts = sorted(
            json_response['topic_assignment_dicts'],
            key=lambda i: i['topic_name'])
        self.assertEqual(len(topic_assignment_dicts), 2)
        self.assertEqual(topic_assignment_dicts[0]['topic_name'], 'Topic1')
        self.assertEqual(topic_assignment_dicts[0]['topic_id'], topic_id_1)
        self.assertIsNone(topic_assignment_dicts[0]['subtopic_id'])
        self.assertEqual(topic_assignment_dicts[1]['topic_name'], 'Topic2')
        self.assertEqual(topic_assignment_dicts[1]['topic_id'], topic_id_2)
        self.assertEqual(topic_assignment_dicts[1]['subtopic_id'], 1)
class SkillsDashboardPageDataHandlerTests(BaseTopicsAndSkillsDashboardTests):
    """Tests for the filtered/paginated skills dashboard data handler."""

    def test_post(self):
        """Checks the three sort orders on the seeded pair of skills."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'sort': 'Oldest Created'
            }, csrf_token=csrf_token)
        self.assertEqual(len(json_response['skill_summary_dicts']), 2)
        self.assertEqual(
            json_response['skill_summary_dicts'][0]['id'], self.linked_skill_id)
        self.assertEqual(
            json_response['skill_summary_dicts'][1]['id'],
            self.subtopic_skill_id)
        self.assertFalse(json_response['more'])
        self.assertEqual(json_response['next_cursor'], None)
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'sort': 'Newly Created'
            }, csrf_token=csrf_token)
        self.assertEqual(len(json_response['skill_summary_dicts']), 2)
        self.assertEqual(
            json_response['skill_summary_dicts'][0]['id'],
            self.subtopic_skill_id)
        self.assertEqual(
            json_response['skill_summary_dicts'][1]['id'], self.linked_skill_id)
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'sort': 'Most Recently Updated'
            }, csrf_token=csrf_token)
        self.assertEqual(len(json_response['skill_summary_dicts']), 2)
        self.assertEqual(
            json_response['skill_summary_dicts'][0]['id'],
            self.subtopic_skill_id)
        self.assertEqual(
            json_response['skill_summary_dicts'][1]['id'], self.linked_skill_id)
        self.assertFalse(json_response['more'])
        self.assertEqual(json_response['next_cursor'], None)

    def test_fetch_filtered_skills_with_given_keywords(self):
        """Checks keyword filtering against skill descriptions."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'keywords': ['description']
            }, csrf_token=csrf_token)
        self.assertEqual(len(json_response['skill_summary_dicts']), 1)
        self.assertEqual(
            json_response['skill_summary_dicts'][0]['id'], self.linked_skill_id)
        self.assertEqual(
            json_response['skill_summary_dicts'][0]['description'],
            'Description 3')
        self.assertFalse(json_response['more'])
        self.assertEqual(json_response['next_cursor'], None)
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'keywords': ['subtopic']
            }, csrf_token=csrf_token)
        self.assertEqual(len(json_response['skill_summary_dicts']), 1)
        self.assertEqual(
            json_response['skill_summary_dicts'][0]['id'],
            self.subtopic_skill_id)
        self.assertEqual(
            json_response['skill_summary_dicts'][0]['description'],
            'Subtopic Skill')
        self.assertFalse(json_response['more'])
        self.assertEqual(json_response['next_cursor'], None)

    def test_fetch_filtered_skills_with_given_status(self):
        """Checks the Assigned/Unassigned status filter."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'status': 'Assigned'
            }, csrf_token=csrf_token)
        self.assertEqual(len(json_response['skill_summary_dicts']), 2)
        self.assertFalse(json_response['more'])
        self.assertEqual(json_response['next_cursor'], None)
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'status': 'Unassigned'
            }, csrf_token=csrf_token)
        self.assertEqual(len(json_response['skill_summary_dicts']), 0)
        self.assertFalse(json_response['more'])
        self.assertEqual(json_response['next_cursor'], None)

    def test_fetch_filtered_skills_with_given_cursor(self):
        """Checks pagination via the returned cursor."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        skill_id = skill_services.get_new_skill_id()
        self.save_new_skill(
            skill_id, self.admin_id, description='Random Skill')
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 1,
            }, csrf_token=csrf_token)
        # Default sort is "Newly created first". So, the skill with id-skill_id
        # is the most "Newly created", and therefore it comes first. The skill
        # with id-subtopic_skill_id was created before the above skill,
        # so it comes second. Then the skill with id-linked_skill_id was created
        # before the other two skills, hence it comes last because it is the
        # least "Newly Created".
        self.assertEqual(len(json_response['skill_summary_dicts']), 2)
        self.assertEqual(
            json_response['skill_summary_dicts'][0]['id'], skill_id)
        self.assertEqual(
            json_response['skill_summary_dicts'][1]['id'],
            self.subtopic_skill_id)
        self.assertTrue(json_response['more'])
        self.assertTrue(
            isinstance(json_response['next_cursor'], python_utils.BASESTRING))
        next_cursor = json_response['next_cursor']
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 1,
                'next_cursor': next_cursor,
            }, csrf_token=csrf_token)
        self.assertEqual(len(json_response['skill_summary_dicts']), 1)
        self.assertEqual(
            json_response['skill_summary_dicts'][0]['id'], self.linked_skill_id)
        self.assertFalse(json_response['more'])
        self.assertEqual(json_response['next_cursor'], None)

    def test_fetch_filtered_skills_with_invalid_num_skills_to_fetch(self):
        """Checks the 400 error for a non-numeric fetch count."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': '1',
            }, csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            json_response['error'],
            'Number of skills to fetch should be a number.')

    def test_fetch_filtered_skills_with_invalid_cursor_type(self):
        """Checks the 400 error for a non-string cursor."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        skill_id = skill_services.get_new_skill_id()
        self.save_new_skill(
            skill_id, self.admin_id, description='Random Skill')
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 1,
                'next_cursor': 40
            }, csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            json_response['error'], 'Next Cursor should be a string.')

    def test_fetch_filtered_skills_with_invalid_cursor_value(self):
        """Checks the 500 error for a malformed cursor string."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        skill_id = skill_services.get_new_skill_id()
        self.save_new_skill(
            skill_id, self.admin_id, description='Random Skill')
        self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 1,
                'next_cursor': 'kfsdkam43k4334'
            }, csrf_token=csrf_token,
            expected_status_int=500)

    def test_fetch_filtered_skills_with_invalid_classroom(self):
        """Checks the 400 error for a non-string classroom name."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'classroom_name': 20,
            }, csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            json_response['error'], 'Classroom name should be a string.')

    def test_fetch_filtered_skills_with_invalid_keywords(self):
        """Checks the 400 error for non-list / non-string keywords."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'keywords': 20,
            }, csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            json_response['error'], 'Keywords should be a list of strings.')
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'keywords': ['apple', 20],
            }, csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            json_response['error'], 'Keywords should be a list of strings.')

    def test_fetch_filtered_skills_with_invalid_status(self):
        """Checks the 400 error for a non-string status."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'status': 20,
            }, csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            json_response['error'], 'Status should be a string.')

    def test_fetch_filtered_skills_with_invalid_sort(self):
        """Checks the 400 error for a non-string sort value."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        json_response = self.post_json(
            feconf.SKILL_DASHBOARD_DATA_URL, {
                'num_skills_to_fetch': 10,
                'sort': 20,
            }, csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(
            json_response['error'], 'The value of sort_by should be a string.')
class NewTopicHandlerTests(BaseTopicsAndSkillsDashboardTests):
    """Tests for the handler that creates new topics."""

    def setUp(self):
        super(NewTopicHandlerTests, self).setUp()
        self.url = feconf.NEW_TOPIC_URL

    def test_topic_creation(self):
        """Checks a valid payload with a thumbnail creates a topic."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        payload = {
            'name': 'Topic name',
            'abbreviatedName': 'name-one',
            'description': 'Topic description',
            'filename': 'test_svg.svg',
            'thumbnailBgColor': '#C6DCDA',
            'url_fragment': 'name-one'
        }
        with python_utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'),
            'rb', encoding=None) as f:
            raw_image = f.read()
        json_response = self.post_json(
            self.url, payload, csrf_token=csrf_token,
            upload_files=(('image', 'unused_filename', raw_image),)
        )
        topic_id = json_response['topicId']
        self.assertEqual(len(topic_id), 12)
        self.assertIsNotNone(
            topic_fetchers.get_topic_by_id(topic_id, strict=False))
        self.logout()

    def test_topic_creation_with_invalid_name(self):
        """Checks the 400 error for an over-long topic name."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        payload = {
            'name': 'Topic name that is too long for validation.',
            'abbreviatedName': 'name-two'
        }
        self.post_json(
            self.url, payload, csrf_token=csrf_token, expected_status_int=400)
        self.logout()

    def test_topic_creation_with_invalid_image(self):
        """Checks the 400 error for an oversized/invalid thumbnail."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        payload = {
            'name': 'Topic name',
            'abbreviatedName': 'name-three',
            'description': 'Topic description',
            'filename': 'cafe.flac',
            'thumbnailBgColor': '#C6DCDA',
            'url_fragment': 'name-three'
        }
        with python_utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'cafe.flac'),
            'rb', encoding=None) as f:
            raw_image = f.read()
        json_response = self.post_json(
            self.url, payload, csrf_token=csrf_token,
            upload_files=(('image', 'unused_filename', raw_image),),
            expected_status_int=400
        )
        self.assertEqual(
            json_response['error'], 'Image exceeds file size limit of 100 KB.')
class NewSkillHandlerTests(BaseTopicsAndSkillsDashboardTests):
    """Tests for the handler that creates new skills."""

    def setUp(self):
        super(NewSkillHandlerTests, self).setUp()
        self.url = feconf.NEW_SKILL_URL
        # Shared thumbnail bytes reused across the creation tests.
        with python_utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'img.png'), 'rb',
            encoding=None) as f:
            self.original_image_content = f.read()

    def test_skill_creation(self):
        """Checks a fully valid payload creates a skill."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        rubrics = [{
            'difficulty': constants.SKILL_DIFFICULTIES[0],
            'explanations': ['Explanation 1']
        }, {
            'difficulty': constants.SKILL_DIFFICULTIES[1],
            'explanations': ['Explanation 2']
        }, {
            'difficulty': constants.SKILL_DIFFICULTIES[2],
            'explanations': ['Explanation 3']
        }]
        json_response = self.post_json(
            self.url, {
                'description': 'Skill Description',
                'rubrics': rubrics,
                'explanation_dict': state_domain.SubtitledHtml(
                    '1', '<p>Explanation</p>').to_dict(),
                'thumbnail_filename': 'image.svg'
            },
            csrf_token=csrf_token,
            upload_files=((
                'image', 'unused_filename', self.original_image_content),))
        skill_id = json_response['skillId']
        self.assertEqual(len(skill_id), 12)
        self.assertIsNotNone(
            skill_fetchers.get_skill_by_id(skill_id, strict=False))
        self.logout()

    def test_skill_creation_in_invalid_topic(self):
        """Checks the 400 error when the linked topic id does not exist."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        payload = {
            'description': 'Skill Description',
            'linked_topic_ids': ['topic'],
            'rubrics': [],
            'explanation_dict': state_domain.SubtitledHtml(
                '1', '<p>Explanation</p>').to_dict(),
            'thumbnail_filename': 'image.svg'
        }
        json_response = self.post_json(
            self.url, payload, csrf_token=csrf_token,
            expected_status_int=400,
            upload_files=((
                'image', 'unused_filename', self.original_image_content),))
        self.assertEqual(json_response['status_code'], 400)
        self.logout()

    def test_skill_creation_with_invalid_images(self):
        """Checks errors for missing image data and oversized images."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        explanation_html = (
            '<oppia-noninteractive-image filepath-with-value='
            '""img.svg"" caption-with-value="""" '
            'alt-with-value=""Image""></oppia-noninteractive-image>'
        )
        rubrics = [{
            'difficulty': constants.SKILL_DIFFICULTIES[0],
            'explanations': ['Explanation 1']
        }, {
            'difficulty': constants.SKILL_DIFFICULTIES[1],
            'explanations': ['Explanation 2']
        }, {
            'difficulty': constants.SKILL_DIFFICULTIES[2],
            'explanations': ['Explanation 3']
        }]
        post_data = {
            'description': 'Skill Description',
            'rubrics': rubrics,
            'explanation_dict': state_domain.SubtitledHtml(
                '1', explanation_html).to_dict(),
            'thumbnail_filename': 'image.svg'
        }
        # No file uploaded for the referenced image -> 400.
        response_dict = self.post_json(
            self.url, post_data,
            csrf_token=csrf_token,
            expected_status_int=400)
        self.assertIn(
            'No image data provided for file with name img.svg',
            response_dict['error'])
        # Image above the 100 KB limit -> 400.
        large_image = '<svg><path d="%s" /></svg>' % (
            'M150 0 L75 200 L225 200 Z ' * 4000)
        response_dict = self.post_json(
            self.url, post_data,
            csrf_token=csrf_token,
            upload_files=(
                ('img.svg', 'img.svg', large_image),
            ), expected_status_int=400)
        self.assertIn(
            'Image exceeds file size limit of 100 KB.',
            response_dict['error'])
        self.logout()

    def test_skill_creation_with_valid_images(self):
        """Checks creation succeeds when all referenced images are uploaded."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        filename = 'img.png'
        filename_2 = 'img_2.png'
        explanation_html = (
            '<oppia-noninteractive-image filepath-with-value='
            '""img.png"" caption-with-value="""" '
            'alt-with-value=""Image""></oppia-noninteractive-image>'
        )
        explanation_html_2 = (
            '<oppia-noninteractive-image filepath-with-value='
            '""img_2.png"" caption-with-value="""" '
            'alt-with-value=""Image 2""></oppia-noninteractive-image>'
        )
        rubrics = [{
            'difficulty': constants.SKILL_DIFFICULTIES[0],
            'explanations': ['Explanation 1', explanation_html_2]
        }, {
            'difficulty': constants.SKILL_DIFFICULTIES[1],
            'explanations': ['Explanation 2']
        }, {
            'difficulty': constants.SKILL_DIFFICULTIES[2],
            'explanations': ['Explanation 3']
        }]
        post_data = {
            'description': 'Skill Description',
            'rubrics': rubrics,
            'explanation_dict': state_domain.SubtitledHtml(
                '1', explanation_html).to_dict(),
            'thumbnail_filename': 'image.svg'
        }
        with python_utils.open_file(
            os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
            'rb', encoding=None) as f:
            raw_image = f.read()
        json_response = self.post_json(
            self.url, post_data,
            csrf_token=csrf_token,
            upload_files=(
                (filename, filename, raw_image),
                (filename_2, filename_2, raw_image),)
        )
        skill_id = json_response['skillId']
        self.assertIsNotNone(
            skill_fetchers.get_skill_by_id(skill_id, strict=False))
        self.logout()

    def test_skill_creation_in_invalid_rubrics(self):
        """Checks the 400 error when rubrics is not a list."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        payload = {
            'description': 'Skill Description',
            'linked_topic_ids': [self.topic_id],
            'rubrics': 'invalid',
            'thumbnail_filename': 'image.svg'
        }
        json_response = self.post_json(
            self.url, payload, csrf_token=csrf_token,
            expected_status_int=400,
            upload_files=((
                'image', 'unused_filename', self.original_image_content),))
        self.assertEqual(json_response['status_code'], 400)
        self.logout()

    def test_skill_creation_in_invalid_explanation(self):
        """Checks the 400 error for malformed explanation dicts."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        payload = {
            'description': 'Skill Description',
            'linked_topic_ids': [self.topic_id],
            'rubrics': [],
            'explanation_dict': 'explanation',
            'thumbnail_filename': 'image.svg'
        }
        json_response = self.post_json(
            self.url, payload, csrf_token=csrf_token,
            expected_status_int=400,
            upload_files=((
                'image', 'unused_filename', self.original_image_content),))
        self.assertEqual(json_response['status_code'], 400)
        payload = {
            'description': 'Skill Description',
            'linked_topic_ids': [self.topic_id],
            'rubrics': [],
            'explanation_dict': {
                'explanation': 'Explanation'
            },
            'thumbnail_filename': 'image.svg'
        }
        json_response = self.post_json(
            self.url, payload, csrf_token=csrf_token,
            expected_status_int=400)
        self.assertEqual(json_response['status_code'], 400)
        self.logout()

    def test_skill_creation_in_valid_topic(self):
        """Checks creation linked to an existing topic updates that topic."""
        self.login(self.ADMIN_EMAIL)
        csrf_token = self.get_new_csrf_token()
        rubrics = [{
            'difficulty': constants.SKILL_DIFFICULTIES[0],
            'explanations': ['Explanation 1']
        }, {
            'difficulty': constants.SKILL_DIFFICULTIES[1],
            'explanations': ['Explanation 2']
        }, {
            'difficulty': constants.SKILL_DIFFICULTIES[2],
            'explanations': ['Explanation 3']
        }]
        payload = {
            'description': 'Skill Description',
            'linked_topic_ids': [self.topic_id],
            'rubrics': rubrics,
            'explanation_dict': state_domain.SubtitledHtml(
                '1', '<p>Explanation</p>').to_dict(),
            'thumbnail_filename': 'image.svg'
        }
        json_response = self.post_json(
            self.url, payload, csrf_token=csrf_token,
            upload_files=((
                'image', 'unused_filename', self.original_image_content),))
        skill_id = json_response['skillId']
        self.assertEqual(len(skill_id), 12)
        self.assertIsNotNone(
            skill_fetchers.get_skill_by_id(skill_id, strict=False))
        topic = topic_fetchers.get_topic_by_id(self.topic_id)
        self.assertEqual(
            topic.uncategorized_skill_ids,
            [self.linked_skill_id, skill_id])
        self.logout()
class MergeSkillHandlerTests(BaseTopicsAndSkillsDashboardTests):
    """Tests for the handler that merges one skill into another."""

    def setUp(self):
        super(MergeSkillHandlerTests, self).setUp()
        self.url = feconf.MERGE_SKILLS_URL
        # A question linked to the seeded skill, so merging has links to move.
        self.question_id = question_services.get_new_question_id()
        self.question = self.save_new_question(
            self.question_id, self.admin_id,
            self._create_valid_question_data('ABC'), [self.linked_skill_id])
        question_services.create_new_question_skill_link(
            self.admin_id, self.question_id, self.linked_skill_id, 0.5)

    def test_merge_skill(self):
        """Checks question links migrate from the old to the new skill."""
        self.login(self.ADMIN_EMAIL)
        old_skill_id = self.linked_skill_id
        new_skill_id = skill_services.get_new_skill_id()
        self.save_new_skill(
            new_skill_id, self.admin_id, description='Skill Description')
        old_links = question_services.get_question_skill_links_of_skill(
            old_skill_id, 'Old Description')
        new_links = question_services.get_question_skill_links_of_skill(
            new_skill_id, 'Skill Description')
        self.assertEqual(len(old_links), 1)
        self.assertEqual(old_links[0].skill_id, old_skill_id)
        self.assertEqual(len(new_links), 0)
        csrf_token = self.get_new_csrf_token()
        payload = {
            'old_skill_id': old_skill_id,
            'new_skill_id': new_skill_id
        }
        json_response = self.post_json(
            self.url, payload, csrf_token=csrf_token)
        old_links = question_services.get_question_skill_links_of_skill(
            old_skill_id, 'Old Description')
        new_links = question_services.get_question_skill_links_of_skill(
            new_skill_id, 'Skill Description')
        self.assertEqual(json_response['merged_into_skill'], new_skill_id)
        self.assertEqual(len(old_links), 0)
        self.assertEqual(len(new_links), 1)
        self.assertEqual(new_links[0].skill_id, new_skill_id)
        self.logout()

    def test_merge_skill_fails_when_new_skill_id_is_invalid(self):
        """Checks the 404 error for a nonexistent target skill."""
        self.login(self.ADMIN_EMAIL)
        old_skill_id = self.linked_skill_id
        payload = {
            'old_skill_id': old_skill_id,
            'new_skill_id': 'invalid_new_skill_id'
        }
        csrf_token = self.get_new_csrf_token()
        self.post_json(
            self.url, payload, csrf_token=csrf_token,
            expected_status_int=404)
        self.logout()

    def test_merge_skill_fails_when_old_skill_id_is_invalid(self):
        """Checks the 404 error for a nonexistent source skill."""
        self.login(self.ADMIN_EMAIL)
        new_skill_id = skill_services.get_new_skill_id()
        self.save_new_skill(
            new_skill_id, self.admin_id, description='Skill Description')
        payload = {
            'old_skill_id': 'invalid_old_skill_id',
            'new_skill_id': new_skill_id
        }
        csrf_token = self.get_new_csrf_token()
        self.post_json(
            self.url, payload, csrf_token=csrf_token,
            expected_status_int=404)
        self.logout()
| apache-2.0 |
rvm-segfault/edx | python_for_data_sci_dse200x/week2/word_cloud.py | 719 |
import collections

# Characters to strip from the edges of words. Completes the intent of the
# original script, which declared a punctuation set but never used it and
# only removed '.', ',', '"' and a curly quote by hand.
PUNCTUATION = '.,:()/\\"-\u201c\u201d'


def count_words(text):
    """Return a Counter of lower-cased words with edge punctuation stripped.

    Empty strings left over after stripping (e.g. a bare "-") are skipped,
    fixing the original behavior of counting empty tokens.
    """
    counts = collections.Counter()
    for word in text.lower().split():
        word = word.strip(PUNCTUATION)
        if word:
            counts[word] += 1
    return counts


def main():
    """Print the ten most common words of the book text file."""
    # `with` guarantees the file handle is closed (the original leaked it).
    with open('98-0.txt') as f:
        counts = count_words(f.read())
    for word, count in counts.most_common(10):
        print(word, ": ", count)


if __name__ == '__main__':
    main()
| apache-2.0 |
qtzheng/SIMP | utils/debug.go | 9787 | package utils
import (
"bytes"
"fmt"
"log"
"reflect"
"runtime"
)
// Byte-slice constants shared by the dump helpers below, pre-built once to
// avoid re-allocating them on every call.
var (
	dunno     = []byte("???") // placeholder when a function name cannot be resolved
	centerDot = []byte("·")   // center dot the Go runtime uses in symbol names
	dot       = []byte(".")
	lbr       = []byte("{")
	lbrn      = []byte("{\n")
	com       = []byte(",")
	comn      = []byte(",\n")
	rbr       = []byte("}")
	comnrbr   = []byte(",\n}")
	// NOTE(review): lbr, lbrn, com, comn, rbr and comnrbr are not referenced
	// anywhere in this file's visible code -- confirm external use in the
	// package before removing.
)
// pointerInfo is a node in a singly linked list tracking every pointer
// already printed, so shared or cyclic structures are rendered once and
// later occurrences are printed as a bare address.
type pointerInfo struct {
	prev *pointerInfo // previously recorded pointer (list head is newest)
	n    int          // 1-based ordinal assigned in printPointerInfo
	addr uintptr      // address of the pointed-to value
	pos  int          // byte offset in the output buffer where it was first printed
	used []int        // buffer offsets of later references to this address
}
// Display dumps the given name/value pairs to the standard logger.
func Display(data ...interface{}) {
	_ = display(true, data...)
}
// GetDisplayString returns the formatted dump of the given name/value pairs
// without writing it to the logger.
func GetDisplayString(data ...interface{}) string {
	var formatted = display(false, data...)
	return formatted
}
// display renders data as alternating name/value pairs, prefixed with the
// calling site's function, file and line. When displayed is true the result
// is also written to the standard logger. Even-indexed elements of data must
// be strings (the variable names).
func display(displayed bool, data ...interface{}) string {
	pc, file, line, ok := runtime.Caller(2)
	if !ok {
		return ""
	}

	out := new(bytes.Buffer)
	fmt.Fprintf(out, "[Debug] at %s() [%s:%d]\n", function(pc), file, line)
	fmt.Fprintf(out, "\n[Variables]\n")

	for i := 0; i < len(data); i += 2 {
		formatted := fomateinfo(len(data[i].(string))+3, data[i+1])
		fmt.Fprintf(out, "%s = %s", data[i], formatted)
	}

	if displayed {
		log.Print(out)
	}
	return out.String()
}
// fomateinfo dumps every value in data and returns the formatted bytes.
// When more than one value is given, the dumps are wrapped in a bracketed,
// one-per-line list.
// NOTE(review): headlen is currently unused in this function; it mirrors the
// head offset used by printPointerInfo -- confirm before removing.
func fomateinfo(headlen int, data ...interface{}) []byte {
	out := new(bytes.Buffer)
	multi := len(data) > 1

	if multi {
		fmt.Fprint(out, " ")
		fmt.Fprint(out, "[")
		fmt.Fprintln(out)
	}

	for k, v := range data {
		line := new(bytes.Buffer)
		var pointers *pointerInfo
		interfaces := make([]reflect.Value, 0, 10)

		printKeyValue(line, reflect.ValueOf(v), &pointers, &interfaces, nil, true, " ", 1)

		if k != len(data)-1 {
			fmt.Fprint(line, ", ")
		}
		fmt.Fprintln(line)
		out.Write(line.Bytes())
	}

	if multi {
		fmt.Fprintln(out)
		fmt.Fprint(out, " ")
		fmt.Fprint(out, "]")
	}
	return out.Bytes()
}
// isSimpleType reports whether val can be rendered inline on a single line
// (scalars, strings, channels, unsafe pointers, and already-seen interface
// or pointer values). pointers holds the addresses printed so far and
// interfaces the interface values printed so far; both make repeated
// references count as simple.
func isSimpleType(val reflect.Value, kind reflect.Kind, pointers **pointerInfo, interfaces *[]reflect.Value) bool {
	switch kind {
	case reflect.Bool:
		return true
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return true
	case reflect.Uint8, reflect.Uint16, reflect.Uint, reflect.Uint32, reflect.Uint64:
		return true
	case reflect.Float32, reflect.Float64:
		return true
	case reflect.Complex64, reflect.Complex128:
		return true
	case reflect.String:
		return true
	case reflect.Chan:
		return true
	case reflect.Invalid:
		return true
	case reflect.UnsafePointer:
		// printKeyValue renders unsafe pointers inline as an address, so
		// they are always simple. (Bug fix: this case previously fell
		// through to val.Elem(), which panics for unsafe pointers --
		// reflect.Value.Elem is only valid for Interface and Ptr kinds.)
		return true
	case reflect.Interface:
		for _, in := range *interfaces {
			if reflect.DeepEqual(in, val) {
				return true
			}
		}
		return false
	case reflect.Ptr:
		// The nil/element/deduplication logic belongs to ordinary pointers,
		// matching the reflect.Ptr branch in printKeyValue.
		if val.IsNil() {
			return true
		}

		var elem = val.Elem()
		if isSimpleType(elem, elem.Kind(), pointers, interfaces) {
			return true
		}

		// A pointer that was already printed is later rendered as a bare
		// address, which fits on one line.
		var addr = val.Elem().UnsafeAddr()
		for p := *pointers; p != nil; p = p.prev {
			if addr == p.addr {
				return true
			}
		}
		return false
	}
	return false
}
// printKeyValue recursively writes a human-readable dump of val into buf.
//
//   - pointers accumulates every pointer printed so far, so shared or cyclic
//     references are printed once and later rendered as a bare address.
//   - interfaces plays the same role for interface values.
//   - structFilter, when non-nil, is called with (struct type name, field
//     name) and may return true to print "ignore" instead of the field value.
//   - formatOutput selects multi-line output indented with indent; level is
//     the current nesting depth.
func printKeyValue(buf *bytes.Buffer, val reflect.Value, pointers **pointerInfo, interfaces *[]reflect.Value, structFilter func(string, string) bool, formatOutput bool, indent string, level int) {
	var t = val.Kind()
	switch t {
	case reflect.Bool:
		fmt.Fprint(buf, val.Bool())
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		fmt.Fprint(buf, val.Int())
	case reflect.Uint8, reflect.Uint16, reflect.Uint, reflect.Uint32, reflect.Uint64:
		fmt.Fprint(buf, val.Uint())
	case reflect.Float32, reflect.Float64:
		fmt.Fprint(buf, val.Float())
	case reflect.Complex64, reflect.Complex128:
		fmt.Fprint(buf, val.Complex())
	case reflect.UnsafePointer:
		fmt.Fprintf(buf, "unsafe.Pointer(0x%X)", val.Pointer())
	case reflect.Ptr:
		if val.IsNil() {
			fmt.Fprint(buf, "nil")
			return
		}
		// If this address was printed before, emit just the address and
		// record where it was referenced (printPointerInfo uses these
		// offsets to draw connector lines).
		var addr = val.Elem().UnsafeAddr()
		for p := *pointers; p != nil; p = p.prev {
			if addr == p.addr {
				p.used = append(p.used, buf.Len())
				fmt.Fprintf(buf, "0x%X", addr)
				return
			}
		}
		// First occurrence of this address: register it, then print the
		// pointed-to value prefixed with "&".
		*pointers = &pointerInfo{
			prev: *pointers,
			addr: addr,
			pos:  buf.Len(),
			used: make([]int, 0),
		}
		fmt.Fprint(buf, "&")
		printKeyValue(buf, val.Elem(), pointers, interfaces, structFilter, formatOutput, indent, level)
	case reflect.String:
		fmt.Fprint(buf, "\"", val.String(), "\"")
	case reflect.Interface:
		var value = val.Elem()
		if !value.IsValid() {
			fmt.Fprint(buf, "nil")
		} else {
			// Guard against cycles through interface values.
			for _, in := range *interfaces {
				if reflect.DeepEqual(in, val) {
					fmt.Fprint(buf, "repeat")
					return
				}
			}
			*interfaces = append(*interfaces, val)
			printKeyValue(buf, value, pointers, interfaces, structFilter, formatOutput, indent, level+1)
		}
	case reflect.Struct:
		var t = val.Type()
		fmt.Fprint(buf, t)
		fmt.Fprint(buf, "{")
		// One "name: value," entry per exported-or-not field, optionally
		// suppressed by structFilter.
		for i := 0; i < val.NumField(); i++ {
			if formatOutput {
				fmt.Fprintln(buf)
			} else {
				fmt.Fprint(buf, " ")
			}
			var name = t.Field(i).Name
			if formatOutput {
				for ind := 0; ind < level; ind++ {
					fmt.Fprint(buf, indent)
				}
			}
			fmt.Fprint(buf, name)
			fmt.Fprint(buf, ": ")
			if structFilter != nil && structFilter(t.String(), name) {
				fmt.Fprint(buf, "ignore")
			} else {
				printKeyValue(buf, val.Field(i), pointers, interfaces, structFilter, formatOutput, indent, level+1)
			}
			fmt.Fprint(buf, ",")
		}
		if formatOutput {
			fmt.Fprintln(buf)
			for ind := 0; ind < level-1; ind++ {
				fmt.Fprint(buf, indent)
			}
		} else {
			fmt.Fprint(buf, " ")
		}
		fmt.Fprint(buf, "}")
	case reflect.Array, reflect.Slice:
		fmt.Fprint(buf, val.Type())
		fmt.Fprint(buf, "{")
		// Elements that are all "simple" stay on one line; otherwise each
		// non-simple element goes on its own indented line.
		var allSimple = true
		for i := 0; i < val.Len(); i++ {
			var elem = val.Index(i)
			var isSimple = isSimpleType(elem, elem.Kind(), pointers, interfaces)
			if !isSimple {
				allSimple = false
			}
			if formatOutput && !isSimple {
				fmt.Fprintln(buf)
			} else {
				fmt.Fprint(buf, " ")
			}
			if formatOutput && !isSimple {
				for ind := 0; ind < level; ind++ {
					fmt.Fprint(buf, indent)
				}
			}
			printKeyValue(buf, elem, pointers, interfaces, structFilter, formatOutput, indent, level+1)
			if i != val.Len()-1 || !allSimple {
				fmt.Fprint(buf, ",")
			}
		}
		if formatOutput && !allSimple {
			fmt.Fprintln(buf)
			for ind := 0; ind < level-1; ind++ {
				fmt.Fprint(buf, indent)
			}
		} else {
			fmt.Fprint(buf, " ")
		}
		fmt.Fprint(buf, "}")
	case reflect.Map:
		var t = val.Type()
		var keys = val.MapKeys()
		fmt.Fprint(buf, t)
		fmt.Fprint(buf, "{")
		var allSimple = true
		for i := 0; i < len(keys); i++ {
			var elem = val.MapIndex(keys[i])
			var isSimple = isSimpleType(elem, elem.Kind(), pointers, interfaces)
			if !isSimple {
				allSimple = false
			}
			if formatOutput && !isSimple {
				fmt.Fprintln(buf)
			} else {
				fmt.Fprint(buf, " ")
			}
			if formatOutput && !isSimple {
				// NOTE(review): map entries are indented one level deeper
				// (<= level) than slice elements (< level) above -- confirm
				// whether this asymmetry is intentional.
				for ind := 0; ind <= level; ind++ {
					fmt.Fprint(buf, indent)
				}
			}
			printKeyValue(buf, keys[i], pointers, interfaces, structFilter, formatOutput, indent, level+1)
			fmt.Fprint(buf, ": ")
			printKeyValue(buf, elem, pointers, interfaces, structFilter, formatOutput, indent, level+1)
			if i != val.Len()-1 || !allSimple {
				fmt.Fprint(buf, ",")
			}
		}
		if formatOutput && !allSimple {
			fmt.Fprintln(buf)
			for ind := 0; ind < level-1; ind++ {
				fmt.Fprint(buf, indent)
			}
		} else {
			fmt.Fprint(buf, " ")
		}
		fmt.Fprint(buf, "}")
	case reflect.Chan:
		// Channels are printed as their type only; contents cannot be
		// inspected without consuming them.
		fmt.Fprint(buf, val.Type())
	case reflect.Invalid:
		fmt.Fprint(buf, "invalid")
	default:
		fmt.Fprint(buf, "unknow") // (sic) runtime output string, kept as-is
	}
}
// printPointerInfo appends box-drawing connector lines underneath the dump
// already in buf, linking each pointer's definition offset (pointerInfo.pos)
// to every later offset where the same address was referenced
// (pointerInfo.used). headlen is the number of bytes printed before the dump
// itself, so column offsets line up. One row is emitted per pointer ordinal.
func printPointerInfo(buf *bytes.Buffer, headlen int, pointers *pointerInfo) {
	var anyused = false
	var pointerNum = 0

	// Assign 1-based ordinals (newest pointer first in the list) and note
	// whether any pointer was referenced at all; if none were, no rows are
	// drawn.
	for p := pointers; p != nil; p = p.prev {
		if len(p.used) > 0 {
			anyused = true
		}
		pointerNum += 1
		p.n = pointerNum
	}

	if anyused {
		// One rune row per pointer (plus one), each as wide as the dump,
		// pre-filled with spaces.
		var pointerBufs = make([][]rune, pointerNum+1)
		for i := 0; i < len(pointerBufs); i++ {
			var pointerBuf = make([]rune, buf.Len()+headlen)
			for j := 0; j < len(pointerBuf); j++ {
				pointerBuf[j] = ' '
			}
			pointerBufs[i] = pointerBuf
		}
		for pn := 0; pn <= pointerNum; pn++ {
			for p := pointers; p != nil; p = p.prev {
				if len(p.used) > 0 && p.n >= pn {
					if pn == p.n {
						// This row belongs to pointer p: corner at its
						// definition, a hook at each use, joined by a
						// horizontal line.
						pointerBufs[pn][p.pos+headlen] = '└'
						var maxpos = 0
						for i, pos := range p.used {
							if i < len(p.used)-1 {
								pointerBufs[pn][pos+headlen] = '┴'
							} else {
								pointerBufs[pn][pos+headlen] = '┘'
							}
							maxpos = pos
						}
						// Fill the span between definition and last use,
						// without overwriting glyphs already placed.
						for i := 0; i < maxpos-p.pos-1; i++ {
							if pointerBufs[pn][i+p.pos+headlen+1] == ' ' {
								pointerBufs[pn][i+p.pos+headlen+1] = '─'
							}
						}
					} else {
						// Rows above p's own row carry vertical bars passing
						// through, crossing (┼) where a line already exists.
						pointerBufs[pn][p.pos+headlen] = '│'
						for _, pos := range p.used {
							if pointerBufs[pn][pos+headlen] == ' ' {
								pointerBufs[pn][pos+headlen] = '│'
							} else {
								pointerBufs[pn][pos+headlen] = '┼'
							}
						}
					}
				}
			}
			buf.WriteString(string(pointerBufs[pn]) + "\n")
		}
	}
}
// stack returns the formatted call stack starting skip frames above this
// call, one "at func() [file:line]" entry per line, each prefixed with
// indent.
func stack(skip int, indent string) []byte {
	out := new(bytes.Buffer)
	for i := skip; ; i++ {
		pc, file, line, ok := runtime.Caller(i)
		if !ok {
			break
		}
		out.WriteString(indent)
		fmt.Fprintf(out, "at %s() [%s:%d]\n", function(pc), file, line)
	}
	return out.Bytes()
}
// function returns the name of the function containing the PC if possible,
// with the package path and center dots stripped.
func function(pc uintptr) []byte {
	fn := runtime.FuncForPC(pc)
	if fn == nil {
		return dunno
	}
	name := []byte(fn.Name())
	// The name includes the path name to the package, which is unnecessary
	// since the file name is already included. Plus, it has center dots.
	// That is, we see
	//	runtime/debug.*T·ptrmethod
	// and want
	//	*T.ptrmethod
	// Bug fix: strip everything up to the last slash first, so dots inside
	// path elements (e.g. "github.com/user/pkg.Func") are not mistaken for
	// the package/function separator.
	if lastslash := bytes.LastIndex(name, []byte("/")); lastslash >= 0 {
		name = name[lastslash+1:]
	}
	if period := bytes.Index(name, dot); period >= 0 {
		name = name[period+1:]
	}
	name = bytes.Replace(name, centerDot, dot, -1)
	return name
}
| apache-2.0 |
kingargyle/turmeric-bot | components/camel-irc/src/main/java/org/apache/camel/component/irc/IrcComponent.java | 5422 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.irc;
import java.util.HashMap;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.impl.DefaultComponent;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.schwering.irc.lib.IRCConnection;
import org.schwering.irc.lib.IRCEventListener;
import org.schwering.irc.lib.ssl.SSLIRCConnection;
/**
 * Defines the <a href="http://camel.apache.org/irc.html">IRC Component</a>
 *
 * @version $Revision$
 */
public class IrcComponent extends DefaultComponent {
    private static final transient Log LOG = LogFactory.getLog(IrcComponent.class);

    /**
     * Cache of live IRC connections, keyed by {@link IrcConfiguration#getCacheKey()}.
     * Guarded by {@code this} (see {@link #getIRCConnection(IrcConfiguration)} and
     * {@link #doStop()}).
     */
    private final Map<String, IRCConnection> connectionCache = new HashMap<String, IRCConnection>();

    /**
     * Creates an {@link IrcEndpoint} for the given URI; every endpoint gets
     * its own {@link IrcConfiguration}.
     */
    public IrcEndpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        // every endpoint gets it's own configuration
        IrcConfiguration config = new IrcConfiguration();
        config.configure(uri);

        IrcEndpoint endpoint = new IrcEndpoint(uri, this, config);
        setProperties(endpoint.getConfiguration(), parameters);
        return endpoint;
    }

    /**
     * Returns the cached connection for the given configuration, creating and
     * connecting a new one on first use.
     */
    public synchronized IRCConnection getIRCConnection(IrcConfiguration configuration) {
        final IRCConnection connection;
        if (connectionCache.containsKey(configuration.getCacheKey())) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Returning Cached Connection to " + configuration.getHostname() + ":" + configuration.getNickname());
            }
            connection = connectionCache.get(configuration.getCacheKey());
        } else {
            connection = createConnection(configuration);
            connectionCache.put(configuration.getCacheKey(), connection);
        }
        return connection;
    }

    /**
     * Creates and connects a new plain or SSL connection according to the
     * configuration.
     *
     * @throws RuntimeCamelException if the connect attempt fails
     */
    protected IRCConnection createConnection(IrcConfiguration configuration) {
        IRCConnection conn = null;
        IRCEventListener ircLogger;

        if (configuration.getUsingSSL()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Creating SSL Connection to " + configuration.getHostname() + " destination(s): " + configuration.getListOfChannels()
                        + " nick: " + configuration.getNickname() + " user: " + configuration.getUsername());
            }
            SSLIRCConnection sconn = new SSLIRCConnection(configuration.getHostname(), configuration.getPorts(), configuration.getPassword(),
                    configuration.getNickname(), configuration.getUsername(), configuration.getRealname());
            sconn.addTrustManager(configuration.getTrustManager());
            conn = sconn;
        } else {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Creating Connection to " + configuration.getHostname() + " destination(s): " + configuration.getListOfChannels()
                        + " nick: " + configuration.getNickname() + " user: " + configuration.getUsername());
            }
            conn = new IRCConnection(configuration.getHostname(), configuration.getPorts(), configuration.getPassword(),
                    configuration.getNickname(), configuration.getUsername(), configuration.getRealname());
        }

        // explicit encoding instead of the platform default
        conn.setEncoding("UTF-8");
        conn.setColors(configuration.isColors());
        conn.setPong(true);

        if (LOG.isDebugEnabled()) {
            LOG.debug("Adding IRC event logging listener");
            ircLogger = createIrcLogger(configuration.getHostname());
            conn.addIRCEventListener(ircLogger);
        }

        try {
            conn.connect();
        } catch (Exception e) {
            throw new RuntimeCamelException(e);
        }
        return conn;
    }

    /**
     * Quits and closes the given connection, logging (not propagating) any
     * error so shutdown can proceed.
     */
    public void closeConnection(String key, IRCConnection connection) {
        try {
            connection.doQuit();
            connection.close();
        } catch (Exception e) {
            LOG.warn("Error during closing connection.", e);
        }
    }

    @Override
    protected void doStop() throws Exception {
        // lets use a copy so we can clear the connections eagerly in case of exceptions.
        // Snapshot and clear under the same lock used by getIRCConnection() so a
        // concurrent caller cannot be handed a connection we are about to close.
        Map<String, IRCConnection> map;
        synchronized (this) {
            map = new HashMap<String, IRCConnection>(connectionCache);
            connectionCache.clear();
        }
        for (Map.Entry<String, IRCConnection> entry : map.entrySet()) {
            closeConnection(entry.getKey(), entry.getValue());
        }
        super.doStop();
    }

    /** Factory hook for the debug-level IRC event listener. */
    protected IRCEventListener createIrcLogger(String hostname) {
        return new IrcLogger(LOG, hostname);
    }
}
| apache-2.0 |
Ccook/conniption4s | src/main/scala/com/celexus/conniption/model/BuyingPower.scala | 1808 | /**
* Copyright 2014 Cameron Cook
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.celexus.conniption.model
import scala.xml.NodeSeq
/**
 * A model representing an Account's buying power generalizations,
 * parsed from the XML body of the brokerage API response.
 * @param xml the parsed XML response body
 * @param res the raw HTTP response it came from
 */
class BuyingPower(xml: NodeSeq, res: org.scribe.model.Response) extends TKResponse(xml: NodeSeq, res: org.scribe.model.Response, format = "xml") {

  /**
   * @return Cash Available for Withdrawal
   */
  def cashAvailableForWithdrawal: Double = toDouble(ends("buyingpower/cashavailableforwithdrawal"))

  /**
   * @return Day Trading Buying Power
   */
  // Element name fixed: was "buyingpower/daytading" (typo), which can never
  // match the response element (compare "soddaytrading" below).
  def daytrading: Double = toDouble(ends("buyingpower/daytrading"))

  /**
   * @return Percentage of Equity available
   */
  def equityPercentage: Double = toDouble(ends("buyingpower/equitypercentage"))

  /**
   * @return Options buying power
   */
  def options: Double = toDouble(ends("buyingpower/options"))

  /**
   * @return day trading buying power at the start of the day
   */
  def dayStartDayTrading: Double = toDouble(ends("buyingpower/soddaytrading"))

  /**
   * @return stock buying power at the start of the day
   */
  // Element name fixed: was "buyingpower/sodsock" (typo for "sodstock",
  // i.e. start-of-day stock; compare "stock" below).
  def dayStartStock: Double = toDouble(ends("buyingpower/sodstock"))

  /**
   * @return stock buying power
   */
  def stock: Double = toDouble(ends("buyingpower/stock"))
}
| apache-2.0 |
manipopopo/tensorflow | tensorflow/python/estimator/estimator.py | 87146 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base Estimator class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
import os
import tempfile
import numpy as np
import six
from google.protobuf import message
from tensorflow.core.framework import summary_pb2
from tensorflow.python.client import session as tf_session
from tensorflow.python.eager import context
from tensorflow.python.estimator import model_fn as model_fn_lib
from tensorflow.python.estimator import run_config
from tensorflow.python.estimator import util as estimator_util
from tensorflow.python.estimator.export import export as export_helpers
from tensorflow.python.estimator.export import export_output
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import metrics as metrics_lib
from tensorflow.python.ops import resources
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import utils_impl as saved_model_utils
from tensorflow.python.summary import summary
from tensorflow.python.summary.writer import writer_cache
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training import device_setter
from tensorflow.python.training import distribute as distribute_lib
from tensorflow.python.training import evaluation
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver
from tensorflow.python.training import training
from tensorflow.python.training import training_util
from tensorflow.python.training import warm_starting_util
from tensorflow.python.util import compat
from tensorflow.python.util import compat_internal
from tensorflow.python.util import function_utils
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import estimator_export
# Argument names a `model_fn` is allowed to declare; used when validating
# model_fn signatures (see the `_verify_model_fn_args` call in
# `Estimator.__init__`).
_VALID_MODEL_FN_ARGS = set(
    ['features', 'labels', 'mode', 'params', 'self', 'config'])
@estimator_export('estimator.Estimator')
class Estimator(object):
"""Estimator class to train and evaluate TensorFlow models.
The `Estimator` object wraps a model which is specified by a `model_fn`,
which, given inputs and a number of other parameters, returns the ops
necessary to perform training, evaluation, or predictions.
All outputs (checkpoints, event files, etc.) are written to `model_dir`, or a
subdirectory thereof. If `model_dir` is not set, a temporary directory is
used.
The `config` argument can be passed `RunConfig` object containing information
about the execution environment. It is passed on to the `model_fn`, if the
`model_fn` has a parameter named "config" (and input functions in the same
manner). If the `config` parameter is not passed, it is instantiated by the
`Estimator`. Not passing config means that defaults useful for local execution
are used. `Estimator` makes config available to the model (for instance, to
allow specialization based on the number of workers available), and also uses
some of its fields to control internals, especially regarding checkpointing.
The `params` argument contains hyperparameters. It is passed to the
`model_fn`, if the `model_fn` has a parameter named "params", and to the input
functions in the same manner. `Estimator` only passes params along, it does
not inspect it. The structure of `params` is therefore entirely up to the
developer.
None of `Estimator`'s methods can be overridden in subclasses (its
constructor enforces this). Subclasses should use `model_fn` to configure
the base class, and may add methods implementing specialized functionality.
@compatibility(eager)
Calling methods of `Estimator` will work while eager execution is enabled.
However, the `model_fn` and `input_fn` is not executed eagerly, `Estimator`
will switch to graph model before calling all user-provided functions (incl.
hooks), so their code has to be compatible with graph mode execution. Note
that `input_fn` code using `tf.data` generally works in both graph and eager
modes.
@end_compatibility
"""
  def __init__(self, model_fn, model_dir=None, config=None, params=None,
               warm_start_from=None):
    """Constructs an `Estimator` instance.

    See @{$estimators} for more information. To warm-start an `Estimator`:

    ```python
    estimator = tf.estimator.DNNClassifier(
        feature_columns=[categorical_feature_a_emb, categorical_feature_b_emb],
        hidden_units=[1024, 512, 256],
        warm_start_from="/path/to/checkpoint/dir")
    ```

    For more details on warm-start configuration, see
    `tf.estimator.WarmStartSettings`.

    Args:
      model_fn: Model function. Follows the signature:

        * Args:
          * `features`: This is the first item returned from the `input_fn`
            passed to `train`, `evaluate`, and `predict`. This should be a
            single `Tensor` or `dict` of same.
          * `labels`: This is the second item returned from the `input_fn`
            passed to `train`, `evaluate`, and `predict`. This should be a
            single `Tensor` or `dict` of same (for multi-head models). If
            mode is `ModeKeys.PREDICT`, `labels=None` will be passed. If
            the `model_fn`'s signature does not accept `mode`, the
            `model_fn` must still be able to handle `labels=None`.
          * `mode`: Optional. Specifies if this training, evaluation or
            prediction. See `ModeKeys`.
          * `params`: Optional `dict` of hyperparameters. Will receive what
            is passed to Estimator in `params` parameter. This allows
            to configure Estimators from hyper parameter tuning.
          * `config`: Optional configuration object. Will receive what is passed
            to Estimator in `config` parameter, or the default `config`.
            Allows updating things in your `model_fn` based on
            configuration such as `num_ps_replicas`, or `model_dir`.

        * Returns:
          `EstimatorSpec`

      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator to
        continue training a previously saved model. If `PathLike` object, the
        path will be resolved. If `None`, the model_dir in `config` will be used
        if set. If both are set, they must be same. If both are `None`, a
        temporary directory will be used.
      config: Configuration object.
      params: `dict` of hyper parameters that will be passed into `model_fn`.
        Keys are names of parameters, values are basic python types.
      warm_start_from: Optional string filepath to a checkpoint or SavedModel to
        warm-start from, or a `tf.estimator.WarmStartSettings`
        object to fully configure warm-starting.  If the string
        filepath is provided instead of a `WarmStartSettings`,
        then all variables are warm-started, and it is assumed
        that vocabularies and Tensor names are unchanged.

    Raises:
      ValueError: parameters of `model_fn` don't match `params`.
      ValueError: if this is called via a subclass and if that class overrides
        a member of `Estimator`.
    """
    # Subclasses must not override Estimator members (see class docstring).
    Estimator._assert_members_are_not_overridden(self)

    # Reconciles `model_dir` with `config.model_dir` and fills in a default
    # session_config if one was not supplied.
    config = maybe_overwrite_model_dir_and_session_config(config, model_dir)
    self._config = config

    # The distribute field contains an instance of DistributionStrategy.
    self._train_distribution = self._config.train_distribute
    self._eval_distribution = self._config.eval_distribute
    # Model directory.
    self._model_dir = self._config.model_dir
    self._session_config = self._config.session_config
    logging.info('Using config: %s', str(vars(self._config)))

    # Ops are placed by the user-supplied device_fn if given, else by a
    # replica device setter derived from the run config.
    self._device_fn = (
        self._config.device_fn or _get_replica_device_setter(self._config))

    if model_fn is None:
      raise ValueError('model_fn must be provided to Estimator.')
    _verify_model_fn_args(model_fn, params)
    self._model_fn = model_fn
    # Deep-copied so later caller-side mutation cannot change the Estimator.
    self._params = copy.deepcopy(params or {})

    # pylint: disable=protected-access
    self._warm_start_settings = _get_default_warm_start_settings(
        warm_start_from)
    # pylint: enable=protected-access
  @property
  def model_dir(self):
    """Returns the directory where model parameters, graph, etc. are saved."""
    return self._model_dir
  @property
  def config(self):
    """Returns a deep copy of this Estimator's run config."""
    # Copied so callers cannot mutate the Estimator's own configuration.
    return copy.deepcopy(self._config)
  @property
  def params(self):
    """Returns a deep copy of the `params` dict passed at construction."""
    return copy.deepcopy(self._params)
  @property
  def model_fn(self):
    """Returns the model_fn which is bound to self.params.

    Returns:
      The model_fn with following signature:
        `def model_fn(features, labels, mode, config)`
    """

    # `params` is captured from this Estimator; callers supply only the
    # remaining four arguments.
    def public_model_fn(features, labels, mode, config):
      return self._call_model_fn(features, labels, mode, config)

    return public_model_fn
  # TODO(ispir): support a list of names
  def get_variable_value(self, name):
    """Returns value of the variable given by name.

    Args:
      name: string or a list of string, name of the tensor.

    Returns:
      Numpy array - value of the tensor.

    Raises:
      ValueError: If the Estimator has not produced a checkpoint yet.
    """
    _check_checkpoint_available(self.model_dir)
    # Checkpoint loading is a graph-mode API; wrap it so this also works
    # when eager execution is enabled.
    with context.graph_mode():
      return training.load_variable(self.model_dir, name)
def get_variable_names(self):
"""Returns list of all variable names in this model.
Returns:
List of names.
Raises:
ValueError: If the Estimator has not produced a checkpoint yet.
"""
_check_checkpoint_available(self.model_dir)
with context.graph_mode():
return [name for name, _ in training.list_variables(self.model_dir)]
def latest_checkpoint(self):
"""Finds the filename of latest saved checkpoint file in `model_dir`.
Returns:
The full path to the latest checkpoint or `None` if no checkpoint was
found.
"""
with context.graph_mode():
return checkpoint_management.latest_checkpoint(self.model_dir)
  def train(self,
            input_fn,
            hooks=None,
            steps=None,
            max_steps=None,
            saving_listeners=None):
    """Trains a model given training data input_fn.

    Args:
      input_fn: A function that provides input data for training as minibatches.
        See @{$premade_estimators#create_input_functions} for more
        information. The function should construct and return one of
        the following:

          * A 'tf.data.Dataset' object: Outputs of `Dataset` object must be a
            tuple (features, labels) with same constraints as below.
          * A tuple (features, labels): Where `features` is a `Tensor` or a
            dictionary of string feature name to `Tensor` and `labels` is a
            `Tensor` or a dictionary of string label name to `Tensor`. Both
            `features` and `labels` are consumed by `model_fn`. They should
            satisfy the expectation of `model_fn` from inputs.

      hooks: List of `SessionRunHook` subclass instances. Used for callbacks
        inside the training loop.
      steps: Number of steps for which to train model. If `None`, train forever
        or train until input_fn generates the `OutOfRange` error or
        `StopIteration` exception. 'steps' works incrementally. If you call two
        times train(steps=10) then training occurs in total 20 steps. If
        `OutOfRange` or `StopIteration` occurs in the middle, training stops
        before 20 steps. If you don't want to have incremental behavior please
        set `max_steps` instead. If set, `max_steps` must be `None`.
      max_steps: Number of total steps for which to train model. If `None`,
        train forever or train until input_fn generates the `OutOfRange` error
        or `StopIteration` exception. If set, `steps` must be `None`. If
        `OutOfRange` or `StopIteration` occurs in the middle, training stops
        before `max_steps` steps.
        Two calls to `train(steps=100)` means 200 training
        iterations. On the other hand, two calls to `train(max_steps=100)` means
        that the second call will not do any iteration since first call did
        all 100 steps.
      saving_listeners: list of `CheckpointSaverListener` objects. Used for
        callbacks that run immediately before or after checkpoint savings.

    Returns:
      `self`, for chaining.

    Raises:
      ValueError: If both `steps` and `max_steps` are not `None`.
      ValueError: If either `steps` or `max_steps` is <= 0.
    """
    with context.graph_mode():
      # `steps` and `max_steps` are mutually exclusive (see docstring).
      if (steps is not None) and (max_steps is not None):
        raise ValueError('Can not provide both steps and max_steps.')
      if steps is not None and steps <= 0:
        raise ValueError('Must specify steps > 0, given: {}'.format(steps))
      if max_steps is not None and max_steps <= 0:
        raise ValueError(
            'Must specify max_steps > 0, given: {}'.format(max_steps))

      if max_steps is not None:
        # Nothing to do if a checkpoint already reached max_steps.
        start_step = _load_global_step_from_checkpoint_dir(self._model_dir)
        if max_steps <= start_step:
          logging.info('Skipping training since max_steps has already saved.')
          return self

      hooks = _check_hooks_type(hooks)
      # Add the stopping hook(s) derived from steps/max_steps.
      hooks.extend(self._convert_train_steps_to_hooks(steps, max_steps))

      saving_listeners = _check_listeners_type(saving_listeners)
      loss = self._train_model(input_fn, hooks, saving_listeners)
      logging.info('Loss for final step: %s.', loss)
      return self
def _convert_train_steps_to_hooks(self, steps, max_steps):
"""Create hooks to run correct number of steps in training.
Args:
steps: number of steps to run during training.
max_steps: maximum number of steps to be run during training. It'll be
the maximum number of steps the model will train to after restoring
from checkpoint even across multiple estimator.train calls.
Returns:
List of hooks to be passed to the estimator.
"""
if steps is not None or max_steps is not None:
if self._train_distribution:
steps_per_run = getattr(self._train_distribution, 'steps_per_run', 1)
if steps_per_run > 1:
return [basic_session_run_hooks._MultiStepStopAtStepHook( # pylint: disable=protected-access
steps, max_steps, steps_per_run)]
return [training.StopAtStepHook(steps, max_steps)]
else:
return []
def eval_dir(self, name=None):
"""Shows directory name where evaluation metrics are dumped.
Args:
name: Name of the evaluation if user needs to run multiple evaluations on
different data sets, such as on training data vs test data. Metrics for
different evaluations are saved in separate folders, and appear
separately in tensorboard.
Returns:
A string which is the path of directory contains evaluation metrics.
"""
return os.path.join(self._model_dir, 'eval' if not name else
'eval_' + name)
  def evaluate(self, input_fn, steps=None, hooks=None, checkpoint_path=None,
               name=None):
    """Evaluates the model given evaluation data input_fn.

    For each step, calls `input_fn`, which returns one batch of data.
    Evaluates until:
    - `steps` batches are processed, or
    - `input_fn` raises an end-of-input exception (`OutOfRangeError` or
    `StopIteration`).

    Args:
      input_fn: A function that constructs the input data for evaluation.
        See @{$premade_estimators#create_input_functions} for more
        information. The function should construct and return one of
        the following:

          * A 'tf.data.Dataset' object: Outputs of `Dataset` object must be a
            tuple (features, labels) with same constraints as below.
          * A tuple (features, labels): Where `features` is a `Tensor` or a
            dictionary of string feature name to `Tensor` and `labels` is a
            `Tensor` or a dictionary of string label name to `Tensor`. Both
            `features` and `labels` are consumed by `model_fn`. They should
            satisfy the expectation of `model_fn` from inputs.

      steps: Number of steps for which to evaluate model. If `None`, evaluates
        until `input_fn` raises an end-of-input exception.
      hooks: List of `SessionRunHook` subclass instances. Used for callbacks
        inside the evaluation call.
      checkpoint_path: Path of a specific checkpoint to evaluate. If `None`, the
        latest checkpoint in `model_dir` is used.  If there are no checkpoints
        in `model_dir`, evaluation is run with newly initialized `Variables`
        instead of restored from checkpoint.
      name: Name of the evaluation if user needs to run multiple evaluations on
        different data sets, such as on training data vs test data. Metrics for
        different evaluations are saved in separate folders, and appear
        separately in tensorboard.

    Returns:
      A dict containing the evaluation metrics specified in `model_fn` keyed by
      name, as well as an entry `global_step` which contains the value of the
      global step for which this evaluation was performed.

    Raises:
      ValueError: If `steps <= 0`.
      ValueError: If no model has been trained, namely `model_dir`, or the
        given `checkpoint_path` is empty.
    """
    with context.graph_mode():
      hooks = _check_hooks_type(hooks)
      hooks.extend(self._convert_eval_steps_to_hooks(steps))

      # Check that model has been trained (if nothing has been set explicitly).
      if not checkpoint_path:
        latest_path = checkpoint_management.latest_checkpoint(self._model_dir)
        if not latest_path:
          # No checkpoint: proceed anyway, evaluating freshly initialized
          # variables (see docstring).
          logging.info('Could not find trained model in model_dir: {}, running '
                       'initialization to evaluate.'.format(self._model_dir))
        checkpoint_path = latest_path

      def _evaluate():
        # Builds the eval graph, runs it to completion, and writes metrics
        # under eval_dir(name).
        (scaffold, update_op, eval_dict, all_hooks) = (
            self._evaluate_build_graph(input_fn, hooks, checkpoint_path))
        return self._evaluate_run(
            checkpoint_path=checkpoint_path,
            scaffold=scaffold,
            update_op=update_op,
            eval_dict=eval_dict,
            all_hooks=all_hooks,
            output_dir=self.eval_dir(name))

      with ops.Graph().as_default():
        # TODO(priyag): Support distributed eval on TPUs.
        if (self._eval_distribution
            and self._eval_distribution.__class__.__name__ != 'TPUStrategy'):
          # Run evaluation inside the distribution strategy's scope.
          with self._eval_distribution.scope():
            return _evaluate()
        else:
          return _evaluate()
def _convert_eval_steps_to_hooks(self, steps):
if steps is None:
return []
if steps <= 0:
raise ValueError('Must specify steps > 0, given: {}'.format(steps))
return [evaluation._StopAfterNEvalsHook(num_evals=steps)] # pylint: disable=protected-access
  def predict(self,
              input_fn,
              predict_keys=None,
              hooks=None,
              checkpoint_path=None,
              yield_single_examples=True):
    """Yields predictions for given features.

    Args:
      input_fn: A function that constructs the features. Prediction continues
        until `input_fn` raises an end-of-input exception (`OutOfRangeError` or
        `StopIteration`).
        See @{$premade_estimators#create_input_functions} for more
        information. The function should construct and return one of
        the following:

          * A 'tf.data.Dataset' object: Outputs of `Dataset` object must have
            same constraints as below.
          * features: A `Tensor` or a dictionary of string feature name to
            `Tensor`. features are consumed by `model_fn`. They should satisfy
            the expectation of `model_fn` from inputs.
          * A tuple, in which case the first item is extracted as features.

      predict_keys: list of `str`, name of the keys to predict. It is used if
        the `EstimatorSpec.predictions` is a `dict`. If `predict_keys` is used
        then rest of the predictions will be filtered from the dictionary. If
        `None`, returns all.
      hooks: List of `SessionRunHook` subclass instances. Used for callbacks
        inside the prediction call.
      checkpoint_path: Path of a specific checkpoint to predict. If `None`, the
        latest checkpoint in `model_dir` is used. If there are no checkpoints
        in `model_dir`, prediction is run with newly initialized `Variables`
        instead of restored from checkpoint.
      yield_single_examples: If False, yield the whole batch as returned by the
        `model_fn` instead of decomposing the batch into individual elements.
        This is useful if `model_fn` returns some tensors whose first dimension
        is not equal to the batch size.

    Yields:
      Evaluated values of `predictions` tensors.

    Raises:
      ValueError: Could not find a trained model in `model_dir`.
      ValueError: If batch length of predictions is not the same and
        `yield_single_examples` is True.
      ValueError: If there is a conflict between `predict_keys` and
        `predictions`. For example if `predict_keys` is not `None` but
        `EstimatorSpec.predictions` is not a `dict`.
    """
    # Build and run the prediction graph in graph mode.
    with context.graph_mode():
      # Validate the user-provided hooks before extending them below.
      hooks = _check_hooks_type(hooks)
      # Check that model has been trained.
      if not checkpoint_path:
        checkpoint_path = checkpoint_management.latest_checkpoint(
            self._model_dir)
      # No checkpoint found: prediction proceeds with freshly initialized
      # variables rather than failing.
      if not checkpoint_path:
        logging.info('Could not find trained model in model_dir: {}, running '
                     'initialization to predict.'.format(self._model_dir))
      with ops.Graph().as_default() as g:
        random_seed.set_random_seed(self._config.tf_random_seed)
        self._create_and_assert_global_step(g)
        # Build the input pipeline and the model graph in PREDICT mode
        # (labels are None for prediction).
        features, input_hooks = self._get_features_from_input_fn(
            input_fn, model_fn_lib.ModeKeys.PREDICT)
        estimator_spec = self._call_model_fn(
            features, None, model_fn_lib.ModeKeys.PREDICT, self.config)
        # Call to warm_start has to be after model_fn is called.
        self._maybe_warm_start(checkpoint_path)
        # Optionally restrict the fetched predictions to `predict_keys`.
        predictions = self._extract_keys(
            estimator_spec.predictions, predict_keys)
        # Combine input-pipeline hooks, user hooks, and any hooks the
        # model_fn attached to its EstimatorSpec.
        all_hooks = list(input_hooks)
        all_hooks.extend(hooks)
        all_hooks.extend(list(estimator_spec.prediction_hooks or []))
        with training.MonitoredSession(
            session_creator=training.ChiefSessionCreator(
                checkpoint_filename_with_path=checkpoint_path,
                master=self._config.master,
                scaffold=estimator_spec.scaffold,
                config=self._session_config),
            hooks=all_hooks) as mon_sess:
          # Loop until a hook (e.g. end-of-input) asks the session to stop.
          while not mon_sess.should_stop():
            preds_evaluated = mon_sess.run(predictions)
            # Three emission modes: whole batch, per-example values of a
            # single tensor, or per-example dicts sliced along axis 0.
            if not yield_single_examples:
              yield preds_evaluated
            elif not isinstance(predictions, dict):
              for pred in preds_evaluated:
                yield pred
            else:
              for i in range(self._extract_batch_length(preds_evaluated)):
                yield {
                    key: value[i]
                    for key, value in six.iteritems(preds_evaluated)
                }
def _assert_members_are_not_overridden(self):
"""Asserts members of `Estimator` are not overridden."""
# TPUEstimator is special cased (owned by TF).
if self.__class__.__name__ == 'TPUEstimator':
return
allowed_overrides = set([
'_call_input_fn', '_call_model_fn',
'_convert_train_steps_to_hooks', '_convert_eval_steps_to_hooks',
'_create_global_step', '_create_and_assert_global_step',
'_tf_api_names', '_tf_api_names_v1', '_estimator_api_names',
'_estimator_api_names_v1', '_estimator_api_constants',
'_estimator_api_constants_v1',
'_validate_features_in_predict_input',
'_add_meta_graph_for_mode'
])
estimator_members = set([m for m in Estimator.__dict__.keys()
if not m.startswith('__')])
subclass_members = set(self.__class__.__dict__.keys())
common_members = estimator_members & subclass_members - allowed_overrides
overridden_members = [
m for m in common_members
if Estimator.__dict__[m] != self.__class__.__dict__[m]]
if overridden_members:
raise ValueError(
'Subclasses of Estimator cannot override members of Estimator. '
'{} does override {}'.format(self.__class__, overridden_members))
def export_savedmodel(
self, export_dir_base, serving_input_receiver_fn,
assets_extra=None,
as_text=False,
checkpoint_path=None,
strip_default_attrs=False):
# pylint: disable=line-too-long
"""Exports inference graph as a SavedModel into given dir.
For a detailed guide, see
@{$saved_model#using_savedmodel_with_estimators$Using SavedModel with Estimators}.
This method builds a new graph by first calling the
serving_input_receiver_fn to obtain feature `Tensor`s, and then calling
this `Estimator`'s model_fn to generate the model graph based on those
features. It restores the given checkpoint (or, lacking that, the most
recent checkpoint) into this graph in a fresh session. Finally it creates
a timestamped export directory below the given export_dir_base, and writes
a `SavedModel` into it containing a single `MetaGraphDef` saved from this
session.
The exported `MetaGraphDef` will provide one `SignatureDef` for each
element of the export_outputs dict returned from the model_fn, named using
the same keys. One of these keys is always
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY, indicating which
signature will be served when a serving request does not specify one.
For each signature, the outputs are provided by the corresponding
`ExportOutput`s, and the inputs are always the input receivers provided by
the serving_input_receiver_fn.
Extra assets may be written into the SavedModel via the assets_extra
argument. This should be a dict, where each key gives a destination path
(including the filename) relative to the assets.extra directory. The
corresponding value gives the full path of the source file to be copied.
For example, the simple case of copying a single file without renaming it
is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
Args:
export_dir_base: A string containing a directory in which to create
timestamped subdirectories containing exported SavedModels.
serving_input_receiver_fn: A function that takes no argument and
returns a `ServingInputReceiver` or `TensorServingInputReceiver`.
assets_extra: A dict specifying how to populate the assets.extra directory
within the exported SavedModel, or `None` if no extra assets are needed.
as_text: whether to write the SavedModel proto in text format.
checkpoint_path: The checkpoint path to export. If `None` (the default),
the most recent checkpoint found within the model directory is chosen.
strip_default_attrs: Boolean. If `True`, default-valued attributes will be
removed from the NodeDefs. For a detailed guide, see
[Stripping Default-Valued Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes).
Returns:
The string path to the exported directory.
Raises:
ValueError: if no serving_input_receiver_fn is provided, no export_outputs
are provided, or no checkpoint can be found.
"""
# pylint: enable=line-too-long
return self._export_saved_model_for_mode(
export_dir_base,
serving_input_receiver_fn,
assets_extra=assets_extra,
as_text=as_text,
checkpoint_path=checkpoint_path,
strip_default_attrs=strip_default_attrs,
mode=model_fn_lib.ModeKeys.PREDICT)
def _export_saved_model_for_mode(
self, export_dir_base, input_receiver_fn,
assets_extra=None,
as_text=False,
checkpoint_path=None,
strip_default_attrs=False,
mode=model_fn_lib.ModeKeys.PREDICT):
# pylint: disable=line-too-long
"""Exports a single train/eval/predict graph as a SavedModel.
This method is a wrapper for _export_all_saved_models, and wraps a raw
input_receiver_fn in a dictionary to pass in to that function.
See _export_all_saved_models for full docs.
See tf.contrib.estimator.export_saved_model_for_mode for the currently
exposed version of this function.
Args:
export_dir_base: A string containing a directory in which to create
timestamped subdirectories containing exported SavedModels.
input_receiver_fn: a function that takes no argument and
returns the appropriate subclass of `InputReceiver`.
assets_extra: A dict specifying how to populate the assets.extra directory
within the exported SavedModel, or `None` if no extra assets are needed.
as_text: whether to write the SavedModel proto in text format.
checkpoint_path: The checkpoint path to export. If `None` (the default),
the most recent checkpoint found within the model directory is chosen.
strip_default_attrs: Boolean. If `True`, default-valued attributes will be
removed from the NodeDefs. For a detailed guide, see
[Stripping Default-Valued Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes).
mode: tf.estimator.ModeKeys value indicating with mode will be exported.
Returns:
The string path to the exported directory.
Raises:
ValueError: if input_receiver_fn is None, no export_outputs
are provided, or no checkpoint can be found.
"""
# pylint: enable=line-too-long
if not input_receiver_fn:
raise ValueError('An input_receiver_fn must be defined.')
input_receiver_fn_map = {mode: input_receiver_fn}
return self._export_all_saved_models(
export_dir_base,
input_receiver_fn_map,
assets_extra=assets_extra,
as_text=as_text,
checkpoint_path=checkpoint_path,
strip_default_attrs=strip_default_attrs)
def _export_all_saved_models(
self, export_dir_base, input_receiver_fn_map,
assets_extra=None,
as_text=False,
checkpoint_path=None,
strip_default_attrs=False):
# pylint: disable=line-too-long
"""Exports a SavedModel containing MetaGraphDefs for each requested mode.
See tf.contrib.estimator.export_all_saved_models for the currently
exposed version of this function.
For each mode passed in via the input_receiver_fn_map,
this method builds a new graph by calling the input_receiver_fn to obtain
feature and label `Tensor`s. Next, this method calls the `Estimator`'s
model_fn in the passed mode to generate the model graph based on
those features and labels, and restores the given checkpoint
(or, lacking that, the most recent checkpoint) into the graph.
Only one of the modes is used for saving variables to the SavedModel
(order of preference: TRAIN, EVAL, then PREDICT), such that up to three
MetaGraphDefs are saved with a single set of variables in a single
SavedModel directory.
For the variables and MetaGraphDefs, a timestamped export directory below
export_dir_base, and writes a `SavedModel` into it containing
the `MetaGraphDef` for the given mode and its associated signatures.
For prediction, the exported `MetaGraphDef` will provide one `SignatureDef`
for each element of the export_outputs dict returned from the model_fn,
named using the same keys. One of these keys is always
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY, indicating which
signature will be served when a serving request does not specify one.
For each signature, the outputs are provided by the corresponding
`ExportOutput`s, and the inputs are always the input receivers provided by
the serving_input_receiver_fn.
For training and evaluation, the train_op is stored in an extra collection,
and loss, metrics, and predictions are included in a SignatureDef for the
mode in question.
Extra assets may be written into the SavedModel via the assets_extra
argument. This should be a dict, where each key gives a destination path
(including the filename) relative to the assets.extra directory. The
corresponding value gives the full path of the source file to be copied.
For example, the simple case of copying a single file without renaming it
is specified as `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
Args:
export_dir_base: A string containing a directory in which to create
timestamped subdirectories containing exported SavedModels.
input_receiver_fn_map: dict of tf.estimator.ModeKeys to input_receiver_fn
mappings, where the input_receiver_fn is a function that takes no
argument and returns the appropriate subclass of `InputReceiver`.
assets_extra: A dict specifying how to populate the assets.extra directory
within the exported SavedModel, or `None` if no extra assets are needed.
as_text: whether to write the SavedModel proto in text format.
checkpoint_path: The checkpoint path to export. If `None` (the default),
the most recent checkpoint found within the model directory is chosen.
strip_default_attrs: Boolean. If `True`, default-valued attributes will be
removed from the NodeDefs. For a detailed guide, see
[Stripping Default-Valued Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes).
Returns:
A dict of tf.estimator.ModeKeys value to string path for each exported
directory.
Raises:
ValueError: if any input_receiver_fn is None, no export_outputs
are provided, or no checkpoint can be found.
"""
# pylint: enable=line-too-long
# TODO(b/65561022): Consider allowing multiple input_receiver_fns per mode.
with context.graph_mode():
if not checkpoint_path:
# Locate the latest checkpoint
checkpoint_path = checkpoint_management.latest_checkpoint(
self._model_dir)
if not checkpoint_path:
raise ValueError("Couldn't find trained model at %s." % self._model_dir)
export_dir = export_helpers.get_timestamped_export_dir(export_dir_base)
temp_export_dir = export_helpers.get_temp_export_dir(export_dir)
builder = saved_model_builder.SavedModelBuilder(temp_export_dir)
save_variables = True
# Note that the order in which we run here matters, as the first
# mode we pass through will be used to save the variables. We run TRAIN
# first, as that is also the mode used for checkpoints, and therefore
# we are not likely to have vars in PREDICT that are not in the checkpoint
# created by TRAIN.
if input_receiver_fn_map.get(model_fn_lib.ModeKeys.TRAIN):
self._add_meta_graph_for_mode(
builder, input_receiver_fn_map, checkpoint_path,
strip_default_attrs, save_variables,
mode=model_fn_lib.ModeKeys.TRAIN)
save_variables = False
if input_receiver_fn_map.get(model_fn_lib.ModeKeys.EVAL):
self._add_meta_graph_for_mode(
builder, input_receiver_fn_map, checkpoint_path,
strip_default_attrs, save_variables,
mode=model_fn_lib.ModeKeys.EVAL)
save_variables = False
if input_receiver_fn_map.get(model_fn_lib.ModeKeys.PREDICT):
self._add_meta_graph_for_mode(
builder, input_receiver_fn_map, checkpoint_path,
strip_default_attrs, save_variables,
mode=model_fn_lib.ModeKeys.PREDICT)
save_variables = False
if save_variables:
raise ValueError('No valid modes for exporting found. Got {}.'.format(
input_receiver_fn_map.keys()))
builder.save(as_text)
# Add the extra assets
if assets_extra:
assets_extra_path = os.path.join(compat.as_bytes(temp_export_dir),
compat.as_bytes('assets.extra'))
for dest_relative, source in assets_extra.items():
dest_absolute = os.path.join(compat.as_bytes(assets_extra_path),
compat.as_bytes(dest_relative))
dest_path = os.path.dirname(dest_absolute)
gfile.MakeDirs(dest_path)
gfile.Copy(source, dest_absolute)
gfile.Rename(temp_export_dir, export_dir)
return export_dir
def _add_meta_graph_for_mode(self,
builder,
input_receiver_fn_map,
checkpoint_path,
strip_default_attrs,
save_variables=True,
mode=model_fn_lib.ModeKeys.PREDICT,
export_tags=None,
check_variables=True):
# pylint: disable=line-too-long
"""Loads variables and adds them along with a MetaGraphDef for saving.
Args:
builder: instance of SavedModelBuilder that will be used for saving.
input_receiver_fn_map: dict of tf.estimator.ModeKeys to input_receiver_fn
mappings, where the input_receiver_fn is a function that takes no
argument and returns the appropriate subclass of `InputReceiver`.
checkpoint_path: The checkpoint path to export. If `None` (the default),
the most recent checkpoint found within the model directory is chosen.
strip_default_attrs: Boolean. If `True`, default-valued attributes will be
removed from the NodeDefs. For a detailed guide, see
[Stripping Default-Valued Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes).
save_variables: bool, whether variables should be saved. If False, just
the MetaGraphDef will be saved. Note that save_variables should only be
True for the first call to this function, and the SavedModelBuilder will
raise an error if that is not the case.
mode: tf.estimator.ModeKeys value indicating which mode will be exported.
export_tags: The set of tags with which to save `MetaGraphDef`. If None,
a default set will be selected to matched the passed mode.
check_variables: bool, whether to check the checkpoint has all variables.
Raises:
ValueError: if `save_variables` is `True` and `check_variable` is `False`.
"""
# pylint: enable=line-too-long
if export_tags is None:
export_tags = model_fn_lib.EXPORT_TAG_MAP[mode]
input_receiver_fn = input_receiver_fn_map[mode]
with ops.Graph().as_default() as g:
self._create_and_assert_global_step(g)
random_seed.set_random_seed(self._config.tf_random_seed)
input_receiver = input_receiver_fn()
# Call the model_fn and collect the export_outputs.
estimator_spec = self._call_model_fn(
features=input_receiver.features,
labels=getattr(input_receiver, 'labels', None),
mode=mode,
config=self.config)
export_outputs = self._get_export_outputs_for_spec(estimator_spec)
# Build the SignatureDefs from receivers and all outputs
signature_def_map = export_helpers.build_all_signature_defs(
input_receiver.receiver_tensors,
export_outputs,
getattr(input_receiver, 'receiver_tensors_alternatives', None),
serving_only=(mode == model_fn_lib.ModeKeys.PREDICT))
with tf_session.Session(config=self._session_config) as session:
if estimator_spec.scaffold.local_init_op is not None:
local_init_op = estimator_spec.scaffold.local_init_op
else:
local_init_op = monitored_session.Scaffold.default_local_init_op()
# This saver will be used both for restoring variables now,
# and in saving out the metagraph below. This ensures that any
# Custom Savers stored with the Scaffold are passed through to the
# SavedModel for restore later.
graph_saver = estimator_spec.scaffold.saver or saver.Saver(sharded=True)
if save_variables and not check_variables:
raise ValueError('If `save_variables` is `True, `check_variables`'
'must not be `False`.')
if check_variables:
try:
graph_saver.restore(session, checkpoint_path)
except errors.NotFoundError as e:
msg = ('Could not load all requested variables from checkpoint. '
'Please make sure your model_fn does not expect variables '
'that were not saved in the checkpoint.\n\n'
'Encountered error with mode `{}` while restoring '
'checkpoint from: `{}`. Full Traceback:\n\n{}').format(
mode, checkpoint_path, e)
raise ValueError(msg)
# We add the train op explicitly for now, so that we don't have to
# change the Builder public interface. Note that this is a no-op
# for prediction, where train_op is None.
builder._add_train_op(estimator_spec.train_op) # pylint: disable=protected-access
meta_graph_kwargs = dict(
tags=export_tags,
signature_def_map=signature_def_map,
assets_collection=ops.get_collection(
ops.GraphKeys.ASSET_FILEPATHS),
strip_default_attrs=strip_default_attrs,
legacy_init_op=local_init_op,
saver=graph_saver)
if save_variables:
builder.add_meta_graph_and_variables(
session, **meta_graph_kwargs)
else:
builder.add_meta_graph(**meta_graph_kwargs)
def _get_export_outputs_for_spec(self, estimator_spec):
"""Given an EstimatorSpec, determine what our export outputs should be.
EstimatorSpecs contain export_outputs that are used for serving, but for
training and eval graphs, we must wrap the tensors of interest in
appropriate ExportOutput objects.
Args:
estimator_spec: EstimatorSpec object that will be exported.
Returns:
a dict mapping export_output_name to ExportOutput object.
Raises:
ValueError: if an appropriate ExportOutput cannot be found for the
passed EstimatorSpec.mode
"""
mode = estimator_spec.mode
if mode == model_fn_lib.ModeKeys.PREDICT:
outputs = estimator_spec.export_outputs
else:
if mode == model_fn_lib.ModeKeys.TRAIN:
output_class = export_output.TrainOutput
elif mode == model_fn_lib.ModeKeys.EVAL:
output_class = export_output.EvalOutput
else:
raise ValueError(
'Export output type not found for mode: {}'.format(mode))
export_out = output_class(
loss=estimator_spec.loss,
predictions=estimator_spec.predictions,
metrics=estimator_spec.eval_metric_ops)
outputs = {mode: export_out}
return outputs
def _get_features_from_input_fn(self, input_fn, mode):
"""Extracts the `features` from return values of `input_fn`."""
result = self._call_input_fn(input_fn, mode)
result, _, hooks = estimator_util.parse_input_fn_result(result)
self._validate_features_in_predict_input(result)
return result, hooks
def _validate_features_in_predict_input(self, result):
if not _has_dataset_or_queue_runner(result):
logging.warning('Input graph does not use tf.data.Dataset or contain a '
'QueueRunner. That means predict yields forever. '
'This is probably a mistake.')
def _get_features_and_labels_from_input_fn(self, input_fn, mode,
distribution=None):
"""Extracts the `features` and labels from return values of `input_fn`."""
if distribution is not None:
result = distribution.distribute_dataset(
lambda: self._call_input_fn(input_fn, mode))
else:
result = self._call_input_fn(input_fn, mode)
return estimator_util.parse_input_fn_result(result)
def _extract_batch_length(self, preds_evaluated):
"""Extracts batch length of predictions."""
batch_length = None
for key, value in six.iteritems(preds_evaluated):
batch_length = batch_length or value.shape[0]
if value.shape[0] != batch_length:
raise ValueError('Batch length of predictions should be same. %s has '
'different batch length than others.' % key)
return batch_length
def _extract_keys(self, predictions, predict_keys):
"""Extracts `predict_keys` from `predictions`."""
if not predict_keys:
return predictions
if not isinstance(predictions, dict):
raise ValueError(
'predict_keys argument is not valid in case of non-dict predictions.')
existing_keys = predictions.keys()
predictions = {
key: value
for key, value in six.iteritems(predictions) if key in predict_keys
}
if not predictions:
raise ValueError('Expected to run at least one output from %s, '
'provided %s.' % (existing_keys, predict_keys))
return predictions
def _create_global_step(self, graph):
"""Creates the global step tensor in graph.
The global step tensor must be an integer type with name 'global_step' and
be added to the collection `tf.GraphKeys.GLOBAL_STEP`.
Args:
graph: The graph in which to create the global step tensor.
Returns:
The global step `Tensor`.
"""
return training.create_global_step(graph)
def _create_and_assert_global_step(self, graph):
"""Creates and asserts properties of the global step.
Args:
graph: The graph in which to create the global step tensor.
Returns:
The global step `Tensor`.
"""
step = self._create_global_step(graph)
assert step == training.get_global_step()
assert step.dtype.is_integer
return step
def _call_input_fn(self, input_fn, mode):
"""Calls the input function.
Args:
input_fn: The input function.
mode: ModeKeys
Returns:
The return value of the passed input_fn, which should be one of:
* A 'tf.data.Dataset' object: Outputs of `Dataset` object must be a
tuple (features, labels) with same constraints as below.
* A tuple (features, labels): Where `features` is a `Tensor` or a
dictionary of string feature name to `Tensor` and `labels` is a
`Tensor` or a dictionary of string label name to `Tensor`. Both
`features` and `labels` are consumed by `model_fn`. They should
satisfy the expectation of `model_fn` from inputs.
Raises:
ValueError: if input_fn takes invalid arguments.
"""
input_fn_args = function_utils.fn_args(input_fn)
kwargs = {}
if 'mode' in input_fn_args:
kwargs['mode'] = mode
if 'params' in input_fn_args:
kwargs['params'] = self.params
if 'config' in input_fn_args:
kwargs['config'] = self.config
with ops.device('/cpu:0'):
return input_fn(**kwargs)
def _call_model_fn(self, features, labels, mode, config):
"""Calls model function.
Args:
features: features dict.
labels: labels dict.
mode: ModeKeys
config: RunConfig
Returns:
An `EstimatorSpec` object.
Raises:
ValueError: if model_fn returns invalid objects.
"""
model_fn_args = function_utils.fn_args(self._model_fn)
kwargs = {}
if 'labels' in model_fn_args:
kwargs['labels'] = labels
else:
if labels is not None:
raise ValueError(
'model_fn does not take labels, but input_fn returns labels.')
if 'mode' in model_fn_args:
kwargs['mode'] = mode
if 'params' in model_fn_args:
kwargs['params'] = self.params
if 'config' in model_fn_args:
kwargs['config'] = config
logging.info('Calling model_fn.')
model_fn_results = self._model_fn(features=features, **kwargs)
logging.info('Done calling model_fn.')
if not isinstance(model_fn_results, model_fn_lib.EstimatorSpec):
raise ValueError('model_fn should return an EstimatorSpec.')
return model_fn_results
def _train_model(self, input_fn, hooks, saving_listeners):
if self._train_distribution:
return self._train_model_distributed(input_fn, hooks, saving_listeners)
else:
return self._train_model_default(input_fn, hooks, saving_listeners)
  def _train_model_default(self, input_fn, hooks, saving_listeners):
    """Initiate training with input_fn, without DistributionStrategies.

    Args:
      input_fn: A function that provides input data for training as minibatches.
      hooks: List of `SessionRunHook` subclass instances. Used for callbacks
        inside the training loop.
      saving_listeners: list of `CheckpointSaverListener` objects. Used for
        callbacks that run immediately before or after checkpoint savings.

    Returns:
      Loss from training
    """
    worker_hooks = []
    # Build the training graph under this Estimator's device function.
    with ops.Graph().as_default() as g, g.device(self._device_fn):
      random_seed.set_random_seed(self._config.tf_random_seed)
      global_step_tensor = self._create_and_assert_global_step(g)
      # Skip creating a read variable if _create_and_assert_global_step
      # returns None (e.g. tf.contrib.estimator.SavedModelEstimator).
      if global_step_tensor is not None:
        training_util._get_or_create_global_step_read(g)  # pylint: disable=protected-access
      # Build the input pipeline, then the model graph in TRAIN mode.
      features, labels, input_hooks = (
          self._get_features_and_labels_from_input_fn(
              input_fn, model_fn_lib.ModeKeys.TRAIN))
      worker_hooks.extend(input_hooks)
      estimator_spec = self._call_model_fn(
          features, labels, model_fn_lib.ModeKeys.TRAIN, self.config)
      # Re-fetch the global step from the graph; the model_fn may have
      # created or replaced it.
      global_step_tensor = training_util.get_global_step(g)
      return self._train_with_estimator_spec(estimator_spec, worker_hooks,
                                             hooks, global_step_tensor,
                                             saving_listeners)
  def _train_model_distributed(self, input_fn, hooks, saving_listeners):
    """Initiate training with input_fn, using DistributionStrategies.

    Args:
      input_fn: A function that provides input data for training as minibatches.
      hooks: List of `SessionRunHook` subclass instances. Used for callbacks
        inside the training loop.
      saving_listeners: list of `CheckpointSaverListener` objects. Used for
        callbacks that run immediately before or after checkpoint savings.

    Returns:
      Loss from training
    """
    self._train_distribution.configure(self._session_config)

    # TODO(sourabhbajaj): Remove this hack once we migrate the other strategies
    # to use the new API
    # TPU strategies take a separate code path below (run_steps_on_dataset).
    is_tpu_strategy = (
        self._train_distribution.__class__.__name__ == 'TPUStrategy')

    worker_hooks = []
    with ops.Graph().as_default() as g:
      # We want to create the iterations variable outside the distribution scope
      # as that is just stored on the host and mainly used to drive the loop
      # and doesn't need to be a Mirrored/Device variable.
      steps_per_run_variable = training.get_or_create_steps_per_run_variable()
      with self._train_distribution.scope():
        random_seed.set_random_seed(self._config.tf_random_seed)

        if is_tpu_strategy:
          # Create the iterator for run_on_dataset function
          # TODO(sourabhbajaj): refactor this out to call a function on the
          # strategy
          dataset = self._train_distribution.distribute_dataset(
              lambda: self._call_input_fn(input_fn,  # pylint: disable=g-long-lambda
                                          model_fn_lib.ModeKeys.TRAIN))
          iterator = dataset.make_initializable_iterator()
          # Hook initializes the iterator when the session is created.
          worker_hooks.append(
              estimator_util._DatasetInitializerHook(iterator))  # pylint: disable=protected-access

          global_step_tensor = self._create_and_assert_global_step(g)
          # we want to add to the global collection in the main thread not the
          # tower threads.
          ops.add_to_collection(
              training_util.GLOBAL_STEP_READ_KEY,
              self._train_distribution.read_var(global_step_tensor))

          # Create a step_fn from the train_op of grouped_estimator_spec
          def step_fn(ctx, inputs):
            """A single step that is passed to run_on_dataset."""
            features, labels = inputs
            estimator_spec = self._train_distribution.call_for_each_tower(
                self._call_model_fn,
                features,
                labels,
                model_fn_lib.ModeKeys.TRAIN,
                self.config)
            # Expose the per-step loss and the full spec to the outer loop.
            ctx.set_last_step_output(
                name='loss',
                output=estimator_spec.loss,
                aggregation=distribute_lib.get_loss_reduction())
            ctx.set_non_tensor_output(
                name='estimator_spec', output=estimator_spec)
            return estimator_spec.train_op

          # Create new train_op post graph rewrites
          initial_training_loss = constant_op.constant(1e7)
          ctx = self._train_distribution.run_steps_on_dataset(
              step_fn, iterator, iterations=steps_per_run_variable,
              initial_loop_values={'loss': initial_training_loss})
          distributed_train_op = ctx.run_op
          tpu_result = ctx.last_step_outputs
          grouped_estimator_spec = ctx.non_tensor_outputs['estimator_spec']
        else:
          # Non-TPU path: build the distributed input pipeline, then call the
          # model_fn once per tower.
          features, labels, input_hooks = (
              self._get_features_and_labels_from_input_fn(
                  input_fn, model_fn_lib.ModeKeys.TRAIN,
                  self._train_distribution))
          worker_hooks.extend(input_hooks)
          global_step_tensor = self._create_and_assert_global_step(g)
          # we want to add to the global collection in the main thread not the
          # tower threads.
          ops.add_to_collection(
              training_util.GLOBAL_STEP_READ_KEY,
              self._train_distribution.read_var(global_step_tensor))
          grouped_estimator_spec = self._train_distribution.call_for_each_tower(
              self._call_model_fn,
              features,
              labels,  # although this will be None it seems
              model_fn_lib.ModeKeys.TRAIN,
              self.config)

        # Merge the per-tower scaffolds into a single scaffold.
        scaffold = _combine_distributed_scaffold(
            grouped_estimator_spec.scaffold, self._train_distribution)

        def get_hooks_from_the_first_device(per_device_hooks):
          # Hooks are taken from the first device only; they are assumed to
          # be replicated across towers.
          hooks_list = self._train_distribution.unwrap(per_device_hooks)
          assert hooks_list
          return hooks_list[0]

        training_hooks = get_hooks_from_the_first_device(
            grouped_estimator_spec.training_hooks)
        training_chief_hooks = get_hooks_from_the_first_device(
            grouped_estimator_spec.training_chief_hooks)

        # TODO(sourabhbajaj): Merge the two code paths and clean up the code
        if is_tpu_strategy:
          # TPU path: loss comes from the step loop's last-step outputs.
          loss = tpu_result['loss']
          worker_hooks.append(
              estimator_util.StrategyInitFinalizeHook(
                  self._train_distribution.initialize,
                  self._train_distribution.finalize))
        else:
          # Reduce per-tower losses on the host CPU and unwrap the result.
          loss = self._train_distribution.unwrap(
              self._train_distribution.reduce(
                  distribute_lib.get_loss_reduction(),
                  grouped_estimator_spec.loss,
                  destinations='/device:CPU:0'))[0]
          distributed_train_op = grouped_estimator_spec.train_op

        # Assemble a single EstimatorSpec that represents the whole
        # distributed training step.
        estimator_spec = model_fn_lib.EstimatorSpec(
            mode=grouped_estimator_spec.mode,
            loss=loss,
            train_op=self._train_distribution.group(distributed_train_op),
            training_hooks=training_hooks,
            training_chief_hooks=training_chief_hooks,
            scaffold=scaffold)
        return self._train_with_estimator_spec(estimator_spec, worker_hooks,
                                               hooks, global_step_tensor,
                                               saving_listeners)
  def _train_with_estimator_spec(self, estimator_spec, worker_hooks, hooks,
                                 global_step_tensor, saving_listeners):
    """Train a model with the given Estimator Spec.

    Args:
      estimator_spec: `EstimatorSpec` produced by the model_fn for TRAIN mode.
      worker_hooks: List of `SessionRunHook` run on every worker; mutated in
        place as additional hooks are appended below.
      hooks: User-supplied hooks, appended to `worker_hooks`.
      global_step_tensor: The global step `Tensor` for this graph.
      saving_listeners: Optional list of `CheckpointSaverListener` to attach to
        the checkpoint saver hook.

    Returns:
      The final loss value observed before the session requested a stop, or
      `None` if the loop never ran.
    """
    # Warm-start must run before the MonitoredTrainingSession initializes
    # variables, so warm-started values are not clobbered by init ops.
    if self._warm_start_settings:
      logging.info('Warm-starting with WarmStartSettings: %s' %
                   (self._warm_start_settings,))
      warm_starting_util.warm_start(*self._warm_start_settings)
    # Check if the user created a loss summary, and add one if they didn't.
    # We assume here that the summary is called 'loss'. If it is not, we will
    # make another one with the name 'loss' to ensure it shows up in the right
    # graph in TensorBoard.
    if not any([x.op.name == 'loss'
                for x in ops.get_collection(ops.GraphKeys.SUMMARIES)]):
      summary.scalar('loss', estimator_spec.loss)
    ops.add_to_collection(ops.GraphKeys.LOSSES, estimator_spec.loss)
    worker_hooks.extend(hooks)
    # Abort training as soon as the loss becomes NaN.
    worker_hooks.append(
        training.NanTensorHook(estimator_spec.loss)
    )
    if self._config.log_step_count_steps is not None:
      worker_hooks.append(
          training.LoggingTensorHook(
              {
                  'loss': estimator_spec.loss,
                  'step': global_step_tensor
              },
              every_n_iter=self._config.log_step_count_steps)
      )
    worker_hooks.extend(estimator_spec.training_hooks)
    # Install a default sharded Saver only when neither the scaffold nor the
    # SAVERS collection already provides one.
    if not (estimator_spec.scaffold.saver or
            ops.get_collection(ops.GraphKeys.SAVERS)):
      ops.add_to_collection(
          ops.GraphKeys.SAVERS,
          training.Saver(
              sharded=True,
              max_to_keep=self._config.keep_checkpoint_max,
              keep_checkpoint_every_n_hours=(
                  self._config.keep_checkpoint_every_n_hours),
              defer_build=True,
              save_relative_paths=True))
    chief_hooks = []
    all_hooks = worker_hooks + list(estimator_spec.training_chief_hooks)
    saver_hooks = [
        h for h in all_hooks if isinstance(h, training.CheckpointSaverHook)]
    # Create a chief-only CheckpointSaverHook if checkpointing is configured
    # and no user/model hook already does the saving.
    if (self._config.save_checkpoints_secs or
        self._config.save_checkpoints_steps):
      if not saver_hooks:
        chief_hooks = [
            training.CheckpointSaverHook(
                self._model_dir,
                save_secs=self._config.save_checkpoints_secs,
                save_steps=self._config.save_checkpoints_steps,
                scaffold=estimator_spec.scaffold)
        ]
        saver_hooks = [chief_hooks[0]]
    if saving_listeners:
      if not saver_hooks:
        raise ValueError(
            'There should be a CheckpointSaverHook to use saving_listeners. '
            'Please set one of the RunConfig.save_checkpoints_steps or '
            'RunConfig.save_checkpoints_secs.')
      else:
        # It is expected to have one CheckpointSaverHook. If multiple, we pick
        # up the first one to add listener.
        saver_hooks[0]._listeners.extend(saving_listeners)  # pylint: disable=protected-access
    with training.MonitoredTrainingSession(
        master=self._config.master,
        is_chief=self._config.is_chief,
        checkpoint_dir=self._model_dir,
        scaffold=estimator_spec.scaffold,
        hooks=worker_hooks,
        chief_only_hooks=(
            tuple(chief_hooks) + tuple(estimator_spec.training_chief_hooks)),
        save_checkpoint_secs=0,  # Saving is handled by a hook.
        save_summaries_steps=self._config.save_summary_steps,
        config=self._session_config,
        log_step_count_steps=self._config.log_step_count_steps) as mon_sess:
      loss = None
      # Run train_op until a hook (checkpoint limit, NaN, stop signal)
      # requests the session to stop; keep the most recent loss.
      while not mon_sess.should_stop():
        _, loss = mon_sess.run([estimator_spec.train_op, estimator_spec.loss])
    return loss
def _evaluate_build_graph(self, input_fn, hooks=None, checkpoint_path=None):
"""Builds the graph and related hooks to run evaluation."""
random_seed.set_random_seed(self._config.tf_random_seed)
self._create_and_assert_global_step(ops.get_default_graph())
features, labels, input_hooks = (
self._get_features_and_labels_from_input_fn(
input_fn, model_fn_lib.ModeKeys.EVAL, self._eval_distribution))
if self._eval_distribution:
(loss_metric, scaffold, evaluation_hooks, eval_metric_ops) = (
self._call_model_fn_eval_distributed(features, labels, self.config))
else:
(loss_metric, scaffold, evaluation_hooks, eval_metric_ops) = (
self._call_model_fn_eval(features, labels, self.config))
global_step_tensor = training_util.get_global_step(ops.get_default_graph())
# Call to warm_start has to be after model_fn is called.
self._maybe_warm_start(checkpoint_path)
if model_fn_lib.LOSS_METRIC_KEY in eval_metric_ops:
raise ValueError(
'Metric with name "%s" is not allowed, because Estimator ' %
(model_fn_lib.LOSS_METRIC_KEY) +
'already defines a default metric with the same name.')
eval_metric_ops[model_fn_lib.LOSS_METRIC_KEY] = loss_metric
update_op, eval_dict = _extract_metric_update_ops(eval_metric_ops,
self._eval_distribution)
if ops.GraphKeys.GLOBAL_STEP in eval_dict:
raise ValueError(
'Metric with name `global_step` is not allowed, because Estimator '
'already defines a default metric with the same name.')
eval_dict[ops.GraphKeys.GLOBAL_STEP] = global_step_tensor
all_hooks = list(input_hooks)
all_hooks.extend(hooks)
all_hooks.extend(list(evaluation_hooks or []))
# New local variables have been added, so update the estimator spec's
# local init op if it was defined.
if scaffold and scaffold.local_init_op:
# Ensure that eval step has been created before updating local init op.
evaluation._get_or_create_eval_step() # pylint: disable=protected-access
scaffold = monitored_session.Scaffold(
local_init_op=control_flow_ops.group(
scaffold.local_init_op,
monitored_session.Scaffold.default_local_init_op()),
copy_from_scaffold=scaffold
)
return scaffold, update_op, eval_dict, all_hooks
def _call_model_fn_eval(self, features, labels, config):
estimator_spec = self._call_model_fn(
features, labels, model_fn_lib.ModeKeys.EVAL, config)
loss_metric = metrics_lib.mean(estimator_spec.loss)
return (loss_metric, estimator_spec.scaffold,
estimator_spec.evaluation_hooks, estimator_spec.eval_metric_ops)
  def _call_model_fn_eval_distributed(self, features, labels, config):
    """Call model_fn in distribution mode and handle return values.

    Runs the model_fn once per tower via the eval distribution strategy, then
    combines the per-tower scaffolds and computes a distributed mean-loss
    metric. Evaluation hooks are taken from the first tower only.
    """
    grouped_estimator_spec = self._eval_distribution.call_for_each_tower(
        self._call_model_fn, features, labels,
        model_fn_lib.ModeKeys.EVAL, config)
    # Merge per-tower scaffolds into a single Scaffold for the eval session.
    scaffold = _combine_distributed_scaffold(
        grouped_estimator_spec.scaffold, self._eval_distribution)
    # Hooks are assumed identical across towers; use the first tower's.
    evaluation_hooks = self._eval_distribution.unwrap(
        grouped_estimator_spec.evaluation_hooks)[0]
    loss_metric = self._eval_distribution.call_for_each_tower(
        metrics_lib.mean, grouped_estimator_spec.loss)
    return (loss_metric, scaffold,
        evaluation_hooks, grouped_estimator_spec.eval_metric_ops)
  def _evaluate_run(self, checkpoint_path, scaffold, update_op, eval_dict,
                    all_hooks, output_dir):
    """Run evaluation.

    Executes a single evaluation pass, then writes the resulting metrics (and
    the evaluated checkpoint path, when given) to summary files in
    `output_dir`.

    Returns:
      Dict of metric results produced by the evaluation run, keyed by metric
      name and including the global step.
    """
    eval_results = evaluation._evaluate_once(  # pylint: disable=protected-access
        checkpoint_path=checkpoint_path,
        master=self._config.evaluation_master,
        scaffold=scaffold,
        eval_ops=update_op,
        final_ops=eval_dict,
        hooks=all_hooks,
        config=self._session_config)
    current_global_step = eval_results[ops.GraphKeys.GLOBAL_STEP]
    # Persist all metric values so they are visible in TensorBoard.
    _write_dict_to_summary(
        output_dir=output_dir,
        dictionary=eval_results,
        current_global_step=current_global_step)
    if checkpoint_path:
      _write_checkpoint_path_to_summary(
          output_dir=output_dir,
          checkpoint_path=checkpoint_path,
          current_global_step=current_global_step)
    return eval_results
def _maybe_warm_start(self, checkpoint_path):
if not checkpoint_path and self._warm_start_settings:
logging.info('Warm-starting with WarmStartSettings: %s' %
(self._warm_start_settings,))
warm_starting_util.warm_start(*self._warm_start_settings)
def maybe_overwrite_model_dir_and_session_config(config, model_dir):
  """Overwrite estimator config by `model_dir` and `session_config` if needed.
  Args:
    config: Original estimator config.
    model_dir: Estimator model checkpoint directory.
  Returns:
    Overwritten estimator config.
  Raises:
    ValueError: Model directory inconsistent between `model_dir` and `config`.
  """
  if config is None:
    config = run_config.RunConfig()
    logging.info('Using default config.')
  if not isinstance(config, run_config.RunConfig):
    raise ValueError(
        'config must be an instance of `RunConfig`, but provided %s.' % config)
  # Fill in a default session_config when the user did not provide one.
  if config.session_config is None:
    session_config = run_config.get_default_session_config()
    config = run_config.RunConfig.replace(config, session_config=session_config)
  model_dir = compat_internal.path_to_str(model_dir)
  if model_dir is not None:
    # A model_dir given both here and in the RunConfig must agree.
    if (getattr(config, 'model_dir', None) is not None and
        config.model_dir != model_dir):
      raise ValueError(
          "`model_dir` are set both in constructor and `RunConfig`, but with "
          "different values. In constructor: '{}', in `RunConfig`: "
          "'{}' ".format(model_dir, config.model_dir))
  if model_dir:
    config = run_config.RunConfig.replace(config, model_dir=model_dir)
  # Fall back to a temporary directory when no model_dir was supplied at all.
  if getattr(config, 'model_dir', None) is None:
    model_dir = tempfile.mkdtemp()
    logging.warning('Using temporary folder as model directory: %s', model_dir)
    config = run_config.RunConfig.replace(config, model_dir=model_dir)
  return config
def create_per_tower_ready_op(scaffold):
  """Create a Scaffold.ready_op inside a tower."""
  existing = scaffold.ready_op
  if existing:
    return existing

  def _default_ready_op():
    # Concatenate the names of uninitialized variables and resources; an
    # empty result means the graph is ready.
    pending = [
        variables.report_uninitialized_variables(),
        resources.report_uninitialized_resources(),
    ]
    return array_ops.concat(pending, 0)

  return monitored_session.Scaffold.get_or_default(
      'ready_op', ops.GraphKeys.READY_OP, _default_ready_op)
def create_per_tower_ready_for_local_init_op(scaffold):
  """Create a Scaffold.ready_for_local_init_op inside a tower."""
  existing = scaffold.ready_for_local_init_op
  if existing:
    return existing

  def _default_op():
    # Local init may run once every global variable has been initialized.
    return variables.report_uninitialized_variables(
        variables.global_variables())

  return monitored_session.Scaffold.get_or_default(
      'ready_for_local_init_op', ops.GraphKeys.READY_FOR_LOCAL_INIT_OP,
      _default_op)
def _combine_distributed_scaffold(grouped_scaffold, distribution):
  """Combines scaffold(s) returned from `distribution.call_for_each_tower`.

  Each scaffold field is merged across towers: op-valued fields are grouped
  with `distribution.group`, while ready ops are concatenated so that the
  combined op is empty only when every tower is ready.

  Args:
    grouped_scaffold: Per-tower `Scaffold` value(s) from
      `call_for_each_tower`.
    distribution: The `DistributionStrategy` used to produce them.

  Returns:
    A single `monitored_session.Scaffold` combining all towers.
  """
  # TODO(anjalisridhar): Figure out how to resolve the following scaffold
  # parameters: init_feed_dict, init_fn.
  scaffold_list = distribution.unwrap(grouped_scaffold)
  init_feed_dict = [
      s.init_feed_dict
      for s in scaffold_list
      if s.init_feed_dict is not None
  ]
  if init_feed_dict:
    init_feed_dict = distribution.group(init_feed_dict)
  else:
    init_feed_dict = None
  init_fn = [s.init_fn for s in scaffold_list if s.init_fn is not None]
  if init_fn:
    init_fn = distribution.group(init_fn)
  else:
    init_fn = None
  init_op = [s.init_op for s in scaffold_list if s.init_op is not None]
  if init_op:
    init_op = distribution.group(init_op)
  else:
    init_op = None

  def _unwrap_and_concat(value):
    value = nest.flatten(distribution.unwrap(value))
    if len(value) != 1:
      # BUGFIX: `concat` requires an axis; it was previously called without
      # one, which raises TypeError whenever there is more than one tower.
      return array_ops.concat(value, 0)
    return value[0]

  ready_op = distribution.call_for_each_tower(
      create_per_tower_ready_op, grouped_scaffold)
  if ready_op is not None:
    ready_op = _unwrap_and_concat(ready_op)
  else:
    ready_op = None
  ready_for_local_init_op = distribution.call_for_each_tower(
      create_per_tower_ready_for_local_init_op, grouped_scaffold)
  if ready_for_local_init_op is not None:
    ready_for_local_init_op = _unwrap_and_concat(ready_for_local_init_op)
  else:
    ready_for_local_init_op = None
  local_init_op = [
      s.local_init_op
      for s in scaffold_list
      if s.local_init_op is not None
  ]
  if local_init_op:
    local_init_op = distribution.group(local_init_op)
  else:
    local_init_op = None
  summary_op = [
      s.summary_op for s in scaffold_list if s.summary_op is not None
  ]
  if summary_op:
    summary_op = distribution.group(summary_op)
  else:
    summary_op = None
  scaffold = monitored_session.Scaffold(
      init_op=init_op,
      ready_op=ready_op,
      ready_for_local_init_op=ready_for_local_init_op,
      local_init_op=local_init_op,
      summary_op=summary_op,
      init_feed_dict=init_feed_dict,
      init_fn=init_fn)
  return scaffold
def _check_checkpoint_available(model_dir):
  """Raise ValueError if `model_dir` holds no trained checkpoint."""
  if not checkpoint_management.latest_checkpoint(model_dir):
    raise ValueError(
        'Could not find trained model in model_dir: {}.'.format(model_dir))
def _check_hooks_type(hooks):
  """Returns hooks if all are SessionRunHook, raises TypeError otherwise."""
  checked = list(hooks or [])
  for hook in checked:
    if isinstance(hook, training.SessionRunHook):
      continue
    raise TypeError('Hooks must be a SessionRunHook, given: {}'.format(hook))
  return checked
def _check_listeners_type(saving_listeners):
  """Check listeners type."""
  checked = list(saving_listeners or [])
  for listener in checked:
    if isinstance(listener, training.CheckpointSaverListener):
      continue
    raise TypeError(
        'saving_listeners must be a list of CheckpointSaverListener, '
        'given: {}'.format(listener))
  return checked
def _get_replica_device_setter(config):
  """Creates a replica device setter if required as a default device_fn.

  `Estimator` uses ReplicaDeviceSetter as a default device placer. It sets the
  distributed related arguments such as number of ps_replicas based on given
  config.

  Args:
    config: A `RunConfig` instance.

  Returns:
    A replica device setter, or None.
  """
  # Without parameter servers there is nothing to place; use default devices.
  if config.num_ps_replicas <= 0:
    return None
  if config.task_type:
    worker_device = '/job:%s/task:%d' % (config.task_type, config.task_id)
  else:
    worker_device = '/job:worker'
  return training.replica_device_setter(
      ps_tasks=config.num_ps_replicas,
      worker_device=worker_device,
      merge_devices=True,
      ps_ops=list(device_setter.STANDARD_PS_OPS),
      cluster=config.cluster_spec)
def _verify_model_fn_args(model_fn, params):
  """Verifies model fn arguments."""
  arg_names = set(function_utils.fn_args(model_fn))
  if 'features' not in arg_names:
    raise ValueError('model_fn (%s) must include features argument.' % model_fn)
  accepts_params = 'params' in arg_names
  if params is not None and not accepts_params:
    raise ValueError('model_fn (%s) does not include params argument, '
                     'but params (%s) is passed to Estimator.' % (model_fn,
                                                                  params))
  if params is None and accepts_params:
    logging.warning('Estimator\'s model_fn (%s) includes params '
                    'argument, but params are not passed to Estimator.',
                    model_fn)
  unexpected_args = list(arg_names - _VALID_MODEL_FN_ARGS)
  if unexpected_args:
    raise ValueError('model_fn (%s) has following not expected args: %s' %
                     (model_fn, unexpected_args))
def _load_global_step_from_checkpoint_dir(checkpoint_dir):
  """Returns the saved global step from the latest checkpoint, or 0.

  Args:
    checkpoint_dir: Directory that may contain checkpoint files.

  Returns:
    The global step recorded in the latest checkpoint, or 0 when no checkpoint
    exists or it cannot be read (best-effort lookup).
  """
  try:
    checkpoint_reader = training.NewCheckpointReader(
        training.latest_checkpoint(checkpoint_dir))
    return checkpoint_reader.get_tensor(ops.GraphKeys.GLOBAL_STEP)
  except Exception:  # pylint: disable=broad-except
    # FIX: was a bare `except`, which also swallowed KeyboardInterrupt and
    # SystemExit. `Exception` keeps the best-effort behavior for real errors.
    return 0
def _extract_metric_update_ops(eval_dict, distribution=None):
  """Separate update operations from metric value operations."""
  value_ops = {}
  update_ops = []
  # Sort metrics lexicographically so graph is identical every time.
  for name, metric_ops in sorted(six.iteritems(eval_dict)):
    value_ops[name] = metric_ops[0]
    update = metric_ops[1]
    if distribution:
      update = distribution.group(update)
    update_ops.append(update)
  combined_update_op = (
      control_flow_ops.group(*update_ops) if update_ops else None)
  return combined_update_op, value_ops
def _dict_to_str(dictionary):
  """Get a `str` representation of a `dict`.

  Args:
    dictionary: The `dict` to be represented as `str`.

  Returns:
    A `str` representing the `dictionary`.
  """
  parts = []
  for key, value in sorted(six.iteritems(dictionary)):
    # Skip raw byte values (e.g. serialized Summary protos).
    if isinstance(value, six.binary_type):
      continue
    parts.append('%s = %s' % (key, value))
  return ', '.join(parts)
def _write_dict_to_summary(output_dir,
                           dictionary,
                           current_global_step):
  """Writes a `dict` into summary file in given output directory.
  Args:
    output_dir: `str`, directory to write the summary file in.
    dictionary: the `dict` to be written to summary file.
    current_global_step: `int`, the current global step.
  """
  logging.info('Saving dict for global step %d: %s', current_global_step,
               _dict_to_str(dictionary))
  summary_writer = writer_cache.FileWriterCache.get(output_dir)
  summary_proto = summary_pb2.Summary()
  for key in dictionary:
    if dictionary[key] is None:
      continue
    # The global step is written separately as the summary's step field.
    if key == 'global_step':
      continue
    if (isinstance(dictionary[key], np.float32) or
        isinstance(dictionary[key], float)):
      summary_proto.value.add(tag=key, simple_value=float(dictionary[key]))
    elif (isinstance(dictionary[key], np.int64) or
          isinstance(dictionary[key], np.int32) or
          isinstance(dictionary[key], int)):
      summary_proto.value.add(tag=key, simple_value=int(dictionary[key]))
    elif isinstance(dictionary[key], six.binary_type):
      # Bytes are treated as a serialized Summary proto; re-tag its values
      # under this key so multiple entries do not collide.
      try:
        summ = summary_pb2.Summary.FromString(dictionary[key])
        for i, _ in enumerate(summ.value):
          summ.value[i].tag = '%s/%d' % (key, i)
        summary_proto.value.extend(summ.value)
      except message.DecodeError:
        logging.warn('Skipping summary for %s, cannot parse string to Summary.',
                     key)
        continue
    elif isinstance(dictionary[key], np.ndarray):
      value = summary_proto.value.add()
      value.tag = key
      value.node_name = key
      tensor_proto = tensor_util.make_tensor_proto(dictionary[key])
      value.tensor.CopyFrom(tensor_proto)
      # pylint: disable=line-too-long
      logging.info(
          'Summary for np.ndarray is not visible in Tensorboard by default. '
          'Consider using a Tensorboard plugin for visualization (see '
          'https://github.com/tensorflow/tensorboard-plugin-example/blob/master/README.md'
          ' for more information).')
      # pylint: enable=line-too-long
    else:
      # Unsupported value type: warn and drop rather than fail evaluation.
      logging.warn(
          'Skipping summary for %s, must be a float, np.float32, np.int64, '
          'np.int32 or int or np.ndarray or a serialized string of Summary.',
          key)
  summary_writer.add_summary(summary_proto, current_global_step)
  summary_writer.flush()
def _write_checkpoint_path_to_summary(output_dir, checkpoint_path,
                                      current_global_step):
  """Writes `checkpoint_path` into summary file in the given output directory.

  Args:
    output_dir: `str`, directory to write the summary file in.
    checkpoint_path: `str`, checkpoint file path to be written to summary file.
    current_global_step: `int`, the current global step.
  """
  checkpoint_path_tag = 'checkpoint_path'
  logging.info('Saving \'%s\' summary for global step %d: %s',
               checkpoint_path_tag, current_global_step, checkpoint_path)
  summary_writer = writer_cache.FileWriterCache.get(output_dir)
  # Store the path as a string tensor so TensorBoard can surface it.
  proto = summary_pb2.Summary()
  proto.value.add(
      tag=checkpoint_path_tag,
      tensor=tensor_util.make_tensor_proto(
          checkpoint_path, dtype=dtypes.string))
  summary_writer.add_summary(proto, current_global_step)
  summary_writer.flush()
def _has_dataset_or_queue_runner(maybe_tensor):
  """Returns True if TF dataset or QueueRunner has been used.

  Args:
    maybe_tensor: A value (possibly nested) that may contain `Tensor`s.

  Returns:
    True when any top-level tensor comes from a dataset iterator, or when the
    default graph registered any QueueRunners; False otherwise.
  """
  # Check TF dataset first. Here, we use a simple algorithm to check the top
  # level Tensors only, which should be sufficient for most users.
  tensors = [x for x in nest.flatten(maybe_tensor) if isinstance(x, ops.Tensor)]
  # Generator expression avoids materializing an intermediate list.
  if any(t.op.type == 'IteratorGetNext' for t in tensors):
    return True
  # Now, check queue. Normalize the (possibly empty) collection to a bool so
  # callers always get a boolean, matching the function's name.
  return bool(
      ops.get_default_graph().get_collection(ops.GraphKeys.QUEUE_RUNNERS))
# Re-export VocabInfo under the public `tf.estimator` namespace so users can
# reference it without importing warm_starting_util directly.
VocabInfo = warm_starting_util.VocabInfo  # pylint: disable=invalid-name
estimator_export('estimator.VocabInfo')(VocabInfo)
@estimator_export('estimator.WarmStartSettings')
class WarmStartSettings(
    collections.namedtuple('WarmStartSettings', [
        'ckpt_to_initialize_from',
        'vars_to_warm_start',
        'var_name_to_vocab_info',
        'var_name_to_prev_var_name',
    ])):
  """Settings for warm-starting in Estimators.
  Example Use with canned `DNNEstimator`:
  ```
  emb_vocab_file = tf.feature_column.embedding_column(
      tf.feature_column.categorical_column_with_vocabulary_file(
          "sc_vocab_file", "new_vocab.txt", vocab_size=100),
      dimension=8)
  emb_vocab_list = tf.feature_column.embedding_column(
      tf.feature_column.categorical_column_with_vocabulary_list(
          "sc_vocab_list", vocabulary_list=["a", "b"]),
      dimension=8)
  estimator = tf.estimator.DNNClassifier(
    hidden_units=[128, 64], feature_columns=[emb_vocab_file, emb_vocab_list],
    warm_start_from=ws)
  ```
  where `ws` could be defined as:
  Warm-start all weights in the model (input layer and hidden weights).
  Either the directory or a specific checkpoint can be provided (in the case
  of the former, the latest checkpoint will be used):
  ```
  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp")
  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp/model-1000")
  ```
  Warm-start only the embeddings (input layer):
  ```
  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
                         vars_to_warm_start=".*input_layer.*")
  ```
  Warm-start all weights but the embedding parameters corresponding to
  `sc_vocab_file` have a different vocab from the one used in the current
  model:
  ```
  vocab_info = tf.estimator.VocabInfo(
      new_vocab=sc_vocab_file.vocabulary_file,
      new_vocab_size=sc_vocab_file.vocabulary_size,
      num_oov_buckets=sc_vocab_file.num_oov_buckets,
      old_vocab="old_vocab.txt"
  )
  ws = WarmStartSettings(
      ckpt_to_initialize_from="/tmp",
      var_name_to_vocab_info={
          "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info
      })
  ```
  Warm-start only `sc_vocab_file` embeddings (and no other variables), which
  have a different vocab from the one used in the current model:
  ```
  vocab_info = tf.estimator.VocabInfo(
      new_vocab=sc_vocab_file.vocabulary_file,
      new_vocab_size=sc_vocab_file.vocabulary_size,
      num_oov_buckets=sc_vocab_file.num_oov_buckets,
      old_vocab="old_vocab.txt"
  )
  ws = WarmStartSettings(
      ckpt_to_initialize_from="/tmp",
      vars_to_warm_start=None,
      var_name_to_vocab_info={
          "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info
      })
  ```
  Warm-start all weights but the parameters corresponding to `sc_vocab_file`
  have a different vocab from the one used in current checkpoint, and only
  100 of those entries were used:
  ```
  vocab_info = tf.estimator.VocabInfo(
      new_vocab=sc_vocab_file.vocabulary_file,
      new_vocab_size=sc_vocab_file.vocabulary_size,
      num_oov_buckets=sc_vocab_file.num_oov_buckets,
      old_vocab="old_vocab.txt",
      old_vocab_size=100
  )
  ws = WarmStartSettings(
      ckpt_to_initialize_from="/tmp",
      var_name_to_vocab_info={
          "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info
      })
  ```
  Warm-start all weights but the parameters corresponding to `sc_vocab_file`
  have a different vocab from the one used in current checkpoint and the
  parameters corresponding to `sc_vocab_list` have a different name from the
  current checkpoint:
  ```
  vocab_info = tf.estimator.VocabInfo(
      new_vocab=sc_vocab_file.vocabulary_file,
      new_vocab_size=sc_vocab_file.vocabulary_size,
      num_oov_buckets=sc_vocab_file.num_oov_buckets,
      old_vocab="old_vocab.txt",
      old_vocab_size=100
  )
  ws = WarmStartSettings(
      ckpt_to_initialize_from="/tmp",
      var_name_to_vocab_info={
          "input_layer/sc_vocab_file_embedding/embedding_weights": vocab_info
      },
      var_name_to_prev_var_name={
          "input_layer/sc_vocab_list_embedding/embedding_weights":
              "old_tensor_name"
      })
  ```
  Attributes:
    ckpt_to_initialize_from: [Required] A string specifying the directory with
      checkpoint file(s) or path to checkpoint from which to warm-start the
      model parameters.
    vars_to_warm_start: [Optional] One of the following:
      - A regular expression (string) that captures which variables to
        warm-start (see tf.get_collection).  This expression will only consider
        variables in the TRAINABLE_VARIABLES collection.
      - A list of Variables to warm-start.
      - A list of strings, each representing a full variable name to warm-start.
      - `None`, in which case only variables specified in
        `var_name_to_vocab_info` will be warm-started.
      Defaults to `'.*'`, which warm-starts all variables in the
      TRAINABLE_VARIABLES collection.  Note that this excludes variables such as
      accumulators and moving statistics from batch norm.
    var_name_to_vocab_info: [Optional] Dict of variable names (strings) to
      VocabInfo. The variable names should be "full" variables, not the names
      of the partitions.  If not explicitly provided, the variable is assumed to
      have no vocabulary.
    var_name_to_prev_var_name: [Optional] Dict of variable names (strings) to
      name of the previously-trained variable in `ckpt_to_initialize_from`. If
      not explicitly provided, the name of the variable is assumed to be same
      between previous checkpoint and current model.
  """
  def __new__(cls,
              ckpt_to_initialize_from,
              vars_to_warm_start='.*',
              var_name_to_vocab_info=None,
              var_name_to_prev_var_name=None):
    # ckpt_to_initialize_from is the only mandatory field; fail early with a
    # clear message rather than during the warm-start restore.
    if not ckpt_to_initialize_from:
      raise ValueError(
          '`ckpt_to_initialize_from` MUST be set in WarmStartSettings')
    # Normalize the optional dict fields to empty dicts so downstream code
    # can iterate them without None checks.
    return super(WarmStartSettings, cls).__new__(
        cls,
        ckpt_to_initialize_from,
        vars_to_warm_start,
        var_name_to_vocab_info or {},
        var_name_to_prev_var_name or {},
    )
def _get_saved_model_ckpt(saved_model_dir):
  """Return path to variables checkpoint in a SavedModel directory."""
  variables_index = os.path.join(
      saved_model_utils.get_variables_dir(saved_model_dir),
      compat.as_text('variables.index'))
  # A SavedModel must carry a variables index file; anything else is invalid.
  if not gfile.Exists(variables_index):
    raise ValueError('Directory provided has an invalid SavedModel format: %s'
                     % saved_model_dir)
  return saved_model_utils.get_variables_path(saved_model_dir)
def _get_default_warm_start_settings(warm_start_from):
  """Returns default WarmStartSettings.
  Args:
    warm_start_from: Either a string representing the filepath of a checkpoint
      or SavedModel to initialize from, or an instance of WarmStartSettings.
  Returns:
    Either None or an instance of WarmStartSettings.
  Raises:
    ValueError: If warm_start_from is not None but is neither a string nor an
      instance of WarmStartSettings.
  """
  if warm_start_from is None:
    return None
  if isinstance(warm_start_from, (six.string_types, six.binary_type)):
    # Infer that this is a SavedModel if export_path +
    # 'variables/variables.index' exists, and if so, construct the
    # WarmStartSettings pointing to the variables path
    # (export_path + 'variables/variables').
    if gfile.Exists(os.path.join(
        saved_model_utils.get_variables_dir(warm_start_from),
        compat.as_text('variables.index'))):
      logging.info('Warm-starting from a SavedModel')
      return WarmStartSettings(
          ckpt_to_initialize_from=saved_model_utils.get_variables_path(
              warm_start_from))
    # Otherwise treat the string as a plain checkpoint path/directory.
    return WarmStartSettings(ckpt_to_initialize_from=warm_start_from)
  elif isinstance(warm_start_from, WarmStartSettings):
    return warm_start_from
  else:
    raise ValueError('warm_start_from must be a string or a WarmStartSettings, '
                     'instead got {}'.format(type(warm_start_from)))
| apache-2.0 |
jparsec/jparsec | jparsec-examples/src/main/java/org/jparsec/examples/java/ast/statement/TryStatement.java | 2437 | /*****************************************************************************
* Copyright (C) jparsec.org *
* ------------------------------------------------------------------------- *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
*****************************************************************************/
package org.jparsec.examples.java.ast.statement;
import java.util.List;
import org.jparsec.examples.common.Strings;
import org.jparsec.examples.common.ValueObject;
/**
* Represents the "try-catch-finally" statement.
*
* @author Ben Yu
*/
public final class TryStatement extends ValueObject implements Statement {
  /** A single {@code catch (param) { ... }} clause of a try statement. */
  public static final class CatchBlock extends ValueObject {
    /** The exception parameter declared by this catch clause. */
    public final ParameterDef parameter;
    /** The statements executed when this clause handles an exception. */
    public final BlockStatement body;
    /**
     * Creates a catch clause.
     *
     * @param parameter the declared exception parameter
     * @param body the handler body
     */
    public CatchBlock(ParameterDef parameter, BlockStatement body) {
      this.parameter = parameter;
      this.body = body;
    }
    @Override public String toString() {
      return "catch (" + parameter + ") " + body;
    }
  }
  /** The guarded {@code try} block. */
  public final BlockStatement tryBlock;
  /** The catch clauses, in declaration order; may be empty. */
  public final List<CatchBlock> catchBlocks;
  /** The {@code finally} block, or {@code null} if absent. */
  public final BlockStatement finallyBlock;
  /**
   * Creates a try-catch-finally statement.
   *
   * @param tryBlock the guarded block
   * @param catchBlocks the catch clauses, possibly empty
   * @param finallyBlock the finally block, or {@code null} when absent
   */
  public TryStatement(
      BlockStatement tryBlock, List<CatchBlock> catchBlocks, BlockStatement finallyBlock) {
    this.tryBlock = tryBlock;
    this.catchBlocks = catchBlocks;
    this.finallyBlock = finallyBlock;
  }
  @Override public String toString() {
    return "try " + tryBlock + Strings.prependEach(" ", catchBlocks) +
        (finallyBlock == null ? "" : " finally " + finallyBlock);
  }
}
| apache-2.0 |
stephentyrone/swift | lib/SILGen/SILGenExpr.cpp | 226226 | //===--- SILGenExpr.cpp - Implements Lowering of ASTs -> SIL for Exprs ----===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
#include "ArgumentScope.h"
#include "ArgumentSource.h"
#include "Callee.h"
#include "Condition.h"
#include "Conversion.h"
#include "ExitableFullExpr.h"
#include "Initialization.h"
#include "LValue.h"
#include "RValue.h"
#include "ResultPlan.h"
#include "SILGen.h"
#include "SILGenDynamicCast.h"
#include "SILGenFunctionBuilder.h"
#include "Scope.h"
#include "SwitchEnumBuilder.h"
#include "Varargs.h"
#include "swift/AST/ASTContext.h"
#include "swift/AST/ASTMangler.h"
#include "swift/AST/Decl.h"
#include "swift/AST/DiagnosticsCommon.h"
#include "swift/AST/Expr.h"
#include "swift/AST/ForeignErrorConvention.h"
#include "swift/AST/GenericEnvironment.h"
#include "swift/AST/ParameterList.h"
#include "swift/AST/ProtocolConformance.h"
#include "swift/AST/SubstitutionMap.h"
#include "swift/AST/Types.h"
#include "swift/Basic/SourceManager.h"
#include "swift/Basic/type_traits.h"
#include "swift/SIL/DynamicCasts.h"
#include "swift/SIL/SILArgument.h"
#include "swift/SIL/SILUndef.h"
#include "swift/SIL/TypeLowering.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ConvertUTF.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"
#include "swift/AST/DiagnosticsSIL.h"
using namespace swift;
using namespace Lowering;
/// Convenience overload: looks up the type lowering for \p v and forwards to
/// the three-argument form below.
ManagedValue SILGenFunction::emitManagedRetain(SILLocation loc,
                                               SILValue v) {
  auto &lowering = getTypeLowering(v->getType());
  return emitManagedRetain(loc, v, lowering);
}
/// Copy (retain) \p v and return it as a cleanup-managed value.
/// Trivial values and values with `None` ownership need no copy and are
/// returned unmanaged.
ManagedValue SILGenFunction::emitManagedRetain(SILLocation loc,
                                               SILValue v,
                                               const TypeLowering &lowering) {
  assert(lowering.getLoweredType() == v->getType());
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(v);
  if (v->getType().isObject() &&
      v.getOwnershipKind() == ValueOwnershipKind::None)
    return ManagedValue::forUnmanaged(v);
  assert((!lowering.isAddressOnly() || !silConv.useLoweredAddresses()) &&
         "cannot retain an unloadable type");
  // Emit the copy and register a destroy cleanup for the new value.
  v = lowering.emitCopyValue(B, loc, v);
  return emitManagedRValueWithCleanup(v, lowering);
}
/// Convenience overload: looks up the type lowering for \p v and forwards to
/// the three-argument form below.
ManagedValue SILGenFunction::emitManagedLoadCopy(SILLocation loc, SILValue v) {
  auto &lowering = getTypeLowering(v->getType());
  return emitManagedLoadCopy(loc, v, lowering);
}
/// Load a copy out of the address \p v (non-taking) and return it as a
/// cleanup-managed value. Trivial and `None`-ownership results are returned
/// unmanaged since they need no destroy.
ManagedValue SILGenFunction::emitManagedLoadCopy(SILLocation loc, SILValue v,
                                                 const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getAddressType() == v->getType());
  v = lowering.emitLoadOfCopy(B, loc, v, IsNotTake);
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(v);
  if (v.getOwnershipKind() == ValueOwnershipKind::None)
    return ManagedValue::forUnmanaged(v);
  assert((!lowering.isAddressOnly() || !silConv.useLoweredAddresses()) &&
         "cannot retain an unloadable type");
  return emitManagedRValueWithCleanup(v, lowering);
}
/// Convenience overload: looks up the type lowering for \p v and forwards to
/// the three-argument form below.
ManagedValue SILGenFunction::emitManagedLoadBorrow(SILLocation loc,
                                                   SILValue v) {
  auto &lowering = getTypeLowering(v->getType());
  return emitManagedLoadBorrow(loc, v, lowering);
}
/// Borrow-load from the address \p v: trivial types take a plain copy-load;
/// otherwise emit a load_borrow whose end_borrow is managed by a cleanup.
ManagedValue
SILGenFunction::emitManagedLoadBorrow(SILLocation loc, SILValue v,
                                      const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getAddressType() == v->getType());
  if (lowering.isTrivial()) {
    // Trivial values have no ownership; a plain load suffices.
    v = lowering.emitLoadOfCopy(B, loc, v, IsNotTake);
    return ManagedValue::forUnmanaged(v);
  }
  assert((!lowering.isAddressOnly() || !silConv.useLoweredAddresses()) &&
         "cannot retain an unloadable type");
  auto *lbi = B.createLoadBorrow(loc, v);
  return emitManagedBorrowedRValueWithCleanup(v, lbi, lowering);
}
/// Convenience overload: looks up the type lowering for \p v and forwards to
/// the four-argument form below.
ManagedValue SILGenFunction::emitManagedStoreBorrow(SILLocation loc, SILValue v,
                                                    SILValue addr) {
  auto &lowering = getTypeLowering(v->getType());
  return emitManagedStoreBorrow(loc, v, addr, lowering);
}
/// Store \p v into \p addr as a borrow: trivial / `None`-ownership values use
/// a plain trivial store; otherwise emit store_borrow and manage its
/// end_borrow with a cleanup.
ManagedValue SILGenFunction::emitManagedStoreBorrow(
    SILLocation loc, SILValue v, SILValue addr, const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getObjectType() == v->getType());
  if (lowering.isTrivial() ||
      v.getOwnershipKind() == ValueOwnershipKind::None) {
    lowering.emitStore(B, loc, v, addr, StoreOwnershipQualifier::Trivial);
    return ManagedValue::forUnmanaged(v);
  }
  assert((!lowering.isAddressOnly() || !silConv.useLoweredAddresses()) &&
         "cannot retain an unloadable type");
  auto *sbi = B.createStoreBorrow(loc, v, addr);
  return emitManagedBorrowedRValueWithCleanup(sbi->getSrc(), sbi, lowering);
}
/// Convenience entry point: compute the type lowering for \p v and
/// delegate to the primary emitManagedBeginBorrow overload.
ManagedValue SILGenFunction::emitManagedBeginBorrow(SILLocation loc,
                                                    SILValue v) {
  return emitManagedBeginBorrow(loc, v, getTypeLowering(v->getType()));
}
/// Begin a borrow of \p v and manage the borrowed value.
///
/// Values that do not need a borrow scope — trivial values, ownership-none
/// values, and values that are already guaranteed — are returned unmanaged
/// without emitting any instruction. Otherwise a begin_borrow is created
/// and cleanup registration is delegated.
ManagedValue
SILGenFunction::emitManagedBeginBorrow(SILLocation loc, SILValue v,
                                       const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getObjectType() ==
         v->getType().getObjectType());
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(v);
  if (v.getOwnershipKind() == ValueOwnershipKind::None)
    return ManagedValue::forUnmanaged(v);
  if (v.getOwnershipKind() == ValueOwnershipKind::Guaranteed)
    return ManagedValue::forUnmanaged(v);
  auto *bbi = B.createBeginBorrow(loc, v);
  return emitManagedBorrowedRValueWithCleanup(v, bbi, lowering);
}
namespace {
/// Cleanup that ends a borrow scope by emitting end_borrow for the
/// borrowed value when the cleanup fires.
struct EndBorrowCleanup : Cleanup {
  SILValue borrowedValue;
  EndBorrowCleanup(SILValue borrowedValue) : borrowedValue(borrowedValue) {
    // Sanity check: phi arguments produced by transforming terminators
    // must not receive end_borrow cleanups.
    if (auto *arg = dyn_cast<SILPhiArgument>(borrowedValue)) {
      if (auto *ti = arg->getSingleTerminator()) {
        assert(!ti->isTransformationTerminator() &&
               "Transforming terminators do not have end_borrow");
      }
    }
  }
  void emit(SILGenFunction &SGF, CleanupLocation l,
            ForUnwind_t forUnwind) override {
    SGF.B.createEndBorrow(l, borrowedValue);
  }
  void dump(SILGenFunction &) const override {
#ifndef NDEBUG
    llvm::errs() << "EndBorrowCleanup "
                 << "State:" << getState() << "\n"
                 << "borrowed:" << borrowedValue
                 << "\n";
#endif
  }
};
/// Cleanup that finishes a shared-borrow formal access registered in the
/// function's formal evaluation context. \c Depth identifies the
/// corresponding SharedBorrowFormalAccess entry; it is assigned by the
/// code that pushes this cleanup.
struct FormalEvaluationEndBorrowCleanup : Cleanup {
  FormalEvaluationContext::stable_iterator Depth;
  FormalEvaluationEndBorrowCleanup() : Depth() {}
  void emit(SILGenFunction &SGF, CleanupLocation l, ForUnwind_t forUnwind) override {
    getEvaluation(SGF).finish(SGF);
  }
  void dump(SILGenFunction &SGF) const override {
#ifndef NDEBUG
    llvm::errs() << "FormalEvaluationEndBorrowCleanup "
                 << "State:" << getState() << "\n"
                 << "original:" << getOriginalValue(SGF) << "\n"
                 << "borrowed:" << getBorrowedValue(SGF) << "\n";
#endif
  }
  /// Look up the shared-borrow formal access this cleanup finishes.
  SharedBorrowFormalAccess &getEvaluation(SILGenFunction &SGF) const {
    auto &evaluation = *SGF.FormalEvalContext.find(Depth);
    assert(evaluation.getKind() == FormalAccess::Shared);
    return static_cast<SharedBorrowFormalAccess &>(evaluation);
  }
  SILValue getOriginalValue(SILGenFunction &SGF) const {
    return getEvaluation(SGF).getOriginalValue();
  }
  SILValue getBorrowedValue(SILGenFunction &SGF) const {
    return getEvaluation(SGF).getBorrowedValue();
  }
};
} // end anonymous namespace
/// Convenience entry point for formal-evaluation borrows: already-guaranteed
/// values are returned unchanged; otherwise the type lowering is computed
/// and the primary overload is invoked.
ManagedValue
SILGenFunction::emitFormalEvaluationManagedBeginBorrow(SILLocation loc,
                                                       SILValue v) {
  // A guaranteed value is already borrowed; no new scope is needed.
  if (v.getOwnershipKind() == ValueOwnershipKind::Guaranteed)
    return ManagedValue::forUnmanaged(v);
  return emitFormalEvaluationManagedBeginBorrow(loc, v,
                                                getTypeLowering(v->getType()));
}
/// Begin a borrow of \p v whose scope is tied to the enclosing formal
/// evaluation context rather than the lexical cleanup scope.
///
/// Trivial and already-guaranteed values need no borrow and are returned
/// unmanaged. Otherwise a begin_borrow is created and registered with the
/// formal evaluation context.
ManagedValue SILGenFunction::emitFormalEvaluationManagedBeginBorrow(
    SILLocation loc, SILValue v, const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getObjectType() ==
         v->getType().getObjectType());
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(v);
  if (v.getOwnershipKind() == ValueOwnershipKind::Guaranteed)
    return ManagedValue::forUnmanaged(v);
  auto *bbi = B.createBeginBorrow(loc, v);
  return emitFormalEvaluationManagedBorrowedRValueWithCleanup(loc, v, bbi,
                                                              lowering);
}
/// Convenience entry point: compute the type lowering for \p original and
/// delegate to the primary overload.
ManagedValue
SILGenFunction::emitFormalEvaluationManagedBorrowedRValueWithCleanup(
    SILLocation loc, SILValue original, SILValue borrowed) {
  auto &origLowering = getTypeLowering(original->getType());
  return emitFormalEvaluationManagedBorrowedRValueWithCleanup(loc, original,
                                                              borrowed,
                                                              origLowering);
}
/// Register \p borrowed (a borrow of \p original) with the current formal
/// evaluation scope so the borrow is ended when the scope finishes.
///
/// Trivial values and non-object (address) borrows are returned without
/// registering anything. Note the returned ManagedValue deliberately
/// carries an invalid cleanup handle: the borrow's lifetime is owned by the
/// formal evaluation context, not by the caller.
ManagedValue
SILGenFunction::emitFormalEvaluationManagedBorrowedRValueWithCleanup(
    SILLocation loc, SILValue original, SILValue borrowed,
    const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getObjectType() ==
         original->getType().getObjectType());
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(borrowed);
  if (!borrowed->getType().isObject()) {
    return ManagedValue(borrowed, CleanupHandle::invalid());
  }
  assert(isInFormalEvaluationScope() && "Must be in formal evaluation scope");
  // Push the cleanup first, then record the shared borrow in the formal
  // evaluation context, and finally point the cleanup at that record.
  auto &cleanup = Cleanups.pushCleanup<FormalEvaluationEndBorrowCleanup>();
  CleanupHandle handle = Cleanups.getTopCleanup();
  FormalEvalContext.push<SharedBorrowFormalAccess>(loc, handle, original,
                                                   borrowed);
  cleanup.Depth = FormalEvalContext.stable_begin();
  return ManagedValue(borrowed, CleanupHandle::invalid());
}
/// Manage a guaranteed phi argument by attaching an end_borrow cleanup.
/// Ownership-none and trivial arguments need no cleanup and are returned
/// unmanaged.
ManagedValue
SILGenFunction::emitManagedBorrowedArgumentWithCleanup(SILPhiArgument *arg) {
  bool noCleanupNeeded = arg->getOwnershipKind() == ValueOwnershipKind::None ||
                         arg->getType().isTrivial(F);
  if (noCleanupNeeded)
    return ManagedValue::forUnmanaged(arg);
  assert(arg->getOwnershipKind() == ValueOwnershipKind::Guaranteed);
  Cleanups.pushCleanup<EndBorrowCleanup>(arg);
  return ManagedValue(arg, CleanupHandle::invalid());
}
/// Convenience entry point: check type agreement between the original and
/// borrowed values, compute the lowering, and delegate.
ManagedValue
SILGenFunction::emitManagedBorrowedRValueWithCleanup(SILValue original,
                                                     SILValue borrowed) {
  assert(original->getType().getObjectType() ==
         borrowed->getType().getObjectType());
  auto &origLowering = getTypeLowering(original->getType());
  return emitManagedBorrowedRValueWithCleanup(original, borrowed, origLowering);
}
/// Convenience entry point: compute the type lowering for \p borrowed and
/// delegate to the single-value overload.
ManagedValue
SILGenFunction::emitManagedBorrowedRValueWithCleanup(SILValue borrowed) {
  return emitManagedBorrowedRValueWithCleanup(
      borrowed, getTypeLowering(borrowed->getType()));
}
/// Manage a borrowed value: object borrows with real ownership get an
/// end_borrow cleanup; trivial and ownership-none values are unmanaged;
/// address borrows are returned with an invalid cleanup handle.
ManagedValue SILGenFunction::emitManagedBorrowedRValueWithCleanup(
    SILValue borrowed, const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getObjectType() ==
         borrowed->getType().getObjectType());
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(borrowed);
  if (borrowed->getType().isObject()) {
    // Ownership-none object values never need an end_borrow.
    if (borrowed.getOwnershipKind() == ValueOwnershipKind::None)
      return ManagedValue::forUnmanaged(borrowed);
    Cleanups.pushCleanup<EndBorrowCleanup>(borrowed);
  }
  return ManagedValue(borrowed, CleanupHandle::invalid());
}
/// Manage \p borrowed, which is a borrow of \p original.
///
/// Trivial values and borrows of ownership-none object values need no
/// cleanup. Note the ownership check is performed on \p original while the
/// end_borrow cleanup is attached for \p borrowed (and only when the
/// borrowed value is an object).
ManagedValue SILGenFunction::emitManagedBorrowedRValueWithCleanup(
    SILValue original, SILValue borrowed, const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getObjectType() ==
         original->getType().getObjectType());
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(borrowed);
  if (original->getType().isObject() &&
      original.getOwnershipKind() == ValueOwnershipKind::None)
    return ManagedValue::forUnmanaged(borrowed);
  if (borrowed->getType().isObject()) {
    Cleanups.pushCleanup<EndBorrowCleanup>(borrowed);
  }
  return ManagedValue(borrowed, CleanupHandle::invalid());
}
/// Convenience entry point: compute the type lowering for \p v and delegate
/// to the primary emitManagedRValueWithCleanup overload.
ManagedValue SILGenFunction::emitManagedRValueWithCleanup(SILValue v) {
  return emitManagedRValueWithCleanup(v, getTypeLowering(v->getType()));
}
/// Take ownership of the rvalue \p v, attaching a destroy cleanup unless
/// the value is trivial or is an object with no ownership.
ManagedValue SILGenFunction::emitManagedRValueWithCleanup(SILValue v,
                                               const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getObjectType() ==
         v->getType().getObjectType());
  if (lowering.isTrivial())
    return ManagedValue::forUnmanaged(v);
  if (v->getType().isObject() &&
      v.getOwnershipKind() == ValueOwnershipKind::None) {
    return ManagedValue::forUnmanaged(v);
  }
  // Owned value: destroy it when the cleanup fires.
  return ManagedValue(v, enterDestroyCleanup(v));
}
/// Convenience entry point: compute the type lowering for \p v and delegate
/// to the primary emitManagedBufferWithCleanup overload.
ManagedValue SILGenFunction::emitManagedBufferWithCleanup(SILValue v) {
  return emitManagedBufferWithCleanup(v, getTypeLowering(v->getType()));
}
/// Take ownership of the buffer \p v: trivial buffers need no cleanup,
/// otherwise a destroy cleanup is attached.
ManagedValue SILGenFunction::emitManagedBufferWithCleanup(SILValue v,
                                               const TypeLowering &lowering) {
  assert(lowering.getLoweredType().getAddressType() == v->getType() ||
         !silConv.useLoweredAddresses());
  return lowering.isTrivial() ? ManagedValue::forUnmanaged(v)
                              : ManagedValue(v, enterDestroyCleanup(v));
}
/// Emit the expression \p E directly into the initialization \p I.
///
/// \param L optional override location used when forwarding the result;
///        when absent, \p E itself provides the location.
void SILGenFunction::emitExprInto(Expr *E, Initialization *I,
                                  Optional<SILLocation> L) {
  // Handle the special case of copying an lvalue.
  if (auto load = dyn_cast<LoadExpr>(E)) {
    // Writebacks from the lvalue emission are scoped to this copy.
    FormalEvaluationScope writeback(*this);
    auto lv = emitLValue(load->getSubExpr(),
                         SGFAccessKind::BorrowedAddressRead);
    emitCopyLValueInto(E, std::move(lv), I);
    return;
  }
  RValue result = emitRValue(E, SGFContext(I));
  // If the rvalue was emitted directly into I, there is nothing to forward.
  if (result.isInContext())
    return;
  // Otherwise ensure we own the value (+1) and forward it into I.
  std::move(result).ensurePlusOne(*this, E).forwardInto(*this, L ? *L : E, I);
}
namespace {
/// Expression visitor that emits rvalues for each kind of expression.
/// Each visit method receives an SGFContext describing how the caller
/// wants the result delivered (e.g. emitted into an initialization).
class RValueEmitter
    : public Lowering::ExprVisitor<RValueEmitter, RValue, SGFContext>
{
  typedef Lowering::ExprVisitor<RValueEmitter,RValue,SGFContext> super;
public:
  SILGenFunction &SGF;
  RValueEmitter(SILGenFunction &SGF) : SGF(SGF) {}
  using super::visit;
  /// Convenience entry point with an empty context; rejects lvalue-typed
  /// expressions, which must go through lvalue emission instead.
  RValue visit(Expr *E) {
    assert(!E->getType()->is<LValueType>() &&
           !E->getType()->is<InOutType>() &&
           "RValueEmitter shouldn't be called on lvalues");
    return visit(E, SGFContext());
  }
  // These always produce lvalues.
  RValue visitInOutExpr(InOutExpr *E, SGFContext C) {
    LValue lv = SGF.emitLValue(E->getSubExpr(), SGFAccessKind::ReadWrite);
    return RValue(SGF, E, SGF.emitAddressOfLValue(E->getSubExpr(),
                                                  std::move(lv)));
  }
  RValue visitApplyExpr(ApplyExpr *E, SGFContext C);
  RValue visitDiscardAssignmentExpr(DiscardAssignmentExpr *E, SGFContext C) {
    llvm_unreachable("cannot appear in rvalue");
  }
  // Declaration references and literals.
  RValue visitDeclRefExpr(DeclRefExpr *E, SGFContext C);
  RValue visitTypeExpr(TypeExpr *E, SGFContext C);
  RValue visitSuperRefExpr(SuperRefExpr *E, SGFContext C);
  RValue visitOtherConstructorDeclRefExpr(OtherConstructorDeclRefExpr *E,
                                          SGFContext C);
  RValue visitForceTryExpr(ForceTryExpr *E, SGFContext C);
  RValue visitOptionalTryExpr(OptionalTryExpr *E, SGFContext C);
  RValue visitNilLiteralExpr(NilLiteralExpr *E, SGFContext C);
  RValue visitIntegerLiteralExpr(IntegerLiteralExpr *E, SGFContext C);
  RValue visitFloatLiteralExpr(FloatLiteralExpr *E, SGFContext C);
  RValue visitBooleanLiteralExpr(BooleanLiteralExpr *E, SGFContext C);
  RValue emitStringLiteral(Expr *E, StringRef Str, SGFContext C,
                           StringLiteralExpr::Encoding encoding);
  RValue visitStringLiteralExpr(StringLiteralExpr *E, SGFContext C);
  RValue visitLoadExpr(LoadExpr *E, SGFContext C);
  // Conversions.
  RValue visitDerivedToBaseExpr(DerivedToBaseExpr *E, SGFContext C);
  RValue visitMetatypeConversionExpr(MetatypeConversionExpr *E,
                                     SGFContext C);
  RValue visitCollectionUpcastConversionExpr(
           CollectionUpcastConversionExpr *E,
           SGFContext C);
  RValue visitBridgeToObjCExpr(BridgeToObjCExpr *E, SGFContext C);
  RValue visitBridgeFromObjCExpr(BridgeFromObjCExpr *E, SGFContext C);
  RValue visitConditionalBridgeFromObjCExpr(ConditionalBridgeFromObjCExpr *E,
                                            SGFContext C);
  RValue visitArchetypeToSuperExpr(ArchetypeToSuperExpr *E, SGFContext C);
  RValue visitUnresolvedTypeConversionExpr(UnresolvedTypeConversionExpr *E,
                                           SGFContext C);
  RValue visitFunctionConversionExpr(FunctionConversionExpr *E,
                                     SGFContext C);
  RValue visitCovariantFunctionConversionExpr(
           CovariantFunctionConversionExpr *E,
           SGFContext C);
  RValue visitCovariantReturnConversionExpr(
           CovariantReturnConversionExpr *E,
           SGFContext C);
  RValue visitImplicitlyUnwrappedFunctionConversionExpr(
      ImplicitlyUnwrappedFunctionConversionExpr *E, SGFContext C);
  RValue visitErasureExpr(ErasureExpr *E, SGFContext C);
  RValue visitAnyHashableErasureExpr(AnyHashableErasureExpr *E, SGFContext C);
  RValue visitForcedCheckedCastExpr(ForcedCheckedCastExpr *E,
                                    SGFContext C);
  RValue visitConditionalCheckedCastExpr(ConditionalCheckedCastExpr *E,
                                         SGFContext C);
  RValue visitIsExpr(IsExpr *E, SGFContext C);
  RValue visitCoerceExpr(CoerceExpr *E, SGFContext C);
  RValue visitUnderlyingToOpaqueExpr(UnderlyingToOpaqueExpr *E, SGFContext C);
  // Aggregates, member access, and subscripts.
  RValue visitTupleExpr(TupleExpr *E, SGFContext C);
  RValue visitMemberRefExpr(MemberRefExpr *E, SGFContext C);
  RValue visitDynamicMemberRefExpr(DynamicMemberRefExpr *E, SGFContext C);
  RValue visitDotSyntaxBaseIgnoredExpr(DotSyntaxBaseIgnoredExpr *E,
                                       SGFContext C);
  RValue visitTupleElementExpr(TupleElementExpr *E, SGFContext C);
  RValue visitSubscriptExpr(SubscriptExpr *E, SGFContext C);
  RValue visitKeyPathApplicationExpr(KeyPathApplicationExpr *E, SGFContext C);
  RValue visitDynamicSubscriptExpr(DynamicSubscriptExpr *E,
                                   SGFContext C);
  RValue visitDestructureTupleExpr(DestructureTupleExpr *E, SGFContext C);
  RValue visitDynamicTypeExpr(DynamicTypeExpr *E, SGFContext C);
  // Closures and literals with nontrivial emission.
  RValue visitCaptureListExpr(CaptureListExpr *E, SGFContext C);
  RValue visitAbstractClosureExpr(AbstractClosureExpr *E, SGFContext C);
  RValue visitInterpolatedStringLiteralExpr(InterpolatedStringLiteralExpr *E,
                                            SGFContext C);
  RValue visitObjectLiteralExpr(ObjectLiteralExpr *E, SGFContext C);
  RValue visitEditorPlaceholderExpr(EditorPlaceholderExpr *E, SGFContext C);
  RValue visitObjCSelectorExpr(ObjCSelectorExpr *E, SGFContext C);
  RValue visitKeyPathExpr(KeyPathExpr *E, SGFContext C);
  RValue visitMagicIdentifierLiteralExpr(MagicIdentifierLiteralExpr *E,
                                         SGFContext C);
  RValue visitCollectionExpr(CollectionExpr *E, SGFContext C);
  RValue visitRebindSelfInConstructorExpr(RebindSelfInConstructorExpr *E,
                                          SGFContext C);
  // Optionals and control-flow-like expressions.
  RValue visitInjectIntoOptionalExpr(InjectIntoOptionalExpr *E, SGFContext C);
  RValue visitClassMetatypeToObjectExpr(ClassMetatypeToObjectExpr *E,
                                        SGFContext C);
  RValue visitExistentialMetatypeToObjectExpr(ExistentialMetatypeToObjectExpr *E,
                                              SGFContext C);
  RValue visitProtocolMetatypeToObjectExpr(ProtocolMetatypeToObjectExpr *E,
                                           SGFContext C);
  RValue visitIfExpr(IfExpr *E, SGFContext C);
  RValue visitAssignExpr(AssignExpr *E, SGFContext C);
  RValue visitEnumIsCaseExpr(EnumIsCaseExpr *E, SGFContext C);
  RValue visitBindOptionalExpr(BindOptionalExpr *E, SGFContext C);
  RValue visitOptionalEvaluationExpr(OptionalEvaluationExpr *E,
                                     SGFContext C);
  RValue visitForceValueExpr(ForceValueExpr *E, SGFContext C);
  RValue emitForceValue(ForceValueExpr *loc, Expr *E,
                        unsigned numOptionalEvaluations,
                        SGFContext C);
  RValue visitOpenExistentialExpr(OpenExistentialExpr *E, SGFContext C);
  RValue visitMakeTemporarilyEscapableExpr(
                                 MakeTemporarilyEscapableExpr *E, SGFContext C);
  RValue visitOpaqueValueExpr(OpaqueValueExpr *E, SGFContext C);
  RValue visitPropertyWrapperValuePlaceholderExpr(
      PropertyWrapperValuePlaceholderExpr *E, SGFContext C);
  // Pointer conversions and foreign interop.
  RValue visitInOutToPointerExpr(InOutToPointerExpr *E, SGFContext C);
  RValue visitArrayToPointerExpr(ArrayToPointerExpr *E, SGFContext C);
  RValue visitStringToPointerExpr(StringToPointerExpr *E, SGFContext C);
  RValue visitPointerToPointerExpr(PointerToPointerExpr *E, SGFContext C);
  RValue visitForeignObjectConversionExpr(ForeignObjectConversionExpr *E,
                                          SGFContext C);
  RValue visitUnevaluatedInstanceExpr(UnevaluatedInstanceExpr *E,
                                      SGFContext C);
  RValue visitTapExpr(TapExpr *E, SGFContext C);
  RValue visitDefaultArgumentExpr(DefaultArgumentExpr *E, SGFContext C);
  RValue visitErrorExpr(ErrorExpr *E, SGFContext C);
  // Differentiable programming.
  RValue visitDifferentiableFunctionExpr(DifferentiableFunctionExpr *E,
                                         SGFContext C);
  RValue visitLinearFunctionExpr(LinearFunctionExpr *E, SGFContext C);
  RValue visitDifferentiableFunctionExtractOriginalExpr(
      DifferentiableFunctionExtractOriginalExpr *E, SGFContext C);
  RValue visitLinearFunctionExtractOriginalExpr(
      LinearFunctionExtractOriginalExpr *E, SGFContext C);
  RValue visitLinearToDifferentiableFunctionExpr(
      LinearToDifferentiableFunctionExpr *E, SGFContext C);
};
} // end anonymous namespace
namespace {
/// Result of pattern-matching a bridging conversion in an expression tree.
///
/// \c SubExpr is the expression underneath the conversion (null when no
/// conversion was found), \c Kind is the bridging kind if any, and
/// \c MaxOptionalDepth counts the optional-to-optional layers that were
/// looked through.
struct BridgingConversion {
  Expr *SubExpr;
  Optional<Conversion::KindTy> Kind;
  unsigned MaxOptionalDepth;
  BridgingConversion() : SubExpr(nullptr) {}
  BridgingConversion(Expr *sub, Optional<Conversion::KindTy> kind,
                     unsigned depth)
    : SubExpr(sub), Kind(kind), MaxOptionalDepth(depth) {
    assert(!kind || Conversion::isBridgingKind(*kind));
  }
  // True when a conversion (or at least a sub-expression) was recognized.
  explicit operator bool() const { return SubExpr != nullptr; }
};
}
/// Pattern-match \p E for a bridging conversion, looking through optional
/// injections, optional-to-optional conversions, and certain
/// open-existential patterns. Returns an empty BridgingConversion (null
/// SubExpr) only for unrecognized erasures; otherwise returns \p E itself
/// with no kind when no conversion was found.
static BridgingConversion getBridgingConversion(Expr *E) {
  E = E->getSemanticsProvidingExpr();
  // Detect bridging conversions.
  if (auto bridge = dyn_cast<BridgeToObjCExpr>(E)) {
    return { bridge->getSubExpr(), Conversion::BridgeToObjC, 0 };
  }
  if (auto bridge = dyn_cast<BridgeFromObjCExpr>(E)) {
    return { bridge->getSubExpr(), Conversion::BridgeFromObjC, 0 };
  }
  // We can handle optional injections.
  if (auto inject = dyn_cast<InjectIntoOptionalExpr>(E)) {
    return getBridgingConversion(inject->getSubExpr());
  }
  // Look through optional-to-optional conversions.
  if (auto optEval = dyn_cast<OptionalEvaluationExpr>(E)) {
    auto sub = optEval->getSubExpr()->getSemanticsProvidingExpr();
    if (auto subResult = getBridgingConversion(sub)) {
      sub = subResult.SubExpr->getSemanticsProvidingExpr();
      if (auto bind = dyn_cast<BindOptionalExpr>(sub)) {
        // Only accept the bind that matches the depth we have already
        // looked through; deeper binds belong to other evaluations.
        if (bind->getDepth() == subResult.MaxOptionalDepth) {
          return { bind->getSubExpr(),
                   subResult.Kind,
                   subResult.MaxOptionalDepth + 1 };
        }
      }
    }
  }
  // Open-existentials can be part of bridging conversions in very
  // specific patterns.
  auto open = dyn_cast<OpenExistentialExpr>(E);
  if (open) E = open->getSubExpr();
  // Existential erasure.
  if (auto erasure = dyn_cast<ErasureExpr>(E)) {
    Conversion::KindTy kind;
    // Converting to Any is sometimes part of bridging and definitely
    // needs special peepholing behavior.
    if (erasure->getType()->isAny()) {
      kind = Conversion::AnyErasure;
    // Otherwise, nope.
    } else {
      return {};
    }
    // Tentatively look through the erasure.
    E = erasure->getSubExpr();
    // If we have an opening, we can only peephole if the value being
    // used is exactly the original value.
    if (open) {
      if (E == open->getOpaqueValue()) {
        return { open->getExistentialValue(), kind, 0 };
      }
      return {};
    }
    // Otherwise we can always peephole.
    return { E, kind, 0 };
  }
  // If we peeked through an opening, and we didn't recognize a specific
  // pattern above involving the opaque value, make sure we use the opening
  // as the final expression instead of accidentally look through it.
  if (open) return { open, None, 0 };
  return { E, None, 0 };
}
/// If the given expression represents a bridging conversion, emit it with
/// the special reabstracting context.
///
/// Returns None when no peephole applies and the caller should emit the
/// expression normally.
static Optional<ManagedValue>
tryEmitAsBridgingConversion(SILGenFunction &SGF, Expr *E, bool isExplicit,
                            SGFContext C) {
  // Try to pattern-match a conversion. This can find bridging
  // conversions, but it can also find simple optional conversions:
  // injections and opt-to-opt conversions.
  auto result = getBridgingConversion(E);
  // If we didn't find a conversion at all, there's nothing special to do.
  if (!result ||
      result.SubExpr == E ||
      result.SubExpr->getType()->isEqual(E->getType()))
    return None;
  // Even if the conversion doesn't involve bridging, we might still
  // expose more peephole opportunities by combining it with a contextual
  // conversion.
  if (!result.Kind) {
    // Only do this if the conversion is implicit.
    if (isExplicit)
      return None;
    // Look for a contextual conversion.
    auto conversion = C.getAsConversion();
    if (!conversion)
      return None;
    // Adjust the contextual conversion.
    auto sub = result.SubExpr;
    auto sourceType = sub->getType()->getCanonicalType();
    if (auto adjusted = conversion->getConversion()
                          .adjustForInitialOptionalConversions(sourceType)) {
      // Emit into the applied conversion.
      return conversion->emitWithAdjustedConversion(SGF, E, *adjusted,
                [sub](SILGenFunction &SGF, SILLocation loc, SGFContext C) {
        return SGF.emitRValueAsSingleValue(sub, C);
      });
    }
    // If that didn't work, there's nothing special to do.
    return None;
  }
  auto kind = *result.Kind;
  auto subExpr = result.SubExpr;
  CanType resultType = E->getType()->getCanonicalType();
  Conversion conversion =
    Conversion::getBridging(kind, subExpr->getType()->getCanonicalType(),
                            resultType, SGF.getLoweredType(resultType),
                            isExplicit);
  // Only use this special pattern for AnyErasure conversions when we're
  // emitting into a peephole.
  if (kind == Conversion::AnyErasure) {
    auto outerConversion = C.getAsConversion();
    if (!outerConversion ||
        !canPeepholeConversions(SGF, outerConversion->getConversion(),
                                conversion)) {
      return None;
    }
  }
  return SGF.emitConvertedRValue(subExpr, conversion, C);
}
RValue RValueEmitter::visitApplyExpr(ApplyExpr *E, SGFContext C) {
  // Function application is handled by the dedicated apply emission path.
  return SGF.emitApplyExpr(E, C);
}
/// Emit the canonical empty tuple value '()'.
SILValue SILGenFunction::emitEmptyTuple(SILLocation loc) {
  auto emptyTupleTy =
      getLoweredType(TupleType::getEmpty(SGM.M.getASTContext()));
  return B.createTuple(loc, emptyTupleTy, {});
}
namespace {
/// This is a simple cleanup class that at the end of a lexical scope consumes
/// an owned value by writing it back to memory. The user can forward this
/// cleanup to take ownership of the value and thus prevent it form being
/// written back.
struct OwnedValueWritebackCleanup final : Cleanup {
  using Flags = Cleanup::Flags;
  /// We store our own loc so that we can ensure that DI ignores our writeback.
  SILLocation loc;
  SILValue lvalueAddress;
  SILValue value;
  OwnedValueWritebackCleanup(SILLocation loc, SILValue lvalueAddress,
                             SILValue value)
      : loc(loc), lvalueAddress(lvalueAddress), value(value) {}
  // Expose the writeback destination to interested clients.
  bool getWritebackBuffer(function_ref<void(SILValue)> func) override {
    func(lvalueAddress);
    return true;
  }
  void emit(SILGenFunction &SGF, CleanupLocation l, ForUnwind_t forUnwind) override {
    SILValue valueToStore = value;
    SILType lvalueObjTy = lvalueAddress->getType().getObjectType();
    // If we calling a super.init and thus upcasted self, when we store self
    // back into the self slot, we need to perform a downcast from the upcasted
    // store value to the derived type of our lvalueAddress.
    if (valueToStore->getType() != lvalueObjTy) {
      if (!valueToStore->getType().isExactSuperclassOf(lvalueObjTy)) {
        llvm_unreachable("Invalid usage of delegate init self writeback");
      }
      valueToStore = SGF.B.createUncheckedRefCast(loc, valueToStore,
                                                  lvalueObjTy);
    }
    SGF.B.emitStoreValueOperation(loc, valueToStore, lvalueAddress,
                                  StoreOwnershipQualifier::Init);
  }
  void dump(SILGenFunction &) const override {
#ifndef NDEBUG
    llvm::errs() << "OwnedValueWritebackCleanup "
                 << "State:" << getState() << "\n"
                 << "lvalueAddress:" << lvalueAddress << "value:" << value
                 << "\n";
#endif
  }
};
} // end anonymous namespace
/// Push an OwnedValueWritebackCleanup that stores \p newValue back to
/// \p address at end of scope, and return its handle so the caller can
/// forward (cancel) the writeback.
CleanupHandle SILGenFunction::enterOwnedValueWritebackCleanup(
    SILLocation loc, SILValue address, SILValue newValue) {
  Cleanups.pushCleanup<OwnedValueWritebackCleanup>(loc, address, newValue);
  return Cleanups.getTopCleanup();
}
/// Emit an rvalue for 'self' while inside a delegating-initializer
/// sequence, advancing the SelfInitDelegationState machine as needed.
///
/// \param refType the formal type of the reference being produced.
/// \param addr    the address of the self box; must be loadable.
RValue SILGenFunction::emitRValueForSelfInDelegationInit(SILLocation loc,
                                                         CanType refType,
                                                         SILValue addr,
                                                         SGFContext C) {
  assert(SelfInitDelegationState != SILGenFunction::NormalSelf &&
         "This should never be called unless we are in a delegation sequence");
  assert(getTypeLowering(addr->getType()).isLoadable() &&
         "Make sure that we are not dealing with semantic rvalues");
  // If we are currently in the WillSharedBorrowSelf state, then we know that
  // old self is not the self to our delegating initializer. Self in this case
  // to the delegating initializer is a metatype. Thus, we perform a
  // load_borrow. And move from WillSharedBorrowSelf -> DidSharedBorrowSelf.
  if (SelfInitDelegationState == SILGenFunction::WillSharedBorrowSelf) {
    assert(C.isGuaranteedPlusZeroOk() &&
           "This should only be called if guaranteed plus zero is ok");
    SelfInitDelegationState = SILGenFunction::DidSharedBorrowSelf;
    ManagedValue result =
        B.createLoadBorrow(loc, ManagedValue::forUnmanaged(addr));
    return RValue(*this, loc, refType, result);
  }
  // If we are already in the did shared borrow self state, just return the
  // shared borrow value.
  if (SelfInitDelegationState == SILGenFunction::DidSharedBorrowSelf) {
    assert(C.isGuaranteedPlusZeroOk() &&
           "This should only be called if guaranteed plus zero is ok");
    ManagedValue result =
        B.createLoadBorrow(loc, ManagedValue::forUnmanaged(addr));
    return RValue(*this, loc, refType, result);
  }
  // If we are in WillExclusiveBorrowSelf, then we need to perform an exclusive
  // borrow (i.e. a load take) and then move to DidExclusiveBorrowSelf.
  if (SelfInitDelegationState == SILGenFunction::WillExclusiveBorrowSelf) {
    const auto &typeLowering = getTypeLowering(addr->getType());
    SelfInitDelegationState = SILGenFunction::DidExclusiveBorrowSelf;
    SILValue self =
        emitLoad(loc, addr, typeLowering, C, IsTake, false).forward(*this);
    // Forward our initial value for init delegation self and create a new
    // cleanup that performs a writeback at the end of lexical scope if our
    // value is not consumed.
    InitDelegationSelf = ManagedValue::forExclusivelyBorrowedOwnedObjectRValue(
        self, enterOwnedValueWritebackCleanup(*InitDelegationLoc, addr, self));
    InitDelegationSelfBox = addr;
    return RValue(*this, loc, refType, InitDelegationSelf);
  }
  // If we hit this point, we must have DidExclusiveBorrowSelf. We should have
  // gone through the formal evaluation variant but did not. The only way that
  // this can happen is if during argument evaluation, we are accessing self in
  // a way that is illegal before we call super. Return a copy of self in this
  // case so that DI will flag on this issue. We do not care where the destroy
  // occurs, so we can use a normal scoped copy.
  ManagedValue Result;
  if (!SuperInitDelegationSelf) {
    Result = InitDelegationSelf.copy(*this, loc);
  } else {
    Result =
        B.createUncheckedRefCast(loc, SuperInitDelegationSelf.copy(*this, loc),
                                 InitDelegationSelf.getType());
  }
  return RValue(*this, loc, refType, Result);
}
/// Formal-evaluation variant of emitRValueForSelfInDelegationInit: borrows
/// of self are scoped to the enclosing formal evaluation context. Unlike
/// the non-formal variant, this never begins an exclusive borrow.
RValue SILGenFunction::emitFormalEvaluationRValueForSelfInDelegationInit(
    SILLocation loc, CanType refType, SILValue addr, SGFContext C) {
  assert(SelfInitDelegationState != SILGenFunction::NormalSelf &&
         "This should never be called unless we are in a delegation sequence");
  assert(getTypeLowering(addr->getType()).isLoadable() &&
         "Make sure that we are not dealing with semantic rvalues");
  // If we are currently in the WillSharedBorrowSelf state, then we know that
  // old self is not the self to our delegating initializer. Self in this case
  // to the delegating initializer is a metatype. Thus, we perform a
  // load_borrow. And move from WillSharedBorrowSelf -> DidSharedBorrowSelf.
  if (SelfInitDelegationState == SILGenFunction::WillSharedBorrowSelf) {
    assert(C.isGuaranteedPlusZeroOk() &&
           "This should only be called if guaranteed plus zero is ok");
    SelfInitDelegationState = SILGenFunction::DidSharedBorrowSelf;
    ManagedValue result =
        B.createFormalAccessLoadBorrow(loc, ManagedValue::forUnmanaged(addr));
    return RValue(*this, loc, refType, result);
  }
  // If we are already in the did shared borrow self state, just return the
  // shared borrow value.
  if (SelfInitDelegationState == SILGenFunction::DidSharedBorrowSelf) {
    assert(C.isGuaranteedPlusZeroOk() &&
           "This should only be called if guaranteed plus zero is ok");
    ManagedValue result =
        B.createFormalAccessLoadBorrow(loc, ManagedValue::forUnmanaged(addr));
    return RValue(*this, loc, refType, result);
  }
  // If we hit this point, we must have DidExclusiveBorrowSelf. Thus borrow
  // self.
  //
  // *NOTE* This routine should /never/ begin an exclusive borrow of self. It is
  // only called when emitting self as a base in lvalue emission.
  assert(SelfInitDelegationState == SILGenFunction::DidExclusiveBorrowSelf);
  // If we do not have a super init delegation self, just perform a formal
  // access borrow and return. This occurs with delegating initializers.
  if (!SuperInitDelegationSelf) {
    return RValue(*this, loc, refType,
                  InitDelegationSelf.formalAccessBorrow(*this, loc));
  }
  // Otherwise, we had an upcast of some sort due to a chaining
  // initializer. This means that we need to perform a borrow from
  // SuperInitDelegationSelf and then downcast that borrow.
  ManagedValue borrowedUpcast =
      SuperInitDelegationSelf.formalAccessBorrow(*this, loc);
  ManagedValue castedBorrowedType = B.createUncheckedRefCast(
      loc, borrowedUpcast, InitDelegationSelf.getType());
  return RValue(*this, loc, refType, castedBorrowedType);
}
/// Emit an rvalue referencing the declaration \p declRef.
///
/// Modules produce undef (their value is never used); vars go through
/// non-member-var emission; anything else is treated as a function-like
/// constant and emitted as a closure value.
RValue SILGenFunction::
emitRValueForDecl(SILLocation loc, ConcreteDeclRef declRef, Type ncRefType,
                  AccessSemantics semantics, SGFContext C) {
  assert(!ncRefType->is<LValueType>() &&
         "RValueEmitter shouldn't be called on lvalues");
  // If this is a decl that we have an lvalue for, produce and return it.
  ValueDecl *decl = declRef.getDecl();
  CanType refType = ncRefType->getCanonicalType();
  // If this is a reference to a module, produce an undef value. The
  // module value should never actually be used.
  if (isa<ModuleDecl>(decl)) {
    return emitUndefRValue(loc, refType);
  }
  // If this is a reference to a var, emit it as an l-value and then load.
  if (auto *var = dyn_cast<VarDecl>(decl))
    return emitRValueForNonMemberVarDecl(loc, declRef, refType, semantics, C);
  assert(!isa<TypeDecl>(decl));
  // If the referenced decl isn't a VarDecl, it should be a constant of some
  // sort.
  SILDeclRef silDeclRef(decl);
  assert(silDeclRef.getParameterListCount() == 1);
  ManagedValue result = emitClosureValue(loc, silDeclRef, refType,
                                         declRef.getSubstitutions());
  return RValue(*this, loc, refType, result);
}
RValue RValueEmitter::visitDeclRefExpr(DeclRefExpr *E, SGFContext C) {
  // Forward to the general declaration-reference emission path.
  return SGF.emitRValueForDecl(E, E->getDeclRef(), E->getType(),
                               E->getAccessSemantics(), C);
}
/// Emit a metatype value for a type expression.
RValue RValueEmitter::visitTypeExpr(TypeExpr *E, SGFContext C) {
  assert(E->getType()->is<AnyMetatypeType>() &&
         "TypeExpr must have metatype type");
  auto loweredMetatypeTy = SGF.getLoweredType(E->getType());
  auto metatype = SGF.B.createMetatype(E, loweredMetatypeTy);
  // Metatypes are trivial; no cleanup is required.
  return RValue(SGF, E, ManagedValue::forUnmanaged(metatype));
}
/// Emit a reference to 'super': emit 'self' and upcast it to the
/// superclass type of the expression.
RValue RValueEmitter::visitSuperRefExpr(SuperRefExpr *E, SGFContext C) {
  assert(!E->getType()->is<LValueType>() &&
         "RValueEmitter shouldn't be called on lvalues");
  // If we have a normal self call, then use the emitRValueForDecl call. This
  // will emit self at +0 since it is guaranteed.
  ManagedValue Self =
      SGF.emitRValueForDecl(E, E->getSelf(), E->getSelf()->getType(),
                            AccessSemantics::Ordinary)
          .getScalarValue();
  // Perform an upcast to convert self to the indicated super type.
  auto result = SGF.B.createUpcast(E, Self, SGF.getLoweredType(E->getType()));
  return RValue(SGF, E, result);
}
RValue RValueEmitter::
visitUnresolvedTypeConversionExpr(UnresolvedTypeConversionExpr *E,
                                  SGFContext C) {
  // Unresolved conversions should be rejected before SILGen runs.
  llvm_unreachable("invalid code made its way into SILGen");
}
RValue RValueEmitter::visitOtherConstructorDeclRefExpr(
    OtherConstructorDeclRefExpr *E, SGFContext C) {
  // This should always be a child of an ApplyExpr and so will be emitted by
  // SILGenApply.
  llvm_unreachable("unapplied reference to constructor?!");
}
/// Emit 'nil'. For optional types, build the '.none' case directly instead
/// of calling the nil-literal initializer; otherwise fall back to the
/// generic literal emission path.
RValue RValueEmitter::visitNilLiteralExpr(NilLiteralExpr *E, SGFContext C) {
  // Peephole away the call to Optional<T>(nilLiteral: ()).
  if (E->getType()->getOptionalObjectType()) {
    auto *noneDecl = SGF.getASTContext().getOptionalNoneDecl();
    auto enumTy = SGF.getLoweredType(E->getType());
    ManagedValue noneValue;
    if (enumTy.isLoadable(SGF.F) || !SGF.silConv.useLoweredAddresses()) {
      // Loadable (or opaque-values mode): build the enum value directly.
      noneValue = ManagedValue::forUnmanaged(
          SGF.B.createEnum(E, SILValue(), noneDecl, enumTy));
    } else {
      // Address-only: initialize a buffer with inject_enum_addr.
      noneValue =
          SGF.B.bufferForExpr(E, enumTy, SGF.getTypeLowering(enumTy), C,
                              [&](SILValue newAddr) {
                                SGF.B.createInjectEnumAddr(E, newAddr, noneDecl);
                              });
    }
    return RValue(SGF, E, noneValue);
  }
  return SGF.emitLiteral(E, C);
}
/// Emit an integer literal. Builtin integer types become integer_literal
/// instructions directly; everything else goes through the generic literal
/// emission path.
RValue RValueEmitter::visitIntegerLiteralExpr(IntegerLiteralExpr *E,
                                              SGFContext C) {
  if (!E->getType()->is<AnyBuiltinIntegerType>())
    return SGF.emitLiteral(E, C);
  auto literal = SGF.B.createIntegerLiteral(E);
  return RValue(SGF, E, ManagedValue::forUnmanaged(literal));
}
/// Emit a float literal. Builtin float types become float_literal
/// instructions directly; everything else goes through the generic literal
/// emission path.
RValue RValueEmitter::visitFloatLiteralExpr(FloatLiteralExpr *E,
                                            SGFContext C) {
  if (!E->getType()->is<BuiltinFloatType>())
    return SGF.emitLiteral(E, C);
  auto literal = SGF.B.createFloatLiteral(E);
  return RValue(SGF, E, ManagedValue::forUnmanaged(literal));
}
RValue RValueEmitter::visitBooleanLiteralExpr(BooleanLiteralExpr *E,
                                              SGFContext C) {
  // Boolean literals always use the generic literal emission path.
  return SGF.emitLiteral(E, C);
}
RValue RValueEmitter::visitStringLiteralExpr(StringLiteralExpr *E,
                                             SGFContext C) {
  // String literals always use the generic literal emission path.
  return SGF.emitLiteral(E, C);
}
/// Emit a load from an lvalue as an owned rvalue.
RValue RValueEmitter::visitLoadExpr(LoadExpr *E, SGFContext C) {
  // Any writebacks here are tightly scoped.
  FormalEvaluationScope writeback(SGF);
  LValue lv = SGF.emitLValue(E->getSubExpr(), SGFAccessKind::OwnedObjectRead);
  // We can't load at immediate +0 from the lvalue without deeper analysis,
  // since the access will be immediately ended and might invalidate the value
  // we loaded.
  return SGF.emitLoadOfLValue(E, std::move(lv), C.withFollowingSideEffects());
}
/// Allocate a stack temporary of type \p ty, registering a dealloc_stack
/// cleanup for it. If the location is a VarDecl, debug info is attached.
SILValue SILGenFunction::emitTemporaryAllocation(SILLocation loc,
                                                 SILType ty) {
  ty = ty.getObjectType();
  Optional<SILDebugVariable> DbgVar;
  if (auto *VD = loc.getAsASTNode<VarDecl>())
    DbgVar = SILDebugVariable(VD->isLet(), 0);
  auto alloc = B.createAllocStack(loc, ty, DbgVar);
  // Ensure the allocation is deallocated when the scope ends.
  enterDeallocStackCleanup(alloc);
  return alloc;
}
/// Get a buffer address into which an expression result may be emitted:
/// either the context's in-place initialization buffer or a fresh stack
/// temporary.
SILValue SILGenFunction::
getBufferForExprResult(SILLocation loc, SILType ty, SGFContext C) {
  // If you change this, change manageBufferForExprResult below as well.
  // If we have a single-buffer "emit into" initialization, use that for the
  // result.
  if (SILValue address = C.getAddressForInPlaceInitialization(*this, loc))
    return address;
  // If we couldn't emit into the Initialization, emit into a temporary
  // allocation.
  return emitTemporaryAllocation(loc, ty.getObjectType());
}
/// Finish an expression result that was emitted into a buffer obtained
/// from getBufferForExprResult.
///
/// \returns ManagedValue::forInContext() if the context's initialization
///          was used; otherwise a managed value for the temporary buffer
///          (with a destroy cleanup unless the type is trivial).
ManagedValue SILGenFunction::
manageBufferForExprResult(SILValue buffer, const TypeLowering &bufferTL,
                          SGFContext C) {
  // If we have a single-buffer "emit into" initialization, use that for the
  // result.
  if (C.finishInPlaceInitialization(*this))
    return ManagedValue::forInContext();

  // Add a cleanup for the temporary we allocated.
  if (bufferTL.isTrivial())
    return ManagedValue::forUnmanaged(buffer);

  return ManagedValue(buffer, enterDestroyCleanup(buffer));
}
/// Begin emission of a 'try!' expression: install a fresh throw
/// destination (a postmatter "catch" block) for the duration of the
/// subexpression, saving the previous one so finish() can restore it.
SILGenFunction::ForceTryEmission::ForceTryEmission(SILGenFunction &SGF,
                                                   ForceTryExpr *loc)
    : SGF(SGF), Loc(loc), OldThrowDest(SGF.ThrowDest) {
  assert(loc && "cannot pass a null location");

  // Set up a "catch" block for when an error occurs.
  SILBasicBlock *catchBB = SGF.createBasicBlock(FunctionSection::Postmatter);
  SGF.ThrowDest = JumpDest(catchBB, SGF.Cleanups.getCleanupsDepth(),
                           CleanupLocation::get(loc));
}
/// Finish emission of a 'try!' expression: restore the saved throw
/// destination and emit the catch block, which reports the unexpected
/// error (when the library intrinsic is available) and traps.
void SILGenFunction::ForceTryEmission::finish() {
  assert(Loc && "emission already finished");

  auto catchBB = SGF.ThrowDest.getBlock();
  SGF.ThrowDest = OldThrowDest;

  // If there are no uses of the catch block, just drop it.
  if (catchBB->pred_empty()) {
    SGF.eraseBasicBlock(catchBB);
  } else {
    // Otherwise, we need to emit it.
    SILGenSavedInsertionPoint scope(SGF, catchBB, FunctionSection::Postmatter);

    // Call the library's unexpected-error diagnostic with the error and the
    // source location of the '!' so runtime failure messages point at it.
    if (auto diagnoseError = SGF.getASTContext().getDiagnoseUnexpectedError()) {
      ASTContext &ctx = SGF.getASTContext();
      // The thrown error arrives as an owned phi argument of the catch block.
      auto error = SGF.B.createOwnedPhiArgument(SILType::getExceptionType(ctx));
      auto args = SGF.emitSourceLocationArgs(Loc->getExclaimLoc(), Loc);

      SGF.emitApplyOfLibraryIntrinsic(
          Loc,
          diagnoseError,
          SubstitutionMap(),
          {
            error,
            args.filenameStartPointer,
            args.filenameLength,
            args.filenameIsAscii,
            args.line
          },
          SGFContext());
    }
    // A 'try!' that actually throws never continues.
    SGF.B.createUnreachable(Loc);
  }

  // Prevent double-finishing and make the destructor a no-op.
  Loc = nullptr;
}
/// Emit a 'try!' expression: the RAII ForceTryEmission installs the
/// trap-on-throw destination around emission of the subexpression.
RValue RValueEmitter::visitForceTryExpr(ForceTryExpr *E, SGFContext C) {
  SILGenFunction::ForceTryEmission emission(SGF, E);

  // Visit the sub-expression.
  return visit(E->getSubExpr(), C);
}
/// Emit a 'try?' expression.
///
/// The subexpression is evaluated with a temporary throw destination; on
/// success the (possibly optional-wrapped) result flows to a continuation
/// block, and on failure the catch block consumes the error and injects
/// .none into the result.  The result is built either in-memory (address-
/// only type or a provided "emit into" context) or in SSA registers.
RValue RValueEmitter::visitOptionalTryExpr(OptionalTryExpr *E, SGFContext C) {
  // FIXME: Much of this was copied from visitOptionalEvaluationExpr.

  // Prior to Swift 5, an optional try's subexpression is always wrapped in an additional optional
  bool shouldWrapInOptional = !(SGF.getASTContext().LangOpts.isSwiftVersionAtLeast(5));

  auto &optTL = SGF.getTypeLowering(E->getType());

  Initialization *optInit = C.getEmitInto();
  bool usingProvidedContext =
      optInit && optInit->canPerformInPlaceInitialization();

  // Form the optional using address operations if the type is address-only or
  // if we already have an address to use.
  bool isByAddress = usingProvidedContext || optTL.isAddressOnly();

  std::unique_ptr<TemporaryInitialization> optTemp;
  if (!usingProvidedContext && isByAddress) {
    // Allocate the temporary for the Optional<T> if we didn't get one from the
    // context.
    optTemp = SGF.emitTemporary(E, optTL);
    optInit = optTemp.get();
  } else if (!usingProvidedContext) {
    // If the caller produced a context for us, but we can't use it, then don't.
    optInit = nullptr;
  }

  // Cleanups for the subexpression must not outlive this expression.
  FullExpr localCleanups(SGF.Cleanups, E);

  // Set up a "catch" block for when an error occurs.
  SILBasicBlock *catchBB = SGF.createBasicBlock(FunctionSection::Postmatter);
  llvm::SaveAndRestore<JumpDest> throwDest{
    SGF.ThrowDest,
    JumpDest(catchBB, SGF.Cleanups.getCleanupsDepth(), E)};

  SILValue branchArg;
  if (shouldWrapInOptional) {
    // Pre-Swift-5 semantics: wrap the subexpression's value in .some.
    if (isByAddress) {
      assert(optInit);
      SILValue optAddr = optInit->getAddressForInPlaceInitialization(SGF, E);
      SGF.emitInjectOptionalValueInto(E, E->getSubExpr(), optAddr, optTL);
    } else {
      ManagedValue subExprValue = SGF.emitRValueAsSingleValue(E->getSubExpr());
      ManagedValue wrapped = SGF.getOptionalSomeValue(E, subExprValue, optTL);
      branchArg = wrapped.forward(SGF);
    }
  }
  else {
    // Swift 5+: the subexpression already has the optional result type.
    if (isByAddress) {
      assert(optInit);
      SGF.emitExprInto(E->getSubExpr(), optInit);
    } else {
      ManagedValue subExprValue = SGF.emitRValueAsSingleValue(E->getSubExpr());
      branchArg = subExprValue.forward(SGF);
    }
  }

  localCleanups.pop();

  // If it turns out there are no uses of the catch block, just drop it.
  if (catchBB->pred_empty()) {
    // Remove the dead failureBB.
    SGF.eraseBasicBlock(catchBB);

    // The value we provide is the one we've already got.
    if (!isByAddress)
      return RValue(SGF, E,
                    SGF.emitManagedRValueWithCleanup(branchArg, optTL));

    if (shouldWrapInOptional) {
      optInit->finishInitialization(SGF);
    }

    // If we emitted into the provided context, we're done.
    if (usingProvidedContext)
      return RValue::forInContext();

    return RValue(SGF, E, optTemp->getManagedAddress());
  }

  SILBasicBlock *contBB = SGF.createBasicBlock();

  // Branch to the continuation block.
  if (isByAddress)
    SGF.B.createBranch(E, contBB);
  else
    SGF.B.createBranch(E, contBB, branchArg);

  // If control branched to the failure block, inject .None into the
  // result type.
  SGF.B.emitBlock(catchBB);
  FullExpr catchCleanups(SGF.Cleanups, E);
  // The thrown error arrives as an owned phi; give it a cleanup so it is
  // destroyed when the catch scope pops.
  auto *errorArg =
      catchBB->createPhiArgument(SILType::getExceptionType(SGF.getASTContext()),
                                 ValueOwnershipKind::Owned);
  (void) SGF.emitManagedRValueWithCleanup(errorArg);
  catchCleanups.pop();

  if (isByAddress) {
    SGF.emitInjectOptionalNothingInto(E,
                    optInit->getAddressForInPlaceInitialization(SGF, E), optTL);
    SGF.B.createBranch(E, contBB);
  } else {
    auto branchArg = SGF.getOptionalNoneValue(E, optTL);
    SGF.B.createBranch(E, contBB, branchArg);
  }

  // Emit the continuation block.
  SGF.B.emitBlock(contBB);

  // If this was done in SSA registers, then the value is provided as an
  // argument to the block.
  if (!isByAddress) {
    auto arg = contBB->createPhiArgument(optTL.getLoweredType(),
                                         ValueOwnershipKind::Owned);
    return RValue(SGF, E, SGF.emitManagedRValueWithCleanup(arg, optTL));
  }

  if (shouldWrapInOptional) {
    optInit->finishInitialization(SGF);
  }

  // If we emitted into the provided context, we're done.
  if (usingProvidedContext)
    return RValue::forInContext();

  assert(optTemp);
  return RValue(SGF, E, optTemp->getManagedAddress());
}
/// Returns true while delegating-initializer emission is inside the
/// exclusive-borrow-of-self section (just before or just after self is
/// exclusively borrowed).
static bool inExclusiveBorrowSelfSection(
    SILGenFunction::SelfInitDelegationStates delegationState) {
  const bool willBorrow =
      delegationState == SILGenFunction::WillExclusiveBorrowSelf;
  const bool didBorrow =
      delegationState == SILGenFunction::DidExclusiveBorrowSelf;
  return willBorrow || didBorrow;
}
/// Special-case emission of a derived-to-base cast of 'self' inside the
/// exclusive-borrow section of a delegating initializer.
///
/// Self must be produced via the delegation-aware paths (rather than the
/// ordinary rvalue path) so borrows have the right formal-evaluation
/// lifetime, then upcast to the base type if lowering requires it.
static RValue visitDerivedToBaseExprOfSelf(SILGenFunction &SGF,
                                           DeclRefExpr *dre,
                                           DerivedToBaseExpr *E, SGFContext C) {
  SGFContext ctx;
  auto *vd = cast<ParamDecl>(dre->getDecl());
  SILType derivedType = SGF.getLoweredType(E->getType());
  ManagedValue selfValue;

  // If we have not exclusively borrowed self, we need to do so now.
  if (SGF.SelfInitDelegationState == SILGenFunction::WillExclusiveBorrowSelf) {
    // We need to use a full scope here to ensure that any underlying
    // "normal cleanup" borrows are cleaned up.
    Scope S(SGF, E);
    selfValue = S.popPreservingValue(SGF.emitRValueAsSingleValue(dre));
  } else {
    // If we already exclusively borrowed self, then we need to emit self
    // using formal evaluation primitives.
    assert(SGF.SelfInitDelegationState ==
           SILGenFunction::DidExclusiveBorrowSelf);
    // This needs to be inlined since there is a Formal Evaluation Scope
    // in emitRValueForDecl that causing any borrow for this LValue to be
    // popped too soon.
    selfValue =
        SGF.emitAddressOfLocalVarDecl(dre, vd, dre->getType()->getCanonicalType(),
                                      SGFAccessKind::OwnedObjectRead);
    selfValue = SGF.emitFormalEvaluationRValueForSelfInDelegationInit(
                       E, dre->getType()->getCanonicalType(),
                       selfValue.getLValueAddress(), ctx)
                    .getAsSingleValue(SGF, E);
  }
  assert(selfValue);

  // Check if we need to perform a conversion here.
  if (derivedType && selfValue.getType() != derivedType)
    selfValue = SGF.B.createUpcast(E, selfValue, derivedType);
  return RValue(SGF, dre, selfValue);
}
/// Emit a derived-to-base conversion, upcasting at the SIL level only when
/// the lowered types actually differ.
RValue RValueEmitter::visitDerivedToBaseExpr(DerivedToBaseExpr *E,
                                             SGFContext C) {
  // If we are going through a decl ref expr and have self and we are in the
  // exclusive borrow section of delegating init emission, use a special case.
  if (inExclusiveBorrowSelfSection(SGF.SelfInitDelegationState)) {
    if (auto *dre = dyn_cast<DeclRefExpr>(E->getSubExpr())) {
      // Only the implicit 'self' parameter qualifies for the special case.
      if (isa<ParamDecl>(dre->getDecl()) &&
          dre->getDecl()->getName() == SGF.getASTContext().Id_self &&
          dre->getDecl()->isImplicit()) {
        return visitDerivedToBaseExprOfSelf(SGF, dre, E, C);
      }
    }
  }

  // We can pass down the SGFContext as a following projection. We have never
  // actually implemented emit into here, so we are not changing behavior.
  ManagedValue original =
      SGF.emitRValueAsSingleValue(E->getSubExpr(), C.withFollowingProjection());

  // Derived-to-base casts in the AST might not be reflected as such
  // in the SIL type system, for example, a cast from DynamicSelf
  // directly to its own Self type.
  auto loweredResultTy = SGF.getLoweredType(E->getType());
  if (original.getType() == loweredResultTy)
    return RValue(SGF, E, original);

  ManagedValue converted = SGF.B.createUpcast(E, original, loweredResultTy);
  return RValue(SGF, E, converted);
}
/// Emit a metatype-to-metatype conversion.  Metatypes are trivial values,
/// so the result carries no cleanup; an upcast is emitted only when the
/// lowered source and destination types differ.
RValue RValueEmitter::visitMetatypeConversionExpr(MetatypeConversionExpr *E,
                                                  SGFContext C) {
  // Evaluate the operand metatype; trivial, so unmanaged is sufficient.
  SILValue metaBase =
      SGF.emitRValueAsSingleValue(E->getSubExpr()).getUnmanagedValue();

  // Metatype conversion casts in the AST might not be reflected as such in
  // the SIL type system, for example, a cast from DynamicSelf.Type directly
  // to its own Self.Type.
  auto loweredResultTy = SGF.getLoweredLoadableType(E->getType());
  SILValue result = metaBase;
  if (result->getType() != loweredResultTy)
    result = SGF.B.createUpcast(E, metaBase, loweredResultTy);
  return RValue(SGF, E, ManagedValue::forUnmanaged(result));
}
/// Apply a library collection-conversion intrinsic (e.g. Array upcast) to
/// convert \p mv from \p fromCollection to \p toCollection.
///
/// The intrinsic's generic signature takes the source collection's element
/// parameters followed by the destination's, so the two context
/// substitution maps are combined at the source's parameter count.
RValue SILGenFunction::emitCollectionConversion(SILLocation loc,
                                                FuncDecl *fn,
                                                CanType fromCollection,
                                                CanType toCollection,
                                                ManagedValue mv,
                                                SGFContext C) {
  auto *fromDecl = fromCollection->getAnyNominal();
  auto *toDecl = toCollection->getAnyNominal();

  auto fromSubMap = fromCollection->getContextSubstitutionMap(
    SGM.SwiftModule, fromDecl);
  auto toSubMap = toCollection->getContextSubstitutionMap(
    SGM.SwiftModule, toDecl);

  // Form type parameter substitutions.
  auto genericSig = fn->getGenericSignature();
  unsigned fromParamCount = fromDecl->getGenericSignature()
    ->getGenericParams().size();

  auto subMap =
    SubstitutionMap::combineSubstitutionMaps(fromSubMap,
                                             toSubMap,
                                             CombineSubstitutionMaps::AtIndex,
                                             fromParamCount,
                                             0,
                                             genericSig);
  return emitApplyOfLibraryIntrinsic(loc, fn, subMap, {mv}, C);
}
/// Emit a collection upcast (Array/Dictionary/Set covariance) by calling
/// the corresponding library conversion intrinsic.
RValue RValueEmitter::
visitCollectionUpcastConversionExpr(CollectionUpcastConversionExpr *E,
                                    SGFContext C) {
  SILLocation loc = RegularLocation(E);

  // Evaluate the source collection first.
  auto mv = SGF.emitRValueAsSingleValue(E->getSubExpr());

  // The intrinsic is generic over the source and destination element types.
  auto fromCollection = E->getSubExpr()->getType()->getCanonicalType();
  auto toCollection = E->getType()->getCanonicalType();

  // Pick the library entry point matching the collection kind.
  auto &ctx = SGF.getASTContext();
  auto *nominal = fromCollection->getAnyNominal();
  FuncDecl *fn = nullptr;
  if (nominal == ctx.getArrayDecl())
    fn = SGF.SGM.getArrayForceCast(loc);
  else if (nominal == ctx.getDictionaryDecl())
    fn = SGF.SGM.getDictionaryUpCast(loc);
  else if (nominal == ctx.getSetDecl())
    fn = SGF.SGM.getSetUpCast(loc);
  else
    llvm_unreachable("unsupported collection upcast kind");

  return SGF.emitCollectionConversion(loc, fn, fromCollection, toCollection,
                                      mv, C);
}
/// Emit a conditional bridge from Objective-C by applying the bridging
/// conversion function with the native metatype as witness argument.
RValue
RValueEmitter::visitConditionalBridgeFromObjCExpr(
                              ConditionalBridgeFromObjCExpr *E, SGFContext C) {
  // Get the sub expression argument as a managed value
  auto mv = SGF.emitRValueAsSingleValue(E->getSubExpr());

  auto conversionRef = E->getConversion();
  auto conversion = cast<FuncDecl>(conversionRef.getDecl());
  auto subs = conversionRef.getSubstitutions();

  // Recover the native type from the conversion's first generic parameter.
  auto nativeType =
    Type(GenericTypeParamType::get(0, 0, SGF.getASTContext())).subst(subs);

  auto metatypeType = SGF.getLoweredType(MetatypeType::get(nativeType));
  auto metatype =
    ManagedValue::forUnmanaged(SGF.B.createMetatype(E, metatypeType));

  return SGF.emitApplyOfLibraryIntrinsic(E, conversion, subs,
                                         { mv, metatype }, C);
}
/// Given an implicit bridging conversion, check whether the context
/// can be peepholed.
static bool
tryPeepholeBridgingConversion(SILGenFunction &SGF, Conversion::KindTy kind,
ImplicitConversionExpr *E, SGFContext C) {
assert(isa<BridgeFromObjCExpr>(E) || isa<BridgeToObjCExpr>(E));
if (auto outerConversion = C.getAsConversion()) {
auto subExpr = E->getSubExpr();
CanType sourceType = subExpr->getType()->getCanonicalType();
CanType resultType = E->getType()->getCanonicalType();
SILType loweredResultTy = SGF.getLoweredType(resultType);
auto conversion = Conversion::getBridging(kind, sourceType, resultType,
loweredResultTy);
if (outerConversion->tryPeephole(SGF, E->getSubExpr(), conversion)) {
outerConversion->finishInitialization(SGF);
return true;
}
}
return false;
}
/// Emit an ObjC-to-native bridging conversion, first attempting to
/// peephole it into the surrounding conversion context.
RValue
RValueEmitter::visitBridgeFromObjCExpr(BridgeFromObjCExpr *E, SGFContext C) {
  if (tryPeepholeBridgingConversion(SGF, Conversion::BridgeFromObjC, E, C))
    return RValue::forInContext();

  // Emit the sub-expression.
  auto mv = SGF.emitRValueAsSingleValue(E->getSubExpr());
  CanType origType = E->getSubExpr()->getType()->getCanonicalType();
  CanType resultType = E->getType()->getCanonicalType();
  SILType loweredResultTy = SGF.getLoweredType(resultType);
  auto result = SGF.emitBridgedToNativeValue(E, mv, origType, resultType,
                                             loweredResultTy, C);
  return RValue(SGF, E, result);
}
/// Emit a native-to-ObjC bridging conversion, first attempting to
/// peephole it into the surrounding conversion context.
RValue
RValueEmitter::visitBridgeToObjCExpr(BridgeToObjCExpr *E, SGFContext C) {
  if (tryPeepholeBridgingConversion(SGF, Conversion::BridgeToObjC, E, C))
    return RValue::forInContext();

  // Emit the sub-expression.
  auto mv = SGF.emitRValueAsSingleValue(E->getSubExpr());
  CanType origType = E->getSubExpr()->getType()->getCanonicalType();
  CanType resultType = E->getType()->getCanonicalType();
  SILType loweredResultTy = SGF.getLoweredType(resultType);
  auto result = SGF.emitNativeToBridgedValue(E, mv, origType, resultType,
                                             loweredResultTy, C);
  return RValue(SGF, E, result);
}
/// Emit a conversion from a class-bound archetype to its superclass bound.
RValue RValueEmitter::visitArchetypeToSuperExpr(ArchetypeToSuperExpr *E,
                                                SGFContext C) {
  // Evaluate the archetype-typed operand.
  ManagedValue value = SGF.emitRValueAsSingleValue(E->getSubExpr());
  auto superTy = SGF.getLoweredLoadableType(E->getType());

  // Nothing to do when lowering already produced the superclass type.
  if (value.getType() == superTy)
    return RValue(SGF, E, value);

  // Replace the cleanup with a new one on the superclass value so we always
  // use concrete retain/release operations.
  return RValue(SGF, E, SGF.B.createUpcast(E, value, superTy));
}
/// Emit a conversion between two C function pointer types.
///
/// \param fnEmitter  produces the source value of type \p loweredResultTy;
///                   it is only invoked when the two types are
///                   ABI-compatible, since no thunk can be emitted for a
///                   C function pointer.
/// \returns the converted value, or undef after diagnosing an
///          incompatible conversion.
static ManagedValue convertCFunctionSignature(SILGenFunction &SGF,
                                              FunctionConversionExpr *e,
                                              SILType loweredResultTy,
                                llvm::function_ref<ManagedValue ()> fnEmitter) {
  SILType loweredDestTy = SGF.getLoweredType(e->getType());
  ManagedValue result;

  // We're converting between C function pointer types. They better be
  // ABI-compatible, since we can't emit a thunk.
  switch (SGF.SGM.Types.checkForABIDifferences(SGF.SGM.M,
                                               loweredResultTy, loweredDestTy)){
  case TypeConverter::ABIDifference::CompatibleRepresentation:
  case TypeConverter::ABIDifference::CompatibleCallingConvention:
    result = fnEmitter();
    assert(result.getType() == loweredResultTy);

    // Insert an ABI-compatible function conversion if the types still differ.
    if (loweredResultTy != loweredDestTy) {
      assert(!result.hasCleanup());
      result = SGF.B.createConvertFunction(e, result, loweredDestTy);
    }

    break;

  case TypeConverter::ABIDifference::NeedsThunk:
    // Note: in this case, we don't call the emitter at all -- doing so
    // just runs the risk of tripping up asserts in SILGenBridging.cpp
    SGF.SGM.diagnose(e, diag::unsupported_c_function_pointer_conversion,
                     e->getSubExpr()->getType(), e->getType());
    result = SGF.emitUndef(loweredDestTy);
    break;

  case TypeConverter::ABIDifference::CompatibleCallingConvention_ThinToThick:
  case TypeConverter::ABIDifference::CompatibleRepresentation_ThinToThick:
    llvm_unreachable("Cannot have thin to thick conversion here");
  }

  return result;
}
/// Emit a conversion of a concrete function reference (decl ref, member
/// ref, closure, or capture list) to a C function pointer.
///
/// Recovers the referenced declaration or closure, rejects (with a
/// diagnostic) anything that captures context, and emits a reference to
/// the foreign (C-convention) entry point, converting its signature to
/// the destination type.
static
ManagedValue emitCFunctionPointer(SILGenFunction &SGF,
                                  FunctionConversionExpr *conversionExpr) {
  auto expr = conversionExpr->getSubExpr();

  // Look through base-ignored exprs to get to the function ref.
  auto semanticExpr = expr->getSemanticsProvidingExpr();
  while (auto ignoredBase = dyn_cast<DotSyntaxBaseIgnoredExpr>(semanticExpr)){
    // Still evaluate the ignored base for its side effects.
    SGF.emitIgnoredExpr(ignoredBase->getLHS());
    semanticExpr = ignoredBase->getRHS()->getSemanticsProvidingExpr();
  }

  // Recover the decl reference.
  SILDeclRef::Loc loc;

  auto setLocFromConcreteDeclRef = [&](ConcreteDeclRef declRef) {
    // TODO: Handle generic instantiations, where we need to eagerly specialize
    // on the given generic parameters, and static methods, where we need to drop
    // in the metatype.
    assert(!declRef.getDecl()->getDeclContext()->isTypeContext()
           && "c pointers to static methods not implemented");
    loc = declRef.getDecl();
  };

  if (auto declRef = dyn_cast<DeclRefExpr>(semanticExpr)) {
    setLocFromConcreteDeclRef(declRef->getDeclRef());
  } else if (auto memberRef = dyn_cast<MemberRefExpr>(semanticExpr)) {
    setLocFromConcreteDeclRef(memberRef->getMember());
  } else if (auto closure = dyn_cast<AbstractClosureExpr>(semanticExpr)) {
    // Emit the closure body.
    SGF.SGM.emitClosure(closure);

    loc = closure;
  } else if (auto captureList = dyn_cast<CaptureListExpr>(semanticExpr)) {
    // Ensure that weak captures are in a separate scope.
    DebugScope scope(SGF, CleanupLocation(captureList));
    // CaptureListExprs evaluate their bound variables.
    for (auto capture : captureList->getCaptureList()) {
      SGF.visit(capture.Var);
      SGF.visit(capture.Init);
    }

    // Emit the closure body.
    auto *closure = captureList->getClosureBody();
    SGF.SGM.emitClosure(closure);

    loc = closure;
  } else {
    llvm_unreachable("c function pointer converted from a non-concrete decl ref");
  }

  // Produce a reference to the C-compatible entry point for the function.
  SILDeclRef constant(loc, /*foreign*/ true);
  SILConstantInfo constantInfo =
      SGF.getConstantInfo(SGF.getTypeExpansionContext(), constant);

  // C function pointers cannot capture anything from their context.
  auto captures = SGF.SGM.Types.getLoweredLocalCaptures(constant);

  if (!captures.getCaptures().empty() ||
      captures.hasGenericParamCaptures() ||
      captures.hasDynamicSelfCapture() ||
      captures.hasOpaqueValueCapture()) {
    // Pick the most specific diagnostic kind: generic params beat
    // dynamic Self, both beat ordinary value captures (kind 0).
    unsigned kind = 0;
    if (captures.hasGenericParamCaptures())
      kind = 1;
    else if (captures.hasDynamicSelfCapture())
      kind = 2;
    SGF.SGM.diagnose(expr->getLoc(),
                     diag::c_function_pointer_from_function_with_context,
                     /*closure*/ constant.hasClosureExpr(),
                     kind);

    auto loweredTy = SGF.getLoweredType(conversionExpr->getType());
    return SGF.emitUndef(loweredTy);
  }

  return convertCFunctionSignature(
                    SGF, conversionExpr,
                    constantInfo.getSILType(),
                    [&]() -> ManagedValue {
                      SILValue cRef = SGF.emitGlobalFunctionRef(expr, constant);
                      return ManagedValue::forUnmanaged(cRef);
                    });
}
// Change the representation without changing the signature or
// abstraction level.
static ManagedValue convertFunctionRepresentation(SILGenFunction &SGF,
SILLocation loc,
ManagedValue source,
CanAnyFunctionType sourceFormalTy,
CanAnyFunctionType resultFormalTy) {
auto sourceTy = source.getType().castTo<SILFunctionType>();
CanSILFunctionType resultTy =
SGF.getLoweredType(resultFormalTy).castTo<SILFunctionType>();
// Note that conversions to and from block require a thunk
switch (resultFormalTy->getRepresentation()) {
// Convert thin, c, block => thick
case AnyFunctionType::Representation::Swift: {
switch (sourceTy->getRepresentation()) {
case SILFunctionType::Representation::Thin: {
auto v = SGF.B.createThinToThickFunction(
loc, source.getValue(),
SILType::getPrimitiveObjectType(
sourceTy->getWithRepresentation(
SILFunctionTypeRepresentation::Thick)));
// FIXME: what if other reabstraction is required?
return ManagedValue(v, source.getCleanup());
}
case SILFunctionType::Representation::Thick:
llvm_unreachable("should not try thick-to-thick repr change");
case SILFunctionType::Representation::CFunctionPointer:
case SILFunctionType::Representation::Block:
return SGF.emitBlockToFunc(loc, source, sourceFormalTy, resultFormalTy,
resultTy);
case SILFunctionType::Representation::Method:
case SILFunctionType::Representation::Closure:
case SILFunctionType::Representation::ObjCMethod:
case SILFunctionType::Representation::WitnessMethod:
llvm_unreachable("should not do function conversion from method rep");
}
llvm_unreachable("bad representation");
}
// Convert thin, thick, c => block
case AnyFunctionType::Representation::Block:
switch (sourceTy->getRepresentation()) {
case SILFunctionType::Representation::Thin: {
// Make thick first.
auto v = SGF.B.createThinToThickFunction(
loc, source.getValue(),
SILType::getPrimitiveObjectType(
sourceTy->getWithRepresentation(
SILFunctionTypeRepresentation::Thick)));
source = ManagedValue(v, source.getCleanup());
LLVM_FALLTHROUGH;
}
case SILFunctionType::Representation::Thick:
case SILFunctionType::Representation::CFunctionPointer:
// Convert to a block.
return SGF.emitFuncToBlock(loc, source, sourceFormalTy, resultFormalTy,
resultTy);
case SILFunctionType::Representation::Block:
llvm_unreachable("should not try block-to-block repr change");
case SILFunctionType::Representation::Method:
case SILFunctionType::Representation::Closure:
case SILFunctionType::Representation::ObjCMethod:
case SILFunctionType::Representation::WitnessMethod:
llvm_unreachable("should not do function conversion from method rep");
}
llvm_unreachable("bad representation");
// Unsupported
case AnyFunctionType::Representation::Thin:
llvm_unreachable("should not do function conversion to thin");
case AnyFunctionType::Representation::CFunctionPointer:
llvm_unreachable("should not do C function pointer conversion here");
}
llvm_unreachable("bad representation");
}
/// Emit a function conversion expression.
///
/// Special-cases conversions to C function pointers and thin function
/// references; all other conversions are decomposed into up to three
/// ordered stages: foreign->native representation change, signature
/// change, native->foreign representation change.
RValue RValueEmitter::visitFunctionConversionExpr(FunctionConversionExpr *e,
                                                  SGFContext C)
{
  CanAnyFunctionType srcRepTy =
      cast<FunctionType>(e->getSubExpr()->getType()->getCanonicalType());
  CanAnyFunctionType destRepTy =
      cast<FunctionType>(e->getType()->getCanonicalType());

  if (destRepTy->getRepresentation() ==
      FunctionTypeRepresentation::CFunctionPointer) {
    ManagedValue result;

    if (srcRepTy->getRepresentation() !=
        FunctionTypeRepresentation::CFunctionPointer) {
      // A "conversion" of a DeclRef a C function pointer is done by referencing
      // the thunk (or original C function) with the C calling convention.
      result = emitCFunctionPointer(SGF, e);
    } else {
      // Ok, we're converting a C function pointer value to another C function
      // pointer.

      // Emit the C function pointer
      result = SGF.emitRValueAsSingleValue(e->getSubExpr());

      // Possibly bitcast the C function pointer to account for ABI-compatible
      // parameter and result type conversions
      result = convertCFunctionSignature(SGF, e, result.getType(),
                                         [&]() -> ManagedValue {
                                           return result;
                                         });
    }
    return RValue(SGF, e, result);
  }

  // Handle a reference to a "thin" native Swift function that only changes
  // representation and refers to an inherently thin function reference.
  if (destRepTy->getRepresentation() == FunctionTypeRepresentation::Thin) {
    if (srcRepTy->getRepresentation() == FunctionTypeRepresentation::Swift
        && srcRepTy->withExtInfo(destRepTy->getExtInfo())->isEqual(destRepTy)) {
      auto value = SGF.emitRValueAsSingleValue(e->getSubExpr());
      auto expectedTy = SGF.getLoweredType(e->getType());
      // The only supported pattern is peeling a thin_to_thick_function off
      // the operand; anything else is diagnosed as unimplemented.
      if (auto thinToThick =
            dyn_cast<ThinToThickFunctionInst>(value.getValue())) {
        value = ManagedValue::forUnmanaged(thinToThick->getOperand());
      } else {
        SGF.SGM.diagnose(e->getLoc(), diag::not_implemented,
                         "nontrivial thin function reference");
        value = SGF.emitUndef(expectedTy);
      }

      if (value.getType() != expectedTy) {
        SGF.SGM.diagnose(e->getLoc(), diag::not_implemented,
                         "nontrivial thin function reference");
        value = SGF.emitUndef(expectedTy);
      }
      return RValue(SGF, e, value);
    }
  }

  // Break the conversion into three stages:
  // 1) changing the representation from foreign to native
  // 2) changing the signature within the representation
  // 3) changing the representation from native to foreign
  //
  // We only do one of 1) or 3), but we have to do them in the right order
  // with respect to 2).

  CanAnyFunctionType srcTy = srcRepTy;
  CanAnyFunctionType destTy = destRepTy;

  switch(srcRepTy->getRepresentation()) {
  case AnyFunctionType::Representation::Swift:
  case AnyFunctionType::Representation::Thin:
    // Source is native, so we can convert signature first.
    destTy = adjustFunctionType(destRepTy,
                                srcTy->getRepresentation());
    break;
  case AnyFunctionType::Representation::Block:
  case AnyFunctionType::Representation::CFunctionPointer:
    // Source is foreign, so do the representation change first.
    srcTy = adjustFunctionType(srcRepTy,
                               destRepTy->getRepresentation());
  }

  auto result = SGF.emitRValueAsSingleValue(e->getSubExpr());

  // Stage 1: representation change (foreign source only).
  if (srcRepTy != srcTy)
    result = convertFunctionRepresentation(SGF, e, result, srcRepTy, srcTy);

  // Stage 2: signature change within the representation.
  if (srcTy != destTy) {
    result = SGF.emitTransformedValue(e, result, srcTy, destTy, SGFContext());
  }

  // Stage 3: representation change (foreign destination only).
  if (destTy != destRepTy)
    result = convertFunctionRepresentation(SGF, e, result, destTy, destRepTy);

  return RValue(SGF, e, result);
}
/// Emit a covariant function conversion as a single ABI-compatible
/// convert_function instruction.
RValue RValueEmitter::visitCovariantFunctionConversionExpr(
                        CovariantFunctionConversionExpr *e,
                        SGFContext C) {
  ManagedValue original = SGF.emitRValueAsSingleValue(e->getSubExpr());
  CanAnyFunctionType destTy
    = cast<AnyFunctionType>(e->getType()->getCanonicalType());
  SILType resultType = SGF.getLoweredType(destTy);
  // Forward the operand into the conversion and re-manage the result.
  SILValue result =
      SGF.B.createConvertFunction(e, original.forward(SGF), resultType,
                                  /*Withoutactuallyescaping=*/false);
  return RValue(SGF, e, SGF.emitManagedRValueWithCleanup(result));
}
/// Emit a covariant return conversion via an unchecked reference cast.
RValue RValueEmitter::visitCovariantReturnConversionExpr(
                        CovariantReturnConversionExpr *e,
                        SGFContext C) {
  // Evaluate the operand first.
  ManagedValue value = SGF.emitRValueAsSingleValue(e->getSubExpr());
  SILType loweredResultTy = SGF.getLoweredType(e->getType());

  // DynamicSelfType lowers as its self type, so no SIL-level conversion
  // is required in this case.
  if (value.getType() == loweredResultTy)
    return RValue(SGF, e, value);

  return RValue(SGF, e,
                SGF.B.createUncheckedRefCast(e, value, loweredResultTy));
}
/// This node never survives to SILGen; reaching it is a compiler bug.
RValue RValueEmitter::visitImplicitlyUnwrappedFunctionConversionExpr(
    ImplicitlyUnwrappedFunctionConversionExpr *e, SGFContext C) {
  // These are generated for short term use in the type checker.
  llvm_unreachable(
      "We should not see ImplicitlyUnwrappedFunctionConversionExpr here");
}
/// Emit an erasure to an existential type, preferring a bridging-peephole
/// emission when the context allows it.
RValue RValueEmitter::visitErasureExpr(ErasureExpr *E, SGFContext C) {
  // A bridging conversion context may absorb the erasure entirely.
  if (auto result = tryEmitAsBridgingConversion(SGF, E, false, C)) {
    return RValue(SGF, E, *result);
  }

  auto &existentialTL = SGF.getTypeLowering(E->getType());
  auto concreteFormalType = E->getSubExpr()->getType()->getCanonicalType();

  // The concrete value must be emitted at the abstraction level of the
  // existential's opened archetype.
  auto archetype = OpenedArchetypeType::getAny(E->getType());
  AbstractionPattern abstractionPattern(archetype);
  auto &concreteTL = SGF.getTypeLowering(abstractionPattern,
                                         concreteFormalType);

  ManagedValue mv = SGF.emitExistentialErasure(E, concreteFormalType,
                                               concreteTL, existentialTL,
                                               E->getConformances(), C,
                               [&](SGFContext C) -> ManagedValue {
                                 return SGF.emitRValueAsOrig(E->getSubExpr(),
                                                             abstractionPattern,
                                                             concreteTL, C);
                               });

  return RValue(SGF, E, mv);
}
/// Erase \p value to AnyHashable by calling the library's conversion
/// intrinsic with the value's Hashable conformance.
///
/// Returns undef (after the intrinsic lookup has diagnosed) if the
/// intrinsic is unavailable.
RValue SILGenFunction::emitAnyHashableErasure(SILLocation loc,
                                              ManagedValue value,
                                              Type type,
                                              ProtocolConformanceRef conformance,
                                              SGFContext C) {
  // Ensure that the intrinsic function exists.
  auto convertFn = SGM.getConvertToAnyHashable(loc);
  if (!convertFn)
    return emitUndefRValue(
        loc, getASTContext().getAnyHashableDecl()->getDeclaredType());

  // Construct the substitution for T: Hashable.
  auto subMap = SubstitutionMap::getProtocolSubstitutions(
      conformance.getRequirement(), type, conformance);

  return emitApplyOfLibraryIntrinsic(loc, convertFn, subMap, value, C);
}
/// Emit an erasure to AnyHashable: materialize the source at opaque
/// abstraction, then hand it to the library conversion intrinsic.
RValue RValueEmitter::visitAnyHashableErasureExpr(AnyHashableErasureExpr *E,
                                                  SGFContext C) {
  // Emit the source value into a temporary.
  auto sourceOrigType = AbstractionPattern::getOpaque();
  auto source =
    SGF.emitMaterializedRValueAsOrig(E->getSubExpr(), sourceOrigType);

  return SGF.emitAnyHashableErasure(E, source,
                                    E->getSubExpr()->getType(),
                                    E->getConformance(), C);
}
/// Treating this as a successful operation, turn a CMV into a +1 MV.
ManagedValue SILGenFunction::getManagedValue(SILLocation loc,
ConsumableManagedValue value) {
// If the consumption rules say that this is already +1 given a
// successful operation, just use the value.
if (value.isOwned())
return value.getFinalManagedValue();
SILType valueTy = value.getType();
auto &valueTL = getTypeLowering(valueTy);
// If the type is trivial, it's always +1.
if (valueTL.isTrivial())
return ManagedValue::forUnmanaged(value.getValue());
// If it's an object...
if (valueTy.isObject()) {
// See if we have more accurate information from the ownership kind. This
// detects trivial cases of enums.
if (value.getOwnershipKind() == ValueOwnershipKind::None)
return ManagedValue::forUnmanaged(value.getValue());
// Otherwise, copy the value and return.
return value.getFinalManagedValue().copy(*this, loc);
}
// Otherwise, produce a temporary and copy into that.
auto temporary = emitTemporary(loc, valueTL);
valueTL.emitCopyInto(B, loc, value.getValue(), temporary->getAddress(),
IsNotTake, IsInitialization);
temporary->finishInitialization(*this);
return temporary->getManagedAddress();
}
/// Emit a forced checked cast ('as!') via the shared unconditional-cast
/// emission helper.
RValue RValueEmitter::visitForcedCheckedCastExpr(ForcedCheckedCastExpr *E,
                                                 SGFContext C) {
  return emitUnconditionalCheckedCast(SGF, E, E->getSubExpr(), E->getType(),
                                      E->getCastKind(), C);
}
/// Emit a conditional checked cast ('as?'), harvesting PGO branch counts
/// from an enclosing if-statement when available.
RValue RValueEmitter::
visitConditionalCheckedCastExpr(ConditionalCheckedCastExpr *E,
                                SGFContext C) {
  ProfileCounter trueCount = ProfileCounter();
  ProfileCounter falseCount = ProfileCounter();
  // When the cast is the condition of an 'if', reuse the then/else profile
  // counts as the cast's success/failure counts.
  auto parent = SGF.getPGOParent(E);
  if (parent) {
    auto &Node = parent.getValue();
    auto *NodeS = Node.get<Stmt *>();
    if (auto *IS = dyn_cast<IfStmt>(NodeS)) {
      trueCount = SGF.loadProfilerCount(IS->getThenStmt());
      if (auto *ElseStmt = IS->getElseStmt()) {
        falseCount = SGF.loadProfilerCount(ElseStmt);
      }
    }
  }

  ManagedValue operand = SGF.emitRValueAsSingleValue(E->getSubExpr());
  return emitConditionalCheckedCast(SGF, E, operand, E->getSubExpr()->getType(),
                                    E->getType(), E->getCastKind(), C,
                                    trueCount, falseCount);
}
/// Wrap a Builtin.Int1 value into a stdlib Bool.
///
/// \param builtinBool  a Builtin.Int1 SIL value (trivial, unmanaged).
static RValue emitBoolLiteral(SILGenFunction &SGF, SILLocation loc,
                              SILValue builtinBool,
                              SGFContext C) {
  // Call the Bool(_builtinBooleanLiteral:) initializer
  ASTContext &ctx = SGF.getASTContext();
  auto init = ctx.getBoolBuiltinInitDecl();
  auto builtinArgType = CanType(BuiltinIntegerType::get(1, ctx));
  RValue builtinArg(SGF, ManagedValue::forUnmanaged(builtinBool),
                    builtinArgType);

  PreparedArguments builtinArgs((AnyFunctionType::Param(builtinArgType)));
  builtinArgs.add(loc, std::move(builtinArg));

  auto result =
    SGF.emitApplyAllocatingInitializer(loc, ConcreteDeclRef(init),
                                       std::move(builtinArgs), Type(),
                                       C);
  return result;
}
/// Emit an 'is' check: compute the Builtin.Int1 result of the dynamic
/// type test, then wrap it into a stdlib Bool.
RValue RValueEmitter::visitIsExpr(IsExpr *E, SGFContext C) {
  SILValue isa = emitIsa(SGF, E, E->getSubExpr(),
                         E->getCastType(), E->getCastKind());
  return emitBoolLiteral(SGF, E, isa, C);
}
/// Emit an enum case-membership test as a select_enum[_addr] producing a
/// Builtin.Int1 (1 for the matching case, 0 otherwise), wrapped as Bool.
RValue RValueEmitter::visitEnumIsCaseExpr(EnumIsCaseExpr *E,
                                          SGFContext C) {
  // Get the enum value.  +0 is fine: the select only inspects the tag.
  auto subExpr = SGF.emitRValueAsSingleValue(E->getSubExpr(),
                                SGFContext(SGFContext::AllowImmediatePlusZero));
  // Test its case.
  auto i1Ty = SILType::getBuiltinIntegerType(1, SGF.getASTContext());
  auto t = SGF.B.createIntegerLiteral(E, i1Ty, 1);
  auto f = SGF.B.createIntegerLiteral(E, i1Ty, 0);

  SILValue selected;
  // Address-only enums are tested in place via select_enum_addr.
  if (subExpr.getType().isAddress()) {
    selected = SGF.B.createSelectEnumAddr(E, subExpr.getValue(), i1Ty, f,
                                          {{E->getEnumElement(), t}});
  } else {
    selected = SGF.B.createSelectEnum(E, subExpr.getValue(), i1Ty, f,
                                      {{E->getEnumElement(), t}});
  }

  return emitBoolLiteral(SGF, E, selected, C);
}
/// Emit a coercion ('as'): try the bridging peephole on the operand;
/// otherwise the coercion is a no-op and the operand is emitted directly.
RValue RValueEmitter::visitCoerceExpr(CoerceExpr *E, SGFContext C) {
  if (auto result = tryEmitAsBridgingConversion(SGF, E->getSubExpr(), true, C))
    return RValue(SGF, E, *result);

  return visit(E->getSubExpr(), C);
}
/// Convert a value of an underlying type to a value of an opaque result type
/// ("some P"). The opaque type shares the underlying type's layout but is
/// lowered as an abstract type parameter, so the conversion may require
/// reabstraction and an address- or object-level cast.
RValue RValueEmitter::visitUnderlyingToOpaqueExpr(UnderlyingToOpaqueExpr *E,
                                                  SGFContext C) {
  // The opaque type has the layout of the underlying type, abstracted as
  // a type parameter.
  auto &opaqueTL = SGF.getTypeLowering(E->getType());
  auto &underlyingTL = SGF.getTypeLowering(AbstractionPattern::getOpaque(),
                                           E->getSubExpr()->getType());

  auto &underlyingSubstTL = SGF.getTypeLowering(E->getSubExpr()->getType());

  // If the lowered representations already agree, no conversion work is
  // needed at all.
  if (underlyingSubstTL.getLoweredType() == opaqueTL.getLoweredType()) {
    return SGF.emitRValue(E->getSubExpr(), C);
  }

  // If the opaque type is address only, initialize in place.
  if (opaqueTL.getLoweredType().isAddress()) {
    auto opaqueAddr = SGF.getBufferForExprResult(
        E, opaqueTL.getLoweredType(), C);
    // Initialize the buffer as the underlying type.
    auto underlyingAddr = SGF.B.createUncheckedAddrCast(E,
        opaqueAddr,
        underlyingTL.getLoweredType().getAddressType());

    auto underlyingInit = SGF.useBufferAsTemporary(underlyingAddr,
                                                   underlyingTL);

    // Try to emit directly into the buffer if no reabstraction is necessary.
    ManagedValue underlying;
    if (underlyingSubstTL.getLoweredType() == underlyingTL.getLoweredType()) {
      underlying = SGF.emitRValueAsSingleValue(E->getSubExpr(),
                                          SGFContext(underlyingInit.get()));
    } else {
      // Otherwise, emit the underlying value then bring it to the right
      // abstraction level.
      underlying = SGF.emitRValueAsSingleValue(E->getSubExpr());
      underlying = SGF.emitSubstToOrigValue(E, underlying,
                             AbstractionPattern::getOpaque(),
                             E->getSubExpr()->getType()->getCanonicalType());
    }
    // If the value wasn't already emitted into the temporary, move it there.
    if (!underlying.isInContext()) {
      underlyingInit->copyOrInitValueInto(SGF, E, underlying, /*init*/ true);
      underlyingInit->finishInitialization(SGF);
    }

    // Kill the cleanup on the underlying value, and hand off the opaque buffer
    // as the result (the opaque buffer aliases the underlying one, so only
    // one of them may own the cleanup).
    underlyingInit->getManagedAddress().forward(SGF);
    auto opaque = SGF.manageBufferForExprResult(opaqueAddr, opaqueTL, C);
    return RValue(SGF, E, opaque);
  }

  // If the opaque type is loadable, emit the subexpression and bitcast it.
  auto value = SGF.emitRValueAsSingleValue(E->getSubExpr());
  if (underlyingSubstTL.getLoweredType() != underlyingTL.getLoweredType()) {
    // Reabstract to the opaque abstraction level first if needed.
    value = SGF.emitSubstToOrigValue(E, value, AbstractionPattern::getOpaque(),
                             E->getSubExpr()->getType()->getCanonicalType());
  }

  if (value.getType() == opaqueTL.getLoweredType())
    return RValue(SGF, E, value);

  auto cast = SGF.B.createUncheckedBitCast(E, value.forward(SGF),
                                           opaqueTL.getLoweredType());
  value = SGF.emitManagedRValueWithCleanup(cast);

  return RValue(SGF, E, value);
}
/// Begin emission of a variadic argument list: allocate an array of
/// \p numElements elements of \p arrayTy and return the bookkeeping
/// (array value, abort cleanup, base element address) the caller needs to
/// fill in the elements and later finalize via emitEndVarargs.
VarargsInfo Lowering::emitBeginVarargs(SILGenFunction &SGF, SILLocation loc,
                                       CanType baseTy, CanType arrayTy,
                                       unsigned numElements) {
  // Reabstract the base type against the array element type.
  auto baseAbstraction = AbstractionPattern::getOpaque();
  auto &baseTL = SGF.getTypeLowering(baseAbstraction, baseTy);

  // Allocate the array.
  SILValue numEltsVal = SGF.B.createIntegerLiteral(loc,
                            SILType::getBuiltinWordType(SGF.getASTContext()),
                            numElements);
  // The first result is the array value.
  ManagedValue array;
  // The second result is a RawPointer to the base address of the array.
  SILValue basePtr;
  std::tie(array, basePtr)
    = SGF.emitUninitializedArrayAllocation(arrayTy, numEltsVal, loc);

  // Temporarily deactivate the main array cleanup; it is reactivated by
  // emitEndVarargs once the elements have been initialized.
  if (array.hasCleanup())
    SGF.Cleanups.setCleanupState(array.getCleanup(), CleanupState::Dormant);

  // Push a new cleanup to deallocate the array if we unwind while its
  // elements are still uninitialized.
  auto abortCleanup =
    SGF.enterDeallocateUninitializedArrayCleanup(array.getValue());

  // Turn the pointer into an address.
  basePtr = SGF.B.createPointerToAddress(
    loc, basePtr, baseTL.getLoweredType().getAddressType(),
    /*isStrict*/ true,
    /*isInvariant*/ false);

  return VarargsInfo(array, abortCleanup, basePtr, baseTL, baseAbstraction);
}
/// Finish a varargs emission begun by emitBeginVarargs: retire the abort
/// cleanup, reactivate the array's normal cleanup, and finalize the array.
ManagedValue Lowering::emitEndVarargs(SILGenFunction &SGF, SILLocation loc,
                                      VarargsInfo &&varargs) {
  // Kill the abort cleanup; the elements are now fully initialized.
  SGF.Cleanups.setCleanupState(varargs.getAbortCleanup(), CleanupState::Dead);

  // Reactivate the result cleanup.
  auto array = varargs.getArray();
  if (array.hasCleanup())
    SGF.Cleanups.setCleanupState(array.getCleanup(), CleanupState::Active);

  return SGF.emitUninitializedArrayFinalization(loc, std::move(array));
}
/// Emit a tuple expression: either split the destination Initialization into
/// per-element initializations and emit each element in place, or emit the
/// elements as RValues and combine them into a tuple RValue.
RValue RValueEmitter::visitTupleExpr(TupleExpr *E, SGFContext C) {
  auto type = cast<TupleType>(E->getType()->getCanonicalType());

  // If we have an Initialization, emit the tuple elements into its elements.
  if (Initialization *I = C.getEmitInto()) {

    bool implodeTuple = false;

    if (I->canPerformInPlaceInitialization() &&
        I->isInPlaceInitializationOfGlobal() &&
        SGF.getLoweredType(type).isTrivial(SGF.F)) {
      // Implode tuples in initialization of globals if they are
      // of trivial types.
      implodeTuple = true;
    }

    if (!implodeTuple && I->canSplitIntoTupleElements()) {
      SmallVector<InitializationPtr, 4> subInitializationBuf;
      auto subInitializations =
        I->splitIntoTupleElements(SGF, RegularLocation(E), type,
                                  subInitializationBuf);
      assert(subInitializations.size() == E->getElements().size() &&
             "initialization for tuple has wrong number of elements");
      for (unsigned i = 0, size = subInitializations.size(); i < size; ++i)
        SGF.emitExprInto(E->getElement(i), subInitializations[i].get());
      I->finishInitialization(SGF);
      // The result was emitted directly into the initialization.
      return RValue::forInContext();
    }
  }

  // Otherwise, emit each element eagerly and note whether any came back +1.
  llvm::SmallVector<RValue, 8> tupleElts;
  bool hasAtleastOnePlusOneValue = false;
  for (Expr *elt : E->getElements()) {
    RValue RV = SGF.emitRValue(elt);
    hasAtleastOnePlusOneValue |= RV.isPlusOne(SGF);
    tupleElts.emplace_back(std::move(RV));
  }

  // Once we have found if we have any plus one arguments, add each element of
  // tuple elts into result, making sure each value is at plus 1 (an RValue
  // must not mix +0 and +1 elements).
  RValue result(type);
  if (hasAtleastOnePlusOneValue) {
    for (unsigned i : indices(tupleElts)) {
      result.addElement(std::move(tupleElts[i]).ensurePlusOne(SGF, E));
    }
  } else {
    for (unsigned i : indices(tupleElts)) {
      result.addElement(std::move(tupleElts[i]));
    }
  }

  return result;
}
/// Emit an r-value property reference by forming an l-value and loading
/// from it, so accessor and writeback logic is shared with the l-value path.
RValue RValueEmitter::visitMemberRefExpr(MemberRefExpr *e,
                                         SGFContext resultCtx) {
  assert(!e->getType()->is<LValueType>() &&
         "RValueEmitter shouldn't be called on lvalues");
  assert(isa<VarDecl>(e->getMember().getDecl()));

  // Keep any formal accesses/writebacks tightly scoped to this expression.
  FormalEvaluationScope accessScope(SGF);
  LValue propertyLV = SGF.emitLValue(e, SGFAccessKind::OwnedObjectRead);

  // The formal access ends when the scope does, so the loaded value cannot
  // generally be handed back at +0 without further analysis.
  return SGF.emitLoadOfLValue(e, std::move(propertyLV),
                              resultCtx.withFollowingSideEffects());
}
/// Dynamic (AnyObject) member references are handled by a dedicated
/// SILGenFunction entry point.
RValue RValueEmitter::visitDynamicMemberRefExpr(DynamicMemberRefExpr *E,
                                                SGFContext C) {
  return SGF.emitDynamicMemberRefExpr(E, C);
}
/// For `Base.member` where the base value itself is unused, evaluate the
/// base only for its side effects, then produce the member as the result.
RValue RValueEmitter::
visitDotSyntaxBaseIgnoredExpr(DotSyntaxBaseIgnoredExpr *E, SGFContext C) {
  visit(E->getLHS());           // evaluate and discard the base
  return visit(E->getRHS(), C); // the member is the result
}
/// Emit an r-value subscript access via the l-value path, like
/// visitMemberRefExpr.
RValue RValueEmitter::visitSubscriptExpr(SubscriptExpr *E, SGFContext C) {
  // Keep any formal accesses/writebacks tightly scoped to this expression.
  FormalEvaluationScope accessScope(SGF);
  LValue subscriptLV = SGF.emitLValue(E, SGFAccessKind::OwnedObjectRead);

  // The formal access into the l-value ends immediately, so we cannot safely
  // return a +0 value without further analysis.
  return SGF.emitLoadOfLValue(E, std::move(subscriptLV),
                              C.withFollowingSideEffects());
}
/// Dynamic (AnyObject) subscripts are handled by a dedicated SILGenFunction
/// entry point.
RValue RValueEmitter::visitDynamicSubscriptExpr(
    DynamicSubscriptExpr *E, SGFContext C) {
  return SGF.emitDynamicSubscriptExpr(E, C);
}
/// Emit a `tuple.N` projection by emitting the base tuple and extracting the
/// requested element.
RValue RValueEmitter::visitTupleElementExpr(TupleElementExpr *E,
                                            SGFContext C) {
  assert(!E->getType()->is<LValueType>() &&
         "RValueEmitter shouldn't be called on lvalues");

  // A +0-tolerant caller lets us emit the base at +0 and project from it.
  // We must not reuse the context's address buffer, though: the base tuple
  // has a different type than the single element we hand back.
  SGFContext baseCtx = C.withFollowingProjection();
  RValue baseTuple = visit(E->getBase(), baseCtx);
  return std::move(baseTuple).extractElement(E->getFieldNumber());
}
/// Emit a call to the generator function that produces the default value for
/// parameter \p destIndex of \p defaultArgsOwner, returning that value.
RValue
SILGenFunction::emitApplyOfDefaultArgGenerator(SILLocation loc,
                                             ConcreteDeclRef defaultArgsOwner,
                                             unsigned destIndex,
                                             CanType resultType,
                                             AbstractionPattern origResultType,
                                             SGFContext C) {
  SILDeclRef generator
    = SILDeclRef::getDefaultArgGenerator(defaultArgsOwner.getDecl(),
                                         destIndex);

  auto fnRef = ManagedValue::forUnmanaged(emitGlobalFunctionRef(loc,generator));
  auto fnType = fnRef.getType().castTo<SILFunctionType>();

  // Generic generators take the owner's substitutions.
  SubstitutionMap subs;
  if (fnType->isPolymorphic())
    subs = defaultArgsOwner.getSubstitutions();

  auto substFnType =
      fnType->substGenericArgs(SGM.M, subs, getTypeExpansionContext());

  CalleeTypeInfo calleeTypeInfo(substFnType, origResultType, resultType);
  ResultPlanPtr resultPtr =
      ResultPlanBuilder::computeResultPlan(*this, calleeTypeInfo, loc, C);
  ArgumentScope argScope(*this, loc);

  // Default-argument generators can capture from their enclosing context;
  // the captures are the only arguments to the call.
  SmallVector<ManagedValue, 4> captures;
  emitCaptures(loc, generator, CaptureEmission::ImmediateApplication,
               captures);

  return emitApply(std::move(resultPtr), std::move(argScope), loc, fnRef,
                   subs, captures, calleeTypeInfo, ApplyOptions::None, C);
}
/// Emit a call to the stored-property-initializer function for \p var
/// (the function that computes the property's initial value expression),
/// returning the initial value.
RValue SILGenFunction::emitApplyOfStoredPropertyInitializer(
    SILLocation loc,
    VarDecl *var,
    SubstitutionMap subs,
    CanType resultType,
    AbstractionPattern origResultType,
    SGFContext C) {
  SILDeclRef constant(var, SILDeclRef::Kind::StoredPropertyInitializer);
  auto fnRef = ManagedValue::forUnmanaged(emitGlobalFunctionRef(loc, constant));
  auto fnType = fnRef.getType().castTo<SILFunctionType>();
  auto substFnType =
      fnType->substGenericArgs(SGM.M, subs, getTypeExpansionContext());

  CalleeTypeInfo calleeTypeInfo(substFnType, origResultType, resultType);
  ResultPlanPtr resultPlan =
      ResultPlanBuilder::computeResultPlan(*this, calleeTypeInfo, loc, C);
  ArgumentScope argScope(*this, loc);
  // The initializer takes no arguments.
  return emitApply(std::move(resultPlan), std::move(argScope), loc, fnRef,
                   subs, {}, calleeTypeInfo, ApplyOptions::None, C);
}
/// Emit a tuple destructuring: evaluate the operand tuple once, bind its
/// elements to the expression's opaque values, emit the result expression in
/// terms of those opaque values, then unbind them.
RValue RValueEmitter::visitDestructureTupleExpr(DestructureTupleExpr *E,
                                                SGFContext C) {
  // Emit the sub-expression tuple and destructure it into elements.
  SmallVector<RValue, 4> elements;
  visit(E->getSubExpr()).extractElements(elements);

  // Bind each element of the input tuple to its corresponding
  // opaque value.
  for (unsigned i = 0, e = E->getDestructuredElements().size();
       i != e; ++i) {
    auto *opaqueElt = E->getDestructuredElements()[i];
    assert(!SGF.OpaqueValues.count(opaqueElt));

    auto opaqueMV = std::move(elements[i]).getAsSingleValue(SGF, E);
    SGF.OpaqueValues[opaqueElt] = opaqueMV;
  }

  // Emit the result expression written in terms of the above
  // opaque values.
  auto result = visit(E->getResultExpr(), C);

  // Clean up: remove the opaque-value bindings so they can't leak into
  // later expressions.
  for (unsigned i = 0, e = E->getDestructuredElements().size();
       i != e; ++i) {
    auto *opaqueElt = E->getDestructuredElements()[i];
    SGF.OpaqueValues.erase(opaqueElt);
  }

  return result;
}
/// Emit the dynamic metatype of `self` inside a delegating initializer,
/// where `self` is (or is about to be) exclusively borrowed and therefore
/// must be accessed through the delegation-aware paths rather than a plain
/// r-value load.
static SILValue emitMetatypeOfDelegatingInitExclusivelyBorrowedSelf(
    SILGenFunction &SGF, SILLocation loc, DeclRefExpr *dre, SILType metaTy) {
  SGFContext ctx;
  auto *vd = cast<ParamDecl>(dre->getDecl());
  ManagedValue selfValue;

  Scope S(SGF, loc);
  Optional<FormalEvaluationScope> FES;

  // If we have not exclusively borrowed self, we need to do so now.
  if (SGF.SelfInitDelegationState == SILGenFunction::WillExclusiveBorrowSelf) {
    // We need to use a full scope here to ensure that any underlying
    // "normal cleanup" borrows are cleaned up.
    selfValue = SGF.emitRValueAsSingleValue(dre);
  } else {
    // If we already exclusively borrowed self, then we need to emit self
    // using formal evaluation primitives.
    assert(SGF.SelfInitDelegationState ==
           SILGenFunction::DidExclusiveBorrowSelf);
    // This needs to be inlined since there is a Formal Evaluation Scope
    // in emitRValueForDecl that causing any borrow for this LValue to be
    // popped too soon.
    FES.emplace(SGF);
    CanType formalRValueType = dre->getType()->getCanonicalType();
    selfValue = SGF.emitAddressOfLocalVarDecl(dre, vd, formalRValueType,
                                              SGFAccessKind::OwnedObjectRead);
    selfValue = SGF.emitFormalEvaluationRValueForSelfInDelegationInit(
                       loc, formalRValueType,
                       selfValue.getLValueAddress(), ctx)
                    .getAsSingleValue(SGF, loc);
  }

  // Derive the metatype from the borrowed self value.
  return SGF.B.createValueMetatype(loc, metaTy, selfValue.getValue());
}
/// Emit the dynamic metatype of the value produced by \p baseExpr.
/// Existentials use existential_metatype; thick metatypes are derived from
/// the instance via value_metatype (with special handling for `self` in a
/// delegating initializer); thin metatypes need no instance at all.
SILValue SILGenFunction::emitMetatypeOfValue(SILLocation loc, Expr *baseExpr) {
  Type formalBaseType = baseExpr->getType()->getWithoutSpecifierType();
  CanType baseTy = formalBaseType->getCanonicalType();

  // For class, archetype, and protocol types, look up the dynamic metatype.
  if (baseTy.isAnyExistentialType()) {
    SILType metaTy = getLoweredLoadableType(
                                      CanExistentialMetatypeType::get(baseTy));
    auto base = emitRValueAsSingleValue(baseExpr,
                                  SGFContext::AllowImmediatePlusZero).getValue();
    return B.createExistentialMetatype(loc, metaTy, base);
  }
  SILType metaTy = getLoweredLoadableType(CanMetatypeType::get(baseTy));

  // If the lowered metatype has a thick representation, we need to derive it
  // dynamically from the instance.
  if (metaTy.castTo<MetatypeType>()->getRepresentation()
          != MetatypeRepresentation::Thin) {
    // `self` in a delegating initializer cannot be loaded as a normal
    // r-value while it is exclusively borrowed; use the dedicated path.
    if (inExclusiveBorrowSelfSection(SelfInitDelegationState)) {
      if (auto *dre = dyn_cast<DeclRefExpr>(baseExpr)) {
        if (isa<ParamDecl>(dre->getDecl()) &&
            dre->getDecl()->getName() == getASTContext().Id_self &&
            dre->getDecl()->isImplicit()) {
          return emitMetatypeOfDelegatingInitExclusivelyBorrowedSelf(
              *this, loc, dre, metaTy);
        }
      }
    }

    Scope S(*this, loc);
    auto base = emitRValueAsSingleValue(baseExpr, SGFContext::AllowImmediatePlusZero);
    return S.popPreservingValue(B.createValueMetatype(loc, metaTy, base))
        .getValue();
  }

  // Otherwise, ignore the base and return the static thin metatype, still
  // evaluating the base expression for its side effects.
  emitIgnoredExpr(baseExpr);
  return B.createMetatype(loc, metaTy);
}
/// `type(of:)` — produce the dynamic metatype of the operand value.
RValue RValueEmitter::visitDynamicTypeExpr(DynamicTypeExpr *E, SGFContext C) {
  SILValue metatypeValue = SGF.emitMetatypeOfValue(E, E->getBase());
  return RValue(SGF, E, ManagedValue::forUnmanaged(metatypeValue));
}
/// Emit a closure capture list: bind each captured variable (and its
/// initializer), then emit the closure body as the expression's value.
RValue RValueEmitter::visitCaptureListExpr(CaptureListExpr *E, SGFContext C) {
  // Ensure that weak captures are in a separate scope.
  DebugScope scope(SGF, CleanupLocation(E));
  // CaptureListExprs evaluate their bound variables.
  for (auto capture : E->getCaptureList()) {
    SGF.visit(capture.Var);
    SGF.visit(capture.Init);
  }

  // Then they evaluate to their body.
  return visit(E->getClosureBody(), C);
}
/// Returns the wrapped value placeholder that is meant to be substituted
/// in for the given autoclosure, or null if this closure is not such an
/// autoclosure. The placeholder is created when \c init(wrappedValue:)
/// takes an autoclosure for the \c wrappedValue parameter.
static PropertyWrapperValuePlaceholderExpr *
wrappedValueAutoclosurePlaceholder(const AbstractClosureExpr *e) {
  auto *ace = dyn_cast<AutoClosureExpr>(e);
  if (!ace)
    return nullptr;
  auto *call = dyn_cast<CallExpr>(ace->getSingleExpressionBody());
  if (!call)
    return nullptr;
  return dyn_cast<PropertyWrapperValuePlaceholderExpr>(call->getFn());
}
/// Emit a closure expression: generate the closure function itself, then
/// form the closure value (capturing context as needed) as the result.
RValue RValueEmitter::visitAbstractClosureExpr(AbstractClosureExpr *e,
                                               SGFContext C) {
  // Property-wrapper `wrappedValue` autoclosures are emitted as the
  // placeholder they wrap rather than as a real closure.
  if (auto *placeholder = wrappedValueAutoclosurePlaceholder(e))
    return visitPropertyWrapperValuePlaceholderExpr(placeholder, C);

  // Emit the closure body.
  SGF.SGM.emitClosure(e);

  // Closures that capture generic parameters take the forwarding
  // substitutions of the enclosing function.
  SubstitutionMap subs;
  if (e->getCaptureInfo().hasGenericParamCaptures())
    subs = SGF.getForwardingSubstitutionMap();

  // Generate the closure value (if any) for the closure expr's function
  // reference.
  auto refType = e->getType()->getCanonicalType();
  SILLocation L = e;
  L.markAutoGenerated();
  ManagedValue result = SGF.emitClosureValue(L, SILDeclRef(e),
                                             refType, subs);
  return RValue(SGF, e, refType, result);
}
/// Emit a string interpolation. The appending TapExpr is inlined here: the
/// interpolation builder var is initialized directly with the builder's
/// allocating initializer, the tap body mutates it, and the final value is
/// passed to the literal's result initializer.
RValue RValueEmitter::
visitInterpolatedStringLiteralExpr(InterpolatedStringLiteralExpr *E,
                                   SGFContext C) {
  RValue interpolation;
  {
    TapExpr *ETap = E->getAppendingExpr();
    // Inlined from TapExpr:
    // TODO: This is only necessary because constant evaluation requires that
    // the box for the var gets defined before the initializer happens.
    auto Var = ETap->getVar();
    auto VarType = ETap->getType()->getCanonicalType();

    Scope outerScope(SGF, CleanupLocation(ETap));

    // Initialize the var with our SubExpr.
    auto VarInit =
        SGF.emitInitializationForVarDecl(Var, /*forceImmutable=*/false);
    {
      // Modified from TapExpr to evaluate the SubExpr directly rather than
      // indirectly through the OpaqueValue system.
      // The builder initializer takes the literal capacity and the
      // interpolation count as its two arguments.
      PreparedArguments builderInitArgs;
      RValue literalCapacity = visit(E->getLiteralCapacityExpr(), SGFContext());
      RValue interpolationCount =
          visit(E->getInterpolationCountExpr(), SGFContext());
      builderInitArgs.emplace(
          {AnyFunctionType::Param(literalCapacity.getType()),
           AnyFunctionType::Param(interpolationCount.getType())});
      builderInitArgs.add(E, std::move(literalCapacity));
      builderInitArgs.add(E, std::move(interpolationCount));
      RValue subexpr_result = SGF.emitApplyAllocatingInitializer(
          E, E->getBuilderInit(), std::move(builderInitArgs), Type(),
          SGFContext(VarInit.get()));
      if (!subexpr_result.isInContext()) {
        ArgumentSource(
            SILLocation(E),
            std::move(subexpr_result).ensurePlusOne(SGF, SILLocation(E)))
            .forwardInto(SGF, VarInit.get());
      }
    }

    // Emit the body and let it mutate the var if it chooses.
    SGF.emitStmt(ETap->getBody());

    // Retrieve and return the var, making it +1 so it survives the scope.
    auto result = SGF.emitRValueForDecl(SILLocation(ETap), Var, VarType,
                                        AccessSemantics::Ordinary, SGFContext());
    result = std::move(result).ensurePlusOne(SGF, SILLocation(ETap));
    interpolation = outerScope.popPreservingValue(std::move(result));
  }

  // Pass the fully-built interpolation to the result initializer
  // (e.g. String(stringInterpolation:)).
  PreparedArguments resultInitArgs;
  resultInitArgs.emplace(AnyFunctionType::Param(interpolation.getType()));
  resultInitArgs.add(E, std::move(interpolation));

  return SGF.emitApplyAllocatingInitializer(
      E, E->getResultInit(), std::move(resultInitArgs), Type(), C);
}
/// Emit an object literal (e.g. #colorLiteral) by applying its designated
/// initializer to the literal's arguments.
RValue RValueEmitter::
visitObjectLiteralExpr(ObjectLiteralExpr *E, SGFContext C) {
  ConcreteDeclRef initRef = E->getInitializer();
  auto *ctor = cast<ConstructorDecl>(initRef.getDecl());

  // Substitute the initializer's generic signature to obtain the concrete
  // parameter list used to prepare the arguments.
  AnyFunctionType *substMethodTy =
      ctor->getMethodInterfaceType()
          .subst(initRef.getSubstitutions())
          ->getAs<AnyFunctionType>();

  PreparedArguments args(substMethodTy->getParams(), E->getArg());
  return SGF.emitApplyAllocatingInitializer(SILLocation(E), initRef,
                                            std::move(args), E->getType(), C);
}
/// Editor placeholders carry a semantic expression; emit that in our place.
RValue RValueEmitter::
visitEditorPlaceholderExpr(EditorPlaceholderExpr *E, SGFContext C) {
  return visit(E->getSemanticExpr(), C);
}
/// Emit a `#selector(...)` expression: build an ObjC-selector string literal
/// and wrap it in the Selector struct (a pointer wrapped in a struct).
RValue RValueEmitter::visitObjCSelectorExpr(ObjCSelectorExpr *e, SGFContext C) {
  SILType loweredSelectorTy = SGF.getLoweredType(e->getType());

  // Dig out the declaration of the Selector type.
  auto selectorDecl = e->getType()->getAs<StructType>()->getDecl();

  // Dig out the type of its pointer: the single non-static stored property.
  Type selectorMemberTy;
  for (auto member : selectorDecl->getMembers()) {
    if (auto var = dyn_cast<VarDecl>(member)) {
      if (!var->isStatic() && var->hasStorage()) {
        selectorMemberTy = var->getInterfaceType();
        break;
      }
    }
  }
  if (!selectorMemberTy) {
    // No stored pointer member — the Selector type is malformed; diagnose
    // and recover with an undef value.
    SGF.SGM.diagnose(e, diag::objc_selector_malformed);
    return RValue(SGF, e, SGF.emitUndef(loweredSelectorTy));
  }

  // Form the selector string.
  llvm::SmallString<64> selectorScratch;
  auto selectorString =
    e->getMethod()->getObjCSelector().getString(selectorScratch);

  // Create an Objective-C selector string literal.
  auto selectorLiteral =
    SGF.B.createStringLiteral(e, selectorString,
                              StringLiteralInst::Encoding::ObjCSelector);

  // Create the pointer struct from the raw pointer.
  SILType loweredPtrTy = SGF.getLoweredType(selectorMemberTy);
  auto ptrValue = SGF.B.createStruct(e, loweredPtrTy, { selectorLiteral });

  // Wrap that up in a Selector and return it.
  auto selectorValue = SGF.B.createStruct(e, loweredSelectorTy, { ptrValue });
  return RValue(SGF, e, ManagedValue::forUnmanaged(selectorValue));
}
/// Prepare the base value for a key path accessor thunk: copy the borrowed
/// parameter, reabstract it from the opaque level, open an existential base,
/// and upcast a class base to the property's declared type. \p baseType is
/// updated in place to reflect any opening/upcasting.
static ManagedValue
emitKeyPathRValueBase(SILGenFunction &subSGF,
                      AbstractStorageDecl *storage,
                      SILLocation loc,
                      SILValue paramArg,
                      CanType &baseType,
                      SubstitutionMap subs) {
  // If the storage is at global scope, then the base value () is a formality.
  // There no real argument to pass to the underlying accessors.
  if (!storage->getDeclContext()->isTypeContext())
    return ManagedValue();

  auto paramOrigValue =
      ManagedValue::forBorrowedRValue(paramArg).copy(subSGF, loc);
  auto paramSubstValue = subSGF.emitOrigToSubstValue(loc, paramOrigValue,
                                             AbstractionPattern::getOpaque(),
                                             baseType);

  // Pop open an existential container base.
  if (baseType->isAnyExistentialType()) {
    // Use the opened archetype from the AST for a protocol member, or make a
    // new one (which we'll upcast immediately below) for a class member.
    ArchetypeType *opened;
    if (storage->getDeclContext()->getSelfClassDecl()) {
      opened = OpenedArchetypeType::get(baseType);
    } else {
      opened = subs.getReplacementTypes()[0]->castTo<ArchetypeType>();
    }
    assert(opened->isOpenedExistential());

    FormalEvaluationScope scope(subSGF);

    baseType = opened->getCanonicalType();
    auto openedOpaqueValue = subSGF.emitOpenExistential(loc, paramSubstValue,
                                                subSGF.getLoweredType(baseType),
                                                AccessKind::Read);
    // Maybe we could peephole this if we know the property load can borrow the
    // base value…
    paramSubstValue = openedOpaqueValue.ensurePlusOne(subSGF, loc);
  }

  // Upcast a class instance to the property's declared type if necessary.
  if (auto propertyClass = storage->getDeclContext()->getSelfClassDecl()) {
    if (baseType->getClassOrBoundGenericClass() != propertyClass) {
      baseType = baseType->getSuperclassForDecl(propertyClass)
        ->getCanonicalType();
      paramSubstValue = subSGF.B.createUpcast(loc, paramSubstValue,
                                   SILType::getPrimitiveObjectType(baseType));
    }
  }
  // …or pop open an existential container.
  return paramSubstValue;
}
// (formal CanType, lowered SILType) for a single captured subscript index.
using IndexTypePair = std::pair<CanType, SILType>;

/// Helper function to load the captured indexes out of a key path component
/// in order to invoke the accessors on that key path. A component with captured
/// indexes passes down a pointer to those captures to the accessor thunks,
/// which we can copy out of to produce values we can pass to the real
/// accessor functions.
static PreparedArguments
loadIndexValuesForKeyPathComponent(SILGenFunction &SGF, SILLocation loc,
                                   AbstractStorageDecl *storage,
                                   ArrayRef<IndexTypePair> indexes,
                                   SILValue pointer) {
  // If not a subscript, do nothing.
  if (!isa<SubscriptDecl>(storage))
    return PreparedArguments();

  SmallVector<AnyFunctionType::Param, 8> indexParams;
  for (auto &elt : indexes) {
    // FIXME: Varargs?
    indexParams.emplace_back(SGF.F.mapTypeIntoContext(elt.first));
  }

  PreparedArguments indexValues(indexParams);
  if (indexes.empty()) {
    assert(indexValues.isValid());
    return indexValues;
  }

  // The captures are laid out as a tuple of the index values; reinterpret
  // the raw pointer as an address of that tuple.
  auto indexLoweredTy =
    SGF.getLoweredType(
      AnyFunctionType::composeInput(SGF.getASTContext(), indexParams,
                                    /*canonicalVararg=*/false));

  auto addr = SGF.B.createPointerToAddress(loc, pointer,
                                           indexLoweredTy.getAddressType(),
                                           /*isStrict*/ false);

  // Copy each index value out of the capture buffer (IsNotTake: the buffer
  // stays owned by the caller).
  for (unsigned i : indices(indexes)) {
    SILValue eltAddr = addr;
    if (indexes.size() > 1) {
      eltAddr = SGF.B.createTupleElementAddr(loc, eltAddr, i);
    }
    auto ty = SGF.F.mapTypeIntoContext(indexes[i].second);
    auto value = SGF.emitLoad(loc, eltAddr,
                              SGF.getTypeLowering(ty),
                              SGFContext(), IsNotTake);
    auto substType =
      SGF.F.mapTypeIntoContext(indexes[i].first)->getCanonicalType();
    indexValues.add(loc, RValue(SGF, loc, substType, value));
  }

  assert(indexValues.isValid());
  return indexValues;
}
/// Pick the accessor a key path component should call to read the storage:
/// the getter when the storage requires an opaque getter, otherwise the
/// read coroutine.
static AccessorDecl *
getRepresentativeAccessorForKeyPath(AbstractStorageDecl *storage) {
  if (!storage->requiresOpaqueGetter()) {
    assert(storage->requiresOpaqueReadCoroutine());
    return storage->getOpaqueAccessor(AccessorKind::Read);
  }
  return storage->getOpaqueAccessor(AccessorKind::Get);
}
/// Build (or fetch, if already emitted) the shared thunk the key path
/// runtime calls to read \p property. The thunk takes the base indirectly
/// (plus a raw pointer to captured indexes for subscripts) and produces the
/// property value indirectly, performing the real access with reabstraction
/// to/from the opaque abstraction level.
static SILFunction *getOrCreateKeyPathGetter(SILGenModule &SGM,
                         SILLocation loc,
                         AbstractStorageDecl *property,
                         SubstitutionMap subs,
                         GenericEnvironment *genericEnv,
                         ResilienceExpansion expansion,
                         ArrayRef<IndexTypePair> indexes,
                         CanType baseType,
                         CanType propertyType) {
  // If the storage declaration is from a protocol, chase the override chain
  // back to the declaration whose getter introduced the witness table
  // entry.
  if (isa<ProtocolDecl>(property->getDeclContext())) {
    auto accessor = getRepresentativeAccessorForKeyPath(property);
    if (!SILDeclRef::requiresNewWitnessTableEntry(accessor)) {
      // Find the getter that does have a witness table entry.
      auto wtableAccessor =
        cast<AccessorDecl>(SILDeclRef::getOverriddenWitnessTableEntry(accessor));

      // Substitute the 'Self' type of the base protocol.
      subs = SILGenModule::mapSubstitutionsForWitnessOverride(
              accessor, wtableAccessor, subs);
      property = wtableAccessor->getStorage();
    }
  }

  auto genericSig =
      genericEnv ? genericEnv->getGenericSignature().getCanonicalSignature()
                 : nullptr;
  // A signature with all parameters concrete doesn't need a generic thunk.
  if (genericSig && genericSig->areAllParamsConcrete()) {
    genericSig = nullptr;
    genericEnv = nullptr;
  }

  // Build the signature of the thunk as expected by the keypath runtime.
  auto signature = [&]() {
    CanType loweredBaseTy, loweredPropTy;
    AbstractionPattern opaque = AbstractionPattern::getOpaque();

    loweredBaseTy = SGM.Types.getLoweredRValueType(
        TypeExpansionContext::minimal(), opaque, baseType);
    loweredPropTy = SGM.Types.getLoweredRValueType(
        TypeExpansionContext::minimal(), opaque, propertyType);

    auto paramConvention = ParameterConvention::Indirect_In_Guaranteed;

    SmallVector<SILParameterInfo, 2> params;
    params.push_back({loweredBaseTy, paramConvention});
    auto &C = SGM.getASTContext();
    // Subscripts additionally receive a raw pointer to their captured
    // index values.
    if (!indexes.empty())
      params.push_back({C.getUnsafeRawPointerDecl()->getDeclaredType()
                                                   ->getCanonicalType(),
                        ParameterConvention::Direct_Unowned});

    SILResultInfo result(loweredPropTy, ResultConvention::Indirect);

    return SILFunctionType::get(genericSig,
      SILFunctionType::ExtInfo::getThin(),
      SILCoroutineKind::None,
      ParameterConvention::Direct_Unowned,
      params, {}, result, None,
      SubstitutionMap(), SubstitutionMap(),
      SGM.getASTContext());
  }();

  // Find the function and see if we already created it.
  auto name = Mangle::ASTMangler()
    .mangleKeyPathGetterThunkHelper(property, genericSig, baseType,
                                    subs, expansion);
  SILGenFunctionBuilder builder(SGM);
  auto thunk = builder.getOrCreateSharedFunction(
      loc, name, signature, IsBare, IsNotTransparent,
      (expansion == ResilienceExpansion::Minimal
       ? IsSerializable
       : IsNotSerialized),
      ProfileCounter(), IsThunk, IsNotDynamic);
  if (!thunk->empty())
    return thunk;

  // Emit the thunk, which accesses the underlying property normally with
  // reabstraction where necessary.
  if (genericEnv) {
    baseType = genericEnv->mapTypeIntoContext(baseType)->getCanonicalType();
    propertyType = genericEnv->mapTypeIntoContext(propertyType)
      ->getCanonicalType();
    thunk->setGenericEnvironment(genericEnv);
  }
  SILGenFunction subSGF(SGM, *thunk, SGM.SwiftModule);
  signature = subSGF.F.getLoweredFunctionTypeInContext(
      subSGF.F.getTypeExpansionContext());

  // Materialize the entry block arguments: @out result, @in_guaranteed base,
  // and (for subscripts) the index pointer.
  auto entry = thunk->begin();
  auto resultArgTy = signature->getSingleResult().getSILStorageType(
      SGM.M, signature, subSGF.F.getTypeExpansionContext());
  auto baseArgTy = signature->getParameters()[0].getSILStorageType(
      SGM.M, signature, subSGF.F.getTypeExpansionContext());
  if (genericEnv) {
    resultArgTy = genericEnv->mapTypeIntoContext(SGM.M, resultArgTy);
    baseArgTy = genericEnv->mapTypeIntoContext(SGM.M, baseArgTy);
  }
  auto resultArg = entry->createFunctionArgument(resultArgTy);
  auto baseArg = entry->createFunctionArgument(baseArgTy);
  SILValue indexPtrArg;
  if (!indexes.empty()) {
    auto indexArgTy = signature->getParameters()[1].getSILStorageType(
        SGM.M, signature, subSGF.F.getTypeExpansionContext());
    indexPtrArg = entry->createFunctionArgument(indexArgTy);
  }

  ArgumentScope scope(subSGF, loc);

  auto baseSubstValue = emitKeyPathRValueBase(subSGF, property,
                                              loc, baseArg,
                                              baseType, subs);

  auto subscriptIndices =
    loadIndexValuesForKeyPathComponent(subSGF, loc, property,
                                       indexes, indexPtrArg);

  // Perform the actual storage load.
  auto resultSubst = subSGF.emitRValueForStorageLoad(loc, baseSubstValue,
                                   baseType, /*super*/false,
                                   property, std::move(subscriptIndices),
                                   subs, AccessSemantics::Ordinary,
                                   propertyType, SGFContext())
    .getAsSingleValue(subSGF, loc);
  // Reabstract the result to the opaque level the runtime expects.
  if (resultSubst.getType().getAddressType() != resultArg->getType())
    resultSubst = subSGF.emitSubstToOrigValue(loc, resultSubst,
                                         AbstractionPattern::getOpaque(),
                                         propertyType);

  resultSubst.forwardInto(subSGF, loc, resultArg);
  scope.pop();

  subSGF.B.createReturn(loc, subSGF.emitEmptyTuple(loc));

  SGM.emitLazyConformancesForFunction(thunk);

  return thunk;
}
/// Build (or fetch, if already emitted) the shared thunk the key path
/// runtime calls to write \p property. The thunk takes the new value and
/// the base indirectly (the base is @inout for mutating setters, plus a raw
/// pointer to captured indexes for subscripts), and performs the assignment
/// through the normal l-value machinery with reabstraction as needed.
static SILFunction *getOrCreateKeyPathSetter(SILGenModule &SGM,
                          SILLocation loc,
                          AbstractStorageDecl *property,
                          SubstitutionMap subs,
                          GenericEnvironment *genericEnv,
                          ResilienceExpansion expansion,
                          ArrayRef<IndexTypePair> indexes,
                          CanType baseType,
                          CanType propertyType) {
  // If the storage declaration is from a protocol, chase the override chain
  // back to the declaration whose setter introduced the witness table
  // entry.
  if (isa<ProtocolDecl>(property->getDeclContext())) {
    auto setter = property->getOpaqueAccessor(AccessorKind::Set);
    if (!SILDeclRef::requiresNewWitnessTableEntry(setter)) {
      // Find the setter that does have a witness table entry.
      auto wtableSetter =
        cast<AccessorDecl>(SILDeclRef::getOverriddenWitnessTableEntry(setter));

      // Substitute the 'Self' type of the base protocol.
      subs = SILGenModule::mapSubstitutionsForWitnessOverride(
              setter, wtableSetter, subs);
      property = wtableSetter->getStorage();
    }
  }

  auto genericSig =
      genericEnv ? genericEnv->getGenericSignature().getCanonicalSignature()
                 : nullptr;
  // A signature with all parameters concrete doesn't need a generic thunk.
  if (genericSig && genericSig->areAllParamsConcrete()) {
    genericSig = nullptr;
    genericEnv = nullptr;
  }

  // Build the signature of the thunk as expected by the keypath runtime.
  auto signature = [&]() {
    CanType loweredBaseTy, loweredPropTy;
    {
      AbstractionPattern opaque = AbstractionPattern::getOpaque();

      loweredBaseTy = SGM.Types.getLoweredRValueType(
          TypeExpansionContext::minimal(), opaque, baseType);
      loweredPropTy = SGM.Types.getLoweredRValueType(
          TypeExpansionContext::minimal(), opaque, propertyType);
    }

    auto &C = SGM.getASTContext();

    auto paramConvention = ParameterConvention::Indirect_In_Guaranteed;

    SmallVector<SILParameterInfo, 3> params;
    // property value
    params.push_back({loweredPropTy, paramConvention});
    // base (inout for mutating setters, guaranteed otherwise)
    params.push_back({loweredBaseTy,
                      property->isSetterMutating()
                        ? ParameterConvention::Indirect_Inout
                        : paramConvention});
    // indexes
    if (!indexes.empty())
      params.push_back({C.getUnsafeRawPointerDecl()->getDeclaredType()
                                                   ->getCanonicalType(),
                        ParameterConvention::Direct_Unowned});

    return SILFunctionType::get(genericSig,
      SILFunctionType::ExtInfo::getThin(),
      SILCoroutineKind::None,
      ParameterConvention::Direct_Unowned,
      params, {}, {}, None,
      SubstitutionMap(), SubstitutionMap(),
      SGM.getASTContext());
  }();

  // Mangle the name of the thunk to see if we already created it.
  auto name = Mangle::ASTMangler()
    .mangleKeyPathSetterThunkHelper(property, genericSig, baseType,
                                    subs, expansion);
  SILGenFunctionBuilder builder(SGM);
  auto thunk = builder.getOrCreateSharedFunction(
      loc, name, signature, IsBare, IsNotTransparent,
      (expansion == ResilienceExpansion::Minimal
       ? IsSerializable
       : IsNotSerialized),
      ProfileCounter(), IsThunk, IsNotDynamic);
  if (!thunk->empty())
    return thunk;

  // Emit the thunk, which accesses the underlying property normally with
  // reabstraction where necessary.
  if (genericEnv) {
    baseType = genericEnv->mapTypeIntoContext(baseType)->getCanonicalType();
    propertyType = genericEnv->mapTypeIntoContext(propertyType)
      ->getCanonicalType();
    thunk->setGenericEnvironment(genericEnv);
  }

  SILGenFunction subSGF(SGM, *thunk, SGM.SwiftModule);
  signature = subSGF.F.getLoweredFunctionTypeInContext(
      subSGF.F.getTypeExpansionContext());

  // Materialize the entry block arguments: new value, base, and (for
  // subscripts) the index pointer.
  auto entry = thunk->begin();
  auto valueArgTy = signature->getParameters()[0].getSILStorageType(
      SGM.M, signature, subSGF.getTypeExpansionContext());
  auto baseArgTy = signature->getParameters()[1].getSILStorageType(
      SGM.M, signature, subSGF.getTypeExpansionContext());
  if (genericEnv) {
    valueArgTy = genericEnv->mapTypeIntoContext(SGM.M, valueArgTy);
    baseArgTy = genericEnv->mapTypeIntoContext(SGM.M, baseArgTy);
  }
  auto valueArg = entry->createFunctionArgument(valueArgTy);
  auto baseArg = entry->createFunctionArgument(baseArgTy);
  SILValue indexPtrArg;

  if (!indexes.empty()) {
    auto indexArgTy = signature->getParameters()[2].getSILStorageType(
        SGM.M, signature, subSGF.getTypeExpansionContext());
    indexPtrArg = entry->createFunctionArgument(indexArgTy);
  }

  Scope scope(subSGF, loc);

  auto subscriptIndices =
    loadIndexValuesForKeyPathComponent(subSGF, loc, property,
                                       indexes, indexPtrArg);

  // Copy the incoming value and reabstract it from the opaque level to the
  // property's substituted type.
  auto valueOrig = ManagedValue::forBorrowedRValue(valueArg)
      .copy(subSGF, loc);
  auto valueSubst = subSGF.emitOrigToSubstValue(loc, valueOrig,
                                                AbstractionPattern::getOpaque(),
                                                propertyType);

  // Form an l-value for the base, reading it as a borrowed value for
  // non-mutating setters or treating the argument as an address for
  // mutating ones.
  LValue lv;

  if (!property->isSetterMutating()) {
    auto baseSubst = emitKeyPathRValueBase(subSGF, property,
                                           loc, baseArg,
                                           baseType, subs);

    lv = LValue::forValue(SGFAccessKind::BorrowedObjectRead,
                          baseSubst, baseType);
  } else {
    auto baseOrig = ManagedValue::forLValue(baseArg);
    lv = LValue::forAddress(SGFAccessKind::ReadWrite, baseOrig, None,
                            AbstractionPattern::getOpaque(),
                            baseType);

    // Open an existential lvalue, if necessary.
    if (baseType->isAnyExistentialType()) {
      auto opened = subs.getReplacementTypes()[0]->castTo<ArchetypeType>();
      assert(opened->isOpenedExistential());
      baseType = opened->getCanonicalType();
      lv = subSGF.emitOpenExistentialLValue(loc, std::move(lv),
                                            CanArchetypeType(opened),
                                            baseType,
                                            SGFAccessKind::ReadWrite);
    }
  }

  // Add the property component itself and assign through it.
  auto semantics = AccessSemantics::Ordinary;
  auto strategy = property->getAccessStrategy(semantics, AccessKind::Write,
                                              SGM.M.getSwiftModule(),
                                              expansion);

  LValueOptions lvOptions;
  lv.addMemberComponent(subSGF, loc, property, subs, lvOptions,
                        /*super*/ false, SGFAccessKind::Write,
                        strategy, propertyType,
                        std::move(subscriptIndices),
                        /*index for diags*/ nullptr);

  subSGF.emitAssignToLValue(loc,
    RValue(subSGF, loc, propertyType, valueSubst),
    std::move(lv));
  scope.pop();

  subSGF.B.createReturn(loc, subSGF.emitEmptyTuple(loc));

  SGM.emitLazyConformancesForFunction(thunk);

  return thunk;
}
/// Get or create the witness functions a key path pattern uses to compare
/// and hash its captured subscript index values.
///
/// Emits (or reuses, keyed by mangled name) two shared SIL thunks:
///  - \p equals: (RawPointer, RawPointer) -> Bool. Compares each pair of
///    index values with the == witness drawn from the index's Equatable
///    conformance (recovered from its Hashable conformance).
///  - \p hash: (RawPointer) -> Int. Currently hashes only the first index
///    by invoking its Hashable.hashValue requirement.
///
/// Both out-parameters are set to nullptr when \p indexes is empty.
static void
getOrCreateKeyPathEqualsAndHash(SILGenModule &SGM,
                                SILLocation loc,
                                GenericEnvironment *genericEnv,
                                ResilienceExpansion expansion,
                                ArrayRef<KeyPathPatternComponent::Index> indexes,
                                SILFunction *&equals,
                                SILFunction *&hash) {
  // No captured indexes means nothing to compare or hash.
  if (indexes.empty()) {
    equals = nullptr;
    hash = nullptr;
    return;
  }

  auto genericSig =
      genericEnv ? genericEnv->getGenericSignature().getCanonicalSignature()
                 : nullptr;

  // If the signature is fully concrete, the thunks don't need a generic
  // environment at all.
  if (genericSig && genericSig->areAllParamsConcrete()) {
    genericSig = nullptr;
    genericEnv = nullptr;
  }

  auto &C = SGM.getASTContext();
  auto unsafeRawPointerTy = C.getUnsafeRawPointerDecl()->getDeclaredType()
                             ->getCanonicalType();
  auto boolTy = C.getBoolDecl()->getDeclaredType()->getCanonicalType();
  auto intTy = C.getIntDecl()->getDeclaredType()->getCanonicalType();
  auto hashableProto = C.getProtocol(KnownProtocolKind::Hashable);

  SmallVector<CanType, 4> indexTypes;
  indexTypes.reserve(indexes.size());
  for (auto &index : indexes)
    indexTypes.push_back(index.FormalType);

  // The index values live in memory behind the RawPointer arguments, laid
  // out as a single lowered tuple of all the index types.
  SmallVector<TupleTypeElt, 2> indexElts;
  for (auto &elt : indexes) {
    indexElts.push_back(GenericEnvironment::mapTypeIntoContext(genericEnv,
                                                               elt.FormalType));
  }

  auto indexTupleTy = TupleType::get(indexElts, SGM.getASTContext())
                        ->getCanonicalType();
  RValue indexValue(indexTupleTy);
  auto indexLoweredTy =
      SILType::getPrimitiveAddressType(SGM.Types.getLoweredRValueType(
          TypeExpansionContext::minimal(), indexTupleTy));

  // Get or create the equals witness
  [unsafeRawPointerTy, boolTy, genericSig, &C, &indexTypes, &equals, loc,
   &SGM, genericEnv, expansion, indexLoweredTy, indexes]{
    // (RawPointer, RawPointer) -> Bool
    SmallVector<SILParameterInfo, 2> params;
    params.push_back({unsafeRawPointerTy,
                      ParameterConvention::Direct_Unowned});
    params.push_back({unsafeRawPointerTy,
                      ParameterConvention::Direct_Unowned});

    SmallVector<SILResultInfo, 1> results;
    results.push_back({boolTy, ResultConvention::Unowned});

    auto signature = SILFunctionType::get(genericSig,
      SILFunctionType::ExtInfo::getThin(),
      SILCoroutineKind::None,
      ParameterConvention::Direct_Unowned,
      params, /*yields*/ {}, results, None,
      SubstitutionMap(), SubstitutionMap(),
      C);

    // Mangle the name of the thunk to see if we already created it.
    auto name = Mangle::ASTMangler()
      .mangleKeyPathEqualsHelper(indexTypes, genericSig, expansion);
    SILGenFunctionBuilder builder(SGM);
    equals = builder.getOrCreateSharedFunction(
        loc, name, signature, IsBare, IsNotTransparent,
        (expansion == ResilienceExpansion::Minimal
         ? IsSerializable
         : IsNotSerialized),
        ProfileCounter(), IsThunk, IsNotDynamic);
    // A non-empty body means another key path already emitted this thunk.
    if (!equals->empty()) {
      return;
    }

    SILGenFunction subSGF(SGM, *equals, SGM.SwiftModule);
    equals->setGenericEnvironment(genericEnv);
    auto entry = equals->begin();
    auto lhsPtr = entry->createFunctionArgument(params[0].getSILStorageType(
        SGM.M, signature, subSGF.getTypeExpansionContext()));
    auto rhsPtr = entry->createFunctionArgument(params[1].getSILStorageType(
        SGM.M, signature, subSGF.getTypeExpansionContext()));

    Scope scope(subSGF, loc);

    // Reinterpret the raw pointers as addresses of the lowered index tuples.
    auto lhsAddr = subSGF.B.createPointerToAddress(loc, lhsPtr,
                                                   indexLoweredTy,
                                                   /*isStrict*/ false);
    auto rhsAddr = subSGF.B.createPointerToAddress(loc, rhsPtr,
                                                   indexLoweredTy,
                                                   /*isStrict*/ false);

    // Compare each pair of index values using the == witness from the
    // conformance.
    auto equatableProtocol = C.getProtocol(KnownProtocolKind::Equatable);
    auto equalsMethod = equatableProtocol->getSingleRequirement(
        C.Id_EqualsOperator);
    auto equalsRef = SILDeclRef(equalsMethod);
    auto equalsTy = subSGF.SGM.Types.getConstantType(
        TypeExpansionContext(subSGF.F), equalsRef);

    // All "not equal" edges funnel into a shared false block.
    auto isFalseBB = subSGF.createBasicBlock();
    auto i1Ty = SILType::getBuiltinIntegerType(1, C);
    for (unsigned i : indices(indexes)) {
      auto &index = indexes[i];

      Type formalTy = index.FormalType;
      ProtocolConformanceRef hashable = index.Hashable;
      std::tie(formalTy, hashable)
        = GenericEnvironment::mapConformanceRefIntoContext(genericEnv,
                                                           formalTy,
                                                           hashable);
      auto formalCanTy = formalTy->getCanonicalType(genericSig);

      // Get the Equatable conformance from the Hashable conformance.
      auto equatable = hashable.getAssociatedConformance(formalTy,
        GenericTypeParamType::get(0, 0, C),
        equatableProtocol);

      assert(equatable.isAbstract() == hashable.isAbstract());
      if (equatable.isConcrete())
        assert(equatable.getConcrete()->getType()->isEqual(
                  hashable.getConcrete()->getType()));

      auto equalsWitness = subSGF.B.createWitnessMethod(loc,
        formalCanTy, equatable,
        equalsRef, equalsTy);

      auto equatableSub
        = SubstitutionMap::getProtocolSubstitutions(equatableProtocol,
                                                    formalCanTy,
                                                    equatable);
      auto equalsSubstTy = equalsTy.castTo<SILFunctionType>()->substGenericArgs(
          SGM.M, equatableSub, TypeExpansionContext(subSGF.F));
      auto equalsInfo = CalleeTypeInfo(equalsSubstTy,
                                       AbstractionPattern(boolTy), boolTy,
                                       None,
                                       ImportAsMemberStatus());

      Scope branchScope(subSGF, loc);

      // With multiple indexes the storage is a real tuple; project out the
      // i-th element. A single index is stored directly.
      SILValue lhsEltAddr = lhsAddr;
      SILValue rhsEltAddr = rhsAddr;
      if (indexes.size() > 1) {
        lhsEltAddr = subSGF.B.createTupleElementAddr(loc, lhsEltAddr, i);
        rhsEltAddr = subSGF.B.createTupleElementAddr(loc, rhsEltAddr, i);
      }
      auto lhsArg = subSGF.emitLoad(loc, lhsEltAddr,
             subSGF.getTypeLowering(AbstractionPattern::getOpaque(), formalTy),
             SGFContext(), IsNotTake);
      auto rhsArg = subSGF.emitLoad(loc, rhsEltAddr,
             subSGF.getTypeLowering(AbstractionPattern::getOpaque(), formalTy),
             SGFContext(), IsNotTake);

      // The protocol witness takes its operands indirectly; spill loadable
      // values into temporaries so we can pass addresses.
      if (!lhsArg.getType().isAddress()) {
        auto lhsBuf = subSGF.emitTemporaryAllocation(loc, lhsArg.getType());
        lhsArg.forwardInto(subSGF, loc, lhsBuf);
        lhsArg = subSGF.emitManagedBufferWithCleanup(lhsBuf);

        auto rhsBuf = subSGF.emitTemporaryAllocation(loc, rhsArg.getType());
        rhsArg.forwardInto(subSGF, loc, rhsBuf);
        rhsArg = subSGF.emitManagedBufferWithCleanup(rhsBuf);
      }

      // Static == takes the metatype as its third (Self) argument.
      auto metaty = CanMetatypeType::get(formalCanTy,
                                         MetatypeRepresentation::Thick);
      auto metatyValue = ManagedValue::forUnmanaged(subSGF.B.createMetatype(loc,
        SILType::getPrimitiveObjectType(metaty)));
      SILValue isEqual;
      {
        auto equalsResultPlan = ResultPlanBuilder::computeResultPlan(subSGF,
          equalsInfo, loc, SGFContext());
        ArgumentScope argScope(subSGF, loc);
        isEqual = subSGF
          .emitApply(std::move(equalsResultPlan), std::move(argScope),
                     loc, ManagedValue::forUnmanaged(equalsWitness),
                     equatableSub,
                     {lhsArg, rhsArg, metatyValue},
                     equalsInfo, ApplyOptions::None, SGFContext())
          .getUnmanagedSingleValue(subSGF, loc);
      }

      branchScope.pop();

      // Extract the Builtin.Int1 payload from the Bool result.
      auto isEqualI1 = subSGF.B.createStructExtract(loc, isEqual,
        C.getBoolDecl()->getStoredProperties()[0], i1Ty);

      auto isTrueBB = subSGF.createBasicBlock();
      // Each false condition needs its own block to avoid critical edges.
      auto falseEdgeBB = subSGF.createBasicBlockAndBranch(loc, isFalseBB);
      subSGF.B.createCondBranch(loc, isEqualI1, isTrueBB, falseEdgeBB);

      // Continue comparing the next index pair on the true path.
      subSGF.B.emitBlock(isTrueBB);
    }

    // Join: branch true/false literals into a single return block phi.
    auto returnBB = subSGF.createBasicBlock(FunctionSection::Postmatter);

    SILValue trueValue = subSGF.B.createIntegerLiteral(loc, i1Ty, 1);
    subSGF.B.createBranch(loc, returnBB, trueValue);

    subSGF.B.emitBlock(isFalseBB);
    SILValue falseValue = subSGF.B.createIntegerLiteral(loc, i1Ty, 0);
    subSGF.B.createBranch(loc, returnBB, falseValue);

    subSGF.B.emitBlock(returnBB);
    scope.pop();
    SILValue returnVal =
        returnBB->createPhiArgument(i1Ty, ValueOwnershipKind::None);
    auto returnBoolVal = subSGF.B.createStruct(loc,
      SILType::getPrimitiveObjectType(boolTy), returnVal);
    subSGF.B.createReturn(loc, returnBoolVal);

    SGM.emitLazyConformancesForFunction(equals);
  }();

  // Get or create the hash witness
  [unsafeRawPointerTy, intTy, genericSig, &C, indexTypes, &hash, &loc,
   &SGM, genericEnv, expansion, indexLoweredTy, hashableProto, indexes]{
    // (RawPointer) -> Int
    SmallVector<SILParameterInfo, 1> params;
    params.push_back({unsafeRawPointerTy,
                      ParameterConvention::Direct_Unowned});

    SmallVector<SILResultInfo, 1> results;
    results.push_back({intTy, ResultConvention::Unowned});

    auto signature = SILFunctionType::get(genericSig,
      SILFunctionType::ExtInfo::getThin(),
      SILCoroutineKind::None,
      ParameterConvention::Direct_Unowned,
      params, /*yields*/ {}, results, None,
      SubstitutionMap(), SubstitutionMap(), C);

    // Mangle the name of the thunk to see if we already created it.
    SmallString<64> nameBuf;
    auto name = Mangle::ASTMangler()
      .mangleKeyPathHashHelper(indexTypes, genericSig, expansion);
    SILGenFunctionBuilder builder(SGM);
    hash = builder.getOrCreateSharedFunction(
        loc, name, signature, IsBare, IsNotTransparent,
        (expansion == ResilienceExpansion::Minimal
         ? IsSerializable
         : IsNotSerialized),
        ProfileCounter(), IsThunk, IsNotDynamic);
    // A non-empty body means the thunk was already emitted.
    if (!hash->empty()) {
      return;
    }

    SILGenFunction subSGF(SGM, *hash, SGM.SwiftModule);
    hash->setGenericEnvironment(genericEnv);
    auto entry = hash->begin();
    auto indexPtr = entry->createFunctionArgument(params[0].getSILStorageType(
        SGM.M, signature, subSGF.getTypeExpansionContext()));

    SILValue hashCode;

    // For now, just use the hash value of the first index.
    // TODO: Combine hashes of the indexes using an inout Hasher
    {
      ArgumentScope scope(subSGF, loc);

      auto &index = indexes[0];

      // Extract the index value.
      SILValue indexAddr = subSGF.B.createPointerToAddress(loc, indexPtr,
                                                           indexLoweredTy,
                                                           /*isStrict*/ false);
      if (indexes.size() > 1) {
        indexAddr = subSGF.B.createTupleElementAddr(loc, indexAddr, 0);
      }

      VarDecl *hashValueVar =
        cast<VarDecl>(hashableProto->getSingleRequirement(C.Id_hashValue));

      auto formalTy = index.FormalType;
      auto hashable = index.Hashable;
      if (genericEnv) {
        formalTy = genericEnv->mapTypeIntoContext(formalTy)->getCanonicalType();
        hashable = hashable.subst(index.FormalType,
          [&](Type t) -> Type { return genericEnv->mapTypeIntoContext(t); },
          LookUpConformanceInSignature(genericSig.getPointer()));
      }

      // Set up a substitution of Self => IndexType.
      auto hashGenericSig =
        hashValueVar->getDeclContext()->getGenericSignatureOfContext();
      assert(hashGenericSig);
      SubstitutionMap hashableSubsMap = SubstitutionMap::get(
          hashGenericSig,
          [&](SubstitutableType *type) -> Type { return formalTy; },
          [&](CanType dependentType, Type replacementType, ProtocolDecl *proto)
            -> ProtocolConformanceRef { return hashable; });

      // Read the storage.
      ManagedValue base = ManagedValue::forBorrowedAddressRValue(indexAddr);
      hashCode =
        subSGF.emitRValueForStorageLoad(loc, base, formalTy, /*super*/ false,
                                        hashValueVar, PreparedArguments(),
                                        hashableSubsMap,
                                        AccessSemantics::Ordinary,
                                        intTy, SGFContext())
              .getUnmanagedSingleValue(subSGF, loc);

      scope.pop();
    }
    subSGF.B.createReturn(loc, hashCode);
    SGM.emitLazyConformancesForFunction(hash);
  }();

  return;
}
/// Compute the stable identity used to uniquify a computed key path
/// component for the given storage declaration.
///
/// Depending on the access strategy, the identity is the property
/// declaration itself, the SIL function for its unthunked getter, or the
/// getter's SILDeclRef (which identifies a vtable/witness-table slot).
static KeyPathPatternComponent::ComputedPropertyId
getIdForKeyPathComponentComputedProperty(SILGenModule &SGM,
                                         AbstractStorageDecl *storage,
                                         AccessStrategy strategy) {
  switch (strategy.getKind()) {
  case AccessStrategy::Storage:
    // Identify reabstracted stored properties by the property itself.
    return cast<VarDecl>(storage);
  case AccessStrategy::MaterializeToTemporary:
    // Use the read strategy. But try to avoid turning e.g. an
    // observed property into a stored property.
    strategy = strategy.getReadStrategy();
    if (strategy.getKind() != AccessStrategy::Storage ||
        !getRepresentativeAccessorForKeyPath(storage)) {
      // Recurse with the read strategy when it is not plain storage (or
      // there is no representative accessor to identify it by).
      return getIdForKeyPathComponentComputedProperty(SGM, storage, strategy);
    }
    // Storage read strategy with an accessor available: identify by the
    // accessor, same as the direct-to-accessor case below.
    LLVM_FALLTHROUGH;
  case AccessStrategy::DirectToAccessor: {
    // Identify the property using its (unthunked) getter. For a
    // computed property, this should be stable ABI; for a resilient public
    // property, this should also be stable ABI across modules.
    auto representativeDecl = getRepresentativeAccessorForKeyPath(storage);
    // If the property came from an import-as-member function defined in C,
    // use the original C function as the key.
    bool isForeign = representativeDecl->isImportAsMember();
    auto getterRef = SILDeclRef(representativeDecl,
                                SILDeclRef::Kind::Func, isForeign);
    // TODO: If the getter has shared linkage (say it's synthesized for a
    // Clang-imported thing), we'll need some other sort of
    // stable identifier.
    return SGM.getFunction(getterRef, NotForDefinition);
  }
  case AccessStrategy::DispatchToAccessor: {
    // Identify the property by its vtable or wtable slot.
    return SGM.getAccessorDeclRef(getRepresentativeAccessorForKeyPath(storage));
  }
  }
  llvm_unreachable("unhandled access strategy");
}
/// Lower the formal and SIL types of a subscript's index parameters for use
/// in a key path pattern.
///
/// Appends one (formal interface type, lowered SIL type) pair per index
/// parameter to \p indexPatterns, applying \p subscriptSubs when the
/// subscript is generic. Indexes are lowered against the opaque abstraction
/// pattern, since key path buffers store them opaquely.
///
/// Sets \p needsGenericContext if the substituted subscript type still
/// depends on the enclosing generic context (contains an archetype).
///
/// Fixes vs. the previous revision: removed the unused local
/// `SubstitutionMap subMap` and the stray `;` after the function body
/// (an empty declaration flagged by -Wextra-semi).
static void
lowerKeyPathSubscriptIndexTypes(
                 SILGenModule &SGM,
                 SmallVectorImpl<IndexTypePair> &indexPatterns,
                 SubscriptDecl *subscript,
                 SubstitutionMap subscriptSubs,
                 ResilienceExpansion expansion,
                 bool &needsGenericContext) {
  // Capturing an index value dependent on the generic context means we
  // need the generic context captured in the key path.
  auto subscriptSubstTy = subscript->getInterfaceType();
  auto sig = subscript->getGenericSignature();
  if (sig) {
    subscriptSubstTy = subscriptSubstTy.subst(subscriptSubs);
  }
  needsGenericContext |= subscriptSubstTy->hasArchetype();

  for (auto *index : *subscript->getIndices()) {
    auto indexTy = index->getInterfaceType();
    if (sig) {
      indexTy = indexTy.subst(subscriptSubs);
    }

    // Key path buffers store indexes at the opaque abstraction level, and
    // the pattern types must be context-free (interface types).
    auto indexLoweredTy = SGM.Types.getLoweredType(
        AbstractionPattern::getOpaque(), indexTy,
        TypeExpansionContext::noOpaqueTypeArchetypesSubstitution(expansion));
    indexLoweredTy = indexLoweredTy.mapTypeOutOfContext();
    indexPatterns.push_back({indexTy->mapTypeOutOfContext()
                                    ->getCanonicalType(),
                             indexLoweredTy});
  }
}
/// Build the key path pattern's index entries from previously-lowered index
/// types and their Hashable conformances.
///
/// Each entry pairs a pattern operand number (allocated sequentially from
/// \p baseOperand, which is advanced past the consumed operands) with the
/// formal type, lowered type, and context-free Hashable conformance of the
/// corresponding index. \p indexTypes and \p indexHashables are parallel
/// arrays.
///
/// Fix vs. the previous revision: removed the stray `;` after the function
/// body (an empty declaration flagged by -Wextra-semi).
static void
lowerKeyPathSubscriptIndexPatterns(
                 SmallVectorImpl<KeyPathPatternComponent::Index> &indexPatterns,
                 ArrayRef<IndexTypePair> indexTypes,
                 ArrayRef<ProtocolConformanceRef> indexHashables,
                 unsigned &baseOperand) {
  for (unsigned i : indices(indexTypes)) {
    CanType formalTy;
    SILType loweredTy;
    std::tie(formalTy, loweredTy) = indexTypes[i];
    // The pattern is context-free, so the conformance must be too.
    auto hashable = indexHashables[i].mapConformanceOutOfContext();
    assert(hashable.isAbstract() ||
           hashable.getConcrete()->getType()->isEqual(formalTy));

    indexPatterns.push_back({baseOperand++, formalTy, loweredTy, hashable});
  }
}
/// Build one key path pattern component referencing the given storage
/// declaration (a property or subscript), emitting any getter/setter/
/// equals/hash thunks the component needs.
KeyPathPatternComponent
SILGenModule::emitKeyPathComponentForDecl(SILLocation loc,
                                          GenericEnvironment *genericEnv,
                                          ResilienceExpansion expansion,
                                          unsigned &baseOperand,
                                          bool &needsGenericContext,
                                          SubstitutionMap subs,
                                          AbstractStorageDecl *storage,
                                          ArrayRef<ProtocolConformanceRef> indexHashables,
                                          CanType baseTy,
                                          DeclContext *useDC,
                                          bool forPropertyDescriptor) {
  auto baseDecl = storage;

  // ABI-compatible overrides do not have property descriptors, so we need
  // to reference the overridden declaration instead.
  if (isa<ClassDecl>(baseDecl->getDeclContext())) {
    while (!baseDecl->isValidKeyPathComponent())
      baseDecl = baseDecl->getOverriddenDecl();
  }

  /// Returns true if a key path component for the given property or
  /// subscript should be externally referenced.
  auto shouldUseExternalKeyPathComponent = [&]() -> bool {
    return (!forPropertyDescriptor &&
            (baseDecl->getModuleContext() != SwiftModule ||
             baseDecl->isResilient(SwiftModule, expansion)) &&
            // Protocol requirements don't have nor need property descriptors.
            !isa<ProtocolDecl>(baseDecl->getDeclContext()) &&
            // Properties that only dispatch via ObjC lookup do not have nor
            // need property descriptors, since the selector identifies the
            // storage.
            // Properties that are not public don't need property descriptors
            // either.
            (!baseDecl->requiresOpaqueAccessors() ||
             (!getAccessorDeclRef(getRepresentativeAccessorForKeyPath(baseDecl))
                   .isForeign &&
              getAccessorDeclRef(getRepresentativeAccessorForKeyPath(baseDecl))
                   .getLinkage(ForDefinition) <= SILLinkage::PublicNonABI)));
  };

  auto strategy = storage->getAccessStrategy(AccessSemantics::Ordinary,
                                             storage->supportsMutation()
                                               ? AccessKind::ReadWrite
                                               : AccessKind::Read,
                                             M.getSwiftModule(),
                                             expansion);

  AbstractStorageDecl *externalDecl = nullptr;
  SubstitutionMap externalSubs;

  if (shouldUseExternalKeyPathComponent()) {
    externalDecl = storage;
    // Map the substitutions out of context.
    if (!subs.empty()) {
      externalSubs = subs;
      // If any of the substitutions involve local archetypes, then the
      // key path pattern needs to capture the generic context, and we need
      // to map the pattern substitutions out of this context.
      if (externalSubs.hasArchetypes()) {
        needsGenericContext = true;
        externalSubs = externalSubs.mapReplacementTypesOutOfContext();
      }
    }

    // ABI-compatible overrides do not have property descriptors, so we need
    // to reference the overridden declaration instead.
    if (baseDecl != externalDecl) {
      externalSubs = SubstitutionMap::getOverrideSubstitutions(baseDecl,
                                                               externalDecl,
                                                               externalSubs);
      externalDecl = baseDecl;
    }
  }

  auto isSettableInComponent = [&]() -> bool {
    // For storage we reference by a property descriptor, the descriptor will
    // supply the settability if needed. We only reference it here if the
    // setter is public.
    if (shouldUseExternalKeyPathComponent())
      return storage->isSettable(useDC)
        && storage->isSetterAccessibleFrom(useDC);
    return storage->isSettable(storage->getDeclContext());
  };

  if (auto var = dyn_cast<VarDecl>(storage)) {
    CanType componentTy;
    if (!var->getDeclContext()->isTypeContext()) {
      componentTy = var->getInterfaceType()->getCanonicalType();
    } else {
      // Compute the property's type as a member of the base, then map it
      // back to an interface type against the pattern's generic signature.
      componentTy =
        GenericEnvironment::mapTypeIntoContext(genericEnv, baseTy)
          ->getTypeOfMember(SwiftModule, var)
          ->getReferenceStorageReferent()
          ->mapTypeOutOfContext()
          ->getCanonicalType(
              genericEnv ? genericEnv->getGenericSignature() : nullptr);
    }

    if (canStorageUseStoredKeyPathComponent(var, expansion)) {
      return KeyPathPatternComponent::forStoredProperty(var, componentTy);
    }

    // We need thunks to bring the getter and setter to the right signature
    // expected by the key path runtime.
    auto id = getIdForKeyPathComponentComputedProperty(*this, var,
                                                       strategy);
    auto getter = getOrCreateKeyPathGetter(*this, loc,
             var, subs,
             needsGenericContext ? genericEnv : nullptr,
             expansion, {}, baseTy, componentTy);

    if (isSettableInComponent()) {
      auto setter = getOrCreateKeyPathSetter(*this, loc,
             var, subs,
             needsGenericContext ? genericEnv : nullptr,
             expansion, {}, baseTy, componentTy);
      return KeyPathPatternComponent::forComputedSettableProperty(id,
          getter, setter, {}, nullptr, nullptr,
          externalDecl, externalSubs, componentTy);
    } else {
      return KeyPathPatternComponent::forComputedGettableProperty(id,
          getter, {}, nullptr, nullptr,
          externalDecl, externalSubs, componentTy);
    }
  }

  if (auto decl = dyn_cast<SubscriptDecl>(storage)) {
    // The component type is the subscript's (substituted) result type,
    // mapped out of context.
    auto baseSubscriptTy =
      decl->getInterfaceType()->castTo<AnyFunctionType>();
    if (auto genSubscriptTy = baseSubscriptTy->getAs<GenericFunctionType>())
      baseSubscriptTy = genSubscriptTy->substGenericArgs(subs);
    auto baseSubscriptInterfaceTy = cast<AnyFunctionType>(
      baseSubscriptTy->mapTypeOutOfContext()->getCanonicalType());
    auto componentTy = baseSubscriptInterfaceTy.getResult();

    SmallVector<IndexTypePair, 4> indexTypes;
    lowerKeyPathSubscriptIndexTypes(*this, indexTypes,
                                    decl, subs,
                                    expansion,
                                    needsGenericContext);

    SmallVector<KeyPathPatternComponent::Index, 4> indexPatterns;
    SILFunction *indexEquals = nullptr, *indexHash = nullptr;
    // Property descriptors get their index information from the client.
    if (!forPropertyDescriptor) {
      lowerKeyPathSubscriptIndexPatterns(indexPatterns,
                                         indexTypes, indexHashables,
                                         baseOperand);

      getOrCreateKeyPathEqualsAndHash(*this, loc,
               needsGenericContext ? genericEnv : nullptr,
               expansion,
               indexPatterns,
               indexEquals, indexHash);
    }

    auto id = getIdForKeyPathComponentComputedProperty(*this, decl, strategy);
    auto getter = getOrCreateKeyPathGetter(*this, loc,
             decl, subs,
             needsGenericContext ? genericEnv : nullptr,
             expansion,
             indexTypes,
             baseTy, componentTy);

    auto indexPatternsCopy = getASTContext().AllocateCopy(indexPatterns);
    if (isSettableInComponent()) {
      auto setter = getOrCreateKeyPathSetter(*this, loc,
             decl, subs,
             needsGenericContext ? genericEnv : nullptr,
             expansion,
             indexTypes,
             baseTy, componentTy);
      return KeyPathPatternComponent::forComputedSettableProperty(id,
                                                           getter, setter,
                                                           indexPatternsCopy,
                                                           indexEquals,
                                                           indexHash,
                                                           externalDecl,
                                                           externalSubs,
                                                           componentTy);
    } else {
      return KeyPathPatternComponent::forComputedGettableProperty(id,
                                                           getter,
                                                           indexPatternsCopy,
                                                           indexEquals,
                                                           indexHash,
                                                           externalDecl,
                                                           externalSubs,
                                                           componentTy);
    }
  }

  llvm_unreachable("unknown kind of storage");
}
/// Emit a Swift key path literal as a key_path instruction: lower each
/// component into a KeyPathPattern, collecting subscript index values as
/// instruction operands.
RValue RValueEmitter::visitKeyPathExpr(KeyPathExpr *E, SGFContext C) {
  // An #keyPath ObjC key path is just its string literal representation.
  if (E->isObjC()) {
    return visit(E->getObjCStringLiteralExpr(), C);
  }

  // Figure out the key path pattern, abstracting out generic arguments and
  // subscript indexes.
  SmallVector<KeyPathPatternComponent, 4> loweredComponents;
  auto loweredTy = SGF.getLoweredType(E->getType());

  // The root type is the first generic argument of KeyPath<Root, Value>.
  CanType rootTy = E->getType()->castTo<BoundGenericType>()->getGenericArgs()[0]
    ->getCanonicalType();

  bool needsGenericContext = false;
  if (rootTy->hasArchetype()) {
    needsGenericContext = true;
    rootTy = rootTy->mapTypeOutOfContext()->getCanonicalType();
  }

  // baseTy tracks the value type reached so far as components are chained.
  auto baseTy = rootTy;
  // Captured subscript index values become operands of the key_path
  // instruction.
  SmallVector<SILValue, 4> operands;

  for (auto &component : E->getComponents()) {
    switch (auto kind = component.getKind()) {
    case KeyPathExpr::Component::Kind::Property:
    case KeyPathExpr::Component::Kind::Subscript: {
      auto decl = cast<AbstractStorageDecl>(component.getDeclRef().getDecl());

      unsigned numOperands = operands.size();
      loweredComponents.push_back(
        SGF.SGM.emitKeyPathComponentForDecl(SILLocation(E),
                            SGF.F.getGenericEnvironment(),
                            SGF.F.getResilienceExpansion(),
                            numOperands,
                            needsGenericContext,
                            component.getDeclRef().getSubstitutions(),
                            decl,
                            component.getSubscriptIndexHashableConformances(),
                            baseTy,
                            SGF.FunctionDC,
                            /*for descriptor*/ false));
      baseTy = loweredComponents.back().getComponentType();
      if (kind == KeyPathExpr::Component::Kind::Property)
        break;

      // Subscripts additionally contribute their evaluated index values as
      // forwarded (+1) operands.
      auto subscript = cast<SubscriptDecl>(decl);
      auto loweredArgs = SGF.emitKeyPathSubscriptOperands(
          subscript, component.getDeclRef().getSubstitutions(),
          component.getIndexExpr());

      for (auto &arg : loweredArgs) {
        operands.push_back(arg.forward(SGF));
      }

      break;
    }

    case KeyPathExpr::Component::Kind::TupleElement: {
      assert(baseTy->is<TupleType>() && "baseTy is expected to be a TupleType");
      auto tupleIndex = component.getTupleIndex();
      auto elementTy = baseTy->getAs<TupleType>()
        ->getElementType(tupleIndex)
        ->getCanonicalType();

      loweredComponents.push_back(
        KeyPathPatternComponent::forTupleElement(tupleIndex, elementTy));

      baseTy = loweredComponents.back().getComponentType();
      break;
    }

    case KeyPathExpr::Component::Kind::OptionalChain:
    case KeyPathExpr::Component::Kind::OptionalForce:
    case KeyPathExpr::Component::Kind::OptionalWrap: {
      // Chaining/forcing unwraps the optional; wrapping adds a level.
      KeyPathPatternComponent::Kind loweredKind;
      switch (kind) {
      case KeyPathExpr::Component::Kind::OptionalChain:
        loweredKind = KeyPathPatternComponent::Kind::OptionalChain;
        baseTy = baseTy->getOptionalObjectType()->getCanonicalType();
        break;
      case KeyPathExpr::Component::Kind::OptionalForce:
        loweredKind = KeyPathPatternComponent::Kind::OptionalForce;
        baseTy = baseTy->getOptionalObjectType()->getCanonicalType();
        break;
      case KeyPathExpr::Component::Kind::OptionalWrap:
        loweredKind = KeyPathPatternComponent::Kind::OptionalWrap;
        baseTy = OptionalType::get(baseTy)->getCanonicalType();
        break;
      default:
        llvm_unreachable("out of sync");
      }
      loweredComponents.push_back(
        KeyPathPatternComponent::forOptional(loweredKind, baseTy));
      break;
    }

    case KeyPathExpr::Component::Kind::Identity:
      // Identity components contribute nothing to the pattern.
      continue;

    case KeyPathExpr::Component::Kind::Invalid:
    case KeyPathExpr::Component::Kind::UnresolvedProperty:
    case KeyPathExpr::Component::Kind::UnresolvedSubscript:
      llvm_unreachable("not resolved");
    }
  }

  StringRef objcString;
  if (auto objcExpr = dyn_cast_or_null<StringLiteralExpr>
                                (E->getObjCStringLiteralExpr()))
    objcString = objcExpr->getValue();

  auto pattern = KeyPathPattern::get(SGF.SGM.M,
                                     needsGenericContext
                                       ? SGF.F.getLoweredFunctionType()
                                             ->getInvocationGenericSignature()
                                       : nullptr,
                                     rootTy, baseTy,
                                     loweredComponents,
                                     objcString);
  auto keyPath = SGF.B.createKeyPath(SILLocation(E), pattern,
                                     needsGenericContext
                                       ? SGF.F.getForwardingSubstitutionMap()
                                       : SubstitutionMap(),
                                     operands,
                                     loweredTy);
  auto value = SGF.emitManagedRValueWithCleanup(keyPath);
  return RValue(SGF, E, value);
}
/// Emit a key path application (base[keyPath: kp]) as an rvalue by forming
/// the lvalue for the whole application and loading from it.
RValue RValueEmitter::
visitKeyPathApplicationExpr(KeyPathApplicationExpr *E, SGFContext C) {
  // The formal evaluation scope covers any formal accesses opened while
  // forming the lvalue; they are finished when the scope ends.
  FormalEvaluationScope evalScope(SGF);

  auto lvalue = SGF.emitLValue(E, SGFAccessKind::OwnedObjectRead);
  return SGF.emitLoadOfLValue(E, std::move(lvalue), C);
}
/// Emit a magic identifier literal (#file, #function, #line, #column,
/// #dsohandle, ...).
RValue RValueEmitter::
visitMagicIdentifierLiteralExpr(MagicIdentifierLiteralExpr *E, SGFContext C) {
  switch (E->getKind()) {
  case MagicIdentifierLiteralExpr::File:
  case MagicIdentifierLiteralExpr::FilePath:
  case MagicIdentifierLiteralExpr::Function:
  case MagicIdentifierLiteralExpr::Line:
  case MagicIdentifierLiteralExpr::Column:
    // These kinds are emitted like ordinary literals.
    return SGF.emitLiteral(E, C);
  case MagicIdentifierLiteralExpr::DSOHandle: {
    // #dsohandle produces an UnsafeRawPointer wrapping the address of a
    // per-image global.
    auto SILLoc = SILLocation(E);
    auto UnsafeRawPointer = SGF.getASTContext().getUnsafeRawPointerDecl();
    auto UnsafeRawPtrTy =
      SGF.getLoweredType(UnsafeRawPointer->getDeclaredInterfaceType());
    SILType BuiltinRawPtrTy = SILType::getRawPointerType(SGF.getASTContext());

    SILModule &M = SGF.SGM.M;
    SILBuilder &B = SGF.B;

    StructInst *S = nullptr;
    if (M.getASTContext().LangOpts.Target.isOSWindows()) {
      // On Windows, the linker-provided __ImageBase symbol stands in for
      // the DSO handle. Declare the SIL global lazily if it isn't known yet.
      auto ImageBase = M.lookUpGlobalVariable("__ImageBase");
      if (!ImageBase)
        ImageBase =
            SILGlobalVariable::create(M, SILLinkage::Public, IsNotSerialized,
                                      "__ImageBase", BuiltinRawPtrTy);
      auto ImageBaseAddr = B.createGlobalAddr(SILLoc, ImageBase);
      auto ImageBasePointer =
          B.createAddressToPointer(SILLoc, ImageBaseAddr, BuiltinRawPtrTy);
      S = B.createStruct(SILLoc, UnsafeRawPtrTy, { ImageBasePointer });
    } else {
      // Elsewhere, use the conventional __dso_handle symbol (external, as
      // the runtime/linker defines it).
      auto DSOGlobal = M.lookUpGlobalVariable("__dso_handle");
      if (!DSOGlobal)
        DSOGlobal =
            SILGlobalVariable::create(M, SILLinkage::PublicExternal,
                                      IsNotSerialized, "__dso_handle",
                                      BuiltinRawPtrTy);
      auto DSOAddr = B.createGlobalAddr(SILLoc, DSOGlobal);
      auto DSOPointer =
          B.createAddressToPointer(SILLoc, DSOAddr, BuiltinRawPtrTy);
      S = B.createStruct(SILLoc, UnsafeRawPtrTy, { DSOPointer });
    }

    return RValue(SGF, E, ManagedValue::forUnmanaged(S));
  }
  }

  llvm_unreachable("Unhandled MagicIdentifierLiteralExpr in switch.");
}
/// Emit an array or dictionary literal. Elements are first packed into a
/// varargs Array buffer; for dictionaries (and array types other than the
/// plain Array slice type) the collection's allocating initializer is then
/// called with that array.
RValue RValueEmitter::visitCollectionExpr(CollectionExpr *E, SGFContext C) {
  auto loc = SILLocation(E);
  ArgumentScope scope(SGF, loc);

  // CSApply builds ArrayExprs without an initializer for the trivial case
  // of emitting varargs.
  CanType arrayType, elementType;
  if (E->getInitializer()) {
    if (auto *arrayExpr = dyn_cast<ArrayExpr>(E)) {
      elementType = arrayExpr->getElementType()->getCanonicalType();
    } else {
      // Dictionary literals are passed to the initializer as an array of
      // (key, value) elements.
      auto *dictionaryExpr = cast<DictionaryExpr>(E);
      elementType = dictionaryExpr->getElementType()->getCanonicalType();
    }
    arrayType = ArraySliceType::get(elementType)->getCanonicalType();
  } else {
    arrayType = E->getType()->getCanonicalType();
    auto genericType = cast<BoundGenericStructType>(arrayType);
    assert(genericType->getDecl() == SGF.getASTContext().getArrayDecl());
    elementType = genericType.getGenericArgs()[0];
  }

  VarargsInfo varargsInfo =
      emitBeginVarargs(SGF, loc, elementType, arrayType,
                       E->getNumElements());

  // Cleanups for any elements that have been initialized so far.
  SmallVector<CleanupHandle, 8> cleanups;

  for (unsigned index : range(E->getNumElements())) {
    auto destAddr = varargsInfo.getBaseAddress();
    if (index != 0) {
      SILValue indexValue = SGF.B.createIntegerLiteral(
          loc, SILType::getBuiltinWordType(SGF.getASTContext()), index);
      destAddr = SGF.B.createIndexAddr(loc, destAddr, indexValue);
    }
    auto &destTL = varargsInfo.getBaseTypeLowering();
    // Create a dormant cleanup for the value in case we exit before the
    // full array has been constructed.

    CleanupHandle destCleanup = CleanupHandle::invalid();
    if (!destTL.isTrivial()) {
      destCleanup = SGF.enterDestroyCleanup(destAddr);
      SGF.Cleanups.setCleanupState(destCleanup, CleanupState::Dormant);
      cleanups.push_back(destCleanup);
    }

    TemporaryInitialization init(destAddr, destCleanup);

    // Evaluate the element directly into its slot in the buffer.
    ArgumentSource(E->getElements()[index])
        .forwardInto(SGF, varargsInfo.getBaseAbstractionPattern(), &init,
                     destTL);
  }

  // Kill the per-element cleanups. The array will take ownership of them.
  for (auto destCleanup : cleanups)
    SGF.Cleanups.setCleanupState(destCleanup, CleanupState::Dead);

  RValue array(SGF, loc, arrayType,
             emitEndVarargs(SGF, loc, std::move(varargsInfo)));

  array = scope.popPreservingValue(std::move(array));

  // If we're building an array, we don't have to call the initializer;
  // we've already built one.
  if (arrayType->isEqual(E->getType()))
    return array;

  // Call the builtin initializer.
  PreparedArguments args(AnyFunctionType::Param(E->getType()));
  args.add(E, std::move(array));

  return SGF.emitApplyAllocatingInitializer(
      loc, E->getInitializer(), std::move(args), E->getType(), C);
}
/// Flattens one level of optional from a nested optional value.
///
/// Given a +1 value of type T?? this produces a +1 value of type T?:
/// .some(x) maps to x (which is itself a T?), and .none maps to .none.
/// Handles both loadable results (via a phi argument on the continuation
/// block) and address-only results (via a temporary buffer).
static ManagedValue flattenOptional(SILGenFunction &SGF, SILLocation loc,
                                    ManagedValue optVal) {
  // This code assumes that we have a +1 value.
  assert(optVal.isPlusOne(SGF));

  // FIXME: Largely copied from SILGenFunction::emitOptionalToOptional.
  auto contBB = SGF.createBasicBlock();
  auto isNotPresentBB = SGF.createBasicBlock();
  auto isPresentBB = SGF.createBasicBlock();

  // The result is the input with one optional layer stripped off.
  SILType resultTy = optVal.getType().getOptionalObjectType();
  auto &resultTL = SGF.getTypeLowering(resultTy);
  assert(resultTy.getASTType().getOptionalObjectType() &&
         "input was not a nested optional value");

  SILValue contBBArg;
  TemporaryInitializationPtr addrOnlyResultBuf;
  if (resultTL.isAddressOnly()) {
    // Address-only results are produced into a temporary rather than a phi.
    addrOnlyResultBuf = SGF.emitTemporary(loc, resultTL);
  } else {
    contBBArg = contBB->createPhiArgument(resultTy, ValueOwnershipKind::Owned);
  }

  SwitchEnumBuilder SEB(SGF.B, loc, optVal);

  SEB.addOptionalSomeCase(
      isPresentBB, contBB, [&](ManagedValue input, SwitchCaseFullExpr &&scope) {
        if (resultTL.isAddressOnly()) {
          SILValue addr =
              addrOnlyResultBuf->getAddressForInPlaceInitialization(SGF, loc);
          auto *someDecl = SGF.getASTContext().getOptionalSomeDecl();
          // Take the inner optional's payload address and copy it into the
          // result buffer.
          input = SGF.B.createUncheckedTakeEnumDataAddr(
              loc, input, someDecl, input.getType().getOptionalObjectType());
          SGF.B.createCopyAddr(loc, input.getValue(), addr, IsNotTake,
                               IsInitialization);
          scope.exitAndBranch(loc);
          return;
        }
        scope.exitAndBranch(loc, input.forward(SGF));
      });
  SEB.addOptionalNoneCase(
      isNotPresentBB, contBB,
      [&](ManagedValue input, SwitchCaseFullExpr &&scope) {
        if (resultTL.isAddressOnly()) {
          SILValue addr =
              addrOnlyResultBuf->getAddressForInPlaceInitialization(SGF, loc);
          SGF.emitInjectOptionalNothingInto(loc, addr, resultTL);
          scope.exitAndBranch(loc);
          return;
        }

        auto mv = SGF.B.createManagedOptionalNone(loc, resultTy).forward(SGF);
        scope.exitAndBranch(loc, mv);
      });
  std::move(SEB).emit();

  // Continue.
  SGF.B.emitBlock(contBB);
  if (resultTL.isAddressOnly()) {
    addrOnlyResultBuf->finishInitialization(SGF);
    return addrOnlyResultBuf->getManagedAddress();
  }

  return SGF.emitManagedRValueWithCleanup(contBBArg, resultTL);
}
/// Evaluate the delegation subexpression of a RebindSelfInConstructorExpr
/// to produce the replacement 'self' value, at +1.
///
/// The formal evaluation scope ensures any formal accesses opened while
/// emitting the subexpression are finished before we return; the cleanup on
/// the new self survives because the value is returned out of the scope.
static ManagedValue
computeNewSelfForRebindSelfInConstructorExpr(SILGenFunction &SGF,
                                             RebindSelfInConstructorExpr *E) {
  FormalEvaluationScope evalScope(SGF);
  ManagedValue result = SGF.emitRValueAsSingleValue(E->getSubExpr());

  // The delegation is complete: clear the self-initialization bookkeeping
  // that was set up while emitting the delegating call.
  SGF.InitDelegationLoc.reset();
  SGF.InitDelegationSelf = ManagedValue();
  SGF.SuperInitDelegationSelf = ManagedValue();

  return result;
}
/// Emit 'self = <delegated init call>' inside a constructor.
///
/// Handles one or two levels of optionality on the new 'self' (a failable
/// delegate, optionally under 'try?'), performs any required downcast, and
/// then stores the new value back into the 'self' box in a way that matches
/// how (or whether) the old 'self' was borrowed.
RValue RValueEmitter::visitRebindSelfInConstructorExpr(
                                RebindSelfInConstructorExpr *E, SGFContext C) {
  auto selfDecl = E->getSelf();
  auto ctorDecl = cast<ConstructorDecl>(selfDecl->getDeclContext());
  auto selfIfaceTy = ctorDecl->getDeclContext()->getSelfInterfaceType();
  auto selfTy = ctorDecl->mapTypeIntoContext(selfIfaceTy);

  auto newSelfTy = E->getSubExpr()->getType();
  bool outerIsOptional = false;
  bool innerIsOptional = false;
  // Peel up to two optional layers off the delegate's result type,
  // recording which layers were present.
  auto objTy = newSelfTy->getOptionalObjectType();
  if (objTy) {
    outerIsOptional = true;
    newSelfTy = objTy;

    // "try? self.init()" can give us two levels of optional if the initializer
    // we delegate to is failable.
    objTy = newSelfTy->getOptionalObjectType();
    if (objTy) {
      innerIsOptional = true;
      newSelfTy = objTy;
    }
  }

  // The subexpression consumes the current 'self' binding.
  assert(SGF.SelfInitDelegationState == SILGenFunction::NormalSelf
         && "already doing something funky with self?!");
  SGF.SelfInitDelegationState = SILGenFunction::WillSharedBorrowSelf;
  SGF.InitDelegationLoc.emplace(E);

  // Emit the subexpression, computing new self. New self is always returned at
  // +1.
  ManagedValue newSelf = computeNewSelfForRebindSelfInConstructorExpr(SGF, E);

  // We know that self is a box, so get its address.
  SILValue selfAddr =
    SGF.emitAddressOfLocalVarDecl(E, selfDecl, selfTy->getCanonicalType(),
                                  SGFAccessKind::Write).getLValueAddress();

  // Handle a nested optional case (see above).
  if (innerIsOptional)
    newSelf = flattenOptional(SGF, E, newSelf);

  // If both the delegated-to initializer and our enclosing initializer can
  // fail, deal with the failure.
  if (outerIsOptional && ctorDecl->isFailable()) {
    SILBasicBlock *someBB = SGF.createBasicBlock();

    auto hasValue = SGF.emitDoesOptionalHaveValue(E, newSelf.getValue());

    assert(SGF.FailDest.isValid() && "too big to fail");

    // On 'nil', branch out through the enclosing initializer's failure dest.
    auto noneBB = SGF.Cleanups.emitBlockForCleanups(SGF.FailDest, E);

    SGF.B.createCondBranch(E, hasValue, someBB, noneBB);

    // Otherwise, project out the value and carry on.
    SGF.B.emitBlock(someBB);

    // If the current constructor is not failable, force out the value.
    newSelf = SGF.emitUncheckedGetOptionalValueFrom(E, newSelf,
                                    SGF.getTypeLowering(newSelf.getType()),
                                                    SGFContext());
  }

  // If we called a constructor that requires a downcast, perform the downcast.
  auto destTy = SGF.getLoweredType(selfTy);
  if (newSelf.getType() != destTy) {
    assert(newSelf.getType().isObject() && destTy.isObject());

    // Assume that the returned 'self' is the appropriate subclass
    // type (or a derived class thereof). Only Objective-C classes can
    // violate this assumption.
    newSelf = SGF.B.createUncheckedRefCast(E, newSelf, destTy);
  }

  // Forward or assign into the box depending on whether we actually consumed
  // 'self'.
  switch (SGF.SelfInitDelegationState) {
  case SILGenFunction::NormalSelf:
    llvm_unreachable("self isn't normal in a constructor delegation");

  case SILGenFunction::WillSharedBorrowSelf:
    // We did not perform any borrow of self, exclusive or shared. This means
    // that old self is still located in the relevant box. This will ensure that
    // old self is destroyed.
    newSelf.assignInto(SGF, E, selfAddr);
    break;

  case SILGenFunction::DidSharedBorrowSelf:
    // We performed a shared borrow of self. This means that old self is still
    // located in the self box. Perform an assign to destroy old self.
    newSelf.assignInto(SGF, E, selfAddr);
    break;

  case SILGenFunction::WillExclusiveBorrowSelf:
    llvm_unreachable("Should never have newSelf without finishing an exclusive "
                     "borrow scope");

  case SILGenFunction::DidExclusiveBorrowSelf:
    // We performed an exclusive borrow of self and have a new value to
    // writeback. Writeback the self value into the now empty box.
    newSelf.forwardInto(SGF, E, selfAddr);
    break;
  }

  // Restore the delegation state machine and clear the cached 'self'.
  SGF.SelfInitDelegationState = SILGenFunction::NormalSelf;
  SGF.InitDelegationSelf = ManagedValue();

  // A rebind expression itself produces '()'.
  return SGF.emitEmptyTupleRValue(E, C);
}
/// Returns true if values of this type are represented verbatim as a
/// (possibly null) pointer when coming from C, so a result annotated as
/// non-optional could in practice still be nil.
static bool isVerbatimNullableTypeInC(SILModule &M, Type ty) {
  ty = ty->getWithoutSpecifierType()->getReferenceStorageReferent();

  if (ty->hasReferenceSemantics()) {
    // Function types need a closer look: only the C-derived representations
    // are carried verbatim.  Swift-representable functions were already
    // bridged to closures before we get a chance to check for optional
    // promotion, so we're already screwed if an API lies about nullability.
    if (auto fnTy = ty->getAs<AnyFunctionType>()) {
      auto rep = fnTy->getRepresentation();
      return rep == FunctionTypeRepresentation::Block ||
             rep == FunctionTypeRepresentation::CFunctionPointer;
    }
    // Class instances and @objc existentials are all nullable.
    return true;
  }

  // Non-reference types such as UnsafePointer can also be nullable:
  // ask whether Optional<T> has a trivial representation in C.
  const DeclContext *DC = M.getAssociatedContext();
  return OptionalType::get(ty)->isTriviallyRepresentableIn(ForeignLanguage::C,
                                                           DC);
}
/// Determine whether the given declaration returns a non-optional object that
/// might actually be nil.
///
/// This is an awful hack that makes it possible to work around several kinds
/// of problems:
///   - initializers currently cannot fail, so they always return non-optional.
///   - an Objective-C method might have been annotated to state (incorrectly)
///     that it returns a non-optional object
///   - an Objective-C property might be annotated to state (incorrectly) that
///     it is non-optional
static bool mayLieAboutNonOptionalReturn(SILModule &M,
                                         ValueDecl *decl) {
  // Any Objective-C initializer, because failure propagates from any
  // initializer written in Objective-C (and there's no way to tell).
  if (auto constructor = dyn_cast<ConstructorDecl>(decl)) {
    return constructor->isObjC();
  }

  // Functions that return non-optional reference type and were imported from
  // Objective-C.
  if (auto func = dyn_cast<FuncDecl>(decl)) {
    // Sanity-check (debug only): the result type should indeed be one that
    // C represents verbatim as a nullable pointer.
    assert((func->getResultInterfaceType()->hasTypeParameter()
            || isVerbatimNullableTypeInC(M, func->getResultInterfaceType()))
           && "func's result type is not nullable?!");
    return func->hasClangNode();
  }

  // Computed properties of non-optional reference type that were imported from
  // Objective-C.
  if (auto var = dyn_cast<VarDecl>(decl)) {
#ifndef NDEBUG
    auto type = var->getInterfaceType();
    assert((type->hasTypeParameter()
            || isVerbatimNullableTypeInC(M, type->getReferenceStorageReferent()))
           && "property's result type is not nullable?!");
#endif
    return var->hasClangNode();
  }

  // Subscripts of non-optional reference type that were imported from
  // Objective-C.
  if (auto subscript = dyn_cast<SubscriptDecl>(decl)) {
    assert((subscript->getElementInterfaceType()->hasTypeParameter()
            || isVerbatimNullableTypeInC(M, subscript->getElementInterfaceType()))
           && "subscript's result type is not nullable?!");
    return subscript->hasClangNode();
  }

  // Any other kind of declaration is assumed to be trustworthy.
  return false;
}
/// Determine whether the given expression returns a non-optional object that
/// might actually be nil.
///
/// This is an awful hack that makes it possible to work around several kinds
/// of problems:
///   - an Objective-C method might have been annotated to state (incorrectly)
///     that it returns a non-optional object
///   - an Objective-C property might be annotated to state (incorrectly) that
///     it is non-optional
static bool mayLieAboutNonOptionalReturn(SILModule &M, Expr *expr) {
  expr = expr->getSemanticsProvidingExpr();

  // An application that produces a reference type, which we look through to
  // get the function we're calling.
  if (auto apply = dyn_cast<ApplyExpr>(expr)) {
    // The result has to be a nullable type.
    if (!isVerbatimNullableTypeInC(M, apply->getType()))
      return false;

    // Helper: dig the callee FuncDecl out of a dynamic-member-lookup
    // reference, looking through an OpenExistentialExpr wrapper.
    auto getFuncDeclFromDynamicMemberLookup = [&](Expr *expr) -> FuncDecl * {
      if (auto open = dyn_cast<OpenExistentialExpr>(expr))
        expr = open->getSubExpr();

      if (auto memberRef = dyn_cast<DynamicMemberRefExpr>(expr))
        return dyn_cast<FuncDecl>(memberRef->getMember().getDecl());
      return nullptr;
    };

    // The function should come from C, being either an ObjC function or method
    // or having a C-derived convention.
    ValueDecl *method = nullptr;
    if (auto selfApply = dyn_cast<ApplyExpr>(apply->getFn())) {
      // A curried 'self' application: the callee is a method reference.
      if (auto methodRef = dyn_cast<DeclRefExpr>(selfApply->getFn())) {
        method = methodRef->getDecl();
      }
    } else if (auto force = dyn_cast<ForceValueExpr>(apply->getFn())) {
      method = getFuncDeclFromDynamicMemberLookup(force->getSubExpr());
    } else if (auto bind = dyn_cast<BindOptionalExpr>(apply->getFn())) {
      method = getFuncDeclFromDynamicMemberLookup(bind->getSubExpr());
    } else if (auto fnRef = dyn_cast<DeclRefExpr>(apply->getFn())) {
      // Only consider a full application of a method. Partial applications
      // never lie.
      if (auto func = dyn_cast<AbstractFunctionDecl>(fnRef->getDecl()))
        if (!func->hasImplicitSelfDecl())
          method = fnRef->getDecl();
    }
    if (method && mayLieAboutNonOptionalReturn(M, method))
      return true;

    // Even without an identified declaration, a C-derived calling
    // convention is enough to distrust the annotation.
    auto convention = apply->getFn()->getType()->castTo<AnyFunctionType>()
      ->getRepresentation();

    switch (convention) {
    case FunctionTypeRepresentation::Block:
    case FunctionTypeRepresentation::CFunctionPointer:
      return true;
    case FunctionTypeRepresentation::Swift:
    case FunctionTypeRepresentation::Thin:
      return false;
    }
  }

  // A load.
  if (auto load = dyn_cast<LoadExpr>(expr)) {
    return mayLieAboutNonOptionalReturn(M, load->getSubExpr());
  }

  // A reference to a potentially dynamic member/subscript property.
  if (auto member = dyn_cast<LookupExpr>(expr)) {
    return isVerbatimNullableTypeInC(M, member->getType()) &&
      mayLieAboutNonOptionalReturn(M, member->getMember().getDecl());
  }

  // Any other expression form is trusted.
  return false;
}
/// Emit a T -> T? injection, with special handling for values that may
/// secretly be nil despite their non-optional static type.
RValue RValueEmitter::visitInjectIntoOptionalExpr(InjectIntoOptionalExpr *E,
                                                  SGFContext C) {
  // This is an awful hack. When the source expression might produce a
  // non-optional reference that could legitimated be nil, such as with an
  // initializer, allow this workaround to capture that nil:
  //
  //   let x: NSFoo? = NSFoo(potentiallyFailingInit: x)
  //
  // However, our optimizer is smart enough now to recognize that an initializer
  // can "never" produce nil, and will optimize away any attempts to check the
  // resulting optional for nil. As a special case, when we're injecting the
  // result of an ObjC constructor into an optional, do it using an unchecked
  // bitcast, which is opaque to the optimizer.
  if (mayLieAboutNonOptionalReturn(SGF.SGM.M, E->getSubExpr())) {
    auto result = SGF.emitRValueAsSingleValue(E->getSubExpr());
    auto optType = SGF.getLoweredLoadableType(E->getType());
    // The bitcast (rather than an enum injection) preserves a possible
    // nil payload without letting the optimizer assume it away.
    ManagedValue bitcast = SGF.B.createUncheckedBitCast(E, result, optType);
    return RValue(SGF, E, bitcast);
  }

  // Try the bridging peephole.
  if (auto result = tryEmitAsBridgingConversion(SGF, E, false, C)) {
    return RValue(SGF, E, *result);
  }

  // General case: emit the operand and wrap it in '.some'.
  auto helper = [E](SILGenFunction &SGF, SILLocation loc, SGFContext C) {
    return SGF.emitRValueAsSingleValue(E->getSubExpr(), C);
  };

  auto result =
      SGF.emitOptionalSome(E, SGF.getLoweredType(E->getType()), helper, C);
  return RValue(SGF, E, result);
}
/// Emit the conversion of a class metatype value to an AnyObject value.
RValue RValueEmitter::visitClassMetatypeToObjectExpr(
    ClassMetatypeToObjectExpr *E,
    SGFContext C) {
  // Evaluate the metatype operand, then lower it to the object result type.
  ManagedValue metatype = SGF.emitRValueAsSingleValue(E->getSubExpr());
  SILType objectTy = SGF.getLoweredLoadableType(E->getType());
  ManagedValue object = SGF.emitClassMetatypeToObject(E, metatype, objectTy);
  return RValue(SGF, E, object);
}
/// Emit the conversion of an existential metatype value to an object value.
RValue RValueEmitter::visitExistentialMetatypeToObjectExpr(
    ExistentialMetatypeToObjectExpr *E,
    SGFContext C) {
  // Evaluate the metatype operand, then lower it to the object result type.
  ManagedValue metatype = SGF.emitRValueAsSingleValue(E->getSubExpr());
  SILType objectTy = SGF.getLoweredLoadableType(E->getType());
  ManagedValue object =
      SGF.emitExistentialMetatypeToObject(E, metatype, objectTy);
  return RValue(SGF, E, object);
}
/// Emit the conversion of a protocol metatype value to a Protocol object.
RValue RValueEmitter::visitProtocolMetatypeToObjectExpr(
    ProtocolMetatypeToObjectExpr *E,
    SGFContext C) {
  // The operand's value is not needed; evaluate it only for side effects.
  SGF.emitIgnoredExpr(E->getSubExpr());

  CanType protoTy = E->getSubExpr()->getType()->getCanonicalType();
  SILType objectTy = SGF.getLoweredLoadableType(E->getType());
  ManagedValue object = SGF.emitProtocolMetatypeToObject(E, protoTy, objectTy);
  return RValue(SGF, E, object);
}
/// Emit a ternary 'cond ? a : b' expression.
///
/// Loadable results are merged through a continuation-block argument;
/// address-only results are emitted in place into a shared stack buffer
/// that dominates both branches.
///
/// Fix: the FullExpr local in the address-only *else* branch was misnamed
/// 'trueScope' (a copy-paste of the then branch); renamed to 'falseScope'.
/// Purely a local rename — no behavior change.
RValue RValueEmitter::visitIfExpr(IfExpr *E, SGFContext C) {
  auto &lowering = SGF.getTypeLowering(E->getType());

  auto NumTrueTaken = SGF.loadProfilerCount(E->getThenExpr());
  auto NumFalseTaken = SGF.loadProfilerCount(E->getElseExpr());

  if (lowering.isLoadable() || !SGF.silConv.useLoweredAddresses()) {
    // If the result is loadable, emit each branch and forward its result
    // into the destination block argument.

    // FIXME: We could avoid imploding and reexploding tuples here.
    Condition cond = SGF.emitCondition(E->getCondExpr(),
                                       /*invertCondition*/ false,
                                       SGF.getLoweredType(E->getType()),
                                       NumTrueTaken, NumFalseTaken);

    cond.enterTrue(SGF);
    SGF.emitProfilerIncrement(E->getThenExpr());
    SILValue trueValue;
    {
      auto TE = E->getThenExpr();
      // Scope the then-branch cleanups so they end before leaving the branch.
      FullExpr trueScope(SGF.Cleanups, CleanupLocation(TE));
      trueValue = visit(TE).forwardAsSingleValue(SGF, TE);
    }
    cond.exitTrue(SGF, trueValue);

    cond.enterFalse(SGF);
    SILValue falseValue;
    {
      auto EE = E->getElseExpr();
      FullExpr falseScope(SGF.Cleanups, CleanupLocation(EE));
      falseValue = visit(EE).forwardAsSingleValue(SGF, EE);
    }
    cond.exitFalse(SGF, falseValue);

    SILBasicBlock *cont = cond.complete(SGF);
    assert(cont && "no continuation block for if expr?!");

    // The merged result arrives as the continuation block's first argument.
    SILValue result = cont->args_begin()[0];

    return RValue(SGF, E, SGF.emitManagedRValueWithCleanup(result));
  } else {
    // If the result is address-only, emit the result into a common stack buffer
    // that dominates both branches.
    SILValue resultAddr = SGF.getBufferForExprResult(
                                               E, lowering.getLoweredType(), C);

    Condition cond = SGF.emitCondition(E->getCondExpr(),
                                       /*invertCondition*/ false,
                                       /*contArgs*/ {},
                                       NumTrueTaken, NumFalseTaken);
    cond.enterTrue(SGF);
    SGF.emitProfilerIncrement(E->getThenExpr());
    {
      auto TE = E->getThenExpr();
      FullExpr trueScope(SGF.Cleanups, CleanupLocation(TE));
      KnownAddressInitialization init(resultAddr);
      SGF.emitExprInto(TE, &init);
    }
    cond.exitTrue(SGF);

    cond.enterFalse(SGF);
    {
      auto EE = E->getElseExpr();
      FullExpr falseScope(SGF.Cleanups, CleanupLocation(EE));
      KnownAddressInitialization init(resultAddr);
      SGF.emitExprInto(EE, &init);
    }
    cond.exitFalse(SGF);

    cond.complete(SGF);

    return RValue(SGF, E,
                  SGF.manageBufferForExprResult(resultAddr, lowering, C));
  }
}
/// Produce the canonical empty-tuple r-value '()'.
///
/// Both parameters are unused: '()' has no SIL representation, so there is
/// nothing to emit and no context to emit into.
RValue SILGenFunction::emitEmptyTupleRValue(SILLocation loc,
                                            SGFContext C) {
  return RValue(CanType(TupleType::getEmpty(F.getASTContext())));
}
namespace {
  /// A visitor for creating a flattened list of LValues from a
  /// tuple-of-lvalues expression.
  ///
  /// Note that we can have tuples down to arbitrary depths in the
  /// type, but every branch should lead to an l-value otherwise.
  class TupleLValueEmitter
      : public Lowering::ExprVisitor<TupleLValueEmitter> {
    SILGenFunction &SGF;

    // The access kind (e.g. Write) with which every leaf l-value is emitted.
    SGFAccessKind TheAccessKind;

    /// A flattened list of l-values.  'None' entries mark '_' discards.
    SmallVectorImpl<Optional<LValue>> &Results;
  public:
    TupleLValueEmitter(SILGenFunction &SGF, SGFAccessKind accessKind,
                       SmallVectorImpl<Optional<LValue>> &results)
      : SGF(SGF), TheAccessKind(accessKind), Results(results) {}

    // If the destination is a tuple, recursively destructure.
    void visitTupleExpr(TupleExpr *E) {
      for (auto &elt : E->getElements()) {
        visit(elt);
      }
    }

    // If the destination is '_', queue up a discard.
    void visitDiscardAssignmentExpr(DiscardAssignmentExpr *E) {
      Results.push_back(None);
    }

    // Otherwise, queue up a scalar assignment to an lvalue.
    void visitExpr(Expr *E) {
      assert(E->getType()->is<LValueType>());
      Results.push_back(SGF.emitLValue(E, TheAccessKind));
    }
  };

  /// A visitor for consuming tuples of l-values.
  ///
  /// Walks the destination *type* in parallel with a queue of flattened
  /// l-values (built by TupleLValueEmitter) and the source r-value,
  /// emitting one scalar assignment per leaf.
  class TupleLValueAssigner
      : public CanTypeVisitor<TupleLValueAssigner, void, RValue &&> {
    SILGenFunction &SGF;
    SILLocation AssignLoc;
    MutableArrayRef<Optional<LValue>> DestLVQueue;

    // Pop the next destination l-value off the front of the queue.
    Optional<LValue> &&getNextDest() {
      assert(!DestLVQueue.empty());
      Optional<LValue> &next = DestLVQueue.front();
      DestLVQueue = DestLVQueue.slice(1);
      return std::move(next);
    }

  public:
    TupleLValueAssigner(SILGenFunction &SGF, SILLocation assignLoc,
                        SmallVectorImpl<Optional<LValue>> &destLVs)
      : SGF(SGF), AssignLoc(assignLoc), DestLVQueue(destLVs) {}

    /// Top-level entrypoint.
    void emit(CanType destType, RValue &&src) {
      visitTupleType(cast<TupleType>(destType), std::move(src));
      assert(DestLVQueue.empty() && "didn't consume all l-values!");
    }

    // If the destination is a tuple, recursively destructure.
    void visitTupleType(CanTupleType destTupleType, RValue &&srcTuple) {
      // Break up the source r-value.
      SmallVector<RValue, 4> srcElts;
      std::move(srcTuple).extractElements(srcElts);

      // Consume source elements off the queue.
      unsigned eltIndex = 0;
      for (CanType destEltType : destTupleType.getElementTypes()) {
        visit(destEltType, std::move(srcElts[eltIndex++]));
      }
    }

    // Okay, otherwise we pull one destination off the queue.
    void visitType(CanType destType, RValue &&src) {
      assert(isa<LValueType>(destType));

      Optional<LValue> &&next = getNextDest();

      // If the destination is a discard, do nothing.
      if (!next.hasValue())
        return;

      // Otherwise, emit the scalar assignment.
      SGF.emitAssignToLValue(AssignLoc, std::move(src),
                             std::move(next.getValue()));
    }
  };
} // end anonymous namespace
/// Emit a simple assignment, i.e.
///
///   dest = src
///
/// The destination operand can be an arbitrarily-structured tuple of
/// l-values.
static void emitSimpleAssignment(SILGenFunction &SGF, SILLocation loc,
                                 Expr *dest, Expr *src) {
  // Handle lvalue-to-lvalue assignments with a high-level copy_addr
  // instruction if possible.
  if (auto *srcLoad = dyn_cast<LoadExpr>(src)) {
    // Check that the two l-value expressions have the same type.
    // Compound l-values like (a,b) have tuple type, so this check
    // also prevents us from getting into that case.
    if (dest->getType()->isEqual(srcLoad->getSubExpr()->getType())) {
      assert(!dest->getType()->is<TupleType>());

      dest = dest->getSemanticsProvidingExpr();
      if (isa<DiscardAssignmentExpr>(dest)) {
        // '_ = <lvalue load>': evaluate the source for side effects only.
        // The logical thing to do here would be emitIgnoredExpr, but that
        // changed some test results in a way I wanted to avoid, so instead
        // we're doing this.
        FormalEvaluationScope writeback(SGF);
        auto srcLV = SGF.emitLValue(srcLoad->getSubExpr(),
                                    SGFAccessKind::IgnoredRead);
        (void) SGF.emitLoadOfLValue(loc, std::move(srcLV), SGFContext());
        return;
      }

      // Emit both sides as l-values and assign address-to-address.
      FormalEvaluationScope writeback(SGF);
      auto destLV = SGF.emitLValue(dest, SGFAccessKind::Write);
      auto srcLV = SGF.emitLValue(srcLoad->getSubExpr(),
                                  SGFAccessKind::BorrowedAddressRead);
      SGF.emitAssignLValueToLValue(loc, std::move(srcLV), std::move(destLV));
      return;
    }
  }

  // Handle tuple destinations by destructuring them if present.
  CanType destType = dest->getType()->getCanonicalType();

  // But avoid this in the common case.
  if (!isa<TupleType>(destType)) {
    // If we're assigning to a discard, just emit the operand as ignored.
    dest = dest->getSemanticsProvidingExpr();
    if (isa<DiscardAssignmentExpr>(dest)) {
      SGF.emitIgnoredExpr(src);
      return;
    }

    // Scalar case: emit the destination l-value, then assign into it.
    FormalEvaluationScope writeback(SGF);
    LValue destLV = SGF.emitLValue(dest, SGFAccessKind::Write);
    SGF.emitAssignToLValue(loc, src, std::move(destLV));
    return;
  }

  // Tuple case.  All l-values are emitted first (inside one writeback
  // scope), then the source, then each leaf assignment is performed.
  FormalEvaluationScope writeback(SGF);

  // Produce a flattened queue of LValues.
  SmallVector<Optional<LValue>, 4> destLVs;
  TupleLValueEmitter(SGF, SGFAccessKind::Write, destLVs).visit(dest);

  // Emit the r-value.
  RValue srcRV = SGF.emitRValue(src);

  // Recurse on the type of the destination, pulling LValues as
  // needed from the queue we built up before.
  TupleLValueAssigner(SGF, loc, destLVs).emit(destType, std::move(srcRV));
}
/// Emit 'dest = src'.  The assignment runs inside one full-expression
/// cleanup scope, and the expression itself evaluates to '()'.
RValue RValueEmitter::visitAssignExpr(AssignExpr *E, SGFContext C) {
  FullExpr assignmentScope(SGF.Cleanups, CleanupLocation(E));
  emitSimpleAssignment(SGF, E, E->getDest(), E->getSrc());
  return SGF.emitEmptyTupleRValue(E, C);
}
/// Emit the presence check for an in-memory optional bind ('x?' where the
/// optional lives at an address).
///
/// Branches to the failure destination registered at the given bind depth
/// when the optional is nil; otherwise leaves the insertion point in the
/// "has value" block.  The optional itself is not consumed or projected.
void SILGenFunction::emitBindOptionalAddress(SILLocation loc,
                                             ManagedValue optAddress,
                                             unsigned depth) {
  assert(optAddress.getType().isAddress() && "Expected an address here");
  assert(depth < BindOptionalFailureDests.size());
  // Depths index from the innermost enclosing OptionalEvaluationExpr,
  // so count back from the end of the stack.
  auto failureDest =
      BindOptionalFailureDests[BindOptionalFailureDests.size() - depth - 1];
  assert(failureDest.isValid() && "too big to fail");

  // Since we know that we have an address, we do not need to worry about
  // ownership invariants. Instead just use a select_enum_addr.
  SILBasicBlock *someBB = createBasicBlock();
  SILValue hasValue = emitDoesOptionalHaveValue(loc, optAddress.getValue());

  auto noneBB = Cleanups.emitBlockForCleanups(failureDest, loc);
  B.createCondBranch(loc, hasValue, someBB, noneBB);

  // Reset the insertion point at the end of hasValueBB so we can
  // continue to emit code there.
  B.setInsertionPoint(someBB);
}
/// Emit an optional bind ('x?') of a +1 optional value.
///
/// On nil, branches through cleanups to the failure destination registered
/// at the given bind depth.  On success, returns the unwrapped payload at
/// +1 (as a managed value or managed buffer, depending on whether the
/// optional was loadable) with the insertion point in the success block.
ManagedValue SILGenFunction::emitBindOptional(SILLocation loc,
                                              ManagedValue optValue,
                                              unsigned depth) {
  assert(optValue.isPlusOne(*this) && "Can only bind plus one values");
  assert(depth < BindOptionalFailureDests.size());
  // Depths index from the innermost enclosing OptionalEvaluationExpr.
  auto failureDest = BindOptionalFailureDests[BindOptionalFailureDests.size()
                                              - depth - 1];

  SILBasicBlock *hasValueBB = createBasicBlock();
  SILBasicBlock *hasNoValueBB = createBasicBlock();

  // Remember the type before the switch consumes optValue.
  SILType optValueTy = optValue.getType();
  SwitchEnumBuilder SEB(B, loc, optValue);
  SEB.addOptionalSomeCase(hasValueBB, nullptr,
                          [&](ManagedValue mv, SwitchCaseFullExpr &&expr) {
                            // If mv is not an address, forward it. We will
                            // recreate the cleanup outside when we return the
                            // argument.
                            if (mv.getType().isObject()) {
                              mv.forward(*this);
                            }
                            expr.exit();
                          });
  // If not, thread out through a bunch of cleanups.
  SEB.addOptionalNoneCase(hasNoValueBB, failureDest,
                          [&](ManagedValue mv, SwitchCaseFullExpr &&expr) {
                            expr.exitAndBranch(loc);
                          });
  std::move(SEB).emit();

  // Reset the insertion point at the end of hasValueBB so we can
  // continue to emit code there.
  B.setInsertionPoint(hasValueBB);

  // If optValue was loadable, we emitted a switch_enum. In such a case, return
  // the argument from hasValueBB.
  if (optValue.getType().isLoadable(F)) {
    return emitManagedRValueWithCleanup(hasValueBB->getArgument(0));
  }

  // Otherwise, if we had an address only value, we emitted the value at +0. In
  // such a case, since we want to model this as a consuming operation. Use
  // ensure_plus_one and extract out the value from there.
  auto *someDecl = getASTContext().getOptionalSomeDecl();
  auto eltTy =
      optValueTy.getObjectType().getOptionalObjectType().getAddressType();
  assert(eltTy);
  SILValue address = optValue.forward(*this);
  return emitManagedBufferWithCleanup(
      B.createUncheckedTakeEnumDataAddr(loc, address, someDecl, eltTy));
}
/// Emit 'x?' — evaluate the optional operand and bind its payload,
/// jumping to the enclosing optional evaluation's failure block on nil.
RValue RValueEmitter::visitBindOptionalExpr(BindOptionalExpr *E, SGFContext C) {
  // Create a temporary of type Optional<T> if it is address-only.
  auto &optTL = SGF.getTypeLowering(E->getSubExpr()->getType());

  ManagedValue optValue;
  if (!SGF.silConv.useLoweredAddresses() || optTL.isLoadable()
      || E->getType()->hasOpenedExistential()) {
    // Loadable (or opaque-values mode): evaluate as a scalar.
    optValue = SGF.emitRValueAsSingleValue(E->getSubExpr());
  } else {
    auto temp = SGF.emitTemporary(E, optTL);

    // Emit the operand into the temporary.
    SGF.emitExprInto(E->getSubExpr(), temp.get());

    // And then grab the managed address.
    optValue = temp->getManagedAddress();
  }

  // Check to see whether the optional is present, if not, jump to the current
  // nil handler block. Otherwise, return the value as the result of the
  // expression.
  optValue = SGF.emitBindOptional(E, optValue, E->getDepth());
  return RValue(SGF, E, optValue);
}
namespace {
  /// A RAII object to save and restore BindOptionalFailureDest.
  ///
  /// Pushes the given failure destination onto the stack on construction
  /// and pops it on destruction; in debug builds, asserts that the stack
  /// depth was not disturbed in between.
  class RestoreOptionalFailureDest {
    SILGenFunction &SGF;
#ifndef NDEBUG
    // Stack depth at construction time, used to verify balanced push/pop.
    unsigned Depth;
#endif
  public:
    RestoreOptionalFailureDest(SILGenFunction &SGF, JumpDest &&dest)
      : SGF(SGF)
#ifndef NDEBUG
      , Depth(SGF.BindOptionalFailureDests.size())
#endif
    {
      SGF.BindOptionalFailureDests.push_back(std::move(dest));
    }
    ~RestoreOptionalFailureDest() {
      assert(SGF.BindOptionalFailureDests.size() == Depth + 1);
      SGF.BindOptionalFailureDests.pop_back();
    }
  };
} // end anonymous namespace
/// emitOptimizedOptionalEvaluation - Look for cases where we can short-circuit
/// evaluation of an OptionalEvaluationExpr by pattern matching the AST.
///
static bool emitOptimizedOptionalEvaluation(SILGenFunction &SGF,
                                            OptionalEvaluationExpr *E,
                                            ManagedValue &result,
                                            SGFContext ctx) {
  // Conversions back and forth between T! and T? are common.  When the
  // subexpression is just an inject of a depth-0 bind, the entire
  // evaluation collapses to the bound operand and no CFG diamond is
  // needed.  The AST shape we match:
  //
  //   (optional_evaluation_expr type='T?'
  //     (inject_into_optional type='T?'
  //       (bind_optional_expr type='T'
  //         (whatever type='T?' ...)
  auto *inject = dyn_cast<InjectIntoOptionalExpr>(
      E->getSubExpr()->getSemanticsProvidingExpr());
  if (!inject)
    return false;

  auto *bind = dyn_cast<BindOptionalExpr>(
      inject->getSubExpr()->getSemanticsProvidingExpr());
  // The bind must belong to the OptionalEvaluationExpr we're emitting.
  if (!bind || bind->getDepth() != 0)
    return false;

  // SIL defines away abstraction differences between T? and T!,
  // so we can just emit the sub-initialization normally.
  result = SGF.emitRValueAsSingleValue(bind->getSubExpr(), ctx);
  return true;
}
/// Emit an optional-chaining evaluation ('x?.y...'), producing a value of
/// optional type that is nil if any bind in the chain failed.
RValue RValueEmitter::visitOptionalEvaluationExpr(OptionalEvaluationExpr *E,
                                                  SGFContext C) {
  // Try the bridging peephole first.
  if (auto result = tryEmitAsBridgingConversion(SGF, E, false, C)) {
    return RValue(SGF, E, *result);
  }

  // Delegate the CFG construction to emitOptionalEvaluation; the callback
  // produces the single "normal path" result.
  SmallVector<ManagedValue, 1> results;
  SGF.emitOptionalEvaluation(E, E->getType(), results, C,
    [&](SmallVectorImpl<ManagedValue> &results, SGFContext primaryC) {
      ManagedValue result;
      // Try to short-circuit via AST pattern matching before emitting
      // the subexpression in full.
      if (!emitOptimizedOptionalEvaluation(SGF, E, result, primaryC)) {
        result = SGF.emitRValueAsSingleValue(E->getSubExpr(), primaryC);
      }

      assert(results.empty());
      results.push_back(result);
    });

  assert(results.size() == 1);
  if (results[0].isInContext()) {
    return RValue::forInContext();
  } else {
    return RValue(SGF, E, results[0]);
  }
}
/// Emit the control-flow skeleton of an optional evaluation.
///
/// Installs a failure destination, runs \p generateNormalResults to produce
/// the success-path value(s), and merges the success and failure paths so
/// that on failure the primary result (and any secondary results) become
/// nil.  results[0] is the primary value of type \p optType; any further
/// entries are secondary results that are wrapped in '.some' on the normal
/// path and set to nil on the failure path.
///
/// The primary result is produced either by address (into the caller's
/// context or a fresh temporary) or as a scalar merged through a phi,
/// depending on the type lowering and address-lowering mode.
void SILGenFunction::emitOptionalEvaluation(SILLocation loc, Type optType,
                                       SmallVectorImpl<ManagedValue> &results,
                                            SGFContext C,
                        llvm::function_ref<void(SmallVectorImpl<ManagedValue> &,
                                                SGFContext primaryC)>
                                              generateNormalResults) {
  assert(results.empty());

  auto &optTL = getTypeLowering(optType);

  Initialization *optInit = C.getEmitInto();
  bool usingProvidedContext =
    optInit && optInit->canPerformInPlaceInitialization();

  // Form the optional using address operations if the type is address-only or
  // if we already have an address to use.
  bool isByAddress = ((usingProvidedContext || optTL.isAddressOnly()) &&
                      silConv.useLoweredAddresses());

  std::unique_ptr<TemporaryInitialization> optTemp;
  if (!isByAddress) {
    // If the caller produced a context for us, but we're not going
    // to use it, make sure we don't.
    optInit = nullptr;
  } else if (!usingProvidedContext) {
    // Allocate the temporary for the Optional<T> if we didn't get one from the
    // context.  This needs to happen outside of the cleanups scope we're about
    // to push.
    optTemp = emitTemporary(loc, optTL);
    optInit = optTemp.get();
  }
  assert(isByAddress == (optInit != nullptr));

  // Acquire the address to emit into outside of the cleanups scope.
  SILValue optAddr;
  if (isByAddress)
    optAddr = optInit->getAddressForInPlaceInitialization(*this, loc);

  // Enter a cleanups scope.
  FullExpr scope(Cleanups, CleanupLocation::get(loc));

  // Inside of the cleanups scope, create a new initialization to
  // emit into optAddr.
  std::unique_ptr<TemporaryInitialization> normalInit;
  if (isByAddress) {
    normalInit = useBufferAsTemporary(optAddr, optTL);
  }

  // Install a new optional-failure destination just outside of the
  // cleanups scope.
  SILBasicBlock *failureBB = createBasicBlock();
  RestoreOptionalFailureDest
    restoreFailureDest(*this, JumpDest(failureBB, Cleanups.getCleanupsDepth(),
                                       CleanupLocation::get(loc)));

  generateNormalResults(results, SGFContext(normalInit.get()));
  assert(results.size() >= 1 && "didn't include a normal result");
  assert(results[0].isInContext() ||
         results[0].getType().getObjectType()
           == optTL.getLoweredType().getObjectType());

  // If we're emitting into the context, make sure the normal value is there.
  if (normalInit && !results[0].isInContext()) {
    normalInit->copyOrInitValueInto(*this, loc, results[0], /*init*/ true);
    normalInit->finishInitialization(*this);
    results[0] = ManagedValue::forInContext();
  }

  // We fell out of the normal result, which generated a T? as either
  // a scalar in normalArgument or directly into normalInit.

  // If we're using by-address initialization, we must've emitted into
  // normalInit.  Forward its cleanup before popping the scope.
  if (isByAddress) {
    normalInit->getManagedAddress().forward(*this);
    normalInit.reset(); // Make sure we don't use this anymore.
  } else {
    assert(!results[0].isInContext());
    results[0].forward(*this);
  }

  // For all the secondary results, forward their cleanups and make sure
  // they're of optional type so that we can inject nil into them in
  // the failure path.
  // (Should this be controllable by the client?)
  for (auto &result : MutableArrayRef<ManagedValue>(results).slice(1)) {
    assert(!result.isInContext() && "secondary result was in context");
    auto resultTy = result.getType();
    assert(resultTy.isObject() && "secondary result wasn't an object");

    // Forward the cleanup.
    SILValue value = result.forward(*this);

    // If it's not already an optional type, make it optional.
    if (!resultTy.getOptionalObjectType()) {
      resultTy = SILType::getOptionalType(resultTy);
      value = B.createOptionalSome(loc, value, resultTy);
      result = ManagedValue::forUnmanaged(value);
    }
  }

  // This concludes the conditional scope.
  scope.pop();

  // In the usual case, the code will have emitted one or more branches to the
  // failure block.  However, if the body is simple enough, we can end up with
  // no branches to the failureBB.  Detect this and simplify the generated code
  // if so.
  if (failureBB->pred_empty()) {
    // Remove the dead failureBB.
    failureBB->eraseFromParent();

    // Just re-manage all the secondary results.
    for (auto &result : MutableArrayRef<ManagedValue>(results).slice(1)) {
      result = emitManagedRValueWithCleanup(result.getValue());
    }

    // Just re-manage the main result if we're not using address-based IRGen.
    if (!isByAddress) {
      results[0] = emitManagedRValueWithCleanup(results[0].getValue(), optTL);
      return;
    }

    // Otherwise, we must have emitted into normalInit, which means that,
    // now that we're out of the cleanups scope, we need to finish optInit.
    assert(results[0].isInContext());
    optInit->finishInitialization(*this);

    // If optInit came from the SGFContext, then we've successfully emitted
    // into that.
    if (usingProvidedContext) return;

    // Otherwise, we must have emitted into optTemp.
    assert(optTemp);
    results[0] = optTemp->getManagedAddress();
    return;
  }

  // Okay, we do have uses of the failure block, so we'll need to merge
  // control paths.

  SILBasicBlock *contBB = createBasicBlock();

  // Branch to the continuation block.
  SmallVector<SILValue, 4> bbArgs;
  if (!isByAddress)
    bbArgs.push_back(results[0].getValue());
  for (const auto &result : llvm::makeArrayRef(results).slice(1))
    bbArgs.push_back(result.getValue());

  // Branch to the continuation block.
  B.createBranch(loc, contBB, bbArgs);

  // In the failure block, inject nil into the result.
  B.emitBlock(failureBB);

  // Note that none of the code here introduces any cleanups.
  // If it did, we'd need to push a scope.
  bbArgs.clear();
  if (isByAddress) {
    emitInjectOptionalNothingInto(loc, optAddr, optTL);
  } else {
    bbArgs.push_back(getOptionalNoneValue(loc, optTL));
  }
  for (const auto &result : llvm::makeArrayRef(results).slice(1)) {
    auto resultTy = result.getType();
    bbArgs.push_back(getOptionalNoneValue(loc, getTypeLowering(resultTy)));
  }
  B.createBranch(loc, contBB, bbArgs);

  // Emit the continuation block.
  B.emitBlock(contBB);

  // Create a PHI for the optional result if desired.
  if (isByAddress) {
    assert(results[0].isInContext());
  } else {
    auto arg = contBB->createPhiArgument(optTL.getLoweredType(),
                                         ValueOwnershipKind::Owned);
    results[0] = emitManagedRValueWithCleanup(arg, optTL);
  }

  // Create PHIs for all the secondary results and manage them.
  for (auto &result : MutableArrayRef<ManagedValue>(results).slice(1)) {
    auto arg = contBB->createPhiArgument(result.getType(),
                                         ValueOwnershipKind::Owned);
    result = emitManagedRValueWithCleanup(arg);
  }

  // We may need to manage the value in optInit.
  if (!isByAddress) return;
  assert(results[0].isInContext());
  optInit->finishInitialization(*this);

  // If we didn't emit into the provided context, the primary result
  // is really a temporary.
  if (usingProvidedContext) return;

  assert(optTemp);
  results[0] = optTemp->getManagedAddress();
}
/// Lower a postfix '!' by forcing the value of its subexpression.
RValue RValueEmitter::visitForceValueExpr(ForceValueExpr *E, SGFContext C) {
  // No OptionalEvaluationExprs have been opened at this point, so the
  // nesting depth passed along is zero.
  Expr *subExpr = E->getSubExpr();
  return emitForceValue(E, subExpr, /*numOptionalEvaluations=*/0, C);
}
/// Emit an expression in a forced context.
///
/// Peepholes conditional casts, optional-evaluation chains, and optional
/// injections so that the common "force" idioms do not materialize an
/// optional just to immediately unwrap it.
///
/// \param loc - the location that is causing the force
/// \param E - the forced expression
/// \param numOptionalEvaluations - the number of enclosing
///   OptionalEvaluationExprs that we've opened.
/// \param C - the context the unwrapped value may be emitted into
RValue RValueEmitter::emitForceValue(ForceValueExpr *loc, Expr *E,
                                     unsigned numOptionalEvaluations,
                                     SGFContext C) {
  auto valueType = E->getType()->getOptionalObjectType();
  assert(valueType);
  E = E->getSemanticsProvidingExpr();
  // If the subexpression is a conditional checked cast, emit an unconditional
  // cast, which drastically simplifies the generated SIL for something like:
  //
  //   (x as? Foo)!
  if (auto checkedCast = dyn_cast<ConditionalCheckedCastExpr>(E)) {
    return emitUnconditionalCheckedCast(SGF, loc, checkedCast->getSubExpr(),
                                        valueType, checkedCast->getCastKind(),
                                        C);
  }
  // If the subexpression is a monadic optional operation, peephole
  // the emission of the operation.
  if (auto eval = dyn_cast<OptionalEvaluationExpr>(E)) {
    CleanupLocation cleanupLoc = CleanupLocation::get(loc);
    SILBasicBlock *failureBB;
    JumpDest failureDest(cleanupLoc);
    // Set up an optional-failure scope (which cannot actually return).
    // We can just borrow the enclosing one if we're in a nested context.
    if (numOptionalEvaluations) {
      failureBB = nullptr; // remember that we did this
      failureDest = SGF.BindOptionalFailureDests.back();
    } else {
      failureBB = SGF.createBasicBlock(FunctionSection::Postmatter);
      failureDest = JumpDest(failureBB, SGF.Cleanups.getCleanupsDepth(),
                             cleanupLoc);
    }
    RestoreOptionalFailureDest restoreFailureDest(SGF, std::move(failureDest));
    // Recurse with the depth bumped so nested forces reuse our failure dest.
    RValue result = emitForceValue(loc, eval->getSubExpr(),
                                   numOptionalEvaluations + 1, C);
    // Emit the failure destination, but only if actually used.
    if (failureBB) {
      if (failureBB->pred_empty()) {
        // Nothing ever branched here; drop the dead block.
        SGF.eraseBasicBlock(failureBB);
      } else {
        // Reaching the failure block means a nil was force-unwrapped:
        // trap unconditionally.
        SILGenBuilder failureBuilder(SGF, failureBB);
        failureBuilder.setTrackingList(SGF.getBuilder().getTrackingList());
        auto boolTy = SILType::getBuiltinIntegerType(1, SGF.getASTContext());
        auto trueV = failureBuilder.createIntegerLiteral(loc, boolTy, 1);
        failureBuilder.createCondFail(loc, trueV, "force unwrapped a nil value");
        failureBuilder.createUnreachable(loc);
      }
    }
    return result;
  }
  // Handle injections.
  if (auto injection = dyn_cast<InjectIntoOptionalExpr>(E)) {
    auto subexpr = injection->getSubExpr()->getSemanticsProvidingExpr();
    // An injection of a bind is the idiom for a conversion between
    // optional types (e.g. ImplicitlyUnwrappedOptional<T> -> Optional<T>).
    // Handle it specially to avoid unnecessary control flow.
    if (auto bindOptional = dyn_cast<BindOptionalExpr>(subexpr)) {
      if (bindOptional->getDepth() < numOptionalEvaluations) {
        return emitForceValue(loc, bindOptional->getSubExpr(),
                              numOptionalEvaluations, C);
      }
    }
    // Otherwise, just emit the injected value directly into the result.
    return SGF.emitRValue(injection->getSubExpr(), C);
  }
  // If this is an implicit force of an ImplicitlyUnwrappedOptional,
  // and we're emitting into an unbridging conversion, try adjusting the
  // context.
  bool isImplicitUnwrap = loc->isImplicit() &&
    loc->isForceOfImplicitlyUnwrappedOptional();
  if (isImplicitUnwrap) {
    if (auto conv = C.getAsConversion()) {
      if (auto adjusted = conv->getConversion().adjustForInitialForceValue()) {
        auto value =
          conv->emitWithAdjustedConversion(SGF, loc, *adjusted,
            [E](SILGenFunction &SGF, SILLocation loc, SGFContext C) {
          return SGF.emitRValueAsSingleValue(E, C);
        });
        return RValue(SGF, loc, value);
      }
    }
  }
  // Otherwise, emit the optional and force its value out.
  const TypeLowering &optTL = SGF.getTypeLowering(E->getType());
  ManagedValue opt = SGF.emitRValueAsSingleValue(E);
  ManagedValue V =
    SGF.emitCheckedGetOptionalValueFrom(loc, opt, isImplicitUnwrap, optTL, C);
  return RValue(SGF, loc, valueType->getCanonicalType(), V);
}
/// Emit the subexpression of an OpenExistentialExpr with the opened
/// existential's opaque value made available to it.
void SILGenFunction::emitOpenExistentialExprImpl(
       OpenExistentialExpr *E,
       llvm::function_ref<void(Expr *)> emitSubExpr) {
  assert(isInFormalEvaluationScope());
  // Emit the existential value.
  if (E->getExistentialValue()->getType()->is<LValueType>()) {
    // For an l-value existential we only record the opaque-value -> expr
    // mapping here; the actual opening is presumably performed by the
    // l-value emission path that consumes OpaqueValueExprs (outside this
    // view — confirm at the lookup site).
    bool inserted = OpaqueValueExprs.insert({E->getOpaqueValue(), E}).second;
    (void)inserted;
    assert(inserted && "already have this opened existential?");
    emitSubExpr(E->getSubExpr());
    return;
  }
  // R-value case: evaluate the existential at +0 and open it for reading.
  auto existentialValue = emitRValueAsSingleValue(
      E->getExistentialValue(),
      SGFContext::AllowGuaranteedPlusZero);
  Type opaqueValueType = E->getOpaqueValue()->getType()->getRValueType();
  auto payload = emitOpenExistential(
      E, existentialValue,
      getLoweredType(opaqueValueType),
      AccessKind::Read);
  // Register the opaque value for the projected existential.
  SILGenFunction::OpaqueValueRAII opaqueValueRAII(
      *this, E->getOpaqueValue(), payload);
  emitSubExpr(E->getSubExpr());
}
/// Emit an OpenExistentialExpr as an r-value, preferring the bridging-
/// conversion peephole when it applies.
RValue RValueEmitter::visitOpenExistentialExpr(OpenExistentialExpr *E,
                                               SGFContext C) {
  if (auto result = tryEmitAsBridgingConversion(SGF, E, false, C)) {
    return RValue(SGF, E, *result);
  }
  // The opening may introduce formal accesses; scope them to this expression.
  FormalEvaluationScope writebackScope(SGF);
  return SGF.emitOpenExistentialExpr<RValue>(E,
                                             [&](Expr *subExpr) -> RValue {
                                               return visit(subExpr, C);
                                             });
}
/// Emit a withoutActuallyEscaping expression: temporarily wrap a
/// non-escaping closure in an escaping shell, run the body, and (for thick
/// closures) verify afterwards that the closure did not actually escape.
RValue RValueEmitter::visitMakeTemporarilyEscapableExpr(
    MakeTemporarilyEscapableExpr *E, SGFContext C) {
  // Emit the non-escaping function value.
  auto functionValue =
    visit(E->getNonescapingClosureValue()).getAsSingleValue(SGF, E);
  auto escapingFnTy = SGF.getLoweredType(E->getOpaqueValue()->getType());
  auto silFnTy = escapingFnTy.castTo<SILFunctionType>();
  auto visitSubExpr = [&](ManagedValue escapingClosure,
                          bool isClosureConsumable) -> RValue {
    // Bind the opaque value to the escaping function.
    assert(isClosureConsumable == escapingClosure.hasCleanup());
    SILGenFunction::OpaqueValueRAII pushOpaqueValue(SGF, E->getOpaqueValue(),
                                                    escapingClosure);
    // Emit the guarded expression.
    return visit(E->getSubExpr(), C);
  };
  // Handle @convention(block) and @convention(c). No withoutActuallyEscaping
  // verification yet.
  auto closureRepresentation = silFnTy->getExtInfo().getRepresentation();
  if (closureRepresentation != SILFunctionTypeRepresentation::Thick) {
    auto escapingClosure =
        SGF.B.createConvertFunction(E, functionValue, escapingFnTy,
                                    /*WithoutActuallyEscaping=*/true);
    bool isBlockConvention =
        closureRepresentation == SILFunctionTypeRepresentation::Block;
    return visitSubExpr(escapingClosure,
                        isBlockConvention /*isClosureConsumable*/);
  }
  // Convert it to an escaping function value.
  auto escapingClosure =
      SGF.createWithoutActuallyEscapingClosure(E, functionValue, escapingFnTy);
  auto loc = SILLocation(E);
  auto borrowedClosure = escapingClosure.borrow(SGF, loc);
  RValue rvalue = visitSubExpr(borrowedClosure, false /* isClosureConsumable */);
  // Now create the verification of the withoutActuallyEscaping operand.
  // Either we fail the uniqueness check (which means the closure has escaped)
  // and abort or we continue and destroy the ultimate reference.
  auto isEscaping = SGF.B.createIsEscapingClosure(
      loc, borrowedClosure.getValue(),
      IsEscapingClosureInst::WithoutActuallyEscaping);
  SGF.B.createCondFail(loc, isEscaping, "non-escaping closure has escaped");
  return rvalue;
}
/// Emit a reference to an already-bound opaque value.
RValue RValueEmitter::visitOpaqueValueExpr(OpaqueValueExpr *E, SGFContext C) {
  assert(SGF.OpaqueValues.count(E) && "Didn't bind OpaqueValueExpr");
  // The value was registered by whoever opened the enclosing construct
  // (e.g. OpaqueValueRAII); we only need to manage it for this use.
  auto value = SGF.OpaqueValues[E];
  return RValue(SGF, E, SGF.manageOpaqueValue(value, E, C));
}
/// A property-wrapper value placeholder is emitted exactly like the opaque
/// value it stands in for.
RValue RValueEmitter::visitPropertyWrapperValuePlaceholderExpr(
    PropertyWrapperValuePlaceholderExpr *E, SGFContext C) {
  OpaqueValueExpr *placeholder = E->getOpaqueValuePlaceholder();
  return visitOpaqueValueExpr(placeholder, C);
}
/// Look up the standard library's _Pointer protocol, caching the result
/// in the SILGenModule so the module lookup happens at most once.
ProtocolDecl *SILGenFunction::getPointerProtocol() {
  if (SGM.PointerProtocol)
    return *SGM.PointerProtocol;
  SmallVector<ValueDecl*, 1> lookup;
  getASTContext().lookupInSwiftModule("_Pointer", lookup);
  // FIXME: Should check for protocol in Sema
  assert(lookup.size() == 1 && "no _Pointer protocol");
  assert(isa<ProtocolDecl>(lookup[0]) && "_Pointer is not a protocol");
  SGM.PointerProtocol = cast<ProtocolDecl>(lookup[0]);
  return cast<ProtocolDecl>(lookup[0]);
}
namespace {
  /// A logical l-value component that views reference storage as
  /// @unmanaged (unowned(unsafe)) for AutoreleasingUnsafeMutablePointer
  /// conversions: gets produce an unmanaged value at +0, sets convert the
  /// unmanaged value back to a +1 strong reference before storing.
  class AutoreleasingWritebackComponent : public LogicalPathComponent {
  public:
    AutoreleasingWritebackComponent(LValueTypeData typeData)
      : LogicalPathComponent(typeData, AutoreleasingWritebackKind)
    {}
    std::unique_ptr<LogicalPathComponent>
    clone(SILGenFunction &SGF, SILLocation l) const override {
      return std::unique_ptr<LogicalPathComponent>(
        new AutoreleasingWritebackComponent(getTypeData()));
    }
    // Reads have no side effects beyond the load itself.
    virtual bool isLoadingPure() const override { return true; }
    void set(SILGenFunction &SGF, SILLocation loc,
             ArgumentSource &&value, ManagedValue base) && override {
      // Convert the value back to a +1 strong reference.
      auto unowned = std::move(value).getAsSingleValue(SGF).getUnmanagedValue();
      auto strongType = SILType::getPrimitiveObjectType(
                unowned->getType().castTo<UnmanagedStorageType>().getReferentType());
      auto owned = SGF.B.createUnmanagedToRef(loc, unowned, strongType);
      auto ownedMV = SGF.emitManagedRetain(loc, owned);
      // Reassign the +1 storage with it.
      ownedMV.assignInto(SGF, loc, base.getUnmanagedValue());
    }
    RValue get(SILGenFunction &SGF, SILLocation loc,
               ManagedValue base, SGFContext c) && override {
      FullExpr TightBorrowScope(SGF.Cleanups, CleanupLocation::get(loc));
      // Load the value at +0.
      ManagedValue loadedBase = SGF.B.createLoadBorrow(loc, base);
      // Convert it to unowned.
      auto refType = loadedBase.getType().getASTType();
      auto unownedType = SILType::getPrimitiveObjectType(
                                        CanUnmanagedStorageType::get(refType));
      SILValue unowned = SGF.B.createRefToUnmanaged(
          loc, loadedBase.getUnmanagedValue(), unownedType);
      // A reference type should never be exploded.
      return RValue(SGF, ManagedValue::forUnmanaged(unowned), refType);
    }
    Optional<AccessedStorage> getAccessedStorage() const override {
      return None;
    }
    void dump(raw_ostream &OS, unsigned indent) const override {
      OS.indent(indent) << "AutoreleasingWritebackComponent()\n";
    }
  };
} // end anonymous namespace
/// Classify a pointer-conversion target type: immutable pointer kinds only
/// need a borrowed read of the converted storage, mutable kinds need
/// read-write access.
SILGenFunction::PointerAccessInfo
SILGenFunction::getPointerAccessInfo(Type type) {
  PointerTypeKind pointerKind;
  Type elt = type->getAnyPointerElementType(pointerKind);
  assert(elt && "not a pointer");
  (void)elt;
  SGFAccessKind accessKind =
    ((pointerKind == PTK_UnsafePointer || pointerKind == PTK_UnsafeRawPointer)
       ? SGFAccessKind::BorrowedAddressRead : SGFAccessKind::ReadWrite);
  return { type->getCanonicalType(), pointerKind, accessKind };
}
/// Emit an inout-to-pointer conversion (&x passed to a pointer parameter).
RValue RValueEmitter::visitInOutToPointerExpr(InOutToPointerExpr *E,
                                              SGFContext C) {
  // If we're converting on the behalf of an
  // AutoreleasingUnsafeMutablePointer, convert the lvalue to
  // unowned(unsafe), so we can point at +0 storage.
  auto accessInfo = SGF.getPointerAccessInfo(E->getType());
  // Get the original lvalue.
  LValue lv = SGF.emitLValue(E->getSubExpr(), accessInfo.AccessKind);
  auto ptr = SGF.emitLValueToPointer(E, std::move(lv), accessInfo);
  return RValue(SGF, E, ptr);
}
/// Convert an l-value to a pointer type: unsafe, unsafe-mutable, or
/// autoreleasing-unsafe-mutable.
///
/// Reabstracts the l-value if needed, takes its address as a raw pointer,
/// and runs the library conversion intrinsic to build the typed pointer.
ManagedValue SILGenFunction::emitLValueToPointer(SILLocation loc, LValue &&lv,
                                                PointerAccessInfo pointerInfo) {
  assert(pointerInfo.AccessKind == lv.getAccessKind());
  // The incoming lvalue should be at the abstraction level of T in
  // Unsafe*Pointer<T>. Reabstract it if necessary.
  auto opaqueTy = AbstractionPattern::getOpaque();
  auto loweredTy = getLoweredType(opaqueTy, lv.getSubstFormalType());
  if (lv.getTypeOfRValue().getASTType() != loweredTy.getASTType()) {
    lv.addSubstToOrigComponent(opaqueTy, loweredTy);
  }
  switch (pointerInfo.PointerKind) {
  case PTK_UnsafeMutablePointer:
  case PTK_UnsafePointer:
  case PTK_UnsafeMutableRawPointer:
  case PTK_UnsafeRawPointer:
    // +1 is fine.
    break;
  case PTK_AutoreleasingUnsafeMutablePointer: {
    // Set up a writeback through a +0 buffer.
    // The l-value is rewritten to an @unmanaged view so the pointee is
    // stored without a retain; see AutoreleasingWritebackComponent.
    LValueTypeData typeData = lv.getTypeData();
    auto rvalueType = CanUnmanagedStorageType::get(typeData.TypeOfRValue);
    LValueTypeData unownedTypeData(
      lv.getAccessKind(),
      AbstractionPattern(
        typeData.OrigFormalType.getGenericSignature(),
        CanUnmanagedStorageType::get(typeData.OrigFormalType.getType())),
      CanUnmanagedStorageType::get(typeData.SubstFormalType),
      rvalueType);
    lv.add<AutoreleasingWritebackComponent>(unownedTypeData);
    break;
  }
  }
  // Get the lvalue address as a raw pointer.
  SILValue address =
    emitAddressOfLValue(loc, std::move(lv)).getUnmanagedValue();
  address = B.createAddressToPointer(loc, address,
                               SILType::getRawPointerType(getASTContext()));
  // Disable nested writeback scopes for any calls evaluated during the
  // conversion intrinsic.
  InOutConversionScope scope(*this);
  // Invoke the conversion intrinsic.
  FuncDecl *converter =
    getASTContext().getConvertInOutToPointerArgument();
  auto pointerType = pointerInfo.PointerType;
  auto subMap = pointerType->getContextSubstitutionMap(SGM.M.getSwiftModule(),
                                                       getPointerProtocol());
  return emitApplyOfLibraryIntrinsic(loc, converter, subMap,
                                     ManagedValue::forUnmanaged(address),
                                     SGFContext())
           .getAsSingleValue(*this, loc);
}
/// Emit an array-to-pointer conversion (Array passed to a pointer
/// parameter), taking the array's address when it is passed inout.
RValue RValueEmitter::visitArrayToPointerExpr(ArrayToPointerExpr *E,
                                              SGFContext C) {
  FormalEvaluationScope writeback(SGF);
  auto subExpr = E->getSubExpr();
  auto accessInfo = SGF.getArrayAccessInfo(E->getType(),
                                     subExpr->getType()->getInOutObjectType());
  // Convert the array mutably if it's being passed inout.
  ManagedValue array;
  if (accessInfo.AccessKind == SGFAccessKind::ReadWrite) {
    array = SGF.emitAddressOfLValue(subExpr,
                                SGF.emitLValue(subExpr, SGFAccessKind::ReadWrite));
  } else {
    assert(isReadAccess(accessInfo.AccessKind));
    array = SGF.emitRValueAsSingleValue(subExpr);
  }
  // The conversion returns (owner, pointer); only the pointer is the result.
  auto pointer = SGF.emitArrayToPointer(E, array, accessInfo).first;
  return RValue(SGF, E, pointer);
}
/// Compute how an array must be accessed for a conversion to the given
/// pointer type; the access kind is inherited from the pointer conversion.
SILGenFunction::ArrayAccessInfo
SILGenFunction::getArrayAccessInfo(Type pointerType, Type arrayType) {
  SGFAccessKind accessKind = getPointerAccessInfo(pointerType).AccessKind;
  return { pointerType, arrayType, accessKind };
}
/// L-value overload: take the array's address, then defer to the
/// ManagedValue overload to run the conversion intrinsic.
std::pair<ManagedValue, ManagedValue>
SILGenFunction::emitArrayToPointer(SILLocation loc, LValue &&lv,
                                   ArrayAccessInfo accessInfo) {
  return emitArrayToPointer(loc, emitAddressOfLValue(loc, std::move(lv)),
                            accessInfo);
}
/// Convert an array value to a pointer via the library conversion
/// intrinsics, returning the (pointer, owner) pair. The pointer's lifetime
/// is tied to the owner with mark_dependence.
std::pair<ManagedValue, ManagedValue>
SILGenFunction::emitArrayToPointer(SILLocation loc, ManagedValue array,
                                   ArrayAccessInfo accessInfo) {
  auto &ctx = getASTContext();
  FuncDecl *converter;
  if (accessInfo.AccessKind != SGFAccessKind::ReadWrite) {
    assert(isReadAccess(accessInfo.AccessKind));
    converter = ctx.getConvertConstArrayToPointerArgument();
    // The const intrinsic takes the array by value; load it if we were
    // handed an address.
    if (array.isLValue())
      array = B.createLoadCopy(loc, array);
  } else {
    converter = ctx.getConvertMutableArrayToPointerArgument();
    assert(array.isLValue());
  }
  // Invoke the conversion intrinsic, which will produce an owner-pointer pair.
  auto *M = SGM.M.getSwiftModule();
  // Substitutions combine the array's element binding with the target
  // pointer type's binding to match the converter's generic signature.
  auto firstSubMap =
      accessInfo.ArrayType->getContextSubstitutionMap(M, ctx.getArrayDecl());
  auto secondSubMap = accessInfo.PointerType->getContextSubstitutionMap(
      M, getPointerProtocol());
  auto genericSig = converter->getGenericSignature();
  auto subMap = SubstitutionMap::combineSubstitutionMaps(
      firstSubMap, secondSubMap, CombineSubstitutionMaps::AtIndex, 1, 0,
      genericSig);
  SmallVector<ManagedValue, 2> resultScalars;
  emitApplyOfLibraryIntrinsic(loc, converter, subMap, array, SGFContext())
    .getAll(resultScalars);
  assert(resultScalars.size() == 2);
  // Mark the dependence of the pointer on the owner value.
  auto owner = resultScalars[0];
  auto pointer = resultScalars[1].forward(*this);
  pointer = B.createMarkDependence(loc, pointer, owner.getValue());
  // The owner's already in its own cleanup.  Return the pointer.
  return {ManagedValue::forTrivialObjectRValue(pointer), owner};
}
/// Emit a string-to-pointer conversion (String passed to a UTF-8 pointer
/// parameter).
RValue RValueEmitter::visitStringToPointerExpr(StringToPointerExpr *E,
                                               SGFContext C) {
  // Get the original value.
  ManagedValue orig = SGF.emitRValueAsSingleValue(E->getSubExpr());
  // Perform the conversion.
  auto results = SGF.emitStringToPointer(E, orig, E->getType());
  // Implicitly leave the owner managed and return the pointer.
  return RValue(SGF, E, results.first);
}
/// Convert a String value to a UTF-8 pointer via the library intrinsic,
/// returning the (pointer, owner) pair with the pointer's lifetime tied to
/// the owner through mark_dependence.
std::pair<ManagedValue, ManagedValue>
SILGenFunction::emitStringToPointer(SILLocation loc, ManagedValue stringValue,
                                    Type pointerType) {
  auto &Ctx = getASTContext();
  FuncDecl *converter = Ctx.getConvertConstStringToUTF8PointerArgument();
  // Invoke the conversion intrinsic, which will produce an owner-pointer pair.
  auto subMap = pointerType->getContextSubstitutionMap(SGM.M.getSwiftModule(),
                                                       getPointerProtocol());
  SmallVector<ManagedValue, 2> results;
  emitApplyOfLibraryIntrinsic(loc, converter, subMap, stringValue, SGFContext())
    .getAll(results);
  assert(results.size() == 2);
  // Mark the dependence of the pointer on the owner value.
  auto owner = results[0];
  auto pointer = results[1].forward(*this);
  pointer = B.createMarkDependence(loc, pointer, owner.getValue());
  return {ManagedValue::forTrivialObjectRValue(pointer), owner};
}
/// Emit a conversion between pointer types via the library intrinsic.
RValue RValueEmitter::visitPointerToPointerExpr(PointerToPointerExpr *E,
                                                SGFContext C) {
  auto &Ctx = SGF.getASTContext();
  auto converter = Ctx.getConvertPointerToPointerArgument();
  // Get the original pointer value, abstracted to the converter function's
  // expected level.
  AbstractionPattern origTy(converter->getInterfaceType());
  origTy = origTy.getFunctionParamType(0);
  CanType inputTy = E->getSubExpr()->getType()->getCanonicalType();
  auto &origTL = SGF.getTypeLowering(origTy, inputTy);
  ManagedValue orig = SGF.emitRValueAsOrig(E->getSubExpr(), origTy, origTL);
  CanType outputTy = E->getType()->getCanonicalType();
  return SGF.emitPointerToPointer(E, orig, inputTy, outputTy, C);
}
/// Emit a foreign-object conversion as an unchecked reference cast.
RValue RValueEmitter::visitForeignObjectConversionExpr(
         ForeignObjectConversionExpr *E,
         SGFContext C) {
  // Get the original value.
  ManagedValue orig = SGF.emitRValueAsSingleValue(E->getSubExpr());
  ManagedValue result = SGF.B.createUncheckedRefCast(
                        E, orig, SGF.getLoweredType(E->getType()));
  return RValue(SGF, E, E->getType()->getCanonicalType(), result);
}
/// UnevaluatedInstanceExpr only appears in positions that are consumed
/// without evaluation; reaching SILGen with one is a compiler bug.
RValue RValueEmitter::visitUnevaluatedInstanceExpr(UnevaluatedInstanceExpr *E,
                                                   SGFContext C) {
  llvm_unreachable("unevaluated_instance expression can never be evaluated");
}
/// Wrap an ordinary function value into a @differentiable function value.
RValue RValueEmitter::visitDifferentiableFunctionExpr(
    DifferentiableFunctionExpr *E, SGFContext C) {
  auto origFunc = SGF.emitRValueAsSingleValue(E->getSubExpr());
  auto destTy = SGF.getLoweredType(E->getType()).castTo<SILFunctionType>();
  // The original function is forwarded (+1) into the new value.
  auto *diffFunc = SGF.B.createDifferentiableFunction(
      E, destTy->getDifferentiabilityParameterIndices(),
      destTy->getDifferentiabilityResultIndices(), origFunc.forward(SGF));
  return RValue(SGF, E, SGF.emitManagedRValueWithCleanup(diffFunc));
}
/// Wrap an ordinary function value into a @differentiable(linear) function
/// value.
RValue RValueEmitter::visitLinearFunctionExpr(
    LinearFunctionExpr *E, SGFContext C) {
  auto origFunc = SGF.emitRValueAsSingleValue(E->getSubExpr());
  auto destTy = SGF.getLoweredType(E->getType()).castTo<SILFunctionType>();
  // The original function is forwarded (+1) into the new value.
  auto *diffFunc = SGF.B.createLinearFunction(
      E, destTy->getDifferentiabilityParameterIndices(), origFunc.forward(SGF));
  return RValue(SGF, E, SGF.emitManagedRValueWithCleanup(diffFunc));
}
/// Extract (and copy) the original function from a @differentiable
/// function value.
RValue RValueEmitter::visitDifferentiableFunctionExtractOriginalExpr(
    DifferentiableFunctionExtractOriginalExpr *E, SGFContext C) {
  auto diffFunc = SGF.emitRValueAsSingleValue(E->getSubExpr());
  // Extraction operates on a borrow; copy the result to get an owned value.
  auto borrowedDiffFunc = diffFunc.borrow(SGF, E);
  auto *borrowedOrigFunc = SGF.B.createDifferentiableFunctionExtractOriginal(
      E, borrowedDiffFunc.getValue());
  auto ownedOrigFunc = SGF.B.emitCopyValueOperation(E, borrowedOrigFunc);
  return RValue(SGF, E, SGF.emitManagedRValueWithCleanup(ownedOrigFunc));
}
/// Extract (and copy) the original function from a @differentiable(linear)
/// function value.
RValue RValueEmitter::visitLinearFunctionExtractOriginalExpr(
    LinearFunctionExtractOriginalExpr *E, SGFContext C) {
  auto diffFunc = SGF.emitRValueAsSingleValue(E->getSubExpr());
  // Extraction operates on a borrow; copy the result to get an owned value.
  auto borrowedDiffFunc = diffFunc.borrow(SGF, E);
  auto *borrowedOrigFunc = SGF.B.createLinearFunctionExtract(
      E, LinearDifferentiableFunctionTypeComponent::Original,
      borrowedDiffFunc.getValue());
  auto ownedOrigFunc = SGF.B.emitCopyValueOperation(E, borrowedOrigFunc);
  return RValue(SGF, E, SGF.emitManagedRValueWithCleanup(ownedOrigFunc));
}
/// Conversion from @differentiable(linear) to @differentiable is not yet
/// implemented in SILGen.
RValue RValueEmitter::visitLinearToDifferentiableFunctionExpr(
    LinearToDifferentiableFunctionExpr *E, SGFContext C) {
  // TODO: Implement this.
  llvm_unreachable("Unsupported!");
}
/// Emit a TapExpr: bind the subexpression to the tap's variable, run the
/// body (which may mutate it), and return the variable's final value.
RValue RValueEmitter::visitTapExpr(TapExpr *E, SGFContext C) {
  // This implementation is not very robust; if TapExpr were to ever become
  // user-accessible (as some sort of "with" statement), it should probably
  // permit a full pattern binding, saving the unused parts and "re-structuring"
  // them to return the modified value.
  auto Var = E->getVar();
  auto VarType = E->getType()->getCanonicalType();
  Scope outerScope(SGF, CleanupLocation(E));
  // Initialize the var with our SubExpr.
  auto VarInit =
    SGF.emitInitializationForVarDecl(Var, /*forceImmutable=*/false);
  SGF.emitExprInto(E->getSubExpr(), VarInit.get(), SILLocation(E));
  // Emit the body and let it mutate the var if it chooses.
  SGF.emitStmt(E->getBody());
  // Retrieve and return the var, making it +1 so it survives the scope.
  auto result = SGF.emitRValueForDecl(SILLocation(E), Var,
                                      VarType, AccessSemantics::Ordinary, C);
  result = std::move(result).ensurePlusOne(SGF, SILLocation(E));
  return outerScope.popPreservingValue(std::move(result));
}
/// Caller-side default arguments (e.g. magic literals such as #file/#line)
/// are the only default arguments emitted through this path; the rest are
/// handled specially by call emission.
RValue RValueEmitter::visitDefaultArgumentExpr(DefaultArgumentExpr *E,
                                               SGFContext C) {
  Expr *callerSideExpr = E->getCallerSideDefaultExpr();
  return SGF.emitRValue(callerSideExpr);
}
/// An ErrorExpr surviving to SILGen means lazy typechecking failed
/// somewhere. If a diagnostic was already produced, recover by emitting an
/// undef of the expression's type; otherwise this is a compiler bug.
RValue RValueEmitter::visitErrorExpr(ErrorExpr *E, SGFContext C) {
  bool alreadyDiagnosed = SGF.getASTContext().Diags.hadAnyError();
  if (!alreadyDiagnosed) {
    // Use report_fatal_error so we trap in release builds instead of
    // miscompiling.
    llvm::report_fatal_error("Found an ErrorExpr but didn't emit an error?");
  }
  return SGF.emitUndefRValue(E, E->getType());
}
/// Emit an expression as an r-value; l-typed expressions must instead go
/// through emitLValue.
RValue SILGenFunction::emitRValue(Expr *E, SGFContext C) {
  assert(!E->getType()->hasLValueType() &&
         "l-values must be emitted with emitLValue");
  return RValueEmitter(*this).visit(E, C);
}
/// Emit an expression as a +1 (owned) r-value, popping the temporary scope
/// while preserving the result value.
RValue SILGenFunction::emitPlusOneRValue(Expr *E, SGFContext C) {
  Scope S(*this, SILLocation(E));
  assert(!E->getType()->hasLValueType() &&
         "l-values must be emitted with emitLValue");
  return S.popPreservingValue(
      RValueEmitter(*this).visit(E, C.withSubExprSideEffects()));
}
/// Emit an expression as a +0 (borrowed) r-value.
RValue SILGenFunction::emitPlusZeroRValue(Expr *E) {
  // Check if E is a case that we know how to emit at plus zero. If so, handle
  // it here.
  //
  // TODO: Fill this in.
  // Otherwise, we go through the +1 path and borrow the result.
  return emitPlusOneRValue(E).borrow(*this, SILLocation(E));
}
// Evaluate the expression as an lvalue or rvalue, discarding the result.
// Several peepholes avoid materializing values whose only purpose would be
// to be thrown away: tuple elements are ignored recursively, loads are
// reduced to their side effects, and force-unwraps of l-values emit only
// the nil-check precondition.
void SILGenFunction::emitIgnoredExpr(Expr *E) {
  // If this is a tuple expression, recursively ignore its elements.
  // This may let us recursively avoid work.
  if (auto *TE = dyn_cast<TupleExpr>(E)) {
    for (auto *elt : TE->getElements())
      emitIgnoredExpr(elt);
    return;
  }
  // TODO: Could look through arbitrary implicit conversions that don't have
  // side effects, or through tuple shuffles, by emitting ignored default
  // arguments.
  FullExpr scope(Cleanups, CleanupLocation(E));
  if (E->getType()->hasLValueType()) {
    // Emit the l-value, but don't perform an access.
    FormalEvaluationScope scope(*this);
    emitLValue(E, SGFAccessKind::IgnoredRead);
    return;
  }
  // If this is a load expression, we try hard not to actually do the load
  // (which could materialize a potentially expensive value with cleanups).
  if (auto *LE = dyn_cast<LoadExpr>(E)) {
    FormalEvaluationScope scope(*this);
    LValue lv = emitLValue(LE->getSubExpr(), SGFAccessKind::IgnoredRead);
    // If loading from the lvalue is guaranteed to have no side effects, we
    // don't need to drill into it.
    if (lv.isLoadingPure())
      return;
    // If the last component is physical, then we just need to drill through
    // side effects in the lvalue, but don't need to perform the final load.
    if (lv.isLastComponentPhysical()) {
      emitAddressOfLValue(E, std::move(lv));
      return;
    }
    // Otherwise, we must call the ultimate getter to get its potential side
    // effect.
    emitLoadOfLValue(E, std::move(lv), SGFContext::AllowImmediatePlusZero);
    return;
  }
  // Collect any stack of force-unwraps wrapping a load, returning the load
  // (or null if the chain doesn't bottom out in one).
  auto findLoadThroughForceValueExprs = [](Expr *E,
                                           SmallVectorImpl<ForceValueExpr *>
                                             &forceValueExprs) -> LoadExpr * {
    while (auto FVE = dyn_cast<ForceValueExpr>(E)) {
      forceValueExprs.push_back(FVE);
      E = FVE->getSubExpr();
    }
    return dyn_cast<LoadExpr>(E);
  };
  // Look through force unwrap(s) of an lvalue. If possible, we want to just
  // emit the precondition(s) without having to load the value.
  SmallVector<ForceValueExpr *, 4> forceValueExprs;
  if (auto *LE = findLoadThroughForceValueExprs(E, forceValueExprs)) {
    FormalEvaluationScope scope(*this);
    LValue lv = emitLValue(LE->getSubExpr(), SGFAccessKind::IgnoredRead);
    ManagedValue value;
    if (lv.isLastComponentPhysical()) {
      value = emitAddressOfLValue(LE, std::move(lv));
    } else {
      value = emitLoadOfLValue(LE, std::move(lv),
          SGFContext::AllowImmediatePlusZero).getAsSingleValue(*this, LE);
    }
    // Apply the unwraps innermost-first so each nil check is still emitted.
    for (auto &FVE : llvm::reverse(forceValueExprs)) {
      const TypeLowering &optTL = getTypeLowering(FVE->getSubExpr()->getType());
      bool isImplicitUnwrap = FVE->isImplicit() &&
        FVE->isForceOfImplicitlyUnwrappedOptional();
      value = emitCheckedGetOptionalValueFrom(
          FVE, value, isImplicitUnwrap, optTL, SGFContext::AllowImmediatePlusZero);
    }
    return;
  }
  // Otherwise, emit the result (to get any side effects), but produce it at +0
  // if that allows simplification.
  emitRValue(E, SGFContext::AllowImmediatePlusZero);
}
/// Emit the given expression as an r-value, then (if it is a tuple) combine
/// it into a single ManagedValue.
ManagedValue SILGenFunction::emitRValueAsSingleValue(Expr *E, SGFContext C) {
  RValue rv = emitRValue(E, C);
  return std::move(rv).getAsSingleValue(*this, E);
}
/// Build an r-value wrapping a SIL undef of the given formal type.
RValue SILGenFunction::emitUndefRValue(SILLocation loc, Type type) {
  ManagedValue undef = emitUndef(getLoweredType(type));
  return RValue(*this, loc, type->getCanonicalType(), undef);
}
/// Convenience overload: lower the formal type, then emit an undef of it.
ManagedValue SILGenFunction::emitUndef(Type type) {
  return emitUndef(getLoweredType(type));
}
/// Produce an unmanaged undef value of the given lowered type.
ManagedValue SILGenFunction::emitUndef(SILType type) {
  return ManagedValue::forUnmanaged(SILUndef::get(type, F));
}
| apache-2.0 |
yuri0x7c1/ofbiz-explorer | src/test/resources/apache-ofbiz-17.12.04/framework/entity/src/main/java/org/apache/ofbiz/entity/condition/EntityDateFilterCondition.java | 7627 | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.entity.condition;
import java.sql.Timestamp;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.apache.ofbiz.base.util.UtilDateTime;
import org.apache.ofbiz.entity.Delegator;
import org.apache.ofbiz.entity.GenericModelException;
import org.apache.ofbiz.entity.config.model.Datasource;
import org.apache.ofbiz.entity.model.ModelEntity;
/**
 * Date-range condition.
 *
 * <p>Filters entities that themselves represent a date range (a fromDate/thruDate
 * pair) down to those that are "active" at the moment the condition is rendered:
 * thruDate is null or after "now", and fromDate is null or not after "now".
 */
@SuppressWarnings("serial")
public final class EntityDateFilterCondition extends EntityCondition {

    /** Name of the field holding the start of the entity's validity range. */
    private final String fromDateName;
    /** Name of the field holding the end of the entity's validity range. */
    private final String thruDateName;

    /**
     * Creates a date filter over the given pair of date fields.
     *
     * @param fromDateName name of the "from" date field, e.g. "fromDate"
     * @param thruDateName name of the "thru" date field, e.g. "thruDate"
     */
    public EntityDateFilterCondition(String fromDateName, String thruDateName) {
        this.fromDateName = fromDateName;
        this.thruDateName = thruDateName;
    }

    @Override
    public boolean isEmpty() {
        return false;
    }

    @Override
    public String makeWhereString(ModelEntity modelEntity, List<EntityConditionParam> entityConditionParams, Datasource datasourceInfo) {
        // Build the condition lazily so "now" is evaluated at query time.
        EntityCondition condition = makeCondition();
        return condition.makeWhereString(modelEntity, entityConditionParams, datasourceInfo);
    }

    @Override
    public void checkCondition(ModelEntity modelEntity) throws GenericModelException {
        EntityCondition condition = makeCondition();
        condition.checkCondition(modelEntity);
    }

    @Override
    public boolean mapMatches(Delegator delegator, Map<String, ? extends Object> map) {
        EntityCondition condition = makeCondition();
        return condition.mapMatches(delegator, map);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is sufficient here because the class is final.
        if (!(obj instanceof EntityDateFilterCondition)) {
            return false;
        }
        EntityDateFilterCondition other = (EntityDateFilterCondition) obj;
        // Objects.equals is null-safe, like the inherited helper it replaces.
        return Objects.equals(fromDateName, other.fromDateName)
                && Objects.equals(thruDateName, other.thruDateName);
    }

    @Override
    public int hashCode() {
        // XOR combination retained from the historical implementation;
        // Objects.hashCode is null-safe.
        return Objects.hashCode(fromDateName) ^ Objects.hashCode(thruDateName);
    }

    @Override
    public void visit(EntityConditionVisitor visitor) {
        visitor.acceptEntityDateFilterCondition(this);
    }

    @Override
    public void accept(EntityConditionVisitor visitor) {
        visitor.acceptEntityDateFilterCondition(this);
    }

    @Override
    public EntityCondition freeze() {
        // Immutable already; freezing is the identity.
        return this;
    }

    /** Builds the concrete condition for the current timestamp. */
    protected EntityCondition makeCondition() {
        return makeCondition(UtilDateTime.nowTimestamp(), fromDateName, thruDateName);
    }

    /**
     * Builds a condition matching entities active at the given moment:
     * (thruDate is null OR thruDate &gt; moment) AND (fromDate is null OR fromDate &lt;= moment).
     *
     * @param moment the instant at which the entity must be active
     * @param fromDateName name of the "from" date field
     * @param thruDateName name of the "thru" date field
     * @return the composed condition
     */
    public static EntityExpr makeCondition(Timestamp moment, String fromDateName, String thruDateName) {
        return EntityCondition.makeCondition(
            EntityCondition.makeCondition(
                EntityCondition.makeCondition(thruDateName, EntityOperator.EQUALS, null),
                EntityOperator.OR,
                EntityCondition.makeCondition(thruDateName, EntityOperator.GREATER_THAN, moment)
            ),
            EntityOperator.AND,
            EntityCondition.makeCondition(
                EntityCondition.makeCondition(fromDateName, EntityOperator.EQUALS, null),
                EntityOperator.OR,
                EntityCondition.makeCondition(fromDateName, EntityOperator.LESS_THAN_EQUAL_TO, moment)
            )
        );
    }

    /**
     * Creates an EntityCondition representing a date range filter query to be used against
     * entities that themselves represent a date range. When used the resulting entities
     * will meet at least one of the following criteria:
     * - fromDate is equal to or after rangeStart but before rangeEnd
     * - thruDate is equal to or after rangeStart but before rangeEnd
     * - fromDate is null and thruDate is equal to or after rangeStart
     * - thruDate is null and fromDate is before rangeEnd
     * - fromDate is null and thruDate is null
     *
     * @param rangeStart The start of the range to filter against
     * @param rangeEnd The end of the range to filter against
     * @param fromDateName The name of the field containing the entity's "fromDate"
     * @param thruDateName The name of the field containing the entity's "thruDate"
     * @return EntityCondition representing the date range filter
     */
    public static EntityCondition makeRangeCondition(Timestamp rangeStart, Timestamp rangeEnd, String fromDateName, String thruDateName) {
        List<EntityCondition> criteria = new LinkedList<>();
        // fromDate is equal to or after rangeStart but before rangeEnd
        criteria.add(
            EntityCondition.makeCondition(
                EntityCondition.makeCondition(fromDateName, EntityOperator.GREATER_THAN_EQUAL_TO, rangeStart),
                EntityOperator.AND,
                EntityCondition.makeCondition(fromDateName, EntityOperator.LESS_THAN, rangeEnd)
            )
        );
        // thruDate is equal to or after rangeStart but before rangeEnd
        criteria.add(
            EntityCondition.makeCondition(
                EntityCondition.makeCondition(thruDateName, EntityOperator.GREATER_THAN_EQUAL_TO, rangeStart),
                EntityOperator.AND,
                EntityCondition.makeCondition(thruDateName, EntityOperator.LESS_THAN, rangeEnd)
            )
        );
        // fromDate is null and thruDate is equal to or after rangeStart
        criteria.add(
            EntityCondition.makeCondition(
                EntityCondition.makeCondition(fromDateName, EntityOperator.EQUALS, null),
                EntityOperator.AND,
                EntityCondition.makeCondition(thruDateName, EntityOperator.GREATER_THAN_EQUAL_TO, rangeStart)
            )
        );
        // thruDate is null and fromDate is before rangeEnd
        criteria.add(
            EntityCondition.makeCondition(
                EntityCondition.makeCondition(thruDateName, EntityOperator.EQUALS, null),
                EntityOperator.AND,
                EntityCondition.makeCondition(fromDateName, EntityOperator.LESS_THAN, rangeEnd)
            )
        );
        // fromDate is null and thruDate is null
        criteria.add(
            EntityCondition.makeCondition(
                EntityCondition.makeCondition(thruDateName, EntityOperator.EQUALS, null),
                EntityOperator.AND,
                EntityCondition.makeCondition(fromDateName, EntityOperator.EQUALS, null)
            )
        );
        // require at least one of the above to be true
        return EntityCondition.makeCondition(criteria, EntityOperator.OR);
    }
}
| apache-2.0 |
alanzw/sharerender | Modules/LibCore/CommandRecorder.cpp | 4119 | #include "CommandRecorder.h"
#include "Opcode.h"
#include <algorithm>
using namespace std;
#include <iostream>
using namespace cg;
using namespace cg::core;
// Human-readable names for the D3D9/DirectInput command opcodes, indexed by
// opcode id and used by print_info() for the statistics report.
// NOTE(review): only 100 entries fit here, but the stats table is sized/iterated
// for 1010 opcode ids elsewhere in this file -- looking up mp[id] for id >= 100
// would read out of bounds; confirm the real opcode range against Opcode.h.
char mp[100][100] = {
	"CreateDevice",
	"BeginScene",
	"EndScene",
	"Clear",
	"Present",
	"SetTransform",
	"SetRenderState",
	"SetStreamSource",
	"SetFVF",
	"DrawPrimitive",
	"DrawIndexedPrimitive",
	"CreateVertexBuffer",
	"VertexBufferLock",
	"VertexBufferUnlock",
	"SetIndices",
	"CreateIndexBuffer",
	"IndexBufferLock",
	"IndexBufferUnlock",
	"SetSamplerState",
	"CreateVertexDeclaration",
	"SetVertexDeclaration",
	"SetSoftwareVertexProcessing",
	"SetLight",
	"LightEnable",
	"CreateVertexShader",
	"SetVertexShader",
	"SetVertexShaderConstantF",
	"CreatePixelShader",
	"SetPixelShader",
	"SetPixelShaderConstantF",
	"DrawPrimitiveUP",
	"DrawIndexedPrimitiveUP",
	"SetVertexShaderConstantI",
	"SetVertexShaderConstantB",
	"SetPixelShaderConstantI",
	"SetPixelShaderConstantB",
	"Reset",
	"SetMaterial",
	"CreateTexture",
	"SetTexture",
	"SetTextureStageState",
	"TransmitTextureData",
	"CreateStateBlock",
	"BeginStateBlock",
	"EndStateBlock",
	"StateBlockCapture",
	"StateBlockApply",
	"DeviceAddRef",
	"DeviceRelease",
	"SetViewport",
	"SetNPatchMode",
	"CreateCubeTexture",
	"SetCubeTexture",
	"GetSwapChain",
	"SwapChainPresent",
	"TextureSetAutoGenFilterType",
	"TextureGenerateMipSubLevels",
	"SetRenderTarget",
	"SetDepthStencilSurface",
	"TextureGetSurfaceLevel",
	"SwapChainGetBackBuffer",
	"GetDepthStencilSurface",
	"CreateDepthStencilSurface",
	"CubeGetCubeMapSurface",
	"DIConfigureDevices",
	"DICreateDevice",
	"DIGetDeviceStatus",
	"DIRunControlPanel",
	"DIDAcquire",
	"DIDBuildActionMap",
	"DIDCreateEffect",
	"DIDEnumCreateEffectObjects",
	"DIDEnumEffects",
	"DIDEscape",
	"DIDGetCapabilities",
	"DIDGetDeviceData",
	"DIDGetDeviceInfo",
	"DIDGetDeviceState",
	"DIDRunControlPanel",
	"DIDSetActionMap",
	"DIDSetCooperativeLevel",
	"DIDSetDataFormat",
	"DIDUnacquire",
	"CreateWindow",
	"DirectCreate",
	"DirectInputCreate",
	"DIDAddRef",
	"DIDRelease",
	"DIDSetProperty",
	"TransmitSurfaceData",
	"D3DDeviceGetBackBuffer",
	"D3DGetDeviceCaps",
	"D3DDGetRenderTarget",
	"D3DDSetScissorRect",
	"SetVertexBufferFormat",
	"SetDecimateResult"
};
bool cmp_by_cnt(RecordType A, RecordType B) {
return A.count > B.count;
}
bool cmp_by_len(RecordType A, RecordType B) {
return A.length > B.length;
}
// Zeroes all statistics and stamps each table slot with its opcode id.
CommandRecorder::CommandRecorder() {
	//infoRecorder->logTrace("Recorder::Recorder() called\n");
	// Clear every per-opcode counter before assigning ids.
	memset(info, 0, sizeof info);
	// NOTE(review): assumes info[] has at least 1010 entries (print_info also
	// sorts info..info+1010) -- confirm against the declaration in the header.
	for(int i=0; i<1010; ++i) info[i].id = i;
	cmd_cnt = 0;    // total commands recorded so far
	inst_cnt = 0;   // instruction counter (not updated in this file)
	frame_cnt = 0;  // frames seen, counted via BeginScene in add_record()
	frame_len = 0;  // per-frame length accumulator (not updated in this file)
	total_len = 0;  // total bytes accumulated via add_lengh()
}
// Records one serialization-cache hit for the given opcode.
void CommandRecorder::cache_hit(int op_code) {
	++info[op_code].hit_cnt;
}
// Records one serialization-cache miss for the given opcode.
void CommandRecorder::cache_miss(int op_code) {
	++info[op_code].miss_cnt;
}
// Accounts one serialized command: bumps the opcode's occurrence count and
// byte length, and advances the frame counter on every BeginScene marker.
void CommandRecorder::add_record(int op_code, int len) {
	//if(op_code == VertexBufferUnlock_Opcode || op_code == SetVertexShaderConstantF_Opcode || op_code == SetPixelShaderConstantF_Opcode)
	//return;
	info[op_code].count++;        // one more occurrence of this opcode
	info[op_code].length += len;  // accumulate serialized size in bytes
	cmd_cnt++;
	// BeginScene delimits a rendered frame, so use it to count frames.
	if(op_code == BeginScene_Opcode) {
		frame_cnt++;
	}
}
// Adds `length` bytes to the running total of transmitted data.
// NOTE(review): the name is a typo for add_length; kept unchanged because
// external callers reference this exact symbol.
void CommandRecorder::add_lengh(int length) {
	total_len += length;
}
// Writes the per-opcode statistics, sorted by total byte length, to
// stat_command.log. Fixes: the fopen() result was never checked (a failed
// open crashed on the first fprintf), mp[] was indexed with ids up to 1009
// although it has only 100 entries (out-of-bounds read), and the per-frame
// averages divided by frame_cnt without checking it is non-zero.
void CommandRecorder::print_info() {
	sort(info, info + 1010, cmp_by_len);
	//sort(info, info + 1010, cmp_by_cnt);
	FILE* f = fopen("stat_command.log", "w");
	if(f == NULL) {
		// Cannot open the report file; skip reporting rather than crash.
		return;
	}
	fprintf(f, "=====================================================\nCommand Statistic\n");
	fprintf(f, "frame count:\t%I64d\n", frame_cnt);
	fprintf(f, "total length:\t%I64d\n", total_len);
	if(frame_cnt > 0) {
		fprintf(f, "size per frame:\t%.2lf\n", total_len * 1.0 / frame_cnt);
		fprintf(f, "cmd_cnt per frame:\t%.2lf\n", cmd_cnt * 1.0 / frame_cnt);
	}
	for(int i=0; i<1010; ++i) {
		// Entries are sorted descending, so the first zero-count entry ends the report.
		if(info[i].count == 0) break;
		// mp[] only names the first 100 opcodes; avoid reading past the table.
		const char* name = (info[i].id >= 0 && info[i].id < 100) ? mp[info[i].id] : "Unknown";
		fprintf(f, "op_code=%d, name=%s, cache hit=%.2lf%%, length=%I64d, count=%d, ave=%.3lf\n",
			info[i].id,
			name,
			info[i].hit_cnt ? (info[i].hit_cnt * 100.0 / (info[i].hit_cnt + info[i].miss_cnt)) : 0,
			info[i].length, info[i].count, info[i].length * 1.0f / info[i].count);
	}
	fprintf(f, "=====================================================\n");
	fclose(f);
}
| apache-2.0 |
eug48/hapi-fhir | hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/model/Meta.java | 24202 | package org.hl7.fhir.dstu3.model;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Mon, Apr 17, 2017 17:38-0400 for FHIR v3.0.1
import java.util.*;
import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.dstu3.model.Enumerations.*;
import ca.uhn.fhir.model.api.annotation.Child;
import ca.uhn.fhir.model.api.annotation.ChildOrder;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.api.annotation.DatatypeDef;
import ca.uhn.fhir.model.api.annotation.Block;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.exceptions.FHIRException;
/**
* The metadata about a resource. This is content in the resource that is maintained by the infrastructure. Changes to the content may not always be associated with version changes to the resource.
*/
@DatatypeDef(name="Meta")
public class Meta extends Type implements IBaseMetaType {
/**
* The version specific identifier, as it appears in the version portion of the URL. This values changes when the resource is created, updated, or deleted.
*/
@Child(name = "versionId", type = {IdType.class}, order=0, min=0, max=1, modifier=false, summary=true)
@Description(shortDefinition="Version specific identifier", formalDefinition="The version specific identifier, as it appears in the version portion of the URL. This values changes when the resource is created, updated, or deleted." )
protected IdType versionId;
/**
* When the resource last changed - e.g. when the version changed.
*/
@Child(name = "lastUpdated", type = {InstantType.class}, order=1, min=0, max=1, modifier=false, summary=true)
@Description(shortDefinition="When the resource version last changed", formalDefinition="When the resource last changed - e.g. when the version changed." )
protected InstantType lastUpdated;
/**
* A list of profiles (references to [[[StructureDefinition]]] resources) that this resource claims to conform to. The URL is a reference to [[[StructureDefinition.url]]].
*/
@Child(name = "profile", type = {UriType.class}, order=2, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true)
@Description(shortDefinition="Profiles this resource claims to conform to", formalDefinition="A list of profiles (references to [[[StructureDefinition]]] resources) that this resource claims to conform to. The URL is a reference to [[[StructureDefinition.url]]]." )
protected List<UriType> profile;
/**
* Security labels applied to this resource. These tags connect specific resources to the overall security policy and infrastructure.
*/
@Child(name = "security", type = {Coding.class}, order=3, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true)
@Description(shortDefinition="Security Labels applied to this resource", formalDefinition="Security labels applied to this resource. These tags connect specific resources to the overall security policy and infrastructure." )
@ca.uhn.fhir.model.api.annotation.Binding(valueSet="http://hl7.org/fhir/ValueSet/security-labels")
protected List<Coding> security;
/**
* Tags applied to this resource. Tags are intended to be used to identify and relate resources to process and workflow, and applications are not required to consider the tags when interpreting the meaning of a resource.
*/
@Child(name = "tag", type = {Coding.class}, order=4, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true)
@Description(shortDefinition="Tags applied to this resource", formalDefinition="Tags applied to this resource. Tags are intended to be used to identify and relate resources to process and workflow, and applications are not required to consider the tags when interpreting the meaning of a resource." )
@ca.uhn.fhir.model.api.annotation.Binding(valueSet="http://hl7.org/fhir/ValueSet/common-tags")
protected List<Coding> tag;
private static final long serialVersionUID = 867134915L;
/**
* Constructor
*/
public Meta() {
super();
}
/**
* @return {@link #versionId} (The version specific identifier, as it appears in the version portion of the URL. This values changes when the resource is created, updated, or deleted.). This is the underlying object with id, value and extensions. The accessor "getVersionId" gives direct access to the value
*/
public IdType getVersionIdElement() {
if (this.versionId == null)
if (Configuration.errorOnAutoCreate())
throw new Error("Attempt to auto-create Meta.versionId");
else if (Configuration.doAutoCreate())
this.versionId = new IdType(); // bb
return this.versionId;
}
public boolean hasVersionIdElement() {
return this.versionId != null && !this.versionId.isEmpty();
}
public boolean hasVersionId() {
return this.versionId != null && !this.versionId.isEmpty();
}
/**
* @param value {@link #versionId} (The version specific identifier, as it appears in the version portion of the URL. This values changes when the resource is created, updated, or deleted.). This is the underlying object with id, value and extensions. The accessor "getVersionId" gives direct access to the value
*/
public Meta setVersionIdElement(IdType value) {
this.versionId = value;
return this;
}
/**
* @return The version specific identifier, as it appears in the version portion of the URL. This values changes when the resource is created, updated, or deleted.
*/
public String getVersionId() {
return this.versionId == null ? null : this.versionId.getValue();
}
/**
* @param value The version specific identifier, as it appears in the version portion of the URL. This values changes when the resource is created, updated, or deleted.
*/
public Meta setVersionId(String value) {
if (Utilities.noString(value))
this.versionId = null;
else {
if (this.versionId == null)
this.versionId = new IdType();
this.versionId.setValue(value);
}
return this;
}
/**
* @return {@link #lastUpdated} (When the resource last changed - e.g. when the version changed.). This is the underlying object with id, value and extensions. The accessor "getLastUpdated" gives direct access to the value
*/
public InstantType getLastUpdatedElement() {
if (this.lastUpdated == null)
if (Configuration.errorOnAutoCreate())
throw new Error("Attempt to auto-create Meta.lastUpdated");
else if (Configuration.doAutoCreate())
this.lastUpdated = new InstantType(); // bb
return this.lastUpdated;
}
public boolean hasLastUpdatedElement() {
return this.lastUpdated != null && !this.lastUpdated.isEmpty();
}
public boolean hasLastUpdated() {
return this.lastUpdated != null && !this.lastUpdated.isEmpty();
}
/**
* @param value {@link #lastUpdated} (When the resource last changed - e.g. when the version changed.). This is the underlying object with id, value and extensions. The accessor "getLastUpdated" gives direct access to the value
*/
public Meta setLastUpdatedElement(InstantType value) {
this.lastUpdated = value;
return this;
}
/**
* @return When the resource last changed - e.g. when the version changed.
*/
public Date getLastUpdated() {
return this.lastUpdated == null ? null : this.lastUpdated.getValue();
}
/**
* @param value When the resource last changed - e.g. when the version changed.
*/
public Meta setLastUpdated(Date value) {
if (value == null)
this.lastUpdated = null;
else {
if (this.lastUpdated == null)
this.lastUpdated = new InstantType();
this.lastUpdated.setValue(value);
}
return this;
}
/**
* @return {@link #profile} (A list of profiles (references to [[[StructureDefinition]]] resources) that this resource claims to conform to. The URL is a reference to [[[StructureDefinition.url]]].)
*/
public List<UriType> getProfile() {
if (this.profile == null)
this.profile = new ArrayList<UriType>();
return this.profile;
}
/**
* @return Returns a reference to <code>this</code> for easy method chaining
*/
public Meta setProfile(List<UriType> theProfile) {
this.profile = theProfile;
return this;
}
public boolean hasProfile() {
if (this.profile == null)
return false;
for (UriType item : this.profile)
if (!item.isEmpty())
return true;
return false;
}
/**
* @return {@link #profile} (A list of profiles (references to [[[StructureDefinition]]] resources) that this resource claims to conform to. The URL is a reference to [[[StructureDefinition.url]]].)
*/
public UriType addProfileElement() {//2
UriType t = new UriType();
if (this.profile == null)
this.profile = new ArrayList<UriType>();
this.profile.add(t);
return t;
}
/**
* @param value {@link #profile} (A list of profiles (references to [[[StructureDefinition]]] resources) that this resource claims to conform to. The URL is a reference to [[[StructureDefinition.url]]].)
*/
public Meta addProfile(String value) { //1
UriType t = new UriType();
t.setValue(value);
if (this.profile == null)
this.profile = new ArrayList<UriType>();
this.profile.add(t);
return this;
}
/**
* @param value {@link #profile} (A list of profiles (references to [[[StructureDefinition]]] resources) that this resource claims to conform to. The URL is a reference to [[[StructureDefinition.url]]].)
*/
public boolean hasProfile(String value) {
if (this.profile == null)
return false;
for (UriType v : this.profile)
if (v.equals(value)) // uri
return true;
return false;
}
/**
* @return {@link #security} (Security labels applied to this resource. These tags connect specific resources to the overall security policy and infrastructure.)
*/
public List<Coding> getSecurity() {
if (this.security == null)
this.security = new ArrayList<Coding>();
return this.security;
}
/**
* @return Returns a reference to <code>this</code> for easy method chaining
*/
public Meta setSecurity(List<Coding> theSecurity) {
this.security = theSecurity;
return this;
}
public boolean hasSecurity() {
if (this.security == null)
return false;
for (Coding item : this.security)
if (!item.isEmpty())
return true;
return false;
}
public Coding addSecurity() { //3
Coding t = new Coding();
if (this.security == null)
this.security = new ArrayList<Coding>();
this.security.add(t);
return t;
}
public Meta addSecurity(Coding t) { //3
if (t == null)
return this;
if (this.security == null)
this.security = new ArrayList<Coding>();
this.security.add(t);
return this;
}
/**
* @return The first repetition of repeating field {@link #security}, creating it if it does not already exist
*/
public Coding getSecurityFirstRep() {
if (getSecurity().isEmpty()) {
addSecurity();
}
return getSecurity().get(0);
}
/**
* @return {@link #tag} (Tags applied to this resource. Tags are intended to be used to identify and relate resources to process and workflow, and applications are not required to consider the tags when interpreting the meaning of a resource.)
*/
public List<Coding> getTag() {
if (this.tag == null)
this.tag = new ArrayList<Coding>();
return this.tag;
}
/**
* @return Returns a reference to <code>this</code> for easy method chaining
*/
public Meta setTag(List<Coding> theTag) {
this.tag = theTag;
return this;
}
public boolean hasTag() {
if (this.tag == null)
return false;
for (Coding item : this.tag)
if (!item.isEmpty())
return true;
return false;
}
public Coding addTag() { //3
Coding t = new Coding();
if (this.tag == null)
this.tag = new ArrayList<Coding>();
this.tag.add(t);
return t;
}
public Meta addTag(Coding t) { //3
if (t == null)
return this;
if (this.tag == null)
this.tag = new ArrayList<Coding>();
this.tag.add(t);
return this;
}
/**
* @return The first repetition of repeating field {@link #tag}, creating it if it does not already exist
*/
public Coding getTagFirstRep() {
if (getTag().isEmpty()) {
addTag();
}
return getTag().get(0);
}
/**
 * Convenience method which adds a tag
 *
 * @param theSystem The code system
 * @param theCode The code
 * @param theDisplay The display name
 * @return Returns a reference to <code>this</code> for easy chaining
 */
public Meta addTag(String theSystem, String theCode, String theDisplay) {
    Coding newTag = addTag();
    newTag.setSystem(theSystem);
    newTag.setCode(theCode);
    newTag.setDisplay(theDisplay);
    return this;
}
/**
 * Convenience method which adds a security tag
 *
 * @param theSystem The code system
 * @param theCode The code
 * @param theDisplay The display name
 * @return Returns a reference to <code>this</code> for easy chaining
 */
public Meta addSecurity(String theSystem, String theCode, String theDisplay) {
    Coding newLabel = addSecurity();
    newLabel.setSystem(theSystem);
    newLabel.setCode(theCode);
    newLabel.setDisplay(theDisplay);
    return this;
}
/**
 * Returns the first tag (if any) that has the given system and code, or returns
 * <code>null</code> if none
 */
public Coding getTag(String theSystem, String theCode) {
    for (Coding coding : getTag()) {
        boolean systemMatches = ca.uhn.fhir.util.ObjectUtil.equals(coding.getSystem(), theSystem);
        boolean codeMatches = ca.uhn.fhir.util.ObjectUtil.equals(coding.getCode(), theCode);
        if (systemMatches && codeMatches) {
            return coding;
        }
    }
    return null;
}
/**
 * Returns the first security label (if any) that has the given system and code, or returns
 * <code>null</code> if none
 */
public Coding getSecurity(String theSystem, String theCode) {
    // FIX: this previously iterated getTag() (copy-paste from getTag(String, String)),
    // so the security labels were never actually searched.
    for (Coding next : getSecurity()) {
        if (ca.uhn.fhir.util.ObjectUtil.equals(next.getSystem(), theSystem) && ca.uhn.fhir.util.ObjectUtil.equals(next.getCode(), theCode)) {
            return next;
        }
    }
    return null;
}
protected void listChildren(List<Property> childrenList) {
super.listChildren(childrenList);
childrenList.add(new Property("versionId", "id", "The version specific identifier, as it appears in the version portion of the URL. This values changes when the resource is created, updated, or deleted.", 0, java.lang.Integer.MAX_VALUE, versionId));
childrenList.add(new Property("lastUpdated", "instant", "When the resource last changed - e.g. when the version changed.", 0, java.lang.Integer.MAX_VALUE, lastUpdated));
childrenList.add(new Property("profile", "uri", "A list of profiles (references to [[[StructureDefinition]]] resources) that this resource claims to conform to. The URL is a reference to [[[StructureDefinition.url]]].", 0, java.lang.Integer.MAX_VALUE, profile));
childrenList.add(new Property("security", "Coding", "Security labels applied to this resource. These tags connect specific resources to the overall security policy and infrastructure.", 0, java.lang.Integer.MAX_VALUE, security));
childrenList.add(new Property("tag", "Coding", "Tags applied to this resource. Tags are intended to be used to identify and relate resources to process and workflow, and applications are not required to consider the tags when interpreting the meaning of a resource.", 0, java.lang.Integer.MAX_VALUE, tag));
}
@Override
public Base[] getProperty(int hash, String name, boolean checkValid) throws FHIRException {
switch (hash) {
case -1407102957: /*versionId*/ return this.versionId == null ? new Base[0] : new Base[] {this.versionId}; // IdType
case 1649733957: /*lastUpdated*/ return this.lastUpdated == null ? new Base[0] : new Base[] {this.lastUpdated}; // InstantType
case -309425751: /*profile*/ return this.profile == null ? new Base[0] : this.profile.toArray(new Base[this.profile.size()]); // UriType
case 949122880: /*security*/ return this.security == null ? new Base[0] : this.security.toArray(new Base[this.security.size()]); // Coding
case 114586: /*tag*/ return this.tag == null ? new Base[0] : this.tag.toArray(new Base[this.tag.size()]); // Coding
default: return super.getProperty(hash, name, checkValid);
}
}
@Override
public Base setProperty(int hash, String name, Base value) throws FHIRException {
switch (hash) {
case -1407102957: // versionId
this.versionId = castToId(value); // IdType
return value;
case 1649733957: // lastUpdated
this.lastUpdated = castToInstant(value); // InstantType
return value;
case -309425751: // profile
this.getProfile().add(castToUri(value)); // UriType
return value;
case 949122880: // security
this.getSecurity().add(castToCoding(value)); // Coding
return value;
case 114586: // tag
this.getTag().add(castToCoding(value)); // Coding
return value;
default: return super.setProperty(hash, name, value);
}
}
@Override
public Base setProperty(String name, Base value) throws FHIRException {
if (name.equals("versionId")) {
this.versionId = castToId(value); // IdType
} else if (name.equals("lastUpdated")) {
this.lastUpdated = castToInstant(value); // InstantType
} else if (name.equals("profile")) {
this.getProfile().add(castToUri(value));
} else if (name.equals("security")) {
this.getSecurity().add(castToCoding(value));
} else if (name.equals("tag")) {
this.getTag().add(castToCoding(value));
} else
return super.setProperty(name, value);
return value;
}
@Override
public Base makeProperty(int hash, String name) throws FHIRException {
switch (hash) {
case -1407102957: return getVersionIdElement();
case 1649733957: return getLastUpdatedElement();
case -309425751: return addProfileElement();
case 949122880: return addSecurity();
case 114586: return addTag();
default: return super.makeProperty(hash, name);
}
}
@Override
public String[] getTypesForProperty(int hash, String name) throws FHIRException {
switch (hash) {
case -1407102957: /*versionId*/ return new String[] {"id"};
case 1649733957: /*lastUpdated*/ return new String[] {"instant"};
case -309425751: /*profile*/ return new String[] {"uri"};
case 949122880: /*security*/ return new String[] {"Coding"};
case 114586: /*tag*/ return new String[] {"Coding"};
default: return super.getTypesForProperty(hash, name);
}
}
@Override
public Base addChild(String name) throws FHIRException {
if (name.equals("versionId")) {
throw new FHIRException("Cannot call addChild on a primitive type Meta.versionId");
}
else if (name.equals("lastUpdated")) {
throw new FHIRException("Cannot call addChild on a primitive type Meta.lastUpdated");
}
else if (name.equals("profile")) {
throw new FHIRException("Cannot call addChild on a primitive type Meta.profile");
}
else if (name.equals("security")) {
return addSecurity();
}
else if (name.equals("tag")) {
return addTag();
}
else
return super.addChild(name);
}
public String fhirType() {
return "Meta";
}
public Meta copy() {
Meta dst = new Meta();
copyValues(dst);
dst.versionId = versionId == null ? null : versionId.copy();
dst.lastUpdated = lastUpdated == null ? null : lastUpdated.copy();
if (profile != null) {
dst.profile = new ArrayList<UriType>();
for (UriType i : profile)
dst.profile.add(i.copy());
};
if (security != null) {
dst.security = new ArrayList<Coding>();
for (Coding i : security)
dst.security.add(i.copy());
};
if (tag != null) {
dst.tag = new ArrayList<Coding>();
for (Coding i : tag)
dst.tag.add(i.copy());
};
return dst;
}
protected Meta typedCopy() {
return copy();
}
@Override
public boolean equalsDeep(Base other) {
if (!super.equalsDeep(other))
return false;
if (!(other instanceof Meta))
return false;
Meta o = (Meta) other;
return compareDeep(versionId, o.versionId, true) && compareDeep(lastUpdated, o.lastUpdated, true)
&& compareDeep(profile, o.profile, true) && compareDeep(security, o.security, true) && compareDeep(tag, o.tag, true)
;
}
@Override
public boolean equalsShallow(Base other) {
if (!super.equalsShallow(other))
return false;
if (!(other instanceof Meta))
return false;
Meta o = (Meta) other;
return compareValues(versionId, o.versionId, true) && compareValues(lastUpdated, o.lastUpdated, true)
&& compareValues(profile, o.profile, true);
}
public boolean isEmpty() {
return super.isEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(versionId, lastUpdated, profile
, security, tag);
}
}
| apache-2.0 |
napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs_/link_protection_type/__init__.py | 12398 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
class link_protection_type(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isn/neighbors/neighbor/subTLVs/subTLVs/link-protection-type. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: ISIS LSDB parameters relating to the type of link protection
offered.
"""
__slots__ = ("_path_helper", "_extmethods", "__state")
_yang_name = "link-protection-type"
_pybind_generated_by = "container"
    def __init__(self, *args, **kwargs):
        """Build the container; optionally copy changed elements from a single
        compatible object passed positionally (pyangbind copy-construction)."""
        self._path_helper = False
        self._extmethods = False
        # The only child node: the read-only (config: false) "state" container.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            # The source must expose every element of this class to be copied.
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                # Skip elements the source object never modified.
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"mt-isn",
"neighbors",
"neighbor",
"subTLVs",
"subTLVs",
"link-protection-type",
]
    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/link_protection_type/state (container)

        YANG Description: State parameters of sub-TLV 20.
        """
        # Returns the YANGDynClass-wrapped container built in __init__.
        return self.__state
    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/link_protection_type/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of sub-TLV 20.
        """
        # Unwrap a previously-wrapped value back to its underlying type first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Re-wrap the value so it carries the same YANG metadata as the
            # default built in __init__; a type mismatch raises below.
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            # Surface a structured error describing the expected YANG type.
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        # Notify the parent wrapper (if any) that this node changed.
        if hasattr(self, "_set"):
            self._set()
    def _unset_state(self):
        # Reset the child to a fresh default wrapper, mirroring __init__.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )
state = __builtin__.property(_get_state)
_pyangbind_elements = OrderedDict([("state", state)])
from . import state
class link_protection_type(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isn/neighbors/neighbor/subTLVs/subTLVs/link-protection-type. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: ISIS LSDB parameters relating to the type of link protection
    offered.
    """
    # Slots keep instances small. Entries starting with a double underscore
    # are name-mangled by Python exactly like the self.__state attribute
    # accesses below, so the two stay in sync.
    __slots__ = ("_path_helper", "_extmethods", "__state")
    _yang_name = "link-protection-type"
    _pybind_generated_by = "container"
    def __init__(self, *args, **kwargs):
        # Disabled by default; a parent container overrides these when this
        # node is registered into a full pyangbind tree.
        self._path_helper = False
        self._extmethods = False
        # Child container "state" (config false) wrapped in YANGDynClass so
        # type and metadata checks run on every assignment.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )
        load = kwargs.pop("load", None)
        # Copy-constructor behavior: a single positional argument must expose
        # every element of this container; changed elements are copied over
        # through the generated _set_<element>() methods.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                # Only propagate elements that differ from their defaults.
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)
    def _path(self):
        # Delegate to the parent when attached to a tree; otherwise return
        # the absolute schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "mt-isn",
                "neighbors",
                "neighbor",
                "subTLVs",
                "subTLVs",
                "link-protection-type",
            ]
    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/link_protection_type/state (container)

        YANG Description: State parameters of sub-TLV 20.
        """
        return self.__state
    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/link_protection_type/state (container)

        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of sub-TLV 20.
        """
        # Unwrap values that carry their own pyangbind union type helper.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Re-wrap the incoming value so it is validated against the
            # same metadata as the default instance created in __init__.
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )
        self.__state = t
        # Notify the parent tree (if any) that this node changed.
        if hasattr(self, "_set"):
            self._set()
    def _unset_state(self):
        # Restore the default (empty) state container.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )
    # Read-only property: config-false data is populated through
    # _set_state() by backends, not by API consumers.
    state = __builtin__.property(_get_state)
    _pyangbind_elements = OrderedDict([("state", state)])
| apache-2.0 |
lucaspouzac/jmeter-reporting | src/main/java/org/jmeter/reporting/AppServer.java | 1277 | package org.jmeter.reporting;
import restx.server.JettyWebServer;
import restx.server.WebServer;
import com.google.common.base.Optional;
/**
* This class can be used to run the app.
*
* Alternatively, you can deploy the app as a war in a regular container like tomcat or jetty.
*
* Reading the port from system env PORT makes it compatible with heroku.
*/
public class AppServer {
    public static final String WEB_INF_LOCATION = "src/main/webapp/WEB-INF/web.xml";
    public static final String WEB_APP_LOCATION = "src/main/webapp";

    /**
     * Boots an embedded Jetty server hosting the RestX application and
     * blocks until the server terminates.
     *
     * @param args ignored
     * @throws Exception if the server fails to start
     */
    public static void main(String[] args) throws Exception {
        /*
         * Load mode from system property if defined, or default to dev.
         * These properties are set BEFORE the server is constructed so that
         * RestX sees them during bootstrap (the original set them after
         * construction). Be careful with that setting: if you use this class
         * to launch your server in production, make sure to launch it with
         * -Drestx.mode=prod or change the default here.
         */
        System.setProperty("restx.mode", System.getProperty("restx.mode", "dev"));
        System.setProperty("restx.app.package", "org.jmeter.reporting");

        // Reading the port from system env PORT keeps this Heroku-compatible.
        int port = Integer.parseInt(Optional.fromNullable(System.getenv("PORT")).or("8080"));
        WebServer server = new JettyWebServer(WEB_INF_LOCATION, WEB_APP_LOCATION, port, "0.0.0.0");
        server.startAndAwait();
    }
}
| apache-2.0 |
samvera/hyrax | app/forms/hyrax/forms/widgets/admin_set_embargo_period.rb | 732 | # frozen_string_literal: true
module Hyrax
  module Forms
    module Widgets
      # Supplies the embargo release-period choices for the admin set
      # permission template form.
      class AdminSetEmbargoPeriod
        # Visibility options for permission templates.
        #
        # @return [Array<Array(String, String)>] pairs of
        #   [release period constant, localized label]
        def options
          scope = "hyrax.admin.admin_sets.form_visibility.release.varies.embargo"
          periods = {
            Hyrax::PermissionTemplate::RELEASE_TEXT_VALUE_6_MONTHS => '.6mos',
            Hyrax::PermissionTemplate::RELEASE_TEXT_VALUE_1_YEAR => '.1yr',
            Hyrax::PermissionTemplate::RELEASE_TEXT_VALUE_2_YEARS => '.2yrs',
            Hyrax::PermissionTemplate::RELEASE_TEXT_VALUE_3_YEARS => '.3yrs'
          }
          periods.map { |period, key| [period, I18n.t(key, scope: scope)] }
        end
      end
    end
  end
end
| apache-2.0 |
minio/minio-dotnet | Minio.Examples/Cases/CopyObjectReplaceTags.cs | 2552 | /*
* MinIO .NET Library for Amazon S3 Compatible Cloud Storage, (C) 2021 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Minio.DataModel.Tags;
namespace Minio.Examples.Cases
{
class CopyObjectReplaceTags
{
// Copy object from one bucket to another, replace tags in the copied object
public async static Task Run(MinioClient minio,
string fromBucketName = "from-bucket-name",
string fromObjectName = "from-object-name",
string destBucketName = "dest-bucket",
string destObjectName =" to-object-name")
{
try
{
Console.WriteLine("Running example for API: CopyObjectAsync with Tags");
var tags = new Dictionary<string, string>
{
{ "Test-TagKey", "Test-TagValue" },
};
Tagging tagObj = Tagging.GetObjectTags(tags);
CopySourceObjectArgs cpSrcArgs = new CopySourceObjectArgs()
.WithBucket(fromBucketName)
.WithObject(fromObjectName);
CopyObjectArgs args = new CopyObjectArgs()
.WithBucket(destBucketName)
.WithObject(destObjectName)
.WithTagging(tagObj)
.WithReplaceTagsDirective(true)
.WithCopyObjectSource(cpSrcArgs);
await minio.CopyObjectAsync(args).ConfigureAwait(false);
}
catch (Exception e)
{
Console.WriteLine("[Bucket] Exception: {0}", e);
}
}
}
} | apache-2.0 |
desruisseaux/sis | core/sis-metadata/src/test/java/org/apache/sis/metadata/iso/citation/HardCodedCitations.java | 4978 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.metadata.iso.citation;
import java.net.URI;
import org.opengis.metadata.citation.Role;
import org.opengis.metadata.citation.Citation;
import org.opengis.metadata.citation.OnLineFunction;
import org.opengis.metadata.citation.PresentationForm;
import org.apache.sis.metadata.iso.DefaultIdentifier;
import org.apache.sis.metadata.iso.ImmutableIdentifier;
import org.apache.sis.util.iso.SimpleInternationalString;
import org.apache.sis.internal.util.Constants;
import org.apache.sis.util.Static;
/**
* Hard-coded citation constants used for testing purpose only.
* We use those hard-coded constants instead than the ones defined in the
* {@link org.apache.sis.metadata.iso.citation.Citations} class in order
* to protect the test suite against any change in the definition of the
* above-cited public constants.
*
* @author Martin Desruisseaux (Geomatys)
* @since 0.3
* @version 0.6
* @module
*/
public final strictfp class HardCodedCitations extends Static {
    /**
     * The ISO 19111 standard.
     */
    public static final DefaultCitation ISO_19111 = createISO19111();

    /**
     * The ISO 19115 standard.
     */
    public static final DefaultCitation ISO_19115 = createISO19115();

    /**
     * The <a href="http://www.epsg.org">EPSG Geodetic Parameter Dataset</a> authority.
     * This citation contains the "EPSG" {@linkplain Citation#getIdentifiers() identifier}.
     *
     * <p>String representation:</p>
     *
     * {@preformat text
     *   Citation
     *     ├─Title………………………………………………………… EPSG Geodetic Parameter Dataset
     *     ├─Identifier
     *     │   └─Code………………………………………………… EPSG
     *     ├─Cited responsible party
     *     │   ├─Party
     *     │   │   ├─Name……………………………………… International Association of Oil & Gas Producers
     *     │   │   └─Contact info
     *     │   │       └─Online resource
     *     │   │           ├─Linkage………… http://www.epsg.org
     *     │   │           └─Function……… Information
     *     │   └─Role………………………………………………… Principal investigator
     *     └─Presentation form………………………… Table digital
     * }
     */
    public static final DefaultCitation EPSG = createEPSG();

    /**
     * Codespace for objects specific to <a href="http://sis.apache.org">Apache SIS</a>.
     */
    public static final DefaultCitation SIS = createSIS();

    /** Builds the frozen citation for the ISO 19111 standard. */
    private static DefaultCitation createISO19111() {
        final DefaultCitation c = new DefaultCitation("Spatial referencing by coordinates");
        c.getAlternateTitles().add(new SimpleInternationalString("ISO 19111"));
        c.getIdentifiers().add(new ImmutableIdentifier(null, "ISO", "19111"));
        c.getPresentationForms().add(PresentationForm.DOCUMENT_DIGITAL);
        c.freeze();
        return c;
    }

    /** Builds the frozen citation for the ISO 19115 standard. */
    private static DefaultCitation createISO19115() {
        final DefaultCitation c = new DefaultCitation("ISO 19115");
        c.getPresentationForms().add(PresentationForm.DOCUMENT_DIGITAL);
        c.freeze();
        return c;
    }

    /** Builds the frozen citation for the EPSG geodetic dataset authority. */
    private static DefaultCitation createEPSG() {
        final DefaultOnlineResource r = new DefaultOnlineResource(URI.create("http://www.epsg.org"));
        r.setFunction(OnLineFunction.INFORMATION);
        final DefaultResponsibility p = new DefaultResponsibility(Role.PRINCIPAL_INVESTIGATOR, null,
                new DefaultOrganisation("International Association of Oil & Gas Producers", null, null, new DefaultContact(r)));
        final DefaultCitation c = new DefaultCitation("EPSG Geodetic Parameter Dataset");
        c.getPresentationForms().add(PresentationForm.TABLE_DIGITAL);
        c.getIdentifiers().add(new DefaultIdentifier(Constants.EPSG));
        c.getCitedResponsibleParties().add(p);
        c.freeze();
        return c;
    }

    /** Builds the frozen citation used as the Apache SIS codespace. */
    private static DefaultCitation createSIS() {
        final DefaultCitation c = new DefaultCitation(Constants.SIS);
        c.freeze();
        return c;
    }

    /**
     * Do not allow instantiation of this class.
     */
    private HardCodedCitations() {
    }
}
| apache-2.0 |
spektrumprojekt/spektrum | message-api/src/main/java/de/spektrumprojekt/datamodel/message/MessageType.java | 1128 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package de.spektrumprojekt.datamodel.message;
public enum MessageType {
    /**
     * The message has content from external systems, such as news or items.
     */
    CONTENT,
    /**
     * The message contains information about an error.
     */
    ERROR,
    /**
     * The message contains status information.
     */
    STATUS;
}
| apache-2.0 |
danielegrosso/amazon-mws-feeds | src/MarketplaceWebService/Model/GetReportRequestListByNextTokenRequest.php | 5928 | <?php
/**
* PHP Version 5
*
* @category Amazon
* @package MarketplaceWebService
* @copyright Copyright 2009 Amazon Technologies, Inc.
* @link http://aws.amazon.com
* @license http://aws.amazon.com/apache2.0 Apache License, Version 2.0
* @version 2009-01-01
*/
/*******************************************************************************
* Marketplace Web Service PHP5 Library
* Generated: Thu May 07 13:07:36 PDT 2009
*
*/
/**
* @see MarketplaceWebService_Model
*/
//require_once ('MarketplaceWebService/Model.php');
/**
* MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest
*
* Properties:
* <ul>
*
* <li>Marketplace: string</li>
* <li>Merchant: string</li>
* <li>NextToken: string</li>
*
* </ul>
*/
class MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest extends MarketplaceWebService_Model
{
    /**
     * Construct new MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest
     *
     * @param mixed $data DOMElement or Associative Array to construct from.
     *
     * Valid properties:
     * <ul>
     *
     * <li>Marketplace: string</li>
     * <li>Merchant: string</li>
     * <li>MWSAuthToken: string</li>
     * <li>NextToken: string</li>
     *
     * </ul>
     */
    public function __construct($data = null)
    {
        // Field map consumed by the parent model: each entry tracks the raw
        // value plus its declared type for (de)serialization.
        $this->fields = array (
            'Marketplace' => array('FieldValue' => null, 'FieldType' => 'string'),
            'Merchant' => array('FieldValue' => null, 'FieldType' => 'string'),
            'MWSAuthToken' => array('FieldValue' => null, 'FieldType' => 'string'),
            'NextToken' => array('FieldValue' => null, 'FieldType' => 'string'),
        );
        parent::__construct($data);
    }
    /**
     * Gets the value of the Marketplace property.
     *
     * @return string Marketplace
     */
    public function getMarketplace()
    {
        return $this->fields['Marketplace']['FieldValue'];
    }
    /**
     * Sets the value of the Marketplace property.
     *
     * @param string $value Marketplace
     * @return MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest this instance
     */
    public function setMarketplace($value)
    {
        $this->fields['Marketplace']['FieldValue'] = $value;
        return $this;
    }
    /**
     * Sets the value of the Marketplace and returns this instance
     *
     * @param string $value Marketplace
     * @return MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest instance
     */
    public function withMarketplace($value)
    {
        $this->setMarketplace($value);
        return $this;
    }
    /**
     * Checks if Marketplace is set
     *
     * @return bool true if Marketplace is set
     */
    public function isSetMarketplace()
    {
        return !is_null($this->fields['Marketplace']['FieldValue']);
    }
    /**
     * Gets the value of the Merchant property.
     *
     * @return string Merchant
     */
    public function getMerchant()
    {
        return $this->fields['Merchant']['FieldValue'];
    }
    /**
     * Sets the value of the Merchant property.
     *
     * @param string $value Merchant
     * @return MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest this instance
     */
    public function setMerchant($value)
    {
        $this->fields['Merchant']['FieldValue'] = $value;
        return $this;
    }
    /**
     * Sets the value of the Merchant and returns this instance
     *
     * @param string $value Merchant
     * @return MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest instance
     */
    public function withMerchant($value)
    {
        $this->setMerchant($value);
        return $this;
    }
    /**
     * Checks if Merchant is set
     *
     * @return bool true if Merchant is set
     */
    public function isSetMerchant()
    {
        return !is_null($this->fields['Merchant']['FieldValue']);
    }
    /**
     * Gets the value of the MWSAuthToken property.
     *
     * @return string MWSAuthToken
     */
    public function getMWSAuthToken()
    {
        return $this->fields['MWSAuthToken']['FieldValue'];
    }
    /**
     * Sets the value of the MWSAuthToken property.
     *
     * @param string $value MWSAuthToken
     * @return MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest this instance
     */
    public function setMWSAuthToken($value)
    {
        $this->fields['MWSAuthToken']['FieldValue'] = $value;
        return $this;
    }
    /**
     * Sets the value of the MWSAuthToken and returns this instance
     *
     * @param string $value MWSAuthToken
     * @return MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest instance
     */
    public function withMWSAuthToken($value)
    {
        $this->setMWSAuthToken($value);
        return $this;
    }
    /**
     * Checks if MWSAuthToken is set
     *
     * @return bool true if MWSAuthToken is set
     */
    public function isSetMWSAuthToken()
    {
        return !is_null($this->fields['MWSAuthToken']['FieldValue']);
    }
    /**
     * Gets the value of the NextToken property.
     *
     * @return string NextToken
     */
    public function getNextToken()
    {
        return $this->fields['NextToken']['FieldValue'];
    }
    /**
     * Sets the value of the NextToken property.
     *
     * @param string $value NextToken
     * @return MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest this instance
     */
    public function setNextToken($value)
    {
        $this->fields['NextToken']['FieldValue'] = $value;
        return $this;
    }
    /**
     * Sets the value of the NextToken and returns this instance
     *
     * @param string $value NextToken
     * @return MarketplaceWebService_Model_GetReportRequestListByNextTokenRequest instance
     */
    public function withNextToken($value)
    {
        $this->setNextToken($value);
        return $this;
    }
    /**
     * Checks if NextToken is set
     *
     * @return bool true if NextToken is set
     */
    public function isSetNextToken()
    {
        return !is_null($this->fields['NextToken']['FieldValue']);
    }
}
| apache-2.0 |
android-art-intel/Nougat | art-extension/opttests/src/OptimizationTests/LoadHoistStoreSink/Throw/Main.java | 1407 | /*
* Copyright (C) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package OptimizationTests.LoadHoistStoreSink.Throw;
public class Main
{
    // NOTE(review): this class is part of an ART compiler test suite
    // (package OptimizationTests.LoadHoistStoreSink.Throw); the unusual
    // loop/throw shape appears intentional to exercise the
    // load-hoist/store-sink optimization, so it should not be "simplified".
    public class A
    {
        // Field written inside the loop below, giving the compiler a
        // candidate store to sink.
        public int value;
    }
    public int testLoop()
    {
        A x;
        x = new A();
        int a0 = 0x7;
        try
        {
            // The loop body executes exactly once: the throw on the first
            // iteration jumps to the catch block, after a0 was incremented
            // and stored, so x.value == 8 on return.
            for (int i = 0; i < 10; i++)
            {
                a0 ++;
                x.value = a0;
                throw new java.lang.ArithmeticException();
            }
        }catch(java.lang.ArithmeticException e){
        }
        return x.value;
    }
    public void test()
    {
        // sum accumulates ten identical results but is otherwise unused;
        // only the final testLoop() result (8) is printed.
        int sum = 0;
        for (int i = 0; i < 10; i++)
        {
            sum = sum + testLoop();
        }
        System.out.println(testLoop());
    }
    public static void main(String[] args)
    {
        new Main().test();
    }
}
| apache-2.0 |
VHAINNOVATIONS/Telepathology | Source/Java/CoreValueObjects/main/src/java/gov/va/med/imaging/exchange/storage/DataSourceByteBufferPoolManagerMBean.java | 2069 | /**
*
Package: MAG - VistA Imaging
WARNING: Per VHA Directive 2004-038, this routine should not be modified.
Date Created: Oct 2, 2008
Site Name: Washington OI Field Office, Silver Spring, MD
Developer: VHAISWWERFEJ
Description:
;; +--------------------------------------------------------------------+
;; Property of the US Government.
;; No permission to copy or redistribute this software is given.
;; Use of unreleased versions of this software requires the user
;; to execute a written test agreement with the VistA Imaging
;; Development Office of the Department of Veterans Affairs,
;; telephone (301) 734-0100.
;;
;; The Food and Drug Administration classifies this software as
;; a Class II medical device. As such, it may not be changed
;; in any way. Modifications to this software may result in an
;; adulterated medical device under 21CFR820, the use of which
;; is considered to be a violation of US Federal Statutes.
;; +--------------------------------------------------------------------+
*/
package gov.va.med.imaging.exchange.storage;
/**
* @author VHAISWWERFEJ
*
*/
public interface DataSourceByteBufferPoolManagerMBean
{
	/**
	 * Gets the number of buffer pools managed by this pool manager.
	 *
	 * @return the buffer pool count
	 */
	int getBufferPoolCount();

	/**
	 * Gets the count of overloaded buffer size requests.
	 *
	 * @return the overloaded buffer size request count
	 */
	int getOverloadedBufferSizeRequests();

	/**
	 * Gets the highest overloaded buffer size requested.
	 *
	 * @return the highest overloaded buffer size request
	 */
	int getHighestOverloadedBufferSizeRequest();

	/**
	 * Returns a comma separated list of the buffer names.
	 *
	 * @return the comma separated buffer names
	 */
	String getBufferNames();

	/**
	 * Gets the total number of requests to get buffers.
	 *
	 * @return the total buffer request count
	 */
	int getTotalRequestBufferCount();

	/**
	 * Gets the total number of buffers returned to the pool.
	 *
	 * @return the total buffer return count
	 */
	int getTotalReturnBufferCount();

	/**
	 * Gets the total new buffer creation count.
	 *
	 * @return the total new buffer creation count
	 */
	int getTotalCreateNewBufferCount();

	/**
	 * Gets the total bytes put into buffers.
	 *
	 * @return the total buffer size used, in bytes
	 */
	long getTotalBufferSizeUse();

	/**
	 * Resets the counters.
	 */
	void resetCounters();
}
| apache-2.0 |
Grandbrain/Space | Source/Space.System/System.Desktop.cpp | 5947 |
#include <Windows.h>
#include <vector>
#include <Xinput.h>
#include "System.Desktop.h"
namespace Space
{
	// Parallel slot tables: the unsigned "handle" handed back to callers is
	// an index into these vectors. mContexts holds the (optional) GL context
	// for the same slot, and mCells is a free-list of released slot indices.
	std::vector<HGLRC> mContexts;
	std::vector<HWND> mHandles;
	std::vector<unsigned> mCells;
	// Application message callback invoked from WindowProcedure; captured
	// once, on the first WindowCreate call.
	void(*Back)(void*) = nullptr;
unsigned Insert(HWND handle, HGLRC context)
{
bool empty = mCells.empty();
if (empty) mHandles.push_back(handle);
if (empty) mContexts.push_back(context);
if (empty) return mHandles.size() - 1;
unsigned cell = mCells.back();
mCells.pop_back();
mHandles[cell] = handle;
mContexts[cell] = context;
return cell;
}
void Remove(unsigned cell)
{
mHandles[cell] = nullptr;
mContexts[cell] = nullptr;
mCells.push_back(cell);
}
	// Win32 window procedure: forwards close/paint/size events to the
	// application callback (Back) registered in WindowCreate.
	LRESULT CALLBACK WindowProcedure(HWND handle, UINT message, WPARAM wparam, LPARAM lparam)
	{
		// WM_CREATE carries the user pointer passed to CreateWindow; stash it
		// in the window's user data so later messages can recover it.
		if (message == WM_CREATE)
		{
			PVOID optional = LPCREATESTRUCT(lparam)->lpCreateParams;
			SetWindowLongPtr(handle, GWLP_USERDATA, (LONG_PTR)optional);
			return 0;
		}
		// For every other message, recover the per-window user pointer.
		PVOID optional = (PVOID)GetWindowLongPtr(handle, GWLP_USERDATA);
		LRESULT result = 0;
		if (message == WM_DESTROY)
		{
			PostQuitMessage(0);
		}
		else if (message == WM_CLOSE)
		{
			// DefWindowProc is deliberately skipped: the application decides
			// in its CLOSE handler whether the window is actually destroyed.
			dMESSAGE msg;
			msg.Type = eMESSAGE::CLOSE;
			msg.Optional = optional;
			msg.Message = nullptr;
			Back(&msg);
		}
		else if (message == WM_PAINT)
		{
			// Bracket the application's paint callback with Begin/EndPaint so
			// the dirty region is validated.
			dMESSAGE msg;
			PAINTSTRUCT paint;
			BeginPaint(handle, &paint);
			msg.Type = eMESSAGE::PAINT;
			msg.Optional = optional;
			msg.Message = nullptr;
			Back(&msg);
			EndPaint(handle, &paint);
		}
		else if (message == WM_SIZE)
		{
			// lparam packs the new client size: LOWORD = width, HIWORD = height.
			dSIZE size;
			dMESSAGE msg;
			size.Width = LOWORD(lparam);
			size.Height = HIWORD(lparam);
			msg.Type = eMESSAGE::SIZE;
			msg.Optional = optional;
			msg.Message = &size;
			Back(&msg);
		}
		else result = DefWindowProc(handle, message, wparam, lparam);
		return result;
	}
	// Create a top-level window and return its slot handle through 'handle'.
	// 'back' is the application message callback (stored once, globally);
	// 'optional' is an opaque pointer delivered back with every message.
	bool WindowCreate(unsigned& handle, void(*back)(void*), void* optional)
	{
		// The callback is global: only the first non-null 'back' is kept.
		if (!Back) Back = back;
		if (!Back) return false;
		HINSTANCE instance = GetModuleHandle(NULL);
		WNDCLASS wnd;
		wnd.cbClsExtra = 0;
		wnd.cbWndExtra = 0;
		wnd.hbrBackground = HBRUSH(COLOR_WINDOW + 1);
		wnd.hCursor = LoadCursor(nullptr, IDC_ARROW);
		wnd.hIcon = LoadIcon(nullptr, IDI_APPLICATION);
		wnd.hInstance = instance;
		wnd.lpfnWndProc = WindowProcedure;
		wnd.lpszClassName = L"Default";
		wnd.lpszMenuName = nullptr;
		// CS_OWNDC gives each window its own device context (useful for GL).
		wnd.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC;
		// NOTE(review): RegisterClass runs on EVERY call; after the first
		// registration it fails because the class already exists, which is
		// harmless here but the return value is ignored - confirm intended.
		RegisterClass(&wnd);
		HWND hwnd = CreateWindow(wnd.lpszClassName, NULL,
			WS_OVERLAPPEDWINDOW, CW_USEDEFAULT, NULL,
			CW_USEDEFAULT, NULL, NULL, NULL,
			instance, optional);
		if (!hwnd) return false;
		// No GL context yet; one may be associated with the slot later.
		handle = Insert(hwnd, nullptr);
		return true;
	}
bool WindowMessage(unsigned handle, const wchar_t* caption, const wchar_t* message)
{
HWND hwnd = mHandles[handle];
return MessageBox(hwnd, message, caption, MB_ICONINFORMATION) != 0;
}
bool WindowDestroy(unsigned handle)
{
HWND hwnd = mHandles[handle];
HGLRC context = mContexts[handle];
Remove(handle);
DestroyWindow(hwnd);
return true;
}
bool WindowHandle(unsigned handle, void*& hwnd)
{
HWND native = mHandles[handle];
hwnd = native;
return true;
}
	// "Close" a window through the Win32 CloseWindow call.
	// NOTE(review): CloseWindow minimizes the window rather than destroying
	// it; if an actual close was intended, sending WM_CLOSE (or calling
	// WindowDestroy) would be the usual approach - confirm.
	bool WindowClose(unsigned handle)
	{
		HWND hwnd = mHandles[handle];
		return CloseWindow(hwnd) == TRUE;
	}
bool WindowClientSize(unsigned handle, dSIZE& size)
{
RECT r;
HWND hwnd = mHandles[handle];
if (!GetClientRect(hwnd, &r)) return false;
size.Width = r.right - r.left;
size.Height = r.bottom - r.top;
return true;
}
bool WindowSize(unsigned handle, dSIZE& size)
{
RECT r;
HWND hwnd = mHandles[handle];
if (!GetWindowRect(hwnd, &r)) return false;
size.Width = r.right - r.left;
size.Height = r.bottom - r.top;
return true;
}
bool WindowClientCorner(unsigned handle, dPOINT& point)
{
RECT r;
HWND hwnd = mHandles[handle];
if (!GetClientRect(hwnd, &r)) return false;
POINT p = { r.left, r.top };
if (!ClientToScreen(hwnd, &p)) return false;
point.X = p.x;
point.Y = p.y;
return true;
}
bool WindowCorner(unsigned handle, dPOINT& point)
{
RECT r;
HWND hwnd = mHandles[handle];
if (!GetWindowRect(hwnd, &r)) return false;
point.X = r.left;
point.Y = r.top;
return true;
}
bool WindowShow(unsigned handle, eSHOW mode)
{
int show;
if (mode == eSHOW::MAXIMIZE) show = SW_SHOWMAXIMIZED;
if (mode == eSHOW::MINIMIZE) show = SW_SHOWMINIMIZED;
if (mode == eSHOW::NORMAL) show = SW_SHOWNORMAL;
if (show == 0) return false;
HWND hwnd = mHandles[handle];
if (!ShowWindow(hwnd, show)) return false;
if (!UpdateWindow(hwnd)) return false;
return true;
}
bool WindowUpdate(unsigned handle)
{
HWND hwnd = mHandles[handle];
return InvalidateRect(hwnd, nullptr, FALSE) == TRUE;
}
bool WindowCaption(unsigned handle, const wchar_t* caption)
{
HWND hwnd = mHandles[handle];
return SetWindowText(hwnd, caption) == TRUE;
}
bool WindowIcon(unsigned handle, unsigned resource)
{
HINSTANCE instance = GetModuleHandle(NULL);
HWND hwnd = mHandles[handle];
LPCWSTR name = MAKEINTRESOURCE(resource);
HANDLE icon = LoadImage(instance, name, IMAGE_ICON, 0, 0, LR_DEFAULTSIZE);
if (!icon) return false;
SendMessage(hwnd, WM_SETICON, ICON_BIG, (LPARAM)icon);
SendMessage(hwnd, WM_SETICON, ICON_SMALL, (LPARAM)icon);
return true;
}
bool GamepadConnected(unsigned handle)
{
XINPUT_STATE state;
return XInputGetState(handle, &state) == ERROR_SUCCESS;
}
bool GamepadVibration(unsigned handle, const dVIBRATION& vibration)
{
XINPUT_VIBRATION raw{ vibration.LSpeed, vibration.RSpeed };
return XInputSetState(handle, &raw) == ERROR_SUCCESS;
}
	// Query the current XInput state for a controller slot.
	// NOTE(review): the fetched XINPUT_STATE is never copied into the
	// 'state' out-parameter, which is left untouched on success - this looks
	// unfinished; confirm whether dSTATE population was intended here.
	bool GamepadState(unsigned handle, dSTATE& state)
	{
		XINPUT_STATE raw;
		if (XInputGetState(handle, &raw) != ERROR_SUCCESS) return false;
		return true;
	}
void AppLoop()
{
MSG message;
do
{
if (PeekMessage(&message, nullptr, 0, 0, PM_REMOVE))
DispatchMessage(&message);
else WaitMessage();
}
while (message.message != WM_QUIT);
}
} | apache-2.0 |
xianfengxiong/how-tomcat-work | book/tomcat-5.0.18-src/jakarta-tomcat-catalina/modules/cluster/src/share/org/apache/catalina/cluster/io/ListenCallback.java | 3598 | /*
* $Header: /home/cvs/jakarta-tomcat-catalina/modules/cluster/src/share/org/apache/catalina/cluster/io/ListenCallback.java,v 1.1 2003/02/19 20:32:10 fhanik Exp $
* $Revision: 1.1 $
* $Date: 2003/02/19 20:32:10 $
*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
* [Additional notices, if required by prior licensing conditions]
*
*/
package org.apache.catalina.cluster.io;
/**
* The listen callback interface is used by the replication system
* when data has been received. The interface does not care about
* objects and marshalling and just passes the bytes straight through.
* @author Filip Hanik
* @version $Revision: 1.1 $, $Date: 2003/02/19 20:32:10 $
*/
public interface ListenCallback
{
    /**
     * This method is invoked on the callback object to notify it that new
     * data has been received from one of the cluster nodes. The replication
     * layer does not unmarshal anything; the raw message bytes are passed
     * straight through.
     *
     * @param data the message bytes received from the cluster/replication system
     */
    public void messageDataReceived(byte[] data);
}
| apache-2.0 |
NotFound403/WePay | src/main/java/cn/felord/wepay/ali/sdk/api/response/ZhimaMerchantOrderRentCreateResponse.java | 2960 | package cn.felord.wepay.ali.sdk.api.response;
import cn.felord.wepay.ali.sdk.api.internal.mapping.ApiField;
import cn.felord.wepay.ali.sdk.api.AlipayResponse;
/**
* ALIPAY API: zhima.merchant.order.rent.create response.
*
* @author auto create
* @version $Id: $Id
*/
public class ZhimaMerchantOrderRentCreateResponse extends AlipayResponse {
	private static final long serialVersionUID = 6767715372181211422L;
	/**
	 * Whether the user is admitted to the service:
	 * Y - admitted,
	 * N - not admitted.
	 */
	@ApiField("admit_state")
	private String admitState;
	/**
	 * Parameter supplied when the merchant initiates the rental service;
	 * it is returned to the merchant after the rental ends.
	 */
	@ApiField("invoke_state")
	private String invokeState;
	/**
	 * Zhima Credit rent-and-return order number.
	 */
	@ApiField("order_no")
	private String orderNo;
	/**
	 * External order number supplied by the merchant; must be unique
	 * (Zhima uses it internally for idempotency control).
	 * Format: yyyyMMddHHmmss followed by 4 random digits.
	 */
	@ApiField("out_order_no")
	private String outOrderNo;
	/**
	 * The borrower's userId.
	 */
	@ApiField("user_id")
	private String userId;

	/**
	 * <p>Setter for the field <code>admitState</code>.</p>
	 *
	 * @param admitState a {@link java.lang.String} object.
	 */
	public void setAdmitState(String admitState) {
		this.admitState = admitState;
	}
	/**
	 * <p>Getter for the field <code>admitState</code>.</p>
	 *
	 * @return a {@link java.lang.String} object.
	 */
	public String getAdmitState( ) {
		return this.admitState;
	}
	/**
	 * <p>Setter for the field <code>invokeState</code>.</p>
	 *
	 * @param invokeState a {@link java.lang.String} object.
	 */
	public void setInvokeState(String invokeState) {
		this.invokeState = invokeState;
	}
	/**
	 * <p>Getter for the field <code>invokeState</code>.</p>
	 *
	 * @return a {@link java.lang.String} object.
	 */
	public String getInvokeState( ) {
		return this.invokeState;
	}
	/**
	 * <p>Setter for the field <code>orderNo</code>.</p>
	 *
	 * @param orderNo a {@link java.lang.String} object.
	 */
	public void setOrderNo(String orderNo) {
		this.orderNo = orderNo;
	}
	/**
	 * <p>Getter for the field <code>orderNo</code>.</p>
	 *
	 * @return a {@link java.lang.String} object.
	 */
	public String getOrderNo( ) {
		return this.orderNo;
	}
	/**
	 * <p>Setter for the field <code>outOrderNo</code>.</p>
	 *
	 * @param outOrderNo a {@link java.lang.String} object.
	 */
	public void setOutOrderNo(String outOrderNo) {
		this.outOrderNo = outOrderNo;
	}
	/**
	 * <p>Getter for the field <code>outOrderNo</code>.</p>
	 *
	 * @return a {@link java.lang.String} object.
	 */
	public String getOutOrderNo( ) {
		return this.outOrderNo;
	}
	/**
	 * <p>Setter for the field <code>userId</code>.</p>
	 *
	 * @param userId a {@link java.lang.String} object.
	 */
	public void setUserId(String userId) {
		this.userId = userId;
	}
	/**
	 * <p>Getter for the field <code>userId</code>.</p>
	 *
	 * @return a {@link java.lang.String} object.
	 */
	public String getUserId( ) {
		return this.userId;
	}
}
| apache-2.0 |
taori/WMPR | src/WMPR/WMPR.Client/Framework/Converters/BooleanToVisibilityConverterInverted.cs | 262 | using System.Windows;
namespace WMPR.Client.Framework.Converters
{
/// <summary>
/// Converts a boolean to a WPF <see cref="Visibility"/> with inverted
/// semantics: <c>true</c> maps to <c>Collapsed</c>, <c>false</c> maps to
/// <c>Visible</c> (the mapping is supplied to the <c>BooleanConverter</c> base).
/// </summary>
public class BooleanToVisibilityConverterInverted : BooleanConverter<Visibility>
{
    public BooleanToVisibilityConverterInverted() : base(Visibility.Collapsed, Visibility.Visible)
    {
    }
}
} | apache-2.0 |
madis/kubernetes | test/e2e/dns.go | 9989 | /*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e
import (
"fmt"
"time"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api/latest"
"github.com/GoogleCloudPlatform/kubernetes/pkg/fields"
"github.com/GoogleCloudPlatform/kubernetes/pkg/labels"
"github.com/GoogleCloudPlatform/kubernetes/pkg/util"
"github.com/GoogleCloudPlatform/kubernetes/pkg/util/wait"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// dnsServiceLableSelector selects the pods that implement the cluster DNS
// service (kube-dns).
// NOTE(review): "Lable" is a typo for "Label"; it is only flagged here
// because renaming would have to touch every reference in this file.
var dnsServiceLableSelector = labels.Set{
	"k8s-app": "kube-dns",
	"kubernetes.io/cluster-service": "true",
}.AsSelector()
// The "DNS" e2e suite verifies that cluster DNS (kube-dns) resolves the
// expected names from inside a pod, over both UDP and TCP. Each test runs a
// probe pod whose "querier" container digs the names in a loop and records
// successes under /results, while a "webserver" container exposes /results
// over HTTP so the test can poll for them via the apiserver proxy.
//
// Fixes vs. previous revision: removed a leftover debug statement
// (`Logf("vishh: 1")`) and a redundant fmt.Sprintf("%s", ...).
var _ = Describe("DNS", func() {
	f := NewFramework("dns")

	It("should provide DNS for the cluster", func() {
		if providerIs("vagrant") {
			By("Skipping test which is broken for vagrant (See https://github.com/GoogleCloudPlatform/kubernetes/issues/3580)")
			return
		}

		podClient := f.Client.Pods(api.NamespaceDefault)

		By("Waiting for DNS Service to be Running")
		dnsPods, err := podClient.List(dnsServiceLableSelector, fields.Everything())
		if err != nil {
			Failf("Failed to list all dns service pods")
		}
		if len(dnsPods.Items) != 1 {
			Failf("Unexpected number of pods (%d) matches the label selector %v", len(dnsPods.Items), dnsServiceLableSelector.String())
		}
		expectNoError(waitForPodRunning(f.Client, dnsPods.Items[0].Name))

		// All the names we need to be able to resolve.
		// TODO: Spin up a separate test service and test that dns works for that service.
		namesToResolve := []string{
			"kubernetes-ro.default",
			"kubernetes-ro.default.svc",
			"kubernetes-ro.default.svc.cluster.local",
			"kubernetes-ro.default.cluster.local",
			"google.com",
		}
		// Added due to #8512. This is critical for GCE and GKE deployments.
		if providerIs("gce", "gke") {
			namesToResolve = append(namesToResolve, "metadata")
		}

		// Build a shell loop that keeps probing each name via dig over UDP
		// and TCP; each success drops an OK marker under /results.
		probeCmd := "for i in `seq 1 600`; do "
		for _, name := range namesToResolve {
			probeCmd += fmt.Sprintf(`test -n "$(dig +notcp +noall +answer +search %s)" && echo OK > /results/udp@%s;`, name, name)
			probeCmd += fmt.Sprintf(`test -n "$(dig +tcp +noall +answer +search %s)" && echo OK > /results/tcp@%s;`, name, name)
		}
		probeCmd += "sleep 1; done"

		// Run a pod which probes DNS and exposes the results by HTTP.
		By("creating a pod to probe DNS")
		pod := &api.Pod{
			TypeMeta: api.TypeMeta{
				Kind:       "Pod",
				APIVersion: latest.Version,
			},
			ObjectMeta: api.ObjectMeta{
				Name:      "dns-test-" + string(util.NewUUID()),
				Namespace: f.Namespace.Name,
			},
			Spec: api.PodSpec{
				Volumes: []api.Volume{
					{
						Name: "results",
						VolumeSource: api.VolumeSource{
							EmptyDir: &api.EmptyDirVolumeSource{},
						},
					},
				},
				Containers: []api.Container{
					// TODO: Consider scraping logs instead of running a webserver.
					{
						Name:  "webserver",
						Image: "gcr.io/google_containers/test-webserver",
						VolumeMounts: []api.VolumeMount{
							{
								Name:      "results",
								MountPath: "/results",
							},
						},
					},
					{
						Name:    "querier",
						Image:   "gcr.io/google_containers/dnsutils",
						Command: []string{"sh", "-c", probeCmd},
						VolumeMounts: []api.VolumeMount{
							{
								Name:      "results",
								MountPath: "/results",
							},
						},
					},
				},
			},
		}

		By("submitting the pod to kubernetes")
		podClient = f.Client.Pods(f.Namespace.Name)
		defer func() {
			By("deleting the pod")
			defer GinkgoRecover()
			podClient.Delete(pod.Name, nil)
		}()
		if _, err := podClient.Create(pod); err != nil {
			Failf("Failed to create %s pod: %v", pod.Name, err)
		}

		expectNoError(f.WaitForPodRunning(pod.Name))

		By("retrieving the pod")
		pod, err = podClient.Get(pod.Name)
		if err != nil {
			Failf("Failed to get pod %s: %v", pod.Name, err)
		}

		// Try to find results for each expected name; poll because the
		// querier needs time to write its OK markers.
		By("looking for the results for each expected name")
		var failed []string
		expectNoError(wait.Poll(time.Second*2, time.Second*60, func() (bool, error) {
			failed = []string{}
			for _, name := range namesToResolve {
				for _, proto := range []string{"udp", "tcp"} {
					testCase := fmt.Sprintf("%s@%s", proto, name)
					_, err := f.Client.Get().
						Prefix("proxy").
						Resource("pods").
						Namespace(f.Namespace.Name).
						Name(pod.Name).
						Suffix("results", testCase).
						Do().Raw()
					if err != nil {
						failed = append(failed, testCase)
					}
				}
			}
			if len(failed) == 0 {
				return true, nil
			}
			Logf("Lookups using %s failed for: %v\n", pod.Name, failed)
			return false, nil
		}))
		Expect(len(failed)).To(Equal(0))

		// TODO: probe from the host, too.
		Logf("DNS probes using %s succeeded\n", pod.Name)
	})

	It("should provide DNS for headless services", func() {
		if providerIs("vagrant") {
			By("Skipping test which is broken for vagrant (See https://github.com/GoogleCloudPlatform/kubernetes/issues/3580)")
			return
		}

		podClient := f.Client.Pods(api.NamespaceDefault)

		By("Waiting for DNS Service to be Running")
		dnsPods, err := podClient.List(dnsServiceLableSelector, fields.Everything())
		if err != nil {
			Failf("Failed to list all dns service pods")
		}
		if len(dnsPods.Items) != 1 {
			Failf("Unexpected number of pods (%d) matches the label selector %v", len(dnsPods.Items), dnsServiceLableSelector.String())
		}
		expectNoError(waitForPodRunning(f.Client, dnsPods.Items[0].Name))

		// Create a test headless service (PortalIP "None") so its name
		// resolves directly to pod endpoints.
		By("Creating a test headless service")
		testServiceName := "test-service"
		testServiceSelector := map[string]string{
			"dns-test": "true",
		}
		svc := &api.Service{
			ObjectMeta: api.ObjectMeta{
				Name: testServiceName,
			},
			Spec: api.ServiceSpec{
				PortalIP: "None",
				Ports: []api.ServicePort{
					{Port: 80},
				},
				Selector: testServiceSelector,
			},
		}
		_, err = f.Client.Services(f.Namespace.Name).Create(svc)
		Expect(err).NotTo(HaveOccurred())
		defer func() {
			By("deleting the test headless service")
			defer GinkgoRecover()
			f.Client.Services(f.Namespace.Name).Delete(svc.Name)
		}()

		// All the names we need to be able to resolve.
		// TODO: Create more endpoints and ensure that multiple A records are returned
		// for headless service.
		namesToResolve := []string{
			testServiceName,
			fmt.Sprintf("%s.%s", testServiceName, f.Namespace.Name),
			fmt.Sprintf("%s.%s.svc", testServiceName, f.Namespace.Name),
		}

		probeCmd := "for i in `seq 1 600`; do "
		for _, name := range namesToResolve {
			probeCmd += fmt.Sprintf(`test -n "$(dig +notcp +noall +answer +search %s)" && echo OK > /results/udp@%s;`, name, name)
			probeCmd += fmt.Sprintf(`test -n "$(dig +tcp +noall +answer +search %s)" && echo OK > /results/tcp@%s;`, name, name)
		}
		probeCmd += "sleep 1; done"

		// Run a pod which probes DNS and exposes the results by HTTP.
		// The pod carries the service selector labels so it becomes an
		// endpoint of the headless service it resolves.
		By("creating a pod to probe DNS")
		pod := &api.Pod{
			TypeMeta: api.TypeMeta{
				Kind:       "Pod",
				APIVersion: latest.Version,
			},
			ObjectMeta: api.ObjectMeta{
				Name:   "dns-test",
				Labels: testServiceSelector,
			},
			Spec: api.PodSpec{
				Volumes: []api.Volume{
					{
						Name: "results",
						VolumeSource: api.VolumeSource{
							EmptyDir: &api.EmptyDirVolumeSource{},
						},
					},
				},
				Containers: []api.Container{
					// TODO: Consider scraping logs instead of running a webserver.
					{
						Name:  "webserver",
						Image: "gcr.io/google_containers/test-webserver",
						VolumeMounts: []api.VolumeMount{
							{
								Name:      "results",
								MountPath: "/results",
							},
						},
					},
					{
						Name:    "querier",
						Image:   "gcr.io/google_containers/dnsutils",
						Command: []string{"sh", "-c", probeCmd},
						VolumeMounts: []api.VolumeMount{
							{
								Name:      "results",
								MountPath: "/results",
							},
						},
					},
				},
			},
		}

		By("submitting the pod to kubernetes")
		podClient = f.Client.Pods(f.Namespace.Name)
		defer func() {
			By("deleting the pod")
			defer GinkgoRecover()
			podClient.Delete(pod.Name, nil)
		}()
		if _, err := podClient.Create(pod); err != nil {
			Failf("Failed to create %s pod: %v", pod.Name, err)
		}

		expectNoError(f.WaitForPodRunning(pod.Name))

		By("retrieving the pod")
		pod, err = podClient.Get(pod.Name)
		if err != nil {
			Failf("Failed to get pod %s: %v", pod.Name, err)
		}

		// Try to find results for each expected name.
		By("looking for the results for each expected name")
		var failed []string
		expectNoError(wait.Poll(time.Second*2, time.Second*60, func() (bool, error) {
			failed = []string{}
			for _, name := range namesToResolve {
				for _, proto := range []string{"udp", "tcp"} {
					testCase := fmt.Sprintf("%s@%s", proto, name)
					_, err := f.Client.Get().
						Prefix("proxy").
						Resource("pods").
						Namespace(f.Namespace.Name).
						Name(pod.Name).
						Suffix("results", testCase).
						Do().Raw()
					if err != nil {
						failed = append(failed, testCase)
					}
				}
			}
			if len(failed) == 0 {
				return true, nil
			}
			Logf("Lookups using %s failed for: %v\n", pod.Name, failed)
			return false, nil
		}))
		Expect(len(failed)).To(Equal(0))

		// TODO: probe from the host, too.
		Logf("DNS probes using %s succeeded\n", pod.Name)
	})
})
| apache-2.0 |
matthid/Yaaf.Xmpp.Runtime | src/source/System.XML/System.Xml.Serialization/SoapSchemaExporter.cs | 2001 | //
// Mono.System.Xml.Serialization.SoapSchemaExporter
//
// Authors:
// Gonzalo Paniagua Javier (gonzalo@ximian.com)
// Lluis Sanchez Gual (lluis@ximian.com)
//
// (c) 2002 Ximian, Inc. (http://www.ximian.com)
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
namespace Mono.System.Xml.Serialization
{
/// <summary>
/// Thin wrapper over <see cref="XmlSchemaExporter"/> that exports mappings
/// as SOAP-encoded schemas (the <c>true</c> flag passed to the inner
/// exporter's constructor selects the encoded, rather than literal, form).
/// </summary>
public class SoapSchemaExporter
{
	XmlSchemaExporter _exporter;

	public SoapSchemaExporter (XmlSchemas schemas)
	{
		_exporter = new XmlSchemaExporter(schemas, true);
	}

	/// <summary>Exports the members mapping without the enclosing type.</summary>
	public void ExportMembersMapping (XmlMembersMapping xmlMembersMapping)
	{
		_exporter.ExportMembersMapping (xmlMembersMapping, false);
	}

	/// <summary>Exports the members mapping, optionally including the enclosing type.</summary>
	public void ExportMembersMapping (XmlMembersMapping xmlMembersMapping,
		bool exportEnclosingType)
	{
		_exporter.ExportMembersMapping (xmlMembersMapping, exportEnclosingType);
	}

	/// <summary>Exports a single type mapping.</summary>
	public void ExportTypeMapping (XmlTypeMapping xmlTypeMapping)
	{
		_exporter.ExportTypeMapping (xmlTypeMapping);
	}
}
}
| apache-2.0 |
dawutao/ManagerShop | ManagerShop.UI/ManagerShop.Repository/System/RoleRepository.cs | 1569 | using ManagerShop.Infrastructure.Core;
using ManagerShop.Data.DbContent;
using ManagerShop.Domain;
using ManagerShop.Domain.System;
using ManagerShop.DTOModel.System;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using ManagerShop.Domain.IRepository.System;
namespace ManagerShop.Data.Repository.System
{
/// <summary>
/// Entity Framework repository for <see cref="RoleEntity"/>. When constructed
/// with an external <see cref="BaseDbContext"/> it participates in that
/// context's transaction and defers <c>SaveChanges</c> to the owner;
/// otherwise each operation saves immediately.
/// </summary>
public class RoleRepository : EFRepositoryBase<RoleEntity>, IRoleRepository
{
    // NOTE(review): the name is a typo for "IsTransaction", but it is part
    // of the public surface, so it is kept unchanged here.
    public bool IsTranscation { get; set; } = false;

    public RoleRepository()
    {
        base.db = new BaseDbContext();
    }

    public RoleRepository(BaseDbContext context)
    {
        IsTranscation = true;
        base.db = context;
    }

    /// <summary>Creates a new role, stamping id, creator and creation time.</summary>
    public void CreateTo(RoleEntity model)
    {
        // NOTE(review): assumes AccountProvider<DTO_USER>.Current is non-null
        // at this point — confirm against the authentication flow.
        var currentUserId = AccountProvider<DTO_USER>.Current.M_Id;
        model.M_Id = Guid.NewGuid().ToString();
        if (!String.IsNullOrEmpty(currentUserId))
        {
            model.CreateUserId = currentUserId;
        }
        model.CreateTime = DateTime.Now;
        Insert(model);
        SaveIfStandalone();
    }

    /// <summary>Updates an existing role.</summary>
    public void UpdateTo(RoleEntity model)
    {
        Update(model);
        SaveIfStandalone();
    }

    /// <summary>Deletes a role.</summary>
    public void DeleteTo(RoleEntity model)
    {
        Delete(model);
        SaveIfStandalone();
    }

    // Flushes pending changes unless an external transaction owns the context.
    private void SaveIfStandalone()
    {
        if (!IsTranscation)
            base.db.SaveChanges();
    }
}
}
| apache-2.0 |
funtl/framework | funtl-framework-tools/alipay-sdk/src/main/java/com/funtl/framework/alipay/trade/pay/protocol/downloadbill/util/ZipInputStream.java | 12172 | /*
* Copyright 2015-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.funtl.framework.alipay.trade.pay.protocol.downloadbill.util;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.io.UnsupportedEncodingException;
import java.util.zip.CRC32;
import java.util.zip.Inflater;
import java.util.zip.ZipException;
/**
* This class implements an input stream filter for reading files in the
* ZIP file format. Includes support for both compressed and uncompressed
* entries.
*
* @author David Connelly
* @version 1.44, 06/15/07
*/
/**
 * Input stream filter for reading files in the ZIP file format, with support
 * for both compressed (DEFLATED) and uncompressed (STORED) entries.
 *
 * <p>This is a modified copy of the JDK class whose only functional change is
 * in {@link #getUTF8String(byte[], int, int)}: entry names are decoded as GBK
 * first, falling back to a manual UTF-8 decode.</p>
 */
public class ZipInputStream extends InflaterInputStream implements ZipConstants {
	// Entry currently being read, or null between entries.
	private ZipEntry entry;
	// General-purpose bit flag of the current entry's local header.
	private int flag;
	// Running CRC of the current entry's data, checked against the header CRC.
	private CRC32 crc = new CRC32();
	// Bytes left to read for a STORED entry.
	private long remaining;
	// Scratch buffer for header reads and entry draining.
	private byte[] tmpbuf = new byte[512];
	private static final int STORED = ZipEntry.STORED;
	private static final int DEFLATED = ZipEntry.DEFLATED;
	private boolean closed = false;
	// this flag is set to true after EOF has reached for
	// one entry
	private boolean entryEOF = false;
	/**
	 * Check to make sure that this stream has not been closed
	 */
	private void ensureOpen() throws IOException {
		if (closed) {
			throw new IOException("Stream closed");
		}
	}
	/**
	 * Creates a new ZIP input stream.
	 *
	 * NOTE(review): the null check runs after the super() call, so a null
	 * {@code in} is only rejected once the wrapping streams are built;
	 * this matches the original JDK ordering.
	 *
	 * @param in the actual input stream
	 */
	public ZipInputStream(InputStream in) {
		super(new PushbackInputStream(in, 512), new Inflater(true), 512);
		usesDefaultInflater = true;
		if (in == null) {
			throw new NullPointerException("in is null");
		}
	}
	/**
	 * Reads the next ZIP file entry and positions the stream at the
	 * beginning of the entry data.
	 *
	 * @return the next ZIP file entry, or null if there are no more entries
	 * @throws ZipException if a ZIP file error has occurred
	 * @throws IOException if an I/O error has occurred
	 */
	public ZipEntry getNextEntry() throws IOException {
		ensureOpen();
		if (entry != null) {
			closeEntry();
		}
		crc.reset();
		inf.reset();
		if ((entry = readLOC()) == null) {
			return null;
		}
		if (entry.method == STORED) {
			remaining = entry.size;
		}
		entryEOF = false;
		return entry;
	}
	/**
	 * Closes the current ZIP entry and positions the stream for reading the
	 * next entry. Any unread entry data is drained into a scratch buffer.
	 *
	 * @throws ZipException if a ZIP file error has occurred
	 * @throws IOException if an I/O error has occurred
	 */
	public void closeEntry() throws IOException {
		ensureOpen();
		while (read(tmpbuf, 0, tmpbuf.length) != -1) ;
		entryEOF = true;
	}
	/**
	 * Returns 0 after EOF has reached for the current entry data,
	 * otherwise always return 1.
	 * <p>
	 * Programs should not count on this method to return the actual number
	 * of bytes that could be read without blocking.
	 *
	 * @return 1 before EOF and 0 after EOF has reached for current entry.
	 * @throws IOException if an I/O error occurs.
	 */
	public int available() throws IOException {
		ensureOpen();
		if (entryEOF) {
			return 0;
		} else {
			return 1;
		}
	}
	/**
	 * Reads from the current ZIP entry into an array of bytes.
	 * If <code>len</code> is not zero, the method
	 * blocks until some input is available; otherwise, no
	 * bytes are read and <code>0</code> is returned.
	 *
	 * @param b the buffer into which the data is read
	 * @param off the start offset in the destination array <code>b</code>
	 * @param len the maximum number of bytes read
	 * @return the actual number of bytes read, or -1 if the end of the
	 * entry is reached
	 * @throws NullPointerException If <code>b</code> is <code>null</code>.
	 * @throws IndexOutOfBoundsException If <code>off</code> is negative,
	 * <code>len</code> is negative, or <code>len</code> is greater than
	 * <code>b.length - off</code>
	 * @throws ZipException if a ZIP file error has occurred
	 * @throws IOException if an I/O error has occurred
	 */
	public int read(byte[] b, int off, int len) throws IOException {
		ensureOpen();
		if (off < 0 || len < 0 || off > b.length - len) {
			throw new IndexOutOfBoundsException();
		} else if (len == 0) {
			return 0;
		}
		if (entry == null) {
			return -1;
		}
		switch (entry.method) {
			case DEFLATED:
				// Inflate via the superclass; on EOF validate the trailing
				// descriptor and CRC in readEnd().
				len = super.read(b, off, len);
				if (len == -1) {
					readEnd(entry);
					entryEOF = true;
					entry = null;
				} else {
					crc.update(b, off, len);
				}
				return len;
			case STORED:
				// Copy raw bytes, bounded by the declared entry size.
				if (remaining <= 0) {
					entryEOF = true;
					entry = null;
					return -1;
				}
				if (len > remaining) {
					len = (int) remaining;
				}
				len = in.read(b, off, len);
				if (len == -1) {
					throw new ZipException("unexpected EOF");
				}
				crc.update(b, off, len);
				remaining -= len;
				if (remaining == 0 && entry.crc != crc.getValue()) {
					throw new ZipException("invalid entry CRC (expected 0x" + Long.toHexString(entry.crc) + " but got 0x" + Long.toHexString(crc.getValue()) + ")");
				}
				return len;
			default:
				throw new ZipException("invalid compression method");
		}
	}
	/**
	 * Skips specified number of bytes in the current ZIP entry.
	 *
	 * @param n the number of bytes to skip
	 * @return the actual number of bytes skipped
	 * @throws ZipException if a ZIP file error has occurred
	 * @throws IOException if an I/O error has occurred
	 * @throws IllegalArgumentException if n &lt; 0
	 */
	public long skip(long n) throws IOException {
		if (n < 0) {
			throw new IllegalArgumentException("negative skip length");
		}
		ensureOpen();
		int max = (int) Math.min(n, Integer.MAX_VALUE);
		int total = 0;
		while (total < max) {
			int len = max - total;
			if (len > tmpbuf.length) {
				len = tmpbuf.length;
			}
			len = read(tmpbuf, 0, len);
			if (len == -1) {
				entryEOF = true;
				break;
			}
			total += len;
		}
		return total;
	}
	/**
	 * Closes this input stream and releases any system resources associated
	 * with the stream.
	 *
	 * @throws IOException if an I/O error has occurred
	 */
	public void close() throws IOException {
		if (!closed) {
			super.close();
			closed = true;
		}
	}
	// Reusable buffer for entry names read in readLOC(); grown on demand.
	private byte[] b = new byte[256];
	/*
	 * Reads local file (LOC) header for next entry.
	 */
	private ZipEntry readLOC() throws IOException {
		try {
			readFully(tmpbuf, 0, LOCHDR);
		} catch (EOFException e) {
			return null;
		}
		if (get32(tmpbuf, 0) != LOCSIG) {
			return null;
		}
		// get the entry name and create the ZipEntry first
		int len = get16(tmpbuf, LOCNAM);
		int blen = b.length;
		if (len > blen) {
			do blen = blen * 2; while (len > blen);
			b = new byte[blen];
		}
		readFully(b, 0, len);
		ZipEntry e = createZipEntry(getUTF8String(b, 0, len));
		// now get the remaining fields for the entry
		flag = get16(tmpbuf, LOCFLG);
		if ((flag & 1) == 1) {
			throw new ZipException("encrypted ZIP entry not supported");
		}
		e.method = get16(tmpbuf, LOCHOW);
		e.time = get32(tmpbuf, LOCTIM);
		if ((flag & 8) == 8) {
			/* "Data Descriptor" present */
			if (e.method != DEFLATED) {
				throw new ZipException("only DEFLATED entries can have EXT descriptor");
			}
		} else {
			e.crc = get32(tmpbuf, LOCCRC);
			e.csize = get32(tmpbuf, LOCSIZ);
			e.size = get32(tmpbuf, LOCLEN);
		}
		len = get16(tmpbuf, LOCEXT);
		if (len > 0) {
			byte[] bb = new byte[len];
			readFully(bb, 0, len);
			e.setExtra(bb);
		}
		return e;
	}
	/*
	 * Fetches a UTF8-encoded String from the specified byte array.
	 *
	 * Modified from the JDK original: entry names are decoded as GBK first.
	 * NOTE(review): e.printStackTrace() should go through proper logging;
	 * left unchanged here.
	 */
	private static String getUTF8String(byte[] b, int off, int len) {
		try {
			String s = new String(b, off, len, "GBK");
			return s;
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		}
		// (translated from a garbled comment) The code below is the original
		// manual UTF-8 decoder, only reached if GBK decoding is unavailable.
		// First, count the number of characters in the sequence
		int count = 0;
		int max = off + len;
		int i = off;
		while (i < max) {
			int c = b[i++] & 0xff;
			switch (c >> 4) {
				case 0:
				case 1:
				case 2:
				case 3:
				case 4:
				case 5:
				case 6:
				case 7:
					// 0xxxxxxx
					count++;
					break;
				case 12:
				case 13:
					// 110xxxxx 10xxxxxx
					if ((int) (b[i++] & 0xc0) != 0x80) {
						throw new IllegalArgumentException();
					}
					count++;
					break;
				case 14:
					// 1110xxxx 10xxxxxx 10xxxxxx
					if (((int) (b[i++] & 0xc0) != 0x80) || ((int) (b[i++] & 0xc0) != 0x80)) {
						throw new IllegalArgumentException();
					}
					count++;
					break;
				default:
					// 10xxxxxx, 1111xxxx
					throw new IllegalArgumentException();
			}
		}
		if (i != max) {
			throw new IllegalArgumentException();
		}
		// Now decode the characters...
		char[] cs = new char[count];
		i = 0;
		while (off < max) {
			int c = b[off++] & 0xff;
			switch (c >> 4) {
				case 0:
				case 1:
				case 2:
				case 3:
				case 4:
				case 5:
				case 6:
				case 7:
					// 0xxxxxxx
					cs[i++] = (char) c;
					break;
				case 12:
				case 13:
					// 110xxxxx 10xxxxxx
					cs[i++] = (char) (((c & 0x1f) << 6) | (b[off++] & 0x3f));
					break;
				case 14:
					// 1110xxxx 10xxxxxx 10xxxxxx
					int t = (b[off++] & 0x3f) << 6;
					cs[i++] = (char) (((c & 0x0f) << 12) | t | (b[off++] & 0x3f));
					break;
				default:
					// 10xxxxxx, 1111xxxx
					throw new IllegalArgumentException();
			}
		}
		return new String(cs, 0, count);
	}
	/**
	 * Creates a new <code>ZipEntry</code> object for the specified
	 * entry name.
	 *
	 * @param name the ZIP file entry name
	 * @return the ZipEntry just created
	 */
	protected ZipEntry createZipEntry(String name) {
		return new ZipEntry(name);
	}
	/*
	 * Reads end of deflated entry as well as EXT descriptor if present,
	 * validating sizes and CRC against the inflater's counters.
	 */
	private void readEnd(ZipEntry e) throws IOException {
		int n = inf.getRemaining();
		if (n > 0) {
			// Push back bytes the inflater read past the entry's end.
			((PushbackInputStream) in).unread(buf, len - n, n);
		}
		if ((flag & 8) == 8) {
			/* "Data Descriptor" present */
			readFully(tmpbuf, 0, EXTHDR);
			long sig = get32(tmpbuf, 0);
			if (sig != EXTSIG) { // no EXTSIG present
				e.crc = sig;
				e.csize = get32(tmpbuf, EXTSIZ - EXTCRC);
				e.size = get32(tmpbuf, EXTLEN - EXTCRC);
				((PushbackInputStream) in).unread(tmpbuf, EXTHDR - EXTCRC - 1, EXTCRC);
			} else {
				e.crc = get32(tmpbuf, EXTCRC);
				e.csize = get32(tmpbuf, EXTSIZ);
				e.size = get32(tmpbuf, EXTLEN);
			}
		}
		if (e.size != inf.getBytesWritten()) {
			throw new ZipException("invalid entry size (expected " + e.size + " but got " + inf.getBytesWritten() + " bytes)");
		}
		if (e.csize != inf.getBytesRead()) {
			throw new ZipException("invalid entry compressed size (expected " + e.csize + " but got " + inf.getBytesRead() + " bytes)");
		}
		if (e.crc != crc.getValue()) {
			throw new ZipException("invalid entry CRC (expected 0x" + Long.toHexString(e.crc) + " but got 0x" + Long.toHexString(crc.getValue()) + ")");
		}
	}
	/*
	 * Reads bytes, blocking until all bytes are read.
	 */
	private void readFully(byte[] b, int off, int len) throws IOException {
		while (len > 0) {
			int n = in.read(b, off, len);
			if (n == -1) {
				throw new EOFException();
			}
			off += n;
			len -= n;
		}
	}
	/*
	 * Fetches unsigned 16-bit value from byte array at specified offset.
	 * The bytes are assumed to be in Intel (little-endian) byte order.
	 */
	private static final int get16(byte b[], int off) {
		return (b[off] & 0xff) | ((b[off + 1] & 0xff) << 8);
	}
	/*
	 * Fetches unsigned 32-bit value from byte array at specified offset.
	 * The bytes are assumed to be in Intel (little-endian) byte order.
	 */
	private static final long get32(byte b[], int off) {
		return get16(b, off) | ((long) get16(b, off + 2) << 16);
	}
}
| apache-2.0 |
DMadhuranga/projectOOSD | drugDetails.php | 3429 | <?php
session_start();
include('dbconnection.php'); //$conn
//include('User.php');
include('basicTemp.php');
include('Drug.php');

// Require an authenticated session before doing any work.
if (!isset($_SESSION['logged']) || !isset($_SESSION['user'])) {
	header('location:login.php');
	exit; // fix: without exit the rest of the page was still executed/rendered
}
$pages = $_SESSION['pages'];
$user = unserialize($_SESSION['user']);

// Untrusted request input: bound via prepared statements below
// (previously interpolated directly into SQL -> injection).
$s_num = isset($_REQUEST['serial_number']) ? $_REQUEST['serial_number'] : '';

// Look up the drug itself. Defaults keep the template safe if nothing matches.
// (The original also executed this query twice; now it runs once.)
$drug_name = '';
$type = '';
$stmt = mysqli_prepare($conn, "select * from hospital.drugs where serial_number = ?");
mysqli_stmt_bind_param($stmt, 's', $s_num);
mysqli_stmt_execute($stmt);
$result = mysqli_stmt_get_result($stmt); // requires mysqlnd
if ($result && ($row = mysqli_fetch_array($result, MYSQLI_ASSOC))) {
	$drug_name = $row['drug_name'];
	$type = $row['type'];
}
mysqli_stmt_close($stmt);

// Load all non-deleted batches for this drug.
// $batches is initialised unconditionally so the template's foreach cannot
// hit an undefined variable when the query fails.
$batches = array();
$stmt = mysqli_prepare($conn, "select * from hospital.drug_batches where (deleted=0 AND serial_number = ?)");
mysqli_stmt_bind_param($stmt, 's', $s_num);
mysqli_stmt_execute($stmt);
$res = mysqli_stmt_get_result($stmt);
if ($res) {
	while ($row = mysqli_fetch_array($res, MYSQLI_ASSOC)) {
		$batch = new drugbatch();
		$batch->setBatch_number($row['batch_number']);
		$batch->setArrival($row['arrival']);
		$batch->setExpire($row['expire']);
		$batch->setArrival_amount($row['arrival_amount']);
		$batch->setInventory_balance($row['inventory_balance']);
		$batch->setDispensary_balance($row['dispensory_balance']);
		$batch->setTotal_balance($row['total_balance']);
		$batch->setOther_department_balance($row['other_departments_balance']);
		$batches[] = $batch;
	}
}
mysqli_stmt_close($stmt);
?>
<html>
<head>
</head>
<body>
<div class='container-fluid'>
	<div class='row'>
		<div class='col-md-2 col-md-2-height1'>
			<div class="row">
				<ul class="nav nav-pills nav-stacked">
					<?php
					// Navigation entries: [0] = label, [1] = target URL.
					// All dynamic values are HTML-escaped to prevent XSS.
					foreach ($pages as $tempPag) { ?>
						<li><a href="<?php echo htmlspecialchars($tempPag[1]); ?>"><?php echo htmlspecialchars($tempPag[0]); ?></a></li>
					<?php
					}
					?>
				</ul>
			</div>
		</div>
		<div class='col-md-10'>
			<div class="row">
				<!-- Drug header and batch table -->
				<h1 class="well"><?php echo htmlspecialchars($drug_name) ?></h1>
				<h3 class="well">Serial Number : <?php echo htmlspecialchars($s_num) ?></h3>
				<table id="myTable" class="table table-striped">
					<thead>
					<tr>
						<th>Batch Number</th>
						<th>Arrival Date</th>
						<th>Expire Date</th>
						<th>Arrival Amount</th>
						<th>Inventory Amount</th>
						<th>Dispensary Balance</th>
						<th>Other Department Balance</th>
						<th>Total Balance</th>
					</tr>
					</thead>
					<tbody>
					<?php
					// One row per drug batch loaded above.
					foreach ($batches as $batch) { ?>
						<tr class="table-active">
							<td><?php echo htmlspecialchars($batch->getBatch_number()); ?></td>
							<td><?php echo htmlspecialchars($batch->getArrival()); ?></td>
							<td><?php echo htmlspecialchars($batch->getExpire()); ?></td>
							<td><?php echo htmlspecialchars($batch->getArrival_amount()); ?></td>
							<td><?php echo htmlspecialchars($batch->getInventory_balance()); ?></td>
							<td><?php echo htmlspecialchars($batch->getDispensary_balance()); ?></td>
							<td><?php echo htmlspecialchars($batch->getOther_department_balance()); ?></td>
							<td><?php echo htmlspecialchars($batch->getTotal_balance()); ?></td>
						</tr>
					<?php
					}
					?>
					</tbody>
				</table>
			</div>
		</div>
	</div>
</div>
</body>
</html>
| apache-2.0 |
yanaga/querydsl-args | querydsl-args-jsf/src/test/java/me/yanaga/querydsl/args/jsf/SingleBigDecimalArgumentConverterTest.java | 1934 | package me.yanaga.querydsl.args.jsf;
/*
* #%L
* querydsl-args-jsf
* %%
* Copyright (C) 2014 - 2015 Edson Yanaga
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import me.yanaga.querydsl.args.core.single.SingleBigDecimalArgument;
import org.testng.annotations.Test;
import javax.faces.convert.ConverterException;
import java.math.BigDecimal;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * TestNG tests for {@code SingleBigDecimalArgumentConverter}: round-trips a
 * decimal string, accepts the empty string, and rejects non-numeric input
 * with a {@link ConverterException}.
 */
public class SingleBigDecimalArgumentConverterTest {

	// Converter under test; the tests pass null for the JSF context/component,
	// so the converter evidently does not use them.
	private final SingleBigDecimalArgumentConverter converter = new SingleBigDecimalArgumentConverter();

	@Test
	public void testGetAsObject() throws Exception {
		// A valid decimal string converts and keeps its textual form.
		Object object = converter.getAsObject(null, null, "123.00");
		assertThat(object).isInstanceOf(SingleBigDecimalArgument.class);
		assertThat(object.toString()).isEqualTo("123.00");
	}

	@Test
	public void testGetAsObjectWithEmpty() throws Exception {
		// The empty string maps to an empty argument, not to null or an error.
		Object object = converter.getAsObject(null, null, "");
		assertThat(object).isInstanceOf(SingleBigDecimalArgument.class);
		assertThat(object.toString()).isEqualTo("");
	}

	@Test(expectedExceptions = ConverterException.class)
	public void testGetAsObjectWithInvalidInput() throws Exception {
		// Non-numeric input must be rejected with the JSF converter exception.
		converter.getAsObject(null, null, "abc");
	}

	@Test
	public void testGetAsString() throws Exception {
		assertThat(converter.getAsString(null, null, SingleBigDecimalArgument.of(new BigDecimal("123.321")))).isEqualTo("123.321");
	}
}
| apache-2.0 |
srhtcn/ceng-app-store | src/main/java/tr/edu/metu/ceng352/config/WebAppInitializer.java | 1511 | /**
* Created by Serhat CAN
*/
package tr.edu.metu.ceng352.config;
import org.springframework.web.filter.CharacterEncodingFilter;
import org.springframework.web.filter.DelegatingFilterProxy;
import org.springframework.web.servlet.support.AbstractAnnotationConfigDispatcherServletInitializer;
import javax.servlet.*;
/**
 * Bootstraps the Spring MVC application without a web.xml: registers the
 * root context (application, JPA, security config), the servlet context
 * (MVC config), the servlet filters, and extra init parameters.
 */
public class WebAppInitializer extends AbstractAnnotationConfigDispatcherServletInitializer {

	/** Map the DispatcherServlet to the context root. */
	@Override
	protected String[] getServletMappings() {
		return new String[]{"/"};
	}

	/** Root application context: core beans, persistence and security. */
	@Override
	protected Class<?>[] getRootConfigClasses() {
		return new Class<?>[] {ApplicationConfig.class, JpaConfig.class, SecurityConfig.class};
	}

	/** Servlet (web) context: Spring MVC configuration only. */
	@Override
	protected Class<?>[] getServletConfigClasses() {
		return new Class<?>[] {WebMvcConfig.class};
	}

	@Override
	protected Filter[] getServletFilters() {
		// Force UTF-8 on requests/responses before anything else runs.
		CharacterEncodingFilter characterEncodingFilter = new CharacterEncodingFilter();
		characterEncodingFilter.setEncoding("UTF-8");
		characterEncodingFilter.setForceEncoding(true);
		// Delegate to the Spring Security filter chain bean by its well-known name.
		DelegatingFilterProxy securityFilterChain = new DelegatingFilterProxy("springSecurityFilterChain");
		return new Filter[] {characterEncodingFilter, securityFilterChain};
	}

	@Override
	protected void customizeRegistration(ServletRegistration.Dynamic registration) {
		registration.setInitParameter("defaultHtmlEscape", "true");
		registration.setInitParameter("spring.profiles.active", "default");
	}
} | apache-2.0 |
xushaomin/appleframework | apple-commons/src/main/java/com/appleframework/logging/level/package-info.java | 132 | /**
 * Definitions of the logging interfaces organized by log level.
 *
 * @author cruise.xu
 *
 */
package com.appleframework.logging.level; | apache-2.0 |
shashidharatd/garden-linux | network/fakes/fake_configurer.go | 2667 | // This file was generated by counterfeiter
package fakes
import (
"sync"
"github.com/cloudfoundry-incubator/garden-linux/network"
)
// FakeConfigurer is a counterfeiter-generated test double for
// network.Configurer: it records the arguments of every call under a mutex
// and returns either a stubbed function's result or a canned value set via
// the *Returns methods.
// NOTE: this file is generated (see header); regenerate with counterfeiter
// rather than editing by hand.
type FakeConfigurer struct {
	ConfigureContainerStub func(*network.ContainerConfig) error
	configureContainerMutex sync.RWMutex
	configureContainerArgsForCall []struct {
		arg1 *network.ContainerConfig
	}
	configureContainerReturns struct {
		result1 error
	}
	ConfigureHostStub func(*network.HostConfig) error
	configureHostMutex sync.RWMutex
	configureHostArgsForCall []struct {
		arg1 *network.HostConfig
	}
	configureHostReturns struct {
		result1 error
	}
}
// ConfigureContainer records the call and its argument, then either invokes
// the caller-supplied stub or returns the canned result set via
// ConfigureContainerReturns.
func (fake *FakeConfigurer) ConfigureContainer(arg1 *network.ContainerConfig) error {
	fake.configureContainerMutex.Lock()
	fake.configureContainerArgsForCall = append(fake.configureContainerArgsForCall, struct {
		arg1 *network.ContainerConfig
	}{arg1})
	fake.configureContainerMutex.Unlock()
	// Idiomatic error flow: no else after return (golint indent-error-flow).
	if fake.ConfigureContainerStub != nil {
		return fake.ConfigureContainerStub(arg1)
	}
	return fake.configureContainerReturns.result1
}
// ConfigureContainerCallCount reports how many times ConfigureContainer has
// been invoked.
func (fake *FakeConfigurer) ConfigureContainerCallCount() int {
	fake.configureContainerMutex.RLock()
	count := len(fake.configureContainerArgsForCall)
	fake.configureContainerMutex.RUnlock()
	return count
}
// ConfigureContainerArgsForCall returns the argument of the i-th recorded
// ConfigureContainer call (panics if i is out of range, as the generated
// original does).
func (fake *FakeConfigurer) ConfigureContainerArgsForCall(i int) *network.ContainerConfig {
	fake.configureContainerMutex.RLock()
	arg := fake.configureContainerArgsForCall[i].arg1
	fake.configureContainerMutex.RUnlock()
	return arg
}
// ConfigureContainerReturns sets the canned result for ConfigureContainer and
// clears any previously installed stub.
func (fake *FakeConfigurer) ConfigureContainerReturns(result1 error) {
	fake.ConfigureContainerStub = nil
	fake.configureContainerReturns.result1 = result1
}
// ConfigureHost records the call and its argument, then either invokes the
// caller-supplied stub or returns the canned result set via
// ConfigureHostReturns.
func (fake *FakeConfigurer) ConfigureHost(arg1 *network.HostConfig) error {
	fake.configureHostMutex.Lock()
	fake.configureHostArgsForCall = append(fake.configureHostArgsForCall, struct {
		arg1 *network.HostConfig
	}{arg1})
	fake.configureHostMutex.Unlock()
	// Idiomatic error flow: no else after return (golint indent-error-flow).
	if fake.ConfigureHostStub != nil {
		return fake.ConfigureHostStub(arg1)
	}
	return fake.configureHostReturns.result1
}
// ConfigureHostCallCount reports how many times ConfigureHost has been invoked.
func (fake *FakeConfigurer) ConfigureHostCallCount() int {
	fake.configureHostMutex.RLock()
	count := len(fake.configureHostArgsForCall)
	fake.configureHostMutex.RUnlock()
	return count
}
// ConfigureHostArgsForCall returns the argument of the i-th recorded
// ConfigureHost call (panics if i is out of range, as the generated original
// does).
func (fake *FakeConfigurer) ConfigureHostArgsForCall(i int) *network.HostConfig {
	fake.configureHostMutex.RLock()
	arg := fake.configureHostArgsForCall[i].arg1
	fake.configureHostMutex.RUnlock()
	return arg
}
// ConfigureHostReturns sets the canned result for ConfigureHost and clears any
// previously installed stub.
func (fake *FakeConfigurer) ConfigureHostReturns(result1 error) {
	fake.ConfigureHostStub = nil
	fake.configureHostReturns.result1 = result1
}
// Compile-time check that FakeConfigurer satisfies network.Configurer.
var _ network.Configurer = new(FakeConfigurer)
| apache-2.0 |
aike/cckeyboard | js/webmidilink.js | 1462 | $(function() {
window.addEventListener("message", webMidiLinkRecv, false);
function webMidiLinkRecv(event) {
if ((typeof event.data.valueOf()) != 'string')
return;
var msg = event.data.split(",");
switch (msg[0]) {
case "link": //Level1 messages
switch (msg[1]) {
case "reqpatch":
event.source.postMessage("link,patch," + GetPatchString(), "*");
break;
case "setpatch":
SetPatchString(msg[2]);
break;
}
break;
case "midi":
switch (parseInt(msg[1], 16) & 0xf0) {
case 0x80:
synth.keyUp(parseInt(msg[2], 16));
break;
case 0x90:
var velo = parseInt(msg[3], 16);
if (velo > 0)
synth.keyDown(parseInt(msg[2], 16), velo);
else
synth.keyUp(parseInt(msg[2], 16));
break;
case 0xb0:
if (parseInt(msg[2], 16) == 0x78) {
synth.allNoteOff();
}
break;
}
break;
}
};
function GetPatchString() {
var s = '';
for (var i = 0; i < synth.controls.length; i++) {
s = s + Math.floor($(synth.controls[i])[0].value) + '/';
}
return s;
};
function SetPatchString(s) {
var a = s.split('/');
for (var i = 0; i < synth.controls.length; i++) {
$(synth.controls[i])[0].setValue(parseInt(a[i], 10), true);
}
};
function LinkReady() {
if (window.opener) {
window.opener.postMessage("link,ready", "*");
} else {
window.parent.postMessage("link,ready", "*");
}
};
LinkReady();
});
| apache-2.0 |
oswa/bianccoAdmin | BianccoAdministrator/src/main/java/com/biancco/admin/persistence/dao/PermissionDAO.java | 1265 | /**
* SOSExcellence S.A. de C.V. all rights reserved 2016.
*/
package com.biancco.admin.persistence.dao;
import java.util.List;
import com.biancco.admin.app.exception.DBException;
import com.biancco.admin.persistence.model.Permission;
/**
 * Data-access contract for {@link Permission} entities; implementations
 * handle all persistence operations (create, update, delete, read-all).
 *
 * @author SOSExcellence.
 */
public interface PermissionDAO {
	/**
	 * Persists a new permission.
	 *
	 * @param permission
	 *            The permission to save.
	 * @return The permission saved (presumably with any generated identifier
	 *         populated — TODO confirm against the implementation).
	 * @throws DBException
	 *             If a persistence problem occurs.
	 */
	Permission save(Permission permission) throws DBException;
	/**
	 * Updates an existing permission.
	 *
	 * @param permission
	 *            The permission to update.
	 * @throws DBException
	 *             If a persistence problem occurs.
	 */
	void update(Permission permission) throws DBException;
	/**
	 * Deletes a permission.
	 *
	 * @param permission
	 *            The permission to delete.
	 * @throws DBException
	 *             If a persistence problem occurs.
	 */
	void delete(Permission permission) throws DBException;
	/**
	 * Retrieves all permissions.
	 *
	 * @return A list of every stored permission.
	 * @throws DBException
	 *             If a persistence problem occurs.
	 */
	List<Permission> getAll() throws DBException;
}
| apache-2.0 |