repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
andrenpaes/killbill
api/src/main/java/org/killbill/billing/junction/BillingEvent.java
3148
/* * Copyright 2010-2011 Ning, Inc. * * Ning licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.killbill.billing.junction; import java.math.BigDecimal; import java.util.List; import java.util.Set; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.LocalDate; import org.killbill.billing.account.api.Account; import org.killbill.billing.catalog.api.BillingMode; import org.killbill.billing.catalog.api.BillingPeriod; import org.killbill.billing.catalog.api.CatalogApiException; import org.killbill.billing.catalog.api.Currency; import org.killbill.billing.catalog.api.Plan; import org.killbill.billing.catalog.api.PlanPhase; import org.killbill.billing.catalog.api.Usage; import org.killbill.billing.subscription.api.SubscriptionBaseTransitionType; import org.killbill.billing.subscription.api.SubscriptionBase; public interface BillingEvent extends Comparable<BillingEvent> { /** * @return the billCycleDay in the account timezone as seen for that subscription at that time * <p/> * Note: The billCycleDay may come from the Account, or the bundle or the subscription itself */ int getBillCycleDayLocal(); /** * @return the subscription */ SubscriptionBase getSubscription(); /** * @return the date for when that event became effective */ DateTime getEffectiveDate(); /** * @return the plan phase */ PlanPhase getPlanPhase(); /** * @return the plan */ Plan getPlan(); /** * @return the billing period for the active phase */ BillingPeriod 
getBillingPeriod(); /** * @return the description of the billing event */ String getDescription(); /** * @return the fixed price for the phase */ BigDecimal getFixedPrice(); /** * @return the recurring price for the phase */ BigDecimal getRecurringPrice(DateTime effectiveDate) throws CatalogApiException; /** * @return the currency for the account being invoiced */ Currency getCurrency(); /** * @return the transition type of the underlying subscription event that triggered this */ SubscriptionBaseTransitionType getTransitionType(); /** * @return a unique long indicating the ordering on which events got inserted on disk-- used for sorting only */ Long getTotalOrdering(); /** * @return the TimeZone of the account */ DateTimeZone getTimeZone(); /** * * @return the list of {@code Usage} section */ List<Usage> getUsages(); }
apache-2.0
HeTyDeHer/ZapovA
chapter_012-015/src/main/java/trietask/WordIndex.java
1661
package trietask; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileNotFoundException; import java.util.Scanner; import java.util.Set; /** * Нужно сделать класс WordIndex (можно создавать и другие сопутствующие классы, если это необходимо), * который по сути будет являться индексом. * Он должен позволять по заданному слову находить все вхождения (позиции) его в файле. */ public class WordIndex { private final Trie trie = new Trie(); private static final Logger logger = LoggerFactory.getLogger(WordIndex.class); /** * Load file. Read file, create trie index. * @param filename file. */ public void loadFile(String filename){ try (Scanner scanner = new Scanner(new File(filename))){ int index = 0; while (scanner.hasNextLine()) { String currentLine = scanner.nextLine(); String[] words = currentLine.split(" "); for (String word : words) { trie.addWord(word, index); } index++; } } catch (FileNotFoundException e) { logger.error(e.getMessage(), e); } } /** * Search searchWord in previously created trie. * @param searchWord searchWord * @return indexes of the searchWord. */ public Set<Integer> getIndexes4Word(String searchWord) { return trie.containsWord(searchWord); } }
apache-2.0
jitsi/jitsi
src/net/java/sip/communicator/service/protocol/OperationSetBasicTelephony.java
12621
/* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.java.sip.communicator.service.protocol; import java.text.*; import java.util.*; import net.java.sip.communicator.service.protocol.event.*; import org.jitsi.service.neomedia.recording.*; /** * An Operation Set defining all basic telephony operations such as conducting * simple calls and etc. Note that video is not considered as a part of a * supplementary operation set and if included in the service should be available * behind the basic telephony set. * * @param <T> the provider extension class like for example * <tt>ProtocolProviderServiceSipImpl</tt> or * <tt>ProtocolProviderServiceJabberImpl</tt> * * @author Emil Ivov * @author Lyubomir Marinov */ public interface OperationSetBasicTelephony<T extends ProtocolProviderService> extends OperationSet { /** * The name of the property that contains the maximum port number that we'd * like our RTP managers to bind upon. */ public static final String MAX_MEDIA_PORT_NUMBER_PROPERTY_NAME = "net.java.sip.communicator.service.protocol.MAX_MEDIA_PORT_NUMBER"; /** * The name of the property that contains the minimum port number that we'd * like our RTP managers to bind upon. 
*/ public static final String MIN_MEDIA_PORT_NUMBER_PROPERTY_NAME = "net.java.sip.communicator.service.protocol.MIN_MEDIA_PORT_NUMBER"; /** * The name of the property that contains the minimum port number that we'd * like our Video RTP managers to bind upon. */ public static final String MIN_VIDEO_PORT_NUMBER_PROPERTY_NAME = "net.java.sip.communicator.service.protocol.MIN_VIDEO_PORT_NUMBER"; /** * The name of the property that contains the maximum port number that we'd * like our Video RTP managers to bind upon. */ public static final String MAX_VIDEO_PORT_NUMBER_PROPERTY_NAME = "net.java.sip.communicator.service.protocol.MAX_VIDEO_PORT_NUMBER"; /** * The name of the property that contains the minimum port number that we'd * like our Audio RTP managers to bind upon. */ public static final String MIN_AUDIO_PORT_NUMBER_PROPERTY_NAME = "net.java.sip.communicator.service.protocol.MIN_AUDIO_PORT_NUMBER"; /** * The name of the property that contains the maximum port number that we'd * like our Audio RTP managers to bind upon. */ public static final String MAX_AUDIO_PORT_NUMBER_PROPERTY_NAME = "net.java.sip.communicator.service.protocol.MAX_AUDIO_PORT_NUMBER"; /** * The name of the property that contains the minimum port number that we'd * like our Data Channel (e.g. Pseudo TCP) managers to bind upon. */ public static final String MIN_DATA_CHANNEL_PORT_NUMBER_PROPERTY_NAME = "net.java.sip.communicator.service.protocol.MIN_DATA_CHANNEL_PORT_NUMBER"; /** * The name of the property that contains the maximum port number that we'd * like our Data Channel RTP managers to bind upon. */ public static final String MAX_DATA_CHANNEL_PORT_NUMBER_PROPERTY_NAME = "net.java.sip.communicator.service.protocol.MAX_DATA_CHANNEL_PORT_NUMBER"; /** * Reason code used to hangup peer, indicates normal hangup. */ public static final int HANGUP_REASON_NORMAL_CLEARING = 200; /** * Reason code used to hangup peer when we wait for some event * and it timeouted. 
*/ public static final int HANGUP_REASON_TIMEOUT = 408; /** * Reason code used to hangup peer if call was not encrypted. */ public static final int HANGUP_REASON_ENCRYPTION_REQUIRED = 609; /** * Reason code used to hangup peer, indicates busy here. */ public static final int HANGUP_REASON_BUSY_HERE = 486; /** * Reason code used to hangup peer, indicates internal server error. */ public static final int HANGUP_REASON_ERROR = 500; /** * Registers the specified CallListener with this provider so that it could * be notified when incoming calls are received. This method is called * by the implementation of the PhoneUI service. * @param listener the listener to register with this provider. * */ public void addCallListener(CallListener listener); /** * Removes the specified listener from the list of call listeners. * @param listener the listener to unregister. */ public void removeCallListener(CallListener listener); /** * Creates a new <tt>Call</tt> and invites a specific <tt>CallPeer</tt> to * it given by her <tt>String</tt> URI. * * @param uri the address of the callee who we should invite to a new * <tt>Call</tt> * @return a newly created <tt>Call</tt>. The specified <tt>callee</tt> is * available in the <tt>Call</tt> as a <tt>CallPeer</tt> * @throws OperationFailedException with the corresponding code if we fail * to create the call * @throws ParseException if <tt>callee</tt> is not a valid SIP address * <tt>String</tt> */ public Call createCall(String uri) throws OperationFailedException, ParseException; /** * Creates a new <tt>Call</tt> and invites a specific <tt>CallPeer</tt> * to it given by her <tt>Contact</tt>. * * @param callee the address of the callee who we should invite to a new * call * @return a newly created <tt>Call</tt>. 
The specified <tt>callee</tt> is * available in the <tt>Call</tt> as a <tt>CallPeer</tt> * @throws OperationFailedException with the corresponding code if we fail * to create the call */ public Call createCall(Contact callee) throws OperationFailedException; /** * Creates a new <tt>Call</tt> and invites a specific <tt>CallPeer</tt> to * it given by her <tt>String</tt> URI. * * @param uri the address of the callee who we should invite to a new * <tt>Call</tt> * @param conference the <tt>CallConference</tt> in which the newly-created * <tt>Call</tt> is to participate * @return a newly created <tt>Call</tt>. The specified <tt>callee</tt> is * available in the <tt>Call</tt> as a <tt>CallPeer</tt> * @throws OperationFailedException with the corresponding code if we fail * to create the call * @throws ParseException if <tt>callee</tt> is not a valid SIP address * <tt>String</tt> */ public Call createCall(String uri, CallConference conference) throws OperationFailedException, ParseException; /** * Creates a new <tt>Call</tt> and invites a specific <tt>CallPeer</tt> * to it given by her <tt>Contact</tt>. * * @param callee the address of the callee who we should invite to a new * call * @param conference the <tt>CallConference</tt> in which the newly-created * <tt>Call</tt> is to participate * @return a newly created <tt>Call</tt>. The specified <tt>callee</tt> is * available in the <tt>Call</tt> as a <tt>CallPeer</tt> * @throws OperationFailedException with the corresponding code if we fail * to create the call */ public Call createCall(Contact callee, CallConference conference) throws OperationFailedException; /** * Creates a new <tt>Call</tt> and sends an invite to the conference * described in <tt>cd</tt>. A <tt>CallPeer</tt> corresponding the * <tt>cd</tt> will be created and added to the returned <tt>Call</tt> * * @param cd the conference to send an invite to * @param chatRoom the chat room associated with the call. 
* * @return a newly created <tt>Call</tt>, to which a <tt>CallPeer</tt> * corresponding to <tt>cd</tt> has been added. */ public Call createCall(ConferenceDescription cd, ChatRoom chatRoom) throws OperationFailedException; /** * Indicates a user request to answer an incoming call from the specified * CallPeer. * @param peer the call peer that we'd like to answer. * @throws OperationFailedException with the corresponding code if we * encounter an error while performing this operation. */ public void answerCallPeer(CallPeer peer) throws OperationFailedException; /** * Puts the specified CallPeer "on hold". In other words incoming * media flows are not played and outgoing media flows are either muted or * stopped, without actually interrupting the session. * @param peer the peer that we'd like to put on hold. * @throws OperationFailedException with the corresponding code if we * encounter an error while performing this operation. */ public void putOnHold(CallPeer peer) throws OperationFailedException; /** * Resumes communication with a call peer previously put on hold. If * the specified peer is not "On Hold" at the time putOffHold is * called, the method has no effect. * * @param peer the call peer to put on hold. * @throws OperationFailedException with the corresponding code if we * encounter an error while performing this operation */ public void putOffHold(CallPeer peer) throws OperationFailedException; /** * Indicates a user request to end a call with the specified call * peer. * @param peer the peer that we'd like to hang up on. * @throws OperationFailedException with the corresponding code if we * encounter an error while performing this operation. */ public void hangupCallPeer(CallPeer peer) throws OperationFailedException; /** * Ends the call with the specified <tt>peer</tt>. * * @param peer the peer that we'd like to hang up on. * @param reasonCode indicates if the hangup is following to a call failure or * simply a disconnect indicate by the reason. 
* @param reason the reason of the hangup. If the hangup is due to a call * failure, then this string could indicate the reason of the failure * * @throws OperationFailedException if we fail to terminate the call. */ public void hangupCallPeer(CallPeer peer, int reasonCode, String reason) throws OperationFailedException; /** * Returns an iterator over all currently active calls. * @return Iterator */ public Iterator<? extends Call> getActiveCalls(); /** * Sets the mute state of the <tt>Call</tt>. * <p> * Muting audio streams sent from the call is implementation specific * and one of the possible approaches to it is sending silence. * </p> * * @param call the <tt>Call</tt> whos mute state is set * @param mute <tt>true</tt> to mute the call streams being sent to * <tt>peers</tt>; otherwise, <tt>false</tt> */ public void setMute(Call call, boolean mute); /** * Returns the protocol provider that this operation set belongs to. * * @return a reference to the <tt>ProtocolProviderService</tt> that created * this operation set. */ public T getProtocolProvider(); /** * Creates a new <tt>Recorder</tt> which is to record the specified * <tt>Call</tt> (into a file which is to be specified when starting the * returned <tt>Recorder</tt>). * * @param call the <tt>Call</tt> which is to be recorded by the returned * <tt>Recorder</tt> when the latter is started * @return a new <tt>Recorder</tt> which is to record the specified * <tt>call</tt> (into a file which is to be specified when starting the * returned <tt>Recorder</tt>) * @throws OperationFailedException if anything goes wrong while creating * the new <tt>Recorder</tt> for the specified <tt>call</tt> */ public Recorder createRecorder(Call call) throws OperationFailedException; }
apache-2.0
patani1/PyHDB
contest/run_tests.py
471
import socket import time import pytest import sys def main(): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) while True: result = sock.connect_ex(('127.0.0.1', 30015)) if result == 0: break else: print("hana not available yet, retrying...") time.sleep(10) rc = pytest.main(sys.argv[1:]) if rc != 0: sys.exit("pytest run has failed") if __name__ == "__main__": main()
apache-2.0
MarcoCaballero/FFB_DAW
SPRING-APP-TRADITIONAL+API/src/main/java/com/ffbet/fase3/repositories/BetSportMatchRepository.java
237
package com.ffbet.fase3.repositories; import org.springframework.data.jpa.repository.JpaRepository; import com.ffbet.fase3.domain.BetSportMatch; public interface BetSportMatchRepository extends JpaRepository<BetSportMatch, Long> { }
apache-2.0
ksuksu1601/java_training
addressbook_web_tests/src/test/java/com/ksu/addressbook/appmanager/DbHelper.java
2212
package com.ksu.addressbook.appmanager; import com.ksu.addressbook.model.ContactData; import com.ksu.addressbook.model.Contacts; import com.ksu.addressbook.model.GroupData; import com.ksu.addressbook.model.Groups; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.boot.MetadataSources; import org.hibernate.boot.registry.StandardServiceRegistry; import org.hibernate.boot.registry.StandardServiceRegistryBuilder; import java.util.List; /** * Created by Ksu on 03.04.2016. */ public class DbHelper { private final SessionFactory sessionFactory; public DbHelper() { final StandardServiceRegistry registry = new StandardServiceRegistryBuilder().configure().build(); sessionFactory = new MetadataSources(registry).buildMetadata().buildSessionFactory(); } public Groups groups(){ Session session = sessionFactory.openSession(); session.beginTransaction(); List<GroupData> result = session.createQuery("from GroupData").list(); session.getTransaction().commit(); session.close(); return new Groups(result); } public GroupData groupById(int id){ Session session = sessionFactory.openSession(); session.beginTransaction(); GroupData result = (GroupData) session.createQuery("from GroupData where group_id = " + id).uniqueResult(); session.getTransaction().commit(); session.close(); return result; } public Contacts contacts(){ Session session = sessionFactory.openSession(); session.beginTransaction(); List<ContactData> result = session.createQuery("from ContactData where deprecated = '0000-00-00'").list(); session.getTransaction().commit(); session.close(); return new Contacts(result); } public ContactData contactById(int id){ Session session = sessionFactory.openSession(); session.beginTransaction(); ContactData result = (ContactData) session.createQuery("from ContactData where deprecated = '0000-00-00' and id = " + id).uniqueResult(); session.getTransaction().commit(); session.close(); return result; } }
apache-2.0
ifgeny87/pem_calc
frontend/js/main.js
775
/** js/main.js **/ 'use strict'; window.PEM = { updateTitle: function (title) { $('#header').html(title); }, updateMenu: function () { $('#mainmenu a').removeClass('selected'); // подсвечиваю нужную кнопку меню var link = location.href.split('/')[3]; $('#mainmenu a[href="/' + link + '"]').addClass('selected'); }, init: function() { $(PEM.onLoad); // обнволяется меню PEM.updateMenu(); // показываю кнопку сброса если не загрузилась страница setTimeout(function() { $('#wait-then-reload').show(); }, 1000); }, onLoad: function() { } } PEM.init();
apache-2.0
yousky/springboot
src/main/java/org/bwyou/springboot/model/viewmodel/IViewModel.java
180
package org.bwyou.springboot.model.viewmodel; public interface IViewModel<TEntityVM, TEntitySrc> { TEntityVM LoadModel(TEntitySrc baseModel, boolean recursive, String sort); }
apache-2.0
sxjscience/tvm
tests/python/relay/test_pass_alter_op_layout.py
47223
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """Test alter op layout pass""" import pytest import tvm from tvm import relay from tvm.relay import transform, analysis from tvm.relay.testing.temp_op_attr import TempOpAttr from tvm.relay.testing import run_infer_type import numpy as np import tvm.testing def run_opt_pass(expr, passes): passes = passes if isinstance(passes, list) else [passes] mod = tvm.IRModule.from_expr(expr) seq = tvm.transform.Sequential(passes) with tvm.transform.PassContext(opt_level=3): mod = seq(mod) entry = mod["main"] return entry if isinstance(expr, relay.Function) else entry.body def test_alter_op(): """Test directly replacing an operator with a new one""" def before(): x = relay.var("x", shape=(1, 64, 56, 56)) weight = relay.var("weight", shape=(64, 64, 3, 3)) y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1)) y = relay.nn.relu(y) y = relay.Function([x, weight], y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs weight = relay.multiply(weight, relay.const(2.0, "float32")) return relay.nn.conv2d(data, weight, **attrs) def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) weight = relay.var("weight", shape=(64, 64, 3, 3)) y = relay.nn.conv2d( x, 
relay.multiply(weight, relay.const(2.0, "float32")), channels=64, kernel_size=(3, 3), padding=(1, 1), ) y = relay.nn.relu(y) y = relay.Function([x, weight], y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_return_none(): """Test doing nothing by returning 'None' """ def before(): x = relay.var("x", shape=(1, 64, 56, 56)) y = relay.nn.global_max_pool2d(x) y = relay.Function([x], y) return y called = [False] def alter_conv2d(attrs, inputs, tinfos, out_type): called[0] = True return None with TempOpAttr("nn.global_max_pool2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(before(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) assert called[0] def test_alter_layout(): """Test alternating the layout of a conv2d. The layout of broadcast operators and the weight should be changed accordingly. 
""" def before(): x = relay.var("x", shape=(1, 64, 56, 56)) bias = relay.var("bias") weight = relay.var("weight") y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1)) y = relay.nn.bias_add(y, bias) # a useless tuple, which will be eliminated y = relay.Tuple([y])[0] y = relay.nn.relu(y) y = relay.nn.max_pool2d(y, pool_size=(2, 2)) y = relay.cast(y, "int32") y = relay.nn.batch_flatten(y) y = relay.Function(analysis.free_vars(y), y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" new_attrs["kernel_layout"] = "OIHW16i" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) bias = relay.var("bias", shape=(64,)) weight = relay.var("weight", shape=(64, 64, 3, 3)) y = relay.layout_transform(x, "NCHW", "NCHW16c") w = relay.layout_transform(weight, "OIHW", "OIHW16i") y = relay.nn.conv2d( y, w, channels=64, kernel_size=(3, 3), padding=(1, 1), kernel_layout="OIHW16i", data_layout="NCHW16c", ) b = relay.expand_dims(bias, axis=1, num_newaxis=2) b = relay.expand_dims(b, axis=0, num_newaxis=1) b = relay.layout_transform(b, "NCHW", "NCHW16c") y = relay.add(y, b) y = relay.nn.relu(y) y = relay.nn.max_pool2d(y, pool_size=(2, 2), layout="NCHW16c") y = relay.cast(y, "int32") y = relay.layout_transform(y, "NCHW16c", "NCHW") y = relay.nn.batch_flatten(y) y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()]) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_lrn(): """Test alternating the layout of a conv2d. The layout of broadcast operators and the weight should be changed accordingly. 
""" def before(): x = relay.var("x", shape=(1, 64, 56, 56)) bias = relay.var("bias") weight = relay.var("weight") y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1)) y = relay.nn.max_pool2d(y, pool_size=(2, 2)) y = relay.nn.lrn(y) y = relay.Function(analysis.free_vars(y), y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" new_attrs["kernel_layout"] = "OIHW16i" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) bias = relay.var("bias", shape=(64,)) weight = relay.var("weight", shape=(64, 64, 3, 3)) y = relay.layout_transform(x, "NCHW", "NCHW16c") w = relay.layout_transform(weight, "OIHW", "OIHW16i") y = relay.nn.conv2d( y, w, channels=64, kernel_size=(3, 3), padding=(1, 1), kernel_layout="OIHW16i", data_layout="NCHW16c", ) y = relay.nn.max_pool2d(y, pool_size=(2, 2), layout="NCHW16c") y = relay.layout_transform(y, "NCHW16c", "NCHW") y = relay.nn.lrn(y) y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()]) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_dual_path(): """ Test alternating the layout with two outputs. One path continues to use the new layout while one path fall backs to old layout. 
""" def before(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") weight2 = relay.var("weight2") y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1)) y = relay.nn.relu(y) y1 = relay.nn.conv2d(y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1)) y1 = relay.nn.relu(y1) y2 = relay.nn.batch_flatten(y) ret = relay.Tuple([y1, y2]) y = relay.Function(analysis.free_vars(ret), ret) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") weight2 = relay.var("weight2") y = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) y = relay.nn.relu(y) y1 = relay.nn.conv2d( y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) y1 = relay.nn.relu(y1) y1 = relay.layout_transform(y1, "NCHW16c", "NCHW") y2 = relay.layout_transform(y, "NCHW16c", "NCHW") y2 = relay.nn.batch_flatten(y2) ret = relay.Tuple([y1, y2]) y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_resnet(): """Test alternating the layout of a residual block This also tests the elimination of duplicated transformation. If a same transformation applies to a same node twice, only one transformation will be created. 
""" def before(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") weight2 = relay.var("weight2") y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1)) y = relay.nn.relu(y) y2 = relay.nn.conv2d(x, weight2, channels=32, kernel_size=(1, 1)) y2 = relay.nn.relu(y2) y = y + y2 y = relay.nn.global_max_pool2d(y) return relay.Function(analysis.free_vars(y), y) def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") weight2 = relay.var("weight2") x = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) y = relay.nn.relu(y) y2 = relay.nn.conv2d(x, weight2, channels=32, kernel_size=(1, 1), data_layout="NCHW16c") y2 = relay.nn.relu(y2) y = y + y2 y = relay.nn.global_max_pool2d(y, layout="NCHW16c") y = relay.layout_transform(y, "NCHW16c", "NCHW") return relay.Function(analysis.free_vars(y), y) with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_broadcast_op(): """Test boradcast operators """ def before(): x = relay.var("x", shape=(1, 64, 56, 56)) bias = relay.var("bias", shape=(64,)) scale = relay.var("scale", shape=(64, 1, 1)) weight = relay.var("weight") y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1)) y = relay.nn.bias_add(y, bias) # test broadcasting to lhs y = relay.multiply(scale, y) # test broadcasting to rhs y = relay.Function(analysis.free_vars(y), y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) 
new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) bias = relay.var("bias", shape=(64,)) scale = relay.var("scale", shape=(64, 1, 1)) weight = relay.var("weight") x = relay.layout_transform(x, "NCHW", "NCHW16c") bias = relay.expand_dims(bias, 1, 2) bias = relay.expand_dims(bias, 0, 1) bias = relay.layout_transform(bias, "NCHW", "NCHW16c") scale = relay.expand_dims(scale, 0, 1) scale = relay.layout_transform(scale, "NCHW", "NCHW16c") y = relay.nn.conv2d( x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) y = relay.add(y, bias) # test broadcasting to lhs y = relay.multiply(scale, y) # test broadcasting to rhs y = relay.layout_transform(y, "NCHW16c", "NCHW") y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()]) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_broadcast_scalar_op(): """Test alternating the layout of a conv2d. The layout of broadcast operators and the weight should be changed accordingly. 
""" def before(): x = relay.var("x", shape=(1, 500, 500, 64)) kernel = relay.var("kernel", shape=(3, 3, 64, 64), dtype="float32") bias = relay.var("bias", shape=(64,)) multiplier1 = relay.var("multiplier1", shape=(1,), dtype="float32") multiplier2 = relay.var("multiplier2", shape=(1, 1), dtype="float32") y = relay.nn.conv2d(x, kernel, data_layout="NHWC", kernel_layout="HWIO", kernel_size=(3, 3)) y = relay.add(bias, y) y = relay.nn.relu(y) y = relay.multiply(multiplier1, y) y = relay.multiply(y, multiplier2) y = relay.Function(analysis.free_vars(y), y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 500, 500, 64)) kernel = relay.var("kernel", shape=(3, 3, 64, 64), dtype="float32") bias = relay.var("bias", shape=(64,)) multiplier1 = relay.var("multiplier1", shape=(1,), dtype="float32") multiplier2 = relay.var("multiplier2", shape=(1, 1), dtype="float32") b = relay.expand_dims(bias, axis=0, num_newaxis=3) b = relay.layout_transform(b, "NHWC", "NCHW16c") y = relay.layout_transform(x, "NHWC", "NCHW16c") y = relay.nn.conv2d( y, kernel, data_layout="NCHW16c", kernel_layout="HWIO", kernel_size=(3, 3) ) y = relay.add(b, y) y = relay.nn.relu(y) y = relay.multiply(multiplier1, y) y = relay.multiply(y, multiplier2) y = relay.layout_transform(y, "NCHW16c", "NHWC") y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()]) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_scalar(): """Test alternating the layout of a conv2d. The layout of broadcast operators and the weight should be changed accordingly. 
""" def before(): x = relay.var("x", shape=(1, 64, 56, 56)) weight = relay.var("weight") y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1)) y = relay.add(y, relay.const(1, "float32")) y = relay.Function(analysis.free_vars(y), y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) w = relay.var("weight") y = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( y, w, channels=64, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) y = relay.add(y, relay.const(1.0, "float32")) y = relay.layout_transform(y, "NCHW16c", "NCHW") y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()]) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_scalar_regression(): """regression test where scalar fails""" def before(): x = relay.var("x", shape=(1, 56, 56, 64)) weight = relay.var("weight", shape=(3, 3, 64, 16)) bias = relay.var("bias", shape=(1, 1, 1, 16)) y = relay.nn.conv2d( x, weight, channels=16, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC", kernel_layout="HWIO", ) y = relay.add(y, bias) mean = relay.mean(y, axis=3, exclude=True) var = relay.variance(y, axis=3, exclude=True) gamma = relay.var("gamma") beta = relay.var("beta") y = relay.nn.batch_norm(y, gamma, beta, mean, var, axis=3) y = y[0] y = relay.Function(analysis.free_vars(y), y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 56, 56, 64)) weight = 
relay.var("weight", shape=(3, 3, 64, 16)) bias = relay.var("bias", shape=(1, 1, 1, 16)) x = relay.layout_transform(x, src_layout="NHWC", dst_layout="NCHW") x = relay.layout_transform(x, src_layout="NCHW", dst_layout="NCHW16c") weight = relay.layout_transform(weight, src_layout="HWIO", dst_layout="OIHW") y = relay.nn.conv2d( x, weight, channels=16, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) bias = relay.layout_transform(bias, src_layout="NHWC", dst_layout="NCHW") bias = relay.layout_transform(bias, src_layout="NCHW", dst_layout="NCHW16c") add = relay.add(y, bias) y = relay.layout_transform(add, src_layout="NCHW16c", dst_layout="NCHW") y = relay.layout_transform(y, src_layout="NCHW", dst_layout="NHWC") mean = relay.mean(y, axis=3, exclude=True) var = relay.variance(y, axis=3, exclude=True) denom = relay.const(1.0) / relay.sqrt(var + relay.const(1e-05)) gamma = relay.var("gamma", shape=(16,)) denom = denom * gamma denom_expand1 = relay.expand_dims(denom, axis=1, num_newaxis=2) denom_expand2 = relay.expand_dims(denom_expand1, axis=0) denom_nchwc16 = relay.layout_transform( denom_expand2, src_layout="NCHW", dst_layout="NCHW16c" ) out = add * denom_nchwc16 beta = relay.var("beta", shape=(16,)) numerator = (-mean) * denom + beta numerator_expand1 = relay.expand_dims(numerator, axis=1, num_newaxis=2) numerator_expand2 = relay.expand_dims(numerator_expand1, axis=0) numerator_nchwc16 = relay.layout_transform( numerator_expand2, src_layout="NCHW", dst_layout="NCHW16c" ) out = out + numerator_nchwc16 out = relay.layout_transform(out, src_layout="NCHW16c", dst_layout="NCHW") y = relay.layout_transform(out, src_layout="NCHW", dst_layout="NHWC") y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() desired_layouts = {"nn.conv2d": ["NCHW", "default"], "nn.batch_norm": ["NHWC", "default"]} a = run_opt_pass( a, [ transform.InferType(), relay.transform.ConvertLayout(desired_layouts), 
transform.SimplifyInference(), transform.CanonicalizeOps(), transform.AlterOpLayout(), ], ) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_concatenate(): """ NCHW, NHWC and corner case concatenate layout transform.""" def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) # NCHW layout transformation. def before_nchw(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") weight2 = relay.var("weight2") y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1)) y1 = relay.nn.conv2d(y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1)) ret = relay.concatenate([y, y1], axis=1) y = relay.Function(analysis.free_vars(ret), ret) return y def expected_nchw(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") weight2 = relay.var("weight2") y = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) y1 = relay.nn.conv2d( y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) ret = relay.concatenate([y, y1], axis=1) ret = relay.layout_transform(ret, "NCHW16c", "NCHW") y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nchw() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nchw(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) # NHWC layout transformation. 
def before_nhwc(): x = relay.var("x", shape=(1, 56, 56, 64)) weight1 = relay.var("weight1") weight2 = relay.var("weight2") y = relay.nn.conv2d( x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC" ) y1 = relay.nn.conv2d( y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC" ) ret = relay.concatenate([y, y1], axis=3) y = relay.Function(analysis.free_vars(ret), ret) return y def expected_nhwc(): x = relay.var("x", shape=(1, 56, 56, 64)) weight1 = relay.var("weight1") weight2 = relay.var("weight2") y = relay.layout_transform(x, "NHWC", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) y1 = relay.nn.conv2d( y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) ret = relay.concatenate([y, y1], axis=1) ret = relay.layout_transform(ret, "NCHW16c", "NHWC") y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nhwc() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nhwc(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_nchw_upsamping_op(): """Test upsamping operators """ def before(): x = relay.var("x", shape=(1, 32, 28, 28)) weight = relay.var("weight", shape=(32, 32, 3, 3)) y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1)) y = relay.nn.upsampling(y, scale_h=2, scale_w=2) y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2)) y = relay.Function(analysis.free_vars(y), y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 32, 28, 28)) weight = relay.var("weight") x = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( x, weight, 
channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) y = relay.nn.upsampling(y, scale_h=2, scale_w=2, layout="NCHW16c") y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2), layout="NCHW16c") y = relay.layout_transform(y, "NCHW16c", "NCHW") y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) @tvm.testing.uses_gpu def test_alter_layout_strided_slice(): """Test rewriting strided_slice during alter_iop_layout""" def before(): x = relay.var("x", shape=(1, 32, 28, 28)) weight = relay.var("weight", shape=(32, 32, 3, 3)) y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1)) y = relay.strided_slice(y, begin=[0, 16], end=[1, 33], strides=[1, 1]) y = relay.Function(analysis.free_vars(y), y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW4c" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 32, 28, 28)) weight = relay.var("weight", shape=(32, 32, 3, 3)) weight = relay.layout_transform(weight, "OIHW", "OIHW4i4o") x = relay.layout_transform(x, "NCHW", "NCHW4c") y = relay.op.nn.contrib_conv2d_nchwc( x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW4c" ) y = relay.strided_slice(y, begin=[0, 4], end=[1, 21], strides=[1, 1]) y = relay.layout_transform(y, "NCHW4c", "NCHW") y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() b = run_opt_pass(expected(), transform.InferType()) # Verify inference result mod_before = tvm.IRModule() mod_new = tvm.IRModule() mod_before["main"] = a mod_new["main"] = b mod_before = transform.InferType()(mod_before) mod_new = 
transform.InferType()(mod_new) with relay.build_config(opt_level=3): for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug", "vm"]: ex_before = relay.create_executor(kind, mod=mod_before, ctx=ctx, target=target) ex_new = relay.create_executor(kind, mod=mod_new, ctx=ctx, target=target) np_data = np.random.uniform(size=(1, 32, 28, 28)).astype("float32") np_weight = np.random.uniform(size=(32, 32, 3, 3)).astype("float32") result_before = ex_before.evaluate()(np_data, np_weight) result_new = ex_new.evaluate()(np_data, np_weight) tvm.testing.assert_allclose( result_before.asnumpy(), result_new.asnumpy(), rtol=1e-5, atol=1e-5 ) def test_alter_layout_depthwise_conv2d(): """Test depthwise_conv2d operator""" def before(): x = relay.var("x", shape=(1, 32, 56, 56)) w = relay.var("w", shape=(32, 1, 3, 3)) y = relay.nn.conv2d(x, w, padding=(1, 1), channels=32, kernel_size=(3, 3), groups=32) y = relay.Function(analysis.free_vars(y), y) return y from tvm import topi def alter_conv2d(attrs, inputs, tinfos, out_type): with tvm.target.Target("llvm"): return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type) def expected(): x = relay.var("x", shape=(1, 32, 56, 56)) w = relay.var("w", shape=(32, 1, 3, 3)) x = relay.layout_transform(x, "NCHW", "NCHW8c") w = relay.layout_transform(w, "OIHW", "OIHW1i8o") y = relay.nn.contrib_depthwise_conv2d_nchwc( x, w, padding=(1, 1, 1, 1), channels=32, kernel_size=(3, 3), groups=32, data_layout="NCHW8c", kernel_layout="OIHW1i8o", out_layout="NCHW8c", ) y = relay.layout_transform(y, "NCHW8c", "NCHW") y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()]) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b) def test_alter_layout_prelu(): """Test PRelu operator""" def before(): x = relay.var("x", shape=(1, 64, 56, 56)) weight = 
relay.var("weight") alpha = relay.var("alpha", relay.IncompleteType()) y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1)) y = relay.nn.prelu(y, alpha) y = relay.Function(analysis.free_vars(y), y) return y def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) w = relay.var("weight") alpha = relay.var("alpha", relay.IncompleteType()) y = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( y, w, channels=64, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) y = relay.layout_transform(y, "NCHW16c", "NCHW") y = relay.nn.prelu(y, alpha) y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()]) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b) def test_alter_layout_pad(): """ Check NCHW, NHWC and corner case for pad layout conversion""" def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) # Check NCHW conversion. 
def before_nchw(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1)) ret = relay.nn.pad(y, pad_width=((0, 0), (0, 0), (1, 1), (1, 1))) y = relay.Function(analysis.free_vars(ret), ret) return y def expected_nchw(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") y = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) ret = relay.nn.pad(y, pad_width=((0, 0), (0, 0), (1, 1), (1, 1), (0, 0))) ret = relay.layout_transform(ret, "NCHW16c", "NCHW") y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nchw() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nchw(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) # Check NHWC conversion. 
def before_nhwc(): x = relay.var("x", shape=(1, 56, 56, 64)) weight1 = relay.var("weight1") y = relay.nn.conv2d( x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC" ) ret = relay.nn.pad(y, pad_width=((0, 0), (1, 1), (1, 1), (0, 0))) y = relay.Function(analysis.free_vars(ret), ret) return y def expected_nhwc(): x = relay.var("x", shape=(1, 56, 56, 64)) weight1 = relay.var("weight1") y = relay.layout_transform(x, "NHWC", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) ret = relay.nn.pad(y, pad_width=((0, 0), (0, 0), (1, 1), (1, 1), (0, 0))) ret = relay.layout_transform(ret, "NCHW16c", "NHWC") y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nhwc() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nhwc(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) # Check that conversion does not happen when padding along split axis. 
def before(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1)) ret = relay.nn.pad(y, pad_width=((0, 0), (1, 1), (1, 1), (1, 1))) y = relay.Function(analysis.free_vars(ret), ret) return y def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") y = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) ret = relay.layout_transform(y, "NCHW16c", "NCHW") ret = relay.nn.pad(ret, pad_width=((0, 0), (1, 1), (1, 1), (1, 1))) y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_pool(): """ Check NCHW, NHWC pool layout conversion""" def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) # Check NCHW conversion. 
def before_nchw(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1)) ret = relay.nn.avg_pool2d(y, pool_size=(1, 1)) y = relay.Function(analysis.free_vars(ret), ret) return y def expected_nchw(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") y = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) ret = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout="NCHW16c") ret = relay.layout_transform(ret, "NCHW16c", "NCHW") y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nchw() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nchw(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) # Check NHWC conversion. def before_nhwc(): x = relay.var("x", shape=(1, 56, 56, 64)) weight1 = relay.var("weight1") y = relay.nn.conv2d( x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC" ) ret = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout="NHWC") y = relay.Function(analysis.free_vars(ret), ret) return y def expected_nhwc(): x = relay.var("x", shape=(1, 56, 56, 64)) weight1 = relay.var("weight1") y = relay.layout_transform(x, "NHWC", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) ret = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout="NCHW16c") ret = relay.layout_transform(ret, "NCHW16c", "NHWC") y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nhwc() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nhwc(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_sum(): """ 
Check NCHW, NHWC sum layout conversion""" def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs new_attrs = dict(attrs) new_attrs["data_layout"] = "NCHW16c" return relay.nn.conv2d(data, weight, **new_attrs) # Check NCHW conversion. def before_nchw(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1)) ret = relay.sum(y, axis=1, keepdims=True) y = relay.Function(analysis.free_vars(ret), ret) return y def expected_nchw(): x = relay.var("x", shape=(1, 64, 56, 56)) weight1 = relay.var("weight1") y = relay.layout_transform(x, "NCHW", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) ret = relay.layout_transform(y, "NCHW16c", "NCHW") ret = relay.sum(ret, axis=[1], keepdims=True) y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nchw() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nchw(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) # Check NHWC conversion. 
def before_nhwc(): x = relay.var("x", shape=(1, 56, 56, 64)) weight1 = relay.var("weight1") y = relay.nn.conv2d( x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC" ) ret = relay.sum(y, axis=3, keepdims=True) y = relay.Function(analysis.free_vars(ret), ret) return y def expected_nhwc(): x = relay.var("x", shape=(1, 56, 56, 64)) weight1 = relay.var("weight1") y = relay.layout_transform(x, "NHWC", "NCHW16c") y = relay.nn.conv2d( y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c" ) ret = relay.layout_transform(y, "NCHW16c", "NCHW") ret = relay.sum(ret, axis=[1], keepdims=True) ret = relay.layout_transform(ret, "NCHW", "NHWC") y = relay.Function(analysis.free_vars(ret), ret) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nhwc() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nhwc(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_nhwc_arm(): """ Check that AlterOplayout does not alter NHWC data layout. """ def alter_conv2d(attrs, inputs, tinfos, out_type): from tvm import topi with tvm.target.Target("llvm -device=arm_cpu"): return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type) # Check NHWC conversion. 
def before_nhwc(): x = relay.var("x", shape=(1, 56, 56, 64)) weight1 = relay.var("weight1", shape=(3, 3, 64, 64)) weight2 = relay.var("weight2", shape=(3, 3, 64, 64)) y = relay.nn.conv2d( x, weight1, channels=64, kernel_size=(3, 3), data_layout="NHWC", kernel_layout="HWIO" ) y = relay.nn.relu(y) y = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout="NHWC") y = relay.nn.conv2d( y, weight2, channels=64, kernel_size=(3, 3), data_layout="NHWC", kernel_layout="HWIO" ) y = relay.nn.relu(y) y = relay.Function(analysis.free_vars(y), y) return y def expected_nhwc(): return before_nhwc() with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nhwc() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nhwc(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_layout_nhwc_int8_aarch64(): """ Check that AlterOplayout does not alter NHWC data layout. """ from tvm import autotvm expected_workload_shape = (20, 42, 4, 16) # We use Int8Fallback to disable the fallback flag # and to test the new workload produced during the pass class Int8Fallback(autotvm.FallbackContext): def _query_inside(self, target, workload): key = (target, workload) if key in self.memory: return self.memory[key] cfg = autotvm.task.space.FallbackConfigEntity() cfg.is_fallback = False cfg.cost = 0 self.memory[key] = cfg return cfg def update(self, target, workload, cfg): key = (str(target), workload) assert workload[2][1] == expected_workload_shape assert workload[0] == "conv2d_NHWC_quantized_interleaved_without_transform.arm_cpu" self.memory[key] = cfg def alter_conv2d(attrs, inputs, tinfos, out_type): from tvm import topi with tvm.target.Target("llvm -device=arm_cpu -mtriple=aarch64-linux-gnu"): with Int8Fallback(): tmp = topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type) return tmp # Check NHWC conversion. 
def before_nhwc_int8(): x = relay.var("x", shape=(1, 56, 56, 73), dtype="int8") weight = relay.var("weight1", shape=(3, 3, 73, 79), dtype="int8") y = relay.nn.conv2d( x, weight, channels=79, kernel_size=(3, 3), data_layout="NHWC", kernel_layout="HWIO", out_dtype="int32", ) y = relay.Function(analysis.free_vars(y), y) return y def expected_nhwc_int8(): x = relay.var("x", shape=(1, 56, 56, 73), dtype="int8") weight = relay.var("weight1", shape=(3, 3, 73, 79), dtype="int8") tile_rows = 4 tile_cols = 16 weight_transformed = relay.nn.contrib_conv2d_gemm_weight_transform( weight, tile_rows, tile_cols ) y = relay.nn.contrib_conv2d_gemm_without_weight_transform( x, weight_transformed, channels=79, kernel_size=(3, 3), data_layout="NHWC", kernel_layout="HWIO", out_dtype="int32", ) y = relay.Function(analysis.free_vars(y), y) return y with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before_nhwc_int8() a = run_opt_pass(a, transform.AlterOpLayout()) b = run_opt_pass(expected_nhwc_int8(), transform.InferType()) assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a) def test_alter_op_with_global_var(): """Test directly replacing an operator with a new one""" def before(): x = relay.var("x", shape=(1, 64, 56, 56)) weight = relay.var("weight", shape=(64, 64, 3, 3)) y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1)) y = relay.nn.relu(y) mod = tvm.IRModule() foo = relay.GlobalVar("foo") mod[foo] = relay.Function([x, weight], y) mod = transform.InferType()(mod) mod["main"] = relay.Function([x, weight], foo(x, weight)) mod = transform.InferType()(mod) return mod def alter_conv2d(attrs, inputs, tinfos, out_type): data, weight = inputs weight = relay.multiply(weight, relay.const(2.0, "float32")) return relay.nn.conv2d(data, weight, **attrs) def expected(): x = relay.var("x", shape=(1, 64, 56, 56)) weight = relay.var("weight", shape=(64, 64, 3, 3)) y = relay.nn.conv2d( x, relay.multiply(weight, relay.const(2.0, "float32")), 
channels=64, kernel_size=(3, 3), padding=(1, 1), ) y = relay.nn.relu(y) mod = tvm.IRModule() foo = relay.GlobalVar("foo") mod[foo] = relay.Function([x, weight], y) mod = transform.InferType()(mod) mod["main"] = relay.Function([x, weight], foo(x, weight)) return mod with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d): a = before() a = transform.AlterOpLayout()(a) b = transform.InferType()(expected()) assert tvm.ir.structural_equal(a, b, map_free_vars=True), "Actual = \n" + str(a) if __name__ == "__main__": test_alter_op() test_alter_return_none() test_alter_layout() test_alter_layout_dual_path() test_alter_layout_lrn() test_alter_layout_resnet() test_alter_layout_broadcast_op() test_alter_layout_broadcast_scalar_op() test_alter_layout_scalar() test_alter_layout_concatenate() test_alter_layout_nchw_upsamping_op() test_alter_layout_strided_slice() test_alter_layout_depthwise_conv2d() test_alter_layout_prelu() test_alter_layout_pad() test_alter_layout_pool() test_alter_layout_sum() test_alter_layout_nhwc_arm() test_alter_layout_nhwc_int8_aarch64() test_alter_op_with_global_var()
apache-2.0
arunkumar9t2/crux
src/main/java/com/chimbori/crux/articles/Article.java
4660
package com.chimbori.crux.articles; import com.chimbori.crux.common.StringUtils; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import java.util.ArrayList; import java.util.Collection; import java.util.List; /** * Parsed result from web page containing important title, text and image. * <p> * All fields are public for performance reasons on Android. * * @link https://developer.android.com/training/articles/perf-tips.html#GettersSetters * <p> * Avoid Internal Getters/Setters * <p> * In native languages like C++ it's common practice to use getters (i = getCount()) instead of * accessing the field directly (i = mCount). This is an excellent habit for C++ and is often * practiced in other object oriented languages like C# and Java, because the compiler can usually * inline the access, and if you need to restrict or debug field access you can add the code at any * time. * <p> * However, this is a bad idea on Android. Virtual method calls are expensive, much more so than * instance field lookups. It's reasonable to follow common object-oriented programming practices * and have getters and setters in the public interface, but within a class you should always * access fields directly. * <p> * Without a JIT, direct field access is about 3x faster than invoking a trivial getter. With the * JIT (where direct field access is as cheap as accessing a local), direct field access is about * 7x faster than invoking a trivial getter. */ @SuppressWarnings("WeakerAccess") public class Article { public final String url; public final String originalUrl = ""; public String title = ""; public String description = ""; public String siteName = ""; public String themeColor = ""; public String ampUrl = ""; public String canonicalUrl = ""; public String imageUrl = ""; public String videoUrl = ""; public String feedUrl = ""; public String faviconUrl = ""; /** * Estimated reading time, in minutes. This is not populated unless explicitly requested by the * caller. 
*/ public int estimatedReadingTimeMinutes = 0; public Document document; public Collection<String> keywords; public List<Image> images = new ArrayList<>(); Article(String url) { // Package private constructor to disallow outside the library. this.url = url; this.canonicalUrl = url; // Can be overridden later, but we start off by setting it to the URL itself. } @Override public String toString() { return "Article{" + "url='" + url + '\'' + ", title='" + title + '\'' + ", description='" + description + '\'' + ", siteName='" + siteName + '\'' + ", themeColor='" + themeColor + '\'' + ", ampUrl='" + ampUrl + '\'' + ", originalUrl='" + originalUrl + '\'' + ", canonicalUrl='" + canonicalUrl + '\'' + ", imageUrl='" + imageUrl + '\'' + ", videoUrl='" + videoUrl + '\'' + ", feedUrl='" + feedUrl + '\'' + ", faviconUrl='" + faviconUrl + '\'' + ", estimatedReadingTimeMinutes=" + estimatedReadingTimeMinutes + ", document=" + document + ", keywords=" + keywords + ", images=" + images + '}'; } /** * Class which encapsulates the data from an image found under an element */ static class Image { public String src; public int weight; public String title; public int height; public int width; public String alt; public boolean noFollow; public Element element; private Image() { } static Image from(Element imgElement) { Image image = new Image(); image.element = imgElement; // Some sites use data-src to load images lazily, so prefer the data-src attribute if it exists. image.src = !imgElement.attr("data-src").isEmpty() ? 
imgElement.attr("data-src") : imgElement.attr("src"); image.width = StringUtils.parseAttrAsInt(imgElement, "width"); image.height = StringUtils.parseAttrAsInt(imgElement, "height"); image.alt = imgElement.attr("alt"); image.title = imgElement.attr("title"); image.noFollow = imgElement.parent() != null && imgElement.parent().attr("rel") != null && imgElement.parent().attr("rel").contains("nofollow"); return image; } @Override public String toString() { return "Image{" + "src='" + src + '\'' + ", weight=" + weight + ", title='" + title + '\'' + ", height=" + height + ", width=" + width + ", alt='" + alt + '\'' + ", noFollow=" + noFollow + ", element=" + element + '}'; } } }
apache-2.0
objectiser/camel
components/camel-jetty-common/src/main/java/org/apache/camel/component/jetty/JettyHttpComponent.java
58253
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.jetty; import java.io.File; import java.io.IOException; import java.io.Writer; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URI; import java.net.URISyntaxException; import java.security.GeneralSecurityException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import javax.management.MBeanServer; import javax.servlet.Filter; import javax.servlet.MultipartConfigElement; import javax.servlet.RequestDispatcher; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.camel.CamelContext; import org.apache.camel.Consumer; import org.apache.camel.Endpoint; import org.apache.camel.Processor; import org.apache.camel.RuntimeCamelException; import org.apache.camel.SSLContextParametersAware; import org.apache.camel.http.common.CamelServlet; import org.apache.camel.http.common.HttpBinding; import org.apache.camel.http.common.HttpCommonComponent; import org.apache.camel.http.common.HttpCommonEndpoint; import org.apache.camel.http.common.HttpConfiguration; import 
org.apache.camel.http.common.HttpConsumer; import org.apache.camel.http.common.HttpRestServletResolveConsumerStrategy; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.spi.ManagementAgent; import org.apache.camel.spi.ManagementStrategy; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.RestApiConsumerFactory; import org.apache.camel.spi.RestConfiguration; import org.apache.camel.spi.RestConsumerFactory; import org.apache.camel.support.jsse.SSLContextParameters; import org.apache.camel.support.service.ServiceHelper; import org.apache.camel.util.FileUtil; import org.apache.camel.util.HostUtils; import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.PropertiesHelper; import org.apache.camel.util.StringHelper; import org.apache.camel.util.URISupport; import org.apache.camel.util.UnsafeUriCharactersEncoder; import org.eclipse.jetty.http.HttpStatus; import org.eclipse.jetty.jmx.MBeanContainer; import org.eclipse.jetty.server.AbstractConnector; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.MultiPartFormDataCompliance; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.ContextHandlerCollection; import org.eclipse.jetty.server.handler.ErrorHandler; import org.eclipse.jetty.server.handler.HandlerCollection; import org.eclipse.jetty.server.handler.HandlerWrapper; import org.eclipse.jetty.server.session.SessionHandler; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.servlets.CrossOriginFilter; import org.eclipse.jetty.util.component.Container; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.eclipse.jetty.util.thread.ThreadPool; /** * 
An HttpComponent which starts an embedded Jetty for to handle consuming from * the http endpoints. */ public abstract class JettyHttpComponent extends HttpCommonComponent implements RestConsumerFactory, RestApiConsumerFactory, SSLContextParametersAware { public static final String TMP_DIR = "CamelJettyTempDir"; protected static final HashMap<String, ConnectorRef> CONNECTORS = new HashMap<>(); private static final String JETTY_SSL_KEYSTORE = "org.eclipse.jetty.ssl.keystore"; private static final String JETTY_SSL_KEYPASSWORD = "org.eclipse.jetty.ssl.keypassword"; private static final String JETTY_SSL_PASSWORD = "org.eclipse.jetty.ssl.password"; protected String sslKeyPassword; protected String sslPassword; protected String sslKeystore; protected Map<Integer, Connector> sslSocketConnectors; protected Map<Integer, Connector> socketConnectors; protected Map<String, Object> sslSocketConnectorProperties; protected Map<String, Object> socketConnectorProperties; protected Integer minThreads; protected Integer maxThreads; protected ThreadPool threadPool; protected MBeanContainer mbContainer; protected boolean enableJmx; protected JettyHttpBinding jettyHttpBinding; protected Long continuationTimeout; protected boolean useContinuation = true; protected SSLContextParameters sslContextParameters; protected boolean useGlobalSslContextParameters; protected Integer requestBufferSize; protected Integer requestHeaderSize; protected Integer responseBufferSize; protected Integer responseHeaderSize; protected String proxyHost; protected ErrorHandler errorHandler; protected boolean useXForwardedForHeader; private Integer proxyPort; private boolean sendServerVersion = true; private QueuedThreadPool defaultQueuedThreadPool; public JettyHttpComponent() { } class ConnectorRef { CamelContext camelContext; Server server; Connector connector; CamelServlet servlet; int refCount; ConnectorRef(CamelContext camelContext, Server server, Connector connector, CamelServlet servlet) { this.camelContext 
= camelContext; this.server = server; this.connector = connector; this.servlet = servlet; increment(); } public int increment() { return ++refCount; } public int decrement() { return --refCount; } public int getRefCount() { return refCount; } } @Override protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception { // must extract well known parameters before we create the endpoint List<Handler> handlerList = resolveAndRemoveReferenceListParameter(parameters, "handlers", Handler.class); HttpBinding binding = resolveAndRemoveReferenceParameter(parameters, "httpBindingRef", HttpBinding.class); JettyHttpBinding jettyBinding = resolveAndRemoveReferenceParameter(parameters, "jettyHttpBindingRef", JettyHttpBinding.class); Boolean enableJmx = getAndRemoveParameter(parameters, "enableJmx", Boolean.class); Boolean enableMultipartFilter = getAndRemoveParameter(parameters, "enableMultipartFilter", Boolean.class, true); Filter multipartFilter = resolveAndRemoveReferenceParameter(parameters, "multipartFilterRef", Filter.class); List<Filter> filters = resolveAndRemoveReferenceListParameter(parameters, "filters", Filter.class); Boolean enableCors = getAndRemoveParameter(parameters, "enableCORS", Boolean.class, false); HeaderFilterStrategy headerFilterStrategy = resolveAndRemoveReferenceParameter(parameters, "headerFilterStrategy", HeaderFilterStrategy.class); SSLContextParameters sslContextParameters = resolveAndRemoveReferenceParameter(parameters, "sslContextParameters", SSLContextParameters.class); SSLContextParameters ssl = sslContextParameters != null ? sslContextParameters : this.sslContextParameters; ssl = ssl != null ? 
ssl : retrieveGlobalSslContextParameters(); String proxyHost = getAndRemoveParameter(parameters, "proxyHost", String.class, getProxyHost()); Integer proxyPort = getAndRemoveParameter(parameters, "proxyPort", Integer.class, getProxyPort()); Boolean async = getAndRemoveParameter(parameters, "async", Boolean.class); // extract filterInit. parameters Map filterInitParameters = PropertiesHelper.extractProperties(parameters, "filterInit."); String address = remaining; URI addressUri = new URI(UnsafeUriCharactersEncoder.encodeHttpURI(address)); URI endpointUri = URISupport.createRemainingURI(addressUri, parameters); // need to keep the httpMethodRestrict parameter for the endpointUri String httpMethodRestrict = getAndRemoveParameter(parameters, "httpMethodRestrict", String.class); // restructure uri to be based on the parameters left as we dont want to include the Camel internal options URI httpUri = URISupport.createRemainingURI(addressUri, parameters); // create endpoint after all known parameters have been extracted from parameters // include component scheme in the uri String scheme = StringHelper.before(uri, ":"); endpointUri = new URI(scheme + ":" + endpointUri); JettyHttpEndpoint endpoint = createEndpoint(endpointUri, httpUri); if (async != null) { endpoint.setAsync(async); } if (headerFilterStrategy != null) { endpoint.setHeaderFilterStrategy(headerFilterStrategy); } else { setEndpointHeaderFilterStrategy(endpoint); } // setup the proxy host and proxy port if (proxyHost != null) { endpoint.setProxyHost(proxyHost); endpoint.setProxyPort(proxyPort); } if (!filterInitParameters.isEmpty()) { endpoint.setFilterInitParameters(filterInitParameters); } if (handlerList.size() > 0) { endpoint.setHandlers(handlerList); } // prefer to use endpoint configured over component configured if (binding == null) { // fallback to component configured binding = getHttpBinding(); } if (binding != null) { endpoint.setBinding(binding); } // prefer to use endpoint configured over component 
configured if (jettyBinding == null) { // fallback to component configured jettyBinding = getJettyHttpBinding(); } if (enableJmx != null) { endpoint.setEnableJmx(enableJmx); } else { // set this option based on setting of JettyHttpComponent endpoint.setEnableJmx(isEnableJmx()); } endpoint.setEnableMultipartFilter(enableMultipartFilter); if (multipartFilter != null) { endpoint.setMultipartFilter(multipartFilter); endpoint.setEnableMultipartFilter(true); } if (enableCors) { endpoint.setEnableCORS(enableCors); if (filters == null) { filters = new ArrayList<>(1); } filters.add(new CrossOriginFilter()); } if (filters != null) { endpoint.setFilters(filters); } if (httpMethodRestrict != null) { endpoint.setHttpMethodRestrict(httpMethodRestrict); } if (ssl != null) { endpoint.setSslContextParameters(ssl); } endpoint.setSendServerVersion(isSendServerVersion()); setProperties(endpoint, parameters); // re-create http uri after all parameters has been set on the endpoint, as the remainders are for http uri httpUri = URISupport.createRemainingURI(addressUri, parameters); endpoint.setHttpUri(httpUri); return endpoint; } protected abstract JettyHttpEndpoint createEndpoint(URI endpointUri, URI httpUri) throws URISyntaxException; @Override public boolean canConnect(HttpConsumer consumer) throws Exception { // Make sure that there is a connector for the requested endpoint. 
JettyHttpEndpoint endpoint = (JettyHttpEndpoint)consumer.getEndpoint(); String connectorKey = getConnectorKey(endpoint); synchronized (CONNECTORS) { ConnectorRef connectorRef = CONNECTORS.get(connectorKey); // check if there are already another consumer on the same context-path and if so fail if (connectorRef != null) { for (Map.Entry<String, HttpConsumer> entry : connectorRef.servlet.getConsumers().entrySet()) { String path = entry.getValue().getPath(); CamelContext camelContext = entry.getValue().getEndpoint().getCamelContext(); if (consumer.getPath().equals(path)) { // its allowed if they are from the same camel context boolean sameContext = consumer.getEndpoint().getCamelContext() == camelContext; if (!sameContext) { return false; } } } } } return true; } /** * Connects the URL specified on the endpoint to the specified processor. */ @Override public void connect(HttpConsumer consumer) throws Exception { // Make sure that there is a connector for the requested endpoint. JettyHttpEndpoint endpoint = (JettyHttpEndpoint)consumer.getEndpoint(); String connectorKey = getConnectorKey(endpoint); synchronized (CONNECTORS) { ConnectorRef connectorRef = CONNECTORS.get(connectorKey); if (connectorRef == null) { Server server = createServer(); Connector connector = getConnector(server, endpoint); if ("localhost".equalsIgnoreCase(endpoint.getHttpUri().getHost())) { log.warn("You use localhost interface! It means that no external connections will be available." + " Don't you want to use 0.0.0.0 instead (all network interfaces)? 
" + endpoint); } if (endpoint.isEnableJmx()) { enableJmx(server); } server.addConnector(connector); connectorRef = new ConnectorRef(getCamelContext(), server, connector, createServletForConnector(server, connector, endpoint.getHandlers(), endpoint)); // must enable session before we start if (endpoint.isSessionSupport()) { enableSessionSupport(connectorRef.server, connectorKey); } connectorRef.server.start(); log.debug("Adding connector key: {} -> {}", connectorKey, connectorRef); CONNECTORS.put(connectorKey, connectorRef); } else { log.debug("Using existing connector key: {} -> {}", connectorKey, connectorRef); // check if there are any new handlers, and if so then we need to re-start the server if (endpoint.getHandlers() != null && !endpoint.getHandlers().isEmpty()) { List<Handler> existingHandlers = new ArrayList<>(); if (connectorRef.server.getHandlers() != null && connectorRef.server.getHandlers().length > 0) { existingHandlers = Arrays.asList(connectorRef.server.getHandlers()); } List<Handler> newHandlers = new ArrayList<>(endpoint.getHandlers()); boolean changed = !existingHandlers.containsAll(newHandlers) && !newHandlers.containsAll(existingHandlers); if (changed) { log.debug("Restarting Jetty server due to adding new Jetty Handlers: {}", newHandlers); connectorRef.server.stop(); addJettyHandlers(connectorRef.server, endpoint.getHandlers()); connectorRef.server.start(); } } // check the session support if (endpoint.isSessionSupport()) { enableSessionSupport(connectorRef.server, connectorKey); } // ref track the connector connectorRef.increment(); } if (endpoint.isEnableMultipartFilter()) { enableMultipartFilter(endpoint, connectorRef.server, connectorKey); } if (endpoint.getFilters() != null && endpoint.getFilters().size() > 0) { setFilters(endpoint, connectorRef.server, connectorKey); } connectorRef.servlet.connect(consumer); } } private void enableJmx(Server server) { MBeanContainer containerToRegister = getMbContainer(); if (containerToRegister != null) 
{ log.info("Jetty JMX Extensions is enabled"); addServerMBean(server); // Since we may have many Servers running, don't tie the MBeanContainer // to a Server lifecycle or we end up closing it while it is still in use. //server.addBean(mbContainer); } } private void enableSessionSupport(Server server, String connectorKey) throws Exception { ServletContextHandler context = server.getChildHandlerByClass(ServletContextHandler.class); if (context.getSessionHandler() == null) { SessionHandler sessionHandler = new SessionHandler(); if (context.isStarted()) { throw new IllegalStateException("Server has already been started. Cannot enabled sessionSupport on " + connectorKey); } else { context.setSessionHandler(sessionHandler); } } } private void setFilters(JettyHttpEndpoint endpoint, Server server, String connectorKey) { ServletContextHandler context = server.getChildHandlerByClass(ServletContextHandler.class); List<Filter> filters = endpoint.getFilters(); for (Filter filter : filters) { FilterHolder filterHolder = new FilterHolder(); if (endpoint.getFilterInitParameters() != null) { filterHolder.setInitParameters(endpoint.getFilterInitParameters()); } filterHolder.setFilter(new CamelFilterWrapper(filter)); String pathSpec = endpoint.getPath(); if (pathSpec == null || "".equals(pathSpec)) { pathSpec = "/"; } if (endpoint.isMatchOnUriPrefix()) { pathSpec = pathSpec.endsWith("/") ? 
pathSpec + "*" : pathSpec + "/*"; } addFilter(context, filterHolder, pathSpec); } } private void addFilter(ServletContextHandler context, FilterHolder filterHolder, String pathSpec) { context.getServletHandler().addFilterWithMapping(filterHolder, pathSpec, 0); } private void enableMultipartFilter(HttpCommonEndpoint endpoint, Server server, String connectorKey) throws Exception { ServletContextHandler context = server.getChildHandlerByClass(ServletContextHandler.class); CamelContext camelContext = this.getCamelContext(); FilterHolder filterHolder = new FilterHolder(); filterHolder.setInitParameter("deleteFiles", "true"); if (ObjectHelper.isNotEmpty(camelContext.getGlobalOption(TMP_DIR))) { File file = new File(camelContext.getGlobalOption(TMP_DIR)); if (!file.isDirectory()) { throw new RuntimeCamelException( "The temp file directory of camel-jetty is not exists, please recheck it with directory name :" + camelContext.getGlobalOptions().get(TMP_DIR)); } context.setAttribute("javax.servlet.context.tempdir", file); } // if a filter ref was provided, use it. Filter filter = ((JettyHttpEndpoint) endpoint).getMultipartFilter(); if (filter == null) { // if no filter ref was provided, use the default filter filter = new MultiPartFilter(); } filterHolder.setFilter(new CamelFilterWrapper(filter)); String pathSpec = endpoint.getPath(); if (pathSpec == null || "".equals(pathSpec)) { pathSpec = "/"; } if (endpoint.isMatchOnUriPrefix()) { pathSpec = pathSpec.endsWith("/") ? pathSpec + "*" : pathSpec + "/*"; } addFilter(context, filterHolder, pathSpec); log.debug("using multipart filter implementation " + filter.getClass().getName() + " for path " + pathSpec); } /** * Disconnects the URL specified on the endpoint from the specified processor. 
*/ @Override public void disconnect(HttpConsumer consumer) throws Exception { // If the connector is not needed anymore then stop it HttpCommonEndpoint endpoint = consumer.getEndpoint(); String connectorKey = getConnectorKey(endpoint); synchronized (CONNECTORS) { ConnectorRef connectorRef = CONNECTORS.get(connectorKey); if (connectorRef != null) { connectorRef.servlet.disconnect(consumer); if (connectorRef.decrement() == 0) { connectorRef.server.removeConnector(connectorRef.connector); connectorRef.connector.stop(); connectorRef.server.stop(); CONNECTORS.remove(connectorKey); // Camel controls the lifecycle of these entities so remove the // registered MBeans when Camel is done with the managed objects. if (mbContainer != null) { this.removeServerMBean(connectorRef.server); //mbContainer.removeBean(connectorRef.connector); } if (defaultQueuedThreadPool != null) { try { defaultQueuedThreadPool.stop(); } catch (Throwable t) { defaultQueuedThreadPool.destroy(); } finally { defaultQueuedThreadPool = null; } } } } } } private String getConnectorKey(HttpCommonEndpoint endpoint) { return endpoint.getProtocol() + ":" + endpoint.getHttpUri().getHost() + ":" + endpoint.getPort(); } // Properties // ------------------------------------------------------------------------- public String getSslKeyPassword() { return sslKeyPassword; } /** * The key password, which is used to access the certificate's key entry in the keystore (this is the same password that is supplied to the keystore command's -keypass option). 
*/ @Metadata(description = "The key password, which is used to access the certificate's key entry in the keystore " + "(this is the same password that is supplied to the keystore command's -keypass option).", label = "security", secret = true) public void setSslKeyPassword(String sslKeyPassword) { this.sslKeyPassword = sslKeyPassword; } public String getSslPassword() { return sslPassword; } /** * The ssl password, which is required to access the keystore file (this is the same password that is supplied to the keystore command's -storepass option). */ @Metadata(description = "The ssl password, which is required to access the keystore file (this is the same password that is supplied to the keystore command's -storepass option).", label = "security", secret = true) public void setSslPassword(String sslPassword) { this.sslPassword = sslPassword; } /** * Specifies the location of the Java keystore file, which contains the Jetty server's own X.509 certificate in a key entry. */ @Metadata(description = "Specifies the location of the Java keystore file, which contains the Jetty server's own X.509 certificate in a key entry.", label = "security", secret = true) public void setKeystore(String sslKeystore) { this.sslKeystore = sslKeystore; } public String getKeystore() { return sslKeystore; } public ErrorHandler getErrorHandler() { return errorHandler; } /** * This option is used to set the ErrorHandler that Jetty server uses. 
*/ @Metadata(description = "This option is used to set the ErrorHandler that Jetty server uses.", label = "advanced") public void setErrorHandler(ErrorHandler errorHandler) { this.errorHandler = errorHandler; } protected Connector getConnector(Server server, JettyHttpEndpoint endpoint) { Connector connector; if ("https".equals(endpoint.getProtocol())) { connector = getSslSocketConnector(server, endpoint); } else { connector = getSocketConnector(server, endpoint); } return connector; } protected Connector getSocketConnector(Server server, JettyHttpEndpoint endpoint) { Connector answer = null; if (socketConnectors != null) { answer = socketConnectors.get(endpoint.getPort()); } if (answer == null) { answer = createConnector(server, endpoint); } return answer; } protected Connector getSslSocketConnector(Server server, JettyHttpEndpoint endpoint) { Connector answer = null; if (sslSocketConnectors != null) { answer = sslSocketConnectors.get(endpoint.getPort()); } if (answer == null) { answer = createConnector(server, endpoint); } return answer; } protected Connector createConnector(Server server, JettyHttpEndpoint endpoint) { // now we just use the SelectChannelConnector as the default connector SslContextFactory sslcf = null; // Note that this was set on the endpoint when it was constructed. It was // either explicitly set at the component or on the endpoint, but either way, // the value is already set. We therefore do not need to look at the component // level SSLContextParameters again in this method. 
SSLContextParameters endpointSslContextParameters = endpoint.getSslContextParameters(); if (endpointSslContextParameters != null) { try { sslcf = createSslContextFactory(endpointSslContextParameters, false); } catch (Exception e) { throw new RuntimeCamelException(e); } } else if ("https".equals(endpoint.getProtocol())) { sslcf = new SslContextFactory(); sslcf.setEndpointIdentificationAlgorithm(null); String keystoreProperty = System.getProperty(JETTY_SSL_KEYSTORE); if (keystoreProperty != null) { sslcf.setKeyStorePath(keystoreProperty); } else if (sslKeystore != null) { sslcf.setKeyStorePath(sslKeystore); } String keystorePassword = System.getProperty(JETTY_SSL_KEYPASSWORD); if (keystorePassword != null) { sslcf.setKeyManagerPassword(keystorePassword); } else if (sslKeyPassword != null) { sslcf.setKeyManagerPassword(sslKeyPassword); } String password = System.getProperty(JETTY_SSL_PASSWORD); if (password != null) { sslcf.setKeyStorePassword(password); } else if (sslPassword != null) { sslcf.setKeyStorePassword(sslPassword); } } return createConnectorJettyInternal(server, endpoint, sslcf); } protected abstract AbstractConnector createConnectorJettyInternal(Server server, JettyHttpEndpoint endpoint, SslContextFactory sslcf); private SslContextFactory createSslContextFactory(SSLContextParameters ssl, boolean client) throws GeneralSecurityException, IOException { SslContextFactory answer = new SslContextFactory(); if (!client) { answer.setEndpointIdentificationAlgorithm(null); } if (ssl != null) { answer.setSslContext(ssl.createSSLContext(getCamelContext())); } // jetty default is // addExcludeProtocols("SSL", "SSLv2", "SSLv2Hello", "SSLv3"); // setExcludeCipherSuites("^.*_(MD5|SHA|SHA1)$"); // configure include/exclude ciphers and protocols if (ssl != null && ssl.getCipherSuitesFilter() != null) { List<String> includeCiphers = ssl.getCipherSuitesFilter().getInclude(); if (includeCiphers != null && !includeCiphers.isEmpty()) { String[] arr = includeCiphers.toArray(new 
String[includeCiphers.size()]); answer.setIncludeCipherSuites(arr); } else { answer.setIncludeCipherSuites(".*"); } List<String> excludeCiphers = ssl.getCipherSuitesFilter().getExclude(); if (excludeCiphers != null && !excludeCiphers.isEmpty()) { String[] arr = excludeCiphers.toArray(new String[excludeCiphers.size()]); answer.setExcludeCipherSuites(arr); } } if (ssl != null && ssl.getSecureSocketProtocolsFilter() != null) { List<String> includeProtocols = ssl.getSecureSocketProtocolsFilter().getInclude(); if (includeProtocols != null && !includeProtocols.isEmpty()) { String[] arr = includeProtocols.toArray(new String[includeProtocols.size()]); answer.setIncludeProtocols(arr); } else { answer.setIncludeProtocols(".*"); } List<String> excludeProtocols = ssl.getSecureSocketProtocolsFilter().getExclude(); if (excludeProtocols != null && !excludeProtocols.isEmpty()) { String[] arr = excludeProtocols.toArray(new String[excludeProtocols.size()]); answer.setExcludeProtocols(arr); } } return answer; } protected boolean checkSSLContextFactoryConfig(Object instance) { try { Method method = instance.getClass().getMethod("checkConfig"); return (Boolean)method.invoke(instance); } catch (NoSuchMethodException ex) { // ignore } catch (IllegalArgumentException e) { // ignore } catch (IllegalAccessException e) { // ignore } catch (InvocationTargetException e) { // ignore } return false; } public Map<Integer, Connector> getSslSocketConnectors() { return sslSocketConnectors; } /** * A map which contains per port number specific SSL connectors. */ @Metadata(description = "A map which contains per port number specific SSL connectors.", label = "security") public void setSslSocketConnectors(Map <Integer, Connector> connectors) { sslSocketConnectors = connectors; } /** * A map which contains per port number specific HTTP connectors. Uses the same principle as sslSocketConnectors. */ @Metadata(description = "A map which contains per port number specific HTTP connectors. 
Uses the same principle as sslSocketConnectors.", label = "security") public void setSocketConnectors(Map<Integer, Connector> socketConnectors) { this.socketConnectors = socketConnectors; } public Integer getMinThreads() { return minThreads; } /** * To set a value for minimum number of threads in server thread pool. Notice that both a min and max size must be configured. */ @Metadata(description = "To set a value for minimum number of threads in server thread pool. Notice that both a min and max size must be configured.", label = "consumer") public void setMinThreads(Integer minThreads) { this.minThreads = minThreads; } public Integer getMaxThreads() { return maxThreads; } /** * To set a value for maximum number of threads in server thread pool. Notice that both a min and max size must be configured. */ @Metadata(description = "To set a value for maximum number of threads in server thread pool. Notice that both a min and max size must be configured.", label = "consumer") public void setMaxThreads(Integer maxThreads) { this.maxThreads = maxThreads; } public ThreadPool getThreadPool() { return threadPool; } /** * To use a custom thread pool for the server. This option should only be used in special circumstances. */ @Metadata(description = "To use a custom thread pool for the server. This option should only be used in special circumstances.", label = "consumer,advanced") public void setThreadPool(ThreadPool threadPool) { this.threadPool = threadPool; } public boolean isEnableJmx() { return enableJmx; } /** * If this option is true, Jetty JMX support will be enabled for this endpoint. 
*/ @Metadata(description = "If this option is true, Jetty JMX support will be enabled for this endpoint.") public void setEnableJmx(boolean enableJmx) { this.enableJmx = enableJmx; } public JettyHttpBinding getJettyHttpBinding() { return jettyHttpBinding; } /** * To use a custom org.apache.camel.component.jetty.JettyHttpBinding, which are used to customize how a response should be written for the producer. */ @Metadata(description = "To use a custom org.apache.camel.component.jetty.JettyHttpBinding, which are used to customize how a response should be written for the producer.", label = "advanced") public void setJettyHttpBinding(JettyHttpBinding jettyHttpBinding) { this.jettyHttpBinding = jettyHttpBinding; } /** * Not to be used - use JettyHttpBinding instead. */ @Override @Metadata(description = "Not to be used - use JettyHttpBinding instead.", label = "advanced") public void setHttpBinding(HttpBinding httpBinding) { throw new IllegalArgumentException("Not to be used - use JettyHttpBinding instead."); } /** * Jetty component does not use HttpConfiguration. */ @Override @Metadata(description = "Jetty component does not use HttpConfiguration.", label = "advanced") public void setHttpConfiguration(HttpConfiguration httpConfiguration) { throw new IllegalArgumentException("Jetty component does not use HttpConfiguration."); } public synchronized MBeanContainer getMbContainer() { // If null, provide the default implementation. if (mbContainer == null) { MBeanServer mbs = null; final ManagementStrategy mStrategy = this.getCamelContext().getManagementStrategy(); final ManagementAgent mAgent = mStrategy.getManagementAgent(); if (mAgent != null) { mbs = mAgent.getMBeanServer(); } if (mbs != null) { mbContainer = new MBeanContainer(mbs); } else { log.warn("JMX disabled in CamelContext. 
Jetty JMX extensions will remain disabled."); } } return this.mbContainer; } /** * To use a existing configured org.eclipse.jetty.jmx.MBeanContainer if JMX is enabled that Jetty uses for registering mbeans. */ @Metadata(description = "To use a existing configured org.eclipse.jetty.jmx.MBeanContainer if JMX is enabled that Jetty uses for registering mbeans.", label = "advanced") public void setMbContainer(MBeanContainer mbContainer) { this.mbContainer = mbContainer; } public Map<String, Object> getSslSocketConnectorProperties() { return sslSocketConnectorProperties; } /** * A map which contains general SSL connector properties. */ @Metadata(description = "A map which contains general SSL connector properties.", label = "security") public void setSslSocketConnectorProperties(Map<String, Object> sslSocketConnectorProperties) { this.sslSocketConnectorProperties = sslSocketConnectorProperties; } public Map<String, Object> getSocketConnectorProperties() { return socketConnectorProperties; } /** * A map which contains general HTTP connector properties. Uses the same principle as sslSocketConnectorProperties. */ @Metadata(description = "A map which contains general HTTP connector properties. Uses the same principle as sslSocketConnectorProperties.", label = "security") public void setSocketConnectorProperties(Map<String, Object> socketConnectorProperties) { this.socketConnectorProperties = socketConnectorProperties; } public void addSocketConnectorProperty(String key, Object value) { if (socketConnectorProperties == null) { socketConnectorProperties = new HashMap<>(); } socketConnectorProperties.put(key, value); } public void addSslSocketConnectorProperty(String key, Object value) { if (sslSocketConnectorProperties == null) { sslSocketConnectorProperties = new HashMap<>(); } sslSocketConnectorProperties.put(key, value); } public Long getContinuationTimeout() { return continuationTimeout; } /** * Allows to set a timeout in millis when using Jetty as consumer (server). 
* By default Jetty uses 30000. You can use a value of <= 0 to never expire. * If a timeout occurs then the request will be expired and Jetty will return back a http error 503 to the client. * This option is only in use when using Jetty with the Asynchronous Routing Engine. */ @Metadata(description = "Allows to set a timeout in millis when using Jetty as consumer (server)." + " By default Jetty uses 30000. You can use a value of <= 0 to never expire." + " If a timeout occurs then the request will be expired and Jetty will return back a http error 503 to the client." + " This option is only in use when using Jetty with the Asynchronous Routing Engine.", defaultValue = "30000", label = "consumer") public void setContinuationTimeout(Long continuationTimeout) { this.continuationTimeout = continuationTimeout; } public boolean isUseContinuation() { return useContinuation; } /** * Whether or not to use Jetty continuations for the Jetty Server. */ @Metadata(description = "Whether or not to use Jetty continuations for the Jetty Server.", defaultValue = "true", label = "consumer") public void setUseContinuation(boolean useContinuation) { this.useContinuation = useContinuation; } public SSLContextParameters getSslContextParameters() { return sslContextParameters; } /** * To configure security using SSLContextParameters */ @Metadata(description = "To configure security using SSLContextParameters", label = "security") public void setSslContextParameters(SSLContextParameters sslContextParameters) { this.sslContextParameters = sslContextParameters; } @Override public boolean isUseGlobalSslContextParameters() { return this.useGlobalSslContextParameters; } /** * Enable usage of global SSL context parameters */ @Override @Metadata(description = "Enable usage of global SSL context parameters", label = "security", defaultValue = "false") public void setUseGlobalSslContextParameters(boolean useGlobalSslContextParameters) { this.useGlobalSslContextParameters = 
useGlobalSslContextParameters; } public Integer getResponseBufferSize() { return responseBufferSize; } /** * Allows to configure a custom value of the response buffer size on the Jetty connectors. */ @Metadata(description = "Allows to configure a custom value of the response buffer size on the Jetty connectors.") public void setResponseBufferSize(Integer responseBufferSize) { this.responseBufferSize = responseBufferSize; } public Integer getRequestBufferSize() { return requestBufferSize; } /** * Allows to configure a custom value of the request buffer size on the Jetty connectors. */ @Metadata(description = "Allows to configure a custom value of the request buffer size on the Jetty connectors.") public void setRequestBufferSize(Integer requestBufferSize) { this.requestBufferSize = requestBufferSize; } public Integer getRequestHeaderSize() { return requestHeaderSize; } /** * Allows to configure a custom value of the request header size on the Jetty connectors. */ @Metadata(description = "Allows to configure a custom value of the request header size on the Jetty connectors.") public void setRequestHeaderSize(Integer requestHeaderSize) { this.requestHeaderSize = requestHeaderSize; } public Integer getResponseHeaderSize() { return responseHeaderSize; } /** * Allows to configure a custom value of the response header size on the Jetty connectors. */ @Metadata(description = "Allows to configure a custom value of the response header size on the Jetty connectors.") public void setResponseHeaderSize(Integer responseHeaderSize) { this.responseHeaderSize = responseHeaderSize; } public String getProxyHost() { return proxyHost; } /** * To use a http proxy to configure the hostname. */ @Metadata(description = "To use a http proxy to configure the hostname.", label = "proxy") public void setProxyHost(String proxyHost) { this.proxyHost = proxyHost; } public Integer getProxyPort() { return proxyPort; } /** * To use a http proxy to configure the port number. 
*/ @Metadata(description = "To use a http proxy to configure the port number.", label = "proxy") public void setProxyPort(Integer proxyPort) { this.proxyPort = proxyPort; } public boolean isUseXForwardedForHeader() { return useXForwardedForHeader; } /** * To use the X-Forwarded-For header in HttpServletRequest.getRemoteAddr. */ @Metadata(description = "To use the X-Forwarded-For header in HttpServletRequest.getRemoteAddr.") public void setUseXForwardedForHeader(boolean useXForwardedForHeader) { this.useXForwardedForHeader = useXForwardedForHeader; } public boolean isSendServerVersion() { return sendServerVersion; } /** * If the option is true, jetty will send the server header with the jetty version information to the client which sends the request. * NOTE please make sure there is no any other camel-jetty endpoint is share the same port, otherwise this option may not work as expected. */ @Metadata(description = "If the option is true, jetty will send the server header with the jetty version information to the client which sends the request." 
+ " NOTE please make sure there is no any other camel-jetty endpoint is share the same port, otherwise this option may not work as expected.", defaultValue = "true", label = "consumer") public void setSendServerVersion(boolean sendServerVersion) { this.sendServerVersion = sendServerVersion; } // Implementation methods // ------------------------------------------------------------------------- @Override public Consumer createConsumer(CamelContext camelContext, Processor processor, String verb, String basePath, String uriTemplate, String consumes, String produces, RestConfiguration configuration, Map<String, Object> parameters) throws Exception { return doCreateConsumer(camelContext, processor, verb, basePath, uriTemplate, consumes, produces, configuration, parameters, false); } @Override public Consumer createApiConsumer(CamelContext camelContext, Processor processor, String contextPath, RestConfiguration configuration, Map<String, Object> parameters) throws Exception { // reuse the createConsumer method we already have. 
The api need to use GET and match on uri prefix
        return doCreateConsumer(camelContext, processor, "GET", contextPath, null, null, null, configuration, parameters, true);
    }

    /**
     * Creates the Jetty consumer backing a rest-dsl endpoint. Assembles the final
     * endpoint URI (scheme, host, port, rest-dsl context-path and the basePath/uriTemplate
     * path) from the supplied {@link RestConfiguration} (falling back to the context's
     * "jetty" rest configuration when none is given).
     *
     * @param api whether this consumer serves api-docs (then it matches on URI prefix
     *            and is restricted to GET)
     */
    Consumer doCreateConsumer(CamelContext camelContext, Processor processor, String verb, String basePath,
                              String uriTemplate, String consumes, String produces,
                              RestConfiguration configuration, Map<String, Object> parameters, boolean api) throws Exception {
        String path = basePath;
        if (uriTemplate != null) {
            // make sure to avoid double slashes
            if (uriTemplate.startsWith("/")) {
                path = path + uriTemplate;
            } else {
                path = path + "/" + uriTemplate;
            }
        }
        path = FileUtil.stripLeadingSeparator(path);

        String scheme = "http";
        String host = "";
        int port = 0;

        // if no explicit port/host configured, then use port from rest configuration
        RestConfiguration config = configuration;
        if (config == null) {
            config = camelContext.getRestConfiguration("jetty", true);
        }
        if (config.getScheme() != null) {
            scheme = config.getScheme();
        }
        if (config.getHost() != null) {
            host = config.getHost();
        }
        int num = config.getPort();
        if (num > 0) {
            port = num;
        }

        // prefix path with context-path if configured in rest-dsl configuration
        String contextPath = config.getContextPath();
        if (ObjectHelper.isNotEmpty(contextPath)) {
            contextPath = FileUtil.stripTrailingSeparator(contextPath);
            contextPath = FileUtil.stripLeadingSeparator(contextPath);
            if (ObjectHelper.isNotEmpty(contextPath)) {
                path = contextPath + "/" + path;
            }
        }

        // if no explicit hostname set then resolve the hostname
        if (ObjectHelper.isEmpty(host)) {
            if (config.getHostNameResolver() == RestConfiguration.RestHostNameResolver.allLocalIp) {
                host = "0.0.0.0";
            } else if (config.getHostNameResolver() == RestConfiguration.RestHostNameResolver.localHostName) {
                host = HostUtils.getLocalHostName();
            } else if (config.getHostNameResolver() == RestConfiguration.RestHostNameResolver.localIp) {
                host = HostUtils.getLocalIp();
            }
        }

        Map<String, Object> map = new HashMap<>();
        // build query string, 
and append any endpoint configuration properties if (config.getComponent() == null || config.getComponent().equals("jetty")) { // setup endpoint options if (config.getEndpointProperties() != null && !config.getEndpointProperties().isEmpty()) { map.putAll(config.getEndpointProperties()); } } boolean cors = config.isEnableCORS(); if (cors) { // allow HTTP Options as we want to handle CORS in rest-dsl map.put("optionsEnabled", "true"); } String query = URISupport.createQueryString(map); String url; if (api) { url = "jetty:%s://%s:%s/%s?matchOnUriPrefix=true&httpMethodRestrict=%s"; } else { url = "jetty:%s://%s:%s/%s?httpMethodRestrict=%s"; } // must use upper case for restrict String restrict = verb.toUpperCase(Locale.US); if (cors) { restrict += ",OPTIONS"; } // get the endpoint url = String.format(url, scheme, host, port, path, restrict); if (!query.isEmpty()) { url = url + "&" + query; } JettyHttpEndpoint endpoint = camelContext.getEndpoint(url, JettyHttpEndpoint.class); setProperties(camelContext, endpoint, parameters); if (!map.containsKey("httpBindingRef")) { // use the rest binding, if not using a custom http binding endpoint.setHttpBinding(new JettyRestHttpBinding(endpoint)); // disable this filter as we want to use ours endpoint.setEnableMultipartFilter(false); } // configure consumer properties Consumer consumer = endpoint.createConsumer(processor); if (config.getConsumerProperties() != null && !config.getConsumerProperties().isEmpty()) { setProperties(camelContext, consumer, config.getConsumerProperties()); } // the endpoint must be started before creating the producer ServiceHelper.startService(endpoint); return consumer; } protected CamelServlet createServletForConnector(Server server, Connector connector, List<Handler> handlers, JettyHttpEndpoint endpoint) throws Exception { ServletContextHandler context = new ServletContextHandler(server, "/", ServletContextHandler.NO_SECURITY | ServletContextHandler.NO_SESSIONS); addJettyHandlers(server, handlers); 
CamelServlet camelServlet = new CamelContinuationServlet(); ServletHolder holder = new ServletHolder(); holder.setServlet(camelServlet); holder.setAsyncSupported(true); holder.setInitParameter(CamelServlet.ASYNC_PARAM, Boolean.toString(endpoint.isAsync())); context.addServlet(holder, "/*"); File file = File.createTempFile("camel", ""); file.delete(); //must register the MultipartConfig to make jetty server multipart aware holder.getRegistration().setMultipartConfig(new MultipartConfigElement(file.getParentFile().getAbsolutePath(), -1, -1, 0)); // use rest enabled resolver in case we use rest camelServlet.setServletResolveConsumerStrategy(new HttpRestServletResolveConsumerStrategy()); //must make RFC7578 as default to avoid using the deprecated MultiPartInputStreamParser connector.getConnectionFactory(HttpConnectionFactory.class).getHttpConfiguration() .setMultiPartFormDataCompliance(MultiPartFormDataCompliance.RFC7578); return camelServlet; } protected void addJettyHandlers(Server server, List<Handler> handlers) { if (handlers != null && !handlers.isEmpty()) { for (Handler handler : handlers) { if (handler instanceof HandlerWrapper) { // avoid setting a handler more than once if (!isHandlerInChain(server.getHandler(), handler)) { ((HandlerWrapper) handler).setHandler(server.getHandler()); server.setHandler(handler); } } else { HandlerCollection handlerCollection = new HandlerCollection(); handlerCollection.addHandler(server.getHandler()); handlerCollection.addHandler(handler); server.setHandler(handlerCollection); } } } } protected boolean isHandlerInChain(Handler current, Handler handler) { if (handler.equals(current)) { //Found a match in the chain return true; } else if (current instanceof HandlerWrapper) { //Inspect the next handler in the chain return isHandlerInChain(((HandlerWrapper) current).getHandler(), handler); } else { //End of chain return false; } } protected Server createServer() { ThreadPool tp = threadPool; defaultQueuedThreadPool = null; // 
configure thread pool if min/max given
        if (minThreads != null || maxThreads != null) {
            // explicit min/max sizes and a custom threadPool are mutually exclusive
            if (getThreadPool() != null) {
                throw new IllegalArgumentException("You cannot configure both minThreads/maxThreads and a custom threadPool on JettyHttpComponent: " + this);
            }
            defaultQueuedThreadPool = new QueuedThreadPool();
            if (minThreads != null) {
                defaultQueuedThreadPool.setMinThreads(minThreads.intValue());
            }
            if (maxThreads != null) {
                defaultQueuedThreadPool.setMaxThreads(maxThreads.intValue());
            }
            tp = defaultQueuedThreadPool;
        }
        // a null thread pool means Jetty picks its own default
        Server s;
        if (tp != null) {
            s = new Server(tp);
        } else {
            s = new Server();
        }
        if (isEnableJmx()) {
            enableJmx(s);
        }
        if (defaultQueuedThreadPool != null) {
            // let the thread names indicate they are from the server
            defaultQueuedThreadPool.setName("CamelJettyServer(" + ObjectHelper.getIdentityHashCode(s) + ")");
            try {
                defaultQueuedThreadPool.start();
            } catch (Exception e) {
                throw new RuntimeCamelException("Error starting JettyServer thread pool: " + defaultQueuedThreadPool, e);
            }
        }
        ContextHandlerCollection collection = new ContextHandlerCollection();
        s.setHandler(collection);
        // setup the error handler if it set to Jetty component
        if (getErrorHandler() != null) {
            s.addBean(getErrorHandler());
        } else {
            //need an error handler that won't leak information about the exception back to the client. 
ErrorHandler eh = new ErrorHandler() { public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException { String msg = HttpStatus.getMessage(response.getStatus()); request.setAttribute(RequestDispatcher.ERROR_MESSAGE, msg); super.handle(target, baseRequest, request, response); } protected void writeErrorPage(HttpServletRequest request, Writer writer, int code, String message, boolean showStacks) throws IOException { super.writeErrorPage(request, writer, code, message, false); } }; s.addBean(eh, false); } return s; } @Override protected void doStart() throws Exception { super.doStart(); RestConfiguration config = getCamelContext().getRestConfiguration("jetty", true); // configure additional options on jetty configuration if (config.getComponentProperties() != null && !config.getComponentProperties().isEmpty()) { setProperties(this, config.getComponentProperties()); } } @Override protected void doStop() throws Exception { super.doStop(); if (CONNECTORS.size() > 0) { for (String connectorKey : CONNECTORS.keySet()) { ConnectorRef connectorRef = CONNECTORS.get(connectorKey); if (connectorRef != null && connectorRef.getRefCount() == 0) { connectorRef.server.removeConnector(connectorRef.connector); connectorRef.connector.stop(); // Camel controls the lifecycle of these entities so remove the // registered MBeans when Camel is done with the managed objects. 
removeServerMBean(connectorRef.server);
                    connectorRef.server.stop();
                    //removeServerMBean(connectorRef.connector);
                    CONNECTORS.remove(connectorKey);
                }
            }
        }
        if (mbContainer != null) {
            mbContainer.destroy();
            mbContainer = null;
        }
    }

    /**
     * Registers the given Jetty server with the shared {@code MBeanContainer}.
     * The container is invoked reflectively (presumably to tolerate differing
     * Jetty JMX APIs across versions - TODO confirm); a no-op when JMX is disabled.
     */
    private void addServerMBean(Server server) {
        if (mbContainer == null) {
            // JMX not enabled for this component - nothing to register
            return;
        }

        try {
            Object o = getContainer(server);
            o.getClass().getMethod("addEventListener", Container.Listener.class).invoke(o, mbContainer);
            mbContainer.getClass().getMethod("beanAdded", Container.class, Object.class)
                .invoke(mbContainer, null, server);
        } catch (RuntimeException rex) {
            throw rex;
        } catch (Exception r) {
            throw new RuntimeException(r);
        }
    }

    /**
     * Unregisters the given Jetty server from the shared {@code MBeanContainer},
     * trying {@code beanRemoved(Container, Object)} first and falling back to
     * {@code removeBean(Object)} (NOTE(review): the two signatures appear to belong
     * to different Jetty releases - confirm).
     */
    private void removeServerMBean(Server server) {
        try {
            mbContainer.getClass().getMethod("beanRemoved", Container.class, Object.class)
                .invoke(mbContainer, null, server);
        } catch (RuntimeException rex) {
            throw rex;
        } catch (Exception r) {
            try {
                mbContainer.getClass().getMethod("removeBean", Object.class)
                    .invoke(mbContainer, server);
            } catch (RuntimeException rex) {
                throw rex;
            } catch (Exception r2) {
                // report the original failure; the fallback failure (r2) is discarded
                throw new RuntimeException(r);
            }
        }
    }

    /**
     * Returns the server itself when it already implements {@link Container},
     * otherwise reflectively calls its {@code getContainer()} method to obtain one.
     */
    private static Container getContainer(Object server) {
        if (server instanceof Container) {
            return (Container)server;
        }
        try {
            return (Container)server.getClass().getMethod("getContainer").invoke(server);
        } catch (RuntimeException t) {
            throw t;
        } catch (Throwable t) {
            throw new RuntimeException(t);
        }
    }
}
apache-2.0
ampproject/amp-by-example
playground/src/document/document.js
2824
// Copyright 2018 The AMPHTML Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import events from '../events/events.js';

// Backend route used to load and store playground snippets.
const ROOT = '/document/';

// Published on the shared event bus whenever the document state changes.
export const EVENT_DOCUMENT_STATE_CHANGED = 'playground-document-state-changed';

// Document states: locally modified, persisted, or not writable in place.
export const DIRTY = 'dirty';
export const SAVED = 'saved';
export const READ_ONLY = 'readOnly';

export function createDocument() {
  return new PlaygroundDocument(window);
}

/**
 * Tracks the currently edited playground snippet: loading it from an external
 * URL or the snippet store, saving/forking it, and publishing state changes
 * (DIRTY / SAVED / READ_ONLY) via the event bus.
 */
class PlaygroundDocument {
  constructor(win) {
    this.win = win;
    this.state = SAVED;
    this.docId = '';
  }

  /**
   * Loads an arbitrary external URL through the playground's fetch proxy and
   * marks the document read-only (external pages cannot be saved in place).
   *
   * @param {string} url the raw (unencoded) URL to fetch
   * @return {!Promise<string>} the fetched document's source text
   */
  fetchUrl(url) {
    const headers = new Headers();
    headers.append('x-requested-by', 'playground');
    headers.append('Content-Type', 'text/html');
    // Fix: encode the target URL, otherwise any '&', '=' or '#' inside it
    // would corrupt the proxy request's query string. (Assumes callers pass
    // the raw, unencoded URL - confirm no caller pre-encodes.)
    return fetch('/playground/fetch?url=' + encodeURIComponent(url), {
      mode: 'cors',
      headers,
    }).then((response) => {
      if (!response.ok) {
        throw new Error('Failed fetching document');
      }
      this._changeState(READ_ONLY);
      return response.text();
    });
  }

  /**
   * Loads a stored snippet by id. For read-only snippets the local doc id is
   * cleared so that subsequent saves fork instead of overwriting.
   *
   * @param {string} docId
   * @return {!Promise<string>} the snippet content
   */
  fetchDocument(docId) {
    this.docId = docId;
    return fetch(ROOT + docId, {
      mode: 'cors',
      credentials: 'include',
    })
      .then((response) => {
        if (!response.ok) {
          throw new Error('Failed fetching document');
        }
        return response.json();
      })
      .then((jsonDocument) => {
        if (jsonDocument.readOnly) {
          this._changeState(READ_ONLY);
          this.docId = '';
        } else {
          this.docId = jsonDocument.id;
        }
        return jsonDocument.content;
      });
  }

  /** Marks the document as locally modified. */
  update() {
    this._changeState(DIRTY);
  }

  /**
   * Saves the snippet as a new document (empty id lets the server assign one).
   *
   * @param {string} snippet
   * @return {!Promise<string>} the newly assigned document id
   */
  fork(snippet) {
    return this._saveSnippet(snippet, '');
  }

  /**
   * Saves the snippet under the current document id.
   *
   * @param {string} snippet
   * @return {!Promise<string>} the document id
   */
  save(snippet) {
    return this._saveSnippet(snippet, this.docId);
  }

  /**
   * POSTs the snippet to the store and marks the document saved.
   * NOTE(review): response.ok is not checked here; a failed save surfaces as a
   * JSON parse error - consider handling explicitly.
   */
  _saveSnippet(snippet, snippetId) {
    const url = ROOT + snippetId;
    return fetch(url, {
      method: 'POST',
      body: snippet,
      credentials: 'include',
    }).then((response) => response.json())
      .then((data) => {
        this._changeState(SAVED);
        return data.id;
      });
  }

  /**
   * Transitions to newState and notifies listeners. Read-only documents never
   * become dirty: edits to them can only be persisted via fork().
   */
  _changeState(newState) {
    if (this.state === newState) {
      return;
    }
    if (this.state === READ_ONLY && newState === DIRTY) {
      return;
    }
    this.state = newState;
    events.publish(EVENT_DOCUMENT_STATE_CHANGED, this.state);
  }
}
apache-2.0
artgon/netty
buffer/src/main/java/io/netty/buffer/PooledByteBufAllocator.java
27961
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.buffer; import static io.netty.util.internal.ObjectUtil.checkPositiveOrZero; import io.netty.util.NettyRuntime; import io.netty.util.concurrent.EventExecutor; import io.netty.util.concurrent.FastThreadLocal; import io.netty.util.concurrent.FastThreadLocalThread; import io.netty.util.internal.PlatformDependent; import io.netty.util.internal.StringUtil; import io.netty.util.internal.SystemPropertyUtil; import io.netty.util.internal.ThreadExecutorMap; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; public class PooledByteBufAllocator extends AbstractByteBufAllocator implements ByteBufAllocatorMetricProvider { private static final InternalLogger logger = InternalLoggerFactory.getInstance(PooledByteBufAllocator.class); private static final int DEFAULT_NUM_HEAP_ARENA; private static final int DEFAULT_NUM_DIRECT_ARENA; private static final int DEFAULT_PAGE_SIZE; private static final int DEFAULT_MAX_ORDER; // 8192 << 11 = 16 MiB per chunk private static final int DEFAULT_SMALL_CACHE_SIZE; private static final int DEFAULT_NORMAL_CACHE_SIZE; private static final int DEFAULT_MAX_CACHED_BUFFER_CAPACITY; private static final int 
DEFAULT_CACHE_TRIM_INTERVAL; private static final long DEFAULT_CACHE_TRIM_INTERVAL_MILLIS; private static final boolean DEFAULT_USE_CACHE_FOR_ALL_THREADS; private static final int DEFAULT_DIRECT_MEMORY_CACHE_ALIGNMENT; static final int DEFAULT_MAX_CACHED_BYTEBUFFERS_PER_CHUNK; private static final int MIN_PAGE_SIZE = 4096; private static final int MAX_CHUNK_SIZE = (int) (((long) Integer.MAX_VALUE + 1) / 2); private final Runnable trimTask = new Runnable() { @Override public void run() { PooledByteBufAllocator.this.trimCurrentThreadCache(); } }; static { int defaultPageSize = SystemPropertyUtil.getInt("io.netty.allocator.pageSize", 8192); Throwable pageSizeFallbackCause = null; try { validateAndCalculatePageShifts(defaultPageSize); } catch (Throwable t) { pageSizeFallbackCause = t; defaultPageSize = 8192; } DEFAULT_PAGE_SIZE = defaultPageSize; int defaultMaxOrder = SystemPropertyUtil.getInt("io.netty.allocator.maxOrder", 11); Throwable maxOrderFallbackCause = null; try { validateAndCalculateChunkSize(DEFAULT_PAGE_SIZE, defaultMaxOrder); } catch (Throwable t) { maxOrderFallbackCause = t; defaultMaxOrder = 11; } DEFAULT_MAX_ORDER = defaultMaxOrder; // Determine reasonable default for nHeapArena and nDirectArena. // Assuming each arena has 3 chunks, the pool should not consume more than 50% of max memory. final Runtime runtime = Runtime.getRuntime(); /* * We use 2 * available processors by default to reduce contention as we use 2 * available processors for the * number of EventLoops in NIO and EPOLL as well. If we choose a smaller number we will run into hot spots as * allocation and de-allocation needs to be synchronized on the PoolArena. * * See https://github.com/netty/netty/issues/3888. 
*/ final int defaultMinNumArena = NettyRuntime.availableProcessors() * 2; final int defaultChunkSize = DEFAULT_PAGE_SIZE << DEFAULT_MAX_ORDER; DEFAULT_NUM_HEAP_ARENA = Math.max(0, SystemPropertyUtil.getInt( "io.netty.allocator.numHeapArenas", (int) Math.min( defaultMinNumArena, runtime.maxMemory() / defaultChunkSize / 2 / 3))); DEFAULT_NUM_DIRECT_ARENA = Math.max(0, SystemPropertyUtil.getInt( "io.netty.allocator.numDirectArenas", (int) Math.min( defaultMinNumArena, PlatformDependent.maxDirectMemory() / defaultChunkSize / 2 / 3))); // cache sizes DEFAULT_SMALL_CACHE_SIZE = SystemPropertyUtil.getInt("io.netty.allocator.smallCacheSize", 256); DEFAULT_NORMAL_CACHE_SIZE = SystemPropertyUtil.getInt("io.netty.allocator.normalCacheSize", 64); // 32 kb is the default maximum capacity of the cached buffer. Similar to what is explained in // 'Scalable memory allocation using jemalloc' DEFAULT_MAX_CACHED_BUFFER_CAPACITY = SystemPropertyUtil.getInt( "io.netty.allocator.maxCachedBufferCapacity", 32 * 1024); // the number of threshold of allocations when cached entries will be freed up if not frequently used DEFAULT_CACHE_TRIM_INTERVAL = SystemPropertyUtil.getInt( "io.netty.allocator.cacheTrimInterval", 8192); if (SystemPropertyUtil.contains("io.netty.allocation.cacheTrimIntervalMillis")) { logger.warn("-Dio.netty.allocation.cacheTrimIntervalMillis is deprecated," + " use -Dio.netty.allocator.cacheTrimIntervalMillis"); if (SystemPropertyUtil.contains("io.netty.allocator.cacheTrimIntervalMillis")) { // Both system properties are specified. Use the non-deprecated one. 
DEFAULT_CACHE_TRIM_INTERVAL_MILLIS = SystemPropertyUtil.getLong( "io.netty.allocator.cacheTrimIntervalMillis", 0); } else { DEFAULT_CACHE_TRIM_INTERVAL_MILLIS = SystemPropertyUtil.getLong( "io.netty.allocation.cacheTrimIntervalMillis", 0); } } else { DEFAULT_CACHE_TRIM_INTERVAL_MILLIS = SystemPropertyUtil.getLong( "io.netty.allocator.cacheTrimIntervalMillis", 0); } DEFAULT_USE_CACHE_FOR_ALL_THREADS = SystemPropertyUtil.getBoolean( "io.netty.allocator.useCacheForAllThreads", true); DEFAULT_DIRECT_MEMORY_CACHE_ALIGNMENT = SystemPropertyUtil.getInt( "io.netty.allocator.directMemoryCacheAlignment", 0); // Use 1023 by default as we use an ArrayDeque as backing storage which will then allocate an internal array // of 1024 elements. Otherwise we would allocate 2048 and only use 1024 which is wasteful. DEFAULT_MAX_CACHED_BYTEBUFFERS_PER_CHUNK = SystemPropertyUtil.getInt( "io.netty.allocator.maxCachedByteBuffersPerChunk", 1023); if (logger.isDebugEnabled()) { logger.debug("-Dio.netty.allocator.numHeapArenas: {}", DEFAULT_NUM_HEAP_ARENA); logger.debug("-Dio.netty.allocator.numDirectArenas: {}", DEFAULT_NUM_DIRECT_ARENA); if (pageSizeFallbackCause == null) { logger.debug("-Dio.netty.allocator.pageSize: {}", DEFAULT_PAGE_SIZE); } else { logger.debug("-Dio.netty.allocator.pageSize: {}", DEFAULT_PAGE_SIZE, pageSizeFallbackCause); } if (maxOrderFallbackCause == null) { logger.debug("-Dio.netty.allocator.maxOrder: {}", DEFAULT_MAX_ORDER); } else { logger.debug("-Dio.netty.allocator.maxOrder: {}", DEFAULT_MAX_ORDER, maxOrderFallbackCause); } logger.debug("-Dio.netty.allocator.chunkSize: {}", DEFAULT_PAGE_SIZE << DEFAULT_MAX_ORDER); logger.debug("-Dio.netty.allocator.smallCacheSize: {}", DEFAULT_SMALL_CACHE_SIZE); logger.debug("-Dio.netty.allocator.normalCacheSize: {}", DEFAULT_NORMAL_CACHE_SIZE); logger.debug("-Dio.netty.allocator.maxCachedBufferCapacity: {}", DEFAULT_MAX_CACHED_BUFFER_CAPACITY); logger.debug("-Dio.netty.allocator.cacheTrimInterval: {}", 
DEFAULT_CACHE_TRIM_INTERVAL); logger.debug("-Dio.netty.allocator.cacheTrimIntervalMillis: {}", DEFAULT_CACHE_TRIM_INTERVAL_MILLIS); logger.debug("-Dio.netty.allocator.useCacheForAllThreads: {}", DEFAULT_USE_CACHE_FOR_ALL_THREADS); logger.debug("-Dio.netty.allocator.maxCachedByteBuffersPerChunk: {}", DEFAULT_MAX_CACHED_BYTEBUFFERS_PER_CHUNK); } } public static final PooledByteBufAllocator DEFAULT = new PooledByteBufAllocator(PlatformDependent.directBufferPreferred()); private final PoolArena<byte[]>[] heapArenas; private final PoolArena<ByteBuffer>[] directArenas; private final int smallCacheSize; private final int normalCacheSize; private final List<PoolArenaMetric> heapArenaMetrics; private final List<PoolArenaMetric> directArenaMetrics; private final PoolThreadLocalCache threadCache; private final int chunkSize; private final PooledByteBufAllocatorMetric metric; public PooledByteBufAllocator() { this(false); } @SuppressWarnings("deprecation") public PooledByteBufAllocator(boolean preferDirect) { this(preferDirect, DEFAULT_NUM_HEAP_ARENA, DEFAULT_NUM_DIRECT_ARENA, DEFAULT_PAGE_SIZE, DEFAULT_MAX_ORDER); } @SuppressWarnings("deprecation") public PooledByteBufAllocator(int nHeapArena, int nDirectArena, int pageSize, int maxOrder) { this(false, nHeapArena, nDirectArena, pageSize, maxOrder); } /** * @deprecated use * {@link PooledByteBufAllocator#PooledByteBufAllocator(boolean, int, int, int, int, int, int, boolean)} */ @Deprecated public PooledByteBufAllocator(boolean preferDirect, int nHeapArena, int nDirectArena, int pageSize, int maxOrder) { this(preferDirect, nHeapArena, nDirectArena, pageSize, maxOrder, 0, DEFAULT_SMALL_CACHE_SIZE, DEFAULT_NORMAL_CACHE_SIZE); } /** * @deprecated use * {@link PooledByteBufAllocator#PooledByteBufAllocator(boolean, int, int, int, int, int, int, boolean)} */ @Deprecated public PooledByteBufAllocator(boolean preferDirect, int nHeapArena, int nDirectArena, int pageSize, int maxOrder, int tinyCacheSize, int smallCacheSize, int 
normalCacheSize) { this(preferDirect, nHeapArena, nDirectArena, pageSize, maxOrder, smallCacheSize, normalCacheSize, DEFAULT_USE_CACHE_FOR_ALL_THREADS, DEFAULT_DIRECT_MEMORY_CACHE_ALIGNMENT); } /** * @deprecated use * {@link PooledByteBufAllocator#PooledByteBufAllocator(boolean, int, int, int, int, int, int, boolean)} */ @Deprecated public PooledByteBufAllocator(boolean preferDirect, int nHeapArena, int nDirectArena, int pageSize, int maxOrder, int tinyCacheSize, int smallCacheSize, int normalCacheSize, boolean useCacheForAllThreads) { this(preferDirect, nHeapArena, nDirectArena, pageSize, maxOrder, smallCacheSize, normalCacheSize, useCacheForAllThreads); } public PooledByteBufAllocator(boolean preferDirect, int nHeapArena, int nDirectArena, int pageSize, int maxOrder, int smallCacheSize, int normalCacheSize, boolean useCacheForAllThreads) { this(preferDirect, nHeapArena, nDirectArena, pageSize, maxOrder, smallCacheSize, normalCacheSize, useCacheForAllThreads, DEFAULT_DIRECT_MEMORY_CACHE_ALIGNMENT); } /** * @deprecated use * {@link PooledByteBufAllocator#PooledByteBufAllocator(boolean, int, int, int, int, int, int, boolean, int)} */ @Deprecated public PooledByteBufAllocator(boolean preferDirect, int nHeapArena, int nDirectArena, int pageSize, int maxOrder, int tinyCacheSize, int smallCacheSize, int normalCacheSize, boolean useCacheForAllThreads, int directMemoryCacheAlignment) { this(preferDirect, nHeapArena, nDirectArena, pageSize, maxOrder, smallCacheSize, normalCacheSize, useCacheForAllThreads, directMemoryCacheAlignment); } public PooledByteBufAllocator(boolean preferDirect, int nHeapArena, int nDirectArena, int pageSize, int maxOrder, int smallCacheSize, int normalCacheSize, boolean useCacheForAllThreads, int directMemoryCacheAlignment) { super(preferDirect); threadCache = new PoolThreadLocalCache(useCacheForAllThreads); this.smallCacheSize = smallCacheSize; this.normalCacheSize = normalCacheSize; chunkSize = validateAndCalculateChunkSize(pageSize, maxOrder); 
checkPositiveOrZero(nHeapArena, "nHeapArena"); checkPositiveOrZero(nDirectArena, "nDirectArena"); checkPositiveOrZero(directMemoryCacheAlignment, "directMemoryCacheAlignment"); if (directMemoryCacheAlignment > 0 && !isDirectMemoryCacheAlignmentSupported()) { throw new IllegalArgumentException("directMemoryCacheAlignment is not supported"); } if ((directMemoryCacheAlignment & -directMemoryCacheAlignment) != directMemoryCacheAlignment) { throw new IllegalArgumentException("directMemoryCacheAlignment: " + directMemoryCacheAlignment + " (expected: power of two)"); } int pageShifts = validateAndCalculatePageShifts(pageSize); if (nHeapArena > 0) { heapArenas = newArenaArray(nHeapArena); List<PoolArenaMetric> metrics = new ArrayList<PoolArenaMetric>(heapArenas.length); for (int i = 0; i < heapArenas.length; i ++) { PoolArena.HeapArena arena = new PoolArena.HeapArena(this, pageSize, pageShifts, chunkSize, directMemoryCacheAlignment); heapArenas[i] = arena; metrics.add(arena); } heapArenaMetrics = Collections.unmodifiableList(metrics); } else { heapArenas = null; heapArenaMetrics = Collections.emptyList(); } if (nDirectArena > 0) { directArenas = newArenaArray(nDirectArena); List<PoolArenaMetric> metrics = new ArrayList<PoolArenaMetric>(directArenas.length); for (int i = 0; i < directArenas.length; i ++) { PoolArena.DirectArena arena = new PoolArena.DirectArena( this, pageSize, pageShifts, chunkSize, directMemoryCacheAlignment); directArenas[i] = arena; metrics.add(arena); } directArenaMetrics = Collections.unmodifiableList(metrics); } else { directArenas = null; directArenaMetrics = Collections.emptyList(); } metric = new PooledByteBufAllocatorMetric(this); } @SuppressWarnings("unchecked") private static <T> PoolArena<T>[] newArenaArray(int size) { return new PoolArena[size]; } private static int validateAndCalculatePageShifts(int pageSize) { if (pageSize < MIN_PAGE_SIZE) { throw new IllegalArgumentException("pageSize: " + pageSize + " (expected: " + MIN_PAGE_SIZE + ")"); 
} if ((pageSize & pageSize - 1) != 0) { throw new IllegalArgumentException("pageSize: " + pageSize + " (expected: power of 2)"); } // Logarithm base 2. At this point we know that pageSize is a power of two. return Integer.SIZE - 1 - Integer.numberOfLeadingZeros(pageSize); } private static int validateAndCalculateChunkSize(int pageSize, int maxOrder) { if (maxOrder > 14) { throw new IllegalArgumentException("maxOrder: " + maxOrder + " (expected: 0-14)"); } // Ensure the resulting chunkSize does not overflow. int chunkSize = pageSize; for (int i = maxOrder; i > 0; i --) { if (chunkSize > MAX_CHUNK_SIZE / 2) { throw new IllegalArgumentException(String.format( "pageSize (%d) << maxOrder (%d) must not exceed %d", pageSize, maxOrder, MAX_CHUNK_SIZE)); } chunkSize <<= 1; } return chunkSize; } @Override protected ByteBuf newHeapBuffer(int initialCapacity, int maxCapacity) { PoolThreadCache cache = threadCache.get(); PoolArena<byte[]> heapArena = cache.heapArena; final ByteBuf buf; if (heapArena != null) { buf = heapArena.allocate(cache, initialCapacity, maxCapacity); } else { buf = PlatformDependent.hasUnsafe() ? new UnpooledUnsafeHeapByteBuf(this, initialCapacity, maxCapacity) : new UnpooledHeapByteBuf(this, initialCapacity, maxCapacity); } return toLeakAwareBuffer(buf); } @Override protected ByteBuf newDirectBuffer(int initialCapacity, int maxCapacity) { PoolThreadCache cache = threadCache.get(); PoolArena<ByteBuffer> directArena = cache.directArena; final ByteBuf buf; if (directArena != null) { buf = directArena.allocate(cache, initialCapacity, maxCapacity); } else { buf = PlatformDependent.hasUnsafe() ? 
UnsafeByteBufUtil.newUnsafeDirectByteBuf(this, initialCapacity, maxCapacity) : new UnpooledDirectByteBuf(this, initialCapacity, maxCapacity); } return toLeakAwareBuffer(buf); } /** * Default number of heap arenas - System Property: io.netty.allocator.numHeapArenas - default 2 * cores */ public static int defaultNumHeapArena() { return DEFAULT_NUM_HEAP_ARENA; } /** * Default number of direct arenas - System Property: io.netty.allocator.numDirectArenas - default 2 * cores */ public static int defaultNumDirectArena() { return DEFAULT_NUM_DIRECT_ARENA; } /** * Default buffer page size - System Property: io.netty.allocator.pageSize - default 8192 */ public static int defaultPageSize() { return DEFAULT_PAGE_SIZE; } /** * Default maximum order - System Property: io.netty.allocator.maxOrder - default 11 */ public static int defaultMaxOrder() { return DEFAULT_MAX_ORDER; } /** * Default thread caching behavior - System Property: io.netty.allocator.useCacheForAllThreads - default true */ public static boolean defaultUseCacheForAllThreads() { return DEFAULT_USE_CACHE_FOR_ALL_THREADS; } /** * Default prefer direct - System Property: io.netty.noPreferDirect - default false */ public static boolean defaultPreferDirect() { return PlatformDependent.directBufferPreferred(); } /** * Default tiny cache size - default 0 * * @deprecated Tiny caches have been merged into small caches. */ @Deprecated public static int defaultTinyCacheSize() { return 0; } /** * Default small cache size - System Property: io.netty.allocator.smallCacheSize - default 256 */ public static int defaultSmallCacheSize() { return DEFAULT_SMALL_CACHE_SIZE; } /** * Default normal cache size - System Property: io.netty.allocator.normalCacheSize - default 64 */ public static int defaultNormalCacheSize() { return DEFAULT_NORMAL_CACHE_SIZE; } /** * Return {@code true} if direct memory cache alignment is supported, {@code false} otherwise. 
*/ public static boolean isDirectMemoryCacheAlignmentSupported() { return PlatformDependent.hasUnsafe(); } @Override public boolean isDirectBufferPooled() { return directArenas != null; } /** * Returns {@code true} if the calling {@link Thread} has a {@link ThreadLocal} cache for the allocated * buffers. */ @Deprecated public boolean hasThreadLocalCache() { return threadCache.isSet(); } /** * Free all cached buffers for the calling {@link Thread}. */ @Deprecated public void freeThreadLocalCache() { threadCache.remove(); } final class PoolThreadLocalCache extends FastThreadLocal<PoolThreadCache> { private final boolean useCacheForAllThreads; PoolThreadLocalCache(boolean useCacheForAllThreads) { this.useCacheForAllThreads = useCacheForAllThreads; } @Override protected synchronized PoolThreadCache initialValue() { final PoolArena<byte[]> heapArena = leastUsedArena(heapArenas); final PoolArena<ByteBuffer> directArena = leastUsedArena(directArenas); final Thread current = Thread.currentThread(); if (useCacheForAllThreads || current instanceof FastThreadLocalThread) { final PoolThreadCache cache = new PoolThreadCache( heapArena, directArena, smallCacheSize, normalCacheSize, DEFAULT_MAX_CACHED_BUFFER_CAPACITY, DEFAULT_CACHE_TRIM_INTERVAL); if (DEFAULT_CACHE_TRIM_INTERVAL_MILLIS > 0) { final EventExecutor executor = ThreadExecutorMap.currentExecutor(); if (executor != null) { executor.scheduleAtFixedRate(trimTask, DEFAULT_CACHE_TRIM_INTERVAL_MILLIS, DEFAULT_CACHE_TRIM_INTERVAL_MILLIS, TimeUnit.MILLISECONDS); } } return cache; } // No caching so just use 0 as sizes. 
return new PoolThreadCache(heapArena, directArena, 0, 0, 0, 0); } @Override protected void onRemoval(PoolThreadCache threadCache) { threadCache.free(false); } private <T> PoolArena<T> leastUsedArena(PoolArena<T>[] arenas) { if (arenas == null || arenas.length == 0) { return null; } PoolArena<T> minArena = arenas[0]; for (int i = 1; i < arenas.length; i++) { PoolArena<T> arena = arenas[i]; if (arena.numThreadCaches.get() < minArena.numThreadCaches.get()) { minArena = arena; } } return minArena; } } @Override public PooledByteBufAllocatorMetric metric() { return metric; } /** * Return the number of heap arenas. * * @deprecated use {@link PooledByteBufAllocatorMetric#numHeapArenas()}. */ @Deprecated public int numHeapArenas() { return heapArenaMetrics.size(); } /** * Return the number of direct arenas. * * @deprecated use {@link PooledByteBufAllocatorMetric#numDirectArenas()}. */ @Deprecated public int numDirectArenas() { return directArenaMetrics.size(); } /** * Return a {@link List} of all heap {@link PoolArenaMetric}s that are provided by this pool. * * @deprecated use {@link PooledByteBufAllocatorMetric#heapArenas()}. */ @Deprecated public List<PoolArenaMetric> heapArenas() { return heapArenaMetrics; } /** * Return a {@link List} of all direct {@link PoolArenaMetric}s that are provided by this pool. * * @deprecated use {@link PooledByteBufAllocatorMetric#directArenas()}. */ @Deprecated public List<PoolArenaMetric> directArenas() { return directArenaMetrics; } /** * Return the number of thread local caches used by this {@link PooledByteBufAllocator}. * * @deprecated use {@link PooledByteBufAllocatorMetric#numThreadLocalCaches()}. */ @Deprecated public int numThreadLocalCaches() { PoolArena<?>[] arenas = heapArenas != null ? heapArenas : directArenas; if (arenas == null) { return 0; } int total = 0; for (PoolArena<?> arena : arenas) { total += arena.numThreadCaches.get(); } return total; } /** * Return the size of the tiny cache. 
* * @deprecated use {@link PooledByteBufAllocatorMetric#tinyCacheSize()}. */ @Deprecated public int tinyCacheSize() { return 0; } /** * Return the size of the small cache. * * @deprecated use {@link PooledByteBufAllocatorMetric#smallCacheSize()}. */ @Deprecated public int smallCacheSize() { return smallCacheSize; } /** * Return the size of the normal cache. * * @deprecated use {@link PooledByteBufAllocatorMetric#normalCacheSize()}. */ @Deprecated public int normalCacheSize() { return normalCacheSize; } /** * Return the chunk size for an arena. * * @deprecated use {@link PooledByteBufAllocatorMetric#chunkSize()}. */ @Deprecated public final int chunkSize() { return chunkSize; } final long usedHeapMemory() { return usedMemory(heapArenas); } final long usedDirectMemory() { return usedMemory(directArenas); } private static long usedMemory(PoolArena<?>[] arenas) { if (arenas == null) { return -1; } long used = 0; for (PoolArena<?> arena : arenas) { used += arena.numActiveBytes(); if (used < 0) { return Long.MAX_VALUE; } } return used; } final PoolThreadCache threadCache() { PoolThreadCache cache = threadCache.get(); assert cache != null; return cache; } /** * Trim thread local cache for the current {@link Thread}, which will give back any cached memory that was not * allocated frequently since the last trim operation. * * Returns {@code true} if a cache for the current {@link Thread} exists and so was trimmed, false otherwise. */ public boolean trimCurrentThreadCache() { PoolThreadCache cache = threadCache.getIfExists(); if (cache != null) { cache.trim(); return true; } return false; } /** * Returns the status of the allocator (which contains all metrics) as string. Be aware this may be expensive * and so should not called too frequently. */ public String dumpStats() { int heapArenasLen = heapArenas == null ? 
0 : heapArenas.length; StringBuilder buf = new StringBuilder(512) .append(heapArenasLen) .append(" heap arena(s):") .append(StringUtil.NEWLINE); if (heapArenasLen > 0) { for (PoolArena<byte[]> a: heapArenas) { buf.append(a); } } int directArenasLen = directArenas == null ? 0 : directArenas.length; buf.append(directArenasLen) .append(" direct arena(s):") .append(StringUtil.NEWLINE); if (directArenasLen > 0) { for (PoolArena<ByteBuffer> a: directArenas) { buf.append(a); } } return buf.toString(); } }
apache-2.0
java110/MicroCommunity
service-api/src/main/java/com/java110/api/listener/machineTranslate/BaseMachineListener.java
5009
package com.java110.api.listener.machineTranslate;

import com.alibaba.fastjson.JSONObject;
import com.aliyuncs.utils.StringUtils;
import com.java110.api.listener.AbstractServiceApiPlusListener;
import com.java110.core.context.DataFlowContext;
import com.java110.core.event.service.api.ServiceDataFlowEvent;
import com.java110.intf.common.IMachineInnerServiceSMO;
import com.java110.dto.machine.MachineDto;
import com.java110.utils.util.Assert;
import com.java110.utils.util.StringUtil;
import com.java110.vo.ResultVo;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;

import java.util.List;
import java.util.Map;

/**
 * Parent class for device (machine) API listeners.
 * <p>
 * Provides shared request validation for endpoints called by hardware devices:
 * checking the {@code machinecode} request header, resolving the community id,
 * and verifying that the device is registered and not disabled. On any failure
 * the methods write an error {@link ResponseEntity} into the
 * {@link DataFlowContext} as a side effect and return {@code false}, so callers
 * must stop processing when validation fails.
 */
public abstract class BaseMachineListener extends AbstractServiceApiPlusListener {

    /**
     * Validates the request headers of a device call.
     * <p>
     * Asserts that the {@code machinecode} header is present and non-empty;
     * {@link Assert#hasKeyAndValue} presumably throws on failure (TODO confirm),
     * so this method has no return value.
     *
     * @param event   the service data-flow event carrying the request context
     * @param reqJson the parsed request body (unused here; kept for subclass symmetry)
     */
    protected void validateMachineHeader(ServiceDataFlowEvent event, JSONObject reqJson) {
        DataFlowContext context = event.getDataFlowContext();
        Map<String, String> reqHeader = context.getRequestHeaders();
        Assert.hasKeyAndValue(reqHeader, "machinecode", "请求报文中未包含设备编码");
        // Community-id assertion intentionally disabled; communityId is resolved
        // later in validateMachineBody from either the body or the header.
        // Assert.hasKeyAndValue(reqHeader, "communityId", "请求报文中未包含小区信息");
    }

    /**
     * Validates the body/headers of a device call and resolves the device.
     * <p>
     * Order of checks (each failure sets an HTTP-200 {@link ResponseEntity}
     * whose payload is a {@link ResultVo} error string, then returns {@code false}):
     * <ol>
     *   <li>communityId must be present (request body takes precedence over header);</li>
     *   <li>{@code machinecode} header must be present and non-empty;</li>
     *   <li>special machinecode {@code "-1"} bypasses the registry lookup
     *       (appears to be a wildcard/test device — TODO confirm);</li>
     *   <li>the device must be registered in the given community;</li>
     *   <li>the device state must not be {@code "1600"} (disabled, per the
     *       original inline comment).</li>
     * </ol>
     * On success, {@code machineCode}, {@code machineId} and {@code communityId}
     * are written into {@code reqJson} for downstream use.
     *
     * @param event                      the service data-flow event (unused directly)
     * @param context                    the data-flow context; receives the error response on failure
     * @param reqJson                    request body; mutated with resolved device fields on success
     * @param machineInnerServiceSMOImpl inner service used to look the device up
     * @return {@code true} if the request passed all checks, {@code false} otherwise
     */
    protected boolean validateMachineBody(ServiceDataFlowEvent event, DataFlowContext context, JSONObject reqJson,
                                          IMachineInnerServiceSMO machineInnerServiceSMOImpl) {
        ResponseEntity<String> responseEntity = null;
        ResultVo resultVo = null;
        Map<String, String> reqHeader = context.getRequestHeaders();
        HttpHeaders headers = new HttpHeaders();
        // Body communityId wins over the header value when both are present.
        String communityId = reqJson.containsKey("communityId")
                ? reqJson.getString("communityId") : reqHeader.get("communityId");
        if (StringUtil.isEmpty(communityId)) {
            resultVo = new ResultVo(ResultVo.CODE_MACHINE_ERROR, "请求头中未包含小区编码");
            responseEntity = new ResponseEntity<>(resultVo.toString(), headers, HttpStatus.OK);
            context.setResponseEntity(responseEntity);
            return false;
        }
        // Echo incoming headers onto the (eventual) response, minus content-length,
        // which would no longer match the new body.
        for (String key : reqHeader.keySet()) {
            if (key.toLowerCase().equals("content-length")) {
                continue;
            }
            headers.add(key, reqHeader.get(key));
        }
        if (!reqHeader.containsKey("machinecode") || StringUtils.isEmpty(reqHeader.get("machinecode"))) {
            resultVo = new ResultVo(ResultVo.CODE_MACHINE_ERROR, "请求头中未包含设备编码");
            responseEntity = new ResponseEntity<>(resultVo.toString(), headers, HttpStatus.OK);
            context.setResponseEntity(responseEntity);
            return false;
        }
        // machinecode "-1" skips registration lookup entirely and is accepted as-is.
        if ("-1".equals(reqHeader.get("machinecode"))) {
            reqJson.put("machineCode", reqHeader.get("machinecode"));
            reqJson.put("machineId", reqHeader.get("machinecode"));
            reqJson.put("communityId", communityId);
            return true;
        }
        // Verify the device is registered in this community.
        MachineDto machineDto = new MachineDto();
        machineDto.setMachineCode(reqHeader.get("machinecode"));
        machineDto.setCommunityId(communityId);
        List<MachineDto> machineDtos = machineInnerServiceSMOImpl.queryMachines(machineDto);
        if (machineDtos == null || machineDtos.size() < 1) {
            resultVo = new ResultVo(ResultVo.CODE_MACHINE_ERROR,
                    "该设备【" + reqHeader.get("machinecode") + "】未在该小区【" + communityId + "】注册");
            responseEntity = new ResponseEntity<>(resultVo.toString(), headers, HttpStatus.OK);
            context.setResponseEntity(responseEntity);
            return false;
        }
        if ("1600".equals(machineDtos.get(0).getState())) {
            // State "1600" marks the device as disabled (per original comment).
            resultVo = new ResultVo(ResultVo.CODE_MACHINE_ERROR,
                    "该设备【" + reqHeader.get("machinecode") + "】禁用状态");
            responseEntity = new ResponseEntity<>(resultVo.toString(), headers, HttpStatus.OK);
            context.setResponseEntity(responseEntity);
            return false;
        }
        // Success: publish the resolved device identity into the request body.
        reqJson.put("machineCode", machineDtos.get(0).getMachineCode());
        reqJson.put("machineId", machineDtos.get(0).getMachineId());
        reqJson.put("communityId", communityId);
        return true;
    }

    /**
     * Copies the incoming request headers into a fresh {@link HttpHeaders},
     * dropping {@code content-length} (case-insensitively) because the response
     * body length will differ from the request's.
     *
     * @param context the data-flow context holding the raw request headers
     * @return headers suitable for attaching to a device response
     */
    protected HttpHeaders getHeader(DataFlowContext context) {
        Map<String, String> reqHeader = context.getRequestHeaders();
        HttpHeaders headers = new HttpHeaders();
        for (String key : reqHeader.keySet()) {
            if (key.toLowerCase().equals("content-length")) {
                continue;
            }
            headers.add(key, reqHeader.get(key));
        }
        return headers;
    }
}
apache-2.0
snaheth/pileup.js
src/main/data/formats/bbi.js
3438
/**
 * BBI is the shared structure between bigBed and bigWig.
 * These structures are based on UCSC's src/inc/bbiFile.h
 *
 * TYPE_SET is a jBinary "type set": a declarative description of the on-disk
 * binary layout. Each entry maps a struct name to an ordered field list;
 * ['const', ...] entries are magic numbers that jBinary verifies on read.
 * @flow
 */
'use strict';

import {typeAtOffset} from './helpers';

var TYPE_SET = {
  // BBI files are little-endian throughout.
  'jBinary.littleEndian': true,

  // Top-level file header; trailing fields are parsed lazily at the offsets
  // recorded earlier in the header (via typeAtOffset).
  'Header': {
    _magic: ['const', 'uint32', 0x8789F2EB, true],
    version: 'uint16',
    zoomLevels: 'uint16',
    chromosomeTreeOffset: 'uint64',
    unzoomedDataOffset: 'uint64',
    unzoomedIndexOffset: 'uint64',
    fieldCount: 'uint16',
    definedFieldCount: 'uint16',  // 0 if no autoSql information
    autoSqlOffset: 'uint64',
    totalSummaryOffset: 'uint64',
    // Size of uncompression buffer. 0 if uncompressed.
    uncompressBufSize: 'uint32',
    // Offset to header extension; 0 if no such extension.
    // TODO: support extended headers (not used in ensGene.bb)
    extensionOffset: 'uint64',
    zoomHeaders: ['array', 'ZoomHeader', 'zoomLevels'],
    totalSummary: typeAtOffset('TotalSummary', 'totalSummaryOffset'),
    chromosomeTree: typeAtOffset('BPlusTree', 'chromosomeTreeOffset')
  },

  // Whole-file summary statistics.
  'TotalSummary': {
    basesCovered: 'uint64',
    minVal: 'float64',     // for bigBed minimum depth of coverage
    maxVal: 'float64',     // for bigBed maximum depth of coverage
    sumData: 'float64',    // for bigBed sum of coverage
    sumSquared: 'float64'  // for bigBed sum of coverage squared
  },

  // One per zoom level; points at that level's data and index sections.
  'ZoomHeader': {
    reductionLevel: 'uint32',
    _reserved: 'uint32',
    dataOffset: 'uint64',
    indexOffset: 'uint64'
  },

  // B+ tree mapping chromosome names to (id, size); keys are fixed-width strings.
  'BPlusTree': {
    magic: ['const', 'uint32', 0x78CA8C91, true],
    // Number of children per block (not byte size of block)
    blockSize: 'uint32',
    // Number of significant bytes in key
    keySize: 'uint32',
    // Number of bytes in value
    valSize: 'uint32',
    // Number of items in index
    itemCount: 'uint64',
    _reserved2: ['skip', 4],
    _reserved3: ['skip', 4],
    // Only the root node is parsed eagerly here.
    nodes: 'BPlusTreeNode'  // ['array', 'BPlusTreeNode', 'itemCount']
  },

  'BPlusTreeNode': {
    isLeaf: 'uint8',  // 1 = yes, 0 = no
    _reserved: 'uint8',
    count: 'uint16',
    contents: ['array', ['if', 'isLeaf', {
      key: ['string', 'keySize'],
      // Note: bigBed allows more general values; this is what Ensembl uses.
      // value: ['string', 'valSize']
      id: 'uint32',
      size: 'uint32'
    }, {
      // Internal node: key plus file offset of the child node.
      key: ['string', 'keySize'],
      offset: 'uint64'
    }], 'count']
  },

  // R-tree (chromosome-interval index) header; "Cir" = chromosome id + range.
  'CirTree': {
    _magic: ['const', 'uint32', 0x2468ACE0, true],
    blockSize: 'uint32',
    itemCount: 'uint64',
    startChromIx: 'uint32',
    startBase: 'uint32',
    endChromIx: 'uint32',
    endBase: 'uint32',
    fileSize: 'uint64',
    itemsPerSlot: 'uint32',
    _reserved: ['skip', 4],
    blocks: 'CirNode'
  },

  'CirNode': {
    isLeaf: 'uint8',  // 1 = yes, 0 = no
    _reserved: 'uint8',
    count: 'uint16',
    contents: [
      'array',
      [
        'if', 'isLeaf',
        'LeafData',
        'NonLeafData'
      ],
      'count'
    ]
  },

  // Leaf entry: interval bounds plus the (offset, size) of the data block.
  'LeafData': {
    startChromIx: 'uint32',
    startBase: 'uint32',
    endChromIx: 'uint32',
    endBase: 'uint32',
    offset: 'uint64',
    size: 'uint64'
  },

  // Internal entry: interval bounds plus the offset of the child node.
  'NonLeafData': {
    startChromIx: 'uint32',
    startBase: 'uint32',
    endChromIx: 'uint32',
    endBase: 'uint32',
    offset: 'uint64',
  },

  // A decoded bigBed record; 'rest' holds the tab-separated extra BED fields.
  'BedEntry': {
    'chrId': 'uint32',
    'start': 'uint32',
    'stop': 'uint32',
    'rest': 'string0'
  },

  'BedBlock': ['array', 'BedEntry'],
};

module.exports = {TYPE_SET};
apache-2.0
crmzhangwei/crm
protected/modules/Customer/views/contact/index.php
7308
<?php
/* @var $this CustomerinfoController */
/* @var $model CustomerInfo */

// Yii 1.x view: call-record ("电话记录") listing for customer management.
// Renders a search form, an AJAX-updatable grid of dial records, and per-row
// play/sync/view actions wired to the JS helpers at the bottom of this file.

$this->breadcrumbs = array(
    '客户管理' => array('admin'),
    '电话记录',
);

// Toggle the search form and submit it via yiiGridView AJAX update.
Yii::app()->clientScript->registerScript('search', "
$('.search-button').click(function(){
    $('.search-form').toggle();
    return false;
});
$('.search-form form').submit(function(){
    $('#customer-info-grid').yiiGridView('update', {
        data: $(this).serialize()
    });
    return false;
});
");
?>
<!-- search-form -->
<?php
// Search form: department/group cascade, time-range shortcuts, keyword search.
// GET so the grid's pagination links keep the filter parameters.
$form = $this->beginWidget('CActiveForm', array(
    'action' => Yii::app()->createUrl($this->route),
    'method' => 'get',
    'id' => 'form1',
)); ?>
<div class="form-group">
    <table class="table table-bordered" width="50%">
        <tr>
            <td width="3%">部门/组别</td>
            <td width="30%">
                <?php echo CHtml::dropDownList('search[dept]', $infoArr['dept'], $deptArr, array('onchange' => 'listgroup(this)')); ?>
                <?php if (!$user_info['group_arr']): ?>
                    <select id="groupinfo" name="search[group]" onchange="listuser(this)">
                        <option value ="0">--请选择组--</option>
                    </select>
                <?php else:
                    // User already belongs to group(s): pre-populate the group list.
                    echo CHtml::dropDownList('search[group]', intval($infoArr['group']), $user_info['group_arr'], array('onchange' => "listuser(this)", 'id' => 'groupinfo'));
                endif; ?>
            </td>
        </tr>
        <tr>
            <td>统计时间</td>
            <td>
                <?php // timetype: 0 = custom range, 1 = yesterday, 2 = last 7 days, 3 = last 30 days (set by subCotact()). ?>
                <?php echo $form->hiddenField($model, 'timetype', array("id" => "id_timetype")); ?>
                <button class="btn btn-sm btn-primary" type="button" onclick="subCotact(1);"> 昨天 </button>
                <button class="btn btn-sm btn-primary" type="button" onclick="subCotact(2);"> 最近7天 </button>
                <button class="btn btn-sm btn-primary" type="button" onclick="subCotact(3);"> 最近30天 </button>
                &nbsp;&nbsp;自定义:
                <?php echo $form->textField($model, 'stime', array('size' => 25, 'maxlength' => 25, 'onclick' => 'WdatePicker()')); ?>
                to
                <?php echo $form->textField($model, 'etime', array('size' => 25, 'maxlength' => 25, 'onclick' => 'WdatePicker()')); ?>
            </td>
        </tr>
        <tr>
            <td><?php echo $form->dropDownList($model, 'searchtype', array('1' => '联系人', '2' => '客户名称', '3' => '电话号码'), array('style' => "height:34px;")); ?></td>
            <td><?php echo $form->textField($model, 'keyword', array('size' => 25, 'maxlength' => 25)); ?>
                <button class="btn btn-sm btn-primary" type="button" onclick="subCotact(0);">
                    <i class="icon-search"></i> 搜 索
                </button>
            </td>
        </tr>
    </table>
</div>
<?php $this->endWidget(); ?>
<font color="red"><?php echo $form->errorSummary($model); ?></font>
<?php
// Secondary (hidden) form: manual re-sync of call records for a given date.
$form = $this->beginWidget('CActiveForm', array(
    'action' => Yii::app()->controller->createUrl("syncByDate"),
    'method' => 'get',
)); ?>
<div class="form-group" style="display:none">
    同步日期:<?php echo $form->textField($model, 'sync_date', array('class' => "Wdate", 'onClick' => "WdatePicker({dateFmt:'yyyy-MM-dd'})", 'style' => 'height:30px;')); ?>
    <button class="btn btn-sm btn-primary" type="submit">
        <i class="icon-search"></i> 同 步
    </button>
</div>
<?php $this->endWidget(); ?>
<?php
// Aggregate totals (call count + total duration) for the current filter.
$dataProvider = $model->search();
$total = $this->getTotal($dataProvider->criteria->condition);
?>
合计:共电话联系 <?php echo $total['tcount']; ?>次,通话时长为 <?php echo gmstrftime("%H:%M:%S", $total['tlong']); ?>
<?php
$dataProvider->pagination->pageVar = 'page';
// Grid of dial records. The checkbox column stores the row's uid in
// #select_<rowIndex>, which the JS helpers below read back.
$this->widget('GGridView', array(
    'id' => 'CustomerInfo-grid',
    'dataProvider' => $dataProvider,
    'columns' => array(
        array('class' => 'CCheckBoxColumn',
            'name' => 'uid',
            'id' => 'select',
            'selectableRows' => 0,
            'headerTemplate' => '{item}',
            'htmlOptions' => array(
                'width' => '20',
            ),
        ),
        'user_name',
        'cust_name',
        'extend_no',
        'phone',
        // Epoch seconds -> readable timestamp / HH:MM:SS duration.
        array('name' => 'dial_time', 'value' => 'date("Y-m-d H:i:s",$data->dial_time)'),
        //'dial_long',
        array('name' => 'dial_long', 'value' => 'gmstrftime("%H:%M:%S",$data->dial_long)'),
        array(
            'class' => 'CButtonColumn',
            'deleteButtonOptions' => array(),
            'viewButtonOptions' => array('style' => 'background-color:red'),
            'header' => '操作',
            'template' => '{play} {sync} {view}',
            'htmlOptions' => array(
                'width' => '50',
                'style' => 'text-align:center',
            ),
            'buttons' => array(
                // play/sync open dialogs via the JS handlers; view links to the
                // customer page and is only shown when the row has a cust_id.
                'play' => array(
                    'label' => '播放和下载',
                    'url' => '',
                    'imageUrl' => '',
                    'options' => array('class' => 'btn btn-info btn-minier tooltip-info', 'onclick' => 'playAndDown(this)'),
                ),
                'sync' => array(
                    'label' => '匹配',
                    'url' => '',
                    'imageUrl' => '',
                    'options' => array('class' => 'btn btn-info btn-minier tooltip-info', 'onclick' => 'sync(this)'),
                ),
                'view' => array(
                    'label' => '查看',
                    'url' => 'Yii::app()->controller->createUrl("viewCust",array("custid"=>$data->cust_id))',
                    'imageUrl' => '',
                    'options' => array('class' => 'btn btn-info btn-minier tooltip-info','target'=>'_blank'),
                    'visible'=>'$data->cust_id',
                ),
            ),
            'htmlOptions' => array(
                'width' => '220',
            )
        ),
    ),
));
?>
<div class="table-page">
    <div class="col-sm-6">
        共<span class="orange"><?= $dataProvider->totalItemCount ?></span>条记录
    </div>
    <div class="col-sm-6 no-padding-right">
        <?php $this->widget('GLinkPager', array('pages' => $dataProvider->pagination,)); ?>
    </div>
</div>
<script>
    // Open the play/download dialog for the record in the clicked row.
    // The dial id is read from the checkbox column's hidden input.
    function playAndDown(obj) {
        var trindex = $(obj).parents('tr').index();
        var dial_id = $('#select_' + trindex).val();
        var url;
        <?php $a = Yii::app()->createurl('Service/service/play4'); echo 'url=' . "'$a';"; ?>
        public.dialog('播放和下载录音', url + '&id=' + dial_id);
    }
    // Open the "match record to customer" dialog for the clicked row.
    function sync(obj) {
        var trindex = $(obj).parents('tr').index();
        var uid = $('#select_' + trindex).val();
        var url;
        <?php $a = Yii::app()->createurl('Customer/contact/syncByUid'); echo 'url=' . "'$a';"; ?>
        public.dialog('匹配记录', url + '&uid=' + uid);
    }
    // Set the time-range type and submit the search form.
    function subCotact(timetype) {
        $("#id_timetype").val(timetype);
        $("#form1").submit();
    }
</script>
<script src="/static/js/secondlevel.js"></script>
apache-2.0
falconre/falcon
lib/translator/ppc/test.rs
2556
use crate::architecture;
use crate::architecture::Endian;
use crate::executor::*;
use crate::il::*;
use crate::memory;
use crate::translator::ppc::*;
use crate::RC;

/// Builds a `Driver` positioned at the start of a one-instruction function.
///
/// The given instruction bytes are placed at address 0 in big-endian backing
/// memory, followed by a PPC no-op so the translated function has a clean
/// fall-through. `scalars` pre-seeds register values in the executor state.
fn init_driver_block<'d>(
    instruction_bytes: &[u8],
    scalars: Vec<(&str, Constant)>,
    memory_: Memory,
) -> Driver {
    let mut bytes = instruction_bytes.to_vec();
    // ori 0,0,0  (PPC no-op appended as a terminator)
    bytes.append(&mut vec![0x60, 0x00, 0x00, 0x00]);

    let mut backing = memory::backing::Memory::new(Endian::Big);
    backing.set_memory(
        0,
        bytes.to_vec(),
        memory::MemoryPermissions::EXECUTE | memory::MemoryPermissions::READ,
    );

    let function = Ppc::new().translate_function(&backing, 0).unwrap();

    // Entry block may be empty (e.g. only control flow); pick the matching
    // program location kind so stepping starts at the right place.
    let location = if function
        .control_flow_graph()
        .block(0)
        .unwrap()
        .instructions()
        .is_empty()
    {
        ProgramLocation::new(Some(0), FunctionLocation::EmptyBlock(0))
    } else {
        ProgramLocation::new(Some(0), FunctionLocation::Instruction(0, 0))
    };

    let mut program = Program::new();
    program.add_function(function);

    let mut state = State::new(memory_);
    for scalar in scalars {
        state.set_scalar(scalar.0, scalar.1);
    }

    Driver::new(
        RC::new(program),
        location,
        state,
        RC::new(architecture::Ppc::new()),
    )
}

/// Executes the instruction under test to completion and returns the final
/// value of `result_scalar` (typically a register such as "r6").
fn get_scalar(
    instruction_bytes: &[u8],
    scalars: Vec<(&str, Constant)>,
    memory: Memory,
    result_scalar: &str,
) -> Constant {
    let mut driver = init_driver_block(instruction_bytes, scalars, memory);

    // Step until the current location has no forward successors.
    while !driver
        .location()
        .apply(driver.program())
        .unwrap()
        .forward()
        .unwrap()
        .is_empty()
    {
        driver = driver.step().unwrap();
    }
    // The final step
    // driver = driver.step().unwrap();

    driver.state().get_scalar(result_scalar).unwrap().clone()
}

#[test]
fn rlwinm() {
    // rlwinm 6,4,2,0,0x1D  — rotate r4 left by 2, mask bits 0..0x1D, store in r6.
    let instruction_bytes = &[0x54, 0x86, 0x10, 0x3a];

    // 0x9000_3000 rotated left 2 = 0x4000_c002; mask 0..29 clears the low bits.
    let result = get_scalar(
        instruction_bytes,
        vec![
            ("r4", const_(0x9000_3000, 32)),
            ("r6", const_(0xffff_ffff, 32)),
        ],
        Memory::new(Endian::Big),
        "r6",
    );
    assert_eq!(result.value_u64().unwrap(), 0x4000_c000);

    // Second operand pattern to exercise bits that wrap around the rotate.
    let result = get_scalar(
        instruction_bytes,
        vec![
            ("r4", const_(0xb004_3000, 32)),
            ("r6", const_(0xffff_ffff, 32)),
        ],
        Memory::new(Endian::Big),
        "r6",
    );
    assert_eq!(result.value_u64().unwrap(), 0xc010_c000);
}
apache-2.0
googleapis/python-dialogflow
google/cloud/dialogflow_v2beta1/services/versions/transports/grpc.py
16199
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dialogflow_v2beta1.types import version from google.cloud.dialogflow_v2beta1.types import version as gcd_version from google.protobuf import empty_pb2 # type: ignore from .base import VersionsTransport, DEFAULT_CLIENT_INFO class VersionsGrpcTransport(VersionsTransport): """gRPC backend transport for Versions. Service for managing [Versions][google.cloud.dialogflow.v2beta1.Version]. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation and call it. It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" _stubs: Dict[str, Callable] def __init__( self, *, host: str = "dialogflow.googleapis.com", credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if ``channel`` is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for the grpc channel. 
It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None else: if api_mtls_endpoint: host = api_mtls_endpoint # Create SSL credentials with client_cert_source or application # default SSL credentials. 
if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials else: if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, # use the credentials which are saved credentials=self._credentials, # Set ``credentials_file`` to ``None`` here as # the credentials that we saved earlier should be used. credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) # Wrap messages. This must be done after self._grpc_channel exists self._prep_wrapped_messages(client_info) @classmethod def create_channel( cls, host: str = "dialogflow.googleapis.com", credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. 
credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. quota_project_id (Optional[str]): An optional project to use for billing and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: grpc.Channel: A gRPC channel object. Raises: google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service. """ return self._grpc_channel @property def list_versions( self, ) -> Callable[[version.ListVersionsRequest], version.ListVersionsResponse]: r"""Return a callable for the list versions method over gRPC. Returns the list of all versions of the specified agent. Returns: Callable[[~.ListVersionsRequest], ~.ListVersionsResponse]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_versions" not in self._stubs: self._stubs["list_versions"] = self.grpc_channel.unary_unary( "/google.cloud.dialogflow.v2beta1.Versions/ListVersions", request_serializer=version.ListVersionsRequest.serialize, response_deserializer=version.ListVersionsResponse.deserialize, ) return self._stubs["list_versions"] @property def get_version(self) -> Callable[[version.GetVersionRequest], version.Version]: r"""Return a callable for the get version method over gRPC. Retrieves the specified agent version. Returns: Callable[[~.GetVersionRequest], ~.Version]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_version" not in self._stubs: self._stubs["get_version"] = self.grpc_channel.unary_unary( "/google.cloud.dialogflow.v2beta1.Versions/GetVersion", request_serializer=version.GetVersionRequest.serialize, response_deserializer=version.Version.deserialize, ) return self._stubs["get_version"] @property def create_version( self, ) -> Callable[[gcd_version.CreateVersionRequest], gcd_version.Version]: r"""Return a callable for the create version method over gRPC. Creates an agent version. The new version points to the agent instance in the "default" environment. Returns: Callable[[~.CreateVersionRequest], ~.Version]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_version" not in self._stubs: self._stubs["create_version"] = self.grpc_channel.unary_unary( "/google.cloud.dialogflow.v2beta1.Versions/CreateVersion", request_serializer=gcd_version.CreateVersionRequest.serialize, response_deserializer=gcd_version.Version.deserialize, ) return self._stubs["create_version"] @property def update_version( self, ) -> Callable[[gcd_version.UpdateVersionRequest], gcd_version.Version]: r"""Return a callable for the update version method over gRPC. Updates the specified agent version. Note that this method does not allow you to update the state of the agent the given version points to. It allows you to update only mutable properties of the version resource. Returns: Callable[[~.UpdateVersionRequest], ~.Version]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_version" not in self._stubs: self._stubs["update_version"] = self.grpc_channel.unary_unary( "/google.cloud.dialogflow.v2beta1.Versions/UpdateVersion", request_serializer=gcd_version.UpdateVersionRequest.serialize, response_deserializer=gcd_version.Version.deserialize, ) return self._stubs["update_version"] @property def delete_version( self, ) -> Callable[[version.DeleteVersionRequest], empty_pb2.Empty]: r"""Return a callable for the delete version method over gRPC. Delete the specified agent version. Returns: Callable[[~.DeleteVersionRequest], ~.Empty]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_version" not in self._stubs: self._stubs["delete_version"] = self.grpc_channel.unary_unary( "/google.cloud.dialogflow.v2beta1.Versions/DeleteVersion", request_serializer=version.DeleteVersionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_version"] def close(self): self.grpc_channel.close() __all__ = ("VersionsGrpcTransport",)
apache-2.0
Glamdring/welshare
src/main/java/com/welshare/model/User.java
17536
package com.welshare.model; import java.io.Serializable; import javax.persistence.Cacheable; import javax.persistence.Column; import javax.persistence.Embedded; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; import javax.persistence.Transient; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; import javax.validation.constraints.Size; import org.apache.commons.lang3.StringUtils; import org.hibernate.annotations.DynamicUpdate; import org.hibernate.annotations.GenericGenerator; import org.hibernate.annotations.Type; import org.hibernate.search.annotations.DocumentId; import org.hibernate.search.annotations.Field; import org.hibernate.search.annotations.Indexed; import org.hibernate.search.annotations.Store; import org.hibernate.validator.constraints.Email; import org.hibernate.validator.constraints.NotEmpty; import org.joda.time.DateTime; import org.springframework.format.annotation.DateTimeFormat; import org.springframework.format.annotation.DateTimeFormat.ISO; import com.welshare.model.enums.AccountType; import com.welshare.model.social.BitlySettings; import com.welshare.model.social.FacebookSettings; import com.welshare.model.social.GooglePlusSettings; import com.welshare.model.social.LinkedInSettings; import com.welshare.model.social.TwitterSettings; @Entity @NamedQueries({ @NamedQuery( name = "User.login", query = "select u from User u where (u.username=:username OR u.email=:username) AND u.password=:password" ), @NamedQuery( name = "User.getByEmail", query = "SELECT u FROM User u WHERE u.email=:email" ), @NamedQuery( name = "User.getByUsername", query = "SELECT u FROM User u WHERE LOWER(u.username)=LOWER(:username)" ), @NamedQuery( name = "User.getFollowers", query = "SELECT following.primaryKey.follower FROM Following 
following WHERE following.primaryKey.followed=:followed ORDER BY following.dateTime DESC"), @NamedQuery( name = "User.getFollowing", query = "SELECT following.primaryKey.followed FROM Following following WHERE following.primaryKey.follower=:follower ORDER BY following.dateTime DESC"), @NamedQuery( name = "User.getFriends", query = "SELECT following.primaryKey.followed FROM Following following WHERE following.primaryKey.follower=:user AND following.closeFriend=true ORDER BY following.dateTime DESC"), @NamedQuery( name = "User.getFollowingMetaData", query = "SELECT following FROM Following following WHERE following.primaryKey.follower=:user"), @NamedQuery( name = "User.findByName", query = "SELECT user FROM User user WHERE user.names LIKE :name OR user.username LIKE :name or user.email LIKE :name"), @NamedQuery( name = "User.getTopUsers", query = "SELECT user FROM User user ORDER BY score+externalScore DESC"), @NamedQuery( name = "User.getTopUsersByCountry", query = "SELECT user FROM User user WHERE user.profile.country = :country ORDER BY score+externalScore DESC"), @NamedQuery( name = "User.getTopUsersByCity", query = "SELECT user FROM User user WHERE user.profile.city = :city ORDER BY score+externalScore DESC") }) @Indexed @Cacheable(true) @DynamicUpdate // update only changed fields public class User implements Serializable { private static final long serialVersionUID = -3081100632040573825L; @Id @org.springframework.data.annotation.Id @Column(columnDefinition="CHAR(32)") @GeneratedValue(generator="hibernate-uuid") @GenericGenerator(name = "hibernate-uuid", strategy = "uuid") @DocumentId private String id; @Transient private String externalId; @Column(unique = true) @Size(min=4, max=30) @NotNull @Pattern(regexp="[\\p{L}0-9_\\.]*[\\p{L}0-9_]{1}") @Field(store=Store.YES) private String username; @Embedded private ProfileSettings profile = new ProfileSettings(); @Column(unique = true) @Email @Field(store=Store.YES) private String email; @Column @Size(min=6, max=60) 
@NotNull private String password; @Column private String passwordResetToken; @Column(nullable=false) private boolean allowUnverifiedPasswordReset; @Column private String salt; @Column @NotEmpty @Field(store=Store.YES) private String names; @Column(nullable=false) private boolean active; @Column(nullable=false) private boolean changePasswordAfterLogin; @Column private String activationCode; @Column(nullable=false) private long registrationTimestamp; @Column(nullable=false) private long lastLogin; @Column(nullable=false) private long lastLogout; @Column private String gravatarHash; @Column private String profilePictureURI; @Column private String smallProfilePictureURI; @Column private String largeProfilePictureURI; @Column(nullable=false) private int score; @Column(nullable=false) private int externalScore; @Column(nullable=false) private int messages; /** * The field holds the current timezone for the user and is updated on each login */ @Column private String currentTimeZoneId; @Transient private String externalUrl; // instantiating the settings object, because of non-nullable fields // the existence of settings does not mean the user is using the service @Embedded private TwitterSettings twitterSettings = new TwitterSettings(); @Embedded private FacebookSettings facebookSettings = new FacebookSettings(); @Embedded private LinkedInSettings linkedInSettings = new LinkedInSettings(); @Embedded private GooglePlusSettings googlePlusSettings = new GooglePlusSettings(); @Embedded private BitlySettings bitlySettings; @Column(nullable=false) private int following; @Column(nullable=false) private int followers; @Column(nullable=false) private int closeFriends; @Column private String externalAuthId; @Column(nullable=false) private long lastNotificationsReadTimestamp; @Column(nullable=false) private int waitingUserId; @Column(nullable=false) private boolean viewedStartingHints; @Column(nullable=false) private boolean closedHomepageConnectLinks; @Column(nullable=false) private 
boolean receivedActivityStatsEmail; @Column(nullable=false) private int subsequentFailedLoginAttempts; @Column @Type(type = "com.welshare.util.persistence.PersistentDateTime") @DateTimeFormat(iso=ISO.DATE_TIME) private DateTime lastFailedLoginAttempt; @Column(nullable=false) private boolean admin; @Column(nullable=false) private int onlineSecondsToday; @Column(nullable=false) @Enumerated(EnumType.ORDINAL) private AccountType accountType = AccountType.FREE; public String getEmail() { return email; } public String getId() { return id; } public void setId(String id) { this.id = id; } public void setEmail(String email) { this.email = email; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getNames() { return names; } public void setNames(String names) { this.names = names; } public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getExternalId() { return externalId; } public void setExternalId(String externalId) { this.externalId = externalId; } public String getPasswordResetToken() { return passwordResetToken; } public void setPasswordResetToken(String passwordResetToken) { this.passwordResetToken = passwordResetToken; } public boolean isAllowUnverifiedPasswordReset() { return allowUnverifiedPasswordReset; } public void setAllowUnverifiedPasswordReset(boolean allowUnverifiedPasswordReset) { this.allowUnverifiedPasswordReset = allowUnverifiedPasswordReset; } public String getSalt() { return salt; } public void setSalt(String salt) { this.salt = salt; } public boolean isActive() { return active; } public void setActive(boolean active) { this.active = active; } public boolean isChangePasswordAfterLogin() { return changePasswordAfterLogin; } public void setChangePasswordAfterLogin(boolean changePasswordAfterLogin) { this.changePasswordAfterLogin = changePasswordAfterLogin; } public String getActivationCode() { 
return activationCode; } public void setActivationCode(String activationCode) { this.activationCode = activationCode; } public long getRegistrationTimestamp() { return registrationTimestamp; } public void setRegistrationTimestamp(long registrationTimestamp) { this.registrationTimestamp = registrationTimestamp; } public long getLastLogin() { return lastLogin; } public void setLastLogin(long lastLogin) { this.lastLogin = lastLogin; } public long getLastLogout() { return lastLogout; } public void setLastLogout(long lastLogout) { this.lastLogout = lastLogout; } public String getGravatarHash() { return gravatarHash; } public void setGravatarHash(String gravatarHash) { this.gravatarHash = gravatarHash; } public String getProfilePictureURI() { return profilePictureURI; } public void setProfilePictureURI(String profilePictureURI) { this.profilePictureURI = profilePictureURI; } public String getSmallProfilePictureURI() { return smallProfilePictureURI; } public void setSmallProfilePictureURI(String smallProfilePictureURI) { this.smallProfilePictureURI = smallProfilePictureURI; } public String getLargeProfilePictureURI() { return largeProfilePictureURI; } public void setLargeProfilePictureURI(String largeProfilePictureURI) { this.largeProfilePictureURI = largeProfilePictureURI; } public int getScore() { return score; } public void setScore(int score) { this.score = score; } public int getMessages() { return messages; } public void setMessages(int messages) { this.messages = messages; } public String getCurrentTimeZoneId() { return currentTimeZoneId; } public void setCurrentTimeZoneId(String currentTimeZoneId) { this.currentTimeZoneId = currentTimeZoneId; } public String getExternalUrl() { return externalUrl; } public void setExternalUrl(String externalUrl) { this.externalUrl = externalUrl; } public TwitterSettings getTwitterSettings() { return twitterSettings; } public void setTwitterSettings(TwitterSettings twitterSettings) { this.twitterSettings = twitterSettings; } public 
FacebookSettings getFacebookSettings() { return facebookSettings; } public void setFacebookSettings(FacebookSettings facebookSettings) { this.facebookSettings = facebookSettings; } public BitlySettings getBitlySettings() { return bitlySettings; } public void setBitlySettings(BitlySettings bitlySettings) { this.bitlySettings = bitlySettings; } public int getFollowing() { return following; } public void setFollowing(int following) { this.following = following; } public int getFollowers() { return followers; } public void setFollowers(int followers) { this.followers = followers; } public int getCloseFriends() { return closeFriends; } public void setCloseFriends(int friends) { this.closeFriends = friends; } public String getExternalAuthId() { return externalAuthId; } public void setExternalAuthId(String externalAuthId) { this.externalAuthId = externalAuthId; } public long getLastNotificationsReadTimestamp() { return lastNotificationsReadTimestamp; } public void setLastNotificationsReadTimestamp(long lastNotificationsReadTimestamp) { this.lastNotificationsReadTimestamp = lastNotificationsReadTimestamp; } public int getWaitingUserId() { return waitingUserId; } public void setWaitingUserId(int waitingUserId) { this.waitingUserId = waitingUserId; } public boolean isViewedStartingHints() { return viewedStartingHints; } public void setViewedStartingHints(boolean viewedStartingHints) { this.viewedStartingHints = viewedStartingHints; } public boolean isClosedHomepageConnectLinks() { return closedHomepageConnectLinks; } public void setClosedHomepageConnectLinks(boolean closedHomepageConnectLinks) { this.closedHomepageConnectLinks = closedHomepageConnectLinks; } public int getSubsequentFailedLoginAttempts() { return subsequentFailedLoginAttempts; } public void setSubsequentFailedLoginAttempts(int subsequentFailedLoginAttempts) { this.subsequentFailedLoginAttempts = subsequentFailedLoginAttempts; } public DateTime getLastFailedLoginAttempt() { return lastFailedLoginAttempt; } 
public void setLastFailedLoginAttempt(DateTime lastFailedLoginAttempt) { this.lastFailedLoginAttempt = lastFailedLoginAttempt; } public boolean isAdmin() { return admin; } public void setAdmin(boolean admin) { this.admin = admin; } public LinkedInSettings getLinkedInSettings() { return linkedInSettings; } public void setLinkedInSettings(LinkedInSettings linkedInSettings) { this.linkedInSettings = linkedInSettings; } public GooglePlusSettings getGooglePlusSettings() { return googlePlusSettings; } public void setGooglePlusSettings(GooglePlusSettings googlePlusSettings) { this.googlePlusSettings = googlePlusSettings; } public ProfileSettings getProfile() { return profile; } public void setProfile(ProfileSettings profile) { this.profile = profile; } public int getOnlineSecondsToday() { return onlineSecondsToday; } public void setOnlineSecondsToday(int onlineMinutesToday) { this.onlineSecondsToday = onlineMinutesToday; } public int getExternalScore() { return externalScore; } public void setExternalScore(int externalScore) { this.externalScore = externalScore; } public boolean isReceivedActivityStatsEmail() { return receivedActivityStatsEmail; } public void setReceivedActivityStatsEmail(boolean receivedActivityStatsEmail) { this.receivedActivityStatsEmail = receivedActivityStatsEmail; } public AccountType getAccountType() { return accountType; } public void setAccountType(AccountType accountType) { this.accountType = accountType; } public String getPublicId() { if (id != null) { return id; } else { return externalId; } } public boolean isExternal() { return externalId != null; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((id == null) ? 
0 : id.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } User other = (User) obj; if (id == null) { if (other.id != null) { return false; } } else if (!id.equals(other.id)) { return false; } return true; } public void incrementFollowers() { this.followers++; } public void incrementFollowing() { this.following++; } public void incrementFriends() { this.closeFriends++; } public void decrementFollowing() { this.following--; } public void decrementFollowers() { this.followers--; } public void decrementFriends() { this.closeFriends--; } public void incrementMessageCount() { this.messages++; } public void decrementMessageCount() { this.messages--; } public String getActualTimeZoneId() { if (StringUtils.isNotEmpty(currentTimeZoneId)) { return currentTimeZoneId; } else { return profile.getTimeZoneId(); } } @Override public String toString() { return "User[id=" + id + ", username=" + username + "]"; } }
apache-2.0
wouldyougo/TRx
TRL.Common/Models/BarBuilder.cs
14667
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using TRL.Common.Models; using TRL.Common.Extensions.Models; namespace TRL.Common.Models { //сущность: // включает сущности: // реализует методы: //сущность: // фабрика баров // включает сущности: // тик // бар // настройки баров (тип бара, параметр для формирования бара) // фабрика баров // реализует методы: // создает новый бар добавлением нового тика // обновляет существующий бар добавлением нового тика /// <summary> /// Постритель баров /// (создает новый бар добавлением нового тика /// обновляет существующий бар добавлением нового тика) /// </summary> public class BarBuilder { /// <summary> /// Настройка бара, определяет: /// Код инструмента /// Тип бара /// Интервал /// в секундах - для времеонного интервала /// в единицах - для volume или range бара /// </summary> public BarSettings BarSettings { get; protected set; } public Bar LastBar { get; protected set; } public BarBuilder(BarSettings barSettings) { this.BarSettings = barSettings; LastBar = new Bar(); LastBar.DateTime = DateTime.MinValue; } /// <summary> /// Проверить на Symbol != BarSettings.Symbol /// Вызвать исключение /// </summary> /// <param name="tick"></param> protected virtual void CheckSymbol(string Symbol) { if (BarSettings.Symbol != null) if (Symbol != BarSettings.Symbol) { throw new System.Exception("Symbol != BarSettings.Symbol"); } } protected virtual void CheckSymbol(string Symbol1, string Symbol2) { if (BarSettings.Symbol != null) { if (Symbol1 != BarSettings.Symbol) { throw new System.Exception("Symbol != BarSettings.Symbol"); } } if (Symbol1 != Symbol2) { throw new System.Exception("Symbol1 != Symbol2"); } } /// <summary> /// Проверить на State == Enums.BarState.Finished /// Вызвать исключение /// </summary> /// <param name="tick"></param> protected virtual void CheckState(Enums.BarState State) { if (State == Enums.BarState.Finished) { throw new System.Exception("State 
== Enums.BarState.Finished"); } } /// <summary> /// Создает новый бар на основе тика /// </summary> /// <param name="tick"></param> /// <returns>Новый бар</returns> protected virtual Bar CreateBar(Tick tick) { return this.CreateBar(tick, tick.DateTime); } /// <summary> /// Создает новый бар на основе тика /// </summary> /// <param name="tick"></param> /// <param name="barDate">DateTime начала нового бара</param> /// <returns>Новый бар</returns> public virtual Bar CreateBar(Tick tick, DateTime barDate) { //if (tick.Symbol != BarSettings.Symbol) //{ // throw new System.Exception("tick.Symbol != BarSettings.Symbol"); //} CheckSymbol(tick.Symbol); double open = tick.Price; double close = tick.Price; double high = tick.Price; double low = tick.Price; double volume = tick.Volume; double volumePrice = tick.Volume * tick.Price; return new Bar { Symbol = tick.Symbol, DateTime = barDate, Open = open, High = high, Low = low, Close = close, Volume = volume, VolumePrice = volumePrice, Interval = BarSettings.Interval, //State = Enums.BarState.Started State = Enums.BarState.Changed }; //throw new NotImplementedException(); } /// <summary> /// Обновляет новый бар добавлением к бару нового тика /// </summary> /// <param name="bar"></param> /// <param name="tick"></param> /// <returns>Новый бар</returns> public virtual Bar UpdateBar(Bar bar, Tick tick) { CheckSymbol(bar.Symbol, tick.Symbol); CheckState(bar.State); if (tick.Price < bar.Low) { bar.Low = tick.Price; } else if (tick.Price > bar.High) { bar.High = tick.Price; } bar.Close = tick.Price; bar.DateTime = tick.DateTime; bar.Volume += tick.Volume; bar.VolumePrice += tick.Price * tick.Volume; bar.State = Enums.BarState.Changed; return bar; } /// <summary> /// Завершает новый бар /// </summary> /// <param name="bar"></param> /// <param name="tick"></param> /// <returns>Новый бар</returns> public virtual Bar FinishBarState(Bar bar) { bar.State = Enums.BarState.Finished; //UpdateDateId(bar); return bar; } //Bar bar2 = 
CreateBar(tick, tick.DateTime); //List<Bar> listBar = new List<Bar>(); //listBar.Add(bar); //listBar.Add(bar2); //Bar bar3; //bar3 = MakeBar(listBar); //return bar3; //throw new NotImplementedException(); ///// <summary> ///// Создает новый бар на основе нескольких тиков ///// </summary> ///// <param name="tick"></param> ///// <param name="barDate">DateTime начала нового бара</param> ///// <returns>Новый бар</returns> //public Bar CreateBar(IEnumerable<Tick> ticks, DateTime barDate) //{ // throw new NotImplementedException(); // double open = ticks.First().Price; // double close = ticks.Last().Price; // IEnumerable<Tick> orderedByPrice = OrderByPrice(ticks); // double high = orderedByPrice.Last().Price; // double low = orderedByPrice.First().Price; // double volume = orderedByPrice.Sum(i => i.Volume); // return new Bar { Symbol = ticks.ElementAt(0).Symbol, DateTime = barDate, Open = open, High = high, Low = low, Close = close, Volume = volume }; //} ///// <summary> ///// Обновляет бар добавлением нового тика ///// </summary> ///// <param name="bar"></param> ///// <param name="tick"></param> ///// <returns>Новый бар</returns> //public Bar UpdateBar(Bar bar, Tick tick) //{ // Bar bar3 = JastUpdateBar(bar, tick); // if (bar3.BarLengthHL() > BarSettings.Interval) // { // //bar3 = CreateBar(tick); // bar3 = null; // } // return bar3; // //throw new NotImplementedException(); //} ///// <summary> ///// Обновляет бар добавлением нового тика или ///// Создает новый бар добавлением нового тика ///// </summary> ///// <param name="bar"></param> ///// <param name="tick"></param> ///// <returns>Новый бар</returns> //public Bar UpdateOrMakeBar(Bar bar, Tick tick) //{ // Bar bar3 = UpdateBar(bar, tick); // if (bar3 == null) // { // bar3 = CreateBar(tick); // } // return bar3; // //Bar bar3 = JastUpdateBar(bar, tick); // //if (bar3.BarLengthHL() > BarSettings.Interval) // //{ // // bar3 = CreateBar(tick); // //} // //return bar3; // //throw new NotImplementedException(); //} 
//public static IEnumerable<Tick> OrderByPrice(IEnumerable<Tick> ticks) //{ // return ticks.OrderBy(i => i.Price); //} //public static IEnumerable<Tick> OrderByDateTime(IEnumerable<Tick> ticks) //{ // return ticks.OrderBy(i => i.DateTime); // //throw new NotImplementedException(); //} //public IEnumerable<Bar> OrderByLow(IEnumerable<Bar> bars) //{ // return bars.OrderBy(i => i.Low); //} //public IEnumerable<Bar> OrderByHigh(IEnumerable<Bar> bars) //{ // return bars.OrderBy(i => i.High); //} //public static IEnumerable<Bar> OrderByDateTime(IEnumerable<Bar> bars) //{ // return bars.OrderBy(i => i.DateTime); //} /// <summary> /// Создает новый бар из нескольких /// </summary> /// <param name="bars"></param> /// <returns>Новый бар</returns> protected virtual Bar MakeBar(List<Bar> bars) { //throw new NotImplementedException(); double open = bars.First().Open; double close = bars.Last().Close; List<Bar> orderedByHigh = bars.OrderBy(i => i.High).ToList<Bar>(); double high = orderedByHigh.Last().High; List<Bar> orderedByLow = bars.OrderBy(i => i.Low).ToList<Bar>(); double low = orderedByLow.First().Low; double volume = bars.Sum(i => i.Volume); return new Bar { Symbol = bars.First().Symbol, DateTime = bars.First().DateTime, Open = open, High = high, Low = low, Close = close, Volume = volume, Interval = BarSettings.Interval }; } /// <summary> /// Создает новый бар на основе price /// </summary> /// <param name="price"></param> /// <param name="barDate">DateTime начала нового бара</param> /// <returns>Новый бар</returns> public virtual Bar CreateBar(double price, DateTime dateTime, string symbol) { //if (tick.Symbol != BarSettings.Symbol) //{ // throw new System.Exception("tick.Symbol != BarSettings.Symbol"); //} CheckSymbol(symbol); double open = price; double close = price; double high = price; double low = price; double volume = 0; double volumePrice = 0; return new Bar { Symbol = symbol, DateTime = dateTime, Open = open, High = high, Low = low, Close = close, Volume = 
volume, VolumePrice = volumePrice, Interval = BarSettings.Interval, //State = Enums.BarState.Started State = Enums.BarState.Changed }; //throw new NotImplementedException(); } /// <summary> /// Обновляет новый бар интервалом после поступления тика /// цена закрытия = цене открытия + интервал /// </summary> /// <param name="bar"></param> /// <param name="tick"></param> /// <returns>Новый бар</returns> public virtual void UpdateBarFinish(Bar bar, Tick tick) { CheckSymbol(bar.Symbol, tick.Symbol); CheckState(bar.State); if (tick.Price < bar.Low) { //bar.Low = tick.Price; bar.Low = bar.High - BarSettings.Interval; bar.Close = bar.Low; } else if (tick.Price > bar.High) { //bar.High = tick.Price; bar.High = bar.Low + BarSettings.Interval; bar.Close = bar.High; } bar.DateTime = tick.DateTime; ///bar.State = Enums.BarState.Finished; ///UpdateDateId(bar); FinishBarState(bar); //bar.Volume += tick.Volume; //bar.VolumePrice += tick.Price * tick.Volume; //bar.State = Enums.BarState.Changed; //return bar; //throw new NotImplementedException(); } /// <summary> /// Обновить ID (DateTime.Ticks) в зависимости от ID предыдущего бара /// </summary> /// <param name="bar"></param> protected virtual void UpdateDateId(Bar bar) { /// При завершении бара сравнить ID (DateTime.Ticks) с ID предыдущего бара, /// если совпадают (или меньше) - это значит бар "фиктивный", /// т.е. сфомирован за счет быстрого движения, /// чтобы ID (DateTime.Ticks) отличались нужно увеличить Ticks для текущего бара if (bar.DateID <= LastBar.DateID) { bar.DateTime = LastBar.DateTime.AddTicks(1); } /// Хранить в построителе предыдущий завершенный бар LastBar = bar; } } }
apache-2.0
aeolusproject/conductor
src/db/migrate/20130116112864_add_keys_pf_pa.rb
342
class AddKeysPfPa < ActiveRecord::Migration def change add_foreign_key "pool_families_provider_accounts", "pool_families", :name => "pool_families_provider_accounts_pool_family_id_fk" add_foreign_key "pool_families_provider_accounts", "provider_accounts", :name => "pool_families_provider_accounts_provider_account_id_fk" end end
apache-2.0
davidtsadler/ebay-sdk-php
src/PostOrder/Types/SubmitCancellationRequestRestRequest.php
1039
<?php /** * DO NOT EDIT THIS FILE! * * This file was automatically generated from external sources. * * Any manual change here will be lost the next time the SDK * is updated. You've been warned! */ namespace DTS\eBaySDK\PostOrder\Types; /** * */ class SubmitCancellationRequestRestRequest extends \DTS\eBaySDK\PostOrder\Types\CreateCancelRequest { /** * @var array Properties belonging to objects of this class. */ private static $propertyTypes = [ ]; /** * @param array $values Optional properties and values to assign to the object. */ public function __construct(array $values = []) { list($parentValues, $childValues) = self::getParentValues(self::$propertyTypes, $values); parent::__construct($parentValues); if (!array_key_exists(__CLASS__, self::$properties)) { self::$properties[__CLASS__] = array_merge(self::$properties[get_parent_class()], self::$propertyTypes); } $this->setValues(__CLASS__, $childValues); } }
apache-2.0
eddumelendez/spring-security
core/src/main/java/org/springframework/security/provisioning/UserDetailsManager.java
1799
/* * Copyright 2002-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.provisioning; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.core.userdetails.UserDetailsService; /** * An extension of the {@link UserDetailsService} which provides the ability to create new * users and update existing ones. * * @author Luke Taylor * @since 2.0 */ public interface UserDetailsManager extends UserDetailsService { /** * Create a new user with the supplied details. */ void createUser(UserDetails user); /** * Update the specified user. */ void updateUser(UserDetails user); /** * Remove the user with the given login name from the system. */ void deleteUser(String username); /** * Modify the current user's password. This should change the user's password in the * persistent user repository (datbase, LDAP etc). * * @param oldPassword current password (for re-authentication if required) * @param newPassword the password to change to */ void changePassword(String oldPassword, String newPassword); /** * Check if a user with the supplied login name exists in the system. */ boolean userExists(String username); }
apache-2.0
Ethan826/udacity-conference
static/js/app.js
3495
'use strict';

/**
 * @ngdoc object
 * @name conferenceApp
 * @requires $routeProvider
 * @requires conferenceControllers
 * @requires ui.bootstrap
 *
 * @description
 * Root app, which routes and specifies the partial html and controller depending on the url requested.
 */
var app = angular.module('conferenceApp', ['conferenceControllers', 'ngRoute', 'ui.bootstrap']).
    config(['$routeProvider',
        function ($routeProvider) {
            $routeProvider.
                when('/conference', {
                    templateUrl: '/partials/show_conferences.html',
                    controller: 'ShowConferenceCtrl'
                }).
                when('/conference/create', {
                    templateUrl: '/partials/create_conferences.html',
                    controller: 'CreateConferenceCtrl'
                }).
                when('/conference/detail/:websafeConferenceKey', {
                    templateUrl: '/partials/conference_detail.html',
                    controller: 'ConferenceDetailCtrl'
                }).
                when('/profile', {
                    templateUrl: '/partials/profile.html',
                    controller: 'MyProfileCtrl'
                }).
                when('/', {
                    templateUrl: '/partials/home.html'
                }).
                otherwise({
                    redirectTo: '/'
                });
        }]);

/**
 * @ngdoc filter
 * @name startFrom
 *
 * @description
 * A filter that extracts an array from the specific index.
 */
app.filter('startFrom', function () {
    /**
     * Extracts an array from the specific index.
     *
     * @param {Array} data
     * @param {Integer} start
     * @returns {Array|*}
     */
    var filter = function (data, start) {
        return data.slice(start);
    }; // fixed: missing semicolon after function-expression assignment

    return filter;
});

/**
 * @ngdoc constant
 * @name HTTP_ERRORS
 *
 * @description
 * Holds the constants that represent HTTP error codes.
 */
app.constant('HTTP_ERRORS', {
    'UNAUTHORIZED': 401
});

/**
 * @ngdoc service
 * @name oauth2Provider
 *
 * @description
 * Service that holds the OAuth2 information shared across all the pages.
 */
app.factory('oauth2Provider', function ($modal) {
    var oauth2Provider = {
        CLIENT_ID: '491800841575-ta27mji2fb4nhf992cmp8nni9b8ihhke.apps.googleusercontent.com',
        SCOPES: 'email profile',
        signedIn: false
    }; // fixed: missing semicolon after object-literal assignment

    /**
     * Calls the OAuth2 authentication method.
     */
    oauth2Provider.signIn = function (callback) {
        gapi.auth.signIn({
            'clientid': oauth2Provider.CLIENT_ID,
            'cookiepolicy': 'single_host_origin',
            'accesstype': 'online',
            'approveprompt': 'auto',
            'scope': oauth2Provider.SCOPES,
            'callback': callback
        });
    };

    /**
     * Logs out the user.
     */
    oauth2Provider.signOut = function () {
        gapi.auth.signOut();
        // Explicitly set the invalid access token in order to make the API calls fail.
        gapi.auth.setToken({access_token: ''}); // fixed: missing semicolon
        oauth2Provider.signedIn = false;
    };

    /**
     * Shows the modal with Google+ sign in button.
     *
     * @returns {*|Window}
     */
    oauth2Provider.showLoginModal = function () {
        var modalInstance = $modal.open({
            templateUrl: '/partials/login.modal.html',
            controller: 'OAuth2LoginModalCtrl'
        });
        return modalInstance;
    };

    return oauth2Provider;
});
apache-2.0
spullara/niblayout
src/com/sampullara/awt/platform/MacPlatformStandards.java
1735
/*
Copyright 2004 Sam Pullara

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

$Author: sam $
$Date: 2004/12/17 01:06:41 $
$Header: /home/cvs/root/NIBLayout/src/com/sampullara/awt/platform/MacPlatformStandards.java,v 1.1 2004/12/17 01:06:41 sam Exp $
$Id: MacPlatformStandards.java,v 1.1 2004/12/17 01:06:41 sam Exp $

Revisions:
$Log: MacPlatformStandards.java,v $
Revision 1.1  2004/12/17 01:06:41  sam
Large scale refactoring of the editor.  Still need to comment a ton of things.

Revision 1.3  2004/12/16 08:01:31  sam
Initial work on Swing Interface Builder

Version: $Name:  $
*/
package com.sampullara.awt.platform;

/**
 * Platform layout guidelines for the Mac. The pixel values below were derived
 * by measuring the defaults used by Apple's Interface Builder tool.
 * <p/>
 * User: sam
 * Date: Dec 10, 2004
 * Time: 2:24:44 AM
 */
public class MacPlatformStandards extends PlatformStandards {

    /** Gap in pixels between a component and the window's vertical edges. */
    public int getVerticalEdgeSpacing() {
        return 12;
    }

    /** Gap in pixels between a component and the window's horizontal edges. */
    public int getHorizontalEdgeSpacing() {
        return 14;
    }

    /** Vertical gap in pixels between adjacent components. */
    public int getVerticalComponentSpacing() {
        return 10;
    }

    /** Horizontal gap in pixels between adjacent components. */
    public int getHorizontalComponentSpacing() {
        return 10;
    }
}
apache-2.0
nickman/heliosJMX
src/main/java/com/heliosapm/script/compilers/groovy/ConfigurableGroovyScriptEngineFactory.java
2891
/** * Helios, OpenSource Monitoring * Brought to you by the Helios Development Group * * Copyright 2007, Helios Development Group and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
* */ package com.heliosapm.script.compilers.groovy; import groovy.lang.GroovyClassLoader; import javax.script.ScriptContext; import javax.script.ScriptEngine; import org.codehaus.groovy.jsr223.GroovyScriptEngineFactory; import com.heliosapm.jmx.config.Configuration; /** * <p>Title: ConfigurableGroovyScriptEngineFactory</p> * <p>Description: An extension of the jsr223 Groovy ScriptEngineFactory that supports configuring the compiler options</p> * <p>Company: Helios Development Group LLC</p> * @author Whitehead (nwhitehead AT heliosdev DOT org) * <p><code>com.heliosapm.script.compilers.groovy.ConfigurableGroovyScriptEngineFactory</code></p> */ public class ConfigurableGroovyScriptEngineFactory extends GroovyScriptEngineFactory { /** The customized groovy class loader */ protected final GroovyClassLoader groovyClassLoader; /** The customizable compilation */ protected final GroovyCompilationCustomizer compilationCustomizer = new GroovyCompilationCustomizer(); /** * Creates a new ConfigurableGroovyScriptEngineFactory */ public ConfigurableGroovyScriptEngineFactory() { groovyClassLoader = new GroovyClassLoader(Thread.currentThread().getContextClassLoader(), compilationCustomizer.getDefaultConfig(), true); } /** * {@inheritDoc} * @see org.codehaus.groovy.jsr223.GroovyScriptEngineFactory#getEngineName() */ @Override public String getEngineName() { return "Configurable " + super.getEngineName(); } /** * {@inheritDoc} * @see org.codehaus.groovy.jsr223.GroovyScriptEngineFactory#getScriptEngine() */ @Override public ScriptEngine getScriptEngine() { final ScriptEngine se = new ConfigurableGroovyScriptEngineImpl(groovyClassLoader); Configuration cfg = new Configuration(); se.setBindings(cfg, ScriptContext.ENGINE_SCOPE); return se; } }
apache-2.0
Incubaid/arakoon
pylabs/test/server/quick/test_client_lib.py
1151
""" Copyright (2010-2014) INCUBAID BVBA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from Compat import X def test_client_lib(): my_temp = '/tmp/client_lib_test' cmds = [ (['make', 'uninstall_client'], None), (['make', 'install_client'], None), (['mkdir', '-p', my_temp], None), (['cp', './examples/ocaml/demo.ml', './examples/ocaml/_tags', my_temp], None), (['ocamlbuild', '-use-ocamlfind', 'demo.native'], my_temp), (['make', 'uninstall_client'], None), ] for cmd, cwd in cmds: if cwd == None: cwd = '../..' r = X.subprocess.check_output(cmd, cwd = cwd) print r
apache-2.0
deniskin82/sector-sphere
master/replica_unittest.cpp
1716
/***************************************************************************** Copyright 2011 VeryCloud LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *****************************************************************************/ /***************************************************************************** written by bdl62, last updated 04/21/2011 *****************************************************************************/ #include <cassert> #include <iostream> #include <string> #include "replica.h" using namespace std; using namespace sector; int test1() { const string file1 = "test1"; const string file2 = "test2"; ReplicaMgmt rm; ReplicaJob job; job.m_strSource = file1; job.m_iPriority = COPY; // priority 0. rm.insert(job); assert(rm.getTotalNum() == 1); ReplicaMgmt::iterator iter = rm.begin(); assert(iter->m_strSource == file1); job.m_strSource = file2; job.m_iPriority = BACKGROUND; // priority 1. rm.insert(job); assert(rm.getTotalNum() == 2); ReplicaMgmt::iterator tmp = iter; ++iter; rm.erase(tmp); assert(rm.getTotalNum() == 1); assert(iter->m_strSource == file2); rm.clear(); return 0; } int main() { test1(); return 0; }
apache-2.0
0end1/code_base
Node.js_Codebase/chapter05/ch05.process-sysinfo-node.js
367
/**
 * Created by king on 15-3-22.
 *
 * ch05.process-sysinfo-node.js
 *
 * Prints the Node.js version string and the full versions table,
 * framed by a banner line before and after.
 */

// Banner printed before and after the report.
var BANNER = "------ Process System Info ------";

// Label/value pairs to report, in display order.
var sections = [
    ['Node.js版本号:', process.version],
    ['Node.js版本属性:', process.versions]
];

console.info(BANNER);
console.info();
sections.forEach(function (section) {
    console.info(section[0]);
    console.info(section[1]);
    console.info();
});
console.info(BANNER);
apache-2.0
bcuff/GeocodeSharp
src/GeocodeSharp/Properties/AssemblyInfo.cs
1491
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("GeocodeSharp")] [assembly: AssemblyDescription("An async wrapper of the Google Geocode API for .NET")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("Brandon Cuff, alexinspire, Johan Nilsson")] [assembly: AssemblyProduct("GeocodeSharp")] [assembly: AssemblyCopyright("Copyright © 2014")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("b7b4bfb7-b046-4e1c-9e9b-9ef40e3ff033")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.5.0.0")] [assembly: AssemblyFileVersion("1.5.0.0")]
apache-2.0
nxjcproject/EasyUIJsonParser
EasyUIJsonParser/EasyUIJsonParser/ComboboxJsonParser.cs
1470
using EasyUIJsonParser.Infrastructure;
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace EasyUIJsonParser
{
    /// <summary>
    /// Serializes a DataTable into the JSON array format expected by the
    /// EasyUI combobox widget: [{"col":"value",...},...].
    /// </summary>
    public class ComboboxJsonParser
    {
        /// <summary>
        /// Converts the given table to a JSON array string. Cell values are
        /// trimmed and JSON-escaped (the original implementation emitted raw
        /// values, so quotes/backslashes/control characters broke the output).
        /// </summary>
        /// <param name="table">Source table; null or empty yields "[]".</param>
        /// <param name="columnsToParse">Columns to include; when omitted, all table columns are used.</param>
        /// <returns>A JSON array string, one object per row.</returns>
        public static string DataTableToJson(DataTable table, params string[] columnsToParse)
        {
            if (table == null || table.Rows.Count == 0)
                return "[]";

            if (columnsToParse.Length == 0)
                columnsToParse = ParserHelper.GetColumnName(table);

            StringBuilder sb = new StringBuilder();
            sb.Append("[");
            for (int i = 0; i < table.Rows.Count; i++)
            {
                if (i > 0)
                    sb.Append(",");
                sb.Append("{");
                for (int j = 0; j < columnsToParse.Length; j++)
                {
                    if (j > 0)
                        sb.Append(",");
                    sb.Append("\"").Append(Escape(columnsToParse[j])).Append("\":\"")
                      .Append(Escape(table.Rows[i][columnsToParse[j]].ToString().Trim()))
                      .Append("\"");
                }
                sb.Append("}");
            }
            sb.Append("]");
            return sb.ToString();
        }

        // Escapes characters that are illegal inside a JSON string literal (RFC 8259).
        private static string Escape(string value)
        {
            StringBuilder sb = new StringBuilder(value.Length);
            foreach (char c in value)
            {
                switch (c)
                {
                    case '"': sb.Append("\\\""); break;
                    case '\\': sb.Append("\\\\"); break;
                    case '\b': sb.Append("\\b"); break;
                    case '\f': sb.Append("\\f"); break;
                    case '\n': sb.Append("\\n"); break;
                    case '\r': sb.Append("\\r"); break;
                    case '\t': sb.Append("\\t"); break;
                    default:
                        if (c < ' ')
                            sb.Append("\\u").Append(((int)c).ToString("x4"));
                        else
                            sb.Append(c);
                        break;
                }
            }
            return sb.ToString();
        }
    }
}
apache-2.0
rogeralsing/gam
actor/context.go
3843
package actor import "time" // Context contains contextual information for actors type Context interface { infoPart basePart messagePart senderPart receiverPart spawnerPart stopperPart } type SenderContext interface { infoPart senderPart messagePart } type ReceiverContext interface { infoPart receiverPart messagePart } type SpawnerContext interface { infoPart spawnerPart } type infoPart interface { // Parent returns the PID for the current actors parent Parent() *PID // Self returns the PID for the current actor Self() *PID // Actor returns the actor associated with this context Actor() Actor ActorSystem() *ActorSystem } type basePart interface { // ReceiveTimeout returns the current timeout ReceiveTimeout() time.Duration // Returns a slice of the actors children Children() []*PID // Respond sends a response to the to the current `Sender` // If the Sender is nil, the actor will panic Respond(response interface{}) // Stash stashes the current message on a stack for reprocessing when the actor restarts Stash() // Watch registers the actor as a monitor for the specified PID Watch(pid *PID) // Unwatch unregisters the actor as a monitor for the specified PID Unwatch(pid *PID) // SetReceiveTimeout sets the inactivity timeout, after which a ReceiveTimeout message will be sent to the actor. // A duration of less than 1ms will disable the inactivity timer. // // If a message is received before the duration d, the timer will be reset. 
If the message conforms to // the NotInfluenceReceiveTimeout interface, the timer will not be reset SetReceiveTimeout(d time.Duration) CancelReceiveTimeout() // Forward forwards current message to the given PID Forward(pid *PID) AwaitFuture(f *Future, continuation func(res interface{}, err error)) } type messagePart interface { // Message returns the current message to be processed Message() interface{} // MessageHeader returns the meta information for the currently processed message MessageHeader() ReadonlyMessageHeader } type senderPart interface { // Sender returns the PID of actor that sent currently processed message Sender() *PID // Send sends a message to the given PID Send(pid *PID, message interface{}) // Request sends a message to the given PID Request(pid *PID, message interface{}) // Request sends a message to the given PID and also provides a Sender PID RequestWithCustomSender(pid *PID, message interface{}, sender *PID) // RequestFuture sends a message to a given PID and returns a Future RequestFuture(pid *PID, message interface{}, timeout time.Duration) *Future } type receiverPart interface { Receive(envelope *MessageEnvelope) } type spawnerPart interface { // Spawn starts a new child actor based on props and named with a unique id Spawn(props *Props) *PID // SpawnPrefix starts a new child actor based on props and named using a prefix followed by a unique id SpawnPrefix(props *Props, prefix string) *PID // SpawnNamed starts a new child actor based on props and named using the specified name // // ErrNameExists will be returned if id already exists // // Please do not use name sharing same pattern with system actors, for example "YourPrefix$1", "Remote$1", "future$1" SpawnNamed(props *Props, id string) (*PID, error) } type stopperPart interface { // Stop will stop actor immediately regardless of existing user messages in mailbox. 
Stop(pid *PID) // StopFuture will stop actor immediately regardless of existing user messages in mailbox, and return its future. StopFuture(pid *PID) *Future // Poison will tell actor to stop after processing current user messages in mailbox. Poison(pid *PID) // PoisonFuture will tell actor to stop after processing current user messages in mailbox, and return its future. PoisonFuture(pid *PID) *Future }
apache-2.0
ClarkHobbie/miranda
src/main/java/com/ltsllc/miranda/writer/IgnoreWritesState.java
1765
/*
 * Copyright 2017 Long Term Software LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.ltsllc.miranda.writer;

import com.ltsllc.miranda.Consumer;
import com.ltsllc.miranda.Message;
import com.ltsllc.miranda.State;
import org.apache.log4j.Logger;

/**
 * A {@link State} in which Write messages are logged and discarded instead of
 * being performed; every other message is delegated to the superclass.
 *
 * Created by Clark on 2/8/2017.
 */
public class IgnoreWritesState extends State {
    private static Logger logger = Logger.getLogger(IgnoreWritesState.class);

    /** Allows tests to inject a mock logger. */
    public static void setLogger (Logger logger) {
        IgnoreWritesState.logger = logger;
    }

    public IgnoreWritesState (Consumer consumer) {
        super(consumer);
    }

    /**
     * Dispatches on the message subject: Write messages are swallowed (with a
     * warning); anything else falls through to the superclass handler.
     *
     * @param m the message to process
     * @return the next state (always this state for Write messages)
     */
    public State processMessage (Message m) {
        State nextState = this;

        switch (m.getSubject()) {
            case Write: {
                WriteMessage writeMessage = (WriteMessage) m;
                nextState = processWriteMessage(writeMessage);
                break;
            }

            default: {
                nextState = super.processMessage(m);
                break;
            }
        }

        return nextState;
    }

    // Deliberately ignores the write; only records which file was skipped.
    private State processWriteMessage (WriteMessage writeMessage) {
        logger.warn ("Ignoring write to " + writeMessage.getFilename());
        return this;
    }
}
apache-2.0
hambroperks/prettytime
core/src/main/java/org/ocpsoft/prettytime/i18n/Resources_ua.java
6729
/* * Copyright 2012 <a href="mailto:lincolnbaxter@gmail.com">Lincoln Baxter, III</a> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ocpsoft.prettytime.i18n; import org.ocpsoft.prettytime.Duration; import org.ocpsoft.prettytime.TimeFormat; import org.ocpsoft.prettytime.TimeUnit; import org.ocpsoft.prettytime.impl.TimeFormatProvider; import org.ocpsoft.prettytime.units.*; import java.util.ListResourceBundle; /** * Created with IntelliJ IDEA. * User: Tumin Alexander * Date: 2012-12-13 * Time: 03:33 * * reedit to Ukrainian with Eclipse). * User: Ihor Lavrynuk * Date: 2013-01-06 * Time: 15:04 * */ public class Resources_ua extends ListResourceBundle implements TimeFormatProvider { private static final Object[][] OBJECTS = new Object[0][0]; private static final int tolerance = 50; // see http://translate.sourceforge.net/wiki/l10n/pluralforms private static final int slavicPluralForms = 3; private static class TimeFormatAided implements TimeFormat { private final String[] pluarls; public TimeFormatAided(String ... 
plurals) { if (plurals.length != slavicPluralForms) { throw new IllegalArgumentException("Wrong plural forms number for slavic language!"); } this.pluarls = plurals; } @Override public String format(Duration duration) { long quantity = duration.getQuantityRounded(tolerance); StringBuilder result = new StringBuilder(); result.append(quantity); return result.toString(); } @Override public String formatUnrounded(Duration duration) { long quantity = duration.getQuantity(); StringBuilder result = new StringBuilder(); result.append(quantity); return result.toString(); } @Override public String decorate(Duration duration, String time) { return performDecoration( duration.isInPast(), duration.isInFuture(), duration.getQuantityRounded(tolerance), time ); } @Override public String decorateUnrounded(Duration duration, String time) { return performDecoration( duration.isInPast(), duration.isInFuture(), duration.getQuantity(), time ); } private String performDecoration(boolean past, boolean future, long n, String time) { // a bit cryptic, yet well-tested // consider http://translate.sourceforge.net/wiki/l10n/pluralforms int pluralIdx = (n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 
1 : 2); if (pluralIdx > slavicPluralForms) { // impossible happening throw new IllegalStateException("Wrong plural index was calculated somehow for slavic language"); } StringBuilder result = new StringBuilder(); if (future) { result.append("через "); } result.append(time); result.append(' '); result.append(pluarls[pluralIdx]); if (past) { result.append(" тому"); } return result.toString(); } } @Override public Object[][] getContents() { return OBJECTS; } @Override public TimeFormat getFormatFor(TimeUnit t) { if (t instanceof JustNow) { return new TimeFormat() { @Override public String format(Duration duration) { return performFormat(duration); } @Override public String formatUnrounded(Duration duration) { return performFormat(duration); } private String performFormat(Duration duration) { if (duration.isInFuture()) { return "зараз"; } if (duration.isInPast()) { return "щойно"; } return null; } @Override public String decorate(Duration duration, String time) { return time; } @Override public String decorateUnrounded(Duration duration, String time) { return time; } }; } else if (t instanceof Century) { return new TimeFormatAided("століття", "століття", "столітть"); } else if (t instanceof Day) { return new TimeFormatAided("день", "дні", "днів"); } else if (t instanceof Decade) { return new TimeFormatAided("десятиліття", "десятиліття", "десятиліть"); } else if (t instanceof Hour) { return new TimeFormatAided("годину", "години", "годин"); } else if (t instanceof Millennium) { return new TimeFormatAided("тисячоліття", "тисячоліття", "тисячоліть"); } else if (t instanceof Millisecond) { return new TimeFormatAided("мілісекунду", "мілісекунди", "мілісекунд"); } else if (t instanceof Minute) { return new TimeFormatAided("хвилину", "хвилини", "хвилин"); } else if (t instanceof Month) { return new TimeFormatAided("місяць", "місяці", "місяців"); } else if (t instanceof Second) { return new TimeFormatAided("секунду", "секунди", "секунд"); } else if (t instanceof Week) { return 
new TimeFormatAided("тиждень", "тижні", "тижнів"); } else if (t instanceof Year) { return new TimeFormatAided("рік", "роки", "років"); } return null; // error } }
apache-2.0
codingllama/trillian
storage/mysql/storage_test.go
8131
// Copyright 2016 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package mysql import ( "bytes" "context" "crypto" "crypto/sha256" "database/sql" "flag" "fmt" "os" "testing" "github.com/golang/glog" "github.com/google/trillian" "github.com/google/trillian/merkle" "github.com/google/trillian/merkle/rfc6962" "github.com/google/trillian/storage" "github.com/google/trillian/storage/testdb" storageto "github.com/google/trillian/storage/testonly" ) func TestNodeRoundTrip(t *testing.T) { ctx := context.Background() cleanTestDB(DB) logID := createLogForTests(DB) s := NewLogStorage(DB, nil) const writeRevision = int64(100) nodesToStore := createSomeNodes() nodeIDsToRead := make([]storage.NodeID, len(nodesToStore)) for i := range nodesToStore { nodeIDsToRead[i] = nodesToStore[i].NodeID } { tx := beginLogTx(s, logID, t) defer tx.Close() forceWriteRevision(writeRevision, tx) // Need to read nodes before attempting to write if _, err := tx.GetMerkleNodes(ctx, 99, nodeIDsToRead); err != nil { t.Fatalf("Failed to read nodes: %s", err) } if err := tx.SetMerkleNodes(ctx, nodesToStore); err != nil { t.Fatalf("Failed to store nodes: %s", err) } if err := tx.Commit(); err != nil { t.Fatalf("Failed to commit nodes: %s", err) } } { tx := beginLogTx(s, logID, t) defer tx.Close() readNodes, err := tx.GetMerkleNodes(ctx, 100, nodeIDsToRead) if err != nil { t.Fatalf("Failed to retrieve nodes: %s", err) } if err := nodesAreEqual(readNodes, nodesToStore); err != 
nil { t.Fatalf("Read back different nodes from the ones stored: %s", err) } commit(tx, t) } } // This test ensures that node writes cross subtree boundaries so this edge case in the subtree // cache gets exercised. Any tree size > 256 will do this. func TestLogNodeRoundTripMultiSubtree(t *testing.T) { ctx := context.Background() cleanTestDB(DB) logID := createLogForTests(DB) s := NewLogStorage(DB, nil) const writeRevision = int64(100) nodesToStore, err := createLogNodesForTreeAtSize(871, writeRevision) if err != nil { t.Fatalf("failed to create test tree: %v", err) } nodeIDsToRead := make([]storage.NodeID, len(nodesToStore)) for i := range nodesToStore { nodeIDsToRead[i] = nodesToStore[i].NodeID } { tx := beginLogTx(s, logID, t) defer tx.Close() forceWriteRevision(writeRevision, tx) // Need to read nodes before attempting to write if _, err := tx.GetMerkleNodes(ctx, writeRevision-1, nodeIDsToRead); err != nil { t.Fatalf("Failed to read nodes: %s", err) } if err := tx.SetMerkleNodes(ctx, nodesToStore); err != nil { t.Fatalf("Failed to store nodes: %s", err) } if err := tx.Commit(); err != nil { t.Fatalf("Failed to commit nodes: %s", err) } } { tx := beginLogTx(s, logID, t) defer tx.Close() readNodes, err := tx.GetMerkleNodes(ctx, 100, nodeIDsToRead) if err != nil { t.Fatalf("Failed to retrieve nodes: %s", err) } if err := nodesAreEqual(readNodes, nodesToStore); err != nil { missing, extra := diffNodes(readNodes, nodesToStore) for _, n := range missing { t.Errorf("Missing: %s %s", n.NodeID.String(), n.NodeID.CoordString()) } for _, n := range extra { t.Errorf("Extra : %s %s", n.NodeID.String(), n.NodeID.CoordString()) } t.Fatalf("Read back different nodes from the ones stored: %s", err) } commit(tx, t) } } func forceWriteRevision(rev int64, tx storage.TreeTX) { mtx, ok := tx.(*logTreeTX) if !ok { panic(nil) } mtx.treeTX.writeRevision = rev } func createSomeNodes() []storage.Node { r := make([]storage.Node, 4) for i := range r { r[i].NodeID = 
storage.NewNodeIDWithPrefix(uint64(i), 8, 8, 8) h := sha256.Sum256([]byte{byte(i)}) r[i].Hash = h[:] glog.Infof("Node to store: %v\n", r[i].NodeID) } return r } func createLogNodesForTreeAtSize(ts, rev int64) ([]storage.Node, error) { tree := merkle.NewCompactMerkleTree(rfc6962.New(crypto.SHA256)) nodeMap := make(map[string]storage.Node) for l := 0; l < int(ts); l++ { // We're only interested in the side effects of adding leaves - the node updates if _, _, err := tree.AddLeaf([]byte(fmt.Sprintf("Leaf %d", l)), func(depth int, index int64, hash []byte) error { nID, err := storage.NewNodeIDForTreeCoords(int64(depth), index, 64) if err != nil { return fmt.Errorf("failed to create a nodeID for tree - should not happen d:%d i:%d", depth, index) } nodeMap[nID.String()] = storage.Node{NodeID: nID, NodeRevision: rev, Hash: hash} return nil }); err != nil { return nil, err } } // Unroll the map, which has deduped the updates for us and retained the latest nodes := make([]storage.Node, 0, len(nodeMap)) for _, v := range nodeMap { nodes = append(nodes, v) } return nodes, nil } func nodesAreEqual(lhs []storage.Node, rhs []storage.Node) error { if ls, rs := len(lhs), len(rhs); ls != rs { return fmt.Errorf("different number of nodes, %d vs %d", ls, rs) } for i := range lhs { if l, r := lhs[i].NodeID.String(), rhs[i].NodeID.String(); l != r { return fmt.Errorf("NodeIDs are not the same,\nlhs = %v,\nrhs = %v", l, r) } if l, r := lhs[i].Hash, rhs[i].Hash; !bytes.Equal(l, r) { return fmt.Errorf("Hashes are not the same for %s,\nlhs = %v,\nrhs = %v", lhs[i].NodeID.CoordString(), l, r) } } return nil } func diffNodes(got, want []storage.Node) ([]storage.Node, []storage.Node) { var missing []storage.Node gotMap := make(map[string]storage.Node) for _, n := range got { gotMap[n.NodeID.String()] = n } for _, n := range want { _, ok := gotMap[n.NodeID.String()] if !ok { missing = append(missing, n) } delete(gotMap, n.NodeID.String()) } // Unpack the extra nodes to return both as slices 
extra := make([]storage.Node, 0, len(gotMap)) for _, v := range gotMap { extra = append(extra, v) } return missing, extra } func openTestDBOrDie() *sql.DB { db, err := testdb.NewTrillianDB(context.TODO()) if err != nil { panic(err) } return db } // cleanTestDB deletes all the entries in the database. func cleanTestDB(db *sql.DB) { for _, table := range allTables { if _, err := db.ExecContext(context.TODO(), fmt.Sprintf("DELETE FROM %s", table)); err != nil { panic(fmt.Sprintf("Failed to delete rows in %s: %s", table, err)) } } } // createMapForTests creates a map-type tree for tests. Returns the treeID of the new tree. func createMapForTests(db *sql.DB) int64 { tree, err := createTree(db, storageto.MapTree) if err != nil { panic(fmt.Sprintf("Error creating map: %v", err)) } return tree.TreeId } // createLogForTests creates a log-type tree for tests. Returns the treeID of the new tree. func createLogForTests(db *sql.DB) int64 { tree, err := createTree(db, storageto.LogTree) if err != nil { panic(fmt.Sprintf("Error creating log: %v", err)) } return tree.TreeId } // createTree creates the specified tree using AdminStorage. func createTree(db *sql.DB, tree *trillian.Tree) (*trillian.Tree, error) { ctx := context.Background() s := NewAdminStorage(db) return storage.CreateTree(ctx, s, tree) } // updateTree updates the specified tree using AdminStorage. func updateTree(db *sql.DB, treeID int64, updateFn func(*trillian.Tree)) (*trillian.Tree, error) { ctx := context.Background() s := NewAdminStorage(db) return storage.UpdateTree(ctx, s, treeID, updateFn) } // DB is the database used for tests. It's initialized and closed by TestMain(). var DB *sql.DB func TestMain(m *testing.M) { flag.Parse() DB = openTestDBOrDie() defer DB.Close() cleanTestDB(DB) ec := m.Run() os.Exit(ec) }
apache-2.0
Esri/route-planner-csharp
RoutePlanner_DeveloperTools/Source/ArcLogisticsApp/Pages/Page.cs
3264
/*
 | Version 10.1.84
 | Copyright 2013 Esri
 |
 | Licensed under the Apache License, Version 2.0 (the "License");
 | you may not use this file except in compliance with the License.
 | You may obtain a copy of the License at
 |
 |    http://www.apache.org/licenses/LICENSE-2.0
 |
 | Unless required by applicable law or agreed to in writing, software
 | distributed under the License is distributed on an "AS IS" BASIS,
 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 | See the License for the specific language governing permissions and
 | limitations under the License.
 */

using System;
using System.Windows;
using System.Windows.Media;
using System.Collections.Generic;

using ESRI.ArcLogistics.App.Help;
using ESRI.ArcLogistics.App.Widgets;

namespace ESRI.ArcLogistics.App.Pages
{
    /// <summary>
    /// Abstract Page class is used as a base for all pages that can be shown in the Main Window.
    /// Concrete pages supply identity (Name/Title/Icon), content (Widgets), navigation state
    /// (CanBeLeft/IsAllowed), and completion state (IsComplete/IsRequired).
    /// </summary>
    public abstract class Page : System.Windows.Controls.Page
    {
        /// <summary>
        /// Initialize page with the instance of application.
        /// </summary>
        /// <param name="app">Application instance.</param>
        public abstract void Initialize(App app);

        /// <summary>
        /// Returns unique page name.
        /// </summary>
        public abstract new string Name
        {
            get;
        }

        /// <summary>
        /// Returns page title.
        /// </summary>
        public abstract new string Title
        {
            get;
        }

        /// <summary>
        /// Returns collection of page widgets.
        /// </summary>
        /// <remarks>Collection is read-only.</remarks>
        public abstract ICollection<PageWidget> Widgets
        {
            get;
        }

        /// <summary>
        /// Returns page icon as a TileBrush (DrawingBrush or ImageBrush).
        /// </summary>
        public abstract TileBrush Icon
        {
            get;
        }

        /// <summary>
        /// Returns true if page can be left at that moment.
        /// </summary>
        public abstract bool CanBeLeft
        {
            get;
            internal protected set;
        }

        /// <summary>
        /// Returns true if it is allowed to navigate to this page.
        /// </summary>
        public abstract bool IsAllowed
        {
            get;
            internal protected set;
        }

        /// <summary>
        /// Returns true if page is complete. This means that there is no task that must be finished on this page.
        /// </summary>
        public abstract bool IsComplete
        {
            get;
            internal protected set;
        }

        /// <summary>
        /// Returns true if page supports Complete status or not.
        /// </summary>
        public abstract bool DoesSupportCompleteStatus
        {
            get;
            internal protected set;
        }

        /// <summary>
        /// Returns true if the page must be visited by the user and all necessary tasks on it must be completed.
        /// </summary>
        public abstract bool IsRequired
        {
            get;
            internal protected set;
        }

        /// <summary>
        /// Returns name of Help Topic.
        /// </summary>
        public abstract HelpTopic HelpTopic
        {
            get;
        }

        /// <summary>
        /// Returns category name of commands that will be present in Tasks widget.
        /// </summary>
        public abstract string PageCommandsCategoryName
        {
            get;
        }
    }
}
apache-2.0
xiangxik/java-platform
extension/extension-cms/src/main/java/com/whenling/extension/cms/service/AdPositionService.java
295
package com.whenling.extension.cms.service;

import org.springframework.stereotype.Service;

import com.whenling.extension.cms.model.AdPosition;
import com.whenling.module.domain.service.BaseService;

/**
 * Spring service for {@link AdPosition} entities (Long primary key).
 * All behavior is inherited from {@code BaseService}; this subclass exists
 * so the CMS module has an injectable, entity-specific service bean.
 */
@Service
public class AdPositionService extends BaseService<AdPosition, Long> {

}
apache-2.0
isaclidberg/TitaniumTest
app/controllers/login.js
1687
var args = arguments[0] || {};

// Placeholder for UI setup that should run after the window opens.
function initUserInterface() {
}

// Starts the Google OAuth flow via the googleAuth module and wires a
// sample "Sync" button that logs the access token once authorized.
function loginWithGoogleAccount() {
    //initialize module
    var GoogleAuth = require('modules/googleAuth');
    var googleAuth = new GoogleAuth({
        clientId : 'CLIENT_ID',
        clientSecret : 'CLIENT_SECRET',
        propertyName : 'googleToken',
        scope : ['https://www.googleapis.com/auth/tasks', 'https://www.googleapis.com/auth/tasks.readonly'],
        loginHint : 'someuser@gmail.com'
    });

    //create some button
    var sync = Ti.UI.createButton({
        title : 'Sync'
    });

    // NOTE(review): `sync` is never attached to a view, so this click
    // handler is unreachable until the button is added somewhere.
    sync.addEventListener('click', function() {
        googleAuth.isAuthorized(function() {
            Ti.API.info('Access Token: ' + googleAuth.getAccessToken());
            //user is authorized so do something... just dont forget to add accessToken to your requests
        }, function() {
            //authorize first
            googleAuth.authorize();
        });
    });
} // FIX: removed a stray "\" that followed this brace and broke parsing.

// Runs the Titanium Facebook login flow and reports the outcome.
function loginWithFacebookAccount() {
    Ti.Facebook.appid = '[YOUR APPID]';
    Ti.Facebook.permissions = ['publish_stream']; // Permissions your app needs
    Ti.Facebook.addEventListener('login', function(e) {
        if (e.success) {
            alert('Logged In');
        } else if (e.error) {
            alert(e.error);
        } else if (e.cancelled) {
            alert("Canceled");
        }
    });
    Ti.Facebook.authorize();
}

// NOTE(review): stub — loads the Couchbase Lite module but never uses it,
// and ignores both parameters. SYNC_URL is also unused.
function connectCouchbase(strServer, Port) {
    var SYNC_URL = "52.11.107.126";
    var CBLite = require('com.couchbase.cbl');
}

$.imageGoogleLogin.addEventListener('click', function(e) {
    loginWithGoogleAccount();
});

$.imageFacebookLogin.addEventListener('click', function(e) {
    loginWithFacebookAccount();
});

$.index.open();
initUserInterface();
apache-2.0
wavelets/smile
SmileMath/src/smile/math/random/RandomNumberGenerator.java
1593
/****************************************************************************** * Confidential Proprietary * * (c) Copyright Haifeng Li 2011, All Rights Reserved * ******************************************************************************/ package smile.math.random; /** * Random number generator interface. * * @author Haifeng Li */ public interface RandomNumberGenerator { /** * Returns up to 32 random bits. */ public int next(int numbits); /** * Returns the next pseudorandom, uniformly distributed int value * from this random number generator's sequence. */ public int nextInt(); /** * Returns a pseudorandom, uniformly distributed int value * between 0 (inclusive) and the specified value (exclusive), * drawn from this random number generator's sequence. */ public int nextInt(int n); /** * Returns the next pseudorandom, uniformly distributed long value * from this random number generator's sequence. */ public long nextLong(); /** * Returns the next pseudorandom, uniformly distributed double value * between 0.0 and 1.0 from this random number generator's sequence. */ public double nextDouble(); /** * Returns a vector of pseudorandom, uniformly distributed double values * between 0.0 and 1.0 from this random number generator's sequence. */ public void nextDoubles(double[] d); }
apache-2.0
REMEXLabs/SmartFridge
app/src/main/java/com/example/poiz/fridgetablet/data/Picture.java
1513
package com.example.poiz.fridgetablet.data; /** * Created by poiz on 07.09.2016. */ import java.util.Date; /** * Class to hold information about a Picture */ public class Picture { String name; byte[] imageBytes; long timestamp; public long getTimestamp() { return timestamp; } public void setTimestamp(long timestamp) { this.timestamp = timestamp; } /** * Constructor * @param name name of the image * @param imageBytes bytes of the image */ public Picture(String name, byte[] imageBytes) { super(); this.name = name; this.imageBytes = imageBytes; } public Picture(String name, byte[] imageBytes, long timestamp) { super(); this.name = name; this.imageBytes = imageBytes; this.timestamp = timestamp; } public Picture(){} /** * returns the Name of the Picutre * @return Name of the Picutre */ public String getName() { return name; } /** * Sets the name of the Picture * @param name name of the Picture */ public void setName(String name) { this.name = name; } /** * returns the Image * @return bytes of the image */ public byte[] getImageBytes() { return imageBytes; } /** * Sets the Image * @param imageBytes bytes of the image */ public void setImageBytes(byte[] imageBytes) { this.imageBytes = imageBytes; } }
apache-2.0
enilfodne/matchbox
matchbox/storage/storagepb/profile_test.go
2543
package storagepb

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

var (
	// Shared well-formed fixture used by the parse and validate tests.
	testProfile = &Profile{
		Id:         "id",
		CloudId:    "cloud.yaml",
		IgnitionId: "ignition.json",
	}
)

// TestProfileParse checks that JSON field names map onto Profile fields.
func TestProfileParse(t *testing.T) {
	cases := []struct {
		json    string
		profile *Profile
	}{
		{`{"id": "id", "cloud_id": "cloud.yaml", "ignition_id": "ignition.json"}`, testProfile},
	}
	for _, c := range cases {
		// Parse error deliberately ignored; the equality assert below
		// fails anyway if parsing produced nothing.
		profile, _ := ParseProfile([]byte(c.json))
		assert.Equal(t, c.profile, profile)
	}
}

// TestProfileValidate checks AssertValid: an Id is required, other fields optional.
func TestProfileValidate(t *testing.T) {
	cases := []struct {
		profile *Profile
		valid   bool
	}{
		{testProfile, true},
		{&Profile{Id: "a1b2c3d4"}, true},
		{&Profile{}, false},
	}
	for _, c := range cases {
		valid := c.profile.AssertValid() == nil
		assert.Equal(t, c.valid, valid)
	}
}

// TestProfileCopy checks Profile.Copy is a deep copy: mutating the clone's
// nested NetBoot must not be visible through the original.
func TestProfileCopy(t *testing.T) {
	profile := &Profile{
		Id:         "id",
		CloudId:    "cloudy.tmpl",
		IgnitionId: "ignition.tmpl",
		Boot: &NetBoot{
			Kernel:  "/image/kernel",
			Initrd:  []string{"/image/initrd_a"},
			Cmdline: map[string]string{"a": "b"},
			Args:    []string{"a=b"},
		},
	}
	clone := profile.Copy()
	// assert that:
	// - Profile fields are copied to the clone
	// - Mutation of the clone does not affect the original
	assert.Equal(t, profile.Id, clone.Id)
	assert.Equal(t, profile.Name, clone.Name)
	assert.Equal(t, profile.IgnitionId, clone.IgnitionId)
	assert.Equal(t, profile.CloudId, clone.CloudId)
	assert.Equal(t, profile.Boot, clone.Boot)
	// mutate the NetBoot struct
	clone.Boot.Initrd = []string{"/image/initrd_b"}
	clone.Boot.Cmdline["c"] = "d"
	clone.Boot.Args = []string{"console=ttyS0"}
	assert.NotEqual(t, profile.Boot.Initrd, clone.Boot.Initrd)
	assert.NotEqual(t, profile.Boot.Cmdline, clone.Boot.Cmdline)
	assert.NotEqual(t, profile.Boot.Args, clone.Boot.Args)
}

// TestNetBootCopy checks NetBoot.Copy clones slice backing arrays, not just
// the slice headers: overwriting the clone's elements must not leak back.
func TestNetBootCopy(t *testing.T) {
	boot := &NetBoot{
		Kernel:  "/image/kernel",
		Initrd:  []string{"/image/initrd_a"},
		Cmdline: map[string]string{"a": "b"},
		Args:    []string{"a=b"},
	}
	clone := boot.Copy()
	// assert that:
	// - NetBoot fields are copied to the clone
	// - Mutation of the clone does not affect the original
	assert.Equal(t, boot.Kernel, clone.Kernel)
	assert.Equal(t, boot.Initrd, clone.Initrd)
	assert.Equal(t, boot.Cmdline, clone.Cmdline)
	assert.Equal(t, boot.Args, clone.Args)
	// mutate the clone's slice field contents
	extra := []string{"extra"}
	copy(clone.Initrd, extra)
	copy(clone.Args, extra)
	assert.NotEqual(t, boot.Initrd, clone.Initrd)
	assert.NotEqual(t, boot.Args, clone.Args)
}
apache-2.0
jmapper-framework/jmapper-api
JMapper Api/src/main/java/com/googlecode/jmapper/api/enums/NullPointerControl.java
865
/** * Copyright (C) 2012 - 2014 Alessandro Vurro. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.googlecode.jmapper.api.enums; /** * NullPointerControl permits to define a null pointer control to Destination or Source or All or Not any. * @author Alessandro Vurro * */ public enum NullPointerControl { DESTINATION,SOURCE,ALL,NOT_ANY }
apache-2.0
hoverruan/weiboclient4j
src/main/java/weiboclient4j/model/Timeline.java
1281
package weiboclient4j.model;

import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonProperty;

import java.util.List;

/**
 * Model for a Weibo timeline response: paging cursors, the total number of
 * statuses, and the page of statuses itself. The {@code visible} flag is
 * mapped from the JSON field {@code hasvisible}; unknown JSON properties
 * are ignored during deserialization.
 *
 * @author Hover Ruan
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class Timeline {
    // Jackson maps this from the "hasvisible" JSON key.
    @JsonProperty("hasvisible")
    private boolean visible;
    // Cursors for paging backwards/forwards through the timeline.
    private long previousCursor;
    private long nextCursor;
    private int totalNumber;
    private List<Status> statuses;

    public boolean isVisible() {
        return visible;
    }

    public void setVisible(boolean visible) {
        this.visible = visible;
    }

    public long getPreviousCursor() {
        return previousCursor;
    }

    public void setPreviousCursor(long previousCursor) {
        this.previousCursor = previousCursor;
    }

    public long getNextCursor() {
        return nextCursor;
    }

    public void setNextCursor(long nextCursor) {
        this.nextCursor = nextCursor;
    }

    public int getTotalNumber() {
        return totalNumber;
    }

    public void setTotalNumber(int totalNumber) {
        this.totalNumber = totalNumber;
    }

    public List<Status> getStatuses() {
        return statuses;
    }

    public void setStatuses(List<Status> statuses) {
        this.statuses = statuses;
    }
}
apache-2.0
OpenConext/OpenConext-manage
manage-server/src/test/java/manage/validations/BasicAuthenticationUsernameFormatValidatorTest.java
765
package manage.validations;

import org.junit.Test;

import java.util.Optional;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Checks that the username-format validator reports a problem (a present
 * Optional) for null/blank/URL-like input and accepts a plain username.
 */
public class BasicAuthenticationUsernameFormatValidatorTest {

    private BasicAuthenticationUsernameFormatValidator subject = new BasicAuthenticationUsernameFormatValidator();

    @Test
    public void validate() throws Exception {
        // Invalid inputs, checked in the same order as before.
        String[] invalidUsernames = {null, " ", "http://test"};
        for (String username : invalidUsernames) {
            Optional<String> result = subject.validate(username);
            assertTrue(result.isPresent());
        }

        // A plain username must produce no validation message.
        assertFalse(subject.validate("user").isPresent());
    }
}
apache-2.0
d07RiV/d3planner
scripts/locale.js
405
(function() {
  // Locale resource scripts; they must be fetched strictly in this order.
  var order = [
    "/scripts/locale/account.js?1236911196000",
    "/scripts/locale/itemflavor.js?1592721950096",
    "/scripts/locale/uistats.js?1543886559503",
  ];

  // Load order[idx]; once every script has loaded, notify DiabloCalc.
  function loadFrom(idx) {
    if (idx >= order.length) {
      DiabloCalc.onLocaleLoaded();
    } else {
      DC_getScript(order[idx], function() {
        loadFrom(idx + 1);
      });
    }
  }

  loadFrom(0);
})();
apache-2.0
PlanetWaves/clockworkengine
branches/3.0/engine/src/bullet-common/com/clockwork/bullet/debug/BulletRigidBodyDebugControl.java
2265
package com.clockwork.bullet.debug; import com.clockwork.bullet.collision.shapes.CollisionShape; import com.clockwork.bullet.objects.PhysicsRigidBody; import com.clockwork.bullet.util.DebugShapeFactory; import com.clockwork.math.Quaternion; import com.clockwork.math.Vector3f; import com.clockwork.renderer.RenderManager; import com.clockwork.renderer.ViewPort; import com.clockwork.scene.Node; import com.clockwork.scene.Spatial; /** * */ public class BulletRigidBodyDebugControl extends AbstractPhysicsDebugControl { protected final PhysicsRigidBody body; protected final Vector3f location = new Vector3f(); protected final Quaternion rotation = new Quaternion(); protected CollisionShape myShape; protected Spatial geom; public BulletRigidBodyDebugControl(BulletDebugAppState debugAppState, PhysicsRigidBody body) { super(debugAppState); this.body = body; myShape = body.getCollisionShape(); this.geom = DebugShapeFactory.getDebugShape(body.getCollisionShape()); this.geom.setName(body.toString()); geom.setMaterial(debugAppState.DEBUG_BLUE); } @Override public void setSpatial(Spatial spatial) { if (spatial != null && spatial instanceof Node) { Node node = (Node) spatial; node.attachChild(geom); } else if (spatial == null && this.spatial != null) { Node node = (Node) this.spatial; node.detachChild(geom); } super.setSpatial(spatial); } @Override protected void controlUpdate(float tpf) { if(myShape != body.getCollisionShape()){ Node node = (Node) this.spatial; node.detachChild(geom); geom = DebugShapeFactory.getDebugShape(body.getCollisionShape()); node.attachChild(geom); } if(body.isActive()){ geom.setMaterial(debugAppState.DEBUG_MAGENTA); }else{ geom.setMaterial(debugAppState.DEBUG_BLUE); } applyPhysicsTransform(body.getPhysicsLocation(location), body.getPhysicsRotation(rotation)); geom.setLocalScale(body.getCollisionShape().getScale()); } @Override protected void controlRender(RenderManager rm, ViewPort vp) { } }
apache-2.0
jumpstarter-io/horizon
openstack_dashboard/api/network_base.py
7134
# Copyright 2013 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Abstraction layer for networking functionalities.

This module defines internal APIs for features duplicated between
OpenStack Compute and OpenStack Networking. The networking
abstraction layer expects the methods defined in this module.
"""

import abc

import six


@six.add_metaclass(abc.ABCMeta)
class FloatingIpManager(object):
    """Abstract class to implement Floating IP methods.

    The FloatingIP object returned from methods in this class
    must contain the following attributes:

    * id: ID of Floating IP
    * ip: Floating IP address
    * pool: ID of Floating IP pool from which the address is allocated
    * fixed_ip: Fixed IP address of a VIF associated with the address
    * port_id: ID of a VIF associated with the address
      (instance_id when Nova floating IP is used)
    * instance_id: Instance ID associated with the Floating IP
    """

    @abc.abstractmethod
    def list_pools(self):
        """Fetches a list of all floating IP pools.

        A list of FloatingIpPool objects is returned.
        FloatingIpPool object is an APIResourceWrapper/APIDictWrapper
        where 'id' and 'name' attributes are defined.
        """
        pass

    @abc.abstractmethod
    def list(self):
        """Fetches a list of all floating IPs.

        The returned value is a list of FloatingIp objects.
        """
        pass

    @abc.abstractmethod
    def get(self, floating_ip_id):
        """Fetches the floating IP.

        It returns a FloatingIp object corresponding to floating_ip_id.
        """
        pass

    @abc.abstractmethod
    def allocate(self, pool=None):
        """Allocates a floating IP to the tenant.

        You must provide a pool name or id for which you would like to
        allocate a floating IP.
        """
        pass

    @abc.abstractmethod
    def release(self, floating_ip_id):
        """Releases the floating IP specified."""
        pass

    @abc.abstractmethod
    def associate(self, floating_ip_id, port_id):
        """Associates the floating IP to the port.

        port_id is a fixed IP of an instance (Nova) or
        a port_id attached to a VNIC of an instance.
        """
        pass

    @abc.abstractmethod
    def disassociate(self, floating_ip_id, port_id):
        """Disassociates the floating IP from the port.

        port_id is a fixed IP of an instance (Nova) or
        a port_id attached to a VNIC of an instance.
        """
        pass

    @abc.abstractmethod
    def list_targets(self):
        """Returns a list of association targets of instance VIFs.

        Each association target is represented as a FloatingIpTarget object.
        FloatingIpTarget is an APIResourceWrapper/APIDictWrapper and
        'id' and 'name' attributes must be defined in each object.
        FloatingIpTarget.id can be passed as port_id in associate().
        FloatingIpTarget.name is displayed in the Floating Ip Association Form.
        """
        pass

    @abc.abstractmethod
    def get_target_id_by_instance(self, instance_id):
        """Returns a target ID of floating IP association.

        Based on a backend implementation.
        """
        pass

    @abc.abstractmethod
    def list_target_id_by_instance(self, instance_id):
        """Returns a list of instance's target IDs of floating IP association.

        Based on the backend implementation
        """
        pass

    @abc.abstractmethod
    def is_simple_associate_supported(self):
        """Returns True if the default floating IP pool is enabled."""
        pass

    @abc.abstractmethod
    def is_supported(self):
        """Returns True if the floating IP feature is supported."""
        pass


@six.add_metaclass(abc.ABCMeta)
class SecurityGroupManager(object):
    """Abstract class to implement Security Group methods.

    SecurityGroup objects returned from methods in this class
    must contain the following attributes:

    * id: ID of Security Group (int for Nova, uuid for Neutron)
    * name
    * description
    * tenant_id
    * rules: A list of SecurityGroupRule objects

    SecurityGroupRule objects should have the following attributes
    (the attribute names and their formats are borrowed from the nova
    security group implementation):

    * id
    * direction
    * ethertype
    * parent_group_id: security group the rule belongs to
    * ip_protocol
    * from_port: lower limit of allowed port range (inclusive)
    * to_port: upper limit of allowed port range (inclusive)
    * ip_range: remote IP CIDR (source for ingress, dest for egress).
      The value should be a format of "{'cidr': <cidr>}"
    * group: remote security group. The value should be a format of
      "{'name': <secgroup_name>}"
    """

    @abc.abstractmethod
    def list(self):
        """Fetches a list of all security groups.

        The returned value is a list of SecurityGroup objects.
        """
        pass

    @abc.abstractmethod
    def get(self, sg_id):
        """Fetches the security group.

        It returns a SecurityGroup object corresponding to sg_id.
        """
        pass

    @abc.abstractmethod
    def create(self, name, desc):
        """Create a new security group.

        It returns the SecurityGroup object created.
        """
        pass

    @abc.abstractmethod
    def delete(self, sg_id):
        """Delete the specified security group."""
        pass

    @abc.abstractmethod
    def rule_create(self, parent_group_id,
                    direction=None, ethertype=None,
                    ip_protocol=None, from_port=None, to_port=None,
                    cidr=None, group_id=None):
        """Create a new security group rule.

        :param parent_group_id: security group id a rule is created to
        :param direction: ingress or egress
        :param ethertype: ipv4, ipv6, ...
        :param ip_protocol: tcp, udp, icmp
        :param from_port: L4 port range min
        :param to_port: L4 port range max
        :param cidr: Source IP CIDR
        :param group_id: ID of Source Security Group
        """
        pass

    @abc.abstractmethod
    def rule_delete(self, sgr_id):
        """Delete the specified security group rule."""
        pass

    @abc.abstractmethod
    def list_by_instance(self, instance_id):
        """Get security groups of an instance."""
        pass

    @abc.abstractmethod
    def update_instance_security_group(self, instance_id,
                                       new_security_group_ids):
        """Update security groups of a specified instance."""
        pass
apache-2.0
WojciechZankowski/iextrading4j
iextrading4j-api/src/test/java/pl/zankowski/iextrading4j/api/stats/RecordTest.java
1443
package pl.zankowski.iextrading4j.api.stats;

import com.flextrade.jfixture.JFixture;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.jupiter.api.Test;
import pl.zankowski.iextrading4j.api.util.ToStringVerifier;

import java.math.BigDecimal;
import java.time.LocalDate;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for {@code Record}: constructor field mapping, the
 * equals/hashCode contract, and toString coverage.
 */
class RecordTest {

    // Generates arbitrary fixture values for constructor arguments.
    private final JFixture fixture = new JFixture();

    // Each constructor argument must be exposed unchanged by its getter.
    @Test
    void constructor() {
        final BigDecimal recordValue = fixture.create(BigDecimal.class);
        final LocalDate recordDate = fixture.create(LocalDate.class);
        final BigDecimal previousDayValue = fixture.create(BigDecimal.class);
        final BigDecimal avg30Value = fixture.create(BigDecimal.class);

        final Record record = new Record(recordValue, recordDate,
                previousDayValue, avg30Value);

        assertThat(record.getRecordValue()).isEqualTo(recordValue);
        assertThat(record.getRecordDate()).isEqualTo(recordDate);
        assertThat(record.getPreviousDayValue()).isEqualTo(previousDayValue);
        assertThat(record.getAvg30Value()).isEqualTo(avg30Value);
    }

    // Verifies the equals/hashCode contract (getClass-based equality).
    @Test
    void equalsContract() {
        EqualsVerifier.forClass(Record.class)
                .usingGetClass()
                .verify();
    }

    // Verifies toString mentions every field of a fixture instance.
    @Test
    void toStringVerification() {
        ToStringVerifier.forObject(fixture.create(Record.class))
                .verify();
    }
}
apache-2.0
Jeff-Hanson/scala-euler
src/test/scala/com/github/jeffhanson/problem9/PythagoreanTripletTest.scala
304
package com.github.jeffhanson.problem9

import org.scalatest.FunSuite

// Project Euler problem 9: find the Pythagorean triplet summing to 1000.
// 31875000 is the expected answer returned by the solver under test.
class PythagoreanTripletTest extends FunSuite {
  test("Find the pythagorean triplet that sums to 1000") {
    assert(
      PythagoreanTripletAndSum.findSumAndPythagTriplet(1000, PythagoreanTripletAndSum.isPythagAndSum)
        == 31875000)
  }
}
apache-2.0
DigitalAssetCom/hlp-candidate
server/network/src/main/scala/org/hyperledger/network/Messages.scala
6438
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hyperledger.network

import org.hyperledger.common._
import org.hyperledger.network.codecs.HyperledgerCodecs._
import org.hyperledger.network.codecs._
import scodec._
import scodec.bits.BitVector

import scala.collection.immutable.List

/**
 * Common structures for blockchain based P2P protocols.
 *
 * Each wire message is a case class extending BlockchainMessage, paired
 * below with an implicit scodec Discriminator (the command string on the
 * wire) and an implicit Codec for its payload. The coproduct codec at the
 * bottom dispatches on those discriminators.
 */
object Messages {
  sealed trait BlockchainMessage
  // Marker for messages that carry inventory/block data payloads.
  sealed trait DataMessage extends BlockchainMessage

  case class VersionMessage(payload: Version) extends BlockchainMessage
  case class VerackMessage() extends BlockchainMessage
  case class AddrMessage(addresses: List[NetworkAddress]) extends BlockchainMessage

  // Convenience constructors wrapping ids into inventory vectors.
  object InvMessage {
    def txs(txs: scala.collection.Iterable[TID]) = InvMessage(txs.map(InventoryVector.tx).toList)
    def blocks(hashes: scala.collection.Iterable[BID]) = InvMessage(hashes.map(InventoryVector.block).toList)
  }
  case class InvMessage(inventory: List[InventoryVector]) extends DataMessage

  object GetDataMessage {
    def blocks(h: scala.collection.Iterable[BID]): GetDataMessage = GetDataMessage(h.map(InventoryVector.block).toList)
    def blocks(h: BID*): GetDataMessage = blocks(h.toIterable)
    def txs(ids: scala.collection.Iterable[TID]): GetDataMessage = GetDataMessage(ids.map(InventoryVector.tx).toList)
  }
  case class GetDataMessage(inventory: List[InventoryVector]) extends BlockchainMessage
  case class NotFoundMessage(inventory: List[InventoryVector]) extends DataMessage
  case class GetBlocksMessage(payload: BlockDataRequest) extends DataMessage
  case class GetHeadersMessage(payload: BlockDataRequest) extends DataMessage
  case class TxMessage(tx: Transaction) extends DataMessage
  case class BlockMessage(block: Block) extends DataMessage
  case class SignedBlockMessage(block: Block) extends DataMessage
  case class HeadersMessage(headers: List[Header]) extends BlockchainMessage
  case class SignedHeadersMessage(headers: List[HeaderWithSignatures]) extends BlockchainMessage
  case class GetAddrMessage() extends BlockchainMessage
  case class MempoolMessage() extends BlockchainMessage
  case class PingMessage(payload: Ping) extends BlockchainMessage {
    // A pong echoes the ping's payload back.
    def pong = PongMessage(payload)
  }
  case class PongMessage(payload: Ping) extends BlockchainMessage
  case class RejectMessage(payload: Rejection) extends BlockchainMessage
  case class AlertMessage(
    content: Alert,
    signature: BitVector) extends BlockchainMessage

  // todo add https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki messages

  import scodec.codecs._

  // Binds a message type to its wire command string for the coproduct codec.
  private def discriminator[M](d: String) = Discriminator[BlockchainMessage, M, String](d)

  implicit val versionD = discriminator[VersionMessage]("version")
  implicit val versionCodec = Version.codec.hlist.as[VersionMessage]

  // verack
  implicit val verackD = discriminator[VerackMessage]("verack")
  implicit val verackCodec = provide(VerackMessage())

  implicit val addrD = discriminator[AddrMessage]("addr")
  implicit val addrCodec = ("addresses" | varIntSizeSeq(NetworkAddress.codec)).as[AddrMessage]

  implicit val invD = discriminator[InvMessage]("inv")
  implicit val invCodec = ("inventory" | varIntSizeSeq(InventoryVector.codec)).as[InvMessage]

  implicit val getdataD = discriminator[GetDataMessage]("getdata")
  implicit val getdataCodec = ("inventory" | varIntSizeSeq(InventoryVector.codec)).as[GetDataMessage]

  implicit val notfoundD = discriminator[NotFoundMessage]("notfound")
  implicit val notfoundCodec = ("inventory" | varIntSizeSeq(InventoryVector.codec)).as[NotFoundMessage]

  implicit val getblocksD = discriminator[GetBlocksMessage]("getblocks")
  implicit val getblocksCodec = BlockDataRequest.codec.hlist.as[GetBlocksMessage]

  implicit val getheadersD = discriminator[GetHeadersMessage]("getheaders")
  implicit val getheadersCodec = BlockDataRequest.codec.hlist.as[GetHeadersMessage]

  implicit val txMessageD = discriminator[TxMessage]("tx")
  implicit val txMessageCodec = txCodec.hlist.as[TxMessage]

  implicit val blockMessageD = discriminator[BlockMessage]("block")
  implicit val blockMessageCodec = blockCodec.hlist.as[BlockMessage]

  implicit val signedBlockMessageD = discriminator[SignedBlockMessage]("sigblock")
  implicit val signedBlockMessageCodec = signedBlockCodec.hlist.as[SignedBlockMessage]

  implicit val headersMessageD = discriminator[HeadersMessage]("headers")
  // Each header on the wire is followed by 8 ignored bits (skipped on
  // decode, written back as unit on encode).
  implicit val headersMessageCodec: Codec[HeadersMessage] = varIntSizeSeq(headerCodec ~ ignore(8)).xmap[HeadersMessage](
    x => HeadersMessage(x.map(_._1)),
    hm => hm.headers.map((_, ())))

  implicit val signedHeadersMessageD = discriminator[SignedHeadersMessage]("sighdrs")
  implicit val signedHeadersMessageCodec = varIntSizeSeq(signedHeaderCodec).hlist.as[SignedHeadersMessage]

  implicit val getaddrD = discriminator[GetAddrMessage]("getaddr")
  implicit val getaddrCodec = provide(GetAddrMessage())

  implicit val mempoolD = discriminator[MempoolMessage]("mempool")
  implicit val mempoolCodec = provide(MempoolMessage())

  implicit val pingD = discriminator[PingMessage]("ping")
  implicit val pingCodec = Ping.codec.hlist.as[PingMessage]

  implicit val pongD = discriminator[PongMessage]("pong")
  implicit val pongCodec = Ping.codec.hlist.as[PongMessage]

  implicit val rejectD = discriminator[RejectMessage]("reject")
  implicit val rejectCodec = Rejection.codec.hlist.as[RejectMessage]

  implicit val alertD = discriminator[AlertMessage]("alert")
  // Alert content is length-prefixed (varInt bytes); the remaining bits
  // form the signature.
  implicit val alertMessageCodec = {
    variableSizeBytes(varInt, Alert.codec) :: scodec.codecs.bits
  }.as[AlertMessage]

  // Coproduct codec over every BlockchainMessage subtype, framed per
  // payload and dispatched by the command-string discriminators above.
  val blockChainMessageCodec: Codec[BlockchainMessage] = Codec.coproduct[BlockchainMessage]
    .framing(PayloadFrameCodec.framing)
    .discriminatedBy(cmdCodec).auto

  // Full wire codec: network magic prefix followed by the message itself.
  def messageCodec(magic: Int): Codec[BlockchainMessage] = {
    magicCodec(magic) ~> blockChainMessageCodec
  }.as[BlockchainMessage]
}
apache-2.0
YukaiXin/Domes
app/src/main/java/com/kxyu/domes/okhttp/utils/ImageUtils.java
5180
package com.kxyu.domes.okhttp.utils; import android.graphics.BitmapFactory; import android.util.DisplayMetrics; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import java.io.InputStream; import java.lang.reflect.Field; public class ImageUtils { /** * 根据InputStream获取图片实际的宽度和高度 * * @param imageStream * @return */ public static ImageSize getImageSize(InputStream imageStream) { BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; BitmapFactory.decodeStream(imageStream, null, options); return new ImageSize(options.outWidth, options.outHeight); } public static class ImageSize { int width; int height; public ImageSize() { } public ImageSize(int width, int height) { this.width = width; this.height = height; } @Override public String toString() { return "ImageSize{" + "width=" + width + ", height=" + height + '}'; } } public static int calculateInSampleSize(ImageSize srcSize, ImageSize targetSize) { // 源图片的宽度 int width = srcSize.width; int height = srcSize.height; int inSampleSize = 1; int reqWidth = targetSize.width; int reqHeight = targetSize.height; if (width > reqWidth && height > reqHeight) { // 计算出实际宽度和目标宽度的比率 int widthRatio = Math.round((float) width / (float) reqWidth); int heightRatio = Math.round((float) height / (float) reqHeight); inSampleSize = Math.max(widthRatio, heightRatio); } return inSampleSize; } /** * 根据ImageView获适当的压缩的宽和高 * * @param view * @return */ public static ImageSize getImageViewSize(View view) { ImageSize imageSize = new ImageSize(); imageSize.width = getExpectWidth(view); imageSize.height = getExpectHeight(view); return imageSize; } /** * 根据view获得期望的高度 * * @param view * @return */ private static int getExpectHeight(View view) { int height = 0; if (view == null) return 0; final ViewGroup.LayoutParams params = view.getLayoutParams(); //如果是WRAP_CONTENT,此时图片还没加载,getWidth根本无效 if (params != null && params.height != ViewGroup.LayoutParams.WRAP_CONTENT) { height = 
view.getWidth(); // 获得实际的宽度 } if (height <= 0 && params != null) { height = params.height; // 获得布局文件中的声明的宽度 } if (height <= 0) { height = getImageViewFieldValue(view, "mMaxHeight");// 获得设置的最大的宽度 } //如果宽度还是没有获取到,憋大招,使用屏幕的宽度 if (height <= 0) { DisplayMetrics displayMetrics = view.getContext().getResources() .getDisplayMetrics(); height = displayMetrics.heightPixels; } return height; } /** * 根据view获得期望的宽度 * * @param view * @return */ private static int getExpectWidth(View view) { int width = 0; if (view == null) return 0; final ViewGroup.LayoutParams params = view.getLayoutParams(); //如果是WRAP_CONTENT,此时图片还没加载,getWidth根本无效 if (params != null && params.width != ViewGroup.LayoutParams.WRAP_CONTENT) { width = view.getWidth(); // 获得实际的宽度 } if (width <= 0 && params != null) { width = params.width; // 获得布局文件中的声明的宽度 } if (width <= 0) { width = getImageViewFieldValue(view, "mMaxWidth");// 获得设置的最大的宽度 } //如果宽度还是没有获取到,憋大招,使用屏幕的宽度 if (width <= 0) { DisplayMetrics displayMetrics = view.getContext().getResources() .getDisplayMetrics(); width = displayMetrics.widthPixels; } return width; } /** * 通过反射获取imageview的某个属性值 * * @param object * @param fieldName * @return */ private static int getImageViewFieldValue(Object object, String fieldName) { int value = 0; try { Field field = ImageView.class.getDeclaredField(fieldName); field.setAccessible(true); int fieldValue = field.getInt(object); if (fieldValue > 0 && fieldValue < Integer.MAX_VALUE) { value = fieldValue; } } catch (Exception e) { } return value; } }
apache-2.0
kdmitrieva16/python_training
generator/contact.py
1362
# -*- coding: utf-8 -*-
"""Generates random contact test data and writes it to a JSON file.

Usage: contact.py [-n <number_of_contacts>] [-f <output_file>]
"""
from model.contact import Contact
import random
import string
import os.path
import jsonpickle
import getopt
import sys

try:
    # BUG FIX: long option names may not contain spaces and need a trailing
    # "=" to accept a value; the old list was ["number of contacts", "file"].
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number-of-contacts=", "file="])
except getopt.GetoptError as err:
    # BUG FIX: the getopt module has no usage() function; the old error path
    # crashed with AttributeError instead of reporting the problem.
    print(err)
    print("Usage: %s [-n number_of_contacts] [-f output_file]" % sys.argv[0])
    sys.exit(2)

n = 5                      # number of random contacts to generate
f = "data/contacts.json"   # output path relative to the project root

for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a


def random_string(prefix, maxlen):
    """Returns prefix plus up to maxlen-1 random printable characters."""
    # Extra spaces raise the odds of generating embedded whitespace.
    symbols = string.ascii_letters + string.digits + string.punctuation + " " * 10
    return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])


def random_num_string(prefix, maxlen):
    """Returns prefix plus up to maxlen-1 random digits (for phone fields)."""
    symbols = string.digits
    return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])


# One intentionally empty contact plus n randomly filled ones.
# BUG FIX: the comprehension previously used range(5), so the -n option
# was parsed but silently ignored.
testdata = [Contact(firstname="", middlename="", lastname="")] + [
    Contact(firstname=random_string("firstname", 20),
            middlename=random_string("middlename", 20),
            lastname=random_string("lastname", 20),
            home_phone=random_num_string("home", 10),
            work_phone=random_num_string("work", 10),
            mobile_phone=random_num_string("mobile", 10))
    for i in range(n)
]

file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
    jsonpickle.set_encoder_options("json", indent=2)
    out.write(jsonpickle.encode(testdata))
apache-2.0
pascallouisperez/guice-jit-providers
src/com/google/inject/spi/DefaultElementVisitor.java
2420
/**
 * Copyright (C) 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.inject.spi;

import com.google.inject.Binding;
import com.google.inject.JitBinding;

/**
 * No-op visitor for subclassing. All interface methods simply delegate to
 * {@link #visitOther(Element)}, returning its result. Subclasses override
 * only the {@code visit} overloads for the element types they care about.
 *
 * @param <V> any type to be returned by the visit method. Use {@link Void} with
 *     {@code return null} if no return type is needed.
 *
 * @author sberlin@gmail.com (Sam Berlin)
 * @since 2.0
 */
public abstract class DefaultElementVisitor<V> implements ElementVisitor<V> {

  /**
   * Default visit implementation. Returns {@code null}.
   */
  protected V visitOther(Element element) {
    return null;
  }

  // Every visit overload below funnels into visitOther(Element).

  public V visit(Message message) {
    return visitOther(message);
  }

  public <T> V visit(Binding<T> binding) {
    return visitOther(binding);
  }

  public <T> V visit(JitBinding<T> binding) {
    return visitOther(binding);
  }

  // The if[AOP]/end[AOP] markers below are build-time preprocessor
  // directives; this section is stripped in no-AOP builds.
  /*if[AOP]*/
  public V visit(InterceptorBinding interceptorBinding) {
    return visitOther(interceptorBinding);
  }
  /*end[AOP]*/

  public V visit(ScopeBinding scopeBinding) {
    return visitOther(scopeBinding);
  }

  public V visit(TypeConverterBinding typeConverterBinding) {
    return visitOther(typeConverterBinding);
  }

  public <T> V visit(ProviderLookup<T> providerLookup) {
    return visitOther(providerLookup);
  }

  public V visit(InjectionRequest<?> injectionRequest) {
    return visitOther(injectionRequest);
  }

  public V visit(StaticInjectionRequest staticInjectionRequest) {
    return visitOther(staticInjectionRequest);
  }

  public V visit(PrivateElements privateElements) {
    return visitOther(privateElements);
  }

  public <T> V visit(MembersInjectorLookup<T> lookup) {
    return visitOther(lookup);
  }

  public V visit(TypeListenerBinding binding) {
    return visitOther(binding);
  }
}
apache-2.0
dotnetcurry/azure-media-services-encoding-mvc
AzureMediaPortal/Migrations/201308011924358_InitialCreate.Designer.cs
727
// <auto-generated />
namespace AzureMediaPortal.Migrations
{
    using System.Data.Entity.Migrations;
    using System.Data.Entity.Migrations.Infrastructure;
    using System.Resources;

    // Entity Framework migration metadata for the InitialCreate migration.
    // This file is generated by EF tooling; do not edit it by hand.
    public sealed partial class InitialCreate : IMigrationMetadata
    {
        // Embedded .resx resources for this migration (holds the "Target"
        // model snapshot).
        private readonly ResourceManager Resources = new ResourceManager(typeof(InitialCreate));

        // Unique migration id: creation timestamp plus migration name.
        string IMigrationMetadata.Id
        {
            get { return "201308011924358_InitialCreate"; }
        }

        // No source model snapshot is stored for this migration.
        string IMigrationMetadata.Source
        {
            get { return null; }
        }

        // Target model snapshot, read from the embedded resources.
        string IMigrationMetadata.Target
        {
            get { return Resources.GetString("Target"); }
        }
    }
}
apache-2.0
ravindraranwala/wso2-axis2-transports
modules/rabbitmq/src/main/java/org/apache/axis2/transport/rabbitmq/RabbitMQConnectionFactoryManager.java
5725
/*
 * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.axis2.transport.rabbitmq;

import org.apache.axis2.description.Parameter;
import org.apache.axis2.description.ParameterInclude;
import org.apache.axis2.transport.rabbitmq.utils.RabbitMQConstants;
import org.apache.commons.lang.StringUtils;

import java.util.Hashtable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Class managing a set of {@link RabbitMQConnectionFactory} objects.
 */
public class RabbitMQConnectionFactoryManager {

    // BUG FIX: this was a plain HashMap that both getConnectionFactory
    // overloads read without holding a lock, while writes were synchronized;
    // a ConcurrentHashMap makes those unsynchronized lookups safe.
    private final Map<String, RabbitMQConnectionFactory> connectionFactories =
            new ConcurrentHashMap<String, RabbitMQConnectionFactory>();

    /**
     * Construct a connection factory manager for the RabbitMQ transport
     * sender or receiver.
     *
     * @param description the transport description whose parameters define
     *                    the connection factories
     */
    public RabbitMQConnectionFactoryManager(ParameterInclude description) {
        loadConnectionFactoryDefinitions(description);
    }

    /**
     * Get the connection factory that matches the given name, i.e. referring to
     * the same underlying connection factory. Used by the RabbitMQSender to
     * determine if already available resources should be used for outgoing
     * messages. If no factory instance is found then a new one is created and
     * added to the connection factory map.
     *
     * @param props a Map of connection factory properties and name
     * @return the matching (possibly newly created) connection factory
     */
    public RabbitMQConnectionFactory getConnectionFactory(Hashtable<String, String> props) {
        String connectionFactoryName = props.get(RabbitMQConstants.RABBITMQ_CON_FAC);
        if (StringUtils.isEmpty(connectionFactoryName)) {
            // No explicit name given: derive a unique key from all of the
            // connection-relevant properties.
            connectionFactoryName = props.get(RabbitMQConstants.SERVER_HOST_NAME) + "_" +
                                    props.get(RabbitMQConstants.SERVER_PORT) + "_" +
                                    props.get(RabbitMQConstants.SERVER_USER_NAME) + "_" +
                                    props.get(RabbitMQConstants.SERVER_PASSWORD) + "_" +
                                    props.get(RabbitMQConstants.SSL_ENABLED) + "_" +
                                    props.get(RabbitMQConstants.SERVER_VIRTUAL_HOST) + "_" +
                                    props.get(RabbitMQConstants.SERVER_RETRY_INTERVAL) + "_" +
                                    props.get(RabbitMQConstants.RETRY_INTERVAL) + "_" +
                                    props.get(RabbitMQConstants.RETRY_COUNT) + "_" +
                                    props.get(RabbitMQConstants.HEARTBEAT) + "_" +
                                    props.get(RabbitMQConstants.CONNECTION_TIMEOUT) + "_" +
                                    props.get(RabbitMQConstants.CONNECTION_POOL_SIZE);
        }

        // Create/get the connection factory, creating it at most once even
        // under concurrent access.
        RabbitMQConnectionFactory rabbitMQConnectionFactory = connectionFactories.get(connectionFactoryName);
        if (rabbitMQConnectionFactory == null) {
            synchronized (connectionFactories) {
                // Re-check under the lock in case another thread created it.
                rabbitMQConnectionFactory = connectionFactories.get(connectionFactoryName);
                if (rabbitMQConnectionFactory == null) {
                    rabbitMQConnectionFactory =
                            new RabbitMQConnectionFactory(connectionFactoryName, props);
                    // NOTE(review): stored under getName(), which is presumably
                    // equal to connectionFactoryName — confirm in
                    // RabbitMQConnectionFactory's constructor.
                    connectionFactories.put(rabbitMQConnectionFactory.getName(),
                                            rabbitMQConnectionFactory);
                }
            }
        }

        // Initialize the connection pools; serialized per factory instance.
        synchronized (rabbitMQConnectionFactory) {
            rabbitMQConnectionFactory.initializeConnectionPool(
                    (props.get(RabbitMQConstants.REPLY_TO_NAME) != null));
        }
        return rabbitMQConnectionFactory;
    }

    /**
     * Get the AMQP connection factory with the given name.
     *
     * @param connectionFactoryName the name of the AMQP connection factory
     * @return the AMQP connection factory or null if no connection factory with
     *         the given name exists
     */
    public RabbitMQConnectionFactory getConnectionFactory(String connectionFactoryName) {
        return connectionFactories.get(connectionFactoryName);
    }

    /**
     * Create ConnectionFactory instances for the definitions in the transport
     * configuration, and add these into our collection of connectionFactories
     * map keyed by name.
     *
     * @param trpDesc the transport description for RabbitMQ AMQP
     */
    private void loadConnectionFactoryDefinitions(ParameterInclude trpDesc) {
        for (Parameter parameter : trpDesc.getParameters()) {
            RabbitMQConnectionFactory amqpConFactory = new RabbitMQConnectionFactory(parameter);
            connectionFactories.put(amqpConFactory.getName(), amqpConFactory);
        }
    }

    /**
     * Stop all connection factories managed by this instance.
     */
    public void stop() {
        for (RabbitMQConnectionFactory conFac : connectionFactories.values()) {
            conFac.stop();
        }
    }
}
apache-2.0
google/schemaorg-java
src/main/java/com/google/schemaorg/core/impl/EntertainmentBusinessImpl.java
44059
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.schemaorg.core; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import com.google.schemaorg.SchemaOrgTypeImpl; import com.google.schemaorg.ValueType; import com.google.schemaorg.core.datatype.Date; import com.google.schemaorg.core.datatype.Text; import com.google.schemaorg.core.datatype.URL; import com.google.schemaorg.goog.GoogConstants; import com.google.schemaorg.goog.PopularityScoreSpecification; /** Implementation of {@link EntertainmentBusiness}. 
*/ public class EntertainmentBusinessImpl extends LocalBusinessImpl implements EntertainmentBusiness { private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet(); private static ImmutableSet<String> initializePropertySet() { ImmutableSet.Builder<String> builder = ImmutableSet.builder(); builder.add(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY); builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE); builder.add(CoreConstants.PROPERTY_ADDRESS); builder.add(CoreConstants.PROPERTY_AGGREGATE_RATING); builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME); builder.add(CoreConstants.PROPERTY_ALUMNI); builder.add(CoreConstants.PROPERTY_AREA_SERVED); builder.add(CoreConstants.PROPERTY_AWARD); builder.add(CoreConstants.PROPERTY_AWARDS); builder.add(CoreConstants.PROPERTY_BRANCH_CODE); builder.add(CoreConstants.PROPERTY_BRANCH_OF); builder.add(CoreConstants.PROPERTY_BRAND); builder.add(CoreConstants.PROPERTY_CONTACT_POINT); builder.add(CoreConstants.PROPERTY_CONTACT_POINTS); builder.add(CoreConstants.PROPERTY_CONTAINED_IN); builder.add(CoreConstants.PROPERTY_CONTAINED_IN_PLACE); builder.add(CoreConstants.PROPERTY_CONTAINS_PLACE); builder.add(CoreConstants.PROPERTY_CURRENCIES_ACCEPTED); builder.add(CoreConstants.PROPERTY_DEPARTMENT); builder.add(CoreConstants.PROPERTY_DESCRIPTION); builder.add(CoreConstants.PROPERTY_DISSOLUTION_DATE); builder.add(CoreConstants.PROPERTY_DUNS); builder.add(CoreConstants.PROPERTY_EMAIL); builder.add(CoreConstants.PROPERTY_EMPLOYEE); builder.add(CoreConstants.PROPERTY_EMPLOYEES); builder.add(CoreConstants.PROPERTY_EVENT); builder.add(CoreConstants.PROPERTY_EVENTS); builder.add(CoreConstants.PROPERTY_FAX_NUMBER); builder.add(CoreConstants.PROPERTY_FOUNDER); builder.add(CoreConstants.PROPERTY_FOUNDERS); builder.add(CoreConstants.PROPERTY_FOUNDING_DATE); builder.add(CoreConstants.PROPERTY_FOUNDING_LOCATION); builder.add(CoreConstants.PROPERTY_GEO); builder.add(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER); 
builder.add(CoreConstants.PROPERTY_HAS_MAP); builder.add(CoreConstants.PROPERTY_HAS_OFFER_CATALOG); builder.add(CoreConstants.PROPERTY_HAS_POS); builder.add(CoreConstants.PROPERTY_IMAGE); builder.add(CoreConstants.PROPERTY_ISIC_V4); builder.add(CoreConstants.PROPERTY_LEGAL_NAME); builder.add(CoreConstants.PROPERTY_LOCATION); builder.add(CoreConstants.PROPERTY_LOGO); builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE); builder.add(CoreConstants.PROPERTY_MAKES_OFFER); builder.add(CoreConstants.PROPERTY_MAP); builder.add(CoreConstants.PROPERTY_MAPS); builder.add(CoreConstants.PROPERTY_MEMBER); builder.add(CoreConstants.PROPERTY_MEMBER_OF); builder.add(CoreConstants.PROPERTY_MEMBERS); builder.add(CoreConstants.PROPERTY_NAICS); builder.add(CoreConstants.PROPERTY_NAME); builder.add(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES); builder.add(CoreConstants.PROPERTY_OPENING_HOURS); builder.add(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION); builder.add(CoreConstants.PROPERTY_OWNS); builder.add(CoreConstants.PROPERTY_PARENT_ORGANIZATION); builder.add(CoreConstants.PROPERTY_PAYMENT_ACCEPTED); builder.add(CoreConstants.PROPERTY_PHOTO); builder.add(CoreConstants.PROPERTY_PHOTOS); builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION); builder.add(CoreConstants.PROPERTY_PRICE_RANGE); builder.add(CoreConstants.PROPERTY_REVIEW); builder.add(CoreConstants.PROPERTY_REVIEWS); builder.add(CoreConstants.PROPERTY_SAME_AS); builder.add(CoreConstants.PROPERTY_SEEKS); builder.add(CoreConstants.PROPERTY_SERVICE_AREA); builder.add(CoreConstants.PROPERTY_SUB_ORGANIZATION); builder.add(CoreConstants.PROPERTY_TAX_ID); builder.add(CoreConstants.PROPERTY_TELEPHONE); builder.add(CoreConstants.PROPERTY_URL); builder.add(CoreConstants.PROPERTY_VAT_ID); builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION); builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE); return builder.build(); } static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<EntertainmentBusiness.Builder> 
implements EntertainmentBusiness.Builder { @Override public EntertainmentBusiness.Builder addAdditionalProperty(PropertyValue value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, value); } @Override public EntertainmentBusiness.Builder addAdditionalProperty(PropertyValue.Builder value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, value.build()); } @Override public EntertainmentBusiness.Builder addAdditionalProperty(String value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_PROPERTY, Text.of(value)); } @Override public EntertainmentBusiness.Builder addAdditionalType(URL value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value); } @Override public EntertainmentBusiness.Builder addAdditionalType(String value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addAddress(PostalAddress value) { return addProperty(CoreConstants.PROPERTY_ADDRESS, value); } @Override public EntertainmentBusiness.Builder addAddress(PostalAddress.Builder value) { return addProperty(CoreConstants.PROPERTY_ADDRESS, value.build()); } @Override public EntertainmentBusiness.Builder addAddress(Text value) { return addProperty(CoreConstants.PROPERTY_ADDRESS, value); } @Override public EntertainmentBusiness.Builder addAddress(String value) { return addProperty(CoreConstants.PROPERTY_ADDRESS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addAggregateRating(AggregateRating value) { return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value); } @Override public EntertainmentBusiness.Builder addAggregateRating(AggregateRating.Builder value) { return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value.build()); } @Override public EntertainmentBusiness.Builder addAggregateRating(String value) { return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, Text.of(value)); } @Override public EntertainmentBusiness.Builder 
addAlternateName(Text value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value); } @Override public EntertainmentBusiness.Builder addAlternateName(String value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value)); } @Override public EntertainmentBusiness.Builder addAlumni(Person value) { return addProperty(CoreConstants.PROPERTY_ALUMNI, value); } @Override public EntertainmentBusiness.Builder addAlumni(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_ALUMNI, value.build()); } @Override public EntertainmentBusiness.Builder addAlumni(String value) { return addProperty(CoreConstants.PROPERTY_ALUMNI, Text.of(value)); } @Override public EntertainmentBusiness.Builder addAreaServed(AdministrativeArea value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); } @Override public EntertainmentBusiness.Builder addAreaServed(AdministrativeArea.Builder value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build()); } @Override public EntertainmentBusiness.Builder addAreaServed(GeoShape value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); } @Override public EntertainmentBusiness.Builder addAreaServed(GeoShape.Builder value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build()); } @Override public EntertainmentBusiness.Builder addAreaServed(Place value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); } @Override public EntertainmentBusiness.Builder addAreaServed(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build()); } @Override public EntertainmentBusiness.Builder addAreaServed(Text value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); } @Override public EntertainmentBusiness.Builder addAreaServed(String value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, Text.of(value)); } @Override public EntertainmentBusiness.Builder addAward(Text value) { return 
addProperty(CoreConstants.PROPERTY_AWARD, value); } @Override public EntertainmentBusiness.Builder addAward(String value) { return addProperty(CoreConstants.PROPERTY_AWARD, Text.of(value)); } @Override public EntertainmentBusiness.Builder addAwards(Text value) { return addProperty(CoreConstants.PROPERTY_AWARDS, value); } @Override public EntertainmentBusiness.Builder addAwards(String value) { return addProperty(CoreConstants.PROPERTY_AWARDS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addBranchCode(Text value) { return addProperty(CoreConstants.PROPERTY_BRANCH_CODE, value); } @Override public EntertainmentBusiness.Builder addBranchCode(String value) { return addProperty(CoreConstants.PROPERTY_BRANCH_CODE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addBranchOf(Organization value) { return addProperty(CoreConstants.PROPERTY_BRANCH_OF, value); } @Override public EntertainmentBusiness.Builder addBranchOf(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_BRANCH_OF, value.build()); } @Override public EntertainmentBusiness.Builder addBranchOf(String value) { return addProperty(CoreConstants.PROPERTY_BRANCH_OF, Text.of(value)); } @Override public EntertainmentBusiness.Builder addBrand(Brand value) { return addProperty(CoreConstants.PROPERTY_BRAND, value); } @Override public EntertainmentBusiness.Builder addBrand(Brand.Builder value) { return addProperty(CoreConstants.PROPERTY_BRAND, value.build()); } @Override public EntertainmentBusiness.Builder addBrand(Organization value) { return addProperty(CoreConstants.PROPERTY_BRAND, value); } @Override public EntertainmentBusiness.Builder addBrand(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_BRAND, value.build()); } @Override public EntertainmentBusiness.Builder addBrand(String value) { return addProperty(CoreConstants.PROPERTY_BRAND, Text.of(value)); } @Override public EntertainmentBusiness.Builder addContactPoint(ContactPoint value) { 
return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value); } @Override public EntertainmentBusiness.Builder addContactPoint(ContactPoint.Builder value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value.build()); } @Override public EntertainmentBusiness.Builder addContactPoint(String value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, Text.of(value)); } @Override public EntertainmentBusiness.Builder addContactPoints(ContactPoint value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value); } @Override public EntertainmentBusiness.Builder addContactPoints(ContactPoint.Builder value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value.build()); } @Override public EntertainmentBusiness.Builder addContactPoints(String value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addContainedIn(Place value) { return addProperty(CoreConstants.PROPERTY_CONTAINED_IN, value); } @Override public EntertainmentBusiness.Builder addContainedIn(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_CONTAINED_IN, value.build()); } @Override public EntertainmentBusiness.Builder addContainedIn(String value) { return addProperty(CoreConstants.PROPERTY_CONTAINED_IN, Text.of(value)); } @Override public EntertainmentBusiness.Builder addContainedInPlace(Place value) { return addProperty(CoreConstants.PROPERTY_CONTAINED_IN_PLACE, value); } @Override public EntertainmentBusiness.Builder addContainedInPlace(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_CONTAINED_IN_PLACE, value.build()); } @Override public EntertainmentBusiness.Builder addContainedInPlace(String value) { return addProperty(CoreConstants.PROPERTY_CONTAINED_IN_PLACE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addContainsPlace(Place value) { return addProperty(CoreConstants.PROPERTY_CONTAINS_PLACE, value); } @Override public 
EntertainmentBusiness.Builder addContainsPlace(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_CONTAINS_PLACE, value.build()); } @Override public EntertainmentBusiness.Builder addContainsPlace(String value) { return addProperty(CoreConstants.PROPERTY_CONTAINS_PLACE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addCurrenciesAccepted(Text value) { return addProperty(CoreConstants.PROPERTY_CURRENCIES_ACCEPTED, value); } @Override public EntertainmentBusiness.Builder addCurrenciesAccepted(String value) { return addProperty(CoreConstants.PROPERTY_CURRENCIES_ACCEPTED, Text.of(value)); } @Override public EntertainmentBusiness.Builder addDepartment(Organization value) { return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value); } @Override public EntertainmentBusiness.Builder addDepartment(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value.build()); } @Override public EntertainmentBusiness.Builder addDepartment(String value) { return addProperty(CoreConstants.PROPERTY_DEPARTMENT, Text.of(value)); } @Override public EntertainmentBusiness.Builder addDescription(Text value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value); } @Override public EntertainmentBusiness.Builder addDescription(String value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value)); } @Override public EntertainmentBusiness.Builder addDissolutionDate(Date value) { return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, value); } @Override public EntertainmentBusiness.Builder addDissolutionDate(String value) { return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addDuns(Text value) { return addProperty(CoreConstants.PROPERTY_DUNS, value); } @Override public EntertainmentBusiness.Builder addDuns(String value) { return addProperty(CoreConstants.PROPERTY_DUNS, Text.of(value)); } @Override public 
EntertainmentBusiness.Builder addEmail(Text value) { return addProperty(CoreConstants.PROPERTY_EMAIL, value); } @Override public EntertainmentBusiness.Builder addEmail(String value) { return addProperty(CoreConstants.PROPERTY_EMAIL, Text.of(value)); } @Override public EntertainmentBusiness.Builder addEmployee(Person value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value); } @Override public EntertainmentBusiness.Builder addEmployee(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value.build()); } @Override public EntertainmentBusiness.Builder addEmployee(String value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addEmployees(Person value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value); } @Override public EntertainmentBusiness.Builder addEmployees(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value.build()); } @Override public EntertainmentBusiness.Builder addEmployees(String value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEES, Text.of(value)); } @Override public EntertainmentBusiness.Builder addEvent(Event value) { return addProperty(CoreConstants.PROPERTY_EVENT, value); } @Override public EntertainmentBusiness.Builder addEvent(Event.Builder value) { return addProperty(CoreConstants.PROPERTY_EVENT, value.build()); } @Override public EntertainmentBusiness.Builder addEvent(String value) { return addProperty(CoreConstants.PROPERTY_EVENT, Text.of(value)); } @Override public EntertainmentBusiness.Builder addEvents(Event value) { return addProperty(CoreConstants.PROPERTY_EVENTS, value); } @Override public EntertainmentBusiness.Builder addEvents(Event.Builder value) { return addProperty(CoreConstants.PROPERTY_EVENTS, value.build()); } @Override public EntertainmentBusiness.Builder addEvents(String value) { return addProperty(CoreConstants.PROPERTY_EVENTS, Text.of(value)); } @Override public 
EntertainmentBusiness.Builder addFaxNumber(Text value) { return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, value); } @Override public EntertainmentBusiness.Builder addFaxNumber(String value) { return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, Text.of(value)); } @Override public EntertainmentBusiness.Builder addFounder(Person value) { return addProperty(CoreConstants.PROPERTY_FOUNDER, value); } @Override public EntertainmentBusiness.Builder addFounder(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_FOUNDER, value.build()); } @Override public EntertainmentBusiness.Builder addFounder(String value) { return addProperty(CoreConstants.PROPERTY_FOUNDER, Text.of(value)); } @Override public EntertainmentBusiness.Builder addFounders(Person value) { return addProperty(CoreConstants.PROPERTY_FOUNDERS, value); } @Override public EntertainmentBusiness.Builder addFounders(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_FOUNDERS, value.build()); } @Override public EntertainmentBusiness.Builder addFounders(String value) { return addProperty(CoreConstants.PROPERTY_FOUNDERS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addFoundingDate(Date value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, value); } @Override public EntertainmentBusiness.Builder addFoundingDate(String value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addFoundingLocation(Place value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value); } @Override public EntertainmentBusiness.Builder addFoundingLocation(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value.build()); } @Override public EntertainmentBusiness.Builder addFoundingLocation(String value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, Text.of(value)); } @Override public EntertainmentBusiness.Builder addGeo(GeoCoordinates value) { 
return addProperty(CoreConstants.PROPERTY_GEO, value); } @Override public EntertainmentBusiness.Builder addGeo(GeoCoordinates.Builder value) { return addProperty(CoreConstants.PROPERTY_GEO, value.build()); } @Override public EntertainmentBusiness.Builder addGeo(GeoShape value) { return addProperty(CoreConstants.PROPERTY_GEO, value); } @Override public EntertainmentBusiness.Builder addGeo(GeoShape.Builder value) { return addProperty(CoreConstants.PROPERTY_GEO, value.build()); } @Override public EntertainmentBusiness.Builder addGeo(String value) { return addProperty(CoreConstants.PROPERTY_GEO, Text.of(value)); } @Override public EntertainmentBusiness.Builder addGlobalLocationNumber(Text value) { return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, value); } @Override public EntertainmentBusiness.Builder addGlobalLocationNumber(String value) { return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, Text.of(value)); } @Override public EntertainmentBusiness.Builder addHasMap(Map value) { return addProperty(CoreConstants.PROPERTY_HAS_MAP, value); } @Override public EntertainmentBusiness.Builder addHasMap(Map.Builder value) { return addProperty(CoreConstants.PROPERTY_HAS_MAP, value.build()); } @Override public EntertainmentBusiness.Builder addHasMap(URL value) { return addProperty(CoreConstants.PROPERTY_HAS_MAP, value); } @Override public EntertainmentBusiness.Builder addHasMap(String value) { return addProperty(CoreConstants.PROPERTY_HAS_MAP, Text.of(value)); } @Override public EntertainmentBusiness.Builder addHasOfferCatalog(OfferCatalog value) { return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value); } @Override public EntertainmentBusiness.Builder addHasOfferCatalog(OfferCatalog.Builder value) { return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value.build()); } @Override public EntertainmentBusiness.Builder addHasOfferCatalog(String value) { return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, Text.of(value)); } 
@Override public EntertainmentBusiness.Builder addHasPOS(Place value) { return addProperty(CoreConstants.PROPERTY_HAS_POS, value); } @Override public EntertainmentBusiness.Builder addHasPOS(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_HAS_POS, value.build()); } @Override public EntertainmentBusiness.Builder addHasPOS(String value) { return addProperty(CoreConstants.PROPERTY_HAS_POS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addImage(ImageObject value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); } @Override public EntertainmentBusiness.Builder addImage(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value.build()); } @Override public EntertainmentBusiness.Builder addImage(URL value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); } @Override public EntertainmentBusiness.Builder addImage(String value) { return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addIsicV4(Text value) { return addProperty(CoreConstants.PROPERTY_ISIC_V4, value); } @Override public EntertainmentBusiness.Builder addIsicV4(String value) { return addProperty(CoreConstants.PROPERTY_ISIC_V4, Text.of(value)); } @Override public EntertainmentBusiness.Builder addLegalName(Text value) { return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, value); } @Override public EntertainmentBusiness.Builder addLegalName(String value) { return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, Text.of(value)); } @Override public EntertainmentBusiness.Builder addLocation(Place value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value); } @Override public EntertainmentBusiness.Builder addLocation(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value.build()); } @Override public EntertainmentBusiness.Builder addLocation(PostalAddress value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value); } @Override public 
EntertainmentBusiness.Builder addLocation(PostalAddress.Builder value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value.build()); } @Override public EntertainmentBusiness.Builder addLocation(Text value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value); } @Override public EntertainmentBusiness.Builder addLocation(String value) { return addProperty(CoreConstants.PROPERTY_LOCATION, Text.of(value)); } @Override public EntertainmentBusiness.Builder addLogo(ImageObject value) { return addProperty(CoreConstants.PROPERTY_LOGO, value); } @Override public EntertainmentBusiness.Builder addLogo(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_LOGO, value.build()); } @Override public EntertainmentBusiness.Builder addLogo(URL value) { return addProperty(CoreConstants.PROPERTY_LOGO, value); } @Override public EntertainmentBusiness.Builder addLogo(String value) { return addProperty(CoreConstants.PROPERTY_LOGO, Text.of(value)); } @Override public EntertainmentBusiness.Builder addMainEntityOfPage(CreativeWork value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); } @Override public EntertainmentBusiness.Builder addMainEntityOfPage(CreativeWork.Builder value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build()); } @Override public EntertainmentBusiness.Builder addMainEntityOfPage(URL value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); } @Override public EntertainmentBusiness.Builder addMainEntityOfPage(String value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addMakesOffer(Offer value) { return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value); } @Override public EntertainmentBusiness.Builder addMakesOffer(Offer.Builder value) { return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value.build()); } @Override public EntertainmentBusiness.Builder addMakesOffer(String 
value) { return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, Text.of(value)); } @Override public EntertainmentBusiness.Builder addMap(URL value) { return addProperty(CoreConstants.PROPERTY_MAP, value); } @Override public EntertainmentBusiness.Builder addMap(String value) { return addProperty(CoreConstants.PROPERTY_MAP, Text.of(value)); } @Override public EntertainmentBusiness.Builder addMaps(URL value) { return addProperty(CoreConstants.PROPERTY_MAPS, value); } @Override public EntertainmentBusiness.Builder addMaps(String value) { return addProperty(CoreConstants.PROPERTY_MAPS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addMember(Organization value) { return addProperty(CoreConstants.PROPERTY_MEMBER, value); } @Override public EntertainmentBusiness.Builder addMember(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBER, value.build()); } @Override public EntertainmentBusiness.Builder addMember(Person value) { return addProperty(CoreConstants.PROPERTY_MEMBER, value); } @Override public EntertainmentBusiness.Builder addMember(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBER, value.build()); } @Override public EntertainmentBusiness.Builder addMember(String value) { return addProperty(CoreConstants.PROPERTY_MEMBER, Text.of(value)); } @Override public EntertainmentBusiness.Builder addMemberOf(Organization value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value); } @Override public EntertainmentBusiness.Builder addMemberOf(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build()); } @Override public EntertainmentBusiness.Builder addMemberOf(ProgramMembership value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value); } @Override public EntertainmentBusiness.Builder addMemberOf(ProgramMembership.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build()); } @Override public EntertainmentBusiness.Builder 
addMemberOf(String value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, Text.of(value)); } @Override public EntertainmentBusiness.Builder addMembers(Organization value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, value); } @Override public EntertainmentBusiness.Builder addMembers(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build()); } @Override public EntertainmentBusiness.Builder addMembers(Person value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, value); } @Override public EntertainmentBusiness.Builder addMembers(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build()); } @Override public EntertainmentBusiness.Builder addMembers(String value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addNaics(Text value) { return addProperty(CoreConstants.PROPERTY_NAICS, value); } @Override public EntertainmentBusiness.Builder addNaics(String value) { return addProperty(CoreConstants.PROPERTY_NAICS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addName(Text value) { return addProperty(CoreConstants.PROPERTY_NAME, value); } @Override public EntertainmentBusiness.Builder addName(String value) { return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value)); } @Override public EntertainmentBusiness.Builder addNumberOfEmployees(QuantitativeValue value) { return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value); } @Override public EntertainmentBusiness.Builder addNumberOfEmployees(QuantitativeValue.Builder value) { return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value.build()); } @Override public EntertainmentBusiness.Builder addNumberOfEmployees(String value) { return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, Text.of(value)); } @Override public EntertainmentBusiness.Builder addOpeningHours(Text value) { return 
addProperty(CoreConstants.PROPERTY_OPENING_HOURS, value); } @Override public EntertainmentBusiness.Builder addOpeningHours(String value) { return addProperty(CoreConstants.PROPERTY_OPENING_HOURS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addOpeningHoursSpecification( OpeningHoursSpecification value) { return addProperty(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION, value); } @Override public EntertainmentBusiness.Builder addOpeningHoursSpecification( OpeningHoursSpecification.Builder value) { return addProperty(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION, value.build()); } @Override public EntertainmentBusiness.Builder addOpeningHoursSpecification(String value) { return addProperty(CoreConstants.PROPERTY_OPENING_HOURS_SPECIFICATION, Text.of(value)); } @Override public EntertainmentBusiness.Builder addOwns(OwnershipInfo value) { return addProperty(CoreConstants.PROPERTY_OWNS, value); } @Override public EntertainmentBusiness.Builder addOwns(OwnershipInfo.Builder value) { return addProperty(CoreConstants.PROPERTY_OWNS, value.build()); } @Override public EntertainmentBusiness.Builder addOwns(Product value) { return addProperty(CoreConstants.PROPERTY_OWNS, value); } @Override public EntertainmentBusiness.Builder addOwns(Product.Builder value) { return addProperty(CoreConstants.PROPERTY_OWNS, value.build()); } @Override public EntertainmentBusiness.Builder addOwns(String value) { return addProperty(CoreConstants.PROPERTY_OWNS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addParentOrganization(Organization value) { return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value); } @Override public EntertainmentBusiness.Builder addParentOrganization(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value.build()); } @Override public EntertainmentBusiness.Builder addParentOrganization(String value) { return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, 
Text.of(value)); } @Override public EntertainmentBusiness.Builder addPaymentAccepted(Text value) { return addProperty(CoreConstants.PROPERTY_PAYMENT_ACCEPTED, value); } @Override public EntertainmentBusiness.Builder addPaymentAccepted(String value) { return addProperty(CoreConstants.PROPERTY_PAYMENT_ACCEPTED, Text.of(value)); } @Override public EntertainmentBusiness.Builder addPhoto(ImageObject value) { return addProperty(CoreConstants.PROPERTY_PHOTO, value); } @Override public EntertainmentBusiness.Builder addPhoto(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_PHOTO, value.build()); } @Override public EntertainmentBusiness.Builder addPhoto(Photograph value) { return addProperty(CoreConstants.PROPERTY_PHOTO, value); } @Override public EntertainmentBusiness.Builder addPhoto(Photograph.Builder value) { return addProperty(CoreConstants.PROPERTY_PHOTO, value.build()); } @Override public EntertainmentBusiness.Builder addPhoto(String value) { return addProperty(CoreConstants.PROPERTY_PHOTO, Text.of(value)); } @Override public EntertainmentBusiness.Builder addPhotos(ImageObject value) { return addProperty(CoreConstants.PROPERTY_PHOTOS, value); } @Override public EntertainmentBusiness.Builder addPhotos(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_PHOTOS, value.build()); } @Override public EntertainmentBusiness.Builder addPhotos(Photograph value) { return addProperty(CoreConstants.PROPERTY_PHOTOS, value); } @Override public EntertainmentBusiness.Builder addPhotos(Photograph.Builder value) { return addProperty(CoreConstants.PROPERTY_PHOTOS, value.build()); } @Override public EntertainmentBusiness.Builder addPhotos(String value) { return addProperty(CoreConstants.PROPERTY_PHOTOS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addPotentialAction(Action value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value); } @Override public EntertainmentBusiness.Builder 
addPotentialAction(Action.Builder value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build()); } @Override public EntertainmentBusiness.Builder addPotentialAction(String value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value)); } @Override public EntertainmentBusiness.Builder addPriceRange(Text value) { return addProperty(CoreConstants.PROPERTY_PRICE_RANGE, value); } @Override public EntertainmentBusiness.Builder addPriceRange(String value) { return addProperty(CoreConstants.PROPERTY_PRICE_RANGE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addReview(Review value) { return addProperty(CoreConstants.PROPERTY_REVIEW, value); } @Override public EntertainmentBusiness.Builder addReview(Review.Builder value) { return addProperty(CoreConstants.PROPERTY_REVIEW, value.build()); } @Override public EntertainmentBusiness.Builder addReview(String value) { return addProperty(CoreConstants.PROPERTY_REVIEW, Text.of(value)); } @Override public EntertainmentBusiness.Builder addReviews(Review value) { return addProperty(CoreConstants.PROPERTY_REVIEWS, value); } @Override public EntertainmentBusiness.Builder addReviews(Review.Builder value) { return addProperty(CoreConstants.PROPERTY_REVIEWS, value.build()); } @Override public EntertainmentBusiness.Builder addReviews(String value) { return addProperty(CoreConstants.PROPERTY_REVIEWS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addSameAs(URL value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, value); } @Override public EntertainmentBusiness.Builder addSameAs(String value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addSeeks(Demand value) { return addProperty(CoreConstants.PROPERTY_SEEKS, value); } @Override public EntertainmentBusiness.Builder addSeeks(Demand.Builder value) { return addProperty(CoreConstants.PROPERTY_SEEKS, value.build()); } @Override public 
EntertainmentBusiness.Builder addSeeks(String value) { return addProperty(CoreConstants.PROPERTY_SEEKS, Text.of(value)); } @Override public EntertainmentBusiness.Builder addServiceArea(AdministrativeArea value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value); } @Override public EntertainmentBusiness.Builder addServiceArea(AdministrativeArea.Builder value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build()); } @Override public EntertainmentBusiness.Builder addServiceArea(GeoShape value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value); } @Override public EntertainmentBusiness.Builder addServiceArea(GeoShape.Builder value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build()); } @Override public EntertainmentBusiness.Builder addServiceArea(Place value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value); } @Override public EntertainmentBusiness.Builder addServiceArea(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build()); } @Override public EntertainmentBusiness.Builder addServiceArea(String value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, Text.of(value)); } @Override public EntertainmentBusiness.Builder addSubOrganization(Organization value) { return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value); } @Override public EntertainmentBusiness.Builder addSubOrganization(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value.build()); } @Override public EntertainmentBusiness.Builder addSubOrganization(String value) { return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, Text.of(value)); } @Override public EntertainmentBusiness.Builder addTaxID(Text value) { return addProperty(CoreConstants.PROPERTY_TAX_ID, value); } @Override public EntertainmentBusiness.Builder addTaxID(String value) { return addProperty(CoreConstants.PROPERTY_TAX_ID, Text.of(value)); } @Override public 
EntertainmentBusiness.Builder addTelephone(Text value) { return addProperty(CoreConstants.PROPERTY_TELEPHONE, value); } @Override public EntertainmentBusiness.Builder addTelephone(String value) { return addProperty(CoreConstants.PROPERTY_TELEPHONE, Text.of(value)); } @Override public EntertainmentBusiness.Builder addUrl(URL value) { return addProperty(CoreConstants.PROPERTY_URL, value); } @Override public EntertainmentBusiness.Builder addUrl(String value) { return addProperty(CoreConstants.PROPERTY_URL, Text.of(value)); } @Override public EntertainmentBusiness.Builder addVatID(Text value) { return addProperty(CoreConstants.PROPERTY_VAT_ID, value); } @Override public EntertainmentBusiness.Builder addVatID(String value) { return addProperty(CoreConstants.PROPERTY_VAT_ID, Text.of(value)); } @Override public EntertainmentBusiness.Builder addDetailedDescription(Article value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value); } @Override public EntertainmentBusiness.Builder addDetailedDescription(Article.Builder value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build()); } @Override public EntertainmentBusiness.Builder addDetailedDescription(String value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value)); } @Override public EntertainmentBusiness.Builder addPopularityScore(PopularityScoreSpecification value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value); } @Override public EntertainmentBusiness.Builder addPopularityScore( PopularityScoreSpecification.Builder value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build()); } @Override public EntertainmentBusiness.Builder addPopularityScore(String value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value)); } @Override public EntertainmentBusiness build() { return new EntertainmentBusinessImpl(properties, reverseMap); } } public EntertainmentBusinessImpl( Multimap<String, 
ValueType> properties, Multimap<String, Thing> reverseMap) { super(properties, reverseMap); } @Override public String getFullTypeName() { return CoreConstants.TYPE_ENTERTAINMENT_BUSINESS; } @Override public boolean includesProperty(String property) { return PROPERTY_SET.contains(CoreConstants.NAMESPACE + property) || PROPERTY_SET.contains(GoogConstants.NAMESPACE + property) || PROPERTY_SET.contains(property); } }
apache-2.0
xiaozhu36/terraform-provider
vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/cloudapi/reset_app_secret.go
3704
package cloudapi

//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)

// ResetAppSecret invokes the cloudapi.ResetAppSecret API synchronously.
// It blocks until the round trip performed by client.DoAction completes.
// api document: https://help.aliyun.com/api/cloudapi/resetappsecret.html
func (client *Client) ResetAppSecret(request *ResetAppSecretRequest) (response *ResetAppSecretResponse, err error) {
	response = CreateResetAppSecretResponse()
	err = client.DoAction(request, response)
	return
}

// ResetAppSecretWithChan invokes the cloudapi.ResetAppSecret API asynchronously.
// Exactly one value is delivered: a response on the first channel on success,
// or an error on the second channel on failure; both channels are then closed.
// api document: https://help.aliyun.com/api/cloudapi/resetappsecret.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) ResetAppSecretWithChan(request *ResetAppSecretRequest) (<-chan *ResetAppSecretResponse, <-chan error) {
	// Capacity-1 buffers let the async task (or the scheduling-failure path
	// below) send without blocking even before the caller starts receiving.
	responseChan := make(chan *ResetAppSecretResponse, 1)
	errChan := make(chan error, 1)
	err := client.AddAsyncTask(func() {
		defer close(responseChan)
		defer close(errChan)
		response, err := client.ResetAppSecret(request)
		if err != nil {
			errChan <- err
		} else {
			responseChan <- response
		}
	})
	if err != nil {
		// The task was never scheduled: report the scheduling error and close
		// both channels here, since the deferred closes above will never run.
		errChan <- err
		close(responseChan)
		close(errChan)
	}
	return responseChan, errChan
}

// ResetAppSecretWithCallback invokes the cloudapi.ResetAppSecret API asynchronously.
// The callback receives either (response, nil) or (nil, err); the returned
// channel yields 1 when the call was scheduled and ran, 0 when scheduling failed.
// api document: https://help.aliyun.com/api/cloudapi/resetappsecret.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) ResetAppSecretWithCallback(request *ResetAppSecretRequest, callback func(response *ResetAppSecretResponse, err error)) <-chan int {
	result := make(chan int, 1)
	err := client.AddAsyncTask(func() {
		var response *ResetAppSecretResponse
		var err error
		defer close(result)
		response, err = client.ResetAppSecret(request)
		callback(response, err)
		result <- 1
	})
	if err != nil {
		// Scheduling failed: surface the error through the callback and emit 0.
		defer close(result)
		callback(nil, err)
		result <- 0
	}
	return result
}

// ResetAppSecretRequest is the request struct for api ResetAppSecret
type ResetAppSecretRequest struct {
	*requests.RpcRequest
	SecurityToken string `position:"Query" name:"SecurityToken"`
	AppKey        string `position:"Query" name:"AppKey"`
}

// ResetAppSecretResponse is the response struct for api ResetAppSecret
type ResetAppSecretResponse struct {
	*responses.BaseResponse
	RequestId string `json:"RequestId" xml:"RequestId"`
}

// CreateResetAppSecretRequest creates a request to invoke ResetAppSecret API
func CreateResetAppSecretRequest() (request *ResetAppSecretRequest) {
	request = &ResetAppSecretRequest{
		RpcRequest: &requests.RpcRequest{},
	}
	// Product, API version, action name, endpoint type and auth style are
	// fixed by the generator for this operation.
	request.InitWithApiInfo("CloudAPI", "2016-07-14", "ResetAppSecret", "apigateway", "openAPI")
	return
}

// CreateResetAppSecretResponse creates a response to parse from ResetAppSecret response
func CreateResetAppSecretResponse() (response *ResetAppSecretResponse) {
	response = &ResetAppSecretResponse{
		BaseResponse: &responses.BaseResponse{},
	}
	return
}
apache-2.0
ignasi35/lagom
service/scaladsl/api/src/main/scala/com/lightbend/lagom/internal/scaladsl/api/ScaladslPath.scala
936
/*
 * Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
 */
package com.lightbend.lagom.internal.scaladsl.api

import com.lightbend.lagom.internal.api.Path
import com.lightbend.lagom.internal.api.StaticPathPart
import com.lightbend.lagom.scaladsl.api.Descriptor.CallId
import com.lightbend.lagom.scaladsl.api.Descriptor.NamedCallId
import com.lightbend.lagom.scaladsl.api.Descriptor.PathCallId
import com.lightbend.lagom.scaladsl.api.Descriptor.RestCallId

/**
 * Path methods specific to the scaladsl
 */
object ScaladslPath {

  /**
   * Derives the routing [[Path]] for a service call identifier.
   *
   * REST and path-based identifiers carry an explicit pattern that is parsed
   * as-is; a named identifier becomes a single static segment, prefixed with
   * "/" when the name does not already start with one.
   */
  def fromCallId(callId: CallId): Path = callId match {
    case rest: RestCallId   => Path.parse(rest.pathPattern)
    case path: PathCallId   => Path.parse(path.pathPattern)
    case named: NamedCallId =>
      val rawName    = named.name
      val normalized = if (rawName.startsWith("/")) rawName else "/" + rawName
      Path(normalized, Seq(StaticPathPart(normalized)), Nil)
  }
}
apache-2.0
paulydboy/Quad-Vision
DroneControl/Symbol.py
254
class Symbol(object):
    """Read-only pairing of a recognized image and the command it maps to.

    Both values are captured at construction time and exposed through
    getter-only properties, so callers cannot rebind them afterwards.
    """

    def __init__(self, image, command):
        # Stash both values on "private" attributes; all external access
        # goes through the read-only properties registered below.
        self._image = image
        self._command = command

    def _get_image(self):
        """Return the image associated with this symbol."""
        return self._image

    def _get_command(self):
        """Return the command issued when this symbol is recognized."""
        return self._command

    # Getter-only properties: assignment raises AttributeError, exactly as
    # with the decorator form.
    image = property(_get_image)
    command = property(_get_command)
apache-2.0
skycrown/CuteAssistant
app/src/main/java/net/skycrown/cuteassistant/network/gson/BaseBean.java
704
package net.skycrown.cuteassistant.network.gson;

import com.google.gson.annotations.SerializedName;

/**
 * Created by skycrown on 2018/6/10.
 * Generic envelope for a server response: a payload (serialized as
 * "results" on the wire), a status code and a human-readable message.
 *
 * @param <T> type of the deserialized payload carried in the "results" field
 */
public class BaseBean<T> {

    // Response payload; the JSON field is named "results" on the wire.
    @SerializedName("results")
    private T data;

    // Status code returned by the server (semantics defined by the backend;
    // NOTE(review): string-typed here — confirm expected values with the API).
    private String code;

    // Human-readable message accompanying the status code.
    private String message;

    /** Returns the deserialized payload; may be null when absent. */
    public T getData() {
        return data;
    }

    /** Replaces the payload. */
    public void setData(T data) {
        this.data = data;
    }

    /** Returns the server status code. */
    public String getCode() {
        return code;
    }

    /** Sets the server status code. */
    public void setCode(String code) {
        this.code = code;
    }

    /** Returns the human-readable message. */
    public String getMessage() {
        return message;
    }

    /** Sets the human-readable message. */
    public void setMessage(String message) {
        this.message = message;
    }
}
apache-2.0
prashant003/interimage-2
interimage-data/src/main/java/br/puc_rio/ele/lvc/interimage/data/imageioimpl/plugins/tiff/TIFFMetadataFormat.java
10153
/* * $RCSfile: TIFFMetadataFormat.java,v $ * * * Copyright (c) 2005 Sun Microsystems, Inc. All Rights Reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistribution of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistribution in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * Neither the name of Sun Microsystems, Inc. or the names of * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * This software is provided "AS IS," without a warranty of any * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY * EXCLUDED. SUN MIDROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL * NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF * USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS * DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR * ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, * CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND * REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR * INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS BEEN ADVISED OF THE * POSSIBILITY OF SUCH DAMAGES. * * You acknowledge that this software is not designed or intended for * use in the design, construction, operation or maintenance of any * nuclear facility. 
* * $Revision: 1.1 $ * $Date: 2005/02/11 05:01:48 $ * $State: Exp $ */ package br.puc_rio.ele.lvc.interimage.data.imageioimpl.plugins.tiff; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.Locale; import java.util.Map; import java.util.MissingResourceException; import java.util.ResourceBundle; import javax.imageio.ImageTypeSpecifier; import javax.imageio.metadata.IIOMetadataFormat; import br.puc_rio.ele.lvc.interimage.data.imageio.plugins.tiff.BaselineTIFFTagSet; import br.puc_rio.ele.lvc.interimage.data.imageio.plugins.tiff.TIFFTag; import br.puc_rio.ele.lvc.interimage.data.imageio.plugins.tiff.TIFFTagSet; public abstract class TIFFMetadataFormat implements IIOMetadataFormat { protected Map elementInfoMap = new HashMap(); protected Map attrInfoMap = new HashMap(); protected String resourceBaseName; protected String rootName; public String getRootName() { return rootName; } private String getResource(String key, Locale locale) { if (locale == null) { locale = Locale.getDefault(); } try { ResourceBundle bundle = ResourceBundle.getBundle(resourceBaseName, locale); return bundle.getString(key); } catch (MissingResourceException e) { return null; } } private TIFFElementInfo getElementInfo(String elementName) { if (elementName == null) { throw new IllegalArgumentException("elementName == null!"); } TIFFElementInfo info = (TIFFElementInfo)elementInfoMap.get(elementName); if (info == null) { throw new IllegalArgumentException("No such element: " + elementName); } return info; } private TIFFAttrInfo getAttrInfo(String elementName, String attrName) { if (elementName == null) { throw new IllegalArgumentException("elementName == null!"); } if (attrName == null) { throw new IllegalArgumentException("attrName == null!"); } String key = elementName + "/" + attrName; TIFFAttrInfo info = (TIFFAttrInfo)attrInfoMap.get(key); if (info == null) { throw new IllegalArgumentException("No such attribute: " + key); } return info; } public 
int getElementMinChildren(String elementName) { TIFFElementInfo info = getElementInfo(elementName); return info.minChildren; } public int getElementMaxChildren(String elementName) { TIFFElementInfo info = getElementInfo(elementName); return info.maxChildren; } public String getElementDescription(String elementName, Locale locale) { if (!elementInfoMap.containsKey(elementName)) { throw new IllegalArgumentException("No such element: " + elementName); } return getResource(elementName, locale); } public int getChildPolicy(String elementName) { TIFFElementInfo info = getElementInfo(elementName); return info.childPolicy; } public String[] getChildNames(String elementName) { TIFFElementInfo info = getElementInfo(elementName); return info.childNames; } public String[] getAttributeNames(String elementName) { TIFFElementInfo info = getElementInfo(elementName); return info.attributeNames; } public int getAttributeValueType(String elementName, String attrName) { TIFFAttrInfo info = getAttrInfo(elementName, attrName); return info.valueType; } public int getAttributeDataType(String elementName, String attrName) { TIFFAttrInfo info = getAttrInfo(elementName, attrName); return info.dataType; } public boolean isAttributeRequired(String elementName, String attrName) { TIFFAttrInfo info = getAttrInfo(elementName, attrName); return info.isRequired; } public String getAttributeDefaultValue(String elementName, String attrName) { TIFFAttrInfo info = getAttrInfo(elementName, attrName); return info.defaultValue; } public String[] getAttributeEnumerations(String elementName, String attrName) { TIFFAttrInfo info = getAttrInfo(elementName, attrName); return info.enumerations; } public String getAttributeMinValue(String elementName, String attrName) { TIFFAttrInfo info = getAttrInfo(elementName, attrName); return info.minValue; } public String getAttributeMaxValue(String elementName, String attrName) { TIFFAttrInfo info = getAttrInfo(elementName, attrName); return info.maxValue; } public int 
getAttributeListMinLength(String elementName, String attrName) { TIFFAttrInfo info = getAttrInfo(elementName, attrName); return info.listMinLength; } public int getAttributeListMaxLength(String elementName, String attrName) { TIFFAttrInfo info = getAttrInfo(elementName, attrName); return info.listMaxLength; } public String getAttributeDescription(String elementName, String attrName, Locale locale) { String key = elementName + "/" + attrName; if (!attrInfoMap.containsKey(key)) { throw new IllegalArgumentException("No such attribute: " + key); } return getResource(key, locale); } public int getObjectValueType(String elementName) { TIFFElementInfo info = getElementInfo(elementName); return info.objectValueType; } public Class getObjectClass(String elementName) { TIFFElementInfo info = getElementInfo(elementName); if (info.objectValueType == VALUE_NONE) { throw new IllegalArgumentException( "Element cannot contain an object value: " + elementName); } return info.objectClass; } public Object getObjectDefaultValue(String elementName) { TIFFElementInfo info = getElementInfo(elementName); if (info.objectValueType == VALUE_NONE) { throw new IllegalArgumentException( "Element cannot contain an object value: " + elementName); } return info.objectDefaultValue; } public Object[] getObjectEnumerations(String elementName) { TIFFElementInfo info = getElementInfo(elementName); if (info.objectValueType == VALUE_NONE) { throw new IllegalArgumentException( "Element cannot contain an object value: " + elementName); } return info.objectEnumerations; } public Comparable getObjectMinValue(String elementName) { TIFFElementInfo info = getElementInfo(elementName); if (info.objectValueType == VALUE_NONE) { throw new IllegalArgumentException( "Element cannot contain an object value: " + elementName); } return info.objectMinValue; } public Comparable getObjectMaxValue(String elementName) { TIFFElementInfo info = getElementInfo(elementName); if (info.objectValueType == VALUE_NONE) { throw new 
IllegalArgumentException( "Element cannot contain an object value: " + elementName); } return info.objectMaxValue; } public int getObjectArrayMinLength(String elementName) { TIFFElementInfo info = getElementInfo(elementName); if (info.objectValueType == VALUE_NONE) { throw new IllegalArgumentException( "Element cannot contain an object value: " + elementName); } return info.objectArrayMinLength; } public int getObjectArrayMaxLength(String elementName) { TIFFElementInfo info = getElementInfo(elementName); if (info.objectValueType == VALUE_NONE) { throw new IllegalArgumentException( "Element cannot contain an object value: " + elementName); } return info.objectArrayMaxLength; } public TIFFMetadataFormat() {} }
apache-2.0
nate-rcl/irplus
ir_core/test/edu/ur/ir/index/DefaultPdfTextExtractorTest.java
6486
/**
 * Copyright 2008 University of Rochester
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.ur.ir.index;

import java.io.File;
import java.io.IOException;
import java.util.Properties;

import org.apache.commons.io.FileUtils;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import edu.ur.file.checksum.ChecksumCalculator;
import edu.ur.file.checksum.InMemoryChecksumService;
import edu.ur.file.db.FileInfo;
import edu.ur.ir.repository.Repository;
import edu.ur.ir.test.helper.PropertiesLoader;
import edu.ur.ir.test.helper.RepositoryBasedTestHelper;
import edu.ur.lucene.analysis.StandardWithISOLatin1AccentFilter;
import edu.ur.util.FileUtil;

/**
 * Tests {@link DefaultPdfTextExtractor}: stores a PDF in a test repository,
 * extracts its text, indexes the text in an in-memory Lucene index, and
 * verifies both that the extraction did not modify the stored file
 * (via MD5 checksums) and that expected terms are searchable.
 *
 * @author Nathan Sarr
 */
@Test(groups = { "baseTests" }, enabled = true)
public class DefaultPdfTextExtractorTest {

	/** Properties file with testing specific information
	 *  (repository paths, temp directory, sample PDF location). */
	PropertiesLoader propertiesLoader = new PropertiesLoader();

	/** Get the properties file */
	Properties properties = propertiesLoader.getProperties();

	/** Supplies the MD5 checksum calculator used to verify file integrity. */
	InMemoryChecksumService checksumService = new InMemoryChecksumService();

	/**
	 * Setup for testing.
	 *
	 * This deletes existing test directories if they exist so every test
	 * starts from a clean repository location.
	 */
	@BeforeMethod
	public void cleanDirectory() {
		try {
			File f = new File(properties.getProperty("a_repo_path"));
			if (f.exists()) {
				FileUtils.forceDelete(f);
			}
		} catch (IOException e) {
			throw new IllegalStateException(e);
		}
	}

	/**
	 * End-to-end test: add a PDF to the repository, extract its text with
	 * {@link DefaultPdfTextExtractor}, index the text, and search it.
	 *
	 * @throws Exception on any repository, extraction or indexing failure
	 */
	public void testIndexPdfDocument() throws Exception {
		RepositoryBasedTestHelper repoHelper = new RepositoryBasedTestHelper();
		Repository repo = repoHelper.createRepository("localFileServer",
				"displayName",
				"file_database",
				"my_repository",
				properties.getProperty("a_repo_path"),
				"default_folder");

		// create the first file to store in the temporary folder
		String tempDirectory = properties.getProperty("ir_core_temp_directory");
		File directory = new File(tempDirectory);

		// helper to create the file
		FileUtil testUtil = new FileUtil();
		testUtil.createDirectory(directory);

		String baseLocation = properties.getProperty("ir_core_location");
		String pdfFile = properties.getProperty("pdf_file");
		File f1 = new File(baseLocation + pdfFile);
		assert f1 != null : "File should not be null";
		assert f1.canRead(): "Should be able to read the file " + f1.getAbsolutePath();

		// checksum of the source PDF before it is stored
		ChecksumCalculator calc = checksumService.getChecksumCalculator("MD5");
		String checksum1 = calc.calculate(f1);

		FileInfo info = repo.getFileDatabase().addFile(f1, "indexed_pdf_file");
		info.setExtension("pdf");

		// storing the file must not alter its bytes
		String checksum2 = calc.calculate(new File(info.getFullPath()));
		assert checksum1.equals(checksum2) : "Checksum 1 : " + checksum1 + " should equal checksum2 : " + checksum2;

		FileTextExtractor documentCreator = new DefaultPdfTextExtractor();
		assert documentCreator.canExtractText(info.getExtension()) : "Cannot create document for extension " + info.getExtension();
		String text = documentCreator.getText(new File(info.getFullPath()));

		// text extraction must be read-only: checksum unchanged afterwards
		String checksum3 = calc.calculate(new File(info.getFullPath()));
		assert checksum2.equals(checksum3) : "Checkusm 2 " + checksum2 + " does not eqaual 3: " + checksum3;

		Document doc = new Document();
		doc.add(new Field("body",
				text,
				Field.Store.NO,
				Field.Index.ANALYZED));
		assert doc != null : "Document should be created";

		// create the lucene directory in memory
		Directory dir;
		try {
			dir = new RAMDirectory();
		} catch (Exception e1) {
			throw new RuntimeException(e1);
		}

		// store the document
		IndexWriter writer = null;
		try {
			writer = new IndexWriter(dir, new StandardWithISOLatin1AccentFilter(),
					true, IndexWriter.MaxFieldLength.LIMITED);
			writer.addDocument(doc);
			writer.optimize();
			writer.close();
		} catch (Exception e) {
			throw new RuntimeException(e);
		} finally {
			// second close is a defensive no-op when the writer closed cleanly
			// above; it matters only when addDocument/optimize threw
			if (writer != null) {
				try {
					writer.close();
				} catch (Exception e) {
					// do nothing
				}
			}
		}

		// search the document; the sample PDF is expected to contain the
		// terms "irFile" and "hello" exactly once each
		try {
			int hits = executeQuery("body", "irFile", dir);
			assert hits == 1 : "Hit count should equal 1 but equals " + hits;

			hits = executeQuery("body", "hello", dir);
			assert hits == 1 : "Hit count should equal 1 but equals " + hits;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}

		repoHelper.cleanUpRepository();
	}

	/**
	 * Executes the query returning the number of hits.
	 *
	 * @param field - field to search on
	 * @param queryString - query string
	 * @param dir - lucene index to search
	 *
	 * @return - number of hits
	 *
	 * @throws CorruptIndexException
	 * @throws IOException
	 * @throws ParseException
	 */
	private int executeQuery(String field, String queryString, Directory dir)
			throws CorruptIndexException, IOException, ParseException {
		IndexSearcher searcher = new IndexSearcher(dir);
		QueryParser parser = new QueryParser(field, new StandardWithISOLatin1AccentFilter());
		Query q1 = parser.parse(queryString);
		TopDocs hits = searcher.search(q1, 1000);
		int hitCount = hits.totalHits;
		searcher.close();
		return hitCount;
	}
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-kinesis/src/main/java/com/amazonaws/services/kinesisanalytics/model/LambdaOutput.java
9500
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.kinesisanalytics.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * When configuring application output, identifies an AWS Lambda function as the destination. You provide the function
 * Amazon Resource Name (ARN) and also an IAM role ARN that Amazon Kinesis Analytics can use to write to the function on
 * your behalf.
 * </p>
 * <p>
 * NOTE(review): this class is produced by the AWS SDK code generator — do not hand-edit the accessor/equals/hashCode
 * boilerplate; regenerate instead.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kinesisanalytics-2015-08-14/LambdaOutput" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class LambdaOutput implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * Amazon Resource Name (ARN) of the destination Lambda function to write to.
     * </p>
     * <note>
     * <p>
     * To specify an earlier version of the Lambda function than the latest, include the Lambda function version in the
     * Lambda function ARN. For more information about Lambda ARNs, see <a
     * href="/general/latest/gr/aws-arns-and-namespaces.html#arn-syntax-lambda">Example ARNs: AWS Lambda</a>
     * </p>
     * </note>
     */
    private String resourceARN;
    /**
     * <p>
     * ARN of the IAM role that Amazon Kinesis Analytics can assume to write to the destination function on your behalf.
     * You need to grant the necessary permissions to this role.
     * </p>
     */
    private String roleARN;

    /**
     * Sets the ARN of the destination Lambda function (may include an explicit function version — see class note).
     *
     * @param resourceARN
     *        Amazon Resource Name (ARN) of the destination Lambda function to write to.
     */
    public void setResourceARN(String resourceARN) {
        this.resourceARN = resourceARN;
    }

    /**
     * @return Amazon Resource Name (ARN) of the destination Lambda function to write to.
     */
    public String getResourceARN() {
        return this.resourceARN;
    }

    /**
     * Fluent variant of {@link #setResourceARN(String)}.
     *
     * @param resourceARN
     *        Amazon Resource Name (ARN) of the destination Lambda function to write to.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LambdaOutput withResourceARN(String resourceARN) {
        setResourceARN(resourceARN);
        return this;
    }

    /**
     * Sets the ARN of the IAM role that Kinesis Analytics assumes to write to the destination function. The role must
     * already be granted the necessary permissions.
     *
     * @param roleARN
     *        ARN of the IAM role that Amazon Kinesis Analytics can assume to write to the destination function on your
     *        behalf.
     */
    public void setRoleARN(String roleARN) {
        this.roleARN = roleARN;
    }

    /**
     * @return ARN of the IAM role that Amazon Kinesis Analytics can assume to write to the destination function on your
     *         behalf.
     */
    public String getRoleARN() {
        return this.roleARN;
    }

    /**
     * Fluent variant of {@link #setRoleARN(String)}.
     *
     * @param roleARN
     *        ARN of the IAM role that Amazon Kinesis Analytics can assume to write to the destination function on your
     *        behalf.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public LambdaOutput withRoleARN(String roleARN) {
        setRoleARN(roleARN);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getResourceARN() != null)
            sb.append("ResourceARN: ").append(getResourceARN()).append(",");
        if (getRoleARN() != null)
            sb.append("RoleARN: ").append(getRoleARN());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        // generated null-safe field-by-field comparison; XOR detects
        // "exactly one side null" for each field
        if (obj instanceof LambdaOutput == false)
            return false;
        LambdaOutput other = (LambdaOutput) obj;
        if (other.getResourceARN() == null ^ this.getResourceARN() == null)
            return false;
        if (other.getResourceARN() != null && other.getResourceARN().equals(this.getResourceARN()) == false)
            return false;
        if (other.getRoleARN() == null ^ this.getRoleARN() == null)
            return false;
        if (other.getRoleARN() != null && other.getRoleARN().equals(this.getRoleARN()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getResourceARN() == null) ? 0 : getResourceARN().hashCode());
        hashCode = prime * hashCode + ((getRoleARN() == null) ? 0 : getRoleARN().hashCode());
        return hashCode;
    }

    @Override
    public LambdaOutput clone() {
        try {
            return (LambdaOutput) super.clone();
        } catch (CloneNotSupportedException e) {
            // cannot happen: this class implements Cloneable
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // delegates wire-format serialization to the generated marshaller
        com.amazonaws.services.kinesisanalytics.model.transform.LambdaOutputMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
apache-2.0
travisbrown/scrooge
scrooge-generator/src/test/scala/com/twitter/scrooge/frontend/TypeResolverSpec.scala
11633
package com.twitter.scrooge.frontend

import com.twitter.scrooge.ast._
import com.twitter.scrooge.testutil.Spec

/**
 * Spec for TypeResolver: resolution of reference types, constants, services
 * and includes.
 *
 * Fix: the "transform MapType/SetType/ListType" tests previously matched with
 * lowercase patterns such as `case MapType(enumType, structType, None)`.
 * In Scala a lowercase identifier in a pattern is a fresh binding that
 * shadows the outer val and matches ANY value, so those tests asserted
 * nothing. They now use backquoted stable identifiers, which compare
 * against the outer vals with equality.
 */
class TypeResolverSpec extends Spec {
  "TypeResolve" should {
    val foo = EnumField(SimpleID("FOO"), 1, None)
    val bar = EnumField(SimpleID("BAR"), 2, Some("/** I am a doc. */"))
    val enum = Enum(SimpleID("SomeEnum"), Seq(foo, bar), None)
    val enumType = new EnumType(enum)
    val enumRef = ReferenceType(enum.sid)
    val struct = Struct(SimpleID("BlahBlah"), "BlahBlah", Seq(
      Field(1, SimpleID("baby"), "baby", TI16),
      Field(2, SimpleID("mama"), "mama", TI32),
      Field(3, SimpleID("papa"), "papa", TI64),
      Field(4, SimpleID("pupu"), "pupu", enumRef)
    ), None, Map.empty)
    val structType = new StructType(struct)
    val structRef = ReferenceType(struct.sid)
    val ex = Exception_(SimpleID("Boom"), "Boom", Seq(Field(1, SimpleID("msg"), "msg", enumRef)), None)
    val exType = new StructType(ex)
    val resolver = TypeResolver()
      .withMapping(enum.sid.name, enumType)
      .withMapping(struct.sid.name, structType)
      .withMapping(ex.sid.name, exType)

    // Builds a one-field struct named `structName` whose single field is
    // named `structName + "_field"` and has the given type.
    def createStruct(structName: String, fieldType: FieldType) = {
      val fieldName: String = structName + "_field"
      Struct(SimpleID(structName), structName, Seq(Field(1, SimpleID(fieldName), fieldName, fieldType)), None, Map.empty)
    }

    "throw exception on unknown type" in {
      intercept[TypeNotFoundException] {
        resolver(ReferenceType(Identifier("wtf")))
      }
    }

    "resolve a known type" in {
      resolver(enumRef) must be(enumType)
    }

    "resolve dependent types" in {
      TypeResolver()(enum, None) match {
        case ResolvedDefinition(enum2, resolver2) =>
          resolver2(struct, None) match {
            case ResolvedDefinition(struct2: Struct, _) =>
              struct2.fields(3).fieldType must be(enumType)
              // pass
            case _ =>
              fail()
          }
        case _ =>
          fail()
      }
    }

    "transform MapType" in {
      // backquoted identifiers compare against the outer vals instead of
      // binding fresh (always-matching) variables
      resolver(MapType(enumRef, structRef, None)) match {
        case MapType(`enumType`, `structType`, None) =>
          // pass
        case _ =>
          fail()
      }
    }

    "transform SetType" in {
      resolver(SetType(structRef, None)) match {
        case SetType(`structType`, None) =>
          // pass
        case _ =>
          fail()
      }
    }

    "transform ListType" in {
      resolver(ListType(structRef, None)) match {
        case ListType(`structType`, None) =>
          // pass
        case _ =>
          fail()
      }
    }

    "not break on Void" in {
      resolver(Void) must be(Void)
    }

    "transform a Field" in {
      val field = Field(42, SimpleID("foo"), "foo", structRef)
      resolver(field) must be(field.copy(fieldType = structType))
    }

    "transform a Field with enum constant default" in {
      val field = Field(1, SimpleID("field"), "field", enumRef,
        Some(IdRHS(Identifier("SomeEnum.FOO"))))
      resolver(field) must be(
        Field(1, SimpleID("field"), "field", enumType, Some(EnumRHS(enum, foo))))
    }

    "transform a Function" in {
      val field = Field(1, SimpleID("foo"), "foo", structRef)
      val ex = Field(2, SimpleID("ex"), "ex", structRef)
      val fun = Function(SimpleID("foo"), "foo", structRef, Seq(field), Seq(ex), None)
      resolver(fun) must be(
        Function(SimpleID("foo"), "foo", resolver(fun.funcType), Seq(resolver(field)),
          Seq(resolver(ex)), None))
    }

    "transform a TypeDef" in {
      val typedef = Typedef(SimpleID("foo"), enumRef, Map("some" -> "annotation"))
      resolver(typedef, None).definition must be(
        typedef.copy(fieldType = enumType))
    }

    "transform a Struct" in {
      resolver(struct, None).definition must be(struct.copy(fields = struct.fields.map(resolver.apply)))
    }

    "transform an Exception" in {
      resolver(ex, None).definition must be(ex.copy(fields = ex.fields.map(resolver.apply)))
    }

    "transform a Const" in {
      val const = ConstDefinition(SimpleID("foo"), enumRef,
        IdRHS(Identifier("SomeEnum.FOO")), None)
      resolver(const, None).definition must be(ConstDefinition(SimpleID("foo"),
        enumType, EnumRHS(enum, foo), None))
    }

    "const definition transitivity" in {
      // this code is ok
      //    const string line = "hi"
      //    const string copy = line
      val line = ConstDefinition(SimpleID("line"), TString, StringLiteral("hi"), None)
      val newResolver = resolver(line, None).resolver
      val copy = ConstDefinition(SimpleID("copy"), TString, IdRHS(SimpleID("line")), None)
      newResolver(copy, None).definition must be(
        ConstDefinition(SimpleID("copy"), TString, StringLiteral("hi"), None))

      // this code has type mismatch
      //    const string line = "hi"
      //    const i32 copy = line
      val copyWrongType = ConstDefinition(SimpleID("copy"), TI32, IdRHS(SimpleID("line")), None)
      intercept[TypeMismatchException] {
        newResolver(copyWrongType, None)
      }

      // this code has undefined symbol
      //    const string line = "hi"
      //    const string copy = noSuchConst
      val copyWrongId = ConstDefinition(SimpleID("copy"), TString,
        IdRHS(SimpleID("noSuchConst")), None)
      intercept[UndefinedConstantException] {
        newResolver(copyWrongId, None)
      }
    }

    "allow a valid MapRHS for a StructType" in {
      val resolver = TypeResolver()
      val testStruct1 = createStruct("Test1", TI32)
      val structType1 = StructType(testStruct1)
      val testStruct2 = createStruct("Test2", structType1)
      val structType2 = StructType(testStruct2)

      // { "Test2_field": { "Test1_field": 3 } } coerces to nested StructRHS
      val mapRHS = MapRHS(Seq((StringLiteral("Test1_field"), IntLiteral(3))))
      val mapRHS1 = MapRHS(Seq((StringLiteral("Test2_field"), mapRHS)))
      val value = resolver(mapRHS1, structType2)
      val test1Field = testStruct1.fields(0)
      val test2Field = testStruct2.fields(0)
      val structElems = Map(test2Field -> StructRHS(sid = structType1.sid,
        elems = Map(test1Field -> IntLiteral(3))))
      value must be(StructRHS(sid = structType2.sid, elems = structElems))
    }

    "throw a TypeMismatchException if invalid MapRHS passed in for a StructType" in {
      val resolver = TypeResolver()
      val structType = StructType(createStruct("Test1", TString))
      val mapRHS = MapRHS(Seq((StringLiteral("invalid_field"), StringLiteral("Hello"))))
      intercept[TypeMismatchException] {
        resolver(mapRHS, structType)
      }
    }

    "transform a Service" in {
      val fun = Function(SimpleID("foo"), "foo", structRef,
        Seq(Field(1, SimpleID("foo"), "foo", structRef)), Nil, None)
      val service = Service(SimpleID("Glurb"), None, Seq(fun), None)
      resolver(service, None).definition must be(service.copy(functions = Seq(resolver(fun))))
    }

    "resolve a service parent from same scope" in {
      val service1 = Service(SimpleID("Super"), None, Nil, None)
      val service2 = Service(
        SimpleID("Sub"),
        Some(ServiceParent(SimpleID("Super"), None)),
        Nil,
        None)
      val resolver = TypeResolver().withMapping(service1)
      resolver(service2, None).definition must be(service2.copy(parent = Some(ServiceParent(
        service1.sid,
        None,
        Some(service1)))))
    }

    "resolve a parameter from an included scope" in {
      val oneInt = Struct(SimpleID("TestRequest"), "TestRequest", Seq(), None, Map.empty)
      val doc = Document(Nil, Seq(oneInt))
      val resolver = TypeResolver().withMapping(Include("other.thrift", doc))
      val resolveFieldType: FieldType = resolver.resolveFieldType(QualifiedID(Seq("other", "TestRequest")))
      resolveFieldType.asInstanceOf[StructType].scopePrefix must be(Some(SimpleID("other")))
    }

    "resolve a service parent from an included scope" in {
      val service1 = Service(SimpleID("Super"), None, Nil, None)
      val otherDoc = Document(Nil, Seq(service1))
      val include = Include("other.thrift", otherDoc)
      val service2 = Service(
        SimpleID("Sub"),
        Some(ServiceParent(SimpleID("Super"), Some(SimpleID("other")))),
        Nil,
        None)
      val resolver = TypeResolver().withMapping(include)
      resolver(service2, None).definition must be(
        service2.copy(parent = Some(ServiceParent(
          SimpleID("Super"),
          Some(SimpleID("other")),
          Some(service1)))))
    }

    "resolve a typedef from an included scope" in {
      val oneInt = Struct(SimpleID("OneInt"), "OneInt",
        Seq(Field(1, SimpleID("id"), "id", TI32, None, Requiredness.Default)), None, Map.empty)
      val typedefInt = Typedef(SimpleID("ManyInts"),
        ListType(ReferenceType(Identifier("OneInt")), None), Map.empty)
      val doc1 = Document(Nil, Seq(oneInt, typedefInt))

      val collectionStruct = Struct(SimpleID("IntCollection"), "IntCollection", Seq(
        Field(1, SimpleID("scores1"), "scores1", ReferenceType(Identifier("typedef1.ManyInts")), None, Requiredness.Default),
        Field(2, SimpleID("scores2"), "scores2", SetType(ReferenceType(Identifier("typedef1.OneInt")), None), None, Requiredness.Default)
      ), None, Map("foo" -> "bar"))
      val doc2 = Document(Seq(Include("src/test/thrift/typedef1.thrift", doc1)), Seq(collectionStruct))

      val resolvedDoc = TypeResolver()(doc2).document
      resolvedDoc.defs(0) match {
        case Struct(_, _, fields, _, annotations) => {
          fields(0) match {
            case Field(1, _, _, ListType(StructType(_, Some(SimpleID("typedef1", _))), None), _, _, _, _, _) => // pass
            case _ => fail()
          }
          fields(1) match {
            case Field(2, _, _, SetType(StructType(_, Some(SimpleID("typedef1", _))), None), _, _, _, _, _) => // pass
            case _ => fail()
          }
          annotations must be(Map("foo" -> "bar"))
        }
        case _ =>
          fail()
      }
    }

    // Parses `input` as a Thrift document and resolves it; used by the
    // position-reporting tests below.
    def resolve(input: String): ResolvedDocument = {
      val parser = new ThriftParser(Importer("."), strict = true)
      val doc = parser.parse(input, parser.document)
      TypeResolver()(doc)
    }

    "include positions in type errors" should {
      "TypeNotFoundException" in {
        val input = "const UnknownType name = 15"
        val ex = intercept[TypeNotFoundException] { resolve(input) }
        ex.node.pos.line mustBe 1
        ex.node.pos.column mustBe 7
      }

      "UndefinedConstantException" in {
        val input = "const i32 i = UnknownConst"
        val ex = intercept[UndefinedConstantException] { resolve(input) }
        ex.node.pos.line mustBe 1
        ex.node.pos.column mustBe 15
      }

      "UndefinedSymbolException" in {
        val input = "const i32 NotAService = 4\n" +
          "service S extends NotAService {}"
        val ex = intercept[UndefinedSymbolException] { resolve(input) }
        ex.node.pos.line mustBe 2
        ex.node.pos.column mustBe 19
      }

      "TypeMismatchException" in {
        val input = """const string name = "name"
                      |const i32 i = name
                    """.stripMargin
        val ex = intercept[TypeMismatchException] { resolve(input) }
        ex.node.pos.line mustBe 2
        ex.node.pos.column mustBe 15
      }

      "QualifierNotFoundException" in {
        val input = "const i32 i = UnknownImport.SomeConst"
        val ex = intercept[QualifierNotFoundException] { resolve(input) }
        ex.node.pos.line mustBe 1
        ex.node.pos.column mustBe 15
      }
    }
  }
}
apache-2.0
stevearm/syncaws
src/main/java/com/horsefire/syncaws/tasks/VersionTask.java
641
package com.horsefire.syncaws.tasks; import java.io.InputStream; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class VersionTask implements Task { private static final Logger LOG = LoggerFactory .getLogger(VersionTask.class); public VersionTask() { } public void validate() { // Do nothing } public void run() throws Exception { String path = "/META-INF/maven/com.horsefire/syncaws/pom.properties"; InputStream stream = getClass().getResourceAsStream(path); Properties props = new Properties(); props.load(stream); LOG.info("SyncAws {}", props.get("version")); } }
apache-2.0
bileto/transitfeed
transitfeed/gtfsfactory.py
8424
#!/usr/bin/python2.5

# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from agency import Agency
from fareattribute import FareAttribute
from farerule import FareRule
from feedinfo import FeedInfo
from frequency import Frequency
from loader import Loader
import problems
from route import Route
from schedule import Schedule
from serviceperiod import ServicePeriod
from shape import Shape
from shapepoint import ShapePoint
from stop import Stop
from stoptime import StopTime
from transfer import Transfer
from trip import Trip
from stopexternalids import StopExternalIds


class GtfsFactory(object):
  """A factory for the default GTFS objects.

  Maintains two registries that extensions may customize:
    - a class mapping (class name -> transitfeed class), exposed via
      attribute access (e.g. factory.Stop);
    - a file mapping (GTFS filename -> {required, loading_order, classes}).
  A loading_order of None means the Loader handles that file specially
  rather than through the standardized ordered loading.
  """

  # Fields every file-mapping entry must provide (checked by AddMapping).
  _REQUIRED_MAPPING_FIELDS = ['classes', 'required', 'loading_order']

  def __init__(self):

    self._class_mapping = {
        'Agency': Agency,
        'ServicePeriod': ServicePeriod,
        'FareAttribute': FareAttribute,
        'FareRule': FareRule,
        'Frequency': Frequency,
        'FeedInfo': FeedInfo,
        'Shape': Shape,
        'ShapePoint': ShapePoint,
        'Stop': Stop,
        'StopTime': StopTime,
        'Route': Route,
        'Transfer': Transfer,
        'Trip': Trip,
        'Schedule': Schedule,
        'Loader': Loader,
        'StopExternalIds': StopExternalIds
    }

    # NOTE: loading_order values must be unique among non-None entries;
    # GetLoadingOrder keys a dict by them, so duplicates would collide.
    self._file_mapping = {
        'agency.txt': {
            'required': True, 'loading_order': 0,
            'classes': ['Agency']
        },
        'calendar.txt': {
            'required': False, 'loading_order': None,
            'classes': ['ServicePeriod']},
        'calendar_dates.txt': {
            'required': False, 'loading_order': None,
            'classes': ['ServicePeriod']},
        'fare_attributes.txt': {
            'required': False, 'loading_order': 50,
            'classes': ['FareAttribute']},
        'fare_rules.txt': {
            'required': False, 'loading_order': 60,
            'classes': ['FareRule']},
        'feed_info.txt': {
            'required': False, 'loading_order': 100,
            'classes': ['FeedInfo']},
        'frequencies.txt': {
            'required': False, 'loading_order': 70,
            'classes': ['Frequency']},
        'shapes.txt': {
            'required': False, 'loading_order': None,
            'classes': ['Shape', 'ShapePoint']},
        'stops.txt': {
            'required': True, 'loading_order': 10,
            'classes': ['Stop']},
        'stop_times.txt': {
            'required': True, 'loading_order': None,
            'classes': ['StopTime']},
        'routes.txt': {
            'required': True, 'loading_order': 20,
            'classes': ['Route']},
        'transfers.txt': {
            'required': False, 'loading_order': 30,
            'classes': ['Transfer']},
        'trips.txt': {
            'required': True, 'loading_order': 40,
            'classes': ['Trip']},
        'stop_external_ids.txt': {'required': True, 'loading_order': None,
                                  'classes': ['StopExternalIds']},
    }

  def __getattr__(self, name):
    # Expose registered classes as attributes, e.g. factory.Stop.
    if name in self._class_mapping:
      return self._class_mapping[name]
    raise AttributeError(name)

  def GetGtfsClassByFileName(self, filename):
    """Returns the transitfeed class corresponding to a GTFS file.

    Args:
      filename: The filename whose class is to be returned

    Returns:
      The class, or None when the filename is unknown.

    Raises:
      NonStandardMapping if the specified filename has more than one
          corresponding class
    """
    if filename not in self._file_mapping:
      return None
    mapping = self._file_mapping[filename]
    class_list = mapping['classes']
    if len(class_list) > 1:
      raise problems.NonStandardMapping(filename)
    else:
      return self._class_mapping[class_list[0]]

  def GetLoadingOrder(self):
    """Returns a list of filenames sorted by loading order.
    Only includes files that Loader's standardized loading knows how to load
    (i.e. entries whose loading_order is not None)."""
    result = {}
    for filename, mapping in self._file_mapping.iteritems():
      loading_order = mapping['loading_order']
      if loading_order is not None:
        result[loading_order] = filename
    return list(result[key] for key in sorted(result))

  def IsFileRequired(self, filename):
    """Returns true if a file is required by GTFS, false otherwise.
    Unknown files are, by definition, not required"""
    if filename not in self._file_mapping:
      return False
    mapping = self._file_mapping[filename]
    return mapping['required']

  def GetKnownFilenames(self):
    """Returns a list of all known filenames"""
    return self._file_mapping.keys()

  def RemoveMapping(self, filename):
    """Removes an entry from the list of known filenames.
       An entry is identified by its filename. Removing an unknown filename
       is a silent no-op.

       filename: The filename whose mapping is to be removed.
    """
    if filename in self._file_mapping:
      del self._file_mapping[filename]

  def AddMapping(self, filename, new_mapping):
    """Adds an entry to the list of known filenames.

    Args:
        filename: The filename whose mapping is being added.
        new_mapping: A dictionary with the mapping to add. Must contain all
            fields in _REQUIRED_MAPPING_FIELDS.

    Raises:
        DuplicateMapping if the filename already exists in the mapping
        InvalidMapping if not all required fields are present
    """
    for field in self._REQUIRED_MAPPING_FIELDS:
      if field not in new_mapping:
        raise problems.InvalidMapping(field)
    if filename in self.GetKnownFilenames():
      raise problems.DuplicateMapping(filename)
    self._file_mapping[filename] = new_mapping

  def UpdateMapping(self, filename, mapping_update):
    """Updates an entry in the list of known filenames.
       An entry is identified by its filename.

    Args:
        filename: The filename whose mapping is to be updated
        mapping_update: A dictionary containing the fields to update and their
            new values.

    Raises:
        NonexistentMapping if the filename does not exist in the mapping
    """
    if filename not in self._file_mapping:
      raise problems.NonexistentMapping(filename)
    mapping = self._file_mapping[filename]
    mapping.update(mapping_update)

  def AddClass(self, class_name, gtfs_class):
    """Adds an entry to the list of known classes.

    Args:
        class_name: A string with name through which gtfs_class is to be made
                    accessible.
        gtfs_class: The class to be added.

    Raises:
        DuplicateMapping if class_name is already present in the class mapping.
    """
    if class_name in self._class_mapping:
      raise problems.DuplicateMapping(class_name)
    self._class_mapping[class_name] = gtfs_class

  def UpdateClass(self, class_name, gtfs_class):
    """Updates an entry in the list of known classes.

    Args:
        class_name: A string with the class name that is to be updated.
        gtfs_class: The new class

    Raises:
        NonexistentMapping if there is no class with the specified class_name.
    """
    if class_name not in self._class_mapping:
      raise problems.NonexistentMapping(class_name)
    self._class_mapping[class_name] = gtfs_class

  def RemoveClass(self, class_name):
    """Removes an entry from the list of known classes.

    Args:
        class_name: A string with the class name that is to be removed.

    Raises:
        NonexistentMapping if there is no class with the specified class_name.
    """
    if class_name not in self._class_mapping:
      raise problems.NonexistentMapping(class_name)
    del self._class_mapping[class_name]

  def GetProblemReporter(self):
    # Returns a fresh default ProblemReporter; extensions may override.
    return problems.ProblemReporter()


def GetGtfsFactory():
  """Called by FeedValidator to retrieve this extension's GtfsFactory.
     Extensions will most likely only need to create an instance of
     transitfeed.GtfsFactory, call {Remove,Add,Update}Mapping as needed, and
     return that instance"""
  return GtfsFactory()
apache-2.0
peterhuene/puppetcpp
lib/src/facts/facter.cc
3744
#include <puppet/facts/facter.hpp>
#include <puppet/cast.hpp>
#include <facter/facts/collection.hpp>
#include <facter/facts/scalar_value.hpp>
#include <facter/facts/map_value.hpp>
#include <facter/facts/array_value.hpp>
#include <boost/algorithm/string.hpp>

using namespace std;
using namespace puppet::runtime;
using namespace facter::facts;

namespace puppet { namespace facts {

    // Populates the backing facter collection with the default, external and
    // environment facts.  Custom (Ruby) facts are not loaded here.
    facter::facter()
    {
        // Add default facts
        _collection.add_default_facts(false);
        _collection.add_external_facts();
        // TODO: support additional locations for external facts?
        _collection.add_environment_facts();
        // TODO: add custom facts? Need to initialize the Ruby VM in main
    }

    // Looks up a fact by name, converting it into a runtime value and caching
    // it on first access.  Returns an empty shared_ptr when the fact does not
    // exist in the collection.
    shared_ptr<values::value const> facter::lookup(string const& name)
    {
        shared_ptr<values::value const> value;

        // First check the cache
        auto it = _cache.find(name);
        if (it == _cache.end()) {
            // Not in cache, check the fact collection.
            // Note: store() is a no-op for a null value, so a missing fact is
            // not negatively cached and will be re-queried on each lookup.
            store(name, _collection[name]);
            it = _cache.find(name);
        }
        if (it != _cache.end()) {
            value = it->second;
        }
        return value;
    }

    // Enumerates facts through 'callback'; enumeration stops when the callback
    // returns false.  When 'accessed' is true only facts already pulled into
    // the cache are visited; otherwise every fact in the collection is first
    // converted into the cache and then visited.
    void facter::each(bool accessed, function<bool(string const&, shared_ptr<values::value const> const&)> const& callback)
    {
        // If all facts, enumerate all the facts and store in the cache
        if (!accessed) {
            _collection.each([this](string const& name, ::facter::facts::value const* value) {
                if (_cache.count(name)) {
                    // Already converted; keep enumerating.
                    return true;
                }
                store(name, value);
                return true;
            });
        }

        // Enumerate what's in the cache
        for (auto& kvp : _cache) {
            if (!callback(kvp.first, kvp.second)) {
                break;
            }
        }
    }

    // Converts a facter value into a runtime value.  With no parent, the
    // result is cached under the lower-cased fact name; with a parent, the
    // result is appended to the parent array or inserted into the parent hash
    // under 'name'.  Unrecognized facter value types convert to the
    // default-constructed runtime value.
    void facter::store(string const& name, ::facter::facts::value const* value, values::value* parent)
    {
        if (!value) {
            return;
        }

        // Map each supported facter value type onto the runtime value type.
        values::value converted;
        if (auto ptr = dynamic_cast<string_value const*>(value)) {
            converted = ptr->value();
        } else if (auto ptr = dynamic_cast<integer_value const*>(value)) {
            converted = ptr->value();
        } else if (auto ptr = dynamic_cast<boolean_value const*>(value)) {
            converted = ptr->value();
        } else if (auto ptr = dynamic_cast<double_value const*>(value)) {
            converted = static_cast<double>(ptr->value());
        } else if (auto ptr = dynamic_cast<array_value const*>(value)) {
            // Recursively convert each element into the new array.
            converted = values::array();
            ptr->each([&](::facter::facts::value const* element) {
                // Array elements carry no name of their own.
                store(string(), element, &converted);
                return true;
            });
        } else if (auto ptr = dynamic_cast<map_value const*>(value)) {
            // Recursively convert each entry into the new hash.
            converted = values::hash();
            ptr->each([&](string const& name, ::facter::facts::value const* element) {
                store(name, element, &converted);
                return true;
            });
        }

        // If parent, add to array or map
        if (parent) {
            // boost::get is used here because we know the parent is an array or hash and not a variable
            if (auto ptr = boost::get<values::array>(parent)) {
                ptr->emplace_back(rvalue_cast(converted));
            } else if (auto ptr = boost::get<values::hash>(parent)) {
                ptr->set(name, rvalue_cast(converted));
            }
        } else {
            // Top-level fact names are cached lower-cased.
            _cache.emplace(boost::to_lower_copy(name), std::make_shared<values::value>(rvalue_cast(converted)));
        }
    }

}}  // namespace puppet::facts
apache-2.0
rust-rethinkdb/reql
reql/src/cmd/literal.rs
235
use crate::{cmd, Command}; use ql2::term::TermType; pub trait Arg { fn arg(self) -> cmd::Arg<()>; } impl Arg for Command { fn arg(self) -> cmd::Arg<()> { Self::new(TermType::Literal).with_arg(self).into_arg() } }
apache-2.0
Esimorp/MongoGO
app/src/sagas/todos.js
768
import {takeLatest} from "redux-saga"; import {call, put, fork} from "redux-saga/effects"; import {getAll} from "../services/todos"; import {message} from "antd"; function* getTodos() { try { const {jsonResult} = yield call(getAll); if (jsonResult.data) { yield put({ type: 'todos/get/success', payload: jsonResult.data, }); } } catch (err) { message.error(err); //yield put({ // type: 'todos/get/failed', // err, //}); } } function* watchTodosGet() { yield takeLatest('todos/get', getTodos) } export default function*() { yield fork(watchTodosGet); // Load todos. yield put({ type: 'todos/get', }); }
apache-2.0
kjartab/langeland
js/langeland/TimeModule.js
2519
var LS = LS || {}; LS.timeModule = (function() { function getTimeTextOld(timeString) { var now = new Date(); var time = convertTime(timeString); var diff = now.getTime() - time.getTime(); console.log(diff); if (diff < 1000*2*3600) { return Math.round(diff/(60*1000)) + ' min'; } else if (diff < 1000*24*3600) { return Math.round(diff/(3600*1000)) + 'timar'; } else if (diff < 1000*24*2*3600) { return ; } else if (diff < 1000*24*14*3600) { return Math.floor(diff/(24*3600*1000)) + ' dagar'; } else if (diff < 1000*24*31*2*3600){ return Math.floor(diff/(24*3600*7)) + ' veker'; } else if (diff < 1000*24*3600*180) { return time.getDate() + '. ' + monthName(time.getMonth()); } return 'I fjor'; } function getTimeText(timeString) { var now = new Date(); var time = convertTime(timeString); console.log(time); return time.getHours() + ':' + (time.getMinutes()<10 ? '0' : '') + time.getMinutes()+ ' ' + time.getDate() + '. ' + monthName(time.getMonth()); } function getDateText(time) { console.log(time); return time.getDate() + '. ' + monthName(time.getMonth()); } function fullMonthName(monthInteger) { switch(monthInteger) { case 0: return 'januar'; break; case 1: return 'februar'; break; case 2: return 'mars'; break; case 3: return 'april'; break; case 4: return 'mai'; break; case 5: return 'juni'; break; case 6: return 'juli'; break; case 7: return 'august'; break; case 8: return 'september'; break; case 9: return 'oktober'; break; case 10: return 'november'; break; case 11: return 'desember'; break; } } function monthName(monthInteger) { switch(monthInteger) { case 0: return 'jan'; break; case 1: return 'feb'; break; case 2: return 'mar'; break; case 3: return 'apr'; break; case 4: return 'mai'; break; case 5: return 'jun'; break; case 6: return 'jul'; break; case 7: return 'aug'; break; case 8: return 'sept'; break; case 9: return 'okt'; break; case 10: return 'nov'; break; case 11: return 'des'; break; } } return { getTimeText : getTimeText, getDateText : getDateText } })();
apache-2.0
wowiwj/cocos2dx
RunRunRun/Classes/Entity.hpp
716
// // Entity.hpp // RunRunRun // // Created by wangju on 15/10/7. // Copyright © 2015年 com.wangju.run01. All rights reserved. // #ifndef Entity_hpp #define Entity_hpp #include <cocos2d.h> #include "ControllerListener.hpp" #include "Controller.hpp" USING_NS_CC; class Entity : public Node,public ControllerListener { public: //绑定精灵 void BindSprite(Sprite* sprite); //设置控制器 void setController(Controller* controller); //实现SimpleMoveLisrener接口方法 virtual void setTagPosition(int x,int y); virtual Point getTagPosition(); protected: Sprite* m_sprite; Controller* m_Controller; }; #endif /* Entity_hpp */
apache-2.0
googleads/google-ads-java
google-ads-stubs-v9/src/main/java/com/google/ads/googleads/v9/errors/KeywordPlanErrorProto.java
3295
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v9/errors/keyword_plan_error.proto

package com.google.ads.googleads.v9.errors;

// NOTE(review): generated file — change the .proto and regenerate instead of
// editing by hand; any manual edits will be lost on the next generation.
public final class KeywordPlanErrorProto {
  private KeywordPlanErrorProto() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (com.google.protobuf.ExtensionRegistryLite) registry);
  }
  static final com.google.protobuf.Descriptors.Descriptor
    internal_static_google_ads_googleads_v9_errors_KeywordPlanErrorEnum_descriptor;
  static final
    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_ads_googleads_v9_errors_KeywordPlanErrorEnum_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for keyword_plan_error.proto; must stay
    // byte-for-byte identical to the protoc output.
    java.lang.String[] descriptorData = {
      "\n7google/ads/googleads/v9/errors/keyword" +
      "_plan_error.proto\022\036google.ads.googleads." +
      "v9.errors\032\034google/api/annotations.proto\"" +
      "\310\003\n\024KeywordPlanErrorEnum\"\257\003\n\020KeywordPlan" +
      "Error\022\017\n\013UNSPECIFIED\020\000\022\013\n\007UNKNOWN\020\001\022\037\n\033B" +
      "ID_MULTIPLIER_OUT_OF_RANGE\020\002\022\020\n\014BID_TOO_" +
      "HIGH\020\003\022\017\n\013BID_TOO_LOW\020\004\022\"\n\036BID_TOO_MANY_" +
      "FRACTIONAL_DIGITS\020\005\022\030\n\024DAILY_BUDGET_TOO_" +
      "LOW\020\006\022+\n\'DAILY_BUDGET_TOO_MANY_FRACTIONA" +
      "L_DIGITS\020\007\022\021\n\rINVALID_VALUE\020\010\022 \n\034KEYWORD" +
      "_PLAN_HAS_NO_KEYWORDS\020\t\022\034\n\030KEYWORD_PLAN_" +
      "NOT_ENABLED\020\n\022\032\n\026KEYWORD_PLAN_NOT_FOUND\020" +
      "\013\022\017\n\013MISSING_BID\020\r\022\033\n\027MISSING_FORECAST_P" +
      "ERIOD\020\016\022\037\n\033INVALID_FORECAST_DATE_RANGE\020\017" +
      "\022\020\n\014INVALID_NAME\020\020B\360\001\n\"com.google.ads.go" +
      "ogleads.v9.errorsB\025KeywordPlanErrorProto" +
      "P\001ZDgoogle.golang.org/genproto/googleapi" +
      "s/ads/googleads/v9/errors;errors\242\002\003GAA\252\002" +
      "\036Google.Ads.GoogleAds.V9.Errors\312\002\036Google" +
      "\\Ads\\GoogleAds\\V9\\Errors\352\002\"Google::Ads::" +
      "GoogleAds::V9::Errorsb\006proto3"
    };
    descriptor = com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          com.google.api.AnnotationsProto.getDescriptor(),
        });
    internal_static_google_ads_googleads_v9_errors_KeywordPlanErrorEnum_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_google_ads_googleads_v9_errors_KeywordPlanErrorEnum_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_google_ads_googleads_v9_errors_KeywordPlanErrorEnum_descriptor,
        new java.lang.String[] { });
    com.google.api.AnnotationsProto.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
apache-2.0
condast/Collin
Workspace/org.collin.core/src/org/collin/core/transaction/TetraTransaction.java
3350
package org.collin.core.transaction; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Date; import java.util.EventObject; import java.util.HashSet; import org.collin.core.essence.TetraEvent; import org.collin.core.essence.TetraEvent.Results; import org.collin.core.graph.ICollINVertex; import org.collin.core.graph.IEdge; import org.condast.commons.strings.StringStyler; public class TetraTransaction<D extends Object> extends EventObject { private static final long serialVersionUID = 1L; public enum States{ START, PROGRESS, COMPLETE; @Override public String toString() { return StringStyler.prettyString( super.toString( )); } } private States state; private long userId; private D data; private double progress; private Date create; private Collection<IEdge<D>> history; private Collection<ITransactionListener<D>> listeners; public TetraTransaction( Object source,long userId ) { this( source, userId, States.START, null, 0); } public TetraTransaction( Object source,long userId, D data ) { this( source, userId, States.START, data, 0); } public TetraTransaction( Object source, long userId, States state, D data, double progress ) { super(source); this.state = state; this.userId = userId; this.data = data; this.progress = progress; this.history = new HashSet<>(); this.create = Calendar.getInstance().getTime(); this.listeners = new ArrayList<>(); } public long getUserId() { return userId; } public States getState() { return state; } public D getData() { return data; } public double getProgress() { return progress; } public void setProgress(double progress) { this.progress = progress; } public boolean isFinished() { return ( this.progress >= 100 ); } public boolean addHistory( IEdge<D> node ) { if( this.history.contains(node)) return false; return this.history.add(node); } @SuppressWarnings("unchecked") public ICollINVertex<D>[] getHistory(){ return this.history.toArray( new ICollINVertex[ this.history.size()]); } public 
boolean hasBeenProcessed( IEdge<D> node ) { if( node == null ) return true; return this.history.contains(node); } public Date getCreate() { return create; } public boolean addTransactionListener( ITransactionListener<D> listener ) { return this.listeners.add( listener); } public boolean removeTransactionListener( ITransactionListener<D> listener ) { return this.listeners.remove( listener); } /** * Update the transaction. Returns true if the transaction was successfully updated, * Every listener of the transaction is one of the tetra's in the compass, and only one * should be able to give a result other than CONTINUE at any given time * and the next node can be notified. * @param source * @param event * @return */ public Results updateTransaction( ICollINVertex<D> source, TetraEvent<D> event ) { Results result = Results.COMPLETE; for( ITransactionListener<D> listener: this.listeners ) { result = listener.transactionUpdateRequest( source, event); if( !Results.CONTINUE.equals(result)) return result; } return result; } }
apache-2.0
cloudfoundry/cf-java-client
cloudfoundry-client/src/main/java/org/cloudfoundry/client/v3/_ToManyRelationship.java
1090
/* * Copyright 2013-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cloudfoundry.client.v3; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.immutables.value.Value; import java.util.List; /** * Represents a to-one relationship to another entity */ @JsonDeserialize @Value.Immutable abstract class _ToManyRelationship { /** * The relationship data */ @JsonProperty("data") abstract List<Relationship> getData(); }
apache-2.0
h-crisis/assistant
test/ichikawa/common/Shape2GeoJson.java
2727
package ichikawa.common; import org.geotools.data.shapefile.ShapefileDataStore; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureSource; import org.geotools.geojson.feature.FeatureJSON; import java.io.*; import java.nio.charset.Charset; /** * Created by manabu on 2016/06/24. * ShapeファイルをGeoJsonファイルに変換するクラス */ public class Shape2GeoJson { /** * ShapeファイルからGeoJsonファイルを作成する * @param inFile Shapeファイル * @param inFileEncoding Shapeファイルの文字コード(Shift_JIS, UTF-8, EUC...) * @param outFile GeoJsonファイル * @param outFileEncoding GeoJsonファイルの文字コード(Shift_JIS, UTF-8, EUC...) * @throws Exception */ public static void createGeoJson(File inFile, String inFileEncoding, File outFile, String outFileEncoding) throws Exception { getGeoJSON(getJSON(inFile, inFileEncoding),outFile, outFileEncoding); } /** * ShapeファイルからJSON形式の文字列を返すメソッド * @param f Shapeファイル * @param encoding Shapeファイルの文字コード * @return JSON形式の文字列 * @throws Exception */ public static String getJSON(File f, String encoding)throws Exception{ f.setReadOnly(); ShapefileDataStore store = new ShapefileDataStore(f.toURI().toURL()); Charset cs=Charset.forName(encoding); store.setCharset(cs); SimpleFeatureSource source = store.getFeatureSource(); SimpleFeatureCollection featureCollection = source.getFeatures(); String geoJson; FeatureJSON fj = new FeatureJSON(); StringWriter writer = new StringWriter(); fj.writeFeatureCollection(featureCollection, writer); geoJson = writer.toString(); return geoJson; } /** * JSON形式の文字列からGeoJSONファイルを作成する * @param json JSON形式の文字列 * @param outFile GeoJSONファイル * @param encoding GeoJSONファイルの文字コード * @throws Exception */ public static void getGeoJSON(String json,File outFile,String encoding) throws Exception{ PrintWriter pw=null; try{ FileOutputStream fos = new FileOutputStream(outFile); OutputStreamWriter osw = new OutputStreamWriter(fos,encoding); pw = new PrintWriter(osw); pw.write(json); pw.close(); pw=null; }catch(IOException e){ e.printStackTrace(); 
}finally{ if(pw!=null){ pw.close(); } } } }
apache-2.0
FasterXML/jackson-core
src/main/java/com/fasterxml/jackson/core/JsonGenerator.java
108981
/* Jackson JSON-processor. * * Copyright (c) 2007- Tatu Saloranta, tatu.saloranta@iki.fi */ package com.fasterxml.jackson.core; import java.io.*; import java.math.BigDecimal; import java.math.BigInteger; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import com.fasterxml.jackson.core.JsonParser.NumberType; import com.fasterxml.jackson.core.io.CharacterEscapes; import com.fasterxml.jackson.core.type.WritableTypeId; import com.fasterxml.jackson.core.type.WritableTypeId.Inclusion; import com.fasterxml.jackson.core.util.JacksonFeatureSet; import com.fasterxml.jackson.core.util.VersionUtil; import static com.fasterxml.jackson.core.JsonTokenId.*; /** * Base class that defines public API for writing JSON content. * Instances are created using factory methods of * a {@link JsonFactory} instance. * * @author Tatu Saloranta */ public abstract class JsonGenerator implements Closeable, Flushable, Versioned { /** * Default set of {@link StreamWriteCapability}ies that may be used as * basis for format-specific readers (or as bogus instance if non-null * set needs to be passed). * * @since 2.12 */ protected final static JacksonFeatureSet<StreamWriteCapability> DEFAULT_WRITE_CAPABILITIES = JacksonFeatureSet.fromDefaults(StreamWriteCapability.values()); /** * Default set of {@link StreamWriteCapability}ies for typical textual formats, * to use either as-is, or as a base with possible differences. * * @since 2.12 */ protected final static JacksonFeatureSet<StreamWriteCapability> DEFAULT_TEXTUAL_WRITE_CAPABILITIES = DEFAULT_WRITE_CAPABILITIES.with(StreamWriteCapability.CAN_WRITE_FORMATTED_NUMBERS); /** * Default set of {@link StreamWriteCapability}ies for typical binary formats, * to use either as-is, or as a base with possible differences. 
* * @since 2.12 */ protected final static JacksonFeatureSet<StreamWriteCapability> DEFAULT_BINARY_WRITE_CAPABILITIES = DEFAULT_WRITE_CAPABILITIES.with(StreamWriteCapability.CAN_WRITE_BINARY_NATIVELY); /** * Enumeration that defines all togglable features for generators. */ public enum Feature { // // Low-level I/O / content features /** * Feature that determines whether generator will automatically * close underlying output target that is NOT owned by the * generator. * If disabled, calling application has to separately * close the underlying {@link OutputStream} and {@link Writer} * instances used to create the generator. If enabled, generator * will handle closing, as long as generator itself gets closed: * this happens when end-of-input is encountered, or generator * is closed by a call to {@link JsonGenerator#close}. *<p> * Feature is enabled by default. */ AUTO_CLOSE_TARGET(true), /** * Feature that determines what happens when the generator is * closed while there are still unmatched * {@link JsonToken#START_ARRAY} or {@link JsonToken#START_OBJECT} * entries in output content. If enabled, such Array(s) and/or * Object(s) are automatically closed; if disabled, nothing * specific is done. *<p> * Feature is enabled by default. */ AUTO_CLOSE_JSON_CONTENT(true), /** * Feature that specifies that calls to {@link #flush} will cause * matching <code>flush()</code> to underlying {@link OutputStream} * or {@link Writer}; if disabled this will not be done. * Main reason to disable this feature is to prevent flushing at * generator level, if it is not possible to prevent method being * called by other code (like <code>ObjectMapper</code> or third * party libraries). *<p> * Feature is enabled by default. */ FLUSH_PASSED_TO_STREAM(true), // // Quoting-related features /** * Feature that determines whether JSON Object field names are * quoted using double-quotes, as specified by JSON specification * or not. 
Ability to disable quoting was added to support use * cases where they are not usually expected, which most commonly * occurs when used straight from Javascript. *<p> * Feature is enabled by default (since it is required by JSON specification). * * @deprecated Since 2.10 use {@link com.fasterxml.jackson.core.json.JsonWriteFeature#QUOTE_FIELD_NAMES} instead */ @Deprecated QUOTE_FIELD_NAMES(true), /** * Feature that determines whether "exceptional" (not real number) * float/double values are output as quoted strings. * The values checked are Double.Nan, * Double.POSITIVE_INFINITY and Double.NEGATIVE_INIFINTY (and * associated Float values). * If feature is disabled, these numbers are still output using * associated literal values, resulting in non-conformant * output. *<p> * Feature is enabled by default. * * @deprecated Since 2.10 use {@link com.fasterxml.jackson.core.json.JsonWriteFeature#WRITE_NAN_AS_STRINGS} instead */ @Deprecated QUOTE_NON_NUMERIC_NUMBERS(true), // // Character escaping features /** * Feature that specifies that all characters beyond 7-bit ASCII * range (i.e. code points of 128 and above) need to be output * using format-specific escapes (for JSON, backslash escapes), * if format uses escaping mechanisms (which is generally true * for textual formats but not for binary formats). *<p> * Note that this setting may not necessarily make sense for all * data formats (for example, binary formats typically do not use * any escaping mechanisms; and some textual formats do not have * general-purpose escaping); if so, settings is simply ignored. * Put another way, effects of this feature are data-format specific. *<p> * Feature is disabled by default. 
* * @deprecated Since 2.10 use {@link com.fasterxml.jackson.core.json.JsonWriteFeature#ESCAPE_NON_ASCII} instead */ @Deprecated ESCAPE_NON_ASCII(false), // // Datatype coercion features /** * Feature that forces all Java numbers to be written as Strings, * even if the underlying data format has non-textual representation * (which is the case for JSON as well as all binary formats). * Default state is 'false', meaning that Java numbers are to * be serialized using basic numeric serialization (as JSON * numbers, integral or floating point, for example). * If enabled, all such numeric values are instead written out as * textual values (which for JSON means quoted in double-quotes). *<p> * One use case is to avoid problems with Javascript limitations: * since Javascript standard specifies that all number handling * should be done using 64-bit IEEE 754 floating point values, * result being that some 64-bit integer values can not be * accurately represent (as mantissa is only 51 bit wide). *<p> * Feature is disabled by default. * * @deprecated Since 2.10 use {@link com.fasterxml.jackson.core.json.JsonWriteFeature#WRITE_NUMBERS_AS_STRINGS} instead */ @Deprecated WRITE_NUMBERS_AS_STRINGS(false), /** * Feature that determines whether {@link java.math.BigDecimal} entries are * serialized using {@link java.math.BigDecimal#toPlainString()} to prevent * values to be written using scientific notation. *<p> * NOTE: only affects generators that serialize {@link java.math.BigDecimal}s * using textual representation (textual formats but potentially some binary * formats). *<p> * Feature is disabled by default, so default output mode is used; this generally * depends on how {@link BigDecimal} has been created. * * @since 2.3 */ WRITE_BIGDECIMAL_AS_PLAIN(false), // // Schema/Validity support features /** * Feature that determines whether {@link JsonGenerator} will explicitly * check that no duplicate JSON Object field names are written. 
* If enabled, generator will check all names within context and report * duplicates by throwing a {@link JsonGenerationException}; if disabled, * no such checking will be done. Assumption in latter case is * that caller takes care of not trying to write duplicate names. *<p> * Note that enabling this feature will incur performance overhead * due to having to store and check additional information. *<p> * Feature is disabled by default. * * @since 2.3 */ STRICT_DUPLICATE_DETECTION(false), /** * Feature that determines what to do if the underlying data format requires knowledge * of all properties to output, and if no definition is found for a property that * caller tries to write. If enabled, such properties will be quietly ignored; * if disabled, a {@link JsonProcessingException} will be thrown to indicate the * problem. * Typically most textual data formats do NOT require schema information (although * some do, such as CSV), whereas many binary data formats do require definitions * (such as Avro, protobuf), although not all (Smile, CBOR, BSON and MessagePack do not). *<p> * Note that support for this feature is implemented by individual data format * module, if (and only if) it makes sense for the format in question. For JSON, * for example, this feature has no effect as properties need not be pre-defined. *<p> * Feature is disabled by default, meaning that if the underlying data format * requires knowledge of all properties to output, attempts to write an unknown * property will result in a {@link JsonProcessingException} * * @since 2.5 */ IGNORE_UNKNOWN(false), ; private final boolean _defaultState; private final int _mask; /** * Method that calculates bit set (flags) of all features that * are enabled by default. 
* * @return Bit field of the features that are enabled by default */ public static int collectDefaults() { int flags = 0; for (Feature f : values()) { if (f.enabledByDefault()) { flags |= f.getMask(); } } return flags; } private Feature(boolean defaultState) { _defaultState = defaultState; _mask = (1 << ordinal()); } public boolean enabledByDefault() { return _defaultState; } // @since 2.3 public boolean enabledIn(int flags) { return (flags & _mask) != 0; } public int getMask() { return _mask; } } /* /********************************************************************** /* Configuration /********************************************************************** */ /** * Object that handles pretty-printing (usually additional * white space to make results more human-readable) during * output. If null, no pretty-printing is done. */ protected PrettyPrinter _cfgPrettyPrinter; /* /********************************************************************** /* Construction, initialization /********************************************************************** */ protected JsonGenerator() { } /** * Method that can be called to set or reset the object to * use for writing Java objects as JsonContent * (using method {@link #writeObject}). * * @param oc Codec to assign, if any; {@code null} if none * * @return This generator, to allow call chaining */ public abstract JsonGenerator setCodec(ObjectCodec oc); /** * Method for accessing the object used for writing Java * object as JSON content * (using method {@link #writeObject}). * * @return Codec assigned to this generator, if any; {@code null} if none */ public abstract ObjectCodec getCodec(); /** * Accessor for finding out version of the bundle that provided this generator instance. 
* * @return Version of this generator (derived from version declared for * {@code jackson-core} jar that contains the class */ @Override public abstract Version version(); /* /********************************************************************** /* Public API, state, output configuration access /********************************************************************** */ /** * Accessor for context object that provides information about low-level * logical position withing output token stream. * * @return Stream output context ({@link JsonStreamContext}) associated with this generator */ public abstract JsonStreamContext getOutputContext(); /** * Method that can be used to get access to object that is used * as target for generated output; this is usually either * {@link OutputStream} or {@link Writer}, depending on what * generator was constructed with. * Note that returned value may be null in some cases; including * case where implementation does not want to exposed raw * source to caller. * In cases where output has been decorated, object returned here * is the decorated version; this allows some level of interaction * between users of generator and decorator object. *<p> * In general use of this accessor should be considered as * "last effort", i.e. only used if no other mechanism is applicable. * * @return Output target this generator was configured with */ public Object getOutputTarget() { return null; } /** * Helper method, usually equivalent to: *<code> * getOutputContext().getCurrentValue(); *</code> *<p> * Note that "current value" is NOT populated (or used) by Streaming parser or generators; * it is only used by higher-level data-binding functionality. * The reason it is included here is that it can be stored and accessed hierarchically, * and gets passed through data-binding. 
     *
     * @return "Current value" associated with the current context (state) of this generator
     *
     * @since 2.13 (added as replacement for older {@link #getCurrentValue()})
     */
    public Object currentValue() {
        // TODO: implement directly in 2.14 or later, make getCurrentValue() call this
        return getCurrentValue();
    }

    /**
     * Helper method, usually equivalent to:
     *<code>
     *   getOutputContext().setCurrentValue(v);
     *</code>
     *
     * @param v Current value to assign for the current context of this generator
     *
     * @since 2.13 (added as replacement for older {@link #setCurrentValue})
     */
    public void assignCurrentValue(Object v) {
        // TODO: implement directly in 2.14 or later, make setCurrentValue() call this
        setCurrentValue(v);
    }

    // TODO: deprecate in 2.14 or later
    /**
     * Alias for {@link #currentValue()}, to be deprecated in later
     * Jackson 2.x versions (and removed from Jackson 3.0).
     *
     * @return "Current value" associated with the current output context, if any;
     *    {@code null} if none
     */
    public Object getCurrentValue() {
        JsonStreamContext ctxt = getOutputContext();
        return (ctxt == null) ? null : ctxt.getCurrentValue();
    }

    // TODO: deprecate in 2.14 or later
    /**
     * Alias for {@link #assignCurrentValue}, to be deprecated in later
     * Jackson 2.x versions (and removed from Jackson 3.0).
     *
     * @param v Current value to assign for the current context of this generator
     */
    public void setCurrentValue(Object v) {
        JsonStreamContext ctxt = getOutputContext();
        if (ctxt != null) {
            ctxt.setCurrentValue(v);
        }
    }

    /*
    /**********************************************************************
    /* Public API, Feature configuration
    /**********************************************************************
     */

    /**
     * Method for enabling specified generator feature:
     * check {@link Feature} for list of available features.
* * @param f Feature to enable * * @return This generator, to allow call chaining */ public abstract JsonGenerator enable(Feature f); /** * Method for disabling specified feature * (check {@link Feature} for list of features) * * @param f Feature to disable * * @return This generator, to allow call chaining */ public abstract JsonGenerator disable(Feature f); /** * Method for enabling or disabling specified feature: * check {@link Feature} for list of available features. * * @param f Feature to enable or disable * @param state Whether to enable ({@code true}) or disable ({@code false}) feature * * @return This generator, to allow call chaining */ public final JsonGenerator configure(Feature f, boolean state) { if (state) enable(f); else disable(f); return this; } /** * Method for checking whether given feature is enabled. * Check {@link Feature} for list of available features. * * @param f Feature to check * * @return True if specified feature is enabled; false if not */ public abstract boolean isEnabled(Feature f); /** * Method for checking whether given feature is enabled. * Check {@link Feature} for list of available features. * * @param f Feature to check * * @return True if specified feature is enabled; false if not * * @since 2.10 */ public boolean isEnabled(StreamWriteFeature f) { return isEnabled(f.mappedFeature()); } /** * Bulk access method for getting state of all standard (non-dataformat-specific) * {@link JsonGenerator.Feature}s. * * @return Bit mask that defines current states of all standard {@link JsonGenerator.Feature}s. 
* * @since 2.3 */ public abstract int getFeatureMask(); /** * Bulk set method for (re)setting states of all standard {@link Feature}s * * @since 2.3 * * @param values Bitmask that defines which {@link Feature}s are enabled * and which disabled * * @return This generator, to allow call chaining * * @deprecated Since 2.7, use {@link #overrideStdFeatures(int, int)} instead -- remove from 2.9 */ @Deprecated public abstract JsonGenerator setFeatureMask(int values); /** * Bulk set method for (re)setting states of features specified by <code>mask</code>. * Functionally equivalent to *<code> * int oldState = getFeatureMask(); * int newState = (oldState &amp; ~mask) | (values &amp; mask); * setFeatureMask(newState); *</code> * but preferred as this lets caller more efficiently specify actual changes made. * * @param values Bit mask of set/clear state for features to change * @param mask Bit mask of features to change * * @return This generator, to allow call chaining * * @since 2.6 */ public JsonGenerator overrideStdFeatures(int values, int mask) { int oldState = getFeatureMask(); int newState = (oldState & ~mask) | (values & mask); return setFeatureMask(newState); } /** * Bulk access method for getting state of all {@link FormatFeature}s, format-specific * on/off configuration settings. * * @return Bit mask that defines current states of all standard {@link FormatFeature}s. * * @since 2.6 */ public int getFormatFeatures() { return 0; } /** * Bulk set method for (re)setting states of {@link FormatFeature}s, * by specifying values (set / clear) along with a mask, to determine * which features to change, if any. *<p> * Default implementation will simply throw an exception to indicate that * the generator implementation does not support any {@link FormatFeature}s. 
     *
     * @param values Bit mask of set/clear state for features to change
     * @param mask Bit mask of features to change
     *
     * @return This generator, to allow call chaining
     *
     * @since 2.6
     */
    public JsonGenerator overrideFormatFeatures(int values, int mask) {
        // 08-Oct-2018, tatu: For 2.10 we actually do get `JsonWriteFeature`s, although they
        //    are (for 2.x only, not for 3.x) mapped to legacy settings. So do not freak out:
//        throw new IllegalArgumentException("No FormatFeatures defined for generator of type "+getClass().getName());
        return this;
    }

    /*
    /**********************************************************************
    /* Public API, Schema configuration
    /**********************************************************************
     */

    /**
     * Method to call to make this generator use specified schema.
     * Method must be called before generating any content, right after instance
     * has been created.
     * Note that not all generators support schemas; and those that do usually only
     * accept specific types of schemas: ones defined for data format this generator
     * produces.
     *<p>
     * If generator does not support specified schema, {@link UnsupportedOperationException}
     * is thrown.
     *
     * @param schema Schema to use
     *
     * @throws UnsupportedOperationException if generator does not support schema
     */
    public void setSchema(FormatSchema schema) {
        // Default: no schema support at all; sub-classes that do support
        // schemas must override
        throw new UnsupportedOperationException(String.format(
                "Generator of type %s does not support schema of type '%s'",
                getClass().getName(), schema.getSchemaType()));
    }

    /**
     * Method for accessing Schema that this generator uses, if any; {@code null} if none.
     * Default implementation returns null.
     *
     * @return Schema in use by this generator, if any; {@code null} if none
     */
    public FormatSchema getSchema() { return null; }

    /*
    /**********************************************************************
    /* Public API, other configuration
    /**********************************************************************
     */

    /**
     * Method for setting a custom pretty printer, which is usually
     * used to add indentation for improved human readability.
     * By default, generator does not do pretty printing.
     *<p>
     * To use the default pretty printer that comes with core
     * Jackson distribution, call {@link #useDefaultPrettyPrinter}
     * instead.
     *
     * @param pp {@code PrettyPrinter} to assign, if any; {@code null} if none
     *
     * @return This generator, to allow call chaining
     */
    public JsonGenerator setPrettyPrinter(PrettyPrinter pp) {
        _cfgPrettyPrinter = pp;
        return this;
    }

    /**
     * Accessor for checking whether this generator has a configured
     * {@link PrettyPrinter}; returns it if so, null if none configured.
     *
     * @return {@link PrettyPrinter} configured for this generator, if any; {@code null} if none
     */
    public PrettyPrinter getPrettyPrinter() {
        return _cfgPrettyPrinter;
    }

    /**
     * Convenience method for enabling pretty-printing using
     * the default pretty printer
     * ({@link com.fasterxml.jackson.core.util.DefaultPrettyPrinter}).
     *
     * @return This generator, to allow call chaining
     */
    public abstract JsonGenerator useDefaultPrettyPrinter();

    /**
     * Method that can be called to request that generator escapes
     * all character codes above specified code point (if positive value);
     * or, to not escape any characters except for ones that must be
     * escaped for the data format (if -1).
     * To force escaping of all non-ASCII characters, for example,
     * this method would be called with value of 127.
     *<p>
     * Note that generators are NOT required to support setting of value
     * higher than 127, because there are other ways to affect quoting
     * (or lack thereof) of character codes between 0 and 127.
     * Not all generators support concept of escaping, either; if so,
     * calling this method will have no effect.
     *<p>
     * Default implementation does nothing; sub-classes need to redefine
     * it according to rules of supported data format.
     *
     * @param charCode Either -1 to indicate that no additional escaping
     *   is to be done; or highest code point not to escape (meaning higher
     *   ones will be), if positive value.
     *
     * @return This generator, to allow call chaining
     */
    public JsonGenerator setHighestNonEscapedChar(int charCode) {
        // no-op by default; escaping-capable sub-classes override
        return this;
    }

    /**
     * Accessor method for testing what is the highest unescaped character
     * configured for this generator. This may be either positive value
     * (when escaping configuration has been set and is in effect), or
     * 0 to indicate that no additional escaping is in effect.
     * Some generators may not support additional escaping: for example,
     * generators for binary formats that do not use escaping should
     * simply return 0.
     *<p>
     * NOTE(review): method name reads as "highest escaped" but it actually
     * returns the highest NON-escaped code point (the value configured via
     * {@link #setHighestNonEscapedChar}); name kept for API compatibility.
     *
     * @return Currently active limitation for highest non-escaped character,
     *   if defined; or 0 to indicate no additional escaping is performed.
     */
    public int getHighestEscapedChar() {
        return 0;
    }

    /**
     * Method for accessing custom escapes factory uses for {@link JsonGenerator}s
     * it creates.
     *
     * @return {@link CharacterEscapes} configured for this generator, if any; {@code null} if none
     */
    public CharacterEscapes getCharacterEscapes() {
        return null;
    }

    /**
     * Method for defining custom escapes factory uses for {@link JsonGenerator}s
     * it creates.
     *<p>
     * Default implementation does nothing and simply returns this instance.
     *
     * @param esc {@link CharacterEscapes} to configure this generator to use, if any; {@code null} if none
     *
     * @return This generator, to allow call chaining
     */
    public JsonGenerator setCharacterEscapes(CharacterEscapes esc) {
        // no-op by default; sub-classes that support custom escapes override
        return this;
    }

    /**
     * Method that allows overriding String used for separating root-level
     * JSON values (default is single space character)
     *<p>
     * Default implementation throws {@link UnsupportedOperationException}.
     *
     * @param sep Separator to use, if any; null means that no separator is
     *   automatically added
     *
     * @return This generator, to allow call chaining
     */
    public JsonGenerator setRootValueSeparator(SerializableString sep) {
        throw new UnsupportedOperationException();
    }

    /*
    /**********************************************************************
    /* Public API, output state access
    /**********************************************************************
     */

    /**
     * Method for verifying amount of content that is buffered by generator
     * but not yet flushed to the underlying target (stream, writer),
     * in units (byte, char) that the generator implementation uses for buffering;
     * or -1 if this information is not available.
     * Unit used is often the same as the unit of underlying target (that is,
     * `byte` for {@link java.io.OutputStream}, `char` for {@link java.io.Writer}),
     * but may differ if buffering is done before encoding.
     * Default JSON-backed implementations do use matching units.
     *<p>
     * Note: non-JSON implementations will be retrofitted for 2.6 and beyond;
     * please report if you see -1 (missing override)
     *
     * @return Amount of content buffered in internal units, if amount known and
     *   accessible; -1 if not accessible.
     *
     * @since 2.6
     */
    public int getOutputBuffered() {
        // -1 means "not known/accessible"; buffering sub-classes override
        return -1;
    }

    /*
    /**********************************************************************
    /* Public API, capability introspection methods
    /**********************************************************************
     */

    /**
     * Method that can be used to verify that given schema can be used with
     * this generator (using {@link #setSchema}).
     *
     * @param schema Schema to check
     *
     * @return True if this generator can use given schema; false if not
     */
    public boolean canUseSchema(FormatSchema schema) {
        return false;
    }

    /**
     * Introspection method that may be called to see if the underlying
     * data format supports some kind of Object Ids natively (many do not;
     * for example, JSON doesn't).
     * This method <b>must</b> be called prior to calling
     * {@link #writeObjectId} or {@link #writeObjectRef}.
     *<p>
     * Default implementation returns false; overridden by data formats
     * that do support native Object Ids. Caller is expected to either
     * use a non-native notation (explicit property or such), or fail,
     * in case it can not use native object ids.
     *
     * @return {@code True} if this generator is capable of writing "native" Object Ids
     *   (which is typically determined by capabilities of the underlying format),
     *   {@code false} if not
     *
     * @since 2.3
     */
    public boolean canWriteObjectId() {
        return false;
    }

    /**
     * Introspection method that may be called to see if the underlying
     * data format supports some kind of Type Ids natively (many do not;
     * for example, JSON doesn't).
     * This method <b>must</b> be called prior to calling
     * {@link #writeTypeId}.
     *<p>
     * Default implementation returns false; overridden by data formats
     * that do support native Type Ids. Caller is expected to either
     * use a non-native notation (explicit property or such), or fail,
     * in case it can not use native type ids.
     *
     * @return {@code True} if this generator is capable of writing "native" Type Ids
     *   (which is typically determined by capabilities of the underlying format),
     *   {@code false} if not
     *
     * @since 2.3
     */
    public boolean canWriteTypeId() {
        return false;
    }

    /**
     * Introspection method that may be called to see if the underlying
     * data format supports "native" binary data; that is, an efficient
     * output of binary content without encoding.
     *<p>
     * Default implementation returns false; overridden by data formats
     * that do support native binary content.
     *
     * @return {@code True} if this generator is capable of writing "raw" Binary
     *   Content
     *   (this is typically determined by capabilities of the underlying format);
     *   {@code false} if not
     *
     * @since 2.3
     */
    public boolean canWriteBinaryNatively() {
        return false;
    }

    /**
     * Introspection method to call to check whether it is ok to omit
     * writing of Object fields or not. Most formats do allow omission,
     * but certain positional formats (such as CSV) require output of
     * placeholders, even if no real values are to be emitted.
     *
     * @return {@code True} if this generator is allowed to only write values
     *   of some Object fields and omit the rest; {@code false} if not
     *
     * @since 2.3
     */
    public boolean canOmitFields() {
        // Default is permissive: most formats allow omitting fields
        return true;
    }

    /**
     * Introspection method to call to check whether it is possible
     * to write numbers using {@link #writeNumber(java.lang.String)}
     * using possible custom format, or not. Typically textual formats
     * allow this (and JSON specifically does), whereas binary formats
     * do not allow this (except by writing them as Strings).
     * Usual reason for calling this method is to check whether custom
     * formatting of numbers may be applied by higher-level code (databinding)
     * or not.
     *
     * @return {@code True} if this generator is capable of writing "formatted"
     *   numbers (and if so, need to be passed using
     *   {@link #writeNumber(String)}, that is, passed as {@code String});
     *   {@code false} if not
     *
     * @since 2.8
     */
    public boolean canWriteFormattedNumbers() {
        return false;
    }

    /**
     * Accessor for getting metadata on capabilities of this generator, based on
     * underlying data format being read (directly or indirectly).
     *
     * @return Set of write capabilities for content written using this generator
     *
     * @since 2.12
     */
    public JacksonFeatureSet<StreamWriteCapability> getWriteCapabilities() {
        return DEFAULT_WRITE_CAPABILITIES;
    }

    /*
    /**********************************************************************
    /* Public API, write methods, structural
    /**********************************************************************
     */

    /**
     * Method for writing starting marker of a Array value
     * (for JSON this is character '['; plus possible white space decoration
     * if pretty-printing is enabled).
     *<p>
     * Array values can be written in any context where values
     * are allowed: meaning everywhere except for when
     * a field name is expected.
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeStartArray() throws IOException;

    /**
     * Method for writing start marker of an Array value, similar
     * to {@link #writeStartArray()},
     * but also specifying how many
     * elements will be written for the array before calling
     * {@link #writeEndArray()}.
     *<p>
     * Default implementation simply calls {@link #writeStartArray()}.
     *
     * @param size Number of elements this array will have: actual
     *   number of values written (before matching call to
     *   {@link #writeEndArray()} MUST match; generator MAY verify
     *   this is the case (and SHOULD if format itself encodes length)
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     *
     * @since 2.4
     *
     * @deprecated Since 2.12 Use {@link #writeStartArray(Object, int)} instead
     */
    @Deprecated
    public void writeStartArray(int size) throws IOException {
        // size hint intentionally ignored by default implementation; formats
        // that encode length override
        writeStartArray();
    }

    /**
     * Method for writing start marker of an Array value, similar
     * to {@link #writeStartArray()},
     * but also specifying the "current value"
     * to assign to the new Array context being created.
     *
     * @param forValue "Current value" to assign for the Array context being created
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     *
     * @since 2.10
     */
    public void writeStartArray(Object forValue) throws IOException {
        writeStartArray();
        // assign "current value" to the Array context just created above
        setCurrentValue(forValue);
    }

    /**
     * Method for writing start marker of an Array value, similar
     * to {@link #writeStartArray()}, but also specifying the "current value"
     * to assign to the new Array context being created
     * as well as how many elements will be written for the array before calling
     * {@link #writeEndArray()}.
     *
     * @param forValue "Current value" to assign for the Array context being created
     * @param size Number of elements this Array will have: actual
     *   number of values written (before matching call to
     *   {@link #writeEndArray()} MUST match; generator MAY verify
     *   this is the case (and SHOULD if format itself encodes length)
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     *
     * @since 2.10
     */
    public void writeStartArray(Object forValue, int size) throws IOException {
        // delegates size handling to (possibly overridden) single-int variant
        writeStartArray(size);
        setCurrentValue(forValue);
    }

    /**
     * Method for writing closing marker of a JSON Array value
     * (character ']'; plus possible white space decoration
     * if pretty-printing is enabled).
     *<p>
     * Marker can be written if the innermost structured type
     * is Array.
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeEndArray() throws IOException;

    /**
     * Method for writing starting marker of an Object value
     * (character '{'; plus possible white space decoration
     * if pretty-printing is enabled).
     *<p>
     * Object values can be written in any context where values
     * are allowed: meaning everywhere except for when
     * a field name is expected.
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeStartObject() throws IOException;

    /**
     * Method for writing starting marker of an Object value
     * to represent the given Java Object value.
     * Argument is offered as metadata, but more
     * importantly it should be assigned as the "current value"
     * for the Object content that gets constructed and initialized.
     *<p>
     * Object values can be written in any context where values
     * are allowed: meaning everywhere except for when
     * a field name is expected.
     *
     * @param forValue "Current value" to assign for the Object context being created
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     *
     * @since 2.8
     */
    public void writeStartObject(Object forValue) throws IOException {
        writeStartObject();
        // assign "current value" to the Object context just created above
        setCurrentValue(forValue);
    }

    /**
     * Method for writing starting marker of an Object value
     * to represent the given Java Object value.
     * Argument is offered as metadata, but more
     * importantly it should be assigned as the "current value"
     * for the Object content that gets constructed and initialized.
     * In addition, caller knows number of key/value pairs ("properties")
     * that will get written for the Object value: this is relevant for
     * some format backends (but not, as an example, for JSON).
     *<p>
     * Object values can be written in any context where values
     * are allowed: meaning everywhere except for when
     * a field name is expected.
     *
     * @param forValue "Current value" to assign for the Object context being created
     * @param size Number of key/value pairs this Object will have: actual
     *   number of entries written (before matching call to
     *   {@link #writeEndObject()} MUST match; generator MAY verify
     *   this is the case (and SHOULD if format itself encodes length)
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     *
     * @since 2.10
     */
    public void writeStartObject(Object forValue, int size) throws IOException {
        // NOTE: size hint intentionally ignored by default implementation;
        // formats that encode entry counts override this method
        writeStartObject();
        setCurrentValue(forValue);
    }

    /**
     * Method for writing closing marker of an Object value
     * (character '}'; plus possible white space decoration
     * if pretty-printing is enabled).
     *<p>
     * Marker can be written if the innermost structured type
     * is Object, and the last written event was either a
     * complete value, or START-OBJECT marker (see JSON specification
     * for more details).
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeEndObject() throws IOException;

    /**
     * Method for writing a field name (JSON String surrounded by
     * double quotes: syntactically identical to a JSON String value),
     * possibly decorated by white space if pretty-printing is enabled.
     *<p>
     * Field names can only be written in Object context (check out
     * JSON specification for details), when field name is expected
     * (field names alternate with values).
     *
     * @param name Field name to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeFieldName(String name) throws IOException;

    /**
     * Method similar to {@link #writeFieldName(String)}, main difference
     * being that it may perform better as some of processing (such as
     * quoting of certain characters, or encoding into external encoding
     * if supported by generator) can be done just once and reused for
     * later calls.
     *<p>
     * Default implementation simply uses unprocessed name container in
     * serialized String; implementations are strongly encouraged to make
     * use of more efficient methods argument object has.
     *
     * @param name Field name to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeFieldName(SerializableString name) throws IOException;

    /**
     * Alternative to {@link #writeFieldName(String)} that may be used
     * in cases where property key is of numeric type; either where
     * underlying format supports such notion (some binary formats do,
     * unlike JSON), or for convenient conversion into String presentation.
     * Default implementation will simply convert id into <code>String</code>
     * and call {@link #writeFieldName(String)}.
* * @param id Field id to write * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer * * @since 2.8 */ public void writeFieldId(long id) throws IOException { writeFieldName(Long.toString(id)); } /* /********************************************************************** /* Public API, write methods, scalar arrays (2.8) /********************************************************************** */ /** * Value write method that can be called to write a single * array (sequence of {@link JsonToken#START_ARRAY}, zero or * more {@link JsonToken#VALUE_NUMBER_INT}, {@link JsonToken#END_ARRAY}) * * @param array Array that contains values to write * @param offset Offset of the first element to write, within array * @param length Number of elements in array to write, from `offset` to `offset + len - 1` * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer * * @since 2.8 */ public void writeArray(int[] array, int offset, int length) throws IOException { if (array == null) { throw new IllegalArgumentException("null array"); } _verifyOffsets(array.length, offset, length); writeStartArray(array, length); for (int i = offset, end = offset+length; i < end; ++i) { writeNumber(array[i]); } writeEndArray(); } /** * Value write method that can be called to write a single * array (sequence of {@link JsonToken#START_ARRAY}, zero or * more {@link JsonToken#VALUE_NUMBER_INT}, {@link JsonToken#END_ARRAY}) * * @param array Array that contains values to write * @param offset Offset of the first element to write, within array * @param length Number of elements in array to write, from `offset` to `offset + len - 1` * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer * * @since 2.8 */ public void writeArray(long[] array, int offset, int length) throws IOException { if (array == null) { throw new IllegalArgumentException("null array"); } 
_verifyOffsets(array.length, offset, length); writeStartArray(array, length); for (int i = offset, end = offset+length; i < end; ++i) { writeNumber(array[i]); } writeEndArray(); } /** * Value write method that can be called to write a single * array (sequence of {@link JsonToken#START_ARRAY}, zero or * more {@link JsonToken#VALUE_NUMBER_FLOAT}, {@link JsonToken#END_ARRAY}) * * @param array Array that contains values to write * @param offset Offset of the first element to write, within array * @param length Number of elements in array to write, from `offset` to `offset + len - 1` * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer * * @since 2.8 */ public void writeArray(double[] array, int offset, int length) throws IOException { if (array == null) { throw new IllegalArgumentException("null array"); } _verifyOffsets(array.length, offset, length); writeStartArray(array, length); for (int i = offset, end = offset+length; i < end; ++i) { writeNumber(array[i]); } writeEndArray(); } /** * Value write method that can be called to write a single * array (sequence of {@link JsonToken#START_ARRAY}, zero or * more {@link JsonToken#VALUE_STRING}, {@link JsonToken#END_ARRAY}) * * @param array Array that contains values to write * @param offset Offset of the first element to write, within array * @param length Number of elements in array to write, from `offset` to `offset + len - 1` * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer * * @since 2.11 */ public void writeArray(String[] array, int offset, int length) throws IOException { if (array == null) { throw new IllegalArgumentException("null array"); } _verifyOffsets(array.length, offset, length); writeStartArray(array, length); for (int i = offset, end = offset+length; i < end; ++i) { writeString(array[i]); } writeEndArray(); } /* /********************************************************************** /* Public API, 
write methods, text/String values
    /**********************************************************************
     */

    /**
     * Method for outputting a String value. Depending on context
     * this means either array element, (object) field value or
     * a stand alone String; but in all cases, String will be
     * surrounded in double quotes, and contents will be properly
     * escaped as required by JSON specification.
     *
     * @param text Text value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeString(String text) throws IOException;

    /**
     * Method for outputting a String value. Depending on context
     * this means either array element, (object) field value or
     * a stand alone String; but in all cases, String will be
     * surrounded in double quotes, and contents will be properly
     * escaped as required by JSON specification.
     * If {@code len} is &lt; 0, then write all contents of the reader.
     * Otherwise, write only len characters.
     *<p>
     * Note: actual length of content available may exceed {@code len} but
     * can not be less than it: if not enough content available, a
     * {@link JsonGenerationException} will be thrown.
     *
     * @param reader Reader to use for reading Text value to write
     * @param len Maximum Length of Text value to read (in {@code char}s, non-negative)
     *   if known; {@code -1} to indicate "read and write it all"
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer; or if length ({@code len}) is specified but
     *   {@code reader} does not provide enough content
     *
     * @since 2.9
     */
    public void writeString(Reader reader, int len) throws IOException {
        // Implemented as "unsupported" for backwards compatibility
        _reportUnsupportedOperation();
    }

    /**
     * Method for outputting a String value. Depending on context
     * this means either array element, (object) field value or
     * a stand alone String; but in all cases, String will be
     * surrounded in double quotes, and contents will be properly
     * escaped as required by JSON specification.
     *
     * @param buffer Buffer that contains String value to write
     * @param offset Offset in {@code buffer} of the first character of String value to write
     * @param len Length of the String value (in characters) to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeString(char[] buffer, int offset, int len) throws IOException;

    /**
     * Method similar to {@link #writeString(String)}, but that takes
     * {@link SerializableString} which can make this potentially
     * more efficient to call as generator may be able to reuse
     * quoted and/or encoded representation.
     *<p>
     * Default implementation just calls {@link #writeString(String)};
     * sub-classes should override it with more efficient implementation
     * if possible.
     *
     * @param text Pre-encoded String value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeString(SerializableString text) throws IOException;

    /**
     * Method similar to {@link #writeString(String)} but that takes as
     * its input a UTF-8 encoded String that is to be output as-is, without additional
     * escaping (type of which depends on data format; backslashes for JSON).
     * However, quoting that data format requires (like double-quotes for JSON) will be added
     * around the value if and as necessary.
     *<p>
     * Note that some backends may choose not to support this method: for
     * example, if underlying destination is a {@link java.io.Writer}
     * using this method would require UTF-8 decoding.
     * If so, implementation may instead choose to throw a
     * {@link UnsupportedOperationException} due to ineffectiveness
     * of having to decode input.
     *
     * @param buffer Buffer that contains String value to write
     * @param offset Offset in {@code buffer} of the first byte of String value to write
     * @param len Length of the String value (in characters) to write
     *   (NOTE(review): since {@code buffer} is UTF-8 encoded bytes, length is
     *   presumably in bytes, not characters -- confirm against implementations)
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *   issue at format layer
     */
    public abstract void writeRawUTF8String(byte[] buffer, int offset, int len) throws IOException;

    /**
     * Method similar to {@link #writeString(String)} but that takes as its input
     * a UTF-8 encoded String which has <b>not</b> been escaped using whatever
     * escaping scheme data format requires (for JSON that is backslash-escaping
     * for control characters and double-quotes; for other formats something else).
     * This means that textual JSON backends need to check if value needs
     * JSON escaping, but otherwise can just be copied as is to output.
     * Also, quoting that data format requires (like double-quotes for JSON) will be added
     * around the value if and as necessary.
     *<p>
     * Note that some backends may choose not to support this method: for
     * example, if underlying destination is a {@link java.io.Writer}
     * using this method would require UTF-8 decoding.
     * In this case
     * generator implementation may instead choose to throw a
     * {@link UnsupportedOperationException} due to ineffectiveness
     * of having to decode input.
* * @param buffer Buffer that contains String value to write * @param offset Offset in {@code buffer} of the first byte of String value to write * @param len Length of the String value (in characters) to write * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer */ public abstract void writeUTF8String(byte[] buffer, int offset, int len) throws IOException; /* /********************************************************************** /* Public API, write methods, binary/raw content /********************************************************************** */ /** * Method that will force generator to copy * input text verbatim with <b>no</b> modifications (including * that no escaping is done and no separators are added even * if context [array, object] would otherwise require such). * If such separators are desired, use * {@link #writeRawValue(String)} instead. *<p> * Note that not all generator implementations necessarily support * such by-pass methods: those that do not will throw * {@link UnsupportedOperationException}. * * @param text Textual contents to include as-is in output. * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer */ public abstract void writeRaw(String text) throws IOException; /** * Method that will force generator to copy * input text verbatim with <b>no</b> modifications (including * that no escaping is done and no separators are added even * if context [array, object] would otherwise require such). * If such separators are desired, use * {@link #writeRawValue(String)} instead. *<p> * Note that not all generator implementations necessarily support * such by-pass methods: those that do not will throw * {@link UnsupportedOperationException}. 
* * @param text String that has contents to include as-is in output * @param offset Offset within {@code text} of the first character to output * @param len Length of content (from {@code text}, starting at offset {@code offset}) to output * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer */ public abstract void writeRaw(String text, int offset, int len) throws IOException; /** * Method that will force generator to copy * input text verbatim with <b>no</b> modifications (including * that no escaping is done and no separators are added even * if context [array, object] would otherwise require such). * If such separators are desired, use * {@link #writeRawValue(String)} instead. *<p> * Note that not all generator implementations necessarily support * such by-pass methods: those that do not will throw * {@link UnsupportedOperationException}. * * @param text Buffer that has contents to include as-is in output * @param offset Offset within {@code text} of the first character to output * @param len Length of content (from {@code text}, starting at offset {@code offset}) to output * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer */ public abstract void writeRaw(char[] text, int offset, int len) throws IOException; /** * Method that will force generator to copy * input text verbatim with <b>no</b> modifications (including * that no escaping is done and no separators are added even * if context [array, object] would otherwise require such). * If such separators are desired, use * {@link #writeRawValue(String)} instead. *<p> * Note that not all generator implementations necessarily support * such by-pass methods: those that do not will throw * {@link UnsupportedOperationException}. 
 *
     * @param c Character to include in output
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeRaw(char c) throws IOException;

    /**
     * Method that will force generator to copy
     * input text verbatim with <b>no</b> modifications (including
     * that no escaping is done and no separators are added even
     * if context [array, object] would otherwise require such).
     * If such separators are desired, use
     * {@link #writeRawValue(String)} instead.
     *<p>
     * Note that not all generator implementations necessarily support
     * such by-pass methods: those that do not will throw
     * {@link UnsupportedOperationException}.
     *<p>
     * The default implementation delegates to {@link #writeRaw(String)};
     * other backends that support raw inclusion of text are encouraged
     * to implement it in more efficient manner (especially if they
     * use UTF-8 encoding).
     *
     * @param raw Pre-encoded textual contents to include in output
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeRaw(SerializableString raw) throws IOException {
        writeRaw(raw.getValue());
    }

    /**
     * Method that will force generator to copy
     * input text verbatim without any modifications, but assuming
     * it must constitute a single legal JSON value (number, string,
     * boolean, null, Array or List). Assuming this, proper separators
     * are added if and as needed (comma or colon), and generator
     * state updated to reflect this.
     *
     * @param text Textual contents to include in output
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeRawValue(String text) throws IOException;

    public abstract void writeRawValue(String text, int offset, int len) throws IOException;

    public abstract void writeRawValue(char[] text, int offset, int len) throws IOException;

    /**
     * Method similar to {@link #writeRawValue(String)}, but potentially more
     * efficient as it may be able to use pre-encoded content (similar to
     * {@link #writeRaw(SerializableString)}).
     *
     * @param raw Pre-encoded textual contents to include in output
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     *
     * @since 2.5
     */
    public void writeRawValue(SerializableString raw) throws IOException {
        writeRawValue(raw.getValue());
    }

    /**
     * Method that will output given chunk of binary data as base64
     * encoded, as a complete String value (surrounded by double quotes).
     *<p>
     * Note: because JSON Strings can not contain unescaped linefeeds,
     * if linefeeds are included (as per last argument), they must be
     * escaped. This adds overhead for decoding without improving
     * readability.
     * Alternatively if linefeeds are not included,
     * resulting String value may violate the requirement of base64
     * RFC which mandates line-length of 76 characters and use of
     * linefeeds. However, all {@link JsonParser} implementations
     * are required to accept such "long line base64"; as do
     * typical production-level base64 decoders.
     *
     * @param bv Base64 variant to use: defines details such as
     *   whether padding is used (and if so, using which character);
     *   what is the maximum line length before adding linefeed,
     *   and also the underlying alphabet to use.
     * @param data Buffer that contains binary data to write
     * @param offset Offset in {@code data} of the first byte of data to write
     * @param len Length of data to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeBinary(Base64Variant bv,
            byte[] data, int offset, int len) throws IOException;

    /**
     * Similar to {@link #writeBinary(Base64Variant,byte[],int,int)},
     * but defaults to using the Jackson default Base64 variant
     * (which is {@link Base64Variants#MIME_NO_LINEFEEDS}).
     *
     * @param data Buffer that contains binary data to write
     * @param offset Offset in {@code data} of the first byte of data to write
     * @param len Length of data to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeBinary(byte[] data, int offset, int len) throws IOException {
        writeBinary(Base64Variants.getDefaultVariant(), data, offset, len);
    }

    /**
     * Similar to {@link #writeBinary(Base64Variant,byte[],int,int)},
     * but defaults to using the Jackson default Base64 variant
     * (which is {@link Base64Variants#MIME_NO_LINEFEEDS}). Also
     * assumes that whole byte array is to be output.
     *
     * @param data Buffer that contains binary data to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeBinary(byte[] data) throws IOException {
        writeBinary(Base64Variants.getDefaultVariant(), data, 0, data.length);
    }

    /**
     * Similar to {@link #writeBinary(Base64Variant,InputStream,int)},
     * but defaults to using the Jackson default Base64 variant
     * (which is {@link Base64Variants#MIME_NO_LINEFEEDS}).
     *
     * @param data InputStream to use for reading binary data to write.
     *    Will not be closed after successful write operation
     * @param dataLength (optional) number of bytes that will be available;
     *    or -1 to indicate it is not known. Note that implementations
     *    need not support cases where length is not known in advance; this
     *    depends on underlying data format: JSON output does NOT require length,
     *    other formats may
     *
     * @return Number of bytes actually written
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public int writeBinary(InputStream data, int dataLength) throws IOException {
        return writeBinary(Base64Variants.getDefaultVariant(), data, dataLength);
    }

    /**
     * Method similar to {@link #writeBinary(Base64Variant,byte[],int,int)},
     * but where input is provided through a stream, allowing for incremental
     * writes without holding the whole input in memory.
     *
     * @param bv Base64 variant to use
     * @param data InputStream to use for reading binary data to write.
     *    Will not be closed after successful write operation
     * @param dataLength (optional) number of bytes that will be available;
     *    or -1 to indicate it is not known.
     *    If a positive length is given, <code>data</code> MUST provide at least
     *    that many bytes: if not, an exception will be thrown.
     *    Note that implementations
     *    need not support cases where length is not known in advance; this
     *    depends on underlying data format: JSON output does NOT require length,
     *    other formats may.
     *
     * @return Number of bytes read from <code>data</code> and written as binary payload
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract int writeBinary(Base64Variant bv,
            InputStream data, int dataLength) throws IOException;

    /*
    /**********************************************************************
    /* Public API, write methods, numeric
    /**********************************************************************
     */

    /**
     * Method for outputting given value as JSON number.
     * Can be called in any context where a value is expected
     * (Array value, Object field value, root-level value).
 * Additional white space may be added around the value
     * if pretty-printing is enabled.
     *
     * @param v Number value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     *
     * @since 2.2
     */
    public void writeNumber(short v) throws IOException { writeNumber((int) v); }

    /**
     * Method for outputting given value as JSON number.
     * Can be called in any context where a value is expected
     * (Array value, Object field value, root-level value).
     * Additional white space may be added around the value
     * if pretty-printing is enabled.
     *
     * @param v Number value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeNumber(int v) throws IOException;

    /**
     * Method for outputting given value as JSON number.
     * Can be called in any context where a value is expected
     * (Array value, Object field value, root-level value).
     * Additional white space may be added around the value
     * if pretty-printing is enabled.
     *
     * @param v Number value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeNumber(long v) throws IOException;

    /**
     * Method for outputting given value as JSON number.
     * Can be called in any context where a value is expected
     * (Array value, Object field value, root-level value).
     * Additional white space may be added around the value
     * if pretty-printing is enabled.
     *
     * @param v Number value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeNumber(BigInteger v) throws IOException;

    /**
     * Method for outputting indicated JSON numeric value.
     * Can be called in any context where a value is expected
     * (Array value, Object field value, root-level value).
     * Additional white space may be added around the value
     * if pretty-printing is enabled.
     *
     * @param v Number value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeNumber(double v) throws IOException;

    /**
     * Method for outputting indicated JSON numeric value.
     * Can be called in any context where a value is expected
     * (Array value, Object field value, root-level value).
     * Additional white space may be added around the value
     * if pretty-printing is enabled.
     *
     * @param v Number value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeNumber(float v) throws IOException;

    /**
     * Method for outputting indicated JSON numeric value.
     * Can be called in any context where a value is expected
     * (Array value, Object field value, root-level value).
     * Additional white space may be added around the value
     * if pretty-printing is enabled.
     *
     * @param v Number value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeNumber(BigDecimal v) throws IOException;

    /**
     * Write method that can be used for custom numeric types that can
     * not be (easily?) converted to "standard" Java number types.
     * Because numbers are not surrounded by double quotes, regular
     * {@link #writeString} method can not be used; nor
     * {@link #writeRaw} because that does not properly handle
     * value separators needed in Array or Object contexts.
     *<p>
     * Note: because of lack of type safety, some generator
     * implementations may not be able to implement this
     * method. For example, if a binary JSON format is used,
     * it may require type information for encoding; similarly
     * for generator-wrappers around Java objects or JSON nodes.
     * If implementation does not implement this method,
     * it needs to throw {@link UnsupportedOperationException}.
     *
     * @param encodedValue Textual (possibly formatted) number representation to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     * @throws UnsupportedOperationException If underlying data format does not
     *    support numbers serialized textually AND if generator is not allowed
     *    to just output a String instead (Schema-based formats may require actual
     *    number, for example)
     */
    public abstract void writeNumber(String encodedValue) throws IOException;

    /**
     * Overloaded version of {@link #writeNumber(String)} with same semantics
     * but possibly more efficient operation.
     *
     * @param encodedValueBuffer Buffer that contains the textual number representation to write
     * @param offset Offset of the first character of value to write
     * @param len Length of the value (in characters) to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     *
     * @since 2.11
     */
    public void writeNumber(char[] encodedValueBuffer, int offset, int len) throws IOException {
        writeNumber(new String(encodedValueBuffer, offset, len));
    }

    /*
    /**********************************************************************
    /* Public API, write methods, other value types
    /**********************************************************************
     */

    /**
     * Method for outputting literal JSON boolean value (one of
     * Strings 'true' and 'false').
     * Can be called in any context where a value is expected
     * (Array value, Object field value, root-level value).
     * Additional white space may be added around the value
     * if pretty-printing is enabled.
     *
     * @param state Boolean value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeBoolean(boolean state) throws IOException;

    /**
     * Method for outputting literal JSON null value.
     * Can be called in any context where a value is expected
     * (Array value, Object field value, root-level value).
 * Additional white space may be added around the value
     * if pretty-printing is enabled.
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeNull() throws IOException;

    /**
     * Method that can be called on backends that support passing opaque native
     * values that some data formats support; not used with JSON backend,
     * more common with binary formats.
     *<p>
     * NOTE: this is NOT the method to call for serializing regular POJOs,
     * see {@link #writeObject} instead.
     *
     * @param object Native format-specific value to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     *
     * @since 2.8
     */
    public void writeEmbeddedObject(Object object) throws IOException {
        // 01-Sep-2016, tatu: As per [core#318], handle small number of cases
        if (object == null) {
            writeNull();
            return;
        }
        if (object instanceof byte[]) {
            writeBinary((byte[]) object);
            return;
        }
        throw new JsonGenerationException("No native support for writing embedded objects of type "
                +object.getClass().getName(), this);
    }

    /*
    /**********************************************************************
    /* Public API, write methods, Native Ids (type, object)
    /**********************************************************************
     */

    /**
     * Method that can be called to output so-called native Object Id.
     * Note that it may only be called after ensuring this is legal
     * (with {@link #canWriteObjectId()}), as not all data formats
     * have native type id support; and some may only allow them in
     * certain positions or locations.
     * If output is not allowed by the data format in this position,
     * a {@link JsonGenerationException} will be thrown.
     *
     * @param id Native Object Id to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     * @throws JsonGenerationException if Object ID output is not allowed
     *   (either at all, or specifically in this position in output)
     *
     * @since 2.3
     */
    public void writeObjectId(Object id) throws IOException {
        throw new JsonGenerationException("No native support for writing Object Ids", this);
    }

    /**
     * Method that can be called to output references to native Object Ids.
     * Note that it may only be called after ensuring this is legal
     * (with {@link #canWriteObjectId()}), as not all data formats
     * have native type id support; and some may only allow them in
     * certain positions or locations.
     * If output is not allowed by the data format in this position,
     * a {@link JsonGenerationException} will be thrown.
     *
     * @param referenced Referenced value, for which Object Id is expected to be written
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     * @throws JsonGenerationException if Object ID output is not allowed
     *   (either at all, or specifically in this position in output)
     */
    public void writeObjectRef(Object referenced) throws IOException {
        throw new JsonGenerationException("No native support for writing Object Ids", this);
    }

    /**
     * Method that can be called to output so-called native Type Id.
     * Note that it may only be called after ensuring this is legal
     * (with {@link #canWriteTypeId()}), as not all data formats
     * have native type id support; and some may only allow them in
     * certain positions or locations.
     * If output is not allowed by the data format in this position,
     * a {@link JsonGenerationException} will be thrown.
 *
     * @param id Native Type Id to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     * @throws JsonGenerationException if Type ID output is not allowed
     *   (either at all, or specifically in this position in output)
     *
     * @since 2.3
     */
    public void writeTypeId(Object id) throws IOException {
        throw new JsonGenerationException("No native support for writing Type Ids", this);
    }

    /**
     * Replacement method for {@link #writeTypeId(Object)} which is called
     * regardless of whether format has native type ids. If it does have native
     * type ids, those are to be used (if configuration allows this), if not,
     * structural type id inclusion is to be used. For JSON, for example, no
     * native type ids exist and structural inclusion is always used.
     *<p>
     * NOTE: databind may choose to skip calling this method for some special cases
     * (and instead included type id via regular write methods and/or {@link #writeTypeId}
     * -- this is discouraged, but not illegal, and may be necessary as a work-around
     * in some cases.
     *
     * @param typeIdDef Full Type Id definition
     *
     * @return {@link WritableTypeId} for caller to retain and pass to matching
     *   {@link #writeTypeSuffix} call
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     * @throws JsonGenerationException if Type ID output is not allowed
     *   (either at all, or specifically in this position in output)
     *
     * @since 2.9
     */
    public WritableTypeId writeTypePrefix(WritableTypeId typeIdDef) throws IOException
    {
        Object id = typeIdDef.id;
        final JsonToken valueShape = typeIdDef.valueShape;
        if (canWriteTypeId()) {
            typeIdDef.wrapperWritten = false;
            // just rely on native type output method (sub-classes likely to override)
            writeTypeId(id);
        } else {
            // No native type id; write wrappers
            // Normally we only support String type ids (non-String reserved for native type ids)
            String idStr = (id instanceof String) ? (String) id : String.valueOf(id);
            typeIdDef.wrapperWritten = true;

            Inclusion incl = typeIdDef.include;
            // first: can not output "as property" if value not Object; if so, must do "as array"
            if ((valueShape != JsonToken.START_OBJECT)
                    && incl.requiresObjectContext()) {
                typeIdDef.include = incl = WritableTypeId.Inclusion.WRAPPER_ARRAY;
            }

            switch (incl) {
            case PARENT_PROPERTY:
                // nothing to do here, as it has to be written in suffix...
                break;
            case PAYLOAD_PROPERTY:
                // only output as native type id; otherwise caller must handle using some
                // other mechanism, so...
                break;
            case METADATA_PROPERTY:
                // must have Object context by now, so simply write as field name
                // Note, too, that it's bit tricky, since we must print START_OBJECT that is part
                // of value first -- and then NOT output it later on: hence return "early"
                writeStartObject(typeIdDef.forValue);
                writeStringField(typeIdDef.asProperty, idStr);
                return typeIdDef;

            case WRAPPER_OBJECT:
                // NOTE: this is wrapper, not directly related to value to output, so don't pass
                writeStartObject();
                writeFieldName(idStr);
                break;
            case WRAPPER_ARRAY:
            default: // should never occur but translate as "as-array"
                writeStartArray(); // wrapper, not actual array object to write
                writeString(idStr);
            }
        }
        // and finally possible start marker for value itself:
        if (valueShape == JsonToken.START_OBJECT) {
            writeStartObject(typeIdDef.forValue);
        } else if (valueShape == JsonToken.START_ARRAY) {
            // should we now set the current object?
            writeStartArray();
        }
        return typeIdDef;
    }

    /**
     * Method to call along with {@link #writeTypePrefix}, but after actual value
     * that has type id has been completely written. This allows post-processing
     * for some cases (for example if the actual Type Id is written at the END of
     * the value, not before or at the beginning).
     *
     * @param typeIdDef Value returned by the earlier matching call to {@link #writeTypePrefix(WritableTypeId)}
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     * @throws JsonGenerationException if Type ID output is not allowed
     *   (either at all, or specifically in this position in output)
     *
     * @return Argument {@code typeIdDef}, possibly modified
     *
     * @since 2.9
     */
    public WritableTypeId writeTypeSuffix(WritableTypeId typeIdDef) throws IOException
    {
        final JsonToken valueShape = typeIdDef.valueShape;
        // First: does value need closing?
        if (valueShape == JsonToken.START_OBJECT) {
            writeEndObject();
        } else if (valueShape == JsonToken.START_ARRAY) {
            writeEndArray();
        }
        if (typeIdDef.wrapperWritten) {
            switch (typeIdDef.include) {
            case WRAPPER_ARRAY:
                writeEndArray();
                break;
            case PARENT_PROPERTY:
                // unusually, need to output AFTER value. And no real wrapper...
                {
                    Object id = typeIdDef.id;
                    String idStr = (id instanceof String) ? (String) id
                            : String.valueOf(id);
                    writeStringField(typeIdDef.asProperty, idStr);
                }
                break;
            case METADATA_PROPERTY:
            case PAYLOAD_PROPERTY:
                // no actual wrapper; included within Object itself
                break;
            case WRAPPER_OBJECT:
            default: // should never occur but...
                writeEndObject();
                break;
            }
        }
        return typeIdDef;
    }

    /*
    /**********************************************************************
    /* Public API, write methods, serializing Java objects
    /**********************************************************************
     */

    /**
     * Method for writing given Java object (POJO) as JSON.
     * Exactly how the object gets written depends on object
     * in question (and on codec, its configuration); for
     * typical POJOs it will result in JSON Object, but for others JSON
     * Array, or String or numeric value (and for nulls, JSON
     * null literal).
     * <b>NOTE</b>: generator must have its {@code ObjectCodec}
     * set to non-null value; for generators created by a mapping
     * factory this is the case, for others not.
 *
     * @param pojo Java value (usually POJO) to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     *
     * @since 2.13 (to eventually replace {@link #writeObject(Object)})
     */
    public void writePOJO(Object pojo) throws IOException {
        writeObject(pojo);
    }

    // TODO: deprecate in 2.14 or later
    /**
     * Older alias for {@link #writePOJO(Object)}
     *
     * @param pojo Java value (usually POJO) to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeObject(Object pojo) throws IOException;

    /**
     * Method for writing given JSON tree (expressed as a tree
     * where given JsonNode is the root) using this generator.
     * This will generally just call
     * {@link #writeObject} with given node, but is added
     * for convenience and to make code more explicit in cases
     * where it deals specifically with trees.
     *
     * @param rootNode {@link TreeNode} to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public abstract void writeTree(TreeNode rootNode) throws IOException;

    /*
    /**********************************************************************
    /* Public API, convenience field write methods
    /**********************************************************************
     */

    // 04-Oct-2019, tatu: Reminder: these could be defined final to
    //    remember NOT to override in delegating sub-classes -- but
    //    not final in 2.x to reduce compatibility issues

    /**
     * Convenience method for outputting a field entry ("member")
     * that contains specified data in base64-encoded form.
     * Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeBinary(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param data Binary data for the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeBinaryField(String fieldName, byte[] data) throws IOException {
        writeFieldName(fieldName);
        writeBinary(data);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has a boolean value. Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeBoolean(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param value Boolean value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeBooleanField(String fieldName, boolean value) throws IOException {
        writeFieldName(fieldName);
        writeBoolean(value);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has JSON literal value null. Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeNull();
     *</pre>
     *
     * @param fieldName Name of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeNullField(String fieldName) throws IOException {
        writeFieldName(fieldName);
        writeNull();
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has a String value. Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeString(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param value String value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeStringField(String fieldName, String value) throws IOException {
        writeFieldName(fieldName);
        writeString(value);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has the specified numeric value.
 Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeNumber(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param value Numeric value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     *
     * @since 2.11
     */
    public void writeNumberField(String fieldName, short value) throws IOException {
        writeFieldName(fieldName);
        writeNumber(value);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has the specified numeric value. Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeNumber(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param value Numeric value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeNumberField(String fieldName, int value) throws IOException {
        writeFieldName(fieldName);
        writeNumber(value);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has the specified numeric value. Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeNumber(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param value Numeric value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeNumberField(String fieldName, long value) throws IOException {
        writeFieldName(fieldName);
        writeNumber(value);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has the specified numeric value. Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeNumber(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param value Numeric value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     *
     * @since 2.11
     */
    public void writeNumberField(String fieldName, BigInteger value) throws IOException {
        writeFieldName(fieldName);
        writeNumber(value);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has the specified numeric value. Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeNumber(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param value Numeric value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeNumberField(String fieldName, float value) throws IOException {
        writeFieldName(fieldName);
        writeNumber(value);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has the specified numeric value. Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeNumber(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param value Numeric value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeNumberField(String fieldName, double value) throws IOException {
        writeFieldName(fieldName);
        writeNumber(value);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has the specified numeric value.
     * Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeNumber(value);
     *</pre>
     *
     * @param fieldName Name of the field to write
     * @param value Numeric value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeNumberField(String fieldName, BigDecimal value) throws IOException {
        writeFieldName(fieldName);
        writeNumber(value);
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * (that will contain a JSON Array value), and the START_ARRAY marker.
     * Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeStartArray();
     *</pre>
     *<p>
     * Note: caller still has to take care to close the array
     * (by calling {@link #writeEndArray}) after writing all values
     * of the value Array.
     *
     * @param fieldName Name of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeArrayFieldStart(String fieldName) throws IOException {
        writeFieldName(fieldName);
        writeStartArray();
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * (that will contain an Object value), and the START_OBJECT marker.
     * Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeStartObject();
     *</pre>
     *<p>
     * Note: caller still has to take care to close the Object
     * (by calling {@link #writeEndObject}) after writing all
     * entries of the value Object.
     *
     * @param fieldName Name of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeObjectFieldStart(String fieldName) throws IOException {
        writeFieldName(fieldName);
        writeStartObject();
    }

    /**
     * Convenience method for outputting a field entry ("member")
     * that has contents of specific Java object as its value.
     * Equivalent to:
     *<pre>
     *  writeFieldName(fieldName);
     *  writeObject(pojo);
     *</pre>
     *<p>
     * NOTE: actual serialization of POJO value requires assigned {@code ObjectCodec}
     * and will delegate to that (usually {@code ObjectMapper} of databind layer)
     *
     * @param fieldName Name of the field to write
     * @param pojo POJO value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writePOJOField(String fieldName, Object pojo) throws IOException {
        writeObjectField(fieldName, pojo);
    }

    // TODO: deprecate in 2.14 or later
    /**
     * Older alias for {@link #writePOJOField}
     *
     * @param fieldName Name of the field to write
     * @param pojo POJO value of the field to write
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     */
    public void writeObjectField(String fieldName, Object pojo) throws IOException {
        writeFieldName(fieldName);
        writeObject(pojo);
    }

    // // // But this method does need to be delegated so...

    /**
     * Method called to indicate that a property in this position was
     * skipped. It is usually only called for generators that return
     * <code>false</code> from {@link #canOmitFields()}.
     *<p>
     * Default implementation does nothing.
     *
     * @param fieldName Name of the field omitted
     *
     * @throws IOException if there is either an underlying I/O problem or encoding
     *    issue at format layer
     *
     * @since 2.3
     */
    public void writeOmittedField(String fieldName) throws IOException { }

    /*
    /**********************************************************************
    /* Public API, copy-through methods
    /**********************************************************************
     */

    /**
     * Method for copying contents of the current event that
     * the given parser instance points to.
     * Note that the method <b>will not</b> copy any other events,
     * such as events contained within JSON Array or Object structures.
*<p> * Calling this method will not advance the given * parser, although it may cause parser to internally process * more data (if it lazy loads contents of value events, for example) * * @param p Parser that points to event (token) to copy * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer */ public void copyCurrentEvent(JsonParser p) throws IOException { JsonToken t = p.currentToken(); final int token = (t == null) ? ID_NOT_AVAILABLE : t.id(); switch (token) { case ID_NOT_AVAILABLE: _reportError("No current event to copy"); break; // never gets here case ID_START_OBJECT: writeStartObject(); break; case ID_END_OBJECT: writeEndObject(); break; case ID_START_ARRAY: writeStartArray(); break; case ID_END_ARRAY: writeEndArray(); break; case ID_FIELD_NAME: writeFieldName(p.getCurrentName()); break; case ID_STRING: if (p.hasTextCharacters()) { writeString(p.getTextCharacters(), p.getTextOffset(), p.getTextLength()); } else { writeString(p.getText()); } break; case ID_NUMBER_INT: { NumberType n = p.getNumberType(); if (n == NumberType.INT) { writeNumber(p.getIntValue()); } else if (n == NumberType.BIG_INTEGER) { writeNumber(p.getBigIntegerValue()); } else { writeNumber(p.getLongValue()); } break; } case ID_NUMBER_FLOAT: { NumberType n = p.getNumberType(); if (n == NumberType.BIG_DECIMAL) { writeNumber(p.getDecimalValue()); } else if (n == NumberType.FLOAT) { writeNumber(p.getFloatValue()); } else { writeNumber(p.getDoubleValue()); } break; } case ID_TRUE: writeBoolean(true); break; case ID_FALSE: writeBoolean(false); break; case ID_NULL: writeNull(); break; case ID_EMBEDDED_OBJECT: writeObject(p.getEmbeddedObject()); break; default: throw new IllegalStateException("Internal error: unknown current token, "+t); } } /** * Method for copying contents of the current event * <b>and following events that it encloses</b> * the given parser instance points to. *<p> * So what constitutes enclosing? 
Here is the list of * events that have associated enclosed events that will * get copied: *<ul> * <li>{@link JsonToken#START_OBJECT}: * all events up to and including matching (closing) * {@link JsonToken#END_OBJECT} will be copied * </li> * <li>{@link JsonToken#START_ARRAY} * all events up to and including matching (closing) * {@link JsonToken#END_ARRAY} will be copied * </li> * <li>{@link JsonToken#FIELD_NAME} the logical value (which * can consist of a single scalar value; or a sequence of related * events for structured types (JSON Arrays, Objects)) will * be copied along with the name itself. So essentially the * whole <b>field entry</b> (name and value) will be copied. * </li> *</ul> *<p> * After calling this method, parser will point to the * <b>last event</b> that was copied. This will either be * the event parser already pointed to (if there were no * enclosed events), or the last enclosed event copied. * * @param p Parser that points to the value to copy * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer */ public void copyCurrentStructure(JsonParser p) throws IOException { JsonToken t = p.currentToken(); // Let's handle field-name separately first int id = (t == null) ? ID_NOT_AVAILABLE : t.id(); if (id == ID_FIELD_NAME) { writeFieldName(p.getCurrentName()); t = p.nextToken(); id = (t == null) ? 
ID_NOT_AVAILABLE : t.id(); // fall-through to copy the associated value } switch (id) { case ID_START_OBJECT: writeStartObject(); _copyCurrentContents(p); return; case ID_START_ARRAY: writeStartArray(); _copyCurrentContents(p); return; default: copyCurrentEvent(p); } } // @since 2.10 protected void _copyCurrentContents(JsonParser p) throws IOException { int depth = 1; JsonToken t; // Mostly copied from `copyCurrentEvent()`, but with added nesting counts while ((t = p.nextToken()) != null) { switch (t.id()) { case ID_FIELD_NAME: writeFieldName(p.getCurrentName()); break; case ID_START_ARRAY: writeStartArray(); ++depth; break; case ID_START_OBJECT: writeStartObject(); ++depth; break; case ID_END_ARRAY: writeEndArray(); if (--depth == 0) { return; } break; case ID_END_OBJECT: writeEndObject(); if (--depth == 0) { return; } break; case ID_STRING: if (p.hasTextCharacters()) { writeString(p.getTextCharacters(), p.getTextOffset(), p.getTextLength()); } else { writeString(p.getText()); } break; case ID_NUMBER_INT: { NumberType n = p.getNumberType(); if (n == NumberType.INT) { writeNumber(p.getIntValue()); } else if (n == NumberType.BIG_INTEGER) { writeNumber(p.getBigIntegerValue()); } else { writeNumber(p.getLongValue()); } break; } case ID_NUMBER_FLOAT: { NumberType n = p.getNumberType(); if (n == NumberType.BIG_DECIMAL) { writeNumber(p.getDecimalValue()); } else if (n == NumberType.FLOAT) { writeNumber(p.getFloatValue()); } else { writeNumber(p.getDoubleValue()); } break; } case ID_TRUE: writeBoolean(true); break; case ID_FALSE: writeBoolean(false); break; case ID_NULL: writeNull(); break; case ID_EMBEDDED_OBJECT: writeObject(p.getEmbeddedObject()); break; default: throw new IllegalStateException("Internal error: unknown current token, "+t); } } } /* /********************************************************************** /* Public API, buffer handling /********************************************************************** */ /** * Method called to flush any buffered 
content to the underlying * target (output stream, writer), and to flush the target itself * as well. * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer */ @Override public abstract void flush() throws IOException; /** * Method that can be called to determine whether this generator * is closed or not. If it is closed, no more output can be done. * * @return {@code True} if this generator instance has been closed */ public abstract boolean isClosed(); /* /********************************************************************** /* Closeable implementation /********************************************************************** */ /** * Method called to close this generator, so that no more content * can be written. *<p> * Whether the underlying target (stream, writer) gets closed depends * on whether this generator either manages the target (i.e. is the * only one with access to the target -- case if caller passes a * reference to the resource such as File, but not stream); or * has feature {@link Feature#AUTO_CLOSE_TARGET} enabled. * If either of above is true, the target is also closed. Otherwise * (not managing, feature not enabled), target is not closed. * * @throws IOException if there is either an underlying I/O problem */ @Override public abstract void close() throws IOException; /* /********************************************************************** /* Helper methods for sub-classes /********************************************************************** */ /** * Helper method used for constructing and throwing * {@link JsonGenerationException} with given base message. *<p> * Note that sub-classes may override this method to add more detail * or use a {@link JsonGenerationException} sub-class. 
* * @param msg Exception message to use * * @throws JsonGenerationException constructed */ protected void _reportError(String msg) throws JsonGenerationException { throw new JsonGenerationException(msg, this); } protected final void _throwInternal() { VersionUtil.throwInternal(); } protected void _reportUnsupportedOperation() { throw new UnsupportedOperationException("Operation not supported by generator of type "+getClass().getName()); } // @since 2.8 protected final void _verifyOffsets(int arrayLength, int offset, int length) { if ((offset < 0) || (offset + length) > arrayLength) { throw new IllegalArgumentException(String.format( "invalid argument(s) (offset=%d, length=%d) for input array of %d element", offset, length, arrayLength)); } } /** * Helper method to try to call appropriate write method for given * untyped Object. At this point, no structural conversions should be done, * only simple basic types are to be coerced as necessary. * * @param value Value to write * * @throws IOException if there is either an underlying I/O problem or encoding * issue at format layer */ protected void _writeSimpleObject(Object value) throws IOException { // 31-Dec-2009, tatu: Actually, we could just handle some basic // types even without codec. This can improve interoperability, // and specifically help with TokenBuffer. 
if (value == null) { writeNull(); return; } if (value instanceof String) { writeString((String) value); return; } if (value instanceof Number) { Number n = (Number) value; if (n instanceof Integer) { writeNumber(n.intValue()); return; } else if (n instanceof Long) { writeNumber(n.longValue()); return; } else if (n instanceof Double) { writeNumber(n.doubleValue()); return; } else if (n instanceof Float) { writeNumber(n.floatValue()); return; } else if (n instanceof Short) { writeNumber(n.shortValue()); return; } else if (n instanceof Byte) { writeNumber(n.byteValue()); return; } else if (n instanceof BigInteger) { writeNumber((BigInteger) n); return; } else if (n instanceof BigDecimal) { writeNumber((BigDecimal) n); return; // then Atomic types } else if (n instanceof AtomicInteger) { writeNumber(((AtomicInteger) n).get()); return; } else if (n instanceof AtomicLong) { writeNumber(((AtomicLong) n).get()); return; } } else if (value instanceof byte[]) { writeBinary((byte[]) value); return; } else if (value instanceof Boolean) { writeBoolean((Boolean) value); return; } else if (value instanceof AtomicBoolean) { writeBoolean(((AtomicBoolean) value).get()); return; } throw new IllegalStateException("No ObjectCodec defined for the generator, can only serialize simple wrapper types (type passed " +value.getClass().getName()+")"); } }
apache-2.0
cyq7on/DataStructureAndAlgorithm
Project/Practice/src/com/cyq7on/leetcode/dp/LongestIncreasingSubsequence.java
1653
package com.cyq7on.leetcode.dp; import java.util.Arrays; /** * @description *给定一个无序的整数数组,找到其中最长上升子序列的长度。 * * 示例: * * 输入: [10,9,2,5,3,7,101,18] * 输出: 4 * 解释: 最长的上升子序列是 [2,3,7,101],它的长度是 4。 * 说明: * * 可能会有多种最长上升子序列的组合,你只需要输出对应的长度即可。 * 你算法的时间复杂度应该为 O(n2) 。 * 进阶: 你能将算法的时间复杂度降低到 O(n log n) 吗? * * 来源:力扣(LeetCode) * 链接:https://leetcode-cn.com/problems/longest-increasing-subsequence * 著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。 * @author cyq7on * @create 2020/3/14 **/ public class LongestIncreasingSubsequence { // https://leetcode-cn.com/problems/longest-increasing-subsequence/solution/zui-chang-shang-sheng-zi-xu-lie-dong-tai-gui-hua-2/ public int lengthOfLIS(int[] nums) { int length = nums.length; //dp[i] 的值代表 nums 前 i(i>0) 个数字的最长子序列长度。 int[] dp = new int[length]; //dp[i] 所有元素置 1,含义是每个元素都至少可以单独成为子序列,此时长度都为 1 Arrays.fill(dp, 1); int max = 0; for (int i = 0; i < length; i++) { for (int j = 0; j < i; j++) { if (nums[j] < nums[i]) { dp[i] = Math.max(dp[i], dp[j] + 1); } } if (max < dp[i]) { max = dp[i]; } } return max; } }
apache-2.0
RuedigerMoeller/fast-serialization
src/main/java/org/nustaq/serialization/FSTSerializerRegistry.java
4274
/*
 * Copyright 2014 Ruediger Moeller.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.nustaq.serialization;

import java.util.*;

/**
 * Created with IntelliJ IDEA.
 * User: ruedi
 * Date: 10.11.12
 * Time: 15:04
 *
 * contains a map from class => serializer.
 * One can register Serializers for exact classes or a class and all its subclasses
 * (can have unexpected consequences in case a subclass holds additional state).
 *
 * Lookup order (see getSerializer): primitives never get a serializer; the optional
 * delegate is consulted first; then each class in the lineage of the requested class
 * is checked against the registered map.
 */
public class FSTSerializerRegistry {

    // Optional hook consulted before the local map; may be null.
    private FSTSerializerRegistryDelegate delegate;

    /** Shared no-op serializer: writes/reads nothing and instantiates null. */
    public static FSTObjectSerializer NULL = new NULLSerializer();

    public void setDelegate(FSTSerializerRegistryDelegate delegate) {
        this.delegate = delegate;
    }

    public FSTSerializerRegistryDelegate getDelegate() {
        return delegate;
    }

    /** Null-object implementation of FSTObjectSerializer — all operations are no-ops. */
    static class NULLSerializer implements FSTObjectSerializer {

        @Override
        public void writeObject(FSTObjectOutput out, Object toWrite, FSTClazzInfo clzInfo, FSTClazzInfo.FSTFieldInfo referencedBy, int streamPosition) {
        }

        @Override
        public void readObject(FSTObjectInput out, Object toWrite, FSTClazzInfo clzInfo, FSTClazzInfo.FSTFieldInfo referencedBy) throws Exception {
        }

        @Override
        public boolean willHandleClass(Class cl) {
            return true;
        }

        /**
         * @return true if FST can skip a search for same instances in the serialized
         * ObjectGraph. This speeds up reading and writing and makes sense for short
         * immutable such as Integer, Short, Character, Date, .. . For those classes
         * it is more expensive (CPU, size) to do a lookup than to just write the
         * Object twice in case.
         */
        @Override
        public boolean alwaysCopy() {
            return false;
        }

        @Override
        public Object instantiate(Class objectClass, FSTObjectInput fstObjectInput, FSTClazzInfo serializationInfo, FSTClazzInfo.FSTFieldInfo referencee, int streamPosition) throws Exception {
            return null;
        }
    };

    /** Map entry: a serializer plus whether it also applies to subclasses. */
    final static class SerEntry {
        boolean forSubClasses = false;
        FSTObjectSerializer ser;

        SerEntry(boolean forSubClasses, FSTObjectSerializer ser) {
            this.forSubClasses = forSubClasses;
            this.ser = ser;
        }
    }

    // class -> registered serializer entry; also used as a cache for subclass
    // lookups (see getSerializer(Class, Class), which inserts resolved entries).
    HashMap<Class,SerEntry> map = new HashMap<Class, SerEntry>(97);

    /**
     * Resolves the serializer for the given class, or null if none applies.
     * Primitives are never serialized via a registered serializer; the delegate
     * (if set) takes precedence; otherwise the class lineage is walked until a
     * registered entry matches.
     */
    public final FSTObjectSerializer getSerializer(Class cl) {
        if ( cl.isPrimitive()) {
            return null;
        }
        if ( delegate != null ) {
            FSTObjectSerializer ser = delegate.getSerializer(cl);
            if ( ser != null ) {
                return ser;
            }
        }
        final Class[] lineage = FSTClazzLineageInfo.getLineage(cl);
        for (final Class ascendant : lineage) {
            final FSTObjectSerializer serializer = getSerializer(ascendant, cl);
            if (serializer != null)
                return serializer;
        }
        return null;
    }

    /**
     * Looks up a registered entry for {@code cl} on behalf of {@code lookupStart}
     * (the class originally asked for). Exact matches apply directly; entries
     * registered for subclasses are cached under {@code lookupStart} so later
     * lookups hit the map directly.
     */
    final FSTObjectSerializer getSerializer(Class cl, Class lookupStart) {
        if ( cl == null ) {
            return null;
        }
        final SerEntry serEntry = map.get(cl);
        if ( serEntry != null ) {
            if ( cl == lookupStart && serEntry.ser.willHandleClass(lookupStart)) {
                return serEntry.ser;
            }
            if ( serEntry.forSubClasses && serEntry.ser.willHandleClass(lookupStart) ) {
                // cache under the concrete class (not for subclasses) to short-circuit
                // the lineage walk on the next lookup
                putSerializer(lookupStart, serEntry.ser, false);
                return serEntry.ser;
            }
        }
        return null;
    }

    /**
     * Registers a serializer for {@code cl}, optionally for all its subclasses too.
     */
    public void putSerializer( Class cl, FSTObjectSerializer ser, boolean includeSubclasses) {
        map.put(cl,new SerEntry(includeSubclasses,ser));
    }
}
apache-2.0
saulbein/web3j
core/src/main/java/org/web3j/tx/TransactionManager.java
3950
package org.web3j.tx; import java.io.IOException; import java.math.BigInteger; import java.util.Optional; import org.web3j.protocol.Web3j; import org.web3j.protocol.core.methods.response.EthGetTransactionReceipt; import org.web3j.protocol.core.methods.response.EthSendTransaction; import org.web3j.protocol.core.methods.response.TransactionReceipt; import org.web3j.protocol.exceptions.TransactionTimeoutException; /** * Transaction manager abstraction for executing transactions with Ethereum client via * various mechanisms. */ public abstract class TransactionManager { private static final int SLEEP_DURATION = 15000; private static final int ATTEMPTS = 40; private final int sleepDuration; private final int attempts; private final Web3j web3j; protected TransactionManager(Web3j web3j) { this.web3j = web3j; this.attempts = ATTEMPTS; this.sleepDuration = SLEEP_DURATION; } protected TransactionManager(Web3j web3j, int attempts, int sleepDuration) { this.web3j = web3j; this.attempts = attempts; this.sleepDuration = sleepDuration; } TransactionReceipt executeTransaction( BigInteger gasPrice, BigInteger gasLimit, String to, String data, BigInteger value) throws InterruptedException, IOException, TransactionTimeoutException { EthSendTransaction ethSendTransaction = sendTransaction( gasPrice, gasLimit, to, data, value); return processResponse(ethSendTransaction); } public abstract EthSendTransaction sendTransaction( BigInteger gasPrice, BigInteger gasLimit, String to, String data, BigInteger value) throws IOException; public abstract String getFromAddress(); private TransactionReceipt processResponse(EthSendTransaction transactionResponse) throws InterruptedException, IOException, TransactionTimeoutException { if (transactionResponse.hasError()) { throw new RuntimeException("Error processing transaction request: " + transactionResponse.getError().getMessage()); } String transactionHash = transactionResponse.getTransactionHash(); return 
waitForTransactionReceipt(transactionHash); } private TransactionReceipt waitForTransactionReceipt( String transactionHash) throws InterruptedException, IOException, TransactionTimeoutException { return getTransactionReceipt(transactionHash, sleepDuration, attempts); } private TransactionReceipt getTransactionReceipt( String transactionHash, int sleepDuration, int attempts) throws IOException, InterruptedException, TransactionTimeoutException { Optional<TransactionReceipt> receiptOptional = sendTransactionReceiptRequest(transactionHash); for (int i = 0; i < attempts; i++) { if (!receiptOptional.isPresent()) { Thread.sleep(sleepDuration); receiptOptional = sendTransactionReceiptRequest(transactionHash); } else { return receiptOptional.get(); } } throw new TransactionTimeoutException("Transaction receipt was not generated after " + ((sleepDuration * attempts) / 1000 + " seconds for transaction: " + transactionHash)); } private Optional<TransactionReceipt> sendTransactionReceiptRequest( String transactionHash) throws IOException { EthGetTransactionReceipt transactionReceipt = web3j.ethGetTransactionReceipt(transactionHash).send(); if (transactionReceipt.hasError()) { throw new RuntimeException("Error processing request: " + transactionReceipt.getError().getMessage()); } return transactionReceipt.getTransactionReceipt(); } }
apache-2.0
SrGrieves/rasberry-node-frame
monitorcontrol.js
652
const util = require('util'); const exec = util.promisify(require('child_process').exec); const { execSync } = require('child_process'); const _ = require('lodash'); async function set_vcgencmd(mode) { const { stdout, stderr } = await exec(`/opt/vc/bin/vcgencmd display_power ${mode}`); // return stdout; } async function test_vcgencmd(type) { const { stdout, stderr } = await exec('echo ' + type); console.log('Output: ' + stdout); } async function putMonitorToSleep() { await set_vcgencmd(0) } async function wakeMonitor() { await set_vcgencmd(1) } module.exports = { putMonitorToSleep: putMonitorToSleep, wakeMonitor: wakeMonitor }
apache-2.0
PalomaMobile/paloma-android-sdk
palomamobile-android-sdk-core/android-sdk-core-library/src/main/java/com/palomamobile/android/sdk/core/CustomHeader.java
992
package com.palomamobile.android.sdk.core; /** * Defines names of custom headers available to all Service interfaces across the various SDK modules. The values of those * headers are used by the SDK Core module to enhance SDK functionality such as to determine compatibility between the * SDK module version and current Server API version. */ public interface CustomHeader { /** * Header with this name is placed on each Service call to describe the SDK compatibility requirements. */ String HEADER_PALOMA_TARGET_SERVICE_VERSION = "Paloma-Target-Service-Version"; /** * Header with this name is placed on each Service call to describe the SDK version for error reporting etc. */ String HEADER_PALOMA_SDK_MODULE_VERSION = "Paloma-SDK-Module-Version"; /** * Header with this name identifies each individual request for the purposes of de-duping requests during retries. */ String HEADER_NAME_PALOMA_REQUEST = "X-Paloma-Request"; }
apache-2.0
bovigny/cassandra-driver-spark
src/test/scala/com/datastax/driver/spark/types/CanBuildFromTest.scala
958
package com.datastax.driver.spark.types

import com.datastax.driver.spark.util.SerializationUtil
import org.junit.Assert._
import org.junit.Test

// Verifies that the driver's CanBuildFrom factories produce working builders,
// and that they remain usable after a Java serialization round-trip (builders
// are shipped to Spark executors, so the factories must be serializable).
class CanBuildFromTest {

  // Basic sanity: the set factory yields a builder that accumulates into a Set.
  @Test
  def testBuild() {
    val bf = CanBuildFrom.setCanBuildFrom[Int]
    val builder = bf.apply()
    builder += 1
    builder += 2
    builder += 3
    assertEquals(Set(1, 2, 3), builder.result())
  }

  // The factory must still produce a working builder after being serialized
  // and deserialized.
  @Test
  def testSerializeAndBuild() {
    val bf = CanBuildFrom.setCanBuildFrom[Int]
    val bf2 = SerializationUtil.serializeAndDeserialize(bf)
    val builder = bf2.apply()
    builder += 1
    builder += 2
    builder += 3
    assertEquals(Set(1, 2, 3), builder.result())
  }

  // Same round-trip check for the TreeSet factory, which additionally carries
  // an implicit Ordering that must survive serialization.
  @Test
  def testSerializeAndBuildWithOrdering() {
    val bf = CanBuildFrom.treeSetCanBuildFrom[Int]
    val bf2 = SerializationUtil.serializeAndDeserialize(bf)
    val builder = bf2.apply()
    builder += 1
    builder += 2
    builder += 3
    assertEquals(Set(1, 2, 3), builder.result())
  }
}
apache-2.0
deepanjanroy/lighthouse
lighthouse-core/audits/byte-efficiency/unused-javascript.js
5051
/** * @license Copyright 2017 Google Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ 'use strict'; const ByteEfficiencyAudit = require('./byte-efficiency-audit'); const i18n = require('../../lib/i18n/i18n.js'); const UIStrings = { /** Imperative title of a Lighthouse audit that tells the user to remove JavaScript that is never evaluated during page load. This is displayed in a list of audit titles that Lighthouse generates. */ title: 'Remove unused JavaScript', /** Description of a Lighthouse audit that tells the user *why* they should remove JavaScript that is never needed/evaluated by the browser. This is displayed after a user expands the section to see more. No character length limits. 'Learn More' becomes link text to additional documentation. 
*/ description: 'Remove unused JavaScript to reduce bytes consumed by network activity.', }; const str_ = i18n.createMessageInstanceIdFn(__filename, UIStrings); const IGNORE_THRESHOLD_IN_BYTES = 2048; class UnusedJavaScript extends ByteEfficiencyAudit { /** * @return {LH.Audit.Meta} */ static get meta() { return { id: 'unused-javascript', title: str_(UIStrings.title), description: str_(UIStrings.description), scoreDisplayMode: ByteEfficiencyAudit.SCORING_MODES.NUMERIC, requiredArtifacts: ['JsUsage', 'devtoolsLogs', 'traces'], }; } /** * @param {LH.Crdp.Profiler.ScriptCoverage} script * @return {{unusedLength: number, contentLength: number}} */ static computeWaste(script) { let maximumEndOffset = 0; for (const func of script.functions) { for (const range of func.ranges) { maximumEndOffset = Math.max(maximumEndOffset, range.endOffset); } } // We only care about unused ranges of the script, so we can ignore all the nesting and safely // assume that if a range is unexecuted, all nested ranges within it will also be unexecuted. 
const unusedByIndex = new Uint8Array(maximumEndOffset); for (const func of script.functions) { for (const range of func.ranges) { if (range.count === 0) { for (let i = range.startOffset; i < range.endOffset; i++) { unusedByIndex[i] = 1; } } } } let unused = 0; for (const x of unusedByIndex) { unused += x; } return { unusedLength: unused, contentLength: maximumEndOffset, }; } /** * @param {Array<{unusedLength: number, contentLength: number}>} wasteData * @param {LH.Artifacts.NetworkRequest} networkRecord * @return {LH.Audit.ByteEfficiencyItem} */ static mergeWaste(wasteData, networkRecord) { let unusedLength = 0; let contentLength = 0; for (const usage of wasteData) { unusedLength += usage.unusedLength; contentLength += usage.contentLength; } const totalBytes = ByteEfficiencyAudit.estimateTransferSize(networkRecord, contentLength, 'Script'); const wastedRatio = (unusedLength / contentLength) || 0; const wastedBytes = Math.round(totalBytes * wastedRatio); return { url: networkRecord.url, totalBytes, wastedBytes, wastedPercent: 100 * wastedRatio, }; } /** * @param {LH.Artifacts} artifacts * @param {Array<LH.Artifacts.NetworkRequest>} networkRecords * @return {ByteEfficiencyAudit.ByteEfficiencyProduct} */ static audit_(artifacts, networkRecords) { /** @type {Map<string, Array<LH.Crdp.Profiler.ScriptCoverage>>} */ const scriptsByUrl = new Map(); for (const script of artifacts.JsUsage) { const scripts = scriptsByUrl.get(script.url) || []; scripts.push(script); scriptsByUrl.set(script.url, scripts); } const items = []; for (const [url, scripts] of scriptsByUrl.entries()) { const networkRecord = networkRecords.find(record => record.url === url); if (!networkRecord) continue; const wasteData = scripts.map(UnusedJavaScript.computeWaste); const item = UnusedJavaScript.mergeWaste(wasteData, networkRecord); if (item.wastedBytes <= IGNORE_THRESHOLD_IN_BYTES) continue; items.push(item); } return { items, headings: [ {key: 'url', valueType: 'url', label: 
str_(i18n.UIStrings.columnURL)}, {key: 'totalBytes', valueType: 'bytes', label: str_(i18n.UIStrings.columnSize)}, {key: 'wastedBytes', valueType: 'bytes', label: str_(i18n.UIStrings.columnWastedBytes)}, ], }; } } module.exports = UnusedJavaScript; module.exports.UIStrings = UIStrings;
apache-2.0
aehlig/bazel
src/main/java/com/google/devtools/build/lib/skylarkbuildapi/ProtoConfigurationApi.java
1179
// Copyright 2018 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skylarkbuildapi; import com.google.devtools.build.lib.skylarkinterface.SkylarkModule; import com.google.devtools.build.lib.skylarkinterface.SkylarkModuleCategory; import com.google.devtools.build.lib.skylarkinterface.SkylarkValue; /** A configuration fragment representing protocol buffers. */ @SkylarkModule( name = "proto", category = SkylarkModuleCategory.CONFIGURATION_FRAGMENT, doc = "A configuration fragment representing protocol buffers.") public interface ProtoConfigurationApi extends SkylarkValue {}
apache-2.0
kjvarga/google-api-ads-ruby
dfp_api/examples/v201605/activity_service/get_all_activities.rb
2482
#!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright:: Copyright 2013, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
#           you may not use this file except in compliance with the License.
#           You may obtain a copy of the License at
#
#           http://www.apache.org/licenses/LICENSE-2.0
#
#           Unless required by applicable law or agreed to in writing, software
#           distributed under the License is distributed on an "AS IS" BASIS,
#           WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
#           implied.
#           See the License for the specific language governing permissions and
#           limitations under the License.
#
# This example gets all activities. To create activities,
# run create_activities.rb.

require 'dfp_api'

API_VERSION = :v201605

# Fetches every activity in the DFP network page by page and prints a numbered
# line (ID, name, type) for each, followed by the total result count.
def get_all_activities()
  # Get DfpApi instance and load configuration from ~/dfp_api.yml.
  dfp = DfpApi::Api.new

  # To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
  # the configuration file or provide your own logger:
  # dfp.logger = Logger.new('dfp_xml.log')

  # Get the ActivityService.
  activity_service = dfp.service(:ActivityService, API_VERSION)

  # Create a statement to select all activities.
  statement = DfpApi::FilterStatement.new('ORDER BY id ASC')

  begin
    # Get activities by statement.
    page = activity_service.get_activities_by_statement(
        statement.toStatement())

    if page[:results]
      page[:results].each_with_index do |activity, index|
        puts "%d) Activity with ID: %d, name: %s, type: %s." %
            [index + statement.offset, activity[:id],
             activity[:name], activity[:type]]
      end
    end
    # Advance to the next page; loop until past the last result.
    statement.offset += DfpApi::SUGGESTED_PAGE_LIMIT
  end while statement.offset < page[:total_result_set_size]

  # Print a footer
  if page.include?(:total_result_set_size)
    puts "Total number of results: %d" % page[:total_result_set_size]
  end
end

if __FILE__ == $0
  begin
    get_all_activities()

  # HTTP errors.
  rescue AdsCommon::Errors::HttpError => e
    puts "HTTP Error: %s" % e

  # API errors.
  rescue DfpApi::Errors::ApiException => e
    puts "Message: %s" % e.message
    puts 'Errors:'
    e.errors.each_with_index do |error, index|
      puts "\tError [%d]:" % (index + 1)
      error.each do |field, value|
        puts "\t\t%s: %s" % [field, value]
      end
    end
  end
end
apache-2.0
pashute/docs.particular.net
samples/encryption/message-body-encryption/Version_4/Endpoint2/Program.cs
787
using System; using NServiceBus; using NServiceBus.Installation.Environments; class Program { static void Main() { Configure.Serialization.Json(); Configure configure = Configure.With(); configure.DefineEndpointName("Samples.MessageBodyEncryption.Endpoint2"); configure.DefaultBuilder(); configure.UseTransport<Msmq>(); configure.InMemorySagaPersister(); configure.UseInMemoryTimeoutPersister(); configure.InMemorySubscriptionStorage(); configure.RegisterMessageEncryptor(); configure.UnicastBus() .CreateBus() .Start(() => Configure.Instance.ForInstallationOn<Windows>().Install()); Console.WriteLine("Press any key to exit"); Console.ReadLine(); } }
apache-2.0
naokiur/design-pattern-sample
src/main/java/jp/ne/naokiur/design/pattern/iterator/Iterator.java
186
package jp.ne.naokiur.design.pattern.iterator; public interface Iterator { boolean hasNext(); boolean isMatchedAge(Integer age); Employee next(); Employee current(); }
apache-2.0
xiaozhu36/terraform-provider
vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/mts/query_cover_job_list.go
4252
package mts

//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// NOTE(review): because this file is generated, only comments are touched here;
// behavioral fixes belong in the generator, not this file.

import (
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
	"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)

// QueryCoverJobList invokes the mts.QueryCoverJobList API synchronously
// api document: https://help.aliyun.com/api/mts/querycoverjoblist.html
func (client *Client) QueryCoverJobList(request *QueryCoverJobListRequest) (response *QueryCoverJobListResponse, err error) {
	response = CreateQueryCoverJobListResponse()
	// DoAction performs the RPC and unmarshals the reply into response.
	err = client.DoAction(request, response)
	return
}

// QueryCoverJobListWithChan invokes the mts.QueryCoverJobList API asynchronously
// api document: https://help.aliyun.com/api/mts/querycoverjoblist.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) QueryCoverJobListWithChan(request *QueryCoverJobListRequest) (<-chan *QueryCoverJobListResponse, <-chan error) {
	// Capacity-1 channels: the task goroutine can deliver its single result
	// without blocking even if the caller has not started receiving yet.
	responseChan := make(chan *QueryCoverJobListResponse, 1)
	errChan := make(chan error, 1)
	err := client.AddAsyncTask(func() {
		defer close(responseChan)
		defer close(errChan)
		response, err := client.QueryCoverJobList(request)
		if err != nil {
			errChan <- err
		} else {
			responseChan <- response
		}
	})
	// AddAsyncTask itself can fail (e.g. task queue unavailable); report the
	// scheduling error on errChan and close both channels here, since the
	// task function will never run.
	if err != nil {
		errChan <- err
		close(responseChan)
		close(errChan)
	}
	return responseChan, errChan
}

// QueryCoverJobListWithCallback invokes the mts.QueryCoverJobList API asynchronously
// api document: https://help.aliyun.com/api/mts/querycoverjoblist.html
// asynchronous document: https://help.aliyun.com/document_detail/66220.html
func (client *Client) QueryCoverJobListWithCallback(request *QueryCoverJobListRequest, callback func(response *QueryCoverJobListResponse, err error)) <-chan int {
	// result receives 1 after the callback ran from the async task, or 0 when
	// the task could not be scheduled (callback then gets a nil response).
	result := make(chan int, 1)
	err := client.AddAsyncTask(func() {
		var response *QueryCoverJobListResponse
		var err error
		defer close(result)
		response, err = client.QueryCoverJobList(request)
		callback(response, err)
		result <- 1
	})
	if err != nil {
		defer close(result)
		callback(nil, err)
		result <- 0
	}
	return result
}

// QueryCoverJobListRequest is the request struct for api QueryCoverJobList
type QueryCoverJobListRequest struct {
	*requests.RpcRequest
	OwnerId              requests.Integer `position:"Query" name:"OwnerId"`
	ResourceOwnerAccount string           `position:"Query" name:"ResourceOwnerAccount"`
	ResourceOwnerId      requests.Integer `position:"Query" name:"ResourceOwnerId"`
	CoverJobIds          string           `position:"Query" name:"CoverJobIds"`
	OwnerAccount         string           `position:"Query" name:"OwnerAccount"`
}

// QueryCoverJobListResponse is the response struct for api QueryCoverJobList
type QueryCoverJobListResponse struct {
	*responses.BaseResponse
	RequestId    string                         `json:"RequestId" xml:"RequestId"`
	NonExistIds  NonExistIdsInQueryCoverJobList `json:"NonExistIds" xml:"NonExistIds"`
	CoverJobList CoverJobList                   `json:"CoverJobList" xml:"CoverJobList"`
}

// CreateQueryCoverJobListRequest creates a request to invoke QueryCoverJobList API
func CreateQueryCoverJobListRequest() (request *QueryCoverJobListRequest) {
	request = &QueryCoverJobListRequest{
		RpcRequest: &requests.RpcRequest{},
	}
	// Product "Mts", API version 2014-06-18, RPC-style signature.
	request.InitWithApiInfo("Mts", "2014-06-18", "QueryCoverJobList", "mts", "openAPI")
	return
}

// CreateQueryCoverJobListResponse creates a response to parse from QueryCoverJobList response
func CreateQueryCoverJobListResponse() (response *QueryCoverJobListResponse) {
	response = &QueryCoverJobListResponse{
		BaseResponse: &responses.BaseResponse{},
	}
	return
}
apache-2.0
domchen/typescript-plus
src/compiler/program.ts
163102
namespace ts { const ignoreDiagnosticCommentRegEx = /(^\s*$)|(^\s*\/\/\/?\s*(@ts-ignore)?)/; export function findConfigFile(searchPath: string, fileExists: (fileName: string) => boolean, configName = "tsconfig.json"): string | undefined { return forEachAncestorDirectory(searchPath, ancestor => { const fileName = combinePaths(ancestor, configName); return fileExists(fileName) ? fileName : undefined; }); } export function resolveTripleslashReference(moduleName: string, containingFile: string): string { const basePath = getDirectoryPath(containingFile); const referencedFileName = isRootedDiskPath(moduleName) ? moduleName : combinePaths(basePath, moduleName); return normalizePath(referencedFileName); } /* @internal */ export function computeCommonSourceDirectoryOfFilenames(fileNames: string[], currentDirectory: string, getCanonicalFileName: GetCanonicalFileName): string { let commonPathComponents: string[] | undefined; const failed = forEach(fileNames, sourceFile => { // Each file contributes into common source file path const sourcePathComponents = getNormalizedPathComponents(sourceFile, currentDirectory); sourcePathComponents.pop(); // The base file name is not part of the common directory path if (!commonPathComponents) { // first file commonPathComponents = sourcePathComponents; return; } const n = Math.min(commonPathComponents.length, sourcePathComponents.length); for (let i = 0; i < n; i++) { if (getCanonicalFileName(commonPathComponents[i]) !== getCanonicalFileName(sourcePathComponents[i])) { if (i === 0) { // Failed to find any common path component return true; } // New common path found that is 0 -> i-1 commonPathComponents.length = i; break; } } // If the sourcePathComponents was shorter than the commonPathComponents, truncate to the sourcePathComponents if (sourcePathComponents.length < commonPathComponents.length) { commonPathComponents.length = sourcePathComponents.length; } }); // A common path can not be found when paths span multiple drives on windows, 
for example if (failed) { return ""; } if (!commonPathComponents) { // Can happen when all input files are .d.ts files return currentDirectory; } return getPathFromPathComponents(commonPathComponents); } interface OutputFingerprint { hash: string; byteOrderMark: boolean; mtime: Date; } export function createCompilerHost(options: CompilerOptions, setParentNodes?: boolean): CompilerHost { return createCompilerHostWorker(options, setParentNodes); } /*@internal*/ // TODO(shkamat): update this after reworking ts build API export function createCompilerHostWorker(options: CompilerOptions, setParentNodes?: boolean, system = sys): CompilerHost { const existingDirectories = createMap<boolean>(); function getCanonicalFileName(fileName: string): string { // if underlying system can distinguish between two files whose names differs only in cases then file name already in canonical form. // otherwise use toLowerCase as a canonical form. return system.useCaseSensitiveFileNames ? fileName : fileName.toLowerCase(); } function getSourceFile(fileName: string, languageVersion: ScriptTarget, onError?: (message: string) => void): SourceFile | undefined { let text: string | undefined; try { performance.mark("beforeIORead"); text = system.readFile(fileName, options.charset); performance.mark("afterIORead"); performance.measure("I/O Read", "beforeIORead", "afterIORead"); } catch (e) { if (onError) { onError(e.message); } text = ""; } return text !== undefined ? 
createSourceFile(fileName, text, languageVersion, setParentNodes) : undefined; } function directoryExists(directoryPath: string): boolean { if (existingDirectories.has(directoryPath)) { return true; } if (system.directoryExists(directoryPath)) { existingDirectories.set(directoryPath, true); return true; } return false; } function ensureDirectoriesExist(directoryPath: string) { if (directoryPath.length > getRootLength(directoryPath) && !directoryExists(directoryPath)) { const parentDirectory = getDirectoryPath(directoryPath); ensureDirectoriesExist(parentDirectory); system.createDirectory(directoryPath); } } let outputFingerprints: Map<OutputFingerprint>; function writeFileIfUpdated(fileName: string, data: string, writeByteOrderMark: boolean): void { if (!outputFingerprints) { outputFingerprints = createMap<OutputFingerprint>(); } const hash = system.createHash!(data); // TODO: GH#18217 const mtimeBefore = system.getModifiedTime!(fileName); // TODO: GH#18217 if (mtimeBefore) { const fingerprint = outputFingerprints.get(fileName); // If output has not been changed, and the file has no external modification if (fingerprint && fingerprint.byteOrderMark === writeByteOrderMark && fingerprint.hash === hash && fingerprint.mtime.getTime() === mtimeBefore.getTime()) { return; } } system.writeFile(fileName, data, writeByteOrderMark); const mtimeAfter = system.getModifiedTime!(fileName) || missingFileModifiedTime; // TODO: GH#18217 outputFingerprints.set(fileName, { hash, byteOrderMark: writeByteOrderMark, mtime: mtimeAfter }); } function writeFile(fileName: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void) { try { performance.mark("beforeIOWrite"); ensureDirectoriesExist(getDirectoryPath(normalizePath(fileName))); if (isWatchSet(options) && system.createHash && system.getModifiedTime) { writeFileIfUpdated(fileName, data, writeByteOrderMark); } else { system.writeFile(fileName, data, writeByteOrderMark); } performance.mark("afterIOWrite"); 
performance.measure("I/O Write", "beforeIOWrite", "afterIOWrite"); } catch (e) { if (onError) { onError(e.message); } } } function getDefaultLibLocation(): string { return getDirectoryPath(normalizePath(system.getExecutingFilePath())); } const newLine = getNewLineCharacter(options, () => system.newLine); const realpath = system.realpath && ((path: string) => system.realpath!(path)); return { getSourceFile, getDefaultLibLocation, getDefaultLibFileName: options => combinePaths(getDefaultLibLocation(), getDefaultLibFileName(options)), writeFile, getCurrentDirectory: memoize(() => system.getCurrentDirectory()), useCaseSensitiveFileNames: () => system.useCaseSensitiveFileNames, getCanonicalFileName, getNewLine: () => newLine, fileExists: fileName => system.fileExists(fileName), readFile: fileName => system.readFile(fileName), trace: (s: string) => system.write(s + newLine), directoryExists: directoryName => system.directoryExists(directoryName), getEnvironmentVariable: name => system.getEnvironmentVariable ? 
system.getEnvironmentVariable(name) : "", getDirectories: (path: string) => system.getDirectories(path), realpath, readDirectory: (path, extensions, include, exclude, depth) => system.readDirectory(path, extensions, include, exclude, depth) }; } export function getPreEmitDiagnostics(program: Program, sourceFile?: SourceFile, cancellationToken?: CancellationToken): Diagnostic[] { const diagnostics = [ ...program.getConfigFileParsingDiagnostics(), ...program.getOptionsDiagnostics(cancellationToken), ...program.getSyntacticDiagnostics(sourceFile, cancellationToken), ...program.getGlobalDiagnostics(cancellationToken), ...program.getSemanticDiagnostics(sourceFile, cancellationToken) ]; if (getEmitDeclarations(program.getCompilerOptions())) { addRange(diagnostics, program.getDeclarationDiagnostics(sourceFile, cancellationToken)); } return sortAndDeduplicateDiagnostics(diagnostics); } export interface FormatDiagnosticsHost { getCurrentDirectory(): string; getCanonicalFileName(fileName: string): string; getNewLine(): string; } export function formatDiagnostics(diagnostics: ReadonlyArray<Diagnostic>, host: FormatDiagnosticsHost): string { let output = ""; for (const diagnostic of diagnostics) { output += formatDiagnostic(diagnostic, host); } return output; } export function formatDiagnostic(diagnostic: Diagnostic, host: FormatDiagnosticsHost): string { const errorMessage = `${diagnosticCategoryName(diagnostic)} TS${diagnostic.code}: ${flattenDiagnosticMessageText(diagnostic.messageText, host.getNewLine())}${host.getNewLine()}`; if (diagnostic.file) { const { line, character } = getLineAndCharacterOfPosition(diagnostic.file, diagnostic.start!); // TODO: GH#18217 const fileName = diagnostic.file.fileName; const relativeFileName = convertToRelativePath(fileName, host.getCurrentDirectory(), fileName => host.getCanonicalFileName(fileName)); return `${relativeFileName}(${line + 1},${character + 1}): ` + errorMessage; } return errorMessage; } /** @internal */ export enum 
ForegroundColorEscapeSequences { Grey = "\u001b[90m", Red = "\u001b[91m", Yellow = "\u001b[93m", Blue = "\u001b[94m", Cyan = "\u001b[96m" } const gutterStyleSequence = "\u001b[7m"; const gutterSeparator = " "; const resetEscapeSequence = "\u001b[0m"; const ellipsis = "..."; const halfIndent = " "; const indent = " "; function getCategoryFormat(category: DiagnosticCategory): ForegroundColorEscapeSequences { switch (category) { case DiagnosticCategory.Error: return ForegroundColorEscapeSequences.Red; case DiagnosticCategory.Warning: return ForegroundColorEscapeSequences.Yellow; case DiagnosticCategory.Suggestion: return Debug.fail("Should never get an Info diagnostic on the command line."); case DiagnosticCategory.Message: return ForegroundColorEscapeSequences.Blue; } } /** @internal */ export function formatColorAndReset(text: string, formatStyle: string) { return formatStyle + text + resetEscapeSequence; } function padLeft(s: string, length: number) { while (s.length < length) { s = " " + s; } return s; } function formatCodeSpan(file: SourceFile, start: number, length: number, indent: string, squiggleColor: ForegroundColorEscapeSequences, host: FormatDiagnosticsHost) { const { line: firstLine, character: firstLineChar } = getLineAndCharacterOfPosition(file, start); const { line: lastLine, character: lastLineChar } = getLineAndCharacterOfPosition(file, start + length); const lastLineInFile = getLineAndCharacterOfPosition(file, file.text.length).line; const hasMoreThanFiveLines = (lastLine - firstLine) >= 4; let gutterWidth = (lastLine + 1 + "").length; if (hasMoreThanFiveLines) { gutterWidth = Math.max(ellipsis.length, gutterWidth); } let context = ""; for (let i = firstLine; i <= lastLine; i++) { context += host.getNewLine(); // If the error spans over 5 lines, we'll only show the first 2 and last 2 lines, // so we'll skip ahead to the second-to-last line. 
if (hasMoreThanFiveLines && firstLine + 1 < i && i < lastLine - 1) { context += indent + formatColorAndReset(padLeft(ellipsis, gutterWidth), gutterStyleSequence) + gutterSeparator + host.getNewLine(); i = lastLine - 1; } const lineStart = getPositionOfLineAndCharacter(file, i, 0); const lineEnd = i < lastLineInFile ? getPositionOfLineAndCharacter(file, i + 1, 0) : file.text.length; let lineContent = file.text.slice(lineStart, lineEnd); lineContent = lineContent.replace(/\s+$/g, ""); // trim from end lineContent = lineContent.replace("\t", " "); // convert tabs to single spaces // Output the gutter and the actual contents of the line. context += indent + formatColorAndReset(padLeft(i + 1 + "", gutterWidth), gutterStyleSequence) + gutterSeparator; context += lineContent + host.getNewLine(); // Output the gutter and the error span for the line using tildes. context += indent + formatColorAndReset(padLeft("", gutterWidth), gutterStyleSequence) + gutterSeparator; context += squiggleColor; if (i === firstLine) { // If we're on the last line, then limit it to the last character of the last line. // Otherwise, we'll just squiggle the rest of the line, giving 'slice' no end position. const lastCharForLine = i === lastLine ? lastLineChar : undefined; context += lineContent.slice(0, firstLineChar).replace(/\S/g, " "); context += lineContent.slice(firstLineChar, lastCharForLine).replace(/./g, "~"); } else if (i === lastLine) { context += lineContent.slice(0, lastLineChar).replace(/./g, "~"); } else { // Squiggle the entire line. context += lineContent.replace(/./g, "~"); } context += resetEscapeSequence; } return context; } /* @internal */ export function formatLocation(file: SourceFile, start: number, host: FormatDiagnosticsHost, color = formatColorAndReset) { const { line: firstLine, character: firstLineChar } = getLineAndCharacterOfPosition(file, start); // TODO: GH#18217 const relativeFileName = host ? 
convertToRelativePath(file.fileName, host.getCurrentDirectory(), fileName => host.getCanonicalFileName(fileName)) : file.fileName; let output = ""; output += color(relativeFileName, ForegroundColorEscapeSequences.Cyan); output += ":"; output += color(`${firstLine + 1}`, ForegroundColorEscapeSequences.Yellow); output += ":"; output += color(`${firstLineChar + 1}`, ForegroundColorEscapeSequences.Yellow); return output; } export function formatDiagnosticsWithColorAndContext(diagnostics: ReadonlyArray<Diagnostic>, host: FormatDiagnosticsHost): string { let output = ""; for (const diagnostic of diagnostics) { if (diagnostic.file) { const { file, start } = diagnostic; output += formatLocation(file, start!, host); // TODO: GH#18217 output += " - "; } output += formatColorAndReset(diagnosticCategoryName(diagnostic), getCategoryFormat(diagnostic.category)); output += formatColorAndReset(` TS${diagnostic.code}: `, ForegroundColorEscapeSequences.Grey); output += flattenDiagnosticMessageText(diagnostic.messageText, host.getNewLine()); if (diagnostic.file) { output += host.getNewLine(); output += formatCodeSpan(diagnostic.file, diagnostic.start!, diagnostic.length!, "", getCategoryFormat(diagnostic.category), host); // TODO: GH#18217 if (diagnostic.relatedInformation) { output += host.getNewLine(); for (const { file, start, length, messageText } of diagnostic.relatedInformation) { if (file) { output += host.getNewLine(); output += halfIndent + formatLocation(file, start!, host); // TODO: GH#18217 output += formatCodeSpan(file, start!, length!, indent, ForegroundColorEscapeSequences.Cyan, host); // TODO: GH#18217 } output += host.getNewLine(); output += indent + flattenDiagnosticMessageText(messageText, host.getNewLine()); } } } output += host.getNewLine(); } return output; } export function flattenDiagnosticMessageText(messageText: string | DiagnosticMessageChain | undefined, newLine: string): string { if (isString(messageText)) { return messageText; } else { let 
diagnosticChain = messageText; let result = ""; let indent = 0; while (diagnosticChain) { if (indent) { result += newLine; for (let i = 0; i < indent; i++) { result += " "; } } result += diagnosticChain.messageText; indent++; diagnosticChain = diagnosticChain.next; } return result; } } function loadWithLocalCache<T>(names: string[], containingFile: string, loader: (name: string, containingFile: string) => T): T[] { if (names.length === 0) { return []; } const resolutions: T[] = []; const cache = createMap<T>(); for (const name of names) { let result: T; if (cache.has(name)) { result = cache.get(name)!; } else { cache.set(name, result = loader(name, containingFile)); } resolutions.push(result); } return resolutions; } interface DiagnosticCache<T extends Diagnostic> { perFile?: Map<T[]>; allDiagnostics?: Diagnostic[]; } /** * Determines if program structure is upto date or needs to be recreated */ /* @internal */ export function isProgramUptoDate( program: Program | undefined, rootFileNames: string[], newOptions: CompilerOptions, getSourceVersion: (path: Path) => string | undefined, fileExists: (fileName: string) => boolean, hasInvalidatedResolution: HasInvalidatedResolution, hasChangedAutomaticTypeDirectiveNames: boolean, projectReferences: ReadonlyArray<ProjectReference> | undefined ): boolean { // If we haven't created a program yet or have changed automatic type directives, then it is not up-to-date if (!program || hasChangedAutomaticTypeDirectiveNames) { return false; } // If number of files in the program do not match, it is not up-to-date if (program.getRootFileNames().length !== rootFileNames.length) { return false; } // If project references dont match if (!arrayIsEqualTo(program.getProjectReferences(), projectReferences, projectReferenceUptoDate)) { return false; } // If any file is not up-to-date, then the whole program is not up-to-date if (program.getSourceFiles().some(sourceFileNotUptoDate)) { return false; } // If any of the missing file paths are now 
created if (program.getMissingFilePaths().some(fileExists)) { return false; } const currentOptions = program.getCompilerOptions(); // If the compilation settings do no match, then the program is not up-to-date if (!compareDataObjects(currentOptions, newOptions)) { return false; } // If everything matches but the text of config file is changed, // error locations can change for program options, so update the program if (currentOptions.configFile && newOptions.configFile) { return currentOptions.configFile.text === newOptions.configFile.text; } return true; function sourceFileNotUptoDate(sourceFile: SourceFile) { return !sourceFileVersionUptoDate(sourceFile) || hasInvalidatedResolution(sourceFile.resolvedPath); } function sourceFileVersionUptoDate(sourceFile: SourceFile) { return sourceFile.version === getSourceVersion(sourceFile.resolvedPath); } function projectReferenceUptoDate(oldRef: ProjectReference, newRef: ProjectReference, index: number) { if (!projectReferenceIsEqualTo(oldRef, newRef)) { return false; } const oldResolvedRef = program!.getResolvedProjectReferences()![index]; if (oldResolvedRef) { // If sourceFile for the oldResolvedRef existed, check the version for uptodate return sourceFileVersionUptoDate(oldResolvedRef.sourceFile); } // In old program, not able to resolve project reference path, // so if config file doesnt exist, it is uptodate. return !fileExists(resolveProjectReferencePath(oldRef)); } } export function getConfigFileParsingDiagnostics(configFileParseResult: ParsedCommandLine): ReadonlyArray<Diagnostic> { return configFileParseResult.options.configFile ? 
[...configFileParseResult.options.configFile.parseDiagnostics, ...configFileParseResult.errors] : configFileParseResult.errors; } /** * Determine if source file needs to be re-created even if its text hasn't changed */ function shouldProgramCreateNewSourceFiles(program: Program | undefined, newOptions: CompilerOptions): boolean { if (!program) return false; // If any compiler options change, we can't reuse old source file even if version match // The change in options like these could result in change in syntax tree or `sourceFile.bindDiagnostics`. const oldOptions = program.getCompilerOptions(); return !!sourceFileAffectingCompilerOptions.some(option => !isJsonEqual(getCompilerOptionValue(oldOptions, option), getCompilerOptionValue(newOptions, option))); } function createCreateProgramOptions(rootNames: ReadonlyArray<string>, options: CompilerOptions, host?: CompilerHost, oldProgram?: Program, configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>): CreateProgramOptions { return { rootNames, options, host, oldProgram, configFileParsingDiagnostics }; } /** * Create a new 'Program' instance. A Program is an immutable collection of 'SourceFile's and a 'CompilerOptions' * that represent a compilation unit. * * Creating a program proceeds from a set of root files, expanding the set of inputs by following imports and * triple-slash-reference-path directives transitively. '@types' and triple-slash-reference-types are also pulled in. * * @param createProgramOptions - The options for creating a program. * @returns A 'Program' object. */ export function createProgram(createProgramOptions: CreateProgramOptions): Program; /** * Create a new 'Program' instance. A Program is an immutable collection of 'SourceFile's and a 'CompilerOptions' * that represent a compilation unit. * * Creating a program proceeds from a set of root files, expanding the set of inputs by following imports and * triple-slash-reference-path directives transitively. 
'@types' and triple-slash-reference-types are also pulled in.
 *
 * @param rootNames - A set of root files.
 * @param options - The compiler options which should be used.
 * @param host - The host interacts with the underlying file system.
 * @param oldProgram - Reuses an old program structure.
 * @param configFileParsingDiagnostics - error during config file parsing
 * @returns A 'Program' object.
 */
export function createProgram(rootNames: ReadonlyArray<string>, options: CompilerOptions, host?: CompilerHost, oldProgram?: Program, configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>): Program;
export function createProgram(rootNamesOrOptions: ReadonlyArray<string> | CreateProgramOptions, _options?: CompilerOptions, _host?: CompilerHost, _oldProgram?: Program, _configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>): Program {
    // Normalize the two overload shapes into a single CreateProgramOptions bag before doing any work.
    const createProgramOptions = isArray(rootNamesOrOptions) ? createCreateProgramOptions(rootNamesOrOptions, _options!, _host, _oldProgram, _configFileParsingDiagnostics) : rootNamesOrOptions; // TODO: GH#18217
    const { rootNames, options, configFileParsingDiagnostics, projectReferences } = createProgramOptions;
    let { oldProgram } = createProgramOptions;

    // Mutable program-construction state. All nested functions below close over these variables.
    let program: Program;
    let processingDefaultLibFiles: SourceFile[] | undefined;
    let processingOtherFiles: SourceFile[] | undefined;
    let files: SourceFile[];
    let commonSourceDirectory: string;
    let diagnosticsProducingTypeChecker: TypeChecker;
    let noDiagnosticsTypeChecker: TypeChecker;
    let classifiableNames: UnderscoreEscapedMap<true>;
    let modifiedFilePaths: Path[] | undefined;
    const cachedSemanticDiagnosticsForFile: DiagnosticCache<Diagnostic> = {};
    const cachedDeclarationDiagnosticsForFile: DiagnosticCache<DiagnosticWithLocation> = {};
    let resolvedTypeReferenceDirectives = createMap<ResolvedTypeReferenceDirective>();
    let fileProcessingDiagnostics = createDiagnosticCollection();

    // The below settings are to track if a .js file should be added to the program if loaded via searching under node_modules.
    // This works as imported modules are discovered recursively in a depth first manner, specifically:
    // - For each root file, findSourceFile is called.
    // - This calls processImportedModules for each module imported in the source file.
    // - This calls resolveModuleNames, and then calls findSourceFile for each resolved module.
    // As all these operations happen - and are nested - within the createProgram call, they close over the below variables.
    // The current resolution depth is tracked by incrementing/decrementing as the depth first search progresses.
    const maxNodeModuleJsDepth = typeof options.maxNodeModuleJsDepth === "number" ? options.maxNodeModuleJsDepth : 0;
    let currentNodeModulesDepth = 0;

    // If a module has some of its imports skipped due to being at the depth limit under node_modules, then track
    // this, as it may be imported at a shallower depth later, and then it will need its skipped imports processed.
    const modulesWithElidedImports = createMap<boolean>();

    // Track source files that are source files found by searching under node_modules, as these shouldn't be compiled.
    const sourceFilesFoundSearchingNodeModules = createMap<boolean>();

    performance.mark("beforeProgram");

    // Fall back to a default host when the caller did not supply one.
    const host = createProgramOptions.host || createCompilerHost(options);
    const configParsingHost = parseConfigHostFromCompilerHost(host);

    let skipDefaultLib = options.noLib;
    const getDefaultLibraryFileName = memoize(() => host.getDefaultLibFileName(options));
    const defaultLibraryPath = host.getDefaultLibLocation ? host.getDefaultLibLocation() : getDirectoryPath(getDefaultLibraryFileName());
    const programDiagnostics = createDiagnosticCollection();
    const currentDirectory = host.getCurrentDirectory();
    const supportedExtensions = getSupportedExtensions(options);
    const supportedExtensionsWithJsonIfResolveJsonModule = options.resolveJsonModule ? [...supportedExtensions, Extension.Json] : undefined;

    // Map storing if there is emit blocking diagnostics for given input
    const hasEmitBlockingDiagnostics = createMap<boolean>();
    let _compilerOptionsObjectLiteralSyntax: ObjectLiteralExpression | null | undefined;
    let _referencesArrayLiteralSyntax: ArrayLiteralExpression | null | undefined;

    // Module resolution: prefer the host's resolver when provided, otherwise use the built-in
    // resolver backed by a per-program ModuleResolutionCache.
    let moduleResolutionCache: ModuleResolutionCache | undefined;
    let resolveModuleNamesWorker: (moduleNames: string[], containingFile: string, reusedNames?: string[]) => ResolvedModuleFull[];
    const hasInvalidatedResolution = host.hasInvalidatedResolution || returnFalse;
    if (host.resolveModuleNames) {
        resolveModuleNamesWorker = (moduleNames, containingFile, reusedNames) => host.resolveModuleNames!(Debug.assertEachDefined(moduleNames), containingFile, reusedNames).map(resolved => {
            // An older host may have omitted extension, in which case we should infer it from the file extension of resolvedFileName.
            if (!resolved || (resolved as ResolvedModuleFull).extension !== undefined) {
                return resolved as ResolvedModuleFull;
            }
            const withExtension = clone(resolved) as ResolvedModuleFull;
            withExtension.extension = extensionFromPath(resolved.resolvedFileName);
            return withExtension;
        });
    }
    else {
        moduleResolutionCache = createModuleResolutionCache(currentDirectory, x => host.getCanonicalFileName(x));
        const loader = (moduleName: string, containingFile: string) => resolveModuleName(moduleName, containingFile, options, host, moduleResolutionCache).resolvedModule!; // TODO: GH#18217
        resolveModuleNamesWorker = (moduleNames, containingFile) => loadWithLocalCache<ResolvedModuleFull>(Debug.assertEachDefined(moduleNames), containingFile, loader);
    }

    // Type-reference-directive resolution follows the same host-first pattern as module resolution.
    let resolveTypeReferenceDirectiveNamesWorker: (typeDirectiveNames: string[], containingFile: string) => ResolvedTypeReferenceDirective[];
    if (host.resolveTypeReferenceDirectives) {
        resolveTypeReferenceDirectiveNamesWorker = (typeDirectiveNames, containingFile) => host.resolveTypeReferenceDirectives!(Debug.assertEachDefined(typeDirectiveNames), containingFile);
    }
    else {
        const loader = (typesRef: string, containingFile: string) => resolveTypeReferenceDirective(typesRef, containingFile, options, host).resolvedTypeReferenceDirective!; // TODO: GH#18217
        resolveTypeReferenceDirectiveNamesWorker = (typeReferenceDirectiveNames, containingFile) => loadWithLocalCache<ResolvedTypeReferenceDirective>(Debug.assertEachDefined(typeReferenceDirectiveNames), containingFile, loader);
    }

    // Map from a stringified PackageId to the source file with that id.
    // Only one source file may have a given packageId. Others become redirects (see createRedirectSourceFile).
    // `packageIdToSourceFile` is only used while building the program, while `sourceFileToPackageName` and `isSourceFileTargetOfRedirect` are kept around.
    const packageIdToSourceFile = createMap<SourceFile>();
    // Maps from a SourceFile's `.path` to the name of the package it was imported with.
    let sourceFileToPackageName = createMap<string>();
    // Key is a file name. Value is the (non-empty, or undefined) list of files that redirect to it.
    let redirectTargetsMap = createMultiMap<string>();

    // Canonical-path -> SourceFile map; an entry with an undefined value records a path that was
    // requested but could not be loaded (used below to compute missingFilePaths).
    const filesByName = createMap<SourceFile | undefined>();
    let missingFilePaths: ReadonlyArray<Path> | undefined;
    // stores 'filename -> file association' ignoring case
    // used to track cases when two file names differ only in casing
    const filesByNameIgnoreCase = host.useCaseSensitiveFileNames() ? createMap<SourceFile>() : undefined;

    // A parallel array to projectReferences storing the results of reading in the referenced tsconfig files
    let resolvedProjectReferences: (ResolvedProjectReference | undefined)[] | undefined = projectReferences ? [] : undefined;
    let projectReferenceRedirects: ParsedCommandLine[] | undefined;

    const shouldCreateNewSourceFile = shouldProgramCreateNewSourceFiles(oldProgram, options);
    // Try to reuse the old program's structure; only walk the file graph from scratch when that fails.
    const structuralIsReused = tryReuseStructureFromOldProgram();
    if (structuralIsReused !== StructureIsReused.Completely) {
        processingDefaultLibFiles = [];
        processingOtherFiles = [];

        if (projectReferences) {
            for (const ref of projectReferences) {
                const parsedRef = parseProjectReferenceConfigFile(ref);
                resolvedProjectReferences!.push(parsedRef);
                if (parsedRef) {
                    // A referenced project with an outFile contributes its bundled .d.ts instead of its sources.
                    const out = parsedRef.commandLine.options.outFile || parsedRef.commandLine.options.out;
                    if (out) {
                        const dtsOutfile = changeExtension(out, ".d.ts");
                        processSourceFile(dtsOutfile, /*isDefaultLib*/ false, /*ignoreNoDefaultLib*/ false, /*packageId*/ undefined);
                    }
                    addProjectReferenceRedirects(parsedRef.commandLine);
                }
            }
        }

        forEach(rootNames, name => processRootFile(name, /*isDefaultLib*/ false, /*ignoreNoDefaultLib*/ false));

        // load type declarations specified via 'types' argument or implicitly from types/ and node_modules/@types folders
        const typeReferences: string[] = getAutomaticTypeDirectiveNames(options, host);

        if (typeReferences.length) {
            // This containingFilename needs to match with the one used in managed-side
            const containingDirectory = options.configFilePath ? getDirectoryPath(options.configFilePath) : host.getCurrentDirectory();
            const containingFilename = combinePaths(containingDirectory, "__inferred type names__.ts");
            const resolutions = resolveTypeReferenceDirectiveNamesWorker(typeReferences, containingFilename);
            for (let i = 0; i < typeReferences.length; i++) {
                processTypeReferenceDirective(typeReferences[i], resolutions[i]);
            }
        }

        // Do not process the default library if:
        //  - The '--noLib' flag is used.
        //  - A 'no-default-lib' reference comment is encountered in
        //      processing the root files.
if (!skipDefaultLib) {
            // If '--lib' is not specified, include default library file according to '--target'
            // otherwise, using options specified in '--lib' instead of '--target' default library file
            const defaultLibraryFileName = getDefaultLibraryFileName();
            if (!options.lib && defaultLibraryFileName) {
                processRootFile(defaultLibraryFileName, /*isDefaultLib*/ true, /*ignoreNoDefaultLib*/ false);
            }
            else {
                forEach(options.lib, libFileName => {
                    processRootFile(combinePaths(defaultLibraryPath, libFileName), /*isDefaultLib*/ true, /*ignoreNoDefaultLib*/ false);
                });
            }
        }

        // A path is "missing" when it was requested (has a filesByName key) but no source file was produced.
        missingFilePaths = arrayFrom(filesByName.keys(), p => <Path>p).filter(p => !filesByName.get(p));

        // Default-lib files sort first (in lib priority order), followed by all other files in discovery order.
        files = stableSort(processingDefaultLibFiles, compareDefaultLibFiles).concat(processingOtherFiles);
        processingDefaultLibFiles = undefined;
        processingOtherFiles = undefined;
    }

    Debug.assert(!!missingFilePaths);

    // Release any files we have acquired in the old program but are
    // not part of the new program.
    if (oldProgram && host.onReleaseOldSourceFile) {
        const oldSourceFiles = oldProgram.getSourceFiles();
        for (const oldSourceFile of oldSourceFiles) {
            if (!getSourceFile(oldSourceFile.path) || shouldCreateNewSourceFile) {
                host.onReleaseOldSourceFile(oldSourceFile, oldProgram.getCompilerOptions());
            }
        }
    }

    // unconditionally set oldProgram to undefined to prevent it from being captured in closure
    oldProgram = undefined;

    // Assemble the public Program object; most members are the closures defined below.
    program = {
        getRootFileNames: () => rootNames,
        getSourceFile,
        getSourceFileByPath,
        getSourceFiles: () => files,
        getMissingFilePaths: () => missingFilePaths!, // TODO: GH#18217
        getCompilerOptions: () => options,
        getSyntacticDiagnostics,
        getOptionsDiagnostics,
        getGlobalDiagnostics,
        getSemanticDiagnostics,
        getSuggestionDiagnostics,
        getDeclarationDiagnostics,
        getTypeChecker,
        getClassifiableNames,
        getDiagnosticsProducingTypeChecker,
        getCommonSourceDirectory,
        emit,
        getCurrentDirectory: () => currentDirectory,
        getNodeCount: () => getDiagnosticsProducingTypeChecker().getNodeCount(),
        getIdentifierCount: () => getDiagnosticsProducingTypeChecker().getIdentifierCount(),
        getSymbolCount: () => getDiagnosticsProducingTypeChecker().getSymbolCount(),
        getTypeCount: () => getDiagnosticsProducingTypeChecker().getTypeCount(),
        getFileProcessingDiagnostics: () => fileProcessingDiagnostics,
        getResolvedTypeReferenceDirectives: () => resolvedTypeReferenceDirectives,
        isSourceFileFromExternalLibrary,
        isSourceFileDefaultLibrary,
        dropDiagnosticsProducingTypeChecker,
        getSourceFileFromReference,
        getLibFileFromReference,
        sourceFileToPackageName,
        redirectTargetsMap,
        isEmittedFile,
        getConfigFileParsingDiagnostics,
        getResolvedModuleWithFailedLookupLocationsFromCache,
        getProjectReferences,
        getResolvedProjectReferences,
        getProjectReferenceRedirect
    };

    verifyCompilerOptions();
    performance.mark("afterProgram");
    performance.measure("Program", "beforeProgram", "afterProgram");

    return program;

    // Orders default-lib files by priority (see getDefaultLibFilePriority) for the stable sort above.
    function compareDefaultLibFiles(a: SourceFile, b: SourceFile) {
        return compareValues(getDefaultLibFilePriority(a), getDefaultLibFilePriority(b));
    }

    // Priority 0 for lib.d.ts / lib.es6.d.ts, then position in the `libs` list + 1;
    // files outside the default library path sort last (libs.length + 2).
    function getDefaultLibFilePriority(a: SourceFile) {
        if (containsPath(defaultLibraryPath, a.fileName, /*ignoreCase*/ false)) {
            const basename = getBaseFileName(a.fileName);
            if (basename === "lib.d.ts" || basename === "lib.es6.d.ts") return 0;
            const name = removeSuffix(removePrefix(basename, "lib."), ".d.ts");
            const index = libs.indexOf(name);
            if (index !== -1) return index + 1;
        }
        return libs.length + 2;
    }

    // Returns undefined when the host supplied its own resolveModuleNames (no built-in cache exists then).
    function getResolvedModuleWithFailedLookupLocationsFromCache(moduleName: string, containingFile: string): ResolvedModuleWithFailedLookupLocations | undefined {
        return moduleResolutionCache && resolveModuleNameFromCache(moduleName, containingFile, moduleResolutionCache);
    }

    function toPath(fileName: string): Path {
        return ts.toPath(fileName, currentDirectory, getCanonicalFileName);
    }

    // Lazily computes (and caches in the closure) the common source directory used for emit-path layout.
    function getCommonSourceDirectory() {
        if (commonSourceDirectory === undefined) {
            const emittedFiles = filter(files, file => sourceFileMayBeEmitted(file, options, isSourceFileFromExternalLibrary));
            if (options.rootDir && checkSourceFilesBelongToPath(emittedFiles, options.rootDir)) {
                // If a rootDir is specified use it as the commonSourceDirectory
                commonSourceDirectory = getNormalizedAbsolutePath(options.rootDir, currentDirectory);
            }
            else if (options.composite && options.configFilePath) {
                // Project compilations never infer their root from the input source paths
                commonSourceDirectory = getDirectoryPath(normalizeSlashes(options.configFilePath));
                checkSourceFilesBelongToPath(emittedFiles, commonSourceDirectory);
            }
            else {
                commonSourceDirectory = computeCommonSourceDirectory(emittedFiles);
            }
            if (commonSourceDirectory && commonSourceDirectory[commonSourceDirectory.length - 1] !== directorySeparator) {
                // Make sure directory path ends with directory separator so this string can directly
                // used to replace with "" to get the relative path of the source file and the relative path doesn't
                // start with / making it rooted path
                commonSourceDirectory += directorySeparator;
            }
        }
        return commonSourceDirectory;
    }

    // Lazily aggregates the classifiable names of every file; forces binding first via getTypeChecker().
    function getClassifiableNames() {
        if (!classifiableNames) {
            // Initialize a checker so that all our files are bound.
            getTypeChecker();
            classifiableNames = createUnderscoreEscapedMap<true>();

            for (const sourceFile of files) {
                copyEntries(sourceFile.classifiableNames!, classifiableNames);
            }
        }

        return classifiableNames;
    }

    // Snapshot of the old program used when deciding whether old module resolutions can be reused.
    interface OldProgramState {
        program: Program | undefined;
        oldSourceFile: SourceFile | undefined;
        /** The collection of paths modified *since* the old program. */
        modifiedFilePaths: Path[] | undefined;
    }

    // Resolves `moduleNames` for `containingFile`, reusing resolutions already computed for `file`
    // or recorded in the old program wherever that is provably safe; otherwise delegates to
    // resolveModuleNamesWorker for the remaining ("unknown") names.
    function resolveModuleNamesReusingOldState(moduleNames: string[], containingFile: string, file: SourceFile, oldProgramState: OldProgramState) {
        if (structuralIsReused === StructureIsReused.Not && !file.ambientModuleNames.length) {
            // If the old program state does not permit reusing resolutions and `file` does not contain locally defined ambient modules,
            // the best we can do is fallback to the default logic.
return resolveModuleNamesWorker(moduleNames, containingFile);
        }

        const oldSourceFile = oldProgramState.program && oldProgramState.program.getSourceFile(containingFile);
        if (oldSourceFile !== file && file.resolvedModules) {
            // `file` was created for the new program.
            //
            // We only set `file.resolvedModules` via work from the current function,
            // so it is defined iff we already called the current function on `file`.
            // That call happened no later than the creation of the `file` object,
            // which per above occurred during the current program creation.
            // Since we assume the filesystem does not change during program creation,
            // it is safe to reuse resolutions from the earlier call.
            const result: ResolvedModuleFull[] = [];
            for (const moduleName of moduleNames) {
                const resolvedModule = file.resolvedModules.get(moduleName)!;
                result.push(resolvedModule);
            }
            return result;
        }
        // At this point, we know at least one of the following hold:
        // - file has local declarations for ambient modules
        // - old program state is available
        // With this information, we can infer some module resolutions without performing resolution.

        /** An ordered list of module names for which we cannot recover the resolution. */
        let unknownModuleNames: string[] | undefined;
        /**
         * The indexing of elements in this list matches that of `moduleNames`.
         *
         * Before combining results, result[i] is in one of the following states:
         * * undefined: needs to be recomputed,
         * * predictedToResolveToAmbientModuleMarker: known to be an ambient module.
         *   Needs to be reset to undefined before returning,
         * * ResolvedModuleFull instance: can be reused.
         */
        let result: ResolvedModuleFull[] | undefined;
        let reusedNames: string[] | undefined;
        /** A transient placeholder used to mark predicted resolution in the result list. */
        const predictedToResolveToAmbientModuleMarker: ResolvedModuleFull = <any>{};

        for (let i = 0; i < moduleNames.length; i++) {
            const moduleName = moduleNames[i];
            // If the source file is unchanged and doesn't have invalidated resolution, reuse the module resolutions
            if (file === oldSourceFile && !hasInvalidatedResolution(oldSourceFile.path)) {
                const oldResolvedModule = oldSourceFile && oldSourceFile.resolvedModules!.get(moduleName);
                if (oldResolvedModule) {
                    if (isTraceEnabled(options, host)) {
                        trace(host, Diagnostics.Reusing_resolution_of_module_0_to_file_1_from_old_program, moduleName, containingFile);
                    }
                    (result || (result = new Array(moduleNames.length)))[i] = oldResolvedModule;
                    (reusedNames || (reusedNames = [])).push(moduleName);
                    continue;
                }
            }
            // We know moduleName resolves to an ambient module provided that moduleName:
            // - is in the list of ambient modules locally declared in the current source file.
            // - resolved to an ambient module in the old program whose declaration is in an unmodified file
            //   (so the same module declaration will land in the new program)
            let resolvesToAmbientModuleInNonModifiedFile = false;
            if (contains(file.ambientModuleNames, moduleName)) {
                resolvesToAmbientModuleInNonModifiedFile = true;
                if (isTraceEnabled(options, host)) {
                    trace(host, Diagnostics.Module_0_was_resolved_as_locally_declared_ambient_module_in_file_1, moduleName, containingFile);
                }
            }
            else {
                resolvesToAmbientModuleInNonModifiedFile = moduleNameResolvesToAmbientModuleInNonModifiedFile(moduleName, oldProgramState);
            }

            if (resolvesToAmbientModuleInNonModifiedFile) {
                (result || (result = new Array(moduleNames.length)))[i] = predictedToResolveToAmbientModuleMarker;
            }
            else {
                // Resolution failed in the old program, or resolved to an ambient module for which we can't reuse the result.
                (unknownModuleNames || (unknownModuleNames = [])).push(moduleName);
            }
        }

        // Only the names we could not recover are actually re-resolved.
        const resolutions = unknownModuleNames && unknownModuleNames.length ? resolveModuleNamesWorker(unknownModuleNames, containingFile, reusedNames) : emptyArray;

        // Combine results of resolutions and predicted results
        if (!result) {
            // There were no unresolved/ambient resolutions.
            Debug.assert(resolutions.length === moduleNames.length);
            return resolutions;
        }

        // Merge the freshly-computed resolutions (indexed by j) into the gaps of `result` (indexed by i).
        let j = 0;
        for (let i = 0; i < result.length; i++) {
            if (result[i]) {
                // `result[i]` is either a `ResolvedModuleFull` or a marker.
                // If it is the former, we can leave it as is.
                if (result[i] === predictedToResolveToAmbientModuleMarker) {
                    result[i] = undefined!; // TODO: GH#18217
                }
            }
            else {
                result[i] = resolutions[j];
                j++;
            }
        }
        Debug.assert(j === resolutions.length);

        return result;

        // If we change our policy of rechecking failed lookups on each program create,
        // we should adjust the value returned here.
        function moduleNameResolvesToAmbientModuleInNonModifiedFile(moduleName: string, oldProgramState: OldProgramState): boolean {
            if (!oldProgramState.program) {
                return false;
            }
            const resolutionToFile = getResolvedModule(oldProgramState.oldSourceFile!, moduleName); // TODO: GH#18217
            const resolvedFile = resolutionToFile && oldProgramState.program.getSourceFile(resolutionToFile.resolvedFileName);
            if (resolutionToFile && resolvedFile && !resolvedFile.externalModuleIndicator) {
                // In the old program, we resolved to an ambient module that was in the same
                // place as we expected to find an actual module file.
                // We actually need to return 'false' here even though this seems like a 'true' case
                // because the normal module resolution algorithm will find this anyway.
                return false;
            }

            // at least one of declarations should come from non-modified source file
            const firstUnmodifiedFile = oldProgramState.program.getSourceFiles().find(
                f => !contains(oldProgramState.modifiedFilePaths, f.path) && contains(f.ambientModuleNames, moduleName)
            );

            if (!firstUnmodifiedFile) {
                return false;
            }

            if (isTraceEnabled(options, host)) {
                trace(host, Diagnostics.Module_0_was_resolved_as_ambient_module_declared_in_1_since_this_file_was_not_modified, moduleName, firstUnmodifiedFile.fileName);
            }
            return true;
        }
    }

    // Decides how much of the old program's structure can be reused for this program:
    // Not (rebuild from scratch), SafeModules, or Completely. Also records the decision
    // on oldProgram.structureIsReused as a side effect.
    function tryReuseStructureFromOldProgram(): StructureIsReused {
        if (!oldProgram) {
            return StructureIsReused.Not;
        }

        // check properties that can affect structure of the program or module resolution strategy
        // if any of these properties has changed - structure cannot be reused
        const oldOptions = oldProgram.getCompilerOptions();
        if (changesAffectModuleResolution(oldOptions, options)) {
            return oldProgram.structureIsReused = StructureIsReused.Not;
        }

        Debug.assert(!(oldProgram.structureIsReused! & (StructureIsReused.Completely | StructureIsReused.SafeModules)));

        // there is an old program, check if we can reuse its structure
        const oldRootNames = oldProgram.getRootFileNames();
        if (!arrayIsEqualTo(oldRootNames, rootNames)) {
            return oldProgram.structureIsReused = StructureIsReused.Not;
        }

        if (!arrayIsEqualTo(options.types, oldOptions.types)) {
            return oldProgram.structureIsReused = StructureIsReused.Not;
        }

        // Check if any referenced project tsconfig files are different
        // If array of references is changed, we can't reuse the old program
        const oldProjectReferences = oldProgram.getProjectReferences();
        if (!arrayIsEqualTo(oldProjectReferences!, projectReferences, projectReferenceIsEqualTo)) {
            return oldProgram.structureIsReused = StructureIsReused.Not;
        }

        // Check the json files for the project references
        const oldRefs = oldProgram.getResolvedProjectReferences();
        if (projectReferences) {
            // Resolved project referenced should be array if projectReferences provided are array
            Debug.assert(!!oldRefs);
            for (let i = 0; i < projectReferences.length; i++) {
                const oldRef = oldRefs![i];
                const newRef = parseProjectReferenceConfigFile(projectReferences[i]);
                if (oldRef) {
                    if (!newRef || newRef.sourceFile !== oldRef.sourceFile) {
                        // Resolved project reference has gone missing or changed
                        return oldProgram.structureIsReused = StructureIsReused.Not;
                    }
                }
                else {
                    // A previously-unresolved reference may be resolved now
                    if (newRef !== undefined) {
                        return oldProgram.structureIsReused = StructureIsReused.Not;
                    }
                }
            }
        }
        else {
            // Resolved project referenced should be undefined if projectReferences is undefined
            Debug.assert(!oldRefs);
        }

        // check if program source files has changed in the way that can affect structure of the program
        const newSourceFiles: SourceFile[] = [];
        const filePaths: Path[] = [];
        const modifiedSourceFiles: { oldFile: SourceFile, newFile: SourceFile }[] = [];
        // Start optimistic; the checks below may downgrade this to SafeModules or Not.
        oldProgram.structureIsReused = StructureIsReused.Completely;

        // If the missing file paths are now present, it can change the program structure,
        // and hence can't reuse the structure.
        // This is same as how we don't reuse the structure if one of the file from old program is now missing
        if (oldProgram.getMissingFilePaths().some(missingFilePath => host.fileExists(missingFilePath))) {
            return oldProgram.structureIsReused = StructureIsReused.Not;
        }

        const oldSourceFiles = oldProgram.getSourceFiles();
        const enum SeenPackageName { Exists, Modified }
        const seenPackageNames = createMap<SeenPackageName>();

        for (const oldSourceFile of oldSourceFiles) {
            let newSourceFile = host.getSourceFileByPath
                ? host.getSourceFileByPath(oldSourceFile.fileName, oldSourceFile.resolvedPath, options.target!, /*onError*/ undefined, shouldCreateNewSourceFile)
                : host.getSourceFile(oldSourceFile.fileName, options.target!, /*onError*/ undefined, shouldCreateNewSourceFile); // TODO: GH#18217

            if (!newSourceFile) {
                return oldProgram.structureIsReused = StructureIsReused.Not;
            }

            Debug.assert(!newSourceFile.redirectInfo, "Host should not return a redirect source file from `getSourceFile`");

            let fileChanged: boolean;
            if (oldSourceFile.redirectInfo) {
                // We got `newSourceFile` by path, so it is actually for the unredirected file.
                // This lets us know if the unredirected file has changed. If it has we should break the redirect.
                if (newSourceFile !== oldSourceFile.redirectInfo.unredirected) {
                    // Underlying file has changed. Might not redirect anymore. Must rebuild program.
                    return oldProgram.structureIsReused = StructureIsReused.Not;
                }
                fileChanged = false;
                newSourceFile = oldSourceFile; // Use the redirect.
            }
            else if (oldProgram.redirectTargetsMap.has(oldSourceFile.path)) {
                // If a redirected-to source file changes, the redirect may be broken.
if (newSourceFile !== oldSourceFile) {
                    return oldProgram.structureIsReused = StructureIsReused.Not;
                }
                fileChanged = false;
            }
            else {
                fileChanged = newSourceFile !== oldSourceFile;
            }

            // Since the project references haven't changed, it's right to set originalFileName and resolvedPath here
            newSourceFile.path = oldSourceFile.path;
            newSourceFile.originalFileName = oldSourceFile.originalFileName;
            newSourceFile.resolvedPath = oldSourceFile.resolvedPath;
            newSourceFile.fileName = oldSourceFile.fileName;
            filePaths.push(newSourceFile.path);

            const packageName = oldProgram.sourceFileToPackageName.get(oldSourceFile.path);
            if (packageName !== undefined) {
                // If there are 2 different source files for the same package name and at least one of them changes,
                // they might become redirects. So we must rebuild the program.
                const prevKind = seenPackageNames.get(packageName);
                const newKind = fileChanged ? SeenPackageName.Modified : SeenPackageName.Exists;
                if ((prevKind !== undefined && newKind === SeenPackageName.Modified) || prevKind === SeenPackageName.Modified) {
                    return oldProgram.structureIsReused = StructureIsReused.Not;
                }
                seenPackageNames.set(packageName, newKind);
            }

            if (fileChanged) {
                // The `newSourceFile` object was created for the new program.

                if (!arrayIsEqualTo(oldSourceFile.libReferenceDirectives, newSourceFile.libReferenceDirectives, fileReferenceIsEqualTo)) {
                    // 'lib' references has changed. Matches behavior in changesAffectModuleResolution
                    return oldProgram.structureIsReused = StructureIsReused.Not;
                }

                if (oldSourceFile.hasNoDefaultLib !== newSourceFile.hasNoDefaultLib) {
                    // value of no-default-lib has changed
                    // this will affect if default library is injected into the list of files
                    oldProgram.structureIsReused = StructureIsReused.SafeModules;
                }

                // check tripleslash references
                if (!arrayIsEqualTo(oldSourceFile.referencedFiles, newSourceFile.referencedFiles, fileReferenceIsEqualTo)) {
                    // tripleslash references has changed
                    oldProgram.structureIsReused = StructureIsReused.SafeModules;
                }

                // check imports and module augmentations
                collectExternalModuleReferences(newSourceFile);
                if (!arrayIsEqualTo(oldSourceFile.imports, newSourceFile.imports, moduleNameIsEqualTo)) {
                    // imports has changed
                    oldProgram.structureIsReused = StructureIsReused.SafeModules;
                }
                if (!arrayIsEqualTo(oldSourceFile.moduleAugmentations, newSourceFile.moduleAugmentations, moduleNameIsEqualTo)) {
                    // moduleAugmentations has changed
                    oldProgram.structureIsReused = StructureIsReused.SafeModules;
                }
                if ((oldSourceFile.flags & NodeFlags.PermanentlySetIncrementalFlags) !== (newSourceFile.flags & NodeFlags.PermanentlySetIncrementalFlags)) {
                    // dynamicImport has changed
                    oldProgram.structureIsReused = StructureIsReused.SafeModules;
                }

                if (!arrayIsEqualTo(oldSourceFile.typeReferenceDirectives, newSourceFile.typeReferenceDirectives, fileReferenceIsEqualTo)) {
                    // 'types' references has changed
                    oldProgram.structureIsReused = StructureIsReused.SafeModules;
                }

                // tentatively approve the file
                modifiedSourceFiles.push({ oldFile: oldSourceFile, newFile: newSourceFile });
            }
            else if (hasInvalidatedResolution(oldSourceFile.path)) {
                // 'module/types' references could have changed
                oldProgram.structureIsReused = StructureIsReused.SafeModules;

                // add file to the modified list so that we will resolve it later
                modifiedSourceFiles.push({ oldFile: oldSourceFile, newFile: newSourceFile });
            }

            // if file has passed all checks it should be safe to reuse it
            newSourceFiles.push(newSourceFile);
        }

        if (oldProgram.structureIsReused !== StructureIsReused.Completely) {
            return oldProgram.structureIsReused;
        }

        modifiedFilePaths = modifiedSourceFiles.map(f => f.newFile.path);
        // try to verify results of module resolution
        for (const { oldFile: oldSourceFile, newFile: newSourceFile } of modifiedSourceFiles) {
            const newSourceFilePath = getNormalizedAbsolutePath(newSourceFile.originalFileName, currentDirectory);
            if (resolveModuleNamesWorker) {
                const moduleNames = getModuleNames(newSourceFile);
                const oldProgramState: OldProgramState = { program: oldProgram, oldSourceFile, modifiedFilePaths };
                const resolutions = resolveModuleNamesReusingOldState(moduleNames, newSourceFilePath, newSourceFile, oldProgramState);
                // ensure that module resolution results are still correct
                const resolutionsChanged = hasChangesInResolutions(moduleNames, resolutions, oldSourceFile.resolvedModules, moduleResolutionIsEqualTo);
                if (resolutionsChanged) {
                    oldProgram.structureIsReused = StructureIsReused.SafeModules;
                    newSourceFile.resolvedModules = zipToMap(moduleNames, resolutions);
                }
                else {
                    newSourceFile.resolvedModules = oldSourceFile.resolvedModules;
                }
            }
            if (resolveTypeReferenceDirectiveNamesWorker) {
                // We lower-case all type references because npm automatically lowercases all packages. See GH#9824.
const typesReferenceDirectives = map(newSourceFile.typeReferenceDirectives, ref => ref.fileName.toLocaleLowerCase()); const resolutions = resolveTypeReferenceDirectiveNamesWorker(typesReferenceDirectives, newSourceFilePath); // ensure that types resolutions are still correct const resolutionsChanged = hasChangesInResolutions(typesReferenceDirectives, resolutions, oldSourceFile.resolvedTypeReferenceDirectiveNames, typeDirectiveIsEqualTo); if (resolutionsChanged) { oldProgram.structureIsReused = StructureIsReused.SafeModules; newSourceFile.resolvedTypeReferenceDirectiveNames = zipToMap(typesReferenceDirectives, resolutions); } else { newSourceFile.resolvedTypeReferenceDirectiveNames = oldSourceFile.resolvedTypeReferenceDirectiveNames; } } } if (oldProgram.structureIsReused !== StructureIsReused.Completely) { return oldProgram.structureIsReused; } if (host.hasChangedAutomaticTypeDirectiveNames) { return oldProgram.structureIsReused = StructureIsReused.SafeModules; } missingFilePaths = oldProgram.getMissingFilePaths(); // update fileName -> file mapping for (let i = 0; i < newSourceFiles.length; i++) { filesByName.set(filePaths[i], newSourceFiles[i]); // Set the file as found during node modules search if it was found that way in old progra, if (oldProgram.isSourceFileFromExternalLibrary(oldProgram.getSourceFileByPath(filePaths[i])!)) { sourceFilesFoundSearchingNodeModules.set(filePaths[i], true); } } files = newSourceFiles; fileProcessingDiagnostics = oldProgram.getFileProcessingDiagnostics(); for (const modifiedFile of modifiedSourceFiles) { fileProcessingDiagnostics.reattachFileDiagnostics(modifiedFile.newFile); } resolvedTypeReferenceDirectives = oldProgram.getResolvedTypeReferenceDirectives(); resolvedProjectReferences = oldProgram.getResolvedProjectReferences(); if (resolvedProjectReferences) { resolvedProjectReferences.forEach(ref => { if (ref) { addProjectReferenceRedirects(ref.commandLine); } }); } sourceFileToPackageName = oldProgram.sourceFileToPackageName; 
redirectTargetsMap = oldProgram.redirectTargetsMap;
return oldProgram.structureIsReused = StructureIsReused.Completely;
}

// Builds the EmitHost facade handed to the emitter: mostly thin delegations to the
// enclosing program/host closures, plus a fileExists that consults the program's
// local caches before falling back to the underlying compiler host.
function getEmitHost(writeFileCallback?: WriteFileCallback): EmitHost {
    return {
        getPrependNodes,
        getCanonicalFileName,
        getCommonSourceDirectory: program.getCommonSourceDirectory,
        getCompilerOptions: program.getCompilerOptions,
        getCurrentDirectory: () => currentDirectory,
        getNewLine: () => host.getNewLine(),
        getSourceFile: program.getSourceFile,
        getSourceFileByPath: program.getSourceFileByPath,
        getSourceFiles: program.getSourceFiles,
        getTypeChecker: program.getTypeChecker,
        getLibFileFromReference: program.getLibFileFromReference,
        isSourceFileFromExternalLibrary,
        // A caller-supplied writeFile wins; otherwise delegate to the compiler host.
        writeFile: writeFileCallback || (
            (fileName, data, writeByteOrderMark, onError, sourceFiles) => host.writeFile(fileName, data, writeByteOrderMark, onError, sourceFiles)),
        isEmitBlocked,
        readFile: f => host.readFile(f),
        fileExists: f => {
            // Use local caches
            const path = toPath(f);
            if (getSourceFileByPath(path)) return true;
            if (contains(missingFilePaths, path)) return false;
            // Before falling back to the host
            return host.fileExists(f);
        },
        // directoryExists is optional on the host, so only expose it when implemented.
        ...(host.directoryExists ?
            { directoryExists: f => host.directoryExists!(f) } :
            {}),
        useCaseSensitiveFileNames: () => host.useCaseSensitiveFileNames(),
    };
}

function getResolvedProjectReferences() {
    return resolvedProjectReferences;
}

function getProjectReferences() {
    return projectReferences;
}

// Collects InputFiles nodes for every project reference marked `prepend`, reading the
// referenced project's emitted .js/.d.ts output (and their maps) via host.readFile.
function getPrependNodes(): InputFiles[] {
    if (!projectReferences) {
        return emptyArray;
    }
    const nodes: InputFiles[] = [];
    for (let i = 0; i < projectReferences.length; i++) {
        const ref = projectReferences[i];
        const resolvedRefOpts = resolvedProjectReferences![i]!.commandLine;
        if (ref.prepend && resolvedRefOpts && resolvedRefOpts.options) {
            const out = resolvedRefOpts.options.outFile || resolvedRefOpts.options.out;
            // Upstream project didn't have outFile set -- skip (error will have been issued earlier)
            if (!out) continue;
            const dtsFilename = changeExtension(out, ".d.ts");
            // Missing outputs become placeholder comments rather than hard failures.
            const js = host.readFile(out) || `/* Input file ${out} was missing */\r\n`;
            const jsMapPath = out + ".map"; // TODO: try to read sourceMappingUrl comment from the file
            const jsMap = host.readFile(jsMapPath);
            const dts = host.readFile(dtsFilename) || `/* Input file ${dtsFilename} was missing */\r\n`;
            const dtsMapPath = dtsFilename + ".map";
            const dtsMap = host.readFile(dtsMapPath);
            const node = createInputFiles(js, dts, jsMap && jsMapPath, jsMap, dtsMap && dtsMapPath, dtsMap);
            nodes.push(node);
        }
    }
    return nodes;
}

// True when the file was discovered via a node_modules search (see the
// sourceFilesFoundSearchingNodeModules map maintained elsewhere in createProgram).
function isSourceFileFromExternalLibrary(file: SourceFile): boolean {
    return !!sourceFilesFoundSearchingNodeModules.get(file.path);
}

// Decides whether `file` is one of the default library files for this compilation.
function isSourceFileDefaultLibrary(file: SourceFile): boolean {
    if (file.hasNoDefaultLib) {
        return true;
    }

    if (!options.noLib) {
        return false;
    }

    // If '--lib' is not specified, include default library file according to '--target'
    // otherwise, using options specified in '--lib' instead of '--target' default library file
    const equalityComparer = host.useCaseSensitiveFileNames() ?
equateStringsCaseSensitive : equateStringsCaseInsensitive;
if (!options.lib) {
    return equalityComparer(file.fileName, getDefaultLibraryFileName());
}
else {
    return some(options.lib, libFileName => equalityComparer(file.fileName, combinePaths(defaultLibraryPath, libFileName)));
}
}

// Lazily creates the checker that also produces diagnostics (shared by all
// diagnostics entry points).
function getDiagnosticsProducingTypeChecker() {
    return diagnosticsProducingTypeChecker || (diagnosticsProducingTypeChecker = createTypeChecker(program, /*produceDiagnostics:*/ true));
}

function dropDiagnosticsProducingTypeChecker() {
    diagnosticsProducingTypeChecker = undefined!;
}

// Lazily creates the non-diagnostics checker used for ordinary type queries.
function getTypeChecker() {
    return noDiagnosticsTypeChecker || (noDiagnosticsTypeChecker = createTypeChecker(program, /*produceDiagnostics:*/ false));
}

// Public emit entry point; wraps emitWorker so cancellation resets the checkers.
function emit(sourceFile?: SourceFile, writeFileCallback?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, transformers?: CustomTransformers): EmitResult {
    return runWithCancellationToken(() => emitWorker(program, sourceFile, writeFileCallback, cancellationToken, emitOnlyDtsFiles, transformers));
}

function isEmitBlocked(emitFileName: string): boolean {
    return hasEmitBlockingDiagnostics.has(toPath(emitFileName));
}

// Performs the actual emit: honors noEmit/noEmitOnError, builds the emit resolver,
// then hands off to emitFiles with the configured transformers.
function emitWorker(program: Program, sourceFile: SourceFile | undefined, writeFileCallback: WriteFileCallback | undefined, cancellationToken: CancellationToken | undefined, emitOnlyDtsFiles?: boolean, customTransformers?: CustomTransformers): EmitResult {
    let declarationDiagnostics: ReadonlyArray<Diagnostic> = [];

    if (!emitOnlyDtsFiles) {
        if (options.noEmit) {
            return { diagnostics: declarationDiagnostics, sourceMaps: undefined, emittedFiles: undefined, emitSkipped: true };
        }

        // If the noEmitOnError flag is set, then check if we have any errors so far. If so,
        // immediately bail out. Note that we pass 'undefined' for 'sourceFile' so that we
        // get any preEmit diagnostics, not just the ones
        if (options.noEmitOnError) {
            const diagnostics = [
                ...program.getOptionsDiagnostics(cancellationToken),
                ...program.getSyntacticDiagnostics(sourceFile, cancellationToken),
                ...program.getGlobalDiagnostics(cancellationToken),
                ...program.getSemanticDiagnostics(sourceFile, cancellationToken)
            ];

            // Declaration diagnostics are only computed when nothing else failed and
            // declaration emit is actually enabled.
            if (diagnostics.length === 0 && getEmitDeclarations(program.getCompilerOptions())) {
                declarationDiagnostics = program.getDeclarationDiagnostics(/*sourceFile*/ undefined, cancellationToken);
            }

            if (diagnostics.length > 0 || declarationDiagnostics.length > 0) {
                return {
                    diagnostics: concatenate(diagnostics, declarationDiagnostics),
                    sourceMaps: undefined,
                    emittedFiles: undefined,
                    emitSkipped: true
                };
            }
        }
    }

    // Create the emit resolver outside of the "emitTime" tracking code below. That way
    // any cost associated with it (like type checking) are appropriate associated with
    // the type-checking counter.
    //
    // If the -out option is specified, we should not pass the source file to getEmitResolver.
    // This is because in the -out scenario all files need to be emitted, and therefore all
    // files need to be type checked. And the way to specify that all files need to be type
    // checked is to not pass the file to getEmitResolver.
    const emitResolver = getDiagnosticsProducingTypeChecker().getEmitResolver((options.outFile || options.out) ? undefined : sourceFile, cancellationToken);

    performance.mark("beforeEmit");

    const transformers = emitOnlyDtsFiles ? [] : getTransformers(options, customTransformers);
    const emitResult = emitFiles(
        emitResolver,
        getEmitHost(writeFileCallback),
        sourceFile!, // TODO: GH#18217
        emitOnlyDtsFiles,
        transformers,
        customTransformers && customTransformers.afterDeclarations
    );

    performance.mark("afterEmit");
    performance.measure("Emit", "beforeEmit", "afterEmit");
    return emitResult;
}

function getSourceFile(fileName: string): SourceFile | undefined {
    return getSourceFileByPath(toPath(fileName));
}

function getSourceFileByPath(path: Path): SourceFile | undefined {
    return filesByName.get(path);
}

// Shared driver for the get*Diagnostics entry points: one file's diagnostics when
// `sourceFile` is given, otherwise the sorted/deduplicated union over all files.
function getDiagnosticsHelper<T extends Diagnostic>(
    sourceFile: SourceFile,
    getDiagnostics: (sourceFile: SourceFile, cancellationToken: CancellationToken) => ReadonlyArray<T>,
    cancellationToken: CancellationToken): ReadonlyArray<T> {
    if (sourceFile) {
        return getDiagnostics(sourceFile, cancellationToken);
    }
    return sortAndDeduplicateDiagnostics(flatMap(program.getSourceFiles(), sourceFile => {
        if (cancellationToken) {
            cancellationToken.throwIfCancellationRequested();
        }
        return getDiagnostics(sourceFile, cancellationToken);
    }));
}

function getSyntacticDiagnostics(sourceFile: SourceFile, cancellationToken: CancellationToken): ReadonlyArray<DiagnosticWithLocation> {
    return getDiagnosticsHelper(sourceFile, getSyntacticDiagnosticsForFile, cancellationToken);
}

function getSemanticDiagnostics(sourceFile: SourceFile, cancellationToken: CancellationToken): ReadonlyArray<Diagnostic> {
    return getDiagnosticsHelper(sourceFile, getSemanticDiagnosticsForFile, cancellationToken);
}

function getDeclarationDiagnostics(sourceFile: SourceFile, cancellationToken: CancellationToken): ReadonlyArray<DiagnosticWithLocation> {
    const options = program.getCompilerOptions();
    // collect diagnostics from the program only once if either no source file was specified or out/outFile is set (bundled emit)
    if (!sourceFile || options.out || options.outFile) {
        return getDeclarationDiagnosticsWorker(sourceFile, cancellationToken);
    }
    else {
        return getDiagnosticsHelper(sourceFile, getDeclarationDiagnosticsForFile, cancellationToken);
    }
}

function getSyntacticDiagnosticsForFile(sourceFile: SourceFile): ReadonlyArray<DiagnosticWithLocation> {
    // For JavaScript files, we report semantic errors for using TypeScript-only
    // constructs from within a JavaScript file as syntactic errors.
    if (isSourceFileJS(sourceFile)) {
        if (!sourceFile.additionalSyntacticDiagnostics) {
            // Computed lazily and cached on the source file itself.
            sourceFile.additionalSyntacticDiagnostics = getJSSyntacticDiagnosticsForFile(sourceFile);
        }
        return concatenate(sourceFile.additionalSyntacticDiagnostics, sourceFile.parseDiagnostics);
    }
    return sourceFile.parseDiagnostics;
}

// Runs `func`; on cancellation both type checkers are discarded so a half-checked
// checker state is never reused, then the exception is rethrown.
function runWithCancellationToken<T>(func: () => T): T {
    try {
        return func();
    }
    catch (e) {
        if (e instanceof OperationCanceledException) {
            // We were canceled while performing the operation. Because our type checker
            // might be a bad state, we need to throw it away.
            //
            // Note: we are overly aggressive here. We do not actually *have* to throw away
            // the "noDiagnosticsTypeChecker". However, for simplicity, i'd like to keep
            // the lifetimes of these two TypeCheckers the same. Also, we generally only
            // cancel when the user has made a change anyways. And, in that case, we (the
            // program instance) will get thrown away anyways. So trying to keep one of
            // these type checkers alive doesn't serve much purpose.
            noDiagnosticsTypeChecker = undefined!;
            diagnosticsProducingTypeChecker = undefined!;
        }

        throw e;
    }
}

function getSemanticDiagnosticsForFile(sourceFile: SourceFile, cancellationToken: CancellationToken): ReadonlyArray<Diagnostic> {
    return getAndCacheDiagnostics(sourceFile, cancellationToken, cachedSemanticDiagnosticsForFile, getSemanticDiagnosticsForFileNoCache);
}

// Computes semantic diagnostics for one file: bind + check diagnostics (when the
// script kind qualifies) merged with file-processing and program diagnostics,
// filtered through shouldReportDiagnostic (@ts-ignore handling).
function getSemanticDiagnosticsForFileNoCache(sourceFile: SourceFile, cancellationToken: CancellationToken): Diagnostic[] | undefined {
    return runWithCancellationToken(() => {
        if (skipTypeChecking(sourceFile, options)) {
            return emptyArray;
        }

        const typeChecker = getDiagnosticsProducingTypeChecker();

        Debug.assert(!!sourceFile.bindDiagnostics);

        const isCheckJs = isCheckJsEnabledForFile(sourceFile, options);
        // By default, only type-check .ts, .tsx, 'Deferred' and 'External' files (external files are added by plugins)
        const includeBindAndCheckDiagnostics = sourceFile.scriptKind === ScriptKind.TS || sourceFile.scriptKind === ScriptKind.TSX ||
            sourceFile.scriptKind === ScriptKind.External || isCheckJs || sourceFile.scriptKind === ScriptKind.Deferred;
        const bindDiagnostics: ReadonlyArray<Diagnostic> = includeBindAndCheckDiagnostics ? sourceFile.bindDiagnostics : emptyArray;
        const checkDiagnostics = includeBindAndCheckDiagnostics ? typeChecker.getDiagnostics(sourceFile, cancellationToken) : emptyArray;
        const fileProcessingDiagnosticsInFile = fileProcessingDiagnostics.getDiagnostics(sourceFile.fileName);
        const programDiagnosticsInFile = programDiagnostics.getDiagnostics(sourceFile.fileName);

        let diagnostics: Diagnostic[] | undefined;
        // jsDocDiagnostics are only folded in for checkJs files.
        for (const diags of [bindDiagnostics, checkDiagnostics, fileProcessingDiagnosticsInFile, programDiagnosticsInFile, isCheckJs ?
            sourceFile.jsDocDiagnostics : undefined]) {
            if (diags) {
                for (const diag of diags) {
                    if (shouldReportDiagnostic(diag)) {
                        diagnostics = append(diagnostics, diag);
                    }
                }
            }
        }
        return diagnostics;
    });
}

function getSuggestionDiagnostics(sourceFile: SourceFile, cancellationToken: CancellationToken): ReadonlyArray<DiagnosticWithLocation> {
    return runWithCancellationToken(() => {
        return getDiagnosticsProducingTypeChecker().getSuggestionDiagnostics(sourceFile, cancellationToken);
    });
}

/**
 * Skip errors if previous line start with '// @ts-ignore' comment, not counting non-empty non-comment lines
 */
function shouldReportDiagnostic(diagnostic: Diagnostic) {
    const { file, start } = diagnostic;
    if (file) {
        const lineStarts = getLineStarts(file);
        let { line } = computeLineAndCharacterOfPosition(lineStarts, start!); // TODO: GH#18217
        // Walk upward over preceding comment lines; the regex's third capture group
        // flags a '@ts-ignore' marker.
        while (line > 0) {
            const previousLineText = file.text.slice(lineStarts[line - 1], lineStarts[line]);
            const result = ignoreDiagnosticCommentRegEx.exec(previousLineText);
            if (!result) {
                // non-empty line
                return true;
            }
            if (result[3]) {
                // @ts-ignore
                return false;
            }
            line--;
        }
    }
    return true;
}

// Reports TypeScript-only syntax (type annotations, modifiers, type arguments,
// declarations, etc.) found in a JavaScript file as "can only be used in a .ts file"
// diagnostics, by walking the AST with a tracked `parent` pointer.
function getJSSyntacticDiagnosticsForFile(sourceFile: SourceFile): DiagnosticWithLocation[] {
    return runWithCancellationToken(() => {
        const diagnostics: DiagnosticWithLocation[] = [];
        let parent: Node = sourceFile;
        walk(sourceFile);

        return diagnostics;

        function walk(node: Node) {
            // Return directly from the case if the given node doesnt want to visit each child
            // Otherwise break to visit each child
            switch (parent.kind) {
                case SyntaxKind.Parameter:
                case SyntaxKind.PropertyDeclaration:
                    if ((<ParameterDeclaration | PropertyDeclaration>parent).questionToken === node) {
                        diagnostics.push(createDiagnosticForNode(node, Diagnostics._0_can_only_be_used_in_a_ts_file, "?"));
                        return;
                    }
                    // falls through
                case SyntaxKind.MethodDeclaration:
                case SyntaxKind.MethodSignature:
                case SyntaxKind.Constructor:
                case SyntaxKind.GetAccessor:
                case SyntaxKind.SetAccessor:
                case SyntaxKind.FunctionExpression:
                case SyntaxKind.FunctionDeclaration:
                case SyntaxKind.ArrowFunction:
                case SyntaxKind.VariableDeclaration:
                    // type annotation
                    if ((<FunctionLikeDeclaration | VariableDeclaration | ParameterDeclaration | PropertyDeclaration>parent).type === node) {
                        diagnostics.push(createDiagnosticForNode(node, Diagnostics.types_can_only_be_used_in_a_ts_file));
                        return;
                    }
            }

            switch (node.kind) {
                case SyntaxKind.ImportEqualsDeclaration:
                    diagnostics.push(createDiagnosticForNode(node, Diagnostics.import_can_only_be_used_in_a_ts_file));
                    return;
                case SyntaxKind.ExportAssignment:
                    if ((<ExportAssignment>node).isExportEquals) {
                        diagnostics.push(createDiagnosticForNode(node, Diagnostics.export_can_only_be_used_in_a_ts_file));
                        return;
                    }
                    break;
                case SyntaxKind.HeritageClause:
                    const heritageClause = <HeritageClause>node;
                    if (heritageClause.token === SyntaxKind.ImplementsKeyword) {
                        diagnostics.push(createDiagnosticForNode(node, Diagnostics.implements_clauses_can_only_be_used_in_a_ts_file));
                        return;
                    }
                    break;
                case SyntaxKind.InterfaceDeclaration:
                    diagnostics.push(createDiagnosticForNode(node, Diagnostics.interface_declarations_can_only_be_used_in_a_ts_file));
                    return;
                case SyntaxKind.ModuleDeclaration:
                    diagnostics.push(createDiagnosticForNode(node, Diagnostics.module_declarations_can_only_be_used_in_a_ts_file));
                    return;
                case SyntaxKind.TypeAliasDeclaration:
                    diagnostics.push(createDiagnosticForNode(node, Diagnostics.type_aliases_can_only_be_used_in_a_ts_file));
                    return;
                case SyntaxKind.EnumDeclaration:
                    diagnostics.push(createDiagnosticForNode(node, Diagnostics.enum_declarations_can_only_be_used_in_a_ts_file));
                    return;
                case SyntaxKind.NonNullExpression:
                    diagnostics.push(createDiagnosticForNode(node, Diagnostics.non_null_assertions_can_only_be_used_in_a_ts_file));
                    return;
                case SyntaxKind.AsExpression:
                    diagnostics.push(createDiagnosticForNode((node as AsExpression).type, Diagnostics.type_assertion_expressions_can_only_be_used_in_a_ts_file));
                    return;
                case SyntaxKind.TypeAssertionExpression:
                    Debug.fail(); // Won't parse these in a JS file anyway, as they are interpreted as JSX.
            }

            const prevParent = parent;
            parent = node;
            forEachChild(node, walk, walkArray);
            parent = prevParent;
        }

        // Array counterpart of walk: handles decorators, type parameters, modifiers,
        // and type-argument lists attached to the current `parent`.
        function walkArray(nodes: NodeArray<Node>) {
            if (parent.decorators === nodes && !options.experimentalDecorators) {
                diagnostics.push(createDiagnosticForNode(parent, Diagnostics.Experimental_support_for_decorators_is_a_feature_that_is_subject_to_change_in_a_future_release_Set_the_experimentalDecorators_option_to_remove_this_warning));
            }

            switch (parent.kind) {
                case SyntaxKind.ClassDeclaration:
                case SyntaxKind.MethodDeclaration:
                case SyntaxKind.MethodSignature:
                case SyntaxKind.Constructor:
                case SyntaxKind.GetAccessor:
                case SyntaxKind.SetAccessor:
                case SyntaxKind.FunctionExpression:
                case SyntaxKind.FunctionDeclaration:
                case SyntaxKind.ArrowFunction:
                    // Check type parameters
                    if (nodes === (<ClassDeclaration | FunctionLikeDeclaration>parent).typeParameters) {
                        diagnostics.push(createDiagnosticForNodeArray(nodes, Diagnostics.type_parameter_declarations_can_only_be_used_in_a_ts_file));
                        return;
                    }
                    // falls through
                case SyntaxKind.VariableStatement:
                    // Check modifiers
                    if (nodes === (<ClassDeclaration | FunctionLikeDeclaration | VariableStatement>parent).modifiers) {
                        return checkModifiers(<NodeArray<Modifier>>nodes, parent.kind === SyntaxKind.VariableStatement);
                    }
                    break;
                case SyntaxKind.PropertyDeclaration:
                    // Check modifiers of property declaration
                    if (nodes === (<PropertyDeclaration>parent).modifiers) {
                        for (const modifier of <NodeArray<Modifier>>nodes) {
                            if (modifier.kind !== SyntaxKind.StaticKeyword) {
                                diagnostics.push(createDiagnosticForNode(modifier, Diagnostics._0_can_only_be_used_in_a_ts_file, tokenToString(modifier.kind)));
                            }
                        }
                        return;
                    }
                    break;
                case SyntaxKind.Parameter:
                    // Check modifiers of parameter declaration
                    if (nodes === (<ParameterDeclaration>parent).modifiers) {
                        diagnostics.push(createDiagnosticForNodeArray(nodes, Diagnostics.parameter_modifiers_can_only_be_used_in_a_ts_file));
                        return;
                    }
                    break;
                case SyntaxKind.CallExpression:
                case SyntaxKind.NewExpression:
                case SyntaxKind.ExpressionWithTypeArguments:
                case SyntaxKind.JsxSelfClosingElement:
                case SyntaxKind.JsxOpeningElement:
                    // Check type arguments
                    if (nodes === (<CallExpression | NewExpression | ExpressionWithTypeArguments | JsxOpeningLikeElement>parent).typeArguments) {
                        diagnostics.push(createDiagnosticForNodeArray(nodes, Diagnostics.type_arguments_can_only_be_used_in_a_ts_file));
                        return;
                    }
                    break;
            }

            for (const node of nodes) {
                walk(node);
            }
        }

        function checkModifiers(modifiers: NodeArray<Modifier>, isConstValid: boolean) {
            for (const modifier of modifiers) {
                switch (modifier.kind) {
                    case SyntaxKind.ConstKeyword:
                        if (isConstValid) {
                            continue;
                        }
                        // to report error,
                        // falls through
                    case SyntaxKind.PublicKeyword:
                    case SyntaxKind.PrivateKeyword:
                    case SyntaxKind.ProtectedKeyword:
                    case SyntaxKind.ReadonlyKeyword:
                    case SyntaxKind.DeclareKeyword:
                    case SyntaxKind.AbstractKeyword:
                        diagnostics.push(createDiagnosticForNode(modifier, Diagnostics._0_can_only_be_used_in_a_ts_file, tokenToString(modifier.kind)));
                        break;

                    // These are all legal modifiers.
                    case SyntaxKind.StaticKeyword:
                    case SyntaxKind.ExportKeyword:
                    case SyntaxKind.DefaultKeyword:
                }
            }
        }

        function createDiagnosticForNodeArray(nodes: NodeArray<Node>, message: DiagnosticMessage, arg0?: string | number, arg1?: string | number, arg2?: string | number): DiagnosticWithLocation {
            const start = nodes.pos;
            return createFileDiagnostic(sourceFile, start, nodes.end - start, message, arg0, arg1, arg2);
        }

        // Since these are syntactic diagnostics, parent might not have been set
        // this means the sourceFile cannot be infered from the node
        function createDiagnosticForNode(node: Node, message: DiagnosticMessage, arg0?: string | number, arg1?: string | number, arg2?: string | number): DiagnosticWithLocation {
            return createDiagnosticForNodeInSourceFile(sourceFile, node, message, arg0, arg1, arg2);
        }
    });
}

function getDeclarationDiagnosticsWorker(sourceFile: SourceFile, cancellationToken: CancellationToken): ReadonlyArray<DiagnosticWithLocation> {
    return getAndCacheDiagnostics(sourceFile, cancellationToken, cachedDeclarationDiagnosticsForFile, getDeclarationDiagnosticsForFileNoCache);
}

function getDeclarationDiagnosticsForFileNoCache(sourceFile: SourceFile | undefined, cancellationToken: CancellationToken) {
    return runWithCancellationToken(() => {
        const resolver = getDiagnosticsProducingTypeChecker().getEmitResolver(sourceFile, cancellationToken);
        // Don't actually write any files since we're just getting diagnostics.
        return ts.getDeclarationDiagnostics(getEmitHost(noop), resolver, sourceFile);
    });
}

// Memoizing wrapper: consults the per-file cache (or the whole-program slot when no
// file is given) before computing diagnostics, then stores the result.
function getAndCacheDiagnostics<T extends Diagnostic>(
    sourceFile: SourceFile | undefined,
    cancellationToken: CancellationToken,
    cache: DiagnosticCache<T>,
    getDiagnostics: (sourceFile: SourceFile, cancellationToken: CancellationToken) => T[] | undefined,
): ReadonlyArray<T> {

    const cachedResult = sourceFile ?
    cache.perFile && cache.perFile.get(sourceFile.path) :
    cache.allDiagnostics as T[];
    if (cachedResult) {
        return cachedResult;
    }
    const result = getDiagnostics(sourceFile!, cancellationToken) || emptyArray; // TODO: GH#18217
    if (sourceFile) {
        if (!cache.perFile) {
            cache.perFile = createMap<T[]>();
        }
        cache.perFile.set(sourceFile.path, result);
    }
    else {
        cache.allDiagnostics = result;
    }
    return result;
}

function getDeclarationDiagnosticsForFile(sourceFile: SourceFile, cancellationToken: CancellationToken): ReadonlyArray<DiagnosticWithLocation> {
    // Declaration files contribute no declaration diagnostics of their own.
    return sourceFile.isDeclarationFile ? [] : getDeclarationDiagnosticsWorker(sourceFile, cancellationToken);
}

function getOptionsDiagnostics(): Diagnostic[] {
    return sortAndDeduplicateDiagnostics(concatenate(
        fileProcessingDiagnostics.getGlobalDiagnostics(),
        concatenate(
            programDiagnostics.getGlobalDiagnostics(),
            options.configFile ? programDiagnostics.getDiagnostics(options.configFile.fileName) : []
        )
    ));
}

function getGlobalDiagnostics(): Diagnostic[] {
    return sortAndDeduplicateDiagnostics(getDiagnosticsProducingTypeChecker().getGlobalDiagnostics().slice());
}

function getConfigFileParsingDiagnostics(): ReadonlyArray<Diagnostic> {
    return configFileParsingDiagnostics || emptyArray;
}

function processRootFile(fileName: string, isDefaultLib: boolean, ignoreNoDefaultLib: boolean) {
    processSourceFile(normalizePath(fileName), isDefaultLib, ignoreNoDefaultLib, /*packageId*/ undefined);
}

function fileReferenceIsEqualTo(a: FileReference, b: FileReference): boolean {
    return a.fileName === b.fileName;
}

// Module names compare by escapedText for identifiers and by text for string literals;
// mixed kinds are never equal.
function moduleNameIsEqualTo(a: StringLiteralLike | Identifier, b: StringLiteralLike | Identifier): boolean {
    return a.kind === SyntaxKind.Identifier ?
        b.kind === SyntaxKind.Identifier && a.escapedText === b.escapedText :
        b.kind === SyntaxKind.StringLiteral && a.text === b.text;
}

// Populates file.imports / file.moduleAugmentations / file.ambientModuleNames by
// scanning the file's statements (plus dynamic import()/require() calls) exactly once;
// a second call is a no-op because file.imports is already set.
function collectExternalModuleReferences(file: SourceFile): void {
    if (file.imports) {
        return;
    }

    const isJavaScriptFile = isSourceFileJS(file);
    const isExternalModuleFile = isExternalModule(file);

    // file.imports may not be undefined if there exists dynamic import
    let imports: StringLiteralLike[] | undefined;
    let moduleAugmentations: (StringLiteral | Identifier)[] | undefined;
    let ambientModules: string[] | undefined;

    // If we are importing helpers, we need to add a synthetic reference to resolve the
    // helpers library.
    if (options.importHelpers
        && (options.isolatedModules || isExternalModuleFile)
        && !file.isDeclarationFile) {
        // synthesize 'import "tslib"' declaration
        const externalHelpersModuleReference = createLiteral(externalHelpersModuleNameText);
        const importDecl = createImportDeclaration(/*decorators*/ undefined, /*modifiers*/ undefined, /*importClause*/ undefined, externalHelpersModuleReference);
        addEmitFlags(importDecl, EmitFlags.NeverApplyImportHelper);
        externalHelpersModuleReference.parent = importDecl;
        importDecl.parent = file;
        imports = [externalHelpersModuleReference];
    }

    for (const node of file.statements) {
        collectModuleReferences(node, /*inAmbientModule*/ false);
        if ((file.flags & NodeFlags.PossiblyContainsDynamicImport) || isJavaScriptFile) {
            collectDynamicImportOrRequireCalls(node);
        }
    }
    if ((file.flags & NodeFlags.PossiblyContainsDynamicImport) || isJavaScriptFile) {
        collectDynamicImportOrRequireCalls(file.endOfFileToken);
    }

    file.imports = imports || emptyArray;
    file.moduleAugmentations = moduleAugmentations || emptyArray;
    file.ambientModuleNames = ambientModules || emptyArray;

    return;

    // Records import/export module specifiers and classifies ambient module
    // declarations as augmentations or global ambient-module names.
    function collectModuleReferences(node: Statement, inAmbientModule: boolean): void {
        if (isAnyImportOrReExport(node)) {
            const moduleNameExpr = getExternalModuleName(node);
            // TypeScript 1.0 spec (April 2014): 12.1.6
            // An ExternalImportDeclaration in an AmbientExternalModuleDeclaration may reference other external modules
            // only through top - level external module names. Relative external module names are not permitted.
            if (moduleNameExpr && isStringLiteral(moduleNameExpr) && moduleNameExpr.text && (!inAmbientModule || !isExternalModuleNameRelative(moduleNameExpr.text))) {
                imports = append(imports, moduleNameExpr);
            }
        }
        else if (isModuleDeclaration(node)) {
            if (isAmbientModule(node) && (inAmbientModule || hasModifier(node, ModifierFlags.Ambient) || file.isDeclarationFile)) {
                const nameText = getTextOfIdentifierOrLiteral(node.name);
                // Ambient module declarations can be interpreted as augmentations for some existing external modules.
                // This will happen in two cases:
                // - if current file is external module then module augmentation is a ambient module declaration defined in the top level scope
                // - if current file is not external module then module augmentation is an ambient module declaration with non-relative module name
                //   immediately nested in top level ambient module declaration .
                if (isExternalModuleFile || (inAmbientModule && !isExternalModuleNameRelative(nameText))) {
                    (moduleAugmentations || (moduleAugmentations = [])).push(node.name);
                }
                else if (!inAmbientModule) {
                    if (file.isDeclarationFile) {
                        // for global .d.ts files record name of ambient module
                        (ambientModules || (ambientModules = [])).push(nameText);
                    }
                    // An AmbientExternalModuleDeclaration declares an external module.
                    // This type of declaration is permitted only in the global module.
                    // The StringLiteral must specify a top - level external module name.
                    // Relative external module names are not permitted
                    // NOTE: body of ambient module is always a module block, if it exists
                    const body = <ModuleBlock>(<ModuleDeclaration>node).body;
                    if (body) {
                        for (const statement of body.statements) {
                            collectModuleReferences(statement, /*inAmbientModule*/ true);
                        }
                    }
                }
            }
        }
    }

    // Records require("..."), import("..."), and import-type node specifiers,
    // recursing into children and attached JSDoc.
    function collectDynamicImportOrRequireCalls(node: Node): void {
        if (isRequireCall(node, /*checkArgumentIsStringLiteralLike*/ true)) {
            imports = append(imports, node.arguments[0]);
        }
        // we have to check the argument list has length of 1. We will still have to process these even though we have parsing error.
        else if (isImportCall(node) && node.arguments.length === 1 && isStringLiteralLike(node.arguments[0])) {
            imports = append(imports, node.arguments[0] as StringLiteralLike);
        }
        else if (isLiteralImportTypeNode(node)) {
            imports = append(imports, node.argument.literal);
        }
        collectDynamicImportOrRequireCallsForEachChild(node);
        if (hasJSDocNodes(node)) {
            forEach(node.jsDoc, collectDynamicImportOrRequireCallsForEachChild);
        }
    }

    function collectDynamicImportOrRequireCallsForEachChild(node: Node) {
        forEachChild(node, collectDynamicImportOrRequireCalls);
    }
}

function getLibFileFromReference(ref: FileReference) {
    // Lib references are matched case-insensitively against the known lib map.
    const libName = ref.fileName.toLocaleLowerCase();
    const libFileName = libMap.get(libName);
    if (libFileName) {
        return getSourceFile(combinePaths(defaultLibraryPath, libFileName));
    }
}

/** This should have similar behavior to 'processSourceFile' without diagnostics or mutation. */
function getSourceFileFromReference(referencingFile: SourceFile, ref: FileReference): SourceFile | undefined {
    return getSourceFileFromReferenceWorker(resolveTripleslashReference(ref.fileName, referencingFile.fileName), fileName => filesByName.get(toPath(fileName)));
}

// Resolves a referenced file name to a SourceFile via `getSourceFile`, trying the
// supported extensions when the name has none; `fail` (when provided) receives the
// appropriate diagnostic for each failure mode.
function getSourceFileFromReferenceWorker(
    fileName: string,
    getSourceFile: (fileName: string) => SourceFile | undefined,
    fail?: (diagnostic: DiagnosticMessage, ...argument: string[]) => void,
    refFile?: SourceFile): SourceFile | undefined {

    if (hasExtension(fileName)) {
        if (!options.allowNonTsExtensions && !forEach(supportedExtensionsWithJsonIfResolveJsonModule || supportedExtensions, extension => fileExtensionIs(host.getCanonicalFileName(fileName), extension))) {
            if (fail) fail(Diagnostics.File_0_has_unsupported_extension_The_only_supported_extensions_are_1, fileName, "'" + supportedExtensions.join("', '") + "'");
            return undefined;
        }

        const sourceFile = getSourceFile(fileName);
        if (fail) {
            if (!sourceFile) {
                const redirect = getProjectReferenceRedirect(fileName);
                if (redirect) {
                    fail(Diagnostics.Output_file_0_has_not_been_built_from_source_file_1, redirect, fileName);
                }
                else {
                    fail(Diagnostics.File_0_not_found, fileName);
                }
            }
            else if (refFile && host.getCanonicalFileName(fileName) === host.getCanonicalFileName(refFile.fileName)) {
                fail(Diagnostics.A_file_cannot_have_a_reference_to_itself);
            }
        }
        return sourceFile;
    }
    else {
        // No extension: optionally try the bare name, then each supported extension.
        const sourceFileNoExtension = options.allowNonTsExtensions && getSourceFile(fileName);
        if (sourceFileNoExtension) return sourceFileNoExtension;

        if (fail && options.allowNonTsExtensions) {
            fail(Diagnostics.File_0_not_found, fileName);
            return undefined;
        }

        const sourceFileWithAddedExtension = forEach(supportedExtensions, extension => getSourceFile(fileName + extension));
        if (fail && !sourceFileWithAddedExtension) fail(Diagnostics.File_0_not_found, fileName + Extension.Ts);
        return sourceFileWithAddedExtension;
    }
}

/** This has side effects through `findSourceFile`. */
function processSourceFile(fileName: string, isDefaultLib: boolean, ignoreNoDefaultLib: boolean, packageId: PackageId | undefined, refFile?: SourceFile, refPos?: number, refEnd?: number): void {
    getSourceFileFromReferenceWorker(fileName,
        fileName => findSourceFile(fileName, toPath(fileName), isDefaultLib, ignoreNoDefaultLib, refFile!, refPos!, refEnd!, packageId), // TODO: GH#18217
        (diagnostic, ...args) => {
            // Attach the failure to the referencing location when one is known.
            fileProcessingDiagnostics.add(refFile !== undefined && refEnd !== undefined && refPos !== undefined ?
                createFileDiagnostic(refFile, refPos, refEnd - refPos, diagnostic, ...args) :
                createCompilerDiagnostic(diagnostic, ...args));
        },
        refFile);
}

function reportFileNamesDifferOnlyInCasingError(fileName: string, existingFileName: string, refFile: SourceFile, refPos: number, refEnd: number): void {
    // Prefer a file-anchored diagnostic when the referencing span is available.
    if (refFile !== undefined && refPos !== undefined && refEnd !== undefined) {
        fileProcessingDiagnostics.add(createFileDiagnostic(refFile, refPos, refEnd - refPos, Diagnostics.File_name_0_differs_from_already_included_file_name_1_only_in_casing, fileName, existingFileName));
    }
    else {
        fileProcessingDiagnostics.add(createCompilerDiagnostic(Diagnostics.File_name_0_differs_from_already_included_file_name_1_only_in_casing, fileName, existingFileName));
    }
}

// Creates a SourceFile that prototype-delegates to `redirectTarget` while carrying its
// own fileName/path; `id` and `symbol` accessors forward to the target so the checker
// treats the redirect and its target as the same underlying file.
function createRedirectSourceFile(redirectTarget: SourceFile, unredirected: SourceFile, fileName: string, path: Path): SourceFile {
    const redirect: SourceFile = Object.create(redirectTarget);
    redirect.fileName = fileName;
    redirect.path = path;
    redirect.redirectInfo = { redirectTarget, unredirected };
    Object.defineProperties(redirect, {
        id: {
            get(this: SourceFile) { return this.redirectInfo!.redirectTarget.id; },
            set(this: SourceFile, value: SourceFile["id"]) { this.redirectInfo!.redirectTarget.id = value; },
        },
        symbol: {
            get(this: SourceFile) { return this.redirectInfo!.redirectTarget.symbol; },
            set(this: SourceFile, value: SourceFile["symbol"]) { this.redirectInfo!.redirectTarget.symbol = value; },
        },
    });
    return redirect;
}
// Get source file from normalized fileName
// Core lookup/creation routine: returns the cached SourceFile for `path` if
// present (re-processing its imports when node_modules depth rules changed),
// otherwise loads the file from the host, applies project-reference and
// packageId redirects, caches it, and processes its references and imports.
function findSourceFile(fileName: string, path: Path, isDefaultLib: boolean, ignoreNoDefaultLib: boolean, refFile: SourceFile, refPos: number, refEnd: number, packageId: PackageId | undefined): SourceFile | undefined {
    const originalFileName = fileName;
    if (filesByName.has(path)) {
        const file = filesByName.get(path);
        // try to check if we've already seen this file but with a different casing in path
        // NOTE: this only makes sense for case-insensitive file systems, and only on files which are not redirected
        if (file && options.forceConsistentCasingInFileNames) {
            let inputName = fileName;
            const checkedName = file.fileName;
            const isRedirect = toPath(checkedName) !== toPath(inputName);
            if (isRedirect) {
                inputName = getProjectReferenceRedirect(fileName) || fileName;
            }
            if (getNormalizedAbsolutePath(checkedName, currentDirectory) !== getNormalizedAbsolutePath(inputName, currentDirectory)) {
                reportFileNamesDifferOnlyInCasingError(inputName, checkedName, refFile, refPos, refEnd);
            }
        }
        // If the file was previously found via a node_modules search, but is now being processed as a root file,
        // then everything it sucks in may also be marked incorrectly, and needs to be checked again.
        if (file && sourceFilesFoundSearchingNodeModules.get(file.path) && currentNodeModulesDepth === 0) {
            sourceFilesFoundSearchingNodeModules.set(file.path, false);
            if (!options.noResolve) {
                processReferencedFiles(file, isDefaultLib);
                processTypeReferenceDirectives(file);
            }
            processLibReferenceDirectives(file);
            modulesWithElidedImports.set(file.path, false);
            processImportedModules(file);
        }
        // See if we need to reprocess the imports due to prior skipped imports
        else if (file && modulesWithElidedImports.get(file.path)) {
            if (currentNodeModulesDepth < maxNodeModuleJsDepth) {
                modulesWithElidedImports.set(file.path, false);
                processImportedModules(file);
            }
        }
        return file;
    }
    let redirectedPath: string | undefined;
    if (refFile) {
        const redirect = getProjectReferenceRedirect(fileName);
        if (redirect) {
            ((refFile.redirectedReferences || (refFile.redirectedReferences = [])) as string[]).push(fileName);
            fileName = redirect;
            // Once we start redirecting to a file, we can potentially come back to it
            // via a back-reference from another file in the .d.ts folder. If that happens we'll
            // end up trying to add it to the program *again* because we were tracking it via its
            // original (un-redirected) name. So we have to map both the original path and the redirected path
            // to the source file we're about to find/create
            redirectedPath = toPath(redirect);
        }
    }
    // We haven't looked for this file, do so now and cache result
    const file = host.getSourceFile(fileName, options.target!, hostErrorMessage => { // TODO: GH#18217
        if (refFile !== undefined && refPos !== undefined && refEnd !== undefined) {
            fileProcessingDiagnostics.add(createFileDiagnostic(refFile, refPos, refEnd - refPos, Diagnostics.Cannot_read_file_0_Colon_1, fileName, hostErrorMessage));
        }
        else {
            fileProcessingDiagnostics.add(createCompilerDiagnostic(Diagnostics.Cannot_read_file_0_Colon_1, fileName, hostErrorMessage));
        }
    }, shouldCreateNewSourceFile);
    if (packageId) {
        const packageIdKey = packageIdToString(packageId);
        const fileFromPackageId = packageIdToSourceFile.get(packageIdKey);
        if (fileFromPackageId) {
            // Some other SourceFile already exists with this package name and version.
            // Instead of creating a duplicate, just redirect to the existing one.
            const dupFile = createRedirectSourceFile(fileFromPackageId, file!, fileName, path); // TODO: GH#18217
            redirectTargetsMap.add(fileFromPackageId.path, fileName);
            filesByName.set(path, dupFile);
            sourceFileToPackageName.set(path, packageId.name);
            processingOtherFiles!.push(dupFile);
            return dupFile;
        }
        else if (file) {
            // This is the first source file to have this packageId.
            packageIdToSourceFile.set(packageIdKey, file);
            sourceFileToPackageName.set(path, packageId.name);
        }
    }
    filesByName.set(path, file);
    if (redirectedPath) {
        filesByName.set(redirectedPath, file);
    }
    if (file) {
        sourceFilesFoundSearchingNodeModules.set(path, currentNodeModulesDepth > 0);
        file.path = path;
        file.resolvedPath = toPath(fileName);
        file.originalFileName = originalFileName;
        if (host.useCaseSensitiveFileNames()) {
            const pathLowerCase = path.toLowerCase();
            // for case-sensitive file systems check if we've already seen some file with similar filename ignoring case
            const existingFile = filesByNameIgnoreCase!.get(pathLowerCase);
            if (existingFile) {
                reportFileNamesDifferOnlyInCasingError(fileName, existingFile.fileName, refFile, refPos, refEnd);
            }
            else {
                filesByNameIgnoreCase!.set(pathLowerCase, file);
            }
        }
        skipDefaultLib = skipDefaultLib || (file.hasNoDefaultLib && !ignoreNoDefaultLib);
        if (!options.noResolve) {
            processReferencedFiles(file, isDefaultLib);
            processTypeReferenceDirectives(file);
        }
        processLibReferenceDirectives(file);
        // always process imported modules to record module name resolutions
        processImportedModules(file);
        if (isDefaultLib) {
            processingDefaultLibFiles!.push(file);
        }
        else {
            processingOtherFiles!.push(file);
        }
    }
    return file;
}

// Maps a .ts input of a referenced project to the .d.ts it will be emitted as.
// Returns undefined for .d.ts inputs, non-TS files, or files not produced by
// any referenced project.
function getProjectReferenceRedirect(fileName: string): string | undefined {
    // Ignore dts or any of the non ts files
    if (!projectReferenceRedirects || fileExtensionIs(fileName, Extension.Dts) || !fileExtensionIsOneOf(fileName, supportedTSExtensions)) {
        return undefined;
    }
    // If this file is produced by a referenced project, we need to rewrite it to
    // look in the output folder of the referenced project rather than the input
    return forEach(projectReferenceRedirects, referencedProject => {
        // not input file from the referenced project, ignore
        if (!contains(referencedProject.fileNames, fileName, isSameFile)) {
            return undefined;
        }
        const out = referencedProject.options.outFile || referencedProject.options.out;
        return out ? changeExtension(out, Extension.Dts) : getOutputDeclarationFileName(fileName, referencedProject);
    });
}

// Adds every /// <reference path="..."/> target of `file` to the program.
function processReferencedFiles(file: SourceFile, isDefaultLib: boolean) {
    forEach(file.referencedFiles, ref => {
        const referencedFileName = resolveTripleslashReference(ref.fileName, file.originalFileName);
        processSourceFile(referencedFileName, isDefaultLib, /*ignoreNoDefaultLib*/ false, /*packageId*/ undefined, file, ref.pos, ref.end);
    });
}

// Resolves every /// <reference types="..."/> directive of `file`, records
// each resolution on the file, and processes the resolved directive.
function processTypeReferenceDirectives(file: SourceFile) {
    // We lower-case all type references because npm automatically lowercases all packages. See GH#9824.
    const typeDirectives = map(file.typeReferenceDirectives, ref => ref.fileName.toLocaleLowerCase());
    if (!typeDirectives) {
        return;
    }
    const resolutions = resolveTypeReferenceDirectiveNamesWorker(typeDirectives, file.originalFileName);
    for (let i = 0; i < typeDirectives.length; i++) {
        const ref = file.typeReferenceDirectives[i];
        const resolvedTypeReferenceDirective = resolutions[i];
        // store resolved type directive on the file
        const fileName = ref.fileName.toLocaleLowerCase();
        setResolvedTypeReferenceDirective(file, fileName, resolvedTypeReferenceDirective);
        processTypeReferenceDirective(fileName, resolvedTypeReferenceDirective, file, ref.pos, ref.end);
    }
}

// Adds the file behind one resolved type directive. Primary resolutions are
// added unconditionally; secondary resolutions conflicting with a previously
// seen one (different path AND different text) produce a diagnostic and keep
// the earlier resolution.
function processTypeReferenceDirective(typeReferenceDirective: string, resolvedTypeReferenceDirective: ResolvedTypeReferenceDirective, refFile?: SourceFile, refPos?: number, refEnd?: number): void {
    // If we already found this library as a primary reference - nothing to do
    const previousResolution = resolvedTypeReferenceDirectives.get(typeReferenceDirective);
    if (previousResolution && previousResolution.primary) {
        return;
    }
    let saveResolution = true;
    if (resolvedTypeReferenceDirective) {
        if (resolvedTypeReferenceDirective.primary) {
            // resolved from the primary path
            processSourceFile(resolvedTypeReferenceDirective.resolvedFileName!, /*isDefaultLib*/ false, /*ignoreNoDefaultLib*/ false, resolvedTypeReferenceDirective.packageId, refFile, refPos, refEnd); // TODO: GH#18217
        }
        else {
            // If we already resolved to this file, it must have been a secondary reference. Check file contents
            // for sameness and possibly issue an error
            if (previousResolution) {
                // Don't bother reading the file again if it's the same file.
                if (resolvedTypeReferenceDirective.resolvedFileName !== previousResolution.resolvedFileName) {
                    const otherFileText = host.readFile(resolvedTypeReferenceDirective.resolvedFileName!);
                    if (otherFileText !== getSourceFile(previousResolution.resolvedFileName!)!.text) {
                        fileProcessingDiagnostics.add(createDiagnostic(refFile!, refPos!, refEnd!, // TODO: GH#18217
                            Diagnostics.Conflicting_definitions_for_0_found_at_1_and_2_Consider_installing_a_specific_version_of_this_library_to_resolve_the_conflict,
                            typeReferenceDirective,
                            resolvedTypeReferenceDirective.resolvedFileName,
                            previousResolution.resolvedFileName));
                    }
                }
                // don't overwrite previous resolution result
                saveResolution = false;
            }
            else {
                // First resolution of this library
                processSourceFile(resolvedTypeReferenceDirective.resolvedFileName!, /*isDefaultLib*/ false, /*ignoreNoDefaultLib*/ false, resolvedTypeReferenceDirective.packageId, refFile, refPos, refEnd);
            }
        }
    }
    else {
        fileProcessingDiagnostics.add(createDiagnostic(refFile!, refPos!, refEnd!, Diagnostics.Cannot_find_type_definition_file_for_0, typeReferenceDirective)); // TODO: GH#18217
    }
    if (saveResolution) {
        resolvedTypeReferenceDirectives.set(typeReferenceDirective, resolvedTypeReferenceDirective);
    }
}

// Resolves every /// <reference lib="..."/> directive of `file`; known libs
// are loaded as default-lib root files, unknown ones get a diagnostic with a
// spelling suggestion when one is close enough.
function processLibReferenceDirectives(file: SourceFile) {
    forEach(file.libReferenceDirectives, libReference => {
        const libName = libReference.fileName.toLocaleLowerCase();
        const libFileName = libMap.get(libName);
        if (libFileName) {
            // we ignore any 'no-default-lib' reference set on this file.
            processRootFile(combinePaths(defaultLibraryPath, libFileName), /*isDefaultLib*/ true, /*ignoreNoDefaultLib*/ true);
        }
        else {
            const unqualifiedLibName = removeSuffix(removePrefix(libName, "lib."), ".d.ts");
            const suggestion = getSpellingSuggestion(unqualifiedLibName, libs, identity);
            const message = suggestion ? Diagnostics.Cannot_find_lib_definition_for_0_Did_you_mean_1 : Diagnostics.Cannot_find_lib_definition_for_0;
            fileProcessingDiagnostics.add(createDiagnostic(file, libReference.pos, libReference.end, message, libName, suggestion));
        }
    });
}

// Builds a file-located diagnostic when a reference location is available,
// otherwise a global compiler diagnostic.
function createDiagnostic(refFile: SourceFile, refPos: number, refEnd: number, message: DiagnosticMessage, ...args: any[]): Diagnostic {
    if (refFile === undefined || refPos === undefined || refEnd === undefined) {
        return createCompilerDiagnostic(message, ...args);
    }
    else {
        return createFileDiagnostic(refFile, refPos, refEnd - refPos, message, ...args);
    }
}

// Delegates canonicalization to the host (which knows case sensitivity).
function getCanonicalFileName(fileName: string): string {
    return host.getCanonicalFileName(fileName);
}

// Resolves all module names imported by `file` and adds eligible resolved
// files to the program, tracking node_modules depth so deep JS imports can be
// elided (and re-processed later if the depth limit allows).
function processImportedModules(file: SourceFile) {
    collectExternalModuleReferences(file);
    if (file.imports.length || file.moduleAugmentations.length) {
        // Because global augmentation doesn't have string literal name, we can check for global augmentation as such.
        const moduleNames = getModuleNames(file);
        const oldProgramState: OldProgramState = { program: oldProgram, oldSourceFile: oldProgram && oldProgram.getSourceFile(file.fileName), modifiedFilePaths };
        const resolutions = resolveModuleNamesReusingOldState(moduleNames, getNormalizedAbsolutePath(file.originalFileName, currentDirectory), file, oldProgramState);
        Debug.assert(resolutions.length === moduleNames.length);
        for (let i = 0; i < moduleNames.length; i++) {
            const resolution = resolutions[i];
            setResolvedModule(file, moduleNames[i], resolution);
            if (!resolution) {
                continue;
            }
            const isFromNodeModulesSearch = resolution.isExternalLibraryImport;
            const isJsFile = !resolutionExtensionIsTSOrJson(resolution.extension);
            const isJsFileFromNodeModules = isFromNodeModulesSearch && isJsFile;
            const resolvedFileName = resolution.resolvedFileName;
            if (isFromNodeModulesSearch) {
                currentNodeModulesDepth++;
            }
            // add file to program only if:
            // - resolution was successful
            // - noResolve is falsy
            // - module name comes from the list of imports
            // - it's not a top level JavaScript module that exceeded the search max
            const elideImport = isJsFileFromNodeModules && currentNodeModulesDepth > maxNodeModuleJsDepth;
            // Don't add the file if it has a bad extension (e.g. 'tsx' if we don't have '--allowJs')
            // This may still end up being an untyped module -- the file won't be included but imports will be allowed.
            const shouldAddFile = resolvedFileName
                && !getResolutionDiagnostic(options, resolution)
                && !options.noResolve
                && i < file.imports.length
                && !elideImport
                && !(isJsFile && !options.allowJs)
                && (isInJSFile(file.imports[i]) || !(file.imports[i].flags & NodeFlags.JSDoc));
            if (elideImport) {
                modulesWithElidedImports.set(file.path, true);
            }
            else if (shouldAddFile) {
                const path = toPath(resolvedFileName);
                const pos = skipTrivia(file.text, file.imports[i].pos);
                findSourceFile(resolvedFileName, path, /*isDefaultLib*/ false, /*ignoreNoDefaultLib*/ false, file, pos, file.imports[i].end, resolution.packageId);
            }
            if (isFromNodeModulesSearch) {
                currentNodeModulesDepth--;
            }
        }
    }
    else {
        // no imports - drop cached module resolutions
        file.resolvedModules = undefined;
    }
}

// Computes the common directory of all non-declaration input files.
function computeCommonSourceDirectory(sourceFiles: SourceFile[]): string {
    const fileNames = mapDefined(sourceFiles, file => file.isDeclarationFile ? undefined : file.fileName);
    return computeCommonSourceDirectoryOfFilenames(fileNames, currentDirectory, getCanonicalFileName);
}

// Verifies every non-declaration file lives under `rootDirectory`, emitting a
// diagnostic per violation; returns false if any file is outside.
function checkSourceFilesBelongToPath(sourceFiles: ReadonlyArray<SourceFile>, rootDirectory: string): boolean {
    let allFilesBelongToPath = true;
    const absoluteRootDirectoryPath = host.getCanonicalFileName(getNormalizedAbsolutePath(rootDirectory, currentDirectory));
    for (const sourceFile of sourceFiles) {
        if (!sourceFile.isDeclarationFile) {
            const absoluteSourceFilePath = host.getCanonicalFileName(getNormalizedAbsolutePath(sourceFile.fileName, currentDirectory));
            if (absoluteSourceFilePath.indexOf(absoluteRootDirectoryPath) !== 0) {
                programDiagnostics.add(createCompilerDiagnostic(Diagnostics.File_0_is_not_under_rootDir_1_rootDir_is_expected_to_contain_all_source_files, sourceFile.fileName, rootDirectory));
                allFilesBelongToPath = false;
            }
        }
    }
    return allFilesBelongToPath;
}

// Loads and parses the tsconfig of a project reference; undefined when the
// host cannot read the config file.
function parseProjectReferenceConfigFile(ref: ProjectReference): { commandLine: ParsedCommandLine, sourceFile: SourceFile } | undefined {
    // The actual filename (i.e. add "/tsconfig.json" if necessary)
    const refPath = resolveProjectReferencePath(ref);
    // An absolute path pointing to the containing directory of the config file
    const basePath = getNormalizedAbsolutePath(getDirectoryPath(refPath), host.getCurrentDirectory());
    const sourceFile = host.getSourceFile(refPath, ScriptTarget.JSON) as JsonSourceFile | undefined;
    if (sourceFile === undefined) {
        return undefined;
    }
    sourceFile.path = toPath(refPath);
    const commandLine = parseJsonSourceFileConfigFileContent(sourceFile, configParsingHost, basePath, /*existingOptions*/ undefined, refPath);
    return { commandLine, sourceFile };
}

// Registers a parsed referenced project so its outputs can redirect inputs.
function addProjectReferenceRedirects(referencedProject: ParsedCommandLine) {
    (projectReferenceRedirects || (projectReferenceRedirects = [])).push(referencedProject);
}

// Cross-validates compiler options against each other and against the
// collected files, adding program diagnostics for every inconsistent
// combination (isolatedModules, source maps, project references, paths,
// declaration emit, JSX settings, emit path collisions, ...).
function verifyCompilerOptions() {
    if (options.strictPropertyInitialization && !getStrictOptionValue(options, "strictNullChecks")) {
        createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1, "strictPropertyInitialization", "strictNullChecks");
    }
    if (options.isolatedModules) {
        if (getEmitDeclarations(options)) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, getEmitDeclarationOptionName(options), "isolatedModules");
        }
        if (options.noEmitOnError) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "noEmitOnError", "isolatedModules");
        }
        if (options.out) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "out", "isolatedModules");
        }
        if (options.outFile) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "outFile", "isolatedModules");
        }
    }
    if (options.inlineSourceMap) {
        if (options.sourceMap) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "sourceMap", "inlineSourceMap");
        }
        if (options.mapRoot) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "mapRoot", "inlineSourceMap");
        }
    }
    if (options.paths && options.baseUrl === undefined) {
        createDiagnosticForOptionName(Diagnostics.Option_paths_cannot_be_used_without_specifying_baseUrl_option, "paths");
    }
    if (options.composite) {
        if (options.declaration === false) {
            createDiagnosticForOptionName(Diagnostics.Composite_projects_may_not_disable_declaration_emit, "declaration");
        }
    }
    if (projectReferences) {
        for (let i = 0; i < projectReferences.length; i++) {
            const ref = projectReferences[i];
            const resolvedRefOpts = resolvedProjectReferences![i] && resolvedProjectReferences![i]!.commandLine.options;
            if (resolvedRefOpts === undefined) {
                createDiagnosticForReference(i, Diagnostics.File_0_does_not_exist, ref.path);
                continue;
            }
            if (!resolvedRefOpts.composite) {
                createDiagnosticForReference(i, Diagnostics.Referenced_project_0_must_have_setting_composite_Colon_true, ref.path);
            }
            if (ref.prepend) {
                const out = resolvedRefOpts.outFile || resolvedRefOpts.out;
                if (out) {
                    if (!host.fileExists(out)) {
                        createDiagnosticForReference(i, Diagnostics.Output_file_0_from_project_1_does_not_exist, out, ref.path);
                    }
                }
                else {
                    createDiagnosticForReference(i, Diagnostics.Cannot_prepend_project_0_because_it_does_not_have_outFile_set, ref.path);
                }
            }
        }
    }
    // List of collected files is complete; validate exhautiveness if this is a project with a file list
    if (options.composite) {
        const sourceFiles = files.filter(f => !f.isDeclarationFile);
        if (rootNames.length < sourceFiles.length) {
            const normalizedRootNames = rootNames.map(r => normalizePath(r).toLowerCase());
            for (const file of sourceFiles.map(f => normalizePath(f.path).toLowerCase())) {
                if (normalizedRootNames.indexOf(file) === -1) {
                    programDiagnostics.add(createCompilerDiagnostic(Diagnostics.File_0_is_not_in_project_file_list_Projects_must_list_all_files_or_use_an_include_pattern, file));
                }
            }
        }
    }
    if (options.paths) {
        for (const key in options.paths) {
            if (!hasProperty(options.paths, key)) {
                continue;
            }
            if (!hasZeroOrOneAsteriskCharacter(key)) {
                createDiagnosticForOptionPaths(/*onKey*/ true, key, Diagnostics.Pattern_0_can_have_at_most_one_Asterisk_character, key);
            }
            if (isArray(options.paths[key])) {
                const len = options.paths[key].length;
                if (len === 0) {
                    createDiagnosticForOptionPaths(/*onKey*/ false, key, Diagnostics.Substitutions_for_pattern_0_shouldn_t_be_an_empty_array, key);
                }
                for (let i = 0; i < len; i++) {
                    const subst = options.paths[key][i];
                    const typeOfSubst = typeof subst;
                    if (typeOfSubst === "string") {
                        if (!hasZeroOrOneAsteriskCharacter(subst)) {
                            createDiagnosticForOptionPathKeyValue(key, i, Diagnostics.Substitution_0_in_pattern_1_in_can_have_at_most_one_Asterisk_character, subst, key);
                        }
                    }
                    else {
                        createDiagnosticForOptionPathKeyValue(key, i, Diagnostics.Substitution_0_for_pattern_1_has_incorrect_type_expected_string_got_2, subst, key, typeOfSubst);
                    }
                }
            }
            else {
                createDiagnosticForOptionPaths(/*onKey*/ false, key, Diagnostics.Substitutions_for_pattern_0_should_be_an_array, key);
            }
        }
    }
    if (!options.sourceMap && !options.inlineSourceMap) {
        if (options.inlineSources) {
            createDiagnosticForOptionName(Diagnostics.Option_0_can_only_be_used_when_either_option_inlineSourceMap_or_option_sourceMap_is_provided, "inlineSources");
        }
        if (options.sourceRoot) {
            createDiagnosticForOptionName(Diagnostics.Option_0_can_only_be_used_when_either_option_inlineSourceMap_or_option_sourceMap_is_provided, "sourceRoot");
        }
    }
    if (options.out && options.outFile) {
        createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "out", "outFile");
    }
    if (options.mapRoot && !(options.sourceMap || options.declarationMap)) {
        // Error to specify --mapRoot without --sourcemap
        createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1_or_option_2, "mapRoot", "sourceMap", "declarationMap");
    }
    if (options.declarationDir) {
        if (!getEmitDeclarations(options)) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1_or_option_2, "declarationDir", "declaration", "composite");
        }
        if (options.out || options.outFile) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "declarationDir", options.out ? "out" : "outFile");
        }
    }
    if (options.declarationMap && !getEmitDeclarations(options)) {
        createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1_or_option_2, "declarationMap", "declaration", "composite");
    }
    if (options.lib && options.noLib) {
        createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "lib", "noLib");
    }
    if (options.noImplicitUseStrict && getStrictOptionValue(options, "alwaysStrict")) {
        createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "noImplicitUseStrict", "alwaysStrict");
    }
    const languageVersion = options.target || ScriptTarget.ES3;
    const outFile = options.outFile || options.out;
    const firstNonAmbientExternalModuleSourceFile = forEach(files, f => isExternalModule(f) && !f.isDeclarationFile ? f : undefined);
    if (options.isolatedModules) {
        if (options.module === ModuleKind.None && languageVersion < ScriptTarget.ES2015) {
            createDiagnosticForOptionName(Diagnostics.Option_isolatedModules_can_only_be_used_when_either_option_module_is_provided_or_option_target_is_ES2015_or_higher, "isolatedModules", "target");
        }
        const firstNonExternalModuleSourceFile = forEach(files, f => !isExternalModule(f) && !f.isDeclarationFile ? f : undefined);
        if (firstNonExternalModuleSourceFile) {
            const span = getErrorSpanForNode(firstNonExternalModuleSourceFile, firstNonExternalModuleSourceFile);
            programDiagnostics.add(createFileDiagnostic(firstNonExternalModuleSourceFile, span.start, span.length, Diagnostics.Cannot_compile_namespaces_when_the_isolatedModules_flag_is_provided));
        }
    }
    else if (firstNonAmbientExternalModuleSourceFile && languageVersion < ScriptTarget.ES2015 && options.module === ModuleKind.None) {
        // We cannot use createDiagnosticFromNode because nodes do not have parents yet
        const span = getErrorSpanForNode(firstNonAmbientExternalModuleSourceFile, firstNonAmbientExternalModuleSourceFile.externalModuleIndicator!);
        programDiagnostics.add(createFileDiagnostic(firstNonAmbientExternalModuleSourceFile, span.start, span.length, Diagnostics.Cannot_use_imports_exports_or_module_augmentations_when_module_is_none));
    }
    // Cannot specify module gen that isn't amd or system with --out
    if (outFile) {
        if (options.module && !(options.module === ModuleKind.AMD || options.module === ModuleKind.System)) {
            createDiagnosticForOptionName(Diagnostics.Only_amd_and_system_modules_are_supported_alongside_0, options.out ? "out" : "outFile", "module");
        }
        else if (options.module === undefined && firstNonAmbientExternalModuleSourceFile) {
            const span = getErrorSpanForNode(firstNonAmbientExternalModuleSourceFile, firstNonAmbientExternalModuleSourceFile.externalModuleIndicator!);
            programDiagnostics.add(createFileDiagnostic(firstNonAmbientExternalModuleSourceFile, span.start, span.length, Diagnostics.Cannot_compile_modules_using_option_0_unless_the_module_flag_is_amd_or_system, options.out ? "out" : "outFile"));
        }
    }
    if (options.resolveJsonModule) {
        if (getEmitModuleResolutionKind(options) !== ModuleResolutionKind.NodeJs) {
            createDiagnosticForOptionName(Diagnostics.Option_resolveJsonModule_cannot_be_specified_without_node_module_resolution_strategy, "resolveJsonModule");
        }
        // Any emit other than common js, amd, es2015 or esnext is error
        else if (!hasJsonModuleEmitEnabled(options)) {
            createDiagnosticForOptionName(Diagnostics.Option_resolveJsonModule_can_only_be_specified_when_module_code_generation_is_commonjs_amd_es2015_or_esNext, "resolveJsonModule", "module");
        }
    }
    // there has to be common source directory if user specified --outdir || --sourceRoot
    // if user specified --mapRoot, there needs to be common source directory if there would be multiple files being emitted
    if (options.outDir || // there is --outDir specified
        options.sourceRoot || // there is --sourceRoot specified
        options.mapRoot) { // there is --mapRoot specified
        // Precalculate and cache the common source directory
        const dir = getCommonSourceDirectory();
        // If we failed to find a good common directory, but outDir is specified and at least one of our files is on a windows drive/URL/other resource, add a failure
        if (options.outDir && dir === "" && forEach(files, file => getRootLength(file.fileName) > 1)) {
            createDiagnosticForOptionName(Diagnostics.Cannot_find_the_common_subdirectory_path_for_the_input_files, "outDir");
        }
    }
    if (!options.noEmit && options.allowJs && getEmitDeclarations(options)) {
        createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "allowJs", getEmitDeclarationOptionName(options));
    }
    if (options.checkJs && !options.allowJs) {
        programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1, "checkJs", "allowJs"));
    }
    if (options.emitDeclarationOnly) {
        if (!getEmitDeclarations(options)) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1_or_option_2, "emitDeclarationOnly", "declaration", "composite");
        }
        if (options.noEmit) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "emitDeclarationOnly", "noEmit");
        }
    }
    if (options.emitDecoratorMetadata && !options.experimentalDecorators) {
        createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1, "emitDecoratorMetadata", "experimentalDecorators");
    }
    if (options.jsxFactory) {
        if (options.reactNamespace) {
            createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "reactNamespace", "jsxFactory");
        }
        if (!parseIsolatedEntityName(options.jsxFactory, languageVersion)) {
            createOptionValueDiagnostic("jsxFactory", Diagnostics.Invalid_value_for_jsxFactory_0_is_not_a_valid_identifier_or_qualified_name, options.jsxFactory);
        }
    }
    else if (options.reactNamespace && !isIdentifierText(options.reactNamespace, languageVersion)) {
        createOptionValueDiagnostic("reactNamespace", Diagnostics.Invalid_value_for_reactNamespace_0_is_not_a_valid_identifier, options.reactNamespace);
    }
    // If the emit is enabled make sure that every output file is unique and not overwriting any of the input files
    if (!options.noEmit && !options.suppressOutputPathCheck) {
        const emitHost = getEmitHost();
        const emitFilesSeen = createMap<true>();
        forEachEmittedFile(emitHost, (emitFileNames) => {
            if (!options.emitDeclarationOnly) {
                verifyEmitFilePath(emitFileNames.jsFilePath, emitFilesSeen);
            }
            verifyEmitFilePath(emitFileNames.declarationFilePath, emitFilesSeen);
        });
    }
    // Verify that all the emit files are unique and don't overwrite input files
    function verifyEmitFilePath(emitFileName: string | undefined, emitFilesSeen: Map<true>) {
        if (emitFileName) {
            const emitFilePath = toPath(emitFileName);
            // Report error if the output overwrites input file
            if (filesByName.has(emitFilePath)) {
                let chain: DiagnosticMessageChain | undefined;
                if (!options.configFilePath) {
                    // The program is from either an inferred project or an external project
                    chain = chainDiagnosticMessages(/*details*/ undefined, Diagnostics.Adding_a_tsconfig_json_file_will_help_organize_projects_that_contain_both_TypeScript_and_JavaScript_files_Learn_more_at_https_Colon_Slash_Slashaka_ms_Slashtsconfig);
                }
                chain = chainDiagnosticMessages(chain, Diagnostics.Cannot_write_file_0_because_it_would_overwrite_input_file, emitFileName);
                blockEmittingOfFile(emitFileName, createCompilerDiagnosticFromMessageChain(chain));
            }
            const emitFileKey = !host.useCaseSensitiveFileNames() ? emitFilePath.toLocaleLowerCase() : emitFilePath;
            // Report error if multiple files write into same file
            if (emitFilesSeen.has(emitFileKey)) {
                // Already seen the same emit file - report error
                blockEmittingOfFile(emitFileName, createCompilerDiagnostic(Diagnostics.Cannot_write_file_0_because_it_would_be_overwritten_by_multiple_input_files, emitFileName));
            }
            else {
                emitFilesSeen.set(emitFileKey, true);
            }
        }
    }
}

// Locates the offending substitution inside the tsconfig "paths" syntax so
// the diagnostic points at the config source; falls back to a global
// diagnostic when the config syntax is unavailable.
function createDiagnosticForOptionPathKeyValue(key: string, valueIndex: number, message: DiagnosticMessage, arg0: string | number, arg1: string | number, arg2?: string | number) {
    let needCompilerDiagnostic = true;
    const pathsSyntax = getOptionPathsSyntax();
    for (const pathProp of pathsSyntax) {
        if (isObjectLiteralExpression(pathProp.initializer)) {
            for (const keyProps of getPropertyAssignment(pathProp.initializer, key)) {
                const initializer = keyProps.initializer;
                if (isArrayLiteralExpression(initializer) && initializer.elements.length > valueIndex) {
                    programDiagnostics.add(createDiagnosticForNodeInSourceFile(options.configFile!, initializer.elements[valueIndex], message, arg0, arg1, arg2));
                    needCompilerDiagnostic = false;
                }
            }
        }
    }
    if (needCompilerDiagnostic) {
        programDiagnostics.add(createCompilerDiagnostic(message, arg0, arg1, arg2));
    }
}

// Like the function above, but targets a "paths" pattern key (or its value)
// rather than a specific substitution element.
function createDiagnosticForOptionPaths(onKey: boolean, key: string, message: DiagnosticMessage, arg0: string | number) {
    let needCompilerDiagnostic = true;
    const pathsSyntax = getOptionPathsSyntax();
    for (const pathProp of pathsSyntax) {
        if (isObjectLiteralExpression(pathProp.initializer) &&
            createOptionDiagnosticInObjectLiteralSyntax(pathProp.initializer, onKey, key, /*key2*/ undefined, message, arg0)) {
            needCompilerDiagnostic = false;
        }
    }
    if (needCompilerDiagnostic) {
        programDiagnostics.add(createCompilerDiagnostic(message, arg0));
    }
}

// Looks up a named property inside the tsconfig "compilerOptions" object
// literal syntax, when the config file syntax is available.
function getOptionsSyntaxByName(name: string): object | undefined {
    const compilerOptionsObjectLiteralSyntax = getCompilerOptionsObjectLiteralSyntax();
    if (compilerOptionsObjectLiteralSyntax) {
        return getPropertyAssignment(compilerOptionsObjectLiteralSyntax, name);
    }
    return undefined;
}

// Returns the property assignments for "paths", or an empty array.
function getOptionPathsSyntax(): PropertyAssignment[] {
    return getOptionsSyntaxByName("paths") as PropertyAssignment[] || emptyArray;
}

// Diagnostic helpers that target an option's key (name) in config syntax.
function createDiagnosticForOptionName(message: DiagnosticMessage, option1: string, option2?: string, option3?: string) {
    createDiagnosticForOption(/*onKey*/ true, option1, option2, message, option1, option2, option3);
}

// Diagnostic helper that targets an option's value in config syntax.
function createOptionValueDiagnostic(option1: string, message: DiagnosticMessage, arg0: string) {
    createDiagnosticForOption(/*onKey*/ false, option1, /*option2*/ undefined, message, arg0);
}

// Points a diagnostic at the i-th element of the "references" array in the
// tsconfig syntax, or emits a global diagnostic when unavailable.
function createDiagnosticForReference(index: number, message: DiagnosticMessage, arg0?: string | number, arg1?: string | number) {
    const referencesSyntax = getProjectReferencesSyntax();
    if (referencesSyntax && referencesSyntax.elements.length > index) {
        programDiagnostics.add(createDiagnosticForNodeInSourceFile(options.configFile!, referencesSyntax.elements[index], message, arg0, arg1));
    }
    else {
        programDiagnostics.add(createCompilerDiagnostic(message, arg0, arg1));
    }
}

// Attaches an option diagnostic to the compilerOptions object literal when
// present; otherwise falls back to a global compiler diagnostic.
function createDiagnosticForOption(onKey: boolean, option1: string, option2: string | undefined, message: DiagnosticMessage, arg0: string | number, arg1?: string | number, arg2?: string | number) {
    const compilerOptionsObjectLiteralSyntax = getCompilerOptionsObjectLiteralSyntax();
    const needCompilerDiagnostic = !compilerOptionsObjectLiteralSyntax ||
        !createOptionDiagnosticInObjectLiteralSyntax(compilerOptionsObjectLiteralSyntax, onKey, option1, option2, message, arg0, arg1, arg2);
    if (needCompilerDiagnostic) {
        programDiagnostics.add(createCompilerDiagnostic(message, arg0, arg1, arg2));
    }
}

// Lazily locates (and caches in a closure variable) the "references" array
// literal in the tsconfig syntax; null once computed and absent.
function getProjectReferencesSyntax(): ArrayLiteralExpression | null {
    if (_referencesArrayLiteralSyntax === undefined) {
        _referencesArrayLiteralSyntax = null; // tslint:disable-line:no-null-keyword
        if (options.configFile) {
            const jsonObjectLiteral = getTsConfigObjectLiteralExpression(options.configFile)!; // TODO: GH#18217
            for (const prop of getPropertyAssignment(jsonObjectLiteral, "references")) {
                if (isArrayLiteralExpression(prop.initializer)) {
                    _referencesArrayLiteralSyntax = prop.initializer;
                    break;
                }
            }
        }
    }
    return _referencesArrayLiteralSyntax;
}

// Lazily locates (and caches) the "compilerOptions" object literal in the
// tsconfig syntax; null once computed and absent.
function getCompilerOptionsObjectLiteralSyntax() {
    if (_compilerOptionsObjectLiteralSyntax === undefined) {
        _compilerOptionsObjectLiteralSyntax = null; // tslint:disable-line:no-null-keyword
        const jsonObjectLiteral = getTsConfigObjectLiteralExpression(options.configFile);
        if (jsonObjectLiteral) {
            for (const prop of getPropertyAssignment(jsonObjectLiteral, "compilerOptions")) {
                if (isObjectLiteralExpression(prop.initializer)) {
                    _compilerOptionsObjectLiteralSyntax = prop.initializer;
                    break;
                }
            }
        }
    }
    return _compilerOptionsObjectLiteralSyntax;
}

// Adds a diagnostic on each matching property assignment (its key or its
// value, per `onKey`); returns true when at least one property matched.
function createOptionDiagnosticInObjectLiteralSyntax(objectLiteral: ObjectLiteralExpression, onKey: boolean, key1: string, key2: string | undefined, message: DiagnosticMessage, arg0: string | number, arg1?: string | number, arg2?: string | number): boolean {
    const props = getPropertyAssignment(objectLiteral, key1, key2);
    for (const prop of props) {
        programDiagnostics.add(createDiagnosticForNodeInSourceFile(options.configFile!, onKey ? prop.name : prop.initializer, message, arg0, arg1, arg2));
    }
    return !!props.length;
}

// Records a diagnostic that blocks emit of the given output file.
function blockEmittingOfFile(emitFileName: string, diag: Diagnostic) {
    hasEmitBlockingDiagnostics.set(toPath(emitFileName), true);
    programDiagnostics.add(diag);
}

// Determines whether `file` is one of this program's emitted outputs (as
// opposed to an input source file), honoring out/outFile, declarationDir,
// and outDir settings.
function isEmittedFile(file: string): boolean {
    if (options.noEmit) {
        return false;
    }
    // If this is source file, its not emitted file
    const filePath = toPath(file);
    if (getSourceFileByPath(filePath)) {
        return false;
    }
    // If options have --outFile or --out just check that
    const out = options.outFile || options.out;
    if (out) {
        return isSameFile(filePath, out) || isSameFile(filePath, removeFileExtension(out) + Extension.Dts);
    }
    // If declarationDir is specified, return if its a file in that directory
    if (options.declarationDir && containsPath(options.declarationDir, filePath, currentDirectory, !host.useCaseSensitiveFileNames())) {
        return true;
    }
    // If --outDir, check if file is in that directory
    if (options.outDir) {
        return containsPath(options.outDir, filePath, currentDirectory, !host.useCaseSensitiveFileNames());
    }
    if (fileExtensionIsOneOf(filePath, supportedJSExtensions) || fileExtensionIs(filePath, Extension.Dts)) {
        // Otherwise just check if sourceFile with the name exists
        const filePathWithoutExtension = removeFileExtension(filePath);
        return !!getSourceFileByPath((filePathWithoutExtension + Extension.Ts) as Path) ||
            !!getSourceFileByPath((filePathWithoutExtension + Extension.Tsx) as Path);
    }
    return false;
}

// Path equality respecting the host's case sensitivity.
function isSameFile(file1: string, file2: string) {
    return comparePaths(file1, file2, currentDirectory, !host.useCaseSensitiveFileNames()) === Comparison.EqualTo;
}
} // closes the enclosing factory function (its header is above this excerpt)

/* @internal */
// Adapts a CompilerHost into the narrower ParseConfigFileHost interface used
// when reading tsconfig files for project references.
export function parseConfigHostFromCompilerHost(host: CompilerHost): ParseConfigFileHost {
    return {
        fileExists: f => host.fileExists(f),
        readDirectory(root, extensions, excludes, includes, depth) {
            Debug.assertDefined(host.readDirectory, "'CompilerHost.readDirectory' must be implemented to correctly process 'projectReferences'");
            return host.readDirectory!(root, extensions, excludes, includes, depth);
        },
        readFile: f => host.readFile(f),
        useCaseSensitiveFileNames: host.useCaseSensitiveFileNames(),
        getCurrentDirectory: () => host.getCurrentDirectory(),
        onUnRecoverableConfigFileDiagnostic: () => undefined
    };
}

// For backward compatibility
/** @deprecated */
export interface ResolveProjectReferencePathHost {
    fileExists(fileName: string): boolean;
}

/**
 * Returns the target config filename of a project reference.
 * Note: The file might not exist.
 */
export function resolveProjectReferencePath(ref: ProjectReference): ResolvedConfigFileName;
/** @deprecated */
export function resolveProjectReferencePath(host: ResolveProjectReferencePathHost, ref: ProjectReference): ResolvedConfigFileName;
export function resolveProjectReferencePath(hostOrRef: ResolveProjectReferencePathHost | ProjectReference, ref?: ProjectReference): ResolvedConfigFileName {
    // Two overloads share this body; when only one argument is passed it is
    // the ProjectReference itself (the deprecated host parameter is unused).
    const passedInRef = ref ? ref : hostOrRef as ProjectReference;
    return resolveConfigFileProjectName(passedInRef.path);
}

// Names the declaration-emit option actually in effect, for diagnostics.
function getEmitDeclarationOptionName(options: CompilerOptions) {
    return options.declaration ? "declaration" : "composite";
}

/* @internal */
/**
 * Returns a DiagnosticMessage if we won't include a resolved module due to its extension.
 * The DiagnosticMessage's parameters are the imported module name, and the filename it resolved to.
 * This returns a diagnostic even if the module will be an untyped module.
 */
export function getResolutionDiagnostic(options: CompilerOptions, { extension }: ResolvedModuleFull): DiagnosticMessage | undefined {
    switch (extension) {
        case Extension.Ts:
        case Extension.Dts:
            // These are always allowed.
            return undefined;
        case Extension.Tsx:
            return needJsx();
        case Extension.Jsx:
            return needJsx() || needAllowJs();
        case Extension.Js:
            return needAllowJs();
        case Extension.Json:
            return needResolveJsonModule();
    }
    // NOTE(review): this function is truncated at the end of the visible
    // excerpt; the remainder of needJsx() and its siblings lie past this view.
    function needJsx() { return options.jsx ?
undefined : Diagnostics.Module_0_was_resolved_to_1_but_jsx_is_not_set; } function needAllowJs() { return options.allowJs || !getStrictOptionValue(options, "noImplicitAny") ? undefined : Diagnostics.Could_not_find_a_declaration_file_for_module_0_1_implicitly_has_an_any_type; } function needResolveJsonModule() { return options.resolveJsonModule ? undefined : Diagnostics.Module_0_was_resolved_to_1_but_resolveJsonModule_is_not_used; } } function getModuleNames({ imports, moduleAugmentations }: SourceFile): string[] { const res = imports.map(i => i.text); for (const aug of moduleAugmentations) { if (aug.kind === SyntaxKind.StringLiteral) { res.push(aug.text); } // Do nothing if it's an Identifier; we don't need to do module resolution for `declare global`. } return res; } }
apache-2.0
eemirtekin/Sakai-10.6-TR
sitestats/sitestats-tool/src/java/org/sakaiproject/sitestats/tool/wicket/components/SelectOptionsGroup.java
1455
/** * $URL: https://source.sakaiproject.org/svn/sitestats/tags/sakai-10.6/sitestats-tool/src/java/org/sakaiproject/sitestats/tool/wicket/components/SelectOptionsGroup.java $ * $Id: SelectOptionsGroup.java 105078 2012-02-24 23:00:38Z ottenhoff@longsight.com $ * * Copyright (c) 2006-2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.sitestats.tool.wicket.components; import org.apache.wicket.AttributeModifier; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.border.Border; import org.apache.wicket.model.IModel; public class SelectOptionsGroup extends Border { public SelectOptionsGroup(String id, IModel model) { super(id); WebMarkupContainer optgroup = new WebMarkupContainer("optgroup"); optgroup.add(new AttributeModifier("label", true, model)); add(optgroup); optgroup.add(getBodyContainer()); } }
apache-2.0
inloop/AndroidViewModel
sample/src/main/java/eu/inloop/viewmodel/sample/fragment/SampleBindingFragment.java
1292
package eu.inloop.viewmodel.sample.fragment; import android.os.Bundle; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import android.view.View; import androidx.fragment.app.Fragment; import eu.inloop.viewmodel.binding.ViewModelBaseBindingFragment; import eu.inloop.viewmodel.binding.ViewModelBindingConfig; import eu.inloop.viewmodel.sample.R; import eu.inloop.viewmodel.sample.databinding.FragmentSampleBindingBinding; import eu.inloop.viewmodel.sample.viewmodel.SampleBindingViewModel; import eu.inloop.viewmodel.sample.viewmodel.view.ISampleBindingView; /** * A simple {@link Fragment} subclass. */ public class SampleBindingFragment extends ViewModelBaseBindingFragment<ISampleBindingView, SampleBindingViewModel, FragmentSampleBindingBinding> implements ISampleBindingView { public SampleBindingFragment() { // Required empty public constructor } @Override public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); setModelView(this); } @Override public ViewModelBindingConfig getViewModelBindingConfig() { return new ViewModelBindingConfig(R.layout.fragment_sample_binding, requireActivity()); } }
apache-2.0
consulo/consulo-markdown
src/main/java/org/intellij/plugins/markdown/highlighting/MarkdownColorSettingsPage.java
8693
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.intellij.plugins.markdown.highlighting;

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.intellij.plugins.markdown.MarkdownBundle;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.fileTypes.SyntaxHighlighter;
import com.intellij.openapi.options.colors.AttributesDescriptor;
import com.intellij.openapi.options.colors.ColorDescriptor;
import com.intellij.openapi.options.colors.ColorSettingsPage;
import com.intellij.openapi.util.io.StreamUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;

/**
 * Color-settings page for the Markdown language: declares the attribute
 * descriptors shown in the "Colors &amp; Fonts" UI and maps the demo
 * document's highlighting tags to attribute keys.
 */
public class MarkdownColorSettingsPage implements ColorSettingsPage {

  private static final AttributesDescriptor[] ATTRIBUTE_DESCRIPTORS = AttributeDescriptorsHolder.INSTANCE.get();

  /** Maps the custom tags used in the demo text to their attribute keys. */
  @NotNull
  public Map<String, TextAttributesKey> getAdditionalHighlightingTagToDescriptorMap() {
    final Map<String, TextAttributesKey> result = new HashMap<String, TextAttributesKey>();

    result.put("hh1", MarkdownHighlighterColors.HEADER_LEVEL_1_ATTR_KEY);
    result.put("hh2", MarkdownHighlighterColors.HEADER_LEVEL_2_ATTR_KEY);
    result.put("hh3", MarkdownHighlighterColors.HEADER_LEVEL_3_ATTR_KEY);
    result.put("hh4", MarkdownHighlighterColors.HEADER_LEVEL_4_ATTR_KEY);
    result.put("hh5", MarkdownHighlighterColors.HEADER_LEVEL_5_ATTR_KEY);
    result.put("hh6", MarkdownHighlighterColors.HEADER_LEVEL_6_ATTR_KEY);

    result.put("bold", MarkdownHighlighterColors.BOLD_ATTR_KEY);
    result.put("boldm", MarkdownHighlighterColors.BOLD_MARKER_ATTR_KEY);
    result.put("italic", MarkdownHighlighterColors.ITALIC_ATTR_KEY);
    result.put("italicm", MarkdownHighlighterColors.ITALIC_MARKER_ATTR_KEY);
    result.put("strike", MarkdownHighlighterColors.STRIKE_THROUGH_ATTR_KEY);

    result.put("alink", MarkdownHighlighterColors.AUTO_LINK_ATTR_KEY);
    result.put("link_def", MarkdownHighlighterColors.LINK_DEFINITION_ATTR_KEY);
    result.put("link_text", MarkdownHighlighterColors.LINK_TEXT_ATTR_KEY);
    result.put("link_label", MarkdownHighlighterColors.LINK_LABEL_ATTR_KEY);
    result.put("link_dest", MarkdownHighlighterColors.LINK_DESTINATION_ATTR_KEY);
    result.put("link_img", MarkdownHighlighterColors.IMAGE_ATTR_KEY);
    result.put("link_title", MarkdownHighlighterColors.LINK_TITLE_ATTR_KEY);

    result.put("code_span", MarkdownHighlighterColors.CODE_SPAN_ATTR_KEY);
    result.put("code_block", MarkdownHighlighterColors.CODE_BLOCK_ATTR_KEY);
    result.put("code_fence", MarkdownHighlighterColors.CODE_FENCE_ATTR_KEY);
    result.put("quote", MarkdownHighlighterColors.BLOCK_QUOTE_ATTR_KEY);

    result.put("ul", MarkdownHighlighterColors.UNORDERED_LIST_ATTR_KEY);
    result.put("ol", MarkdownHighlighterColors.ORDERED_LIST_ATTR_KEY);

    return result;
  }

  @NotNull
  public AttributesDescriptor[] getAttributeDescriptors() {
    return ATTRIBUTE_DESCRIPTORS;
  }

  @NotNull
  public ColorDescriptor[] getColorDescriptors() {
    return ColorDescriptor.EMPTY_ARRAY;
  }

  /**
   * Loads the bundled sample document used to preview the color scheme.
   * Falls back to a short error marker if the resource is missing or
   * unreadable.
   */
  @NonNls
  @NotNull
  public String getDemoText() {
    final InputStream stream = getClass().getResourceAsStream("SampleDocument.md");
    // Bug fix: the original NPE'd when the resource was absent and leaked
    // the stream when readText threw (close() was only reached on success).
    if (stream != null) {
      try {
        return StringUtil.convertLineSeparators(StreamUtil.readText(stream, CharsetToolkit.UTF8));
      }
      catch (IOException ignored) {
        // fall through to the error marker below
      }
      finally {
        try {
          stream.close();
        }
        catch (IOException ignored) {
          // nothing sensible to do on close failure
        }
      }
    }
    return "*error loading text*";
  }

  @NotNull
  public String getDisplayName() {
    return MarkdownBundle.message("markdown.plugin.name");
  }

  @NotNull
  public SyntaxHighlighter getHighlighter() {
    return new MarkdownSyntaxHighlighter();
  }

  /**
   * Enum-singleton holding the bundle-key -&gt; attribute-key table from
   * which the descriptor array is built.
   */
  private enum AttributeDescriptorsHolder {
    INSTANCE;

    // Keyed by message-bundle key; guarded against accidental duplicates in put().
    private final Map<String, TextAttributesKey> myMap = new HashMap<String, TextAttributesKey>();

    AttributeDescriptorsHolder() {
      put("markdown.editor.colors.text", MarkdownHighlighterColors.TEXT_ATTR_KEY);
      put("markdown.editor.colors.bold", MarkdownHighlighterColors.BOLD_ATTR_KEY);
      put("markdown.editor.colors.bold_marker", MarkdownHighlighterColors.BOLD_MARKER_ATTR_KEY);
      put("markdown.editor.colors.italic", MarkdownHighlighterColors.ITALIC_ATTR_KEY);
      put("markdown.editor.colors.italic_marker", MarkdownHighlighterColors.ITALIC_MARKER_ATTR_KEY);
      put("markdown.editor.colors.strikethrough", MarkdownHighlighterColors.STRIKE_THROUGH_ATTR_KEY);

      put("markdown.editor.colors.header_level_1", MarkdownHighlighterColors.HEADER_LEVEL_1_ATTR_KEY);
      put("markdown.editor.colors.header_level_2", MarkdownHighlighterColors.HEADER_LEVEL_2_ATTR_KEY);
      put("markdown.editor.colors.header_level_3", MarkdownHighlighterColors.HEADER_LEVEL_3_ATTR_KEY);
      put("markdown.editor.colors.header_level_4", MarkdownHighlighterColors.HEADER_LEVEL_4_ATTR_KEY);
      put("markdown.editor.colors.header_level_5", MarkdownHighlighterColors.HEADER_LEVEL_5_ATTR_KEY);
      put("markdown.editor.colors.header_level_6", MarkdownHighlighterColors.HEADER_LEVEL_6_ATTR_KEY);

      put("markdown.editor.colors.blockquote", MarkdownHighlighterColors.BLOCK_QUOTE_ATTR_KEY);

      put("markdown.editor.colors.code_span", MarkdownHighlighterColors.CODE_SPAN_ATTR_KEY);
      put("markdown.editor.colors.code_span_marker", MarkdownHighlighterColors.CODE_SPAN_MARKER_ATTR_KEY);
      put("markdown.editor.colors.code_block", MarkdownHighlighterColors.CODE_BLOCK_ATTR_KEY);
      put("markdown.editor.colors.code_fence", MarkdownHighlighterColors.CODE_FENCE_ATTR_KEY);

      put("markdown.editor.colors.hrule", MarkdownHighlighterColors.HRULE_ATTR_KEY);
      put("markdown.editor.colors.table_separator", MarkdownHighlighterColors.TABLE_SEPARATOR_ATTR_KEY);
      put("markdown.editor.colors.blockquote_marker", MarkdownHighlighterColors.BLOCK_QUOTE_MARKER_ATTR_KEY);
      put("markdown.editor.colors.list_marker", MarkdownHighlighterColors.LIST_MARKER_ATTR_KEY);
      put("markdown.editor.colors.header_marker", MarkdownHighlighterColors.HEADER_MARKER_ATTR_KEY);

      put("markdown.editor.colors.auto_link", MarkdownHighlighterColors.AUTO_LINK_ATTR_KEY);
      put("markdown.editor.colors.explicit_link", MarkdownHighlighterColors.EXPLICIT_LINK_ATTR_KEY);
      put("markdown.editor.colors.reference_link", MarkdownHighlighterColors.REFERENCE_LINK_ATTR_KEY);
      put("markdown.editor.colors.image", MarkdownHighlighterColors.IMAGE_ATTR_KEY);
      put("markdown.editor.colors.link_definition", MarkdownHighlighterColors.LINK_DEFINITION_ATTR_KEY);
      put("markdown.editor.colors.link_text", MarkdownHighlighterColors.LINK_TEXT_ATTR_KEY);
      put("markdown.editor.colors.link_label", MarkdownHighlighterColors.LINK_LABEL_ATTR_KEY);
      put("markdown.editor.colors.link_destination", MarkdownHighlighterColors.LINK_DESTINATION_ATTR_KEY);
      put("markdown.editor.colors.link_title", MarkdownHighlighterColors.LINK_TITLE_ATTR_KEY);

      put("markdown.editor.colors.unordered_list", MarkdownHighlighterColors.UNORDERED_LIST_ATTR_KEY);
      put("markdown.editor.colors.ordered_list", MarkdownHighlighterColors.ORDERED_LIST_ATTR_KEY);
      put("markdown.editor.colors.list_item", MarkdownHighlighterColors.LIST_ITEM_ATTR_KEY);

      put("markdown.editor.colors.html_block", MarkdownHighlighterColors.HTML_BLOCK_ATTR_KEY);
      put("markdown.editor.colors.inline_html", MarkdownHighlighterColors.INLINE_HTML_ATTR_KEY);
    }

    /** Builds the descriptor array, localizing each bundle key. */
    @NotNull
    public AttributesDescriptor[] get() {
      final AttributesDescriptor[] result = new AttributesDescriptor[myMap.size()];
      int i = 0;

      for (Map.Entry<String, TextAttributesKey> entry : myMap.entrySet()) {
        result[i++] = new AttributesDescriptor(MarkdownBundle.message(entry.getKey()), entry.getValue());
      }

      return result;
    }

    private void put(@NotNull String bundleKey, @NotNull TextAttributesKey attributes) {
      if (myMap.put(bundleKey, attributes) != null) {
        throw new IllegalArgumentException("Duplicated key: " + bundleKey);
      }
    }
  }
}
apache-2.0
GAIA-GMU/PAR
Documentation/Tutorials/Source/Tutorial9/Source/Tutorial9/Private/PARGameMode.cpp
1458
// Fill out your copyright notice in the Description page of Project Settings. #include "Tutorial9.h" #include "PARGameMode.h" #include "Humanoid.h" #include "lwnet.h" extern parTime *partime; extern Actionary *actionary; extern TArray<AHumanoid*> all_agents; extern TArray<UAnimMontage*> all_montages; extern ActionTable actionTable; //Holds the mapping of actions to real world code int doNod(iPAR *ipar){ MetaObject * agent=ipar->getAgent(); AHumanoid * hum = NULL; for (auto& ag : all_agents){ if (agent == ag->GetPARAgent()->getObject()){ hum = ag; } } if (hum != NULL){ hum->AddAction(all_montages[0]); } return 1; } APARGameMode::APARGameMode(){ error = 0; static ConstructorHelpers::FObjectFinder<UAnimMontage> TestMontage(TEXT("AnimMontage'/Game/Actions/TestMontage'")); all_montages.Add(TestMontage.Object); } // Called when the game starts or when spawned void APARGameMode::PreInitializeComponents(){ Super::PreInitializeComponents(); partime = new parTime(); partime->setTimeOffset(8,30,0); partime->setTimeRate(1); // how fast should time change actionary = new Actionary(); actionary->init(); //Finally, we add all of our actions to the action table actionTable.addFunctions("Nod", &doNod); } // Called every frame void APARGameMode::Tick(float DeltaSeconds){ Super::Tick(DeltaSeconds); //partime->setTimeOffset(partime->getCurrentTime() + DeltaSeconds); int error = 0; LWNetList::advance(&error); }
apache-2.0
TheNotoriousOOP/Iteration3
src/main/model/ability_management/ability/move_abilities/MoveNorthEastAbility.java
386
package model.ability_management.ability.move_abilities;

import model.ability_management.ability.Ability;

/**
 * Ability that moves its owning actor one step to the northeast.
 */
public class MoveNorthEastAbility extends Ability {

    public MoveNorthEastAbility() {
        // No state to initialise.
    }

    /** Delegates the movement to the actor attached to this ability. */
    @Override
    public void perform() {
        getActor().moveNorthEast();
    }

    /** Human-readable name displayed in ability listings. */
    @Override
    public String toString() {
        return "Move Northeast";
    }
}
apache-2.0
sumveds/weather-report
app.js
671
// Express application bootstrap: configures the view engine, middleware
// chain and routes. The app is exported (not started) so the server
// entry point or tests can attach it to an HTTP listener.
var express = require('express');
var path = require('path');
// var favicon = require('serve-favicon');
var logger = require('morgan');
var bodyParser = require('body-parser');

var weatherRouter = require('./routes/weather');

var app = express();

// View engine setup: Jade templates live under ./views.
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');

// uncomment after placing your favicon in /public
// app.use(favicon(__dirname + '/public/favicon.ico'));

// Request logging, body parsing and static assets — registration order
// matters, so keep these before the route handlers.
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(express.static(path.join(__dirname, 'public')));

// All weather endpoints are mounted under /weather.
app.use('/weather', weatherRouter);

module.exports = app;
apache-2.0
domaframework/simple-examples
dao-style-text/src/test/java/example/dao_style_text/SelectTest.java
4556
package example.dao_style_text;

import static java.util.stream.Collectors.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;

import example.dao_style_text.dao.EmployeeDao;
import example.dao_style_text.dao.EmployeeDaoImpl;
import example.dao_style_text.domain.Age;
import example.dao_style_text.domain.Salary;
import example.dao_style_text.entity.Employee;
import java.sql.Timestamp;
import java.util.List;
import java.util.Objects;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.seasar.doma.jdbc.Config;
import org.seasar.doma.jdbc.SelectOptions;

// Exercises the SELECT features of EmployeeDao against the seeded test
// database provided by TestEnvironment (14 employees). Expected counts
// below depend on that fixture data.
@ExtendWith(TestEnvironment.class)
public class SelectTest {

  private final EmployeeDao dao;

  // Config is injected by the TestEnvironment extension.
  public SelectTest(Config config) {
    Objects.requireNonNull(config);
    this.dao = new EmployeeDaoImpl(config);
  }

  @Test
  public void testSelectAll() {
    var employees = dao.selectAll();
    assertEquals(14, employees.size());
  }

  @Test
  public void testSelectById() {
    var employee = dao.selectById(1);
    assertNotNull(employee);
  }

  // Null bounds are treated as "no constraint" by the conditional SQL.
  @Test
  public void testConditionalSelect() {
    var list = dao.selectByAgeRange(new Age(30), new Age(40));
    assertEquals(6, list.size());
    list = dao.selectByAgeRange(new Age(30), null);
    assertEquals(12, list.size());
    list = dao.selectByAgeRange(null, new Age(40));
    assertEquals(8, list.size());
    list = dao.selectByAgeRange(null, null);
    assertEquals(14, list.size());
  }

  // Unlike the range query above, a null name yields an empty result here.
  @Test
  public void testConditionalSelect2() {
    var list = dao.selectByName("SMITH");
    assertEquals(1, list.size());
    list = dao.selectByName(null);
    assertEquals(0, list.size());
  }

  @Test
  public void testLoopSelect() {
    var ages = Stream.of(30, 40, 50, 60).map(Age::new).collect(toList());
    var list = dao.selectByAges(ages);
    assertEquals(3, list.size());
  }

  // isNotEmpty: null/empty are ignored (all rows), whitespace is not.
  @Test
  public void testIsNotEmptyFunction() {
    var list = dao.selectByNotEmptyName("SMITH");
    assertEquals(1, list.size());
    list = dao.selectByNotEmptyName(null);
    assertEquals(14, list.size());
    list = dao.selectByNotEmptyName("");
    assertEquals(14, list.size());
    list = dao.selectByNotEmptyName(" ");
    assertEquals(0, list.size());
  }

  @Test
  public void testLikePredicate_prefix() {
    var list = dao.selectByNameWithPrefixMatching("S");
    assertEquals(2, list.size());
  }

  @Test
  public void testLikePredicate_suffix() {
    var list = dao.selectByNameWithSuffixMatching("S");
    assertEquals(3, list.size());
  }

  @Test
  public void testLikePredicate_inside() {
    var list = dao.selectByNameWithInfixMatching("A");
    assertEquals(7, list.size());
  }

  // "XXX" has no match, so only 2 of the 3 names resolve.
  @Test
  public void testInPredicate() {
    var names = List.of("JONES", "SCOTT", "XXX");
    var list = dao.selectByNames(names);
    assertEquals(2, list.size());
  }

  @Test
  public void testSelectByTimestampRange() {
    var from = Timestamp.valueOf("2008-01-20 12:34:56");
    var to = Timestamp.valueOf("2008-03-20 12:34:56");
    var list = dao.selectByHiredateRange(from, to);
    assertEquals(3, list.size());
  }

  @Test
  public void testSelectByDomain() {
    var list = dao.selectBySalary(new Salary(2900));
    assertEquals(4, list.size());
  }

  @Test
  public void testSelectDomain() {
    var salary = dao.selectSummedSalary();
    assertNotNull(salary);
  }

  // Query-by-example: non-null entity properties become WHERE conditions.
  @Test
  public void testSelectByEntity() {
    var e = new Employee();
    e.setName("SMITH");
    var list = dao.selectByExample(e);
    assertEquals(1, list.size());
  }

  // Streams the result set through a reducer instead of materialising a list.
  @Test
  public void testStream() {
    var sum =
        dao.selectByAge(
            30,
            s ->
                s.map(Employee::getSalary)
                    .filter(Objects::nonNull)
                    .reduce(new Salary(0), Salary::add));
    assertEquals(Integer.valueOf(21975), sum.getValue());
  }

  @Test
  public void testOffsetLimit() {
    var options = SelectOptions.get().offset(5).limit(3);
    var list = dao.selectAll(options);
    assertEquals(3, list.size());
  }

  // count() makes the total row count available alongside the page.
  @Test
  public void testCount() {
    var options = SelectOptions.get().offset(5).limit(3).count();
    var list = dao.selectAll(options);
    assertEquals(3, list.size());
    assertEquals(14, options.getCount());
  }

  @Test
  public void testSelectJoinedResult() {
    var list = dao.selectAllEmployeeDepartment();
    assertEquals(14, list.size());
    for (var e : list) {
      assertNotNull(e.getDepartmentName());
    }
  }
}
apache-2.0
todd-x86/tkplus
panel.py
787
from control import Control
from Tkinter import Frame as TkFrame

# Tk relief values exposed under friendlier names.
BORDER_SINGLE = 'ridge'
BORDER_RAISED = 'raised'
BORDER_LOWERED = 'sunken'
BORDER_NONE = 'flat'


class Panel(Control):
    """A plain container control backed by a Tkinter Frame.

    The panel doubles as a frame so child controls can attach to it.
    Its border defaults to a one-pixel single (ridge) border.
    """

    def __init__(self, parent, **kwargs):
        Control.__init__(self, TkFrame(parent._frame), **kwargs)
        self._frame = self._ctrl
        self.border_width = 1
        self.border_style = BORDER_SINGLE

    def _get_border_width(self):
        return self._control_get('borderwidth')

    def _set_border_width(self, value):
        self._control_set('borderwidth', value)

    # Width of the frame border, in pixels.
    border_width = property(_get_border_width, _set_border_width)

    def _get_border_style(self):
        return self._control_get('relief')

    def _set_border_style(self, value):
        self._control_set('relief', value)

    # One of the BORDER_* relief constants defined above.
    border_style = property(_get_border_style, _set_border_style)
apache-2.0
paulomorgado/StateMachineExplorations
tests/StateMachineExplorationTests/Runtime/RuntimeStateBaseTests.cs
9285
namespace Morgados.StateMachineExploration.Tests.Runtime
{
    using System;
    using System.Threading;
    using System.Threading.Tasks;
    using FakeItEasy;
    using Morgados.StateMachines.Runtime;
    using Xunit;

    /// <summary>
    /// Tests for <see cref="RuntimeStateBase"/>'s enter/execute/exit/cancel
    /// lifecycle. TestTracker records the action sequence as a string:
    /// ">name;" = enter, "@name;" = transition, "&lt;name;" = exit, "!name;" = canceled.
    /// </summary>
    public class RuntimeStateBaseTests
    {
        [Fact]
        public async Task ExecuteAsync_WithoutCancellation_RunsEnterAndExitActions()
        {
            var tracker = new TestTracker();

            var state = A.Fake<RuntimeStateBase>(builder => builder.WithArgumentsForConstructor(new object[]
                {
                    "test",
                    tracker.StateEnterAction,
                    tracker.StateExitAction,
                    tracker.StateCanceledAction,
                })
                .CallsBaseMethods());

            await state.ExecuteAsync(CancellationToken.None);

            Assert.Equal(">test;<test;", tracker.ToString());
        }

        [Fact]
        public async Task ExecuteAsync_WithoutCancellationAndNonTargettedTransitions_RunsEnterAndExecutesTransitionsUntilNullTransitionAndExitActionsAndReturnsNull()
        {
            var tracker = new TestTracker();

            var nonTargetedTransition = new RuntimeTransition("NonTargeted", null, tracker.TransitionAction, null);

            var state = A.Fake<RuntimeStateBase>(builder => builder
                .WithArgumentsForConstructor(new object[]
                {
                    "test",
                    tracker.StateEnterAction,
                    tracker.StateExitAction,
                    tracker.StateCanceledAction,
                })
                .CallsBaseMethods());

            // Two non-targeted transitions, then null terminates the step loop.
            A.CallTo(state)
                .Where(call => call.Method.Name == "ExecuteStepAsync")
                .WithReturnType<Task<RuntimeTransition>>()
                .ReturnsNextFromSequence(nonTargetedTransition, nonTargetedTransition, null);

            var actual = await state.ExecuteAsync(CancellationToken.None);

            // Fixed: use Assert.Null instead of Assert.Equal(null, ...) (xUnit2003).
            Assert.Null(actual);
            Assert.Equal(">test;@test;@test;<test;", tracker.ToString());
        }

        [Fact]
        public async Task ExecuteAsync_WithoutCancellationAndNonTargettedTransitions_RunsEnterAndExecutesTransitionsUntilTargettedTransitionAndExitActionsAndReturnsTargettedTransitionWithoutExecuting()
        {
            var tracker = new TestTracker();

            var targetedTransition = new RuntimeTransition("Targeted", A.Fake<ITransitionTarget>(), tracker.TransitionAction, null);
            var nonTargetedTransition = new RuntimeTransition("NonTargeted", null, tracker.TransitionAction, null);

            var state = A.Fake<RuntimeStateBase>(builder => builder
                .WithArgumentsForConstructor(new object[]
                {
                    "test",
                    tracker.StateEnterAction,
                    tracker.StateExitAction,
                    tracker.StateCanceledAction,
                })
                .CallsBaseMethods());

            A.CallTo(state)
                .Where(call => call.Method.Name == "ExecuteStepAsync")
                .WithReturnType<Task<RuntimeTransition>>()
                .ReturnsNextFromSequence(nonTargetedTransition, nonTargetedTransition, targetedTransition);

            var actual = await state.ExecuteAsync(CancellationToken.None);

            // The targeted transition is returned to the caller, not executed here.
            Assert.Equal(targetedTransition, actual);
            Assert.Equal(">test;@test;@test;<test;", tracker.ToString());
        }

        [Fact]
        public async Task ExecuteAsync_CanceledBeforeExecution_RunsNodActionAndThrowsOperationCanceledException()
        {
            var tracker = new TestTracker();

            using (var cts = new CancellationTokenSource())
            {
                // Token is already canceled before ExecuteAsync is invoked.
                cts.Cancel();

                var state = A.Fake<RuntimeStateBase>(builder => builder
                    .WithArgumentsForConstructor(new object[]
                    {
                        "test",
                        tracker.StateEnterAction,
                        tracker.StateExitAction,
                        tracker.StateCanceledAction,
                    })
                    .CallsBaseMethods());

                await Assert.ThrowsAsync<OperationCanceledException>(async () => await state.ExecuteAsync(cts.Token));
            }

            // No lifecycle action should have run at all.
            Assert.Equal(string.Empty, tracker.ToString());
        }

        [Fact]
        public async Task ExecuteAsync_CanceledDuringEnter_RunsEnterAndCanceledActionsAndThrowsOperationCanceledException()
        {
            var tracker = new TestTracker();

            using (var cts = new CancellationTokenSource())
            {
                var state = A.Fake<RuntimeStateBase>(builder => builder
                    .WithArgumentsForConstructor(new object[]
                    {
                        "test",
                        // Enter action cancels the token after recording itself.
                        new Func<string, Task>(async s => { await tracker.StateEnterAction(s); cts.Cancel(); }),
                        tracker.StateExitAction,
                        tracker.StateCanceledAction,
                    })
                    .CallsBaseMethods());

                await Assert.ThrowsAsync<OperationCanceledException>(async () => await state.ExecuteAsync(cts.Token));
            }

            Assert.Equal(">test;!test;", tracker.ToString());
        }

        [Fact]
        public async Task ExecuteAsync_CanceledDuringExit_RunsEnterAndExitActionsAndThrowsOperationCanceledException()
        {
            var tracker = new TestTracker();

            using (var cts = new CancellationTokenSource())
            {
                var state = A.Fake<RuntimeStateBase>(builder => builder
                    .WithArgumentsForConstructor(new object[]
                    {
                        "test",
                        tracker.StateEnterAction,
                        // Exit action cancels the token after recording itself.
                        new Func<string, Task>(async s => { await tracker.StateExitAction(s); cts.Cancel(); }),
                        tracker.StateCanceledAction,
                    })
                    .CallsBaseMethods());

                await Assert.ThrowsAsync<OperationCanceledException>(async () => await state.ExecuteAsync(cts.Token));
            }

            // Exit completed before cancellation was observed, so no "!test;".
            Assert.Equal(">test;<test;", tracker.ToString());
        }

        [Fact]
        public async Task ExecuteAsync_WithCancellationDuringExecute_RunsEnterAndExitActionsAndThrowsOperationCanceledException()
        {
            var tracker = new TestTracker();

            using (var cts = new CancellationTokenSource())
            {
                var state = A.Fake<RuntimeStateBase>(builder => builder
                    .WithArgumentsForConstructor(new object[]
                    {
                        "test",
                        tracker.StateEnterAction,
                        tracker.StateExitAction,
                        tracker.StateCanceledAction,
                    })
                    .CallsBaseMethods());

                // Cancel while the state is executing its step.
                A.CallTo(state)
                    .Where(call => call.Method.Name == "ExecuteStepAsync")
                    .WithReturnType<Task<RuntimeTransition>>()
                    .Invokes(() => cts.Cancel());

                await Assert.ThrowsAsync<OperationCanceledException>(async () => await state.ExecuteAsync(cts.Token));
            }

            Assert.Equal(">test;!test;", tracker.ToString());
        }

        [Fact]
        public async Task ExecuteAsync_ExecutingState_ThrowsInvalidOperationExceptionAsync()
        {
            var tracker = new TestTracker();

            // The step blocks on this TCS, keeping the state "executing".
            var tcs = new TaskCompletionSource<RuntimeTransition>();

            var state = A.Fake<RuntimeStateBase>(builder => builder
                .WithArgumentsForConstructor(new object[]
                {
                    "test",
                    tracker.StateEnterAction,
                    tracker.StateExitAction,
                    tracker.StateCanceledAction,
                })
                .CallsBaseMethods());

            A.CallTo(state)
                .Where(call => call.Method.Name == "ExecuteStepAsync")
                .WithReturnType<Task<RuntimeTransition>>()
                .Returns(tcs.Task);

            var task = state.ExecuteAsync(CancellationToken.None);

            // Re-entrant execution must be rejected.
            await Assert.ThrowsAsync<InvalidOperationException>(() => state.ExecuteAsync(CancellationToken.None));

            tcs.SetResult(null);
            await task;
        }
    }
}
rethink-neil/rosnodejs
test/stress.js
7582
const chai = require('chai');
const xmlrpc = require('xmlrpc');
const rosnodejs = require('rosnodejs');

const TOPIC = '/topic';
const TYPE = 'std_msgs/String';
const SERVICE = '/service';
const SRV = 'std_srvs/Empty';
const MASTER_PORT = 11234;

// Each Test in this suite simulates rapid fire connection/disconnection
// of TCPROS clients. A local xmlrpc server (masterStub) stands in for the
// ROS master so that register/unregister traffic can be answered without
// a real roscore. Intervals churn pub/sub/service/client objects while
// traffic is in flight; the pass criterion is simply "did not crash"
// before the setTimeout(done, 8000) fires.
describe('ClientShutdown', function() {
  this.timeout(10000);
  this.slow(10000);

  // Currently-live handles (null when shut down) and the churn timers.
  let sub = null;
  let pub = null;
  let service = null;
  let client = null;
  let interval1;
  let interval2;
  let interval3;
  let masterStub;

  // Create a subscriber on the shared test topic; logs each message.
  function startSub(nh) {
    sub = nh.subscribe(TOPIC, TYPE, (msg) => {
      console.log('%j', msg);
    });
    return sub;
  }

  function stopSub() {
    if (sub) {
      sub.shutdown();
      sub = null;
    }
  }

  // Create a publisher on the shared test topic.
  function startPub(nh) {
    pub = nh.advertise(TOPIC, TYPE);
    return pub;
  }

  function stopPub() {
    if (pub) {
      pub.shutdown();
      pub = null;
    }
  }

  // Advertise the test service; handler just acknowledges the call.
  function startService(nh) {
    service = nh.advertiseService(SERVICE, SRV, () => {
      console.log('handling service call');
      return true;
    });
    return service;
  }

  function stopService() {
    if (service) {
      service.shutdown();
      service = null;
    }
  }

  // Create a service client for the test service.
  function startClient(nh) {
    client = nh.serviceClient(SERVICE, SRV);
    return client;
  }

  function stopClient() {
    if (client) {
      client.shutdown();
      client = null;
    }
  }

  before((done) => {
    masterStub = xmlrpc.createServer({host: 'localhost', port: MASTER_PORT}, () => { done(); });
  });

  after((done) => {
    masterStub.close(() => { done(); });
  });

  beforeEach(() => {
    // Per-test view of what is currently registered with the fake master.
    // pubInfo/subInfo are XML-RPC URIs of the registered pub/sub node;
    // serviceInfo is the TCPROS URI of the registered service provider.
    let pubInfo = null;
    let subInfo = null;
    let serviceInfo = null;

    masterStub.on('getUri', (err, params, callback) => {
      const resp = [ 1, '', 'localhost:11311/' ];
      callback(null, resp);
    });

    masterStub.on('registerSubscriber', (err, params, callback) => {
      subInfo = params[3];
      // Reply with the list of currently-known publishers for the topic.
      const resp = [1, 'You did it!', []];
      if (pubInfo) {
        resp[2].push(pubInfo);
      }
      callback(null, resp);
    });

    masterStub.on('unregisterSubscriber', (err, params, callback) => {
      // Third field is the number of registrations removed.
      const resp = [1, 'You did it!', subInfo ? 1 : 0];
      callback(null, resp);
      subInfo = null;
    });

    masterStub.on('registerPublisher', (err, params, callback) => {
      pubInfo = params[3];
      const resp = [1, 'You did it!', []];
      if (subInfo) {
        // A subscriber exists: tell it about the new publisher via the
        // standard publisherUpdate slave-API callback.
        resp[2].push(pubInfo);
        let subAddrParts = subInfo.replace('http://', '').split(':');
        let client = xmlrpc.createClient({host: subAddrParts[0], port: subAddrParts[1]});
        let data = [1, TOPIC, [pubInfo]];
        client.methodCall('publisherUpdate', data, (err, response) => { });
      }
      callback(null, resp);
    });

    masterStub.on('unregisterPublisher', (err, params, callback) => {
      const resp = [1, 'You did it!', pubInfo ? 1 : 0];
      callback(null, resp);
      if (subInfo) {
        // Notify the subscriber that the publisher list is now empty.
        let subAddrParts = subInfo.replace('http://', '').split(':');
        let client = xmlrpc.createClient({host: subAddrParts[0], port: subAddrParts[1]});
        let data = [1, TOPIC, []];
        client.methodCall('publisherUpdate', data, (err, response) => { });
      }
      pubInfo = null;
    });

    masterStub.on('registerService', (err, params, callback) => {
      serviceInfo = params[2];
      const resp = [1, 'You did it!', []];
      callback(null, resp);
    });

    masterStub.on('unregisterService', (err, params, callback) => {
      // FIX: the removed-count previously keyed off `subInfo` (copy-paste
      // from unregisterSubscriber); it must reflect whether a *service*
      // registration existed, matching the `serviceInfo = null` below.
      const resp = [1, 'You did it!', serviceInfo ? 1 : 0];
      callback(null, resp);
      serviceInfo = null;
    });

    masterStub.on('lookupService', (err, params, callback) => {
      if (serviceInfo) {
        const resp = [1, "you did it", serviceInfo];
        callback(null, resp);
      }
      else {
        // ROS master signals lookup failure with a negative status code.
        const resp = [-1, "no provider", ""];
        callback(null, resp);
      }
    });

    masterStub.on('NotFound', (method, params) => {
      console.error('Got unknown method call %s: %j', method, params);
    });

    return rosnodejs.initNode('/my_node', {rosMasterUri: `http://localhost:${MASTER_PORT}`, logging: {testing: true}});
  });

  afterEach(() => {
    sub = null;
    pub = null;
    service = null;
    client = null;
    clearInterval(interval1);
    clearInterval(interval2);
    clearInterval(interval3);

    const nh = rosnodejs.nh;

    // clear out any service, subs, pubs
    nh._node._services = {};
    nh._node._subscribers = {};
    nh._node._publishers = {};

    // remove any master api handlers we set up
    masterStub.removeAllListeners();
  });

  it('Subscriber Shutdown', (done) => {
    const nh = rosnodejs.nh;
    const pub = startPub(nh);

    const msg = {data: 'This shouldn\'t crash'};
    // Publish every 3ms while the subscriber is torn down/rebuilt every 10ms.
    interval1 = setInterval(() => {
      pub.publish(msg);
    }, 3);

    interval2 = setInterval(() => {
      if (sub === null) {
        startSub(nh);
      }
      else {
        stopSub();
      }
    }, 10);

    setTimeout(done, 8000);
  });

  it('Publisher Shutdown', (done) => {
    const nh = rosnodejs.nh;
    startSub(nh);

    const msg = {data: 'This shouldn\'t crash'};
    interval1 = setInterval(() => {
      if (pub) {
        pub.publish(msg, -1);
      }
    }, 3);

    interval2 = setInterval(() => {
      if (pub === null) {
        startPub(nh);
      }
      else {
        stopPub();
      }
    }, 10);

    setTimeout(done, 8000);
  });

  it('Pub Sub Shutdown', (done) => {
    const nh = rosnodejs.nh;

    const msg = {data: 'This shouldn\'t crash'};
    interval1 = setInterval(() => {
      if (pub) {
        pub.publish(msg);
      }
    }, 3);

    // Deliberately different periods (10ms vs 7ms) so pub/sub churn drifts
    // in and out of phase.
    interval2 = setInterval(() => {
      if (pub === null) {
        startPub(nh);
      }
      else {
        stopPub();
      }
    }, 10);

    interval3 = setInterval(() => {
      if (sub === null) {
        startSub(nh);
      }
      else {
        stopSub();
      }
    }, 7);

    setTimeout(done, 8000);
  });

  it('Service Shutdown', (done) => {
    const nh = rosnodejs.nh;
    const client = startClient(nh);

    const req = {};
    interval1 = setInterval(() => {
      client.call(req);
    }, 3);

    interval2 = setInterval(() => {
      if (service === null) {
        startService(nh);
      }
      else {
        stopService();
      }
    }, 10);

    setTimeout(done, 8000);
  });

  it('Client Shutdown', (done) => {
    const nh = rosnodejs.nh;
    startService(nh);

    const req = {};
    interval1 = setInterval(() => {
      if (client) {
        client.call(req);
      }
    }, 1);

    interval2 = setInterval(() => {
      if (client === null) {
        startClient(nh);
      }
      else {
        stopClient();
      }
    }, 10);

    setTimeout(done, 8000);
  });

  it('Client Service Shutdown', (done) => {
    const nh = rosnodejs.nh;

    const req = {};
    interval1 = setInterval(() => {
      if (client) {
        client.call(req);
      }
    }, 1);

    interval2 = setInterval(() => {
      if (client === null) {
        startClient(nh);
      }
      else {
        stopClient();
      }
    }, 10);

    interval3 = setInterval(() => {
      if (service === null) {
        startService(nh);
      }
      else {
        stopService();
      }
    }, 7);

    setTimeout(done, 8000);
  });
});
apache-2.0
DaniPix/Countries
app/src/main/java/com/readr/ro/countries/constants/RestConstants.java
453
package com.readr.ro.countries.constants; /** * Created by Domnica on 11/1/2016. */ public class RestConstants { // Rest Constants for Retrofit public static final String BASE_ENDPOINT = "https://restcountries.eu/rest/v1/"; public static final String FETCH_ALL_COUNTRIES = "all"; public static final String FETCH_COUNTRY = "callingcode/{callingCodeId}"; private RestConstants(){ // override default constructor } }
apache-2.0