repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
fedepaol/DroidChineseCheckers
src/com/whiterabbit/checkers/ui/BoardsListActivity.java
5669
/*******************************************************************************
 * Copyright 2011 Federico Paolinelli
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package com.whiterabbit.checkers.ui;

import android.app.AlertDialog;
import android.app.ListActivity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.os.Bundle;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ListView;

import com.google.analytics.tracking.android.GoogleAnalytics;
import com.google.analytics.tracking.android.Tracker;
import com.immersion.uhl.Launcher;
import com.whiterabbit.checkers.R;
import com.whiterabbit.checkers.boards.BoardKind;
import com.whiterabbit.checkers.util.Utils;

import java.util.ArrayList;
import java.util.List;

/**
 * Lists every available board and, when one is tapped, offers to start a
 * new game or restore a previously saved one for that board.
 */
public class BoardsListActivity extends ListActivity {
    ArrayList<BoardKind> mBoards;
    MyArrayAdapter mAdapter;
    Launcher mHapticsLauncher;
    private Tracker mGaTracker;
    private GoogleAnalytics mGaInstance;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.board_list);
        mBoards = BoardKind.getAllBoards();
        mAdapter = new MyArrayAdapter(this, R.layout.board_list_elem, mBoards);
        mHapticsLauncher = new Launcher(this);
        mGaInstance = GoogleAnalytics.getInstance(this);
        mGaTracker = mGaInstance.getTracker(getString(R.string.ga_trackingId));
        this.setListAdapter(mAdapter);
    }

    /**
     * Shows a dialog for the given board. It always offers "new game" and
     * "back"; when a saved game exists it additionally offers "restore".
     *
     * @param existsSaved whether a saved game exists for {@code board}
     * @param board       the board the user selected
     */
    void showDialog(boolean existsSaved, final BoardKind board) {
        String newGameLabel = getString(R.string.new_game);
        String restoreLabel = getString(R.string.restore_game);
        String backLabel = getString(R.string.back);
        AlertDialog.Builder ad = new AlertDialog.Builder(this);
        ad.setTitle(board.getName());
        if (existsSaved) {
            ad.setMessage(getString(R.string.do_you_want_to_restore));
            ad.setNeutralButton(restoreLabel, new OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int arg1) {
                    Utils.playButtonPressed(mHapticsLauncher, BoardsListActivity.this);
                    // Relaunch the game restoring the saved state (third arg = restore)
                    CheckersGameActivity.launch(BoardsListActivity.this, board.getName(), true);
                }
            });
        } else {
            ad.setMessage(getString(R.string.starting_new_game));
        }
        ad.setPositiveButton(newGameLabel, new OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int arg1) {
                Utils.playButtonPressed(mHapticsLauncher, BoardsListActivity.this);
                // Discard any saved state before starting a fresh game
                board.delete(BoardsListActivity.this);
                CheckersGameActivity.launch(BoardsListActivity.this, board.getName());
            }
        });
        ad.setNegativeButton(backLabel, new OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int arg1) {
                // Haptic feedback only; the dialog dismisses itself
                Utils.playButtonPressed(mHapticsLauncher, BoardsListActivity.this);
            }
        });
        ad.show();
    }

    @Override
    protected void onListItemClick(ListView l, View v, int position, long id) {
        Utils.playButtonPressed(mHapticsLauncher, BoardsListActivity.this);
        BoardKind board = mBoards.get(position);
        boolean existsSaved = board.load(this);
        showDialog(existsSaved, board);
        mGaTracker.sendEvent("Board", "board_play", board.getName(), Long.valueOf(0));
    }

    /**
     * Adapter that renders one {@link BoardListElem} row per board,
     * recycling the scrapped row view when the list provides one.
     */
    public class MyArrayAdapter extends ArrayAdapter<BoardKind> {
        int resource; // row layout id; retained for compatibility though unused after construction

        public MyArrayAdapter(Context context, int _resource, List<BoardKind> items) {
            super(context, _resource, items);
            resource = _resource;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            BoardKind board = getItem(position);
            BoardListElem newView;
            if (convertView == null) {
                newView = new BoardListElem(getContext(), board);
            } else {
                // Reuse the recycled row instead of creating a new view
                newView = (BoardListElem) convertView;
                newView.setFromBoard(board);
            }
            return newView;
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Saved games may have been created/deleted while paused; refresh the rows
        mAdapter.notifyDataSetChanged();
    }
}
apache-2.0
palava/palava-ipc-session-store
src/main/java/de/cosmocode/palava/ipc/session/store/SessionProviderMBean.java
1122
/** * Copyright 2010 CosmoCode GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.cosmocode.palava.ipc.session.store; /** * MBean interface for {@link SessionProvider}. * * @author Tobias Sarnowski */ public interface SessionProviderMBean { /** * Retrieves the current session count. * * @since 1.0 * @return the current session count */ int getSessionCount(); /** * Retrieves the current hydrated session count. * * @since 1.0 * @return the current hydrated session count */ int getHydratedSessionCount(); }
apache-2.0
bkaradzic/SwiftShader
third_party/SPIRV-Headers/include/spirv/unified1/spirv.lua
54333
-- Copyright (c) 2014-2020 The Khronos Group Inc. -- -- Permission is hereby granted, free of charge, to any person obtaining a copy -- of this software and/or associated documentation files (the "Materials"), -- to deal in the Materials without restriction, including without limitation -- the rights to use, copy, modify, merge, publish, distribute, sublicense, -- and/or sell copies of the Materials, and to permit persons to whom the -- Materials are furnished to do so, subject to the following conditions: -- -- The above copyright notice and this permission notice shall be included in -- all copies or substantial portions of the Materials. -- -- MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS -- STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -- HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -- -- THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -- OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -- THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -- FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS -- IN THE MATERIALS. -- This header is automatically generated by the same tool that creates -- the Binary Section of the SPIR-V specification. 
-- Enumeration tokens for SPIR-V, in various styles: -- C, C++, C++11, JSON, Lua, Python, C#, D -- -- - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL -- - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL -- - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL -- - Lua will use tables, e.g.: spv.SourceLanguage.GLSL -- - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] -- - C# will use enum classes in the Specification class located in the "Spv" namespace, -- e.g.: Spv.Specification.SourceLanguage.GLSL -- - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL -- -- Some tokens act like mask values, which can be OR'd together, -- while others are mutually exclusive. The mask-like ones have -- "Mask" in their name, and a parallel enum that has the shift -- amount (1 << x) for each corresponding enumerant. spv = { MagicNumber = 0x07230203, Version = 0x00010500, Revision = 4, OpCodeMask = 0xffff, WordCountShift = 16, SourceLanguage = { Unknown = 0, ESSL = 1, GLSL = 2, OpenCL_C = 3, OpenCL_CPP = 4, HLSL = 5, }, ExecutionModel = { Vertex = 0, TessellationControl = 1, TessellationEvaluation = 2, Geometry = 3, Fragment = 4, GLCompute = 5, Kernel = 6, TaskNV = 5267, MeshNV = 5268, RayGenerationKHR = 5313, RayGenerationNV = 5313, IntersectionKHR = 5314, IntersectionNV = 5314, AnyHitKHR = 5315, AnyHitNV = 5315, ClosestHitKHR = 5316, ClosestHitNV = 5316, MissKHR = 5317, MissNV = 5317, CallableKHR = 5318, CallableNV = 5318, }, AddressingModel = { Logical = 0, Physical32 = 1, Physical64 = 2, PhysicalStorageBuffer64 = 5348, PhysicalStorageBuffer64EXT = 5348, }, MemoryModel = { Simple = 0, GLSL450 = 1, OpenCL = 2, Vulkan = 3, VulkanKHR = 3, }, ExecutionMode = { Invocations = 0, SpacingEqual = 1, SpacingFractionalEven = 2, SpacingFractionalOdd = 3, VertexOrderCw = 4, VertexOrderCcw = 5, PixelCenterInteger = 6, OriginUpperLeft = 7, OriginLowerLeft = 8, EarlyFragmentTests 
= 9, PointMode = 10, Xfb = 11, DepthReplacing = 12, DepthGreater = 14, DepthLess = 15, DepthUnchanged = 16, LocalSize = 17, LocalSizeHint = 18, InputPoints = 19, InputLines = 20, InputLinesAdjacency = 21, Triangles = 22, InputTrianglesAdjacency = 23, Quads = 24, Isolines = 25, OutputVertices = 26, OutputPoints = 27, OutputLineStrip = 28, OutputTriangleStrip = 29, VecTypeHint = 30, ContractionOff = 31, Initializer = 33, Finalizer = 34, SubgroupSize = 35, SubgroupsPerWorkgroup = 36, SubgroupsPerWorkgroupId = 37, LocalSizeId = 38, LocalSizeHintId = 39, PostDepthCoverage = 4446, DenormPreserve = 4459, DenormFlushToZero = 4460, SignedZeroInfNanPreserve = 4461, RoundingModeRTE = 4462, RoundingModeRTZ = 4463, StencilRefReplacingEXT = 5027, OutputLinesNV = 5269, OutputPrimitivesNV = 5270, DerivativeGroupQuadsNV = 5289, DerivativeGroupLinearNV = 5290, OutputTrianglesNV = 5298, PixelInterlockOrderedEXT = 5366, PixelInterlockUnorderedEXT = 5367, SampleInterlockOrderedEXT = 5368, SampleInterlockUnorderedEXT = 5369, ShadingRateInterlockOrderedEXT = 5370, ShadingRateInterlockUnorderedEXT = 5371, SharedLocalMemorySizeINTEL = 5618, RoundingModeRTPINTEL = 5620, RoundingModeRTNINTEL = 5621, FloatingPointModeALTINTEL = 5622, FloatingPointModeIEEEINTEL = 5623, MaxWorkgroupSizeINTEL = 5893, MaxWorkDimINTEL = 5894, NoGlobalOffsetINTEL = 5895, NumSIMDWorkitemsINTEL = 5896, SchedulerTargetFmaxMhzINTEL = 5903, }, StorageClass = { UniformConstant = 0, Input = 1, Uniform = 2, Output = 3, Workgroup = 4, CrossWorkgroup = 5, Private = 6, Function = 7, Generic = 8, PushConstant = 9, AtomicCounter = 10, Image = 11, StorageBuffer = 12, CallableDataKHR = 5328, CallableDataNV = 5328, IncomingCallableDataKHR = 5329, IncomingCallableDataNV = 5329, RayPayloadKHR = 5338, RayPayloadNV = 5338, HitAttributeKHR = 5339, HitAttributeNV = 5339, IncomingRayPayloadKHR = 5342, IncomingRayPayloadNV = 5342, ShaderRecordBufferKHR = 5343, ShaderRecordBufferNV = 5343, PhysicalStorageBuffer = 5349, 
PhysicalStorageBufferEXT = 5349, CodeSectionINTEL = 5605, DeviceOnlyINTEL = 5936, HostOnlyINTEL = 5937, }, Dim = { Dim1D = 0, Dim2D = 1, Dim3D = 2, Cube = 3, Rect = 4, Buffer = 5, SubpassData = 6, }, SamplerAddressingMode = { None = 0, ClampToEdge = 1, Clamp = 2, Repeat = 3, RepeatMirrored = 4, }, SamplerFilterMode = { Nearest = 0, Linear = 1, }, ImageFormat = { Unknown = 0, Rgba32f = 1, Rgba16f = 2, R32f = 3, Rgba8 = 4, Rgba8Snorm = 5, Rg32f = 6, Rg16f = 7, R11fG11fB10f = 8, R16f = 9, Rgba16 = 10, Rgb10A2 = 11, Rg16 = 12, Rg8 = 13, R16 = 14, R8 = 15, Rgba16Snorm = 16, Rg16Snorm = 17, Rg8Snorm = 18, R16Snorm = 19, R8Snorm = 20, Rgba32i = 21, Rgba16i = 22, Rgba8i = 23, R32i = 24, Rg32i = 25, Rg16i = 26, Rg8i = 27, R16i = 28, R8i = 29, Rgba32ui = 30, Rgba16ui = 31, Rgba8ui = 32, R32ui = 33, Rgb10a2ui = 34, Rg32ui = 35, Rg16ui = 36, Rg8ui = 37, R16ui = 38, R8ui = 39, R64ui = 40, R64i = 41, }, ImageChannelOrder = { R = 0, A = 1, RG = 2, RA = 3, RGB = 4, RGBA = 5, BGRA = 6, ARGB = 7, Intensity = 8, Luminance = 9, Rx = 10, RGx = 11, RGBx = 12, Depth = 13, DepthStencil = 14, sRGB = 15, sRGBx = 16, sRGBA = 17, sBGRA = 18, ABGR = 19, }, ImageChannelDataType = { SnormInt8 = 0, SnormInt16 = 1, UnormInt8 = 2, UnormInt16 = 3, UnormShort565 = 4, UnormShort555 = 5, UnormInt101010 = 6, SignedInt8 = 7, SignedInt16 = 8, SignedInt32 = 9, UnsignedInt8 = 10, UnsignedInt16 = 11, UnsignedInt32 = 12, HalfFloat = 13, Float = 14, UnormInt24 = 15, UnormInt101010_2 = 16, }, ImageOperandsShift = { Bias = 0, Lod = 1, Grad = 2, ConstOffset = 3, Offset = 4, ConstOffsets = 5, Sample = 6, MinLod = 7, MakeTexelAvailable = 8, MakeTexelAvailableKHR = 8, MakeTexelVisible = 9, MakeTexelVisibleKHR = 9, NonPrivateTexel = 10, NonPrivateTexelKHR = 10, VolatileTexel = 11, VolatileTexelKHR = 11, SignExtend = 12, ZeroExtend = 13, }, ImageOperandsMask = { MaskNone = 0, Bias = 0x00000001, Lod = 0x00000002, Grad = 0x00000004, ConstOffset = 0x00000008, Offset = 0x00000010, ConstOffsets = 0x00000020, Sample = 
0x00000040, MinLod = 0x00000080, MakeTexelAvailable = 0x00000100, MakeTexelAvailableKHR = 0x00000100, MakeTexelVisible = 0x00000200, MakeTexelVisibleKHR = 0x00000200, NonPrivateTexel = 0x00000400, NonPrivateTexelKHR = 0x00000400, VolatileTexel = 0x00000800, VolatileTexelKHR = 0x00000800, SignExtend = 0x00001000, ZeroExtend = 0x00002000, }, FPFastMathModeShift = { NotNaN = 0, NotInf = 1, NSZ = 2, AllowRecip = 3, Fast = 4, AllowContractFastINTEL = 16, AllowReassocINTEL = 17, }, FPFastMathModeMask = { MaskNone = 0, NotNaN = 0x00000001, NotInf = 0x00000002, NSZ = 0x00000004, AllowRecip = 0x00000008, Fast = 0x00000010, AllowContractFastINTEL = 0x00010000, AllowReassocINTEL = 0x00020000, }, FPRoundingMode = { RTE = 0, RTZ = 1, RTP = 2, RTN = 3, }, FPDenormMode = { Preserve = 0, FlushToZero = 1, }, FPOperationMode = { IEEE = 0, ALT = 1, }, LinkageType = { Export = 0, Import = 1, }, AccessQualifier = { ReadOnly = 0, WriteOnly = 1, ReadWrite = 2, }, FunctionParameterAttribute = { Zext = 0, Sext = 1, ByVal = 2, Sret = 3, NoAlias = 4, NoCapture = 5, NoWrite = 6, NoReadWrite = 7, }, Decoration = { RelaxedPrecision = 0, SpecId = 1, Block = 2, BufferBlock = 3, RowMajor = 4, ColMajor = 5, ArrayStride = 6, MatrixStride = 7, GLSLShared = 8, GLSLPacked = 9, CPacked = 10, BuiltIn = 11, NoPerspective = 13, Flat = 14, Patch = 15, Centroid = 16, Sample = 17, Invariant = 18, Restrict = 19, Aliased = 20, Volatile = 21, Constant = 22, Coherent = 23, NonWritable = 24, NonReadable = 25, Uniform = 26, UniformId = 27, SaturatedConversion = 28, Stream = 29, Location = 30, Component = 31, Index = 32, Binding = 33, DescriptorSet = 34, Offset = 35, XfbBuffer = 36, XfbStride = 37, FuncParamAttr = 38, FPRoundingMode = 39, FPFastMathMode = 40, LinkageAttributes = 41, NoContraction = 42, InputAttachmentIndex = 43, Alignment = 44, MaxByteOffset = 45, AlignmentId = 46, MaxByteOffsetId = 47, NoSignedWrap = 4469, NoUnsignedWrap = 4470, ExplicitInterpAMD = 4999, OverrideCoverageNV = 5248, PassthroughNV = 
5250, ViewportRelativeNV = 5252, SecondaryViewportRelativeNV = 5256, PerPrimitiveNV = 5271, PerViewNV = 5272, PerTaskNV = 5273, PerVertexNV = 5285, NonUniform = 5300, NonUniformEXT = 5300, RestrictPointer = 5355, RestrictPointerEXT = 5355, AliasedPointer = 5356, AliasedPointerEXT = 5356, SIMTCallINTEL = 5599, ReferencedIndirectlyINTEL = 5602, ClobberINTEL = 5607, SideEffectsINTEL = 5608, VectorComputeVariableINTEL = 5624, FuncParamIOKindINTEL = 5625, VectorComputeFunctionINTEL = 5626, StackCallINTEL = 5627, GlobalVariableOffsetINTEL = 5628, CounterBuffer = 5634, HlslCounterBufferGOOGLE = 5634, HlslSemanticGOOGLE = 5635, UserSemantic = 5635, UserTypeGOOGLE = 5636, FunctionRoundingModeINTEL = 5822, FunctionDenormModeINTEL = 5823, RegisterINTEL = 5825, MemoryINTEL = 5826, NumbanksINTEL = 5827, BankwidthINTEL = 5828, MaxPrivateCopiesINTEL = 5829, SinglepumpINTEL = 5830, DoublepumpINTEL = 5831, MaxReplicatesINTEL = 5832, SimpleDualPortINTEL = 5833, MergeINTEL = 5834, BankBitsINTEL = 5835, ForcePow2DepthINTEL = 5836, BurstCoalesceINTEL = 5899, CacheSizeINTEL = 5900, DontStaticallyCoalesceINTEL = 5901, PrefetchINTEL = 5902, StallEnableINTEL = 5905, FuseLoopsInFunctionINTEL = 5907, BufferLocationINTEL = 5921, IOPipeStorageINTEL = 5944, FunctionFloatingPointModeINTEL = 6080, SingleElementVectorINTEL = 6085, VectorComputeCallableFunctionINTEL = 6087, }, BuiltIn = { Position = 0, PointSize = 1, ClipDistance = 3, CullDistance = 4, VertexId = 5, InstanceId = 6, PrimitiveId = 7, InvocationId = 8, Layer = 9, ViewportIndex = 10, TessLevelOuter = 11, TessLevelInner = 12, TessCoord = 13, PatchVertices = 14, FragCoord = 15, PointCoord = 16, FrontFacing = 17, SampleId = 18, SamplePosition = 19, SampleMask = 20, FragDepth = 22, HelperInvocation = 23, NumWorkgroups = 24, WorkgroupSize = 25, WorkgroupId = 26, LocalInvocationId = 27, GlobalInvocationId = 28, LocalInvocationIndex = 29, WorkDim = 30, GlobalSize = 31, EnqueuedWorkgroupSize = 32, GlobalOffset = 33, GlobalLinearId = 34, 
SubgroupSize = 36, SubgroupMaxSize = 37, NumSubgroups = 38, NumEnqueuedSubgroups = 39, SubgroupId = 40, SubgroupLocalInvocationId = 41, VertexIndex = 42, InstanceIndex = 43, SubgroupEqMask = 4416, SubgroupEqMaskKHR = 4416, SubgroupGeMask = 4417, SubgroupGeMaskKHR = 4417, SubgroupGtMask = 4418, SubgroupGtMaskKHR = 4418, SubgroupLeMask = 4419, SubgroupLeMaskKHR = 4419, SubgroupLtMask = 4420, SubgroupLtMaskKHR = 4420, BaseVertex = 4424, BaseInstance = 4425, DrawIndex = 4426, PrimitiveShadingRateKHR = 4432, DeviceIndex = 4438, ViewIndex = 4440, ShadingRateKHR = 4444, BaryCoordNoPerspAMD = 4992, BaryCoordNoPerspCentroidAMD = 4993, BaryCoordNoPerspSampleAMD = 4994, BaryCoordSmoothAMD = 4995, BaryCoordSmoothCentroidAMD = 4996, BaryCoordSmoothSampleAMD = 4997, BaryCoordPullModelAMD = 4998, FragStencilRefEXT = 5014, ViewportMaskNV = 5253, SecondaryPositionNV = 5257, SecondaryViewportMaskNV = 5258, PositionPerViewNV = 5261, ViewportMaskPerViewNV = 5262, FullyCoveredEXT = 5264, TaskCountNV = 5274, PrimitiveCountNV = 5275, PrimitiveIndicesNV = 5276, ClipDistancePerViewNV = 5277, CullDistancePerViewNV = 5278, LayerPerViewNV = 5279, MeshViewCountNV = 5280, MeshViewIndicesNV = 5281, BaryCoordNV = 5286, BaryCoordNoPerspNV = 5287, FragSizeEXT = 5292, FragmentSizeNV = 5292, FragInvocationCountEXT = 5293, InvocationsPerPixelNV = 5293, LaunchIdKHR = 5319, LaunchIdNV = 5319, LaunchSizeKHR = 5320, LaunchSizeNV = 5320, WorldRayOriginKHR = 5321, WorldRayOriginNV = 5321, WorldRayDirectionKHR = 5322, WorldRayDirectionNV = 5322, ObjectRayOriginKHR = 5323, ObjectRayOriginNV = 5323, ObjectRayDirectionKHR = 5324, ObjectRayDirectionNV = 5324, RayTminKHR = 5325, RayTminNV = 5325, RayTmaxKHR = 5326, RayTmaxNV = 5326, InstanceCustomIndexKHR = 5327, InstanceCustomIndexNV = 5327, ObjectToWorldKHR = 5330, ObjectToWorldNV = 5330, WorldToObjectKHR = 5331, WorldToObjectNV = 5331, HitTNV = 5332, HitKindKHR = 5333, HitKindNV = 5333, IncomingRayFlagsKHR = 5351, IncomingRayFlagsNV = 5351, RayGeometryIndexKHR 
= 5352, WarpsPerSMNV = 5374, SMCountNV = 5375, WarpIDNV = 5376, SMIDNV = 5377, }, SelectionControlShift = { Flatten = 0, DontFlatten = 1, }, SelectionControlMask = { MaskNone = 0, Flatten = 0x00000001, DontFlatten = 0x00000002, }, LoopControlShift = { Unroll = 0, DontUnroll = 1, DependencyInfinite = 2, DependencyLength = 3, MinIterations = 4, MaxIterations = 5, IterationMultiple = 6, PeelCount = 7, PartialCount = 8, InitiationIntervalINTEL = 16, MaxConcurrencyINTEL = 17, DependencyArrayINTEL = 18, PipelineEnableINTEL = 19, LoopCoalesceINTEL = 20, MaxInterleavingINTEL = 21, SpeculatedIterationsINTEL = 22, NoFusionINTEL = 23, }, LoopControlMask = { MaskNone = 0, Unroll = 0x00000001, DontUnroll = 0x00000002, DependencyInfinite = 0x00000004, DependencyLength = 0x00000008, MinIterations = 0x00000010, MaxIterations = 0x00000020, IterationMultiple = 0x00000040, PeelCount = 0x00000080, PartialCount = 0x00000100, InitiationIntervalINTEL = 0x00010000, MaxConcurrencyINTEL = 0x00020000, DependencyArrayINTEL = 0x00040000, PipelineEnableINTEL = 0x00080000, LoopCoalesceINTEL = 0x00100000, MaxInterleavingINTEL = 0x00200000, SpeculatedIterationsINTEL = 0x00400000, NoFusionINTEL = 0x00800000, }, FunctionControlShift = { Inline = 0, DontInline = 1, Pure = 2, Const = 3, }, FunctionControlMask = { MaskNone = 0, Inline = 0x00000001, DontInline = 0x00000002, Pure = 0x00000004, Const = 0x00000008, }, MemorySemanticsShift = { Acquire = 1, Release = 2, AcquireRelease = 3, SequentiallyConsistent = 4, UniformMemory = 6, SubgroupMemory = 7, WorkgroupMemory = 8, CrossWorkgroupMemory = 9, AtomicCounterMemory = 10, ImageMemory = 11, OutputMemory = 12, OutputMemoryKHR = 12, MakeAvailable = 13, MakeAvailableKHR = 13, MakeVisible = 14, MakeVisibleKHR = 14, Volatile = 15, }, MemorySemanticsMask = { MaskNone = 0, Acquire = 0x00000002, Release = 0x00000004, AcquireRelease = 0x00000008, SequentiallyConsistent = 0x00000010, UniformMemory = 0x00000040, SubgroupMemory = 0x00000080, WorkgroupMemory = 
0x00000100, CrossWorkgroupMemory = 0x00000200, AtomicCounterMemory = 0x00000400, ImageMemory = 0x00000800, OutputMemory = 0x00001000, OutputMemoryKHR = 0x00001000, MakeAvailable = 0x00002000, MakeAvailableKHR = 0x00002000, MakeVisible = 0x00004000, MakeVisibleKHR = 0x00004000, Volatile = 0x00008000, }, MemoryAccessShift = { Volatile = 0, Aligned = 1, Nontemporal = 2, MakePointerAvailable = 3, MakePointerAvailableKHR = 3, MakePointerVisible = 4, MakePointerVisibleKHR = 4, NonPrivatePointer = 5, NonPrivatePointerKHR = 5, }, MemoryAccessMask = { MaskNone = 0, Volatile = 0x00000001, Aligned = 0x00000002, Nontemporal = 0x00000004, MakePointerAvailable = 0x00000008, MakePointerAvailableKHR = 0x00000008, MakePointerVisible = 0x00000010, MakePointerVisibleKHR = 0x00000010, NonPrivatePointer = 0x00000020, NonPrivatePointerKHR = 0x00000020, }, Scope = { CrossDevice = 0, Device = 1, Workgroup = 2, Subgroup = 3, Invocation = 4, QueueFamily = 5, QueueFamilyKHR = 5, ShaderCallKHR = 6, }, GroupOperation = { Reduce = 0, InclusiveScan = 1, ExclusiveScan = 2, ClusteredReduce = 3, PartitionedReduceNV = 6, PartitionedInclusiveScanNV = 7, PartitionedExclusiveScanNV = 8, }, KernelEnqueueFlags = { NoWait = 0, WaitKernel = 1, WaitWorkGroup = 2, }, KernelProfilingInfoShift = { CmdExecTime = 0, }, KernelProfilingInfoMask = { MaskNone = 0, CmdExecTime = 0x00000001, }, Capability = { Matrix = 0, Shader = 1, Geometry = 2, Tessellation = 3, Addresses = 4, Linkage = 5, Kernel = 6, Vector16 = 7, Float16Buffer = 8, Float16 = 9, Float64 = 10, Int64 = 11, Int64Atomics = 12, ImageBasic = 13, ImageReadWrite = 14, ImageMipmap = 15, Pipes = 17, Groups = 18, DeviceEnqueue = 19, LiteralSampler = 20, AtomicStorage = 21, Int16 = 22, TessellationPointSize = 23, GeometryPointSize = 24, ImageGatherExtended = 25, StorageImageMultisample = 27, UniformBufferArrayDynamicIndexing = 28, SampledImageArrayDynamicIndexing = 29, StorageBufferArrayDynamicIndexing = 30, StorageImageArrayDynamicIndexing = 31, ClipDistance 
= 32, CullDistance = 33, ImageCubeArray = 34, SampleRateShading = 35, ImageRect = 36, SampledRect = 37, GenericPointer = 38, Int8 = 39, InputAttachment = 40, SparseResidency = 41, MinLod = 42, Sampled1D = 43, Image1D = 44, SampledCubeArray = 45, SampledBuffer = 46, ImageBuffer = 47, ImageMSArray = 48, StorageImageExtendedFormats = 49, ImageQuery = 50, DerivativeControl = 51, InterpolationFunction = 52, TransformFeedback = 53, GeometryStreams = 54, StorageImageReadWithoutFormat = 55, StorageImageWriteWithoutFormat = 56, MultiViewport = 57, SubgroupDispatch = 58, NamedBarrier = 59, PipeStorage = 60, GroupNonUniform = 61, GroupNonUniformVote = 62, GroupNonUniformArithmetic = 63, GroupNonUniformBallot = 64, GroupNonUniformShuffle = 65, GroupNonUniformShuffleRelative = 66, GroupNonUniformClustered = 67, GroupNonUniformQuad = 68, ShaderLayer = 69, ShaderViewportIndex = 70, FragmentShadingRateKHR = 4422, SubgroupBallotKHR = 4423, DrawParameters = 4427, SubgroupVoteKHR = 4431, StorageBuffer16BitAccess = 4433, StorageUniformBufferBlock16 = 4433, StorageUniform16 = 4434, UniformAndStorageBuffer16BitAccess = 4434, StoragePushConstant16 = 4435, StorageInputOutput16 = 4436, DeviceGroup = 4437, MultiView = 4439, VariablePointersStorageBuffer = 4441, VariablePointers = 4442, AtomicStorageOps = 4445, SampleMaskPostDepthCoverage = 4447, StorageBuffer8BitAccess = 4448, UniformAndStorageBuffer8BitAccess = 4449, StoragePushConstant8 = 4450, DenormPreserve = 4464, DenormFlushToZero = 4465, SignedZeroInfNanPreserve = 4466, RoundingModeRTE = 4467, RoundingModeRTZ = 4468, RayQueryProvisionalKHR = 4471, RayQueryKHR = 4472, RayTraversalPrimitiveCullingKHR = 4478, RayTracingKHR = 4479, Float16ImageAMD = 5008, ImageGatherBiasLodAMD = 5009, FragmentMaskAMD = 5010, StencilExportEXT = 5013, ImageReadWriteLodAMD = 5015, Int64ImageEXT = 5016, ShaderClockKHR = 5055, SampleMaskOverrideCoverageNV = 5249, GeometryShaderPassthroughNV = 5251, ShaderViewportIndexLayerEXT = 5254, 
ShaderViewportIndexLayerNV = 5254, ShaderViewportMaskNV = 5255, ShaderStereoViewNV = 5259, PerViewAttributesNV = 5260, FragmentFullyCoveredEXT = 5265, MeshShadingNV = 5266, ImageFootprintNV = 5282, FragmentBarycentricNV = 5284, ComputeDerivativeGroupQuadsNV = 5288, FragmentDensityEXT = 5291, ShadingRateNV = 5291, GroupNonUniformPartitionedNV = 5297, ShaderNonUniform = 5301, ShaderNonUniformEXT = 5301, RuntimeDescriptorArray = 5302, RuntimeDescriptorArrayEXT = 5302, InputAttachmentArrayDynamicIndexing = 5303, InputAttachmentArrayDynamicIndexingEXT = 5303, UniformTexelBufferArrayDynamicIndexing = 5304, UniformTexelBufferArrayDynamicIndexingEXT = 5304, StorageTexelBufferArrayDynamicIndexing = 5305, StorageTexelBufferArrayDynamicIndexingEXT = 5305, UniformBufferArrayNonUniformIndexing = 5306, UniformBufferArrayNonUniformIndexingEXT = 5306, SampledImageArrayNonUniformIndexing = 5307, SampledImageArrayNonUniformIndexingEXT = 5307, StorageBufferArrayNonUniformIndexing = 5308, StorageBufferArrayNonUniformIndexingEXT = 5308, StorageImageArrayNonUniformIndexing = 5309, StorageImageArrayNonUniformIndexingEXT = 5309, InputAttachmentArrayNonUniformIndexing = 5310, InputAttachmentArrayNonUniformIndexingEXT = 5310, UniformTexelBufferArrayNonUniformIndexing = 5311, UniformTexelBufferArrayNonUniformIndexingEXT = 5311, StorageTexelBufferArrayNonUniformIndexing = 5312, StorageTexelBufferArrayNonUniformIndexingEXT = 5312, RayTracingNV = 5340, VulkanMemoryModel = 5345, VulkanMemoryModelKHR = 5345, VulkanMemoryModelDeviceScope = 5346, VulkanMemoryModelDeviceScopeKHR = 5346, PhysicalStorageBufferAddresses = 5347, PhysicalStorageBufferAddressesEXT = 5347, ComputeDerivativeGroupLinearNV = 5350, RayTracingProvisionalKHR = 5353, CooperativeMatrixNV = 5357, FragmentShaderSampleInterlockEXT = 5363, FragmentShaderShadingRateInterlockEXT = 5372, ShaderSMBuiltinsNV = 5373, FragmentShaderPixelInterlockEXT = 5378, DemoteToHelperInvocationEXT = 5379, SubgroupShuffleINTEL = 5568, 
SubgroupBufferBlockIOINTEL = 5569, SubgroupImageBlockIOINTEL = 5570, SubgroupImageMediaBlockIOINTEL = 5579, RoundToInfinityINTEL = 5582, FloatingPointModeINTEL = 5583, IntegerFunctions2INTEL = 5584, FunctionPointersINTEL = 5603, IndirectReferencesINTEL = 5604, AsmINTEL = 5606, VectorComputeINTEL = 5617, VectorAnyINTEL = 5619, SubgroupAvcMotionEstimationINTEL = 5696, SubgroupAvcMotionEstimationIntraINTEL = 5697, SubgroupAvcMotionEstimationChromaINTEL = 5698, VariableLengthArrayINTEL = 5817, FunctionFloatControlINTEL = 5821, FPGAMemoryAttributesINTEL = 5824, FPFastMathModeINTEL = 5837, ArbitraryPrecisionIntegersINTEL = 5844, UnstructuredLoopControlsINTEL = 5886, FPGALoopControlsINTEL = 5888, KernelAttributesINTEL = 5892, FPGAKernelAttributesINTEL = 5897, FPGAMemoryAccessesINTEL = 5898, FPGAClusterAttributesINTEL = 5904, LoopFuseINTEL = 5906, FPGABufferLocationINTEL = 5920, USMStorageClassesINTEL = 5935, IOPipesINTEL = 5943, BlockingPipesINTEL = 5945, FPGARegINTEL = 5948, AtomicFloat32AddEXT = 6033, AtomicFloat64AddEXT = 6034, LongConstantCompositeINTEL = 6089, }, RayFlagsShift = { OpaqueKHR = 0, NoOpaqueKHR = 1, TerminateOnFirstHitKHR = 2, SkipClosestHitShaderKHR = 3, CullBackFacingTrianglesKHR = 4, CullFrontFacingTrianglesKHR = 5, CullOpaqueKHR = 6, CullNoOpaqueKHR = 7, SkipTrianglesKHR = 8, SkipAABBsKHR = 9, }, RayFlagsMask = { MaskNone = 0, OpaqueKHR = 0x00000001, NoOpaqueKHR = 0x00000002, TerminateOnFirstHitKHR = 0x00000004, SkipClosestHitShaderKHR = 0x00000008, CullBackFacingTrianglesKHR = 0x00000010, CullFrontFacingTrianglesKHR = 0x00000020, CullOpaqueKHR = 0x00000040, CullNoOpaqueKHR = 0x00000080, SkipTrianglesKHR = 0x00000100, SkipAABBsKHR = 0x00000200, }, RayQueryIntersection = { RayQueryCandidateIntersectionKHR = 0, RayQueryCommittedIntersectionKHR = 1, }, RayQueryCommittedIntersectionType = { RayQueryCommittedIntersectionNoneKHR = 0, RayQueryCommittedIntersectionTriangleKHR = 1, RayQueryCommittedIntersectionGeneratedKHR = 2, }, 
RayQueryCandidateIntersectionType = { RayQueryCandidateIntersectionTriangleKHR = 0, RayQueryCandidateIntersectionAABBKHR = 1, }, FragmentShadingRateShift = { Vertical2Pixels = 0, Vertical4Pixels = 1, Horizontal2Pixels = 2, Horizontal4Pixels = 3, }, FragmentShadingRateMask = { MaskNone = 0, Vertical2Pixels = 0x00000001, Vertical4Pixels = 0x00000002, Horizontal2Pixels = 0x00000004, Horizontal4Pixels = 0x00000008, }, Op = { OpNop = 0, OpUndef = 1, OpSourceContinued = 2, OpSource = 3, OpSourceExtension = 4, OpName = 5, OpMemberName = 6, OpString = 7, OpLine = 8, OpExtension = 10, OpExtInstImport = 11, OpExtInst = 12, OpMemoryModel = 14, OpEntryPoint = 15, OpExecutionMode = 16, OpCapability = 17, OpTypeVoid = 19, OpTypeBool = 20, OpTypeInt = 21, OpTypeFloat = 22, OpTypeVector = 23, OpTypeMatrix = 24, OpTypeImage = 25, OpTypeSampler = 26, OpTypeSampledImage = 27, OpTypeArray = 28, OpTypeRuntimeArray = 29, OpTypeStruct = 30, OpTypeOpaque = 31, OpTypePointer = 32, OpTypeFunction = 33, OpTypeEvent = 34, OpTypeDeviceEvent = 35, OpTypeReserveId = 36, OpTypeQueue = 37, OpTypePipe = 38, OpTypeForwardPointer = 39, OpConstantTrue = 41, OpConstantFalse = 42, OpConstant = 43, OpConstantComposite = 44, OpConstantSampler = 45, OpConstantNull = 46, OpSpecConstantTrue = 48, OpSpecConstantFalse = 49, OpSpecConstant = 50, OpSpecConstantComposite = 51, OpSpecConstantOp = 52, OpFunction = 54, OpFunctionParameter = 55, OpFunctionEnd = 56, OpFunctionCall = 57, OpVariable = 59, OpImageTexelPointer = 60, OpLoad = 61, OpStore = 62, OpCopyMemory = 63, OpCopyMemorySized = 64, OpAccessChain = 65, OpInBoundsAccessChain = 66, OpPtrAccessChain = 67, OpArrayLength = 68, OpGenericPtrMemSemantics = 69, OpInBoundsPtrAccessChain = 70, OpDecorate = 71, OpMemberDecorate = 72, OpDecorationGroup = 73, OpGroupDecorate = 74, OpGroupMemberDecorate = 75, OpVectorExtractDynamic = 77, OpVectorInsertDynamic = 78, OpVectorShuffle = 79, OpCompositeConstruct = 80, OpCompositeExtract = 81, OpCompositeInsert = 82, 
OpCopyObject = 83, OpTranspose = 84, OpSampledImage = 86, OpImageSampleImplicitLod = 87, OpImageSampleExplicitLod = 88, OpImageSampleDrefImplicitLod = 89, OpImageSampleDrefExplicitLod = 90, OpImageSampleProjImplicitLod = 91, OpImageSampleProjExplicitLod = 92, OpImageSampleProjDrefImplicitLod = 93, OpImageSampleProjDrefExplicitLod = 94, OpImageFetch = 95, OpImageGather = 96, OpImageDrefGather = 97, OpImageRead = 98, OpImageWrite = 99, OpImage = 100, OpImageQueryFormat = 101, OpImageQueryOrder = 102, OpImageQuerySizeLod = 103, OpImageQuerySize = 104, OpImageQueryLod = 105, OpImageQueryLevels = 106, OpImageQuerySamples = 107, OpConvertFToU = 109, OpConvertFToS = 110, OpConvertSToF = 111, OpConvertUToF = 112, OpUConvert = 113, OpSConvert = 114, OpFConvert = 115, OpQuantizeToF16 = 116, OpConvertPtrToU = 117, OpSatConvertSToU = 118, OpSatConvertUToS = 119, OpConvertUToPtr = 120, OpPtrCastToGeneric = 121, OpGenericCastToPtr = 122, OpGenericCastToPtrExplicit = 123, OpBitcast = 124, OpSNegate = 126, OpFNegate = 127, OpIAdd = 128, OpFAdd = 129, OpISub = 130, OpFSub = 131, OpIMul = 132, OpFMul = 133, OpUDiv = 134, OpSDiv = 135, OpFDiv = 136, OpUMod = 137, OpSRem = 138, OpSMod = 139, OpFRem = 140, OpFMod = 141, OpVectorTimesScalar = 142, OpMatrixTimesScalar = 143, OpVectorTimesMatrix = 144, OpMatrixTimesVector = 145, OpMatrixTimesMatrix = 146, OpOuterProduct = 147, OpDot = 148, OpIAddCarry = 149, OpISubBorrow = 150, OpUMulExtended = 151, OpSMulExtended = 152, OpAny = 154, OpAll = 155, OpIsNan = 156, OpIsInf = 157, OpIsFinite = 158, OpIsNormal = 159, OpSignBitSet = 160, OpLessOrGreater = 161, OpOrdered = 162, OpUnordered = 163, OpLogicalEqual = 164, OpLogicalNotEqual = 165, OpLogicalOr = 166, OpLogicalAnd = 167, OpLogicalNot = 168, OpSelect = 169, OpIEqual = 170, OpINotEqual = 171, OpUGreaterThan = 172, OpSGreaterThan = 173, OpUGreaterThanEqual = 174, OpSGreaterThanEqual = 175, OpULessThan = 176, OpSLessThan = 177, OpULessThanEqual = 178, OpSLessThanEqual = 179, OpFOrdEqual = 
180, OpFUnordEqual = 181, OpFOrdNotEqual = 182, OpFUnordNotEqual = 183, OpFOrdLessThan = 184, OpFUnordLessThan = 185, OpFOrdGreaterThan = 186, OpFUnordGreaterThan = 187, OpFOrdLessThanEqual = 188, OpFUnordLessThanEqual = 189, OpFOrdGreaterThanEqual = 190, OpFUnordGreaterThanEqual = 191, OpShiftRightLogical = 194, OpShiftRightArithmetic = 195, OpShiftLeftLogical = 196, OpBitwiseOr = 197, OpBitwiseXor = 198, OpBitwiseAnd = 199, OpNot = 200, OpBitFieldInsert = 201, OpBitFieldSExtract = 202, OpBitFieldUExtract = 203, OpBitReverse = 204, OpBitCount = 205, OpDPdx = 207, OpDPdy = 208, OpFwidth = 209, OpDPdxFine = 210, OpDPdyFine = 211, OpFwidthFine = 212, OpDPdxCoarse = 213, OpDPdyCoarse = 214, OpFwidthCoarse = 215, OpEmitVertex = 218, OpEndPrimitive = 219, OpEmitStreamVertex = 220, OpEndStreamPrimitive = 221, OpControlBarrier = 224, OpMemoryBarrier = 225, OpAtomicLoad = 227, OpAtomicStore = 228, OpAtomicExchange = 229, OpAtomicCompareExchange = 230, OpAtomicCompareExchangeWeak = 231, OpAtomicIIncrement = 232, OpAtomicIDecrement = 233, OpAtomicIAdd = 234, OpAtomicISub = 235, OpAtomicSMin = 236, OpAtomicUMin = 237, OpAtomicSMax = 238, OpAtomicUMax = 239, OpAtomicAnd = 240, OpAtomicOr = 241, OpAtomicXor = 242, OpPhi = 245, OpLoopMerge = 246, OpSelectionMerge = 247, OpLabel = 248, OpBranch = 249, OpBranchConditional = 250, OpSwitch = 251, OpKill = 252, OpReturn = 253, OpReturnValue = 254, OpUnreachable = 255, OpLifetimeStart = 256, OpLifetimeStop = 257, OpGroupAsyncCopy = 259, OpGroupWaitEvents = 260, OpGroupAll = 261, OpGroupAny = 262, OpGroupBroadcast = 263, OpGroupIAdd = 264, OpGroupFAdd = 265, OpGroupFMin = 266, OpGroupUMin = 267, OpGroupSMin = 268, OpGroupFMax = 269, OpGroupUMax = 270, OpGroupSMax = 271, OpReadPipe = 274, OpWritePipe = 275, OpReservedReadPipe = 276, OpReservedWritePipe = 277, OpReserveReadPipePackets = 278, OpReserveWritePipePackets = 279, OpCommitReadPipe = 280, OpCommitWritePipe = 281, OpIsValidReserveId = 282, OpGetNumPipePackets = 283, 
OpGetMaxPipePackets = 284, OpGroupReserveReadPipePackets = 285, OpGroupReserveWritePipePackets = 286, OpGroupCommitReadPipe = 287, OpGroupCommitWritePipe = 288, OpEnqueueMarker = 291, OpEnqueueKernel = 292, OpGetKernelNDrangeSubGroupCount = 293, OpGetKernelNDrangeMaxSubGroupSize = 294, OpGetKernelWorkGroupSize = 295, OpGetKernelPreferredWorkGroupSizeMultiple = 296, OpRetainEvent = 297, OpReleaseEvent = 298, OpCreateUserEvent = 299, OpIsValidEvent = 300, OpSetUserEventStatus = 301, OpCaptureEventProfilingInfo = 302, OpGetDefaultQueue = 303, OpBuildNDRange = 304, OpImageSparseSampleImplicitLod = 305, OpImageSparseSampleExplicitLod = 306, OpImageSparseSampleDrefImplicitLod = 307, OpImageSparseSampleDrefExplicitLod = 308, OpImageSparseSampleProjImplicitLod = 309, OpImageSparseSampleProjExplicitLod = 310, OpImageSparseSampleProjDrefImplicitLod = 311, OpImageSparseSampleProjDrefExplicitLod = 312, OpImageSparseFetch = 313, OpImageSparseGather = 314, OpImageSparseDrefGather = 315, OpImageSparseTexelsResident = 316, OpNoLine = 317, OpAtomicFlagTestAndSet = 318, OpAtomicFlagClear = 319, OpImageSparseRead = 320, OpSizeOf = 321, OpTypePipeStorage = 322, OpConstantPipeStorage = 323, OpCreatePipeFromPipeStorage = 324, OpGetKernelLocalSizeForSubgroupCount = 325, OpGetKernelMaxNumSubgroups = 326, OpTypeNamedBarrier = 327, OpNamedBarrierInitialize = 328, OpMemoryNamedBarrier = 329, OpModuleProcessed = 330, OpExecutionModeId = 331, OpDecorateId = 332, OpGroupNonUniformElect = 333, OpGroupNonUniformAll = 334, OpGroupNonUniformAny = 335, OpGroupNonUniformAllEqual = 336, OpGroupNonUniformBroadcast = 337, OpGroupNonUniformBroadcastFirst = 338, OpGroupNonUniformBallot = 339, OpGroupNonUniformInverseBallot = 340, OpGroupNonUniformBallotBitExtract = 341, OpGroupNonUniformBallotBitCount = 342, OpGroupNonUniformBallotFindLSB = 343, OpGroupNonUniformBallotFindMSB = 344, OpGroupNonUniformShuffle = 345, OpGroupNonUniformShuffleXor = 346, OpGroupNonUniformShuffleUp = 347, 
OpGroupNonUniformShuffleDown = 348, OpGroupNonUniformIAdd = 349, OpGroupNonUniformFAdd = 350, OpGroupNonUniformIMul = 351, OpGroupNonUniformFMul = 352, OpGroupNonUniformSMin = 353, OpGroupNonUniformUMin = 354, OpGroupNonUniformFMin = 355, OpGroupNonUniformSMax = 356, OpGroupNonUniformUMax = 357, OpGroupNonUniformFMax = 358, OpGroupNonUniformBitwiseAnd = 359, OpGroupNonUniformBitwiseOr = 360, OpGroupNonUniformBitwiseXor = 361, OpGroupNonUniformLogicalAnd = 362, OpGroupNonUniformLogicalOr = 363, OpGroupNonUniformLogicalXor = 364, OpGroupNonUniformQuadBroadcast = 365, OpGroupNonUniformQuadSwap = 366, OpCopyLogical = 400, OpPtrEqual = 401, OpPtrNotEqual = 402, OpPtrDiff = 403, OpTerminateInvocation = 4416, OpSubgroupBallotKHR = 4421, OpSubgroupFirstInvocationKHR = 4422, OpSubgroupAllKHR = 4428, OpSubgroupAnyKHR = 4429, OpSubgroupAllEqualKHR = 4430, OpSubgroupReadInvocationKHR = 4432, OpTraceRayKHR = 4445, OpExecuteCallableKHR = 4446, OpConvertUToAccelerationStructureKHR = 4447, OpIgnoreIntersectionKHR = 4448, OpTerminateRayKHR = 4449, OpTypeRayQueryKHR = 4472, OpRayQueryInitializeKHR = 4473, OpRayQueryTerminateKHR = 4474, OpRayQueryGenerateIntersectionKHR = 4475, OpRayQueryConfirmIntersectionKHR = 4476, OpRayQueryProceedKHR = 4477, OpRayQueryGetIntersectionTypeKHR = 4479, OpGroupIAddNonUniformAMD = 5000, OpGroupFAddNonUniformAMD = 5001, OpGroupFMinNonUniformAMD = 5002, OpGroupUMinNonUniformAMD = 5003, OpGroupSMinNonUniformAMD = 5004, OpGroupFMaxNonUniformAMD = 5005, OpGroupUMaxNonUniformAMD = 5006, OpGroupSMaxNonUniformAMD = 5007, OpFragmentMaskFetchAMD = 5011, OpFragmentFetchAMD = 5012, OpReadClockKHR = 5056, OpImageSampleFootprintNV = 5283, OpGroupNonUniformPartitionNV = 5296, OpWritePackedPrimitiveIndices4x8NV = 5299, OpReportIntersectionKHR = 5334, OpReportIntersectionNV = 5334, OpIgnoreIntersectionNV = 5335, OpTerminateRayNV = 5336, OpTraceNV = 5337, OpTypeAccelerationStructureKHR = 5341, OpTypeAccelerationStructureNV = 5341, OpExecuteCallableNV = 5344, 
OpTypeCooperativeMatrixNV = 5358, OpCooperativeMatrixLoadNV = 5359, OpCooperativeMatrixStoreNV = 5360, OpCooperativeMatrixMulAddNV = 5361, OpCooperativeMatrixLengthNV = 5362, OpBeginInvocationInterlockEXT = 5364, OpEndInvocationInterlockEXT = 5365, OpDemoteToHelperInvocationEXT = 5380, OpIsHelperInvocationEXT = 5381, OpSubgroupShuffleINTEL = 5571, OpSubgroupShuffleDownINTEL = 5572, OpSubgroupShuffleUpINTEL = 5573, OpSubgroupShuffleXorINTEL = 5574, OpSubgroupBlockReadINTEL = 5575, OpSubgroupBlockWriteINTEL = 5576, OpSubgroupImageBlockReadINTEL = 5577, OpSubgroupImageBlockWriteINTEL = 5578, OpSubgroupImageMediaBlockReadINTEL = 5580, OpSubgroupImageMediaBlockWriteINTEL = 5581, OpUCountLeadingZerosINTEL = 5585, OpUCountTrailingZerosINTEL = 5586, OpAbsISubINTEL = 5587, OpAbsUSubINTEL = 5588, OpIAddSatINTEL = 5589, OpUAddSatINTEL = 5590, OpIAverageINTEL = 5591, OpUAverageINTEL = 5592, OpIAverageRoundedINTEL = 5593, OpUAverageRoundedINTEL = 5594, OpISubSatINTEL = 5595, OpUSubSatINTEL = 5596, OpIMul32x16INTEL = 5597, OpUMul32x16INTEL = 5598, OpConstFunctionPointerINTEL = 5600, OpFunctionPointerCallINTEL = 5601, OpAsmTargetINTEL = 5609, OpAsmINTEL = 5610, OpAsmCallINTEL = 5611, OpDecorateString = 5632, OpDecorateStringGOOGLE = 5632, OpMemberDecorateString = 5633, OpMemberDecorateStringGOOGLE = 5633, OpVmeImageINTEL = 5699, OpTypeVmeImageINTEL = 5700, OpTypeAvcImePayloadINTEL = 5701, OpTypeAvcRefPayloadINTEL = 5702, OpTypeAvcSicPayloadINTEL = 5703, OpTypeAvcMcePayloadINTEL = 5704, OpTypeAvcMceResultINTEL = 5705, OpTypeAvcImeResultINTEL = 5706, OpTypeAvcImeResultSingleReferenceStreamoutINTEL = 5707, OpTypeAvcImeResultDualReferenceStreamoutINTEL = 5708, OpTypeAvcImeSingleReferenceStreaminINTEL = 5709, OpTypeAvcImeDualReferenceStreaminINTEL = 5710, OpTypeAvcRefResultINTEL = 5711, OpTypeAvcSicResultINTEL = 5712, OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL = 5713, OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL = 5714, 
OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL = 5715, OpSubgroupAvcMceSetInterShapePenaltyINTEL = 5716, OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL = 5717, OpSubgroupAvcMceSetInterDirectionPenaltyINTEL = 5718, OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL = 5719, OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL = 5720, OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL = 5721, OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL = 5722, OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL = 5723, OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL = 5724, OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL = 5725, OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL = 5726, OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL = 5727, OpSubgroupAvcMceSetAcOnlyHaarINTEL = 5728, OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL = 5729, OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL = 5730, OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL = 5731, OpSubgroupAvcMceConvertToImePayloadINTEL = 5732, OpSubgroupAvcMceConvertToImeResultINTEL = 5733, OpSubgroupAvcMceConvertToRefPayloadINTEL = 5734, OpSubgroupAvcMceConvertToRefResultINTEL = 5735, OpSubgroupAvcMceConvertToSicPayloadINTEL = 5736, OpSubgroupAvcMceConvertToSicResultINTEL = 5737, OpSubgroupAvcMceGetMotionVectorsINTEL = 5738, OpSubgroupAvcMceGetInterDistortionsINTEL = 5739, OpSubgroupAvcMceGetBestInterDistortionsINTEL = 5740, OpSubgroupAvcMceGetInterMajorShapeINTEL = 5741, OpSubgroupAvcMceGetInterMinorShapeINTEL = 5742, OpSubgroupAvcMceGetInterDirectionsINTEL = 5743, OpSubgroupAvcMceGetInterMotionVectorCountINTEL = 5744, OpSubgroupAvcMceGetInterReferenceIdsINTEL = 5745, OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL = 5746, OpSubgroupAvcImeInitializeINTEL = 5747, OpSubgroupAvcImeSetSingleReferenceINTEL = 5748, OpSubgroupAvcImeSetDualReferenceINTEL = 5749, OpSubgroupAvcImeRefWindowSizeINTEL = 5750, OpSubgroupAvcImeAdjustRefOffsetINTEL = 
5751, OpSubgroupAvcImeConvertToMcePayloadINTEL = 5752, OpSubgroupAvcImeSetMaxMotionVectorCountINTEL = 5753, OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL = 5754, OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL = 5755, OpSubgroupAvcImeSetWeightedSadINTEL = 5756, OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL = 5757, OpSubgroupAvcImeEvaluateWithDualReferenceINTEL = 5758, OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL = 5759, OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL = 5760, OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL = 5761, OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL = 5762, OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL = 5763, OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL = 5764, OpSubgroupAvcImeConvertToMceResultINTEL = 5765, OpSubgroupAvcImeGetSingleReferenceStreaminINTEL = 5766, OpSubgroupAvcImeGetDualReferenceStreaminINTEL = 5767, OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL = 5768, OpSubgroupAvcImeStripDualReferenceStreamoutINTEL = 5769, OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL = 5770, OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL = 5771, OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL = 5772, OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL = 5773, OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL = 5774, OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL = 5775, OpSubgroupAvcImeGetBorderReachedINTEL = 5776, OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL = 5777, OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL = 5778, OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL = 5779, OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL = 5780, OpSubgroupAvcFmeInitializeINTEL = 5781, OpSubgroupAvcBmeInitializeINTEL = 5782, OpSubgroupAvcRefConvertToMcePayloadINTEL = 5783, 
OpSubgroupAvcRefSetBidirectionalMixDisableINTEL = 5784, OpSubgroupAvcRefSetBilinearFilterEnableINTEL = 5785, OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL = 5786, OpSubgroupAvcRefEvaluateWithDualReferenceINTEL = 5787, OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL = 5788, OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL = 5789, OpSubgroupAvcRefConvertToMceResultINTEL = 5790, OpSubgroupAvcSicInitializeINTEL = 5791, OpSubgroupAvcSicConfigureSkcINTEL = 5792, OpSubgroupAvcSicConfigureIpeLumaINTEL = 5793, OpSubgroupAvcSicConfigureIpeLumaChromaINTEL = 5794, OpSubgroupAvcSicGetMotionVectorMaskINTEL = 5795, OpSubgroupAvcSicConvertToMcePayloadINTEL = 5796, OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL = 5797, OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL = 5798, OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL = 5799, OpSubgroupAvcSicSetBilinearFilterEnableINTEL = 5800, OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL = 5801, OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL = 5802, OpSubgroupAvcSicEvaluateIpeINTEL = 5803, OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL = 5804, OpSubgroupAvcSicEvaluateWithDualReferenceINTEL = 5805, OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL = 5806, OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL = 5807, OpSubgroupAvcSicConvertToMceResultINTEL = 5808, OpSubgroupAvcSicGetIpeLumaShapeINTEL = 5809, OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL = 5810, OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL = 5811, OpSubgroupAvcSicGetPackedIpeLumaModesINTEL = 5812, OpSubgroupAvcSicGetIpeChromaModeINTEL = 5813, OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814, OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815, OpSubgroupAvcSicGetInterRawSadsINTEL = 5816, OpVariableLengthArrayINTEL = 5818, OpSaveMemoryINTEL = 5819, OpRestoreMemoryINTEL = 5820, OpLoopControlINTEL = 5887, OpPtrCastToCrossWorkgroupINTEL = 5934, OpCrossWorkgroupCastToPtrINTEL = 5938, OpReadPipeBlockingINTEL = 5946, OpWritePipeBlockingINTEL = 
5947, OpFPGARegINTEL = 5949, OpRayQueryGetRayTMinKHR = 6016, OpRayQueryGetRayFlagsKHR = 6017, OpRayQueryGetIntersectionTKHR = 6018, OpRayQueryGetIntersectionInstanceCustomIndexKHR = 6019, OpRayQueryGetIntersectionInstanceIdKHR = 6020, OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR = 6021, OpRayQueryGetIntersectionGeometryIndexKHR = 6022, OpRayQueryGetIntersectionPrimitiveIndexKHR = 6023, OpRayQueryGetIntersectionBarycentricsKHR = 6024, OpRayQueryGetIntersectionFrontFaceKHR = 6025, OpRayQueryGetIntersectionCandidateAABBOpaqueKHR = 6026, OpRayQueryGetIntersectionObjectRayDirectionKHR = 6027, OpRayQueryGetIntersectionObjectRayOriginKHR = 6028, OpRayQueryGetWorldRayDirectionKHR = 6029, OpRayQueryGetWorldRayOriginKHR = 6030, OpRayQueryGetIntersectionObjectToWorldKHR = 6031, OpRayQueryGetIntersectionWorldToObjectKHR = 6032, OpAtomicFAddEXT = 6035, OpTypeBufferSurfaceINTEL = 6086, OpTypeStructContinuedINTEL = 6090, OpConstantCompositeContinuedINTEL = 6091, OpSpecConstantCompositeContinuedINTEL = 6092, }, }
apache-2.0
lovejinstar/POOL2.LINE
src/main/java/org/apache/commons/pool2/BaseKeyedPooledObjectFactory.java
3803
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.pool2;

/**
 * A base implementation of <code>KeyedPooledObjectFactory</code>.
 * <p>
 * All operations defined here are essentially no-ops; subclasses only need to
 * implement {@link #create(Object)} and {@link #wrap(Object)}.
 * </p>
 * This class is immutable, and therefore thread-safe.
 *
 * @see KeyedPooledObjectFactory
 *
 * @param <K> The type of keys managed by this factory.
 * @param <V> Type of element managed by this factory.
 *
 * @version $Revision: 1333925 $
 *
 * @since 2.0
 */
public abstract class BaseKeyedPooledObjectFactory<K,V> implements KeyedPooledObjectFactory<K,V> {
    /**
     * Create an instance that can be served by the pool.
     *
     * @param key the key used when constructing the object
     * @return an instance that can be served by the pool
     *
     * @throws Exception if there is a problem creating a new instance,
     *    this will be propagated to the code requesting an object.
     */
    public abstract V create(K key)
        throws Exception;

    /**
     * Wrap the provided instance with an implementation of
     * {@link PooledObject}.
     *
     * @param value the instance to wrap
     *
     * @return The provided instance, wrapped by a {@link PooledObject}
     */
    public abstract PooledObject<V> wrap(V value);

    /**
     * Create a new {@link PooledObject} for the given key by delegating to
     * {@link #create(Object)} and then {@link #wrap(Object)}.
     *
     * @param key the key used when constructing the object
     * @return a {@code PooledObject} wrapping the newly created instance
     * @throws Exception propagated from {@link #create(Object)}
     */
    @Override
    public PooledObject<V> makeObject(K key) throws Exception {
        return wrap(create(key));
    }

    /**
     * Destroy an instance no longer needed by the pool.
     * <p>
     * The default implementation is a no-op.
     *
     * @param key the key used when selecting the instance
     * @param p a {@code PooledObject} wrapping the instance to be destroyed
     */
    @Override
    public void destroyObject(K key, PooledObject<V> p)
        throws Exception {
    }

    /**
     * Ensures that the instance is safe to be returned by the pool.
     * <p>
     * The default implementation always returns <tt>true</tt>.
     *
     * @param key the key used when selecting the object
     * @param p a {@code PooledObject} wrapping the instance to be validated
     * @return always <code>true</code> in the default implementation
     */
    @Override
    public boolean validateObject(K key, PooledObject<V> p) {
        return true;
    }

    /**
     * Reinitialize an instance to be returned by the pool.
     * <p>
     * The default implementation is a no-op.
     *
     * @param key the key used when selecting the object
     * @param p a {@code PooledObject} wrapping the instance to be activated
     */
    @Override
    public void activateObject(K key, PooledObject<V> p)
        throws Exception {
    }

    /**
     * Uninitialize an instance to be returned to the idle object pool.
     * <p>
     * The default implementation is a no-op.
     *
     * @param key the key used when selecting the object
     * @param p a {@code PooledObject} wrapping the instance to be passivated
     */
    @Override
    public void passivateObject(K key, PooledObject<V> p)
        throws Exception {
    }
}
apache-2.0
329277920/Snail
Snail.Pay.Ali.Sdk/Domain/AlipayDataDataserviceAntdataassetsClearnodeCreateModel.cs
449
using System;
using System.Xml.Serialization;

namespace Aop.Api.Domain
{
    /// <summary>
    /// AlipayDataDataserviceAntdataassetsClearnodeCreateModel Data Structure.
    /// Request model carrying the parameters for the Alipay data-assets
    /// "clear node create" API call; serialized to XML for transport.
    /// </summary>
    [Serializable]
    public class AlipayDataDataserviceAntdataassetsClearnodeCreateModel : AopObject
    {
        /// <summary>
        /// The GUID of the ODPS table.
        /// </summary>
        [XmlElement("guid")]
        public string Guid { get; set; }
    }
}
apache-2.0
sacrefies/csc643_bigdata
project3/src/net/team1/dev/HousingAnalysis.java
4559
// ---------------------------------------------------------------------------- // Copyright 2017 team1@course_bigdata, Saint Joseph's University // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ---------------------------------------------------------------------------- package net.team1.dev; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.*; import org.apache.hadoop.mapred.lib.MultipleInputs; import java.io.IOException; import java.util.HashMap; import java.util.logging.Logger; /** * A class for the map/reduce process */ public class HousingAnalysis { /** * The main entry point for the map/reduce runner. 
* * @param args 2 args: \<input dir\> \<output dir\> * @throws Exception Throws IOException */ public static void main(String[] args) throws Exception { Path inputDir = new Path(args[0]); Path outputDir = new Path(args[1]); FileSystem fs = FileSystem.get(new Configuration()); if (!fs.exists(inputDir)) throw new IOException("The input path does not exist."); if (fs.isFile(inputDir)) throw new IOException("The input path is a file."); if (fs.exists(outputDir)) fs.delete(outputDir, true); // set job configuration JobConf conf = new JobConf(HousingAnalysis.class); conf.setJobName("housinganalysis"); conf.setOutputKeyClass(Text.class); conf.setOutputValueClass(Text.class); conf.setOutputFormat(TextOutputFormat.class); conf.setCombinerClass(HousingReducer.class); conf.setReducerClass(HousingReducer.class); // set multiple input files HashMap<Path, Class<? extends Mapper>> inputMappers = getInputFilePaths(inputDir, fs); for (Path p : inputMappers.keySet()) { MultipleInputs.addInputPath(conf, p, TextInputFormat.class, inputMappers.get(p)); LOG.info(p.getName() + ": " + inputMappers.get(p).getName()); } // set output FileOutputFormat.setOutputPath(conf, outputDir); // start the job JobClient.runJob(conf); } /** * Walk through the input directory to get all the input files and construct the file path and mapper class dictionary. * * @param inputDir The path of the input directory * @param fs The HDFS file system object. * @return A {@link HashMap} whose keys are the file path and values are the corresponding mapper classes. * @throws IOException IOException */ private static HashMap<Path, Class<? extends Mapper>> getInputFilePaths(Path inputDir, FileSystem fs) throws IOException { HashMap<Path, Class<? 
extends Mapper>> mappers = new HashMap<>(); FileStatus files[] = fs.listStatus(inputDir, new PathFilter() { @Override public boolean accept(Path path) { String name = path.getName(); return name.endsWith(".txt") && name.startsWith("thads"); } }); for (FileStatus f : files) { Path p = f.getPath(); String fileName = p.getName(); if (fileName.contains("2013")) mappers.put(p, Mapper2013.class); else if (fileName.contains("2003")) mappers.put(p, Mapper2003.class); else if (fileName.contains("2005")) mappers.put(p, Mapper2005.class); else if (fileName.contains("2007")) mappers.put(p, Mapper2007.class); else if (fileName.contains("2009")) mappers.put(p, Mapper2009.class); else if (fileName.contains("2011")) mappers.put(p, Mapper2011.class); } return mappers; } private static final Logger LOG = Logger.getLogger(HousingAnalysis.class.getName()); }
apache-2.0
Netflix/hollow
hollow/src/main/java/com/netflix/hollow/api/sampling/SampleResult.java
1419
/*
 * Copyright 2016-2019 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.netflix.hollow.api.sampling;

/**
 * An immutable (identifier, sample count) pair.
 * <p>
 * The natural ordering sorts results by descending sample count, breaking
 * ties alphabetically by identifier.
 */
public class SampleResult implements Comparable<SampleResult> {

    private final String identifier;
    private final long numSamples;

    /**
     * @param identifier label naming what was sampled
     * @param numSamples number of samples recorded for that label
     */
    public SampleResult(String identifier, long numSamples) {
        this.identifier = identifier;
        this.numSamples = numSamples;
    }

    /** @return the label naming what was sampled */
    public String getIdentifier() {
        return identifier;
    }

    /** @return the number of samples recorded */
    public long getNumSamples() {
        return numSamples;
    }

    /**
     * Orders by sample count descending; equal counts fall back to the
     * identifier's natural (ascending) String order.
     */
    @Override
    public int compareTo(SampleResult o) {
        // Arguments reversed on purpose: higher counts sort first.
        int bySamples = Long.compare(o.numSamples, numSamples);
        return bySamples != 0 ? bySamples : identifier.compareTo(o.identifier);
    }

    /** @return {@code "<identifier>: <numSamples>"} */
    @Override
    public String toString() {
        return new StringBuilder(identifier).append(": ").append(numSamples).toString();
    }
}
apache-2.0
d/fruit
tests/injector_non_class_type.cpp
861
// expect-compile-error NonClassTypeError<X\*,X>|A non-class type T was specified. Use C instead. /* * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <fruit/fruit.h> #include "test_macros.h" using fruit::Injector; using fruit::Component; struct X { }; fruit::Injector<X*> injector;
apache-2.0
NationalSecurityAgency/ghidra
Ghidra/Framework/SoftwareModeling/src/main/java/ghidra/pcodeCPort/utils/MutableInt.java
1029
/* ###
 * IP: GHIDRA
 * REVIEWED: YES
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.pcodeCPort.utils;

/**
 * A simple mutable holder for a primitive {@code int}, useful for passing an
 * integer "by reference" (e.g. as an out-parameter).
 * <p>
 * This class is not thread-safe.
 */
public class MutableInt {

	// Package-private by design (kept for compatibility with existing callers).
	int value;

	/**
	 * Create a holder initialized to the given value.
	 *
	 * @param value the initial value
	 */
	public MutableInt(int value) {
		this.value = value;
	}

	/** Create a holder initialized to zero. */
	public MutableInt() {
		this.value = 0;
	}

	@Override
	public String toString() {
		// Direct conversion instead of the "" + value concatenation idiom.
		return Integer.toString(value);
	}

	/** Increment the held value by one. */
	public void increment() {
		value++;
	}

	/**
	 * @return the current value
	 */
	public int get() {
		return value;
	}

	/**
	 * Replace the held value.
	 *
	 * @param i the new value
	 */
	public void set(int i) {
		value = i;
	}

	/**
	 * Add an amount to the held value.
	 *
	 * @param amount the amount to add (may be negative)
	 */
	public void add(int amount) {
		value += amount;
	}
}
apache-2.0
opensource21/fuwesta
fuwesta-sample/src/main/java/de/ppi/samples/fuwesta/model/User.java
5215
// User.java // package de.ppi.samples.fuwesta.model; import java.util.ArrayList; import java.util.List; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.OneToMany; import de.ppi.fuwesta.jpa.helper.JPAList; import de.ppi.fuwesta.jpa.helper.VersionedModel; import de.ppi.fuwesta.spring.mvc.formatter.NonEmpty; /** * A user who can create blog entries. * * @author niels */ @Entity(name = "T_USER") public class User extends VersionedModel { /** * Unique identifier for the user. */ @Column(nullable = false, unique = true, length = 10) @NonEmpty private String userId; /** The first name of the user. */ @Column(length = 50) private String firstName; /** The last name of the user. */ @Column(length = 100) private String lastName; /** * The sex of the user. */ @Column(length = 1) private Character sex; /** * The list of postings the user has created. */ @OneToMany(mappedBy = "user", cascade = CascadeType.ALL) private List<Post> postings; /** * Initiates an object of type User. * * @param userId the userId */ public User(String userId) { super(); this.userId = userId; } /** * Initiates an object of type User. * */ public User() { super(); } /** * Gets the unique identifier for the user. * * @return the unique identifier for the user */ public String getUserId() { return userId; } /** * Gets the first name of the user. * * @return the first name of the user */ public String getFirstName() { return firstName; } /** * Sets the first name of the user. * * @param firstName the new first name of the user */ public void setFirstName(final String firstName) { this.firstName = firstName; } /** * Gets the last name of the user. * * @return the last name of the user */ public String getLastName() { return lastName; } /** * Sets the last name of the user. 
* * @param lastName the new last name of the user */ public void setLastName(final String lastName) { this.lastName = lastName; } /** * Gets the sex of the user. * * @return the sex of the user */ public Sex getSex() { return Sex.parse(this.sex); } /** * Sets the sex of the user. * * @param sex the new sex of the user */ public void setSex(Sex sex) { this.sex = sex.getId(); } /** * Sets the unique identifier for the user. * * @param userId the new unique identifier for the user */ public void setUserId(final String userId) { this.userId = userId; } /** * Gets the list of postings the user has created. * * @return the list of postings the user has created */ public List<Post> getPostings() { return getPostingsInternal(); } /** * Delivers definitely a {@link PostList}. * * @return a PostList which wraps the internal list. */ private PostList getPostingsInternal() { if (postings == null) { postings = new ArrayList<Post>(); } if (!(postings instanceof PostList)) { return new PostList(postings, this); } else { return (PostList) postings; } } /** * Sets the list of postings the user has created. * * @param postings the new list of postings the user has created */ public void setPostings(final List<Post> postings) { getPostingsInternal().set(postings); } /** * {@inheritDoc} */ @Override public String toString() { return "User [id=" + getId() + ", userId=" + userId + ", firstName=" + firstName + ", lastName=" + lastName + ", postings=" + postings + "]"; } /** * List which handles the association between {@link User} and {@link Post}. * */ private static final class PostList extends JPAList<Post, User> { /** * Initiates an object of type PostList. * * @param associatedEntity the user-object */ public PostList(User associatedEntity) { super(associatedEntity); } /** * Initiates an object of type PostList. * * @param associatedEntity the user-object * @param internalList the internalList to store the entries. 
*/ public PostList(List<Post> internalList, User associatedEntity) { super(internalList, associatedEntity); } /** * {@inheritDoc} */ @Override public void add(Post entity, User associatedEntity) { entity.setUser(associatedEntity); } /** * {@inheritDoc} */ @Override public void remove(Post entity, User associatedEntity) { entity.setUser(null); } } }
apache-2.0
apereo/cas
support/cas-server-support-authy/src/main/java/org/apereo/cas/adaptors/authy/config/support/authentication/AuthyAuthenticationEventExecutionPlanConfiguration.java
7066
package org.apereo.cas.adaptors.authy.config.support.authentication;

import org.apereo.cas.adaptors.authy.AuthyAuthenticationHandler;
import org.apereo.cas.adaptors.authy.AuthyClientInstance;
import org.apereo.cas.adaptors.authy.AuthyMultifactorAuthenticationProvider;
import org.apereo.cas.adaptors.authy.AuthyTokenCredential;
import org.apereo.cas.adaptors.authy.web.flow.AuthyAuthenticationRegistrationWebflowAction;
import org.apereo.cas.authentication.AuthenticationEventExecutionPlanConfigurer;
import org.apereo.cas.authentication.AuthenticationHandler;
import org.apereo.cas.authentication.AuthenticationMetaDataPopulator;
import org.apereo.cas.authentication.MultifactorAuthenticationFailureModeEvaluator;
import org.apereo.cas.authentication.MultifactorAuthenticationProvider;
import org.apereo.cas.authentication.bypass.MultifactorAuthenticationProviderBypassEvaluator;
import org.apereo.cas.authentication.handler.ByCredentialTypeAuthenticationHandlerResolver;
import org.apereo.cas.authentication.metadata.AuthenticationContextAttributeMetaDataPopulator;
import org.apereo.cas.authentication.principal.PrincipalFactory;
import org.apereo.cas.authentication.principal.PrincipalFactoryUtils;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.services.ServicesManager;
import com.authy.AuthyApiClient;
import lombok.val;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ScopedProxyMode;
import org.springframework.webflow.execution.Action;

import java.net.URL;

/**
 * This is {@link AuthyAuthenticationEventExecutionPlanConfiguration}.
 *
 * <p>Wires the Authy multifactor authentication support into the CAS
 * authentication event execution plan: the Authy REST client, the token
 * credential handler, the MFA provider, the context-attribute metadata
 * populator, and the registration webflow action. The whole configuration
 * is only activated when the {@code cas.authn.mfa.authy.api-key} property
 * is defined.
 *
 * @author Misagh Moayyed
 * @author Dmitriy Kopylenko
 * @since 5.1.0
 */
@EnableConfigurationProperties(CasConfigurationProperties.class)
@ConditionalOnProperty(prefix = "cas.authn.mfa.authy", name = "api-key")
@Configuration(value = "AuthyAuthenticationEventExecutionPlanConfiguration", proxyBeanMethods = false)
public class AuthyAuthenticationEventExecutionPlanConfiguration {

    /**
     * Builds the wrapper around the Authy SDK client.
     *
     * @param casProperties CAS settings; reads {@code cas.authn.mfa.authy.*}
     * @return the {@link AuthyClientInstance} used by the handler and webflow action
     * @throws Exception if the configured API URL is malformed
     */
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    @Bean
    @ConditionalOnMissingBean(name = "authyClientInstance")
    public AuthyClientInstance authyClientInstance(final CasConfigurationProperties casProperties) throws Exception {
        val properties = casProperties.getAuthn().getMfa().getAuthy();
        // Fall back to the SDK's default endpoint when no api-url is configured.
        val authyUrl = StringUtils.defaultIfBlank(properties.getApiUrl(), AuthyApiClient.DEFAULT_API_URI);
        val url = new URL(authyUrl);
        // A plain-http endpoint marks the SDK client as a test instance.
        val testFlag = url.getProtocol().equalsIgnoreCase("http");
        val authyClient = new AuthyApiClient(properties.getApiKey(), authyUrl, testFlag);
        return new AuthyClientInstance(authyClient, properties);
    }

    /**
     * Authentication handler that validates {@code AuthyTokenCredential}s
     * against the Authy service.
     *
     * @param casProperties        CAS settings
     * @param authyPrincipalFactory factory producing the authenticated principal
     * @param authyClientInstance  the Authy client wrapper
     * @param servicesManager      the registered-services manager
     * @return the Authy authentication handler
     */
    @ConditionalOnMissingBean(name = "authyAuthenticationHandler")
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    @Bean
    public AuthenticationHandler authyAuthenticationHandler(
        final CasConfigurationProperties casProperties,
        @Qualifier("authyPrincipalFactory")
        final PrincipalFactory authyPrincipalFactory,
        @Qualifier("authyClientInstance")
        final AuthyClientInstance authyClientInstance,
        @Qualifier(ServicesManager.BEAN_NAME)
        final ServicesManager servicesManager) {
        val authy = casProperties.getAuthn().getMfa().getAuthy();
        val forceVerification = authy.isForceVerification();
        return new AuthyAuthenticationHandler(authy.getName(), servicesManager, authyPrincipalFactory,
            authyClientInstance, forceVerification, authy.getOrder());
    }

    /**
     * Principal factory used by the Authy handler.
     *
     * @return a default {@link PrincipalFactory}
     */
    @ConditionalOnMissingBean(name = "authyPrincipalFactory")
    @Bean
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    public PrincipalFactory authyPrincipalFactory() {
        return PrincipalFactoryUtils.newPrincipalFactory();
    }

    /**
     * The Authy multifactor provider, configured with its bypass evaluator,
     * failure mode, rank and id from CAS settings.
     *
     * @param casProperties        CAS settings
     * @param authyBypassEvaluator evaluator deciding when MFA may be bypassed
     * @param failureModeEvaluator evaluator applied when the provider is unavailable
     * @return the Authy {@link MultifactorAuthenticationProvider}
     */
    @Bean
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    public MultifactorAuthenticationProvider authyAuthenticatorMultifactorAuthenticationProvider(
        final CasConfigurationProperties casProperties,
        @Qualifier("authyBypassEvaluator")
        final MultifactorAuthenticationProviderBypassEvaluator authyBypassEvaluator,
        @Qualifier("failureModeEvaluator")
        final MultifactorAuthenticationFailureModeEvaluator failureModeEvaluator) {
        val p = new AuthyMultifactorAuthenticationProvider();
        p.setBypassEvaluator(authyBypassEvaluator);
        val authy = casProperties.getAuthn().getMfa().getAuthy();
        p.setFailureMode(authy.getFailureMode());
        p.setFailureModeEvaluator(failureModeEvaluator);
        p.setOrder(authy.getRank());
        p.setId(authy.getId());
        return p;
    }

    /**
     * Populator that records the satisfied MFA context attribute on
     * authentications handled by the Authy handler.
     *
     * @param casProperties                                       CAS settings
     * @param authyAuthenticationHandler                          the Authy handler
     * @param authyAuthenticatorMultifactorAuthenticationProvider the Authy provider
     * @return the metadata populator
     */
    @Bean
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    public AuthenticationMetaDataPopulator authyAuthenticationMetaDataPopulator(
        final CasConfigurationProperties casProperties,
        @Qualifier("authyAuthenticationHandler")
        final AuthenticationHandler authyAuthenticationHandler,
        @Qualifier("authyAuthenticatorMultifactorAuthenticationProvider")
        final MultifactorAuthenticationProvider authyAuthenticatorMultifactorAuthenticationProvider) {
        return new AuthenticationContextAttributeMetaDataPopulator(
            casProperties.getAuthn().getMfa().getCore().getAuthenticationContextAttribute(),
            authyAuthenticationHandler,
            authyAuthenticatorMultifactorAuthenticationProvider.getId());
    }

    /**
     * Webflow action that registers a user with Authy during the MFA flow.
     *
     * @param authyClientInstance the Authy client wrapper
     * @return the registration webflow {@link Action}
     */
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    @Bean
    public Action authyAuthenticationRegistrationWebflowAction(
        @Qualifier("authyClientInstance")
        final AuthyClientInstance authyClientInstance) {
        return new AuthyAuthenticationRegistrationWebflowAction(authyClientInstance);
    }

    /**
     * Registers the Authy handler, its metadata populator and a resolver that
     * routes {@link AuthyTokenCredential}s to it, into the authentication
     * event execution plan.
     *
     * @param authyAuthenticationHandler          the Authy handler
     * @param authyAuthenticationMetaDataPopulator the metadata populator
     * @return the plan configurer
     */
    @ConditionalOnMissingBean(name = "authyAuthenticationEventExecutionPlanConfigurer")
    @Bean
    @RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
    public AuthenticationEventExecutionPlanConfigurer authyAuthenticationEventExecutionPlanConfigurer(
        @Qualifier("authyAuthenticationHandler")
        final AuthenticationHandler authyAuthenticationHandler,
        @Qualifier("authyAuthenticationMetaDataPopulator")
        final AuthenticationMetaDataPopulator authyAuthenticationMetaDataPopulator) {
        return plan -> {
            plan.registerAuthenticationHandler(authyAuthenticationHandler);
            plan.registerAuthenticationMetadataPopulator(authyAuthenticationMetaDataPopulator);
            plan.registerAuthenticationHandlerResolver(new ByCredentialTypeAuthenticationHandlerResolver(AuthyTokenCredential.class));
        };
    }
}
apache-2.0
anuzzolese/lizard
lizard.commons/src/main/java/it/cnr/istc/stlab/lizard/commons/OntologyCodeProject.java
635
package it.cnr.istc.stlab.lizard.commons;

import java.net.URI;

import it.cnr.istc.stlab.lizard.commons.model.OntologyCodeModel;

/**
 * Pairs an ontology's URI with the {@link OntologyCodeModel} generated for it.
 *
 * <p>The code model is fixed at construction time; only the ontology URI may
 * be re-assigned afterwards.
 */
public class OntologyCodeProject {

    /** Code model generated for the ontology; immutable after construction. */
    private final OntologyCodeModel ontologyCodeModel;

    /** URI identifying the ontology this project was generated from. */
    private URI ontologyURI;

    /**
     * Creates a project binding an ontology URI to its code model.
     *
     * @param ontologyURI       the ontology's URI
     * @param ontologyCodeModel the generated code model
     */
    public OntologyCodeProject(URI ontologyURI, OntologyCodeModel ontologyCodeModel) {
        this.ontologyURI = ontologyURI;
        this.ontologyCodeModel = ontologyCodeModel;
    }

    /**
     * Returns the generated code model.
     *
     * @return the {@link OntologyCodeModel} for this project
     */
    public OntologyCodeModel getOntologyCodeModel() {
        return this.ontologyCodeModel;
    }

    /**
     * Returns the ontology URI.
     *
     * @return the URI of the ontology
     */
    public URI getOntologyURI() {
        return this.ontologyURI;
    }

    /**
     * Re-assigns the ontology URI.
     *
     * @param ontologyURI the new ontology URI
     */
    public void setOntologyURI(URI ontologyURI) {
        this.ontologyURI = ontologyURI;
    }
}
apache-2.0
yauritux/venice-legacy
Venice/Venice-Interface-Model/src/main/java/com/gdn/venice/facade/RafUserRoleSessionEJBRemote.java
2346
package com.gdn.venice.facade;

import java.util.ArrayList;
import java.util.List;

import javax.ejb.Remote;

import com.djarum.raf.utilities.JPQLAdvancedQueryCriteria;
import com.gdn.venice.facade.finder.FinderReturn;
import com.gdn.venice.persistence.RafUserRole;

/**
 * Remote session-bean facade exposing CRUD and finder operations for
 * {@link RafUserRole} entities.
 */
@Remote
public interface RafUserRoleSessionEJBRemote {
    /**
     * queryByRange - allows querying by range/block.
     *
     * @param jpqlStmt    the JPQL statement to execute
     * @param firstResult index of the first result to return
     * @param maxResults  maximum number of results to return
     * @return a list of RafUserRole
     */
    public List<RafUserRole> queryByRange(String jpqlStmt, int firstResult, int maxResults);

    /**
     * persistRafUserRole - persists a RafUserRole.
     *
     * @param rafUserRole the entity to persist
     * @return the persisted RafUserRole
     */
    public RafUserRole persistRafUserRole(RafUserRole rafUserRole);

    /**
     * persistRafUserRoleList - persists a list of RafUserRole.
     *
     * @param rafUserRoleList the entities to persist
     * @return the list of persisted RafUserRole
     */
    public ArrayList<RafUserRole> persistRafUserRoleList(
            List<RafUserRole> rafUserRoleList);

    /**
     * mergeRafUserRole - merges a RafUserRole.
     *
     * @param rafUserRole the entity to merge
     * @return the merged RafUserRole
     */
    public RafUserRole mergeRafUserRole(RafUserRole rafUserRole);

    /**
     * mergeRafUserRoleList - merges a list of RafUserRole.
     *
     * @param rafUserRoleList the entities to merge
     * @return the merged list of RafUserRole
     */
    public ArrayList<RafUserRole> mergeRafUserRoleList(
            List<RafUserRole> rafUserRoleList);

    /**
     * removeRafUserRole - removes a RafUserRole.
     *
     * @param rafUserRole the entity to remove
     */
    public void removeRafUserRole(RafUserRole rafUserRole);

    /**
     * removeRafUserRoleList - removes a list of RafUserRole.
     *
     * @param rafUserRoleList the entities to remove
     */
    public void removeRafUserRoleList(List<RafUserRole> rafUserRoleList);

    /**
     * findByRafUserRoleLike - finds a list of RafUserRole by example ("like" match).
     *
     * @param rafUserRole example entity used for the like-match
     * @param criteria    additional advanced query criteria
     * @param firstResult index of the first result to return
     * @param maxResults  maximum number of results to return
     * @return the list of RafUserRole found
     */
    public List<RafUserRole> findByRafUserRoleLike(RafUserRole rafUserRole,
            JPQLAdvancedQueryCriteria criteria, int firstResult, int maxResults);

    /**
     * findByRafUserRoleLikeFR - like-match finder returning a finder-return
     * object (result list plus paging metadata).
     *
     * @param rafUserRole example entity used for the like-match
     * @param criteria    additional advanced query criteria
     * @param firstResult index of the first result to return
     * @param maxResults  maximum number of results to return
     * @return the FinderReturn wrapping the RafUserRole found
     */
    public FinderReturn findByRafUserRoleLikeFR(RafUserRole rafUserRole,
            JPQLAdvancedQueryCriteria criteria, int firstResult, int maxResults);
}
apache-2.0
enaml-ops/omg-product-bundle
products/redis/enaml-gen/broker-deregistrar/redis.go
232
package broker_deregistrar

/*
 * File Generated by enaml generator
 * !!! Please do not edit this file !!!
 */

// Redis is the product-level properties block for the broker-deregistrar
// job, holding the Redis service-broker settings serialized under the
// `redis` key of the job manifest.
type Redis struct {

	// Broker holds the service-broker settings.
	// Descr: Service name. Default: p-redis
	Broker *RedisBroker `yaml:"broker,omitempty"`
}
apache-2.0
webengfhnw/WE-CRM
modelling/php/Customer.php
1907
<?php
/**
 * Customer — plain data holder for a CRM customer record.
 *
 * Carries the customer's identity and contact details plus the id of the
 * agent the customer is assigned to. No validation or persistence logic
 * lives here; it is a transfer object for the DAO/service layers.
 *
 * @access private
 * @author andreas.martin
 */
class Customer
{
    /**
     * Primary key of the customer record.
     * @AttributeType int
     */
    protected $id;

    /**
     * Customer's display name.
     * @AttributeType String
     */
    protected $name;

    /**
     * Customer's e-mail address.
     * @AttributeType String
     */
    protected $email;

    /**
     * Customer's mobile phone number.
     * @AttributeType String
     */
    protected $mobile;

    /**
     * Id of the agent owning this customer (foreign key).
     * @AssociationType int
     * @AssociationMultiplicity 1
     */
    private $agentid;

    /**
     * Returns the customer id.
     * @access public
     * @return int
     * @ReturnType int
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * Sets the customer id.
     * @access public
     * @param int id
     * @return void
     * @ParamType id int
     * @ReturnType void
     */
    public function setId($id)
    {
        $this->id = $id;
    }

    /**
     * Returns the customer name.
     * @access public
     * @return String
     * @ReturnType String
     */
    public function getName()
    {
        return $this->name;
    }

    /**
     * Sets the customer name.
     * @access public
     * @param String name
     * @return void
     * @ParamType name String
     * @ReturnType void
     */
    public function setName($name)
    {
        $this->name = $name;
    }

    /**
     * Returns the customer e-mail address.
     * @access public
     * @return String
     * @ReturnType String
     */
    public function getEmail()
    {
        return $this->email;
    }

    /**
     * Sets the customer e-mail address.
     * @access public
     * @param String email
     * @return void
     * @ParamType email String
     * @ReturnType void
     */
    public function setEmail($email)
    {
        $this->email = $email;
    }

    /**
     * Returns the customer mobile number.
     * @access public
     * @return String
     * @ReturnType String
     */
    public function getMobile()
    {
        return $this->mobile;
    }

    /**
     * Sets the customer mobile number.
     * @access public
     * @param String mobile
     * @return void
     * @ParamType mobile String
     * @ReturnType void
     */
    public function setMobile($mobile)
    {
        $this->mobile = $mobile;
    }

    /**
     * Returns the owning agent's id.
     * @access public
     * @return int
     * @ReturnType int
     */
    public function getAgentid()
    {
        return $this->agentid;
    }

    /**
     * Sets the owning agent's id.
     * @access public
     * @param int agentid
     * @return void
     * @ParamType agentid int
     * @ReturnType void
     */
    public function setAgentid($agentid)
    {
        $this->agentid = $agentid;
    }
}
?>
apache-2.0
agarman/pulsar
pulsar-client/src/main/java/com/yahoo/pulsar/client/impl/BatchMessageIdImpl.java
2491
/**
 * Copyright 2016 Yahoo Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.yahoo.pulsar.client.impl;

// NOTE(review): com.google.common.base.Objects appears unused in this class — confirm before removing.
import com.google.common.base.Objects;
import com.google.common.collect.ComparisonChain;

/**
 * Message id for a message that was published as part of a batch: extends the
 * (ledgerId, entryId, partitionIndex) identity of {@code MessageIdImpl} with
 * the message's index within its batch.
 */
public class BatchMessageIdImpl extends MessageIdImpl implements Comparable<MessageIdImpl> {

    // Position of this message inside its batch.
    private final int batchIndex;

    /**
     * @param ledgerId       BookKeeper ledger id
     * @param entryId        entry id within the ledger
     * @param partitionIndex topic partition index
     * @param batchIndex     index of the message within the batch entry
     */
    public BatchMessageIdImpl(long ledgerId, long entryId, int partitionIndex, int batchIndex) {
        super(ledgerId, entryId, partitionIndex);
        this.batchIndex = batchIndex;
    }

    /** Returns the index of this message within its batch. */
    int getBatchIndex() {
        return batchIndex;
    }

    /**
     * Orders by ledgerId, then entryId, then batchIndex, then partitionIndex.
     *
     * @throws IllegalArgumentException if {@code o} is a plain {@code MessageIdImpl}
     *         rather than a {@code BatchMessageIdImpl} — batch and non-batch ids
     *         are not mutually comparable here.
     */
    @Override
    public int compareTo(MessageIdImpl o) {
        if (!(o instanceof BatchMessageIdImpl)) {
            throw new IllegalArgumentException(
                    "expected BatchMessageIdImpl object. Got instance of " + o.getClass().getName());
        }
        BatchMessageIdImpl other = (BatchMessageIdImpl) o;
        return ComparisonChain.start().compare(this.ledgerId, other.ledgerId).compare(this.entryId, other.entryId)
                .compare(this.batchIndex, other.batchIndex).compare(this.getPartitionIndex(), other.getPartitionIndex())
                .result();
    }

    @Override
    public int hashCode() {
        // Mixes all four id components; narrows long arithmetic to int deliberately.
        return (int) (31 * (ledgerId + 31 * entryId) + (31 * partitionIndex) + batchIndex);
    }

    /**
     * Equal only to another {@code BatchMessageIdImpl} with identical ledgerId,
     * entryId, partitionIndex and batchIndex (consistent with compareTo == 0).
     */
    @Override
    public boolean equals(Object obj) {
        if (obj instanceof BatchMessageIdImpl) {
            BatchMessageIdImpl other = (BatchMessageIdImpl) obj;
            return ledgerId == other.ledgerId && entryId == other.entryId
                    && partitionIndex == other.partitionIndex && batchIndex == other.batchIndex;
        }
        return false;
    }

    /** Renders as "ledgerId:entryId:partitionIndex:batchIndex". */
    @Override
    public String toString() {
        return String.format("%d:%d:%d:%d", ledgerId, entryId, partitionIndex, batchIndex);
    }

    // Serialization

    /** Serializes this id, including the batch index, via the superclass encoding. */
    @Override
    public byte[] toByteArray() {
        return toByteArray(batchIndex);
    }
}
apache-2.0
NationalSecurityAgency/ghidra
Ghidra/Features/Base/src/main/java/ghidra/app/util/bin/format/pe/debug/S_GPROC32_NEW.java
3162
/* ###
 * IP: GHIDRA
 * REVIEWED: YES
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.util.bin.format.pe.debug;

import ghidra.app.util.bin.*;
import ghidra.app.util.bin.format.*;

import java.io.*;

/**
 * A class to represent the S_GPROC32_NEW data structure — a CodeView global
 * procedure symbol record parsed out of a PE debug section. Fields are read
 * sequentially from the raw bytes in the exact record layout order.
 */
public class S_GPROC32_NEW extends DebugSymbol{
    private int pParent;     // lexical parent scope (symbol offset)
    private int pEnd;        // end-of-scope symbol offset
    private int pNext;       // next procedure symbol offset
    private int procLen;     // procedure length in bytes
    private int debugStart;  // offset where debuggable code starts (after prologue)
    private int debugEnd;    // offset where debuggable code ends (before epilogue)
    private int procOffset; //offset to start of procedure...
    private short procType;  // type index of the procedure

    /**
     * Factory entry point: creates an instance through the reader's factory,
     * then parses the record at {@code ptr}.
     *
     * @param length record length field
     * @param type   record type field
     * @param reader factory-bundled reader positioned over the debug data
     * @param ptr    offset of the record payload
     * @return the parsed S_GPROC32_NEW
     * @throws IOException if the underlying reader fails
     */
    static S_GPROC32_NEW createS_GPROC32_NEW(short length, short type,
            FactoryBundledWithBinaryReader reader, int ptr) throws IOException {
        S_GPROC32_NEW s_gproc32_new = (S_GPROC32_NEW) reader.getFactory().create(S_GPROC32_NEW.class);
        s_gproc32_new.initS_GPROC32_NEW(length, type, reader, ptr);
        return s_gproc32_new;
    }

    /**
     * DO NOT USE THIS CONSTRUCTOR, USE create*(GenericFactory ...) FACTORY METHODS INSTEAD.
     */
    public S_GPROC32_NEW() {}

    // Parses the record fields in on-disk order; the field sequence must not
    // be reordered. `offset`, `section` and `name` are inherited from
    // DebugSymbol and are filled in here as well.
    private void initS_GPROC32_NEW(short length, short type,
            FactoryBundledWithBinaryReader reader, int ptr) throws IOException {
        processDebugSymbol(length, type);

        pParent    = reader.readInt(ptr);   ptr += BinaryReader.SIZEOF_INT;
        pEnd       = reader.readInt(ptr);   ptr += BinaryReader.SIZEOF_INT;
        pNext      = reader.readInt(ptr);   ptr += BinaryReader.SIZEOF_INT;
        procLen    = reader.readInt(ptr);   ptr += BinaryReader.SIZEOF_INT;
        debugStart = reader.readInt(ptr);   ptr += BinaryReader.SIZEOF_INT;
        debugEnd   = reader.readInt(ptr);   ptr += BinaryReader.SIZEOF_INT;
        offset     = reader.readInt(ptr);   ptr += BinaryReader.SIZEOF_INT;
        procOffset = reader.readInt(ptr);   ptr += BinaryReader.SIZEOF_INT;
        section    = reader.readShort(ptr); ptr += BinaryReader.SIZEOF_SHORT;
        procType   = reader.readShort(ptr); ptr += BinaryReader.SIZEOF_SHORT;
        name       = reader.readAsciiString(ptr); ptr += name.length();
    }

    /** Returns the lexical parent scope symbol offset. */
    public int getParent() {
        return pParent;
    }

    /** Returns the end-of-scope symbol offset. */
    public int getEnd() {
        return pEnd;
    }

    /** Returns the next procedure symbol offset. */
    public int getNext() {
        return pNext;
    }

    /** Returns the debug start offset (end of prologue). */
    public int getDebugStart() {
        return debugStart;
    }

    /** Returns the debug end offset (start of epilogue). */
    public int getDebugEnd() {
        return debugEnd;
    }

    /**
     * Returns the procedure length.
     * @return the procedure length
     */
    public int getProcLen() {
        return procLen;
    }

    /**
     * Returns the procedure type.
     * @return the procedure type
     */
    public short getProcType() {
        return procType;
    }

    /**
     * Returns the procedure offset.
     * @return the procedure offset
     */
    public int getProcOffset() {
        return procOffset;
    }
}
apache-2.0
google/mako
cxx/clients/downsampler/standard_downsampler_test.cc
53105
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // see the license for the specific language governing permissions and // limitations under the license. #include "cxx/clients/downsampler/standard_downsampler.h" #include <algorithm> #include <cmath> #include <type_traits> #include <utility> #include <vector> #include "glog/logging.h" #include "src/google/protobuf/descriptor.h" #include "src/google/protobuf/repeated_field.h" #include "gmock/gmock.h" #include "gtest/gtest.h" #include "absl/container/flat_hash_set.h" #include "absl/strings/str_cat.h" #include "cxx/clients/fileio/memory_fileio.h" #include "cxx/internal/filter_utils.h" #include "cxx/internal/pgmath.h" #include "spec/proto/mako.pb.h" namespace mako { namespace downsampler { static constexpr int kMetricValueMax = 200; static constexpr int kSampleErrorCountMax = 10; static constexpr int kBatchSizeMax = 100000; class StandardMetricDownsamplerTest : public ::testing::Test { protected: void SetUp() override { d_.SetFileIO(std::unique_ptr<mako::FileIO>( new mako::memory_fileio::FileIO())); } void ReseedDownsampler() { d_.Reseed(99999); } Downsampler d_; }; int CountPointsForMetricKey( const std::string& metric_key, const google::protobuf::RepeatedPtrField<mako::SampleBatch>& sample_batch_list) { int count = 0; for (const mako::SampleBatch& sample_batch : sample_batch_list) { for (const auto& sample_point : sample_batch.sample_point_list()) { for (const auto& metric_value : sample_point.metric_value_list()) { if (metric_value.value_key() == metric_key) { count++; } } } } return count; } 
int CountErrorsForSampler( const std::string& sampler_name, const google::protobuf::RepeatedPtrField<mako::SampleBatch>& sample_batch_list) { int count = 0; for (const mako::SampleBatch& sample_batch : sample_batch_list) { if (sampler_name.empty()) { count += sample_batch.sample_error_list_size(); } else { for (const mako::SampleError& sample_error : sample_batch.sample_error_list()) { if (sample_error.sampler_name() == sampler_name) { count++; } } } } return count; } int CountErrors( const google::protobuf::RepeatedPtrField<mako::SampleBatch>& sample_batch_list) { return CountErrorsForSampler("", sample_batch_list); } SampleRecord CreateSampleRecord( double input_value, const std::vector<std::pair<std::string, double>>& metrics, const std::vector<std::pair<std::string, std::string>>& aux_data = {}) { SampleRecord sr; SamplePoint* sp = sr.mutable_sample_point(); sp->set_input_value(input_value); for (auto pair : metrics) { KeyedValue* kv = sp->add_metric_value_list(); kv->set_value_key(pair.first); kv->set_value(pair.second); } for (auto kv : aux_data) { (*sp->mutable_aux_data())[kv.first] = kv.second; } return sr; } DownsamplerInput CreateDownsamplerInput( const std::vector<std::string>& files, int sample_error_count_max = kSampleErrorCountMax, int metric_value_count_max = kMetricValueMax, int batch_size_max = kBatchSizeMax) { mako::DownsamplerInput di; for (const auto& file : files) { mako::SampleFile* sample_file = di.add_sample_file_list(); sample_file->set_file_path(file); sample_file->set_sampler_name(absl::StrCat("Sampler", file)); } // Create RunInfo di.mutable_run_info()->set_benchmark_key("benchmark_key"); di.mutable_run_info()->set_run_key("run_key"); di.mutable_run_info()->set_timestamp_ms(123456); di.set_sample_error_count_max(sample_error_count_max); di.set_metric_value_count_max(metric_value_count_max); di.set_batch_size_max(batch_size_max); return di; } void WriteFile(const std::string& file_path, const std::vector<mako::SampleRecord>& data) { 
mako::memory_fileio::FileIO fileio; ASSERT_TRUE(fileio.Open(file_path, mako::FileIO::AccessMode::kWrite)); for (const auto& d : data) { ASSERT_TRUE(fileio.Write(d)); } ASSERT_TRUE(fileio.Close()); } TEST_F(StandardMetricDownsamplerTest, MissingFileIO) { Downsampler d; mako::DownsamplerOutput out; std::string err = d.Downsample(CreateDownsamplerInput({}), &out); ASSERT_NE("", err); } TEST_F(StandardMetricDownsamplerTest, InvalidDownsamplerInput) { mako::DownsamplerOutput out; mako::DownsamplerInput in = CreateDownsamplerInput({}); // invalidate the input in.clear_run_info(); std::string err = d_.Downsample(in, &out); ASSERT_NE("", err); } TEST_F(StandardMetricDownsamplerTest, NoFilesToDownsample) { mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(CreateDownsamplerInput({}), &out)); ASSERT_EQ(0, out.sample_batch_list_size()); } TEST_F(StandardMetricDownsamplerTest, NoSuchFiles) { mako::DownsamplerOutput out; ASSERT_NE("", d_.Downsample(CreateDownsamplerInput({"NoSuchFile"}), &out)); } TEST_F(StandardMetricDownsamplerTest, EmptyFile) { mako::DownsamplerOutput out; mako::memory_fileio::FileIO fileio; fileio.Open("file", mako::FileIO::AccessMode::kWrite); fileio.Close(); ASSERT_EQ("", d_.Downsample(CreateDownsamplerInput({"file"}), &out)); ASSERT_EQ(0, out.sample_batch_list_size()); } TEST_F(StandardMetricDownsamplerTest, SmallBatchSizeToForceMultiBatchCreation) { mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); // Set a small batch size so we get lots of batches. 
in.set_batch_size_max(3000); std::vector<SampleRecord> sample_records; for (int i = 0; i < 200; i++) { mako::SampleRecord sb = CreateSampleRecord(i, {{"y", i}}); sb.mutable_sample_error()->set_error_message("An Error message"); sb.mutable_sample_error()->set_input_value(i); sb.mutable_sample_error()->set_sampler_name(absl::StrCat("Sampler", i)); sample_records.push_back(sb); ASSERT_GT(200, sb.ByteSizeLong()); } WriteFile("file1", sample_records); mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(in, &out)); ASSERT_GT(out.sample_batch_list_size(), 1); } TEST_F(StandardMetricDownsamplerTest, NoDataRequest) { mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); in.set_metric_value_count_max(0); in.set_sample_error_count_max(0); std::vector<SampleRecord> sample_records; for (int i = 0; i < 200; i++) { mako::SampleRecord sb = CreateSampleRecord(i, {{"y", i}}); sb.mutable_sample_error()->set_error_message("An Error message"); sb.mutable_sample_error()->set_input_value(i); sb.mutable_sample_error()->set_sampler_name(absl::StrCat("Sampler", i)); sample_records.push_back(sb); } WriteFile("file1", sample_records); mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(in, &out)); ASSERT_EQ(0, out.sample_batch_list_size()); } TEST_F(StandardMetricDownsamplerTest, SampleBatchesSortedByInputValue) { std::vector<SampleRecord> sample_records; for (int input_value : {4, 1, 5, 3, 2}) { mako::SampleRecord sb = CreateSampleRecord(input_value, {{"y", input_value}}); sb.mutable_sample_error()->set_error_message("An Error message"); sb.mutable_sample_error()->set_input_value(input_value); sb.mutable_sample_error()->set_sampler_name( absl::StrCat("Sampler", input_value)); sample_records.push_back(sb); } WriteFile("file1", sample_records); mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(CreateDownsamplerInput({"file1"}), &out)); ASSERT_EQ(1, out.sample_batch_list_size()); std::vector<int> sorted_input_values({1, 2, 3, 4, 5}); // Check sort order of sample 
points std::vector<int> actual_sample_point_input_values; for (const auto& sample_point : out.sample_batch_list(0).sample_point_list()) { actual_sample_point_input_values.push_back(sample_point.input_value()); } ASSERT_EQ(sorted_input_values, actual_sample_point_input_values); // Check sort order of errors std::vector<int> actual_sample_error_input_values; for (const auto& sample_error : out.sample_batch_list(0).sample_error_list()) { actual_sample_error_input_values.push_back(sample_error.input_value()); } ASSERT_EQ(sorted_input_values, actual_sample_error_input_values); } TEST_F(StandardMetricDownsamplerTest, DuplicateMetricKeysInSampleRecord) { std::string duplicate_key = "m1"; mako::SampleRecord sb = CreateSampleRecord( 1, {{duplicate_key, 1}, {duplicate_key, 2}, {"m2", 3}}); WriteFile("file3", {sb}); mako::DownsamplerOutput out; std::string err = d_.Downsample(CreateDownsamplerInput({"file3"}), &out); ASSERT_EQ("", err); } TEST_F(StandardMetricDownsamplerTest, InvalidSampleRecord) { mako::SampleRecord sr; WriteFile("file4", {sr}); mako::DownsamplerOutput out; std::string err = d_.Downsample(CreateDownsamplerInput({"file4"}), &out); EXPECT_EQ("SampleRecord must contain either sample_point or sample_error.", err); } TEST_F(StandardMetricDownsamplerTest, MetricSetTooBig) { // If we can only save 1 metricValue but a single SamplePoint has > 1 metric // inside. mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); in.set_metric_value_count_max(1); mako::SampleRecord sb = CreateSampleRecord(1, {{"y1,y2", 1}, {"y2", 2}, {"y3", 3}}); // We have more than a single metric packed ASSERT_GT(sb.sample_point().metric_value_list_size(), 1); WriteFile("file1", {sb}); mako::DownsamplerOutput out; ASSERT_NE("", d_.Downsample(in, &out)); // But if raise size, then should work. 
mako::DownsamplerOutput out2; in.set_metric_value_count_max(10); ASSERT_EQ("", d_.Downsample(in, &out2)); } TEST_F(StandardMetricDownsamplerTest, PointBatchTooBig) { mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); // Reset batch size so anything is invalid. in.set_batch_size_max(1); mako::SampleRecord sb = CreateSampleRecord(1, {{"y1", 1}, {"y2", 2}, {"y3", 3}}); ASSERT_GT(sb.ByteSizeLong(), 1); WriteFile("file1", {sb}); mako::DownsamplerOutput out; ASSERT_NE("", d_.Downsample(in, &out)); // But if raise size, then should work. in.set_batch_size_max(sb.ByteSizeLong() * 2); mako::DownsamplerOutput out2; ASSERT_EQ("", d_.Downsample(in, &out2)); ASSERT_EQ(1, CountPointsForMetricKey("y1", out2.sample_batch_list())); ASSERT_EQ(1, CountPointsForMetricKey("y2", out2.sample_batch_list())); ASSERT_EQ(1, CountPointsForMetricKey("y3", out2.sample_batch_list())); } TEST_F(StandardMetricDownsamplerTest, ErrorBatchTooBig) { mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); std::vector<SampleRecord> sample_records; for (int i = 0; i < 100; i++) { mako::SampleRecord sb = CreateSampleRecord(1, {}); sb.clear_sample_point(); sb.mutable_sample_error()->set_error_message("something"); sb.mutable_sample_error()->set_input_value(i); sb.mutable_sample_error()->set_sampler_name(absl::StrCat("Sampler", i)); sample_records.push_back(sb); ASSERT_GT(sb.ByteSizeLong(), 1); } WriteFile("file1", sample_records); // Reset batch size so anything is invalid. in.set_batch_size_max(1); mako::DownsamplerOutput out; ASSERT_NE("", d_.Downsample(in, &out)); // But if raise size, then should work. in.set_batch_size_max(1024); mako::DownsamplerOutput out2; ASSERT_EQ("", d_.Downsample(in, &out2)); ASSERT_GT(CountErrors(out2.sample_batch_list()), 0); } TEST_F(StandardMetricDownsamplerTest, ErrorMessagesTruncated) { mako::DownsamplerOutput out; mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); // Error message should not be truncated. 
mako::SampleRecord small_sample_record = CreateSampleRecord(1, {{"y", 1}}); std::string small_str; small_str.resize(kMaxErrorStringLength - 1); small_sample_record.mutable_sample_error()->set_error_message(small_str); small_sample_record.mutable_sample_error()->set_input_value(1); small_sample_record.mutable_sample_error()->set_sampler_name("s1"); ASSERT_LT(small_str.length(), kMaxErrorStringLength); // Error message should not be truncated. mako::SampleRecord exact_sample_record = CreateSampleRecord(2, {{"y", 1}}); std::string exact_str; exact_str.resize(kMaxErrorStringLength); exact_sample_record.mutable_sample_error()->set_error_message(exact_str); exact_sample_record.mutable_sample_error()->set_input_value(1); exact_sample_record.mutable_sample_error()->set_sampler_name("s1"); ASSERT_EQ(exact_str.length(), kMaxErrorStringLength); // Error message should be truncated. mako::SampleRecord big_sample_record = CreateSampleRecord(3, {{"y", 1}}); std::string big_str; big_str.resize(kMaxErrorStringLength + 1); big_sample_record.mutable_sample_error()->set_error_message(big_str); big_sample_record.mutable_sample_error()->set_input_value(1); big_sample_record.mutable_sample_error()->set_sampler_name("s1"); ASSERT_GT(big_str.length(), kMaxErrorStringLength); // These are big, so set our size very large. 
in.set_batch_size_max(big_sample_record.ByteSizeLong() * 3); WriteFile("file1", {small_sample_record, exact_sample_record, big_sample_record}); ASSERT_EQ("", d_.Downsample(in, &out)); for (const mako::SampleBatch& sample_batch : out.sample_batch_list()) { ASSERT_GT(sample_batch.sample_error_list_size(), 0); for (const mako::SampleError& sample_error : sample_batch.sample_error_list()) { ASSERT_LE(sample_error.error_message().size(), kMaxErrorStringLength); } } } TEST_F(StandardMetricDownsamplerTest, SingleMetricDownsample) { mako::memory_fileio::FileIO fileio; // pair = # of SampleRecord to create with points, # of SampleRecords to // create with // errors // If num is > Max then we expect it to truncated, otherwise should not // downsample. std::vector<std::pair<int, int>> tests = { // Only points std::make_pair(1, 0), std::make_pair(kMetricValueMax - 1, 0), std::make_pair(kMetricValueMax, 0), std::make_pair(kMetricValueMax + 1, 0), std::make_pair(kMetricValueMax * 10, 0), // Only errors std::make_pair(0, 1), std::make_pair(0, kSampleErrorCountMax - 1), std::make_pair(0, kSampleErrorCountMax), std::make_pair(0, kSampleErrorCountMax + 1), std::make_pair(0, kSampleErrorCountMax * 10), // Mixed std::make_pair(1, 1), std::make_pair(kMetricValueMax - 1, kSampleErrorCountMax - 1), std::make_pair(kMetricValueMax - 1, kSampleErrorCountMax * 10), std::make_pair(kMetricValueMax * 10, kSampleErrorCountMax - 1), std::make_pair(kMetricValueMax, kSampleErrorCountMax), std::make_pair(kMetricValueMax + 1, kSampleErrorCountMax + 1), std::make_pair(kMetricValueMax * 10, kSampleErrorCountMax * 10), }; for (const auto& pair : tests) { int number_of_points = pair.first; int number_of_errors = pair.second; LOG(INFO) << "Current testing with: " << number_of_points << " points and " << number_of_errors << " errors."; fileio.Clear(); mako::DownsamplerOutput out; std::vector<mako::SampleRecord> data; for (int i = 0; i < number_of_points; i++) { data.push_back(CreateSampleRecord(i, {{"y", 
i}})); } for (int i = 0; i < number_of_errors; i++) { SampleRecord sr = CreateSampleRecord(i, {}); sr.clear_sample_point(); sr.mutable_sample_error()->set_error_message(absl::StrCat("Error # ", i)); sr.mutable_sample_error()->set_input_value(i); sr.mutable_sample_error()->set_sampler_name("badsampler"); data.push_back(sr); } WriteFile("file1", data); mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); ASSERT_EQ("", d_.Downsample(in, &out)); if (number_of_points > 0) { if (number_of_points > kMetricValueMax) { ASSERT_EQ(kMetricValueMax, CountPointsForMetricKey("y", out.sample_batch_list())); } else { ASSERT_EQ(number_of_points, CountPointsForMetricKey("y", out.sample_batch_list())); } } if (number_of_errors > 0) { if (number_of_errors > kSampleErrorCountMax) { ASSERT_EQ(kSampleErrorCountMax, CountErrors(out.sample_batch_list())); } else { ASSERT_EQ(number_of_errors, CountErrors(out.sample_batch_list())); } } } } struct SamplerInstructions { SamplerInstructions(const std::string& sampler_name, int file_count, int metric_count, int expected_metric_count, int error_count, int expected_error_count) : sampler_name(sampler_name), file_count(file_count), metric_count(metric_count), expected_metric_count(expected_metric_count), error_count(error_count), expected_error_count(expected_error_count) {} std::string sampler_name; // The above totals are written to *each* of these files. int file_count; int metric_count; int expected_metric_count; int error_count; int expected_error_count; }; struct MultiSamplerTest { MultiSamplerTest(const std::string& test_name, const std::vector<SamplerInstructions>& sampler_instructions) : test_name(test_name), sampler_instructions(sampler_instructions) {} std::string test_name; std::vector<SamplerInstructions> sampler_instructions; }; TEST_F(StandardMetricDownsamplerTest, MultiSamplerDownsample) { mako::memory_fileio::FileIO fileio; // Sampler name mapped to how many points we'd like that sampler to create. 
std::vector<MultiSamplerTest> tests; tests.push_back( MultiSamplerTest("two-samplers-with-few-points", { SamplerInstructions("Sampler1", 1, // file_count 1, // metric count 1, // expected_metric_count 0, // error_count 0), // expected_error_count SamplerInstructions("Sampler2", 1, // file_count 1, // metric count 1, // expected_metric_count 0, // error_count 0), // expected_error_count })); tests.push_back(MultiSamplerTest( "no-data", { SamplerInstructions("Sampler1", 1, // file_count 0, // metric count 0, // expected_metric_count 0, // error_count 0), // expected_error_count SamplerInstructions("Sampler2", 1, // file_count 0, // metric count 0, // expected_metric_count 0, // error_count 0), // expected_error_count })); tests.push_back(MultiSamplerTest( "two-samplers-with-max-points", { SamplerInstructions("Sampler1", 1, // file_count kMetricValueMax, // metric count kMetricValueMax / 2, // expected_metric_count 0, // error_count 0), // expected_error_count SamplerInstructions("Sampler2", 1, // file_count kMetricValueMax, // metric count kMetricValueMax / 2, // expected_metric_count 0, // error_count 0), // expected_error_count })); tests.push_back(MultiSamplerTest( "two-samplers-with-zero-and-max-points", { SamplerInstructions("Sampler1", 1, // file_count 0, // metric count 0, // expected_metric_count 0, // error_count 0), // expected_error_count SamplerInstructions("Sampler2", 1, // file_count kMetricValueMax, // metric count kMetricValueMax, // expected_metric_count 0, // error_count 0), // expected_error_count })); tests.push_back(MultiSamplerTest( "two-samplers-with-few-and-over-max-points", { SamplerInstructions("Sampler1", 1, // file_count 2, // metric count 2, // expected_metric_count 0, // error_count 0), // expected_error_count SamplerInstructions("Sampler2", 1, // file_count 2 * kMetricValueMax, // metric count kMetricValueMax - 2, // expected_metric_count 0, // error_count 0), // expected_error_count })); tests.push_back(MultiSamplerTest( 
"two-samplers-with-few-errors-and-over-max-points", { SamplerInstructions("Sampler1", 1, // file_count 2, // metric count 2, // expected_metric_count 2, // error_count 2), // expected_error_count SamplerInstructions("Sampler2", 1, // file_count 2 * kMetricValueMax, // metric count kMetricValueMax - 2, // expected_metric_count 0, // error_count 0), // expected_error_count })); tests.push_back(MultiSamplerTest( "two-samplers-with-few-errors-and-over-max-errors", { SamplerInstructions("Sampler1", 1, // file_count 2, // metric count 2, // expected_metric_count 2, // error_count 2), // expected_error_count SamplerInstructions( "Sampler2", 1, // file_count 0, // metric count 0, // expected_metric_count kSampleErrorCountMax * 2, // error_count kSampleErrorCountMax - 2), // expected_error_count })); tests.push_back(MultiSamplerTest( "two-samplers-with-over-on-everything", { SamplerInstructions( "Sampler1", 1, // file_count kMetricValueMax * 2, // metric count kMetricValueMax / 2, // expected_metric_count kSampleErrorCountMax * 2, // error_count kSampleErrorCountMax / 2), // expected_error_count SamplerInstructions( "Sampler2", 1, // file_count kMetricValueMax * 2, // metric count kMetricValueMax / 2, // expected_metric_count kSampleErrorCountMax * 2, // error_count kSampleErrorCountMax / 2), // expected_error_count })); tests.push_back(MultiSamplerTest( "single-sampler-with-multiple-files", { SamplerInstructions("Sampler1", 2, // file_count kMetricValueMax / 2, // metric count kMetricValueMax, // expected_metric_count kSampleErrorCountMax / 2, // error_count kSampleErrorCountMax), // expected_error_count })); tests.push_back(MultiSamplerTest( "two-samplers-with-multiple-files", { SamplerInstructions("Sampler1", 2, // file_count 2, // metric count 2 * 2, // expected_metric_count 2, // error_count 2 * 2), // expected_error_count SamplerInstructions( "Sampler2", 2, // file_count kMetricValueMax, // metric count kMetricValueMax - 2 * 2, // expected_metric_count 
kSampleErrorCountMax, // error_count kSampleErrorCountMax - 2 * 2), // expected_error_count })); int expected = (kMetricValueMax - (kMetricValueMax / 20)) / 2; tests.push_back(MultiSamplerTest( "two-samplers-over-max-points-one-few-points", { SamplerInstructions("Sampler1", 1, // file_count kMetricValueMax * 5, // metric count expected, // expected_metric_count 0, // error_count 0), // expected error_count SamplerInstructions("Sampler2", 1, // file_count kMetricValueMax * 5, // metric count expected, // expected_metric_count 0, // error_count 0), // expected error_count SamplerInstructions("Sampler3", 1, // file_count kMetricValueMax / 20, // metric count kMetricValueMax / 20, // expected_metric_count 0, // error_count 0), // expected error_count })); for (const MultiSamplerTest& sampler_test : tests) { mako::DownsamplerOutput out; std::vector<std::string> files; LOG(INFO) << "======================================="; LOG(INFO) << "Test name : " << sampler_test.test_name; for (const SamplerInstructions& si : sampler_test.sampler_instructions) { std::vector<mako::SampleRecord> data; LOG(INFO) << " Sampler name: " << si.sampler_name; LOG(INFO) << " File Count: " << si.file_count; LOG(INFO) << " Metric count: " << si.metric_count; LOG(INFO) << " Expected metric count: " << si.expected_metric_count; LOG(INFO) << " Error count: " << si.error_count; LOG(INFO) << " Expected error count: " << si.expected_error_count; for (int i = 0; i < si.metric_count; i++) { // Use the sampler name as the metric name as well, to keep them // separate data.push_back(CreateSampleRecord(i, {{si.sampler_name, i}})); } for (int i = 0; i < si.error_count; i++) { mako::SampleRecord sr = CreateSampleRecord(i, {}); sr.clear_sample_point(); sr.mutable_sample_error()->set_error_message(std::to_string(i)); sr.mutable_sample_error()->set_input_value(i); sr.mutable_sample_error()->set_sampler_name(si.sampler_name); data.push_back(sr); } for (int i = 0; i < si.file_count; ++i) { // Use sampler name as 
file name std::string file_name = absl::StrCat(si.sampler_name, "_", i); WriteFile(file_name, data); files.push_back(file_name); } } ASSERT_EQ("", d_.Downsample(CreateDownsamplerInput(files), &out)); // NOTE: We used the name of samplers as files name as well as metric names. for (const SamplerInstructions& si : sampler_test.sampler_instructions) { ASSERT_NEAR( si.expected_metric_count, CountPointsForMetricKey(si.sampler_name, out.sample_batch_list()), 1) << out.DebugString(); ASSERT_NEAR( si.expected_error_count, CountErrorsForSampler(si.sampler_name, out.sample_batch_list()), 1) << out.DebugString(); } } LOG(INFO) << "======================================="; } // extract inputs of points containing a specific metric from the batches std::vector<double> GetInputsWithMetric( const std::string& metric_name, const google::protobuf::RepeatedPtrField<mako::SampleBatch>& batches) { std::vector<mako::SamplePoint> point_list; DataFilter data_filter; data_filter.set_data_type(mako::DataFilter::METRIC_SAMPLEPOINTS); data_filter.set_value_key(metric_name); bool no_sort_data = false; std::vector<internal::DataPoint> results; auto err_str = mako::internal::ApplyFilter( mako::BenchmarkInfo{}, mako::RunInfo{}, batches.pointer_begin(), batches.pointer_end(), data_filter, no_sort_data, &results); EXPECT_EQ("", err_str); std::vector<double> inputs(results.size()); std::transform(results.begin(), results.end(), inputs.begin(), [](const internal::DataPoint p) { return p.x_value; }); return inputs; } // computes the expected standard deviation for a discrete uniform distribution // in the range [a,b] double expected_std_dev_uniform(int a, int b) { return std::sqrt((std::pow(b - a + 1, 2) - 1) / 12); } TEST_F(StandardMetricDownsamplerTest, DoubleMetricDownsampledDistribution) { mako::memory_fileio::FileIO().Clear(); // since the downsampler is stochastic, there's a chance a degenerate // downsampling could produce a failing test. 
We make the downsampling // deterministic by hardcoding the seed ReseedDownsampler(); mako::DownsamplerOutput out; std::vector<mako::SampleRecord> data; // uniformly distribute both metrics' input values over the // range [0,kMetricValueMax*2) const int num_points = kMetricValueMax * 2; for (int i = 0; i < num_points; ++i) { data.push_back(CreateSampleRecord(i, {{"m1", i}})); } for (int i = 0; i < num_points; ++i) { data.push_back(CreateSampleRecord(i, {{"m2", i}})); } WriteFile("file1", data); ASSERT_EQ("", d_.Downsample(CreateDownsamplerInput({"file1"}), &out)); // note, we use (numPointsPerMetric-1) in the below two cases because our // discrete range is [0, numPointsPerMetric-1] double expected_mean = (num_points - 1) / 2.0; double expected_std_dev = expected_std_dev_uniform(0, num_points - 1); // check that downsampled m1 and m2 results' input values are distributed // uniformly along the same range as before // if we're within 10% of expected mean and standard deviation, we know // the downsampled results are "pretty good" mako::internal::RunningStats stats; stats.AddVector(GetInputsWithMetric("m1", out.sample_batch_list())); EXPECT_NEAR(expected_mean, stats.Mean().value, expected_mean * 0.1); EXPECT_NEAR(expected_std_dev, stats.Stddev().value, expected_std_dev * 0.1); // now check that downsampled m1 results are distributed evenly stats = mako::internal::RunningStats(); stats.AddVector(GetInputsWithMetric("m2", out.sample_batch_list())); EXPECT_NEAR(expected_mean, stats.Mean().value, expected_mean * 0.1); EXPECT_NEAR(expected_std_dev, stats.Stddev().value, expected_std_dev * 0.1); } TEST_F(StandardMetricDownsamplerTest, DoubleMetricDownsampledEvenly) { // This test makes sure we're not "off by 1" when downsampling a // greater-than-max number of two different metrics. This happens when a // metric goes in and both metrics are consuming max/2 (their fair share of) // slots. 
When choosing which metric from which to evict we should pick the // incoming metric so that, once that incoming metric is inserted, we haven't // just unbalanced the shares. mako::memory_fileio::FileIO().Clear(); // fill up our list of samples to be "full" -- both "m1" and "m2" are at their // fair share std::vector<mako::SampleRecord> recordsFull; for (auto metric : {"m1", "m2"}) { for (int i = 0; i < kMetricValueMax / 2; ++i) { recordsFull.push_back(CreateSampleRecord(i, {{metric, i}})); } } // In the case of a tie, a given implementation might be "off-by-1" by always // picking the "first seen" metric (in this case "m1") or by always picking // the "last seen" metric (in this case "m2"). We can test for both. for (auto metric : {"m1", "m2"}) { LOG(INFO) << "Testing for even downsampling after adding " << metric; mako::DownsamplerOutput out; auto records = recordsFull; int i = recordsFull.size(); records.push_back(CreateSampleRecord(i, {{metric, i}})); std::string fileName = absl::StrCat("file", metric); WriteFile(fileName, records); ASSERT_EQ("", d_.Downsample(CreateDownsamplerInput({fileName}), &out)); int num_m1 = GetInputsWithMetric("m1", out.sample_batch_list()).size(); int num_m2 = GetInputsWithMetric("m2", out.sample_batch_list()).size(); ASSERT_EQ(0, kMetricValueMax % 2) << "Make kMetricValueMax even please!"; ASSERT_EQ(num_m1, num_m2); } } TEST_F(StandardMetricDownsamplerTest, SampleBatchSizeTest) { mako::KeyedValue kv; kv.set_value(1); // 9 bytes:key 1;field 8 kv.set_value_key("m1"); // 4 bytes:key 1;length 1;field 2 constexpr int expected_kv_size = 13; ASSERT_EQ(expected_kv_size, kv.ByteSizeLong()); mako::SamplePoint p; p.set_input_value(1); // 9 bytes:key 1;field 8 *p.add_metric_value_list() = kv; // 15 bytes:key 1;length 1;field 13 constexpr int expected_point_size = 24; ASSERT_EQ(expected_point_size, p.ByteSizeLong()); (*p.mutable_aux_data())["key"] = "value"; ASSERT_LT(expected_point_size, p.ByteSizeLong()); mako::DownsamplerOutput out; 
mako::SampleBatch* batch = out.add_sample_batch_list(); mako::SampleBatch* expected_batch = batch; constexpr int expected_empty_sample_batch_size = 0; ASSERT_EQ(expected_empty_sample_batch_size, batch->ByteSizeLong()); int64_t calculated_batch_size = 0; auto field = batch->GetDescriptor()->FindFieldByName("sample_point_list"); // key 1; length 1; field expected_point_size int expected_batch_size = expected_point_size + 2; int num_points = 1000 / expected_batch_size; for (int i = 0; i < num_points; ++i) { std::string err = AddBatch("benchmark", "run", 1000, field->index(), &p, &batch, &calculated_batch_size, &out); ASSERT_EQ("", err); ASSERT_EQ(expected_batch, batch) << "New batch created unexpectedly."; ASSERT_EQ(expected_batch_size * (i + 1), calculated_batch_size); ASSERT_EQ(expected_batch_size * (i + 1), batch->ByteSizeLong()); } std::string err = AddBatch("benchmark", "run", 1000, field->index(), &p, &batch, &calculated_batch_size, &out); ASSERT_EQ("", err); ASSERT_NE(expected_batch, batch) << "New batch should have been created."; } TEST_F(StandardMetricDownsamplerTest, PointBiggerThanSampleBatchMaxSizeTest) { mako::KeyedValue kv; kv.set_value(1); // 9 bytes:key 1;field 8 kv.set_value_key("m1"); // 4 bytes:key 1;length 1;field 2 constexpr int expected_kv_size = 13; ASSERT_EQ(expected_kv_size, kv.ByteSizeLong()); mako::SamplePoint p; p.set_input_value(1); // 9 bytes:key 1;field 8 *p.add_metric_value_list() = kv; // 15 bytes:key 1;length 1;field 13 constexpr int expected_point_size = 24; ASSERT_EQ(expected_point_size, p.ByteSizeLong()); mako::DownsamplerOutput out; mako::SampleBatch* batch = out.add_sample_batch_list(); constexpr int expected_empty_sample_batch_size = 0; ASSERT_EQ(expected_empty_sample_batch_size, batch->ByteSizeLong()); int64_t calculated_batch_size = 0; auto field = batch->GetDescriptor()->FindFieldByName("sample_point_list"); std::string err = AddBatch("benchmark", "run", expected_point_size, field->index(), &p, &batch, 
&calculated_batch_size, &out); ASSERT_NE("", err) << "Point should have been too big to put in a batch"; ASSERT_EQ(0, batch->sample_point_list_size()); } TEST_F(StandardMetricDownsamplerTest, DownsamplingWorstCaseTest) { std::vector<std::string> metrics; for (int i = 0; i < 1000; ++i) { metrics.emplace_back(absl::StrCat("m", i)); } std::vector<mako::SampleRecord> records; for (int i = 0; i < 1000; ++i) { SampleRecord sr; SamplePoint* sp = sr.mutable_sample_point(); sp->set_input_value(i); for (const auto& metric : metrics) { KeyedValue* kv = sp->add_metric_value_list(); kv->set_value_key(metric); kv->set_value(i); } records.emplace_back(std::move(sr)); } mako::DownsamplerOutput out; std::string fileName = "DownsamplingWorstCase"; WriteFile(fileName, records); int sample_error_count_max = 5000; int metric_value_count_max = 50000; int batch_size_max = 1000000; ASSERT_EQ("", d_.Downsample(CreateDownsamplerInput( {fileName}, sample_error_count_max, metric_value_count_max, batch_size_max), &out)); ASSERT_LE(out.sample_batch_list_size(), 5); int num_sample_points = 0; for (const auto& batch : out.sample_batch_list()) { num_sample_points += batch.sample_point_list_size(); } ASSERT_EQ(metric_value_count_max / 1000, num_sample_points); } TEST_F(StandardMetricDownsamplerTest, DownsamplingDownsamplesAnnotationsInSamplePoints) { // Write 10kb std::string int annotations_string_size = 10000; // String that we will be writing std::string annotation_string(annotations_string_size, 'a'); // To fill 3 times more than max annotations size allow to int number_of_annotations = kMaxAnnotationsSize / annotations_string_size * 3; int number_of_annotations_in_a_point = 4; int number_of_points = number_of_annotations / number_of_annotations_in_a_point; mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); // SampleBatch can be up to 2 times larger than the annotations. in.set_batch_size_max(kMaxAnnotationsSize * 2); // Allow big number of metrics size to fit all points. 
in.set_metric_value_count_max(number_of_annotations * 2); std::vector<SampleRecord> sample_records; for (int i = 0; i != number_of_points; i++) { mako::SampleRecord sb = CreateSampleRecord(i, {{"y", i}}); mako::SamplePoint* sample_point = sb.mutable_sample_point(); for (int j = 0; j != number_of_annotations_in_a_point; ++j) { sample_point->add_sample_annotations_list()->set_text(annotation_string); } sample_records.push_back(sb); } WriteFile("file1", sample_records); mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(in, &out)); EXPECT_EQ(out.sample_batch_list_size(), 1); // Total size in bytes of all annotations. We don't need exact size with the // encoding overhead here to make the test very simple. int total_annotations_size = 0; for (const mako::SampleBatch& batch : out.sample_batch_list()) { for (const mako::SamplePoint& sample_point : batch.sample_point_list()) { // If we have annotations in this SamplePoint, then check it. if (sample_point.sample_annotations_list_size()) { // Check that all annotations in one SamplePoint were preserved EXPECT_EQ(number_of_annotations_in_a_point, sample_point.sample_annotations_list_size()); for (const mako::SampleAnnotation& annotation : sample_point.sample_annotations_list()) { total_annotations_size += annotation.ByteSizeLong(); } } } } EXPECT_GT(total_annotations_size, kMaxAnnotationsSize * 0.8); EXPECT_LT(total_annotations_size, kMaxAnnotationsSize); } TEST_F(StandardMetricDownsamplerTest, DownsamplingRemovesAnnotationsOnOneSamplePointWithLargeAnnotations) { // Write 100kb strings int annotations_string_size = 100000; // String that we will be writing std::string annotation_string(annotations_string_size, 'a'); // We want to exceed the limit int annotations_count = 1 + kMaxAnnotationsSize / annotations_string_size; mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); in.set_batch_size_max(kMaxAnnotationsSize * 2); in.set_metric_value_count_max(10); std::vector<SampleRecord> sample_records; 
mako::SampleRecord sb = CreateSampleRecord(0, {{"y", 0}}); mako::SamplePoint* sample_point = sb.mutable_sample_point(); for (int j = 0; j != annotations_count; ++j) { sample_point->add_sample_annotations_list()->set_text(annotation_string); } sample_records.push_back(sb); WriteFile("file1", sample_records); mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(in, &out)); EXPECT_EQ(out.sample_batch_list_size(), 1); for (const mako::SampleBatch& batch : out.sample_batch_list()) { for (const mako::SamplePoint& sample_point : batch.sample_point_list()) { // We should not have any annotations here EXPECT_FALSE(sample_point.sample_annotations_list_size()); } } } TEST_F( StandardMetricDownsamplerTest, DownsamplingPreservesAnnotationsIfAdditionalMetricForAnnotationsIsAdded) { // This unit-test tests a workaround around downsampler downsampling // annotations. Here we have 10010 SamplePoints where only 10 points have // annotations. The downsampler should enforce maximum of 40 values stored. // // In usual case, each annotation is saved with a probability of 1/10010. // With an additional metric that is present only when we have annotations, // all 10 annotations should be saved. // // We will recommend this workaround for our clients that will experience // a significant amount of their annotations getting downsampled. mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); in.set_batch_size_max(10000); in.set_metric_value_count_max(40); std::vector<SampleRecord> records; for (int i = 0; i != 10000; ++i) { mako::SampleRecord sr = CreateSampleRecord(i, {{"y", i}}); records.push_back(sr); } // Write 10 points with annotations, adding an additional metric to preserve // the annotations. // // We have 40 maximum metrics to be saved, 20 should be saved for {"y"} // metric, 20 for {"y", "a"} metric. 20 metrics for {"y", "a"} means that 10 // records will be saved, so all of the annotations should be saved. 
// // Without the additional metric, each annotation has 1/10010 chance to be // saved. for (int i = 0; i != 10; ++i) { mako::SampleRecord sr = CreateSampleRecord(i, {{"y", i}, {"a", 0}}); sr.mutable_sample_point()->add_sample_annotations_list()->set_text("a"); records.push_back(sr); } WriteFile("file1", records); mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(in, &out)); EXPECT_EQ(out.sample_batch_list_size(), 1); int annotations_after_downsampling_count = 0; for (const mako::SampleBatch& batch : out.sample_batch_list()) { for (const mako::SamplePoint& sample_point : batch.sample_point_list()) { annotations_after_downsampling_count += sample_point.sample_annotations_list_size(); } } EXPECT_EQ(annotations_after_downsampling_count, 10); } TEST_F(StandardMetricDownsamplerTest, DownsamplingDoesNotDownsampleAnnotationsWhenCloseToLimits) { // Maximum size + 10 bytes on encoding (should be more than enough). std::string annotation_string(kMaxAnnotationsSize - 10, 'a'); mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); in.set_batch_size_max(kMaxAnnotationsSize * 2); in.set_metric_value_count_max(10); std::vector<SampleRecord> sample_records; mako::SampleRecord sb = CreateSampleRecord(0, {{"y", 0}}); mako::SamplePoint* sample_point = sb.mutable_sample_point(); sample_point->add_sample_annotations_list()->set_text(annotation_string); sample_records.push_back(sb); WriteFile("file1", sample_records); mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(in, &out)); EXPECT_EQ(out.sample_batch_list_size(), 1); int total_annotations_count = 0; for (const mako::SampleBatch& batch : out.sample_batch_list()) { for (const mako::SamplePoint& sample_point : batch.sample_point_list()) { total_annotations_count += sample_point.sample_annotations_list_size(); } } EXPECT_EQ(total_annotations_count, 1); } TEST_F(StandardMetricDownsamplerTest, DownsamplingDownsamplesAnnotationsWhenStringOfExactlyMaxSize) { // Maximum size + encoding should not fit. 
std::string annotation_string(kMaxAnnotationsSize, 'a'); mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); in.set_batch_size_max(kMaxAnnotationsSize * 2); in.set_metric_value_count_max(10); std::vector<SampleRecord> sample_records; mako::SampleRecord sb = CreateSampleRecord(0, {{"y", 0}}); mako::SamplePoint* sample_point = sb.mutable_sample_point(); sample_point->add_sample_annotations_list()->set_text(annotation_string); sample_records.push_back(sb); WriteFile("file1", sample_records); mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(in, &out)); EXPECT_EQ(out.sample_batch_list_size(), 1); int total_annotations_count = 0; for (const mako::SampleBatch& batch : out.sample_batch_list()) { for (const mako::SamplePoint& sample_point : batch.sample_point_list()) { total_annotations_count += sample_point.sample_annotations_list_size(); } } EXPECT_EQ(total_annotations_count, 0); } TEST_F(StandardMetricDownsamplerTest, AuxDataIsRemovedTest) { mako::DownsamplerInput in = CreateDownsamplerInput({"file1"}); // Set a small batch size so we get lots of batches. 
in.set_batch_size_max(3000); std::vector<SampleRecord> sample_records; for (int i = 0; i < 200; i++) { mako::SampleRecord sb = CreateSampleRecord(i, {{"y", i}}, {{"key1", "value1"}}); sb.mutable_sample_error()->set_error_message("An Error message"); sb.mutable_sample_error()->set_input_value(i); sb.mutable_sample_error()->set_sampler_name(absl::StrCat("Sampler", i)); sample_records.push_back(sb); ASSERT_GT(200, sb.ByteSizeLong()); ASSERT_EQ(sb.sample_point().aux_data_size(), 1); } WriteFile("file1", sample_records); mako::DownsamplerOutput out; ASSERT_EQ("", d_.Downsample(in, &out)); ASSERT_GT(out.sample_batch_list_size(), 1); for (auto& batch : out.sample_batch_list()) { for (auto& point : batch.sample_point_list()) { EXPECT_EQ(point.aux_data_size(), 0); } } } TEST_F(StandardMetricDownsamplerTest, UniformSampleEdgeCase) { constexpr int kNumSampleFiles = 16; constexpr int kNumPointsPerFile = 1000; // Use smaller metric count maximum so test can be smaller. Actual value isn't // important, as long as kMetricValueCountMax/2 is not divisible by 3. constexpr int kMetricValueCountMax = 5000; // Since the downsampler is stochastic, there's a chance a degenerate // downsampling could produce a failing test. We make the downsampling // deterministic by hardcoding the seed. ReseedDownsampler(); std::vector<std::string> file_names; for (int i = 0; i < kNumSampleFiles; i++) { std::string file_name = absl::StrCat("file", i); file_names.push_back(file_name); mako::memory_fileio::FileIO fio; ASSERT_TRUE(fio.Open(file_name, fio.kWrite)); for (int j = 0; j < kNumPointsPerFile; j++) { int index = j * kNumSampleFiles + i; // Each file contains points falling into two metric sets. This makes each // metric set's fair share of metrics kMetricValueCountMax / 2. // One of the points has 3 metrics. Since kMetricValueCountMax / 2 is not // divisible by 3, this means when we begin processing a record, we will // not be at or over quota for that metric set. 
ASSERT_TRUE(fio.Write(CreateSampleRecord( index, {{"a", index}, {"b", index}, {"c", index}}))); ASSERT_TRUE(fio.Write(CreateSampleRecord(index, {{"d", index}}))); } } mako::DownsamplerInput in = CreateDownsamplerInput(file_names); in.set_metric_value_count_max(kMetricValueCountMax); mako::DownsamplerOutput out; ASSERT_TRUE(d_.Downsample(in, &out).empty()); double min_input_value = 1.0E99; for (const auto& batch : out.sample_batch_list()) { for (const auto& point : batch.sample_point_list()) { for (const auto& metric : point.metric_value_list()) { if (metric.value_key() == "a") { if (point.input_value() < min_input_value) { min_input_value = point.input_value(); } break; } } } } ASSERT_LT(min_input_value, kNumPointsPerFile / 10); } TEST_F(StandardMetricDownsamplerTest, FirstNErrorsSaved) { constexpr int kMaxErrors = 10000; std::vector<std::string> file_names; std::string file_name = "file"; file_names.push_back(file_name); mako::memory_fileio::FileIO fio; ASSERT_TRUE(fio.Open(file_name, fio.kWrite)); // Create kMaxErrors errors with input value 0, then kMaxErrors errors with // input value 1. for (int i = 0; i < kMaxErrors; i++) { mako::SampleRecord record; auto* sample_error = record.mutable_sample_error(); sample_error->set_input_value(0); sample_error->set_error_message("error"); ASSERT_TRUE(fio.Write(record)); } for (int i = 0; i < kMaxErrors; i++) { mako::SampleRecord record; auto* sample_error = record.mutable_sample_error(); sample_error->set_input_value(1); sample_error->set_error_message("error"); ASSERT_TRUE(fio.Write(record)); } ASSERT_TRUE(fio.Close()); mako::DownsamplerInput in = CreateDownsamplerInput(file_names); in.set_sample_error_count_max(kMaxErrors); mako::DownsamplerOutput out; ASSERT_TRUE(d_.Downsample(in, &out).empty()); // If the downsampling is uniform, there should be some errors in the output // with both input values. 
absl::flat_hash_set<double> input_values; for (const auto& batch : out.sample_batch_list()) { for (const auto& sample_error : batch.sample_error_list()) { input_values.insert(sample_error.input_value()); } } EXPECT_THAT(input_values, testing::UnorderedElementsAre(0)); } TEST_F(StandardMetricDownsamplerTest, ErrorsSampledRandomly) { constexpr int kMaxErrors = 10000; std::vector<std::string> file_names; std::string file_name = "file"; file_names.push_back(file_name); mako::memory_fileio::FileIO fio; ASSERT_TRUE(fio.Open(file_name, fio.kWrite)); // Create kMaxErrors errors with input value 0, then kMaxErrors errors with // input value 1. for (int i = 0; i < kMaxErrors; i++) { mako::SampleRecord record; auto* sample_error = record.mutable_sample_error(); sample_error->set_input_value(0); sample_error->set_error_message("error"); ASSERT_TRUE(fio.Write(record)); } for (int i = 0; i < kMaxErrors; i++) { mako::SampleRecord record; auto* sample_error = record.mutable_sample_error(); sample_error->set_input_value(1); sample_error->set_error_message("error"); ASSERT_TRUE(fio.Write(record)); } ASSERT_TRUE(fio.Close()); mako::DownsamplerInput in = CreateDownsamplerInput(file_names); in.set_sample_error_count_max(kMaxErrors); mako::DownsamplerOutput out; ASSERT_TRUE(d_.Downsample(in, &out).empty()); // If the downsampling is uniform, there should be some errors in the output // with both input values. absl::flat_hash_set<double> input_values; for (const auto& batch : out.sample_batch_list()) { for (const auto& sample_error : batch.sample_error_list()) { input_values.insert(sample_error.input_value()); } } // TODO(b/153887144): support sampling errors randomly in Mako. EXPECT_THAT(input_values, testing::UnorderedElementsAre(0)); } } // namespace downsampler } // namespace mako
apache-2.0
knative/observability
pkg/event/controller_test.go
4739
/* Copyright 2018 The Knative Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package event_test import ( "errors" "io/ioutil" "log" "testing" "github.com/google/go-cmp/cmp" "k8s.io/api/core/v1" "github.com/knative/observability/pkg/event" ) func init() { log.SetOutput(ioutil.Discard) } func TestForwarding(t *testing.T) { ResetForwarderMetrics() spyFl := &spyFlogger{ t: t, } c := event.NewController(spyFl) ev := &v1.Event{ InvolvedObject: v1.ObjectReference{ Name: "some-object-name", Namespace: "some-namespace", }, Message: "some note with log data", Source: v1.EventSource{ Host: "some-host", }, } expected := map[string]interface{}{ "log": []byte("some note with log data"), "stream": []byte("stdout"), "kubernetes": map[string]interface{}{ "host": []byte("some-host"), "pod_name": []byte("some-object-name"), "namespace_name": []byte("some-namespace"), "source_type": []byte("k8s.event"), }, } c.OnAdd(ev) if diff := cmp.Diff(spyFl.receivedMsg, expected); diff != "" { t.Errorf("Unexpected messages (-want +got): %v", diff) } if spyFl.tag != "k8s.event._some-namespace_" { t.Errorf("Expected tag to be k8s.event._some-namespace_, was %s", spyFl.tag) } if event.ForwarderSent.Value() != 1 { t.Errorf("Expected events sent to be 1, was %d", event.ForwarderSent.Value()) } } func TestNoopUpdate(t *testing.T) { spyFl := &spyFlogger{ t: t, } c := event.NewController(spyFl) c.OnUpdate(nil, nil) if spyFl.called { t.Errorf("Expected not to call Flogger") } } func TestNoopDelete(t *testing.T) { spyFl := 
&spyFlogger{ t: t, } c := event.NewController(spyFl) c.OnDelete(nil) if spyFl.called { t.Errorf("Expected not to call Flogger") } } func TestNonV1Event(t *testing.T) { ResetForwarderMetrics() spyFl := &spyFlogger{ t: t, } c := event.NewController(spyFl) c.OnAdd("non-v1-event") if spyFl.called { t.Errorf("Expected not to call Flogger") } if event.ForwarderSent.Value() != 0 { t.Errorf("Expected to not send event, sent %d", event.ForwarderSent.Value()) } if event.ForwarderConvertFailed.Value() != 1 { t.Errorf("Expected to fail to convert send event") } } func TestFailToPost(t *testing.T) { ResetForwarderMetrics() spyFl := &spyFlogger{ err: errors.New("some error"), t: t, } c := event.NewController(spyFl) ev := &v1.Event{ InvolvedObject: v1.ObjectReference{ Name: "some object name", Namespace: "some namespace", }, Message: "some note with log data", Source: v1.EventSource{ Host: "some host", }, } c.OnAdd(ev) if event.ForwarderSent.Value() != 0 { t.Errorf("Expected not to send event, sent %d", event.ForwarderSent.Value()) } if event.ForwarderFailed.Value() != 1 { t.Errorf("Expected to fail to forward a event") } } func TestEmptySource(t *testing.T) { spyFl := &spyFlogger{ t: t, } c := event.NewController(spyFl) ev := &v1.Event{ InvolvedObject: v1.ObjectReference{ Name: "some-object-name", Namespace: "some-namespace", }, Message: "some note with log data", } expected := map[string]interface{}{ "log": []byte("some note with log data"), "stream": []byte("stdout"), "kubernetes": map[string]interface{}{ "host": []byte(""), "pod_name": []byte("some-object-name"), "namespace_name": []byte("some-namespace"), "source_type": []byte("k8s.event"), }, } c.OnAdd(ev) if diff := cmp.Diff(spyFl.receivedMsg, expected); diff != "" { t.Errorf("Unexpected messages (-want +got): %v", diff) } if spyFl.tag != "k8s.event._some-namespace_" { t.Errorf("Expected tag to be k8s.event._some-namespace_, was %s", spyFl.tag) } } func ResetForwarderMetrics() { event.ForwarderSent.Set(0) 
event.ForwarderFailed.Set(0) event.ForwarderConvertFailed.Set(0) } type spyFlogger struct { err error called bool tag string receivedMsg map[string]interface{} t *testing.T } func (s *spyFlogger) Post(tag string, message interface{}) error { s.called = true if s.err != nil { return s.err } s.tag = tag msg, ok := message.(map[string]interface{}) if !ok { s.t.Errorf("message not a map") } s.receivedMsg = msg return nil }
apache-2.0
JavaChat/streems
streems-core/src/main/java/com/github/javachat/streems/Traversal.java
189
package com.github.javachat.streems;

/**
 * The supported strategies for walking an n-ary tree.
 *
 * <ul>
 *   <li>{@link #PREORDER} &mdash; depth-first, visiting a node before its children
 *   <li>{@link #POSTORDER} &mdash; depth-first, visiting a node after its children
 *   <li>{@link #BREADTHFIRST} &mdash; level order, visiting all nodes at one depth
 *       before descending to the next
 * </ul>
 */
public enum Traversal {
    /** Depth-first: a node is visited before any of its children. */
    PREORDER,
    /** Depth-first: a node is visited after all of its children. */
    POSTORDER,
    /** Level order: nodes at depth {@code d} are visited before depth {@code d + 1}. */
    BREADTHFIRST
}
apache-2.0
googleapis/java-compute
proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/DeleteInstancesRegionInstanceGroupManagerRequest.java
61038
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto package com.google.cloud.compute.v1; /** * * * <pre> * A request message for RegionInstanceGroupManagers.DeleteInstances. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest} */ public final class DeleteInstancesRegionInstanceGroupManagerRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest) DeleteInstancesRegionInstanceGroupManagerRequestOrBuilder { private static final long serialVersionUID = 0L; // Use DeleteInstancesRegionInstanceGroupManagerRequest.newBuilder() to construct. 
private DeleteInstancesRegionInstanceGroupManagerRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeleteInstancesRegionInstanceGroupManagerRequest() { instanceGroupManager_ = ""; project_ = ""; region_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeleteInstancesRegionInstanceGroupManagerRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private DeleteInstancesRegionInstanceGroupManagerRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 296879706: { java.lang.String s = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; requestId_ = s; break; } case 1111570338: { java.lang.String s = input.readStringRequireUtf8(); region_ = s; break; } case 1820481738: { java.lang.String s = input.readStringRequireUtf8(); project_ = s; break; } case 1994907162: { java.lang.String s = input.readStringRequireUtf8(); instanceGroupManager_ = s; break; } case -287953974: { com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest.Builder subBuilder = null; if (regionInstanceGroupManagersDeleteInstancesRequestResource_ != null) { subBuilder = regionInstanceGroupManagersDeleteInstancesRequestResource_.toBuilder(); } regionInstanceGroupManagersDeleteInstancesRequestResource_ = input.readMessage( com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest 
.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(regionInstanceGroupManagersDeleteInstancesRequestResource_); regionInstanceGroupManagersDeleteInstancesRequestResource_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DeleteInstancesRegionInstanceGroupManagerRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DeleteInstancesRegionInstanceGroupManagerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest.class, com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest.Builder .class); } private int bitField0_; public static final int INSTANCE_GROUP_MANAGER_FIELD_NUMBER = 249363395; private volatile java.lang.Object instanceGroupManager_; /** * * * <pre> * Name of the managed instance group. * </pre> * * <code>string instance_group_manager = 249363395 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instanceGroupManager. 
*/ @java.lang.Override public java.lang.String getInstanceGroupManager() { java.lang.Object ref = instanceGroupManager_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceGroupManager_ = s; return s; } } /** * * * <pre> * Name of the managed instance group. * </pre> * * <code>string instance_group_manager = 249363395 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The bytes for instanceGroupManager. */ @java.lang.Override public com.google.protobuf.ByteString getInstanceGroupManagerBytes() { java.lang.Object ref = instanceGroupManager_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceGroupManager_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PROJECT_FIELD_NUMBER = 227560217; private volatile java.lang.Object project_; /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The project. */ @java.lang.Override public java.lang.String getProject() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The bytes for project. 
*/ @java.lang.Override public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REGION_FIELD_NUMBER = 138946292; private volatile java.lang.Object region_; /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return The region. */ @java.lang.Override public java.lang.String getRegion() { java.lang.Object ref = region_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); region_ = s; return s; } } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return The bytes for region. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRegionBytes() { java.lang.Object ref = region_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); region_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REGION_INSTANCE_GROUP_MANAGERS_DELETE_INSTANCES_REQUEST_RESOURCE_FIELD_NUMBER = 500876665; private com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest regionInstanceGroupManagersDeleteInstancesRequestResource_; /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the regionInstanceGroupManagersDeleteInstancesRequestResource field is set. */ @java.lang.Override public boolean hasRegionInstanceGroupManagersDeleteInstancesRequestResource() { return regionInstanceGroupManagersDeleteInstancesRequestResource_ != null; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The regionInstanceGroupManagersDeleteInstancesRequestResource. */ @java.lang.Override public com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest getRegionInstanceGroupManagersDeleteInstancesRequestResource() { return regionInstanceGroupManagersDeleteInstancesRequestResource_ == null ? 
com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest .getDefaultInstance() : regionInstanceGroupManagersDeleteInstancesRequestResource_; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequestOrBuilder getRegionInstanceGroupManagersDeleteInstancesRequestResourceOrBuilder() { return getRegionInstanceGroupManagersDeleteInstancesRequestResource(); } public static final int REQUEST_ID_FIELD_NUMBER = 37109963; private volatile java.lang.Object requestId_; /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return Whether the requestId field is set. */ @java.lang.Override public boolean hasRequestId() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. 
For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return The bytes for requestId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 37109963, requestId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceGroupManager_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 249363395, instanceGroupManager_); } if (regionInstanceGroupManagersDeleteInstancesRequestResource_ != null) { output.writeMessage( 500876665, getRegionInstanceGroupManagersDeleteInstancesRequestResource()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(37109963, requestId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceGroupManager_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize( 249363395, instanceGroupManager_); } if (regionInstanceGroupManagersDeleteInstancesRequestResource_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 500876665, getRegionInstanceGroupManagersDeleteInstancesRequestResource()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest)) { return super.equals(obj); } com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest other = (com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest) obj; if (!getInstanceGroupManager().equals(other.getInstanceGroupManager())) return false; if (!getProject().equals(other.getProject())) return false; if (!getRegion().equals(other.getRegion())) return false; if (hasRegionInstanceGroupManagersDeleteInstancesRequestResource() != other.hasRegionInstanceGroupManagersDeleteInstancesRequestResource()) return false; if (hasRegionInstanceGroupManagersDeleteInstancesRequestResource()) { if (!getRegionInstanceGroupManagersDeleteInstancesRequestResource() .equals(other.getRegionInstanceGroupManagersDeleteInstancesRequestResource())) return false; } if (hasRequestId() != other.hasRequestId()) return false; if (hasRequestId()) { if (!getRequestId().equals(other.getRequestId())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); hash = (37 * hash) + INSTANCE_GROUP_MANAGER_FIELD_NUMBER; hash = (53 * hash) + getInstanceGroupManager().hashCode(); hash = (37 * hash) + PROJECT_FIELD_NUMBER; hash = (53 * hash) + getProject().hashCode(); hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); if (hasRegionInstanceGroupManagersDeleteInstancesRequestResource()) { hash = (37 * hash) + REGION_INSTANCE_GROUP_MANAGERS_DELETE_INSTANCES_REQUEST_RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getRegionInstanceGroupManagersDeleteInstancesRequestResource().hashCode(); } if (hasRequestId()) { hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request message for RegionInstanceGroupManagers.DeleteInstances. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest) com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DeleteInstancesRegionInstanceGroupManagerRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DeleteInstancesRegionInstanceGroupManagerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest.class, 
com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest.Builder .class); } // Construct using // com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); instanceGroupManager_ = ""; project_ = ""; region_ = ""; if (regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ == null) { regionInstanceGroupManagersDeleteInstancesRequestResource_ = null; } else { regionInstanceGroupManagersDeleteInstancesRequestResource_ = null; regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ = null; } requestId_ = ""; bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DeleteInstancesRegionInstanceGroupManagerRequest_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest getDefaultInstanceForType() { return com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest build() { com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest buildPartial() { 
com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest result = new com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; result.instanceGroupManager_ = instanceGroupManager_; result.project_ = project_; result.region_ = region_; if (regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ == null) { result.regionInstanceGroupManagersDeleteInstancesRequestResource_ = regionInstanceGroupManagersDeleteInstancesRequestResource_; } else { result.regionInstanceGroupManagersDeleteInstancesRequestResource_ = regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_.build(); } if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.requestId_ = requestId_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest) { return mergeFrom( 
(com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest other) { if (other == com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest .getDefaultInstance()) return this; if (!other.getInstanceGroupManager().isEmpty()) { instanceGroupManager_ = other.instanceGroupManager_; onChanged(); } if (!other.getProject().isEmpty()) { project_ = other.project_; onChanged(); } if (!other.getRegion().isEmpty()) { region_ = other.region_; onChanged(); } if (other.hasRegionInstanceGroupManagersDeleteInstancesRequestResource()) { mergeRegionInstanceGroupManagersDeleteInstancesRequestResource( other.getRegionInstanceGroupManagersDeleteInstancesRequestResource()); } if (other.hasRequestId()) { bitField0_ |= 0x00000001; requestId_ = other.requestId_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object instanceGroupManager_ = ""; /** * * * <pre> * Name of the managed instance group. 
* </pre> * * <code>string instance_group_manager = 249363395 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instanceGroupManager. */ public java.lang.String getInstanceGroupManager() { java.lang.Object ref = instanceGroupManager_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceGroupManager_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the managed instance group. * </pre> * * <code>string instance_group_manager = 249363395 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The bytes for instanceGroupManager. */ public com.google.protobuf.ByteString getInstanceGroupManagerBytes() { java.lang.Object ref = instanceGroupManager_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceGroupManager_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the managed instance group. * </pre> * * <code>string instance_group_manager = 249363395 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The instanceGroupManager to set. * @return This builder for chaining. */ public Builder setInstanceGroupManager(java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceGroupManager_ = value; onChanged(); return this; } /** * * * <pre> * Name of the managed instance group. * </pre> * * <code>string instance_group_manager = 249363395 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. */ public Builder clearInstanceGroupManager() { instanceGroupManager_ = getDefaultInstance().getInstanceGroupManager(); onChanged(); return this; } /** * * * <pre> * Name of the managed instance group. 
* </pre> * * <code>string instance_group_manager = 249363395 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The bytes for instanceGroupManager to set. * @return This builder for chaining. */ public Builder setInstanceGroupManagerBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceGroupManager_ = value; onChanged(); return this; } private java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The project. */ public java.lang.String getProject() { java.lang.Object ref = project_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The bytes for project. */ public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @param value The project to set. * @return This builder for chaining. 
*/ public Builder setProject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } project_ = value; onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return This builder for chaining. */ public Builder clearProject() { project_ = getDefaultInstance().getProject(); onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @param value The bytes for project to set. * @return This builder for chaining. */ public Builder setProjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); project_ = value; onChanged(); return this; } private java.lang.Object region_ = ""; /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return The region. */ public java.lang.String getRegion() { java.lang.Object ref = region_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); region_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return The bytes for region. 
*/ public com.google.protobuf.ByteString getRegionBytes() { java.lang.Object ref = region_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); region_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @param value The region to set. * @return This builder for chaining. */ public Builder setRegion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } region_ = value; onChanged(); return this; } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @return This builder for chaining. */ public Builder clearRegion() { region_ = getDefaultInstance().getRegion(); onChanged(); return this; } /** * * * <pre> * Name of the region scoping this request. * </pre> * * <code> * string region = 138946292 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "region"]; * </code> * * @param value The bytes for region to set. * @return This builder for chaining. 
*/ public Builder setRegionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); region_ = value; onChanged(); return this; } private com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest regionInstanceGroupManagersDeleteInstancesRequestResource_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest, com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest.Builder, com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequestOrBuilder> regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_; /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the regionInstanceGroupManagersDeleteInstancesRequestResource field is set. */ public boolean hasRegionInstanceGroupManagersDeleteInstancesRequestResource() { return regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ != null || regionInstanceGroupManagersDeleteInstancesRequestResource_ != null; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The regionInstanceGroupManagersDeleteInstancesRequestResource. */ public com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest getRegionInstanceGroupManagersDeleteInstancesRequestResource() { if (regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ == null) { return regionInstanceGroupManagersDeleteInstancesRequestResource_ == null ? 
com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest .getDefaultInstance() : regionInstanceGroupManagersDeleteInstancesRequestResource_; } else { return regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_.getMessage(); } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setRegionInstanceGroupManagersDeleteInstancesRequestResource( com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest value) { if (regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } regionInstanceGroupManagersDeleteInstancesRequestResource_ = value; onChanged(); } else { regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_.setMessage(value); } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setRegionInstanceGroupManagersDeleteInstancesRequestResource( com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest.Builder builderForValue) { if (regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ == null) { regionInstanceGroupManagersDeleteInstancesRequestResource_ = builderForValue.build(); onChanged(); } else { regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_.setMessage( builderForValue.build()); } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest 
region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeRegionInstanceGroupManagersDeleteInstancesRequestResource( com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest value) { if (regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ == null) { if (regionInstanceGroupManagersDeleteInstancesRequestResource_ != null) { regionInstanceGroupManagersDeleteInstancesRequestResource_ = com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest .newBuilder(regionInstanceGroupManagersDeleteInstancesRequestResource_) .mergeFrom(value) .buildPartial(); } else { regionInstanceGroupManagersDeleteInstancesRequestResource_ = value; } onChanged(); } else { regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearRegionInstanceGroupManagersDeleteInstancesRequestResource() { if (regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ == null) { regionInstanceGroupManagersDeleteInstancesRequestResource_ = null; onChanged(); } else { regionInstanceGroupManagersDeleteInstancesRequestResource_ = null; regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ = null; } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest.Builder 
getRegionInstanceGroupManagersDeleteInstancesRequestResourceBuilder() { onChanged(); return getRegionInstanceGroupManagersDeleteInstancesRequestResourceFieldBuilder() .getBuilder(); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequestOrBuilder getRegionInstanceGroupManagersDeleteInstancesRequestResourceOrBuilder() { if (regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ != null) { return regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ .getMessageOrBuilder(); } else { return regionInstanceGroupManagersDeleteInstancesRequestResource_ == null ? com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest .getDefaultInstance() : regionInstanceGroupManagersDeleteInstancesRequestResource_; } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest region_instance_group_managers_delete_instances_request_resource = 500876665 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest, com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest.Builder, com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequestOrBuilder> getRegionInstanceGroupManagersDeleteInstancesRequestResourceFieldBuilder() { if (regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ == null) { regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< 
com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest, com.google.cloud.compute.v1.RegionInstanceGroupManagersDeleteInstancesRequest .Builder, com.google.cloud.compute.v1 .RegionInstanceGroupManagersDeleteInstancesRequestOrBuilder>( getRegionInstanceGroupManagersDeleteInstancesRequestResource(), getParentForChildren(), isClean()); regionInstanceGroupManagersDeleteInstancesRequestResource_ = null; } return regionInstanceGroupManagersDeleteInstancesRequestResourceBuilder_; } private java.lang.Object requestId_ = ""; /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return Whether the requestId field is set. */ public boolean hasRequestId() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. 
The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return The requestId. */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; requestId_ = value; onChanged(); return this; } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @return This builder for chaining. */ public Builder clearRequestId() { bitField0_ = (bitField0_ & ~0x00000001); requestId_ = getDefaultInstance().getRequestId(); onChanged(); return this; } /** * * * <pre> * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. 
If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). * </pre> * * <code>optional string request_id = 37109963;</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); bitField0_ |= 0x00000001; requestId_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest) private static final com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest(); } public static com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeleteInstancesRegionInstanceGroupManagerRequest> PARSER = new com.google.protobuf.AbstractParser< DeleteInstancesRegionInstanceGroupManagerRequest>() { @java.lang.Override public DeleteInstancesRegionInstanceGroupManagerRequest 
parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new DeleteInstancesRegionInstanceGroupManagerRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<DeleteInstancesRegionInstanceGroupManagerRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeleteInstancesRegionInstanceGroupManagerRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.DeleteInstancesRegionInstanceGroupManagerRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache-2.0
google-code-export/google-api-dfp-java
examples/v201308/creativeservice/UpdateCreativesExample.java
3431
// Copyright 2012 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package v201308.creativeservice; import com.google.api.ads.dfp.lib.DfpService; import com.google.api.ads.dfp.lib.DfpServiceLogger; import com.google.api.ads.dfp.lib.DfpUser; import com.google.api.ads.dfp.lib.utils.v201308.StatementBuilder; import com.google.api.ads.dfp.v201308.Creative; import com.google.api.ads.dfp.v201308.CreativePage; import com.google.api.ads.dfp.v201308.CreativeServiceInterface; import com.google.api.ads.dfp.v201308.ImageCreative; import com.google.api.ads.dfp.v201308.Statement; /** * This example updates the destination URL of all image creatives up to * the first 500. To determine which image creatives exist, run * GetAllCreativesExample.java. * * Tags: CreativeService.getCreativesByStatement * Tags: CreativeService.updateCreatives * * @author api.arogal@gmail.com (Adam Rogal) */ public class UpdateCreativesExample { public static void main(String[] args) { try { // Log SOAP XML request and response. DfpServiceLogger.log(); // Get DfpUser from "~/dfp.properties". DfpUser user = new DfpUser(); // Get the CreativeService. CreativeServiceInterface creativeService = user.getService(DfpService.V201308.CREATIVE_SERVICE); // Create a statement to get all image creatives. 
Statement filterStatement = new StatementBuilder("WHERE creativeType = :creativeType LIMIT 500") .putValue("creativeType", ImageCreative.class.getSimpleName()).toStatement(); // Get creatives by statement. CreativePage page = creativeService.getCreativesByStatement(filterStatement); if (page.getResults() != null) { Creative[] creatives = page.getResults(); // Update each local creative object by changing its destination URL. for (Creative creative : creatives) { if (creative instanceof ImageCreative) { ImageCreative imageCreative = (ImageCreative) creative; imageCreative.setDestinationUrl("http://news.google.com"); } } // Update the creatives on the server. creatives = creativeService.updateCreatives(creatives); if (creatives != null) { for (Creative creative : creatives) { if (creative instanceof ImageCreative) { ImageCreative imageCreative = (ImageCreative) creative; System.out.println("An image creative with ID \"" + imageCreative.getId() + "\" and destination URL \"" + imageCreative.getDestinationUrl() + "\" was updated."); } } } else { System.out.println("No creatives updated."); } } else { System.out.println("No creatives found to update."); } } catch (Exception e) { e.printStackTrace(); } } }
apache-2.0
aspnet/WebHooks
test/Microsoft.AspNetCore.WebHooks.FunctionalTest/ResourceFile.cs
5802
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.IO;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using Xunit;

namespace Microsoft.AspNetCore.WebHooks.FunctionalTest
{
    /// <summary>
    /// Reader for files compiled into an assembly as resources.
    /// </summary>
    /// <remarks>
    /// A simplified version of aspnet/Mvc's ResourceFile class. Lacks the ability to create or update files i.e. does
    /// not include the original's GENERATE_BASELINES support. In turn, the MVC class was inspired by aspnet/Razor's
    /// BaselineWriter and TestFile test classes.
    /// </remarks>
    public class ResourceFile
    {
        // The assembly that carries both this helper and the embedded test resources.
        private static readonly Assembly _testAssembly = typeof(ResourceFile).GetTypeInfo().Assembly;

        /// <summary>
        /// Return <see cref="Stream"/> for <paramref name="resourceName"/> from the <see cref="Assembly"/>'s manifest.
        /// The <see cref="Assembly"/> used is the one containing <see cref="ResourceFile"/> and the test classes.
        /// </summary>
        /// <param name="resourceName">
        /// Name of the manifest resource in the <see cref="Assembly"/>. Also, a path relative to the test project's
        /// directory.
        /// </param>
        /// <param name="normalizeLineEndings">
        /// If <c>true</c> <paramref name="resourceName"/> is used as a source file and its line endings must be
        /// normalized.
        /// </param>
        /// <returns>
        /// A <see cref="Task"/> that on completion provides a <see cref="Stream"/> for <paramref name="resourceName"/>
        /// from the <see cref="Assembly"/>'s manifest.
        /// </returns>
        /// <exception cref="Xunit.Sdk.TrueException">
        /// Thrown if <paramref name="resourceName"/> is not found in the <see cref="Assembly"/>.
        /// </exception>
        /// <remarks>
        /// Normalizes line endings to "\r\n" (CRLF) if <paramref name="normalizeLineEndings"/> is
        /// <see langword="true"/>.
        /// </remarks>
        public static async Task<Stream> GetResourceStreamAsync(string resourceName, bool normalizeLineEndings)
        {
            // Manifest resource names use '.' where the project path used directory separators.
            var dottedName = resourceName.Replace('/', '.').Replace('\\', '.');
            var fullName = $"{ _testAssembly.GetName().Name }.{ dottedName }";
            Assert.True(Exists(fullName), $"Manifest resource '{ fullName }' not found.");

            var stream = _testAssembly.GetManifestResourceStream(fullName);
            if (!normalizeLineEndings)
            {
                return stream;
            }

            // Normalize line endings to '\r\n' (CRLF). This removes core.autocrlf, core.eol, core.safecrlf, and
            // .gitattributes from the equation and treats "\r\n" and "\n" as equivalent. Does not handle
            // some line endings like "\r" but otherwise ensures checksums and line mappings are consistent.
            string normalized;
            using (var reader = new StreamReader(stream))
            {
                var raw = await reader.ReadToEndAsync();
                normalized = raw.Replace("\r", "").Replace("\n", "\r\n");
            }

            return new MemoryStream(Encoding.UTF8.GetBytes(normalized));
        }

        /// <summary>
        /// Return <see cref="string"/> content of <paramref name="resourceName"/> from the <see cref="Assembly"/>'s
        /// manifest. The <see cref="Assembly"/> used is the one containing <see cref="ResourceFile"/> and the test
        /// classes.
        /// </summary>
        /// <param name="resourceName">
        /// Name of the manifest resource in the <see cref="Assembly"/>. Also, a path relative to the test project's
        /// directory.
        /// </param>
        /// <param name="normalizeLineEndings">
        /// If <c>true</c> <paramref name="resourceName"/> is used as a source file and its line endings must be
        /// normalized.
        /// </param>
        /// <returns>
        /// A <see cref="Task{string}"/> that on completion returns the <see cref="string"/> content of
        /// <paramref name="resourceName"/> from the <see cref="Assembly"/>'s manifest.
        /// </returns>
        /// <exception cref="Xunit.Sdk.TrueException">
        /// Thrown if <paramref name="resourceName"/> is not found in the <see cref="Assembly"/>.
        /// </exception>
        /// <remarks>
        /// Normalizes line endings to "\r\n" (CRLF) if <paramref name="normalizeLineEndings"/> is
        /// <see langword="true"/>.
        /// </remarks>
        public static async Task<string> GetResourceAsStringAsync(string resourceName, bool normalizeLineEndings)
        {
            using (var stream = await GetResourceStreamAsync(resourceName, normalizeLineEndings))
            {
                // NOTE(review): GetResourceStreamAsync asserts existence, so a null stream looks
                // unreachable here; the guard is kept to preserve the original behavior exactly.
                if (stream == null)
                {
                    return null;
                }

                using (var reader = new StreamReader(stream))
                {
                    return await reader.ReadToEndAsync();
                }
            }
        }

        // Resource names are case-sensitive; Array.IndexOf compares strings ordinally
        // (EqualityComparer<string>.Default), matching the original explicit Ordinal check.
        private static bool Exists(string fullName)
        {
            return Array.IndexOf(_testAssembly.GetManifestResourceNames(), fullName) >= 0;
        }
    }
}
apache-2.0
CloudDataObject/appbuilder-mobile-template
mobile template/scripts/app.js
3845
(function() {
    // Store a reference to the application object that will be created
    // later on so that we can use it if need be.
    // NOTE(review): this literal declares `jsdoSession`, but bootstrap() below
    // assigns `app.jsdosession` (all lowercase) — the two never refer to the
    // same property. Presumably one of them is a typo; confirm against other
    // scripts before renaming either.
    var app = {
        data: {},
        mobileApp: {},
        jsdoSession: {},
        views: {},
        viewModels: {}
    };

    // Initializes the Kendo UI Mobile application and the Progress JSDO
    // session. Runs inside jQuery's ready handler so the DOM exists first.
    var bootstrap = function() {
        $(function() {
            try {
                app.mobileApp = new kendo.mobile.Application(document.body, {
                    // you can change the default transition (slide, zoom or fade)
                    transition: 'slide',
                    // comment out the following line to get a UI which matches the look
                    // and feel of the operating system
                    skin: 'flat',
                    // the application needs to know which view to load first
                    initial: 'views/home.html',
                    layout: "tabstrip-layout",
                    statusBarStyle: 'black-translucent'
                });

                // Session management - behavior depends on the authentication model
                // specified for the JSDO session in jsdoSettings.js.
                progress.util.jsdoSettingsProcessor(jsdoSettings);
                if (!jsdoSettings.authenticationModel) {
                    console.log("Warning: jsdoSettings.authenticationModel not specified. Default is ANONYMOUS");
                }
                if (jsdoSettings.serviceURI) {
                    app.jsdosession = new progress.data.JSDOSession(jsdoSettings);
                } else {
                    console.log("Error: jsdoSettings.serviceURI must be specified.");
                }

                if (app.jsdosession && app.isAnonymous()) {
                    // Login as anonymous automatically, data will be available on list page.
                    // The login icon is hidden because no explicit login is needed.
                    $('#loginIcon').hide();
                    app.viewModels.loginViewModel.login();
                }
            } catch(ex) {
                console.log("Error creating JSDOSession: " + ex);
            }
        });
    };

    if (window.cordova) {
        // this function is called by Cordova when the application is loaded by the device
        document.addEventListener('deviceready', function() {
            // hide the splash screen as soon as the app is ready. otherwise
            // Cordova will wait 5 very long seconds to do it for you.
            if (navigator && navigator.splashscreen) {
                navigator.splashscreen.hide();
            }
            bootstrap();
        }, false);
    } else {
        // Not running under Cordova (e.g. plain browser) — bootstrap immediately.
        bootstrap();
    }

    // Expose the app object globally so views/view models can reach it.
    window.app = app;

    // Returns true when the device reports network connectivity. When the
    // Cordova connection plugin is unavailable (plain browser), assume online.
    app.isOnline = function() {
        if (!navigator || !navigator.connection) {
            return true;
        } else {
            return navigator.connection.type !== 'none';
        }
    };

    // Returns true when the configured authentication model is anonymous.
    app.isAnonymous = function() {
        // authenticationModel defaults to "ANONYMOUS"
        if (!jsdoSettings.authenticationModel ||
            jsdoSettings.authenticationModel.toUpperCase() === "ANONYMOUS") {
            return true;
        }
        return false;
    };

    // Shows an error message via the native notification plugin when present,
    // falling back to a plain browser alert.
    app.showError = function (message) {
        if (navigator && navigator.notification) {
            navigator.notification.alert(message);
        } else {
            // if run directly in browser
            alert(message);
        }
    };

    // Replaces the navbar title of the current view. Deferred with
    // setTimeout(..., 0) so it runs after Kendo finishes rendering the view.
    app.changeTitle = function (customTitle) {
        setTimeout(function () {
            app.mobileApp.view().header.find('[data-role="navbar"]').data('kendoMobileNavBar').title(customTitle);
        }, 0);
    };

    // Tab-strip guard: blocks navigation to the list tab until the user has
    // logged in (unless the app runs with anonymous authentication).
    app.onSelectTab = function (e) {
        if (e.item[0].id == "listIcon") {
            if (!app.viewModels.loginViewModel.isLoggedIn && !app.isAnonymous()) {
                app.showError("Please login first.");
                e.preventDefault();
            }
        }
    };
}());
apache-2.0
diffplug/spotless
plugin-gradle/src/test/java/com/diffplug/gradle/spotless/PalantirJavaFormatIntegrationTest.java
1501
/*
 * Copyright 2022 DiffPlug
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.diffplug.gradle.spotless;

import java.io.IOException;

import org.junit.jupiter.api.Test;

/**
 * Gradle integration test for the {@code palantirJavaFormat} step: applies the
 * formatter to an unformatted fixture, verifies the formatted result, and
 * checks that the Spotless tasks participate correctly in Gradle's up-to-date
 * checking — including after the formatter version changes.
 */
class PalantirJavaFormatIntegrationTest extends GradleIntegrationHarness {
	@Test
	void integration() throws IOException {
		// Configure a minimal Spotless build targeting a single file with
		// palantirJavaFormat pinned to version 1.1.0.
		setFile("build.gradle").toLines(
				"plugins {",
				"    id 'com.diffplug.spotless'",
				"}",
				"repositories { mavenCentral() }",
				"",
				"spotless {",
				"    java {",
				"        target file('test.java')",
				"        palantirJavaFormat('1.1.0')",
				"    }",
				"}");
		// Seed the target with the unformatted fixture, run spotlessApply,
		// and assert the file now matches the expected formatted output.
		setFile("test.java").toResource("java/palantirjavaformat/JavaCodeUnformatted.test");
		gradleRunner().withArguments("spotlessApply").build();
		assertFile("test.java").sameAsResource("java/palantirjavaformat/JavaCodeFormatted.test");

		// A second run must be UP-TO-DATE (nothing changed on disk).
		checkRunsThenUpToDate();

		// Changing the formatter version must invalidate the task, so the
		// next run executes again before settling back to UP-TO-DATE.
		replace("build.gradle",
				"palantirJavaFormat('1.1.0')",
				"palantirJavaFormat('1.0.1')");
		checkRunsThenUpToDate();
	}
}
apache-2.0
pmarasse/cas
support/cas-server-support-wsfederation/src/main/java/org/apereo/cas/support/wsfederation/WsFederationConfiguration.java
8550
package org.apereo.cas.support.wsfederation;

import org.apache.commons.lang3.StringUtils;
import org.opensaml.security.credential.Credential;
import org.opensaml.security.x509.BasicX509Credential;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.Resource;

import javax.annotation.PostConstruct;
import java.io.InputStream;
import java.io.Serializable;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

/**
 * This class gathers configuration information for the WS Federation Identity Provider.
 *
 * @author John Gasper
 * @author Misagh Moayyed
 * @since 4.2.0
 */
public class WsFederationConfiguration implements Serializable {
    private static final long serialVersionUID = 2310859477512242659L;

    private static final Logger LOGGER = LoggerFactory.getLogger(WsFederationConfiguration.class);

    /**
     * Describes how the WS-FED principal resolution machinery
     * should process attributes from WS-FED.
     */
    public enum WsFedPrincipalResolutionAttributesType {
        /**
         * Cas ws fed principal resolution attributes type.
         */
        CAS,
        /**
         * Wsfed ws fed principal resolution attributes type.
         */
        WSFED,
        /**
         * Both ws fed principal resolution attributes type.
         */
        BOTH
    }

    private Resource encryptionPrivateKey;

    private Resource encryptionCertificate;

    private String encryptionPrivateKeyPassword;

    private String identityAttribute;

    private String identityProviderIdentifier;

    private String identityProviderUrl;

    private List<Resource> signingCertificateResources = new ArrayList<>();

    private String relyingPartyIdentifier;

    private long tolerance;

    private boolean autoRedirect;

    private WsFedPrincipalResolutionAttributesType attributesType;

    private WsFederationAttributeMutator attributeMutator;

    // Lazily built from signingCertificateResources; see getSigningWallet().
    private List<Credential> signingWallet;

    private String name;

    /**
     * Gets a display name for this configuration; falls back to the
     * simple class name when no explicit name was set.
     *
     * @return the configured name, never blank
     */
    public String getName() {
        return StringUtils.isBlank(this.name) ? getClass().getSimpleName() : this.name;
    }

    public void setName(final String name) {
        this.name = name;
    }

    // Invoked by the container after dependency injection; eagerly loads the
    // signing certificates so failures surface at startup rather than first use.
    @PostConstruct
    private void initCertificates() {
        createSigningWallet(this.signingCertificateResources);
    }

    /**
     * gets the identity of the IdP.
     *
     * @return the identity
     */
    public String getIdentityAttribute() {
        return this.identityAttribute;
    }

    /**
     * sets the identity of the IdP.
     *
     * @param identityAttribute the identity
     */
    public void setIdentityAttribute(final String identityAttribute) {
        this.identityAttribute = identityAttribute;
    }

    /**
     * gets the identity provider identifier.
     *
     * @return the identifier
     */
    public String getIdentityProviderIdentifier() {
        return this.identityProviderIdentifier;
    }

    /**
     * sets the identity provider identifier.
     *
     * @param identityProviderIdentifier the identifier.
     */
    public void setIdentityProviderIdentifier(final String identityProviderIdentifier) {
        this.identityProviderIdentifier = identityProviderIdentifier;
    }

    /**
     * gets the identity provider url.
     *
     * @return the url
     */
    public String getIdentityProviderUrl() {
        return this.identityProviderUrl;
    }

    /**
     * sets the identity provider url.
     *
     * @param identityProviderUrl the url
     */
    public void setIdentityProviderUrl(final String identityProviderUrl) {
        this.identityProviderUrl = identityProviderUrl;
    }

    /**
     * gets the relying part identifier.
     *
     * @return the identifier
     */
    public String getRelyingPartyIdentifier() {
        return this.relyingPartyIdentifier;
    }

    /**
     * sets the relying party identifier.
     *
     * @param relyingPartyIdentifier the identifier
     */
    public void setRelyingPartyIdentifier(final String relyingPartyIdentifier) {
        this.relyingPartyIdentifier = relyingPartyIdentifier;
    }

    /**
     * gets the signing certificates.
     *
     * @return X509credentials of the signing certs
     */
    public List<Credential> getSigningWallet() {
        if (this.signingWallet == null) {
            createSigningWallet(this.signingCertificateResources);
        }
        return this.signingWallet;
    }

    /**
     * gets the list of signing certificate files.
     *
     * @return the list of files
     */
    public List<Resource> getSigningCertificateResources() {
        return this.signingCertificateResources;
    }

    /**
     * sets the signing certs.
     *
     * @param signingCertificateResources a list of certificate files to read in.
     */
    public void setSigningCertificateResources(final Resource... signingCertificateResources) {
        this.signingCertificateResources = Arrays.asList(signingCertificateResources);
        createSigningWallet(this.signingCertificateResources);
    }

    public boolean isAutoRedirect() {
        return autoRedirect;
    }

    public void setAutoRedirect(final boolean autoRedirect) {
        this.autoRedirect = autoRedirect;
    }

    private void createSigningWallet(final List<Resource> signingCertificateFiles) {
        // getSigningCredential returns null when a resource cannot be read or
        // parsed; drop those entries so consumers iterating the wallet never
        // encounter a null credential (which would NPE during signature checks).
        this.signingWallet = signingCertificateFiles.stream()
                .map(WsFederationConfiguration::getSigningCredential)
                .filter(credential -> credential != null)
                .collect(Collectors.toList());
    }

    /**
     * gets the tolerance.
     *
     * @return the tolerance in milliseconds
     */
    public long getTolerance() {
        return this.tolerance;
    }

    /**
     * sets the tolerance of the validity of the timestamp token.
     *
     * @param tolerance the tolerance in milliseconds
     */
    public void setTolerance(final long tolerance) {
        this.tolerance = tolerance;
    }

    /**
     * gets the attributeMutator.
     *
     * @return an attributeMutator
     */
    public WsFederationAttributeMutator getAttributeMutator() {
        return this.attributeMutator;
    }

    /**
     * sets the attributeMutator.
     *
     * @param attributeMutator an attributeMutator
     */
    public void setAttributeMutator(final WsFederationAttributeMutator attributeMutator) {
        this.attributeMutator = attributeMutator;
    }

    public WsFedPrincipalResolutionAttributesType getAttributesType() {
        return this.attributesType;
    }

    public void setAttributesType(final WsFedPrincipalResolutionAttributesType attributesType) {
        this.attributesType = attributesType;
    }

    public void setSigningCertificateResources(final List<Resource> signingCertificateResources) {
        this.signingCertificateResources = signingCertificateResources;
        // Rebuild the wallet immediately, mirroring the varargs overload above;
        // previously a wallet built earlier would remain stale because
        // getSigningWallet() only builds when the wallet is still null.
        createSigningWallet(this.signingCertificateResources);
    }

    public Resource getEncryptionPrivateKey() {
        return encryptionPrivateKey;
    }

    public void setEncryptionPrivateKey(final Resource encryptionPrivateKey) {
        this.encryptionPrivateKey = encryptionPrivateKey;
    }

    public Resource getEncryptionCertificate() {
        return encryptionCertificate;
    }

    public void setEncryptionCertificate(final Resource encryptionCertificate) {
        this.encryptionCertificate = encryptionCertificate;
    }

    public String getEncryptionPrivateKeyPassword() {
        return encryptionPrivateKeyPassword;
    }

    public void setEncryptionPrivateKeyPassword(final String encryptionPrivateKeyPassword) {
        this.encryptionPrivateKeyPassword = encryptionPrivateKeyPassword;
    }

    /**
     * getSigningCredential loads up an X509Credential from a file.
     *
     * @param resource the signing certificate file
     * @return an X509 credential, or {@code null} when the resource cannot be read/parsed
     */
    private static Credential getSigningCredential(final Resource resource) {
        try (InputStream inputStream = resource.getInputStream()) {
            final CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
            final X509Certificate certificate = (X509Certificate) certificateFactory.generateCertificate(inputStream);
            final Credential publicCredential = new BasicX509Credential(certificate);
            LOGGER.debug("Signing credential key retrieved from [{}].", resource);
            return publicCredential;
        } catch (final Exception ex) {
            LOGGER.error(ex.getMessage(), ex);
        }
        return null;
    }
}
apache-2.0
burberius/eve-esi
src/main/java/net/troja/eve/esi/model/FactionWarfareLeaderboardCorporationsKills.java
6676
/*
 * EVE Swagger Interface
 * An OpenAPI for EVE Online
 *
 * The version of the OpenAPI document: 1.10.1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

package net.troja.eve.esi.model;

import java.util.Objects;
import java.util.Arrays;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import net.troja.eve.esi.model.FactionWarfareLeaderboardCorporationActiveTotalKills;
import net.troja.eve.esi.model.FactionWarfareLeaderboardCorporationLastWeekKills;
import net.troja.eve.esi.model.FactionWarfareLeaderboardCorporationYesterdayKills;
import java.io.Serializable;

/**
 * Top 10 rankings of corporations by number of kills from yesterday, last week
 * and in total
 */
// Generated Gson/Swagger model: three parallel leaderboards, each a list of
// per-corporation kill entries. Regenerate rather than hand-edit.
@ApiModel(description = "Top 10 rankings of corporations by number of kills from yesterday, last week and in total")
public class FactionWarfareLeaderboardCorporationsKills implements Serializable {
    private static final long serialVersionUID = 1L;

    // JSON field names are fixed by the ESI API schema.
    public static final String SERIALIZED_NAME_ACTIVE_TOTAL = "active_total";
    @SerializedName(SERIALIZED_NAME_ACTIVE_TOTAL)
    private List<FactionWarfareLeaderboardCorporationActiveTotalKills> activeTotal = new ArrayList<>();

    public static final String SERIALIZED_NAME_LAST_WEEK = "last_week";
    @SerializedName(SERIALIZED_NAME_LAST_WEEK)
    private List<FactionWarfareLeaderboardCorporationLastWeekKills> lastWeek = new ArrayList<>();

    public static final String SERIALIZED_NAME_YESTERDAY = "yesterday";
    @SerializedName(SERIALIZED_NAME_YESTERDAY)
    private List<FactionWarfareLeaderboardCorporationYesterdayKills> yesterday = new ArrayList<>();

    // Fluent setter (builder-style): replaces the whole list and returns this.
    public FactionWarfareLeaderboardCorporationsKills activeTotal(
            List<FactionWarfareLeaderboardCorporationActiveTotalKills> activeTotal) {
        this.activeTotal = activeTotal;
        return this;
    }

    // Appends a single entry; assumes the backing list is mutable.
    public FactionWarfareLeaderboardCorporationsKills addActiveTotalItem(
            FactionWarfareLeaderboardCorporationActiveTotalKills activeTotalItem) {
        this.activeTotal.add(activeTotalItem);
        return this;
    }

    /**
     * Top 10 ranking of corporations active in faction warfare by total kills.
     * A corporation is considered \&quot;active\&quot; if they have
     * participated in faction warfare in the past 14 days
     *
     * @return activeTotal
     **/
    @ApiModelProperty(required = true, value = "Top 10 ranking of corporations active in faction warfare by total kills. A corporation is considered \"active\" if they have participated in faction warfare in the past 14 days")
    public List<FactionWarfareLeaderboardCorporationActiveTotalKills> getActiveTotal() {
        return activeTotal;
    }

    public void setActiveTotal(List<FactionWarfareLeaderboardCorporationActiveTotalKills> activeTotal) {
        this.activeTotal = activeTotal;
    }

    // Fluent setter for the last-week leaderboard.
    public FactionWarfareLeaderboardCorporationsKills lastWeek(
            List<FactionWarfareLeaderboardCorporationLastWeekKills> lastWeek) {
        this.lastWeek = lastWeek;
        return this;
    }

    public FactionWarfareLeaderboardCorporationsKills addLastWeekItem(
            FactionWarfareLeaderboardCorporationLastWeekKills lastWeekItem) {
        this.lastWeek.add(lastWeekItem);
        return this;
    }

    /**
     * Top 10 ranking of corporations by kills in the past week
     *
     * @return lastWeek
     **/
    @ApiModelProperty(required = true, value = "Top 10 ranking of corporations by kills in the past week")
    public List<FactionWarfareLeaderboardCorporationLastWeekKills> getLastWeek() {
        return lastWeek;
    }

    public void setLastWeek(List<FactionWarfareLeaderboardCorporationLastWeekKills> lastWeek) {
        this.lastWeek = lastWeek;
    }

    // Fluent setter for the yesterday leaderboard.
    public FactionWarfareLeaderboardCorporationsKills yesterday(
            List<FactionWarfareLeaderboardCorporationYesterdayKills> yesterday) {
        this.yesterday = yesterday;
        return this;
    }

    public FactionWarfareLeaderboardCorporationsKills addYesterdayItem(
            FactionWarfareLeaderboardCorporationYesterdayKills yesterdayItem) {
        this.yesterday.add(yesterdayItem);
        return this;
    }

    /**
     * Top 10 ranking of corporations by kills in the past day
     *
     * @return yesterday
     **/
    @ApiModelProperty(required = true, value = "Top 10 ranking of corporations by kills in the past day")
    public List<FactionWarfareLeaderboardCorporationYesterdayKills> getYesterday() {
        return yesterday;
    }

    public void setYesterday(List<FactionWarfareLeaderboardCorporationYesterdayKills> yesterday) {
        this.yesterday = yesterday;
    }

    // Value equality over all three lists, paired with hashCode below.
    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        FactionWarfareLeaderboardCorporationsKills factionWarfareLeaderboardCorporationsKills = (FactionWarfareLeaderboardCorporationsKills) o;
        return Objects.equals(this.activeTotal, factionWarfareLeaderboardCorporationsKills.activeTotal)
                && Objects.equals(this.lastWeek, factionWarfareLeaderboardCorporationsKills.lastWeek)
                && Objects.equals(this.yesterday, factionWarfareLeaderboardCorporationsKills.yesterday);
    }

    @Override
    public int hashCode() {
        return Objects.hash(activeTotal, lastWeek, yesterday);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class FactionWarfareLeaderboardCorporationsKills {\n");
        sb.append("    activeTotal: ").append(toIndentedString(activeTotal)).append("\n");
        sb.append("    lastWeek: ").append(toIndentedString(lastWeek)).append("\n");
        sb.append("    yesterday: ").append(toIndentedString(yesterday)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
apache-2.0
zoozooll/MyExercise
VV/asmackbeem/org/jivesoftware/smackx/MultipleRecipientManager.java
14092
/**
 * $RCSfile$
 * $Revision: $
 * $Date: $
 *
 * Copyright 2003-2006 Jive Software.
 *
 * All rights reserved. Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jivesoftware.smackx;

import org.jivesoftware.smack.Connection;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smack.packet.Packet;
import org.jivesoftware.smack.util.Cache;
import org.jivesoftware.smack.util.StringUtils;
import org.jivesoftware.smackx.packet.DiscoverInfo;
import org.jivesoftware.smackx.packet.DiscoverItems;
import org.jivesoftware.smackx.packet.MultipleAddresses;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * A MultipleRecipientManager allows to send packets to multiple recipients by making use of <a
 * href="http://www.jabber.org/jeps/jep-0033.html">JEP-33: Extended Stanza Addressing</a>. It also
 * allows to send replies to packets that were sent to multiple recipients.
 *
 * @author Gaston Dombiak
 */
public class MultipleRecipientManager {

    /**
     * Create a cache to hold the 100 most recently accessed elements for a period of 24 hours.
     * Maps a server's service name to the discovered multicast service address ("" = none found).
     */
    private static Cache services = new Cache(100, 24 * 60 * 60 * 1000);

    /**
     * Sends the specified packet to the list of specified recipients using the specified
     * connection. If the server has support for JEP-33 then only one packet is going to be sent to
     * the server with the multiple recipient instructions. However, if JEP-33 is not supported by
     * the server then the client is going to send the packet to each recipient.
     *
     * @param connection the connection to use to send the packet.
     * @param packet the packet to send to the list of recipients.
     * @param to the list of JIDs to include in the TO list or <tt>null</tt> if no TO list exists.
     * @param cc the list of JIDs to include in the CC list or <tt>null</tt> if no CC list exists.
     * @param bcc the list of JIDs to include in the BCC list or <tt>null</tt> if no BCC list
     *        exists.
     * @throws XMPPException if server does not support JEP-33: Extended Stanza Addressing and some
     *         JEP-33 specific features were requested.
     */
    public static void send(Connection connection, Packet packet, List to, List cc, List bcc)
            throws XMPPException {
        send(connection, packet, to, cc, bcc, null, null, false);
    }

    /**
     * Sends the specified packet to the list of specified recipients using the specified
     * connection. If the server has support for JEP-33 then only one packet is going to be sent to
     * the server with the multiple recipient instructions. However, if JEP-33 is not supported by
     * the server then the client is going to send the packet to each recipient.
     *
     * @param connection the connection to use to send the packet.
     * @param packet the packet to send to the list of recipients.
     * @param to the list of JIDs to include in the TO list or <tt>null</tt> if no TO list exists.
     * @param cc the list of JIDs to include in the CC list or <tt>null</tt> if no CC list exists.
     * @param bcc the list of JIDs to include in the BCC list or <tt>null</tt> if no BCC list
     *        exists.
     * @param replyTo address to which all replies are requested to be sent or <tt>null</tt>
     *        indicating that they can reply to any address.
     * @param replyRoom JID of a MUC room to which responses should be sent or <tt>null</tt>
     *        indicating that they can reply to any address.
     * @param noReply true means that receivers should not reply to the message.
     * @throws XMPPException if server does not support JEP-33: Extended Stanza Addressing and some
     *         JEP-33 specific features were requested.
     */
    public static void send(Connection connection, Packet packet, List to, List cc, List bcc,
            String replyTo, String replyRoom, boolean noReply) throws XMPPException {
        String serviceAddress = getMultipleRecipienServiceAddress(connection);
        if (serviceAddress != null) {
            // Send packet to target users using the multiple recipient service
            // provided by the server.
            sendThroughService(connection, packet, to, cc, bcc, replyTo, replyRoom, noReply,
                    serviceAddress);
        }
        else {
            // Server does not support JEP-33 so try to send the packet to each recipient.
            // The reply-control features can only be expressed through JEP-33, so their
            // presence here is an error the caller must be told about.
            if (noReply || (replyTo != null && replyTo.trim().length() > 0)
                    || (replyRoom != null && replyRoom.trim().length() > 0)) {
                throw new XMPPException("Extended Stanza Addressing not supported by server");
            }
            sendToIndividualRecipients(connection, packet, to, cc, bcc);
        }
    }

    /**
     * Sends a reply to a previously received packet that was sent to multiple recipients. Before
     * attempting to send the reply message some checkings are performed. If any of those checkings
     * fail then an XMPPException is going to be thrown with the specific error detail.
     *
     * @param connection the connection to use to send the reply.
     * @param original the previously received packet that was sent to multiple recipients.
     * @param reply the new message to send as a reply.
     * @throws XMPPException if the original message was not sent to multiple recipients, or the
     *         original message cannot be replied or reply should be sent to a room.
     */
    public static void reply(Connection connection, Message original, Message reply)
            throws XMPPException {
        MultipleRecipientInfo info = getMultipleRecipientInfo(original);
        if (info == null) {
            throw new XMPPException("Original message does not contain multiple recipient info");
        }
        if (info.shouldNotReply()) {
            throw new XMPPException("Original message should not be replied");
        }
        if (info.getReplyRoom() != null) {
            throw new XMPPException("Reply should be sent through a room");
        }
        // Any <thread/> element from the initial message MUST be copied into the reply.
        if (original.getThread() != null) {
            reply.setThread(original.getThread());
        }
        MultipleAddresses.Address replyAddress = info.getReplyAddress();
        if (replyAddress != null && replyAddress.getJid() != null) {
            // Send reply to the reply_to address.
            reply.setTo(replyAddress.getJid());
            connection.sendPacket(reply);
        }
        else {
            // No explicit reply address: reply to everyone on the original TO/CC lists.
            List to = new ArrayList();
            List cc = new ArrayList();
            for (Iterator it = info.getTOAddresses().iterator(); it.hasNext();) {
                String jid = ((MultipleAddresses.Address) it.next()).getJid();
                to.add(jid);
            }
            for (Iterator it = info.getCCAddresses().iterator(); it.hasNext();) {
                String jid = ((MultipleAddresses.Address) it.next()).getJid();
                cc.add(jid);
            }
            // Add original sender as a 'to' address (if not already present).
            if (!to.contains(original.getFrom()) && !cc.contains(original.getFrom())) {
                to.add(original.getFrom());
            }
            // Remove ourselves from the TO/CC list (try with bare JID too) so we
            // don't receive our own reply.
            String from = connection.getUser();
            if (!to.remove(from) && !cc.remove(from)) {
                String bareJID = StringUtils.parseBareAddress(from);
                to.remove(bareJID);
                cc.remove(bareJID);
            }
            String serviceAddress = getMultipleRecipienServiceAddress(connection);
            if (serviceAddress != null) {
                // Send packet to target users using the multiple recipient service
                // provided by the server.
                sendThroughService(connection, reply, to, cc, null, null, null, false,
                        serviceAddress);
            }
            else {
                // Server does not support JEP-33 so try to send the packet to each recipient.
                sendToIndividualRecipients(connection, reply, to, cc, null);
            }
        }
    }

    /**
     * Returns the {@link MultipleRecipientInfo} contained in the specified packet or <tt>null</tt>
     * if none was found. Only packets sent to multiple recipients will contain such information.
     *
     * @param packet the packet to check.
     * @return the MultipleRecipientInfo contained in the specified packet or <tt>null</tt> if none
     *         was found.
     */
    public static MultipleRecipientInfo getMultipleRecipientInfo(Packet packet) {
        MultipleAddresses extension = (MultipleAddresses) packet.getExtension("addresses",
                "http://jabber.org/protocol/address");
        return extension == null ? null : new MultipleRecipientInfo(extension);
    }

    /**
     * Fallback path: sends an individual copy of the packet to every JID in the
     * TO, CC and BCC lists. A {@link PacketCopy} snapshot is used because the same
     * Packet instance cannot be re-queued with different TO addresses.
     */
    private static void sendToIndividualRecipients(Connection connection, Packet packet, List to,
            List cc, List bcc) {
        if (to != null) {
            for (Iterator it = to.iterator(); it.hasNext();) {
                String jid = (String) it.next();
                packet.setTo(jid);
                connection.sendPacket(new PacketCopy(packet.toXML()));
            }
        }
        if (cc != null) {
            for (Iterator it = cc.iterator(); it.hasNext();) {
                String jid = (String) it.next();
                packet.setTo(jid);
                connection.sendPacket(new PacketCopy(packet.toXML()));
            }
        }
        if (bcc != null) {
            for (Iterator it = bcc.iterator(); it.hasNext();) {
                String jid = (String) it.next();
                packet.setTo(jid);
                connection.sendPacket(new PacketCopy(packet.toXML()));
            }
        }
    }

    /**
     * Sends one packet, annotated with a JEP-33 addresses extension, to the
     * discovered multicast service which then fans it out to all recipients.
     */
    private static void sendThroughService(Connection connection, Packet packet, List to, List cc,
            List bcc, String replyTo, String replyRoom, boolean noReply, String serviceAddress) {
        // Create multiple recipient extension.
        MultipleAddresses multipleAddresses = new MultipleAddresses();
        if (to != null) {
            for (Iterator it = to.iterator(); it.hasNext();) {
                String jid = (String) it.next();
                multipleAddresses.addAddress(MultipleAddresses.TO, jid, null, null, false, null);
            }
        }
        if (cc != null) {
            for (Iterator it = cc.iterator(); it.hasNext();) {
                String jid = (String) it.next();
                multipleAddresses.addAddress(MultipleAddresses.CC, jid, null, null, false, null);
            }
        }
        if (bcc != null) {
            for (Iterator it = bcc.iterator(); it.hasNext();) {
                String jid = (String) it.next();
                multipleAddresses.addAddress(MultipleAddresses.BCC, jid, null, null, false, null);
            }
        }
        if (noReply) {
            multipleAddresses.setNoReply();
        }
        else {
            if (replyTo != null && replyTo.trim().length() > 0) {
                multipleAddresses
                        .addAddress(MultipleAddresses.REPLY_TO, replyTo, null, null, false, null);
            }
            if (replyRoom != null && replyRoom.trim().length() > 0) {
                multipleAddresses.addAddress(MultipleAddresses.REPLY_ROOM, replyRoom, null, null,
                        false, null);
            }
        }
        // Set the multiple recipient service address as the target address.
        packet.setTo(serviceAddress);
        // Add extension to packet.
        packet.addExtension(multipleAddresses);
        // Send the packet.
        connection.sendPacket(packet);
    }

    /**
     * Returns the address of the multiple recipients service. To obtain such address service
     * discovery is going to be used on the connected server and if none was found then another
     * attempt will be tried on the server items. The discovered information is going to be cached
     * for 24 hours.
     *
     * @param connection the connection to use for disco. The connected server is going to be
     *        queried.
     * @return the address of the multiple recipients service or <tt>null</tt> if none was found.
     */
    private static String getMultipleRecipienServiceAddress(Connection connection) {
        String serviceName = connection.getServiceName();
        String serviceAddress = (String) services.get(serviceName);
        if (serviceAddress == null) {
            synchronized (services) {
                // Double-check after acquiring the lock: another thread may have
                // completed discovery while we were waiting.
                serviceAddress = (String) services.get(serviceName);
                if (serviceAddress == null) {
                    try {
                        // Send the disco packet to the server itself.
                        DiscoverInfo info = ServiceDiscoveryManager.getInstanceFor(connection)
                                .discoverInfo(serviceName);
                        // Check if the server supports JEP-33.
                        if (info.containsFeature("http://jabber.org/protocol/address")) {
                            serviceAddress = serviceName;
                        }
                        else {
                            // The server itself does not support it; check each of the
                            // server's items (components). If an item advertises the
                            // extended-addressing feature, the multicast service is that
                            // ITEM, so stanzas must be addressed to the item's JID.
                            // (Bug fix: this previously assigned serviceName here, routing
                            // multicast stanzas to a server we just determined does NOT
                            // support JEP-33.)
                            DiscoverItems items = ServiceDiscoveryManager.getInstanceFor(connection)
                                    .discoverItems(serviceName);
                            for (Iterator it = items.getItems(); it.hasNext();) {
                                DiscoverItems.Item item = (DiscoverItems.Item) it.next();
                                info = ServiceDiscoveryManager.getInstanceFor(connection)
                                        .discoverInfo(item.getEntityID(), item.getNode());
                                if (info.containsFeature("http://jabber.org/protocol/address")) {
                                    serviceAddress = item.getEntityID();
                                    break;
                                }
                            }
                        }
                        // Cache the discovered information ("" marks "looked up, none found"
                        // so failed lookups are not repeated for 24 hours).
                        services.put(serviceName, serviceAddress == null ? "" : serviceAddress);
                    }
                    catch (XMPPException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        return "".equals(serviceAddress) ? null : serviceAddress;
    }

    /**
     * Packet that holds the XML stanza to send. This class is useful when the same packet is needed
     * to be sent to different recipients. Since using the same packet is not possible (i.e. cannot
     * change the TO address of a queues packet to be sent) then this class was created to keep the
     * XML stanza to send.
     */
    private static class PacketCopy extends Packet {

        private String text;

        /**
         * Create a copy of a packet with the text to send. The passed text must be a valid text to
         * send to the server, no validation will be done on the passed text.
         *
         * @param text the whole text of the packet to send
         */
        public PacketCopy(String text) {
            this.text = text;
        }

        @Override
        public String toXML() {
            return text;
        }
    }
}
apache-2.0
pietern/caffe2
caffe2/python/operator_test/sequence_ops_test.py
11491
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################

# Hypothesis-based tests for caffe2 sequence operators (AddPadding,
# RemovePadding, GatherPadding, ReversePackedSegs, RemoveDataBlocks,
# FindDuplicateElements). Each operator is checked against a pure-numpy
# reference implementation via assertReferenceChecks.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from caffe2.python import core
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from functools import partial


def _gen_test_add_padding(with_pad_data=True, is_remove=False):
    """Build a hypothesis strategy producing (lengths, data[, start_pad, end_pad]).

    `lengths` is an int32 vector of segment lengths; `data` is a flat tensor
    whose leading dimension is sum(lengths). When `is_remove` is True the
    minimum segment length is 4 so padding can be stripped without underflow.
    """
    def gen_with_size(args):
        lengths, inner_shape = args
        data_dim = [sum(lengths)] + inner_shape
        lengths = np.array(lengths, dtype=np.int32)
        if with_pad_data:
            return st.tuples(
                st.just(lengths),
                hu.arrays(data_dim),
                hu.arrays(inner_shape),
                hu.arrays(inner_shape))
        else:
            return st.tuples(st.just(lengths), hu.arrays(data_dim))

    min_len = 4 if is_remove else 0
    lengths = st.lists(
        st.integers(min_value=min_len, max_value=10),
        min_size=0,
        max_size=5)
    inner_shape = st.lists(
        st.integers(min_value=1, max_value=3),
        min_size=0,
        max_size=2)
    return st.tuples(lengths, inner_shape).flatmap(gen_with_size)


def _add_padding_ref(
        start_pad_width, end_pad_width, data,
        lengths, start_padding=None, end_padding=None):
    """Reference for AddPadding: insert start/end pad rows around each segment.

    Returns (padded_data, lengths_out) where every segment length grows by
    start_pad_width + end_pad_width. Missing paddings default to zeros
    (end_padding falls back to start_padding).
    """
    if start_padding is None:
        start_padding = np.zeros(data.shape[1:], dtype=data.dtype)
    end_padding = (
        end_padding if end_padding is not None else start_padding)
    out_size = data.shape[0] + (
        start_pad_width + end_pad_width) * len(lengths)
    out = np.ndarray((out_size,) + data.shape[1:])
    in_ptr = 0
    out_ptr = 0
    # Walk both buffers in lockstep: pad, copy segment, pad.
    for length in lengths:
        out[out_ptr:(out_ptr + start_pad_width)] = start_padding
        out_ptr += start_pad_width
        out[out_ptr:(out_ptr + length)] = data[in_ptr:(in_ptr + length)]
        in_ptr += length
        out_ptr += length
        out[out_ptr:(out_ptr + end_pad_width)] = end_padding
        out_ptr += end_pad_width
    lengths_out = lengths + (start_pad_width + end_pad_width)
    return (out, lengths_out)


def _remove_padding_ref(start_pad_width, end_pad_width, data, lengths):
    """Reference for RemovePadding: inverse of _add_padding_ref.

    Strips start_pad_width rows from the front and end_pad_width rows from
    the back of each segment; assumes every length >= pad_width.
    """
    pad_width = start_pad_width + end_pad_width
    out_size = data.shape[0] - (
        start_pad_width + end_pad_width) * len(lengths)
    out = np.ndarray((out_size,) + data.shape[1:])
    in_ptr = 0
    out_ptr = 0
    for length in lengths:
        out_length = length - pad_width
        out[out_ptr:(out_ptr + out_length)] = data[
            (in_ptr + start_pad_width):(in_ptr + length - end_pad_width)]
        in_ptr += length
        out_ptr += out_length
    lengths_out = lengths - (start_pad_width + end_pad_width)
    return (out, lengths_out)


def _gather_padding_ref(start_pad_width, end_pad_width, data, lengths):
    """Reference for GatherPadding: sum the pad rows of each padded segment.

    Returns (start_padding, end_padding) accumulated across all segments.
    """
    start_padding = np.zeros(data.shape[1:], dtype=data.dtype)
    end_padding = np.zeros(data.shape[1:], dtype=data.dtype)
    pad_width = start_pad_width + end_pad_width
    ptr = 0
    for length in lengths:
        for _ in range(start_pad_width):
            start_padding += data[ptr]
            ptr += 1
        ptr += length - pad_width
        for _ in range(end_pad_width):
            end_padding += data[ptr]
            ptr += 1
    return (start_padding, end_padding)


class TestSequenceOps(hu.HypothesisTestCase):
    @given(start_pad_width=st.integers(min_value=1, max_value=2),
           end_pad_width=st.integers(min_value=0, max_value=2),
           args=_gen_test_add_padding(with_pad_data=True),
           **hu.gcs)
    def test_add_padding(self, start_pad_width, end_pad_width, args, gc, dc):
        # AddPadding with explicit start/end padding blobs.
        lengths, data, start_padding, end_padding = args
        start_padding = np.array(start_padding, dtype=np.float32)
        end_padding = np.array(end_padding, dtype=np.float32)
        op = core.CreateOperator(
            'AddPadding',
            ['data', 'lengths', 'start_padding', 'end_padding'],
            ['output', 'lengths_out'],
            padding_width=start_pad_width,
            end_padding_width=end_pad_width)
        self.assertReferenceChecks(
            device_option=gc,
            op=op,
            inputs=[data, lengths, start_padding, end_padding],
            reference=partial(_add_padding_ref, start_pad_width, end_pad_width))

    @given(start_pad_width=st.integers(min_value=1, max_value=2),
           end_pad_width=st.integers(min_value=0, max_value=2),
           args=_gen_test_add_padding(with_pad_data=False))
    def test_add_zero_padding(self, start_pad_width, end_pad_width, args):
        # AddPadding without padding inputs: implicit zero padding.
        lengths, data = args
        op = core.CreateOperator(
            'AddPadding',
            ['data', 'lengths'],
            ['output', 'lengths_out'],
            padding_width=start_pad_width,
            end_padding_width=end_pad_width)
        self.assertReferenceChecks(
            hu.cpu_do,
            op,
            [data, lengths],
            partial(_add_padding_ref, start_pad_width, end_pad_width))

    @given(start_pad_width=st.integers(min_value=1, max_value=2),
           end_pad_width=st.integers(min_value=0, max_value=2),
           data=hu.tensor(min_dim=1, max_dim=3))
    def test_add_padding_no_length(self, start_pad_width, end_pad_width, data):
        # AddPadding without lengths: the whole tensor is one segment.
        op = core.CreateOperator(
            'AddPadding',
            ['data'],
            ['output', 'output_lens'],
            padding_width=start_pad_width,
            end_padding_width=end_pad_width)
        self.assertReferenceChecks(
            hu.cpu_do,
            op,
            [data],
            partial(
                _add_padding_ref, start_pad_width, end_pad_width,
                lengths=np.array([data.shape[0]])))

    @given(start_pad_width=st.integers(min_value=1, max_value=2),
           end_pad_width=st.integers(min_value=0, max_value=2),
           args=_gen_test_add_padding(with_pad_data=False, is_remove=True))
    def test_remove_padding(self, start_pad_width, end_pad_width, args):
        # is_remove=True guarantees each segment is long enough to strip.
        lengths, data = args
        op = core.CreateOperator(
            'RemovePadding',
            ['data', 'lengths'],
            ['output', 'lengths_out'],
            padding_width=start_pad_width,
            end_padding_width=end_pad_width)
        self.assertReferenceChecks(
            hu.cpu_do,
            op,
            [data, lengths],
            partial(_remove_padding_ref, start_pad_width, end_pad_width))

    @given(start_pad_width=st.integers(min_value=1, max_value=2),
           end_pad_width=st.integers(min_value=0, max_value=2),
           args=_gen_test_add_padding(with_pad_data=True))
    def test_gather_padding(self, start_pad_width, end_pad_width, args):
        # Round-trip: pad with the reference, then gather the padding back.
        lengths, data, start_padding, end_padding = args
        padded_data, padded_lengths = _add_padding_ref(
            start_pad_width, end_pad_width, data,
            lengths, start_padding, end_padding)
        op = core.CreateOperator(
            'GatherPadding',
            ['data', 'lengths'],
            ['start_padding', 'end_padding'],
            padding_width=start_pad_width,
            end_padding_width=end_pad_width)
        self.assertReferenceChecks(
            hu.cpu_do,
            op,
            [padded_data, padded_lengths],
            partial(_gather_padding_ref, start_pad_width, end_pad_width))

    @given(data=hu.tensor(min_dim=3, max_dim=3, dtype=np.float32,
                          elements=st.floats(min_value=-np.inf,
                                             max_value=np.inf),
                          min_value=1, max_value=10),
           **hu.gcs)
    def test_reverse_packed_segs(self, data, gc, dc):
        # data layout assumed (max_length, batch_size, feature); each column i
        # has its first lengths[i] entries reversed in place.
        max_length = data.shape[0]
        batch_size = data.shape[1]
        lengths = np.random.randint(max_length + 1, size=batch_size)
        op = core.CreateOperator(
            "ReversePackedSegs",
            ["data", "lengths"],
            ["reversed_data"])

        def op_ref(data, lengths):
            rev_data = np.array(data, copy=True)
            for i in range(batch_size):
                seg_length = lengths[i]
                for j in range(seg_length):
                    rev_data[j][i] = data[seg_length - 1 - j][i]
            return (rev_data,)

        def op_grad_ref(grad_out, outputs, inputs):
            # Reversal is self-inverse, so the gradient is the reversed grad.
            return op_ref(grad_out, inputs[1]) + (None,)

        self.assertReferenceChecks(
            device_option=gc,
            op=op,
            inputs=[data, lengths],
            reference=op_ref,
            output_to_grad='reversed_data',
            grad_reference=op_grad_ref)

    @given(data=hu.tensor(min_dim=1, max_dim=3, dtype=np.float32,
                          elements=st.floats(min_value=-np.inf,
                                             max_value=np.inf),
                          min_value=10, max_value=10),
           indices=st.lists(st.integers(min_value=0, max_value=9),
                            min_size=0,
                            max_size=10),
           **hu.gcs_cpu_only)
    def test_remove_data_blocks(self, data, indices, gc, dc):
        # Duplicated indices must be removed only once (hence np.unique below).
        indices = np.array(indices)
        op = core.CreateOperator(
            "RemoveDataBlocks",
            ["data", "indices"],
            ["shrunk_data"])

        def op_ref(data, indices):
            unique_indices = np.unique(indices)
            sorted_indices = np.sort(unique_indices)
            shrunk_data = np.delete(data, sorted_indices, axis=0)
            return (shrunk_data,)

        self.assertReferenceChecks(
            device_option=gc,
            op=op,
            inputs=[data, indices],
            reference=op_ref)

    @given(elements=st.lists(st.integers(min_value=0, max_value=9),
                             min_size=0,
                             max_size=10),
           **hu.gcs_cpu_only)
    def test_find_duplicate_elements(self, elements, gc, dc):
        # Map small ints to distinct byte strings so duplicates are by value.
        mapping = {
            0: "a",
            1: "b",
            2: "c",
            3: "d",
            4: "e",
            5: "f",
            6: "g",
            7: "h",
            8: "i",
            9: "j"}
        data = np.array([mapping[e] for e in elements], dtype='|S')
        op = core.CreateOperator(
            "FindDuplicateElements",
            ["data"],
            ["indices"])

        def op_ref(data):
            # Indices of every occurrence after the first of each value.
            unique_data = []
            indices = []
            for i, e in enumerate(data):
                if e in unique_data:
                    indices.append(i)
                else:
                    unique_data.append(e)
            return (np.array(indices, dtype=np.int64),)

        self.assertReferenceChecks(
            device_option=gc,
            op=op,
            inputs=[data],
            reference=op_ref)


if __name__ == "__main__":
    import unittest
    unittest.main()
apache-2.0
REALTOBIZ/neolua
NeoLua/Parser.cs
81300
using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Linq.Expressions; using System.Reflection; using System.Text; namespace Neo.IronLua { #region -- class Parser ------------------------------------------------------------- /////////////////////////////////////////////////////////////////////////////// /// <summary></summary> internal static partial class Parser { private const string csReturnLabel = "#return"; private const string csBreakLabel = "#break"; private const string csContinueLabel = "#continue"; private const string csEnv = "_G"; private const string csArgListP = "#arglistP"; private const string csArgList = "#arglist"; private const string csClr = "clr"; #region -- class Scope ------------------------------------------------------------ /////////////////////////////////////////////////////////////////////////////// /// <summary>Scope, that builds a block.</summary> private class Scope { private Scope parent; // Parent-Scope, that is accessable private Dictionary<string, Expression> scopeVariables = null; // local declared variables or const definitions private List<Expression> block = new List<Expression>(); // Instructions in the current block private bool lBlockGenerated = false; // Is the block generated #region -- Ctor ----------------------------------------------------------------- /// <summary>Creates the scope</summary> /// <param name="parent">parent-scope</param> public Scope(Scope parent) { this.parent = parent; } // ctor #endregion #region -- LookupParameter ------------------------------------------------------ /// <summary>Creates a new variable in the current scope.</summary> /// <param name="type">Type of the variable</param> /// <param name="sName">Name of the variable</param> /// <returns>The expression that represents the access to the variable.</returns> public ParameterExpression RegisterVariable(Type type, string sName) { return 
RegisterVariable(Expression.Variable(type, sName)); } // proc RegisterVariable /// <summary></summary> /// <param name="expr"></param> /// <returns></returns> public ParameterExpression RegisterVariable(ParameterExpression expr) { RegisterVariableOrConst(expr.Name, expr); return expr; } // proc RegisterVariable public void RegisterConst(string sName, ConstantExpression expr) { RegisterVariableOrConst(sName, expr); } // proc RegisterConst private void RegisterVariableOrConst(string sName, Expression expr) { if (scopeVariables == null) scopeVariables = new Dictionary<string, Expression>(); scopeVariables[sName] = expr; } // proc EnsureVariables /// <summary>Looks up the variable/parameter/const through the scopes.</summary> /// <param name="sName">Name of the variable</param> /// <param name="lLocalOnly"></param> /// <returns>The access-expression for the variable, parameter or <c>null</c>, if it is not registered.</returns> public virtual Expression LookupExpression(string sName, bool lLocalOnly = false) { // Lookup the current scope Expression p; if (scopeVariables != null && scopeVariables.TryGetValue(sName, out p)) return p; if (parent != null && !lLocalOnly) // lookup the parent scope return parent.LookupExpression(sName); else return null; } // func LookupParameter #endregion #region -- LookupLabel ---------------------------------------------------------- /// <summary>Create a named label or look for an existing</summary> /// <param name="type">Returntype for the label</param> /// <param name="sName">Name for the label</param> /// <returns>Labeltarget</returns> public virtual LabelTarget LookupLabel(Type type, string sName) { return parent.LookupLabel(type, sName); } // func LookupLabel #endregion #region -- Expression Block ----------------------------------------------------- [Conditional("DEBUG")] private void CheckBlockGenerated() { if (lBlockGenerated) throw new InvalidOperationException(); } // proc CheckBlockGenerated public void InsertExpression(int 
iIndex, Expression expr) { CheckBlockGenerated(); block.Insert(iIndex, expr); } // proc AddExpression public void AddExpression(Expression expr) { CheckBlockGenerated(); block.Add(expr); } // proc AddExpression /// <summary>Close the expression block and return it.</summary> public virtual Expression ExpressionBlock { get { CheckBlockGenerated(); lBlockGenerated = true; if (block.Count == 0) return Expression.Empty(); else { ParameterExpression[] variables = Variables; if (variables.Length == 0) return Expression.Block(block); else if (ExpressionBlockType == null) return Expression.Block(variables, block); else return Expression.Block(ExpressionBlockType, variables, block); } } } // func ExpressionBlock public Type ExpressionBlockType { get; set; } public bool ExistExpressions { get { return block.Count > 0; } } #endregion /// <summary>Access to the Lua-Binders</summary> public virtual Lua Runtime { get { return parent.Runtime; } } /// <summary>Emit-Debug-Information</summary> public virtual LuaDebugLevel EmitDebug { get { return parent.EmitDebug; } } /// <summary>Options</summary> public virtual LuaCompileOptions Options { get { return parent.Options; } } /// <summary>DebugInfo on expression level</summary> public bool EmitExpressionDebug { get { return (EmitDebug & LuaDebugLevel.Expression) != 0; } } /// <summary>Return type of the current Lambda-Scope</summary> public virtual Type ReturnType { get { return parent.ReturnType; } } /// <summary></summary> public ParameterExpression[] Variables { get { return scopeVariables == null ? 
new ParameterExpression[0] : (from v in scopeVariables.Values where v is ParameterExpression select (ParameterExpression)v).ToArray(); } } } // class Scope #endregion #region -- class LoopScope -------------------------------------------------------- /////////////////////////////////////////////////////////////////////////////// /// <summary>Scope that represents the loop content.</summary> private class LoopScope : Scope { private LabelTarget continueLabel = Expression.Label(csContinueLabel); private LabelTarget breakLabel = Expression.Label(csBreakLabel); #region -- Ctor ----------------------------------------------------------------- /// <summary>Scope that represents the loop content.</summary> /// <param name="parent"></param> public LoopScope(Scope parent) : base(parent) { } // ctor #endregion #region -- LookupLabel ---------------------------------------------------------- /// <summary>Creates or lookup the label</summary> /// <param name="type">Type of the label. Is ignored on std. 
labels.</param> /// <param name="sName">Name of the label.</param> /// <returns>LabelTarget</returns> public override LabelTarget LookupLabel(Type type, string sName) { if (sName == csBreakLabel) return breakLabel; else if (sName == csContinueLabel) return continueLabel; else return base.LookupLabel(type, sName); } // func LookupLabel #endregion /// <summary>Default break position.</summary> public LabelTarget BreakLabel { get { return breakLabel; } } /// <summary>Default continue position.</summary> public LabelTarget ContinueLabel { get { return continueLabel; } } } // class LambdaScope #endregion #region -- class LambdaScope ------------------------------------------------------ /////////////////////////////////////////////////////////////////////////////// /// <summary>Lambda-Scope with labels and parameters.</summary> private class LambdaScope : Scope { #if DEBUG private bool lReturnLabelUsed = false; #endif private LabelTarget returnLabel; private Expression returnDefaultValue; private Dictionary<string, LabelTarget> labels = null; private Dictionary<string, ParameterExpression> parameters = new Dictionary<string, ParameterExpression>(); #region -- Ctor ----------------------------------------------------------------- /// <summary>Creates the lambda-scope, that manages labels and arguments.</summary> /// <param name="parent"></param> /// <param name="returnType"></param> /// <param name="returnDefaultValue"></param> public LambdaScope(Scope parent, Type returnType = null, Expression returnDefaultValue = null) : base(parent) { if (returnType != null) ResetReturnLabel(returnType, returnDefaultValue); } // ctor #endregion #region -- RegisterParameter, LookupParameter ----------------------------------- /// <summary>Registers arguments for the function.</summary> /// <param name="type">Type of the argument</param> /// <param name="sName">Name of the argument</param> /// <returns>Access to the argument</returns> public ParameterExpression RegisterParameter(Type 
type, string sName) { return parameters[sName] = Expression.Parameter(type, sName); } // proc RegisterParameter /// <summary>Lookup the variables and arguments.</summary> /// <param name="sName">Name of the parameter/variable.</param> /// <param name="lLocalOnly"></param> /// <returns></returns> public override Expression LookupExpression(string sName, bool lLocalOnly = false) { ParameterExpression p; if (parameters.TryGetValue(sName, out p)) return p; return base.LookupExpression(sName, lLocalOnly); } // func LookupParameter #endregion #region -- LookupLabel ---------------------------------------------------------- public override LabelTarget LookupLabel(Type type, string sName) { if (sName == csReturnLabel) return returnLabel; if (labels == null) labels = new Dictionary<string, LabelTarget>(); if (type == null) type = typeof(void); // Lookup the label LabelTarget l; if (labels.TryGetValue(sName, out l)) return l; // Create the label, if it is not internal if (sName[0] == '#') throw new ArgumentException("Internal label does not exist."); return labels[sName] = Expression.Label(type, sName); } // func LookupLabel #endregion public void ResetReturnLabel(Type returnType, Expression returnDefaultValue) { #if DEBUG if (lReturnLabelUsed) throw new InvalidOperationException("Reset is not allowed after expressions added."); #endif this.returnLabel = Expression.Label(returnType, csReturnLabel); this.returnDefaultValue = returnDefaultValue; } // proc ResetReturnLabel public override Expression ExpressionBlock { get { AddExpression(Expression.Label(returnLabel, returnDefaultValue == null ? 
Expression.Default(returnLabel.Type) : returnDefaultValue)); return base.ExpressionBlock; } } // prop ExpressionBlock public LabelTarget ReturnLabel { get { #if DEBUG lReturnLabelUsed = true; #endif return returnLabel; } } // prop ReturnLabel public override Type ReturnType { get { return returnLabel.Type; } } } // class LambdaScope #endregion #region -- class GlobalScope ------------------------------------------------------ /////////////////////////////////////////////////////////////////////////////// /// <summary>Global parse-scope.</summary> private class GlobalScope : LambdaScope { private Lua runtime; private LuaCompileOptions options; private LuaDebugLevel debug; /// <summary>Global parse-scope</summary> /// <param name="runtime">Runtime and binder of the global scope.</param> /// <param name="options"></param> /// <param name="returnType"></param> /// <param name="returnDefaultValue"></param> public GlobalScope(Lua runtime, LuaCompileOptions options, Type returnType, Expression returnDefaultValue) : base(null, returnType, returnDefaultValue) { this.runtime = runtime; this.options = options; this.debug = options.DebugEngine == null ? 
LuaDebugLevel.None : options.DebugEngine.Level; } // ctor /// <summary>Access to the binders</summary> public override Lua Runtime { get { return runtime; } } /// <summary>Emit-Debug-Information</summary> public override LuaDebugLevel EmitDebug { get { return debug; } } /// <summary>Options</summary> public override LuaCompileOptions Options { get { return options; } } } // class GlobalScope #endregion #region -- enum InvokeResult ------------------------------------------------------ public enum InvokeResult { None, Object, LuaResult } // enum GenerateResult #endregion #region -- class PrefixMemberInfo ------------------------------------------------- /////////////////////////////////////////////////////////////////////////////// /// <summary>Mini-Parse-Tree for resolve of prefix expressions</summary> private class PrefixMemberInfo { public PrefixMemberInfo(Token position, Expression instance, string sMember, Expression[] indices, Expression[] arguments) { this.Position = position; this.Instance = instance; this.Member = sMember; this.Indices = indices; this.Arguments = arguments; } // ctor public Expression GenerateSet(Scope scope, Expression exprToSet) { Expression expr; if (Instance != null && Member == null && Indices != null && Arguments == null) expr = IndexSetExpression(scope.Runtime, Position, Instance, Indices, exprToSet); else if (Instance != null && Member != null && Indices == null && Arguments == null) return MemberSetExpression(scope.Runtime, Position, Instance, Member, MethodMember, exprToSet); else if (Instance != null && Member == null && Indices == null && Arguments == null && Instance is ParameterExpression) { // Assign the value to a variable expr = Expression.Assign(Instance, ConvertExpression(scope.Runtime, Position, exprToSet, Instance.Type)); } else throw ParseError(Position, Properties.Resources.rsParseExpressionNotAssignable); return expr; } // func GenerateSet public Expression GenerateGet(Scope scope, InvokeResult result) { if (Instance 
!= null && Member == null && Indices != null && Arguments == null) { if (Indices.Length > 0) { // First the arguments are pushed on the stack, and later comes the call, so we wrap the last parameter Indices[Indices.Length - 1] = WrapDebugInfo(scope.EmitExpressionDebug, true, Position, Position, Indices[Indices.Length - 1]); // Let the type as it is } Instance = IndexGetExpression(scope, Position, Instance, Indices); Indices = null; } else if (Instance != null && Member != null && Indices == null && Arguments == null) { // Convert the member to an instance Instance = WrapDebugInfo(scope.EmitExpressionDebug, true, Position, Position, Instance); Instance = MemberGetExpression(scope, Position, Instance, Member); Member = null; MethodMember = false; } else if (Instance != null && Member == null && Indices == null && Arguments == null) { // Nothing to todo, we have already an instance } else if (Instance != null && Indices == null && Arguments != null) { if (Arguments.Length > 0) { // First the arguments are pushed on the stack, and later comes the call, so we wrap the last parameter Arguments[Arguments.Length - 1] = WrapDebugInfo(scope.EmitExpressionDebug, true, Position, Position, Arguments[Arguments.Length - 1]); // Let the type as it is } else Instance = WrapDebugInfo(scope.EmitExpressionDebug, true, Position, Position, Instance); if (String.IsNullOrEmpty(Member)) Instance = InvokeExpression(scope, Position, Instance, result, Arguments, true); else Instance = InvokeMemberExpression(scope, Position, Instance, Member, result, Arguments); Member = null; MethodMember = false; Arguments = null; } else throw ParseError(Position, Properties.Resources.rsParseExpressionNoResult); return Instance; } // func GenerateGet public void SetMember(Token tMember, bool lMethod) { Position = tMember; MethodMember = lMethod; Member = tMember.Value; } // proc SetMember public Token Position { get; set; } public Expression Instance { get; set; } public string Member { get; private set; } 
public bool MethodMember { get; private set; } public Expression[] Indices { get; set; } public Expression[] Arguments { get; set; } } // class PrefixMemberInfo #endregion #region -- Parse Chunk, Block ----------------------------------------------------- /// <summary>Parses the chunk to an function.</summary> /// <param name="runtime">Binder</param> /// <param name="options">Compile options for the script.</param> /// <param name="lHasEnvironment">Creates the _G parameter.</param> /// <param name="code">Lexer for the code.</param> /// <param name="typeDelegate">Type for the delegate. <c>null</c>, for an automatic type</param> /// <param name="returnType">Defines the return type of the chunk.</param> /// <param name="args">Arguments of the function.</param> /// <returns>Expression-Tree for the code.</returns> public static LambdaExpression ParseChunk(Lua runtime, LuaCompileOptions options, bool lHasEnvironment, LuaLexer code, Type typeDelegate, Type returnType, IEnumerable<KeyValuePair<string, Type>> args) { List<ParameterExpression> parameters = new List<ParameterExpression>(); if (returnType == null) returnType = typeof(LuaResult); var globalScope = new GlobalScope(runtime, options, returnType, returnType == typeof(LuaResult) ? 
Expression.Property(null, Lua.ResultEmptyPropertyInfo) : null); // Registers the global LuaTable if (lHasEnvironment) parameters.Add(globalScope.RegisterParameter(typeof(LuaTable), csEnv)); if (args != null) { foreach (var c in args) parameters.Add(globalScope.RegisterParameter(c.Value, c.Key)); // Add alle arguments } // Get the first token if (code.Current == null) code.Next(); // Get the name for the chunk and clean it from all unwanted chars string sChunkName = CreateNameFromFile(code.Current.Start.FileName); if ((globalScope.EmitDebug & LuaDebugLevel.RegisterMethods) == LuaDebugLevel.RegisterMethods) sChunkName = Lua.RegisterUniqueName(sChunkName); // Create the block ParseBlock(globalScope, code); if (code.Current.Typ != LuaToken.Eof) throw ParseError(code.Current, Properties.Resources.rsParseEof); // Create the function return typeDelegate == null ? Expression.Lambda(globalScope.ExpressionBlock, sChunkName, parameters) : Expression.Lambda(typeDelegate, globalScope.ExpressionBlock, sChunkName, parameters); } // func ParseChunk private static string CreateNameFromFile(string sFileName) { StringBuilder sbName = new StringBuilder(Path.GetFileNameWithoutExtension(sFileName)); int iLength = sbName.Length; for (int i = 0; i < iLength; i++) { switch (sbName[i]) { case '.': case ';': case ',': case '+': case ':': sbName[i] = '_'; break; } } return sbName.ToString(); } // func CreateNameFromFile private static void ParseBlock(Scope scope, LuaLexer code) { // Lese die Statement int iLastDebugInfo = -1; bool lLoop = true; while (lLoop) { bool lDebugInfoEmitted = false; if ((scope.EmitDebug & LuaDebugLevel.Line) != 0) // debug info for line { if (code.Current.Start.Line != iLastDebugInfo) { iLastDebugInfo = code.Current.Start.Line; scope.AddExpression(GetDebugInfo(code.Current, code.Current)); lDebugInfoEmitted = true; } } switch (code.Current.Typ) { case LuaToken.Eof: // End of file lLoop = false; break; case LuaToken.KwReturn: // The return-statement is only allowed on 
the end of a scope ParseReturn(scope, code); break; case LuaToken.KwBreak: // The break-statement is only allowed on the end of a scope ParseBreak(scope, code); lLoop = false; break; case LuaToken.Semicolon: // End of statement => ignore code.Next(); break; default: if (!lDebugInfoEmitted && (scope.EmitDebug & LuaDebugLevel.Expression) != 0) // Start every statement with a debug point scope.AddExpression(GetDebugInfo(code.Current, code.Current)); if (!ParseStatement(scope, code)) // Parse normal statements lLoop = false; break; } } if (scope.EmitDebug != LuaDebugLevel.None) scope.AddExpression(Expression.ClearDebugInfo(code.Current.Start.Document)); // Clear debug info } // func ParseBlock private static void ParseReturn(Scope scope, LuaLexer code) { // eat return code.Next(); // Build the return expression for all parameters Expression exprReturnValue; if (IsExpressionStart(code)) // there is a return value { if (scope.ReturnType == typeof(LuaResult)) { exprReturnValue = GetLuaResultExpression(scope, code.Current, ParseExpressionList(scope, code).ToArray()); } else if (scope.ReturnType.IsArray) { Type typeArray = scope.ReturnType.GetElementType(); exprReturnValue = Expression.NewArrayInit( typeArray, from c in ParseExpressionList(scope, code) select ConvertExpression(scope.Runtime, code.Current, c, typeArray)); } else { List<Expression> exprList = new List<Expression>(ParseExpressionList(scope, code)); if (exprList.Count == 1) exprReturnValue = ConvertExpression(scope.Runtime, code.Current, exprList[0], scope.ReturnType); else { ParameterExpression tmpVar = Expression.Variable(scope.ReturnType); exprList[0] = Expression.Assign(tmpVar, ConvertExpression(scope.Runtime, code.Current, exprList[0], scope.ReturnType)); exprList.Add(tmpVar); exprReturnValue = Expression.Block(scope.ReturnType, new ParameterExpression[] { tmpVar }, exprList); } } } else // use the default-value { if (scope.ReturnType == typeof(LuaResult)) exprReturnValue = Expression.Property(null, 
Lua.ResultEmptyPropertyInfo); else if (scope.ReturnType.IsArray) exprReturnValue = Expression.NewArrayInit(scope.ReturnType.GetElementType()); else exprReturnValue = Expression.Default(scope.ReturnType); } if (code.Current.Typ == LuaToken.Semicolon) code.Next(); scope.AddExpression(Expression.Goto(scope.LookupLabel(scope.ReturnType, csReturnLabel), exprReturnValue)); } // func ParseReturn private static Expression GetLuaResultExpression(Scope scope, Token tStart, Expression[] exprs) { Expression exprReturnValue; if (exprs.Length == 1) if (exprs[0].Type == typeof(LuaResult)) exprReturnValue = exprs[0]; else exprReturnValue = Expression.New(Lua.ResultConstructorInfoArg1, ConvertExpression(scope.Runtime, tStart, exprs[0], typeof(object))); else exprReturnValue = Expression.New(Lua.ResultConstructorInfoArgN, Expression.NewArrayInit(typeof(object), from c in exprs select Expression.Convert(c, typeof(object)) ) ); return exprReturnValue; } // func GetLuaResultExpression private static bool IsExpressionStart(LuaLexer code) { return code.Current.Typ == LuaToken.BracketOpen || code.Current.Typ == LuaToken.Identifier || code.Current.Typ == LuaToken.DotDotDot || code.Current.Typ == LuaToken.String || code.Current.Typ == LuaToken.Number || code.Current.Typ == LuaToken.KwTrue || code.Current.Typ == LuaToken.KwFalse || code.Current.Typ == LuaToken.KwNil || code.Current.Typ == LuaToken.BracketCurlyOpen || code.Current.Typ == LuaToken.Minus || code.Current.Typ == LuaToken.Dilde || code.Current.Typ == LuaToken.Cross || code.Current.Typ == LuaToken.KwNot || code.Current.Typ == LuaToken.KwFunction || code.Current.Typ == LuaToken.KwCast; } // func IsExpressionStart #endregion #region -- Parse Statement -------------------------------------------------------- private static bool ParseStatement(Scope scope, LuaLexer code) { switch (code.Current.Typ) { case LuaToken.Identifier: // Expression case LuaToken.DotDotDot: case LuaToken.BracketOpen: case LuaToken.String: case LuaToken.Number: 
case LuaToken.KwFalse: case LuaToken.KwTrue: case LuaToken.KwNil: case LuaToken.BracketCurlyOpen: case LuaToken.Minus: case LuaToken.KwCast: ParseExpressionStatement(scope, code, false); return true; case LuaToken.ColonColon: // Start of a label ParseLabel(scope, code); return true; case LuaToken.KwGoto: ParseGoto(scope, code); return true; case LuaToken.KwDo: ParseDoLoop(scope, code); return true; case LuaToken.KwWhile: ParseWhileLoop(scope, code); return true; case LuaToken.KwRepeat: ParseRepeatLoop(scope, code); return true; case LuaToken.KwIf: ParseIfStatement(scope, code); return true; case LuaToken.KwFor: ParseForLoop(scope, code); return true; case LuaToken.KwForEach: ParseForEachLoop(scope, code); return true; case LuaToken.KwFunction: ParseFunction(scope, code, false); return true; case LuaToken.KwLocal: code.Next(); if (code.Current.Typ == LuaToken.KwFunction) ParseFunction(scope, code, true); else ParseExpressionStatement(scope, code, true); return true; case LuaToken.KwConst: code.Next(); ParseConst(scope, code); return true; case LuaToken.InvalidString: throw ParseError(code.Current, Properties.Resources.rsParseInvalidString); case LuaToken.InvalidComment: throw ParseError(code.Current, Properties.Resources.rsParseInvalidComment); case LuaToken.InvalidChar: throw ParseError(code.Current, Properties.Resources.rsParseInvalidChar); default: return false; } } // func ParseStatement private static void ParseExpressionStatement(Scope scope, LuaLexer code, bool lLocal) { List<ParameterExpression> registerLocals = null; List<PrefixMemberInfo> prefixes = new List<PrefixMemberInfo>(); // parse the assgiee list (var0, var1, var2, ...) 
while (true) { if (lLocal) // parse local variables { Token tVar; Type typeVar; ParseIdentifierAndType(scope, code, out tVar, out typeVar); ParameterExpression exprVar = scope.LookupExpression(tVar.Value, true) as ParameterExpression; if (exprVar == null) { exprVar = Expression.Variable(typeVar, tVar.Value); if (registerLocals == null) registerLocals = new List<ParameterExpression>(); registerLocals.Add(exprVar); } else if (exprVar.Type != typeVar) throw ParseError(tVar, Properties.Resources.rsParseTypeRedef); prefixes.Add(new PrefixMemberInfo(tVar, exprVar, null, null, null)); } else // parse a assignee { // parse as a prefix prefixes.Add(ParsePrefix(scope, code)); } // is there another prefix if (code.Current.Typ == LuaToken.Comma) code.Next(); else break; } // Optional assign if (code.Current.Typ == LuaToken.Assign) { code.Next(); // parse all expressions IEnumerator<Expression> expr = ParseExpressionList(scope, code).GetEnumerator(); expr.MoveNext(); if (prefixes.Count == 1) // one expression, one variable? { scope.AddExpression( prefixes[0].GenerateSet(scope, expr.Current != null ? 
expr.Current : Expression.Constant(null, typeof(object))) ); } else if (expr.Current == null) // No expression, assign null { for (int i = 0; i < prefixes.Count; i++) scope.AddExpression(prefixes[i].GenerateSet(scope, Expression.Constant(null, typeof(object)))); } else // assign on an unknown number of expressions { #region -- unknown number -- List<ParameterExpression> assignTempVars = new List<ParameterExpression>(); List<Expression> assignExprs = new List<Expression>(); int iExpressionVarOffset; // Safe the prefixes in variables for (int k = 0; k < prefixes.Count; k++) { var p = prefixes[k]; if (p.Member != null || prefixes[k].Indices != null) { p.Instance = ParseExpressionStatementExchangeToTempVar(assignTempVars, assignExprs, p.Instance); if (p.Indices != null) { for (int l = 0; l < p.Indices.Length; l++) p.Indices[l] = ParseExpressionStatementExchangeToTempVar(assignTempVars, assignExprs, p.Indices[l]); } } } // collect the results of the expressions iExpressionVarOffset = assignTempVars.Count; do { ParseExpressionStatementExchangeToTempVar(assignTempVars, assignExprs, expr.Current); } while (expr.MoveNext()); // Assign the Result to the prefixes int i = 0; int j = 0; ParameterExpression lastVariable = null; while (i < prefixes.Count) { if (i < assignTempVars.Count - iExpressionVarOffset) // are the variables { if (i == assignTempVars.Count - iExpressionVarOffset - 1 && assignTempVars[i + iExpressionVarOffset].Type == typeof(LuaResult)) // check if the last expression is a LuaResult { lastVariable = assignTempVars[i + iExpressionVarOffset]; assignExprs.Add(prefixes[i].GenerateSet(scope, GetResultExpression(scope.Runtime, code.Current, lastVariable, j++))); } else { assignExprs.Add(prefixes[i].GenerateSet(scope, assignTempVars[i + iExpressionVarOffset])); } } else if (lastVariable != null) // we enroll the last expression { assignExprs.Add(prefixes[i].GenerateSet(scope, GetResultExpression(scope.Runtime, code.Current, lastVariable, j++))); } else // no 
variable left { assignExprs.Add(prefixes[i].GenerateSet(scope, Expression.Default(typeof(object)))); } i++; } // add the block scope.AddExpression(Expression.Block(assignTempVars, assignExprs)); #endregion } // Führe die restlichen Expressions aus while (expr.MoveNext()) scope.AddExpression(expr.Current); } else if (!lLocal) { for (int i = 0; i < prefixes.Count; i++) { if (prefixes[i].Arguments == null) // do not execute getMember throw ParseError(prefixes[i].Position, Properties.Resources.rsParseAssignmentExpected); scope.AddExpression(prefixes[i].GenerateGet(scope, InvokeResult.None)); } } // register the variables if (registerLocals != null) { for (int i = 0; i < registerLocals.Count; i++) scope.RegisterVariable(registerLocals[i]); } } // proc ParseExpressionStatement private static ParameterExpression ParseExpressionStatementExchangeToTempVar(List<ParameterExpression> assignTempVars, List<Expression> assignExprs, Expression expr) { ParameterExpression tmpVar = Expression.Variable(expr.Type); assignTempVars.Add(tmpVar); assignExprs.Add(Expression.Assign(tmpVar, expr)); return tmpVar; } // func ParseExpressionStatementExchangeToTempVar private static void ParseIfStatement(Scope scope, LuaLexer code) { // if expr then block { elseif expr then block } [ else block ] end FetchToken(LuaToken.KwIf, code); var expr = ConvertExpression(scope.Runtime, code.Current, ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug), typeof(bool)); FetchToken(LuaToken.KwThen, code); scope.AddExpression(Expression.IfThenElse(expr, ParseIfElseBlock(scope, code), ParseElseStatement(scope, code))); } // proc ParseIfStatement private static Expression ParseElseStatement(Scope scope, LuaLexer code) { if (code.Current.Typ == LuaToken.KwElseif) { code.Next(); var expr = ConvertExpression(scope.Runtime, code.Current, ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug), typeof(bool)); FetchToken(LuaToken.KwThen, code); return 
Expression.IfThenElse(expr, ParseIfElseBlock(scope, code), ParseElseStatement(scope, code)); } else if (code.Current.Typ == LuaToken.KwElse) { code.Next(); var block = ParseIfElseBlock(scope, code); FetchToken(LuaToken.KwEnd, code); return block; } else if (code.Current.Typ == LuaToken.KwEnd) { code.Next(); return Expression.Empty(); } else throw ParseError(code.Current, Properties.Resources.rsParseUnexpectedTokenElse); } // func ParseElseStatement private static Expression ParseIfElseBlock(Scope parent, LuaLexer code) { Scope scope = new Scope(parent); ParseBlock(scope, code); return scope.ExpressionBlock; } // func ParseIfElseBlock private static void ParseConst(Scope scope, LuaLexer code) { // const ::= variable '=' ( expr | clr '.' Type ) Token tVarName; Type typeVar; ParseIdentifierAndType(scope, code, out tVarName, out typeVar); if (code.Current.Typ == LuaToken.Identifier || code.Current.Value == "typeof") { code.Next(); // Parse the type scope.RegisterConst(tVarName.Value, Expression.Constant(ParseType(scope, code, false))); } else { FetchToken(LuaToken.Assign, code); Expression exprConst = ParseExpression(scope, code, InvokeResult.Object, false); // No Debug-Emits if (typeVar != typeof(object)) exprConst = ConvertExpression(scope.Runtime, tVarName, exprConst, typeVar); // Try to eval the statement if (exprConst.Type == typeof(object) || exprConst.Type == typeof(LuaResult)) // dynamic calls, no constant possible throw ParseError(tVarName, Properties.Resources.rsConstExpressionNeeded); else try { object r = EvaluateExpression(exprConst); if (r == null) // Eval via compile { Type typeFunc = Expression.GetFuncType(exprConst.Type); LambdaExpression exprEval = Expression.Lambda(typeFunc, exprConst); Delegate dlg = exprEval.Compile(); r = dlg.DynamicInvoke(); } scope.RegisterConst(tVarName.Value, Expression.Constant(r, exprConst.Type)); } catch (Exception e) { throw ParseError(tVarName, String.Format(Properties.Resources.rsConstExpressionEvalError, e.Message)); } 
	}
} // func ParseConst

#endregion

#region -- Evaluate Expression ----------------------------------------------------

/// <summary>Tries to fold an expression at compile time. Only constant
/// expressions are evaluated; every other expression yields <c>null</c>.</summary>
private static object EvaluateExpression(Expression expr)
{
	if (expr is ConstantExpression)
		return EvaluateConstantExpression((ConstantExpression)expr);
	else
		return null; // not a compile-time constant
} // func EvaluateExpression

/// <summary>Converts the value of a constant expression to its declared type
/// using the Lua runtime conversion rules.</summary>
private static object EvaluateConstantExpression(ConstantExpression expr)
{
	return Lua.RtConvertValue(expr.Value, expr.Type);
} // func EvaluateConstantExpression

#endregion

#region -- Parse Prefix, Suffix ---------------------------------------------------

/// <summary>Parses a prefix expression (identifier, literal, parenthesised
/// expression, table constructor, cast or function literal) and then hands it
/// to <c>ParseSuffix</c> for index/member/call suffixes.</summary>
private static PrefixMemberInfo ParsePrefix(Scope scope, LuaLexer code)
{
	// prefix ::= Identifier suffix_opt | '(' exp ')' suffix | literal | tablector
	Token tStart = code.Current;
	PrefixMemberInfo info;
	switch (tStart.Typ)
	{
		case LuaToken.BracketOpen: // parse a parenthesised expression
			{
				code.Next();
				var expr = ConvertObjectExpression(scope.Runtime, tStart, ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug));
				FetchToken(LuaToken.BracketClose, code);
				info = new PrefixMemberInfo(tStart, expr, null, null, null);
			}
			break;

		case LuaToken.DotDotDot:
		case LuaToken.Identifier:
		case LuaToken.KwForEach:
			var t = code.Current;
			if (t.Value == csClr) // clr is a special package, that always exists
			{
				code.Next();
				info = new PrefixMemberInfo(tStart, Expression.Property(null, Lua.TypeClrPropertyInfo), null, null, null);
			}
			else
			{
				// map the special tokens to their member names
				string sMemberName;
				if (t.Typ == LuaToken.DotDotDot)
					sMemberName = csArgList;
				else if (t.Typ == LuaToken.KwCast)
					sMemberName = "cast";
				else if (t.Typ == LuaToken.KwForEach)
					sMemberName = "foreach";
				else
					sMemberName = t.Value;

				var p = scope.LookupExpression(sMemberName);
				if (t.Typ == LuaToken.DotDotDot && p == null)
					throw ParseError(t, Properties.Resources.rsParseNoArgList);
				code.Next();
				if (p == null) // no local variable found, resolve against the environment
					info = new PrefixMemberInfo(tStart, scope.LookupExpression(csEnv), t.Value, null, null);
				else
					info = new PrefixMemberInfo(tStart, p, null, null, null);
			}
			break;

		case LuaToken.KwCast:
			info = new PrefixMemberInfo(tStart, ParsePrefixCast(scope, code), null, null, null);
			break;

		case LuaToken.String: // literal string
			info = new PrefixMemberInfo(tStart, Expression.Constant(FetchToken(LuaToken.String, code).Value, typeof(string)), null, null, null);
			break;

		case LuaToken.Number: // literal number
			info = new PrefixMemberInfo(tStart, ParseNumber(scope.Runtime, FetchToken(LuaToken.Number, code)), null, null, null);
			break;

		case LuaToken.KwTrue: // literal true
			code.Next();
			info = new PrefixMemberInfo(tStart, Expression.Constant(true, typeof(bool)), null, null, null);
			break;

		case LuaToken.KwFalse: // literal false
			code.Next();
			info = new PrefixMemberInfo(tStart, Expression.Constant(false, typeof(bool)), null, null, null);
			break;

		case LuaToken.KwNil: // literal nil
			code.Next();
			info = new PrefixMemberInfo(tStart, Expression.Constant(null, typeof(object)), null, null, null);
			break;

		case LuaToken.BracketCurlyOpen: // table constructor
			info = new PrefixMemberInfo(tStart, ParseTableConstructor(scope, code), null, null, null);
			break;

		case LuaToken.KwFunction: // function definition (lambda)
			code.Next();
			info = new PrefixMemberInfo(tStart, ParseLamdaDefinition(scope, code, "lambda", false, null), null, null, null);
			break;

		default:
			throw ParseError(code.Current, Properties.Resources.rsParseUnexpectedTokenPrefix);
	}

	return ParseSuffix(scope, code, info);
} // func ParsePrefix

/// <summary>Consumes index, member-access and call suffixes after a prefix
/// expression until no further suffix token follows.</summary>
private static PrefixMemberInfo ParseSuffix(Scope scope, LuaLexer code, PrefixMemberInfo info)
{
	// suffix_opt ::= [ suffix ]
	// suffix ::= { '[' exp ']' | '.' Identifier | args | ':' Identifier args }
	// args ::= tablector | string | '(' explist ')'

	while (true)
	{
		switch (code.Current.Typ)
		{
			case LuaToken.BracketSquareOpen: // index access
				code.Next();
				info.GenerateGet(scope, InvokeResult.Object);
				if (code.Current.Typ == LuaToken.BracketSquareClose)
					info.Indices = new Expression[0];
				else
					info.Indices = ParseExpressionList(scope, code).ToArray();
				FetchToken(LuaToken.BracketSquareClose, code);
				break;

			case LuaToken.Dot: // property of a class
				code.Next();
				info.GenerateGet(scope, InvokeResult.Object);
				info.SetMember(FetchToken(LuaToken.Identifier, code), false);
				break;

			case LuaToken.BracketOpen: // list of arguments
				info.GenerateGet(scope, InvokeResult.Object);
				info.Arguments = ParseArgumentList(scope, code);
				break;

			case LuaToken.BracketCurlyOpen: // LuaTable as the single argument
				info.GenerateGet(scope, InvokeResult.Object);
				info.Arguments = new Expression[] { ParseTableConstructor(scope, code) };
				break;

			case LuaToken.String: // string as the single argument
				info.GenerateGet(scope, InvokeResult.Object);
				info.Arguments = new Expression[] { Expression.Constant(FetchToken(LuaToken.String, code).Value, typeof(object)) };
				break;

			case LuaToken.Colon: // method call
				code.Next();

				// read the name to set the member
				info.GenerateGet(scope, InvokeResult.Object);
				info.SetMember(FetchToken(LuaToken.Identifier, code), true);

				// parse the parameters
				switch (code.Current.Typ)
				{
					case LuaToken.BracketOpen: // argument list
						info.Arguments = ParseArgumentList(scope, code);
						break;
					case LuaToken.BracketCurlyOpen: // LuaTable as argument
						info.Arguments = new Expression[] { ParseTableConstructor(scope, code) }; ;
						break;
					case LuaToken.String: // string as argument
						info.Arguments = new Expression[] { Expression.Constant(FetchToken(LuaToken.String, code).Value, typeof(string)) }; ;
						break;
				}
				break;

			default:
				return info;
		}
	}
} // func ParseSuffix

/// <summary>Parses a bracketed argument list into an array of expressions.</summary>
private static Expression[] ParseArgumentList(Scope scope, LuaLexer code)
{
	FetchToken(LuaToken.BracketOpen,
	code); // "Es handelt sich um ein Delegate" (original note: it is a delegate)

	// empty argument list?
	if (code.Current.Typ == LuaToken.BracketClose)
	{
		code.Next();
		return new Expression[0];
	}
	else
	{
		var args = ParseExpressionList(scope, code).ToArray();
		FetchToken(LuaToken.BracketClose, code);
		return args;
	}
} // func ParseArgumentList

#endregion

#region -- Parse Number, HexNumber ------------------------------------------------

/// <summary>Converts a number token into a constant expression; an empty token
/// yields the integer zero of the runtime's configured number type.</summary>
private static Expression ParseNumber(Lua runtime, Token t)
{
	string sNumber = t.Value;
	if (String.IsNullOrEmpty(sNumber))
		return Expression.Constant(0, Lua.GetIntegerType(runtime.NumberType));
	else
	{
		object v = Lua.RtParseNumber(sNumber, runtime.FloatType == LuaFloatType.Double, false);
		if (v != null)
			return Expression.Constant(v);
		else
			throw ParseError(t, String.Format(Properties.Resources.rsParseConvertNumberError, sNumber));
	}
} // func ParseNumber

#endregion

#region -- Parse Expressions ------------------------------------------------------

/// <summary>Lazily parses a comma separated list of expressions.</summary>
private static IEnumerable<Expression> ParseExpressionList(Scope scope, LuaLexer code)
{
	while (true)
	{
		yield return ParseExpression(scope, code, InvokeResult.LuaResult, scope.EmitExpressionDebug);

		// another expression?
		if (code.Current.Typ == LuaToken.Comma)
			code.Next();
		else
			break;
	}
} // func ParseExpressionList

/// <summary>Entry point for expression parsing; optionally wraps the result
/// with debug info when the expression required wrapping and debug is on.</summary>
private static Expression ParseExpression(Scope scope, LuaLexer code, InvokeResult result, bool lDebug)
{
	Token tStart = code.Current;
	bool lWrap = false;
	Expression expr = ParseExpression(scope, code, result, ref lWrap);
	if (lWrap && lDebug)
		return WrapDebugInfo(true, false, tStart, code.Current, expr);
	else
		return expr;
} // func ParseExpression

private static Expression ParseExpression(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// expr ::= exprOr
	return ParseExpressionOr(scope, code, result, ref lWrap);
} // func ParseExpression

// The following methods implement the operator-precedence chain, lowest
// precedence first: or < and < | < ~ < & < comparison < .. < shift < +- < */ < unary < ^.

private static Expression ParseExpressionOr(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprOr ::= exprAnd { or exprAnd }
	var expr = ParseExpressionAnd(scope, code, result, ref lWrap);

	while (code.Current.Typ == LuaToken.KwOr)
	{
		code.Next();
		expr = BinaryOperationExpression(scope.Runtime, code.Current, ExpressionType.OrElse, expr, ParseExpressionAnd(scope, code, InvokeResult.Object, ref lWrap));
		lWrap |= true;
	}

	return expr;
} // func ParseExpressionOr

private static Expression ParseExpressionAnd(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprAnd ::= exprBitOr { and exprBitOr }
	var expr = ParseExpressionBitOr(scope, code, result, ref lWrap);

	while (code.Current.Typ == LuaToken.KwAnd)
	{
		code.Next();
		expr = BinaryOperationExpression(scope.Runtime, code.Current, ExpressionType.AndAlso, expr, ParseExpressionBitOr(scope, code, InvokeResult.Object, ref lWrap));
		lWrap |= true;
	}

	return expr;
} // func ParseExpressionAnd

private static Expression ParseExpressionBitOr(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprBitOr ::= exprBitXOr { | exprBitXOr }
	var expr = ParseExpressionBitXOr(scope, code, result, ref lWrap);

	while (code.Current.Typ == LuaToken.BitOr)
	{
		code.Next();
		expr = BinaryOperationExpression(scope.Runtime, code.Current, ExpressionType.Or,
			expr,
			ParseExpressionBitXOr(scope, code, InvokeResult.Object, ref lWrap)
		);
		lWrap |= true;
	}

	return expr;
} // func ParseExpressionBitOr

private static Expression ParseExpressionBitXOr(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprBitXOr ::= exprBitAnd { ~ exprBitAnd }
	var expr = ParseExpressionBitAnd(scope, code, result, ref lWrap);

	while (code.Current.Typ == LuaToken.Dilde)
	{
		code.Next();
		expr = BinaryOperationExpression(scope.Runtime, code.Current, ExpressionType.ExclusiveOr,
			expr,
			ParseExpressionBitAnd(scope, code, InvokeResult.Object, ref lWrap)
		);
		lWrap |= true;
	}

	return expr;
} // func ParseExpressionBitXOr

private static Expression ParseExpressionBitAnd(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprBitAnd ::= exprCmp { & exprCmp }
	var expr = ParseExpressionCmp(scope, code, result, ref lWrap);

	while (code.Current.Typ == LuaToken.BitAnd)
	{
		code.Next();
		expr = BinaryOperationExpression(scope.Runtime, code.Current, ExpressionType.And,
			expr,
			ParseExpressionCmp(scope, code, InvokeResult.Object, ref lWrap)
		);
		lWrap |= true;
	}

	return expr;
} // func ParseExpressionBitAnd

private static Expression ParseExpressionCmp(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprCmp ::= exprCon { ( < | > | <= | >= | ~= | == ) exprCon }
	Token tStart = code.Current;
	var expr = ParseExpressionCon(scope, code, result, ref lWrap);

	while (true)
	{
		LuaToken tokenTyp = code.Current.Typ;
		ExpressionType exprTyp;
		if (tokenTyp == LuaToken.Lower)
			exprTyp = ExpressionType.LessThan;
		else if (tokenTyp == LuaToken.Greater)
			exprTyp = ExpressionType.GreaterThan;
		else if (tokenTyp == LuaToken.LowerEqual)
			exprTyp = ExpressionType.LessThanOrEqual;
		else if (tokenTyp == LuaToken.GreaterEqual)
			exprTyp = ExpressionType.GreaterThanOrEqual;
		else if (tokenTyp == LuaToken.NotEqual)
			exprTyp = ExpressionType.NotEqual;
		else if (tokenTyp == LuaToken.Equal)
			exprTyp = ExpressionType.Equal;
		else
			return expr;
		code.Next();

		expr = BinaryOperationExpression(scope.Runtime, code.Current, exprTyp, expr, ParseExpressionCon(scope, code, InvokeResult.Object, ref lWrap));
		lWrap |= true;
	}
} // func ParseExpressionCmp

private static Expression ParseExpressionCon(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprCon ::= exprShift { '..' exprShift }
	List<Expression> exprs = new List<Expression>();
	exprs.Add(ParseExpressionShift(scope, code, result, ref lWrap));

	while (code.Current.Typ == LuaToken.DotDot)
	{
		code.Next();
		exprs.Add(ParseExpressionShift(scope, code, InvokeResult.Object, ref lWrap));
	}

	// create the concat expression
	if (exprs.Count > 1)
	{
		lWrap |= true;
		return ConcatOperationExpression(scope.Runtime, code.Current, exprs.ToArray());
	}
	else
		return exprs[0];
} // func ParseExpressionCon

private static Expression ParseExpressionShift(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprShift ::= exprPlus { ( << | >> ) exprPlus }
	var expr = ParseExpressionPlus(scope, code, result, ref lWrap);

	while (true)
	{
		LuaToken tokenTyp = code.Current.Typ;
		ExpressionType exprTyp;

		if (tokenTyp == LuaToken.ShiftLeft)
			exprTyp = ExpressionType.LeftShift;
		else if (tokenTyp == LuaToken.ShiftRight)
			exprTyp = ExpressionType.RightShift;
		else
			return expr;
		code.Next();

		expr = BinaryOperationExpression(scope.Runtime, code.Current, exprTyp, expr, ParseExpressionPlus(scope, code, InvokeResult.Object, ref lWrap));
		lWrap |= true;
	}
} // func ParseExpressionShift

private static Expression ParseExpressionPlus(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprPlus ::= exprMul { ( + | - ) exprMul }
	var expr = ParseExpressionMultiply(scope, code, result, ref lWrap);

	while (true)
	{
		LuaToken tokenTyp = code.Current.Typ;
		ExpressionType exprTyp;
		if (tokenTyp == LuaToken.Plus)
			exprTyp = ExpressionType.Add;
		else if (tokenTyp == LuaToken.Minus)
			exprTyp = ExpressionType.Subtract;
		else
			return expr;
		code.Next();

		expr = BinaryOperationExpression(scope.Runtime, code.Current, exprTyp, expr, ParseExpressionMultiply(scope, code, InvokeResult.Object, ref lWrap));
		lWrap |= true;
	}
} // func ParseExpressionPlus

private static Expression ParseExpressionMultiply(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprMul ::= exprUn { ( * | / | // | % ) exprUn }
	var expr = ParseExpressionUnary(scope, code, result, ref lWrap);

	while (true)
	{
		LuaToken tokenTyp = code.Current.Typ;
		ExpressionType exprTyp;
		if (tokenTyp == LuaToken.Star)
			exprTyp = ExpressionType.Multiply;
		else if (tokenTyp == LuaToken.Slash)
			exprTyp = ExpressionType.Divide;
		else if (tokenTyp == LuaToken.SlashShlash)
			exprTyp = Lua.IntegerDivide;
		else if (tokenTyp == LuaToken.Percent)
			exprTyp = ExpressionType.Modulo;
		else
			return expr;
		code.Next();

		expr = BinaryOperationExpression(scope.Runtime, code.Current, exprTyp, expr, ParseExpressionUnary(scope, code, InvokeResult.Object, ref lWrap));
		lWrap |= true;
	}
} // func ParseExpressionMultiply

private static Expression ParseExpressionUnary(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprUn ::= { 'not' | - | # | ~ } exprPow
	LuaToken typ = code.Current.Typ;
	if (typ == LuaToken.KwNot ||
			typ == LuaToken.Minus ||
			typ == LuaToken.Dilde ||
			typ == LuaToken.Cross)
	{
		code.Next();
		Expression expr = ParseExpressionUnary(scope, code, InvokeResult.Object, ref lWrap);
		lWrap |= true;

		ExpressionType exprType;
		if (typ == LuaToken.KwNot)
			exprType = ExpressionType.Not;
		else if (typ == LuaToken.Minus)
			exprType = ExpressionType.Negate;
		else if (typ == LuaToken.Dilde)
			exprType = ExpressionType.OnesComplement;
		else
			exprType = ExpressionType.ArrayLength; // '#' length operator

		lWrap |= true;
		return UnaryOperationExpression(scope.Runtime, code.Current, exprType, expr);
	}
	else
		return ParseExpressionPower(scope, code, result, ref lWrap);
} // func ParseExpressionUnary

private static Expression ParseExpressionPower(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// exprPow ::= cast [ ^ exprPow ]
	Expression expr = ParseExpressionCast(scope, code, result, ref lWrap);

	if (code.Current.Typ == LuaToken.Caret)
	{
		code.Next();
		lWrap |= true;
		// '^' is right-associative, hence the recursive call
		return BinaryOperationExpression(scope.Runtime, code.Current, ExpressionType.Power, expr, ParseExpressionPower(scope, code, InvokeResult.Object, ref lWrap));
	}
	else
		return expr;
} // func ParseExpressionPower

private static Expression
ParseExpressionCast(Scope scope, LuaLexer code, InvokeResult result, ref bool lWrap)
{
	// cast ::= cast(type, expr)
	if (code.Current.Typ == LuaToken.KwCast)
	{
		Token tStart = code.Current;
		lWrap |= true;

		// a cast can also carry suffixes (member access, calls, ...)
		PrefixMemberInfo prefix = new PrefixMemberInfo(tStart, ParsePrefixCast(scope, code), null, null, null);
		ParseSuffix(scope, code, prefix);
		return prefix.GenerateGet(scope, result);
	}
	else
		return ParsePrefix(scope, code).GenerateGet(scope, result);
} // func ParseExpressionCast

/// <summary>Parses the <c>cast(type, expr)</c> construct and emits the
/// conversion of the expression to the given type.</summary>
private static Expression ParsePrefixCast(Scope scope, LuaLexer code)
{
	LuaType luaType;
	Token t = code.Current;
	code.Next();
	FetchToken(LuaToken.BracketOpen, code);

	// read the type
	luaType = ParseType(scope, code, true);
	FetchToken(LuaToken.Comma, code);

	bool lWrap = scope.EmitExpressionDebug;
	Expression expr = ParseExpression(scope, code, InvokeResult.Object, ref lWrap);

	FetchToken(LuaToken.BracketClose, code);

	return ConvertExpression(scope.Runtime, t, expr, luaType);
} // func ParsePrefixCast

/// <summary>Parses an identifier with an optional type annotation; without an
/// annotation the type defaults to <c>object</c>.</summary>
private static void ParseIdentifierAndType(Scope scope, LuaLexer code, out Token tName, out Type type)
{
	// var ::= name ':' type
	tName = FetchToken(LuaToken.Identifier, code);
	if (code.Current.Typ == LuaToken.Colon)
	{
		code.Next();
		type = ParseType(scope, code, true);
	}
	else
		type = typeof(object);
} // func ParseIdentifierAndType

/// <summary>Parses a (possibly dotted, generic or array) type name; when
/// <paramref name="lNeedType"/> is set, an unresolved type raises a parse error.</summary>
private static LuaType ParseType(Scope scope, LuaLexer code, bool lNeedType)
{
	// is the first token an alias
	LuaType currentType = ParseFirstType(scope, code);

	while (code.Current.Typ == LuaToken.Dot ||
		code.Current.Typ == LuaToken.Plus ||
		code.Current.Typ == LuaToken.BracketSquareOpen)
	{
		if (code.Current.Typ == LuaToken.BracketSquareOpen)
		{
			// '[...]' — either an array suffix (empty) or generic type arguments
			List<LuaType> genericeTypes = new List<LuaType>();
			code.Next();
			if (code.Current.Typ != LuaToken.BracketSquareClose)
			{
				genericeTypes.Add(ParseType(scope, code, lNeedType));
				while (code.Current.Typ == LuaToken.Comma)
				{
					code.Next();
					genericeTypes.Add(ParseType(scope, code, lNeedType));
				}
			}
			FetchToken(LuaToken.BracketSquareClose, code);

			if (genericeTypes.Count == 0) // create an array type
			{
				if (currentType.Type == null)
					throw ParseError(code.Current, String.Format(Properties.Resources.rsParseUnknownType, currentType.FullName));

				currentType = LuaType.GetType(currentType.GetIndex("[]", false, () => currentType.Type.MakeArrayType()));
			}
			else // build a generic type (CLR name mangling: Name`arity)
			{
				Type typeGeneric = Type.GetType(currentType.FullName + "`" + genericeTypes.Count.ToString());
				if (typeGeneric == null)
					throw ParseError(code.Current, String.Format(Properties.Resources.rsParseUnknownType, currentType.FullName));

				currentType = LuaType.GetType(currentType.GetGenericItem(typeGeneric, genericeTypes.ToArray()));
			}
		}
		else
		{
			// '.' or '+' — descend into a namespace/nested type
			code.Next();
			currentType = LuaType.GetType(currentType.GetIndex(FetchToken(LuaToken.Identifier, code).Value, false, null));
		}
	}

	if (lNeedType && currentType.Type == null)
		throw ParseError(code.Current, String.Format(Properties.Resources.rsParseUnknownType, currentType.FullName));

	return currentType;
} // func ParseType

/// <summary>Resolves the first identifier of a type name: cached alias first,
/// then a local constant of type LuaType, finally the clr root namespace.</summary>
private static LuaType ParseFirstType(Scope scope, LuaLexer code)
{
	string sType = FetchToken(LuaToken.Identifier, code).Value;
	LuaType luaType = LuaType.GetCachedType(sType);
	if (luaType == null)
	{
		ConstantExpression expr = scope.LookupExpression(sType, false) as ConstantExpression;
		if (expr != null && expr.Type == typeof(LuaType))
			return (LuaType)expr.Value;
		else
			return LuaType.GetType(LuaType.Clr.GetIndex(sType, false, null));
	}
	else
		return luaType;
} // func ParseFirstType

#endregion

#region -- Parse Goto, Label ------------------------------------------------------

private static void ParseGoto(Scope scope, LuaLexer code)
{
	// goto Identifier
	FetchToken(LuaToken.KwGoto, code);

	var t = FetchToken(LuaToken.Identifier, code);
	scope.AddExpression(Expression.Goto(scope.LookupLabel(null, t.Value)));
} // proc ParseGoto

private static void ParseLabel(Scope scope, LuaLexer code)
{
	// ::identifier::
	FetchToken(LuaToken.ColonColon, code);

	// create the label
	scope.AddExpression(Expression.Label(scope.LookupLabel(null, FetchToken(LuaToken.Identifier, code).Value)));

	FetchToken(LuaToken.ColonColon, code);
} // proc ParseLabel

#endregion

#region -- Parse Loops ------------------------------------------------------------

/// <summary>Parses a do-block; an optional bracketed declaration introduces
/// disposable variables which are disposed in a generated finally block.</summary>
private static void ParseDoLoop(Scope scope, LuaLexer code)
{
	// doloop ::= do '(' name { ',' name } = expr { ',' expr } ')' block end

	// create an empty block, that can be used as a loop
	Scope outerScope = new Scope(scope);
	Expression[] exprFinally = null;

	// fetch do
	FetchToken(LuaToken.KwDo, code);
	if (code.Current.Typ == LuaToken.BracketOpen) // look for disposable variables
	{
		code.Next();
		ParseExpressionStatement(outerScope, code, true);

		// build the finally-block for the declared variables
		exprFinally = (
			from c in outerScope.Variables
			select Expression.IfThen(
				Expression.TypeIs(c, typeof(IDisposable)),
				Expression.Call(Expression.Convert(c, typeof(IDisposable)), Lua.DisposeDisposeMethodInfo)
			)).ToArray();

		FetchToken(LuaToken.BracketClose, code);
	}

	LoopScope loopScope = new LoopScope(outerScope);

	// add the continue label after the declaration
	loopScope.AddExpression(Expression.Label(loopScope.ContinueLabel));
	// parse the block
	ParseBlock(loopScope, code);
	// create the break label
	loopScope.AddExpression(Expression.Label(loopScope.BreakLabel));

	FetchToken(LuaToken.KwEnd, code);

	if (exprFinally != null && exprFinally.Length > 0)
	{
		outerScope.AddExpression(
			Expression.TryFinally(
				loopScope.ExpressionBlock,
				Expression.Block(exprFinally)
			)
		);
		scope.AddExpression(outerScope.ExpressionBlock);
	}
	else
		scope.AddExpression(loopScope.ExpressionBlock);
} // ParseDoLoop

/// <summary>Parses a while-loop into a label/goto based expression block.</summary>
private static void ParseWhileLoop(Scope scope, LuaLexer code)
{
	// while expr do block end;

	LoopScope loopScope = new LoopScope(scope);

	// get the condition expression
	FetchToken(LuaToken.KwWhile, code);

	loopScope.AddExpression(Expression.Label(loopScope.ContinueLabel));

	loopScope.AddExpression(
		Expression.IfThenElse(
			ConvertExpression(scope.Runtime, code.Current, ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug), typeof(bool)),
			Expression.Empty(),
			Expression.Goto(loopScope.BreakLabel)
		)
	);

	// append the block
	FetchToken(LuaToken.KwDo, code);
	ParseBlock(loopScope, code);
	FetchToken(LuaToken.KwEnd, code);

	// goto continue
	loopScope.AddExpression(Expression.Goto(loopScope.ContinueLabel));
	loopScope.AddExpression(Expression.Label(loopScope.BreakLabel));

	scope.AddExpression(loopScope.ExpressionBlock);
} // func ParseWhileLoop

/// <summary>Parses a repeat-until loop; the condition jumps back to continue
/// while it evaluates to false.</summary>
private static void ParseRepeatLoop(Scope scope, LuaLexer code)
{
	LoopScope loopScope = new LoopScope(scope);

	// continue label
	loopScope.AddExpression(Expression.Label(loopScope.ContinueLabel));

	// loop content
	FetchToken(LuaToken.KwRepeat, code);
	ParseBlock(loopScope, code);

	// get the loop expression
	FetchToken(LuaToken.KwUntil, code);
	loopScope.AddExpression(
		Expression.IfThenElse(
			ConvertExpression(scope.Runtime, code.Current, ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug), typeof(bool)),
			Expression.Empty(),
			Expression.Goto(loopScope.ContinueLabel)
		)
	);

	loopScope.AddExpression(Expression.Label(loopScope.BreakLabel));
	scope.AddExpression(loopScope.ExpressionBlock);
} // func ParseRepeatLoop

/// <summary>Parses both for-loop forms: the numeric for (start, end [, step])
/// and the generic for (name-list in explist).</summary>
private static void ParseForLoop(Scope scope, LuaLexer code)
{
	// for name
	FetchToken(LuaToken.KwFor, code);
	Token tLoopVar;
	Type typeLoopVar;
	ParseIdentifierAndType(scope, code, out tLoopVar, out typeLoopVar);
	if (code.Current.Typ == LuaToken.Assign)
	{
		// = exp, exp [, exp] do block end
		FetchToken(LuaToken.Assign, code);
		Expression loopStart = ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug);
		FetchToken(LuaToken.Comma, code);
		Expression loopEnd = ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug);
		Expression loopStep;
		if (code.Current.Typ == LuaToken.Comma)
		{
			code.Next();
			loopStep = ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug);
		}
		else
			loopStep = Expression.Constant(1, loopStart.Type); // default step is 1

		LoopScope loopScope = new LoopScope(scope);
		// untyped loop variables get the lifted common type of start/end/step
		ParameterExpression loopVarParameter = loopScope.RegisterVariable(typeLoopVar == typeof(object) ? LuaEmit.LiftType(LuaEmit.LiftType(loopStart.Type, loopEnd.Type), loopStep.Type) : typeLoopVar, tLoopVar.Value);

		FetchToken(LuaToken.KwDo, code);
		ParseBlock(loopScope, code);
		FetchToken(LuaToken.KwEnd, code);
		scope.AddExpression(GenerateForLoop(loopScope, tLoopVar, loopVarParameter, loopStart, loopEnd, loopStep));
	}
	else
	{
		// {, name} in explist do block end

		// fetch all loop variables
		LoopScope loopScope = new LoopScope(scope);
		List<ParameterExpression> loopVars = new List<ParameterExpression>();
		loopVars.Add(loopScope.RegisterVariable(typeLoopVar, tLoopVar.Value));
		while (code.Current.Typ == LuaToken.Comma)
		{
			code.Next();
			ParseIdentifierAndType(scope, code, out tLoopVar, out typeLoopVar);
			loopVars.Add(loopScope.RegisterVariable(typeLoopVar, tLoopVar.Value));
		}

		// get the loop expressions
		FetchToken(LuaToken.KwIn, code);
		Expression[] explist = ParseExpressionList(scope, code).ToArray();

		// parse the loop body
		FetchToken(LuaToken.KwDo, code);
		ParseBlock(loopScope, code);
		FetchToken(LuaToken.KwEnd, code);

		scope.AddExpression(GenerateForLoop(loopScope, tLoopVar, loopVars, explist));
	}
} // func ParseForLoop

/// <summary>Parses the foreach-extension: iterates an IEnumerable via its
/// enumerator, assigning Current to the loop variable each pass.</summary>
private static void ParseForEachLoop(Scope scope, LuaLexer code)
{
	ParameterExpression varEnumerable = Expression.Variable(typeof(System.Collections.IEnumerable), "#enumerable");
	ParameterExpression varEnumerator = Expression.Variable(typeof(System.Collections.IEnumerator), "#enumerator");

	// foreach name in exp do block end;
	code.Next(); // foreach

	// fetch the loop variable
	LoopScope loopScope = new LoopScope(scope);
	Token tLoopVar;
	Type typeLoopVar;
	ParseIdentifierAndType(scope, code, out tLoopVar, out typeLoopVar);
	ParameterExpression loopVar = loopScope.RegisterVariable(typeLoopVar, tLoopVar.Value);

	// get the enumerable expression
	FetchToken(LuaToken.KwIn, code);
	Expression exprEnum = Lua.EnsureType(ParseExpression(scope, code, InvokeResult.None, scope.EmitExpressionDebug), typeof(object));

	// parse the loop body
	FetchToken(LuaToken.KwDo, code);
	ParseBlock(loopScope, code);
	FetchToken(LuaToken.KwEnd, code);

	loopScope.InsertExpression(0, Expression.Assign(loopVar, ConvertExpression(scope.Runtime, tLoopVar, Expression.Property(varEnumerator, Lua.EnumeratorCurrentPropertyInfo), loopVar.Type)));
	scope.AddExpression(
		Expression.Block(new ParameterExpression[] { varEnumerable, varEnumerator, loopVar },
		// local enumerable = exprEnum as IEnumerable
		Expression.Assign(varEnumerable, Expression.TypeAs(exprEnum, typeof(System.Collections.IEnumerable))),

		// if enumerable == nil then error
		Expression.IfThen(Expression.Equal(varEnumerable, Expression.Constant(null, typeof(object))), Lua.ThrowExpression(Properties.Resources.rsExpressionNotEnumerable)),

		// local enum = exprEnum.GetEnumerator()
		Expression.Assign(varEnumerator, Expression.Call(varEnumerable, Lua.EnumerableGetEnumeratorMethodInfo)),

		// while enum.MoveNext() do
		Expression.Label(loopScope.ContinueLabel),
		Expression.IfThenElse(Expression.Call(varEnumerator, Lua.EnumeratorMoveNextMethodInfo), Expression.Empty(), Expression.Goto(loopScope.BreakLabel)),

		// loopVar = enum.Current (inserted at index 0 above), then the body
		loopScope.ExpressionBlock,

		// end;
		Expression.Goto(loopScope.ContinueLabel),
		Expression.Label(loopScope.BreakLabel)
		));
} // proc ParseForEachLoop

/// <summary>Emits the numeric for-loop: counts from loopStart to loopEnd by
/// loopStep, handling both positive and negative step directions.</summary>
private static Expression GenerateForLoop(LoopScope loopScope, Token tStart, ParameterExpression loopVar, Expression loopStart, Expression loopEnd, Expression loopStep)
{
	const string csVar = "#var";
	const string csEnd = "#end";
	const string csStep = "#step";
	ParameterExpression internLoopVar = Expression.Variable(loopVar.Type, csVar);
	ParameterExpression endVar = Expression.Variable(loopEnd.Type, csEnd);
	ParameterExpression stepVar = Expression.Variable(loopStep.Type, csStep);
	LabelTarget labelLoop = Expression.Label("#loop");

	// assign the user-visible loop variable from the internal counter
	loopScope.InsertExpression(0, Expression.Assign(loopVar, internLoopVar));

	// create the loop block
	return Expression.Block(new ParameterExpression[] { internLoopVar, endVar, stepVar },
		Expression.Assign(internLoopVar, ConvertExpression(loopScope.Runtime, tStart, loopStart, internLoopVar.Type)),
		Expression.Assign(endVar, loopEnd),
		Expression.Assign(stepVar, loopStep),

		Expression.Label(labelLoop),

		// continue while (step > 0 and var <= end) or (step <= 0 and var >= end)
		Expression.IfThenElse(
			BinaryOperationExpression(loopScope.Runtime, tStart, ExpressionType.OrElse,
				BinaryOperationExpression(loopScope.Runtime, tStart, ExpressionType.AndAlso,
					ConvertExpression(loopScope.Runtime, tStart, BinaryOperationExpression(loopScope.Runtime, tStart, ExpressionType.GreaterThan, stepVar, Expression.Constant(0, typeof(int))), typeof(bool)),
					ConvertExpression(loopScope.Runtime, tStart, BinaryOperationExpression(loopScope.Runtime, tStart, ExpressionType.LessThanOrEqual, internLoopVar, endVar), typeof(bool))
				),
				BinaryOperationExpression(loopScope.Runtime, tStart, ExpressionType.AndAlso,
					ConvertExpression(loopScope.Runtime, tStart, BinaryOperationExpression(loopScope.Runtime, tStart, ExpressionType.LessThanOrEqual, stepVar, Expression.Constant(0, typeof(int))), typeof(bool)),
					ConvertExpression(loopScope.Runtime, tStart, BinaryOperationExpression(loopScope.Runtime, tStart, ExpressionType.GreaterThanOrEqual, internLoopVar, endVar), typeof(bool))
				)
			),
			loopScope.ExpressionBlock,
			Expression.Goto(loopScope.BreakLabel)
		),
		Expression.Label(loopScope.ContinueLabel),

		// var = var + step
		Expression.Assign(internLoopVar, ConvertExpression(loopScope.Runtime, tStart, BinaryOperationExpression(loopScope.Runtime, tStart, ExpressionType.Add, internLoopVar, stepVar), internLoopVar.Type)),

		Expression.Goto(labelLoop),
		Expression.Label(loopScope.BreakLabel)
	);
} // func GenerateForLoop

/// <summary>Emits the generic for-loop over an iterator triple
/// (function, state, control) as defined by the Lua for-in protocol.</summary>
private static Expression GenerateForLoop(LoopScope loopScope, Token tStart, List<ParameterExpression> loopVars, Expression[] explist)
{
	const string csFunc = "#f";
	const string csState = "#s";
	const string csVar = "#var";
	ParameterExpression varTmp = Expression.Variable(typeof(LuaResult), "#tmp");
	ParameterExpression varFunc = Expression.Variable(explist.Length > 0 && typeof(Delegate).GetTypeInfo().IsAssignableFrom(explist[0].Type.GetTypeInfo()) ? explist[0].Type : typeof(object), csFunc);
	ParameterExpression varState = Expression.Variable(typeof(object), csState);
	ParameterExpression varVar = Expression.Variable(typeof(object), csVar);

	// local var1, ..., varn = tmp;
	for (int i = 0; i < loopVars.Count; i++)
		loopScope.InsertExpression(i, Expression.Assign(loopVars[i], ConvertExpression(loopScope.Runtime, tStart, GetResultExpression(loopScope.Runtime, tStart, varTmp, i), loopVars[i].Type)));

	return Expression.Block(new ParameterExpression[] { varTmp, varFunc, varState, varVar },
		// fill the local loop variables initially
		// local #f, #s, #var = explist
		Expression.Assign(varTmp, GetLuaResultExpression(loopScope, tStart, explist)),
		Expression.Assign(varFunc, ConvertExpression(loopScope.Runtime, tStart, GetResultExpression(loopScope.Runtime, tStart, varTmp, 0), varFunc.Type)),
		Expression.Assign(varState, GetResultExpression(loopScope.Runtime, tStart, varTmp, 1)),
		Expression.Assign(varVar, GetResultExpression(loopScope.Runtime, tStart, varTmp, 2)),

		Expression.Label(loopScope.ContinueLabel),

		// local tmp = f(s, var)
		Expression.Assign(varTmp, InvokeExpression(loopScope, tStart, varFunc, InvokeResult.LuaResult,
			new Expression[] { varState, varVar }, true)
		),

		// var = tmp[0]
		Expression.Assign(varVar, GetResultExpression(loopScope.Runtime, tStart, varTmp, 0)),

		// if var == nil then goto break;
		Expression.IfThenElse(Expression.Equal(varVar, Expression.Constant(null, typeof(object))),
			Expression.Goto(loopScope.BreakLabel),
			loopScope.ExpressionBlock), // loop body

		Expression.Goto(loopScope.ContinueLabel),
		Expression.Label(loopScope.BreakLabel)
	);
} // func GenerateForLoop

private static void ParseBreak(Scope scope, LuaLexer code)
{
	FetchToken(LuaToken.KwBreak, code);

	// create the goto expression
	scope.AddExpression(Expression.Goto(scope.LookupLabel(null, csBreakLabel)));

	// optional semicolon
	FetchToken(LuaToken.Semicolon, code, true);
} // func ParseBreak

#endregion

#region -- Parse Function, Lambda -------------------------------------------------

/// <summary>Parses a function declaration: either a local function (one
/// identifier) or a function assigned into a member chain, optionally a
/// method via ':'.</summary>
private static void ParseFunction(Scope scope, LuaLexer code, bool lLocal)
{
	FetchToken(LuaToken.KwFunction, code);

	if (lLocal) // local function, only one identifier is allowed
	{
		var t = FetchToken(LuaToken.Identifier, code);
		ParameterExpression funcVar = null;
		Expression exprFunction = ParseLamdaDefinition(scope, code, t.Value, false,
			typeDelegate => funcVar = scope.RegisterVariable(typeDelegate, t.Value)
		);
		scope.AddExpression(Expression.Assign(funcVar, exprFunction));
	}
	else // function that is assigned to a table; a chain of identifiers is allowed
	{
		Expression assignee = null;
		Token tCurrent = FetchToken(LuaToken.Identifier, code);
		string sMember = tCurrent.Value;

		// collect the chain of members
		while (code.Current.Typ == LuaToken.Dot)
		{
			code.Next();

			// create the get-member for the current assignee
			assignee = ParseFunctionAddChain(scope, tCurrent, assignee, sMember);
			sMember = FetchToken(LuaToken.Identifier, code).Value;
		}
		// add a method to the table; methods get a hidden self parameter and will be marked
		bool lMethodMember;
		if (code.Current.Typ == LuaToken.Colon)
		{
			code.Next();

			// add the last member to the assignee chain
			assignee = ParseFunctionAddChain(scope, tCurrent, assignee, sMember);
			// fetch the method name
			sMember = FetchToken(LuaToken.Identifier, code).Value;
			lMethodMember = true;
		}
		else
		{
			if (assignee == null)
				assignee = scope.LookupExpression(csEnv); // create a global function
			lMethodMember = false;
		}

		// generate the lambda and assign it to the member
		scope.AddExpression(MemberSetExpression(scope.Runtime, tCurrent, assignee, sMember, lMethodMember, ParseLamdaDefinition(scope, code, sMember, lMethodMember, null)));
	}
} // proc ParseFunction

/// <summary>Resolves one link of an assignment chain: a local/known expression
/// for the first member, otherwise a member-get on the current assignee.</summary>
private static Expression ParseFunctionAddChain(Scope scope, Token tStart, Expression assignee, string sMember)
{
	if (assignee == null)
	{
		Expression expr = scope.LookupExpression(sMember);
		if (expr == null)
			assignee = ParseFunctionAddChain(scope, tStart, scope.LookupExpression(csEnv), sMember);
		else
			assignee = expr;
	}
	else
		assignee = MemberGetExpression(scope, tStart, assignee, sMember);
	return assignee;
} // proc ParseFunctionAddChain

/// <summary>Parses a function body into a lambda expression: parameter list
/// (with optional '...' arglist and type annotations), optional result type,
/// and the code block up to 'end'.</summary>
private static Expression ParseLamdaDefinition(Scope parent, LuaLexer code, string sName, bool lSelfParameter, Action<Type> functionTypeCollected)
{
	List<ParameterExpression> parameters = new List<ParameterExpression>();
	LambdaScope scope = new LambdaScope(parent);

	// read the parameter list
	FetchToken(LuaToken.BracketOpen, code);
	if (lSelfParameter)
		parameters.Add(scope.RegisterParameter(typeof(object), "self"));

	if (code.Current.Typ == LuaToken.Identifier || code.Current.Typ == LuaToken.DotDotDot)
	{
		if (code.Current.Typ == LuaToken.DotDotDot)
		{
			code.Next();
			ParseLamdaDefinitionArgList(scope, parameters);
		}
		else
		{
			Token tName;
			Type typeArgument;
			ParseIdentifierAndType(scope, code, out tName, out typeArgument);
			parameters.Add(scope.RegisterParameter(typeArgument, tName.Value));

			while (code.Current.Typ == LuaToken.Comma)
			{
				code.Next();
				if (code.Current.Typ == LuaToken.DotDotDot)
				{
					code.Next();
					ParseLamdaDefinitionArgList(scope, parameters); // '...' must be the last argument
					break;
				}
				else
				{
					ParseIdentifierAndType(scope, code, out tName, out typeArgument);
					parameters.Add(scope.RegisterParameter(typeArgument, tName.Value));
				}
			}
		}
	}
	FetchToken(LuaToken.BracketClose, code);

	// is there a specific result type?
	if (code.Current.Typ == LuaToken.Colon)
	{
		var t = code.Current;
		code.Next();
		Type typeResult = ParseType(scope, code, true);
		scope.ResetReturnLabel(typeResult, null);
	}
	else
		scope.ResetReturnLabel(typeof(LuaResult), Expression.Property(null, Lua.ResultEmptyPropertyInfo));

	// register the delegate type with the caller
	if (functionTypeCollected != null)
	{
		functionTypeCollected(
			Expression.GetFuncType(
				(from p in parameters select p.Type).Concat(new Type[] { scope.ReturnType }).ToArray()
			)
		);
	}

	// read the code block
	ParseBlock(scope, code);
	FetchToken(LuaToken.KwEnd, code);

	return Expression.Lambda(
		scope.ExpressionBlock,
		(parent.EmitDebug & LuaDebugLevel.RegisterMethods) == LuaDebugLevel.RegisterMethods ? Lua.RegisterUniqueName(sName) : sName,
		parameters);
} // proc ParseLamdaDefinition

/// <summary>Registers the hidden object[] parameter for '...' and wraps it
/// into a LuaResult variable visible to the function body.</summary>
private static void ParseLamdaDefinitionArgList(LambdaScope scope, List<ParameterExpression> parameters)
{
	ParameterExpression paramArgList = scope.RegisterParameter(typeof(object[]), csArgListP);
	ParameterExpression varArgList = scope.RegisterVariable(typeof(LuaResult), csArgList);

	parameters.Add(paramArgList);

	scope.AddExpression(Expression.Assign(varArgList, Expression.New(Lua.ResultConstructorInfoArgN, paramArgList)));
} // proc ParseLamdaDefinitionArgList

#endregion

#region -- Parse TableConstructor -------------------------------------------------

/// <summary>Parses a table constructor into an expression block that creates
/// and fills a LuaTable; an empty constructor short-circuits to an empty table.</summary>
private static Expression ParseTableConstructor(Scope scope, LuaLexer code)
{
	// table ::= '{' [field] { fieldsep field } [fieldsep] '}'
	// fieldsep ::= ',' | ';'
	FetchToken(LuaToken.BracketCurlyOpen, code);

	if (code.Current.Typ != LuaToken.BracketCurlyClose)
	{
		int iIndex = 1;
		Scope scopeTable = new Scope(scope);

		// create the variable for the table
		ParameterExpression tableVar = scopeTable.RegisterVariable(typeof(LuaTable), "#table");
		scopeTable.AddExpression(Expression.Assign(tableVar, CreateEmptyTableExpression()));

		// first field
		ParseTableField(tableVar, scopeTable, code, ref iIndex);

		// collect more table fields
		while (code.Current.Typ == LuaToken.Comma || code.Current.Typ == LuaToken.Semicolon)
		{
			code.Next();

			// optional last separator
			if (code.Current.Typ == LuaToken.BracketCurlyClose)
				break;

			// parse the field
			ParseTableField(tableVar, scopeTable, code, ref iIndex);
		}

		scopeTable.AddExpression(tableVar);
		scopeTable.ExpressionBlockType = typeof(LuaTable);

		// closing bracket
		FetchToken(LuaToken.BracketCurlyClose, code);

		return scopeTable.ExpressionBlock;
	}
	else
	{
		FetchToken(LuaToken.BracketCurlyClose, code);
		return CreateEmptyTableExpression();
	}
} // func ParseTableConstructor

private static void ParseTableField(ParameterExpression tableVar, Scope scope, LuaLexer code, ref int iIndex)
{
	// field ::= '[' exp ']' '=' exp | Name
'=' exp | exp if (code.Current.Typ == LuaToken.BracketSquareOpen) { // Parse the index code.Next(); var index = ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug); FetchToken(LuaToken.BracketSquareClose, code); FetchToken(LuaToken.Assign, code); // Expression that results in a value scope.AddExpression( IndexSetExpression(scope.Runtime, code.Current, tableVar, new Expression[] { index }, ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug) ) ); } else if (code.Current.Typ == LuaToken.Identifier && code.LookAhead.Typ == LuaToken.Assign) { // Read the identifier Token tMember = code.Current; code.Next(); FetchToken(LuaToken.Assign, code); // Expression scope.AddExpression( IndexSetExpression(scope.Runtime, code.Current, tableVar, new Expression[] { Expression.Constant(tMember.Value) }, ParseExpression(scope, code, InvokeResult.Object, scope.EmitExpressionDebug) ) ); } else { Token tStart = code.Current; Expression expr = ParseExpression(scope, code, InvokeResult.None, scope.EmitExpressionDebug); // Last assign, enroll parameter if (code.Current.Typ == LuaToken.BracketCurlyClose && LuaEmit.IsDynamicType(expr.Type)) { scope.AddExpression( Expression.Call(Lua.TableSetObjectsMethod, tableVar, Expression.Convert(expr, typeof(object)), Expression.Constant(iIndex, typeof(int)) ) ); } else // Normal index set { scope.AddExpression( IndexSetExpression(scope.Runtime, code.Current, tableVar, new Expression[] { Expression.Constant(iIndex++, typeof(object)) }, expr) ); } } } // proc ParseTableField private static Expression CreateEmptyTableExpression() { return Expression.New(typeof(LuaTable)); } // func CreateEmptyTableExpression #endregion #region -- FetchToken, ParseError ------------------------------------------------- private static Token FetchToken(LuaToken typ, LuaLexer code, bool lOptional = false) { if (code.Current.Typ == typ) { var t = code.Current; code.Next(); return t; } else if (lOptional) return null; else throw 
ParseError(code.Current, String.Format(Properties.Resources.rsParseUnexpectedToken, LuaLexer.GetTokenName(code.Current.Typ), LuaLexer.GetTokenName(typ))); } // proc FetchToken private static LuaParseException ParseError(Token start, string sMessage) { return new LuaParseException(start.Start, sMessage, null); } // func ParseError #endregion #region -- ExpressionToString ----------------------------------------------------- private static PropertyInfo propertyDebugView = null; public static string ExpressionToString(Expression expr) { if (propertyDebugView == null) propertyDebugView = typeof(Expression).GetTypeInfo().FindDeclaredProperty("DebugView", ReflectionFlag.NoException | ReflectionFlag.NonPublic | ReflectionFlag.Instance); return (string)propertyDebugView.GetValue(expr, null); } // func ExpressionToString #endregion } // class Parser #endregion }
apache-2.0
mihailik/TypeScript
src/compiler/moduleNameResolver.ts
60982
/// <reference path="core.ts" /> /// <reference path="diagnosticInformationMap.generated.ts" /> namespace ts { /* @internal */ export function trace(host: ModuleResolutionHost, message: DiagnosticMessage, ...args: any[]): void; export function trace(host: ModuleResolutionHost): void { host.trace(formatMessage.apply(undefined, arguments)); } /* @internal */ export function isTraceEnabled(compilerOptions: CompilerOptions, host: ModuleResolutionHost): boolean { return compilerOptions.traceResolution && host.trace !== undefined; } /** Array that is only intended to be pushed to, never read. */ /* @internal */ export interface Push<T> { push(value: T): void; } /** * Result of trying to resolve a module. * At least one of `ts` and `js` should be defined, or the whole thing should be `undefined`. */ interface Resolved { path: string; extension: Extension; } /** * Kinds of file that we are currently looking for. * Typically there is one pass with Extensions.TypeScript, then a second pass with Extensions.JavaScript. */ enum Extensions { TypeScript, /** '.ts', '.tsx', or '.d.ts' */ JavaScript, /** '.js' or '.jsx' */ DtsOnly /** Only '.d.ts' */ } /** Used with `Extensions.DtsOnly` to extract the path from TypeScript results. 
*/ function resolvedTypeScriptOnly(resolved: Resolved | undefined): string | undefined { if (!resolved) { return undefined; } Debug.assert(extensionIsTypeScript(resolved.extension)); return resolved.path; } function createResolvedModuleWithFailedLookupLocations(resolved: Resolved | undefined, isExternalLibraryImport: boolean, failedLookupLocations: string[]): ResolvedModuleWithFailedLookupLocations { return { resolvedModule: resolved && { resolvedFileName: resolved.path, extension: resolved.extension, isExternalLibraryImport }, failedLookupLocations }; } export function moduleHasNonRelativeName(moduleName: string): boolean { return !(isRootedDiskPath(moduleName) || isExternalModuleNameRelative(moduleName)); } interface ModuleResolutionState { host: ModuleResolutionHost; compilerOptions: CompilerOptions; traceEnabled: boolean; } /** Reads from "main" or "types"/"typings" depending on `extensions`. */ function tryReadPackageJsonFields(readTypes: boolean, packageJsonPath: string, baseDirectory: string, state: ModuleResolutionState): string | undefined { const jsonContent = readJson(packageJsonPath, state.host); return readTypes ? 
tryReadFromField("typings") || tryReadFromField("types") : tryReadFromField("main"); function tryReadFromField(fieldName: "typings" | "types" | "main"): string | undefined { if (!hasProperty(jsonContent, fieldName)) { if (state.traceEnabled) { trace(state.host, Diagnostics.package_json_does_not_have_a_0_field, fieldName); } return; } const fileName = jsonContent[fieldName]; if (typeof fileName !== "string") { if (state.traceEnabled) { trace(state.host, Diagnostics.Expected_type_of_0_field_in_package_json_to_be_string_got_1, fieldName, typeof fileName); } return; } const path = normalizePath(combinePaths(baseDirectory, fileName)); if (state.traceEnabled) { trace(state.host, Diagnostics.package_json_has_0_field_1_that_references_2, fieldName, fileName, path); } return path; } } function readJson(path: string, host: ModuleResolutionHost): { typings?: string, types?: string, main?: string } { try { const jsonText = host.readFile(path); return jsonText ? JSON.parse(jsonText) : {}; } catch (e) { // gracefully handle if readFile fails or returns not JSON return {}; } } export function getEffectiveTypeRoots(options: CompilerOptions, host: { directoryExists?: (directoryName: string) => boolean, getCurrentDirectory?: () => string }): string[] | undefined { if (options.typeRoots) { return options.typeRoots; } let currentDirectory: string; if (options.configFilePath) { currentDirectory = getDirectoryPath(options.configFilePath); } else if (host.getCurrentDirectory) { currentDirectory = host.getCurrentDirectory(); } if (currentDirectory !== undefined) { return getDefaultTypeRoots(currentDirectory, host); } } /** * Returns the path to every node_modules/@types directory from some ancestor directory. * Returns undefined if there are none. 
*/ function getDefaultTypeRoots(currentDirectory: string, host: { directoryExists?: (directoryName: string) => boolean }): string[] | undefined { if (!host.directoryExists) { return [combinePaths(currentDirectory, nodeModulesAtTypes)]; // And if it doesn't exist, tough. } let typeRoots: string[]; forEachAncestorDirectory(ts.normalizePath(currentDirectory), directory => { const atTypes = combinePaths(directory, nodeModulesAtTypes); if (host.directoryExists(atTypes)) { (typeRoots || (typeRoots = [])).push(atTypes); } return undefined; }); return typeRoots; } const nodeModulesAtTypes = combinePaths("node_modules", "@types"); /** * @param {string | undefined} containingFile - file that contains type reference directive, can be undefined if containing file is unknown. * This is possible in case if resolution is performed for directives specified via 'types' parameter. In this case initial path for secondary lookups * is assumed to be the same as root directory of the project. */ export function resolveTypeReferenceDirective(typeReferenceDirectiveName: string, containingFile: string | undefined, options: CompilerOptions, host: ModuleResolutionHost): ResolvedTypeReferenceDirectiveWithFailedLookupLocations { const traceEnabled = isTraceEnabled(options, host); const moduleResolutionState: ModuleResolutionState = { compilerOptions: options, host: host, traceEnabled }; const typeRoots = getEffectiveTypeRoots(options, host); if (traceEnabled) { if (containingFile === undefined) { if (typeRoots === undefined) { trace(host, Diagnostics.Resolving_type_reference_directive_0_containing_file_not_set_root_directory_not_set, typeReferenceDirectiveName); } else { trace(host, Diagnostics.Resolving_type_reference_directive_0_containing_file_not_set_root_directory_1, typeReferenceDirectiveName, typeRoots); } } else { if (typeRoots === undefined) { trace(host, Diagnostics.Resolving_type_reference_directive_0_containing_file_1_root_directory_not_set, typeReferenceDirectiveName, 
containingFile); } else { trace(host, Diagnostics.Resolving_type_reference_directive_0_containing_file_1_root_directory_2, typeReferenceDirectiveName, containingFile, typeRoots); } } } const failedLookupLocations: string[] = []; let resolved = primaryLookup(); let primary = true; if (!resolved) { resolved = secondaryLookup(); primary = false; } let resolvedTypeReferenceDirective: ResolvedTypeReferenceDirective | undefined; if (resolved) { resolved = realpath(resolved, host, traceEnabled); if (traceEnabled) { trace(host, Diagnostics.Type_reference_directive_0_was_successfully_resolved_to_1_primary_Colon_2, typeReferenceDirectiveName, resolved, primary); } resolvedTypeReferenceDirective = { primary, resolvedFileName: resolved }; } return { resolvedTypeReferenceDirective, failedLookupLocations }; function primaryLookup(): string | undefined { // Check primary library paths if (typeRoots && typeRoots.length) { if (traceEnabled) { trace(host, Diagnostics.Resolving_with_primary_search_path_0, typeRoots.join(", ")); } return forEach(typeRoots, typeRoot => { const candidate = combinePaths(typeRoot, typeReferenceDirectiveName); const candidateDirectory = getDirectoryPath(candidate); const directoryExists = directoryProbablyExists(candidateDirectory, host); if (!directoryExists && traceEnabled) { trace(host, Diagnostics.Directory_0_does_not_exist_skipping_all_lookups_in_it, candidateDirectory); } return resolvedTypeScriptOnly( loadNodeModuleFromDirectory(Extensions.DtsOnly, candidate, failedLookupLocations, !directoryExists, moduleResolutionState)); }); } else { if (traceEnabled) { trace(host, Diagnostics.Root_directory_cannot_be_determined_skipping_primary_search_paths); } } } function secondaryLookup(): string | undefined { let resolvedFile: string; const initialLocationForSecondaryLookup = containingFile && getDirectoryPath(containingFile); if (initialLocationForSecondaryLookup !== undefined) { // check secondary locations if (traceEnabled) { trace(host, 
Diagnostics.Looking_up_in_node_modules_folder_initial_location_0, initialLocationForSecondaryLookup); } const result = loadModuleFromNodeModules(Extensions.DtsOnly, typeReferenceDirectiveName, initialLocationForSecondaryLookup, failedLookupLocations, moduleResolutionState, /*cache*/ undefined); resolvedFile = resolvedTypeScriptOnly(result && result.value); if (!resolvedFile && traceEnabled) { trace(host, Diagnostics.Type_reference_directive_0_was_not_resolved, typeReferenceDirectiveName); } return resolvedFile; } else { if (traceEnabled) { trace(host, Diagnostics.Containing_file_is_not_specified_and_root_directory_cannot_be_determined_skipping_lookup_in_node_modules_folder); } } } } /** * Given a set of options, returns the set of type directive names * that should be included for this program automatically. * This list could either come from the config file, * or from enumerating the types root + initial secondary types lookup location. * More type directives might appear in the program later as a result of loading actual source files; * this list is only the set of defaults that are implicitly included. 
*/ export function getAutomaticTypeDirectiveNames(options: CompilerOptions, host: ModuleResolutionHost): string[] { // Use explicit type list from tsconfig.json if (options.types) { return options.types; } // Walk the primary type lookup locations const result: string[] = []; if (host.directoryExists && host.getDirectories) { const typeRoots = getEffectiveTypeRoots(options, host); if (typeRoots) { for (const root of typeRoots) { if (host.directoryExists(root)) { for (const typeDirectivePath of host.getDirectories(root)) { const normalized = normalizePath(typeDirectivePath); const packageJsonPath = pathToPackageJson(combinePaths(root, normalized)); // tslint:disable-next-line:no-null-keyword const isNotNeededPackage = host.fileExists(packageJsonPath) && readJson(packageJsonPath, host).typings === null; if (!isNotNeededPackage) { // Return just the type directive names result.push(getBaseFileName(normalized)); } } } } } } return result; } /** * Cached module resolutions per containing directory. * This assumes that any module id will have the same resolution for sibling files located in the same folder. */ export interface ModuleResolutionCache extends NonRelativeModuleNameResolutionCache { getOrCreateCacheForDirectory(directoryName: string): Map<ResolvedModuleWithFailedLookupLocations>; } /** * Stored map from non-relative module name to a table: directory -> result of module lookup in this directory * We support only non-relative module names because resolution of relative module names is usually more deterministic and thus less expensive. 
*/ export interface NonRelativeModuleNameResolutionCache { getOrCreateCacheForModuleName(nonRelativeModuleName: string): PerModuleNameCache; } export interface PerModuleNameCache { get(directory: string): ResolvedModuleWithFailedLookupLocations; set(directory: string, result: ResolvedModuleWithFailedLookupLocations): void; } export function createModuleResolutionCache(currentDirectory: string, getCanonicalFileName: (s: string) => string): ModuleResolutionCache { const directoryToModuleNameMap = createFileMap<Map<ResolvedModuleWithFailedLookupLocations>>(); const moduleNameToDirectoryMap = createMap<PerModuleNameCache>(); return { getOrCreateCacheForDirectory, getOrCreateCacheForModuleName }; function getOrCreateCacheForDirectory(directoryName: string) { const path = toPath(directoryName, currentDirectory, getCanonicalFileName); let perFolderCache = directoryToModuleNameMap.get(path); if (!perFolderCache) { perFolderCache = createMap<ResolvedModuleWithFailedLookupLocations>(); directoryToModuleNameMap.set(path, perFolderCache); } return perFolderCache; } function getOrCreateCacheForModuleName(nonRelativeModuleName: string) { if (!moduleHasNonRelativeName(nonRelativeModuleName)) { return undefined; } let perModuleNameCache = moduleNameToDirectoryMap.get(nonRelativeModuleName); if (!perModuleNameCache) { perModuleNameCache = createPerModuleNameCache(); moduleNameToDirectoryMap.set(nonRelativeModuleName, perModuleNameCache); } return perModuleNameCache; } function createPerModuleNameCache(): PerModuleNameCache { const directoryPathMap = createFileMap<ResolvedModuleWithFailedLookupLocations>(); return { get, set }; function get(directory: string): ResolvedModuleWithFailedLookupLocations { return directoryPathMap.get(toPath(directory, currentDirectory, getCanonicalFileName)); } /** * At first this function add entry directory -> module resolution result to the table. 
* Then it computes the set of parent folders for 'directory' that should have the same module resolution result * and for every parent folder in set it adds entry: parent -> module resolution. . * Lets say we first directory name: /a/b/c/d/e and resolution result is: /a/b/bar.ts. * Set of parent folders that should have the same result will be: * [ * /a/b/c/d, /a/b/c, /a/b * ] * this means that request for module resolution from file in any of these folder will be immediately found in cache. */ function set(directory: string, result: ResolvedModuleWithFailedLookupLocations): void { const path = toPath(directory, currentDirectory, getCanonicalFileName); // if entry is already in cache do nothing if (directoryPathMap.contains(path)) { return; } directoryPathMap.set(path, result); const resolvedFileName = result.resolvedModule && result.resolvedModule.resolvedFileName; // find common prefix between directory and resolved file name // this common prefix should be the shorted path that has the same resolution // directory: /a/b/c/d/e // resolvedFileName: /a/b/foo.d.ts const commonPrefix = getCommonPrefix(path, resolvedFileName); let current = path; while (true) { const parent = getDirectoryPath(current); if (parent === current || directoryPathMap.contains(parent)) { break; } directoryPathMap.set(parent, result); current = parent; if (current === commonPrefix) { break; } } } function getCommonPrefix(directory: Path, resolution: string) { if (resolution === undefined) { return undefined; } const resolutionDirectory = toPath(getDirectoryPath(resolution), currentDirectory, getCanonicalFileName); // find first position where directory and resolution differs let i = 0; while (i < Math.min(directory.length, resolutionDirectory.length) && directory.charCodeAt(i) === resolutionDirectory.charCodeAt(i)) { i++; } // find last directory separator before position i const sep = directory.lastIndexOf(directorySeparator, i); if (sep < 0) { return undefined; } return directory.substr(0, 
sep); } } } export function resolveModuleName(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost, cache?: ModuleResolutionCache): ResolvedModuleWithFailedLookupLocations { const traceEnabled = isTraceEnabled(compilerOptions, host); if (traceEnabled) { trace(host, Diagnostics.Resolving_module_0_from_1, moduleName, containingFile); } const containingDirectory = getDirectoryPath(containingFile); const perFolderCache = cache && cache.getOrCreateCacheForDirectory(containingDirectory); let result = perFolderCache && perFolderCache.get(moduleName); if (result) { if (traceEnabled) { trace(host, Diagnostics.Resolution_for_module_0_was_found_in_cache, moduleName); } } else { let moduleResolution = compilerOptions.moduleResolution; if (moduleResolution === undefined) { moduleResolution = getEmitModuleKind(compilerOptions) === ModuleKind.CommonJS ? ModuleResolutionKind.NodeJs : ModuleResolutionKind.Classic; if (traceEnabled) { trace(host, Diagnostics.Module_resolution_kind_is_not_specified_using_0, ModuleResolutionKind[moduleResolution]); } } else { if (traceEnabled) { trace(host, Diagnostics.Explicitly_specified_module_resolution_kind_Colon_0, ModuleResolutionKind[moduleResolution]); } } switch (moduleResolution) { case ModuleResolutionKind.NodeJs: result = nodeModuleNameResolver(moduleName, containingFile, compilerOptions, host, cache); break; case ModuleResolutionKind.Classic: result = classicNameResolver(moduleName, containingFile, compilerOptions, host, cache); break; default: Debug.fail(`Unexpected moduleResolution: ${moduleResolution}`); } if (perFolderCache) { perFolderCache.set(moduleName, result); // put result in per-module name cache const perModuleNameCache = cache.getOrCreateCacheForModuleName(moduleName); if (perModuleNameCache) { perModuleNameCache.set(containingDirectory, result); } } } if (traceEnabled) { if (result.resolvedModule) { trace(host, Diagnostics.Module_name_0_was_successfully_resolved_to_1, 
moduleName, result.resolvedModule.resolvedFileName); } else { trace(host, Diagnostics.Module_name_0_was_not_resolved, moduleName); } } return result; } /* * Every module resolution kind can has its specific understanding how to load module from a specific path on disk * I.e. for path '/a/b/c': * - Node loader will first to try to check if '/a/b/c' points to a file with some supported extension and if this fails * it will try to load module from directory: directory '/a/b/c' should exist and it should have either 'package.json' with * 'typings' entry or file 'index' with some supported extension * - Classic loader will only try to interpret '/a/b/c' as file. */ type ResolutionKindSpecificLoader = (extensions: Extensions, candidate: string, failedLookupLocations: Push<string>, onlyRecordFailures: boolean, state: ModuleResolutionState) => Resolved | undefined; /** * Any module resolution kind can be augmented with optional settings: 'baseUrl', 'paths' and 'rootDirs' - they are used to * mitigate differences between design time structure of the project and its runtime counterpart so the same import name * can be resolved successfully by TypeScript compiler and runtime module loader. * If these settings are set then loading procedure will try to use them to resolve module name and it can of failure it will * fallback to standard resolution routine. * * - baseUrl - this setting controls how non-relative module names are resolved. If this setting is specified then non-relative * names will be resolved relative to baseUrl: i.e. if baseUrl is '/a/b' then candidate location to resolve module name 'c/d' will * be '/a/b/c/d' * - paths - this setting can only be used when baseUrl is specified. allows to tune how non-relative module names * will be resolved based on the content of the module name. * Structure of 'paths' compiler options * 'paths': { * pattern-1: [...substitutions], * pattern-2: [...substitutions], * ... 
* pattern-n: [...substitutions] * } * Pattern here is a string that can contain zero or one '*' character. During module resolution module name will be matched against * all patterns in the list. Matching for patterns that don't contain '*' means that module name must be equal to pattern respecting the case. * If pattern contains '*' then to match pattern "<prefix>*<suffix>" module name must start with the <prefix> and end with <suffix>. * <MatchedStar> denotes part of the module name between <prefix> and <suffix>. * If module name can be matches with multiple patterns then pattern with the longest prefix will be picked. * After selecting pattern we'll use list of substitutions to get candidate locations of the module and the try to load module * from the candidate location. * Substitution is a string that can contain zero or one '*'. To get candidate location from substitution we'll pick every * substitution in the list and replace '*' with <MatchedStar> string. If candidate location is not rooted it * will be converted to absolute using baseUrl. * For example: * baseUrl: /a/b/c * "paths": { * // match all module names * "*": [ * "*", // use matched name as is, * // <matched name> will be looked as /a/b/c/<matched name> * * "folder1/*" // substitution will convert matched name to 'folder1/<matched name>', * // since it is not rooted then final candidate location will be /a/b/c/folder1/<matched name> * ], * // match module names that start with 'components/' * "components/*": [ "/root/components/*" ] // substitution will convert /components/folder1/<matched name> to '/root/components/folder1/<matched name>', * // it is rooted so it will be final candidate location * } * * 'rootDirs' allows the project to be spreaded across multiple locations and resolve modules with relative names as if * they were in the same location. 
For example lets say there are two files * '/local/src/content/file1.ts' * '/shared/components/contracts/src/content/protocols/file2.ts' * After bundling content of '/shared/components/contracts/src' will be merged with '/local/src' so * if file1 has the following import 'import {x} from "./protocols/file2"' it will be resolved successfully in runtime. * 'rootDirs' provides the way to tell compiler that in order to get the whole project it should behave as if content of all * root dirs were merged together. * I.e. for the example above 'rootDirs' will have two entries: [ '/local/src', '/shared/components/contracts/src' ]. * Compiler will first convert './protocols/file2' into absolute path relative to the location of containing file: * '/local/src/content/protocols/file2' and try to load it - failure. * Then it will search 'rootDirs' looking for a longest matching prefix of this absolute path and if such prefix is found - absolute path will * be converted to a path relative to found rootDir entry './content/protocols/file2' (*). As a last step compiler will check all remaining * entries in 'rootDirs', use them to build absolute path out of (*) and try to resolve module from this location. 
*/ function tryLoadModuleUsingOptionalResolutionSettings(extensions: Extensions, moduleName: string, containingDirectory: string, loader: ResolutionKindSpecificLoader, failedLookupLocations: Push<string>, state: ModuleResolutionState): Resolved | undefined { if (moduleHasNonRelativeName(moduleName)) { return tryLoadModuleUsingBaseUrl(extensions, moduleName, loader, failedLookupLocations, state); } else { return tryLoadModuleUsingRootDirs(extensions, moduleName, containingDirectory, loader, failedLookupLocations, state); } } function tryLoadModuleUsingRootDirs(extensions: Extensions, moduleName: string, containingDirectory: string, loader: ResolutionKindSpecificLoader, failedLookupLocations: Push<string>, state: ModuleResolutionState): Resolved | undefined { if (!state.compilerOptions.rootDirs) { return undefined; } if (state.traceEnabled) { trace(state.host, Diagnostics.rootDirs_option_is_set_using_it_to_resolve_relative_module_name_0, moduleName); } const candidate = normalizePath(combinePaths(containingDirectory, moduleName)); let matchedRootDir: string; let matchedNormalizedPrefix: string; for (const rootDir of state.compilerOptions.rootDirs) { // rootDirs are expected to be absolute // in case of tsconfig.json this will happen automatically - compiler will expand relative names // using location of tsconfig.json as base location let normalizedRoot = normalizePath(rootDir); if (!endsWith(normalizedRoot, directorySeparator)) { normalizedRoot += directorySeparator; } const isLongestMatchingPrefix = startsWith(candidate, normalizedRoot) && (matchedNormalizedPrefix === undefined || matchedNormalizedPrefix.length < normalizedRoot.length); if (state.traceEnabled) { trace(state.host, Diagnostics.Checking_if_0_is_the_longest_matching_prefix_for_1_2, normalizedRoot, candidate, isLongestMatchingPrefix); } if (isLongestMatchingPrefix) { matchedNormalizedPrefix = normalizedRoot; matchedRootDir = rootDir; } } if (matchedNormalizedPrefix) { if (state.traceEnabled) { 
trace(state.host, Diagnostics.Longest_matching_prefix_for_0_is_1, candidate, matchedNormalizedPrefix); } const suffix = candidate.substr(matchedNormalizedPrefix.length); // first - try to load from a initial location if (state.traceEnabled) { trace(state.host, Diagnostics.Loading_0_from_the_root_dir_1_candidate_location_2, suffix, matchedNormalizedPrefix, candidate); } const resolvedFileName = loader(extensions, candidate, failedLookupLocations, !directoryProbablyExists(containingDirectory, state.host), state); if (resolvedFileName) { return resolvedFileName; } if (state.traceEnabled) { trace(state.host, Diagnostics.Trying_other_entries_in_rootDirs); } // then try to resolve using remaining entries in rootDirs for (const rootDir of state.compilerOptions.rootDirs) { if (rootDir === matchedRootDir) { // skip the initially matched entry continue; } const candidate = combinePaths(normalizePath(rootDir), suffix); if (state.traceEnabled) { trace(state.host, Diagnostics.Loading_0_from_the_root_dir_1_candidate_location_2, suffix, rootDir, candidate); } const baseDirectory = getDirectoryPath(candidate); const resolvedFileName = loader(extensions, candidate, failedLookupLocations, !directoryProbablyExists(baseDirectory, state.host), state); if (resolvedFileName) { return resolvedFileName; } } if (state.traceEnabled) { trace(state.host, Diagnostics.Module_resolution_using_rootDirs_has_failed); } } return undefined; } function tryLoadModuleUsingBaseUrl(extensions: Extensions, moduleName: string, loader: ResolutionKindSpecificLoader, failedLookupLocations: Push<string>, state: ModuleResolutionState): Resolved | undefined { if (!state.compilerOptions.baseUrl) { return undefined; } if (state.traceEnabled) { trace(state.host, Diagnostics.baseUrl_option_is_set_to_0_using_this_value_to_resolve_non_relative_module_name_1, state.compilerOptions.baseUrl, moduleName); } // string is for exact match let matchedPattern: Pattern | string | undefined = undefined; if 
(state.compilerOptions.paths) { if (state.traceEnabled) { trace(state.host, Diagnostics.paths_option_is_specified_looking_for_a_pattern_to_match_module_name_0, moduleName); } matchedPattern = matchPatternOrExact(getOwnKeys(state.compilerOptions.paths), moduleName); } if (matchedPattern) { const matchedStar = typeof matchedPattern === "string" ? undefined : matchedText(matchedPattern, moduleName); const matchedPatternText = typeof matchedPattern === "string" ? matchedPattern : patternText(matchedPattern); if (state.traceEnabled) { trace(state.host, Diagnostics.Module_name_0_matched_pattern_1, moduleName, matchedPatternText); } return forEach(state.compilerOptions.paths[matchedPatternText], subst => { const path = matchedStar ? subst.replace("*", matchedStar) : subst; const candidate = normalizePath(combinePaths(state.compilerOptions.baseUrl, path)); if (state.traceEnabled) { trace(state.host, Diagnostics.Trying_substitution_0_candidate_module_location_Colon_1, subst, path); } // A path mapping may have an extension, in contrast to an import, which should omit it. 
const extension = tryGetExtensionFromPath(candidate); if (extension !== undefined) { const path = tryFile(candidate, failedLookupLocations, /*onlyRecordFailures*/ false, state); if (path !== undefined) { return { path, extension }; } } return loader(extensions, candidate, failedLookupLocations, !directoryProbablyExists(getDirectoryPath(candidate), state.host), state); }); } else { const candidate = normalizePath(combinePaths(state.compilerOptions.baseUrl, moduleName)); if (state.traceEnabled) { trace(state.host, Diagnostics.Resolving_module_name_0_relative_to_base_url_1_2, moduleName, state.compilerOptions.baseUrl, candidate); } return loader(extensions, candidate, failedLookupLocations, !directoryProbablyExists(getDirectoryPath(candidate), state.host), state); } } export function nodeModuleNameResolver(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost, cache?: ModuleResolutionCache): ResolvedModuleWithFailedLookupLocations { return nodeModuleNameResolverWorker(moduleName, getDirectoryPath(containingFile), compilerOptions, host, cache, /*jsOnly*/ false); } /** * Expose resolution logic to allow us to use Node module resolution logic from arbitrary locations. * No way to do this with `require()`: https://github.com/nodejs/node/issues/5963 * Throws an error if the module can't be resolved. */ /* @internal */ export function resolveJavaScriptModule(moduleName: string, initialDir: string, host: ModuleResolutionHost): string { const { resolvedModule, failedLookupLocations } = nodeModuleNameResolverWorker(moduleName, initialDir, { moduleResolution: ts.ModuleResolutionKind.NodeJs, allowJs: true }, host, /*cache*/ undefined, /*jsOnly*/ true); if (!resolvedModule) { throw new Error(`Could not resolve JS module ${moduleName} starting at ${initialDir}. 
Looked in: ${failedLookupLocations.join(", ")}`); } return resolvedModule.resolvedFileName; } function nodeModuleNameResolverWorker(moduleName: string, containingDirectory: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost, cache: ModuleResolutionCache | undefined, jsOnly: boolean): ResolvedModuleWithFailedLookupLocations { const traceEnabled = isTraceEnabled(compilerOptions, host); const failedLookupLocations: string[] = []; const state: ModuleResolutionState = { compilerOptions, host, traceEnabled }; const result = jsOnly ? tryResolve(Extensions.JavaScript) : (tryResolve(Extensions.TypeScript) || tryResolve(Extensions.JavaScript)); if (result && result.value) { const { resolved, isExternalLibraryImport } = result.value; return createResolvedModuleWithFailedLookupLocations(resolved, isExternalLibraryImport, failedLookupLocations); } return { resolvedModule: undefined, failedLookupLocations }; function tryResolve(extensions: Extensions): SearchResult<{ resolved: Resolved, isExternalLibraryImport: boolean }> { const loader: ResolutionKindSpecificLoader = (extensions, candidate, failedLookupLocations, onlyRecordFailures, state) => nodeLoadModuleByRelativeName(extensions, candidate, failedLookupLocations, onlyRecordFailures, state, /*considerPackageJson*/ true); const resolved = tryLoadModuleUsingOptionalResolutionSettings(extensions, moduleName, containingDirectory, loader, failedLookupLocations, state); if (resolved) { return toSearchResult({ resolved, isExternalLibraryImport: false }); } if (moduleHasNonRelativeName(moduleName)) { if (traceEnabled) { trace(host, Diagnostics.Loading_module_0_from_node_modules_folder_target_file_type_1, moduleName, Extensions[extensions]); } const resolved = loadModuleFromNodeModules(extensions, moduleName, containingDirectory, failedLookupLocations, state, cache); // For node_modules lookups, get the real path so that multiple accesses to an `npm link`-ed module do not create duplicate files. 
return resolved && { value: resolved.value && { resolved: { path: realpath(resolved.value.path, host, traceEnabled), extension: resolved.value.extension }, isExternalLibraryImport: true } }; } else { const candidate = normalizePath(combinePaths(containingDirectory, moduleName)); const resolved = nodeLoadModuleByRelativeName(extensions, candidate, failedLookupLocations, /*onlyRecordFailures*/ false, state, /*considerPackageJson*/ true); return resolved && toSearchResult({ resolved, isExternalLibraryImport: false }); } } } function realpath(path: string, host: ModuleResolutionHost, traceEnabled: boolean): string { if (!host.realpath) { return path; } const real = normalizePath(host.realpath(path)); if (traceEnabled) { trace(host, Diagnostics.Resolving_real_path_for_0_result_1, path, real); } return real; } function nodeLoadModuleByRelativeName(extensions: Extensions, candidate: string, failedLookupLocations: Push<string>, onlyRecordFailures: boolean, state: ModuleResolutionState, considerPackageJson: boolean): Resolved | undefined { if (state.traceEnabled) { trace(state.host, Diagnostics.Loading_module_as_file_Slash_folder_candidate_module_location_0_target_file_type_1, candidate, Extensions[extensions]); } if (!pathEndsWithDirectorySeparator(candidate)) { if (!onlyRecordFailures) { const parentOfCandidate = getDirectoryPath(candidate); if (!directoryProbablyExists(parentOfCandidate, state.host)) { if (state.traceEnabled) { trace(state.host, Diagnostics.Directory_0_does_not_exist_skipping_all_lookups_in_it, parentOfCandidate); } onlyRecordFailures = true; } } const resolvedFromFile = loadModuleFromFile(extensions, candidate, failedLookupLocations, onlyRecordFailures, state); if (resolvedFromFile) { return resolvedFromFile; } } if (!onlyRecordFailures) { const candidateExists = directoryProbablyExists(candidate, state.host); if (!candidateExists) { if (state.traceEnabled) { trace(state.host, Diagnostics.Directory_0_does_not_exist_skipping_all_lookups_in_it, 
candidate); } onlyRecordFailures = true; } } return loadNodeModuleFromDirectory(extensions, candidate, failedLookupLocations, onlyRecordFailures, state, considerPackageJson); } /* @internal */ export function directoryProbablyExists(directoryName: string, host: { directoryExists?: (directoryName: string) => boolean }): boolean { // if host does not support 'directoryExists' assume that directory will exist return !host.directoryExists || host.directoryExists(directoryName); } /** * @param {boolean} onlyRecordFailures - if true then function won't try to actually load files but instead record all attempts as failures. This flag is necessary * in cases when we know upfront that all load attempts will fail (because containing folder does not exists) however we still need to record all failed lookup locations. */ function loadModuleFromFile(extensions: Extensions, candidate: string, failedLookupLocations: Push<string>, onlyRecordFailures: boolean, state: ModuleResolutionState): Resolved | undefined { // First, try adding an extension. An import of "foo" could be matched by a file "foo.ts", or "foo.js" by "foo.js.ts" const resolvedByAddingExtension = tryAddingExtensions(candidate, extensions, failedLookupLocations, onlyRecordFailures, state); if (resolvedByAddingExtension) { return resolvedByAddingExtension; } // If that didn't work, try stripping a ".js" or ".jsx" extension and replacing it with a TypeScript one; // e.g. "./foo.js" can be matched by "./foo.ts" or "./foo.d.ts" if (hasJavaScriptFileExtension(candidate)) { const extensionless = removeFileExtension(candidate); if (state.traceEnabled) { const extension = candidate.substring(extensionless.length); trace(state.host, Diagnostics.File_name_0_has_a_1_extension_stripping_it, candidate, extension); } return tryAddingExtensions(extensionless, extensions, failedLookupLocations, onlyRecordFailures, state); } } /** Try to return an existing file that adds one of the `extensions` to `candidate`. 
*/ function tryAddingExtensions(candidate: string, extensions: Extensions, failedLookupLocations: Push<string>, onlyRecordFailures: boolean, state: ModuleResolutionState): Resolved | undefined { if (!onlyRecordFailures) { // check if containing folder exists - if it doesn't then just record failures for all supported extensions without disk probing const directory = getDirectoryPath(candidate); if (directory) { onlyRecordFailures = !directoryProbablyExists(directory, state.host); } } switch (extensions) { case Extensions.DtsOnly: return tryExtension(Extension.Dts); case Extensions.TypeScript: return tryExtension(Extension.Ts) || tryExtension(Extension.Tsx) || tryExtension(Extension.Dts); case Extensions.JavaScript: return tryExtension(Extension.Js) || tryExtension(Extension.Jsx); } function tryExtension(extension: Extension): Resolved | undefined { const path = tryFile(candidate + extension, failedLookupLocations, onlyRecordFailures, state); return path && { path, extension }; } } /** Return the file if it exists. 
*/ function tryFile(fileName: string, failedLookupLocations: Push<string>, onlyRecordFailures: boolean, state: ModuleResolutionState): string | undefined { if (!onlyRecordFailures) { if (state.host.fileExists(fileName)) { if (state.traceEnabled) { trace(state.host, Diagnostics.File_0_exist_use_it_as_a_name_resolution_result, fileName); } return fileName; } else { if (state.traceEnabled) { trace(state.host, Diagnostics.File_0_does_not_exist, fileName); } } } failedLookupLocations.push(fileName); return undefined; } function loadNodeModuleFromDirectory(extensions: Extensions, candidate: string, failedLookupLocations: Push<string>, onlyRecordFailures: boolean, state: ModuleResolutionState, considerPackageJson = true): Resolved | undefined { const directoryExists = !onlyRecordFailures && directoryProbablyExists(candidate, state.host); if (considerPackageJson) { const packageJsonPath = pathToPackageJson(candidate); if (directoryExists && state.host.fileExists(packageJsonPath)) { const fromPackageJson = loadModuleFromPackageJson(packageJsonPath, extensions, candidate, failedLookupLocations, state); if (fromPackageJson) { return fromPackageJson; } } else { if (directoryExists && state.traceEnabled) { trace(state.host, Diagnostics.File_0_does_not_exist, packageJsonPath); } // record package json as one of failed lookup locations - in the future if this file will appear it will invalidate resolution results failedLookupLocations.push(packageJsonPath); } } return loadModuleFromFile(extensions, combinePaths(candidate, "index"), failedLookupLocations, !directoryExists, state); } function loadModuleFromPackageJson(packageJsonPath: string, extensions: Extensions, candidate: string, failedLookupLocations: Push<string>, state: ModuleResolutionState): Resolved | undefined { if (state.traceEnabled) { trace(state.host, Diagnostics.Found_package_json_at_0, packageJsonPath); } const file = tryReadPackageJsonFields(extensions !== Extensions.JavaScript, packageJsonPath, candidate, 
state); if (!file) { return undefined; } const onlyRecordFailures = !directoryProbablyExists(getDirectoryPath(file), state.host); const fromFile = tryFile(file, failedLookupLocations, onlyRecordFailures, state); if (fromFile) { const resolved = fromFile && resolvedIfExtensionMatches(extensions, fromFile); if (resolved) { return resolved; } if (state.traceEnabled) { trace(state.host, Diagnostics.File_0_has_an_unsupported_extension_so_skipping_it, fromFile); } } // Even if extensions is DtsOnly, we can still look up a .ts file as a result of package.json "types" const nextExtensions = extensions === Extensions.DtsOnly ? Extensions.TypeScript : extensions; // Don't do package.json lookup recursively, because Node.js' package lookup doesn't. return nodeLoadModuleByRelativeName(nextExtensions, file, failedLookupLocations, onlyRecordFailures, state, /*considerPackageJson*/ false); } /** Resolve from an arbitrarily specified file. Return `undefined` if it has an unsupported extension. */ function resolvedIfExtensionMatches(extensions: Extensions, path: string): Resolved | undefined { const extension = tryGetExtensionFromPath(path); return extension !== undefined && extensionIsOk(extensions, extension) ? { path, extension } : undefined; } /** True if `extension` is one of the supported `extensions`. 
*/ function extensionIsOk(extensions: Extensions, extension: Extension): boolean { switch (extensions) { case Extensions.JavaScript: return extension === Extension.Js || extension === Extension.Jsx; case Extensions.TypeScript: return extension === Extension.Ts || extension === Extension.Tsx || extension === Extension.Dts; case Extensions.DtsOnly: return extension === Extension.Dts; } } function pathToPackageJson(directory: string): string { return combinePaths(directory, "package.json"); } function loadModuleFromNodeModulesFolder(extensions: Extensions, moduleName: string, nodeModulesFolder: string, nodeModulesFolderExists: boolean, failedLookupLocations: Push<string>, state: ModuleResolutionState): Resolved | undefined { const candidate = normalizePath(combinePaths(nodeModulesFolder, moduleName)); return loadModuleFromFile(extensions, candidate, failedLookupLocations, !nodeModulesFolderExists, state) || loadNodeModuleFromDirectory(extensions, candidate, failedLookupLocations, !nodeModulesFolderExists, state); } function loadModuleFromNodeModules(extensions: Extensions, moduleName: string, directory: string, failedLookupLocations: Push<string>, state: ModuleResolutionState, cache: NonRelativeModuleNameResolutionCache): SearchResult<Resolved> { return loadModuleFromNodeModulesWorker(extensions, moduleName, directory, failedLookupLocations, state, /*typesOnly*/ false, cache); } function loadModuleFromNodeModulesAtTypes(moduleName: string, directory: string, failedLookupLocations: Push<string>, state: ModuleResolutionState): SearchResult<Resolved> { // Extensions parameter here doesn't actually matter, because typesOnly ensures we're just doing @types lookup, which is always DtsOnly. 
return loadModuleFromNodeModulesWorker(Extensions.DtsOnly, moduleName, directory, failedLookupLocations, state, /*typesOnly*/ true, /*cache*/ undefined); } function loadModuleFromNodeModulesWorker(extensions: Extensions, moduleName: string, directory: string, failedLookupLocations: Push<string>, state: ModuleResolutionState, typesOnly: boolean, cache: NonRelativeModuleNameResolutionCache): SearchResult<Resolved> { const perModuleNameCache = cache && cache.getOrCreateCacheForModuleName(moduleName); return forEachAncestorDirectory(normalizeSlashes(directory), ancestorDirectory => { if (getBaseFileName(ancestorDirectory) !== "node_modules") { const resolutionFromCache = tryFindNonRelativeModuleNameInCache(perModuleNameCache, moduleName, ancestorDirectory, state.traceEnabled, state.host); if (resolutionFromCache) { return resolutionFromCache; } return toSearchResult(loadModuleFromNodeModulesOneLevel(extensions, moduleName, ancestorDirectory, failedLookupLocations, state, typesOnly)); } }); } /** Load a module from a single node_modules directory, but not from any ancestors' node_modules directories. */ function loadModuleFromNodeModulesOneLevel(extensions: Extensions, moduleName: string, directory: string, failedLookupLocations: Push<string>, state: ModuleResolutionState, typesOnly = false): Resolved | undefined { const nodeModulesFolder = combinePaths(directory, "node_modules"); const nodeModulesFolderExists = directoryProbablyExists(nodeModulesFolder, state.host); if (!nodeModulesFolderExists && state.traceEnabled) { trace(state.host, Diagnostics.Directory_0_does_not_exist_skipping_all_lookups_in_it, nodeModulesFolder); } const packageResult = typesOnly ? 
undefined : loadModuleFromNodeModulesFolder(extensions, moduleName, nodeModulesFolder, nodeModulesFolderExists, failedLookupLocations, state); if (packageResult) { return packageResult; } if (extensions !== Extensions.JavaScript) { const nodeModulesAtTypes = combinePaths(nodeModulesFolder, "@types"); let nodeModulesAtTypesExists = nodeModulesFolderExists; if (nodeModulesFolderExists && !directoryProbablyExists(nodeModulesAtTypes, state.host)) { if (state.traceEnabled) { trace(state.host, Diagnostics.Directory_0_does_not_exist_skipping_all_lookups_in_it, nodeModulesAtTypes); } nodeModulesAtTypesExists = false; } return loadModuleFromNodeModulesFolder(Extensions.DtsOnly, mangleScopedPackage(moduleName, state), nodeModulesAtTypes, nodeModulesAtTypesExists, failedLookupLocations, state); } } /** Double underscores are used in DefinitelyTyped to delimit scoped packages. */ const mangledScopedPackageSeparator = "__"; /** For a scoped package, we must look in `@types/foo__bar` instead of `@types/@foo/bar`. */ function mangleScopedPackage(moduleName: string, state: ModuleResolutionState): string { if (startsWith(moduleName, "@")) { const replaceSlash = moduleName.replace(ts.directorySeparator, mangledScopedPackageSeparator); if (replaceSlash !== moduleName) { const mangled = replaceSlash.slice(1); // Take off the "@" if (state.traceEnabled) { trace(state.host, Diagnostics.Scoped_package_detected_looking_in_0, mangled); } return mangled; } } return moduleName; } /* @internal */ export function getPackageNameFromAtTypesDirectory(mangledName: string): string { const withoutAtTypePrefix = removePrefix(mangledName, "@types/"); if (withoutAtTypePrefix !== mangledName) { return withoutAtTypePrefix.indexOf("__") !== -1 ? 
"@" + withoutAtTypePrefix.replace(mangledScopedPackageSeparator, ts.directorySeparator) : withoutAtTypePrefix; } return mangledName; } function tryFindNonRelativeModuleNameInCache(cache: PerModuleNameCache | undefined, moduleName: string, containingDirectory: string, traceEnabled: boolean, host: ModuleResolutionHost): SearchResult<Resolved> { const result = cache && cache.get(containingDirectory); if (result) { if (traceEnabled) { trace(host, Diagnostics.Resolution_for_module_0_was_found_in_cache, moduleName); } return { value: result.resolvedModule && { path: result.resolvedModule.resolvedFileName, extension: result.resolvedModule.extension } }; } } export function classicNameResolver(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost, cache?: NonRelativeModuleNameResolutionCache): ResolvedModuleWithFailedLookupLocations { const traceEnabled = isTraceEnabled(compilerOptions, host); const state: ModuleResolutionState = { compilerOptions, host, traceEnabled }; const failedLookupLocations: string[] = []; const containingDirectory = getDirectoryPath(containingFile); const resolved = tryResolve(Extensions.TypeScript) || tryResolve(Extensions.JavaScript); return createResolvedModuleWithFailedLookupLocations(resolved && resolved.value, /*isExternalLibraryImport*/ false, failedLookupLocations); function tryResolve(extensions: Extensions): SearchResult<Resolved> { const resolvedUsingSettings = tryLoadModuleUsingOptionalResolutionSettings(extensions, moduleName, containingDirectory, loadModuleFromFile, failedLookupLocations, state); if (resolvedUsingSettings) { return { value: resolvedUsingSettings }; } const perModuleNameCache = cache && cache.getOrCreateCacheForModuleName(moduleName); if (moduleHasNonRelativeName(moduleName)) { // Climb up parent directories looking for a module. 
const resolved = forEachAncestorDirectory(containingDirectory, directory => { const resolutionFromCache = tryFindNonRelativeModuleNameInCache(perModuleNameCache, moduleName, directory, traceEnabled, host); if (resolutionFromCache) { return resolutionFromCache; } const searchName = normalizePath(combinePaths(directory, moduleName)); return toSearchResult(loadModuleFromFile(extensions, searchName, failedLookupLocations, /*onlyRecordFailures*/ false, state)); }); if (resolved) { return resolved; } if (extensions === Extensions.TypeScript) { // If we didn't find the file normally, look it up in @types. return loadModuleFromNodeModulesAtTypes(moduleName, containingDirectory, failedLookupLocations, state); } } else { const candidate = normalizePath(combinePaths(containingDirectory, moduleName)); return toSearchResult(loadModuleFromFile(extensions, candidate, failedLookupLocations, /*onlyRecordFailures*/ false, state)); } } } /** * LSHost may load a module from a global cache of typings. * This is the minumum code needed to expose that functionality; the rest is in LSHost. */ /* @internal */ export function loadModuleFromGlobalCache(moduleName: string, projectName: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost, globalCache: string): ResolvedModuleWithFailedLookupLocations { const traceEnabled = isTraceEnabled(compilerOptions, host); if (traceEnabled) { trace(host, Diagnostics.Auto_discovery_for_typings_is_enabled_in_project_0_Running_extra_resolution_pass_for_module_1_using_cache_location_2, projectName, moduleName, globalCache); } const state: ModuleResolutionState = { compilerOptions, host, traceEnabled }; const failedLookupLocations: string[] = []; const resolved = loadModuleFromNodeModulesOneLevel(Extensions.DtsOnly, moduleName, globalCache, failedLookupLocations, state); return createResolvedModuleWithFailedLookupLocations(resolved, /*isExternalLibraryImport*/ true, failedLookupLocations); } /** * Represents result of search. 
Normally when searching among several alternatives we treat value `undefined` as indicator * that search fails and we should try another option. * However this does not allow us to represent final result that should be used instead of further searching (i.e. a final result that was found in cache). * SearchResult is used to deal with this issue, its values represents following outcomes: * - undefined - not found, continue searching * - { value: undefined } - not found - stop searching * - { value: <some-value> } - found - stop searching */ type SearchResult<T> = { value: T | undefined } | undefined; /** * Wraps value to SearchResult. * @returns undefined if value is undefined or { value } otherwise */ function toSearchResult<T>(value: T | undefined): SearchResult<T> { return value !== undefined ? { value } : undefined; } /** Calls `callback` on `directory` and every ancestor directory it has, returning the first defined result. */ function forEachAncestorDirectory<T>(directory: string, callback: (directory: string) => SearchResult<T>): SearchResult<T> { while (true) { const result = callback(directory); if (result !== undefined) { return result; } const parentPath = getDirectoryPath(directory); if (parentPath === directory) { return undefined; } directory = parentPath; } } }
apache-2.0
jeremyf/sufia
spec/views/single_user_link/download.html.erb_spec.rb
133
require 'spec_helper' describe "single_user_link/download.html.erb" do pending "add some examples to (or delete) #{__FILE__}" end
apache-2.0
pivotalservices/cf-mgmt
serviceaccess/service_plan_test.go
1052
package serviceaccess_test import ( . "github.com/vmwarepivotallabs/cf-mgmt/serviceaccess" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" ) var _ = Describe("ServicePlan", func() { var servicePlan *ServicePlanInfo Context("Adding org", func() { It("exists", func() { servicePlan = &ServicePlanInfo{} servicePlan.AddOrg(&Visibility{ OrgGUID: "org-guid-123-ABC", ServicePlanGUID: "service-plan-guid", }) Expect(servicePlan.OrgHasAccess("org-guid-123-abc")).To(BeTrue()) }) It("exists", func() { servicePlan = &ServicePlanInfo{} servicePlan.AddOrg(&Visibility{ OrgGUID: "org-guid-123-ABC", ServicePlanGUID: "service-plan-guid", }) Expect(servicePlan.OrgHasAccess("org-guid-123-ABC")).To(BeTrue()) }) It("doesn't exists", func() { servicePlan = &ServicePlanInfo{} servicePlan.AddOrg(&Visibility{ OrgGUID: "org-guid-123-efg", ServicePlanGUID: "service-plan-guid", }) Expect(servicePlan.OrgHasAccess("org-guid-123-ABC")).To(BeFalse()) }) }) })
apache-2.0
spirylics/x-gwt
x-gwt-firebase/src/main/java/com/github/spirylics/xgwt/firebase/auth/AuthResult.java
247
package com.github.spirylics.xgwt.firebase.auth; import jsinterop.annotations.JsProperty; import jsinterop.annotations.JsType; @SuppressWarnings("ALL") @JsType(isNative = true) public interface AuthResult { @JsProperty User getUser(); }
apache-2.0
bigbugbb/PrayerPartner
app/src/main/java/com/bigbug/android/pp/util/PrefUtils.java
6914
package com.bigbug.android.pp.util; import android.content.Context; import android.content.SharedPreferences; import android.preference.PreferenceManager; /** * Utilities and constants related to app preferences. */ public class PrefUtils { /** * Boolean preference that when checked, indicates that the user has completed account * authentication and the initial set up flow. */ public static final String PREF_SETUP_DONE = "_pref_setup_done"; public static final String PREF_FIRST_USAGE = "_pref_first_usage"; public static final String PREF_ALARM_SETUP_DONE = "_pref_alarm_setup_done"; /** * Boolean preference that indicates whether we installed the boostrap data or not. */ public static final String PREF_DATA_BOOTSTRAP_DONE = "_pref_data_bootstrap_done"; /** Long indicating when a sync was last ATTEMPTED (not necessarily succeeded) */ public static final String PREF_LAST_SYNC_ATTEMPTED = "_pref_last_sync_attempted"; /** Long indicating when a sync last SUCCEEDED */ public static final String PREF_LAST_SYNC_SUCCEEDED = "_pref_last_sync_succeeded"; /** Sync interval that's currently configured */ public static final String PREF_CUR_SYNC_INTERVAL = "_pref_cur_sync_interval"; public static final String PREF_SENT_TOKEN_TO_SERVER = "_pref_sent_token_to_server"; /** Boolean preference that indicates whether we enabled the sdk test mode or not. 
*/ public static final String PREF_SDK_TEST_MODE_ENABLED = "sdk_test_mode_enabled"; public static void init(final Context context) {} public static void markNotFirstUsage(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.edit().putBoolean(PREF_FIRST_USAGE, false).apply(); } public static boolean isFirstUsage(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); return sp.getBoolean(PREF_FIRST_USAGE, true); } public static void markSetupDone(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.edit().putBoolean(PREF_SETUP_DONE, true).apply(); } public static boolean isSetupDone(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); // // Check what year we're configured for // int conferenceYear = sp.getInt(PREF_CONFERENCE_YEAR, 0); // if (conferenceYear != Config.CONFERENCE_YEAR) { // // Application is configured for a different conference year. Reset // // preferences and re-run setup. 
// sp.edit().clear().putInt(PREF_CONFERENCE_YEAR, Config.CONFERENCE_YEAR).commit(); // } return sp.getBoolean(PREF_SETUP_DONE, false); } public static void markAlarmSetupDone(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.edit().putBoolean(PREF_ALARM_SETUP_DONE, true).apply(); } public static boolean isAlarmSetupDone(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); return sp.getBoolean(PREF_ALARM_SETUP_DONE, false); } public static void markDataBootstrapDone(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.edit().putBoolean(PREF_DATA_BOOTSTRAP_DONE, true).apply(); } public static boolean isDataBootstrapDone(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); return sp.getBoolean(PREF_DATA_BOOTSTRAP_DONE, false); } public static long getLastSyncAttemptedTime(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); return sp.getLong(PREF_LAST_SYNC_ATTEMPTED, 0L); } public static void markSyncAttemptedNow(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.edit().putLong(PREF_LAST_SYNC_ATTEMPTED, UIUtils.getCurrentTime(context)).apply(); } public static long getLastSyncSucceededTime(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); return sp.getLong(PREF_LAST_SYNC_SUCCEEDED, 0L); } public static void markSyncSucceededNow(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.edit().putLong(PREF_LAST_SYNC_SUCCEEDED, UIUtils.getCurrentTime(context)).apply(); } public static long getCurSyncInterval(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); return sp.getLong(PREF_CUR_SYNC_INTERVAL, 0L); 
} public static void setCurSyncInterval(final Context context, long interval) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.edit().putLong(PREF_CUR_SYNC_INTERVAL, interval).apply(); } public static boolean hasSentTokenToServer(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); return sp.getBoolean(PREF_SENT_TOKEN_TO_SERVER, false); } public static void setSentTokenToServer(final Context context, boolean sent) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.edit().putBoolean(PREF_SENT_TOKEN_TO_SERVER, sent).apply(); } public static boolean isSdkTestModeEnabled(final Context context) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); return sp.getBoolean(PREF_SDK_TEST_MODE_ENABLED, false); } public static void enableSdkTestMode(final Context context, boolean enabled) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.edit().putBoolean(PREF_SDK_TEST_MODE_ENABLED, enabled).apply(); } public static void registerOnSharedPreferenceChangeListener(final Context context, SharedPreferences.OnSharedPreferenceChangeListener listener) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.registerOnSharedPreferenceChangeListener(listener); } public static void unregisterOnSharedPreferenceChangeListener(final Context context, SharedPreferences.OnSharedPreferenceChangeListener listener) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context); sp.unregisterOnSharedPreferenceChangeListener(listener); } }
apache-2.0
tongange/SITC7
sitc7/src/main/java/qin/sitc7/domain/StudentNativePlace.java
1717
package qin.sitc7.domain; import qin.javaee7.core.hibernate.SuperEntity7; import qin.sitc7.core.SitcPrint; import java.util.HashSet; import java.util.Set; @SuppressWarnings("all") public class StudentNativePlace implements SuperEntity7<Long> { //region constructor public StudentNativePlace() { SitcPrint.print7.prints("ini StudentNativePlace class"); } public StudentNativePlace(String studentNativePlaceName) { this.studentNativePlaceName = studentNativePlaceName; } //endregion //region id private Long id; @Override public Long getId() { return id; } @Override public void setId(Long id) { this.id = id; } //endregion //region studentNativePlaceName private String studentNativePlaceName; public String getStudentNativePlaceName() { return studentNativePlaceName; } public void setStudentNativePlaceName(String studentNativePlaceName) { this.studentNativePlaceName = studentNativePlaceName; } //endregion //region toString @Override public String toString() { final StringBuilder sb = new StringBuilder("StudentNativePlace{"); sb.append("id=").append(id); sb.append(", studentNativePlaceName='").append(studentNativePlaceName).append('\''); sb.append('}'); return sb.toString(); } //endregion //region studentSet private Set<Student> studentSet = new HashSet<>(); public Set<Student> getStudentSet() { return studentSet; } public void setStudentSet(Set<Student> studentSet) { this.studentSet = studentSet; } //endregion }
apache-2.0
carrotsearch/progresso
progresso/src/main/java/com/carrotsearch/progresso/views/console/UpdateableConsoleView.java
5723
package com.carrotsearch.progresso.views.console; import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import com.carrotsearch.progresso.ProgressView; import com.carrotsearch.progresso.Task; import com.carrotsearch.progresso.Task.Status; import com.carrotsearch.progresso.Tracker; import com.carrotsearch.progresso.util.ColumnCounter; import com.carrotsearch.progresso.util.LineFormatter; import com.carrotsearch.progresso.util.LineFormatter.Alignment; /** * A progress view which assumes the terminal supports "backtracking" (carriage return) * properly. */ public class UpdateableConsoleView implements ProgressView { private static final long DEFAULT_UPDATE_INTERVAL = TimeUnit.SECONDS.toMillis(1); private final ArrayList<? extends TrackerFormatter> formatters; private final ConsoleWriter consoleWriter; private long nextUpdate = 0L; private final TaskStatusRecovery statusUpdater = new TaskStatusRecovery(); private Task<?> activeTask = null; private long modHash = 0L; private final ArrayDeque<Task<?>> doneTasks = new ArrayDeque<>(); private final ArrayDeque<Task<?>> startedTasks = new ArrayDeque<>(); private final Set<Task<?>> topTasks; public UpdateableConsoleView(ConsoleWriter out, Collection<Task<?>> topTasks, List<? 
extends TrackerFormatter> formatters) { this.topTasks = new HashSet<>(topTasks); this.formatters = new ArrayList<>(formatters); this.consoleWriter = out; } public UpdateableConsoleView(ConsoleWriter out, Collection<Task<?>> topTasks) { this(out, topTasks, defaultFormatters()); } public UpdateableConsoleView(ConsoleWriter out) { this(out, Collections.emptyList()); } @Override public void update(Set<Task<?>> tasks) { statusUpdater.update(tasks, (t) -> { startedTasks.addLast(t); formatters.forEach(fmt -> fmt.taskStarted(t)); }, (t) -> doneTasks.addLast(t)); // If active task has completed, finalize its progress. if (activeTask != null && doneTasks.remove(activeTask)) { emitCompleted(activeTask); activeTask = null; } // If no active task is running, flush any pending completed tasks. if (activeTask == null) { while (!doneTasks.isEmpty()) { emitCompleted(doneTasks.removeFirst()); } } // Pick or update the active task. pickNewActive(); // Update the active task, if any. if (activeTask != null) { taskUpdate(activeTask); } } private void pickNewActive() { for (Iterator<Task<?>> i = startedTasks.iterator(); i.hasNext();) { Task<?> t = i.next(); if (t.getStatus() == Status.STARTED) { if (activeTask != t && (activeTask == null || t.isChildOf(activeTask))) { activeTask = t; modHash = ~activeTask.getTracker().modHash(); } } else { i.remove(); } } } private void emitCompleted(Task<?> t) { try { // Skipped tasks don't emit anything. if (t.getStatus() == Status.SKIPPED) { consoleWriter.updateLine(""); return; } // Proceed to a new line on top-level tasks. if (topTasks.isEmpty() || topTasks.contains(t)) { consoleWriter.printLine(formatView(t)); } else { consoleWriter.updateLine(formatView(t)); } } catch (IOException e) { throw new UncheckedIOException(e); } } private void taskUpdate(Task<?> t) { // When there's no change and we haven't reached // the update interval, skip the update. 
long modHash = t.getTracker().modHash(); if (this.modHash == modHash && now() < nextUpdate) { return; } this.modHash = modHash; nextUpdate = now() + DEFAULT_UPDATE_INTERVAL; try { consoleWriter.updateLine(formatView(t)); } catch (IOException e) { throw new UncheckedIOException(e); } } private String formatView(Task<?> task) { final Status taskStatus = task.getStatus(); if (taskStatus != Status.STARTED && taskStatus != Status.DONE && taskStatus != Status.SKIPPED) { throw new AssertionError(); } final ColumnCounter cc = ColumnCounter.DEFAULT; final LineFormatter lf = new LineFormatter(cc); if (!topTasks.isEmpty()) { String top = Long.toString(topTasks.size()); long current = topTasks.stream() .filter((t) -> (t == task || task.isChildOf(t) || t.isDone())) .count(); int width = 2 * cc.columns(top) + 1 + 1; lf.cell(width, width, Alignment.RIGHT, current + "/" + top + " "); } // Leave space for cursor. Certain terminals make an automatic next line // feed if cursor doesn't fit. int lineWidth = consoleWriter.lineWidth() - 1; final Tracker tracker = task.getTracker(); for (TrackerFormatter formatter : formatters) { if (formatter.supports(lineWidth, tracker)) { formatter.format(lf, lineWidth, task, tracker); break; } } return lf.format(lineWidth); } private long now() { return System.currentTimeMillis(); } public static List<AbstractTrackerFormatter<?>> defaultFormatters() { return Arrays.asList( new UpdateablePathTrackerFormatter(), new UpdateableCompletedRatioTrackerFormatter(), new UpdateableLongTrackerFormatter(), new UpdateableGenericTrackerFormatter()); } }
apache-2.0
Pathfinder-Fr/YAFNET
yafsrc/Lucene.Net/Lucene.Net/Util/Packed/BlockPackedReader.cs
4365
using YAF.Lucene.Net.Store; using System; using System.Diagnostics; namespace YAF.Lucene.Net.Util.Packed { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// <summary> /// Provides random access to a stream written with <see cref="BlockPackedWriter"/>. /// <para/> /// @lucene.internal /// </summary> public sealed class BlockPackedReader : Int64Values { private readonly int blockShift, blockMask; private readonly long valueCount; private readonly long[] minValues; private readonly PackedInt32s.Reader[] subReaders; /// <summary> /// Sole constructor. 
</summary> public BlockPackedReader(IndexInput @in, int packedIntsVersion, int blockSize, long valueCount, bool direct) { this.valueCount = valueCount; blockShift = PackedInt32s.CheckBlockSize(blockSize, AbstractBlockPackedWriter.MIN_BLOCK_SIZE, AbstractBlockPackedWriter.MAX_BLOCK_SIZE); blockMask = blockSize - 1; int numBlocks = PackedInt32s.NumBlocks(valueCount, blockSize); long[] minValues = null; subReaders = new PackedInt32s.Reader[numBlocks]; for (int i = 0; i < numBlocks; ++i) { int token = @in.ReadByte() & 0xFF; int bitsPerValue = (int)((uint)token >> AbstractBlockPackedWriter.BPV_SHIFT); if (bitsPerValue > 64) { throw new Exception("Corrupted"); } if ((token & AbstractBlockPackedWriter.MIN_VALUE_EQUALS_0) == 0) { if (minValues == null) { minValues = new long[numBlocks]; } minValues[i] = BlockPackedReaderIterator.ZigZagDecode(1L + BlockPackedReaderIterator.ReadVInt64(@in)); } if (bitsPerValue == 0) { subReaders[i] = new PackedInt32s.NullReader(blockSize); } else { int size = (int)Math.Min(blockSize, valueCount - (long)i * blockSize); if (direct) { long pointer = @in.GetFilePointer(); subReaders[i] = PackedInt32s.GetDirectReaderNoHeader(@in, PackedInt32s.Format.PACKED, packedIntsVersion, size, bitsPerValue); @in.Seek(pointer + PackedInt32s.Format.PACKED.ByteCount(packedIntsVersion, size, bitsPerValue)); } else { subReaders[i] = PackedInt32s.GetReaderNoHeader(@in, PackedInt32s.Format.PACKED, packedIntsVersion, size, bitsPerValue); } } } this.minValues = minValues; } public override long Get(long index) { Debug.Assert(index >= 0 && index < valueCount); int block = (int)((long)((ulong)index >> blockShift)); int idx = (int)(index & blockMask); return (minValues == null ? 0 : minValues[block]) + subReaders[block].Get(idx); } /// <summary> /// Returns approximate RAM bytes used. </summary> public long RamBytesUsed() { long size = 0; foreach (PackedInt32s.Reader reader in subReaders) { size += reader.RamBytesUsed(); } return size; } } }
apache-2.0
electrum/presto
core/trino-main/src/main/java/io/trino/sql/planner/planprinter/IoPlanPrinter.java
28720
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.sql.planner.planprinter; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableSet; import io.trino.Session; import io.trino.cost.PlanCostEstimate; import io.trino.cost.PlanNodeStatsEstimate; import io.trino.cost.StatsAndCosts; import io.trino.metadata.Metadata; import io.trino.metadata.TableHandle; import io.trino.metadata.TableMetadata; import io.trino.spi.connector.CatalogSchemaTableName; import io.trino.spi.connector.ColumnHandle; import io.trino.spi.connector.ColumnMetadata; import io.trino.spi.predicate.Domain; import io.trino.spi.predicate.Range; import io.trino.spi.predicate.TupleDomain; import io.trino.spi.type.Type; import io.trino.spi.type.TypeOperators; import io.trino.sql.planner.DomainTranslator; import io.trino.sql.planner.Plan; import io.trino.sql.planner.plan.FilterNode; import io.trino.sql.planner.plan.PlanNode; import io.trino.sql.planner.plan.PlanVisitor; import io.trino.sql.planner.plan.TableFinishNode; import io.trino.sql.planner.plan.TableScanNode; import io.trino.sql.planner.plan.TableWriterNode; import io.trino.sql.planner.plan.TableWriterNode.CreateReference; import io.trino.sql.planner.plan.TableWriterNode.CreateTarget; import io.trino.sql.planner.plan.TableWriterNode.DeleteTarget; import io.trino.sql.planner.plan.TableWriterNode.InsertReference; import 
io.trino.sql.planner.plan.TableWriterNode.InsertTarget; import io.trino.sql.planner.plan.TableWriterNode.UpdateTarget; import io.trino.sql.planner.plan.TableWriterNode.WriterTarget; import io.trino.sql.planner.planprinter.IoPlanPrinter.FormattedMarker.Bound; import io.trino.sql.planner.planprinter.IoPlanPrinter.IoPlan.IoPlanBuilder; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static io.airlift.json.JsonCodec.jsonCodec; import static java.lang.String.format; import static java.util.Objects.requireNonNull; public class IoPlanPrinter { private final Plan plan; private final Metadata metadata; private final TypeOperators typeOperators; private final Session session; private final ValuePrinter valuePrinter; private IoPlanPrinter(Plan plan, Metadata metadata, TypeOperators typeOperators, Session session) { this.plan = requireNonNull(plan, "plan is null"); this.metadata = requireNonNull(metadata, "metadata is null"); this.typeOperators = requireNonNull(typeOperators, "typeOperators is null"); this.session = requireNonNull(session, "session is null"); this.valuePrinter = new ValuePrinter(metadata, session); } /** * @throws io.trino.NotInTransactionException if called without an active transaction */ public static String textIoPlan(Plan plan, Metadata metadata, TypeOperators typeOperators, Session session) { return new IoPlanPrinter(plan, metadata, typeOperators, session).print(); } private String print() { IoPlanBuilder ioPlanBuilder = new IoPlanBuilder(plan); plan.getRoot().accept(new IoPlanVisitor(), ioPlanBuilder); return jsonCodec(IoPlan.class).toJson(ioPlanBuilder.build()); } public static class IoPlan { private final Set<TableColumnInfo> inputTableColumnInfos; private final 
Optional<CatalogSchemaTableName> outputTable; private final EstimatedStatsAndCost estimate; @JsonCreator public IoPlan( @JsonProperty("inputTableColumnInfos") Set<TableColumnInfo> inputTableColumnInfos, @JsonProperty("outputTable") Optional<CatalogSchemaTableName> outputTable, @JsonProperty("estimate") EstimatedStatsAndCost estimate) { this.inputTableColumnInfos = ImmutableSet.copyOf(requireNonNull(inputTableColumnInfos, "inputTableColumnInfos is null")); this.outputTable = requireNonNull(outputTable, "outputTable is null"); this.estimate = requireNonNull(estimate, "estimate is null"); } @JsonProperty public Set<TableColumnInfo> getInputTableColumnInfos() { return inputTableColumnInfos; } @JsonProperty public Optional<CatalogSchemaTableName> getOutputTable() { return outputTable; } @JsonProperty public EstimatedStatsAndCost getEstimate() { return estimate; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } IoPlan o = (IoPlan) obj; return Objects.equals(inputTableColumnInfos, o.inputTableColumnInfos) && Objects.equals(outputTable, o.outputTable); } @Override public int hashCode() { return Objects.hash(inputTableColumnInfos, outputTable); } @Override public String toString() { return toStringHelper(this) .add("inputTableColumnInfos", inputTableColumnInfos) .add("outputTable", outputTable) .add("estimate", estimate) .toString(); } protected static class IoPlanBuilder { private final Plan plan; private final Set<TableColumnInfo> inputTableColumnInfos; private Optional<CatalogSchemaTableName> outputTable; private IoPlanBuilder(Plan plan) { this.plan = plan; this.inputTableColumnInfos = new HashSet<>(); this.outputTable = Optional.empty(); } private IoPlanBuilder addInputTableColumnInfo(TableColumnInfo tableColumnInfo) { inputTableColumnInfos.add(tableColumnInfo); return this; } private IoPlanBuilder setOutputTable(CatalogSchemaTableName outputTable) { this.outputTable = 
Optional.of(outputTable); return this; } private IoPlan build() { return new IoPlan(inputTableColumnInfos, outputTable, getEstimatedStatsAndCost()); } private EstimatedStatsAndCost getEstimatedStatsAndCost() { PlanNode root = plan.getRoot(); StatsAndCosts statsAndCosts = plan.getStatsAndCosts(); PlanNodeStatsEstimate statsEstimate = statsAndCosts.getStats().get(root.getId()); PlanCostEstimate costEstimate = statsAndCosts.getCosts().get(root.getId()); return new EstimatedStatsAndCost( statsEstimate.getOutputRowCount(), statsEstimate.getOutputSizeInBytes(root.getOutputSymbols(), plan.getTypes()), costEstimate.getCpuCost(), costEstimate.getMaxMemory(), costEstimate.getNetworkCost()); } } public static class TableColumnInfo { private final CatalogSchemaTableName table; private final Set<ColumnConstraint> columnConstraints; private final EstimatedStatsAndCost estimate; @JsonCreator public TableColumnInfo( @JsonProperty("table") CatalogSchemaTableName table, @JsonProperty("columnConstraints") Set<ColumnConstraint> columnConstraints, @JsonProperty("estimate") EstimatedStatsAndCost estimate) { this.table = requireNonNull(table, "table is null"); this.columnConstraints = requireNonNull(columnConstraints, "columnConstraints is null"); this.estimate = requireNonNull(estimate, "estimate is null"); } @JsonProperty public CatalogSchemaTableName getTable() { return table; } @JsonProperty public Set<ColumnConstraint> getColumnConstraints() { return columnConstraints; } @JsonProperty public EstimatedStatsAndCost getEstimate() { return estimate; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } TableColumnInfo o = (TableColumnInfo) obj; return Objects.equals(table, o.table) && Objects.equals(columnConstraints, o.columnConstraints) && Objects.equals(estimate, o.estimate); } @Override public int hashCode() { return Objects.hash(table, columnConstraints, estimate); } @Override public 
String toString() { return toStringHelper(this) .add("table", table) .add("columnConstraints", columnConstraints) .add("estimate", estimate) .toString(); } } } public static class ColumnConstraint { private final String columnName; private final Type type; private final FormattedDomain domain; @JsonCreator public ColumnConstraint( @JsonProperty("columnName") String columnName, @JsonProperty("type") Type type, @JsonProperty("domain") FormattedDomain domain) { this.columnName = requireNonNull(columnName, "columnName is null"); this.type = requireNonNull(type, "type is null"); this.domain = requireNonNull(domain, "domain is null"); } @JsonProperty public String getColumnName() { return columnName; } @JsonProperty public Type getType() { return type; } @JsonProperty public FormattedDomain getDomain() { return domain; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } ColumnConstraint o = (ColumnConstraint) obj; return Objects.equals(columnName, o.columnName) && Objects.equals(type, o.type) && Objects.equals(domain, o.domain); } @Override public int hashCode() { return Objects.hash(columnName, type, domain); } @Override public String toString() { return toStringHelper(this) .add("columnName", columnName) .add("typeSignature", type) .add("domain", domain) .toString(); } } public static class EstimatedStatsAndCost { private final double outputRowCount; private final double outputSizeInBytes; private final double cpuCost; private final double maxMemory; private final double networkCost; @JsonCreator public EstimatedStatsAndCost( @JsonProperty("outputRowCount") double outputRowCount, @JsonProperty("outputSizeInBytes") double outputSizeInBytes, @JsonProperty("cpuCost") double cpuCost, @JsonProperty("maxMemory") double maxMemory, @JsonProperty("networkCost") double networkCost) { this.outputRowCount = outputRowCount; this.outputSizeInBytes = outputSizeInBytes; this.cpuCost = 
cpuCost; this.maxMemory = maxMemory; this.networkCost = networkCost; } @JsonProperty public double getOutputRowCount() { return outputRowCount; } @JsonProperty public double getOutputSizeInBytes() { return outputSizeInBytes; } @JsonProperty public double getCpuCost() { return cpuCost; } @JsonProperty public double getMaxMemory() { return maxMemory; } @JsonProperty public double getNetworkCost() { return networkCost; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } EstimatedStatsAndCost o = (EstimatedStatsAndCost) obj; return Objects.equals(outputRowCount, o.outputRowCount) && Objects.equals(outputSizeInBytes, o.outputSizeInBytes) && Objects.equals(cpuCost, o.cpuCost) && Objects.equals(maxMemory, o.maxMemory) && Objects.equals(networkCost, o.networkCost); } @Override public int hashCode() { return Objects.hash(outputRowCount, outputSizeInBytes, cpuCost, maxMemory, networkCost); } @Override public String toString() { return toStringHelper(this) .add("outputRowCount", outputRowCount) .add("outputSizeInBytes", outputSizeInBytes) .add("cpuCost", cpuCost) .add("maxMemory", maxMemory) .add("networkCost", networkCost) .toString(); } } public static class FormattedDomain { private final boolean nullsAllowed; private final Set<FormattedRange> ranges; @JsonCreator public FormattedDomain( @JsonProperty("nullsAllowed") boolean nullsAllowed, @JsonProperty("ranges") Set<FormattedRange> ranges) { this.nullsAllowed = nullsAllowed; this.ranges = ImmutableSet.copyOf(requireNonNull(ranges, "ranges is null")); } @JsonProperty public boolean isNullsAllowed() { return nullsAllowed; } @JsonProperty public Set<FormattedRange> getRanges() { return ranges; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } FormattedDomain o = (FormattedDomain) obj; return Objects.equals(nullsAllowed, 
o.nullsAllowed) && Objects.equals(ranges, o.ranges); } @Override public int hashCode() { return Objects.hash(nullsAllowed, ranges); } @Override public String toString() { return toStringHelper(this) .add("nullsAllowed", nullsAllowed) .add("ranges", ranges) .toString(); } } public static class FormattedRange { private final FormattedMarker low; private final FormattedMarker high; @JsonCreator public FormattedRange( @JsonProperty("low") FormattedMarker low, @JsonProperty("high") FormattedMarker high) { this.low = requireNonNull(low, "low is null"); this.high = requireNonNull(high, "high is null"); } @JsonProperty public FormattedMarker getLow() { return low; } @JsonProperty public FormattedMarker getHigh() { return high; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } FormattedRange o = (FormattedRange) obj; return Objects.equals(low, o.low) && Objects.equals(high, o.high); } @Override public int hashCode() { return Objects.hash(low, high); } @Override public String toString() { return toStringHelper(this) .add("low", low) .add("high", high) .toString(); } } public static class FormattedMarker { public enum Bound { BELOW, // lower than the value, but infinitesimally close to the value EXACTLY, // exactly the value ABOVE // higher than the value, but infinitesimally close to the value } private final Optional<String> value; private final Bound bound; @JsonCreator public FormattedMarker( @JsonProperty("value") Optional<String> value, @JsonProperty("bound") Bound bound) { this.value = requireNonNull(value, "value is null"); this.bound = requireNonNull(bound, "bound is null"); } @JsonProperty public Optional<String> getValue() { return value; } @JsonProperty public Bound getBound() { return bound; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } FormattedMarker o = 
(FormattedMarker) obj; return Objects.equals(value, o.value) && bound == o.bound; } @Override public int hashCode() { return Objects.hash(value, bound); } @Override public String toString() { return toStringHelper(this) .add("value", value) .add("bound", bound) .toString(); } } private class IoPlanVisitor extends PlanVisitor<Void, IoPlanBuilder> { @Override protected Void visitPlan(PlanNode node, IoPlanBuilder context) { return processChildren(node, context); } @Override public Void visitFilter(FilterNode node, IoPlanBuilder context) { PlanNode source = node.getSource(); if (source instanceof TableScanNode) { TableScanNode tableScanNode = (TableScanNode) source; DomainTranslator.ExtractionResult decomposedPredicate = DomainTranslator.fromPredicate( metadata, typeOperators, session, node.getPredicate(), plan.getTypes()); TupleDomain<ColumnHandle> filterDomain = decomposedPredicate.getTupleDomain() .transform(tableScanNode.getAssignments()::get); addInputTableConstraints(filterDomain, tableScanNode, context); return null; } return processChildren(node, context); } @Override public Void visitTableScan(TableScanNode node, IoPlanBuilder context) { addInputTableConstraints(TupleDomain.all(), node, context); return null; } @Override public Void visitTableFinish(TableFinishNode node, IoPlanBuilder context) { WriterTarget writerTarget = node.getTarget(); if (writerTarget instanceof CreateTarget) { CreateTarget target = (CreateTarget) writerTarget; context.setOutputTable(new CatalogSchemaTableName( target.getHandle().getCatalogName().getCatalogName(), target.getSchemaTableName().getSchemaName(), target.getSchemaTableName().getTableName())); } else if (writerTarget instanceof InsertTarget) { InsertTarget target = (InsertTarget) writerTarget; context.setOutputTable(new CatalogSchemaTableName( target.getHandle().getCatalogName().getCatalogName(), target.getSchemaTableName().getSchemaName(), target.getSchemaTableName().getTableName())); } else if (writerTarget instanceof 
DeleteTarget) { DeleteTarget target = (DeleteTarget) writerTarget; context.setOutputTable(new CatalogSchemaTableName( target.getHandleOrElseThrow().getCatalogName().getCatalogName(), target.getSchemaTableName().getSchemaName(), target.getSchemaTableName().getTableName())); } else if (writerTarget instanceof UpdateTarget) { UpdateTarget target = (UpdateTarget) writerTarget; context.setOutputTable(new CatalogSchemaTableName( target.getHandleOrElseThrow().getCatalogName().getCatalogName(), target.getSchemaTableName().getSchemaName(), target.getSchemaTableName().getTableName())); } else if (writerTarget instanceof TableWriterNode.RefreshMaterializedViewTarget) { TableWriterNode.RefreshMaterializedViewTarget target = (TableWriterNode.RefreshMaterializedViewTarget) writerTarget; context.setOutputTable(new CatalogSchemaTableName( target.getInsertHandle().getCatalogName().getCatalogName(), target.getSchemaTableName().getSchemaName(), target.getSchemaTableName().getTableName())); } else if (writerTarget instanceof CreateReference || writerTarget instanceof InsertReference) { throw new IllegalStateException(format("%s should not appear in final plan", writerTarget.getClass().getSimpleName())); } else { throw new IllegalStateException(format("Unknown WriterTarget subclass %s", writerTarget.getClass().getSimpleName())); } return processChildren(node, context); } private void addInputTableConstraints(TupleDomain<ColumnHandle> filterDomain, TableScanNode tableScan, IoPlanBuilder context) { TableHandle table = tableScan.getTable(); TableMetadata tableMetadata = metadata.getTableMetadata(session, table); TupleDomain<ColumnHandle> predicateDomain = metadata.getTableProperties(session, table).getPredicate(); EstimatedStatsAndCost estimatedStatsAndCost = getEstimatedStatsAndCost(tableScan); context.addInputTableColumnInfo( new IoPlan.TableColumnInfo( new CatalogSchemaTableName( tableMetadata.getCatalogName().getCatalogName(), tableMetadata.getTable().getSchemaName(), 
tableMetadata.getTable().getTableName()), parseConstraints(table, predicateDomain.intersect(filterDomain)), estimatedStatsAndCost)); } private EstimatedStatsAndCost getEstimatedStatsAndCost(TableScanNode node) { StatsAndCosts statsAndCosts = plan.getStatsAndCosts(); PlanNodeStatsEstimate stats = statsAndCosts.getStats().get(node.getId()); PlanCostEstimate cost = statsAndCosts.getCosts().get(node.getId()); EstimatedStatsAndCost estimatedStatsAndCost = new EstimatedStatsAndCost( stats.getOutputRowCount(), stats.getOutputSizeInBytes(node.getOutputSymbols(), plan.getTypes()), cost.getCpuCost(), cost.getMaxMemory(), cost.getNetworkCost()); return estimatedStatsAndCost; } private Set<ColumnConstraint> parseConstraints(TableHandle tableHandle, TupleDomain<ColumnHandle> constraint) { checkArgument(!constraint.isNone()); ImmutableSet.Builder<ColumnConstraint> columnConstraints = ImmutableSet.builder(); for (Map.Entry<ColumnHandle, Domain> entry : constraint.getDomains().get().entrySet()) { ColumnMetadata columnMetadata = metadata.getColumnMetadata(session, tableHandle, entry.getKey()); columnConstraints.add(new ColumnConstraint( columnMetadata.getName(), columnMetadata.getType(), parseDomain(entry.getValue().simplify()))); } return columnConstraints.build(); } private FormattedDomain parseDomain(Domain domain) { ImmutableSet.Builder<FormattedRange> formattedRanges = ImmutableSet.builder(); Type type = domain.getType(); domain.getValues().getValuesProcessor().consume( ranges -> formattedRanges.addAll( ranges.getOrderedRanges().stream() .map(this::formatRange) .collect(toImmutableSet())), discreteValues -> formattedRanges.addAll( discreteValues.getValues().stream() .map(value -> valuePrinter.castToVarcharOrFail(type, value)) .map(value -> new FormattedMarker(Optional.of(value), Bound.EXACTLY)) .map(marker -> new FormattedRange(marker, marker)) .collect(toImmutableSet())), allOrNone -> { throw new IllegalStateException("Unreachable AllOrNone consumer"); }); return new 
FormattedDomain(domain.isNullAllowed(), formattedRanges.build()); } private FormattedRange formatRange(Range range) { FormattedMarker low = range.isLowUnbounded() ? new FormattedMarker(Optional.empty(), Bound.ABOVE) : new FormattedMarker( Optional.of(valuePrinter.castToVarcharOrFail(range.getType(), range.getLowBoundedValue())), range.isLowInclusive() ? Bound.EXACTLY : Bound.ABOVE); FormattedMarker high = range.isHighUnbounded() ? new FormattedMarker(Optional.empty(), Bound.BELOW) : new FormattedMarker( Optional.of(valuePrinter.castToVarcharOrFail(range.getType(), range.getHighBoundedValue())), range.isHighInclusive() ? Bound.EXACTLY : Bound.BELOW); return new FormattedRange(low, high); } private Void processChildren(PlanNode node, IoPlanBuilder context) { for (PlanNode child : node.getSources()) { child.accept(this, context); } return null; } } }
apache-2.0
henjuv/Mg2
Mg2/ViewModels/ActionsViewModel.cs
1733
using GalaSoft.MvvmLight.Command; using GalaSoft.MvvmLight.Threading; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.Linq; using System.Windows.Input; using MgKit; using MgKit.Interface; namespace Mg2.ViewModels { public class ActionsViewModel : Screen { public bool CanClose { get; private set; } public ObservableCollection<PackageAction> List { get; private set; } public IEnumerable<PackageAction> OrderedList { get { return PackageAction.OrderActionsByType(List); } } public ICommand CloseCommand { get; private set; } public ActionsViewModel(IPackageManager packageManager, List<PackageAction> actions) { DisplayName = ""; List = new ObservableCollection<PackageAction>(actions); List.CollectionChanged += (sender, args) => RaisePropertyChanged(() => OrderedList); var eventHandler = new PropertyChangedEventHandler((sender, args) => { if (List.Any(n => n.Progress != 100)) return; CanClose = true; RaisePropertyChanged(() => CanClose); // TODO: if autoclose in options: TryClose(true) }); actions.ForEach(n => n.SubscribeToPropertyChanged(eventHandler)); foreach (var action in actions) { action.ActionAdded += a => DispatcherHelper.CheckBeginInvokeOnUI(() => List.Add(a)); } CloseCommand = new RelayCommand(() => TryClose(true)); packageManager.Execute(actions.ToArray()); } } }
apache-2.0
korfuri/goref
elasticsearch/filterf_test.go
584
package elasticsearch_test import ( "errors" "testing" "github.com/korfuri/goref/elasticsearch" "github.com/korfuri/goref/elasticsearch/mocks" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" elastic "gopkg.in/olivere/elastic.v5" ) func TestFilterF(t *testing.T) { client := &mocks.Client{} client.On("GetPackage", mock.Anything, "v1@0@a").Return(&elastic.GetResult{}, nil) client.On("GetPackage", mock.Anything, "v1@0@b").Return(nil, errors.New("404")) f := elasticsearch.FilterF(client) assert.False(t, f("a", 0)) assert.True(t, f("b", 0)) }
apache-2.0
vivsriaus/azure-xplat-cli
test/recordings/arm-cli-cdn-management-tests/arm_Cdn_Endpoints_delete_command_should_delete_the_endpoint_succesfully.nock.js
7050
// This file has been autogenerated. var profile = require('../../../lib/util/profile'); exports.getMockedProfile = function () { var newProfile = new profile.Profile(); newProfile.addSubscription(new profile.Subscription({ id: 'bab71ab8-daff-4f58-8dfc-ed0d61a3fa6b', name: 'KasotaTest-001', user: { name: 'user@domain.example', type: 'user' }, tenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47', state: 'Enabled', registeredProviders: [], _eventsCount: '1', isDefault: true }, newProfile.environments['AzureCloud'])); return newProfile; }; exports.setEnvironment = function() { process.env['AZURE_ARM_TEST_LOCATION'] = 'East US 2'; process.env['AZURE_ARM_TEST_RESOURCE_GROUP_1'] = 'xplattestadlsrg01'; process.env['AZURE_ARM_TEST_CDN_PROFILE_1'] = 'cliTestProfile01'; process.env['AZURE_ARM_TEST_RESOURCE_GROUP_2'] = 'xplattestadlsrg02'; process.env['AZURE_ARM_TEST_CDN_PROFILE_2'] = 'cliTestProfile02'; process.env['AZURE_ARM_TEST_CDN_ENDPOINT_1'] = 'cliTestEndpoint01'; process.env['AZURE_ARM_TEST_CDN_ENDPOINT_2'] = 'cliTestEndpoint02'; process.env['AZURE_ARM_TEST_CDN_ORIGIN_1'] = 'cliTestOrigin01'; process.env['AZURE_ARM_TEST_CDN_ORIGIN_2'] = 'cliTestOrigin02'; process.env['AZURE_ARM_TEST_ENDPOINT_TEST_LOCATION_1'] = 'eastus'; process.env['AZURE_ARM_TEST_CUSTOM_DOMAIN_NAME_1'] = 'cliTestCustomDomain01'; process.env['AZURE_ARM_TEST_CUSTOM_DOMAIN_HOST_NAME_1'] = 'cli-1-406f580d-a634-4077-9b11-216a70c5998d.azureedge-test.net'; }; exports.scopes = [[function (nock) { var result = nock('http://management.azure.com:443') .delete('/subscriptions/bab71ab8-daff-4f58-8dfc-ed0d61a3fa6b/resourceGroups/xplattestadlsrg01/providers/Microsoft.Cdn/profiles/cliTestProfile01/endpoints/cliTestEndpoint01?api-version=2015-06-01') .reply(202, "", { 'cache-control': 'no-cache', pragma: 'no-cache', 'content-length': '0', expires: '-1', location: 
'https://management.azure.com/subscriptions/bab71ab8-daff-4f58-8dfc-ed0d61a3fa6b/resourcegroups/xplattestadlsrg01/providers/Microsoft.Cdn/operationresults/8d3cbcdb-4b2f-473d-8082-4a91d321e1c9/profileresults/cliTestProfile01/endpointresults/cliTestEndpoint01?api-version=2015-06-01', 'retry-after': '10', 'x-ms-request-id': '3834148d-a856-452d-9975-dec530bd6dab', 'x-ms-client-request-id': 'afad30cb-61f8-4af7-8211-8b0c5cc75cda', 'azure-asyncoperation': 'https://management.azure.com/subscriptions/bab71ab8-daff-4f58-8dfc-ed0d61a3fa6b/resourcegroups/xplattestadlsrg01/providers/Microsoft.Cdn/operationresults/8d3cbcdb-4b2f-473d-8082-4a91d321e1c9?api-version=2015-06-01', 'strict-transport-security': 'max-age=31536000; includeSubDomains', server: 'Microsoft-IIS/8.5', 'x-aspnet-version': '4.0.30319', 'x-powered-by': 'ASP.NET', 'x-ms-ratelimit-remaining-subscription-writes': '1199', 'x-ms-correlation-request-id': '944eac1a-ce63-487b-b73a-e80bd5f385cd', 'x-ms-routing-request-id': 'NORTHCENTRALUS:20160317T183949Z:944eac1a-ce63-487b-b73a-e80bd5f385cd', date: 'Thu, 17 Mar 2016 18:39:49 GMT', connection: 'close' }); return result; }, function (nock) { var result = nock('https://management.azure.com:443') .delete('/subscriptions/bab71ab8-daff-4f58-8dfc-ed0d61a3fa6b/resourceGroups/xplattestadlsrg01/providers/Microsoft.Cdn/profiles/cliTestProfile01/endpoints/cliTestEndpoint01?api-version=2015-06-01') .reply(202, "", { 'cache-control': 'no-cache', pragma: 'no-cache', 'content-length': '0', expires: '-1', location: 'https://management.azure.com/subscriptions/bab71ab8-daff-4f58-8dfc-ed0d61a3fa6b/resourcegroups/xplattestadlsrg01/providers/Microsoft.Cdn/operationresults/8d3cbcdb-4b2f-473d-8082-4a91d321e1c9/profileresults/cliTestProfile01/endpointresults/cliTestEndpoint01?api-version=2015-06-01', 'retry-after': '10', 'x-ms-request-id': '3834148d-a856-452d-9975-dec530bd6dab', 'x-ms-client-request-id': 'afad30cb-61f8-4af7-8211-8b0c5cc75cda', 'azure-asyncoperation': 
'https://management.azure.com/subscriptions/bab71ab8-daff-4f58-8dfc-ed0d61a3fa6b/resourcegroups/xplattestadlsrg01/providers/Microsoft.Cdn/operationresults/8d3cbcdb-4b2f-473d-8082-4a91d321e1c9?api-version=2015-06-01', 'strict-transport-security': 'max-age=31536000; includeSubDomains', server: 'Microsoft-IIS/8.5', 'x-aspnet-version': '4.0.30319', 'x-powered-by': 'ASP.NET', 'x-ms-ratelimit-remaining-subscription-writes': '1199', 'x-ms-correlation-request-id': '944eac1a-ce63-487b-b73a-e80bd5f385cd', 'x-ms-routing-request-id': 'NORTHCENTRALUS:20160317T183949Z:944eac1a-ce63-487b-b73a-e80bd5f385cd', date: 'Thu, 17 Mar 2016 18:39:49 GMT', connection: 'close' }); return result; }, function (nock) { var result = nock('http://management.azure.com:443') .get('/subscriptions/bab71ab8-daff-4f58-8dfc-ed0d61a3fa6b/resourcegroups/xplattestadlsrg01/providers/Microsoft.Cdn/operationresults/8d3cbcdb-4b2f-473d-8082-4a91d321e1c9?api-version=2015-06-01') .reply(200, "{\r\n \"status\":\"Succeeded\",\"error\":{\r\n \"code\":\"None\",\"message\":null\r\n }\r\n}", { 'cache-control': 'no-cache', pragma: 'no-cache', 'content-length': '77', 'content-type': 'application/json; odata.metadata=minimal; odata.streaming=true', expires: '-1', 'x-ms-request-id': 'be5e4a52-4e9a-4297-a2ba-f5b86e9b7894', 'x-ms-client-request-id': '3e283c63-6534-4e00-956a-aa3791a994f0', 'odata-version': '4.0', 'strict-transport-security': 'max-age=31536000; includeSubDomains', server: 'Microsoft-IIS/8.5', 'x-aspnet-version': '4.0.30319', 'x-powered-by': 'ASP.NET', 'x-ms-ratelimit-remaining-subscription-reads': '14998', 'x-ms-correlation-request-id': '44065a27-40af-4c06-b83d-5d34f304ad01', 'x-ms-routing-request-id': 'WESTUS:20160317T184020Z:44065a27-40af-4c06-b83d-5d34f304ad01', date: 'Thu, 17 Mar 2016 18:40:19 GMT', connection: 'close' }); return result; }, function (nock) { var result = nock('https://management.azure.com:443') 
.get('/subscriptions/bab71ab8-daff-4f58-8dfc-ed0d61a3fa6b/resourcegroups/xplattestadlsrg01/providers/Microsoft.Cdn/operationresults/8d3cbcdb-4b2f-473d-8082-4a91d321e1c9?api-version=2015-06-01') .reply(200, "{\r\n \"status\":\"Succeeded\",\"error\":{\r\n \"code\":\"None\",\"message\":null\r\n }\r\n}", { 'cache-control': 'no-cache', pragma: 'no-cache', 'content-length': '77', 'content-type': 'application/json; odata.metadata=minimal; odata.streaming=true', expires: '-1', 'x-ms-request-id': 'be5e4a52-4e9a-4297-a2ba-f5b86e9b7894', 'x-ms-client-request-id': '3e283c63-6534-4e00-956a-aa3791a994f0', 'odata-version': '4.0', 'strict-transport-security': 'max-age=31536000; includeSubDomains', server: 'Microsoft-IIS/8.5', 'x-aspnet-version': '4.0.30319', 'x-powered-by': 'ASP.NET', 'x-ms-ratelimit-remaining-subscription-reads': '14998', 'x-ms-correlation-request-id': '44065a27-40af-4c06-b83d-5d34f304ad01', 'x-ms-routing-request-id': 'WESTUS:20160317T184020Z:44065a27-40af-4c06-b83d-5d34f304ad01', date: 'Thu, 17 Mar 2016 18:40:19 GMT', connection: 'close' }); return result; }]];
apache-2.0
zcourts/jsc
lib/math_jax.rb
693
require File.dirname(__FILE__) + '/plugin' module JSC class MathJaxBlock < Liquid::Tag include JSC::Plugin def render(ctx) set_ctx ctx math_jax + '<script type="math/tex; mode=display">' end end class MathJaxInline < Liquid::Tag include JSC::Plugin def render(ctx) set_ctx ctx math_jax + '<script type="math/tex">' end end class MathJaxEnd < Liquid::Tag def render(ctx) '</script>' end end end Liquid::Template.register_tag('math', JSC::MathJaxBlock) Liquid::Template.register_tag('m', JSC::MathJaxInline) Liquid::Template.register_tag('endmath', JSC::MathJaxEnd) Liquid::Template.register_tag('em', JSC::MathJaxEnd)
apache-2.0
nevins-b/lemur
lemur/plugins/lemur_aws/sts.py
3171
""" .. module: lemur.plugins.lemur_aws.sts :platform: Unix :copyright: (c) 2015 by Netflix Inc., see AUTHORS for more :license: Apache, see LICENSE for more details. .. moduleauthor:: Kevin Glisson <kglisson@netflix.com> """ from functools import wraps import boto import boto.ec2.elb import boto3 from flask import current_app def assume_service(account_number, service, region='us-east-1'): conn = boto.connect_sts() role = conn.assume_role('arn:aws:iam::{0}:role/{1}'.format( account_number, current_app.config.get('LEMUR_INSTANCE_PROFILE', 'Lemur')), 'blah') if service in 'iam': return boto.connect_iam( aws_access_key_id=role.credentials.access_key, aws_secret_access_key=role.credentials.secret_key, security_token=role.credentials.session_token) elif service in 'elb': return boto.ec2.elb.connect_to_region( region, aws_access_key_id=role.credentials.access_key, aws_secret_access_key=role.credentials.secret_key, security_token=role.credentials.session_token) elif service in 'vpc': return boto.connect_vpc( aws_access_key_id=role.credentials.access_key, aws_secret_access_key=role.credentials.secret_key, security_token=role.credentials.session_token) elif service in 's3': return boto.s3.connect_to_region( region, aws_access_key_id=role.credentials.access_key, aws_secret_access_key=role.credentials.secret_key, security_token=role.credentials.session_token) def sts_client(service, service_type='client'): def decorator(f): @wraps(f) def decorated_function(*args, **kwargs): sts = boto3.client('sts') arn = 'arn:aws:iam::{0}:role/{1}'.format( kwargs.pop('account_number'), current_app.config.get('LEMUR_INSTANCE_PROFILE', 'Lemur') ) # TODO add user specific information to RoleSessionName role = sts.assume_role(RoleArn=arn, RoleSessionName='lemur') if service_type == 'client': client = boto3.client( service, region_name=kwargs.pop('region', 'us-east-1'), aws_access_key_id=role['Credentials']['AccessKeyId'], aws_secret_access_key=role['Credentials']['SecretAccessKey'], 
aws_session_token=role['Credentials']['SessionToken'] ) kwargs['client'] = client elif service_type == 'resource': resource = boto3.resource( service, region_name=kwargs.pop('region', 'us-east-1'), aws_access_key_id=role['Credentials']['AccessKeyId'], aws_secret_access_key=role['Credentials']['SecretAccessKey'], aws_session_token=role['Credentials']['SessionToken'] ) kwargs['resource'] = resource return f(*args, **kwargs) return decorated_function return decorator
apache-2.0
PathVisio/libGPML
org.pathvisio.lib/src/main/java/org/pathvisio/model/type/ArrowHeadType.java
4613
/******************************************************************************* * PathVisio, a tool for data visualization and analysis using biological pathways * Copyright 2006-2021 BiGCaT Bioinformatics, WikiPathways * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. ******************************************************************************/ package org.pathvisio.model.type; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.pathvisio.debug.Logger; import org.pathvisio.model.LineElement; /** * This class contains extensible enum pattern for different arrow head types. A * Line in PathVisio has two endings {@link LineElement.LinePoint} that each can have a * different arrow head. * * NB: previously named LineType. 
* * @author unknown, finterly */ public class ArrowHeadType { private static Map<String, ArrowHeadType> nameToArrowHeadType = new TreeMap<String, ArrowHeadType>(String.CASE_INSENSITIVE_ORDER); public static final ArrowHeadType UNDIRECTED = new ArrowHeadType("Undirected"); //previous "Line" public static final ArrowHeadType DIRECTED = new ArrowHeadType("Directed"); public static final ArrowHeadType CONVERSION = new ArrowHeadType("Conversion"); public static final ArrowHeadType INHIBITION = new ArrowHeadType("Inhibition"); public static final ArrowHeadType CATALYSIS = new ArrowHeadType("Catalysis"); public static final ArrowHeadType STIMULATION = new ArrowHeadType("Stimulation"); public static final ArrowHeadType BINDING = new ArrowHeadType("Binding"); public static final ArrowHeadType TRANSLOCATION = new ArrowHeadType("Translocation"); public static final ArrowHeadType TRANSCRIPTION_TRANSLATION = new ArrowHeadType("TranscriptionTranslation"); private String name; /** * The constructor is private. ArrowHeadType cannot be directly instantiated. * Use create() method to instantiate ArrowHeadType. * * @param name the string key of this ArrowHeadType. * @throws NullPointerException if name is null. */ private ArrowHeadType(String name) { if (name == null) { throw new NullPointerException(); } this.name = name; nameToArrowHeadType.put(name, this); // adds this name and ArrowHeadType to map. } /** * Returns a ArrowHeadType from a given string identifier name. If the * ArrowHeadType doesn't exist yet, it is created to extend the enum. The * method makes sure that the same object is not added twice. * * @param name the string key. * @return the ArrowHeadType for given name. If name does not exist, creates and * returns a new ArrowHeadType. 
*/ public static ArrowHeadType register(String name) { if (nameToArrowHeadType.containsKey(name)) { return nameToArrowHeadType.get(name); } else { Logger.log.trace("Registered arrowhead type " + name); return new ArrowHeadType(name); } } /** * Returns the ArrowHeadType from given string name. * * @param name the string. * @return the ArrowHeadType with given string name. */ public static ArrowHeadType fromName(String name) { return nameToArrowHeadType.get(name); } /** * Returns the name key for this ArrowHeadType. * * @return name the key for this ArrowHeadType. */ public String getName() { return name; } /** * Returns the names of all registered ArrowHeadTypes as a list. * * @return names the names of all registered ArrowHeadTypes in order of * insertion. */ static public List<String> getNames() { List<String> names = new ArrayList<>(nameToArrowHeadType.keySet()); return names; } /** * Returns the arrow head type values of all ArrowHeadTypes as a list. * * @return arrowHead the list of all registered ArrowHeadTypes. */ static public List<ArrowHeadType> getValues() { List<ArrowHeadType> arrowHeadTypes = new ArrayList<>(nameToArrowHeadType.values()); return arrowHeadTypes; } /** * Returns a string representation of this ArrowHeadType. * * @return name the identifier of this ArrowHeadType. */ public String toString() { return name; } }
apache-2.0
PureSolTechnologies/graphs
statemodel/src/main/java/com/puresoltechnologies/graphs/statemodel/StateModel.java
3246
package com.puresoltechnologies.graphs.statemodel; import java.util.Set; import com.puresoltechnologies.graphs.graph.Graph; /** * This is an interface for a state model. * * @author Rick-Rainer Ludwig */ public interface StateModel<S extends State<S, T>, T extends Transition<S, T>> extends Graph<S, T> { /** * This method returns the start state which is not valid, yet, but the starting * point of the first transition to be made. The first transition may be on of * multiple transitions. * * The start state must have at least one valid transition! * * @return The start state is returned. The return value must not be null, * because every state model has to have a start state. */ public S getStartState(); /** * This method returns the possible end states of the model which show, that the * model is finished and cannot be changed anymore. There are different possible * end states due to the normal ends, aborts and errors. * * These are the states which must not have a transition! All other states need * to have at least one. * * @return A {@link Set} of state is returned which mark the end state of the * state model. The return value must not be null, but the set might be * empty. */ public Set<S> getEndStates(); /** * This method returns the current {@link State} of the model. * * @return The current {@link State} is returned. The return value must not be * null, because the state model has to be in one state. */ public S getState(); /** * This method checks whether a transition can be performed with the current * state. * * @param transition * is the transition to be checked for validity at the current state. * @return <code>true</code> is returned in case the transition is valid at * current state. <code>false</code> is returned otherwise. */ public boolean canPerformTransition(T transition); /** * This method performs the transition provided. * * @param transition * is the transition to perform. 
* @throws IllegalStateException * is throw in case the transition is not valid for the current * state. */ public void performTransition(T transition); /** * This method checks the available transitions and looks for a transition to go * the defined state. * * @param state * is the state to go to. * @return A transition is returned in case there is a valid transition to the * given state. <code>null</code> is returned in case there is not valid * transition available. */ public T canGoTo(S state); /** * This method checks the available transitions and looks for a transition to go * the defined state. * * @param state * is the state to go to. * @throws IllegalStateException * is throw in case there is no valid transition to reach the * requested state. */ public void goTo(S state); }
apache-2.0
vam-google/google-cloud-java
google-cloud-clients/google-cloud-dialogflow/src/test/java/com/google/cloud/dialogflow/v2beta1/MockAgents.java
1566
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.v2beta1; import com.google.api.core.BetaApi; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.protobuf.GeneratedMessageV3; import io.grpc.ServerServiceDefinition; import java.util.List; @javax.annotation.Generated("by GAPIC") @BetaApi public class MockAgents implements MockGrpcService { private final MockAgentsImpl serviceImpl; public MockAgents() { serviceImpl = new MockAgentsImpl(); } @Override public List<GeneratedMessageV3> getRequests() { return serviceImpl.getRequests(); } @Override public void addResponse(GeneratedMessageV3 response) { serviceImpl.addResponse(response); } @Override public void addException(Exception exception) { serviceImpl.addException(exception); } @Override public ServerServiceDefinition getServiceDefinition() { return serviceImpl.bindService(); } @Override public void reset() { serviceImpl.reset(); } }
apache-2.0
terma/gigaspace-web-console
provider/src/main/java/com/github/terma/gigaspacewebconsole/provider/executor/ZeroExecutorPreprocessor.java
791
/* Copyright 2015-2017 Artem Stasiuk Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.github.terma.gigaspacewebconsole.provider.executor; public class ZeroExecutorPreprocessor implements ExecutorPreprocessor { @Override public String preprocess(String sql) { return sql; } }
apache-2.0
Longi94/spicio
app/src/main/java/com/tlongdev/spicio/domain/interactor/impl/LoadSeriesDetailsInteractorImpl.java
2026
package com.tlongdev.spicio.domain.interactor.impl; import com.tlongdev.spicio.SpicioApplication; import com.tlongdev.spicio.domain.interactor.AbstractInteractor; import com.tlongdev.spicio.domain.interactor.LoadSeriesDetailsInteractor; import com.tlongdev.spicio.domain.model.Series; import com.tlongdev.spicio.storage.dao.SeriesDao; import com.tlongdev.spicio.util.Logger; import javax.inject.Inject; /** * @author Long * @since 2016. 03. 09. */ public class LoadSeriesDetailsInteractorImpl extends AbstractInteractor implements LoadSeriesDetailsInteractor { private static final String LOG_TAG = LoadSeriesDetailsInteractorImpl.class.getSimpleName(); @Inject SeriesDao mSeriesDao; @Inject Logger logger; private int mSeriesId; private Callback mCallback; public LoadSeriesDetailsInteractorImpl(SpicioApplication app, int seriesId, Callback callback) { super(app.getInteractorComponent()); app.getInteractorComponent().inject(this); mSeriesId = seriesId; mCallback = callback; } @Override public void run() { logger.debug(LOG_TAG, "started"); Series series = mSeriesDao.getSeries(mSeriesId); if (series == null) { logger.debug(LOG_TAG, "SeriesDao.getSeries returned null"); postError(); return; } else { postFinish(series); } logger.debug(LOG_TAG, "ended"); } private void postFinish(final Series series) { if (mCallback == null) { return; } mMainThread.post(new Runnable() { @Override public void run() { mCallback.onLoadSeriesDetailsFinish(series); } }); } private void postError() { if (mCallback == null) { return; } mMainThread.post(new Runnable() { @Override public void run() { mCallback.onLoadSeriesDetailsFail(); } }); } }
apache-2.0
TreetonOrg/Treeton
dev/treetonCore/src/treeton/core/util/MapComparator.java
2798
/* * Copyright Anatoly Starostin (c) 2017. */ package treeton.core.util; import java.util.Comparator; import java.util.Map; public class MapComparator implements Comparator { private Object[] keys; public MapComparator(Object[] keys) { this.keys = keys; } /** * Этот метод сравнивает Map по указанному списку ключей. * * @param p1 первый из сравниваемых набор свойств * @param p2 второй из сравниваемых набор свойств * @param keys список ключей для сравнения * @return &lt; 0 - p1 "меньше" p2<br> * &nbsp;&nbsp;0 - значения p1 совпадают с p2<br> * &gt; 0 - p1 "больше" p2 */ public static int compareMaps(Map p1, Map p2, Object[] keys) { int gt = 1, eq = 0, lt = -1; int rslt = eq; int i = 0, n = keys.length; Object curKey, k1, k2; for (; i < n && rslt == eq; i++) { curKey = keys[i]; k1 = p1.get(curKey); k2 = p2.get(curKey); if (k1 == null) { if (k2 == null) { // Если оба значения равны null, то считаем их равными. // // Оператор "continue" можно было не ставить, поскольку // после этого каскада условных опрераторов мы и так // переходим к следующей итерации. Просто, так нагляднее. continue; } else { // значение, равное null (k1), считается меньше rslt = lt; } } else { if (k2 == null) { // значение, равное null (k2), считается меньше rslt = gt; } else { if (k1 instanceof Comparable && k2 instanceof Comparable) { // Если оба значения не равны null, то сравниваем их. rslt = ((Comparable) k1).compareTo((Comparable) k2); } else { rslt = k1.hashCode() - k2.hashCode(); rslt = rslt > 0 ? 1 : rslt < 0 ? -1 : 0; } } } } return rslt; } public int compare(Object o1, Object o2) { return compareMaps((Map) o1, (Map) o2, keys); } }
apache-2.0
infanprodigy/blog-iz
BlogEngine.NET/App_Code/Extensions/CodeFormatter/JavaScriptFormat.cs
2484
#region Copyright &copy; 2001-2003 Jean-Claude Manoli [jc@manoli.net] /* * This software is provided 'as-is', without any express or implied warranty. * In no event will the author(s) be held liable for any damages arising from * the use of this software. * * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * * 2. Altered source versions must be plainly marked as such, and must not * be misrepresented as being the original software. * * 3. This notice may not be removed or altered from any source distribution. */ #endregion namespace CodeFormatter { /// <summary> /// Generates color-coded HTML 4.01 from JavaSctript source code. /// </summary> public class JavaScriptFormat : CLikeFormat { #region Properties /// <summary> /// The list of JavaScript keywords. /// </summary> protected override string Keywords { get { return "var function abstract as base bool break byte case catch char " + "checked class const continue decimal default delegate do double else " + "enum event explicit extern false finally fixed float for foreach goto " + "if implicit in int interface internal is lock long namespace new null " + "object operator out override params private protected public readonly " + "ref return sbyte sealed short sizeof stackalloc static string struct " + "switch this throw true try typeof uint ulong unchecked unsafe ushort " + "using virtual void while"; } } /// <summary> /// Use the pre-processor color to mark directives that start with @. /// </summary> protected override string Preprocessors { get { return @"@\w*"; } } #endregion } }
apache-2.0
talsma-ict/umldoclet
src/plantuml-asl/src/net/sourceforge/plantuml/tim/StringEater.java
1389
/* ======================================================================== * PlantUML : a free UML diagram generator * ======================================================================== * * (C) Copyright 2009-2020, Arnaud Roques * * Project Info: https://plantuml.com * * If you like this project or if you find it useful, you can support us at: * * https://plantuml.com/patreon (only 1$ per month!) * https://plantuml.com/paypal * * This file is part of PlantUML. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * Original Author: Arnaud Roques */ package net.sourceforge.plantuml.tim; public class StringEater extends Eater { public StringEater(String s) { super(s, null); } @Override public void analyze(TContext context, TMemory memory) throws EaterException, EaterExceptionLocated { throw new UnsupportedOperationException(); } }
apache-2.0
marionschlotte/hana-shine-xsa
web/resources/admin-ui/Component.js
3220
jQuery.sap.declare("sap.hana.democontent.epm.admin.Component"); jQuery.sap.require("sap.m.MessageBox"); jQuery.sap.require("sap.m.MessageToast"); sap.ui.core.UIComponent.extend("sap.hana.democontent.epm.admin.Component",{ metadata : { name : "SHINE - Data Generator", version : "1", includes : [], dependencies : { libs : ["sap.ui.commons", "sap.ui.table","sap.ui.unified"], components : [] }, rootView : "sap.hana.democontent.epm.admin.view.App", config : { resourceBundle : "i18n/messagebundle.hdbtextbundle", serviceConfig : { name : "Get Size", serviceUrl : "/get/tablesize" } } }, init : function () { data = [{ label: oBundle.getText("empty"), record_count: 10, table_size: 100 }]; var chartModel = new sap.ui.model.json.JSONModel({modelData: data}); sap.ui.getCore().setModel(chartModel, "chart"); sap.ui.core.UIComponent.prototype.init.apply(this,arguments); var mConfig = this.getMetadata().getConfig(); var sServiceUrl = mConfig.serviceConfig.serviceUrl; var oConfig = new sap.ui.model.json.JSONModel(sServiceUrl); oConfig.attachRequestCompleted(jQuery.proxy(function(){ this.getSessionInfo(); }),this); sap.ui.getCore().setModel(oConfig, "config"); this.setModel(oConfig, "config"); this.mainModelInit(); //Chart model var oRootPath = jQuery.sap.getModulePath("sap.hana.democontent.epm.admin"); // set i18n model var i18nModel = new sap.ui.model.resource.ResourceModel({ bundleUrl : [oRootPath, mConfig.resourceBundle].join("/") }); sap.ui.getCore().setModel(i18nModel, "i18n"); this.setModel(i18nModel, "i18n"); }, getSessionInfo: function(){ var aUrl = '/get/sessioninfo'; this.onLoadSession( JSON.parse(jQuery.ajax({ url: aUrl, method: 'GET', dataType: 'json', async: false}).responseText)); }, onLoadSession: function(myJSON){ var mConfig = this.getModel("config"); mConfig.setProperty("/UserName", JSON.parse(decodeURI(myJSON.userEncoded)).id); }, mainModelInit: function(){ var model = new sap.ui.model.json.JSONModel({}); 
model.setDefaultBindingMode(sap.ui.model.BindingMode.TwoWay); sap.ui.getCore().setModel(model); this.setModel(model); model.setProperty("/POVal",0); model.setProperty("/SOVal",0); model.setProperty("/times"," * " + numericSimpleFormatter(1000)); model.setProperty("/listVisible",false); model.setProperty("/listDateVisible",false); model.setProperty("/displayValue"," "); model.setProperty("/percentValue",0); // For Time Based DG var now = new Date(); var startDate = new Date(); startDate.setMonth(now.getMonth()-1); var todayDate = new Date(); model.setProperty("/startDate",startDate); model.setProperty("/endDate",todayDate); } });
apache-2.0
federkasten/casmi-io-examples
src/main/java/casmi/io/parser/XMLExample.java
3399
/* * casmi examples * http://casmi.github.com/ * Copyright (C) 2011, Xcoo, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package casmi.io.parser; import java.io.File; import java.io.IOException; import casmi.io.parser.XML; import casmi.io.parser.XMLElement; import casmi.io.exception.ParserException; /** * XML parser example. * * @see casmi.extension.parser.XML * * @author T. Takeuchi */ public class XMLExample{ static final String XML_FILE = XMLExample.class.getResource("example.xml").getPath(); public void simpleRead() throws ParserException, IOException { XML xml = new XML(); xml.parseFile(new File(XML_FILE)); System.out.println(xml); } public void read() throws ParserException, IOException { XML xml = new XML(); xml.parseFile(new File(XML_FILE)); recursivePrint(xml, 0); } private void recursivePrint(XMLElement element, int indent) { String indentStr = ""; for (int i = 0; i < indent; i++) { indentStr += " "; } // print start tag and attributes. System.out.print(indentStr); System.out.print("<"); System.out.print(element.getName()); for (String attributeName : element.getAttributeNames()) { String value = element.getAttribute(attributeName); System.out.print(" " + attributeName + "=\"" + value + "\""); } System.out.println(">"); // print content. if (element.hasContent()) { System.out.print(indentStr + " "); System.out.println(element.getContent()); } // if this element does not have children, return method. if (!element.hasChildren()) { // print end tag. 
System.out.print(indentStr); System.out.println("</" + element.getName() + ">"); return; } // execute this method recursively. for (XMLElement child : element.getChildren()) { recursivePrint(child, indent + 1); } // print end tag. System.out.print(indentStr); System.out.println("</" + element.getName() + ">"); } public static void main(String[] args) { XMLExample xmlExample = new XMLExample(); System.out.println("### Simple read example ###\n"); try { xmlExample.simpleRead(); } catch (ParserException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } System.out.println(); System.out.println("### Read example ###\n"); try { xmlExample.read(); } catch (ParserException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } }
apache-2.0
HazelChen/directory-kerberos
contrib/haox-asn1/src/main/java/org/apache/haox/asn1/type/Asn1Sequence.java
1106
/**
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 *
 */
package org.apache.haox.asn1.type;

import org.apache.haox.asn1.TagClass;
import org.apache.haox.asn1.UniversalTag;

/**
 * ASN.1 SEQUENCE type: an ordered collection of component values.
 * Thin specialization of {@link Asn1Collection} that fixes the tag to the
 * UNIVERSAL-class SEQUENCE tag; all element handling and encoding/decoding
 * behavior is inherited from the superclass.
 */
public class Asn1Sequence extends Asn1Collection {

    /** Creates an empty SEQUENCE tagged as UNIVERSAL / SEQUENCE. */
    public Asn1Sequence() {
        super(TagClass.UNIVERSAL, UniversalTag.SEQUENCE.getValue());
    }
}
apache-2.0
googleapis/java-aiplatform
proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/schema/predict/prediction/ClassificationPredictionResult.java
41559
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/schema/predict/prediction/classification.proto package com.google.cloud.aiplatform.v1.schema.predict.prediction; /** * * * <pre> * Prediction output format for Image and Text Classification. * </pre> * * Protobuf type {@code * google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult} */ public final class ClassificationPredictionResult extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult) ClassificationPredictionResultOrBuilder { private static final long serialVersionUID = 0L; // Use ClassificationPredictionResult.newBuilder() to construct. 
private ClassificationPredictionResult( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ClassificationPredictionResult() { ids_ = emptyLongList(); displayNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; confidences_ = emptyFloatList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ClassificationPredictionResult(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ClassificationPredictionResult( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { ids_ = newLongList(); mutable_bitField0_ |= 0x00000001; } ids_.addLong(input.readInt64()); break; } case 10: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000001) != 0) && input.getBytesUntilLimit() > 0) { ids_ = newLongList(); mutable_bitField0_ |= 0x00000001; } while (input.getBytesUntilLimit() > 0) { ids_.addLong(input.readInt64()); } input.popLimit(limit); break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); if (!((mutable_bitField0_ & 0x00000002) != 0)) { displayNames_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000002; } displayNames_.add(s); break; } case 29: { if (!((mutable_bitField0_ & 0x00000004) != 0)) { confidences_ = newFloatList(); mutable_bitField0_ |= 0x00000004; } 
confidences_.addFloat(input.readFloat()); break; } case 26: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000004) != 0) && input.getBytesUntilLimit() > 0) { confidences_ = newFloatList(); mutable_bitField0_ |= 0x00000004; } while (input.getBytesUntilLimit() > 0) { confidences_.addFloat(input.readFloat()); } input.popLimit(limit); break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { ids_.makeImmutable(); // C } if (((mutable_bitField0_ & 0x00000002) != 0)) { displayNames_ = displayNames_.getUnmodifiableView(); } if (((mutable_bitField0_ & 0x00000004) != 0)) { confidences_.makeImmutable(); // C } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResultProto .internal_static_google_cloud_aiplatform_v1_schema_predict_prediction_ClassificationPredictionResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResultProto .internal_static_google_cloud_aiplatform_v1_schema_predict_prediction_ClassificationPredictionResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult .class, com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult .Builder.class); } public static final int 
IDS_FIELD_NUMBER = 1; private com.google.protobuf.Internal.LongList ids_; /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @return A list containing the ids. */ @java.lang.Override public java.util.List<java.lang.Long> getIdsList() { return ids_; } /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @return The count of ids. */ public int getIdsCount() { return ids_.size(); } /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @param index The index of the element to return. * @return The ids at the given index. */ public long getIds(int index) { return ids_.getLong(index); } private int idsMemoizedSerializedSize = -1; public static final int DISPLAY_NAMES_FIELD_NUMBER = 2; private com.google.protobuf.LazyStringList displayNames_; /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @return A list containing the displayNames. */ public com.google.protobuf.ProtocolStringList getDisplayNamesList() { return displayNames_; } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @return The count of displayNames. */ public int getDisplayNamesCount() { return displayNames_.size(); } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @param index The index of the element to return. * @return The displayNames at the given index. 
*/ public java.lang.String getDisplayNames(int index) { return displayNames_.get(index); } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @param index The index of the value to return. * @return The bytes of the displayNames at the given index. */ public com.google.protobuf.ByteString getDisplayNamesBytes(int index) { return displayNames_.getByteString(index); } public static final int CONFIDENCES_FIELD_NUMBER = 3; private com.google.protobuf.Internal.FloatList confidences_; /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. Order matches the Ids. * </pre> * * <code>repeated float confidences = 3;</code> * * @return A list containing the confidences. */ @java.lang.Override public java.util.List<java.lang.Float> getConfidencesList() { return confidences_; } /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. Order matches the Ids. * </pre> * * <code>repeated float confidences = 3;</code> * * @return The count of confidences. */ public int getConfidencesCount() { return confidences_.size(); } /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. Order matches the Ids. * </pre> * * <code>repeated float confidences = 3;</code> * * @param index The index of the element to return. * @return The confidences at the given index. 
*/ public float getConfidences(int index) { return confidences_.getFloat(index); } private int confidencesMemoizedSerializedSize = -1; private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (getIdsList().size() > 0) { output.writeUInt32NoTag(10); output.writeUInt32NoTag(idsMemoizedSerializedSize); } for (int i = 0; i < ids_.size(); i++) { output.writeInt64NoTag(ids_.getLong(i)); } for (int i = 0; i < displayNames_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, displayNames_.getRaw(i)); } if (getConfidencesList().size() > 0) { output.writeUInt32NoTag(26); output.writeUInt32NoTag(confidencesMemoizedSerializedSize); } for (int i = 0; i < confidences_.size(); i++) { output.writeFloatNoTag(confidences_.getFloat(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < ids_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream.computeInt64SizeNoTag(ids_.getLong(i)); } size += dataSize; if (!getIdsList().isEmpty()) { size += 1; size += com.google.protobuf.CodedOutputStream.computeInt32SizeNoTag(dataSize); } idsMemoizedSerializedSize = dataSize; } { int dataSize = 0; for (int i = 0; i < displayNames_.size(); i++) { dataSize += computeStringSizeNoTag(displayNames_.getRaw(i)); } size += dataSize; size += 1 * getDisplayNamesList().size(); } { int dataSize = 0; dataSize = 4 * getConfidencesList().size(); size += dataSize; if (!getConfidencesList().isEmpty()) { size += 1; size += com.google.protobuf.CodedOutputStream.computeInt32SizeNoTag(dataSize); } 
confidencesMemoizedSerializedSize = dataSize; } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult other = (com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult) obj; if (!getIdsList().equals(other.getIdsList())) return false; if (!getDisplayNamesList().equals(other.getDisplayNamesList())) return false; if (!getConfidencesList().equals(other.getConfidencesList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getIdsCount() > 0) { hash = (37 * hash) + IDS_FIELD_NUMBER; hash = (53 * hash) + getIdsList().hashCode(); } if (getDisplayNamesCount() > 0) { hash = (37 * hash) + DISPLAY_NAMES_FIELD_NUMBER; hash = (53 * hash) + getDisplayNamesList().hashCode(); } if (getConfidencesCount() > 0) { hash = (37 * hash) + CONFIDENCES_FIELD_NUMBER; hash = (53 * hash) + getConfidencesList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public 
static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Prediction output format for Image and Text Classification. 
* </pre> * * Protobuf type {@code * google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult) com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResultProto .internal_static_google_cloud_aiplatform_v1_schema_predict_prediction_ClassificationPredictionResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResultProto .internal_static_google_cloud_aiplatform_v1_schema_predict_prediction_ClassificationPredictionResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult.class, com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult.Builder.class); } // Construct using // com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); ids_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000001); displayNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & 
~0x00000002); confidences_ = emptyFloatList(); bitField0_ = (bitField0_ & ~0x00000004); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResultProto .internal_static_google_cloud_aiplatform_v1_schema_predict_prediction_ClassificationPredictionResult_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult .getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult build() { com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult buildPartial() { com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult result = new com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult(this); int from_bitField0_ = bitField0_; if (((bitField0_ & 0x00000001) != 0)) { ids_.makeImmutable(); bitField0_ = (bitField0_ & ~0x00000001); } result.ids_ = ids_; if (((bitField0_ & 0x00000002) != 0)) { displayNames_ = displayNames_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000002); } result.displayNames_ = displayNames_; if (((bitField0_ & 0x00000004) != 0)) { confidences_.makeImmutable(); bitField0_ = (bitField0_ & ~0x00000004); } result.confidences_ = confidences_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult) { return mergeFrom( (com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult other) { if (other == com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult .getDefaultInstance()) return this; if (!other.ids_.isEmpty()) { if (ids_.isEmpty()) { ids_ = other.ids_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureIdsIsMutable(); ids_.addAll(other.ids_); } onChanged(); } if (!other.displayNames_.isEmpty()) { if (displayNames_.isEmpty()) { displayNames_ = other.displayNames_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureDisplayNamesIsMutable(); displayNames_.addAll(other.displayNames_); } onChanged(); } if (!other.confidences_.isEmpty()) { if (confidences_.isEmpty()) { confidences_ = other.confidences_; bitField0_ = (bitField0_ & ~0x00000004); } else { 
ensureConfidencesIsMutable(); confidences_.addAll(other.confidences_); } onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private com.google.protobuf.Internal.LongList ids_ = emptyLongList(); private void ensureIdsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { ids_ = mutableCopy(ids_); bitField0_ |= 0x00000001; } } /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @return A list containing the ids. */ public java.util.List<java.lang.Long> getIdsList() { return ((bitField0_ & 0x00000001) != 0) ? java.util.Collections.unmodifiableList(ids_) : ids_; } /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @return The count of ids. */ public int getIdsCount() { return ids_.size(); } /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @param index The index of the element to return. * @return The ids at the given index. 
*/ public long getIds(int index) { return ids_.getLong(index); } /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @param index The index to set the value at. * @param value The ids to set. * @return This builder for chaining. */ public Builder setIds(int index, long value) { ensureIdsIsMutable(); ids_.setLong(index, value); onChanged(); return this; } /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @param value The ids to add. * @return This builder for chaining. */ public Builder addIds(long value) { ensureIdsIsMutable(); ids_.addLong(value); onChanged(); return this; } /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @param values The ids to add. * @return This builder for chaining. */ public Builder addAllIds(java.lang.Iterable<? extends java.lang.Long> values) { ensureIdsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, ids_); onChanged(); return this; } /** * * * <pre> * The resource IDs of the AnnotationSpecs that had been identified. * </pre> * * <code>repeated int64 ids = 1;</code> * * @return This builder for chaining. */ public Builder clearIds() { ids_ = emptyLongList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } private com.google.protobuf.LazyStringList displayNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureDisplayNamesIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { displayNames_ = new com.google.protobuf.LazyStringArrayList(displayNames_); bitField0_ |= 0x00000002; } } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @return A list containing the displayNames. 
*/ public com.google.protobuf.ProtocolStringList getDisplayNamesList() { return displayNames_.getUnmodifiableView(); } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @return The count of displayNames. */ public int getDisplayNamesCount() { return displayNames_.size(); } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @param index The index of the element to return. * @return The displayNames at the given index. */ public java.lang.String getDisplayNames(int index) { return displayNames_.get(index); } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @param index The index of the value to return. * @return The bytes of the displayNames at the given index. */ public com.google.protobuf.ByteString getDisplayNamesBytes(int index) { return displayNames_.getByteString(index); } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @param index The index to set the value at. * @param value The displayNames to set. * @return This builder for chaining. */ public Builder setDisplayNames(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureDisplayNamesIsMutable(); displayNames_.set(index, value); onChanged(); return this; } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @param value The displayNames to add. * @return This builder for chaining. 
*/ public Builder addDisplayNames(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureDisplayNamesIsMutable(); displayNames_.add(value); onChanged(); return this; } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @param values The displayNames to add. * @return This builder for chaining. */ public Builder addAllDisplayNames(java.lang.Iterable<java.lang.String> values) { ensureDisplayNamesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, displayNames_); onChanged(); return this; } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @return This builder for chaining. */ public Builder clearDisplayNames() { displayNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The display names of the AnnotationSpecs that had been identified, order * matches the IDs. * </pre> * * <code>repeated string display_names = 2;</code> * * @param value The bytes of the displayNames to add. * @return This builder for chaining. */ public Builder addDisplayNamesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureDisplayNamesIsMutable(); displayNames_.add(value); onChanged(); return this; } private com.google.protobuf.Internal.FloatList confidences_ = emptyFloatList(); private void ensureConfidencesIsMutable() { if (!((bitField0_ & 0x00000004) != 0)) { confidences_ = mutableCopy(confidences_); bitField0_ |= 0x00000004; } } /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. Order matches the Ids. 
* </pre> * * <code>repeated float confidences = 3;</code> * * @return A list containing the confidences. */ public java.util.List<java.lang.Float> getConfidencesList() { return ((bitField0_ & 0x00000004) != 0) ? java.util.Collections.unmodifiableList(confidences_) : confidences_; } /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. Order matches the Ids. * </pre> * * <code>repeated float confidences = 3;</code> * * @return The count of confidences. */ public int getConfidencesCount() { return confidences_.size(); } /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. Order matches the Ids. * </pre> * * <code>repeated float confidences = 3;</code> * * @param index The index of the element to return. * @return The confidences at the given index. */ public float getConfidences(int index) { return confidences_.getFloat(index); } /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. Order matches the Ids. * </pre> * * <code>repeated float confidences = 3;</code> * * @param index The index to set the value at. * @param value The confidences to set. * @return This builder for chaining. */ public Builder setConfidences(int index, float value) { ensureConfidencesIsMutable(); confidences_.setFloat(index, value); onChanged(); return this; } /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. Order matches the Ids. * </pre> * * <code>repeated float confidences = 3;</code> * * @param value The confidences to add. * @return This builder for chaining. */ public Builder addConfidences(float value) { ensureConfidencesIsMutable(); confidences_.addFloat(value); onChanged(); return this; } /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. 
Order matches the Ids. * </pre> * * <code>repeated float confidences = 3;</code> * * @param values The confidences to add. * @return This builder for chaining. */ public Builder addAllConfidences(java.lang.Iterable<? extends java.lang.Float> values) { ensureConfidencesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, confidences_); onChanged(); return this; } /** * * * <pre> * The Model's confidences in correctness of the predicted IDs, higher value * means higher confidence. Order matches the Ids. * </pre> * * <code>repeated float confidences = 3;</code> * * @return This builder for chaining. */ public Builder clearConfidences() { confidences_ = emptyFloatList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult) private static final com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult(); } public static com.google.cloud.aiplatform.v1.schema.predict.prediction .ClassificationPredictionResult getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ClassificationPredictionResult> PARSER = new com.google.protobuf.AbstractParser<ClassificationPredictionResult>() { @java.lang.Override public ClassificationPredictionResult 
parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ClassificationPredictionResult(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ClassificationPredictionResult> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ClassificationPredictionResult> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.schema.predict.prediction.ClassificationPredictionResult getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache-2.0
software-engineering-amsterdam/poly-ql
skatt/QL/src/org/uva/sea/ql/parser/antlr/QL4/QL4Visitor.java
970
package org.uva.sea.ql.parser.antlr.QL4;

// Fix: Map and HashMap were used below without being imported, which fails
// compilation. Only java.util is added; no behavior changes.
import java.util.HashMap;
import java.util.Map;

import QL4.QL4BaseVisitor;
import QL4.QL4Parser;

/**
 * A visitor for the QL4 grammar. Visits a tree parsed by the Antlr4
 * generated QL4parser on the QL4 grammar. Extends the QL4BaseVisitor
 * generated by Antrl4 and implements any function that needs
 * a more sophisticated visit than depth first
 * @author Sammie Katt
 */
// NOTE(review): the Value type is referenced but not imported here — it must
// live in this package or the import is missing; verify against the project
// layout.
public class QL4Visitor extends QL4BaseVisitor<Value> {

  // Handles the information printed: when true, diagnostic output is emitted
  // while visiting.
  boolean verbose = true;

  // Contains all identifiers and their values, keyed by identifier name.
  Map<String, Value> variables = new HashMap<String, Value>();

  /**
   * Specifies behavior when visiting a structure. Depending on the
   * context of the structure different work flow applies. A structure
   * may contain either a question, or a form of if/else statements
   * and structures in between.
   * @param ctx is the context of the structure
   */
  // NOTE(review): ANTLR4-generated base visitors declare visit methods as
  // `Value visitStructure(...)`; this void signature cannot override (and may
  // clash with) the base method when QL4BaseVisitor<Value> declares it —
  // confirm against the generated base class. The body is still unimplemented.
  public void visitStructure(QL4Parser.StructureContext ctx) {
  }
}
apache-2.0
tectronics/faint-graphics-editor
app/app.cpp
13911
// -*- coding: us-ascii-unix -*- // Copyright 2012 Lukas Kemmer // // Licensed under the Apache License, Version 2.0 (the "License"); you // may not use this file except in compliance with the License. You // may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. See the License for the specific language governing // permissions and limitations under the License. #include <algorithm> #include <cctype> #include <sstream> #include "wx/app.h" #include "wx/cmdline.h" #include "wx/filename.h" #include "wx/frame.h" #include "app/get-art-container.hh" #include "app/one-instance.hh" #include "app/write-exception-log.hh" #include "bitmap/color.hh" #include "bitmap/paint.hh" #include "generated/resource/load-resources.hh" #include "gui/art-container.hh" #include "gui/faint-window.hh" #include "gui/help-frame.hh" #include "gui/interpreter-frame.hh" #include "python/py-initialize-ifaint.hh" #include "python/py-interface.hh" #include "python/py-exception.hh" #include "python/py-key-press.hh" #include "python/python-context.hh" #include "text/formatting.hh" #include "util/optional.hh" #include "util/paint-map.hh" #include "util/settings.hh" #include "util-wx/convert-wx.hh" #include "util-wx/file-path-util.hh" #include "util-wx/gui-util.hh" #include "util-wx/key-codes.hh" namespace faint{ static utf8_string get_default_faint_port(){ return utf8_string("3793"); } struct CommandLine{ CommandLine() : forceNew(false), preventServer(false), silentMode(false), script(false), port(get_default_faint_port()) {} bool forceNew; // single instance bool preventServer; // single instance bool silentMode; bool script; Optional<FilePath> scriptPath; utf8_string port; FileList files; utf8_string arg; // Script argument }; static const 
wxCmdLineEntryDesc g_cmdLineDesc[] = { {wxCMD_LINE_SWITCH, "h", "help", "Displays help on the command line parameters", wxCMD_LINE_VAL_NONE, wxCMD_LINE_OPTION_HELP}, {wxCMD_LINE_SWITCH, "s", "silent", "Disables the GUI. Requires specifying a script with with --run.", wxCMD_LINE_VAL_STRING, wxCMD_LINE_PARAM_OPTIONAL}, {wxCMD_LINE_PARAM, "e", "whatev", "Image files", wxCMD_LINE_VAL_STRING, wxCMD_LINE_PARAM_OPTIONAL | wxCMD_LINE_PARAM_MULTIPLE}, {wxCMD_LINE_SWITCH, "i", "newinst", "Force new instance", wxCMD_LINE_VAL_STRING, wxCMD_LINE_PARAM_OPTIONAL}, {wxCMD_LINE_SWITCH, "ii", "noserver", "Prevent this instance from becoming a main app", wxCMD_LINE_VAL_STRING, wxCMD_LINE_PARAM_OPTIONAL}, {wxCMD_LINE_OPTION, "", "port", "Specify port used for IPC.", wxCMD_LINE_VAL_STRING, wxCMD_LINE_PARAM_OPTIONAL}, {wxCMD_LINE_OPTION, "", "run", "Run a Python script file after loading images", wxCMD_LINE_VAL_STRING, wxCMD_LINE_PARAM_OPTIONAL}, {wxCMD_LINE_OPTION, "arg", "arg", "Custom argument stored in ifaint.cmd_arg.", // Fixme: Duplication wxCMD_LINE_VAL_STRING, wxCMD_LINE_PARAM_OPTIONAL}, {wxCMD_LINE_NONE, "", "", "", wxCMD_LINE_VAL_NONE, wxCMD_LINE_PARAM_OPTIONAL} // Sentinel }; static utf8_string get_string(const wxCmdLineParser& parser, const std::string& name, const utf8_string& defaultStr=utf8_string("")) { wxString str; parser.Found(name, &str); if (str.empty()){ return defaultStr; } return to_faint(str); } static bool valid_port(const std::string& str){ if (std::all_of(begin(str), end(str), [](auto v){return std::isdigit(v);})){ std::stringstream ss(str); int i = 0; ss >> i; return 0 <= i && i <= 65535; } return false; } utf8_string format_run_script_error(const FilePath& path, const FaintPyExc& err) { return space_sep("Error in script", quoted(path.Str()), "specified with --run:\n") + format_error_info(err); } static PaintMap default_palette(){ PaintMap palette; palette.Append(Paint(Color(0, 0, 0, 0))); palette.Append(Paint(Color(255,0,255))); 
palette.Append(Paint(Color(0, 0, 0))); palette.Append(Paint(Color(255,255,255))); palette.Append(Paint(Color(70, 70, 70))); palette.Append(Paint(Color(220,220,220))); palette.Append(Paint(Color(120, 120, 120))); palette.Append(Paint(Color(180,180,180))); palette.Append(Paint(Color(153, 0, 48))); palette.Append(Paint(Color(156, 90, 60))); palette.Append(Paint(Color(237, 28, 36))); palette.Append(Paint(Color(255, 163, 177))); palette.Append(Paint(Color(255, 126, 0))); palette.Append(Paint(Color(229, 170, 122))); palette.Append(Paint(Color(255, 242, 0))); palette.Append(Paint(Color(245, 228, 156))); palette.Append(Paint(Color(255, 194, 14))); palette.Append(Paint(Color(255, 249, 189))); palette.Append(Paint(Color(168, 230, 29))); palette.Append(Paint(Color(211, 249, 188))); palette.Append(Paint(Color(34, 177, 76))); palette.Append(Paint(Color(157, 187, 97))); palette.Append(Paint(Color(0, 183, 239))); palette.Append(Paint(Color(153, 217, 234))); palette.Append(Paint(Color(77, 109, 243))); palette.Append(Paint(Color(112, 154, 209))); palette.Append(Paint(Color(47, 54, 153))); palette.Append(Paint(Color(84, 109, 142))); palette.Append(Paint(Color(111, 49, 152))); palette.Append(Paint(Color(181, 165, 213))); return palette; } class Application : public wxApp{ public: Application() {} int FilterEvent(wxEvent& untypedEvent) override{ // Filter for key events so that bound keys are relayed to Python // regardless of what control has focus. This prevents controls like // tool buttons, the aui-toolbar etc. from swallowing keypresses // when they have focus. wxEventType eventType = untypedEvent.GetEventType(); if (eventType != wxEVT_KEY_DOWN && eventType != wxEVT_KEY_UP){ return Event_Skip; } const wxKeyEvent& event = (wxKeyEvent&)untypedEvent; const int keyCode = event.GetKeyCode(); if (key::modifier(keyCode)){ m_faintWindow->ModifierKeyChange(); // Modifier keys (like Ctrl, Shift) can not be bound separately - // stop special handling here. 
return Event_Skip; } if (eventType == wxEVT_KEY_UP){ // Key up is only relevant for modifier refresh. return Event_Skip; } KeyPress key(key_modifiers(event), Key(keyCode)); const bool boundGlobal = m_pythonContext->BoundGlobal(key); const bool bound = m_pythonContext->Bound(key); if (!bound && !boundGlobal){ // Allow normal key handling for unbound keys return Event_Skip; } if (!m_faintWindow->Focused() && !boundGlobal){ // Ignore non-global Python-binds if a window or dialog is shown // above the mainframe return Event_Skip; } // Note: It appears that FilterEvent does not happen while a menu is // open, so no check to avoid this is performed. // Non-global binds must check if text-entry is active if (!boundGlobal){ EntryMode entryMode = m_faintWindow->GetTextEntryMode(); if (entryMode == EntryMode::ALPHA_NUMERIC && affects_alphanumeric_entry(key)) { return Event_Skip; } else if (entryMode == EntryMode::NUMERIC && affects_numeric_entry(key)){ return Event_Skip; } } // Run the key bind, and swallow the key-press. 
python_key_press(key); return Event_Processed; } AppContext& GetAppContext() const{ return *m_appContext; } PythonContext& GetPythonContext() const{ return *m_pythonContext; } const ArtContainer& GetArtContainer() const{ return m_art; } bool OnCmdLineParsed(wxCmdLineParser& parser) override{ m_cmd.silentMode = parser.Found("s"); m_cmd.forceNew = parser.Found("i"); m_cmd.preventServer = parser.Found("ii"); m_cmd.port = get_string(parser, "port", get_default_faint_port()); m_cmd.arg = get_string(parser, "arg", ""); utf8_string scriptPath = get_string(parser, "run"); m_cmd.scriptPath = make_absolute_file_path(scriptPath); if (!valid_port(m_cmd.port.str())){ console_message("Error: Invalid port specified " + bracketed(m_cmd.port.str())); return false; } if (m_cmd.silentMode && m_cmd.scriptPath.NotSet()){ console_message("Error: --silent requires a script specified with " "--run <scriptname>"); return false; } for (size_t i = 0; i!= parser.GetParamCount(); i++){ const wxString param(parser.GetParam(i)); wxFileName absPath(absoluted(wxFileName(param))); if (absPath.IsDir()){ console_message(wxString("Error: Folder path specified on command " "line - image path expected (") + param + ")."); return false; } m_cmd.files.push_back(FilePath::FromAbsoluteWx(absPath)); } return true; } bool OnExceptionInMainLoop() override{ if (m_crashFile.IsSet()){ write_exception_log(m_crashFile.Get()); } std::abort(); } int OnExit() override{ m_appContext.reset(nullptr); m_faintInstance.reset(nullptr); m_faintWindow.reset(nullptr); m_helpFrame.reset(nullptr); m_interpreterFrame.reset(nullptr); m_pythonContext.reset(nullptr); return wxApp::OnExit(); } bool OnInit() override{ assert(CallOrder(0)); // Perform default init for command-line etc. if (!wxApp::OnInit()){ return false; } // Store the path to the crash-log file to require minimum effort // to write it on unhandled exception. 
m_crashFile = get_crash_file(); m_faintInstance = create_faint_instance(m_cmd.files, allow_server(!m_cmd.preventServer), force_start(m_cmd.forceNew), m_cmd.port.str()); if (!m_faintInstance->AllowStart()){ return false; } wxInitAllImageHandlers(); m_art.SetRoot(get_data_dir().SubDir("graphics")); load_faint_resources(m_art); // Create frames and restore their states from the last run m_interpreterFrame = std::make_unique<InterpreterFrame>(); m_interpreterFrame->SetIcons(get_icon(m_art, Icon::FAINT_PYTHON16), get_icon(m_art, Icon::FAINT_PYTHON32)); m_helpFrame = std::make_unique<HelpFrame>(get_help_dir(), m_art); m_helpFrame->SetIcons(get_icon(m_art, Icon::HELP16), get_icon(m_art, Icon::HELP32)); m_faintWindow = std::make_unique<FaintWindow>(m_art, default_palette(), m_helpFrame.get(), m_interpreterFrame.get(), m_cmd.silentMode); m_faintWindow->SetIcons(get_icon(m_art, Icon::FAINT16), get_icon(m_art, Icon::FAINT32)); m_appContext.reset(&(m_faintWindow->GetAppContext())); m_pythonContext.reset(&(m_faintWindow->GetPythonContext())); bool ok = init_python(m_cmd.arg); if (!ok){ show_error(null_parent(), Title("Faint Internal Error"), "Faint crashed!\n\n...while running envsetup.py"); // Fixme: Previously deleted m_faintWindow here, when it was a wxFrame. // Must the frame be deleted on error if before SetTopWindow? return false; } if (!m_cmd.silentMode){ m_faintWindow->Show(); } m_faintWindow->Initialize(); bool configOk = run_python_user_config(*m_pythonContext); if (!configOk){ // Show the console where some error info should have been printed. 
m_appContext->ShowPythonConsole(); if (m_cmd.scriptPath.IsSet()){ m_pythonContext->IntFaintPrint(space_sep("Script", quoted(m_cmd.scriptPath.Get().Str()), "ignored due to configuration file error.\n")); m_cmd.scriptPath.Clear(); } } m_interpreterFrame->AddNames(list_ifaint_names()); if (!m_cmd.files.empty()){ m_faintWindow->Open(m_cmd.files); } SetTopWindow(&m_faintWindow->GetRawFrame()); return true; } void OnInitCmdLine(wxCmdLineParser& parser) override{ parser.SetDesc(g_cmdLineDesc); } void RunScript(const FilePath& scriptPath){ if (!exists(scriptPath)){ if (m_cmd.silentMode){ console_message(to_wx(space_sep(utf8_string( "Python file specified with --run not found:"), scriptPath.Str()))); } else { show_error(null_parent(), Title("Script not found"), to_wx(endline_sep(utf8_string("Python file specified with --run not found:"), scriptPath.Str()))); } } else { run_python_file(scriptPath).Visit( [&](const FaintPyExc& err){ const utf8_string errStr(format_run_script_error(scriptPath, err)); if (m_cmd.silentMode){ console_message(to_wx(errStr)); } else{ m_pythonContext->IntFaintPrint(errStr); m_appContext->ShowPythonConsole(); } }); } } int OnRun() override{ assert(CallOrder(1)); m_cmd.scriptPath.Visit( [&](const FilePath& path){ RunScript(path); }); if (m_cmd.silentMode){ return 0; // Exit } m_pythonContext->NewPrompt(); return wxApp::OnRun(); } private: bool CallOrder(int expected){ return m_callNum++ == expected; } std::unique_ptr<AppContext> m_appContext; ArtContainer m_art; int m_callNum = 0; CommandLine m_cmd; std::unique_ptr<FaintInstance> m_faintInstance; std::unique_ptr<FaintWindow> m_faintWindow; std::unique_ptr<HelpFrame> m_helpFrame; std::unique_ptr<InterpreterFrame> m_interpreterFrame; std::unique_ptr<PythonContext> m_pythonContext; Optional<FilePath> m_crashFile; }; } // namespace IMPLEMENT_APP(faint::Application) namespace faint{ AppContext& get_app_context(){ return wxGetApp().GetAppContext(); } PythonContext& get_python_context(){ return 
wxGetApp().GetPythonContext(); } const ArtContainer& get_art_container(){ return wxGetApp().GetArtContainer(); } } // namespace
apache-2.0
hortonworks/cloudbreak
cloud-api/src/main/java/com/sequenceiq/cloudbreak/cloud/model/ConfigSpecification.java
1639
package com.sequenceiq.cloudbreak.cloud.model;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * JSON-backed specification of a volume configuration: the parameter type and
 * the allowed size/number ranges. All range bounds are carried as strings
 * (they are deserialized verbatim from configuration via Jackson).
 */
public class ConfigSpecification {

    /** Hard cap applied on top of the configured maximum number, see {@link #getMaximumNumberWithLimit()}. */
    private static final Integer LIMIT = 24;

    @JsonProperty("volumeParameterType")
    private String volumeParameterType;

    @JsonProperty("minimumSize")
    private String minimumSize;

    @JsonProperty("maximumSize")
    private String maximumSize;

    @JsonProperty("minimumNumber")
    private String minimumNumber;

    @JsonProperty("maximumNumber")
    private String maximumNumber;

    public String getVolumeParameterType() {
        return volumeParameterType;
    }

    public void setVolumeParameterType(String volumeParameterType) {
        this.volumeParameterType = volumeParameterType;
    }

    public String getMinimumSize() {
        return minimumSize;
    }

    public void setMinimumSize(String minimumSize) {
        this.minimumSize = minimumSize;
    }

    public String getMaximumSize() {
        return maximumSize;
    }

    public void setMaximumSize(String maximumSize) {
        this.maximumSize = maximumSize;
    }

    public String getMinimumNumber() {
        return minimumNumber;
    }

    public void setMinimumNumber(String minimumNumber) {
        this.minimumNumber = minimumNumber;
    }

    public String getMaximumNumber() {
        return maximumNumber;
    }

    /**
     * Returns the configured maximum number, capped at {@link #LIMIT}.
     *
     * @return {@code min(parsed maximumNumber, LIMIT)}
     * @throws NumberFormatException if {@code maximumNumber} is null or not a parseable integer
     */
    public Integer getMaximumNumberWithLimit() {
        // Idiomatic cap instead of the hand-rolled ternary.
        return Math.min(Integer.parseInt(maximumNumber), LIMIT);
    }

    public void setMaximumNumber(String maximumNumber) {
        this.maximumNumber = maximumNumber;
    }
}
apache-2.0
ChinaQuants/OG-Platform
projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/equity/Equity.java
2761
/**
 * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.equity;

import java.io.Serializable;

import org.apache.commons.lang.ObjectUtils;

import com.opengamma.analytics.financial.interestrate.InstrumentDerivative;
import com.opengamma.analytics.financial.interestrate.InstrumentDerivativeVisitor;
import com.opengamma.analytics.financial.legalentity.LegalEntity;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;

/**
 * A holding of a number of shares issued by a legal entity, denominated in a
 * single currency. Instances are immutable.
 */
public class Equity implements InstrumentDerivative, Serializable {
  /** The entity */
  private final LegalEntity _entity;
  /** The currency */
  private final Currency _currency;
  /** The number of shares */
  private final double _numberOfShares;

  /**
   * @param entity The entity, not null
   * @param currency The currency, not null
   * @param numberOfShares The number of shares
   * @throws IllegalArgumentException if entity or currency is null
   */
  public Equity(final LegalEntity entity, final Currency currency, final double numberOfShares) {
    ArgumentChecker.notNull(entity, "entity");
    // Fix: the javadoc promises "not null" for currency but only entity was
    // checked; a null currency previously surfaced later as an NPE in
    // hashCode()/equals().
    ArgumentChecker.notNull(currency, "currency");
    _entity = entity;
    _currency = currency;
    _numberOfShares = numberOfShares;
  }

  /**
   * Gets the entity.
   * @return The entity
   */
  public LegalEntity getEntity() {
    return _entity;
  }

  /**
   * The currency.
   * @return The currency
   */
  public Currency getCurrency() {
    return _currency;
  }

  /**
   * The number of shares.
   * @return The number of shares
   */
  public double getNumberOfShares() {
    return _numberOfShares;
  }

  // NOTE(review): both accept overloads return null instead of dispatching to
  // the visitor — callers always receive null from these. Confirm whether the
  // visitor interface defines an equity method to delegate to; left unchanged
  // to preserve the existing (possibly intentional) behavior.
  @Override
  public <S, T> T accept(final InstrumentDerivativeVisitor<S, T> visitor, final S data) {
    return null;
  }

  @Override
  public <T> T accept(final InstrumentDerivativeVisitor<?, T> visitor) {
    return null;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + _currency.hashCode();
    result = prime * result + _entity.hashCode();
    long temp;
    temp = Double.doubleToLongBits(_numberOfShares);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (!(obj instanceof Equity)) {
      return false;
    }
    final Equity other = (Equity) obj;
    if (Double.compare(_numberOfShares, other._numberOfShares) != 0) {
      return false;
    }
    if (!ObjectUtils.equals(_currency, other._currency)) {
      return false;
    }
    if (!ObjectUtils.equals(_entity, other._entity)) {
      return false;
    }
    return true;
  }
}
apache-2.0
yantian001/2DShooting
Assets/2DShooting/Scripts/Joystick/JoystickShoot.cs
764
using UnityEngine;
using System.Collections;
using UnityStandardAssets.CrossPlatformInput;

/// <summary>
/// Polls the cross-platform "Fire" button every frame and drives the attached
/// weapon while the button is held. The first shot of a hold is fired with
/// the combo flag cleared; every subsequent frame's shot has it set.
/// </summary>
public class JoystickShoot : MonoBehaviour
{
    public Weapon weapon;

    // True for every frame between Fire button-down and button-up.
    bool fireHeld = false;
    // False for the first Fire() call of a hold, true for follow-up calls.
    bool inCombo = false;

    // Fall back to a Weapon component on this GameObject when none was wired
    // up in the inspector.
    void Start()
    {
        if (weapon == null)
        {
            weapon = GetComponent<Weapon>();
        }
    }

    void Update()
    {
        if (weapon == null)
        {
            return;
        }

        if (CrossPlatformInputManager.GetButtonDown("Fire"))
        {
            fireHeld = true;
        }

        if (CrossPlatformInputManager.GetButtonUp("Fire"))
        {
            fireHeld = false;
            inCombo = false;
        }

        if (!fireHeld)
        {
            return;
        }

        // Fire once this frame; from the next frame on the shot counts as a combo.
        weapon.Fire(inCombo);
        inCombo = true;
    }
}
apache-2.0
opentext/appworks-js
src/plugins/notifications/notifications.js
4018
"use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var plugin_1 = require("../../common/plugin"); var proxy_1 = require("../../common/proxy"); var util_1 = require("../../common/util"); var AWNotificationManager = (function (_super) { __extends(AWNotificationManager, _super); function AWNotificationManager() { return _super.call(this, util_1.noop, util_1.noop) || this; } AWNotificationManager.prototype.enablePushNotifications = function (handler, errorHandler, includeSeqNo) { proxy_1.AWProxy.exec(handler, errorHandler, "AWNotificationManager", "enablePushNotifications", proxy_1.AWProxy.isDesktopEnv() ? 
[handler, includeSeqNo] : [includeSeqNo]); }; AWNotificationManager.prototype.disablePushNotifications = function () { proxy_1.AWProxy.exec(null, null, "AWNotificationManager", "disablePushNotifications", []); }; AWNotificationManager.prototype.getNotifications = function (handler, errorHandler, includeSeqNo) { proxy_1.AWProxy.exec(handler, errorHandler, "AWNotificationManager", "getPushNotifications", [includeSeqNo]); }; AWNotificationManager.prototype.getOpeningNotification = function (handler, errorHandler, includeSeqNo) { proxy_1.AWProxy.exec(handler, errorHandler, "AWNotificationManager", "getOpeningNotification", [includeSeqNo]); }; AWNotificationManager.prototype.notificationDidLaunchApp = function (handler, errorHandler, includeSeqNo) { this.getOpeningNotification(handler, errorHandler, includeSeqNo); }; AWNotificationManager.prototype.openListener = function (handler, errorHandler, includeSeqNo) { proxy_1.AWProxy.exec(handler, errorHandler, "AWNotificationManager", "openListener", proxy_1.AWProxy.isDesktopEnv() ? 
[handler, includeSeqNo] : [includeSeqNo]); }; AWNotificationManager.prototype.didTapNotificationFromActivityView = function (handler, errorHandler, includeSeqNo) { this.openListener(handler, errorHandler, includeSeqNo); }; AWNotificationManager.prototype.removeNotification = function (seqNo, handler, errorHandler) { proxy_1.AWProxy.exec(handler, errorHandler, "AWNotificationManager", "removeNotification", [seqNo]); }; AWNotificationManager.prototype.alert = function (message, alertCallback, title, buttonName) { proxy_1.AWProxy.notification().alert(message, alertCallback, title, buttonName); }; AWNotificationManager.prototype.beep = function (times) { proxy_1.AWProxy.notification().beep(times); }; AWNotificationManager.prototype.confirm = function (message, confirmCallback, title, buttonLabels) { proxy_1.AWProxy.notification().confirm(message, confirmCallback, title, buttonLabels); }; AWNotificationManager.prototype.prompt = function (message, promptCallback, title, buttonLabels, defaultText) { proxy_1.AWProxy.notification().prompt(message, promptCallback, title, buttonLabels, defaultText); }; AWNotificationManager.prototype.registerForTopic = function (topicName) { proxy_1.AWProxy.exec(null, null, "AWNotificationManager", "registerForTopic", [topicName]); }; AWNotificationManager.prototype.unregisterFromTopic = function (topicName) { proxy_1.AWProxy.exec(null, null, "AWNotificationManager", "unregisterFromTopic", [topicName]); }; return AWNotificationManager; }(plugin_1.AWPlugin)); exports.AWNotificationManager = AWNotificationManager; //# sourceMappingURL=notifications.js.map
apache-2.0
malooba/flow-monitor
FlowMonitor/ViewModules/Workflows/WorkflowSelector.cs
2614
//Copyright 2016 Malooba Ltd
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//    http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Windows.Forms;
using Newtonsoft.Json;
using Remote;

namespace FlowMonitor.ViewModules.Workflows
{
    /// <summary>
    /// List-box based selector for workflows fetched from the REST service.
    /// Raises events when a workflow is viewed, edited or created so the
    /// hosting view can react.
    /// </summary>
    public partial class WorkflowSelector : UserControl
    {
        public event EventHandler EditSelectedWorkflow;
        public event EventHandler ViewSelectedWorkflow;
        public event EventHandler NewWorkflow;

        public WorkflowSelector()
        {
            InitializeComponent();
            // NOTE(review): assigning DataSource in GetWorkflows raises
            // SelectedIndexChanged, so ViewWorkflow fires during construction;
            // harmless today because the event invocation is null-conditional.
            lstbWorkflows.SelectedIndexChanged += ViewWorkflow;
            GetWorkflows();
        }

        private void ViewWorkflow(object sender, EventArgs e)
        {
            ViewSelectedWorkflow?.Invoke(this, e);
        }

        /// <summary>The workflow currently highlighted in the list box.</summary>
        public WorkflowId SelectedWorkflow => (WorkflowId)lstbWorkflows.SelectedItem;

        private void EditSelected(object sender, EventArgs e)
        {
            EditSelectedWorkflow?.Invoke(this, e);
        }

        /// <summary>
        /// Reloads the workflow list from the service. Shows only the latest
        /// version of each workflow unless "show all versions" is checked.
        /// </summary>
        public void GetWorkflows()
        {
            var json = RestClient.Get("workflows");
            if(json == null)
                return;
            var workflows = JsonConvert.DeserializeObject<List<WorkflowId>>(json);
            if(chkbShowAllVersions.Checked)
                lstbWorkflows.DataSource = workflows;
            else
                lstbWorkflows.DataSource = (from wf in workflows
                                            group wf by wf.Name into g
                                            let latest = g.OrderBy(x => x.VersionLong).Last()
                                            select latest).ToList();
        }

        private void ShowAllVersionsChanged(object sender, EventArgs e)
        {
            GetWorkflows();
        }

        private void DeleteWorkflow(object sender, EventArgs e)
        {
            // Fix: guard against an empty selection (e.g. empty list) which
            // previously dereferenced SelectedWorkflow and crashed.
            if(lstbWorkflows.SelectedItem == null)
                return;
            RestClient.Delete($"/workflows/{SelectedWorkflow.Name}/versions/{SelectedWorkflow.Version}");
            GetWorkflows();
        }

        private void NewWorkflowClick(object sender, EventArgs e)
        {
            NewWorkflow?.Invoke(this, e);
        }
    }
}
apache-2.0
Ahmed-Abdelmeged/ADAS
app/src/main/java/com/example/mego/adas/videos/api/model/VideoPhoto.java
1568
/*
 * Copyright (c) 2017 Ahmed-Abdelmeged
 *
 * github: https://github.com/Ahmed-Abdelmeged
 * email: ahmed.abdelmeged.vm@gamil.com
 * Facebook: https://www.facebook.com/ven.rto
 * Twitter: https://twitter.com/A_K_Abd_Elmeged
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example.mego.adas.videos.api.model;

import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

/**
 * Gson model for a video thumbnail/photo entry: its URL and its pixel
 * dimensions. Field names are bound to the JSON payload via
 * {@code @SerializedName}, so they must not be renamed.
 */
public class VideoPhoto {

    /** Absolute URL of the photo. */
    @SerializedName("url")
    @Expose
    private String url;

    /** Photo width in pixels. */
    @SerializedName("width")
    @Expose
    private Integer width;

    /** Photo height in pixels. */
    @SerializedName("height")
    @Expose
    private Integer height;

    /** @return the photo URL */
    public String getUrl() {
        return url;
    }

    /** @param url the photo URL to set */
    public void setUrl(String url) {
        this.url = url;
    }

    /** @return the width in pixels */
    public Integer getWidth() {
        return width;
    }

    /** @param width the width in pixels to set */
    public void setWidth(Integer width) {
        this.width = width;
    }

    /** @return the height in pixels */
    public Integer getHeight() {
        return height;
    }

    /** @param height the height in pixels to set */
    public void setHeight(Integer height) {
        this.height = height;
    }
}
apache-2.0
spodkowinski/ccm
ccmlib/common.py
16841
# # Cassandra Cluster Management lib # import os import platform import re import shutil import socket import stat import subprocess import sys from six import print_ import time import yaml import fnmatch BIN_DIR= "bin" CASSANDRA_CONF_DIR= "conf" DSE_CASSANDRA_CONF_DIR="resources/cassandra/conf" OPSCENTER_CONF_DIR= "conf" CASSANDRA_CONF = "cassandra.yaml" LOG4J_CONF = "log4j-server.properties" LOG4J_TOOL_CONF = "log4j-tools.properties" LOGBACK_CONF = "logback.xml" CASSANDRA_ENV = "cassandra-env.sh" CASSANDRA_WIN_ENV = "cassandra-env.ps1" CASSANDRA_SH = "cassandra.in.sh" CONFIG_FILE = "config" class CCMError(Exception): pass class LoadError(CCMError): pass class ArgumentError(CCMError): pass class UnavailableSocketError(CCMError): pass def get_default_path(): default_path = os.path.join(get_user_home(), '.ccm') if not os.path.exists(default_path): os.mkdir(default_path) return default_path def get_user_home(): if is_win(): if sys.platform == "cygwin": # Need the fully qualified directory output = subprocess.Popen(["cygpath", "-m", os.path.expanduser('~')], stdout = subprocess.PIPE, stderr = subprocess.STDOUT).communicate()[0].rstrip() return output else: return os.environ['USERPROFILE'] else: return os.path.expanduser('~') def get_config(): config_path = os.path.join(get_default_path(), CONFIG_FILE) if not os.path.exists(config_path): return {} with open(config_path, 'r') as f: return yaml.load(f) def now_ms(): return int(round(time.time() * 1000)) def parse_interface(itf, default_port): i = itf.split(':') if len(i) == 1: return (i[0].strip(), default_port) elif len(i) == 2: return (i[0].strip(), int(i[1].strip())) else: raise ValueError("Invalid interface definition: " + itf) def current_cluster_name(path): try: with open(os.path.join(path, 'CURRENT'), 'r') as f: return f.readline().strip() except IOError: return None def switch_cluster(path, new_name): with open(os.path.join(path, 'CURRENT'), 'w') as f: f.write(new_name + '\n') def replace_in_file(file, regexp, 
replace): replaces_in_file(file, [(regexp, replace)]) def replaces_in_file(file, replacement_list): rs = [ (re.compile(regexp), repl) for (regexp, repl) in replacement_list] file_tmp = file + ".tmp" with open(file, 'r') as f: with open(file_tmp, 'w') as f_tmp: for line in f: for r, replace in rs: match = r.search(line) if match: line = replace + "\n" f_tmp.write(line) shutil.move(file_tmp, file) def replace_or_add_into_file_tail(file, regexp, replace): replaces_or_add_into_file_tail(file, [(regexp, replace)]) def replaces_or_add_into_file_tail(file, replacement_list): rs = [ (re.compile(regexp), repl) for (regexp, repl) in replacement_list] is_line_found = False file_tmp = file + ".tmp" with open(file, 'r') as f: with open(file_tmp, 'w') as f_tmp: for line in f: for r, replace in rs: match = r.search(line) if match: line = replace + "\n" is_line_found = True f_tmp.write(line) # In case, entry is not found, and need to be added if is_line_found == False: f_tmp.write('\n'+ replace + "\n") shutil.move(file_tmp, file) def make_cassandra_env(install_dir, node_path): if is_win() and get_version_from_build(node_path=node_path) >= '2.1': sh_file = os.path.join(CASSANDRA_CONF_DIR, CASSANDRA_WIN_ENV) else: sh_file = os.path.join(BIN_DIR, CASSANDRA_SH) orig = os.path.join(install_dir, sh_file) dst = os.path.join(node_path, sh_file) shutil.copy(orig, dst) replacements = "" if is_win() and get_version_from_build(node_path=node_path) >= '2.1': replacements = [ ('env:CASSANDRA_HOME =', ' $env:CASSANDRA_HOME="%s"' % install_dir), ('env:CASSANDRA_CONF =', ' $env:CCM_DIR="' + node_path + '\\conf"\n $env:CASSANDRA_CONF="$env:CCM_DIR"'), ('cp = ".*?env:CASSANDRA_HOME.conf', ' $cp = """$env:CASSANDRA_CONF"""') ] else: replacements = [ ('CASSANDRA_HOME=', '\tCASSANDRA_HOME=%s' % install_dir), ('CASSANDRA_CONF=', '\tCASSANDRA_CONF=%s' % os.path.join(node_path, 'conf')) ] replaces_in_file(dst, replacements) # If a cluster-wide cassandra.in.sh file exists in the parent # directory, append 
it to the node specific one: cluster_sh_file = os.path.join(node_path, os.path.pardir, 'cassandra.in.sh') if os.path.exists(cluster_sh_file): append = open(cluster_sh_file).read() with open(dst, 'a') as f: f.write('\n\n### Start Cluster wide config ###\n') f.write(append) f.write('\n### End Cluster wide config ###\n\n') env = os.environ.copy() env['CASSANDRA_INCLUDE'] = os.path.join(dst) env['MAX_HEAP_SIZE'] = os.environ.get('CCM_MAX_HEAP_SIZE', '500M') env['HEAP_NEWSIZE'] = os.environ.get('CCM_HEAP_NEWSIZE', '50M') return env def make_dse_env(install_dir, node_path): env = os.environ.copy() env['MAX_HEAP_SIZE'] = os.environ.get('CCM_MAX_HEAP_SIZE', '500M') env['HEAP_NEWSIZE'] = os.environ.get('CCM_HEAP_NEWSIZE', '50M') env['DSE_HOME'] = os.path.join(install_dir) env['DSE_CONF'] = os.path.join(node_path, 'resources', 'dse', 'conf') env['CASSANDRA_HOME'] = os.path.join(install_dir, 'resources', 'cassandra') env['CASSANDRA_CONF'] = os.path.join(node_path, 'resources', 'cassandra', 'conf') env['HADOOP_CONF_DIR'] = os.path.join(node_path, 'resources', 'hadoop', 'conf') env['HIVE_CONF_DIR'] = os.path.join(node_path, 'resources', 'hive', 'conf') env['SQOOP_CONF_DIR'] = os.path.join(node_path, 'resources', 'sqoop', 'conf') env['TOMCAT_CONF_DIR'] = os.path.join(node_path, 'resources', 'tomcat', 'conf') env['PIG_CONF_DIR'] = os.path.join(node_path, 'resources', 'pig', 'conf') env['MAHOUT_CONF_DIR'] = os.path.join(node_path, 'resources', 'mahout', 'conf') env['SPARK_CONF_DIR'] = os.path.join(node_path, 'resources', 'spark', 'conf') env['SHARK_CONF_DIR'] = os.path.join(node_path, 'resources', 'shark', 'conf') return env def check_win_requirements(): if is_win(): # Make sure ant.bat is in the path and executable before continuing try: process = subprocess.Popen('ant.bat', stdout=subprocess.PIPE, stderr=subprocess.STDOUT) except Exception as e: sys.exit("ERROR! Could not find or execute ant.bat. 
Please fix this before attempting to run ccm on Windows.") # Confirm matching architectures # 32-bit python distributions will launch 32-bit cmd environments, losing PowerShell execution privileges on a 64-bit system if sys.maxsize <= 2**32 and platform.machine().endswith('64'): sys.exit("ERROR! 64-bit os and 32-bit python distribution found. ccm requires matching architectures.") def is_win(): return True if sys.platform == "cygwin" or sys.platform == "win32" else False def join_bin(root, dir, executable): return os.path.join(root, dir, platform_binary(executable)) def platform_binary(input): return input + ".bat" if is_win() else input def platform_pager(): return "more" if sys.platform == "win32" else "less" def add_exec_permission(path, executable): # 1) os.chmod on Windows can't add executable permissions # 2) chmod from other folders doesn't work in cygwin, so we have to navigate the shell # to the folder with the executable with it and then chmod it from there if sys.platform == "cygwin": cmd = "cd " + path + "; chmod u+x " + executable os.system(cmd) def parse_path(executable): sep = os.sep if sys.platform == "win32": sep = "\\\\" tokens = re.split(sep, executable) del tokens[-1] return os.sep.join(tokens) def parse_bin(executable): tokens = re.split(os.sep, executable) return tokens[-1] def get_stress_bin(install_dir): candidates = [ os.path.join(install_dir, 'contrib', 'stress', 'bin', 'stress'), os.path.join(install_dir, 'tools', 'stress', 'bin', 'stress'), os.path.join(install_dir, 'tools', 'bin', 'stress'), os.path.join(install_dir, 'tools', 'bin', 'cassandra-stress'), os.path.join(install_dir, 'resources', 'cassandra', 'tools', 'bin', 'cassandra-stress') ] candidates = [platform_binary(s) for s in candidates] for candidate in candidates: if os.path.exists(candidate): stress = candidate break else: raise Exception("Cannot find stress binary (maybe it isn't compiled)") # make sure it's executable -> win32 doesn't care if sys.platform == "cygwin": # Yes, 
we're unwinding the path join from above. path = parse_path(stress) short_bin = parse_bin(stress) add_exec_permission(path, short_bin) elif not os.access(stress, os.X_OK): try: # try to add user execute permissions # os.chmod doesn't work on Windows and isn't necessary unless in cygwin... if sys.platform == "cygwin": add_exec_permission(path, stress) else: os.chmod(stress, os.stat(stress).st_mode | stat.S_IXUSR) except: raise Exception("stress binary is not executable: %s" % (stress,)) return stress def isDse(install_dir): if install_dir is None: raise ArgumentError('Undefined installation directory') bin_dir = os.path.join(install_dir, BIN_DIR) if not os.path.exists(bin_dir): raise ArgumentError('Installation directory does not contain a bin directory: %s' % install_dir) dse_script = os.path.join(bin_dir, 'dse') return os.path.exists(dse_script) def isOpscenter(install_dir): if install_dir is None: raise ArgumentError('Undefined installation directory') bin_dir = os.path.join(install_dir, BIN_DIR) if not os.path.exists(bin_dir): raise ArgumentError('Installation directory does not contain a bin directory') opscenter_script = os.path.join(bin_dir, 'opscenter') return os.path.exists(opscenter_script) def validate_install_dir(install_dir): if install_dir is None: raise ArgumentError('Undefined installation directory') # Windows requires absolute pathing on installation dir - abort if specified cygwin style if is_win(): if ':' not in install_dir: raise ArgumentError('%s does not appear to be a cassandra or dse installation directory. Please use absolute pathing (e.g. C:/cassandra.' 
% install_dir) bin_dir = os.path.join(install_dir, BIN_DIR) if isDse(install_dir): conf_dir = os.path.join(install_dir, DSE_CASSANDRA_CONF_DIR) elif isOpscenter(install_dir): conf_dir = os.path.join(install_dir, OPSCENTER_CONF_DIR) else: conf_dir = os.path.join(install_dir, CASSANDRA_CONF_DIR) cnd = os.path.exists(bin_dir) cnd = cnd and os.path.exists(conf_dir) if not isOpscenter(install_dir): cnd = cnd and os.path.exists(os.path.join(conf_dir, CASSANDRA_CONF)) if not cnd: raise ArgumentError('%s does not appear to be a cassandra or dse installation directory' % install_dir) def check_socket_available(itf): info = socket.getaddrinfo(itf[0], itf[1], socket.AF_UNSPEC, socket.SOCK_STREAM) if not info: raise UnavailableSocketError("Failed to get address info for [%s]:%s" % itf) (family, socktype, proto, canonname, sockaddr) = info[0] s = socket.socket(family, socktype) s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) try: s.bind(sockaddr) s.close() except socket.error as msg: s.close() addr, port = itf raise UnavailableSocketError("Inet address %s:%s is not available: %s" % (addr, port, msg)) def interface_is_ipv6(itf): info = socket.getaddrinfo(itf[0], itf[1], socket.AF_UNSPEC, socket.SOCK_STREAM) if not info: raise UnavailableSocketError("Failed to get address info for [%s]:%s" % itf) return socket.AF_INET6 == info[0][0] # note: does not handle collapsing hextets with leading zeros def normalize_interface(itf): if not itf: return itf ip = itf[0] parts = ip.partition('::') if '::' in parts: missing_hextets = 9 - ip.count(':') zeros = '0'.join([':'] * missing_hextets) ip = ''.join(['0' if p == '' else zeros if p == '::' else p for p in ip.partition('::')]) return (ip, itf[1]) def parse_settings(args): settings = {} for s in args: splitted = s.split(':') if len(splitted) != 2: raise ArgumentError("A new setting should be of the form 'key: value', got " + s) key = splitted[0].strip() val = splitted[1].strip() # ok, that's not super beautiful if val.lower() == 
"true": val = True elif val.lower() == "false": val = False else: try: val = int(val) except ValueError: pass splitted = key.split('.') if len(splitted) == 2: try: settings[splitted[0]][splitted[1]] = val except KeyError: settings[splitted[0]] = {} settings[splitted[0]][splitted[1]] = val else: settings[key] = val return settings # # Copy file from source to destination with reasonable error handling # def copy_file(src_file, dst_file): try: shutil.copy2(src_file, dst_file) except (IOError, shutil.Error) as e: print_(str(e), file=sys.stderr) exit(1) def copy_directory(src_dir, dst_dir): for name in os.listdir(src_dir): filename = os.path.join(src_dir, name) if os.path.isfile(filename): shutil.copy(filename, dst_dir) def get_version_from_build(install_dir=None, node_path=None): if install_dir is None and node_path is not None: install_dir = get_install_dir_from_cluster_conf(node_path) if install_dir is not None: # Binary cassandra installs will have a 0.version.txt file version_file = os.path.join(install_dir, '0.version.txt') if os.path.exists(version_file): with open(version_file) as f: return f.read().strip() # For DSE look for a dse*.jar and extract the version number dse_version = get_dse_version(install_dir) if (dse_version is not None): return dse_version # Source cassandra installs we can read from build.xml build = os.path.join(install_dir, 'build.xml') with open(build) as f: for line in f: match = re.search('name="base\.version" value="([0-9.]+)[^"]*"', line) if match: return match.group(1) raise CCMError("Cannot find version") def get_dse_version(install_dir): for root, dirs, files in os.walk(install_dir): for file in files: match = re.search('^dse-([0-9.]+)(?:-SNAPSHOT)?\.jar', file) if match: return match.group(1) return None def get_dse_cassandra_version(install_dir): clib = os.path.join(install_dir, 'resources', 'cassandra', 'lib') for file in os.listdir(clib): if fnmatch.fnmatch(file, 'cassandra-all*.jar'): match = 
re.search('cassandra-all-([0-9.]+)(?:-.*)?\.jar', file) if match: return match.group(1) raise ArgumentError("Unable to determine Cassandra version in: "+install_dir) def get_install_dir_from_cluster_conf(node_path): file = os.path.join(os.path.dirname(node_path), "cluster.conf") with open(file) as f: for line in f: match = re.search('install_dir: (.*?)$', line) if match: return match.group(1) return None def is_dse_cluster(path): try: with open(os.path.join(path, 'CURRENT'), 'r') as f: name = f.readline().strip() cluster_path = os.path.join(path, name) filename = os.path.join(cluster_path, 'cluster.conf') with open(filename, 'r') as f: data = yaml.load(f) if 'dse_dir' in data: return True except IOError: return False def invalidate_cache(): shutil.rmtree(os.path.join(get_default_path(), 'repository'))
apache-2.0
GuiSevero/ufsm-planos-de-aula
protected/config/main.dev.php
2590
<?php // uncomment the following to define a path alias // Yii::setPathOfAlias('local','path/to/local-folder'); // This is the main Web application configuration. Any writable // CWebApplication properties can be configured here. return array( 'basePath'=>dirname(__FILE__).DIRECTORY_SEPARATOR.'..', 'name'=>'Planos de Aula', 'language'=>'pt', // preloading 'log' component 'preload'=>array('log'), // autoloading model and component classes 'import'=>array( 'application.models.*', 'application.components.*', ), 'modules'=>array( // uncomment the following to enable the Gii tool 'gii'=>array( 'class'=>'system.gii.GiiModule', 'password'=>'gtech', // If removed, Gii defaults to localhost only. Edit carefully to taste. 'ipFilters'=>array('127.0.0.1','::1'), ), ), // application components 'components'=>array( 'user'=>array( // enable cookie-based authentication 'allowAutoLogin'=>true, ), // uncomment the following to enable URLs in path-format 'urlManager'=>array( 'urlFormat'=>'path', 'rules'=>array( 'plan/<id:\d+>'=>'classPlan/plan', 'aulas'=>'classPlan', 'aulas/<id:\d+>'=>'classPlan/view', 'aulas/<action:\w+>/<id:\d+>'=>'classPlan/<action>', 'aulas/<action:\w+>'=>'/classPlan/<action>', 'usuario'=>'user', 'usuario/<id:\d+>'=>'user/view', 'usuario/<action:\w+>/<id:\d+>'=>'user/<action>', 'usuario/<action:\w+>'=>'/user/<action>', '<controller:\w+>/<id:\d+>'=>'<controller>/view', '<controller:\w+>/<action:\w+>/<id:\d+>'=>'<controller>/<action>', '<controller:\w+>/<action:\w+>'=>'<controller>/<action>', ), ), 'db'=>array( 'connectionString' => 'sqlite:'.dirname(__FILE__).'/../data/testdrive.db', ), // uncomment the following to use a MySQL database 'db'=>array( 'connectionString' => 'pgsql:host=localhost;dbname=planos', //'emulatePrepare' => true, 'username' => 'postgres', 'password' => 'gorder', 'charset' => 'utf8', ), 'errorHandler'=>array( // use 'site/error' action to display errors 'errorAction'=>'site/error', ), 'log'=>array( 'class'=>'CLogRouter', 'routes'=>array( 
array( 'class'=>'CFileLogRoute', 'levels'=>'error, warning', ), // uncomment the following to show log messages on web pages /* array( 'class'=>'CWebLogRoute', ), */ ), ), ), // application-level parameters that can be accessed // using Yii::app()->params['paramName'] 'params'=>array( // this is used in contact page 'adminEmail'=>'webmaster@example.com', ), );
apache-2.0
JaredMiller/Wave
test/org/waveprotocol/box/server/waveserver/PerUserWaveViewSubscriberTest.java
3745
/** * Copyright 2012 Apache Wave * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.waveprotocol.box.server.waveserver; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import junit.framework.TestCase; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.waveprotocol.box.common.DeltaSequence; import org.waveprotocol.box.common.ExceptionalIterator; import org.waveprotocol.box.server.common.CoreWaveletOperationSerializer; import org.waveprotocol.box.server.util.WaveletDataUtil; import org.waveprotocol.box.server.util.testing.TestingConstants; import org.waveprotocol.wave.federation.Proto.ProtocolWaveletDelta; import org.waveprotocol.wave.federation.Proto.ProtocolWaveletOperation; import org.waveprotocol.wave.model.id.WaveId; import org.waveprotocol.wave.model.id.WaveletId; import org.waveprotocol.wave.model.version.HashedVersion; import org.waveprotocol.wave.model.wave.data.WaveletData; import java.util.Iterator; /** * @author yurize@apache.org (Yuri Zelikov) */ public class PerUserWaveViewSubscriberTest extends TestCase implements TestingConstants { private static final HashedVersion BEGIN_VERSION = HashedVersion.unsigned(101L); private static final HashedVersion END_VERSION = HashedVersion.unsigned(102L); private static final ProtocolWaveletDelta DELTA = 
ProtocolWaveletDelta.newBuilder() .setAuthor(USER) .setHashedVersion(CoreWaveletOperationSerializer.serialize(BEGIN_VERSION)) .addOperation(ProtocolWaveletOperation.newBuilder().setNoOp(true).build()).build(); private static final DeltaSequence POJO_DELTAS = DeltaSequence.of(CoreWaveletOperationSerializer.deserialize(DELTA, END_VERSION, 0L)); private PerUserWaveViewSubscriber perUserWaveViewSubscriber; @Mock WaveMap waveMap; @Override protected void setUp() throws Exception { MockitoAnnotations.initMocks(this); perUserWaveViewSubscriber = new PerUserWaveViewSubscriber(waveMap); } public void testGetPerUserWaveView() throws WaveletStateException { Iterator<WaveId> inner = ImmutableList.of(WAVELET_NAME.waveId).iterator(); ExceptionalIterator<WaveId, WaveServerException> iter= ExceptionalIterator.FromIterator.create(inner); when(waveMap.getWaveIds()).thenReturn(iter); ImmutableSet<WaveletId> wavelets = ImmutableSet.of(WAVELET_NAME.waveletId); when(waveMap.lookupWavelets(WAVELET_NAME.waveId)).thenReturn(wavelets); LocalWaveletContainer c = mock(LocalWaveletContainer.class); when(c.hasParticipant(PARTICIPANT)).thenReturn(true); when(waveMap.getLocalWavelet(WAVELET_NAME)).thenReturn(c); long dummyCreationTime = System.currentTimeMillis(); WaveletData wavelet = WaveletDataUtil.createEmptyWavelet(WAVELET_NAME, PARTICIPANT, BEGIN_VERSION, dummyCreationTime); perUserWaveViewSubscriber.waveletUpdate(wavelet, POJO_DELTAS); Multimap<WaveId, WaveletId> perUserWavesView = perUserWaveViewSubscriber.getPerUserWaveView(PARTICIPANT); assertNotNull(perUserWavesView); assertEquals(1, perUserWavesView.size()); } }
apache-2.0
uralian/ignition
src/main/scala/com/ignition/samples/MultiSessionStreaming.scala
2575
package com.ignition.samples import org.apache.spark.streaming.Seconds import com.ignition.{ ExecutionException, frame, stream } import com.ignition.types.{ RichStructType, fieldToRichStruct, int, string } import com.ignition.SparkHelper object MultiSessionStreaming extends App { def testFrames() = { implicit val rt = new frame.DefaultSparkRuntime(SparkHelper.sqlContext) // building simple flow grid-->pass val grid1 = frame.DataGrid(string("id") ~ string("name") ~ int("weight")) rows ( (newid, "john", 155), (newid, "jane", 190), (newid, "jake", 160), (newid, "josh", 120)) val pass = frame.Pass() pass.addStepListener(new frame.FrameStepListener { override def onAfterStepComputed(event: frame.AfterFrameStepComputed) = event.value.show }) grid1 --> pass pass.output // adding sql val sql = frame.SQLQuery("select SUM(weight) as total from input0") grid1 --> sql --> pass pass.output // replacing grid, removing sql val grid2 = frame.DataGrid(string("name") ~ int("weight")) rows ( ("john", 200), ("josh", 100)) grid2 --> pass pass.output // restoring sql grid2 --> sql --> pass pass.output } def testStreams() = { implicit val rt = new stream.DefaultSparkStreamingRuntime(SparkHelper.sqlContext, Seconds(5)) // building simple flow queue -->pass val schema = string("name") ~ int("age") val queue1 = stream.QueueInput(schema) .addRows(("john", 10), ("jane", 15), ("jake", 18)) .addRows(("john", 20), ("jane", 25), ("jake", 28)) val pass = stream.foreach { frame.Pass() } pass.addStepListener(new stream.StreamStepListener { override def onAfterStepComputed(event: stream.AfterStreamStepComputed) = println("output computed: " + event.value) }) pass.addStreamDataListener(new stream.StreamStepDataListener { override def onBatchProcessed(event: stream.StreamStepBatchProcessed) = event.rows foreach println }) pass.register queue1 --> pass rt.start Thread.sleep(5000) rt.restart Thread.sleep(5000) // adding sql val sql = stream.foreach { frame.SQLQuery("select AVG(age) as avage from input0") 
} queue1 --> sql --> pass rt.restart Thread.sleep(5000) // replacing queue, removing sql val queue2 = stream.QueueInput(schema) .addRows(("jess", 100), ("jill", 200)) .addRows(("john", 200)) queue2 --> pass rt.restart Thread.sleep(5000) rt.stop } testFrames testStreams }
apache-2.0
googleads/google-ads-dotnet
src/V7/Services/MobileAppCategoryConstantServiceGrpc.g.cs
14380
// <auto-generated> // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v7/services/mobile_app_category_constant_service.proto // </auto-generated> // Original file comments: // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #pragma warning disable 0414, 1591 #region Designer generated code using grpc = global::Grpc.Core; namespace Google.Ads.GoogleAds.V7.Services { /// <summary> /// Service to fetch mobile app category constants. 
/// </summary> public static partial class MobileAppCategoryConstantService { static readonly string __ServiceName = "google.ads.googleads.v7.services.MobileAppCategoryConstantService"; [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] static void __Helper_SerializeMessage(global::Google.Protobuf.IMessage message, grpc::SerializationContext context) { #if !GRPC_DISABLE_PROTOBUF_BUFFER_SERIALIZATION if (message is global::Google.Protobuf.IBufferMessage) { context.SetPayloadLength(message.CalculateSize()); global::Google.Protobuf.MessageExtensions.WriteTo(message, context.GetBufferWriter()); context.Complete(); return; } #endif context.Complete(global::Google.Protobuf.MessageExtensions.ToByteArray(message)); } [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] static class __Helper_MessageCache<T> { public static readonly bool IsBufferMessage = global::System.Reflection.IntrospectionExtensions.GetTypeInfo(typeof(global::Google.Protobuf.IBufferMessage)).IsAssignableFrom(typeof(T)); } [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] static T __Helper_DeserializeMessage<T>(grpc::DeserializationContext context, global::Google.Protobuf.MessageParser<T> parser) where T : global::Google.Protobuf.IMessage<T> { #if !GRPC_DISABLE_PROTOBUF_BUFFER_SERIALIZATION if (__Helper_MessageCache<T>.IsBufferMessage) { return parser.ParseFrom(context.PayloadAsReadOnlySequence()); } #endif return parser.ParseFrom(context.PayloadAsNewBuffer()); } [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] static readonly grpc::Marshaller<global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest> __Marshaller_google_ads_googleads_v7_services_GetMobileAppCategoryConstantRequest = grpc::Marshallers.Create(__Helper_SerializeMessage, context => __Helper_DeserializeMessage(context, global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest.Parser)); 
[global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] static readonly grpc::Marshaller<global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant> __Marshaller_google_ads_googleads_v7_resources_MobileAppCategoryConstant = grpc::Marshallers.Create(__Helper_SerializeMessage, context => __Helper_DeserializeMessage(context, global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant.Parser)); [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] static readonly grpc::Method<global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest, global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant> __Method_GetMobileAppCategoryConstant = new grpc::Method<global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest, global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant>( grpc::MethodType.Unary, __ServiceName, "GetMobileAppCategoryConstant", __Marshaller_google_ads_googleads_v7_services_GetMobileAppCategoryConstantRequest, __Marshaller_google_ads_googleads_v7_resources_MobileAppCategoryConstant); /// <summary>Service descriptor</summary> public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor { get { return global::Google.Ads.GoogleAds.V7.Services.MobileAppCategoryConstantServiceReflection.Descriptor.Services[0]; } } /// <summary>Base class for server-side implementations of MobileAppCategoryConstantService</summary> [grpc::BindServiceMethod(typeof(MobileAppCategoryConstantService), "BindService")] public abstract partial class MobileAppCategoryConstantServiceBase { /// <summary> /// Returns the requested mobile app category constant. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request received from the client.</param> /// <param name="context">The context of the server-side call handler being invoked.</param> /// <returns>The response to send back to the client (wrapped by a task).</returns> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] public virtual global::System.Threading.Tasks.Task<global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant> GetMobileAppCategoryConstant(global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest request, grpc::ServerCallContext context) { throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, "")); } } /// <summary>Client for MobileAppCategoryConstantService</summary> public partial class MobileAppCategoryConstantServiceClient : grpc::ClientBase<MobileAppCategoryConstantServiceClient> { /// <summary>Creates a new client for MobileAppCategoryConstantService</summary> /// <param name="channel">The channel to use to make remote calls.</param> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] public MobileAppCategoryConstantServiceClient(grpc::ChannelBase channel) : base(channel) { } /// <summary>Creates a new client for MobileAppCategoryConstantService that uses a custom <c>CallInvoker</c>.</summary> /// <param name="callInvoker">The callInvoker to use to make remote calls.</param> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] public MobileAppCategoryConstantServiceClient(grpc::CallInvoker callInvoker) : base(callInvoker) { } /// <summary>Protected parameterless constructor to allow creation of test doubles.</summary> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] protected MobileAppCategoryConstantServiceClient() : base() { } /// 
<summary>Protected constructor to allow creation of configured clients.</summary> /// <param name="configuration">The client configuration.</param> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] protected MobileAppCategoryConstantServiceClient(ClientBaseConfiguration configuration) : base(configuration) { } /// <summary> /// Returns the requested mobile app category constant. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request to send to the server.</param> /// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param> /// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param> /// <param name="cancellationToken">An optional token for canceling the call.</param> /// <returns>The response received from the server.</returns> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] public virtual global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant GetMobileAppCategoryConstant(global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) { return GetMobileAppCategoryConstant(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } /// <summary> /// Returns the requested mobile app category constant. 
/// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request to send to the server.</param> /// <param name="options">The options for the call.</param> /// <returns>The response received from the server.</returns> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] public virtual global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant GetMobileAppCategoryConstant(global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest request, grpc::CallOptions options) { return CallInvoker.BlockingUnaryCall(__Method_GetMobileAppCategoryConstant, null, options, request); } /// <summary> /// Returns the requested mobile app category constant. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request to send to the server.</param> /// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param> /// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param> /// <param name="cancellationToken">An optional token for canceling the call.</param> /// <returns>The call object.</returns> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] public virtual grpc::AsyncUnaryCall<global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant> GetMobileAppCategoryConstantAsync(global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest request, grpc::Metadata headers = null, global::System.DateTime? 
deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) { return GetMobileAppCategoryConstantAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } /// <summary> /// Returns the requested mobile app category constant. /// /// List of thrown errors: /// [AuthenticationError]() /// [AuthorizationError]() /// [HeaderError]() /// [InternalError]() /// [QuotaError]() /// [RequestError]() /// </summary> /// <param name="request">The request to send to the server.</param> /// <param name="options">The options for the call.</param> /// <returns>The call object.</returns> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] public virtual grpc::AsyncUnaryCall<global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant> GetMobileAppCategoryConstantAsync(global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest request, grpc::CallOptions options) { return CallInvoker.AsyncUnaryCall(__Method_GetMobileAppCategoryConstant, null, options, request); } /// <summary>Creates a new instance of client from given <c>ClientBaseConfiguration</c>.</summary> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] protected override MobileAppCategoryConstantServiceClient NewInstance(ClientBaseConfiguration configuration) { return new MobileAppCategoryConstantServiceClient(configuration); } } /// <summary>Creates service definition that can be registered with a server</summary> /// <param name="serviceImpl">An object implementing the server-side handling logic.</param> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] public static grpc::ServerServiceDefinition BindService(MobileAppCategoryConstantServiceBase serviceImpl) { return grpc::ServerServiceDefinition.CreateBuilder() .AddMethod(__Method_GetMobileAppCategoryConstant, serviceImpl.GetMobileAppCategoryConstant).Build(); } /// <summary>Register 
service method with a service binder with or without implementation. Useful when customizing the service binding logic. /// Note: this method is part of an experimental API that can change or be removed without any prior notice.</summary> /// <param name="serviceBinder">Service methods will be bound by calling <c>AddMethod</c> on this object.</param> /// <param name="serviceImpl">An object implementing the server-side handling logic.</param> [global::System.CodeDom.Compiler.GeneratedCode("grpc_csharp_plugin", null)] public static void BindService(grpc::ServiceBinderBase serviceBinder, MobileAppCategoryConstantServiceBase serviceImpl) { serviceBinder.AddMethod(__Method_GetMobileAppCategoryConstant, serviceImpl == null ? null : new grpc::UnaryServerMethod<global::Google.Ads.GoogleAds.V7.Services.GetMobileAppCategoryConstantRequest, global::Google.Ads.GoogleAds.V7.Resources.MobileAppCategoryConstant>(serviceImpl.GetMobileAppCategoryConstant)); } } } #endregion
apache-2.0
fusepoolP3/punditTransformer
punditTransformer/src/Net7/PunditTransformerBundle/Entity/TaskRepository.php
268
<?php

namespace Net7\PunditTransformerBundle\Entity;

use Doctrine\ORM\EntityRepository;

/**
 * TaskRepository
 *
 * This class was generated by the Doctrine ORM. Add your own custom
 * repository methods below.
 */
class TaskRepository extends EntityRepository
{
    // Intentionally empty: inherits the standard finder methods
    // (find, findAll, findBy, findOneBy, ...) from EntityRepository.
}
apache-2.0
Said-Young/Phalcon-Backbone
public/app/module/common.js
289
define(['m/emptyModel'], function (model) {
    // Shared error handler: logs the server-supplied message to the console.
    var logError = function (mdl, resp, opts) {
        console.log(resp.message);
    };

    // Factory that builds a model instance pre-wired with the error logger.
    var getModel = function (options) {
        var instance = new model(options);
        instance.on('error', logError);
        return instance;
    };

    // Public module API.
    return {
        getModel: getModel
    };
});
apache-2.0
xuzhongxing/deeplearning4j
deeplearning4j-nlp-parent/deeplearning4j-nlp-uima/src/main/java/org/deeplearning4j/text/tokenization/tokenizer/preprocessor/StemmingPreprocessor.java
712
package org.deeplearning4j.text.tokenization.tokenizer.preprocessor;

import org.tartarus.snowball.ext.PorterStemmer;

/**
 * Token preprocessor that applies the basic cleaning inherited from
 * {@code CommonPreprocessor} and then reduces the cleaned token to its
 * English Porter stem.
 *
 * PLEASE NOTE: This preprocessor is thread-safe by using synchronized method
 *
 * @author raver119@gmail.com
 */
public class StemmingPreprocessor extends CommonPreprocessor {
    @Override
    public synchronized String preProcess(String token) {
        // Run the inherited cleaning step first, then stem the cleaned token.
        final String cleaned = super.preProcess(token);
        final PorterStemmer porter = new PorterStemmer();
        porter.setCurrent(cleaned);
        porter.stem();
        return porter.getCurrent();
    }
}
apache-2.0
nemigaservices/unpack-bower-dependency-maven-plugin
src/main/java/org/kaazing/bower/dependency/maven/plugin/UnpackBowerDependencyMojo.java
8267
/**
 * Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.kaazing.bower.dependency.maven.plugin;

import static java.util.regex.Pattern.compile;

import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.logging.Log;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.Ref;

import com.github.zafarkhaja.semver.UnexpectedCharacterException;
import com.github.zafarkhaja.semver.Version;

/**
 * Unpack bower dependencies: clones each dependency's git repository into the
 * output directory and checks out the tag matching the requested version.
 *
 * @goal unpack
 * @phase generate-resources
 */
public class UnpackBowerDependencyMojo extends AbstractMojo {

    /**
     * Bower Dependency
     * @required
     * @parameter
     */
    private List<BowerDependency> bowerDependencies;

    /**
     * Output Directory
     * @parameter default-value="${project.build.directory}/bower-dependency"
     */
    private File outputDir;

    /** Matches a concrete semver version ("1.2.3" or "1.2.3-beta"); anything else is treated as a range. */
    static final Pattern SEMVER_SIMPLE_VERSION_MATCHER = compile("\\d+\\.\\d+\\.\\d+(-.*)?");
    /** Matches GitHub shorthand "owner/package". */
    static final Pattern SHORTHAND_PATTERN = compile("(?<owner>[^/]+)\\/(?<package>[^/]+)");

    Log log = getLog();

    /**
     * Clears the output directory, then for each configured dependency clones
     * its repository and checks out the tag satisfying the requested version.
     *
     * @throws MojoExecutionException if cloning, tag listing, tag resolution or checkout fails
     */
    public void execute() throws MojoExecutionException {
        // Start from a clean output directory so stale checkouts never leak through.
        if (getOutputDir().exists()) {
            deleteFully(getOutputDir());
        }
        getOutputDir().mkdirs();
        for (BowerDependency bowerDependency : bowerDependencies) {
            String name = bowerDependency.getName();
            String location = bowerDependency.getLocation();
            String requiredVersion = bowerDependency.getVersion();

            location = parseGitLocation(location);
            log.debug("Git Repo is at " + location);

            Git repo = checkoutGitRepo(new File(getOutputDir(), name), location);
            List<Ref> tagList;
            try {
                tagList = repo.tagList().call();
            } catch (GitAPIException e) {
                throw new MojoExecutionException("Could not tags on repo", e);
            }
            String tag = findMatchingTag(requiredVersion, tagList);
            try {
                repo.checkout().setName(tag).call();
            } catch (Exception e) {
                throw new MojoExecutionException("Failed to switch to tag: " + tag, e);
            }
        }
    }

    /**
     * Takes user inputed location, and returns git location. GitHub shorthand
     * ("owner/package") is expanded to a full https URL; anything else is
     * passed through unchanged.
     *
     * @param location user-configured location
     * @return resolvable git URL
     */
    String parseGitLocation(String location) {
        Matcher locationMatcher = SHORTHAND_PATTERN.matcher(location);
        boolean isShortHand = locationMatcher.matches();
        if (isShortHand) {
            location = String.format("https://github.com/%s/%s", locationMatcher.group("owner"),
                    locationMatcher.group("package"));
        }
        return location;
    }

    /**
     * Clones a git repo into the given directory.
     *
     * @param outputDir directory to clone into (created if absent)
     * @param gitUrl repository URL
     * @return the cloned repository handle
     * @throws MojoExecutionException if the clone fails
     */
    Git checkoutGitRepo(File outputDir, String gitUrl) throws MojoExecutionException {
        outputDir.mkdir();
        try {
            return Git.cloneRepository().setURI(gitUrl).setDirectory(outputDir).call();
        } catch (Exception e) {
            throw new MojoExecutionException("Could not fetch git repository", e);
        }
    }

    /**
     * Finds matching tag for a requiredVersion.
     *
     * @param requiredVersion (Can be semver version or version range)
     * @param tagList tags available on the repository
     * @return checkout ref of the form "tags/&lt;version&gt;"
     * @throws MojoExecutionException if no tag satisfies the requested version
     */
    String findMatchingTag(String requiredVersion, List<Ref> tagList) throws MojoExecutionException {
        String tagPrefix = "refs/tags/";
        List<ArtifactVersion> availableVersions = new ArrayList<>();
        for (Ref tag : tagList) {
            String tagVersion = tag.getName().toString().replace(tagPrefix, "");
            log.debug("Found tag version \"" + tagVersion + "\" from tag with name \"" + tag.getName() + "\"");
            try {
                // Check that it follows SEMVER
                Version.valueOf(tagVersion);
                // If it does add it to available versions
                availableVersions.add(new DefaultArtifactVersion(tagVersion));
            } catch (UnexpectedCharacterException e) {
                log.warn("Found tag version \"" + tagVersion + "\" from tag with name \"" + tag.getName()
                        + "\" that does not match semver spec");
            }
        }
        // Sorted ascending, so the last match below is the highest satisfying version.
        Collections.sort(availableVersions);

        Matcher matcher = SEMVER_SIMPLE_VERSION_MATCHER.matcher(requiredVersion);
        boolean isRange = !matcher.matches();
        String tag = null;
        if (isRange) {
            log.info("version is a range");
            VersionRange versionRange;
            try {
                versionRange = VersionRange.createFromVersionSpec(requiredVersion);
            } catch (InvalidVersionSpecificationException e) {
                throw new MojoExecutionException("Unable to parse version range " + requiredVersion, e);
            }
            for (ArtifactVersion availableVersion : availableVersions) {
                if (versionRange.containsVersion(availableVersion)) {
                    tag = availableVersion.toString();
                }
            }
        } else {
            log.info("version is not a range");
            for (ArtifactVersion availableVersion : availableVersions) {
                // FIX: requiredVersion is already a String; dropped the redundant toString() call.
                log.info(availableVersion.toString() + " compared to " + requiredVersion);
                if (requiredVersion.equals(availableVersion.toString())) {
                    log.info("found tag! " + availableVersion.toString());
                    tag = availableVersion.toString();
                }
            }
        }
        if (tag == null) {
            StringBuilder messageBuilder = new StringBuilder("Could not find a version to match: ");
            messageBuilder.append(requiredVersion);
            messageBuilder.append(", available versions are:");
            for (ArtifactVersion availableVersion : availableVersions) {
                messageBuilder.append("\t");
                messageBuilder.append(availableVersion);
                messageBuilder.append(",");
            }
            throw new MojoExecutionException(messageBuilder.toString());
        }
        tag = "tags/" + tag;
        return tag;
    }

    /**
     * Recursively deletes a file or directory tree.
     *
     * @param file file or directory to delete
     */
    public void deleteFully(File file) {
        if (file.isDirectory()) {
            File[] children = file.listFiles();
            // FIX: listFiles() returns null on an I/O error or if the directory
            // vanished concurrently; the original code NPE'd in that case.
            if (children != null) {
                for (File child : children) {
                    deleteFully(child);
                }
            }
        }
        file.delete();
    }

    /**
     * For Testing
     */
    void setBowerDependencies(List<BowerDependency> bowerDependencies) {
        this.bowerDependencies = bowerDependencies;
    }

    private File getOutputDir() {
        return outputDir;
    }

    /**
     * For Testing
     * @param outputDir
     */
    void setOutputDir(File outputDir) {
        this.outputDir = outputDir;
    }
}
apache-2.0
openview2017/openview
service/src/main/java/com/huawei/openview/devops/domain/admin/AppStatus.java
928
package com.huawei.openview.devops.domain.admin;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.table.DatabaseTable;
import lombok.Data;

/**
 * ORMLite entity mapped to the "app_status" table, tracking an application's
 * deployment phase plus coarse service/pod/container counts. Getters, setters,
 * equals/hashCode and toString are generated by Lombok's @Data.
 *
 * @author Sid Askary
 *
 */
@Data
@JsonInclude(JsonInclude.Include.NON_NULL)
@DatabaseTable(tableName = "app_status")
public class AppStatus {

    // Column-name constants for building queries against this table.
    public static final String ID = "id";
    public static final String PHASE = "phase";

    // Auto-generated primary key; hidden from JSON output.
    @JsonIgnore
    @DatabaseField(generatedId = true)
    private Long id;

    // Lifecycle phase; new rows default to "creating" both in Java and in the DB column.
    @DatabaseField(canBeNull = false, defaultValue = "creating")
    private Phase phase = Phase.creating;

    @DatabaseField
    private Long service_count;

    @DatabaseField
    private Long pod_count;

    @DatabaseField
    private Long container_count;

    // No @DatabaseField: presumably computed at runtime and deliberately not
    // persisted — TODO confirm this is intentional.
    private Long ready_pod_count;

    /** Deployment lifecycle states persisted in the "phase" column. */
    public enum Phase {
        creating, planning, launching, launched, error, deleting, deleted
    }
}
apache-2.0
HFATFAT/Chocolate
app/src/main/java/com/hzj/chocolate/chocolate/adapter/StaggeredRecyclerViewSimpleAdapter.java
5433
package com.hzj.chocolate.chocolate.adapter; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.Handler; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import com.hzj.chocolate.chocolate.R; import com.hzj.chocolate.chocolate.bean.ViewModel; import com.squareup.picasso.Picasso; import java.util.ArrayList; import java.util.List; /** * Created by ff on 15/8/26. */ public class StaggeredRecyclerViewSimpleAdapter extends RecyclerView.Adapter<StaggeredRecyclerViewSimpleAdapter.MyViewHolder>{ private LayoutInflater mInflater; private Context mContext; private List<String> mDatas; private List<Integer> mHeights; private List<ViewModel> items; private List<Integer> testItems; // public StaggeredRecyclerViewSimpleAdapter(Context context,List<String> datas){ // this.mContext=context; // this.mDatas=datas; // mInflater=LayoutInflater.from(context); // // mHeights=new ArrayList<Integer>(); // for(int i=0;i<mDatas.size();i++){ // mHeights.add((int) (100+Math.random()*300)); // } // } public StaggeredRecyclerViewSimpleAdapter(Context context,List<ViewModel> items){ this.mContext=context; this.items=items; mInflater=LayoutInflater.from(context); // mHeights=new ArrayList<Integer>(); // ImageView testImageView=new ImageView(context); // testImageView.setImageResource(items.get(1)); // Log.d("ti", String.valueOf(testImageView.getLayoutParams().height)); // for(int i=0;i<items.size();i++){ // mHeights.add((int) (100+Math.random()*300)); // } } @Override public StaggeredRecyclerViewSimpleAdapter.MyViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view= mInflater.inflate(R.layout.item_single_textview,parent,false); MyViewHolder viewHolder=new MyViewHolder(view); return viewHolder; } public 
void addData(int pos){ mDatas.add(pos,"Insert"); notifyItemInserted(pos); } public void deleteDate(int pos){ mDatas.remove(pos); notifyItemRemoved(pos); } @Override public void onBindViewHolder(final MyViewHolder holder, final int position) { ViewGroup.LayoutParams lp=holder.itemView.getLayoutParams(); // lp.height=mHeights.get(position); // holder.tv.setText(mDatas.get(position)); ViewModel item = items.get(position); holder.iv.setImageBitmap(null); // holder.iv.setImageResource(testItems.get(position)); Picasso.with(holder.iv.getContext()).load(item.getImage()).into(holder.iv); holder.itemView.setTag(item); // lp.height=600; // Bitmap bitmap = BitmapFactory.decodeResource(mContext.getResources(),testItems.get(position)); // lp.height=(bitmap.getHeight())/2; // Log.d("H", String.valueOf(bitmap.getHeight())); // Log.d("HH", String.valueOf(holder.iv.getHeight())); // Log.d("ww", String.valueOf(holder.iv.getWidth())); // Log.d("dhhhh", String.valueOf(holder.iv.getLayoutParams().height)); // holder.itemView.setLayoutParams(lp); if(mOnItemClickListener!=null){ holder.itemView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { int layoutPosition= holder.getLayoutPosition(); mOnItemClickListener.onItemClick(holder.itemView,layoutPosition, (ViewModel) v.getTag()); } }); holder.itemView.setOnLongClickListener(new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { int layoutPosition= holder.getLayoutPosition(); mOnItemClickListener.onItemLongClick(holder.itemView, layoutPosition); return true; } }); } } @Override public int getItemCount() { return items.size(); } class MyViewHolder extends RecyclerView.ViewHolder{ TextView tv; ImageView iv; public MyViewHolder(View itemView) { super(itemView); // tv= (TextView) itemView.findViewById(R.id.id_tv); iv= (ImageView) itemView.findViewById(R.id.iv_image); } } // @Override public void onClick(final View v) { // // Give some time to the ripple to finish the effect // if 
(mOnItemClickListener != null) { // new Handler().postDelayed(new Runnable() { // @Override public void run() { // mOnItemClickListener.onItemClick(v, (ViewModel) v.getTag()); // } // }, 200); // } // } public interface OnItemClickListener{ void onItemClick(View view,int position,ViewModel viewModel); void onItemLongClick(View view,int position); } private OnItemClickListener mOnItemClickListener; public void setOnItemClickListener(OnItemClickListener listener){ this.mOnItemClickListener=listener; } }
apache-2.0
RobbertJanSW/knife-windows
lib/chef/knife/windows_listener_create.rb
3871
# Author:: Mukta Aphale (<mukta.aphale@clogeny.com>)
# Copyright:: Copyright (c) 2014-2016 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require 'chef/knife'
require 'chef/knife/winrm_base'
require 'openssl'

class Chef
  class Knife
    # Knife plugin that creates a WinRM HTTPS listener on the local Windows
    # host, optionally installing a PFX certificate into the machine store
    # first. Only runs on Windows.
    class WindowsListenerCreate < Knife
      banner "knife windows listener create (options)"

      option :cert_install,
        :short => "-c CERT_PATH",
        :long => "--cert-install CERT_PATH",
        :description => "Adds specified certificate to the Windows Certificate Store's Local Machine personal store before creating listener."

      option :port,
        :short => "-p PORT",
        :long => "--port PORT",
        :description => "Specify port. Default is 5986",
        :default => "5986"

      option :hostname,
        :short => "-h HOSTNAME",
        :long => "--hostname HOSTNAME",
        :description => "Hostname on the listener. Default is blank",
        :default => ""

      option :cert_thumbprint,
        :short => "-t THUMBPRINT",
        :long => "--cert-thumbprint THUMBPRINT",
        :description => "Thumbprint of the certificate. Required only if --cert-install option is not used."

      option :cert_passphrase,
        :short => "-cp PASSWORD",
        :long => "--cert-passphrase PASSWORD",
        :description => "Password for certificate."

      # Prompts for the certificate passphrase on stdin.
      # Returns "" on EOF instead of raising (gets may return nil).
      def get_cert_passphrase
        print "Enter given certificate's passphrase (empty for no passphrase):"
        passphrase = STDIN.gets
        passphrase.to_s.strip
      end

      def run
        STDOUT.sync = STDERR.sync = true

        if Chef::Platform.windows?
          begin
            if config[:cert_install]
              config[:cert_passphrase] = get_cert_passphrase unless config[:cert_passphrase]
              result = %x{powershell.exe -Command " '#{config[:cert_passphrase]}' | certutil -importPFX '#{config[:cert_install]}' AT_KEYEXCHANGE"}
              # FIX: `if $?.exitstatus` was always truthy (any Integer is truthy
              # in Ruby), so the failure branch below was unreachable. Compare
              # against zero like the listener-creation check further down.
              if $?.exitstatus == 0
                ui.info "Certificate installed to Certificate Store"
                result = %x{powershell.exe -Command " echo (Get-PfxCertificate #{config[:cert_install]}).thumbprint "}
                ui.info "Certificate Thumbprint: #{result}"
                config[:cert_thumbprint] = result.strip
              else
                ui.error "Error installing certificate to Certificate Store"
                ui.error result
                exit 1
              end
            end

            unless config[:cert_thumbprint]
              ui.error "Please specify the --cert-thumbprint"
              exit 1
            end

            result = %x{winrm create winrm/config/Listener?Address=*+Transport=HTTPS @{Hostname="#{config[:hostname]}";CertificateThumbprint="#{config[:cert_thumbprint]}";Port="#{config[:port]}"}}
            Chef::Log.debug result

            if ($?.exitstatus == 0)
              ui.info "WinRM listener created with Port: #{config[:port]} and CertificateThumbprint: #{config[:cert_thumbprint]}"
            else
              ui.error "Error creating WinRM listener. use -VV for more details."
              exit 1
            end
          rescue => e
            puts "ERROR: + #{e}"
          end
        else
          ui.error "WinRM listener can be created on Windows system only"
          exit 1
        end
      end
    end
  end
end
apache-2.0
dvabuzyarov/moq.ts
projects/moq/src/lib/verification/verification-tester.ts
654
import { Times } from "../times";
import { VerifyFormatter } from "../formatters/verify.formatter";
import { VerifyError } from "./verify-error";
import { Expressions } from "../reflector/expressions";

/**
 * @hidden
 * Validates an observed call count against an expected Times specification
 * and raises a formatted VerifyError when the expectation fails.
 */
export class VerificationTester {

    constructor(private readonly verifyFormatter: VerifyFormatter) {
    }

    /**
     * Throws a VerifyError describing the expression, the expectation message
     * and the actual call count when `times` rejects `callCount`.
     */
    public test(expression: Expressions<any>, callCount: number, times: Times): void {
        if (times.test(callCount) !== false) {
            return;
        }
        const description = this.verifyFormatter.format(expression, times.message, callCount);
        throw new VerifyError(description);
    }
}
apache-2.0
GoogleChrome/lighthouse
lighthouse-core/audits/byte-efficiency/unminified-javascript.js
5212
/**
 * @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
 */
'use strict';

const ByteEfficiencyAudit = require('./byte-efficiency-audit.js');
const i18n = require('../../lib/i18n/i18n.js');
const computeTokenLength = require('../../lib/minification-estimator.js').computeJSTokenLength;

const UIStrings = {
  /** Imperative title of a Lighthouse audit that tells the user to minify the page’s JS code to reduce file size. This is displayed in a list of audit titles that Lighthouse generates. */
  title: 'Minify JavaScript',
  /** Description of a Lighthouse audit that tells the user *why* they should minify the page’s JS code to reduce file size. This is displayed after a user expands the section to see more. No character length limits. 'Learn More' becomes link text to additional documentation. */
  description: 'Minifying JavaScript files can reduce payload sizes and script parse time. ' +
    '[Learn more](https://web.dev/unminified-javascript/).',
};

const str_ = i18n.createMessageInstanceIdFn(__filename, UIStrings);

const IGNORE_THRESHOLD_IN_PERCENT = 10;
const IGNORE_THRESHOLD_IN_BYTES = 2048;

/**
 * @fileOverview Estimates minification savings by determining the ratio of parseable JS tokens to the
 * length of the entire string. Though simple, this method is quite accurate at identifying whether
 * a script was already minified and offers a relatively conservative minification estimate (our two
 * primary goals).
 *
 * This audit only examines scripts that were independent network requests and not inlined or eval'd.
 *
 * See https://github.com/GoogleChrome/lighthouse/pull/3950#issue-277887798 for stats on accuracy.
 */
class UnminifiedJavaScript extends ByteEfficiencyAudit {
  /**
   * @return {LH.Audit.Meta}
   */
  static get meta() {
    return {
      id: 'unminified-javascript',
      title: str_(UIStrings.title),
      description: str_(UIStrings.description),
      scoreDisplayMode: ByteEfficiencyAudit.SCORING_MODES.NUMERIC,
      requiredArtifacts: ['ScriptElements', 'devtoolsLogs', 'traces', 'GatherContext'],
    };
  }

  /**
   * @param {string} scriptContent
   * @param {string} displayUrl
   * @param {LH.Artifacts.NetworkRequest|undefined} networkRecord
   * @return {{url: string, totalBytes: number, wastedBytes: number, wastedPercent: number}}
   */
  static computeWaste(scriptContent, displayUrl, networkRecord) {
    const contentLength = scriptContent.length;
    const totalTokenLength = computeTokenLength(scriptContent);

    const totalBytes = ByteEfficiencyAudit.estimateTransferSize(networkRecord, contentLength,
      'Script');
    const wastedRatio = 1 - totalTokenLength / contentLength;
    const wastedBytes = Math.round(totalBytes * wastedRatio);

    return {
      url: displayUrl,
      totalBytes,
      wastedBytes,
      wastedPercent: 100 * wastedRatio,
    };
  }

  /**
   * @param {LH.Artifacts} artifacts
   * @param {Array<LH.Artifacts.NetworkRequest>} networkRecords
   * @return {ByteEfficiencyAudit.ByteEfficiencyProduct}
   */
  static audit_(artifacts, networkRecords) {
    /** @type {Array<LH.Audit.ByteEfficiencyItem>} */
    const items = [];
    const warnings = [];
    for (const {requestId, src, content} of artifacts.ScriptElements) {
      if (!content) continue;

      const networkRecord = networkRecords.find(record => record.requestId === requestId);
      // FIX: String.prototype.substr is deprecated; slice(0, 40) is identical here.
      const displayUrl = !src || !networkRecord ?
        `inline: ${content.slice(0, 40)}...` :
        networkRecord.url;
      try {
        const result = UnminifiedJavaScript.computeWaste(content, displayUrl, networkRecord);
        // If the ratio is minimal, the file is likely already minified, so ignore it.
        // If the total number of bytes to be saved is quite small, it's also safe to ignore.
        if (result.wastedPercent < IGNORE_THRESHOLD_IN_PERCENT ||
            result.wastedBytes < IGNORE_THRESHOLD_IN_BYTES ||
            !Number.isFinite(result.wastedBytes)) continue;
        items.push(result);
      } catch (err) {
        const url = networkRecord ? networkRecord.url : '?';
        warnings.push(`Unable to process script ${url}: ${err.message}`);
      }
    }

    /** @type {LH.Audit.Details.Opportunity['headings']} */
    const headings = [
      {key: 'url', valueType: 'url', label: str_(i18n.UIStrings.columnURL)},
      {key: 'totalBytes', valueType: 'bytes', label: str_(i18n.UIStrings.columnTransferSize)},
      {key: 'wastedBytes', valueType: 'bytes', label: str_(i18n.UIStrings.columnWastedBytes)},
    ];

    return {
      items,
      warnings,
      headings,
    };
  }
}

module.exports = UnminifiedJavaScript;
module.exports.UIStrings = UIStrings;
apache-2.0
nnkarpov/csharp_training
addressbook-web-tests/addressbook-web-tests/Properties/AssemblyInfo.cs
643
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General assembly metadata shown in file properties and package managers.
[assembly: AssemblyTitle("addressbook-web-tests")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("addressbook-web-tests")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Types in this assembly are not visible to COM components.
[assembly: ComVisible(false)]

// The GUID of the typelib if this project were exposed to COM.
[assembly: Guid("ce575f3b-3365-408f-9d6a-8bd7d1730122")]

// Version information; the wildcard form would auto-generate build/revision numbers.
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
apache-2.0
shagraths/LinaExpress
application/models/modelopdf.php
263
<?php

/**
 * CodeIgniter model backing PDF generation: reads rows from the `Excel` table.
 */
class modelopdf extends CI_Model {

    /**
     * Returns every row of the `Excel` table whose NombreArchivo column
     * equals the given file name.
     *
     * @param string $nombreArchivo file name to filter by
     * @return object CI_DB query result — presumably iterated by the caller
     *                via result()/row(); TODO confirm against the controller.
     */
    function traerDatosNombre($nombreArchivo){
        $this->db->select('*');
        $this->db->where('NombreArchivo', $nombreArchivo);
        return $this->db->get('Excel');
    }
}
?>
apache-2.0
aws/aws-sdk-java
aws-java-sdk-transfer/src/main/java/com/amazonaws/services/transfer/model/transform/ListSecurityPoliciesRequestMarshaller.java
2392
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.transfer.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.transfer.model.*;

import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * ListSecurityPoliciesRequestMarshaller
 *
 * Code-generated marshaller that writes the two request fields (MaxResults,
 * NextToken) into the JSON payload. Do not edit by hand.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListSecurityPoliciesRequestMarshaller {

    // Binding for the optional MaxResults integer payload member.
    private static final MarshallingInfo<Integer> MAXRESULTS_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("MaxResults").build();
    // Binding for the optional NextToken pagination string payload member.
    private static final MarshallingInfo<String> NEXTTOKEN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("NextToken").build();

    // Stateless, so a single shared instance is safe.
    private static final ListSecurityPoliciesRequestMarshaller instance = new ListSecurityPoliciesRequestMarshaller();

    public static ListSecurityPoliciesRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(ListSecurityPoliciesRequest listSecurityPoliciesRequest, ProtocolMarshaller protocolMarshaller) {

        if (listSecurityPoliciesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(listSecurityPoliciesRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(listSecurityPoliciesRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
facug91/OJ-Solutions
uva.onlinejudge.org/ThreeFamilies.cpp
848
/*
    By: facug91
    From: http://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=3946
    Name: Three Families
    Date: 14/02/2015

    Restyled: unused contest-template macros, typedefs and globals removed;
    the arithmetic and I/O are unchanged.
*/
#include <bits/stdc++.h>
using namespace std;

int main() {
    ios_base::sync_with_stdio(0);

    int testCases;
    cin >> testCases;
    while (testCases--) {
        int x, y, z;
        cin >> x >> y >> z;
        // Integer formula identical to the original: (z * (x + (x - y))) / (x + y)
        cout << ((z * (x + (x - y))) / (x + y)) << endl;
    }
    return 0;
}
apache-2.0
bukhmastov/Cardiograph
app/src/main/java/com/bukhmastov/cardiograph/activities/SplashActivity.java
728
package com.bukhmastov.cardiograph.activities; import android.preference.PreferenceManager; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.content.Intent; import com.bukhmastov.cardiograph.R; public class SplashActivity extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); PreferenceManager.setDefaultValues(this, R.xml.pref_general, false); PreferenceManager.setDefaultValues(this, R.xml.pref_arduino, false); PreferenceManager.setDefaultValues(this, R.xml.pref_archive, false); startActivity(new Intent(this, MainActivity.class)); finish(); } }
apache-2.0
frett/cas
core/cas-server-core-monitor/src/test/java/org/apereo/cas/monitor/CacheHealthIndicatorTests.java
3500
package org.apereo.cas.monitor; import org.apereo.cas.config.support.EnvironmentConversionServiceInitializer; import org.apereo.cas.configuration.CasConfigurationProperties; import lombok.val; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.actuate.health.Status; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.rules.SpringClassRule; import org.springframework.test.context.junit4.rules.SpringMethodRule; import static org.junit.Assert.*; /** * Unit test for {@link AbstractCacheHealthIndicator}. * * @author Marvin S. Addison * @since 3.5.1 */ @ContextConfiguration(initializers = EnvironmentConversionServiceInitializer.class) @EnableConfigurationProperties(CasConfigurationProperties.class) public class CacheHealthIndicatorTests { @ClassRule public static final SpringClassRule SPRING_CLASS_RULE = new SpringClassRule(); @Rule public final SpringMethodRule springMethodRule = new SpringMethodRule(); @Autowired private CasConfigurationProperties casProperties; protected static SimpleCacheStatistics[] statsArray(final SimpleCacheStatistics... 
statistics) { return statistics; } @Test public void verifyObserveOk() { val warn = casProperties.getMonitor().getWarn(); val monitor = new AbstractCacheHealthIndicator( warn.getEvictionThreshold(), warn.getThreshold()) { @Override protected SimpleCacheStatistics[] getStatistics() { return statsArray(new SimpleCacheStatistics(100, 200, 0)); } }; val status = monitor.health().getStatus(); assertEquals(Status.UP, status); } @Test public void verifyObserveWarn() { val warn = casProperties.getMonitor().getWarn(); val monitor = new AbstractCacheHealthIndicator( warn.getEvictionThreshold(), warn.getThreshold() ) { @Override protected SimpleCacheStatistics[] getStatistics() { return statsArray(new SimpleCacheStatistics(199, 200, 100)); } }; val status = monitor.health().getStatus(); assertEquals("WARN", status.getCode()); } @Test public void verifyObserveError() { val warn = casProperties.getMonitor().getWarn(); val monitor = new AbstractCacheHealthIndicator( warn.getEvictionThreshold(), warn.getThreshold()) { @Override protected SimpleCacheStatistics[] getStatistics() { return statsArray(new SimpleCacheStatistics(100, 110, 0)); } }; val status = monitor.health().getStatus(); assertEquals(Status.OUT_OF_SERVICE, status); } @Test public void verifyObserveError2() { val warn = casProperties.getMonitor().getWarn(); val monitor = new AbstractCacheHealthIndicator( warn.getEvictionThreshold(), warn.getThreshold()) { @Override protected SimpleCacheStatistics[] getStatistics() { return statsArray(new SimpleCacheStatistics(199, 200, 1)); } }; assertEquals("WARN", monitor.health().getStatus().getCode()); } }
apache-2.0
NetApp/manila
manila/api/v2/consistency_groups.py
9561
# Copyright 2015 Alex Meade
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""The consistency groups API."""

from oslo_log import log
from oslo_utils import uuidutils
import six
import webob
from webob import exc

from manila.api import common
from manila.api.openstack import wsgi
import manila.api.views.consistency_groups as cg_views
import manila.consistency_group.api as cg_api
from manila import db
from manila import exception
from manila.i18n import _, _LI
from manila.share import share_types

LOG = log.getLogger(__name__)


class CGController(wsgi.Controller, wsgi.AdminActionsMixin):
    """The Consistency Groups API controller for the OpenStack API."""

    # Policy resource name used by the authorize decorators.
    # Note: the original file assigned this attribute twice; the duplicate
    # has been removed.
    resource_name = 'consistency_group'
    _view_builder_class = cg_views.CGViewBuilder

    def __init__(self):
        super(CGController, self).__init__()
        self.cg_api = cg_api.API()

    @wsgi.Controller.api_version('2.4', experimental=True)
    @wsgi.Controller.authorize('get')
    def show(self, req, id):
        """Return data about the given CG."""
        context = req.environ['manila.context']

        try:
            cg = self.cg_api.get(context, id)
        except exception.NotFound:
            msg = _("Consistency group %s not found.") % id
            raise exc.HTTPNotFound(explanation=msg)

        return self._view_builder.detail(req, cg)

    @wsgi.Controller.api_version('2.4', experimental=True)
    @wsgi.Controller.authorize
    def delete(self, req, id):
        """Delete a CG."""
        context = req.environ['manila.context']

        LOG.info(_LI("Delete consistency group with id: %s"), id,
                 context=context)

        try:
            cg = self.cg_api.get(context, id)
        except exception.NotFound:
            msg = _("Consistency group %s not found.") % id
            raise exc.HTTPNotFound(explanation=msg)

        try:
            self.cg_api.delete(context, cg)
        except exception.InvalidConsistencyGroup as e:
            raise exc.HTTPConflict(explanation=six.text_type(e))
        return webob.Response(status_int=202)

    @wsgi.Controller.api_version('2.4', experimental=True)
    @wsgi.Controller.authorize('get_all')
    def index(self, req):
        """Returns a summary list of consistency groups."""
        return self._get_cgs(req, is_detail=False)

    @wsgi.Controller.api_version('2.4', experimental=True)
    @wsgi.Controller.authorize('get_all')
    def detail(self, req):
        """Returns a detailed list of consistency groups."""
        return self._get_cgs(req, is_detail=True)

    def _get_cgs(self, req, is_detail):
        """Returns a list of consistency groups via the view builder."""
        context = req.environ['manila.context']

        search_opts = {}
        search_opts.update(req.GET)

        # Remove keys that are not related to cg attrs
        search_opts.pop('limit', None)
        search_opts.pop('offset', None)

        cgs = self.cg_api.get_all(
            context, detailed=is_detail, search_opts=search_opts)

        limited_list = common.limited(cgs, req)

        if is_detail:
            cgs = self._view_builder.detail_list(req, limited_list)
        else:
            cgs = self._view_builder.summary_list(req, limited_list)
        return cgs

    @wsgi.Controller.api_version('2.4', experimental=True)
    @wsgi.Controller.authorize
    def update(self, req, id, body):
        """Update a consistency group (only name and description)."""
        context = req.environ['manila.context']
        if not self.is_valid_body(body, 'consistency_group'):
            msg = _("'consistency_group' is missing from the request body.")
            raise exc.HTTPBadRequest(explanation=msg)

        cg_data = body['consistency_group']
        valid_update_keys = {
            'name',
            'description',
        }
        invalid_fields = set(cg_data.keys()) - valid_update_keys
        if invalid_fields:
            msg = _("The fields %s are invalid or not allowed to be updated.")
            raise exc.HTTPBadRequest(explanation=msg % invalid_fields)

        try:
            cg = self.cg_api.get(context, id)
        except exception.NotFound:
            msg = _("Consistency group %s not found.") % id
            raise exc.HTTPNotFound(explanation=msg)

        cg = self.cg_api.update(context, cg, cg_data)
        return self._view_builder.detail(req, cg)

    @wsgi.Controller.api_version('2.4', experimental=True)
    @wsgi.response(202)
    @wsgi.Controller.authorize
    def create(self, req, body):
        """Creates a new consistency group."""
        context = req.environ['manila.context']

        if not self.is_valid_body(body, 'consistency_group'):
            msg = _("'consistency_group' is missing from the request body.")
            raise exc.HTTPBadRequest(explanation=msg)

        cg = body['consistency_group']

        valid_fields = {'name', 'description', 'share_types',
                        'source_cgsnapshot_id', 'share_network_id'}
        invalid_fields = set(cg.keys()) - valid_fields
        if invalid_fields:
            msg = _("The fields %s are invalid.") % invalid_fields
            raise exc.HTTPBadRequest(explanation=msg)

        # Share types and a source snapshot are mutually exclusive: a CG
        # created from a snapshot inherits its share types.
        if 'share_types' in cg and 'source_cgsnapshot_id' in cg:
            msg = _("Cannot supply both 'share_types' and "
                    "'source_cgsnapshot_id' attributes.")
            raise exc.HTTPBadRequest(explanation=msg)

        if not cg.get('share_types') and 'source_cgsnapshot_id' not in cg:
            default_share_type = share_types.get_default_share_type()
            if default_share_type:
                cg['share_types'] = [default_share_type['id']]
            else:
                msg = _("Must specify at least one share type as a default "
                        "share type has not been configured.")
                raise exc.HTTPBadRequest(explanation=msg)

        kwargs = {}

        if 'name' in cg:
            kwargs['name'] = cg.get('name')
        if 'description' in cg:
            kwargs['description'] = cg.get('description')

        _share_types = cg.get('share_types')
        if _share_types:
            if not all([uuidutils.is_uuid_like(st) for st in _share_types]):
                msg = _("The 'share_types' attribute must be a list of uuids")
                raise exc.HTTPBadRequest(explanation=msg)
            kwargs['share_type_ids'] = _share_types

        # The share network is inherited from the source snapshot, so the
        # two attributes cannot be combined either.
        if 'share_network_id' in cg and 'source_cgsnapshot_id' in cg:
            msg = _("Cannot supply both 'share_network_id' and "
                    "'source_cgsnapshot_id' attributes as the share network "
                    "is inherited from the source.")
            raise exc.HTTPBadRequest(explanation=msg)

        if 'source_cgsnapshot_id' in cg:
            source_cgsnapshot_id = cg.get('source_cgsnapshot_id')
            if not uuidutils.is_uuid_like(source_cgsnapshot_id):
                msg = _("The 'source_cgsnapshot_id' attribute must be a uuid.")
                raise exc.HTTPBadRequest(explanation=six.text_type(msg))
            kwargs['source_cgsnapshot_id'] = source_cgsnapshot_id
        elif 'share_network_id' in cg:
            share_network_id = cg.get('share_network_id')
            if not uuidutils.is_uuid_like(share_network_id):
                msg = _("The 'share_network_id' attribute must be a uuid.")
                raise exc.HTTPBadRequest(explanation=six.text_type(msg))
            kwargs['share_network_id'] = share_network_id

        try:
            new_cg = self.cg_api.create(context, **kwargs)
        except exception.InvalidCGSnapshot as e:
            raise exc.HTTPConflict(explanation=six.text_type(e))
        except (exception.CGSnapshotNotFound, exception.InvalidInput) as e:
            raise exc.HTTPBadRequest(explanation=six.text_type(e))

        return self._view_builder.detail(req, dict(new_cg.items()))

    # The three hooks below are required by wsgi.AdminActionsMixin and back
    # the reset-status / force-delete admin actions.
    def _update(self, *args, **kwargs):
        db.consistency_group_update(*args, **kwargs)

    def _get(self, *args, **kwargs):
        return self.cg_api.get(*args, **kwargs)

    def _delete(self, context, resource, force=True):
        db.consistency_group_destroy(context.elevated(), resource['id'])

    @wsgi.Controller.api_version('2.4', '2.6', experimental=True)
    @wsgi.action('os-reset_status')
    def cg_reset_status_legacy(self, req, id, body):
        return self._reset_status(req, id, body)

    @wsgi.Controller.api_version('2.7', experimental=True)
    @wsgi.action('reset_status')
    def cg_reset_status(self, req, id, body):
        return self._reset_status(req, id, body)

    @wsgi.Controller.api_version('2.4', '2.6', experimental=True)
    @wsgi.action('os-force_delete')
    def cg_force_delete_legacy(self, req, id, body):
        return self._force_delete(req, id, body)

    @wsgi.Controller.api_version('2.7', experimental=True)
    @wsgi.action('force_delete')
    def cg_force_delete(self, req, id, body):
        return self._force_delete(req, id, body)


def create_resource():
    return wsgi.Resource(CGController())
apache-2.0
ustudio/shaka-player
lib/media/streaming_engine.js
57579
/** * @license * Copyright 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ goog.provide('shaka.media.StreamingEngine'); goog.require('goog.asserts'); goog.require('shaka.media.MediaSourceEngine'); goog.require('shaka.media.Playhead'); goog.require('shaka.net.NetworkingEngine'); goog.require('shaka.util.Error'); goog.require('shaka.util.Functional'); goog.require('shaka.util.IDestroyable'); goog.require('shaka.util.MapUtils'); goog.require('shaka.util.PublicPromise'); /** * Creates a StreamingEngine. * * The StreamingEngine is responsible for setting up the Manifest's Streams * (i.e., for calling each Stream's createSegmentIndex() function), for * downloading segments, for co-ordinating audio, video, and text buffering, * and for handling Period transitions. The StreamingEngine provides an * interface to switch between Streams, but it does not choose which Streams to * switch to. * * The StreamingEngine notifies its owner when it needs to buffer a new Period, * so its owner can choose which Streams within that Period to initially * buffer. Moreover, the StreamingEngine also notifies its owner when any * Stream within the current Period may be switched to, so its owner can switch * bitrates, resolutions, or languages. * * The StreamingEngine does not need to be notified about changes to the * Manifest's SegmentIndexes; however, it does need to be notified when new * Periods are added to the Manifest, so it can set up that Period's Streams. 
* * To start the StreamingEngine the owner must first call configure() followed * by init(). The StreamingEngine will then call onChooseStreams(p) when it * needs to buffer Period p; it will then switch to the Streams returned from * that function. The StreamingEngine will call onCanSwitch() when any * Stream within the current Period may be switched to. * * The owner must call seeked() each time the playhead moves to a new location * within the presentation timeline; however, the owner may forego calling * seeked() when the playhead moves outside the presentation timeline. * * @param {!shaka.media.Playhead} playhead The Playhead. The caller retains * ownership. * @param {!shaka.media.MediaSourceEngine} mediaSourceEngine The * MediaSourceEngine. The caller retains ownership. * @param {shaka.net.NetworkingEngine} netEngine * @param {shakaExtern.Manifest} manifest * @param {function(!shakaExtern.Period): !Object.<string, shakaExtern.Stream>} * onChooseStreams Called when the given Period needs to be buffered. The * StreamingEngine will switch to the Streams returned from this function. * The caller cannot call switch() directly until the StreamingEngine calls * onCanSwitch() * @param {function()} onCanSwitch Called when any Stream within the current * Period may be switched to. * @param {function(!shaka.util.Error)} onError Called when an error occurs. * If the error is recoverable (see @link{shaka.util.Error}) then the * caller may invoke either StreamingEngine.switch() or * StreamingEngine.seeked() to attempt recovery. * @param {function()=} opt_onInitialStreamsSetup Optional callback which * is called when the initial set of Streams have been setup. Intended * to be used by tests. * @param {function()=} opt_onStartupComplete Optional callback which * is called when startup has completed. Intended to be used by tests. 
* * @constructor * @struct * @implements {shaka.util.IDestroyable} */ shaka.media.StreamingEngine = function( playhead, mediaSourceEngine, netEngine, manifest, onChooseStreams, onCanSwitch, onError, opt_onInitialStreamsSetup, opt_onStartupComplete) { /** @private {shaka.media.Playhead} */ this.playhead_ = playhead; /** @private {shaka.media.MediaSourceEngine} */ this.mediaSourceEngine_ = mediaSourceEngine; /** @private {shaka.net.NetworkingEngine} */ this.netEngine_ = netEngine; /** @private {?shakaExtern.Manifest} */ this.manifest_ = manifest; /** * @private * {?function(!shakaExtern.Period): !Object.<string, shakaExtern.Stream>} */ this.onChooseStreams_ = onChooseStreams; /** @private {?function()} */ this.onCanSwitch_ = onCanSwitch; /** @private {?function(!shaka.util.Error)} */ this.onError_ = onError; /** @private {?function()} */ this.onInitialStreamsSetup_ = opt_onInitialStreamsSetup || null; /** @private {?function()} */ this.onStartupComplete_ = opt_onStartupComplete || null; /** @private {?shakaExtern.StreamingConfiguration} */ this.config_ = null; /** @private {Promise} */ this.setupPeriodPromise_ = Promise.resolve(); /** * Maps a Period's index to an object that indicates that either * 1. the Period has not been set up (undefined) * 2. the Period is being set up ([a PublicPromise, false]), * 3. the Period is set up (i.e., all Streams within the Period are set up) * and can be switched to ([a PublicPromise, true]). * * @private {Array.<?{promise: shaka.util.PublicPromise, resolved: boolean}>} */ this.canSwitchPeriod_ = []; /** * Maps a Stream's ID to an object that indicates that either * 1. the Stream has not been set up (undefined) * 2. the Stream is being set up ([a Promise instance, false]), * 3. the Stream is set up and can be switched to * ([a Promise instance, true]). 
* * @private {Object.<number, * ?{promise: shaka.util.PublicPromise, resolved: boolean}>} */ this.canSwitchStream_ = {}; /** * Maps a content type, e.g., 'audio', 'video', or 'text', to a MediaState. * * @private {Object.<string, !shaka.media.StreamingEngine.MediaState_>} */ this.mediaStates_ = {}; /** * Set to true once one segment of each content type has been buffered. * * @private {boolean} */ this.startupComplete_ = false; /** @private {boolean} */ this.destroyed_ = false; }; /** * @typedef {{ * type: string, * stream: shakaExtern.Stream, * lastStream: ?shakaExtern.Stream, * lastSegmentReference: shaka.media.SegmentReference, * needInitSegment: boolean, * needRebuffering: boolean, * needPeriodIndex: number, * endOfStream: boolean, * performingUpdate: boolean, * updateTimer: ?number, * waitingToClearBuffer: boolean, * clearingBuffer: boolean * }} * * @description * Contains the state of a logical stream, i.e., a sequence of segmented data * for a particular content type. At any given time there is a Stream object * associated with the state of the logical stream. * * @property {string} type * The stream's content type, e.g., 'audio', 'video', or 'text'. * @property {shakaExtern.Stream} stream * The current Stream. * @property {?shakaExtern.Stream} lastStream * The Stream of the last segment that was appended. * @property {shaka.media.SegmentReference} lastSegmentReference * The SegmentReference of the last segment that was appended. * @property {boolean} needInitSegment * True indicates that |stream|'s init segment must be inserted before the * next media segment is appended. * @property {boolean} needRebuffering * True indicates that startup or re- buffering is required. * @property {boolean} endOfStream * True indicates that the end of the buffer has hit the end of the * presentation. * @property {number} needPeriodIndex * The index of the Period which needs to be buffered. * @property {boolean} performingUpdate * True indicates that an update is in progress. 
* @property {?number} updateTimer * A non-null value indicates that an update is scheduled. * @property {boolean} waitingToClearBuffer * True indicates that the buffer must be cleared after the current update * finishes. * @property {boolean} clearingBuffer * True indicates that the buffer is being cleared. */ shaka.media.StreamingEngine.MediaState_; /** * The minimum number seconds that will remain buffered after evicting media. * * @const {number} */ shaka.media.StreamingEngine.prototype.MIN_BUFFER_LENGTH = 2; /** * Gets the StreamingEngine's rebuffering goal. * * @param {shakaExtern.Manifest} manifest * @param {shakaExtern.StreamingConfiguration} config * @return {number} */ shaka.media.StreamingEngine.getRebufferingGoal = function(manifest, config) { return Math.max(manifest.minBufferTime || 0, config.rebufferingGoal); }; /** @override */ shaka.media.StreamingEngine.prototype.destroy = function() { for (var type in this.mediaStates_) { this.cancelUpdate_(this.mediaStates_[type]); } this.playhead_ = null; this.mediaSourceEngine_ = null; this.netEngine_ = null; this.manifest_ = null; this.setupPeriodPromise_ = null; this.onChooseStreams_ = null; this.onCanSwitch_ = null; this.onError_ = null; this.onInitialStreamsSetup_ = null; this.onStartupComplete_ = null; this.canSwitchPeriod_ = null; this.canSwitchStream_ = null; this.mediaStates_ = null; this.config_ = null; this.destroyed_ = true; return Promise.resolve(); }; /** * Called by the Player to provide an updated configuration any time it changes. * Will be called at least once before init(). * * @param {shakaExtern.StreamingConfiguration} config */ shaka.media.StreamingEngine.prototype.configure = function(config) { this.config_ = config; goog.asserts.assert(this.manifest_, 'manifest_ should not be null'); var rebufferingGoal = shaka.media.StreamingEngine.getRebufferingGoal( this.manifest_, this.config_); this.playhead_.setRebufferingGoal(rebufferingGoal); }; /** * Initializes the StreamingEngine. 
* * After this function is called the StreamingEngine will call * onChooseStreams(p) when it needs to buffer Period p and onCanSwitch() when * any Stream within that Period may be switched to. * * After the StreamingEngine calls onChooseStreams(p) for the first time, it * will begin setting up the Streams returned from that function and * subsequently switch to them. However, the StreamingEngine will not begin * setting up any other Streams until at least one segment from each of the * initial set of Streams has been buffered (this reduces startup latency). * After the StreamingEngine completes this startup phase it will begin setting * up each Period's Streams (while buffering in parrallel). * * When the StreamingEngine needs to buffer the next Period it will have * already set up that Period's Streams. So, when the StreamingEngine calls * onChooseStreams(p) after the first time, the StreamingEngine will * immediately switch to the Streams returned from that function. * * @return {!Promise} */ shaka.media.StreamingEngine.prototype.init = function() { var MapUtils = shaka.util.MapUtils; goog.asserts.assert(this.config_, 'StreamingEngine configure() must be called before init()!'); // Determine which Period we must buffer. var playheadTime = this.playhead_.getTime(); var needPeriodIndex = this.findPeriodContainingTime_(playheadTime); // Get the initial set of Streams. var streamsByType = this.onChooseStreams_(this.manifest_.periods[needPeriodIndex]); if (MapUtils.empty(streamsByType)) { shaka.log.error('init: no Streams chosen'); return Promise.reject(new shaka.util.Error( shaka.util.Error.Category.STREAMING, shaka.util.Error.Code.INVALID_STREAMS_CHOSEN)); } // Setup the initial set of Streams and then begin each update cycle. After // startup completes onUpdate_() will set up the remaining Periods. 
return this.initStreams_(streamsByType).then(function() { shaka.log.debug('init: completed initial Stream setup'); // Subtlety: onInitialStreamsSetup_() may call switch() or seeked(), so we // must schedule an update beforehand so |updateTimer| is set. if (this.onInitialStreamsSetup_) { shaka.log.v1('init: calling onInitialStreamsSetup_()...'); this.onInitialStreamsSetup_(); } }.bind(this)); }; /** * Gets the current Period the stream is in. This Period may not be initialized * yet if canSwitch(period) has not been called yet. * @return {shakaExtern.Period} */ shaka.media.StreamingEngine.prototype.getCurrentPeriod = function() { var playheadTime = this.playhead_.getTime(); var needPeriodIndex = this.findPeriodContainingTime_(playheadTime); return this.manifest_.periods[needPeriodIndex]; }; /** * Gets a map of all the active streams. * @return {!Object.<string, shakaExtern.Stream>} */ shaka.media.StreamingEngine.prototype.getActiveStreams = function() { goog.asserts.assert(this.mediaStates_, 'Must be initialized'); var MapUtils = shaka.util.MapUtils; return MapUtils.map( this.mediaStates_, function(state) { return state.stream; }); }; /** * Notifies StreamingEngine that a new stream was added to the manifest. This * initializes the given stream. This returns a Promise that resolves when * the stream has been set up. * * @param {string} type * @param {shakaExtern.Stream} stream * @return {!Promise} */ shaka.media.StreamingEngine.prototype.notifyNewStream = function(type, stream) { /** @type {!Object.<string, shakaExtern.Stream>} */ var streamsByType = {}; streamsByType[type] = stream; return this.initStreams_(streamsByType); }; /** * Switches to the given Stream. |stream| may be from any StreamSet or any * Period. * * @param {string} contentType |stream|'s content type. 
* @param {shakaExtern.Stream} stream * @param {boolean=} opt_clearBuffer */ shaka.media.StreamingEngine.prototype.switch = function( contentType, stream, opt_clearBuffer) { // TODO: Change opt_clearBuffer to a number so only part of the buffer is // cleared. var mediaState = this.mediaStates_[contentType]; goog.asserts.assert(mediaState, 'switch: expected mediaState to exist'); if (!mediaState) return; // Ensure the Period is ready. var periodIndex = this.findPeriodContainingStream_(stream); var canSwitchRecord = this.canSwitchPeriod_[periodIndex]; goog.asserts.assert( canSwitchRecord && canSwitchRecord.resolved, 'switch: expected Period ' + periodIndex + ' to be ready'); if (!canSwitchRecord || !canSwitchRecord.resolved) return; // Sanity check. If the Period is ready then the Stream should be ready too. canSwitchRecord = this.canSwitchStream_[stream.id]; goog.asserts.assert(canSwitchRecord && canSwitchRecord.resolved, 'switch: expected Stream ' + stream.id + ' to be ready'); if (!canSwitchRecord || !canSwitchRecord.resolved) return; if (mediaState.stream == stream) { var streamTag = shaka.media.StreamingEngine.logPrefix_(mediaState); shaka.log.debug('switch: Stream ' + streamTag + ' already active'); return; } mediaState.stream = stream; mediaState.needInitSegment = true; var streamTag = shaka.media.StreamingEngine.logPrefix_(mediaState); shaka.log.debug('switch: switching to Stream ' + streamTag); if (opt_clearBuffer) { // Ignore if we are already clearing the buffer. if (!mediaState.waitingToClearBuffer && !mediaState.clearingBuffer) { if (mediaState.performingUpdate) { // We are performing an update, so we have to wait until it's finished. // onUpdate_() will call clearBuffer_() when the update has // finished. mediaState.waitingToClearBuffer = true; } else { this.cancelUpdate_(mediaState); this.clearBuffer_(mediaState); } } } }; /** * Notifies the StreamingEngine that the playhead has moved to a valid time * within the presentation timeline. 
*/ shaka.media.StreamingEngine.prototype.seeked = function() { for (var type in this.mediaStates_) { var mediaState = this.mediaStates_[type]; var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState); if (mediaState.clearingBuffer) { // We're already clearing the buffer, so we don't need to clear the // buffer again. shaka.log.debug(logPrefix, 'seeked: already clearing the buffer'); continue; } var playheadTime = this.playhead_.getTime(); var bufferedAhead = this.mediaSourceEngine_.bufferedAheadOf( type, playheadTime, 0.1); if (bufferedAhead > 0) { // The playhead has moved into a buffered region, so we don't need to // clear the buffer. shaka.log.debug(logPrefix, 'seeked: buffered seek:', 'playheadTime=' + playheadTime, 'bufferedAhead=' + bufferedAhead); mediaState.waitingToClearBuffer = false; continue; } // The playhead has moved into an unbuffered region, so we might have to // clear the buffer. if (mediaState.waitingToClearBuffer) { shaka.log.debug(logPrefix, 'seeked: unbuffered seek: already waiting'); continue; } if (mediaState.performingUpdate) { // We are performing an update, so we have to wait until it's finished. // onUpdate_() will call clearBuffer_() when the update has // finished. shaka.log.debug(logPrefix, 'seeked: unbuffered seek: currently updating'); mediaState.waitingToClearBuffer = true; continue; } if (this.mediaSourceEngine_.bufferStart(type) == null) { // Nothing buffered. shaka.log.debug(logPrefix, 'seeked: unbuffered seek: nothing buffered'); if (mediaState.updateTimer == null) { // Note: an update cycle stops when we buffer to the end of the // presentation or Period, or when we raise an error. this.scheduleUpdate_(mediaState, 0); } continue; } // An update may be scheduled, but we can just cancel it and clear the // buffer right away. Note: clearBuffer_() will schedule the next update. 
shaka.log.debug(logPrefix, 'seeked: unbuffered seek: handling right now'); this.cancelUpdate_(mediaState); this.clearBuffer_(mediaState); } }; /** * Initializes the given streams and media states if required. This will * schedule updates for the given types. * * @param {!Object.<string, shakaExtern.Stream>} streamsByType * @return {!Promise} * @private */ shaka.media.StreamingEngine.prototype.initStreams_ = function(streamsByType) { var MapUtils = shaka.util.MapUtils; goog.asserts.assert(this.config_, 'StreamingEngine configure() must be called before init()!'); // Determine which Period we must buffer. var playheadTime = this.playhead_.getTime(); var needPeriodIndex = this.findPeriodContainingTime_(playheadTime); // Init MediaSourceEngine. var typeConfig = MapUtils.map(streamsByType, function(stream) { return stream.mimeType + (stream.codecs ? '; codecs="' + stream.codecs + '"' : ''); }); this.mediaSourceEngine_.init(typeConfig); this.setDuration_(); // Setup the initial set of Streams and then begin each update cycle. After // startup completes onUpdate_() will set up the remaining Periods. var streams = MapUtils.values(streamsByType); return this.setupStreams_(streams).then(function() { for (var type in streamsByType) { var stream = streamsByType[type]; if (!this.mediaStates_[type]) { this.mediaStates_[type] = { stream: stream, type: type, lastStream: null, lastSegmentReference: null, needInitSegment: true, needRebuffering: false, needPeriodIndex: needPeriodIndex, endOfStream: false, performingUpdate: false, updateTimer: null, waitingToClearBuffer: false, clearingBuffer: false }; this.scheduleUpdate_(this.mediaStates_[type], 0); } } }.bind(this)); }; /** * Sets up the given Period if necessary. Calls onError_() if an error * occurs. * * @param {number} periodIndex The Period's index. * @return {!Promise} A Promise which is resolved when the given Period is * setup. 
* @private */ shaka.media.StreamingEngine.prototype.setupPeriod_ = function(periodIndex) { var Functional = shaka.util.Functional; var canSwitchRecord = this.canSwitchPeriod_[periodIndex]; if (canSwitchRecord) { shaka.log.debug( '(all) Period ' + periodIndex + ' is being or has been set up'); goog.asserts.assert(canSwitchRecord.promise, 'promise must not be null'); return canSwitchRecord.promise; } shaka.log.debug('(all) setting up Period ' + periodIndex); canSwitchRecord = { promise: new shaka.util.PublicPromise(), resolved: false }; this.canSwitchPeriod_[periodIndex] = canSwitchRecord; var streams = this.manifest_.periods[periodIndex].streamSets .map(function(streamSet) { return streamSet.streams; }) .reduce(Functional.collapseArrays, []); // Serialize Period set up. this.setupPeriodPromise_ = this.setupPeriodPromise_.then(function() { if (this.destroyed_) return; return this.setupStreams_(streams); }.bind(this)).then(function() { if (this.destroyed_) return; this.canSwitchPeriod_[periodIndex].promise.resolve(); this.canSwitchPeriod_[periodIndex].resolved = true; shaka.log.v1('(all) setup Period ' + periodIndex); }.bind(this)).catch(function(error) { if (this.destroyed_) return; this.canSwitchPeriod_[periodIndex].promise.reject(); delete this.canSwitchPeriod_[periodIndex]; shaka.log.warning('(all) failed to setup Period ' + periodIndex); this.onError_(error); // Don't stop other Periods from being set up. }.bind(this)); return canSwitchRecord.promise; }; /** * Sets up the given Streams if necessary. Does NOT call onError_() if an * error occurs. * * @param {!Array.<!shakaExtern.Stream>} streams * @return {!Promise} * @private */ shaka.media.StreamingEngine.prototype.setupStreams_ = function(streams) { // Parallelize Stream set up. 
var async = []; for (var i = 0; i < streams.length; ++i) { var stream = streams[i]; var canSwitchRecord = this.canSwitchStream_[stream.id]; if (canSwitchRecord) { shaka.log.debug( '(all) Stream ' + stream.id + ' is being or has been set up'); async.push(canSwitchRecord.promise); } else { shaka.log.v1('(all) setting up Stream ' + stream.id); this.canSwitchStream_[stream.id] = { promise: new shaka.util.PublicPromise(), resolved: false }; async.push(stream.createSegmentIndex()); } } return Promise.all(async).then(function() { if (this.destroyed_) return; for (var i = 0; i < streams.length; ++i) { var stream = streams[i]; var canSwitchRecord = this.canSwitchStream_[stream.id]; if (!canSwitchRecord.resolved) { canSwitchRecord.promise.resolve(); canSwitchRecord.resolved = true; shaka.log.v1('(all) setup Stream ' + stream.id); } } }.bind(this)).catch(function(error) { if (this.destroyed_) return; this.canSwitchStream_[stream.id].promise.reject(); delete this.canSwitchStream_[stream.id]; return Promise.reject(error); }.bind(this)); }; /** * Sets the MediaSource's duration. * @private */ shaka.media.StreamingEngine.prototype.setDuration_ = function() { var duration = this.manifest_.presentationTimeline.getDuration(); if (duration < Number.POSITIVE_INFINITY) { this.mediaSourceEngine_.setDuration(duration); } else { // Not all platforms support infinite durations, so set a finite duration // so we can append segments and so the user agent can seek. this.mediaSourceEngine_.setDuration(Math.pow(2, 32)); } }; /** * Called when |mediaState|'s update timer has expired. * * @param {!shaka.media.StreamingEngine.MediaState_} mediaState * @private */ shaka.media.StreamingEngine.prototype.onUpdate_ = function(mediaState) { var MapUtils = shaka.util.MapUtils; if (this.destroyed_) return; var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState); // Sanity check. 
  goog.asserts.assert(
      !mediaState.performingUpdate && (mediaState.updateTimer != null),
      logPrefix + ' unexpected call to onUpdate_()');
  if (mediaState.performingUpdate || (mediaState.updateTimer == null)) return;

  goog.asserts.assert(
      !mediaState.clearingBuffer,
      logPrefix + ' onUpdate_() should not be called when clearing the buffer');
  if (mediaState.clearingBuffer) return;

  // The timer has fired; mark it consumed before doing any work.
  mediaState.updateTimer = null;

  // Handle pending buffer clears.
  if (mediaState.waitingToClearBuffer) {
    // Note: clearBuffer_() will schedule the next update.
    shaka.log.debug(logPrefix, 'skipping update and clearing the buffer');
    this.clearBuffer_(mediaState);
    return;
  }

  // Update the MediaState.
  try {
    var delay = this.update_(mediaState);
    if (delay != null) {
      this.scheduleUpdate_(mediaState, delay);
    }
  } catch (error) {
    this.onError_(error);
    return;
  }

  goog.asserts.assert(this.mediaStates_, 'must not be destroyed');
  var mediaStates = MapUtils.values(this.mediaStates_);

  // Handle startup and re-buffering: buffer whenever any MediaState needs to.
  this.playhead_.setBuffering(
      mediaStates.some(function(ms) { return ms.needRebuffering; }));

  // Check if we've buffered to the end of the Period.
  this.handlePeriodTransition_(mediaState);

  // Check if we've buffered to the end of the presentation.
  if (mediaStates.every(function(ms) { return ms.endOfStream; })) {
    shaka.log.v1(logPrefix, 'calling endOfStream()...');
    this.mediaSourceEngine_.endOfStream();
  }
};


/**
 * Updates the given MediaState.
 *
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState
 * @return {?number} The number of seconds to wait until updating again or
 *   null if another update does not need to be scheduled.
 * @throws {!shaka.util.Error} if an error occurs.
 * @private
 */
shaka.media.StreamingEngine.prototype.update_ = function(mediaState) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);

  // Compute how far we've buffered ahead of the playhead.
var playheadTime = this.playhead_.getTime(); var bufferedAhead = this.mediaSourceEngine_.bufferedAheadOf( mediaState.type, playheadTime, 0.1); goog.asserts.assert(this.manifest_, 'manifest_ should not be null'); goog.asserts.assert(this.config_, 'config_ should not be null'); var rebufferingGoal = shaka.media.StreamingEngine.getRebufferingGoal( this.manifest_, this.config_); shaka.log.v2(logPrefix, 'update_:', 'playheadTime=' + playheadTime, 'bufferedAhead=' + bufferedAhead); // If we've buffered to the buffering goal then schedule an update. var bufferingGoal = Math.max(rebufferingGoal, this.config_.bufferingGoal); if (bufferedAhead >= bufferingGoal) { shaka.log.v2(logPrefix, 'buffering goal met'); mediaState.needRebuffering = false; // Do not try to schedule the next update. Just poll twice every second. // The playback rate can change at any time, so any prediction we make now // could be terribly invalid soon. return 0.5; } // Get the next timestamp we need. var bufferEnd = this.mediaSourceEngine_.bufferEnd(mediaState.type); var timeNeeded = this.getTimeNeeded_( mediaState, playheadTime, bufferedAhead, bufferEnd); shaka.log.v2(logPrefix, 'timeNeeded=' + timeNeeded); // Check if we've buffered to the end of the presentation. if (timeNeeded >= this.manifest_.presentationTimeline.getDuration()) { // We shouldn't rebuffer if the playhead is close to the end of the // presentation. shaka.log.debug(logPrefix, 'buffered to end of presentation'); mediaState.needRebuffering = false; mediaState.endOfStream = true; return null; } mediaState.endOfStream = false; // Handle startup and re- buffering state. 
if ((!this.startupComplete_ && bufferedAhead < rebufferingGoal) || (bufferedAhead <= 1)) { shaka.log.v1(logPrefix, 'need startup or re- buffering'); mediaState.needRebuffering = true; } else if (bufferedAhead >= rebufferingGoal) { mediaState.needRebuffering = false; } var currentPeriodIndex = this.findPeriodContainingStream_(mediaState.stream); // Check if we've buffered to the end of the Period. This should be done // before checking segment availability because the new Period may become // available once it's switched to. Note that we don't use the non-existence // of SegmentReferences as an indicator to determine Period boundaries // because SegmentIndexes can provide SegmentReferences outside its Period. var needPeriodIndex = this.findPeriodContainingTime_(timeNeeded); if (needPeriodIndex != currentPeriodIndex) { shaka.log.debug(logPrefix, 'need Period ' + needPeriodIndex, 'playheadTime=' + playheadTime, 'timeNeeded=' + timeNeeded, 'currentPeriodIndex=' + currentPeriodIndex); mediaState.needPeriodIndex = needPeriodIndex; return null; } var reference = this.getSegmentReferenceNeeded_( mediaState, playheadTime, currentPeriodIndex); if (!reference) { // The segment could not be found, does not exist, or is not available. In // any case just try again... if the manifest is incomplete or is not being // updated then we'll idle forever; otherwise, we'll end up getting a // SegmentReference eventually. return 1; } this.fetchAndAppend_(mediaState, playheadTime, currentPeriodIndex, reference); return null; }; /** * Gets the next timestamp needed. Returns the playhead's position if the * buffer is empty; otherwise, returns the time at which the last segment * appended ends. * * @param {shaka.media.StreamingEngine.MediaState_} mediaState * @param {number} playheadTime * @param {number} bufferedAhead * @param {?number} bufferEnd * @return {number} The next timestamp needed. * @throws {!shaka.util.Error} if the buffer is inconsistent with our * expectations. 
 * @private
 */
shaka.media.StreamingEngine.prototype.getTimeNeeded_ = function(
    mediaState, playheadTime, bufferedAhead, bufferEnd) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);

  // Get the next timestamp we need. We must use |lastSegmentReference|
  // to determine this and not the actual buffer for two reasons:
  //   1. actual segments end slightly before their advertised end times, so
  //      the next timestamp we need is actually larger than |bufferEnd|; and
  //   2. there may be drift (the timestamps in the segments are ahead/behind
  //      of the timestamps in the manifest), but we need drift free times
  //      when comparing times against presentation and Period boundaries.
  if (bufferedAhead == 0) {
    // The playhead is in an unbuffered region.
    if (bufferEnd == null) {
      // The buffer is empty.
      if (mediaState.lastStream != null || mediaState.lastSegmentReference) {
        shaka.log.error(logPrefix, 'lastSegmentReference should be null');
        throw new shaka.util.Error(
            shaka.util.Error.Category.STREAMING,
            shaka.util.Error.Code.INCONSISTENT_BUFFER_STATE,
            mediaState.type);
      }
      return playheadTime;
    } else if (bufferEnd > playheadTime) {
      // We may find ourselves in this state for two reasons:
      //   1. there is a significant amount of positive drift; or
      //   2. the user agent seeked backwards but seeked() was not called or
      //      has not been called yet (because it's a race).
      // For case 1 we'll idle forever, and for case 2 we'll end up buffering
      // a segment, removing it, and then buffering it again (note that this
      // case should be rare).
      shaka.log.debug(logPrefix,
          'playhead in unbuffered region (behind buffer):',
          'playheadTime=' + playheadTime,
          'bufferEnd=' + bufferEnd);
    } else {
      // We may find ourselves in this state for four reasons:
      //   1. the playhead is exactly at the end of the buffer;
      //   2. the browser allowed the playhead to proceed past the end of
      //      the buffer (either under normal or accelerated playback rates);
      //   3. there is a significant amount of negative drift; or
      //   4. the user agent seeked forwards but seeked() was not called or
      //      has not been called yet (because it's a race).
      // For cases 1 and 2 we'll end up buffering the next segment we want
      // anyways, for case 3 we'll end up buffering behind the playhead until
      // we catch up, and for case 4 we'll proceed as in case 2 of the
      // previous block.
      shaka.log.debug(logPrefix,
          'playhead in unbuffered region (ahead of buffer):',
          'playheadTime=' + playheadTime,
          'bufferEnd=' + bufferEnd);
    }
  }

  // The buffer is non-empty.
  if (mediaState.lastStream == null ||
      mediaState.lastSegmentReference == null) {
    shaka.log.error(logPrefix, 'lastSegmentReference should not be null');
    throw new shaka.util.Error(
        shaka.util.Error.Category.STREAMING,
        shaka.util.Error.Code.INCONSISTENT_BUFFER_STATE,
        mediaState.type);
  }

  // Continue from where the last appended segment ends, in presentation time.
  var lastPeriodIndex =
      this.findPeriodContainingStream_(mediaState.lastStream);
  var lastPeriod = this.manifest_.periods[lastPeriodIndex];
  return lastPeriod.startTime + mediaState.lastSegmentReference.endTime;
};


/**
 * Gets the SegmentReference of the next segment needed.
 *
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState
 * @param {number} playheadTime
 * @param {number} currentPeriodIndex
 * @return {shaka.media.SegmentReference} The SegmentReference of the
 *   next segment needed, or null if a segment could not be found, does not
 *   exist, or is not available.
 * @private
 */
shaka.media.StreamingEngine.prototype.getSegmentReferenceNeeded_ = function(
    mediaState, playheadTime, currentPeriodIndex) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);

  if (mediaState.lastSegmentReference &&
      mediaState.stream == mediaState.lastStream) {
    // Something is buffered from the same Stream.
    var position = mediaState.lastSegmentReference.position + 1;
    shaka.log.v2(logPrefix, 'next position known:', 'position=' + position);
    return this.getSegmentReferenceIfAvailable_(
        mediaState, currentPeriodIndex, position);
  }

  var position;
  if (mediaState.lastSegmentReference) {
    // Something is buffered from another Stream.
    goog.asserts.assert(mediaState.lastStream,
        'lastStream should not be null');
    shaka.log.v1(logPrefix, 'next position unknown: another Stream buffered');
    var lastPeriodIndex =
        this.findPeriodContainingStream_(mediaState.lastStream);
    var lastPeriod = this.manifest_.periods[lastPeriodIndex];
    position = this.lookupSegmentPosition_(
        mediaState,
        lastPeriod.startTime + mediaState.lastSegmentReference.endTime,
        currentPeriodIndex);
  } else {
    // Nothing is buffered.
    goog.asserts.assert(!mediaState.lastStream, 'lastStream should be null');
    shaka.log.v1(logPrefix, 'next position unknown: nothing buffered');
    position = this.lookupSegmentPosition_(
        mediaState, playheadTime, currentPeriodIndex);
  }
  if (position == null) return null;

  // If there's positive drift then we need to get the previous segment;
  // however, we don't actually know how much drift there is, so we must
  // unconditionally get the previous segment. If it turns out that there's
  // non-positive drift then we'll just end up buffering behind the playhead
  // a little more than we needed.
  var optimalPosition = Math.max(0, position - 1);
  var reference =
      this.getSegmentReferenceIfAvailable_(
          mediaState, currentPeriodIndex, optimalPosition) ||
      this.getSegmentReferenceIfAvailable_(
          mediaState, currentPeriodIndex, position);
  return reference;
};


/**
 * Looks up the position of the segment containing the given timestamp.
 *
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState
 * @param {number} presentationTime The timestamp needed, relative to the
 *   start of the presentation.
 * @param {number} currentPeriodIndex
 * @return {?number} A segment position, or null if a segment was not found.
 * @private
 */
shaka.media.StreamingEngine.prototype.lookupSegmentPosition_ = function(
    mediaState, presentationTime, currentPeriodIndex) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);
  var currentPeriod = this.manifest_.periods[currentPeriodIndex];

  shaka.log.debug(logPrefix,
      'looking up segment:',
      'presentationTime=' + presentationTime,
      'currentPeriod.startTime=' + currentPeriod.startTime);

  // Streams use Period-relative times, so convert before looking up.
  var lookupTime = Math.max(0, presentationTime - currentPeriod.startTime);
  var position = mediaState.stream.findSegmentPosition(lookupTime);
  if (position == null) {
    shaka.log.warning(logPrefix,
        'cannot find segment:',
        'currentPeriod.startTime=' + currentPeriod.startTime,
        'lookupTime=' + lookupTime);
  }
  return position;
};


/**
 * Gets the SegmentReference at the given position if it's available.
 *
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState
 * @param {number} currentPeriodIndex
 * @param {number} position
 * @return {shaka.media.SegmentReference}
 *
 * @private
 */
shaka.media.StreamingEngine.prototype.getSegmentReferenceIfAvailable_ =
    function(mediaState, currentPeriodIndex, position) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);
  var currentPeriod = this.manifest_.periods[currentPeriodIndex];

  var reference = mediaState.stream.getSegmentReference(position);
  if (!reference) {
    shaka.log.v1(logPrefix,
        'segment does not exist:',
        'currentPeriod.startTime=' + currentPeriod.startTime,
        'position=' + position);
    return null;
  }

  // Reject segments that fall outside the segment availability window
  // (compare in presentation time, hence the Period start offsets).
  var timeline = this.manifest_.presentationTimeline;
  var availabilityStart = timeline.getSegmentAvailabilityStart();
  var availabilityEnd = timeline.getSegmentAvailabilityEnd();

  if ((currentPeriod.startTime + reference.endTime < availabilityStart) ||
      (currentPeriod.startTime + reference.startTime > availabilityEnd)) {
    shaka.log.v2(logPrefix,
        'segment is not available:',
        'currentPeriod.startTime=' + currentPeriod.startTime,
        'reference.startTime=' + reference.startTime,
        'reference.endTime=' + reference.endTime,
        'availabilityStart=' + availabilityStart,
        'availabilityEnd=' + availabilityEnd);
    return null;
  }

  return reference;
};


/**
 * Fetches and appends the given segment; sets up the given MediaState's
 * associated SourceBuffer and evicts segments if either are required
 * beforehand. Schedules another update after completing successfully.
 *
 * @param {!shaka.media.StreamingEngine.MediaState_} mediaState
 * @param {number} playheadTime
 * @param {number} currentPeriodIndex The index of the current Period.
 * @param {!shaka.media.SegmentReference} reference
 * @private
 */
shaka.media.StreamingEngine.prototype.fetchAndAppend_ = function(
    mediaState, playheadTime, currentPeriodIndex, reference) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);
  var currentPeriod = this.manifest_.periods[currentPeriodIndex];

  shaka.log.v1(logPrefix,
      'fetchAndAppend_:',
      'playheadTime=' + playheadTime,
      'currentPeriod.startTime=' + currentPeriod.startTime,
      'reference.position=' + reference.position,
      'reference.startTime=' + reference.startTime,
      'reference.endTime=' + reference.endTime);

  // Subtlety: The playhead may move while asynchronous update operations are
  // in progress, so we should avoid calling playhead_.getTime() in any
  // callbacks. Furthermore, switch() may be called at any time, so we should
  // also avoid using mediaState.stream or mediaState.needInitSegment in any
  // callbacks too.
  var stream = mediaState.stream;

  // Compute the append window end.
var followingPeriod = this.manifest_.periods[currentPeriodIndex + 1]; var appendWindowEnd = null; if (followingPeriod) { appendWindowEnd = followingPeriod.startTime; } else { appendWindowEnd = this.manifest_.presentationTimeline.getDuration(); } goog.asserts.assert( (appendWindowEnd == null) || (reference.startTime <= appendWindowEnd), logPrefix + ' segment should start before append window end'); var initSourceBuffer = this.initSourceBuffer_(mediaState, currentPeriodIndex, appendWindowEnd); mediaState.performingUpdate = true; // We may set |needInitSegment| to true in switch(), so set it to false here, // since we want it to remain true if switch() is called. mediaState.needInitSegment = false; shaka.log.v2(logPrefix, 'fetching segment'); var fetchSegment = this.fetch_(reference); Promise.all([initSourceBuffer, fetchSegment]).then(function(results) { if (this.destroyed_) return; return this.append_(mediaState, playheadTime, stream, reference, results[1]); }.bind(this)).then(function() { mediaState.performingUpdate = false; // Update right away. this.scheduleUpdate_(mediaState, 0); // Subtlety: handleStartup_() calls onStartupComplete_() which may call // switch() or seeked(), so we must schedule an update beforehand so // |updateTimer| is set. if (!this.destroyed_) this.handleStartup_(mediaState, stream); shaka.log.v1(logPrefix, 'finished fetch and append'); }.bind(this)).catch(function(error) { if (this.destroyed_) return; this.onError_(error); }.bind(this)); }; /** * Sets the given MediaState's associated SourceBuffer's timestamp offset and * init segment if either are required. If an error occurs then neither the * timestamp offset or init segment are unset, since another call to switch() * will end up superseding them. 
 *
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState
 * @param {number} currentPeriodIndex
 * @param {?number} appendWindowEnd
 * @return {!Promise}
 * @private
 */
shaka.media.StreamingEngine.prototype.initSourceBuffer_ = function(
    mediaState, currentPeriodIndex, appendWindowEnd) {
  if (!mediaState.needInitSegment) return Promise.resolve();

  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);
  var currentPeriod = this.manifest_.periods[currentPeriodIndex];

  // If we need an init segment then the Stream switched, so we've either
  // changed bitrates, Periods, or both. If we've changed Periods then we must
  // set a new timestamp offset and append window end. Note that by setting
  // these values here, we avoid having to co-ordinate ongoing updates, which
  // we would have to do if we instead set them in switch().
  var timestampOffset =
      currentPeriod.startTime - mediaState.stream.presentationTimeOffset;
  shaka.log.v1(logPrefix, 'setting timestamp offset to ' + timestampOffset);
  var setTimestampOffset = this.mediaSourceEngine_.setTimestampOffset(
      mediaState.type, timestampOffset);

  if (appendWindowEnd != null) {
    shaka.log.v1(logPrefix, 'setting append window end to ' + appendWindowEnd);
    // NOTE(review): |setAppendWindowEnd| is declared with var inside this
    // branch but also assigned in the else branch below; this works only
    // because of var hoisting. Consider declaring it before the if/else.
    var setAppendWindowEnd = this.mediaSourceEngine_.setAppendWindowEnd(
        mediaState.type, appendWindowEnd);
  } else {
    setAppendWindowEnd = Promise.resolve();
  }

  if (!mediaState.stream.initSegmentReference) {
    // The Stream is self initializing.
    return Promise.all([setTimestampOffset, setAppendWindowEnd]);
  }

  shaka.log.v1(logPrefix, 'fetching init segment');
  var fetchInit = this.fetch_(mediaState.stream.initSegmentReference);
  var appendInit = fetchInit.then(function(initSegment) {
    if (this.destroyed_) return;
    shaka.log.v1(logPrefix, 'appending init segment');
    return this.mediaSourceEngine_.appendBuffer(
        mediaState.type, initSegment, null /* startTime */, null /* endTime */);
  }.bind(this));

  return Promise.all([setTimestampOffset, setAppendWindowEnd, appendInit]);
};


/**
 * Appends the given segment and evicts content if required to append.
 *
 * @param {!shaka.media.StreamingEngine.MediaState_} mediaState
 * @param {number} playheadTime
 * @param {shakaExtern.Stream} stream
 * @param {!shaka.media.SegmentReference} reference
 * @param {!ArrayBuffer} segment
 * @return {!Promise}
 * @private
 */
shaka.media.StreamingEngine.prototype.append_ = function(
    mediaState, playheadTime, stream, reference, segment) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);

  // Evict first so there is room to append.
  return this.evict_(mediaState, playheadTime).then(function() {
    if (this.destroyed_) return;
    shaka.log.v1(logPrefix, 'appending media segment');
    return this.mediaSourceEngine_.appendBuffer(
        mediaState.type, segment, reference.startTime, reference.endTime);
  }.bind(this)).then(function() {
    if (this.destroyed_) return;
    shaka.log.v2(logPrefix, 'appended media segment');
    // We must use |stream| because switch() may have been called.
    mediaState.lastStream = stream;
    mediaState.lastSegmentReference = reference;
    return Promise.resolve();
  }.bind(this));
};


/**
 * Evicts media to meet the max buffer behind limit.
 *
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState
 * @param {number} playheadTime
 * @return {!Promise}
 * @private
 */
shaka.media.StreamingEngine.prototype.evict_ = function(
    mediaState, playheadTime) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);
  shaka.log.v2(logPrefix, 'checking buffer length');

  var startTime = this.mediaSourceEngine_.bufferStart(mediaState.type);
  if (startTime == null) {
    shaka.log.v2(logPrefix,
        'buffer behind okay because nothing buffered:',
        'playheadTime=' + playheadTime,
        'bufferBehind=' + this.config_.bufferBehind);
    return Promise.resolve();
  }
  var bufferedBehind = playheadTime - startTime;

  var overflow = bufferedBehind - this.config_.bufferBehind;
  if (overflow <= 0) {
    shaka.log.v2(logPrefix,
        'buffer behind okay:',
        'playheadTime=' + playheadTime,
        'bufferedBehind=' + bufferedBehind,
        'bufferBehind=' + this.config_.bufferBehind,
        'underflow=' + (-overflow));
    return Promise.resolve();
  }

  shaka.log.v1(logPrefix,
      'buffer behind too large:',
      'playheadTime=' + playheadTime,
      'bufferedBehind=' + bufferedBehind,
      'bufferBehind=' + this.config_.bufferBehind,
      'overflow=' + overflow);

  // Remove exactly the overflow from the back of the buffer.
  return this.mediaSourceEngine_.remove(
      mediaState.type, startTime, startTime + overflow).then(function() {
    if (this.destroyed_) return;
    shaka.log.v1(logPrefix, 'evicted ' + overflow + ' seconds');
  }.bind(this));
};


/**
 * Sets up all known Periods when startup completes; otherwise, does nothing.
 *
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState The last
 *   MediaState updated.
 * @param {shakaExtern.Stream} stream
 * @private
 */
shaka.media.StreamingEngine.prototype.handleStartup_ = function(
    mediaState, stream) {
  var Functional = shaka.util.Functional;
  var MapUtils = shaka.util.MapUtils;
  if (this.startupComplete_) return;

  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);

  goog.asserts.assert(this.mediaStates_, 'must not be destroyed');
  var mediaStates = MapUtils.values(this.mediaStates_);
  this.startupComplete_ = mediaStates.every(function(ms) {
    // Startup completes once we have buffered at least one segment from each
    // MediaState.
    return !ms.waitingToClearBuffer &&
           !ms.clearingBuffer &&
           ms.lastSegmentReference;
  });

  if (!this.startupComplete_) return;

  shaka.log.debug(logPrefix, 'startup complete');

  // We must use |stream| because switch() may have been called.
  var currentPeriodIndex = this.findPeriodContainingStream_(stream);

  goog.asserts.assert(
      mediaStates.every(function(ms) {
        return ms.needPeriodIndex == currentPeriodIndex;
      }),
      logPrefix + ' expected all MediaStates to need same Period');

  // Setup the current Period if necessary, which is likely since the current
  // Period is probably the initial one.
  if (!this.canSwitchPeriod_[currentPeriodIndex]) {
    this.setupPeriod_(currentPeriodIndex).then(function() {
      shaka.log.v1(logPrefix, 'calling onCanSwitch_()...');
      this.onCanSwitch_();
    }.bind(this)).catch(Functional.noop);
  }

  // Now setup all known Periods.
  for (var i = 0; i < this.manifest_.periods.length; ++i) {
    this.setupPeriod_(i).catch(Functional.noop);
  }

  if (this.onStartupComplete_) {
    shaka.log.v1(logPrefix, 'calling onStartupComplete_()...');
    this.onStartupComplete_();
  }
};


/**
 * Calls onChooseStreams_() when necessary.
 *
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState The last
 *   MediaState updated.
 * @private
 */
shaka.media.StreamingEngine.prototype.handlePeriodTransition_ = function(
    mediaState) {
  var Functional = shaka.util.Functional;
  var MapUtils = shaka.util.MapUtils;
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);

  var currentPeriodIndex = this.findPeriodContainingStream_(mediaState.stream);
  if (mediaState.needPeriodIndex == currentPeriodIndex) return;

  var needPeriodIndex = mediaState.needPeriodIndex;

  goog.asserts.assert(this.mediaStates_, 'must not be destroyed');
  var mediaStates = MapUtils.values(this.mediaStates_);

  // Only call onChooseStreams_() when all MediaStates need the same Period.
  var needSamePeriod = mediaStates.every(function(ms) {
    return ms.needPeriodIndex == needPeriodIndex;
  });
  if (!needSamePeriod) {
    shaka.log.debug(
        logPrefix, 'not all MediaStates need Period ' + needPeriodIndex);
    return;
  }

  // Only call onChooseStreams_() once per Period transition.
  var allAreIdle = mediaStates.every(shaka.media.StreamingEngine.isIdle_);
  if (!allAreIdle) {
    shaka.log.debug(
        logPrefix,
        'all MediaStates need Period ' + needPeriodIndex + ', ' +
        'but not all MediaStates are idle');
    return;
  }

  shaka.log.debug(logPrefix, 'all need Period ' + needPeriodIndex);

  // Ensure the Period which we need to buffer is setup and then call
  // onChooseStreams_().
  this.setupPeriod_(needPeriodIndex).then(function() {
    if (this.destroyed_) return;
    var needPeriod = this.manifest_.periods[needPeriodIndex];

    shaka.log.v1(logPrefix, 'calling onChooseStreams_()...');
    var streamsByType = this.onChooseStreams_(needPeriod);

    // Vet |streamsByType| before switching.
    for (var type in this.mediaStates_) {
      if (streamsByType[type]) continue;
      // A Stream is required for every content type we currently manage.
      shaka.log.error(logPrefix,
          'invalid Streams chosen: missing ' + type + ' Stream');
      this.onError_(new shaka.util.Error(
          shaka.util.Error.Category.STREAMING,
          shaka.util.Error.Code.INVALID_STREAMS_CHOSEN));
      return;
    }

    for (var type in streamsByType) {
      if (this.mediaStates_[type]) continue;
      // Every chosen Stream must map to a content type we manage.
      shaka.log.error(logPrefix,
          'invalid Streams chosen: unusable ' + type + ' Stream');
      this.onError_(new shaka.util.Error(
          shaka.util.Error.Category.STREAMING,
          shaka.util.Error.Code.INVALID_STREAMS_CHOSEN));
      return;
    }

    for (var type in this.mediaStates_) {
      var stream = streamsByType[type];
      this.switch(type, stream);
      var mediaState = this.mediaStates_[type];
      if (shaka.media.StreamingEngine.isIdle_(mediaState)) {
        this.scheduleUpdate_(mediaState, 0);
      } else {
        // TODO: Write unit tests to cover this case.
        shaka.log.debug(logPrefix,
            'seeked() was called while waiting for setupPeriod_()');
      }
    }

    // We've already set up the Period so call onCanSwitch_() right now.
    shaka.log.v1(logPrefix, 'calling onCanSwitch_()...');
    this.onCanSwitch_();
  }.bind(this)).catch(Functional.noop);
};


/**
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState
 * @return {boolean} True if the given MediaState is idle; otherwise, return
 *   false.
 * @private
 */
shaka.media.StreamingEngine.isIdle_ = function(mediaState) {
  return !mediaState.performingUpdate &&
         (mediaState.updateTimer == null) &&
         !mediaState.waitingToClearBuffer &&
         !mediaState.clearingBuffer;
};


/**
 * @param {number} time The time, in seconds, relative to the start of the
 *   presentation.
 * @return {number} The index of the Period containing |time|, i.e. the last
 *   Period whose startTime is at or before |time| (0 if |time| precedes all
 *   Period start times).
 * @private
 */
shaka.media.StreamingEngine.prototype.findPeriodContainingTime_ = function(
    time) {
  // Search backwards so the first match is the latest Period started.
  for (var i = this.manifest_.periods.length - 1; i > 0; --i) {
    var period = this.manifest_.periods[i];
    if (time >= period.startTime) return i;
  }
  return 0;
};


/**
 * @param {!shakaExtern.Stream} stream
 * @return {number} The index of the Period which contains |stream|, or -1 if
 *   no Period contains |stream|.
 * @private
 */
shaka.media.StreamingEngine.prototype.findPeriodContainingStream_ = function(
    stream) {
  for (var i = 0; i < this.manifest_.periods.length; ++i) {
    var period = this.manifest_.periods[i];
    for (var j = 0; j < period.streamSets.length; ++j) {
      var streamSet = period.streamSets[j];
      var index = streamSet.streams.indexOf(stream);
      if (index >= 0) return i;
    }
  }
  return -1;
};


/**
 * Fetches the given segment.
 *
 * @param {(!shaka.media.InitSegmentReference|!shaka.media.SegmentReference)}
 *   reference
 *
 * @return {!Promise.<!ArrayBuffer>}
 * @private
 */
shaka.media.StreamingEngine.prototype.fetch_ = function(reference) {
  var requestType = shaka.net.NetworkingEngine.RequestType.SEGMENT;
  var request = shaka.net.NetworkingEngine.makeRequest(
      reference.uris, this.config_.retryParameters);

  // Set Range header. Note that some web servers don't accept Range headers,
  // so don't set one if it's not strictly required.
  if ((reference.startByte != 0) || (reference.endByte != null)) {
    var range = 'bytes=' + reference.startByte + '-';
    if (reference.endByte != null) range += reference.endByte;
    request.headers['Range'] = range;
  }

  shaka.log.v2('fetching: reference=' + reference);
  var p = this.netEngine_.request(requestType, request);
  return p.then(function(response) {
    return response.data;
  });
};


/**
 * Clears the buffer and schedules another update.
 *
 * @param {!shaka.media.StreamingEngine.MediaState_} mediaState
 * @private
 */
shaka.media.StreamingEngine.prototype.clearBuffer_ = function(
    mediaState) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);

  goog.asserts.assert(
      !mediaState.performingUpdate && (mediaState.updateTimer == null),
      logPrefix + ' unexpected call to clearBuffer_()');

  mediaState.waitingToClearBuffer = false;
  mediaState.clearingBuffer = true;

  shaka.log.debug(logPrefix, 'clearing buffer');

  this.mediaSourceEngine_.clear(mediaState.type).then(function() {
    if (this.destroyed_) return;
    shaka.log.debug(logPrefix, 'cleared buffer');
    // Forget what was buffered so the next update looks up a fresh position.
    mediaState.lastStream = null;
    mediaState.lastSegmentReference = null;
    mediaState.clearingBuffer = false;
    this.scheduleUpdate_(mediaState, 0);
  }.bind(this));
};


/**
 * Schedules |mediaState|'s next update.
 *
 * @param {!shaka.media.StreamingEngine.MediaState_} mediaState
 * @param {number} delay The delay in seconds.
 * @private
 */
shaka.media.StreamingEngine.prototype.scheduleUpdate_ = function(
    mediaState, delay) {
  var logPrefix = shaka.media.StreamingEngine.logPrefix_(mediaState);
  shaka.log.v2(logPrefix, 'updating in ' + delay + ' seconds');
  goog.asserts.assert(mediaState.updateTimer == null,
      logPrefix + ' did not expect update to be scheduled');
  // setTimeout takes milliseconds; |delay| is in seconds.
  mediaState.updateTimer = window.setTimeout(
      this.onUpdate_.bind(this, mediaState), delay * 1000);
};


/**
 * Cancels |mediaState|'s next update if one exists.
 *
 * @param {!shaka.media.StreamingEngine.MediaState_} mediaState
 * @private
 */
shaka.media.StreamingEngine.prototype.cancelUpdate_ = function(mediaState) {
  if (mediaState.updateTimer != null) {
    window.clearTimeout(mediaState.updateTimer);
    mediaState.updateTimer = null;
  }
};


/**
 * @param {shaka.media.StreamingEngine.MediaState_} mediaState
 * @return {string} A log prefix of the form ($CONTENT_TYPE:$STREAM_ID), e.g.,
 *   "(audio:5)" or "(video:hd)".
 * @private
 */
shaka.media.StreamingEngine.logPrefix_ = function(mediaState) {
  return '(' + mediaState.type + ':' + mediaState.stream.id + ')';
};
apache-2.0
Cobrand/toyunda-player-rs
web/js/toyunda-manager.js
9592
/**
 * Returns a human-readable "time since" string (French UI) for a Unix
 * timestamp in seconds, or "Jamais" (never) when the timestamp is null.
 */
function human_since(timestamp) {
  if (timestamp == null) {
    return "Jamais";
  }
  var cur_timestamp = Date.now() / 1000;
  var delta = (cur_timestamp - timestamp) / 3600; // hours elapsed
  if (delta < 1) {
    return "< 1h";
  } else if (delta < 48) {
    return Math.floor(delta) + "h";
  } else {
    return Math.floor(delta / 24) + "j";
  }
}

/**
 * Sums the video_duration of every entry of `list` and formats the total
 * with human_duration (defined elsewhere). An entry with duration 0 is
 * treated as "unknown" and the result is prefixed with ">".
 */
function sum_duration_to_string(list) {
  var total_duration = 0;
  var unsure = false;
  for (var i = 0; i < list.length; i++) {
    var el = list[i];
    if (el.video_duration == 0) {
      unsure = true;
    } else {
      total_duration += el.video_duration;
    }
  }
  return (unsure ? ">" : "") + human_duration(total_duration);
}

/**
 * Builds a display name from a song_info record
 * (e.g. "[JAP] Some Anime OP2 - Song Title").
 * Returns null when song_info carries no usable title information.
 */
function format_info2name(song_info) {
  var s = "";
  if (song_info.media_title) {
    s += song_info.media_title + " ";
    if (song_info.music_type) {
      var music_t = song_info.music_type;
      if (music_t.toLowerCase() == "ending") {
        s += "ED";
      } else if (music_t.toLowerCase() == "opening") {
        s += "OP";
      } else if (music_t.toLowerCase() == "insert") {
        s += "INS";
      } else {
        s += music_t.toUpperCase();
      }
      if (song_info.music_number) {
        s += song_info.music_number;
      }
      if (song_info.version) {
        // Non-numeric versions in parentheses, numeric ones as "vN".
        if (isNaN(song_info.version)) {
          s += " (" + song_info.version + ")";
        } else {
          s += "v" + song_info.version;
        }
      }
    }
  }
  if (song_info.song_name) {
    if (s == "") {
      s = song_info.song_name;
    } else {
      s += " - " + song_info.song_name;
    }
  }
  if (s.length != 0) {
    if (song_info.language) {
      // Every language is displayed as an upper-cased "[XXX] " prefix; the
      // previous special cases for "JAP"/"ENG"/"FR" produced identical text.
      s = "[" + song_info.language.toUpperCase() + "] " + s;
    }
  } else {
    return null;
  }
  return s;
}

/**
 * Posts a player command to the server, refreshes the UI state afterwards,
 * then forwards the response to the optional callback.
 */
function toyunda_command(command_type, id, response_fun, error_fun) {
  AJAX.post("/api/command", {
    command: command_type,
    id: id
  }, function(s, a) {
    update();
    if (response_fun) {
      response_fun(s, a);
    }
  }, error_fun);
}

/**
 * Display name for an entry: prefers the metadata-derived name, falls back
 * to the video file's base name (path and extension stripped).
 */
function format_name(song_info, video_path) {
  var candidate = format_info2name(song_info);
  if (candidate == null) {
    candidate = video_path.replace(/^.*[\\\/]/, '');
    candidate = candidate.split('.')[0];
  }
  return candidate;
}

/** Display name plus the duration in brackets. */
function format_fullinfo(video_meta) {
  return format_name(video_meta.song_info, video_meta.video_path) +
      " [" + (video_meta.video_duration == 0 ?
              "Durée inconnue" : human_duration(video_meta.video_duration)) + "]";
}

var vue = new Vue({
  el: '#app',
  data: {
    screen_size: "small", // small, large or xlarge
    panel: 0,
    search: "",
    playlist: [],
    listing: [],
    currently_playing: null,
    draft_indexes: [],        // indexes into `listing` for the draft panel
    announcement_message: "",
    connected: true
  },
  computed: {
    // Listing filtered by the search box: every whitespace-separated token
    // must match the entry's formatted_name (case-insensitive).
    filtered_list: function() {
      var search = this.search;
      var listing = this.listing;
      if (search != "") {
        var searches = search.match(/\S+/g);
        if (searches != null) {
          for (var i = 0; i < searches.length; i++) {
            var search_regexp = new RegExp(searches[i], 'i');
            listing = listing.filter(function(e) {
              return search_regexp.test(e.formatted_name);
            });
          }
        }
      }
      return listing;
    },
    now_playing: function() {
      // FIX: currently_playing is null between songs; without this guard the
      // computed property threw inside format_fullinfo.
      if (this.currently_playing == null) {
        return "";
      }
      return format_fullinfo(this.currently_playing);
    },
    play_next_value: function() {
      if (this.currently_playing == null) {
        return "Commencer";
      } else {
        return "Suivant";
      }
    },
    draft_panel_disabled: function() {
      // FIX: compared the array itself to a number; the length was intended.
      return this.draft_indexes.length < 1;
    },
    play_next_disabled: function() {
      return (this.currently_playing == null && this.playlist.length == 0);
    },
    stop_button_disabled: function() {
      return this.currently_playing == null;
    },
    announcement_button_disabled: function() {
      return this.announcement_message.length <= 0;
    },
    // Draft entries resolved from their listing indexes.
    draft: function() {
      var listing = this.listing;
      return this.draft_indexes.map(function(e) {
        return listing[e];
      });
    },
    draft_duration: function() {
      if (this.draft_indexes.length == 0) {
        return null;
      } else {
        return sum_duration_to_string(this.draft);
      }
    },
    playlist_duration: function() {
      if (this.playlist.length == 0) {
        return null;
      }
      return sum_duration_to_string(this.playlist);
    },
    panel_half: function() {
      return this.panel == 0;
    },
    panel_class: function() {
      return {
        size_small: this.screen_size == "small",
        size_large: this.screen_size == "large",
        size_xlarge: this.screen_size == "xlarge"
      };
    }
  },
  methods: {
    format_name: format_name,
    format_info2name: format_info2name,
    // Move a draft entry one slot up/down (Vue.set keeps the array reactive).
    draft_el_up: function(index) {
      if (index > 0) {
        var to_be_replaced = this.draft_indexes[index - 1];
        Vue.set(this.draft_indexes, index - 1, this.draft_indexes[index]);
        Vue.set(this.draft_indexes, index, to_be_replaced);
      }
    },
    draft_el_down: function(index) {
      if (index < this.draft_indexes.length - 1) {
        var to_be_replaced = this.draft_indexes[index + 1];
        Vue.set(this.draft_indexes, index + 1, this.draft_indexes[index]);
        Vue.set(this.draft_indexes, index, to_be_replaced);
      }
    },
    // Push one draft entry to the FRONT of the server queue, then drop it
    // from the draft on success.
    draft_transfer_beginning: function(index) {
      AJAX.post("/api/command", {
        command: "add_to_queue",
        id: this.draft_indexes[index],
        pos: 0
      }, function() {
        this.draft_indexes.splice(index, 1);
      }.bind(this));
    },
    // Push one draft entry to the END of the server queue.
    draft_transfer_single: function(index) {
      toyunda_command("add_to_queue", this.draft_indexes[index], function() {
        this.draft_indexes.splice(index, 1);
      }.bind(this));
    },
    draft_delete: function(index) {
      this.draft_indexes.splice(index, 1);
    },
    add_to_queue: function(entry) {
      toyunda_command("add_to_queue", entry.index);
    },
    add_to_draft: function(entry) {
      this.draft_indexes.push(entry.index);
    },
    queue_delete_at: function(index) {
      AJAX.post("/api/command", {
        command: "delete_from_queue",
        pos: index
      });
    },
    play_next: function() {
      toyunda_command("play_next");
    },
    stop_current: function() {
      toyunda_command("stop");
    },
    clear_queue: function() {
      toyunda_command("clear_queue");
    },
    toggle_subtitles: function() {
      toyunda_command("toggle_subtitles");
    },
    // Confirmation dialog before closing the player.
    quit: function() {
      swal({
        title: 'Quitter ?',
        text: "Le lecteur se fermera",
        type: 'warning',
        showCancelButton: true,
        confirmButtonColor: '#3085d6',
        cancelButtonColor: '#d33',
        confirmButtonText: 'Oui'
      }).then(function() {
        toyunda_command("quit");
      });
    },
    quit_on_finish: function() {
      toyunda_command("quit_on_finish");
    },
    pause_after_next: function() {
      // NOTE(review): this sends the same command as quit_on_finish — looks
      // like a copy-paste slip; confirm against the server's command list
      // before changing the command string.
      toyunda_command("quit_on_finish");
    },
    draft_shuffle: function() {
      shuffle(this.draft_indexes);
      // push/pop trick: shuffle() mutates in place in a way Vue cannot
      // observe, so force a change notification.
      this.draft_indexes.push(-1);
      this.draft_indexes.pop();
    },
    // Transfer the whole draft to the server queue, then empty the draft.
    draft_transfer: function() {
      AJAX.post("/api/command", {
        command: "add_multiple_to_queue",
        list: this.draft_indexes
      }, function() {
        this.draft_indexes.splice(0);
      }.bind(this));
    },
    send_announcement: function() {
      AJAX.post("/api/command", {
        command: "announcement",
        text: this.announcement_message
      }, function() {
        // FIX: the data field is announcement_message; the previous code
        // assigned a nonexistent `announcement` field, so the input box was
        // never cleared after sending.
        this.announcement_message = "";
      }.bind(this));
    },
    draft_add_random: function() {
      var index = Math.floor(Math.random() * this.listing.length);
      this.draft_indexes.push(index);
    },
    draft_remove_last: function() {
      if (this.draft_indexes.length > 0) {
        this.draft_indexes.pop();
      }
    },
    set_panel: function(i) {
      this.panel = i;
    }
  }
});

// Keyboard shortcuts: 'a' (charCode 97) adds a random entry to the draft,
// 'x' (charCode 120) removes the last draft entry.
document.addEventListener("keypress", function(event) {
  if (event.charCode == 97) {
    vue.draft_add_random();
  } else if (event.charCode == 120) {
    vue.draft_remove_last();
  }
});

/**
 * Polls /api/state and refreshes the Vue model (current song, playlist,
 * connectivity flag). Display fields are precomputed on each entry.
 */
function update() {
  AJAX.get("/api/state", function(status, answer) {
    if (is_status_error(status)) {
      console.error("Error when retrieving state : " + answer);
    } else {
      var playing_state = answer.playing_state;
      if (playing_state.playing) {
        vue.currently_playing = playing_state.playing;
      } else {
        vue.currently_playing = null;
      }
      var playlist = answer.playlist;
      playlist = playlist.map(function(e, i) {
        e.formatted_name = format_name(e.song_info, e.video_path);
        e.formatted_fullinfo = format_fullinfo(e);
        e.human_duration = human_duration(e.video_duration);
        e.human_last_played = human_since(e.last_played);
        e.index = i;
        return e;
      });
      vue.playlist = playlist;
    }
    vue.connected = true;
  }, function() {
    vue.connected = false;
  });
}

setInterval(update, 2000);

// Retrieve the listing once; entries are enriched with precomputed search
// and display fields.
AJAX.get("/api/listing", function(status, answer) {
  if (is_status_error(status)) {
    console.error("Error " + status + " when retrieving listing : " + answer);
  } else {
    if (Array.isArray(answer)) {
      var len = answer.length;
      for (var i = 0; i < len; i++) {
        var entry = answer[i];
        entry.search_string = "";
        entry.search_string += (entry.artist || "") + " ";
        entry.search_string += (entry.year || "") + " ";
        entry.search_string += (entry.language || "") + " ";
        if (entry.alt_media_titles) {
          entry.alt_media_titles.forEach(function(e) {
            entry.search_string += e + " ";
          });
        }
        entry.search_string += format_name(entry.song_info, entry.video_path);
        entry.formatted_name = format_name(entry.song_info, entry.video_path);
        entry.index = i;
        entry.human_duration = human_duration(entry.video_duration);
        entry.formatted_fullinfo = format_fullinfo(entry);
        entry.human_last_played = human_since(entry.last_played);
      }
      vue.listing = answer;
    } else {
      console.error("Error when updating listing ; answer is not an Array");
      console.error(answer);
    }
  }
});

update();
apache-2.0
aspuru-guzik-group/mission_control
mc/houston/subcommands/info.py
1144
from ._base_subcommand import BaseSubcommand


class Subcommand(BaseSubcommand):
    """Houston subcommand reporting info on MC records.

    With ``--key`` it returns the single matching record; without it, it
    returns per-record-type count summaries.
    """

    def add_arguments(self, parser=None):
        """Register the --key option on the argument parser."""
        parser.add_argument('--key', help="key for a single object")

    def _run(self):
        """Return info for one key, or summaries for all record types."""
        key = self.parsed_args.get('key')
        if key:
            return self._get_info_for_key(key=key)
        return self._get_mc_record_type_summaries()

    def _get_info_for_key(self, key=None):
        """Look up a single record; the record type is the key's prefix."""
        record_type = key.split(':')[0]
        if record_type not in ['flow', 'job', 'queue', 'lock']:
            raise Exception("Invalid key '%s'" % key)
        return self.utils.db.get_item_by_key(
            item_type=record_type, key=key)

    def _get_mc_record_type_summaries(self):
        """Summaries keyed by record type (flows and jobs only)."""
        return {
            rec_type: self._get_mc_record_type_summary(record_type=rec_type)
            for rec_type in ['flow', 'job']
        }

    def _get_mc_record_type_summary(self, record_type=None):
        """Count of records of the given type."""
        items = self.utils.db.query_items(item_type=record_type)
        return {'count': len(items)}
apache-2.0
JohnyEngine/CNC
opencamlib/src/dropcutter/pathdropcutter.hpp
2174
/* $Id$ * * Copyright 2010 Anders Wallin (anders.e.e.wallin "at" gmail.com) * * This file is part of OpenCAMlib. * * OpenCAMlib is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * OpenCAMlib is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with OpenCAMlib. If not, see <http://www.gnu.org/licenses/>. */ #ifndef PATHDROPCUTTER_H #define PATHDROPCUTTER_H #include <iostream> #include <string> #include <list> #include "batchdropcutter.hpp" #include "path.hpp" #include "operation.hpp" namespace ocl { class MillingCutter; class STLSurf; class Triangle; //class KDNode; /// /// \brief path drop cutter finish Path generation class PathDropCutter : public Operation { public: /// construct an empty PathDropCutter object PathDropCutter(); virtual ~PathDropCutter(); /// set the Path to follow and sample void setPath(const Path* path); /// set the minimum z-value, or "floor" for drop-cutter void setZ(const double z) { minimumZ = z; } /// return Z double getZ() const { return minimumZ; } /// run drop-cutter on the whole Path virtual void run(); protected: /// the path to follow const Path* path; /// the lowest z height, used when no triangles are touched, default is minimumZ = 0.0 double minimumZ; /// list of CL-points std::vector<CLPoint> clpoints; private: /// the algorithm void uniform_sampling_run(); /// sample the span unfirormly with tolerance sampling void sample_span(const Span* span); }; } // end namespace #endif // end file pathdropcutter.h
apache-2.0
cask-solutions/dre
dre-service/src/main/java/co/cask/yare/ServiceUtils.java
2888
/* * Copyright © 2017-2018 Cask Data, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package co.cask.yare; import co.cask.cdap.api.service.http.HttpServiceResponder; import com.google.gson.JsonObject; import java.net.HttpURLConnection; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.HashMap; /** * This class provides utility services to the service in this package. */ public final class ServiceUtils { /** * Sends the error response back to client. * * @param responder to respond to the service request. * @param message to be included as part of the error */ public static final void error(HttpServiceResponder responder, int status, String message) { JsonObject error = new JsonObject(); error.addProperty("status", status); error.addProperty("message", message); sendJson(responder, status, error.toString()); } /** * Sends the error response back to client for not Found. * * @param responder to respond to the service request. * @param message to be included as part of the error */ public static final void notFound(HttpServiceResponder responder, String message) { JsonObject error = new JsonObject(); error.addProperty("status", HttpURLConnection.HTTP_NOT_FOUND); error.addProperty("message", message); sendJson(responder, HttpURLConnection.HTTP_NOT_FOUND, error.toString()); } /** * Returns a Json response back to client. * * @param responder to respond to the service request. * @param status code to be returned to client. 
* @param body to be sent back to client. */ public static final void sendJson(HttpServiceResponder responder, int status, String body) { responder.send(status, ByteBuffer.wrap(body.getBytes(StandardCharsets.UTF_8)), "application/json", new HashMap<String, String>()); } /** * Sends the success response back to client. * * @param responder to respond to the service request. * @param message to be included as part of the error */ public static final void success(HttpServiceResponder responder, String message) { JsonObject error = new JsonObject(); error.addProperty("status", HttpURLConnection.HTTP_OK); error.addProperty("message", message); sendJson(responder, HttpURLConnection.HTTP_OK, error.toString()); } }
apache-2.0
x-meta/xworker
xworker_core/src/main/java/xworker/lang/actions/text/GrabDataFromText.java
4311
/******************************************************************************* * Copyright 2007-2013 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package xworker.lang.actions.text; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.xmeta.ActionContext; import org.xmeta.Thing; import org.xmeta.util.OgnlUtil; import ognl.OgnlException; public class GrabDataFromText { /** * 从文本截取字符串。 * * @param actionContext * @return * @throws OgnlException */ public static Object run(ActionContext actionContext) throws OgnlException{ Thing self = (Thing) actionContext.get("self"); //是否返回字符串 boolean returnList = self.getBoolean("returnList"); //文本 Object strObj = OgnlUtil.getValue(self, "textExpression", actionContext); if(strObj == null){ return returnNull(returnList); } String text = String.valueOf(strObj); //预定位 int index = 0; Thing preIndex = self.getThing("PreIndex@0"); if(preIndex != null){ for(Thing stringIndex : preIndex.getChilds()){ index = index(text, stringIndex, index, false); if(index == -1){ returnNull(returnList); } } } //搜索词 Thing fields = self.getThing("Fields@0"); if(fields != null){ List<Map<String, String>> datas = new ArrayList<Map<String, String>>(); boolean first = true; while(true){ //多条数据的遍历中的循环 if(!first){ Thing lpreIndex = self.getThing("ListPreIndex@0"); if(lpreIndex != null){ 
for(Thing stringIndex : lpreIndex.getChilds()){ index = index(text, stringIndex, index, false); if(index == -1){ break; } } } } if(index == -1){ break; } first = false; Map<String, String> data = new HashMap<String, String>(); for(Thing field : fields.getChilds()){ preIndex = field.getThing("PreIndex@0"); if(preIndex != null){ for(Thing stringIndex : preIndex.getChilds()){ index = index(text, stringIndex, index, true); if(index == -1){ returnNull(returnList); } } } int index2 = index; Thing sufIndex = field.getThing("SuffixStringIndex@0"); if(sufIndex != null){ index2 = index(text, sufIndex, index, false); if(index2 == -1){ index = -1; break; } } String value = text.substring(index, index2); index = index2; data.put(field.getString("name"), value); } datas.add(data); if(!returnList){ //如果只返回一条记录 break; } if(index == -1){ break; } } if(returnList){ return datas; }else if(datas.size() > 0){ return datas.get(0); } } return returnNull(returnList); } public static int index(String text, Thing stringIndex, int index, boolean addLength){ String strIndex = stringIndex.getString("index"); if(strIndex != null && !"".equals(strIndex)){ return Integer.parseInt(strIndex); } String key = stringIndex.getString("string"); if(key != null && !"".equals(index)){ if(key.startsWith("'") && key.endsWith("'")){ key = key.substring(1, key.length() - 1); } return text.indexOf(key, index) + (addLength ? key.length() : 0); }else{ return index; } } public static Object returnNull(boolean returnList){ if(returnList){ return Collections.emptyList(); }else{ return null; } } }
apache-2.0
razvanphp/arangodb
UnitTests/HttpInterface/api-cursor-spec.rb
24208
# coding: utf-8 require 'rspec' require 'arangodb.rb' describe ArangoDB do api = "/_api/cursor" prefix = "api-cursor" context "dealing with cursors:" do before do @reId = Regexp.new('^\d+$') end ################################################################################ ## error handling ################################################################################ context "error handling:" do it "returns an error if body is missing" do cmd = api doc = ArangoDB.log_post("#{prefix}-missing-body", cmd) doc.code.should eq(400) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(true) doc.parsed_response['code'].should eq(400) doc.parsed_response['errorNum'].should eq(600) end it "returns an error if collection is unknown" do cmd = api body = "{ \"query\" : \"FOR u IN unknowncollection LIMIT 2 RETURN u.n\", \"count\" : true, \"bindVars\" : {}, \"batchSize\" : 2 }" doc = ArangoDB.log_post("#{prefix}-unknown-collection", cmd, :body => body) doc.code.should eq(404) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(true) doc.parsed_response['code'].should eq(404) doc.parsed_response['errorNum'].should eq(1203) end it "returns an error if cursor identifier is missing" do cmd = api doc = ArangoDB.log_put("#{prefix}-missing-cursor-identifier", cmd) doc.code.should eq(400) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(true) doc.parsed_response['code'].should eq(400) doc.parsed_response['errorNum'].should eq(400) end it "returns an error if cursor identifier is invalid" do cmd = api + "/123456" doc = ArangoDB.log_put("#{prefix}-invalid-cursor-identifier", cmd) doc.code.should eq(404) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(true) doc.parsed_response['code'].should eq(404) doc.parsed_response['errorNum'].should 
eq(1600) end end ################################################################################ ## create and using cursors ################################################################################ context "handling a cursor:" do before do @cn = "users" ArangoDB.drop_collection(@cn) @cid = ArangoDB.create_collection(@cn, false) (0...10).each{|i| ArangoDB.post("/_api/document?collection=#{@cid}", :body => "{ \"n\" : #{i} }") } end after do ArangoDB.drop_collection(@cn) end it "creates a cursor single run" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} LIMIT 2 RETURN u.n\", \"count\" : true, \"bindVars\" : {}, \"batchSize\" : 2 }" doc = ArangoDB.log_post("#{prefix}-create-for-limit-return-single", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_nil doc.parsed_response['hasMore'].should eq(false) doc.parsed_response['count'].should eq(2) doc.parsed_response['result'].length.should eq(2) end it "creates a cursor single run, without count" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} LIMIT 2 RETURN u.n\", \"count\" : false, \"bindVars\" : {} }" doc = ArangoDB.log_post("#{prefix}-create-for-limit-return-single", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_nil doc.parsed_response['hasMore'].should eq(false) doc.parsed_response['count'].should eq(nil) doc.parsed_response['result'].length.should eq(2) end it "creates a cursor single run, large batch size" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} LIMIT 2 RETURN u.n\", \"count\" : true, \"batchSize\" : 5 }" doc = ArangoDB.log_post("#{prefix}-create-for-limit-return-single-larger", cmd, :body => body) 
doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_nil doc.parsed_response['hasMore'].should eq(false) doc.parsed_response['count'].should eq(2) doc.parsed_response['result'].length.should eq(2) end it "creates a cursor" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} LIMIT 5 RETURN u.n\", \"count\" : true, \"batchSize\" : 2 }" doc = ArangoDB.log_post("#{prefix}-create-for-limit-return", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(2) id = doc.parsed_response['id'] cmd = api + "/#{id}" doc = ArangoDB.log_put("#{prefix}-create-for-limit-return-cont", cmd) doc.code.should eq(200) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(200) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['id'].should eq(id) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(2) cmd = api + "/#{id}" doc = ArangoDB.log_put("#{prefix}-create-for-limit-return-cont2", cmd) doc.code.should eq(200) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(200) doc.parsed_response['id'].should be_nil doc.parsed_response['hasMore'].should eq(false) doc.parsed_response['count'].should 
eq(5) doc.parsed_response['result'].length.should eq(1) cmd = api + "/#{id}" doc = ArangoDB.log_put("#{prefix}-create-for-limit-return-cont3", cmd) doc.code.should eq(404) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(true) doc.parsed_response['errorNum'].should eq(1600) doc.parsed_response['code'].should eq(404) end it "creates a cursor and deletes it in the middle" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} LIMIT 5 RETURN u.n\", \"count\" : true, \"batchSize\" : 2 }" doc = ArangoDB.log_post("#{prefix}-create-for-limit-return", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(2) id = doc.parsed_response['id'] cmd = api + "/#{id}" doc = ArangoDB.log_put("#{prefix}-create-for-limit-return-cont", cmd) doc.code.should eq(200) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(200) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['id'].should eq(id) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(2) cmd = api + "/#{id}" doc = ArangoDB.log_delete("#{prefix}-delete", cmd) doc.code.should eq(202) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(202) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) end 
it "deleting a cursor" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} LIMIT 5 RETURN u.n\", \"count\" : true, \"batchSize\" : 2 }" doc = ArangoDB.post(cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(2) id = doc.parsed_response['id'] cmd = api + "/#{id}" doc = ArangoDB.log_delete("#{prefix}-delete", cmd) doc.code.should eq(202) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(202) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) end it "deleting a deleted cursor" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} LIMIT 5 RETURN u.n\", \"count\" : true, \"batchSize\" : 2 }" doc = ArangoDB.post(cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(2) id = doc.parsed_response['id'] cmd = api + "/#{id}" doc = ArangoDB.log_delete("#{prefix}-delete", cmd) doc.code.should eq(202) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(202) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc = 
ArangoDB.log_delete("#{prefix}-delete", cmd) doc.code.should eq(404) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(true) doc.parsed_response['errorNum'].should eq(1600); doc.parsed_response['code'].should eq(404) doc.parsed_response['id'].should be_nil end it "deleting an invalid cursor" do cmd = api cmd = api + "/999999" # we assume this cursor id is invalid doc = ArangoDB.log_delete("#{prefix}-delete", cmd) doc.code.should eq(404) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(true); doc.parsed_response['errorNum'].should eq(1600); doc.parsed_response['code'].should eq(404) doc.parsed_response['id'].should be_nil end it "creates a cursor that will expire" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} LIMIT 5 RETURN u.n\", \"count\" : true, \"batchSize\" : 1, \"ttl\" : 4 }" doc = ArangoDB.log_post("#{prefix}-create-ttl", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(1) sleep 1 id = doc.parsed_response['id'] cmd = api + "/#{id}" doc = ArangoDB.log_put("#{prefix}-create-ttl", cmd) doc.code.should eq(200) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(200) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['id'].should eq(id) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(1) sleep 1 
doc = ArangoDB.log_put("#{prefix}-create-ttl", cmd) doc.code.should eq(200) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(200) doc.parsed_response['id'].should eq(id) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(1) # after this, the cursor might expire eventually # the problem is that we cannot exactly determine the point in time # when it really vanishes, as this depends on thread scheduling, state # of the cleanup thread etc. # sleep 10 # this should delete the cursor on the server # doc = ArangoDB.log_put("#{prefix}-create-ttl", cmd) # doc.code.should eq(404) # doc.headers['content-type'].should eq("application/json; charset=utf-8") # doc.parsed_response['error'].should eq(true) # doc.parsed_response['errorNum'].should eq(1600) # doc.parsed_response['code'].should eq(404) end it "creates a cursor that will not expire" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} LIMIT 5 RETURN u.n\", \"count\" : true, \"batchSize\" : 1, \"ttl\" : 60 }" doc = ArangoDB.log_post("#{prefix}-create-ttl", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(1) sleep 1 id = doc.parsed_response['id'] cmd = api + "/#{id}" doc = ArangoDB.log_put("#{prefix}-create-ttl", cmd) doc.code.should eq(200) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(200) doc.parsed_response['id'].should 
be_kind_of(String) doc.parsed_response['id'].should match(@reId) doc.parsed_response['id'].should eq(id) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(1) sleep 1 doc = ArangoDB.log_put("#{prefix}-create-ttl", cmd) doc.code.should eq(200) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(200) doc.parsed_response['id'].should eq(id) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(1) sleep 5 # this should not delete the cursor on the server doc = ArangoDB.log_put("#{prefix}-create-ttl", cmd) doc.code.should eq(200) doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(200) doc.parsed_response['id'].should eq(id) doc.parsed_response['hasMore'].should eq(true) doc.parsed_response['count'].should eq(5) doc.parsed_response['result'].length.should eq(1) end it "creates a query that executes a v8 expression during query optimization" do cmd = api body = "{ \"query\" : \"RETURN CONCAT('foo', 'bar', 'baz')\" }" doc = ArangoDB.log_post("#{prefix}-create-v8", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_nil doc.parsed_response['hasMore'].should eq(false) doc.parsed_response['result'].length.should eq(1) end it "creates a query that executes a v8 expression during query execution" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} RETURN PASSTHRU(KEEP(u, '_key'))\" }" doc = ArangoDB.log_post("#{prefix}-create-v8", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) 
doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_nil doc.parsed_response['hasMore'].should eq(false) doc.parsed_response['result'].length.should eq(10) end it "creates a query that executes a dynamic index expression during query execution" do cmd = api body = "{ \"query\" : \"FOR i IN #{@cn} FOR j IN #{@cn} FILTER i._key == j._key RETURN i._key\" }" doc = ArangoDB.log_post("#{prefix}-index-expression", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_nil doc.parsed_response['hasMore'].should eq(false) doc.parsed_response['result'].length.should eq(10) end it "creates a query that executes a dynamic V8 index expression during query execution" do cmd = api body = "{ \"query\" : \"FOR i IN #{@cn} FOR j IN #{@cn} FILTER j._key == PASSTHRU(i._key) RETURN i._key\" }" doc = ArangoDB.log_post("#{prefix}-v8-index-expression", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_nil doc.parsed_response['hasMore'].should eq(false) doc.parsed_response['result'].length.should eq(10) end end ################################################################################ ## checking a query ################################################################################ context "checking a query:" do before do @cn = "users" ArangoDB.drop_collection(@cn) @cid = ArangoDB.create_collection(@cn, false) end after do ArangoDB.drop_collection(@cn) end it "valid query" do cmd = "/_api/query" body = "{ \"query\" : \"FOR u IN #{@cn} FILTER u.name == @name LIMIT 2 RETURN u.n\" }" doc = ArangoDB.log_post("api-query-valid", cmd, :body => body) doc.code.should eq(200) 
doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(200) doc.parsed_response['bindVars'].should eq(["name"]) end it "invalid query" do cmd = "/_api/query" body = "{ \"query\" : \"FOR u IN #{@cn} FILTER u.name = @name LIMIT 2 RETURN u.n\" }" doc = ArangoDB.log_post("api-query-invalid", cmd, :body => body) doc.code.should eq(400) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(true) doc.parsed_response['code'].should eq(400) doc.parsed_response['errorNum'].should eq(1501) end end ################################################################################ ## floating points ################################################################################ context "fetching floating-point values:" do before do @cn = "users" ArangoDB.drop_collection(@cn) @cid = ArangoDB.create_collection(@cn, false) ArangoDB.post("/_api/document?collection=#{@cid}", :body => "{ \"_key\" : \"big\", \"value\" : 4e+262 }") ArangoDB.post("/_api/document?collection=#{@cid}", :body => "{ \"_key\" : \"neg\", \"value\" : -4e262 }") ArangoDB.post("/_api/document?collection=#{@cid}", :body => "{ \"_key\" : \"pos\", \"value\" : 4e262 }") ArangoDB.post("/_api/document?collection=#{@cid}", :body => "{ \"_key\" : \"small\", \"value\" : 4e-262 }") end after do ArangoDB.drop_collection(@cn) end it "fetching via cursor" do cmd = api body = "{ \"query\" : \"FOR u IN #{@cn} SORT u._key RETURN u.value\" }" doc = ArangoDB.log_post("#{prefix}-float", cmd, :body => body) doc.code.should eq(201) doc.headers['content-type'].should eq("application/json; charset=utf-8") doc.parsed_response['error'].should eq(false) doc.parsed_response['code'].should eq(201) doc.parsed_response['id'].should be_nil result = doc.parsed_response['result'] result.length.should eq(4) result[0].should eq(4e262); result[1].should eq(-4e262); result[2].should eq(4e262); 
result[3].should eq(4e-262); doc = ArangoDB.get("/_api/document/#{@cid}/big") doc.parsed_response['value'].should eq(4e262) doc = ArangoDB.get("/_api/document/#{@cid}/neg") doc.parsed_response['value'].should eq(-4e262) doc = ArangoDB.get("/_api/document/#{@cid}/pos") doc.parsed_response['value'].should eq(4e262) doc = ArangoDB.get("/_api/document/#{@cid}/small") doc.parsed_response['value'].should eq(4e-262) end end end end
apache-2.0
Strepped/yvlasov
chapter_001/src/test/java/ru/job4j/loop/PaintTest.java
904
package ru.job4j.loop; import org.junit.Test; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; /** * Test. * * @author Yury Vlasov * @since 14.04.2017 * @version 1.0 */ public class PaintTest { /** * Test add. */ @Test public void whenPiramidWithHeightTwoThenStringWithTwoRows() { Paint paint = new Paint(); String result = paint.piramid(2); String expected = String.format(" ^%n^^^", System.getProperty("line.separator")); assertThat(result, is(expected)); } /** * Test add. */ @Test public void whenPiramidWithHeightThreeThenStringWithThreeRows() { Paint paint = new Paint(); String result = paint.piramid(3); String expected = String.format(" ^%s ^^^%n^^^^^", System.getProperty("line.separator")); System.out.println(result); assertThat(result, is(expected)); } }
apache-2.0
davidtsadler/ebay-sdk-php
test/Merchandising/Types/MerchandisingServiceProductResponseTest.php
904
<?php /** * DO NOT EDIT THIS FILE! * * This file was automatically generated from external sources. * * Any manual change here will be lost the next time the SDK * is updated. You've been warned! */ namespace DTS\eBaySDK\Test\Merchandising\Types; use DTS\eBaySDK\Merchandising\Types\MerchandisingServiceProductResponse; class MerchandisingServiceProductResponseTest extends \PHPUnit_Framework_TestCase { private $obj; protected function setUp() { $this->obj = new MerchandisingServiceProductResponse(); } public function testCanBeCreated() { $this->assertInstanceOf('\DTS\eBaySDK\Merchandising\Types\MerchandisingServiceProductResponse', $this->obj); } public function testExtendsBaseMerchandisingServiceResponse() { $this->assertInstanceOf('\DTS\eBaySDK\Merchandising\Types\BaseMerchandisingServiceResponse', $this->obj); } }
apache-2.0
ubiquitous-computing-lab/Mining-Minds
supporting-layer/uiux-authoring-tool/accounts/forms.py
3221
""" # UI/UX Authoring Tool # @license http://www.apache.org/licenses/LICENSE-2.0 # Author @ Jamil Hussain """ from django import forms from django.contrib.auth.forms import ReadOnlyPasswordHashField from django.contrib.auth import authenticate, get_user_model from django.core.validators import RegexValidator from django.db.models import Q from .models import USERNAME_REGEX User = get_user_model() class UserLoginForm(forms.Form): query = forms.CharField(label='Username / Email') password = forms.CharField(label='Password', widget=forms.PasswordInput) def clean(self, *args, **kwargs): query = self.cleaned_data.get("query") password = self.cleaned_data.get("password") user_qs_final = User.objects.filter( Q(username__iexact=query)| Q(email__iexact=query) ).distinct() if not user_qs_final.exists() and user_qs_final.count() != 1: raise forms.ValidationError("Invalid credentials -- user not exist") user_obj = user_qs_final.first() if not user_obj.check_password(password): # log auth tries raise forms.ValidationError("Invalid credentials -- passowrd invalid") if not user_obj.is_active: raise forms.ValidationError("Inactive user. Please verify your email address.") self.cleaned_data["user_obj"] = user_obj return super(UserLoginForm, self).clean(*args, **kwargs) class UserCreationForm(forms.ModelForm): """A form for creating new users. 
Includes all the required fields, plus a repeated password.""" password1 = forms.CharField(label='Password', widget=forms.PasswordInput) password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput) class Meta: model = User fields = ('username', 'email',) def clean_password2(self): # Check that the two password entries match password1 = self.cleaned_data.get("password1") password2 = self.cleaned_data.get("password2") if password1 and password2 and password1 != password2: raise forms.ValidationError("Passwords don't match") return password2 def save(self, commit=True): # Save the provided password in hashed format user = super(UserCreationForm, self).save(commit=False) user.set_password(self.cleaned_data["password1"]) user.is_active = False # create a new user hash for activating email. if commit: user.save() return user class UserChangeForm(forms.ModelForm): """A form for updating users. Includes all the fields on the user, but replaces the password field with admin's password hash display field. """ password = ReadOnlyPasswordHashField() class Meta: model = User fields = ('username', 'email', 'password', 'is_staff', 'is_active', 'is_admin') def clean_password(self): # Regardless of what the user provides, return the initial value. # This is done here, rather than on the field, because the # field does not have access to the initial value return self.initial["password"]
apache-2.0
randerzander/SequenceFileKeyValueInputFormat
src/main/java/com/github/randerzander/SequenceFileKeyValueInputFormat/SequenceFileKeyValueRecordReader.java
2538
package com.github.randerzander; import java.io.IOException; import java.io.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.util.ReflectionUtils; public class SequenceFileKeyValueRecordReader<K, V> implements RecordReader<K, V> { private SequenceFile.Reader in; private long start; private long end; private boolean more = true; protected Configuration conf; public SequenceFileKeyValueRecordReader(Configuration conf, FileSplit split) throws IOException { Path path = split.getPath(); FileSystem fs = path.getFileSystem(conf); this.in = new SequenceFile.Reader(fs, path, conf); this.end = split.getStart() + split.getLength(); this.conf = conf; if (split.getStart() > in.getPosition()) in.sync(split.getStart()); this.start = in.getPosition(); more = start < end; } @Override public K createKey() { return (K) ReflectionUtils.newInstance(in.getKeyClass(), conf); } @Override public V createValue(){ return (V) ReflectionUtils.newInstance(in.getValueClass(), conf); } @Override public float getProgress() throws IOException { if (end == start) return 0.0f; else return Math.min(1.0f, (in.getPosition() - start) / (float)(end - start)); } public synchronized long getPos() throws IOException { return in.getPosition(); } protected synchronized void seek(long pos) throws IOException { in.seek(pos); } public synchronized void close() throws IOException { in.close(); } @Override public boolean next(K key, V value) throws IOException { if (!more) return false; long pos = in.getPosition(); boolean remaining = in.next((Writable) key, (Writable) value); if (remaining){ //Hive ignores keys and gives access only to their value //Prepend the key onto the value to trick Hive into giving access to the 
key //Hive's internal column separator is Ctrl-A "\001" //Separating the key and value with "\001" makes Hive interpret the modified value as two columns: key & value String fileContents = value.toString().replace("\001", "\000"); ((Text)value).set(key.toString() + "\001" + fileContents); } if (pos >= end && in.syncSeen()) more = false; else more = remaining; return more; } }
apache-2.0
sassoftware/conary
conary/cmds/updatecmd.py
44470
# # Copyright (c) SAS Institute Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import copy import json import os import itertools import sys import threading import urllib2 from conary import callbacks from conary import conaryclient from conary import display from conary import errors from conary import trove from conary import trovetup from conary import versions from conary.deps import deps from conary.lib import api from conary.lib import log from conary.lib import util from conary.local import database from conary.repository import changeset, filecontainer from conary.conaryclient import cmdline, modelupdate from conary.conaryclient.cmdline import parseTroveSpec # FIXME client should instantiated once per execution of the command line # conary client class CriticalUpdateInfo(conaryclient.CriticalUpdateInfo): criticalTroveRegexps = ['conary:.*'] def locked(method): # this decorator used to be defined in UpdateCallback # The problem is you cannot subclass UpdateCallback and use the decorator # because python complains it is an unbound function. # And you can't define it as @staticmethod either, it would break the # decorated functions. 
# Somewhat related (staticmethod objects not callable) topic: # http://mail.python.org/pipermail/python-dev/2006-March/061948.html def wrapper(self, *args, **kwargs): self.lock.acquire() try: return method(self, *args, **kwargs) finally: self.lock.release() wrapper.__doc__ = method.__doc__ wrapper.func_name = method.func_name return wrapper class UpdateCallback(callbacks.LineOutput, callbacks.UpdateCallback): def done(self): """ @see: callbacks.UpdateCallback.done """ self._message('') def _message(self, text): """ Called when this callback object needs to output progress information. The information is written to stdout. @return: None """ callbacks.LineOutput._message(self, text) def update(self): """ Called by this callback object to update the status. This method sanitizes text. This method is not thread safe - obtain a lock before calling. @return: None """ t = "" if self.updateText: t += self.updateText if self.csText: t = self.csText + ' ' if t and len(t) < 76: t = t[:76] t += '...' self._message(t) @locked def updateMsg(self, text): """ Called when the update thread has status updates. @param text: new status text @type text: string @return: None """ self.updateText = text self.update() @locked def csMsg(self, text): """ Called when the download thread has status updates. 
@param text: new status text @type text: string @return: None """ self.csText = text self.update() def executingSystemModel(self): self.updateMsg("Processing system model") def loadingModelCache(self): self.updateMsg("Loading system model cache") def savingModelCache(self): self.updateMsg("Saving system model cache") def preparingChangeSet(self): """ @see: callbacks.ChangesetCallback.preparingChangeSet """ self.updateMsg("Preparing changeset request") def resolvingDependencies(self): """ @see: callbacks.UpdateCallback.resolvingDependencies """ self.updateMsg("Resolving dependencies") @locked def updateDone(self): """ @see: callbacks.UpdateCallback.updateDone """ self._message('') self.updateText = None @locked def _downloading(self, msg, got, rate, need): """ Called by this callback object to handle different kinds of download-related progress information. This method puts together download rate information. @param msg: status message @type msg: string @param got: number of bytes retrieved so far @type got: integer @param rate: bytes per second @type rate: integer @param need: number of bytes total to be retrieved @type need: integer @return: None """ # This function acquires a lock just because it looks at self.csHunk # and self.updateText directly. Otherwise, self.csMsg will acquire the # lock (which is now reentrant) if got == need: self.csMsg(None) elif need != 0: if self.csHunk[1] < 2 or not self.updateText: self.csMsg("%s %dKB (%d%%) of %dKB at %dKB/sec" % (msg, got/1024, (got*100)/need, need/1024, rate/1024)) else: self.csMsg("%s %d of %d: %dKB (%d%%) of %dKB at %dKB/sec" % ((msg,) + self.csHunk + \ (got/1024, (got*100)/need, need/1024, rate/1024))) else: # no idea how much we need, just keep on counting... 
self.csMsg("%s (got %dKB at %dKB/s so far)" % (msg, got/1024, rate/1024)) def downloadingFileContents(self, got, need): """ @see: callbacks.ChangesetCallback.downloadingFileContents """ self._downloading('Downloading files for changeset', got, self.rate, need) def downloadingChangeSet(self, got, need): """ @see: callbacks.ChangesetCallback.downloadingChangeSet """ self._downloading('Downloading', got, self.rate, need) def requestingFileContents(self): """ @see: callbacks.ChangesetCallback.requestingFileContents """ if self.csHunk[1] < 2: self.csMsg("Requesting file contents") else: self.csMsg("Requesting file contents for changeset %d of %d" % self.csHunk) def requestingChangeSet(self): """ @see: callbacks.ChangesetCallback.requestingChangeSet """ if self.csHunk[1] < 2: self.csMsg("Requesting changeset") else: self.csMsg("Requesting changeset %d of %d" % self.csHunk) def creatingRollback(self): """ @see: callbacks.UpdateCallback.creatingRollback """ self.updateMsg("Creating rollback") def preparingUpdate(self, troveNum, troveCount): """ @see: callbacks.UpdateCallback.preparingUpdate """ self.updateMsg("Preparing update (%d of %d)" % (troveNum, troveCount)) @locked def restoreFiles(self, size, totalSize): """ @see: callbacks.UpdateCallback.restoreFiles """ # Locked, because we modify self.restored if totalSize != 0: self.restored += size self.updateMsg("Writing %dk of %dk (%d%%)" % (self.restored / 1024 , totalSize / 1024, (self.restored * 100) / totalSize)) def removeFiles(self, fileNum, total): """ @see: callbacks.UpdateCallback.removeFiles """ if total != 0: self.updateMsg("Removing %d of %d (%d%%)" % (fileNum , total, (fileNum * 100) / total)) def creatingDatabaseTransaction(self, troveNum, troveCount): """ @see: callbacks.UpdateCallback.creatingDatabaseTransaction """ self.updateMsg("Creating database transaction (%d of %d)" % (troveNum, troveCount)) def updatingDatabase(self, step, stepNum, stepCount): if step == 'latest': self.updateMsg('Updating list of 
latest versions: (%d of %d)' % (stepNum, stepCount)) else: self.updateMsg('Updating database: (%d of %d)' % (stepNum, stepCount)) def runningPreTagHandlers(self): """ @see: callbacks.UpdateCallback.runningPreTagHandlers """ self.updateMsg("Running tag prescripts") def runningPostTagHandlers(self): """ @see: callbacks.UpdateCallback.runningPostTagHandlers """ self.updateMsg("Running tag post-scripts") def committingTransaction(self): """ @see: callbacks.UpdateCallback.committingTransaction """ self.updateMsg("Committing database transaction") @locked def setChangesetHunk(self, num, total): """ @see: callbacks.ChangesetCallback.setChangesetHunk """ self.csHunk = (num, total) @locked def setUpdateHunk(self, num, total): """ @see: callbacks.UpdateCallback.setUpdateHunk """ self.restored = 0 self.updateHunk = (num, total) @locked def setUpdateJob(self, jobs): """ @see: callbacks.UpdateCallback.setUpdateJob """ self._message('') if self.updateHunk[1] < 2: self.out.write('Applying update job:\n') else: self.out.write('Applying update job %d of %d:\n' % self.updateHunk) # erase anything that is currently displayed self._message('') self.formatter.prepareJobs(jobs) for line in self.formatter.formatJobTups(jobs, indent=' '): self.out.write(line + '\n') @locked def tagHandlerOutput(self, tag, msg, stderr = False): """ @see: callbacks.UpdateCallback.tagHandlerOutput """ self._message('') self.out.write('[%s] %s\n' % (tag, msg)) @locked def troveScriptOutput(self, typ, msg): """ @see: callbacks.UpdateCallback.troveScriptOutput """ self._message('') self.out.write("[%s] %s" % (typ, msg)) @locked def troveScriptFailure(self, typ, errcode): """ @see: callbacks.UpdateCallback.troveScriptFailure """ self._message('') self.out.write("[%s] %s" % (typ, errcode)) def capsuleSyncScan(self, capsuleType): self.updateMsg("Scanning for %s capsule changes" % capsuleType) def capsuleSyncCreate(self, capsuleType, name, num, total): self.updateMsg("Collecting modifications to %s database (%d of 
%d)" % (capsuleType, num, total)) def capsuleSyncApply(self, added, removed): self._message('') self.out.write('Synchronizing database with capsule changes\n') def __init__(self, cfg=None, modelFile=None): """ Initialize this callback object. @param cfg: Conary configuration @type cfg: A ConaryConfiguration object. @return: None """ callbacks.UpdateCallback.__init__(self) if cfg: self.setTrustThreshold(cfg.trustThreshold) callbacks.LineOutput.__init__(self) self.restored = 0 self.csHunk = (0, 0) self.updateHunk = (0, 0) self.csText = None self.updateText = None self.lock = threading.RLock() if cfg: fullVersions = cfg.fullVersions showFlavors = cfg.fullFlavors showLabels = cfg.showLabels baseFlavors = cfg.flavor showComponents = cfg.showComponents db = conaryclient.ConaryClient(cfg, modelFile=modelFile).db else: fullVersions = showFlavors = showLabels = db = baseFlavors = None showComponents = None self.formatter = display.JobTupFormatter(affinityDb=db) self.formatter.dcfg.setTroveDisplay(fullVersions=fullVersions, fullFlavors=showFlavors, showLabels=showLabels, baseFlavors=baseFlavors, showComponents=showComponents) self.formatter.dcfg.setJobDisplay(compressJobs=not showComponents) class JsonUpdateCallback(UpdateCallback): def __del__(self): pass def _message(self, msg): self.out.write('%s\n' % msg) def _capsuleSync(self, name, step, done=None, total=None, rate=None): step = max(step, 1) self.updateMsg( step_name=name, step=step, step_total=3, phase=1, phase_name="Capsule sync", done=done, total=total, rate=rate) def _calculateUpdate(self, name, step, done=None, total=None, rate=None): step = max(step, 1) self.updateMsg( step_name=name, step=step, step_total=4, phase=2, phase_name="Calculate update", done=done, total=total, rate=rate) def _applyUpdate(self, name, done=None, total=None, rate=None, jobs=None): step, step_total = self.updateHunk step = max(step, 1) step_total = max(step_total, 1) if jobs: self.updateMsg( step_name=name, step=step, 
step_total=step_total, phase=3, phase_name="Apply update", done=done, total=total, rate=rate, jobs=jobs) else: self.updateMsg( step_name=name, step=step, step_total=step_total, phase=3, phase_name="Apply update", done=done, total=total, rate=rate) def _applyUpdateCS(self, name, done=None, total=None, rate=None): step, step_total = self.updateHunk step = max(step, 1) step_total = max(step_total, 1) self.updateMsg( step_name=name, step=step, step_total=step_total, phase=3, phase_name="Apply update", done=done, total=total, rate=rate) def update(self): """ Called by this callback object to udpate the status. This method convets dictionaries into json strings. This method is not thread safe - obtain a lock before calling. @return None """ if self.updateText: t = self.updateText if self.csText: t = self.csText t['percent'] = None if t.get('done') is not None and t.get('total'): t['percent'] = (t['done'] * 100) / t['total'] if t: self._message(json.dumps(t)) @locked def updateMsg(self, *args, **kwargs): self.updateText = kwargs self.updateText['phase_total'] = 3 if args: self.updateText['msg'] = args[0] self.update() @locked def csMsg(self, *args, **kwargs): self.csText = kwargs self.csText['phase_total'] = 3 if args: if args[0] is None: self.csText = dict() else: self.csText['msg'] = args[0] self.update() def executingSystemModel(self): self._calculateUpdate("Processing system model", step=2) def loadingModelCache(self): self._calculateUpdate("Loading system model cache", step=1) def savingModelCache(self): self._calculateUpdate("Saving system model cache", step=4) def preparingChangeSet(self): self._applyUpdate("Preparing changeset request") def resolvingDependencies(self): self._calculateUpdate("Resolving dependencies", step=3) def creatingRollback(self): """ @see: callbacks.UpdateCallback.creatingRollback """ self._applyUpdate("Creating rollback") def preparingUpdate(self, troveNum, troveCount): """ @see: callbacks.UpdateCallback.preparingUpdate """ 
self._applyUpdate("Preparing update", done=troveNum, total=troveCount) @locked def restoreFiles(self, size, totalSize): """ @see: callbacks.UpdateCallback.restoreFiles """ # Locked, because we modify self.restored if totalSize != 0: self.restored += size self._applyUpdate("Restoring Files", done=self.restored / 1024, total=totalSize / 1024) def removeFiles(self, fileNum, total): """ @see: callbacks.UpdateCallback.removeFiles """ if total != 0: self._applyUpdate("Removing Files", done=fileNum, total=total) def creatingDatabaseTransaction(self, troveNum, troveCount): """ @see: callbacks.UpdateCallback.creatingDatabaseTransaction """ self._applyUpdate("Creating database transaction", done=troveNum, total=troveCount) def updatingDatabase(self, step, stepNum, stepCount): if step == 'latest': self._applyUpdate( 'Updating list of latest versions', done=stepNum, total=stepCount, ) else: self._applyUpdate( 'Updating database', done=stepNum, total=stepCount) def runningPreTagHandlers(self): """ @see: callbacks.UpdateCallback.runningPreTagHandlers """ self._applyUpdate("Running tag prescripts") def runningPostTagHandlers(self): """ @see: callbacks.UpdateCallback.runningPostTagHandlers """ self._applyUpdate("Running tag post-scripts") def committingTransaction(self): """ @see: callbacks.UpdateCallback.committingTransaction """ self._applyUpdate("Committing database transaction") @locked def setUpdateJob(self, jobs): """ @see: callbacks.UpdateCallback.setUpdateJob """ jobs_collection = [] self.formatter.prepareJobs(jobs) for line in self.formatter.formatJobTups(jobs): action, trove_spec = line.split(None, 1) jobs_collection.append(dict(action=action, trove=trove_spec)) self._applyUpdate( 'Applying update job', jobs=jobs_collection, ) def capsuleSyncScan(self, capsuleType): self._capsuleSync( "Scanning for %s capsule changes" % capsuleType, step=1) def capsuleSyncCreate(self, capsuleType, name, num, total): self._capsuleSync( "Collecting modifications to %s database" % 
capsuleType, step=2, done=num, total=total) @locked def _downloading(self, msg, got, rate, need): """ Called by this callback object to handle different kinds of download-related progress information. This method puts together download rate information. @param msg: status message @type msg: string @param got: number of bytes retrieved so far @type got: integer @param rate: bytes per second @type rate: integer @param need: number of bytes total to be retrieved @type need: integer @return: None """ # This function acquires a lock just because it looks at self.csHunk # and self.updateText directly. Otherwise, self.csMsg will acquire the # lock (which is now reentrant) if got == need: self.csMsg(None) elif need != 0: if self.csHunk[1] < 2 or not self.updateText: self._applyUpdateCS(msg, done=got / 1024, total=need / 1024, rate=rate / 1024) else: self._applyUpdateCS("%s %d of %d" % ((msg,) + self.csHunk), done=got / 1024, total=need / 1024, rate=rate / 1024) else: # no idea how much we need, just keep on counting... 
self._applyUpdateCS(msg, done=got / 1024, rate=rate / 1024) def downloadingFileContents(self, got, need): """ @see: callbacks.ChangesetCallback.downloadingFileContents """ self._applyUpdateCS('Downloading files for changeset', done=got, rate=self.rate, total=need) def downloadingChangeSet(self, got, need): """ @see: callbacks.ChangesetCallback.downloadingChangeSet """ self._applyUpdateCS('Downloading', done=got, rate=self.rate, total=need) def requestingFileContents(self): """ @see: callbacks.ChangesetCallback.requestingFileContents """ self._applyUpdateCS( "Requesting file contents for changeset", done=max(self.csHunk[0], 1), total=max(self.csHunk[1], 1), ) def requestingChangeSet(self): """ @see: callbacks.ChangesetCallback.requestingChangeSet """ self._applyUpdateCS( "Requesting changeset", done=max(self.csHunk[0], 1), total=max(self.csHunk[1], 1), ) @locked def troveScriptOutput(self, typ, msg): """ @see: callbacks.UpdateCallback.troveScriptOutput """ self._applyUpdate("[%s] %s" % (typ, msg)) @locked def troveScriptFailure(self, typ, errcode): """ @see: callbacks.UpdateCallback.troveScriptFailure """ self._applyUpdate("[%s] %s" % (typ, errcode)) def capsuleSyncApply(self, added, removed): self._capsuleSync('Synchronizing database with capsule changes', step=3) def __init__(self, *args, **kwargs): UpdateCallback.__init__(self, *args, **kwargs) self.updateText = {} self.csText = {} def displayChangedJobs(addedJobs, removedJobs, cfg): db = conaryclient.ConaryClient(cfg).db formatter = display.JobTupFormatter(affinityDb=db) formatter.dcfg.setTroveDisplay(fullVersions=cfg.fullVersions, fullFlavors=cfg.fullFlavors, showLabels=cfg.showLabels, baseFlavors=cfg.flavor, showComponents=cfg.showComponents) formatter.dcfg.setJobDisplay(compressJobs=not cfg.showComponents) formatter.prepareJobLists([removedJobs | addedJobs]) if removedJobs: print 'No longer part of job:' for line in formatter.formatJobTups(removedJobs, indent=' '): print line if addedJobs: print 'Added to 
job:' for line in formatter.formatJobTups(addedJobs, indent=' '): print line def displayUpdateInfo(updJob, cfg, noRestart=False): jobLists = updJob.getJobs() db = conaryclient.ConaryClient(cfg).db formatter = display.JobTupFormatter(affinityDb=db) formatter.dcfg.setTroveDisplay(fullVersions=cfg.fullVersions, fullFlavors=cfg.fullFlavors, showLabels=cfg.showLabels, baseFlavors=cfg.flavor, showComponents=cfg.showComponents) formatter.dcfg.setJobDisplay(compressJobs=not cfg.showComponents) formatter.prepareJobLists(jobLists) totalJobs = len(jobLists) for num, job in enumerate(jobLists): if totalJobs > 1: if num in updJob.getCriticalJobs(): print '** ', print 'Job %d of %d:' % (num + 1, totalJobs) for line in formatter.formatJobTups(job, indent=' '): print line if updJob.getCriticalJobs() and not noRestart: criticalJobs = updJob.getCriticalJobs() if len(criticalJobs) > 1: jobPlural = 's' else: jobPlural = '' jobList = ', '.join([str(x + 1) for x in criticalJobs]) print print '** The update will restart itself after job%s %s and continue updating' % (jobPlural, jobList) return @api.developerApi def doUpdate(cfg, changeSpecs, **kwargs): callback = kwargs.get('callback', None) if not callback: callback = callbacks.UpdateCallback(trustThreshold=cfg.trustThreshold) kwargs['callback'] = callback else: callback.setTrustThreshold(cfg.trustThreshold) syncChildren = kwargs.get('syncChildren', False) syncUpdate = kwargs.pop('syncUpdate', False) restartInfo = kwargs.get('restartInfo', None) if syncChildren or syncUpdate: installMissing = True else: installMissing = False kwargs['installMissing'] = installMissing fromChangesets = [] for path in kwargs.pop('fromFiles', []): cs = changeset.ChangeSetFromFile(path) fromChangesets.append(cs) kwargs['fromChangesets'] = fromChangesets # Look for items which look like files in the applyList and convert # them into fromChangesets w/ the primary sets for item in changeSpecs[:]: if os.access(item, os.R_OK): try: cs = 
changeset.ChangeSetFromFile(item) except: continue fromChangesets.append(cs) changeSpecs.remove(item) for troveTuple in cs.getPrimaryTroveList(): changeSpecs.append(trovetup.TroveTuple(*troveTuple).asString()) if kwargs.get('restartInfo', None): # We don't care about applyList, we will set it later applyList = None else: keepExisting = kwargs.get('keepExisting') updateByDefault = kwargs.get('updateByDefault', True) applyList = cmdline.parseChangeList(changeSpecs, keepExisting, updateByDefault, allowChangeSets=True) _updateTroves(cfg, applyList, **kwargs) # Clean up after ourselves if restartInfo: util.rmtree(restartInfo, ignore_errors=True) def doModelUpdate(cfg, sysmodel, modelFile, otherArgs, **kwargs): kwargs['systemModel'] = sysmodel kwargs['systemModelFile'] = modelFile kwargs['loadTroveCache'] = True kwargs.setdefault('updateByDefault', True) # erase is not default case kwargs.setdefault('model', False) kwargs.setdefault('keepExisting', True) # prefer "install" to "update" restartInfo = kwargs.get('restartInfo', None) patchArgs = kwargs.pop('patchSpec', None) fromChangesets = [] applyList = [] callback = kwargs.get('callback', None) if not callback: callback = callbacks.UpdateCallback(trustThreshold=cfg.trustThreshold) kwargs['callback'] = callback else: callback.setTrustThreshold(cfg.trustThreshold) if restartInfo is None: addArgs = [x[1:] for x in otherArgs if x.startswith('+')] rmArgs = [x[1:] for x in otherArgs if x.startswith('-')] defArgs = [x for x in otherArgs if not (x.startswith('+') or x.startswith('-'))] # find any default arguments that represent changesets to # install/update for defArg in list(defArgs): if kwargs['updateByDefault'] and os.path.isfile(defArg): try: cs = changeset.ChangeSetFromFile(defArg) fromChangesets.append((cs, defArg)) defArgs.remove(defArg) except filecontainer.BadContainer: # not a changeset, must be a trove name pass if kwargs['updateByDefault']: addArgs += defArgs else: rmArgs += defArgs if rmArgs: 
sysmodel.appendOpByName('erase', text=rmArgs) updateName = { False: 'update', True: 'install' }[kwargs['keepExisting']] branchArgs = {} for index, spec in enumerate(addArgs): try: troveSpec = trovetup.TroveSpec(spec) version = versions.Label(troveSpec.version) branchArgs[troveSpec] = index except: # Any exception is a parse failure in one of the # two steps, and so we do not convert that argument pass if branchArgs: client = conaryclient.ConaryClient(cfg) repos = client.getRepos() foundTroves = repos.findTroves(cfg.installLabelPath, branchArgs.keys(), defaultFlavor = cfg.flavor) for troveSpec in foundTroves: index = branchArgs[troveSpec] foundTrove = foundTroves[troveSpec][0] addArgs[index] = addArgs[index].replace( troveSpec.version, '%s/%s' %(foundTrove[1].trailingLabel(), foundTrove[1].trailingRevision())) disallowedChangesets = [] for cs, argName in fromChangesets: for troveTuple in cs.getPrimaryTroveList(): # group and redirect changesets will break the model the # next time it is run, so prevent them from getting in # the model in the first place if troveTuple[1].isOnLocalHost(): if troveTuple[0].startswith('group-'): disallowedChangesets.append((argName, 'group', trovetup.TroveTuple(*troveTuple).asString())) continue trvCs = cs.getNewTroveVersion(*troveTuple) if trvCs.getType() == trove.TROVE_TYPE_REDIRECT: disallowedChangesets.append((argName, 'redirect', trovetup.TroveTuple(*troveTuple).asString())) continue addArgs.append( trovetup.TroveTuple(*troveTuple).asString()) if disallowedChangesets: raise errors.ConaryError( 'group and redirect changesets on a local label' ' cannot be installed:\n ' + '\n '.join( '%s contains local %s: %s' % x for x in disallowedChangesets)) if addArgs: sysmodel.appendOpByName(updateName, text=addArgs) if patchArgs: sysmodel.appendOpByName('patch', text=patchArgs) kwargs['fromChangesets'] = [x[0] for x in fromChangesets] if kwargs.pop('model'): sysmodel.write(sys.stdout) sys.stdout.flush() return None keepExisting = 
kwargs.get('keepExisting') updateByDefault = kwargs.get('updateByDefault', True) applyList = cmdline.parseChangeList([], keepExisting, updateByDefault, allowChangeSets=True) else: # In the restart case, applyList == [] which says "sync to model" pass _updateTroves(cfg, applyList, **kwargs) # Clean up after ourselves if restartInfo: util.rmtree(restartInfo, ignore_errors=True) def _updateTroves(cfg, applyList, **kwargs): # Take out the apply-related keyword arguments applyDefaults = dict( replaceFiles = False, replaceManagedFiles = False, replaceUnmanagedFiles = False, replaceModifiedFiles = False, replaceModifiedConfigFiles = False, tagScript = None, justDatabase = False, skipCapsuleOps = False, info = False, keepJournal = False, noRestart = False, noScripts = False, ) applyKwargs = {} for k in applyDefaults: if k in kwargs: applyKwargs[k] = kwargs.pop(k) callback = kwargs.pop('callback') loadTroveCache = kwargs.pop('loadTroveCache', False) applyKwargs['test'] = kwargs.get('test', False) applyKwargs['localRollbacks'] = cfg.localRollbacks applyKwargs['autoPinList'] = cfg.pinTroves model = kwargs.pop('systemModel', None) modelFile = kwargs.pop('systemModelFile', None) modelGraph = kwargs.pop('modelGraph', None) modelTrace = kwargs.pop('modelTrace', None) noRestart = applyKwargs.get('noRestart', False) client = conaryclient.ConaryClient(cfg, modelFile=modelFile) client.setUpdateCallback(callback) if kwargs.pop('disconnected', False): client.disconnectRepos() migrate = kwargs.get('migrate', False) # even though we no longer differentiate forceMigrate, we still # remove it from kwargs to avoid confusing prepareUpdateJob kwargs.pop('forceMigrate', False) restartInfo = kwargs.get('restartInfo', None) # Initialize the critical update set applyCriticalOnly = kwargs.get('applyCriticalOnly', False) if kwargs.get('criticalUpdateInfo') is not None: kwargs['criticalUpdateInfo'].criticalOnly = applyCriticalOnly else: kwargs['criticalUpdateInfo'] = 
CriticalUpdateInfo(applyCriticalOnly) info = applyKwargs.pop('info', False) # Rename depCheck to resolveDeps depCheck = kwargs.pop('depCheck', True) kwargs['resolveDeps'] = depCheck if not info: client.checkWriteableRoot() # Unfortunately there's no easy way to make 'test' or 'info' mode work # with capsule sync, doubly so because it influences the decisions made # later on about what troves to update. So this will always really # apply, but the good news is that it never modifies the system outside # of the Conary DB. client.syncCapsuleDatabase(callback, makePins=True) updJob = client.newUpdateJob() try: if model: changeSetList = kwargs.get('fromChangesets', []) criticalUpdates = kwargs.get('criticalUpdateInfo', None) tc = modelupdate.CMLTroveCache(client.getDatabase(), client.getRepos(), callback = callback, changeSetList = changeSetList) tcPath = cfg.root + cfg.dbPath + '/modelcache' if loadTroveCache: if os.path.exists(tcPath): log.info("loading %s", tcPath) callback.loadingModelCache() tc.load(tcPath) ts = client.cmlGraph(model, changeSetList = changeSetList) if modelGraph is not None: ts.g.generateDotFile(modelGraph) suggMap = client._updateFromTroveSetGraph(updJob, ts, tc, fromChangesets = changeSetList, criticalUpdateInfo = criticalUpdates, callback = callback) if modelTrace is not None: ts.g.trace([ parseTroveSpec(x) for x in modelTrace ] ) finalModel = copy.deepcopy(model) if model.suggestSimplifications(tc, ts.g): log.info("possible system model simplifications found") ts2 = client.cmlGraph(model, changeSetList = changeSetList) updJob2 = client.newUpdateJob() try: suggMap2 = client._updateFromTroveSetGraph(updJob2, ts2, tc, fromChangesets = changeSetList, criticalUpdateInfo = criticalUpdates) except errors.TroveNotFound: log.info("bad model generated; bailing") else: if (suggMap == suggMap2 and updJob.getJobs() == updJob2.getJobs()): log.info("simplified model verfied; using it instead") ts = ts2 finalModel = model updJob = updJob2 suggMap = suggMap2 
else: log.info("simplified model changed result; ignoring") model = finalModel modelFile.model = finalModel if tc.cacheModified(): log.info("saving %s", tcPath) callback.savingModelCache() tc.save(tcPath) callback.done() else: suggMap = client.prepareUpdateJob(updJob, applyList, **kwargs) except: callback.done() client.close() raise if info: callback.done() displayUpdateInfo(updJob, cfg, noRestart=noRestart) if restartInfo and not model: callback.done() newJobs = set(itertools.chain(*updJob.getJobs())) oldJobs = set(updJob.getItemList()) addedJobs = newJobs - oldJobs removedJobs = oldJobs - newJobs if addedJobs or removedJobs: print print 'NOTE: after critical updates were applied, the contents of the update were recalculated:' print displayChangedJobs(addedJobs, removedJobs, cfg) updJob.close() client.close() return if model: missingLocalTroves = model.getMissingLocalTroves(tc, ts) if missingLocalTroves: print 'Update would leave references to missing local troves:' for troveTup in missingLocalTroves: if not isinstance(troveTup, trovetup.TroveTuple): troveTup = trovetup.TroveTuple(troveTup) print "\t" + str(troveTup) client.close() return if suggMap: callback.done() dcfg = display.DisplayConfig() dcfg.setTroveDisplay(fullFlavors = cfg.fullFlavors, fullVersions = cfg.fullVersions, showLabels = cfg.showLabels) formatter = display.TroveTupFormatter(dcfg) print "Including extra troves to resolve dependencies:" print " ", items = sorted(set(formatter.formatNVF(*x) for x in itertools.chain(*suggMap.itervalues()))) print " ".join(items) askInteractive = cfg.interactive if restartInfo: callback.done() newJobs = set(itertools.chain(*updJob.getJobs())) oldJobs = set(updJob.getItemList()) addedJobs = newJobs - oldJobs removedJobs = oldJobs - newJobs if not model and addedJobs or removedJobs: print 'NOTE: after critical updates were applied, the contents of the update were recalculated:' displayChangedJobs(addedJobs, removedJobs, cfg) else: askInteractive = False if not 
updJob.jobs: # Nothing to do print 'Update would not modify system' if model and not kwargs.get('test'): # Make sure 'conary sync' clears model.next even if nothing needs # to be done. modelFile.closeSnapshot() updJob.close() client.close() return elif askInteractive: print 'The following updates will be performed:' displayUpdateInfo(updJob, cfg, noRestart=noRestart) if migrate and cfg.interactive: print ('Migrate erases all troves not referenced in the groups' ' specified.') if askInteractive: if migrate: style = 'migrate' else: style = 'update' okay = cmdline.askYn('continue with %s? [Y/n]' % style, default=True) if not okay: updJob.close() client.close() return if not noRestart and updJob.getCriticalJobs(): print "Performing critical system updates, will then restart update." try: restartDir = client.applyUpdateJob(updJob, **applyKwargs) finally: updJob.close() client.close() if restartDir: params = sys.argv # Write command line to disk import xmlrpclib cmdlinefile = open(os.path.join(restartDir, 'cmdline'), "w") cmdlinefile.write(xmlrpclib.dumps((params, ), methodresponse = True)) cmdlinefile.close() # CNY-980: we should have the whole script of changes to perform in # the restart directory (in the job list); if in migrate mode, re-exec # as regular update if migrate and 'migrate' in params: params[params.index('migrate')] = 'update' params.extend(['--restart-info=%s' % restartDir]) client.close() raise errors.ReexecRequired( 'Critical update completed, rerunning command...', params, restartDir) else: if (not kwargs.get('test', False)) and model: modelFile.closeSnapshot() class UpdateAllFormatter(object): def formatNVF(self, name, version, flavor): if version and (flavor is not None) and not flavor.isEmpty(): return "'%s=%s[%s]'" % (name, version.asString(), deps.formatFlavor(flavor)) if (flavor is not None) and not flavor.isEmpty(): return "'%s[%s]'" % (name, deps.formatFlavor(flavor)) if version: return "%s=%s" % (name, version.asString()) return name def 
updateAll(cfg, **kwargs): showItems = kwargs.pop('showItems', False) restartInfo = kwargs.get('restartInfo', None) migrate = kwargs.pop('migrate', False) modelArg = kwargs.pop('model', False) modelFile = kwargs.get('systemModelFile', None) model = kwargs.get('systemModel', None) infoArg = kwargs.get('info', False) if model and modelFile and modelFile.exists() and restartInfo is None: model.refreshVersionSnapshots() if modelArg: model.write(sys.stdout) sys.stdout.flush() return None kwargs['installMissing'] = kwargs['removeNotByDefault'] = migrate if 'callback' not in kwargs or not kwargs.get('callback'): kwargs['callback'] = UpdateCallback(cfg) # load trove cache only if --info provided kwargs['loadTroveCache'] = infoArg client = conaryclient.ConaryClient(cfg) # We want to be careful not to break the old style display, for whoever # might have a parser for that output. withLongDisplay = (cfg.fullFlavors or cfg.fullVersions or cfg.showLabels) formatter = UpdateAllFormatter() if restartInfo or (model and modelFile and modelFile.exists()): updateItems = [] applyList = None else: if showItems and withLongDisplay: updateItems = client.getUpdateItemList() dcfg = display.DisplayConfig() dcfg.setTroveDisplay(fullFlavors = cfg.fullFlavors, fullVersions = cfg.fullVersions, showLabels = cfg.showLabels) formatter = display.TroveTupFormatter(dcfg) else: updateItems = client.fullUpdateItemList() applyList = [ (x[0], (None, None), x[1:], True) for x in updateItems ] if showItems: for (name, version, flavor) in sorted(updateItems, key=lambda x:x[0]): print formatter.formatNVF(name, version, flavor) return _updateTroves(cfg, applyList, **kwargs) # Clean up after ourselves if restartInfo: util.rmtree(restartInfo, ignore_errors=True) def changePins(cfg, troveStrList, pin = True, systemModel = None, systemModelFile = None, callback = None): client = conaryclient.ConaryClient(cfg) client.checkWriteableRoot() troveList = [] for item in troveStrList: name, ver, flv = parseTroveSpec(item) 
troves = client.db.findTrove(None, (name, ver, flv)) troveList += troves client.pinTroves(troveList, pin = pin) if systemModel and systemModelFile and not pin: doModelUpdate(cfg, systemModel, systemModelFile, [], callback=callback) def revert(cfg): conaryclient.ConaryClient.revertJournal(cfg)
apache-2.0
AutohomeOps/Assets_Report
assets_report/lib/facter/nic_info.rb
7165
# encoding: utf-8 require "rubygems" require 'json' def more_ip_plan_for_win(nic_name,nic) nic_name_count = 0 new_nic_name = nic_name while nic.has_key?(new_nic_name) new_nic_name = new_nic_name.split(":")[0] + ":" + nic_name_count.to_s nic_name_count += 1 end nic[new_nic_name] = {} nic[new_nic_name]['macaddress'] = nic[nic_name]['macaddress'] nic[new_nic_name]['ipaddress'] = nic[nic_name]['ipaddress'] nic[new_nic_name]['netmask'] = nic[nic_name]['netmask'] return nic end def more_ip_plan_for_linux(nic_name,nic,new_nic_name) nic[new_nic_name] = {} nic[new_nic_name]['macaddress'] = nic[nic_name]['macaddress'] nic[new_nic_name]['ipaddress'] = nic[nic_name]['ipaddress'] nic[new_nic_name]['netmask'] = nic[nic_name]['netmask'] return nic end def full_nic_for_win(nic_name,nic) nic_name_count = 0 new_nic_name = nic_name + ":" + nic_name_count.to_s if nic[nic_name].has_key?('ipaddress') if nic[nic_name]['ipaddress'] == nil nic[nic_name]['ipaddress'] = '' end else nic[nic_name]['ipaddress'] = '' end while nic.has_key?(new_nic_name) nic[new_nic_name]['hardware'] = 1 nic[new_nic_name]['model'] = '' if nic[new_nic_name]['ipaddress'] == nil nic[new_nic_name]['ipaddress'] = '' end nic_name_count += 1 new_nic_name = new_nic_name.split(":")[0] + ":" + nic_name_count.to_s end for x in nic if not x[1].has_key?('hardware') x[1]['hardware'] = 1 end if not x[1].has_key?('model') x[1]['model'] = '' end end return nic end def full_nic_for_linux(nic_name,nic) nic_name_count = 0 new_nic_name = nic_name + ":" + nic_name_count.to_s if nic[nic_name].has_key?('ipaddress') if nic[nic_name]['ipaddress'] == nil nic[nic_name]['ipaddress'] = '' end else nic[nic_name]['ipaddress'] = '' end if nic[nic_name].has_key?('netmask') if nic[nic_name]['netmask'] == nil nic[nic_name]['netmask'] = '' end else nic[nic_name]['netmask'] = '' end while nic.has_key?(new_nic_name) if nic[new_nic_name]['ipaddress'] == nil nic[new_nic_name]['ipaddress'] = '' end nic_name_count += 1 new_nic_name = 
new_nic_name.split(":")[0] + ":" + nic_name_count.to_s end for x in nic if not x[1].has_key?('hardware') x[1]['hardware'] = 1 end if not x[1].has_key?('model') x[1]['model'] = '' end end return nic end def get_ip_for_win_ZH_CN() nic = {} ipconfig_replace = %x{"ipconfig"/all"}.gsub(" ","") ipconfig_arr = ipconfig_replace.split("\n") nil_arr = [""] ipconfig_arr = ipconfig_arr - nil_arr for ipline in ipconfig_arr ip_info = ipline.split(":") if ipline.include?"以太网适配器".encode('gbk') nic_name = ipline.gsub("以太网适配器".encode('gbk'),"").gsub(":","") nic[nic_name] = {} nic[nic_name]['hardware'] = 1 nic[nic_name]['model'] = '' elsif ipline.include?"物理地址".encode('gbk') nic[nic_name]['macaddress'] = ip_info[1] elsif ipline.include?"IPv4".encode('gbk') if nic[nic_name].has_key?('ipaddress') nic = more_ip_plan_for_win(nic_name, nic) end nic[nic_name]['ipaddress'] = ip_info[1].split("(")[0] elsif ipline.include?"子网掩码".encode('gbk') nic[nic_name]['netmask'] = ip_info[1] nic = full_nic_for_win(nic_name,nic) end end return nic end def get_ip_for_win_EN_US() nic = {} ipconfig_replace = %x{"ipconfig"/all"} ipconfig_arr = ipconfig_replace.split("\n") nil_arr = [""] ipconfig_arr = ipconfig_arr - nil_arr for ipline in ipconfig_arr ip_info = ipline.split(":") if (ipline.include?"Ethernet" and ipline.include?"adapter") nic_name = ipline.gsub("Ethernet","").gsub("adapter","").gsub(":","").strip() nic[nic_name] = {} nic[nic_name]['hardware'] = 1 nic[nic_name]['model'] = '' end if ip_info.length > 1 if (ipline.include?"Physical" and ipline.include?"Address") nic[nic_name]['macaddress'] = ip_info[1].strip() elsif ipline.include?"IPv4" if nic[nic_name].has_key?('ipaddress') nic = more_ip_plan_for_win(nic_name, nic) end nic[nic_name]['ipaddress'] = ip_info[1].split("(")[0].strip() elsif (ipline.include?"Subnet" and ipline.include?"Mask") nic[nic_name]['netmask'] = ip_info[1].strip() end if not nic_name == nil nic = full_nic_for_win(nic_name,nic) end end end return nic end def get_ip_for_win() 
ipconfig_replace = %x{"ipconfig"/all"}.gsub(" ","") if ipconfig_replace.include?"以太网适配器".encode('gbk') nic = get_ip_for_win_ZH_CN() else nic = get_ip_for_win_EN_US() end return nic end def get_ip_for_linux() nic = {} ipconfig_replace = `ip address show` ipconfig_arr = ipconfig_replace.split("\n") ipconfig_arr = ipconfig_arr - [''] for ipline in ipconfig_arr if not ipline[0, 1] == " " nic_name = ipline.split(" ")[1].gsub(" ","") if nic_name[-1, 1] == ":" nic_name = nic_name.slice(0, nic_name.length - 1 ) end nic[nic_name] = {} nic[nic_name]['hardware'] = 1 nic[nic_name]['model'] = '' elsif (ipline.include?"inet" and not(ipline.include?"inet6")) ipconfig_replace = ipline.split(" ") - [""] if not ipconfig_replace[-1] == nic_name nic = more_ip_plan_for_linux(nic_name,nic,ipconfig_replace[-1]) end match_auto = /\d+\.\d+\.\d+\.\d+/.match(ipconfig_replace[1]) nic[nic_name]['ipaddress'] = match_auto[0] nic[nic_name]['netmask'] = auto_netmask(ipconfig_replace[1].slice(ipconfig_replace[1].rindex("/") + 1, 10)) elsif not /([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}/.match(ipline) == nil nic[nic_name]['macaddress'] = /([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}/.match(ipline)[0] end nic = full_nic_for_linux(nic_name,nic) end return nic end def check_mac(nic) for x in nic if not x[1].has_key?('macaddress') nic.delete(x[0]) next end if x[1]['ipaddress'] == '127.0.0.1' #p 'del from ip' nic.delete(x[0]) next end if x[1]['macaddress'] == nil nic.delete(x[0]) next end if x[0].include?('lo:') #p 'del from name' nic.delete(x[0]) next end end return nic end def auto_netmask(num) count = num.to_i/8 lift = num.to_i%8 netmask = "255." * count + Integer("0b" + "1" * lift +"0" * (8 - lift)).to_s netmask_info = netmask.split(".") if netmask_info.size > 4 netmask = netmask_info[0] + "." + netmask_info[1] + "." + netmask_info[1] + "." 
+ netmask_info[1] elsif netmask_info.size < 4 new_arr = netmask_info while new_arr.size < 4 new_arr.push('0') end netmask = new_arr.join(".") end return netmask end Facter.add(:nic_info) do confine :kernel => 'Linux' setcode do nic_res = get_ip_for_linux() nic_res = check_mac(nic_res) nic_res = JSON.dump(nic_res) end end Facter.add(:nic_info) do confine :kernel => 'windows' setcode do nic_res = get_ip_for_win() nic_res = check_mac(nic_res) nic_res = JSON.dump(nic_res) end end
apache-2.0
adayanesupri/glpi_supri
inc/group.class.php
21837
<?php /* * @version $Id: group.class.php 19073 2012-08-06 12:47:36Z yllen $ ------------------------------------------------------------------------- GLPI - Gestionnaire Libre de Parc Informatique Copyright (C) 2003-2012 by the INDEPNET Development Team. http://indepnet.net/ http://glpi-project.org ------------------------------------------------------------------------- LICENSE This file is part of GLPI. GLPI is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. GLPI is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GLPI. If not, see <http://www.gnu.org/licenses/>. -------------------------------------------------------------------------- */ // ---------------------------------------------------------------------- // Original Author of file: Julien Dombre // Purpose of file: // ---------------------------------------------------------------------- if (!defined('GLPI_ROOT')) { die("Sorry. 
You can't access directly to this file"); } /** * Group class **/ class Group extends CommonTreeDropdown { static function getTypeName($nb=0) { global $LANG; if ($nb>1) { return $LANG['Menu'][36]; } return $LANG['common'][35]; } function canCreate() { return Session::haveRight('group', 'w'); } function canView() { return Session::haveRight('group', 'r'); } function post_getEmpty () { $this->fields['is_requester'] = 1; $this->fields['is_assign'] = 1; $this->fields['is_notify'] = 1; $this->fields['is_itemgroup'] = 1; $this->fields['is_usergroup'] = 1; } function cleanDBonPurge() { global $DB; $gu = new Group_User(); $gu->cleanDBonItemDelete($this->getType(), $this->fields['id']); $gt = new Group_Ticket(); $gt->cleanDBonItemDelete($this->getType(), $this->fields['id']); $gp = new Group_Problem(); $gp->cleanDBonItemDelete($this->getType(), $this->fields['id']); $gki = new Group_KnowbaseItem(); $gki->cleanDBonItemDelete($this->getType(), $this->fields['id']); $gr = new Group_Reminder(); $gr->cleanDBonItemDelete($this->getType(), $this->fields['id']); // Ticket rules use various _groups_id_* Rule::cleanForItemAction($this, '_groups_id%'); Rule::cleanForItemCriteria($this, '_groups_id%'); // GROUPS for RuleMailcollector Rule::cleanForItemCriteria($this, 'GROUPS'); // Set no group to consumables $query = "UPDATE `glpi_consumables` SET `items_id` = '0' WHERE `items_id` = '".$this->fields['id']."' AND `itemtype` = 'Group'"; $DB->query($query); } function getTabNameForItem(CommonGLPI $item, $withtemplate=0) { global $LANG; if (!$withtemplate && Session::haveRight("group","r")) { switch ($item->getType()) { case 'Group' : $ong = array(); $nb = 0; if ($_SESSION['glpishow_count_on_tabs']) { $nb = countElementsInTable($this->getTable(), "`groups_id` = '".$item->getID()."'"); } $ong[4] = self::createTabEntry($this->getTypeName(2), $nb); if ($item->getField('is_itemgroup')) { $ong[1] = $LANG['common'][111]; } if ($item->getField('is_assign')) { $ong[2] = $LANG['common'][112]; } if 
($item->getField('is_usergroup') && Session::haveRight("group", "w") && Session::haveRight("user_authtype", "w") && AuthLdap::useAuthLdap()) { $ong[3] = $LANG['setup'][3]; } return $ong; } } return ''; } static function displayTabContentForItem(CommonGLPI $item, $tabnum=1, $withtemplate=0) { global $LANG; switch ($item->getType()) { case 'Group' : switch ($tabnum) { case 1 : $item->showItems(false); return true; case 2 : $item->showItems(true); return true; case 3 : $item->showLDAPForm($item->getID()); return true; case 4 : $item->showChildren(); return true; } break; } return false; } function defineTabs($options=array()) { global $LANG; $ong = array(); $this->addStandardTab('Group', $ong, $options); if ($this->fields['is_usergroup']) { $this->addStandardTab('User', $ong, $options); } if ($this->fields['is_notify']) { $this->addStandardTab('NotificationTarget', $ong, $options); } if ($this->fields['is_requester']) { $this->addStandardTab('Ticket', $ong, $options); } return $ong; } /** * Print the group form * * @param $ID integer ID of the item * @param $options array * - target filename : where to go when done. 
* - withtemplate boolean : template or basic item * * @return Nothing (display) **/ function showForm($ID, $options=array()) { global $LANG; if ($ID > 0) { $this->check($ID, 'r'); } else { // Create item $this->check(-1, 'w'); } $this->showTabs($options); $options['colspan']=4; $this->showFormHeader($options); echo "<tr class='tab_bg_1'>"; echo "<td colspan='2'>".$LANG['common'][16]."&nbsp;:&nbsp;</td>"; echo "<td colspan='2'>"; Html::autocompletionTextField($this, "name"); echo "</td>"; echo "<td rowspan='8' class='middle'>".$LANG['common'][25]."&nbsp;:&nbsp;</td>"; echo "<td class='middle' rowspan='8'>"; echo "<textarea cols='45' rows='8' name='comment' >".$this->fields["comment"]."</textarea>"; echo "</td></tr>"; echo "<tr class='tab_bg_1'>"; echo "<td colspan='2'>".$LANG['setup'][75]."</td><td colspan='2'>"; Dropdown::show('Group', array('value' => $this->fields['groups_id'], 'name' => 'groups_id', 'entity' => $this->fields['entities_id'], 'used' => ($ID>0 ? getSonsOf($this->getTable(), $ID) : array()))); echo "</td></tr>"; echo "<tr class='tab_bg_1'>"; echo "<td class='b' colspan='4'>".$LANG['group'][0]."</td>"; echo "</td></tr>"; echo "<tr class='tab_bg_1'><td>&nbsp;</td>"; echo "<td>".$LANG['job'][4]."&nbsp;:&nbsp;</td>"; echo "<td>"; dropdown::showYesNo('is_requester', $this->fields['is_requester']); echo "</td>"; echo "<td>".$LANG['job'][5]."&nbsp;:&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;"; dropdown::showYesNo('is_assign', $this->fields['is_assign']); echo "</td></tr>"; echo "<tr class='tab_bg_1'>"; echo "<td colspan='2' class='b'>".$LANG['group'][1]."&nbsp;:&nbsp;</td>"; echo "<td>"; dropdown::showYesNo('is_notify', $this->fields['is_notify']); echo "</td><td></td></tr>"; echo "<tr class='tab_bg_1'>"; echo "<td class='b' colspan='4'>".$LANG['group'][2]."</td>"; echo "</td></tr>"; echo "<tr class='tab_bg_1'><td>&nbsp;</td>"; echo "<td>".$LANG['common'][96]."&nbsp;:&nbsp;</td>"; echo "<td>"; dropdown::showYesNo('is_itemgroup', $this->fields['is_itemgroup']); echo 
"</td>"; echo "<td>".$LANG['Menu'][14]."&nbsp;:&nbsp;&nbsp;"; dropdown::showYesNo('is_usergroup', $this->fields['is_usergroup']); echo "</td></tr>"; echo "<tr class='tab_bg_1'>"; echo "<td colspan='4' class='center'>"; if (!$ID) { $template = "newtemplate"; echo $LANG['computers'][14]."&nbsp;:&nbsp;"; echo HTML::convDateTime($_SESSION["glpi_currenttime"]); } else { echo $LANG['common'][26]."&nbsp;:&nbsp;"; echo HTML::convDateTime($this->fields["date_mod"]); } echo "</td></tr>"; $this->showFormButtons($options); $this->addDivForTabs(); return true; } /** * Print a good title for group pages * *@return nothing (display) **/ function title() { global $LANG, $CFG_GLPI; $buttons = array(); if (Session::haveRight("group", "w") && Session::haveRight("user_authtype", "w") && AuthLdap::useAuthLdap()) { $buttons["ldap.group.php"] = $LANG['setup'][3]; $title = ""; } else { $title = $LANG['Menu'][36]; } Html::displayTitle($CFG_GLPI["root_doc"] . "/pics/groupes.png", $LANG['Menu'][36], $title, $buttons); } function getSearchOptions() { global $LANG; $tab = parent::getSearchOptions(); if (AuthLdap::useAuthLdap()) { $tab[3]['table'] = $this->getTable(); $tab[3]['field'] = 'ldap_field'; $tab[3]['name'] = $LANG['setup'][260]; $tab[3]['datatype'] = 'string'; $tab[4]['table'] = $this->getTable(); $tab[4]['field'] = 'ldap_value'; $tab[4]['name'] = $LANG['setup'][601]; $tab[4]['datatype'] = 'string'; $tab[5]['table'] = $this->getTable(); $tab[5]['field'] = 'ldap_group_dn'; $tab[5]['name'] = $LANG['setup'][261]; $tab[5]['datatype'] = 'string'; } $tab[11]['table'] = $this->getTable(); $tab[11]['field'] = 'is_requester'; $tab[11]['name'] = $LANG['job'][4]; $tab[11]['datatype'] = 'bool'; $tab[12]['table'] = $this->getTable(); $tab[12]['field'] = 'is_assign'; $tab[12]['name'] = $LANG['job'][5]; $tab[12]['datatype'] = 'bool'; $tab[13]['table'] = $this->getTable(); $tab[13]['field'] = 'is_notify'; $tab[13]['name'] = $LANG['group'][1]; $tab[13]['datatype'] = 'bool'; $tab[17]['table'] = 
$this->getTable(); $tab[17]['field'] = 'is_itemgroup'; $tab[17]['name'] = $LANG['search'][2]." ".$LANG['common'][96]; $tab[17]['datatype'] = 'bool'; $tab[15]['table'] = $this->getTable(); $tab[15]['field'] = 'is_usergroup'; $tab[15]['name'] = $LANG['search'][2]." ".$LANG['Menu'][14]; $tab[15]['datatype'] = 'bool'; $tab[70]['table'] = 'glpi_users'; $tab[70]['field'] = 'name'; $tab[70]['name'] = $LANG['common'][64]; $tab[70]['itemlink_type'] = 'User'; $tab[70]['forcegroupby'] = true; $tab[70]['massiveaction'] = false; $tab[70]['joinparams'] = array('beforejoin' => array('table' => 'glpi_groups_users', 'joinparams' => array('jointype' => 'child', 'condition' => "AND NEWTABLE.`is_manager` = 1"))); $tab[71]['table'] = 'glpi_users'; $tab[71]['field'] = 'name'; $tab[71]['name'] = $LANG['common'][123]; $tab[71]['itemlink_type'] = 'User'; $tab[71]['forcegroupby'] = true; $tab[71]['massiveaction'] = false; $tab[71]['joinparams'] = array('beforejoin' => array('table' => 'glpi_groups_users', 'joinparams' => array('jointype' => 'child', 'condition' => "AND NEWTABLE.`is_userdelegate` = 1"))); return $tab; } function showLDAPForm ($ID) { global $LANG; if ($ID > 0) { $this->check($ID, 'r'); } else { // Create item $this->check(-1, 'w'); } echo "<form name='groupldap_form' id='groupldap_form' method='post' action='". 
$this->getFormURL()."'>"; echo "<div class='spaced'><table class='tab_cadre_fixe'>"; if (Session::haveRight("group", "w") && Session::haveRight("user_authtype", "w") && AuthLdap::useAuthLdap()) { echo "<tr class='tab_bg_1'>"; echo "<td colspan='2' class='center'>".$LANG['setup'][256]."&nbsp;:&nbsp;</td></tr>"; echo "<tr class='tab_bg_1'>"; echo "<td>".$LANG['setup'][260]."&nbsp;:&nbsp;</td>"; echo "<td>"; Html::autocompletionTextField($this, "ldap_field"); echo "</td></tr>"; echo "<tr class='tab_bg_1'>"; echo "<td>".$LANG['setup'][601]."&nbsp;:&nbsp;</td>"; echo "<td>"; Html::autocompletionTextField($this, "ldap_value"); echo "</td></tr>"; echo "<tr class='tab_bg_1'>"; echo "<td colspan='2' class='center'>".$LANG['setup'][257]."&nbsp;:&nbsp;</td>"; echo "</tr>"; echo "<tr class='tab_bg_1'>"; echo "<td>".$LANG['setup'][261]."&nbsp;:&nbsp;</td>"; echo "<td>"; Html::autocompletionTextField($this, "ldap_group_dn"); echo "</td></tr>"; } $options = array('colspan' => 1, 'candel' => false); $this->showFormButtons($options); /*//SUPRISERVICE*/ echo "</table></div></form>"; } /** * get List of Computer in a group * * @since version 0.83 * * @param $types Array of types * @param $field String field name * @param $tree Boolean include child groups * @param $user Boolean include members (users) * @param $start Integer (first row to retrieve) * @param $res Array result filled on ouput * * @return integer total of items */ function getDataItems($types, $field, $tree, $user, $start, &$res) { global $DB, $CFG_GLPI, $LANG; // include item of child groups ? 
if ($tree) { $grprestrict = "IN (".implode(',', getSonsOf('glpi_groups', $this->getID())).")"; } else { $grprestrict = "='".$this->getID()."'"; } // include items of members if ($user) { $ufield = str_replace('groups', 'users', $field); $grprestrict = "(`$field` $grprestrict OR (`$field`=0 AND `$ufield` IN (SELECT `users_id` FROM `glpi_groups_users` WHERE `groups_id` $grprestrict)))"; } else { $grprestrict = "`$field` $grprestrict"; } // Count the total of item $nb = array(); $tot = 0; foreach ($types as $itemtype) { $nb[$itemtype] = 0; if (!($item = getItemForItemtype($itemtype))) { continue; } if (!$item->canView()) { continue; } if (!$item->isField($field)) { continue; } $restrict[$itemtype] = $grprestrict; if ($item->isEntityAssign()) { $restrict[$itemtype] .= getEntitiesRestrictRequest(" AND ", $item->getTable(), '', '', $item->maybeRecursive()); } if ($item->maybeTemplate()) { $restrict[$itemtype] .= " AND NOT `is_template`"; } if ($item->maybeDeleted()) { $restrict[$itemtype] .= " AND NOT `is_deleted`"; } $tot += $nb[$itemtype] = countElementsInTable($item->getTable(), $restrict[$itemtype]); } $max = $_SESSION['glpilist_limit']; if ($start >= $tot) { $start = 0; } $res = array(); foreach ($types as $itemtype) { if (!($item = getItemForItemtype($itemtype))) { continue; } if ($start >= $nb[$itemtype]) { // No need to read $start -= $nb[$itemtype]; } else { $query = "SELECT `id` FROM `".$item->getTable()."` WHERE ".$restrict[$itemtype]." 
ORDER BY `name` LIMIT $start,$max"; foreach ($DB->request($query) as $data) { $res[] = array('itemtype' => $itemtype, 'items_id' => $data['id']); $max--; } // For next type $start = 0; } if (!$max) { break; } } return $tot; } /** * Show items for the group * * @param $tech boolean, false search groups_id, true, search groups_id_tech */ function showItems($tech) { global $DB, $CFG_GLPI, $LANG; $rand = mt_rand(); $ID = $this->fields['id']; if ($tech) { $types = $CFG_GLPI['linkgroup_tech_types']; $field = 'groups_id_tech'; $title = $LANG['common'][112]; } else { $types = $CFG_GLPI['linkgroup_types']; $field = 'groups_id'; $title = $LANG['common'][111]; } $tree = Session::getSavedOption(__CLASS__, 'tree', 0); $user = Session::getSavedOption(__CLASS__, 'user', 0); $type = Session::getSavedOption(__CLASS__, 'onlytype', ''); if (!in_array($type, $types)) { $type = ''; } echo "<div class='spaced'>"; // Mini Search engine echo "<table class='tab_cadre_fixe'>"; echo "<tr class='tab_bg_1'><th colspan='3'>$title</tr>"; echo "<tr class='tab_bg_1'><td class='center'>"; echo $LANG['common'][17]."&nbsp;:&nbsp;"; Dropdown::showItemType($types, array('value' => $type, 'name' => 'onlytype', 'on_change' => 'reloadTab("start=0&onlytype="+this.value)', 'checkright' => true)); if ($this->haveChildren()) { echo "</td><td class='center'>".$LANG['group'][3]."&nbsp;:&nbsp;"; Dropdown::showYesNo('tree', $tree, -1, array('on_change' => 'reloadTab("start=0&tree="+this.value)')); } else { $tree = 0; } if ($this->getField('is_usergroup')) { echo "</td><td class='center'>".User::getTypeName(2)."&nbsp;:&nbsp;"; Dropdown::showYesNo('user', $user, -1, array('on_change' => 'reloadTab("start=0&user="+this.value)')); } else { $user = 0; } echo "</td></tr></table>"; $datas = array(); if ($type) { $types = array($type); } $start = (isset($_REQUEST['start']) ? 
$_REQUEST['start'] : 0); $nb = $this->getDataItems($types, $field, $tree, $user, $start, $datas); $nbcan = 0; if ($nb) { Html::printAjaxPager('', $start, $nb); echo "<form name='group_form' id='group_form_$field$rand' method='post' action='".$this->getFormURL()."'>"; echo "<table class='tab_cadre_fixe'><tr><th width='10'>&nbsp</th>"; echo "<th>".$LANG['common'][17]."</th>"; echo "<th>".$LANG['common'][16]."</th><th>".$LANG['entity'][0]."</th>"; if ($tree || $user) { echo "<th>".self::getTypeName(1)." / ".User::getTypeName(1)."</th>"; } echo "</tr>"; $tuser = new User(); $group = new Group(); foreach ($datas as $data) { if (!($item = getItemForItemtype($data['itemtype']))) { continue; } echo "<tr class='tab_bg_1'><td>"; if ($item->can($data['items_id'], 'w')) { echo "<input type='checkbox' name='item[".$data['itemtype']."][".$data['items_id']."]' value='1'>"; $nbcan++; } echo "</td><td>".$item->getTypeName(1); echo "</td><td>".$item->getLink(1); echo "</td><td>".Dropdown::getDropdownName("glpi_entities", $item->getEntityID()); if ($tree || $user) { echo "</td><td>"; if ($grp = $item->getField($field)) { if ($group->getFromDB($grp)) { echo $group->getLink(true); } } else if ($usr = $item->getField(str_replace('groups', 'users', $field))) { if ($tuser->getFromDB($usr)) { echo $tuser->getLink(true); } } } echo "</td></tr>"; } echo "</table>"; } else { echo "<p class='center b'>".$LANG['search'][15]."</p>"; } if ($nbcan) { Html::openArrowMassives("group_form_$field$rand", true); echo $LANG['common'][35]."&nbsp;:&nbsp;"; echo "<input type='hidden' name='field' value='$field'>"; Dropdown::show('Group', array('entity' => $this->fields["entities_id"], 'used' => array($this->fields["id"]), 'condition' => ($tech ? '`is_assign`' : '`is_itemgroup`'))); echo "&nbsp;"; Html::closeArrowMassives(array('changegroup' => $LANG['buttons'][20])); } if ($nb) { Html::closeForm(); } echo "</div>"; } } ?>
apache-2.0
AlanJager/zstack
plugin/localstorage/src/main/java/org/zstack/storage/primary/local/LocalStorageHypervisorBackend.java
5455
package org.zstack.storage.primary.local;

import org.zstack.header.cluster.ClusterInventory;
import org.zstack.header.core.Completion;
import org.zstack.header.core.ReturnValueCompletion;
import org.zstack.header.core.workflow.Flow;
import org.zstack.header.image.ImageInventory;
import org.zstack.header.storage.primary.*;

import java.util.List;

/**
 * Hypervisor-specific backend contract for local primary storage.
 *
 * <p>This class only declares the operations; each concrete subclass supplies
 * the implementation for one hypervisor type. Most operations follow an
 * asynchronous style: the result is delivered through the supplied
 * {@link ReturnValueCompletion} / {@link Completion} callback rather than a
 * return value. Methods taking a {@code hostUuid} parameter presumably target
 * a specific host's local disk — TODO confirm against concrete backends.
 *
 * Created by frank on 6/30/2015.
 */
public abstract class LocalStorageHypervisorBackend extends LocalStorageBase {
    public LocalStorageHypervisorBackend() {
    }

    public LocalStorageHypervisorBackend(PrimaryStorageVO self) {
        super(self);
    }

    /**
     * Report physical capacity usage aggregated over the given clusters,
     * delivered asynchronously via {@code completion}.
     */
    abstract void syncPhysicalCapacityInCluster(List<ClusterInventory> clusters, ReturnValueCompletion<PhysicalCapacityUsage> completion);

    // --- volume lifecycle -------------------------------------------------

    /** Instantiate (allocate) a volume on this primary storage. */
    abstract void handle(InstantiateVolumeOnPrimaryStorageMsg msg, ReturnValueCompletion<InstantiateVolumeOnPrimaryStorageReply> completion);

    /** Download a volume template onto this primary storage. */
    abstract void handle(DownloadVolumeTemplateToPrimaryStorageMsg msg, ReturnValueCompletion<DownloadVolumeTemplateToPrimaryStorageReply> completion);

    /** Delete a volume from this primary storage. */
    abstract void handle(DeleteVolumeOnPrimaryStorageMsg msg, ReturnValueCompletion<DeleteVolumeOnPrimaryStorageReply> completion);

    /** Download a data volume onto this primary storage. */
    abstract void handle(DownloadDataVolumeToPrimaryStorageMsg msg, ReturnValueCompletion<DownloadDataVolumeToPrimaryStorageReply> completion);

    /** Resolve the install path a downloaded data volume should use. */
    abstract void handle(GetInstallPathForDataVolumeDownloadMsg msg, ReturnValueCompletion<GetInstallPathForDataVolumeDownloadReply> completion);

    /** Delete the on-disk bits belonging to a volume. */
    abstract void handle(DeleteVolumeBitsOnPrimaryStorageMsg msg, ReturnValueCompletion<DeleteVolumeBitsOnPrimaryStorageReply> completion);

    /** Delete arbitrary bits (files) on this primary storage. */
    abstract void handle(DeleteBitsOnPrimaryStorageMsg msg, ReturnValueCompletion<DeleteBitsOnPrimaryStorageReply> completion);

    // --- ISO handling -----------------------------------------------------

    /** Download an ISO image onto this primary storage. */
    abstract void handle(DownloadIsoToPrimaryStorageMsg msg, ReturnValueCompletion<DownloadIsoToPrimaryStorageReply> completion);

    /** Remove an ISO image from this primary storage. */
    abstract void handle(DeleteIsoFromPrimaryStorageMsg msg, ReturnValueCompletion<DeleteIsoFromPrimaryStorageReply> completion);

    /**
     * Initialize storage when a host connects; reports the host's physical
     * capacity usage through {@code completion}.
     */
    abstract void handle(InitPrimaryStorageOnHostConnectedMsg msg, ReturnValueCompletion<PhysicalCapacityUsage> completion);

    // --- snapshot operations (host-scoped) ---------------------------------

    /** Take a volume snapshot on the given host. */
    abstract void handle(TakeSnapshotMsg msg, String hostUuid, ReturnValueCompletion<TakeSnapshotReply> completion);

    /** Delete a volume snapshot on the given host. */
    abstract void handle(DeleteSnapshotOnPrimaryStorageMsg msg, String hostUuid, ReturnValueCompletion<DeleteSnapshotOnPrimaryStorageReply> completion);

    /** Revert a volume to one of its snapshots on the given host. */
    abstract void handle(RevertVolumeFromSnapshotOnPrimaryStorageMsg msg, String hostUuid, ReturnValueCompletion<RevertVolumeFromSnapshotOnPrimaryStorageReply> completion);

    /** Re-initialize a root volume from its template on the given host. */
    abstract void handle(ReInitRootVolumeFromTemplateOnPrimaryStorageMsg msg, String hostUuid, ReturnValueCompletion<ReInitRootVolumeFromTemplateOnPrimaryStorageReply> completion);

    /** Back up a volume snapshot from primary storage to backup storage. */
    abstract void handle(BackupVolumeSnapshotFromPrimaryStorageToBackupStorageMsg msg, String hostUuid, ReturnValueCompletion<BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply> completion);

    /** Create a new volume from an existing volume snapshot. */
    abstract void handle(CreateVolumeFromVolumeSnapshotOnPrimaryStorageMsg msg, String hostUuid, ReturnValueCompletion<CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply> completion);

    /** Merge a volume snapshot chain on the given host. */
    abstract void handle(MergeVolumeSnapshotOnPrimaryStorageMsg msg, String hostUuid, ReturnValueCompletion<MergeVolumeSnapshotOnPrimaryStorageReply> completion);

    // --- local-storage specific operations ---------------------------------

    /** Create an empty volume on local storage. */
    abstract void handle(LocalStorageCreateEmptyVolumeMsg msg, ReturnValueCompletion<LocalStorageCreateEmptyVolumeReply> completion);

    /** Directly delete bits on a specific host's local storage. */
    abstract void handle(LocalStorageDirectlyDeleteBitsMsg msg, String hostUuid, ReturnValueCompletion<LocalStorageDirectlyDeleteBitsReply> completion);

    /** Create a temporary volume materialized from a snapshot. */
    abstract void handle(CreateTemporaryVolumeFromSnapshotMsg msg, String hostUuid, ReturnValueCompletion<CreateTemporaryVolumeFromSnapshotReply> completion);

    /** Synchronize a volume's actual size from the given host. */
    abstract void handle(SyncVolumeSizeOnPrimaryStorageMsg msg, String hostUuid, ReturnValueCompletion<SyncVolumeSizeOnPrimaryStorageReply> completion);

    /** Upload bits from a host's local storage to backup storage. */
    abstract void handle(UploadBitsFromLocalStorageToBackupStorageMsg msg, String hostUuid, ReturnValueCompletion<UploadBitsFromLocalStorageToBackupStorageReply> completion);

    /** Resolve the root image UUID of a volume on the given host. */
    abstract void handle(GetVolumeRootImageUuidFromPrimaryStorageMsg msg, String hostUuid, ReturnValueCompletion<GetVolumeRootImageUuidFromPrimaryStorageReply> completion);

    /** Dispatch a message type only a specific hypervisor backend understands. */
    abstract void handleHypervisorSpecificMessage(LocalStorageHypervisorSpecificMessage msg);

    /**
     * Download an image into the host's image cache; the callback receives a
     * String — presumably the cached image's install path, TODO confirm.
     */
    abstract void downloadImageToCache(ImageInventory img, String hostUuid, ReturnValueCompletion<String> completion);

    /** Delete a cached image from a host's local storage. */
    abstract void handle(LocalStorageDeleteImageCacheOnPrimaryStorageMsg msg, String hostUuid, ReturnValueCompletion<DeleteImageCacheOnPrimaryStorageReply> completion);

    /** Resolve the install path for a snapshot about to be created. */
    abstract void handle(AskInstallPathForNewSnapshotMsg msg, ReturnValueCompletion<AskInstallPathForNewSnapshotReply> completion);

    /** Download bits from a KVM host onto this primary storage. */
    abstract void handle(DownloadBitsFromKVMHostToPrimaryStorageMsg msg, ReturnValueCompletion<DownloadBitsFromKVMHostToPrimaryStorageReply> completion);

    /** Cancel an in-progress KVM-host-to-primary-storage download. */
    abstract void handle(CancelDownloadBitsFromKVMHostToPrimaryStorageMsg msg, ReturnValueCompletion<CancelDownloadBitsFromKVMHostToPrimaryStorageReply> completion);

    /** Build the workflow flows that migrate a volume's bits between hosts. */
    abstract List<Flow> createMigrateBitsVolumeFlow(MigrateBitsStruct struct);

    /** Delete the file at {@code path} on the given host. */
    abstract void deleteBits(String path, String hostUuid, Completion completion);

    /** Verify the primary-storage mount path is attached on the host. */
    abstract void checkHostAttachedPSMountPath(String hostUuid, Completion completion);

    /** Initialize the primary-storage mount path on the host. */
    abstract void initializeHostAttachedPSMountPath(String hostUuid, Completion completion);
}
apache-2.0